blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7873de2b4692d3ff9b1e48c09365bc144c04070e | 53ba0b6f172abcade631ae1f52852c400302559e | /python_developer_tools/cv/bases/attentions/SimAM-master/mmdetection/configs/faster_rcnn/faster_rcnn_r101simam_fpn_1x_coco.py | 4f303a3883c2031e6b27de79add6502d76aa9e6a | [
"Apache-2.0"
]
| permissive | sssssshf/python_developer_tools | f97c64ee0aa0a7e9d31d173192805771c83abb7f | 44d2e67a2e2495a12d6b32da12c76cf0010ac7ea | refs/heads/main | 2023-08-19T02:44:53.536200 | 2021-10-13T02:10:19 | 2021-10-13T02:10:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 142 | py | _base_ = './faster_rcnn_r50simam_fpn_1x_coco.py'
model = dict(pretrained='checkpoints/simam-net/resnet101.pth.tar', backbone=dict(depth=101))
| [
"[email protected]"
]
| |
0e4a97c2bc6c7e6c2cfcd660c9c7457f9b9f3029 | 59fcc364f9a07aa7047824c66645ee406ea8aed4 | /tagger.py | ae7402a3284fee0e57f476bcedde92de0efcffb7 | []
| no_license | vaastav/VA102 | b8f1db3802320c9fd1c83788c0d0b027072439bf | 78137946a0325155f7a7cc26b894e79f66c97a00 | refs/heads/master | 2016-08-08T23:47:33.815551 | 2016-03-27T02:22:00 | 2016-03-28T01:36:00 | 47,371,404 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,208 | py | from nltk.tokenize import word_tokenize
from nltk.tag import pos_tag, map_tag
import nltk
import csv
keywordTags = ['ADJ','ADV','NOUN','NUM','VERB']
def isKeywordTag(tag):
    """Return True when *tag* is a universal POS tag we treat as a keyword."""
    return any(tag == keyword for keyword in keywordTags)
def find_keywords(review):
    """Return the words of *review* whose universal POS tag marks them
    as a keyword (ADJ/ADV/NOUN/NUM/VERB, per ``keywordTags``).

    NOTE(review): ``review.decode("utf8")`` implies Python 2 byte-string
    input; on Python 3 ``str`` has no ``.decode`` — confirm target runtime.
    """
    text = word_tokenize(review.decode("utf8"))
    tagged_review = pos_tag(text)
    # Map Penn Treebank tags onto the coarse "universal" tag set.
    simplified_tagged_review = [(word,map_tag('en-ptb','universal',tag)) for word, tag in tagged_review]
    keywords = []
    for word,tag in simplified_tagged_review:
        if isKeywordTag(tag):
            keywords += [word]
    return keywords
def readfile(filename):
    """Read reviews from *filename* (CSV with a ``Cleaned_Review`` column),
    extract keywords per review, and write ``keyworded_items.csv`` with the
    original rows plus an appended ``Keywords`` column.

    NOTE(review): the input file is read twice (once for reviews, once to
    copy rows), and the 'rU'/'rb'/'wb' csv modes are Python 2 conventions —
    confirm target runtime.
    """
    inf = open(filename,'rU')
    filereader = csv.DictReader(inf,delimiter=',')
    reviews = [row["Cleaned_Review"] for row in filereader]
    keywords = []
    for review in reviews:
        # Re-encode to byte strings for the Python 2 csv writer.
        lok = [word.encode("utf8") for word in find_keywords(review)]
        keywords += [lok]
    inf.close()
    with open(filename,'rb') as fin, open('keyworded_items.csv','wb') as fout:
        reader = csv.reader(fin,lineterminator='\n')
        writer = csv.writer(fout,lineterminator='\n')
        # Copy the header row and add the new column name.
        writer.writerow(next(reader) + ["Keywords"])
        # zip pairs each remaining data row with its keyword list.
        for row,val in zip(reader,keywords):
            writer.writerow(row + [' '.join(val)])
def main():
    """Entry point: keyword-tag the cleaned reviews file."""
    readfile("cleaned_items.csv")
if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
94f8efdcedc3b47468b0d18a9b274757ad8ba2c1 | e638e9fda0e672fa9a414515d0c05a24ab55ad38 | /GroupAnagrams.py | 233b7f261dff5493b1a06428b1859c3ffd2a8664 | []
| no_license | zjuzpz/Algorithms | 8d1c7d50429aa5540eb817dc5495a20fc3f11125 | 2df1a58aa9474f2ecec2ee7c45ebf12466181391 | refs/heads/master | 2021-01-21T05:55:48.768728 | 2020-08-04T22:44:08 | 2020-08-04T22:44:08 | 44,586,024 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,128 | py | """
49. Group Anagrams
Given an array of strings, group anagrams together.
For example, given: ["eat", "tea", "tan", "ate", "nat", "bat"],
Return:
[
["ate", "eat","tea"],
["nat","tan"],
["bat"]
]
Note:
For the return value, each inner list's elements must follow the lexicographic order.
All inputs will be in lower-case.
"""
# O(nlogm) m is the max size of groups
# O(n)
class Solution(object):
    def groupAnagrams(self, strs):
        """Group the words of *strs* into lists of anagrams.

        Each group is sorted lexicographically; empty strings are collected
        into a single trailing group.

        :type strs: List[str]
        :rtype: List[List[str]]
        """
        grouped = {}   # sorted-letter signature -> words sharing it
        empties = []   # empty strings handled as their own group
        for word in strs:
            if not word:
                empties.append(word)
            else:
                signature = "".join(sorted(word))
                grouped.setdefault(signature, []).append(word)
        # Insertion order of ``grouped`` preserves first-seen group order.
        result = [sorted(words) for words in grouped.values()]
        if empties:
            result.append(empties)
        return result
if __name__ == "__main__":
    # Quick manual check: one anagram pair plus two empty strings.
    strs = ["ac", "", "", "ca"]
    print(Solution().groupAnagrams(strs))
| [
"[email protected]"
]
| |
37f5c70ea8454936494a75f8f31143cebca074ab | b9e295b2ee933d134d0526445eac9ac022e39dde | /contentcuration/contentcuration/utils/sentry.py | aaa3dc10a3b7863e4b023b7159f5b0e8a9509249 | [
"MIT"
]
| permissive | ivanistheone/studio | b40fb9ab70e8f23e87013985470a8e44df30fe61 | 47605c51a0b806e580013b2b733f5bf68688aa59 | refs/heads/develop | 2021-06-04T01:35:05.589507 | 2020-12-09T00:03:40 | 2020-12-09T00:03:40 | 88,676,886 | 0 | 2 | MIT | 2019-04-10T04:43:10 | 2017-04-18T22:44:48 | Python | UTF-8 | Python | false | false | 210 | py | from django.conf import settings
def report_exception(exception=None):
    """Forward *exception* to Sentry, but only when error reporting is on.

    A no-op unless the Django setting ``SENTRY_ACTIVE`` is truthy; the
    sentry_sdk import stays lazy so inactive deployments never load it.
    """
    if not getattr(settings, "SENTRY_ACTIVE", False):
        return
    from sentry_sdk import capture_exception
    capture_exception(exception)
| [
"[email protected]"
]
| |
12b14afbdcc78dddb3e1859f8bb22e7a1a08fb43 | 2d4af29250dca8c72b74e190e74d92f1467120a0 | /TaobaoSdk/Response/TradeContactGetResponse.py | fc57445cfc9a3544b5dffda3880e500cfa7b950f | []
| no_license | maimiaolmc/TaobaoOpenPythonSDK | 2c671be93c40cf487c0d7d644479ba7e1043004c | d349aa8ed6229ce6d76a09f279a0896a0f8075b3 | refs/heads/master | 2020-04-06T03:52:46.585927 | 2014-06-09T08:58:27 | 2014-06-09T08:58:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,132 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set ts=4 sts=4 sw=4 et:
## @brief 通过交易号获取单笔订单的联系信息
# @author [email protected]
# @date 2013-09-22 16:52:43
# @version: 0.0.0
from datetime import datetime
import os
import sys
import time
_jsonEnode = None
try:
import demjson
_jsonEnode = demjson.encode
except Exception:
try:
import simplejson
except Exception:
try:
import json
except Exception:
raise Exception("Can not import any json library")
else:
_jsonEnode = json.dumps
else:
_jsonEnode = simplejson.dumps
def __getCurrentPath():
return os.path.normpath(os.path.join(os.path.realpath(__file__), os.path.pardir))
__parentPath = os.path.normpath(os.path.join(__getCurrentPath(), os.path.pardir))
if __parentPath not in sys.path:
sys.path.insert(0, __parentPath)
from Domain.TradeContact import TradeContact
## @brief <SPAN style="font-size:16px; font-family:'宋体','Times New Roman',Georgia,Serif;">Response: 通过交易号获取单笔订单的联系信息</SPAN>
# <UL>
# </UL>
class TradeContactGetResponse(object):
    """Response for fetching the contact information of a single trade
    order, looked up by trade number (taobao.trade.contact.get)."""
    def __init__(self, kargs=None):
        super(self.__class__, self).__init__()
        # Avoid the mutable-default-argument pitfall of ``kargs=dict()``.
        if kargs is None:
            kargs = dict()
        # Status information of the request (HTTP status etc.); type: dict
        self.responseStatus = None
        # Raw body of the response; type: str
        self.responseBody = None
        self.code = None
        self.msg = None
        self.sub_code = None
        self.sub_msg = None
        # Contact information of the single order; type: TradeContact (Object)
        self.contact = None
        self.__init(kargs)
    def isSuccess(self):
        """Return True when the API reported neither a top-level nor a
        sub error code."""
        return self.code is None and self.sub_code is None
    def _newInstance(self, name, value):
        """Convert raw payload *value* for property *name* into its typed
        representation (scalar or list, depending on the declared level).

        NOTE(review): ``value.keys()[0]`` below is Python 2 only (dict views
        are not indexable on Python 3) — this file targets Python 2.
        """
        types = self._getPropertyType(name)
        propertyType = types[0]
        isArray = types[1]
        if propertyType == bool:
            if isArray:
                if not value:
                    return []
                return [x for x in value[value.keys()[0]]]
            else:
                return value
        elif propertyType == datetime:
            format = "%Y-%m-%d %H:%M:%S"
            if isArray:
                if not value:
                    return []
                return [datetime.strptime(x, format) for x in value[value.keys()[0]]]
            else:
                return datetime.strptime(value, format)
        elif propertyType == str:
            if isArray:
                if not value:
                    return []
                return [x for x in value[value.keys()[0]]]
            else:
                # Some APIs (e.g. taobao.simba.rpt.adgroupbase.get) deliver a
                # field that the JSON layer already decoded into a structure;
                # re-encode it so the attribute is always a JSON string.
                if not isinstance(value, basestring):
                    return _jsonEnode(value)
                return value
        else:
            if isArray:
                if not value:
                    return []
                return [propertyType(x) for x in value[value.keys()[0]]]
            else:
                return propertyType(value)
    def _getPropertyType(self, name):
        """Return ``(python_type, is_array)`` for the API property *name*."""
        properties = {
            "contact": "TradeContact",
        }
        levels = {
            "contact": "Object",
        }
        nameType = properties[name]
        pythonType = None
        if nameType == "Number":
            pythonType = int
        elif nameType == "String":
            pythonType = str
        elif nameType == 'Boolean':
            pythonType = bool
        elif nameType == "Date":
            pythonType = datetime
        elif nameType == 'Field List':
            # BUG FIX: original read ``pythonType == str`` — a comparison
            # with no effect, leaving pythonType as None for 'Field List'.
            pythonType = str
        elif nameType == 'Price':
            pythonType = float
        elif nameType == 'byte[]':
            pythonType = str
        else:
            # Domain object types are resolved from the Domain.* modules.
            pythonType = getattr(sys.modules["Domain.%s" % nameType], nameType)
        # Is this a single element or a collection of them?
        level = levels[name]
        if "Array" in level:
            return (pythonType, True)
        else:
            return (pythonType, False)
    def __init(self, kargs):
        """Populate the response attributes from the raw keyword payload.

        ``key in dict`` replaces ``dict.has_key(key)`` — identical on
        Python 2 and also valid on Python 3.
        """
        if "contact" in kargs:
            self.contact = self._newInstance("contact", kargs["contact"])
        if "code" in kargs:
            self.code = kargs["code"]
        if "msg" in kargs:
            self.msg = kargs["msg"]
        if "sub_code" in kargs:
            self.sub_code = kargs["sub_code"]
        if "sub_msg" in kargs:
            self.sub_msg = kargs["sub_msg"]
| [
"[email protected]"
]
| |
bc2651b0aa33406897aec6872ca646ab3015a056 | ebe6a5bf993c80c9d3d55e275c1b8a23c456c83b | /abcclassroom/notebook.py | aad12aa764d5faec3f06dfca2f379ae0eb6d50f2 | [
"BSD-3-Clause"
]
| permissive | betatim/grading-workflow-experiments | fd5d2ec7ea23e460d26644c242034ff11c78a59a | b38742547a43f376724fee1e04e540688b1f5019 | refs/heads/master | 2020-03-23T22:15:47.125374 | 2018-12-20T23:04:37 | 2018-12-20T23:04:37 | 142,164,781 | 0 | 0 | null | 2018-07-24T13:47:45 | 2018-07-24T13:47:44 | null | UTF-8 | Python | false | false | 3,333 | py | import ast
import os
import nbformat
import papermill as pm
from nbclean import NotebookCleaner
from .utils import chdir
try:
from IPython.core.inputsplitter import IPythonInputSplitter
except ImportError:
raise ImportError('IPython needs to be installed for notebook grading')
def split_notebook(notebook, student_path, autograder_path):
    """Split a master notebook into student and autograder notebooks"""
    print('Processing', notebook)
    _, nb_name = os.path.split(notebook)
    base_name, extension = os.path.splitext(nb_name)
    # create test files and notebook for the student
    nb = NotebookCleaner(notebook)
    nb.create_tests(tag='private',
                    oktest_path=base_name,
                    base_dir=autograder_path)
    nb.create_tests(tag='public',
                    oktest_path=base_name,
                    base_dir=student_path)
    # Strip the instructor's solution code between the marker comments
    # before saving the student copy.
    text_replace_begin = '### BEGIN SOLUTION'
    text_replace_end = '### END SOLUTION'
    nb.replace_text(text_replace_begin, text_replace_end)
    nb.save(os.path.join(student_path, nb_name))
    # create test files for the autograder
    # NOTE(review): private tests were already written to autograder_path
    # above, so this regenerates them a second time — confirm intended.
    nb = NotebookCleaner(notebook)
    nb.create_tests(tag='private',
                    oktest_path=base_name,
                    base_dir=autograder_path)
    nb.create_tests(tag='public',
                    oktest_path=base_name,
                    base_dir=autograder_path)
def find_check_definition(tree):
    """Walk an AST and check for definitions of a function called `check`
    Return True if one is found, False otherwise.
    """
    return any(
        isinstance(node, ast.FunctionDef) and node.name == 'check'
        for node in ast.walk(tree)
    )
def find_check_assignment(tree):
    """Walk an AST and check for assignments to a variable called `check`

    Handles both plain name targets (``check = ...``) and tuple targets
    (``a, check = ...``); attribute/subscript targets are ignored.
    Return True if one is found, False otherwise.
    """
    for stmt in ast.walk(tree):
        if not isinstance(stmt, ast.Assign):
            continue
        target_names = []
        for target in stmt.targets:
            # BUG FIX: tuple targets are ``ast.Tuple`` nodes, never Python
            # tuples, so the old ``isinstance(target, tuple)`` test was
            # always False and ``target.id`` then raised AttributeError on
            # any unpacking assignment like ``a, b = ...``.
            if isinstance(target, ast.Tuple):
                target_names += [
                    elt.id for elt in target.elts if isinstance(elt, ast.Name)
                ]
            elif isinstance(target, ast.Name):
                target_names.append(target.id)
        if 'check' in target_names:
            return True
    return False
def execute_notebook(nb_path):
    """Execute a notebook under grading conditions.

    Returns the executed ("graded") notebook, or None when the submission
    tampers with the grading machinery by defining or assigning a name
    called ``check``.
    """
    graded_nb_path = os.path.splitext(nb_path)[0] + '-graded.ipynb'
    nb_directory = os.path.split(nb_path)[0]
    # read in input notebook and check the source for shenanigans
    nb = nbformat.read(nb_path, as_version=4)
    source = ""
    for cell in nb.cells:
        if cell.cell_type != "code":
            continue
        # Transform IPython-only syntax (magics, shell escapes) into plain
        # Python so ast.parse below does not choke on it.
        isp = IPythonInputSplitter(line_input_checker=False)
        cell_source = isp.transform_cell(cell.source)
        source += cell_source
    tree = ast.parse(source)
    # no points for you if you try and cheat
    # XXX add a check for people importing a function called `check`
    if find_check_assignment(tree) or find_check_definition(tree):
        return
    # run the notebook
    # Execute relative to the notebook's own directory so its file paths work.
    with chdir(nb_directory):
        pm.execute_notebook(nb_path, graded_nb_path)
    graded_nb = nbformat.read(graded_nb_path, as_version=4)
    return graded_nb
| [
"[email protected]"
]
| |
8c4d5fd485c84b3512ba5be24c00b3af11158b39 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/costmanagement/azure-mgmt-costmanagement/generated_samples/generate_cost_details_report_by_subscription_and_time_period.py | fa959fac431751278f479b18f052ba906ad83b60 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,723 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.costmanagement import CostManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-costmanagement
# USAGE
python generate_cost_details_report_by_subscription_and_time_period.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
    # DefaultAzureCredential reads AZURE_CLIENT_ID / AZURE_TENANT_ID /
    # AZURE_CLIENT_SECRET from the environment (see module docstring).
    client = CostManagementClient(
        credential=DefaultAzureCredential(),
    )
    # Long-running operation: begin_create_operation returns a poller and
    # .result() blocks until the cost details report is generated.
    response = client.generate_cost_details_report.begin_create_operation(
        scope="subscriptions/00000000-0000-0000-0000-000000000000",
        parameters={"metric": "ActualCost", "timePeriod": {"end": "2020-03-15", "start": "2020-03-01"}},
    ).result()
    print(response)
# x-ms-original-file: specification/cost-management/resource-manager/Microsoft.CostManagement/stable/2022-10-01/examples/GenerateCostDetailsReportBySubscriptionAndTimePeriod.json
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
c664c730762bad69858a72e5f797d0714ba2e6da | f52997ac7e1b41f34018c3a0028ced8638072b2b | /src/extended_search/backends/backend.py | aed89d730ad7c0b675eb42d9352b4874e2ed0828 | [
"MIT"
]
| permissive | uktrade/digital-workspace-v2 | 49fae1fca819b625c6f6949fb5ce51b89fbcab96 | 7e328d0d55c9aa73be61f476823a743d96e792d0 | refs/heads/main | 2023-09-03T12:03:47.016608 | 2023-09-01T12:07:55 | 2023-09-01T12:07:55 | 232,302,840 | 6 | 0 | MIT | 2023-09-13T15:50:24 | 2020-01-07T10:41:18 | Python | UTF-8 | Python | false | false | 7,967 | py | from wagtail.search.backends.elasticsearch7 import (
Elasticsearch7SearchBackend,
Elasticsearch7SearchQueryCompiler,
)
from wagtail.search.index import SearchField
from wagtail.search.query import MATCH_NONE, Fuzzy, MatchAll, Phrase, PlainText
from extended_search.backends.query import Nested, OnlyFields
from extended_search.index import RelatedFields
class ExtendedSearchQueryCompiler(Elasticsearch7SearchQueryCompiler):
    """
    Acting as a placeholder for upstream merges to Wagtail in a PR; this class
    doesn't change any behaviour but instead assigns responsibility for
    particular aspects to smaller methods to make it easier to override. In the
    PR maybe worth referencing https://github.com/wagtail/wagtail/issues/5422
    """
    # def __init__(self, *args, **kwargs):
    #     """
    #     This override doesn't do anything, it's just here as a reminder to
    #     modify the underlying class in this way when creating the upstream PR
    #     """
    #     super().__init__(*args, **kwargs)
    #     self.mapping = self.mapping_class(self.queryset.model)
    #     self.remapped_fields = self._remap_fields(self.fields)
    def _remap_fields(self, fields):
        """
        Convert field names into index column names
        """
        if fields is None:
            return None
        remapped_fields = []
        # Only SearchField and RelatedFields entries are remappable here.
        searchable_fields = {
            f.field_name: f
            for f in self.queryset.model.search_fields
            if isinstance(f, SearchField) or isinstance(f, RelatedFields)
        }
        for field_name in fields:
            if field_name in searchable_fields:
                field_name = self.mapping.get_field_column_name(
                    searchable_fields[field_name]
                )
            else:
                # Dotted names ("related.subfield"): remap the parent part
                # and keep the subfield suffix as-is.
                field_name_parts = field_name.split(".")
                if (
                    len(field_name_parts) == 2
                    and field_name_parts[0] in searchable_fields
                ):
                    field_name = self.mapping.get_field_column_name(
                        searchable_fields[field_name_parts[0]]
                    )
                    field_name = f"{field_name}.{field_name_parts[1]}"
            remapped_fields.append(field_name)
        return remapped_fields
    def _join_and_compile_queries(self, query, fields, boost=1.0):
        """
        Handle a generalised situation of one or more queries that need
        compilation and potentially joining as siblings. If more than one field
        then compile a query for each field then combine with disjunction
        max (or operator which takes the max score out of each of the
        field queries)
        """
        if len(fields) == 1:
            return self._compile_query(query, fields[0], boost)
        else:
            # dis_max scores a document by the best-matching single field.
            field_queries = []
            for field in fields:
                field_queries.append(self._compile_query(query, field, boost))
            return {"dis_max": {"queries": field_queries}}
    def get_inner_query(self):
        """
        This is a brittle override of the Elasticsearch7SearchQueryCompiler.
        get_inner_query, acting as a standin for getting these changes merged
        upstream. It exists in order to break out the _join_and_compile_queries
        method
        """
        if self.remapped_fields:
            fields = self.remapped_fields
        else:
            # No explicit fields: search the "_all_text" proxy field.
            fields = [self.mapping.all_field_name]
        if len(fields) == 0:
            # No fields. Return a query that'll match nothing
            return {"bool": {"mustNot": {"match_all": {}}}}
        # Handle MatchAll and PlainText separately as they were supported
        # before "search query classes" was implemented and we'd like to
        # keep the query the same as before
        if isinstance(self.query, MatchAll):
            return {"match_all": {}}
        elif isinstance(self.query, PlainText):
            return self._compile_plaintext_query(self.query, fields)
        elif isinstance(self.query, Phrase):
            return self._compile_phrase_query(self.query, fields)
        elif isinstance(self.query, Fuzzy):
            return self._compile_fuzzy_query(self.query, fields)
        else:
            return self._join_and_compile_queries(self.query, fields)
class OnlyFieldSearchQueryCompiler(ExtendedSearchQueryCompiler):
    """
    Acting as a placeholder for upstream merges to Wagtail in a separate PR to
    the ExtendedSearchQueryCompiler; this exists to support the new OnlyFields
    SearchQuery
    """
    def _compile_query(self, query, field, boost=1.0):
        """
        Override the parent method to handle specifics of the OnlyFields
        SearchQuery, and allow boosting of Fuzzy and Phrase queries
        """
        if not isinstance(query, (Fuzzy, Phrase, OnlyFields, Nested)):
            return super()._compile_query(query, field, boost)
        # Overrides the existing functionality only to be able to pass Boost
        # values to Fuzzy and Phrase types as well as PlainText
        if isinstance(query, Fuzzy):
            return self._compile_fuzzy_query(query, [field], boost)
        elif isinstance(query, Phrase):
            return self._compile_phrase_query(query, [field], boost)
        # Handle Nested fields for RelatedFields on models
        elif isinstance(query, Nested):
            return self._compile_nested_query(query, [field], boost)
        # Handle OnlyFields
        remapped_fields = self._remap_fields(query.fields)
        # Handle RelatedFields passing a list at this point
        if isinstance(field, list) and len(field) == 1:
            field = field[0]
        if field == self.mapping.all_field_name:
            # We are using the "_all_text" field proxy (i.e. the search()
            # method was called without the fields kwarg), but now we want to
            # limit the downstream fields compiled to those explicitly defined
            # in the OnlyFields query
            return self._join_and_compile_queries(
                query.subquery, remapped_fields, boost
            )
        elif field in remapped_fields:
            # Fields were defined explicitly upstream, and we are dealing with
            # one that's in the OnlyFields filter
            return self._compile_query(query.subquery, field, boost)
        else:
            # Exclude this field from any further downstream compilation: it
            # was defined in the search() method but has been excluded from
            # this part of the tree with an OnlyFields filter
            return self._compile_query(MATCH_NONE, field, boost)
    def _compile_fuzzy_query(self, query, fields, boost=1.0):
        """
        Support boosting
        """
        match_query = super()._compile_fuzzy_query(query, fields)
        if boost != 1.0:
            match_query["match"][fields[0]]["boost"] = boost
        return match_query
    def _compile_phrase_query(self, query, fields, boost=1.0):
        """
        Support boosting
        """
        match_query = super()._compile_phrase_query(query, fields)
        if boost != 1.0:
            # Multi-field phrase queries come back as "multi_match"; single
            # field ones as "match_phrase" with a bare string we must wrap.
            if "multi_match" in match_query:
                match_query["multi_match"]["boost"] = boost
            else:
                match_query["match_phrase"][fields[0]] = {
                    "query": match_query["match_phrase"][fields[0]],
                    "boost": boost,
                }
        return match_query
    def _compile_nested_query(self, query, fields, boost=1.0):
        """
        Add OS DSL elements to support Nested fields
        """
        return {
            "nested": {
                "path": query.path,
                "query": self._compile_query(query.subquery, fields, boost),
            }
        }
class CustomSearchBackend(Elasticsearch7SearchBackend):
    # Use the compiler that adds OnlyFields/Nested support and boosting.
    query_compiler_class = OnlyFieldSearchQueryCompiler
# Wagtail discovers the backend via this module-level name.
SearchBackend = CustomSearchBackend
| [
"[email protected]"
]
| |
b6e028147abb5f242c4fbbc9df615e138999ea5a | bba618de189d579c1cc6026a94e0734dc8b89330 | /data/data_cluster.py | 13df72e75b0e45451decf75fcde1cc7b5f5b0011 | []
| no_license | Tetuwo181/KMeans | 9f9ab5b04a6d1d730f8db5a40ab99284e82ec444 | 7fbf62665af79e58df5e8d5bc882ded59f39938a | refs/heads/master | 2020-03-28T17:22:03.748501 | 2018-09-14T12:02:43 | 2018-09-14T12:02:43 | 148,782,168 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,581 | py | import numpy as np
from typing import Union
from typing import Tuple
from typing import Optional
from typing import Callable
from typing import List
Position = Union[np.ndarray, Tuple]


class AbsData(object):
    """Base class for data points: holds an immutable coordinate array."""

    def __init__(self, position: Position):
        """
        :param position: coordinates of the data point
        """
        self.__position = np.array(position)

    @property
    def position(self) -> np.ndarray:
        return self.__position


class Data(AbsData):
    """A data point with a mutable assigned cluster and the ground-truth
    cluster recorded in the original data set."""

    def __init__(self, position: Position, cluster: Optional[int] = None,
                 cluster_real: Optional[int] = None):
        """
        :param position: coordinates of the data point
        :param cluster: cluster the point is currently assigned to (mutable)
        :param cluster_real: cluster labelled in the original data
        """
        self.cluster = cluster
        self.__cluster_real = cluster_real
        # BUG FIX: original called ``super.__init__(position)`` on the
        # ``super`` builtin itself (TypeError at runtime), so AbsData was
        # never initialised and ``.position`` was broken.
        super().__init__(position)

    @property
    def cluster_real(self) -> Optional[int]:
        return self.__cluster_real


class ClusterCentral(AbsData):
    """The central position of a cluster."""

    def __init__(self, position: Position, cluster: int):
        """
        :param position: central coordinates of the cluster
        :param cluster: index of the cluster
        """
        self.__cluster = cluster
        # BUG FIX: same ``super.__init__`` misuse as in Data (see above).
        super().__init__(position)

    @property
    def cluster(self) -> int:
        return self.__cluster


DataInput = Union[AbsData, np.ndarray]
FuncDistance = Callable[[DataInput, DataInput], float]


def convert_data(raw_data: DataInput) -> np.ndarray:
    """Return the raw coordinates: arrays pass through, data objects are
    unwrapped via their ``position`` property."""
    # isinstance (not ``type(...) is``) also accepts ndarray subclasses.
    if isinstance(raw_data, np.ndarray):
        return raw_data
    return raw_data.position


def get_distance(calc_distance: FuncDistance,
                 raw_data1: DataInput,
                 raw_data2: DataInput) -> float:
    """
    Compute the distance between two data points or coordinate arrays.
    :param calc_distance: function computing the distance of two arrays
    :param raw_data1: first data point
    :param raw_data2: second data point
    :return: distance between the two points
    """
    data1 = convert_data(raw_data1)
    data2 = convert_data(raw_data2)
    return calc_distance(data1, data2)


def init_calc_distance(calc_distance: FuncDistance) -> FuncDistance:
    """
    Partially apply the distance function ahead of time.
    :param calc_distance: function computing the distance of two arrays
    :return: function mapping two data points/coordinates to their distance
    """
    return lambda data1, data2: get_distance(calc_distance, data1, data2)
| [
"[email protected]"
]
| |
d0202a7b6d83f2e2b9581177ccc335588b29f1e4 | 7d1fec7f929c0d80707a8f46a5379abecb513ad8 | /crab_horizon_movement.py | a3e02b8fbcc5d67816798a218ddb17d6fbd70beb | []
| no_license | francescofilippini6/Sky_Coverage | 4cacdc8f2963d16c61c315772f311e6cc1f12e63 | 963d0fe18332eb6d0093c36402371b5f74655e82 | refs/heads/main | 2023-03-11T19:25:49.347341 | 2021-02-26T08:40:43 | 2021-02-26T08:40:43 | 326,218,835 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,296 | py | import math as m
import numpy as np
import csv
import matplotlib.pyplot as plt
import pandas as pd
import astropy.coordinates as coord
import astropy.units as u
from astropy.io import ascii
from astropy.coordinates import SkyCoord
from astropy.coordinates import SkyCoord, EarthLocation
from astropy import coordinates as coord
from astropy.coordinates.tests.utils import randomly_sample_sphere
from astropy.time import Time
from astropy import units as u
import numpy as np
from astropy.coordinates import SkyCoord, EarthLocation, AltAz
# NOTE(review): times / lapalma / aa_frame below are computed but never used
# afterwards — likely leftovers from an earlier experiment; confirm.
times = Time.now() + np.linspace(-5, 5, 300)*u.hour
lapalma = EarthLocation.from_geocentric(5327448.9957829, -1718665.73869569, 3051566.90295403, unit='m')
aa_frame = coord.AltAz(obstime=times[:, np.newaxis], location=lapalma)
#aa_coos = coos.transform_to(aa_frame)
# 6-hour observation window from 2010-01-01 20:00, sampled at 10000 steps.
obstime = Time('2010-01-01T20:00') + np.linspace(0, 6, 10000) * u.hour
# Observer site (lon/lat in degrees, height in metres).
location = EarthLocation(lon=-17.89 * u.deg, lat=28.76 * u.deg, height=2200 * u.m)
frame = AltAz(obstime=obstime, location=location)
# Crab nebula ICRS coordinates; transform to horizontal (alt/az) coordinates.
crab = SkyCoord(ra='05h34m31.94s', dec='22d00m52.2s')
crab_altaz = crab.transform_to(frame)
print(len(crab_altaz))
# Plot the alt/az track on an Aitoff projection and save the figure.
plt.subplot(111, projection='aitoff')
plt.grid(True)
plt.scatter(crab_altaz.az.wrap_at('180d').radian, crab_altaz.alt.radian)
plt.savefig('crab_movement.png')
plt.show()
| [
"[email protected]"
]
| |
db85707e1b42d9a5d515def686085645078f4212 | 88be4d5657d19462eb1d74d2d4d98180b423a889 | /examples/rl_algos/ddpg/navigation2d_ddpg.py | cf57cc4a5f8d64c938f96c2f197acc556877cb65 | [
"BSD-3-Clause"
]
| permissive | domingoesteban/robolearn | bc58278fe38894f4ca9ec9e657ee13a479a368b7 | 0d20125425c352b80ef2eeed1c0b11ab6497b11a | refs/heads/master | 2020-04-15T22:38:25.343229 | 2019-01-29T17:01:42 | 2019-01-29T17:01:42 | 165,080,647 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,299 | py | """
Run PyTorch DDPG on Navigation2dGoalCompoEnv.
NOTE: You need PyTorch 0.4
"""
import os
from shutil import copyfile
import numpy as np
import robolearn.torch.utils.pytorch_util as ptu
from robolearn.envs.normalized_box_env import NormalizedBoxEnv
from robolearn.utils.launchers.launcher_util import setup_logger
from robolearn.torch.utils.data_management import SimpleReplayBuffer
from robolearn.envs.simple_envs.navigation2d import Navigation2dGoalCompoEnv
from robolearn.torch.algorithms.rl_algos.ddpg \
import DDPG
from robolearn.torch.models import NNQFunction
from robolearn.torch.policies import TanhMlpPolicy
from robolearn.utils.exploration_strategies import OUStrategy
from robolearn.utils.exploration_strategies import PolicyWrappedWithExplorationStrategy
import argparse
import joblib
np.set_printoptions(suppress=True, precision=4)
# np.seterr(all='raise') # WARNING RAISE ERROR IN NUMPY
PATH_LENGTH = 20 # time steps
PATHS_PER_EPOCH = 5
PATHS_PER_EVAL = 3
BATCH_SIZE = 256
SEED = 110
# NP_THREADS = 6
SUBTASK = None
POLICY = TanhMlpPolicy
EPOCHS = 100
OPTIMIZER = 'adam'
# OPTIMIZER = 'rmsprop'
NORMALIZE_OBS = False
expt_params = dict(
algo_name=DDPG.__name__,
policy_name=POLICY.__name__,
path_length=PATH_LENGTH,
steps_pretrain=max(100, BATCH_SIZE),
algo_params=dict(
# Common RL algorithm params
num_steps_per_epoch=PATHS_PER_EPOCH * PATH_LENGTH,
num_updates_per_train_call=1, # How to many run algorithm train fcn
num_steps_per_eval=PATHS_PER_EVAL * PATH_LENGTH,
min_start_eval=PATHS_PER_EPOCH * PATH_LENGTH, # Min nsteps to start to eval
# EnvSampler params
max_path_length=PATH_LENGTH, # max_path_length
render=False,
finite_horizon_eval=True,
# DDPG params
# Learning rates
optimizer=OPTIMIZER,
policy_lr=3.e-4,
qf_lr=3.e-4,
# Soft target update
use_soft_update=True,
tau=1e-2,
# Weight decays
policy_weight_decay=1.e-5,
qf_weight_decay=1.e-5,
discount=0.99,
reward_scale=1.0e-0,
),
replay_buffer_size=1e3,
net_size=32,
# NN Normalizations
# -----------------
shared_layer_norm=False,
# NN Activations
# --------------
hidden_activation='relu',
# hidden_activation='tanh',
# hidden_activation='elu',
# NN Initialization
# -----------------
# pol_hidden_w_init='xavier_normal',
# pol_output_w_init='xavier_normal',
pol_hidden_w_init='xavier_uniform',
pol_output_w_init='xavier_uniform',
# pol_hidden_w_init='uniform',
# pol_output_w_init='uniform',
# q_hidden_w_init='xavier_normal',
# q_output_w_init='xavier_normal',
q_hidden_w_init='xavier_uniform',
q_output_w_init='xavier_uniform',
# q_hidden_w_init='uniform',
# q_output_w_init='uniform',
# v_hidden_w_init='xavier_normal',
# v_output_w_init='xavier_normal',
v_hidden_w_init='xavier_uniform',
v_output_w_init='xavier_uniform',
# v_hidden_w_init='uniform',
# v_output_w_init='uniform',
)
env_params = dict(
goal_reward=0,
actuation_cost_coeff=5.0e+0,
distance_cost_coeff=1.0e+0,
log_distance_cost_coeff=2.0e+0,
alpha=1e-1,
# Initial Condition
init_position=(4., 4.),
init_sigma=1.00,
# Goal
goal_position=(-2.0, -2.0), # TODO: Make this a script param
goal_threshold=0.10,
# Others
dynamics_sigma=0.1,
# dynamics_sigma=0.0,
# horizon=PATH_LENGTH,
horizon=None,
subtask=SUBTASK,
seed=SEED,
)
def experiment(variant):
    """Build and train a DDPG agent on the 2D navigation environment.

    :param variant: hyper-parameter dict (see ``expt_params`` and
        ``env_params`` defined above); must provide 'seed', 'gpu',
        'env_params', 'load_dir', 'net_size', activation/weight-init
        choices, 'replay_buffer_size', 'steps_pretrain' and 'algo_params'.
    :return: the trained ``DDPG`` algorithm instance.
    """
    # os.environ['OMP_NUM_THREADS'] = str(NP_THREADS)
    # Seed every RNG (numpy and torch via ptu) and the environment itself.
    np.random.seed(variant['seed'])
    ptu.set_gpu_mode(variant['gpu'], gpu_id=0)
    ptu.seed(variant['seed'])
    variant['env_params']['seed'] = variant['seed']
    # Environment wrapped so the action space is normalized; observation
    # normalization is explicitly disabled below.
    env = NormalizedBoxEnv(
        Navigation2dGoalCompoEnv(**variant['env_params']),
        # normalize_obs=True,
        normalize_obs=False,
        online_normalization=False,
        obs_mean=None,
        obs_var=None,
        obs_alpha=0.001,
    )
    obs_dim = env.obs_dim
    action_dim = env.action_dim
    # Optional resume path.
    # NOTE(review): the params file is read from 'log_dir' although this
    # branch is guarded by 'load_dir' -- confirm which directory is meant;
    # resuming is unimplemented anyway (raises just below).
    if variant['load_dir']:
        params_file = os.path.join(variant['log_dir'], 'params.pkl')
        data = joblib.load(params_file)
        start_epoch = data['epoch']
        raise NotImplementedError
    else:
        start_epoch = 0
    net_size = variant['net_size']
    # Critic: Q(s, a) network with three hidden layers of equal width.
    qf = NNQFunction(
        obs_dim=obs_dim,
        action_dim=action_dim,
        hidden_activation=variant['hidden_activation'],
        hidden_sizes=[net_size, net_size, net_size],
        hidden_w_init=variant['q_hidden_w_init'],
        output_w_init=variant['q_output_w_init'],
    )
    # Actor: policy network with a matching architecture.
    policy = POLICY(
        obs_dim=obs_dim,
        action_dim=action_dim,
        hidden_activation=variant['hidden_activation'],
        hidden_sizes=[net_size, net_size, net_size],
        hidden_w_init=variant['pol_hidden_w_init'],
        output_w_init=variant['pol_output_w_init'],
    )
    # Ornstein-Uhlenbeck exploration noise; max_sigma == min_sigma, so the
    # noise scale does not actually decay over decay_period.
    es = OUStrategy(
        action_space=env.action_space,
        mu=0,
        theta=0.15,
        max_sigma=0.3,
        min_sigma=0.3,
        decay_period=100000,
    )
    exploration_policy = PolicyWrappedWithExplorationStrategy(
        exploration_strategy=es,
        policy=policy,
    )
    replay_buffer = SimpleReplayBuffer(
        max_size=variant['replay_buffer_size'],
        obs_dim=obs_dim,
        action_dim=action_dim,
    )
    algorithm = DDPG(
        explo_env=env,
        policy=policy,
        explo_policy=exploration_policy,
        qf=qf,
        replay_buffer=replay_buffer,
        batch_size=BATCH_SIZE,
        eval_env=env,  # same env instance is reused for evaluation
        save_environment=False,
        **variant['algo_params']
    )
    if ptu.gpu_enabled():
        algorithm.cuda(ptu.device)
    # Collect a few environment steps to seed the replay buffer, then train.
    algorithm.pretrain(variant['steps_pretrain'])
    algorithm.train(start_epoch=start_epoch)
    return algorithm
def parse_args():
    """Parse the command-line options of this experiment script."""
    cli = argparse.ArgumentParser()
    # Experiment options
    cli.add_argument('--net_size', type=int, default=None)
    cli.add_argument('--seed', type=int, default=SEED)
    cli.add_argument('--expt_name', type=str, default=None)
    cli.add_argument('--subtask', type=int, default=-1)
    # Logging options
    cli.add_argument('--snap_mode', type=str, default='gap_and_last')
    cli.add_argument('--snap_gap', type=int, default=25)
    cli.add_argument('--log_dir', type=str, default=None)
    cli.add_argument('--load_dir', type=str, default=None)
    # GPU toggle
    cli.add_argument('--gpu', action='store_true')
    # Rendering toggles
    cli.add_argument('--render', action='store_true')
    cli.add_argument('--render_q', action='store_true')
    return cli.parse_args()
if __name__ == "__main__":
args = parse_args()
# Experiment name
if args.expt_name is None:
expt_name = 'navigation2d'
else:
expt_name = args.expt_name
# Default experiment parameters
expt_variant = expt_params
# Default environment parameters
expt_variant['env_params'] = env_params
# Custom parameters
if args.subtask >= 0:
expt_variant['env_params']['subtask'] = args.subtask
expt_variant['log_dir'] = args.log_dir
expt_variant['load_dir'] = args.load_dir
# Net size
if args.net_size is not None:
expt_variant['net_size'] = args.net_size
expt_variant['gpu'] = args.gpu
expt_variant['seed'] = args.seed
expt_variant['render_q'] = args.render_q
# Algo params
expt_variant['algo_params']['render'] = args.render
log_dir = setup_logger(expt_name,
variant=expt_variant,
snapshot_mode=args.snap_mode,
snapshot_gap=args.snap_gap,
log_dir=args.log_dir)
dir_filename = os.path.realpath(__file__)
filename = os.path.split(dir_filename)[1]
copyfile(dir_filename, os.path.join(log_dir, filename))
algo = experiment(expt_variant)
# input('Press a key to close the script...')
| [
"[email protected]"
]
| |
7dc858864385ecf5b62f08496f7d9b930736c366 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/CTRON-ISDN-CONFIGURATION-MIB.py | 7a10208ea9156417294792de483c0c6eef1ba9cf | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 32,939 | py | #
# PySNMP MIB module CTRON-ISDN-CONFIGURATION-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CTRON-ISDN-CONFIGURATION-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:30:18 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Gauge32, IpAddress, iso, Counter32, Unsigned32, enterprises, TimeTicks, ModuleIdentity, ObjectIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, NotificationType, Bits, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "IpAddress", "iso", "Counter32", "Unsigned32", "enterprises", "TimeTicks", "ModuleIdentity", "ObjectIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "NotificationType", "Bits", "Integer32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
class DisplayString(OctetString):
    """Locally declared alias of OctetString.

    NOTE(review): this shadows the DisplayString imported from SNMPv2-TC a
    few lines above; it appears to be pysmi-generated output for a MIB that
    re-declares the type -- confirm before hand-editing this generated file.
    """
    pass
cabletron = MibIdentifier((1, 3, 6, 1, 4, 1, 52))
mibs = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4))
ctron = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1))
ctDataLink = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2))
ctronWan = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7))
ctISDNconfigMib = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4))
ctISDNcontrol = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1))
isdnDchTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1), )
if mibBuilder.loadTexts: isdnDchTable.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchTable.setDescription('A list of D channnel interfaces entries. The list consists of a single entry at this time.')
isdnDchEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1), ).setIndexNames((0, "CTRON-ISDN-CONFIGURATION-MIB", "isdnDchIndex"))
if mibBuilder.loadTexts: isdnDchEntry.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchEntry.setDescription('A D channnel interface entry containing objects relating to the particular D channel.')
isdnDchIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isdnDchIndex.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchIndex.setDescription('A unique index for this D Channel of this ISDN-Controller.')
isdnDchRateAccess = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("bri1", 2), ("pri1", 3), ("pri2", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isdnDchRateAccess.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchRateAccess.setDescription('The Rate Access of this ISDN interface.')
isdnDchAllowedCh = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isdnDchAllowedCh.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchAllowedCh.setDescription('A bit string with bit 1 signifiying time slot 1. A set bit means that a B channel may be allocated on that time slot. A reset bit means the channel is reserved or otherwise out of service.')
isdnDchChInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: isdnDchChInUse.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchChInUse.setDescription('A bit string with bit 1 signifying time slot 1. A set bit means that a B channel has been allocated on that time slot. A reset bit means the channel is available, reserved, or otherwise out of service.')
isdnDchSupportedSwitches = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 5, 10, 16, 17))).clone(namedValues=NamedValues(("bri5ESS", 2), ("bridms100", 5), ("brini1", 10), ("pri4ESS", 16), ("pri5ESS", 17)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isdnDchSupportedSwitches.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchSupportedSwitches.setDescription('A bit string with each bit position signifying support of a specific switch type as indicated by the list. A set bit means that that switch type is supported.')
isdnDchSwitchType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 5, 10, 16, 17))).clone(namedValues=NamedValues(("bri5ESS", 2), ("bridms100", 5), ("brini1", 10), ("pri4ESS", 16), ("pri5ESS", 17))).clone('brini1')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: isdnDchSwitchType.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchSwitchType.setDescription('Switch type selector as indicated by the list.')
isdnDchSPID1 = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 7), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: isdnDchSPID1.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchSPID1.setDescription('The Service profile identifier for BRI channel 1.')
isdnDchSPID2 = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 8), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: isdnDchSPID2.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchSPID2.setDescription('The Service profile identifier for BRI channel 2.')
isdnDchDirNum1 = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 9), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: isdnDchDirNum1.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchDirNum1.setDescription('The local directory number for BRI channel 1.')
isdnDchDirNum2 = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 10), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: isdnDchDirNum2.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchDirNum2.setDescription('The local directory number for BRI channel 2.')
isdnDchOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: isdnDchOperStatus.setStatus('mandatory')
if mibBuilder.loadTexts: isdnDchOperStatus.setDescription('The operational status of the signalling channel.')
dialCtlNbrCfgTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 2), )
if mibBuilder.loadTexts: dialCtlNbrCfgTable.setStatus('mandatory')
if mibBuilder.loadTexts: dialCtlNbrCfgTable.setDescription('The list of neighbors from which the managed device will accept calls or to which it will place them.')
dialCtlNbrCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 2, 1), ).setIndexNames((0, "CTRON-ISDN-CONFIGURATION-MIB", "dialCtlNbrCfgId"), (0, "CTRON-ISDN-CONFIGURATION-MIB", "dialCtlNbrCfgIndex"))
if mibBuilder.loadTexts: dialCtlNbrCfgEntry.setStatus('mandatory')
if mibBuilder.loadTexts: dialCtlNbrCfgEntry.setDescription('A single Neighbor. This entry is effectively permanent, and contains address information describing the neighbor. The value of dialCtlNbrCfgOriginateAddress must be specified before a new row in this table can become active(1). Any writeable parameters in an existing entry can be modified while the entry is active. The modification will take effect when the neighbor in question will be called the next time.')
dialCtlNbrCfgId = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dialCtlNbrCfgId.setStatus('mandatory')
if mibBuilder.loadTexts: dialCtlNbrCfgId.setDescription('This object defines a single neighbor. There may be several entries in this table for one neighbor, defining different ways of reaching this neighbor. Thus, there may be several entries in this table with the same value of dialCtlNbrCfgId. Multiple entries for one neighbor may be used to support multilink as well as backup lines. A single neighbor will be identified by a unique value of this object. Several entries for one neighbor MUST have the same value of dialCtlNbrCfgId and dialCtlNbrCfgIfIndex but still different ifEntries and thus different values of dialCtlNbrCfgIndex.')
dialCtlNbrCfgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dialCtlNbrCfgIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dialCtlNbrCfgIndex.setDescription('The index value which uniquely identifies an entry in this table.')
dialCtlNbrCfgIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dialCtlNbrCfgIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: dialCtlNbrCfgIfIndex.setDescription('The ifIndex value of the interface associated with this neighbor.')
dialCtlNbrCfgOriginateAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 2, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialCtlNbrCfgOriginateAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dialCtlNbrCfgOriginateAddress.setDescription("Call Address at which the neighbor will be called. Think of this as the set of characters following 'ATDT ' or the 'phone number' included in a D channel call request. The structure of this information will be switch type specific.")
dialCtlNbrCfgAnswerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 2, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialCtlNbrCfgAnswerAddress.setStatus('mandatory')
if mibBuilder.loadTexts: dialCtlNbrCfgAnswerAddress.setDescription('Calling Party Number information element, as for example passed in an ISDN SETUP message by a PBX or switch, for incoming calls. This address can be used to identify the neighbor. If this address is either unknown or identical to dialCtlNbrCfgOriginateAddress, this object will be a zero length string.')
rmtProfileTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3), )
if mibBuilder.loadTexts: rmtProfileTable.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileTable.setDescription('The list of neighbors from which this device will accept calls or to which it will place them.')
rmtProfileEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1), ).setIndexNames((0, "CTRON-ISDN-CONFIGURATION-MIB", "rmtProfileEntryIndex"))
if mibBuilder.loadTexts: rmtProfileEntry.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntry.setDescription('A single Neighbor. This entry is effectively permanent, and contains information describing the neighbor.')
rmtProfileEntryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtProfileEntryIndex.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryIndex.setDescription('The index value which uniquely identifies an entry in this table.')
rmtProfileEntryName = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 2), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryName.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryName.setDescription('ASCII name of the neighbor.')
rmtProfileEntryMakerName = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 3), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMakerName.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMakerName.setDescription('ASCII name of the manufacturer of the neighbor. In other words, it is a name by which to uniquely identify the remote access device to which the profile belongs.')
rmtProfileEntryAction = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("idle", 1), ("connect", 2), ("hangup", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryAction.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryAction.setDescription('Desired action for the neighbor interface')
rmtProfileEntryState = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("idle", 1), ("calling", 2), ("ringing", 3), ("connected", 4), ("answering", 5), ("answered", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtProfileEntryState.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryState.setDescription('Current state of the neighbor interface')
rmtProfileEntryMaxNeighbor = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 6), Integer32().clone(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtProfileEntryMaxNeighbor.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMaxNeighbor.setDescription('The maximum allowable dialCtlNbrCfgIndex. It is the number of instances of the profile.')
rmtProfileEntryBchInUse = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtProfileEntryBchInUse.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryBchInUse.setDescription('A bit string with bit 1 signifiying B channel 1. A set bit means that this channel was assigned for current or last call.')
rmtProfileEntryLinkHead = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtProfileEntryLinkHead.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryLinkHead.setDescription('A zero value signifies it is a primary profile. Otherwise, it is an instance profile and the value identifies the primary profile from which it was spawned.')
rmtProfileEntryNextLink = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtProfileEntryNextLink.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryNextLink.setDescription('A non-zero value identifies an instance profile. Whereas, a zero value either means it is a primary profile or the last instance of a primary profile.')
rmtProfileEntryMpCapable = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpCapable.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpCapable.setDescription('MP option selector. Set to a value of one when MP support is desired otherwise set to a value of two. The default setting is disabled. When enabled the attempt is made to negotiate MP support. Both parties must support MP to be able to successfully negotiate MP.')
rmtProfileEntryMpLineUtilization = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)).clone(50)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpLineUtilization.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpLineUtilization.setDescription('Used to set the Line Utilization Threshold (LUT) % to compare against the linear weighted percentage to determine when more/less bandwidth is to be added/removed. Linear weighting is computed (averaged) over the period of time specified by the rmtProfileEntryMpHistoryTime object. Additional bandwidth is added whenever the linear weighted percentage exceeds the LUT % for a consecutive number of average line utilization reading(s) (computation(s)) as specified by the rmtProfileEntryMpMoreBWSamples object. Conversely, additional possible previously added Bandwidth is removed whenever the linear weighted percentage falls below the LUT % value for a consecutive number of average line utilization reading(s) (computation(s)) as specified by the rmtProfileEntryMpLessBWSamples object.')
rmtProfileEntryMpHistoryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpHistoryTime.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpHistoryTime.setDescription('Used to set the history time value in seconds for the number of line utilization reading(s)/sample(s) desired to compute the average line utilization. It specifies the window size over which to to compute the average line utilization.')
rmtProfileEntryMpMoreBWSamples = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpMoreBWSamples.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpMoreBWSamples.setDescription('Used to set the number of consecutive line utilization average computations that must exceed the LUT % value as specified by the rmtProfileEntryMpLineUtilization object before allowing possible more bandwidth to be added.')
rmtProfileEntryMpLessBWSamples = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpLessBWSamples.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpLessBWSamples.setDescription('Used to set the number of consecutive line utilization average computations that must fall below the LUT % value as specified by the rmtProfileEntryMpLineUtilization object before removing possible previously added bandwidth.')
rmtProfileEntryMpMaxCallsAllowed = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpMaxCallsAllowed.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpMaxCallsAllowed.setDescription('Used to set the maxium number of channels an ISDN MP capable call is allowed.')
rmtProfileEntryMpCallsToAdd = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpCallsToAdd.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpCallsToAdd.setDescription('Used to set the number of additional channel(s) (call(s)) to increment by whenever the need for more bandwidth is determined.')
rmtProfileEntryMpCallsToRemove = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpCallsToRemove.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpCallsToRemove.setDescription('Used to set the number of channel(s) (call(s)) to decrement by whenever the need for possible previously added additional bandwidth is determined to no longer be needed/desired.')
rmtProfileEntryMpAvgPktSize = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1500)).clone(260)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpAvgPktSize.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpAvgPktSize.setDescription('Used to set the average packet size by which to determine when its best to split a packet. This is an attempt to minimize the amount of buffering necessary at the remote device to maintain packet sequentiality.')
rmtProfileEntryMpRmtBwCtrl = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 1, 3, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtProfileEntryMpRmtBwCtrl.setStatus('mandatory')
if mibBuilder.loadTexts: rmtProfileEntryMpRmtBwCtrl.setDescription('MP remote bandwidth control selector. Set to a one when bandwidth changes are permitted by either side ie by both parties otherwise set to a value of two. The default setting is disabled. That is to say, only the caller is permitted to change (control) the bandwidth.')
callHistory = MibIdentifier((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2))
callHistoryTableMaxLength = MibScalar((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 1), Integer32().clone(50)).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryTableMaxLength.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryTableMaxLength.setDescription('The upper limit on the number of entries that the callHistoryTable may contain. When this table is full, the oldest entry will be deleted and the new one will be created.')
callHistoryTable = MibTable((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2), )
if mibBuilder.loadTexts: callHistoryTable.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryTable.setDescription('A table containing information about specific calls to a specific destination.')
callHistoryEntry = MibTableRow((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1), ).setIndexNames((0, "CTRON-ISDN-CONFIGURATION-MIB", "callHistoryIndex"))
if mibBuilder.loadTexts: callHistoryEntry.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryEntry.setDescription('The information regarding a single Connection.')
callHistorySetupTime = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistorySetupTime.setStatus('mandatory')
if mibBuilder.loadTexts: callHistorySetupTime.setDescription('The value of system up time when the call associated to this entry was started. This will be useful for an NMS to retrieve all calls after a specific time. Also, this object can be useful in finding large delays between the time the call was started and the time the call was connected. For ISDN media, this will be the time when the setup message was received from or sent to the network.')
callHistoryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryIndex.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryIndex.setDescription('Index variable to access the CallHistoryEntry objects of the callHistoryTable.')
callHistoryPeerAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryPeerAddress.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryPeerAddress.setDescription('The number this call is connected to. If the number is not available, then it will have a length of zero.')
callHistoryNeighborId = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryNeighborId.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryNeighborId.setDescription('This is the Id value of the neighbor table entry to which this call was made. If a neighbor table entry for this call does not exist, the value of this object will be zero.')
callHistoryLogicalIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryLogicalIfIndex.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryLogicalIfIndex.setDescription('This is the ifIndex value of the logical interface through which this call was made.')
callHistoryDisconnectCause = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 6, 16, 17, 18, 21, 22, 28, 31, 34, 38, 41, 42, 43, 44, 52, 54, 58, 63, 65, 66, 70, 79, 81, 82, 85, 88, 90, 91, 95, 96, 97, 98, 99, 100, 111, 133, 134, 135, 136, 138))).clone(namedValues=NamedValues(("unassignedNumber", 1), ("noRouteToDestination", 2), ("channelUnacceptable", 6), ("normalCallClearing", 16), ("userBusy", 17), ("noUserResponding", 18), ("callRejected", 21), ("numberChangedAddress", 22), ("invalidNumberFormat", 28), ("normalUnspecified", 31), ("noChannelAvailable", 34), ("networkOutOfOrder", 38), ("temporaryFailure", 41), ("switchingEquipmentCongestion", 42), ("userInfoDiscarded", 43), ("requestedChannelNotAvailable", 44), ("outgoingCallsBarred", 52), ("incomingCallsBarred", 54), ("bearerCapabilityNotPresentlyAvail", 58), ("serviceNotAvailable", 63), ("bearerServiceNotImplemented", 65), ("channelTypeNotImplemented", 66), ("onlyRestrictedChannelAvailable", 70), ("serviceOrOptionNotImplemeted", 79), ("invalidCallReferenceValue", 81), ("identifiedChannelDoesNotExist", 82), ("invalidDigitValueForAddress", 85), ("incompatibleDestination", 88), ("destinationAddressMissing", 90), ("transitNetworkDoesNotExist", 91), ("invalidMessageSpecified", 95), ("mandatoryIEmissing", 96), ("messageTypeNonexistentOrNotImplemented", 97), ("messageNotCompatibleWithCallState", 98), ("iEnotImplemented", 99), ("invalidIEcontents", 100), ("protocolError", 111), ("callAlreadyActive", 133), ("lineDisabled", 134), ("badParameter", 135), ("timeoutOccured", 136), ("noCallActive", 138)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryDisconnectCause.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryDisconnectCause.setDescription('The encoded network cause value associated with this call. The value of this object will depend on the interface type as well as on the protocol and protocol version being used on this interface. The more common cause values are indicated in the list.')
callHistoryConnectTime = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryConnectTime.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryConnectTime.setDescription('The value of system up time when the call was connected.')
callHistoryDisconnectTime = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryDisconnectTime.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryDisconnectTime.setDescription('The value of system up time when the call was disconnected.')
callHistoryCallOrigin = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("originate", 1), ("answer", 2), ("callback", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryCallOrigin.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryCallOrigin.setDescription('The call origin.')
callHistoryInfoType = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("other", 1), ("speech", 2), ("unrestrictedDigital", 3), ("unrestrictedDigital56", 4), ("restrictedDigital", 5), ("audio31", 6), ("audio7", 7), ("video", 8), ("packetSwitched", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryInfoType.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryInfoType.setDescription('The information type for this call.')
callHistoryTransmitPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryTransmitPackets.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryTransmitPackets.setDescription('The number of packets which were transmitted while this call was active.')
callHistoryTransmitBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryTransmitBytes.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryTransmitBytes.setDescription('The number of bytes which were transmitted while this call was active.')
callHistoryReceivePackets = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryReceivePackets.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryReceivePackets.setDescription('The number of packets which were received while this call was active.')
callHistoryReceiveBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 52, 4, 1, 2, 7, 4, 2, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: callHistoryReceiveBytes.setStatus('mandatory')
if mibBuilder.loadTexts: callHistoryReceiveBytes.setDescription('The number of bytes which were received while this call was active.')
mibBuilder.exportSymbols("CTRON-ISDN-CONFIGURATION-MIB", callHistoryDisconnectCause=callHistoryDisconnectCause, callHistoryTableMaxLength=callHistoryTableMaxLength, isdnDchSupportedSwitches=isdnDchSupportedSwitches, rmtProfileEntryMpCapable=rmtProfileEntryMpCapable, rmtProfileEntryNextLink=rmtProfileEntryNextLink, callHistoryEntry=callHistoryEntry, cabletron=cabletron, isdnDchEntry=isdnDchEntry, callHistoryReceivePackets=callHistoryReceivePackets, isdnDchDirNum1=isdnDchDirNum1, callHistoryCallOrigin=callHistoryCallOrigin, isdnDchDirNum2=isdnDchDirNum2, ctDataLink=ctDataLink, DisplayString=DisplayString, rmtProfileEntryMpLessBWSamples=rmtProfileEntryMpLessBWSamples, dialCtlNbrCfgOriginateAddress=dialCtlNbrCfgOriginateAddress, isdnDchSPID1=isdnDchSPID1, isdnDchOperStatus=isdnDchOperStatus, callHistoryTable=callHistoryTable, ctron=ctron, isdnDchChInUse=isdnDchChInUse, rmtProfileEntryIndex=rmtProfileEntryIndex, ctISDNcontrol=ctISDNcontrol, dialCtlNbrCfgIfIndex=dialCtlNbrCfgIfIndex, rmtProfileEntryLinkHead=rmtProfileEntryLinkHead, rmtProfileEntryMpRmtBwCtrl=rmtProfileEntryMpRmtBwCtrl, callHistorySetupTime=callHistorySetupTime, dialCtlNbrCfgTable=dialCtlNbrCfgTable, rmtProfileEntryMpMoreBWSamples=rmtProfileEntryMpMoreBWSamples, rmtProfileTable=rmtProfileTable, rmtProfileEntryMpMaxCallsAllowed=rmtProfileEntryMpMaxCallsAllowed, rmtProfileEntryMpAvgPktSize=rmtProfileEntryMpAvgPktSize, rmtProfileEntryAction=rmtProfileEntryAction, rmtProfileEntryMpHistoryTime=rmtProfileEntryMpHistoryTime, dialCtlNbrCfgIndex=dialCtlNbrCfgIndex, callHistoryPeerAddress=callHistoryPeerAddress, callHistoryTransmitBytes=callHistoryTransmitBytes, rmtProfileEntryBchInUse=rmtProfileEntryBchInUse, rmtProfileEntryName=rmtProfileEntryName, isdnDchIndex=isdnDchIndex, rmtProfileEntryMpLineUtilization=rmtProfileEntryMpLineUtilization, mibs=mibs, callHistoryLogicalIfIndex=callHistoryLogicalIfIndex, callHistory=callHistory, rmtProfileEntryMaxNeighbor=rmtProfileEntryMaxNeighbor, 
callHistoryInfoType=callHistoryInfoType, callHistoryConnectTime=callHistoryConnectTime, rmtProfileEntryMakerName=rmtProfileEntryMakerName, rmtProfileEntryState=rmtProfileEntryState, dialCtlNbrCfgId=dialCtlNbrCfgId, callHistoryNeighborId=callHistoryNeighborId, isdnDchRateAccess=isdnDchRateAccess, callHistoryReceiveBytes=callHistoryReceiveBytes, callHistoryIndex=callHistoryIndex, rmtProfileEntryMpCallsToRemove=rmtProfileEntryMpCallsToRemove, rmtProfileEntry=rmtProfileEntry, isdnDchSwitchType=isdnDchSwitchType, callHistoryTransmitPackets=callHistoryTransmitPackets, ctISDNconfigMib=ctISDNconfigMib, dialCtlNbrCfgEntry=dialCtlNbrCfgEntry, ctronWan=ctronWan, isdnDchSPID2=isdnDchSPID2, rmtProfileEntryMpCallsToAdd=rmtProfileEntryMpCallsToAdd, isdnDchTable=isdnDchTable, isdnDchAllowedCh=isdnDchAllowedCh, dialCtlNbrCfgAnswerAddress=dialCtlNbrCfgAnswerAddress, callHistoryDisconnectTime=callHistoryDisconnectTime)
| [
"[email protected]"
]
| |
6a342b5c99bf52551cdf9a41ee3935805787d9f3 | c1a9da6e041c406edaf419338b46d497071a21f2 | /todo/migrations/0003_alter_todo_is_completed.py | 58f741a4001e33e16bf101448afed67c4baa85fc | []
| no_license | alex1the1great/learn-todo | a0016b89cebb8c35aaac2e9872be7cee33f99bb8 | e2faa1edaac7d450999c7aacb0f335bf40665e89 | refs/heads/master | 2023-04-20T21:47:23.012887 | 2021-05-05T13:08:51 | 2021-05-05T13:08:51 | 364,278,364 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | # Generated by Django 3.2.1 on 2021-05-05 06:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('todo', '0002_alter_todo_is_completed'),
]
operations = [
migrations.AlterField(
model_name='todo',
name='is_completed',
field=models.BooleanField(default=False),
),
]
| [
"[email protected]"
]
| |
503af0c00b4c87db9c91146e7317d4a5a7510834 | 32c56293475f49c6dd1b0f1334756b5ad8763da9 | /google-cloud-sdk/lib/googlecloudsdk/third_party/apis/file/v1/file_v1_messages.py | 8e519351b2083ba1185d933a75c06f86cb5cca03 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
]
| permissive | bopopescu/socialliteapp | b9041f17f8724ee86f2ecc6e2e45b8ff6a44b494 | 85bb264e273568b5a0408f733b403c56373e2508 | refs/heads/master | 2022-11-20T03:01:47.654498 | 2020-02-01T20:29:43 | 2020-02-01T20:29:43 | 282,403,750 | 0 | 0 | MIT | 2020-07-25T08:31:59 | 2020-07-25T08:31:59 | null | UTF-8 | Python | false | false | 48,735 | py | """Generated message classes for file version v1.
The Cloud Filestore API is used for creating and managing cloud file servers.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'file'
class CancelOperationRequest(_messages.Message):
r"""The request message for Operations.CancelOperation."""
class Empty(_messages.Message):
r"""A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to use it as the request
or the response type of an API method. For instance: service Foo {
rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
JSON representation for `Empty` is empty JSON object `{}`.
"""
class FileProjectsLocationsGetRequest(_messages.Message):
r"""A FileProjectsLocationsGetRequest object.
Fields:
name: Resource name for the location.
"""
name = _messages.StringField(1, required=True)
class FileProjectsLocationsInstancesCreateRequest(_messages.Message):
r"""A FileProjectsLocationsInstancesCreateRequest object.
Fields:
instance: A Instance resource to be passed as the request body.
instanceId: The name of the instance to create. The name must be unique
for the specified project and location.
parent: The instance's project and location, in the format
projects/{project_id}/locations/{location}. In Cloud Filestore,
locations map to GCP zones, for example **us-west1-b**.
"""
instance = _messages.MessageField('Instance', 1)
instanceId = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class FileProjectsLocationsInstancesDeleteRequest(_messages.Message):
r"""A FileProjectsLocationsInstancesDeleteRequest object.
Fields:
name: The instance resource name, in the format
projects/{project_id}/locations/{location}/instances/{instance_id}
"""
name = _messages.StringField(1, required=True)
class FileProjectsLocationsInstancesGetRequest(_messages.Message):
r"""A FileProjectsLocationsInstancesGetRequest object.
Fields:
name: The instance resource name, in the format
projects/{project_id}/locations/{location}/instances/{instance_id}.
"""
name = _messages.StringField(1, required=True)
class FileProjectsLocationsInstancesListRequest(_messages.Message):
r"""A FileProjectsLocationsInstancesListRequest object.
Fields:
filter: List filter.
orderBy: Sort results. Supported values are "name", "name desc" or ""
(unsorted).
pageSize: The maximum number of items to return.
pageToken: The next_page_token value to use if there are additional
results to retrieve for this list request.
parent: The project and location for which to retrieve instance
information, in the format projects/{project_id}/locations/{location}.
In Cloud Filestore, locations map to GCP zones, for example **us-
west1-b**. To retrieve instance information for all locations, use "-"
for the {location} value.
"""
filter = _messages.StringField(1)
orderBy = _messages.StringField(2)
pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(4)
parent = _messages.StringField(5, required=True)
class FileProjectsLocationsInstancesPatchRequest(_messages.Message):
r"""A FileProjectsLocationsInstancesPatchRequest object.
Fields:
instance: A Instance resource to be passed as the request body.
name: Output only. The resource name of the instance, in the format
projects/{project_id}/locations/{location_id}/instances/{instance_id}.
updateMask: Mask of fields to update. At least one path must be supplied
in this field. The elements of the repeated paths field may only
include these fields: * "description" * "file_shares" * "labels"
"""
instance = _messages.MessageField('Instance', 1)
name = _messages.StringField(2, required=True)
updateMask = _messages.StringField(3)
class FileProjectsLocationsListRequest(_messages.Message):
r"""A FileProjectsLocationsListRequest object.
Fields:
filter: The standard list filter.
includeUnrevealedLocations: If true, the returned list will include
locations which are not yet revealed.
name: The resource that owns the locations collection, if applicable.
pageSize: The standard list page size.
pageToken: The standard list page token.
"""
filter = _messages.StringField(1)
includeUnrevealedLocations = _messages.BooleanField(2)
name = _messages.StringField(3, required=True)
pageSize = _messages.IntegerField(4, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(5)
class FileProjectsLocationsOperationsCancelRequest(_messages.Message):
r"""A FileProjectsLocationsOperationsCancelRequest object.
Fields:
cancelOperationRequest: A CancelOperationRequest resource to be passed as
the request body.
name: The name of the operation resource to be cancelled.
"""
cancelOperationRequest = _messages.MessageField('CancelOperationRequest', 1)
name = _messages.StringField(2, required=True)
class FileProjectsLocationsOperationsDeleteRequest(_messages.Message):
r"""A FileProjectsLocationsOperationsDeleteRequest object.
Fields:
name: The name of the operation resource to be deleted.
"""
name = _messages.StringField(1, required=True)
class FileProjectsLocationsOperationsGetRequest(_messages.Message):
r"""A FileProjectsLocationsOperationsGetRequest object.
Fields:
name: The name of the operation resource.
"""
name = _messages.StringField(1, required=True)
class FileProjectsLocationsOperationsListRequest(_messages.Message):
r"""A FileProjectsLocationsOperationsListRequest object.
Fields:
filter: The standard list filter.
name: The name of the operation's parent resource.
pageSize: The standard list page size.
pageToken: The standard list page token.
"""
filter = _messages.StringField(1)
name = _messages.StringField(2, required=True)
pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(4)
class FileShareConfig(_messages.Message):
r"""File share configuration for the instance.
Fields:
capacityGb: File share capacity in gigabytes (GB). Cloud Filestore defines
1 GB as 1024^3 bytes.
name: The name of the file share (must be 16 characters or less).
"""
capacityGb = _messages.IntegerField(1)
name = _messages.StringField(2)
class GoogleCloudSaasacceleratorManagementProvidersV1Instance(_messages.Message):
r"""Instance represents the interface for SLM services to actuate the state
of control plane resources. Example Instance in JSON, where consumer-
project=snapchat, producer-project=cloud-sql: ```json Instance: {
"name": "projects/snapchat/locations/us-east1/instances/prod-instance",
"create_time": { "seconds": 1526406431, }, "labels": { "env":
"prod", "foo": "bar" }, "state": READY, "software_versions": {
"software_update": "cloud-sql-09-28-2018", },
"maintenance_policy_names": { "UpdatePolicy":
"projects/snapchat/locations/us-east1/maintenancePolicies/prod-update-
policy", } "rollout_metadata": { "projects/cloud-
sql/locations/global/rolloutTypes/software_update": { "release":
"projects/cloud-sql/locations/global/releases/cloud-sql-09-28-2018",
"rollout": "projects/cloud-sql/locations/us-east1/rollouts/cloud-
sql-09-28-2018-canary", }, "projects/cloud-
sql/locations/global/rolloutTypes/instance_restart": { "release":
"projects/cloud-sql/locations/global/releases/cloud-sql-09-20-repair",
"rollout": "projects/cloud-sql/locations/us-east1/rollouts/cloud-
sql-09-20-repair-100-percent", } } "tenant_project_id": "cloud-sql-
test-tenant", "producer_metadata": { "cloud-sql-tier": "basic",
"cloud-sql-instance-size": "1G", }, "provisioned_resources": [ {
"resource-type": "compute-instance", "resource-url":
"https://www.googleapis.com/compute/v1/projects/cloud-sql/zones/us-
east1-b/instances/vm-1", } ], "maintenance_schedules": {
"csa_rollout": { "start_time": { "seconds": 1526406431,
}, "end_time": { "seconds": 1535406431, }, },
"ncsa_rollout": { "start_time": { "seconds": 1526406431,
}, "end_time": { "seconds": 1535406431, }, }
}, "consumer_defined_name": "my-sql-instance1", } ```
Enums:
StateValueValuesEnum: Output only. Current lifecycle state of the resource
(e.g. if it's being created or ready to use).
Messages:
LabelsValue: Optional. Resource labels to represent user provided
metadata. Each label is a key-value pair, where both the key and the
value are arbitrary strings provided by the user.
MaintenancePolicyNamesValue: The MaintenancePolicies that have been
attached to the instance. The key must be of the type name of the oneof
policy name defined in MaintenancePolicy, and the referenced policy must
define the same policy type. For complete details of MaintenancePolicy,
please refer to go/cloud-saas-mw-ug.
MaintenanceSchedulesValue: The MaintenanceSchedule contains the scheduling
information of published maintenance schedule.
ProducerMetadataValue: Output only. Custom string attributes used
primarily to expose producer-specific information in monitoring
dashboards. See go/get-instance-metadata.
RolloutMetadataValue: The map between RolloutType and the corresponding
RolloutMetadata. This is only mutated by rollout service. For actuation
implementation, this information is pass-through for Rollout management.
Producer shall not modify by itself. For update of a single entry in
this map, the update field mask shall follow this sementics: go
/advanced-field-masks
SoftwareVersionsValue: Software versions that are used to deploy this
instance. This can be mutated by rollout services.
Fields:
consumerDefinedName: consumer_defined_name is the name that is set by the
consumer. On the other hand Name field represents system-assigned id of
an instance so consumers are not necessarily aware of it.
consumer_defined_name is used for notification/UI purposes for consumer
to recognize their instances.
createTime: Output only. Timestamp when the resource was created.
labels: Optional. Resource labels to represent user provided metadata.
Each label is a key-value pair, where both the key and the value are
arbitrary strings provided by the user.
maintenancePolicyNames: The MaintenancePolicies that have been attached to
the instance. The key must be of the type name of the oneof policy name
defined in MaintenancePolicy, and the referenced policy must define the
same policy type. For complete details of MaintenancePolicy, please
refer to go/cloud-saas-mw-ug.
maintenanceSchedules: The MaintenanceSchedule contains the scheduling
information of published maintenance schedule.
name: Unique name of the resource. It uses the form:
`projects/{project_id}/locations/{location_id}/instances/{instance_id}`
producerMetadata: Output only. Custom string attributes used primarily to
expose producer-specific information in monitoring dashboards. See go
/get-instance-metadata.
provisionedResources: Output only. The list of data plane resources
provisioned for this instance, e.g. compute VMs. See go/get-instance-
metadata.
rolloutMetadata: The map between RolloutType and the corresponding
RolloutMetadata. This is only mutated by rollout service. For actuation
implementation, this information is pass-through for Rollout management.
Producer shall not modify by itself. For update of a single entry in
this map, the update field mask shall follow this sementics: go
/advanced-field-masks
slmInstanceTemplate: Link to the SLM instance template. Only populated
when updating SLM instances via SSA's Actuation service adaptor. Service
producers with custom control plane (e.g. Cloud SQL) doesn't need to
populate this field. Instead they should use software_versions.
sloMetadata: Output only. SLO metadata for instance classification in the
Standardized dataplane SLO platform. See go/cloud-ssa-standard-slo for
feature description.
softwareVersions: Software versions that are used to deploy this instance.
This can be mutated by rollout services.
state: Output only. Current lifecycle state of the resource (e.g. if it's
being created or ready to use).
tenantProjectId: Output only. ID of the associated GCP tenant project. See
go/get-instance-metadata.
updateTime: Output only. Timestamp when the resource was last modified.
"""
class StateValueValuesEnum(_messages.Enum):
r"""Output only. Current lifecycle state of the resource (e.g. if it's
being created or ready to use).
Values:
STATE_UNSPECIFIED: Unspecified state.
CREATING: Instance is being created.
READY: Instance has been created and is ready to use.
UPDATING: Instance is being updated.
REPAIRING: Instance is unheathy and under repair.
DELETING: Instance is being deleted.
ERROR: Instance encountered an error and is in indeterministic state.
"""
STATE_UNSPECIFIED = 0
CREATING = 1
READY = 2
UPDATING = 3
REPAIRING = 4
DELETING = 5
ERROR = 6
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
r"""Optional. Resource labels to represent user provided metadata. Each
label is a key-value pair, where both the key and the value are arbitrary
strings provided by the user.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class MaintenancePolicyNamesValue(_messages.Message):
r"""The MaintenancePolicies that have been attached to the instance. The
key must be of the type name of the oneof policy name defined in
MaintenancePolicy, and the referenced policy must define the same policy
type. For complete details of MaintenancePolicy, please refer to go/cloud-
saas-mw-ug.
Messages:
AdditionalProperty: An additional property for a
MaintenancePolicyNamesValue object.
Fields:
additionalProperties: Additional properties of type
MaintenancePolicyNamesValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a MaintenancePolicyNamesValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class MaintenanceSchedulesValue(_messages.Message):
r"""The MaintenanceSchedule contains the scheduling information of
published maintenance schedule.
Messages:
AdditionalProperty: An additional property for a
MaintenanceSchedulesValue object.
Fields:
additionalProperties: Additional properties of type
MaintenanceSchedulesValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a MaintenanceSchedulesValue object.
Fields:
key: Name of the additional property.
value: A
GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSchedule
attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSchedule', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class ProducerMetadataValue(_messages.Message):
r"""Output only. Custom string attributes used primarily to expose
producer-specific information in monitoring dashboards. See go/get-
instance-metadata.
Messages:
AdditionalProperty: An additional property for a ProducerMetadataValue
object.
Fields:
additionalProperties: Additional properties of type
ProducerMetadataValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a ProducerMetadataValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class RolloutMetadataValue(_messages.Message):
r"""The map between RolloutType and the corresponding RolloutMetadata.
This is only mutated by rollout service. For actuation implementation,
this information is pass-through for Rollout management. Producer shall
not modify by itself. For update of a single entry in this map, the update
field mask shall follow this sementics: go/advanced-field-masks
Messages:
AdditionalProperty: An additional property for a RolloutMetadataValue
object.
Fields:
additionalProperties: Additional properties of type RolloutMetadataValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a RolloutMetadataValue object.
Fields:
key: Name of the additional property.
value: A
GoogleCloudSaasacceleratorManagementProvidersV1RolloutMetadata
attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1RolloutMetadata', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class SoftwareVersionsValue(_messages.Message):
r"""Software versions that are used to deploy this instance. This can be
mutated by rollout services.
Messages:
AdditionalProperty: An additional property for a SoftwareVersionsValue
object.
Fields:
additionalProperties: Additional properties of type
SoftwareVersionsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a SoftwareVersionsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
consumerDefinedName = _messages.StringField(1)
createTime = _messages.StringField(2)
labels = _messages.MessageField('LabelsValue', 3)
maintenancePolicyNames = _messages.MessageField('MaintenancePolicyNamesValue', 4)
maintenanceSchedules = _messages.MessageField('MaintenanceSchedulesValue', 5)
name = _messages.StringField(6)
producerMetadata = _messages.MessageField('ProducerMetadataValue', 7)
provisionedResources = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1ProvisionedResource', 8, repeated=True)
rolloutMetadata = _messages.MessageField('RolloutMetadataValue', 9)
slmInstanceTemplate = _messages.StringField(10)
sloMetadata = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1SloMetadata', 11)
softwareVersions = _messages.MessageField('SoftwareVersionsValue', 12)
state = _messages.EnumField('StateValueValuesEnum', 13)
tenantProjectId = _messages.StringField(14)
updateTime = _messages.StringField(15)
class GoogleCloudSaasacceleratorManagementProvidersV1MaintenanceSchedule(_messages.Message):
r"""Maintenance schedule which is exposed to customer and potentially end
user, indicating published upcoming future maintenance schedule
Fields:
canReschedule: Can this scheduled update be rescheduled? By default, it's
true and API needs to do explicitly check whether it's set, if it's set
as false explicitly, it's false
endTime: The scheduled end time for the maintenance.
rolloutManagementPolicy: The rollout management policy this maintenance
schedule is associated with. When doing reschedule update request, the
reschedule should be against this given policy.
startTime: The scheduled start time for the maintenance.
"""
canReschedule = _messages.BooleanField(1)
endTime = _messages.StringField(2)
rolloutManagementPolicy = _messages.StringField(3)
startTime = _messages.StringField(4)
class GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata(_messages.Message):
r"""Node information for custom per-node SLO implementations. SSA does not
support per-node SLO, but producers can populate per-node information in
SloMetadata for custom precomputations. SSA Eligibility Exporter will emit
per-node metric based on this information.
Fields:
exclusions: By default node is eligible if instance is eligible. But
individual node might be excluded from SLO by adding entry here. For
semantic see SloMetadata.exclusions. If both instance and node level
exclusions are present for time period, the node level's reason will be
reported by Eligibility Exporter.
location: The location of the node, if different from instance location.
nodeId: The id of the node. This should be equal to
SaasInstanceNode.node_id.
"""
exclusions = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1SloExclusion', 1, repeated=True)
location = _messages.StringField(2)
nodeId = _messages.StringField(3)
class GoogleCloudSaasacceleratorManagementProvidersV1NotificationMetadata(_messages.Message):
r"""NotificationMetadata is the notification state for an instance.
Fields:
rescheduled: Whether the instance update has been rescheduled.
scheduledEndTime: The scheduled end time for the maintenance window during
which update can be performed on the instance.
scheduledStartTime: The scheduled start time for the maintenance window
during which update can be performed on the instance.
targetRelease: The target release to be applied to the instance.
"""
rescheduled = _messages.BooleanField(1)
scheduledEndTime = _messages.StringField(2)
scheduledStartTime = _messages.StringField(3)
targetRelease = _messages.StringField(4)
class GoogleCloudSaasacceleratorManagementProvidersV1ProvisionedResource(_messages.Message):
r"""Describes provisioned dataplane resources.
Fields:
resourceType: Type of the resource. This can be either a GCP resource or a
custom one (e.g. another cloud provider's VM). For GCP compute resources
use singular form of the names listed in GCP compute API documentation
(https://cloud.google.com/compute/docs/reference/rest/v1/), prefixed
with 'compute-', for example: 'compute-instance', 'compute-disk',
'compute-autoscaler'.
resourceUrl: URL identifying the resource, e.g.
"https://www.googleapis.com/compute/v1/projects/...)".
"""
resourceType = _messages.StringField(1)
resourceUrl = _messages.StringField(2)
class GoogleCloudSaasacceleratorManagementProvidersV1RolloutMetadata(_messages.Message):
r"""RolloutMetadata for an actuation instance. It maps to a single
RolloutType.
Fields:
notification: Instance level notification metadata.
releaseName: The last Release that has been applied to the instance.
rolloutName: The last rollout that has been applied to the instance.
"""
notification = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1NotificationMetadata', 1)
releaseName = _messages.StringField(2)
rolloutName = _messages.StringField(3)
class GoogleCloudSaasacceleratorManagementProvidersV1SloEligibility(_messages.Message):
r"""SloEligibility is a tuple containing eligibility value: true if an
instance is eligible for SLO calculation or false if it should be excluded
from all SLO-related calculations along with a user-defined reason.
Fields:
eligible: Whether an instance is eligible or ineligible.
reason: User-defined reason for the current value of instance eligibility.
Usually, this can be directly mapped to the internal state. An empty
reason is allowed.
"""
eligible = _messages.BooleanField(1)
reason = _messages.StringField(2)
class GoogleCloudSaasacceleratorManagementProvidersV1SloExclusion(_messages.Message):
r"""SloExclusion represents an exclusion in SLI calculation applies to all
SLOs.
Fields:
duration: Exclusion duration. No restrictions on the possible values.
When an ongoing operation is taking longer than initially expected, an
existing entry in the exclusion list can be updated by extending the
duration. This is supported by the subsystem exporting eligibility data
as long as such extension is committed at least 10 minutes before the
original exclusion expiration - otherwise it is possible that there will
be "gaps" in the exclusion application in the exported timeseries.
reason: Human-readable reason for the exclusion. This should be a static
string (e.g. "Disruptive update in progress") and should not contain
dynamically generated data (e.g. instance name). Can be left empty.
sliName: Name of an SLI that this exclusion applies to. Can be left empty,
signaling that the instance should be excluded from all SLIs defined in
the service SLO configuration.
startTime: Start time of the exclusion. No alignment (e.g. to a full
minute) needed.
"""
duration = _messages.StringField(1)
reason = _messages.StringField(2)
sliName = _messages.StringField(3)
startTime = _messages.StringField(4)
class GoogleCloudSaasacceleratorManagementProvidersV1SloMetadata(_messages.Message):
  r"""SloMetadata contains resources required for proper SLO classification of
  the instance.
  Fields:
    eligibility: Optional: user-defined instance eligibility.
    exclusions: List of SLO exclusion windows. When multiple entries in the
      list match (matching the exclusion time-window against current time
      point) the exclusion reason used in the first matching entry will be
      published. It is not needed to include expired exclusion in this list,
      as only the currently applicable exclusions are taken into account by
      the eligibility exporting subsystem (the historical state of exclusions
      will be reflected in the historically produced timeseries regardless of
      the current state). This field can be used to mark the instance as
      temporary ineligible for the purpose of SLO calculation. For permanent
      instance SLO exclusion, use of custom instance eligibility is
      recommended. See 'eligibility' field below.
    nodes: Optional: list of nodes. Some producers need to use per-node
      metadata to calculate SLO. This field allows such producers to publish
      per-node SLO meta data, which will be consumed by SSA Eligibility
      Exporter and published in the form of per node metric to Monarch.
    tier: Name of the SLO tier the Instance belongs to. This name will be
      expected to match the tiers specified in the service SLO configuration.
      Field is mandatory and must not be empty.
  """
  # Auto-generated protorpc declarations; repeated=True marks list-valued
  # fields.  Integers are wire field numbers -- do not renumber.
  eligibility = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1SloEligibility', 1)
  exclusions = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1SloExclusion', 2, repeated=True)
  nodes = _messages.MessageField('GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata', 3, repeated=True)
  tier = _messages.StringField(4)
class Instance(_messages.Message):
  r"""A Cloud Filestore instance.
  Enums:
    StateValueValuesEnum: Output only. The instance state.
    TierValueValuesEnum: The service tier of the instance.
  Messages:
    LabelsValue: Resource labels to represent user provided metadata.
  Fields:
    createTime: Output only. The time when the instance was created.
    description: Optional. A description of the instance (2048 characters or
      less).
    etag: Server-specified ETag for the instance resource to prevent
      simultaneous updates from overwriting each other.
    fileShares: File system shares on the instance. For this version, only a
      single file share is supported.
    labels: Resource labels to represent user provided metadata.
    name: Output only. The resource name of the instance, in the format
      projects/{project_id}/locations/{location_id}/instances/{instance_id}.
    networks: VPC networks to which the instance is connected. For this
      version, only a single network is supported.
    state: Output only. The instance state.
    statusMessage: Output only. Additional information about the instance
      state, if available.
    tier: The service tier of the instance.
  """
  class StateValueValuesEnum(_messages.Enum):
    r"""Output only. The instance state.
    Values:
      STATE_UNSPECIFIED: State not set.
      CREATING: The instance is being created.
      READY: The instance is available for use.
      REPAIRING: Work is being done on the instance. You can get further
        details from the `statusMessage` field of the `Instance` resource.
      DELETING: The instance is shutting down.
      ERROR: The instance is experiencing an issue and might be unusable. You
        can get further details from the `statusMessage` field of the
        `Instance` resource.
    """
    STATE_UNSPECIFIED = 0
    CREATING = 1
    READY = 2
    REPAIRING = 3
    DELETING = 4
    ERROR = 5
  class TierValueValuesEnum(_messages.Enum):
    r"""The service tier of the instance.
    Values:
      TIER_UNSPECIFIED: Not set.
      STANDARD: STANDARD tier.
      PREMIUM: PREMIUM tier.
    """
    TIER_UNSPECIFIED = 0
    STANDARD = 1
    PREMIUM = 2
  # MapUnrecognizedFields folds arbitrary user-supplied JSON keys into
  # AdditionalProperty entries so free-form labels survive decoding.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""Resource labels to represent user provided metadata.
    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.
    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """
    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.
      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """
      key = _messages.StringField(1)
      value = _messages.StringField(2)
    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
  # Auto-generated protorpc declarations; integers are wire field numbers.
  createTime = _messages.StringField(1)
  description = _messages.StringField(2)
  etag = _messages.StringField(3)
  fileShares = _messages.MessageField('FileShareConfig', 4, repeated=True)
  labels = _messages.MessageField('LabelsValue', 5)
  name = _messages.StringField(6)
  networks = _messages.MessageField('NetworkConfig', 7, repeated=True)
  state = _messages.EnumField('StateValueValuesEnum', 8)
  statusMessage = _messages.StringField(9)
  tier = _messages.EnumField('TierValueValuesEnum', 10)
class ListInstancesResponse(_messages.Message):
  r"""ListInstancesResponse is the result of ListInstancesRequest.
  Fields:
    instances: A list of instances in the project for the specified location.
      If the {location} value in the request is "-", the response contains a
      list of instances from all locations. If any location is unreachable,
      the response will only return instances in reachable locations and the
      "unreachable" field will be populated with a list of unreachable
      locations.
    nextPageToken: The token you can use to retrieve the next page of results.
      Not returned if there are no more results in the list.
    unreachable: Locations that could not be reached.
  """
  # Auto-generated protorpc declarations; integers are wire field numbers.
  instances = _messages.MessageField('Instance', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
  unreachable = _messages.StringField(3, repeated=True)
class ListLocationsResponse(_messages.Message):
  r"""The response message for Locations.ListLocations.
  Fields:
    locations: A list of locations that matches the specified filter in the
      request.
    nextPageToken: The standard List next-page token.
  """
  # Auto-generated protorpc declarations; integers are wire field numbers.
  locations = _messages.MessageField('Location', 1, repeated=True)
  nextPageToken = _messages.StringField(2)
class ListOperationsResponse(_messages.Message):
  r"""The response message for Operations.ListOperations.
  Fields:
    nextPageToken: The standard List next-page token.
    operations: A list of operations that matches the specified filter in the
      request.
  """
  # Auto-generated protorpc declarations; integers are wire field numbers.
  nextPageToken = _messages.StringField(1)
  operations = _messages.MessageField('Operation', 2, repeated=True)
class Location(_messages.Message):
  r"""A resource that represents Google Cloud Platform location.
  Messages:
    LabelsValue: Cross-service attributes for the location. For example
      {"cloud.googleapis.com/region": "us-east1"}
    MetadataValue: Service-specific metadata. For example the available
      capacity at the given location.
  Fields:
    displayName: The friendly name for this location, typically a nearby city
      name. For example, "Tokyo".
    labels: Cross-service attributes for the location. For example
      {"cloud.googleapis.com/region": "us-east1"}
    locationId: The canonical id for this location. For example: `"us-east1"`.
    metadata: Service-specific metadata. For example the available capacity at
      the given location.
    name: Resource name for the location, which may vary between
      implementations. For example: `"projects/example-project/locations/us-
      east1"`
  """
  # MapUnrecognizedFields folds arbitrary JSON keys into AdditionalProperty
  # entries for the free-form maps below.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class LabelsValue(_messages.Message):
    r"""Cross-service attributes for the location. For example
    {"cloud.googleapis.com/region": "us-east1"}
    Messages:
      AdditionalProperty: An additional property for a LabelsValue object.
    Fields:
      additionalProperties: Additional properties of type LabelsValue
    """
    class AdditionalProperty(_messages.Message):
      r"""An additional property for a LabelsValue object.
      Fields:
        key: Name of the additional property.
        value: A string attribute.
      """
      key = _messages.StringField(1)
      value = _messages.StringField(2)
    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
  @encoding.MapUnrecognizedFields('additionalProperties')
  class MetadataValue(_messages.Message):
    r"""Service-specific metadata. For example the available capacity at the
    given location.
    Messages:
      AdditionalProperty: An additional property for a MetadataValue object.
    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """
    class AdditionalProperty(_messages.Message):
      r"""An additional property for a MetadataValue object.
      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """
      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)
    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
  # Auto-generated protorpc declarations; integers are wire field numbers.
  displayName = _messages.StringField(1)
  labels = _messages.MessageField('LabelsValue', 2)
  locationId = _messages.StringField(3)
  metadata = _messages.MessageField('MetadataValue', 4)
  name = _messages.StringField(5)
class NetworkConfig(_messages.Message):
  r"""Network configuration for the instance.
  Enums:
    ModesValueListEntryValuesEnum:
  Fields:
    ipAddresses: Output only. IPv4 addresses in the format {octet 1}.{octet
      2}.{octet 3}.{octet 4} or IPv6 addresses in the format {block 1}:{block
      2}:{block 3}:{block 4}:{block 5}:{block 6}:{block 7}:{block 8}.
    modes: Internet protocol versions for which the instance has IP addresses
      assigned. For this version, only MODE_IPV4 is supported.
    network: The name of the Google Compute Engine [VPC network](/compute/docs
      /networks-and-firewalls#networks) to which the instance is connected.
    reservedIpRange: A /29 CIDR block in one of the [internal IP address
      ranges](https://www.arin.net/knowledge/address_filters.html) that
      identifies the range of IP addresses reserved for this instance. For
      example, 10.0.0.0/29 or 192.168.0.0/29. The range you specify can't
      overlap with either existing subnets or assigned IP address ranges for
      other Cloud Filestore instances in the selected VPC network.
  """
  class ModesValueListEntryValuesEnum(_messages.Enum):
    r"""ModesValueListEntryValuesEnum enum type.
    Values:
      ADDRESS_MODE_UNSPECIFIED: <no description>
      MODE_IPV4: <no description>
    """
    ADDRESS_MODE_UNSPECIFIED = 0
    MODE_IPV4 = 1
  # Auto-generated protorpc declarations; integers are wire field numbers.
  ipAddresses = _messages.StringField(1, repeated=True)
  modes = _messages.EnumField('ModesValueListEntryValuesEnum', 2, repeated=True)
  network = _messages.StringField(3)
  reservedIpRange = _messages.StringField(4)
class Operation(_messages.Message):
  r"""This resource represents a long-running operation that is the result of
  a network API call.
  Messages:
    MetadataValue: Service-specific metadata associated with the operation.
      It typically contains progress information and common metadata such as
      create time. Some services might not provide such metadata. Any method
      that returns a long-running operation should document the metadata type,
      if any.
    ResponseValue: The normal response of the operation in case of success.
      If the original method returns no data on success, such as `Delete`, the
      response is `google.protobuf.Empty`. If the original method is standard
      `Get`/`Create`/`Update`, the response should be the resource. For other
      methods, the response should have the type `XxxResponse`, where `Xxx` is
      the original method name. For example, if the original method name is
      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
  Fields:
    done: If the value is `false`, it means the operation is still in
      progress. If `true`, the operation is completed, and either `error` or
      `response` is available.
    error: The error result of the operation in case of failure or
      cancellation.
    metadata: Service-specific metadata associated with the operation. It
      typically contains progress information and common metadata such as
      create time. Some services might not provide such metadata. Any method
      that returns a long-running operation should document the metadata type,
      if any.
    name: The server-assigned name, which is only unique within the same
      service that originally returns it. If you use the default HTTP mapping,
      the `name` should be a resource name ending with
      `operations/{unique_id}`.
    response: The normal response of the operation in case of success. If the
      original method returns no data on success, such as `Delete`, the
      response is `google.protobuf.Empty`. If the original method is standard
      `Get`/`Create`/`Update`, the response should be the resource. For other
      methods, the response should have the type `XxxResponse`, where `Xxx` is
      the original method name. For example, if the original method name is
      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
  """
  # MapUnrecognizedFields folds arbitrary JSON keys into AdditionalProperty
  # entries for the free-form maps below.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class MetadataValue(_messages.Message):
    r"""Service-specific metadata associated with the operation. It typically
    contains progress information and common metadata such as create time.
    Some services might not provide such metadata. Any method that returns a
    long-running operation should document the metadata type, if any.
    Messages:
      AdditionalProperty: An additional property for a MetadataValue object.
    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """
    class AdditionalProperty(_messages.Message):
      r"""An additional property for a MetadataValue object.
      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """
      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)
    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
  @encoding.MapUnrecognizedFields('additionalProperties')
  class ResponseValue(_messages.Message):
    r"""The normal response of the operation in case of success. If the
    original method returns no data on success, such as `Delete`, the response
    is `google.protobuf.Empty`. If the original method is standard
    `Get`/`Create`/`Update`, the response should be the resource. For other
    methods, the response should have the type `XxxResponse`, where `Xxx` is
    the original method name. For example, if the original method name is
    `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
    Messages:
      AdditionalProperty: An additional property for a ResponseValue object.
    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """
    class AdditionalProperty(_messages.Message):
      r"""An additional property for a ResponseValue object.
      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """
      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)
    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
  # Auto-generated protorpc declarations; integers are wire field numbers.
  done = _messages.BooleanField(1)
  error = _messages.MessageField('Status', 2)
  metadata = _messages.MessageField('MetadataValue', 3)
  name = _messages.StringField(4)
  response = _messages.MessageField('ResponseValue', 5)
class OperationMetadata(_messages.Message):
  r"""Represents the metadata of the long-running operation.
  Fields:
    apiVersion: [Output only] API version used to start the operation.
    cancelRequested: [Output only] Identifies whether the user has requested
      cancellation of the operation. Operations that have successfully been
      cancelled have Operation.error value with a google.rpc.Status.code of 1,
      corresponding to `Code.CANCELLED`.
    createTime: [Output only] The time the operation was created.
    endTime: [Output only] The time the operation finished running.
    statusDetail: [Output only] Human-readable status of the operation, if
      any.
    target: [Output only] Server-defined resource path for the target of the
      operation.
    verb: [Output only] Name of the verb executed by the operation.
  """
  # Auto-generated protorpc declarations; integers are wire field numbers.
  apiVersion = _messages.StringField(1)
  cancelRequested = _messages.BooleanField(2)
  createTime = _messages.StringField(3)
  endTime = _messages.StringField(4)
  statusDetail = _messages.StringField(5)
  target = _messages.StringField(6)
  verb = _messages.StringField(7)
class StandardQueryParameters(_messages.Message):
  r"""Query parameters accepted by all methods.
  Enums:
    FXgafvValueValuesEnum: V1 error format.
    AltValueValuesEnum: Data format for response.
  Fields:
    f__xgafv: V1 error format.
    access_token: OAuth access token.
    alt: Data format for response.
    callback: JSONP
    fields: Selector specifying which fields to include in a partial response.
    key: API key. Your API key identifies your project and provides you with
      API access, quota, and reports. Required unless you provide an OAuth 2.0
      token.
    oauth_token: OAuth 2.0 token for the current user.
    prettyPrint: Returns response with indentations and line breaks.
    quotaUser: Available to use for quota purposes for server-side
      applications. Can be any arbitrary string assigned to a user, but should
      not exceed 40 characters.
    trace: A tracing token of the form "token:<tokenid>" to include in api
      requests.
    uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
    upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
  """
  class AltValueValuesEnum(_messages.Enum):
    r"""Data format for response.
    Values:
      json: Responses with Content-Type of application/json
      media: Media download with context-dependent Content-Type
      proto: Responses with Content-Type of application/x-protobuf
    """
    json = 0
    media = 1
    proto = 2
  class FXgafvValueValuesEnum(_messages.Enum):
    r"""V1 error format.
    Values:
      _1: v1 error format
      _2: v2 error format
    """
    _1 = 0
    _2 = 1
  # 'f__xgafv' is remapped to the JSON name '$.xgafv' (and '_1'/'_2' to
  # '1'/'2') by the encoding.AddCustomJson*Mapping calls at module scope.
  f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
  access_token = _messages.StringField(2)
  alt = _messages.EnumField('AltValueValuesEnum', 3, default=u'json')
  callback = _messages.StringField(4)
  fields = _messages.StringField(5)
  key = _messages.StringField(6)
  oauth_token = _messages.StringField(7)
  prettyPrint = _messages.BooleanField(8, default=True)
  quotaUser = _messages.StringField(9)
  trace = _messages.StringField(10)
  uploadType = _messages.StringField(11)
  upload_protocol = _messages.StringField(12)
class Status(_messages.Message):
  r"""The `Status` type defines a logical error model that is suitable for
  different programming environments, including REST APIs and RPC APIs. It is
  used by [gRPC](https://github.com/grpc). Each `Status` message contains
  three pieces of data: error code, error message, and error details. You can
  find out more about this error model and how to work with it in the [API
  Design Guide](https://cloud.google.com/apis/design/errors).
  Messages:
    DetailsValueListEntry: A DetailsValueListEntry object.
  Fields:
    code: The status code, which should be an enum value of google.rpc.Code.
    details: A list of messages that carry the error details. There is a
      common set of message types for APIs to use.
    message: A developer-facing error message, which should be in English. Any
      user-facing error message should be localized and sent in the
      google.rpc.Status.details field, or localized by the client.
  """
  # MapUnrecognizedFields folds arbitrary JSON keys into AdditionalProperty
  # entries so untyped detail payloads survive decoding.
  @encoding.MapUnrecognizedFields('additionalProperties')
  class DetailsValueListEntry(_messages.Message):
    r"""A DetailsValueListEntry object.
    Messages:
      AdditionalProperty: An additional property for a DetailsValueListEntry
        object.
    Fields:
      additionalProperties: Properties of the object. Contains field @type
        with type URL.
    """
    class AdditionalProperty(_messages.Message):
      r"""An additional property for a DetailsValueListEntry object.
      Fields:
        key: Name of the additional property.
        value: A extra_types.JsonValue attribute.
      """
      key = _messages.StringField(1)
      value = _messages.MessageField('extra_types.JsonValue', 2)
    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
  # Auto-generated protorpc declarations; integers are wire field numbers.
  # code is a 32-bit integer on the wire (Variant.INT32).
  code = _messages.IntegerField(1, variant=_messages.Variant.INT32)
  details = _messages.MessageField('DetailsValueListEntry', 2, repeated=True)
  message = _messages.StringField(3)
# Map Python-safe identifiers back to the JSON names the API expects:
# the field 'f__xgafv' serializes as the query parameter '$.xgafv', and the
# enum members '_1'/'_2' serialize as the strings '1'/'2'.
encoding.AddCustomJsonFieldMapping(
    StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
| [
"[email protected]"
]
| |
0691055f7a51776cd005a7d77e800bd1f20c2a0e | a3a3183bc3ae9d3d4bad2f4923e8297bce0ff7d3 | /final/python3.5-3.5.2/Lib/test/test_asyncio/test_base_events.py | 206ebc69fe4476a81a9bde84c517afde45f5d9e3 | [
"Python-2.0"
]
| permissive | Nuitka/Nuitka-references | 4b78831e6947f1c4b32ef034435a88ecfd27f701 | f20d1b5728ec00cf8a5b23d650101c288b2594e9 | refs/heads/main | 2023-08-06T19:12:11.795836 | 2023-08-03T14:54:16 | 2023-08-03T14:55:22 | 169,884,560 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 61,721 | py | """Tests for base_events.py"""
import errno
import logging
import math
import os
import socket
import sys
import threading
import time
import unittest
from unittest import mock
import asyncio
from asyncio import base_events
from asyncio import constants
from asyncio import test_utils
try:
from test import support
except ImportError:
from asyncio import test_support as support
try:
from test.support.script_helper import assert_python_ok
except ImportError:
try:
from test.script_helper import assert_python_ok
except ImportError:
from asyncio.test_support import assert_python_ok
# Shorthand for mock.ANY, used as a wildcard in assert_called_with checks.
MOCK_ANY = mock.ANY
# True on Python 3.4+; presumably guards version-specific assertions later in
# the file -- confirm against the remaining tests.
PY34 = sys.version_info >= (3, 4)
def mock_socket_module():
    """Build a MagicMock stand-in for the ``socket`` module.

    Real constants (address families, socket types, protocols) and
    ``inet_pton`` are copied from the genuine module when the platform
    provides them, and deleted from the mock otherwise, so ``hasattr``
    feature probes behave exactly as they would against ``socket``.
    """
    fake_module = mock.MagicMock(spec=socket)
    passthrough = (
        'AF_INET', 'AF_INET6', 'AF_UNSPEC', 'IPPROTO_TCP', 'IPPROTO_UDP',
        'SOCK_STREAM', 'SOCK_DGRAM', 'SOL_SOCKET', 'SO_REUSEADDR', 'inet_pton'
    )
    for attr in passthrough:
        try:
            setattr(fake_module, attr, getattr(socket, attr))
        except AttributeError:
            # Platform lacks this name: make the mock lack it too.
            delattr(fake_module, attr)
    fake_module.socket = mock.MagicMock()
    fake_module.socket.return_value = test_utils.mock_nonblocking_socket()
    # getaddrinfo must not look like a coroutine to the event loop.
    fake_module.getaddrinfo._is_coroutine = False
    return fake_module
def patch_socket(f):
    """Decorator: run *f* with asyncio.base_events.socket replaced by a mock
    module built via mock_socket_module(); the mock is passed to *f* as an
    extra argument.
    """
    decorator = mock.patch('asyncio.base_events.socket',
                           new_callable=mock_socket_module)
    return decorator(f)
class BaseEventTests(test_utils.TestCase):
    """Tests for the module-level address helpers in asyncio.base_events."""

    def test_ipaddr_info(self):
        """_ipaddr_info() short-circuits getaddrinfo for numeric hosts."""
        UNSPEC = socket.AF_UNSPEC
        INET = socket.AF_INET
        INET6 = socket.AF_INET6
        STREAM = socket.SOCK_STREAM
        DGRAM = socket.SOCK_DGRAM
        TCP = socket.IPPROTO_TCP
        UDP = socket.IPPROTO_UDP
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info(b'1.2.3.4', 1, INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, STREAM, TCP))
        self.assertEqual(
            (INET, DGRAM, UDP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, DGRAM, UDP))
        # Socket type STREAM implies TCP protocol.
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, STREAM, 0))
        # Socket type DGRAM implies UDP protocol.
        self.assertEqual(
            (INET, DGRAM, UDP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, DGRAM, 0))
        # No socket type.
        self.assertIsNone(
            base_events._ipaddr_info('1.2.3.4', 1, UNSPEC, 0, 0))
        # IPv4 address with family IPv6.
        self.assertIsNone(
            base_events._ipaddr_info('1.2.3.4', 1, INET6, STREAM, TCP))
        self.assertEqual(
            (INET6, STREAM, TCP, '', ('::3', 1)),
            base_events._ipaddr_info('::3', 1, INET6, STREAM, TCP))
        self.assertEqual(
            (INET6, STREAM, TCP, '', ('::3', 1)),
            base_events._ipaddr_info('::3', 1, UNSPEC, STREAM, TCP))
        # IPv6 address with family IPv4.
        self.assertIsNone(
            base_events._ipaddr_info('::3', 1, INET, STREAM, TCP))
        # IPv6 address with zone index.
        self.assertIsNone(
            base_events._ipaddr_info('::3%lo0', 1, INET6, STREAM, TCP))

    def test_port_parameter_types(self):
        """_ipaddr_info() accepts None, bytes, str, and numeric "port"."""
        # Test obscure kinds of arguments for "port".
        INET = socket.AF_INET
        STREAM = socket.SOCK_STREAM
        TCP = socket.IPPROTO_TCP
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 0)),
            base_events._ipaddr_info('1.2.3.4', None, INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 0)),
            base_events._ipaddr_info('1.2.3.4', b'', INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 0)),
            base_events._ipaddr_info('1.2.3.4', '', INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', '1', INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 1)),
            base_events._ipaddr_info('1.2.3.4', b'1', INET, STREAM, TCP))

    def test_getaddrinfo_servname(self):
        """_ipaddr_info() resolves service names and rejects unknown ones."""
        INET = socket.AF_INET
        STREAM = socket.SOCK_STREAM
        TCP = socket.IPPROTO_TCP
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 80)),
            base_events._ipaddr_info('1.2.3.4', 'http', INET, STREAM, TCP))
        self.assertEqual(
            (INET, STREAM, TCP, '', ('1.2.3.4', 80)),
            base_events._ipaddr_info('1.2.3.4', b'http', INET, STREAM, TCP))
        # Raises "service/proto not found".  (A byte-identical copy-paste
        # duplicate of this assertion was removed.)
        with self.assertRaises(OSError):
            base_events._ipaddr_info('1.2.3.4', 'nonsense', INET, STREAM, TCP)

    @patch_socket
    def test_ipaddr_info_no_inet_pton(self, m_socket):
        """Without socket.inet_pton, _ipaddr_info() must give up (None)."""
        del m_socket.inet_pton
        self.assertIsNone(base_events._ipaddr_info('1.2.3.4', 1,
                                                   socket.AF_INET,
                                                   socket.SOCK_STREAM,
                                                   socket.IPPROTO_TCP))
class BaseEventLoopTests(test_utils.TestCase):
    def setUp(self):
        """Create a bare BaseEventLoop with a stubbed-out selector."""
        self.loop = base_events.BaseEventLoop()
        self.loop._selector = mock.Mock()
        # select() returning no events keeps _run_once-style calls fast.
        self.loop._selector.select.return_value = ()
        self.set_event_loop(self.loop)
    def test_not_implemented(self):
        """Every transport/pipe factory on the base class must raise
        NotImplementedError -- concrete event loops override them."""
        m = mock.Mock()
        self.assertRaises(
            NotImplementedError,
            self.loop._make_socket_transport, m, m)
        self.assertRaises(
            NotImplementedError,
            self.loop._make_ssl_transport, m, m, m, m)
        self.assertRaises(
            NotImplementedError,
            self.loop._make_datagram_transport, m, m)
        self.assertRaises(
            NotImplementedError, self.loop._process_events, [])
        self.assertRaises(
            NotImplementedError, self.loop._write_to_self)
        self.assertRaises(
            NotImplementedError,
            self.loop._make_read_pipe_transport, m, m)
        self.assertRaises(
            NotImplementedError,
            self.loop._make_write_pipe_transport, m, m)
        # _make_subprocess_transport is a coroutine: the error only surfaces
        # once the generator is advanced.
        gen = self.loop._make_subprocess_transport(m, m, m, m, m, m, m)
        with self.assertRaises(NotImplementedError):
            gen.send(None)
    def test_close(self):
        """close() is idempotent and closed loops reject run operations."""
        self.assertFalse(self.loop.is_closed())
        self.loop.close()
        self.assertTrue(self.loop.is_closed())
        # it should be possible to call close() more than once
        self.loop.close()
        self.loop.close()
        # operation blocked when the loop is closed
        f = asyncio.Future(loop=self.loop)
        self.assertRaises(RuntimeError, self.loop.run_forever)
        self.assertRaises(RuntimeError, self.loop.run_until_complete, f)
def test__add_callback_handle(self):
h = asyncio.Handle(lambda: False, (), self.loop)
self.loop._add_callback(h)
self.assertFalse(self.loop._scheduled)
self.assertIn(h, self.loop._ready)
def test__add_callback_cancelled_handle(self):
h = asyncio.Handle(lambda: False, (), self.loop)
h.cancel()
self.loop._add_callback(h)
self.assertFalse(self.loop._scheduled)
self.assertFalse(self.loop._ready)
def test_set_default_executor(self):
executor = mock.Mock()
self.loop.set_default_executor(executor)
self.assertIs(executor, self.loop._default_executor)
    def test_getnameinfo(self):
        """getnameinfo() delegates socket.getnameinfo to an executor."""
        sockaddr = mock.Mock()
        self.loop.run_in_executor = mock.Mock()
        self.loop.getnameinfo(sockaddr)
        # Expect (executor=None, func, *args) exactly as passed through.
        self.assertEqual(
            (None, socket.getnameinfo, sockaddr, 0),
            self.loop.run_in_executor.call_args[0])
def test_call_soon(self):
def cb():
pass
h = self.loop.call_soon(cb)
self.assertEqual(h._callback, cb)
self.assertIsInstance(h, asyncio.Handle)
self.assertIn(h, self.loop._ready)
def test_call_later(self):
def cb():
pass
h = self.loop.call_later(10.0, cb)
self.assertIsInstance(h, asyncio.TimerHandle)
self.assertIn(h, self.loop._scheduled)
self.assertNotIn(h, self.loop._ready)
    def test_call_later_negative_delays(self):
        """Negative delays run immediately, ordered by how overdue they are
        (-2 is "earlier" than -1, so 'b' fires before 'a')."""
        calls = []
        def cb(arg):
            calls.append(arg)
        self.loop._process_events = mock.Mock()
        self.loop.call_later(-1, cb, 'a')
        self.loop.call_later(-2, cb, 'b')
        test_utils.run_briefly(self.loop)
        self.assertEqual(calls, ['b', 'a'])
    def test_time_and_call_at(self):
        """call_at() fires no earlier than the requested loop time."""
        def cb():
            self.loop.stop()
        self.loop._process_events = mock.Mock()
        delay = 0.1
        when = self.loop.time() + delay
        self.loop.call_at(when, cb)
        t0 = self.loop.time()
        self.loop.run_forever()
        dt = self.loop.time() - t0
        # 50 ms: maximum granularity of the event loop
        self.assertGreaterEqual(dt, delay - 0.050, dt)
        # tolerate a difference of +800 ms because some Python buildbots
        # are really slow
        self.assertLessEqual(dt, 0.9, dt)
    def check_thread(self, loop, debug):
        """Helper (not a test): assert cross-thread scheduling behavior.

        With debug enabled, call_soon/call_later/call_at invoked from a
        thread that is not running *loop* must raise RuntimeError; with
        debug disabled the calls are accepted unchecked.
        """
        def cb():
            pass
        loop.set_debug(debug)
        if debug:
            msg = ("Non-thread-safe operation invoked on an event loop other "
                   "than the current one")
            with self.assertRaisesRegex(RuntimeError, msg):
                loop.call_soon(cb)
            with self.assertRaisesRegex(RuntimeError, msg):
                loop.call_later(60, cb)
            with self.assertRaisesRegex(RuntimeError, msg):
                loop.call_at(loop.time() + 60, cb)
        else:
            loop.call_soon(cb)
            loop.call_later(60, cb)
            loop.call_at(loop.time() + 60, cb)
    def test_check_thread(self):
        """Exercise check_thread() from a foreign thread, with and without
        debug mode, and with or without another loop set in that thread."""
        def check_in_thread(loop, event, debug, create_loop, fut):
            # wait until the event loop is running
            event.wait()
            try:
                if create_loop:
                    loop2 = base_events.BaseEventLoop()
                    try:
                        asyncio.set_event_loop(loop2)
                        self.check_thread(loop, debug)
                    finally:
                        asyncio.set_event_loop(None)
                        loop2.close()
                else:
                    self.check_thread(loop, debug)
            except Exception as exc:
                loop.call_soon_threadsafe(fut.set_exception, exc)
            else:
                loop.call_soon_threadsafe(fut.set_result, None)
        def test_thread(loop, debug, create_loop=False):
            event = threading.Event()
            fut = asyncio.Future(loop=loop)
            loop.call_soon(event.set)
            args = (loop, event, debug, create_loop, fut)
            thread = threading.Thread(target=check_in_thread, args=args)
            thread.start()
            loop.run_until_complete(fut)
            thread.join()
        self.loop._process_events = mock.Mock()
        self.loop._write_to_self = mock.Mock()
        # raise RuntimeError if the thread has no event loop
        test_thread(self.loop, True)
        # check disabled if debug mode is disabled
        test_thread(self.loop, False)
        # raise RuntimeError if the event loop of the thread is not the called
        # event loop
        test_thread(self.loop, True, create_loop=True)
        # check disabled if debug mode is disabled
        test_thread(self.loop, False, create_loop=True)
    def test_run_once_in_executor_handle(self):
        """run_in_executor() rejects Handle/TimerHandle as the callable."""
        def cb():
            pass
        self.assertRaises(
            AssertionError, self.loop.run_in_executor,
            None, asyncio.Handle(cb, (), self.loop), ('',))
        self.assertRaises(
            AssertionError, self.loop.run_in_executor,
            None, asyncio.TimerHandle(10, cb, (), self.loop))
    def test_run_once_in_executor_cancelled(self):
        """A cancelled handle yields an already-completed Future(None)
        without ever touching an executor."""
        def cb():
            pass
        h = asyncio.Handle(cb, (), self.loop)
        h.cancel()
        f = self.loop.run_in_executor(None, h)
        self.assertIsInstance(f, asyncio.Future)
        self.assertTrue(f.done())
        self.assertIsNone(f.result())
    def test_run_once_in_executor_plain(self):
        """run_in_executor() submits to the default executor when None is
        passed, or to the explicitly supplied executor otherwise."""
        def cb():
            pass
        h = asyncio.Handle(cb, (), self.loop)
        f = asyncio.Future(loop=self.loop)
        executor = mock.Mock()
        executor.submit.return_value = f
        # None -> falls back to the default executor set on the loop.
        self.loop.set_default_executor(executor)
        res = self.loop.run_in_executor(None, h)
        self.assertIs(f, res)
        # Explicit executor argument wins.
        executor = mock.Mock()
        executor.submit.return_value = f
        res = self.loop.run_in_executor(executor, h)
        self.assertIs(f, res)
        self.assertTrue(executor.submit.called)
        f.cancel()  # Don't complain about abandoned Future.
    def test__run_once(self):
        """_run_once() drops cancelled timers and waits only until the
        earliest live timer's deadline."""
        h1 = asyncio.TimerHandle(time.monotonic() + 5.0, lambda: True, (),
                                 self.loop)
        h2 = asyncio.TimerHandle(time.monotonic() + 10.0, lambda: True, (),
                                 self.loop)
        h1.cancel()
        self.loop._process_events = mock.Mock()
        self.loop._scheduled.append(h1)
        self.loop._scheduled.append(h2)
        self.loop._run_once()
        # The select() timeout must track h2 (the only live timer, ~10s out).
        t = self.loop._selector.select.call_args[0][0]
        self.assertTrue(9.5 < t < 10.5, t)
        self.assertEqual([h2], self.loop._scheduled)
        self.assertTrue(self.loop._process_events.called)
def test_set_debug(self):
self.loop.set_debug(True)
self.assertTrue(self.loop.get_debug())
self.loop.set_debug(False)
self.assertFalse(self.loop.get_debug())
    @mock.patch('asyncio.base_events.logger')
    def test__run_once_logging(self, m_logger):
        """In debug mode, _run_once() logs slow selects (> 1s) at INFO and
        fast ones at DEBUG."""
        def slow_select(timeout):
            # Sleep a bit longer than a second to avoid timer resolution
            # issues.
            time.sleep(1.1)
            return []
        # logging needs debug flag
        self.loop.set_debug(True)
        # Log to INFO level if timeout > 1.0 sec.
        self.loop._selector.select = slow_select
        self.loop._process_events = mock.Mock()
        self.loop._run_once()
        self.assertEqual(logging.INFO, m_logger.log.call_args[0][0])
        def fast_select(timeout):
            time.sleep(0.001)
            return []
        self.loop._selector.select = fast_select
        self.loop._run_once()
        self.assertEqual(logging.DEBUG, m_logger.log.call_args[0][0])
    def test__run_once_schedule_handle(self):
        """A timer callback may itself schedule work via call_soon(); the
        newly created handle must land in the ready queue after
        _run_once() completes."""
        handle = None
        processed = False
        def cb(loop):
            nonlocal processed, handle
            processed = True
            handle = loop.call_soon(lambda: True)
        # Deadline is one second in the past, so the timer fires on the
        # next _run_once() iteration.
        h = asyncio.TimerHandle(time.monotonic() - 1, cb, (self.loop,),
                                self.loop)
        self.loop._process_events = mock.Mock()
        self.loop._scheduled.append(h)
        self.loop._run_once()
        self.assertTrue(processed)
        self.assertEqual([handle], list(self.loop._ready))
    def test__run_once_cancelled_event_cleanup(self):
        """_run_once() bulk-purges cancelled timer handles only once their
        fraction of the schedule exceeds
        _MIN_CANCELLED_TIMER_HANDLES_FRACTION; below that threshold,
        cancelled handles are removed only as they reach the heap head."""
        self.loop._process_events = mock.Mock()
        self.assertTrue(
            0 < base_events._MIN_CANCELLED_TIMER_HANDLES_FRACTION < 1.0)
        def cb():
            pass
        # Set up one "blocking" event that will not be cancelled to
        # ensure later cancelled events do not make it to the head
        # of the queue and get cleaned.
        not_cancelled_count = 1
        self.loop.call_later(3000, cb)
        # Add less than threshold (base_events._MIN_SCHEDULED_TIMER_HANDLES)
        # cancelled handles, ensure they aren't removed
        cancelled_count = 2
        for x in range(2):
            h = self.loop.call_later(3600, cb)
            h.cancel()
        # Add some cancelled events that will be at head and removed
        cancelled_count += 2
        for x in range(2):
            h = self.loop.call_later(100, cb)
            h.cancel()
        # This test is invalid if _MIN_SCHEDULED_TIMER_HANDLES is too low
        self.assertLessEqual(cancelled_count + not_cancelled_count,
                             base_events._MIN_SCHEDULED_TIMER_HANDLES)
        self.assertEqual(self.loop._timer_cancelled_count, cancelled_count)
        self.loop._run_once()
        cancelled_count -= 2
        self.assertEqual(self.loop._timer_cancelled_count, cancelled_count)
        self.assertEqual(len(self.loop._scheduled),
                         cancelled_count + not_cancelled_count)
        # Need enough events to pass _MIN_CANCELLED_TIMER_HANDLES_FRACTION
        # so that deletion of cancelled events will occur on next _run_once
        add_cancel_count = int(math.ceil(
            base_events._MIN_SCHEDULED_TIMER_HANDLES *
            base_events._MIN_CANCELLED_TIMER_HANDLES_FRACTION)) + 1
        add_not_cancel_count = max(base_events._MIN_SCHEDULED_TIMER_HANDLES -
                                   add_cancel_count, 0)
        # Add some events that will not be cancelled
        not_cancelled_count += add_not_cancel_count
        for x in range(add_not_cancel_count):
            self.loop.call_later(3600, cb)
        # Add enough cancelled events
        cancelled_count += add_cancel_count
        for x in range(add_cancel_count):
            h = self.loop.call_later(3600, cb)
            h.cancel()
        # Ensure all handles are still scheduled
        self.assertEqual(len(self.loop._scheduled),
                         cancelled_count + not_cancelled_count)
        self.loop._run_once()
        # Ensure cancelled events were removed
        self.assertEqual(len(self.loop._scheduled), not_cancelled_count)
        # Ensure only uncancelled events remain scheduled
        self.assertTrue(all([not x._cancelled for x in self.loop._scheduled]))
def test_run_until_complete_type_error(self):
self.assertRaises(TypeError,
self.loop.run_until_complete, 'blah')
def test_run_until_complete_loop(self):
task = asyncio.Future(loop=self.loop)
other_loop = self.new_test_loop()
self.addCleanup(other_loop.close)
self.assertRaises(ValueError,
other_loop.run_until_complete, task)
    def test_subprocess_exec_invalid_args(self):
        """subprocess_exec() raises TypeError for missing/mis-typed program
        arguments and for Popen options it deliberately does not support
        (universal_newlines, shell, bufsize)."""
        args = [sys.executable, '-c', 'pass']
        # missing program parameter (empty args)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol)
        # expected multiple arguments, not a list
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, args)
        # program arguments must be strings, not int
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, sys.executable, 123)
        # universal_newlines, shell, bufsize must not be set
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, *args, universal_newlines=True)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, *args, shell=True)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_exec,
            asyncio.SubprocessProtocol, *args, bufsize=4096)
    def test_subprocess_shell_invalid_args(self):
        """subprocess_shell() raises TypeError when the command is not a
        string, and for the unsupported Popen options
        (universal_newlines, shell, bufsize)."""
        # expected a string, not an int or a list
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, 123)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, [sys.executable, '-c', 'pass'])
        # universal_newlines, shell, bufsize must not be set
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, 'exit 0', universal_newlines=True)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, 'exit 0', shell=True)
        self.assertRaises(TypeError,
            self.loop.run_until_complete, self.loop.subprocess_shell,
            asyncio.SubprocessProtocol, 'exit 0', bufsize=4096)
    def test_default_exc_handler_callback(self):
        """The default exception handler logs exceptions raised from both
        call_soon (Handle) and call_later (TimerHandle) callbacks."""
        self.loop._process_events = mock.Mock()
        def zero_error(fut):
            # Set the result first so the loop gets stopped by the
            # done-callback even though the callback then raises.
            fut.set_result(True)
            1/0
        # Test call_soon (events.Handle)
        with mock.patch('asyncio.base_events.logger') as log:
            fut = asyncio.Future(loop=self.loop)
            self.loop.call_soon(zero_error, fut)
            fut.add_done_callback(lambda fut: self.loop.stop())
            self.loop.run_forever()
            log.error.assert_called_with(
                test_utils.MockPattern('Exception in callback.*zero'),
                exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY))
        # Test call_later (events.TimerHandle)
        with mock.patch('asyncio.base_events.logger') as log:
            fut = asyncio.Future(loop=self.loop)
            self.loop.call_later(0.01, zero_error, fut)
            fut.add_done_callback(lambda fut: self.loop.stop())
            self.loop.run_forever()
            log.error.assert_called_with(
                test_utils.MockPattern('Exception in callback.*zero'),
                exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY))
    def test_default_exc_handler_coro(self):
        """An exception from a coroutine whose result is never retrieved is
        logged when the future is garbage-collected; the exact log shape
        differs between Python 3.4+ and earlier futures implementations."""
        self.loop._process_events = mock.Mock()
        @asyncio.coroutine
        def zero_error_coro():
            yield from asyncio.sleep(0.01, loop=self.loop)
            1/0
        # Test Future.__del__
        with mock.patch('asyncio.base_events.logger') as log:
            fut = asyncio.ensure_future(zero_error_coro(), loop=self.loop)
            fut.add_done_callback(lambda *args: self.loop.stop())
            self.loop.run_forever()
            fut = None  # Trigger Future.__del__ or futures._TracebackLogger
            support.gc_collect()
            if PY34:
                # Future.__del__ in Python 3.4 logs error with
                # an actual exception context
                log.error.assert_called_with(
                    test_utils.MockPattern('.*exception was never retrieved'),
                    exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY))
            else:
                # futures._TracebackLogger logs only textual traceback
                log.error.assert_called_with(
                    test_utils.MockPattern(
                        '.*exception was never retrieved.*ZeroDiv'),
                    exc_info=False)
def test_set_exc_handler_invalid(self):
with self.assertRaisesRegex(TypeError, 'A callable object or None'):
self.loop.set_exception_handler('spam')
    def test_set_exc_handler_custom(self):
        """A custom exception handler replaces the default logging handler
        and receives a context dict with message/exception/handle keys;
        resetting it to None restores default logging."""
        def zero_error():
            1/0
        def run_loop():
            handle = self.loop.call_soon(zero_error)
            self.loop._run_once()
            return handle
        self.loop.set_debug(True)
        self.loop._process_events = mock.Mock()
        self.assertIsNone(self.loop.get_exception_handler())
        mock_handler = mock.Mock()
        self.loop.set_exception_handler(mock_handler)
        self.assertIs(self.loop.get_exception_handler(), mock_handler)
        handle = run_loop()
        # source_traceback is only populated because debug mode is on.
        mock_handler.assert_called_with(self.loop, {
            'exception': MOCK_ANY,
            'message': test_utils.MockPattern(
                'Exception in callback.*zero_error'),
            'handle': handle,
            'source_traceback': handle._source_traceback,
        })
        mock_handler.reset_mock()
        self.loop.set_exception_handler(None)
        with mock.patch('asyncio.base_events.logger') as log:
            run_loop()
            log.error.assert_called_with(
                test_utils.MockPattern(
                    'Exception in callback.*zero'),
                exc_info=(ZeroDivisionError, MOCK_ANY, MOCK_ANY))
        assert not mock_handler.called
    def test_set_exc_handler_broken(self):
        """If the custom exception handler itself raises, the loop logs
        'Unhandled error in exception handler' with the handler's error."""
        def run_loop():
            def zero_error():
                1/0
            self.loop.call_soon(zero_error)
            self.loop._run_once()
        def handler(loop, context):
            raise AttributeError('spam')
        self.loop._process_events = mock.Mock()
        self.loop.set_exception_handler(handler)
        with mock.patch('asyncio.base_events.logger') as log:
            run_loop()
            log.error.assert_called_with(
                test_utils.MockPattern(
                    'Unhandled error in exception handler'),
                exc_info=(AttributeError, MOCK_ANY, MOCK_ANY))
    def test_default_exc_handler_broken(self):
        """A buggy default_exception_handler() is caught and logged, both
        when it is invoked directly and when it is invoked as the fallback
        for a failing custom handler; the original context must still be
        forwarded to it in the latter case."""
        _context = None
        class Loop(base_events.BaseEventLoop):
            _selector = mock.Mock()
            _process_events = mock.Mock()
            def default_exception_handler(self, context):
                nonlocal _context
                _context = context
                # Simulates custom buggy "default_exception_handler"
                raise ValueError('spam')
        loop = Loop()
        self.addCleanup(loop.close)
        asyncio.set_event_loop(loop)
        def run_loop():
            def zero_error():
                1/0
            loop.call_soon(zero_error)
            loop._run_once()
        with mock.patch('asyncio.base_events.logger') as log:
            run_loop()
            log.error.assert_called_with(
                'Exception in default exception handler',
                exc_info=True)
        def custom_handler(loop, context):
            raise ValueError('ham')
        _context = None
        loop.set_exception_handler(custom_handler)
        with mock.patch('asyncio.base_events.logger') as log:
            run_loop()
            log.error.assert_called_with(
                test_utils.MockPattern('Exception in default exception.*'
                                       'while handling.*in custom'),
                exc_info=True)
            # Check that original context was passed to default
            # exception handler.
            self.assertIn('context', _context)
            self.assertIs(type(_context['context']['exception']),
                          ZeroDivisionError)
    def test_set_task_factory_invalid(self):
        """set_task_factory() rejects non-callables and leaves the factory
        unset."""
        with self.assertRaisesRegex(
            TypeError, 'task factory must be a callable or None'):
            self.loop.set_task_factory(1)
        self.assertIsNone(self.loop.get_task_factory())
    def test_set_task_factory(self):
        """create_task() uses the installed task factory when one is set,
        and falls back to plain asyncio.Task after it is reset to None."""
        self.loop._process_events = mock.Mock()
        class MyTask(asyncio.Task):
            pass
        @asyncio.coroutine
        def coro():
            pass
        factory = lambda loop, coro: MyTask(coro, loop=loop)
        self.assertIsNone(self.loop.get_task_factory())
        self.loop.set_task_factory(factory)
        self.assertIs(self.loop.get_task_factory(), factory)
        task = self.loop.create_task(coro())
        self.assertTrue(isinstance(task, MyTask))
        self.loop.run_until_complete(task)
        self.loop.set_task_factory(None)
        self.assertIsNone(self.loop.get_task_factory())
        task = self.loop.create_task(coro())
        self.assertTrue(isinstance(task, asyncio.Task))
        self.assertFalse(isinstance(task, MyTask))
        self.loop.run_until_complete(task)
    def test_env_var_debug(self):
        """Loop debug mode follows the PYTHONASYNCIODEBUG environment
        variable: off by default, on for a non-empty value, and ignored
        when the interpreter runs with -E."""
        code = '\n'.join((
            'import asyncio',
            'loop = asyncio.get_event_loop()',
            'print(loop.get_debug())'))
        # Test with -E to not fail if the unit test was run with
        # PYTHONASYNCIODEBUG set to a non-empty string
        sts, stdout, stderr = assert_python_ok('-E', '-c', code)
        self.assertEqual(stdout.rstrip(), b'False')
        sts, stdout, stderr = assert_python_ok('-c', code,
                                               PYTHONASYNCIODEBUG='')
        self.assertEqual(stdout.rstrip(), b'False')
        sts, stdout, stderr = assert_python_ok('-c', code,
                                               PYTHONASYNCIODEBUG='1')
        self.assertEqual(stdout.rstrip(), b'True')
        sts, stdout, stderr = assert_python_ok('-E', '-c', code,
                                               PYTHONASYNCIODEBUG='1')
        self.assertEqual(stdout.rstrip(), b'False')
    def test_create_task(self):
        """ensure_future() defers task creation to the loop's create_task()
        override, so a custom loop can return a custom Task subclass."""
        class MyTask(asyncio.Task):
            pass
        @asyncio.coroutine
        def test():
            pass
        class EventLoop(base_events.BaseEventLoop):
            def create_task(self, coro):
                return MyTask(coro, loop=loop)
        loop = EventLoop()
        self.set_event_loop(loop)
        coro = test()
        task = asyncio.ensure_future(coro, loop=loop)
        self.assertIsInstance(task, MyTask)
        # make warnings quiet
        task._log_destroy_pending = False
        coro.close()
    def test_run_forever_keyboard_interrupt(self):
        """A KeyboardInterrupt escaping run_until_complete() must not be
        reported through the loop's exception handler (Python issue #22601)."""
        # Python issue #22601: ensure that the temporary task created by
        # run_forever() consumes the KeyboardInterrupt and so don't log
        # a warning
        @asyncio.coroutine
        def raise_keyboard_interrupt():
            raise KeyboardInterrupt
        self.loop._process_events = mock.Mock()
        self.loop.call_exception_handler = mock.Mock()
        try:
            self.loop.run_until_complete(raise_keyboard_interrupt())
        except KeyboardInterrupt:
            pass
        self.loop.close()
        support.gc_collect()
        self.assertFalse(self.loop.call_exception_handler.called)
    def test_run_until_complete_baseexception(self):
        """After a BaseException aborts run_until_complete(), no stale
        stop() call may be left pending: a subsequent run_forever() must
        keep running until explicitly stopped (Python issue #22429)."""
        # Python issue #22429: run_until_complete() must not schedule a pending
        # call to stop() if the future raised a BaseException
        @asyncio.coroutine
        def raise_keyboard_interrupt():
            raise KeyboardInterrupt
        self.loop._process_events = mock.Mock()
        try:
            self.loop.run_until_complete(raise_keyboard_interrupt())
        except KeyboardInterrupt:
            pass
        def func():
            self.loop.stop()
            func.called = True
        func.called = False
        try:
            self.loop.call_soon(func)
            self.loop.run_forever()
        except KeyboardInterrupt:
            pass
        # func must have run: the loop did not stop before reaching it.
        self.assertTrue(func.called)
    def test_single_selecter_event_callback_after_stopping(self):
        """A stopped loop must not replay selector event callbacks, so each
        event's callback chain runs exactly once across restarts
        (Python issue #25593)."""
        # Python issue #25593: A stopped event loop may cause event callbacks
        # to run more than once.
        event_sentinel = object()
        callcount = 0
        doer = None
        def proc_events(event_list):
            nonlocal doer
            if event_sentinel in event_list:
                doer = self.loop.call_soon(do_event)
        def do_event():
            nonlocal callcount
            callcount += 1
            self.loop.call_soon(clear_selector)
        def clear_selector():
            doer.cancel()
            self.loop._selector.select.return_value = ()
        self.loop._process_events = proc_events
        self.loop._selector.select.return_value = (event_sentinel,)
        for i in range(1, 3):
            with self.subTest('Loop %d/2' % i):
                self.loop.call_soon(self.loop.stop)
                self.loop.run_forever()
                self.assertEqual(callcount, 1)
    def test_run_once(self):
        """Exercise the test_utils.run_once() helper idiom."""
        # Simple test for test_utils.run_once(). It may seem strange
        # to have a test for this (the function isn't even used!) but
        # it's a de-facto standard API for library tests. This tests
        # the idiom: loop.call_soon(loop.stop); loop.run_forever().
        count = 0
        def callback():
            nonlocal count
            count += 1
        self.loop._process_events = mock.Mock()
        self.loop.call_soon(callback)
        test_utils.run_once(self.loop)
        self.assertEqual(count, 1)
    def test_run_forever_pre_stopped(self):
        """Calling stop() before run_forever() makes the loop return after
        a single zero-timeout selector poll (the old pre-stop idiom)."""
        # Test that the old idiom for pre-stopping the loop works.
        self.loop._process_events = mock.Mock()
        self.loop.stop()
        self.loop.run_forever()
        self.loop._selector.select.assert_called_once_with(0)
class MyProto(asyncio.Protocol):
    """Stream protocol used by the tests.

    Tracks its lifecycle through the states INITIAL -> CONNECTED -> EOF ->
    CLOSED, counts received bytes, and writes one HTTP request on connect.
    """

    # Completion future; populated in connection_lost() when requested.
    done = None

    def __init__(self, create_future=False):
        self.state = 'INITIAL'
        self.nbytes = 0
        if create_future:
            self.done = asyncio.Future()

    def _transition(self, allowed, new_state):
        # Enforce the expected callback ordering of the state machine.
        assert self.state in allowed, self.state
        self.state = new_state

    def connection_made(self, transport):
        self.transport = transport
        self._transition(('INITIAL',), 'CONNECTED')
        transport.write(b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n')

    def data_received(self, data):
        assert self.state == 'CONNECTED', self.state
        self.nbytes += len(data)

    def eof_received(self):
        self._transition(('CONNECTED',), 'EOF')

    def connection_lost(self, exc):
        self._transition(('CONNECTED', 'EOF'), 'CLOSED')
        if self.done is not None:
            self.done.set_result(None)
class MyDatagramProto(asyncio.DatagramProtocol):
    """Datagram protocol used by the tests.

    Tracks its lifecycle through INITIAL -> INITIALIZED -> CLOSED and
    counts bytes received in datagrams.
    """

    # Completion future; populated in connection_lost() when requested.
    done = None

    def __init__(self, create_future=False, loop=None):
        self.state = 'INITIAL'
        self.nbytes = 0
        if create_future:
            self.done = asyncio.Future(loop=loop)

    def _expect(self, expected):
        # Guard against out-of-order transport callbacks.
        assert self.state == expected, self.state

    def connection_made(self, transport):
        self.transport = transport
        self._expect('INITIAL')
        self.state = 'INITIALIZED'

    def datagram_received(self, data, addr):
        self._expect('INITIALIZED')
        self.nbytes += len(data)

    def error_received(self, exc):
        self._expect('INITIALIZED')

    def connection_lost(self, exc):
        self._expect('INITIALIZED')
        self.state = 'CLOSED'
        if self.done is not None:
            self.done.set_result(None)
class BaseEventLoopWithSelectorTests(test_utils.TestCase):
    def setUp(self):
        """Create a fresh event loop and register it with the test harness
        so it is torn down automatically."""
        self.loop = asyncio.new_event_loop()
        self.set_event_loop(self.loop)
    @patch_socket
    def test_create_connection_multiple_errors(self, m_socket):
        """When every resolved address fails to connect, create_connection()
        raises a single OSError aggregating all the individual errors."""
        class MyProto(asyncio.Protocol):
            pass
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            yield from []
            return [(2, 1, 6, '', ('107.6.106.82', 80)),
                    (2, 1, 6, '', ('107.6.106.82', 80))]
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        # Each socket() creation fails with the next canned error.
        idx = -1
        errors = ['err1', 'err2']
        def _socket(*args, **kw):
            nonlocal idx, errors
            idx += 1
            raise OSError(errors[idx])
        m_socket.socket = _socket
        self.loop.getaddrinfo = getaddrinfo_task
        coro = self.loop.create_connection(MyProto, 'example.com', 80)
        with self.assertRaises(OSError) as cm:
            self.loop.run_until_complete(coro)
        self.assertEqual(str(cm.exception), 'Multiple exceptions: err1, err2')
    @patch_socket
    def test_create_connection_timeout(self, m_socket):
        """If sock_connect() times out, create_connection() closes the
        socket it created before propagating the TimeoutError."""
        # Ensure that the socket is closed on timeout
        sock = mock.Mock()
        m_socket.socket.return_value = sock
        def getaddrinfo(*args, **kw):
            fut = asyncio.Future(loop=self.loop)
            addr = (socket.AF_INET, socket.SOCK_STREAM, 0, '',
                    ('127.0.0.1', 80))
            fut.set_result([addr])
            return fut
        self.loop.getaddrinfo = getaddrinfo
        with mock.patch.object(self.loop, 'sock_connect',
                               side_effect=asyncio.TimeoutError):
            coro = self.loop.create_connection(MyProto, '127.0.0.1', 80)
            with self.assertRaises(asyncio.TimeoutError):
                self.loop.run_until_complete(coro)
            self.assertTrue(sock.close.called)
def test_create_connection_host_port_sock(self):
coro = self.loop.create_connection(
MyProto, 'example.com', 80, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
def test_create_connection_no_host_port_sock(self):
coro = self.loop.create_connection(MyProto)
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
    def test_create_connection_no_getaddrinfo(self):
        """An empty name-resolution result makes create_connection() fail
        with OSError."""
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            yield from []
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        coro = self.loop.create_connection(MyProto, 'example.com', 80)
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    def test_create_connection_connect_err(self):
        """A failing sock_connect() surfaces as OSError from
        create_connection()."""
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            yield from []
            return [(2, 1, 6, '', ('107.6.106.82', 80))]
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.side_effect = OSError
        coro = self.loop.create_connection(MyProto, 'example.com', 80)
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    def test_create_connection_multiple(self):
        """When connecting fails for every resolved address, the final
        error is an OSError."""
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            return [(2, 1, 6, '', ('0.0.0.1', 80)),
                    (2, 1, 6, '', ('0.0.0.2', 80))]
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.side_effect = OSError
        coro = self.loop.create_connection(
            MyProto, 'example.com', 80, family=socket.AF_INET)
        with self.assertRaises(OSError):
            self.loop.run_until_complete(coro)
    @patch_socket
    def test_create_connection_multiple_errors_local_addr(self, m_socket):
        """bind() and connect() failures across multiple addresses are
        aggregated into one 'Multiple exceptions' OSError, and the
        created socket is closed."""
        def bind(addr):
            if addr[0] == '0.0.0.1':
                err = OSError('Err')
                err.strerror = 'Err'
                raise err
        m_socket.socket.return_value.bind = bind
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            return [(2, 1, 6, '', ('0.0.0.1', 80)),
                    (2, 1, 6, '', ('0.0.0.2', 80))]
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.side_effect = OSError('Err2')
        coro = self.loop.create_connection(
            MyProto, 'example.com', 80, family=socket.AF_INET,
            local_addr=(None, 8080))
        with self.assertRaises(OSError) as cm:
            self.loop.run_until_complete(coro)
        self.assertTrue(str(cm.exception).startswith('Multiple exceptions: '))
        self.assertTrue(m_socket.socket.return_value.close.called)
    def _test_create_connection_ip_addr(self, m_socket, allow_inet_pton):
        """Shared driver: connect to literal IPv4/IPv6 addresses and verify
        the right family/type are used, with or without inet_pton available
        (without it, address recognition falls back to getaddrinfo)."""
        # Test the fallback code, even if this system has inet_pton.
        if not allow_inet_pton:
            del m_socket.inet_pton
        m_socket.getaddrinfo = socket.getaddrinfo
        sock = m_socket.socket.return_value
        self.loop.add_reader = mock.Mock()
        self.loop.add_reader._is_coroutine = False
        self.loop.add_writer = mock.Mock()
        self.loop.add_writer._is_coroutine = False
        coro = self.loop.create_connection(asyncio.Protocol, '1.2.3.4', 80)
        t, p = self.loop.run_until_complete(coro)
        try:
            sock.connect.assert_called_with(('1.2.3.4', 80))
            _, kwargs = m_socket.socket.call_args
            self.assertEqual(kwargs['family'], m_socket.AF_INET)
            self.assertEqual(kwargs['type'], m_socket.SOCK_STREAM)
        finally:
            t.close()
            test_utils.run_briefly(self.loop)  # allow transport to close
        sock.family = socket.AF_INET6
        coro = self.loop.create_connection(asyncio.Protocol, '::1', 80)
        t, p = self.loop.run_until_complete(coro)
        try:
            # Without inet_pton we use getaddrinfo, which transforms ('::1', 80)
            # to ('::1', 80, 0, 0). The last 0s are flow info, scope id.
            [address] = sock.connect.call_args[0]
            host, port = address[:2]
            self.assertRegex(host, r'::(0\.)*1')
            self.assertEqual(port, 80)
            _, kwargs = m_socket.socket.call_args
            self.assertEqual(kwargs['family'], m_socket.AF_INET6)
            self.assertEqual(kwargs['type'], m_socket.SOCK_STREAM)
        finally:
            t.close()
            test_utils.run_briefly(self.loop)  # allow transport to close
    @patch_socket
    def test_create_connection_ip_addr(self, m_socket):
        """Literal IP connect with inet_pton available."""
        self._test_create_connection_ip_addr(m_socket, True)
    @patch_socket
    def test_create_connection_no_inet_pton(self, m_socket):
        """Literal IP connect via the getaddrinfo fallback (no inet_pton)."""
        self._test_create_connection_ip_addr(m_socket, False)
    def test_create_connection_no_local_addr(self):
        """When the requested local_addr cannot be resolved (getaddrinfo
        returns nothing for it), create_connection() fails with OSError."""
        @asyncio.coroutine
        def getaddrinfo(host, *args, **kw):
            if host == 'example.com':
                return [(2, 1, 6, '', ('107.6.106.82', 80)),
                        (2, 1, 6, '', ('107.6.106.82', 80))]
            else:
                return []
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        coro = self.loop.create_connection(
            MyProto, 'example.com', 80, family=socket.AF_INET,
            local_addr=(None, 8080))
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    @patch_socket
    def test_create_connection_bluetooth(self, m_socket):
        """Addresses that are not recognizable as IP literals (e.g. a
        Bluetooth address) fall back to getaddrinfo resolution."""
        # See http://bugs.python.org/issue27136, fallback to getaddrinfo when
        # we can't recognize an address is resolved, e.g. a Bluetooth address.
        addr = ('00:01:02:03:04:05', 1)
        def getaddrinfo(host, port, *args, **kw):
            assert (host, port) == addr
            return [(999, 1, 999, '', (addr, 1))]
        m_socket.getaddrinfo = getaddrinfo
        sock = m_socket.socket()
        coro = self.loop.sock_connect(sock, addr)
        self.loop.run_until_complete(coro)
    def test_create_connection_ssl_server_hostname_default(self):
        """With ssl=True, _make_ssl_transport() receives the connection host
        as server_hostname by default, an explicit server_hostname when
        given, and the empty string when explicitly passed as ''."""
        self.loop.getaddrinfo = mock.Mock()
        def mock_getaddrinfo(*args, **kwds):
            f = asyncio.Future(loop=self.loop)
            f.set_result([(socket.AF_INET, socket.SOCK_STREAM,
                           socket.SOL_TCP, '', ('1.2.3.4', 80))])
            return f
        self.loop.getaddrinfo.side_effect = mock_getaddrinfo
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.return_value = ()
        self.loop._make_ssl_transport = mock.Mock()
        class _SelectorTransportMock:
            _sock = None
            def get_extra_info(self, key):
                return mock.Mock()
            def close(self):
                self._sock.close()
        def mock_make_ssl_transport(sock, protocol, sslcontext, waiter,
                                    **kwds):
            # Resolve the waiter so create_connection() can finish.
            waiter.set_result(None)
            transport = _SelectorTransportMock()
            transport._sock = sock
            return transport
        self.loop._make_ssl_transport.side_effect = mock_make_ssl_transport
        ANY = mock.ANY
        # First try the default server_hostname.
        self.loop._make_ssl_transport.reset_mock()
        coro = self.loop.create_connection(MyProto, 'python.org', 80, ssl=True)
        transport, _ = self.loop.run_until_complete(coro)
        transport.close()
        self.loop._make_ssl_transport.assert_called_with(
            ANY, ANY, ANY, ANY,
            server_side=False,
            server_hostname='python.org')
        # Next try an explicit server_hostname.
        self.loop._make_ssl_transport.reset_mock()
        coro = self.loop.create_connection(MyProto, 'python.org', 80, ssl=True,
                                           server_hostname='perl.com')
        transport, _ = self.loop.run_until_complete(coro)
        transport.close()
        self.loop._make_ssl_transport.assert_called_with(
            ANY, ANY, ANY, ANY,
            server_side=False,
            server_hostname='perl.com')
        # Finally try an explicit empty server_hostname.
        self.loop._make_ssl_transport.reset_mock()
        coro = self.loop.create_connection(MyProto, 'python.org', 80, ssl=True,
                                           server_hostname='')
        transport, _ = self.loop.run_until_complete(coro)
        transport.close()
        self.loop._make_ssl_transport.assert_called_with(ANY, ANY, ANY, ANY,
                                                         server_side=False,
                                                         server_hostname='')
def test_create_connection_no_ssl_server_hostname_errors(self):
# When not using ssl, server_hostname must be None.
coro = self.loop.create_connection(MyProto, 'python.org', 80,
server_hostname='')
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
coro = self.loop.create_connection(MyProto, 'python.org', 80,
server_hostname='python.org')
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
    def test_create_connection_ssl_server_hostname_errors(self):
        """With ssl=True, server_hostname may only be omitted when a
        non-empty host is given; an empty/None host, or a pre-made socket
        without server_hostname, raises ValueError."""
        # When using ssl, server_hostname may be None if host is non-empty.
        coro = self.loop.create_connection(MyProto, '', 80, ssl=True)
        self.assertRaises(ValueError, self.loop.run_until_complete, coro)
        coro = self.loop.create_connection(MyProto, None, 80, ssl=True)
        self.assertRaises(ValueError, self.loop.run_until_complete, coro)
        sock = socket.socket()
        coro = self.loop.create_connection(MyProto, None, None,
                                           ssl=True, sock=sock)
        self.addCleanup(sock.close)
        self.assertRaises(ValueError, self.loop.run_until_complete, coro)
    def test_create_server_empty_host(self):
        """create_server() converts an empty host string to None before
        calling getaddrinfo()."""
        # if host is empty string use None instead
        host = object()
        @asyncio.coroutine
        def getaddrinfo(*args, **kw):
            nonlocal host
            host = args[0]
            yield from []
        def getaddrinfo_task(*args, **kwds):
            return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
        self.loop.getaddrinfo = getaddrinfo_task
        fut = self.loop.create_server(MyProto, '', 0)
        self.assertRaises(OSError, self.loop.run_until_complete, fut)
        self.assertIsNone(host)
def test_create_server_host_port_sock(self):
fut = self.loop.create_server(
MyProto, '0.0.0.0', 0, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
def test_create_server_no_host_port_sock(self):
fut = self.loop.create_server(MyProto)
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
def test_create_server_no_getaddrinfo(self):
getaddrinfo = self.loop.getaddrinfo = mock.Mock()
getaddrinfo.return_value = []
f = self.loop.create_server(MyProto, 'python.org', 0)
self.assertRaises(OSError, self.loop.run_until_complete, f)
    @patch_socket
    def test_create_server_nosoreuseport(self, m_socket):
        """Requesting reuse_port=True on a platform without SO_REUSEPORT
        raises ValueError."""
        m_socket.getaddrinfo = socket.getaddrinfo
        # Simulate a platform without SO_REUSEPORT support.
        del m_socket.SO_REUSEPORT
        m_socket.socket.return_value = mock.Mock()
        f = self.loop.create_server(
            MyProto, '0.0.0.0', 0, reuse_port=True)
        self.assertRaises(ValueError, self.loop.run_until_complete, f)
    @patch_socket
    def test_create_server_cant_bind(self, m_socket):
        """A bind() failure surfaces as OSError from create_server(), and
        the socket created for the attempt is closed."""
        class Err(OSError):
            strerror = 'error'
        m_socket.getaddrinfo.return_value = [
            (2, 1, 6, '', ('127.0.0.1', 10100))]
        m_socket.getaddrinfo._is_coroutine = False
        m_sock = m_socket.socket.return_value = mock.Mock()
        m_sock.bind.side_effect = Err
        fut = self.loop.create_server(MyProto, '0.0.0.0', 0)
        self.assertRaises(OSError, self.loop.run_until_complete, fut)
        self.assertTrue(m_sock.close.called)
    @patch_socket
    def test_create_datagram_endpoint_no_addrinfo(self, m_socket):
        """An empty name-resolution result makes create_datagram_endpoint()
        fail with OSError."""
        m_socket.getaddrinfo.return_value = []
        m_socket.getaddrinfo._is_coroutine = False
        coro = self.loop.create_datagram_endpoint(
            MyDatagramProto, local_addr=('localhost', 0))
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
def test_create_datagram_endpoint_addr_error(self):
coro = self.loop.create_datagram_endpoint(
MyDatagramProto, local_addr='localhost')
self.assertRaises(
AssertionError, self.loop.run_until_complete, coro)
coro = self.loop.create_datagram_endpoint(
MyDatagramProto, local_addr=('localhost', 1, 2, 3))
self.assertRaises(
AssertionError, self.loop.run_until_complete, coro)
    def test_create_datagram_endpoint_connect_err(self):
        """A failing sock_connect() surfaces as OSError from
        create_datagram_endpoint()."""
        self.loop.sock_connect = mock.Mock()
        self.loop.sock_connect.side_effect = OSError
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol, remote_addr=('127.0.0.1', 0))
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    @patch_socket
    def test_create_datagram_endpoint_socket_err(self, m_socket):
        """A failure creating the socket surfaces as OSError, both with an
        explicit family and with a local address."""
        m_socket.getaddrinfo = socket.getaddrinfo
        m_socket.socket.side_effect = OSError
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol, family=socket.AF_INET)
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol, local_addr=('127.0.0.1', 0))
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
    @unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled')
    def test_create_datagram_endpoint_no_matching_family(self):
        """Mixing an IPv4 remote address with an IPv6 local address raises
        ValueError (no common address family)."""
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol,
            remote_addr=('127.0.0.1', 0), local_addr=('::1', 0))
        self.assertRaises(
            ValueError, self.loop.run_until_complete, coro)
    @patch_socket
    def test_create_datagram_endpoint_setblk_err(self, m_socket):
        """If setblocking() fails, the error propagates as OSError and the
        freshly created socket is closed."""
        m_socket.socket.return_value.setblocking.side_effect = OSError
        coro = self.loop.create_datagram_endpoint(
            asyncio.DatagramProtocol, family=socket.AF_INET)
        self.assertRaises(
            OSError, self.loop.run_until_complete, coro)
        self.assertTrue(
            m_socket.socket.return_value.close.called)
def test_create_datagram_endpoint_noaddr_nofamily(self):
coro = self.loop.create_datagram_endpoint(
asyncio.DatagramProtocol)
self.assertRaises(ValueError, self.loop.run_until_complete, coro)
    @patch_socket
    def test_create_datagram_endpoint_cant_bind(self, m_socket):
        """A bind() failure propagates unchanged (the original exception
        type), and the socket created for the attempt is closed."""
        class Err(OSError):
            pass
        m_socket.getaddrinfo = socket.getaddrinfo
        m_sock = m_socket.socket.return_value = mock.Mock()
        m_sock.bind.side_effect = Err
        fut = self.loop.create_datagram_endpoint(
            MyDatagramProto,
            local_addr=('127.0.0.1', 0), family=socket.AF_INET)
        self.assertRaises(Err, self.loop.run_until_complete, fut)
        self.assertTrue(m_sock.close.called)
    def test_create_datagram_endpoint_sock(self):
        """A pre-bound UDP socket can be handed to
        create_datagram_endpoint() directly; closing the transport
        completes the protocol."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.bind(('127.0.0.1', 0))
        fut = self.loop.create_datagram_endpoint(
            lambda: MyDatagramProto(create_future=True, loop=self.loop),
            sock=sock)
        transport, protocol = self.loop.run_until_complete(fut)
        transport.close()
        self.loop.run_until_complete(protocol.done)
        self.assertEqual('CLOSED', protocol.state)
def test_create_datagram_endpoint_sock_sockopts(self):
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, local_addr=('127.0.0.1', 0), sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, remote_addr=('127.0.0.1', 0), sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, family=1, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, proto=1, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, flags=1, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, reuse_address=True, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, reuse_port=True, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
fut = self.loop.create_datagram_endpoint(
MyDatagramProto, allow_broadcast=True, sock=object())
self.assertRaises(ValueError, self.loop.run_until_complete, fut)
def test_create_datagram_endpoint_sockopts(self):
    """Socket options must only be applied when explicitly requested.

    SO_REUSEADDR defaults to on for UNIX; SO_REUSEPORT is not available
    on all platforms.
    """
    coro = self.loop.create_datagram_endpoint(
        lambda: MyDatagramProto(create_future=True, loop=self.loop),
        local_addr=('127.0.0.1', 0))
    transport, protocol = self.loop.run_until_complete(coro)
    sock = transport.get_extra_info('socket')

    def opt(name):
        # Reads the *current* ``sock`` binding, so it also works after
        # the second endpoint below rebinds it.
        return sock.getsockopt(socket.SOL_SOCKET, name)

    reuse_address_default_on = (
        os.name == 'posix' and sys.platform != 'cygwin')
    reuseport_supported = hasattr(socket, 'SO_REUSEPORT')
    if reuse_address_default_on:
        self.assertTrue(opt(socket.SO_REUSEADDR))
    else:
        self.assertFalse(opt(socket.SO_REUSEADDR))
    if reuseport_supported:
        self.assertFalse(opt(socket.SO_REUSEPORT))
    self.assertFalse(opt(socket.SO_BROADCAST))
    transport.close()
    self.loop.run_until_complete(protocol.done)
    self.assertEqual('CLOSED', protocol.state)

    # Now request the options explicitly and verify they are all set.
    coro = self.loop.create_datagram_endpoint(
        lambda: MyDatagramProto(create_future=True, loop=self.loop),
        local_addr=('127.0.0.1', 0),
        reuse_address=True,
        reuse_port=reuseport_supported,
        allow_broadcast=True)
    transport, protocol = self.loop.run_until_complete(coro)
    sock = transport.get_extra_info('socket')
    self.assertTrue(opt(socket.SO_REUSEADDR))
    if reuseport_supported:
        self.assertTrue(opt(socket.SO_REUSEPORT))
    self.assertTrue(opt(socket.SO_BROADCAST))
    transport.close()
    self.loop.run_until_complete(protocol.done)
    self.assertEqual('CLOSED', protocol.state)
@patch_socket
def test_create_datagram_endpoint_nosoreuseport(self, m_socket):
    """reuse_port=True must raise when the platform lacks SO_REUSEPORT."""
    del m_socket.SO_REUSEPORT
    m_socket.socket.return_value = mock.Mock()
    fut = self.loop.create_datagram_endpoint(
        lambda: MyDatagramProto(loop=self.loop),
        local_addr=('127.0.0.1', 0),
        reuse_address=False,
        reuse_port=True)
    self.assertRaises(ValueError, self.loop.run_until_complete, fut)
@patch_socket
def test_create_datagram_endpoint_ip_addr(self, m_socket):
    """A literal IP address is used as-is, with no getaddrinfo lookup."""
    def getaddrinfo(*args, **kw):
        self.fail('should not have called getaddrinfo')
    m_socket.getaddrinfo = getaddrinfo
    bind = m_socket.socket.return_value.bind = mock.Mock()
    self.loop.add_reader = mock.Mock()
    self.loop.add_reader._is_coroutine = False
    has_reuseport = hasattr(socket, 'SO_REUSEPORT')
    coro = self.loop.create_datagram_endpoint(
        lambda: MyDatagramProto(loop=self.loop),
        local_addr=('1.2.3.4', 0),
        reuse_address=False,
        reuse_port=has_reuseport)
    transport, _protocol = self.loop.run_until_complete(coro)
    try:
        bind.assert_called_with(('1.2.3.4', 0))
        m_socket.socket.assert_called_with(family=m_socket.AF_INET,
                                           proto=m_socket.IPPROTO_UDP,
                                           type=m_socket.SOCK_DGRAM)
    finally:
        transport.close()
        test_utils.run_briefly(self.loop)  # allow transport to close
def test_accept_connection_retry(self):
    """A transient BlockingIOError from accept() must not close the socket."""
    listener = mock.Mock()
    listener.accept.side_effect = BlockingIOError()
    self.loop._accept_connection(MyProto, listener)
    self.assertFalse(listener.close.called)
@mock.patch('asyncio.base_events.logger')
def test_accept_connection_exception(self, m_log):
    """EMFILE from accept() is logged; serving is paused and retried later."""
    listener = mock.Mock()
    listener.fileno.return_value = 10
    listener.accept.side_effect = OSError(errno.EMFILE, 'Too many open files')
    self.loop.remove_reader = mock.Mock()
    self.loop.call_later = mock.Mock()
    self.loop._accept_connection(MyProto, listener)
    self.assertTrue(m_log.error.called)
    self.assertFalse(listener.close.called)
    self.loop.remove_reader.assert_called_with(10)
    self.loop.call_later.assert_called_with(
        constants.ACCEPT_RETRY_DELAY,
        mock.ANY,  # self.loop._start_serving
        MyProto, listener, None, None)
def test_call_coroutine(self):
    """Coroutine functions and objects are rejected by the call_* APIs."""
    @asyncio.coroutine
    def simple_coroutine():
        pass
    coro_func = simple_coroutine
    coro_obj = coro_func()
    self.addCleanup(coro_obj.close)
    # Every scheduling entry point must refuse both forms with TypeError;
    # same order as the original individual checks.
    schedulers = (
        lambda f: self.loop.call_soon(f),
        lambda f: self.loop.call_soon_threadsafe(f),
        lambda f: self.loop.call_later(60, f),
        lambda f: self.loop.call_at(self.loop.time() + 60, f),
        lambda f: self.loop.run_in_executor(None, f),
    )
    for target in (coro_func, coro_obj):
        for schedule in schedulers:
            with self.assertRaises(TypeError):
                schedule(target)
@mock.patch('asyncio.base_events.logger')
def test_log_slow_callbacks(self, m_logger):
    """In debug mode, callbacks/tasks over slow_callback_duration are logged."""
    def stop_loop_cb(loop):
        loop.stop()

    @asyncio.coroutine
    def stop_loop_coro(loop):
        yield from ()
        loop.stop()

    asyncio.set_event_loop(self.loop)
    self.loop.set_debug(True)
    self.loop.slow_callback_duration = 0.0  # every callback counts as "slow"

    def last_warning():
        # Render the most recent lazily-formatted warning message.
        fmt, *args = m_logger.warning.call_args[0]
        return fmt % tuple(args)

    # slow callback
    self.loop.call_soon(stop_loop_cb, self.loop)
    self.loop.run_forever()
    self.assertRegex(last_warning(),
                     "^Executing <Handle.*stop_loop_cb.*> "
                     "took .* seconds$")
    # slow task
    asyncio.ensure_future(stop_loop_coro(self.loop), loop=self.loop)
    self.loop.run_forever()
    self.assertRegex(last_warning(),
                     "^Executing <Task.*stop_loop_coro.*> "
                     "took .* seconds$")
# Run the tests in this module when it is executed as a script.
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
]
| |
ef06ee1a907c60dea3b5c90465285f9435c942f6 | 2f5d5149c064dfd18c0456db4326eca8b3cdb843 | /pqmf.py | 871405affaee1a846645216bf7716d9611fd1e26 | [
"MIT"
]
| permissive | BridgetteSong/multiband-hifigan | a201cebc29d8d7b0d166303e824aca9651134cff | 718a8117211ceb735c8916cfae8662a68dad8876 | refs/heads/master | 2023-03-11T14:59:26.101235 | 2021-03-02T11:44:52 | 2021-03-02T11:44:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,832 | py | import numpy as np
import torch
import torch.nn.functional as F
from scipy import signal as sig
# adapted from
# https://github.com/kan-bayashi/ParallelWaveGAN/tree/master/parallel_wavegan
class PQMF(torch.nn.Module):
    """Pseudo-QMF analysis/synthesis filter bank.

    Splits a mono waveform into ``N`` critically-sampled subbands
    (:meth:`analysis`) and reconstructs the full-band signal from them
    (:meth:`synthesis`), using a Kaiser-windowed prototype lowpass filter
    with cosine modulation.
    """

    def __init__(self, N=4, taps=62, cutoff=0.15, beta=9.0):
        """Design the prototype filter and the modulated filter banks.

        Args:
            N: number of subbands.
            taps: prototype filter order (filter length is ``taps + 1``).
            cutoff: normalized cutoff frequency of the prototype lowpass.
            beta: Kaiser window shape parameter.
        """
        super(PQMF, self).__init__()
        self.N = N
        self.taps = taps
        self.cutoff = cutoff
        self.beta = beta

        # Prototype lowpass FIR filter, length taps + 1.
        proto = sig.firwin(taps + 1, cutoff, window=('kaiser', beta))
        n = np.arange(taps + 1)
        analysis_bank = np.zeros((N, taps + 1))
        synthesis_bank = np.zeros((N, taps + 1))
        for k in range(N):
            # Cosine modulation centers the prototype on each subband.
            constant_factor = ((2 * k + 1) * (np.pi / (2 * N))
                               * (n - ((taps - 1) / 2)))  # TODO: (taps - 1) -> taps
            phase = (-1) ** k * np.pi / 4
            analysis_bank[k] = 2 * proto * np.cos(constant_factor + phase)
            synthesis_bank[k] = 2 * proto * np.cos(constant_factor - phase)
        # Buffer shapes match conv1d / conv_transpose1d weight layouts.
        self.register_buffer(
            "H", torch.from_numpy(analysis_bank[:, None, :]).float())
        self.register_buffer(
            "G", torch.from_numpy(synthesis_bank[None, :, :]).float())

        # Identity filter used by conv_transpose1d for N-fold upsampling.
        updown_filter = torch.zeros((N, N, N)).float()
        for k in range(N):
            updown_filter[k, k, 0] = 1.0
        self.register_buffer("updown_filter", updown_filter)
        # NOTE(review): kept for interface compatibility; the convolutions
        # below pad via their ``padding`` argument instead of this module.
        self.pad_fn = torch.nn.ConstantPad1d(taps // 2, 0.0)

    def forward(self, x):
        """Alias for :meth:`analysis`."""
        return self.analysis(x)

    def analysis(self, x):
        """Split ``x`` of shape (B, 1, T) into subbands (B, N, T // N)."""
        return F.conv1d(x, self.H, padding=self.taps // 2, stride=self.N)

    def synthesis(self, x):
        """Reconstruct a (B, 1, T) waveform from (B, N, T // N) subbands."""
        upsampled = F.conv_transpose1d(x,
                                       self.updown_filter * self.N,
                                       stride=self.N)
        return F.conv1d(upsampled, self.G, padding=self.taps // 2)
"[email protected]"
]
| |
337f9585cdba8cf63f1a66aea1d6f63fc308bb21 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/actrl/rulehitparthist1h.py | b44a8ec368fbf77dd7ca080e705c28b55f70581d | []
| no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 34,306 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class RuleHitPartHist1h(Mo):
"""
A class that represents historical portion of the statistics for rule hits in a 1 hour sampling interval. This class updates every 15 minutes.
"""
meta = StatsClassMeta("cobra.model.actrl.RuleHitPartHist1h", "rule hits")
counter = CounterMeta("revPkts", CounterCategory.COUNTER, "packets", "reverse hit packets")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "revPktsCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "revPktsPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "revPktsMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "revPktsMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "revPktsAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "revPktsSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "revPktsThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "revPktsTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "revPktsRate"
meta._counters.append(counter)
counter = CounterMeta("pkts", CounterCategory.COUNTER, "packets", "hit packets")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "pktsCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "pktsPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "pktsMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "pktsMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "pktsAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "pktsSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "pktsThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "pktsTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "pktsRate"
meta._counters.append(counter)
counter = CounterMeta("egrPkts", CounterCategory.COUNTER, "packets", "egress hit packets")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "egrPktsCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "egrPktsPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "egrPktsMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "egrPktsMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "egrPktsAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "egrPktsSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "egrPktsThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "egrPktsTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "egrPktsRate"
meta._counters.append(counter)
counter = CounterMeta("ingrPkts", CounterCategory.COUNTER, "packets", "ingress hit packets")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "ingrPktsCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "ingrPktsPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "ingrPktsMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "ingrPktsMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "ingrPktsAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "ingrPktsSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "ingrPktsThr"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "ingrPktsTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "ingrPktsRate"
meta._counters.append(counter)
meta.moClassName = "actrlRuleHitPartHist1h"
meta.rnFormat = "HDactrlRuleHitPart1h-%(index)s-node-%(nodeId)s"
meta.category = MoCategory.STATS_HISTORY
meta.label = "historical portion of the rule hits stats in 1 hour"
meta.writeAccessMask = 0x601
meta.readAccessMask = 0x601
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.parentClasses.add("cobra.model.fv.RInfoHolder")
meta.superClasses.add("cobra.model.stats.HistAgPart")
meta.superClasses.add("cobra.model.stats.Hist")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.actrl.RuleHitPartHist")
meta.rnPrefixes = [
('HDactrlRuleHitPart1h-', True),
('-node-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "egrPktsAvg", "egrPktsAvg", 7471, PropCategory.IMPLICIT_AVG)
prop.label = "egress hit packets average value"
prop.isOper = True
prop.isStats = True
meta.props.add("egrPktsAvg", prop)
prop = PropMeta("str", "egrPktsCum", "egrPktsCum", 7467, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "egress hit packets cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("egrPktsCum", prop)
prop = PropMeta("str", "egrPktsMax", "egrPktsMax", 7470, PropCategory.IMPLICIT_MAX)
prop.label = "egress hit packets maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("egrPktsMax", prop)
prop = PropMeta("str", "egrPktsMin", "egrPktsMin", 7469, PropCategory.IMPLICIT_MIN)
prop.label = "egress hit packets minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("egrPktsMin", prop)
prop = PropMeta("str", "egrPktsPer", "egrPktsPer", 7468, PropCategory.IMPLICIT_PERIODIC)
prop.label = "egress hit packets periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("egrPktsPer", prop)
prop = PropMeta("str", "egrPktsRate", "egrPktsRate", 7475, PropCategory.IMPLICIT_RATE)
prop.label = "egress hit packets rate"
prop.isOper = True
prop.isStats = True
meta.props.add("egrPktsRate", prop)
prop = PropMeta("str", "egrPktsSpct", "egrPktsSpct", 7472, PropCategory.IMPLICIT_SUSPECT)
prop.label = "egress hit packets suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("egrPktsSpct", prop)
prop = PropMeta("str", "egrPktsThr", "egrPktsThr", 7473, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "egress hit packets thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("egrPktsThr", prop)
prop = PropMeta("str", "egrPktsTr", "egrPktsTr", 7474, PropCategory.IMPLICIT_TREND)
prop.label = "egress hit packets trend"
prop.isOper = True
prop.isStats = True
meta.props.add("egrPktsTr", prop)
prop = PropMeta("str", "index", "index", 5843, PropCategory.REGULAR)
prop.label = "History Index"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("index", prop)
prop = PropMeta("str", "ingrPktsAvg", "ingrPktsAvg", 7532, PropCategory.IMPLICIT_AVG)
prop.label = "ingress hit packets average value"
prop.isOper = True
prop.isStats = True
meta.props.add("ingrPktsAvg", prop)
prop = PropMeta("str", "ingrPktsCum", "ingrPktsCum", 7528, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "ingress hit packets cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("ingrPktsCum", prop)
prop = PropMeta("str", "ingrPktsMax", "ingrPktsMax", 7531, PropCategory.IMPLICIT_MAX)
prop.label = "ingress hit packets maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("ingrPktsMax", prop)
prop = PropMeta("str", "ingrPktsMin", "ingrPktsMin", 7530, PropCategory.IMPLICIT_MIN)
prop.label = "ingress hit packets minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("ingrPktsMin", prop)
prop = PropMeta("str", "ingrPktsPer", "ingrPktsPer", 7529, PropCategory.IMPLICIT_PERIODIC)
prop.label = "ingress hit packets periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("ingrPktsPer", prop)
prop = PropMeta("str", "ingrPktsRate", "ingrPktsRate", 7536, PropCategory.IMPLICIT_RATE)
prop.label = "ingress hit packets rate"
prop.isOper = True
prop.isStats = True
meta.props.add("ingrPktsRate", prop)
prop = PropMeta("str", "ingrPktsSpct", "ingrPktsSpct", 7533, PropCategory.IMPLICIT_SUSPECT)
prop.label = "ingress hit packets suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("ingrPktsSpct", prop)
prop = PropMeta("str", "ingrPktsThr", "ingrPktsThr", 7534, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "ingress hit packets thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("ingrPktsThr", prop)
prop = PropMeta("str", "ingrPktsTr", "ingrPktsTr", 7535, PropCategory.IMPLICIT_TREND)
prop.label = "ingress hit packets trend"
prop.isOper = True
prop.isStats = True
meta.props.add("ingrPktsTr", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "nodeId", "nodeId", 5844, PropCategory.REGULAR)
prop.label = "Node Id"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("nodeId", prop)
prop = PropMeta("str", "pktsAvg", "pktsAvg", 24176, PropCategory.IMPLICIT_AVG)
prop.label = "hit packets average value"
prop.isOper = True
prop.isStats = True
meta.props.add("pktsAvg", prop)
prop = PropMeta("str", "pktsCum", "pktsCum", 24172, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "hit packets cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("pktsCum", prop)
prop = PropMeta("str", "pktsMax", "pktsMax", 24175, PropCategory.IMPLICIT_MAX)
prop.label = "hit packets maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("pktsMax", prop)
prop = PropMeta("str", "pktsMin", "pktsMin", 24174, PropCategory.IMPLICIT_MIN)
prop.label = "hit packets minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("pktsMin", prop)
prop = PropMeta("str", "pktsPer", "pktsPer", 24173, PropCategory.IMPLICIT_PERIODIC)
prop.label = "hit packets periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("pktsPer", prop)
prop = PropMeta("str", "pktsRate", "pktsRate", 24180, PropCategory.IMPLICIT_RATE)
prop.label = "hit packets rate"
prop.isOper = True
prop.isStats = True
meta.props.add("pktsRate", prop)
prop = PropMeta("str", "pktsSpct", "pktsSpct", 24177, PropCategory.IMPLICIT_SUSPECT)
prop.label = "hit packets suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("pktsSpct", prop)
prop = PropMeta("str", "pktsThr", "pktsThr", 24178, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "hit packets thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("pktsThr", prop)
prop = PropMeta("str", "pktsTr", "pktsTr", 24179, PropCategory.IMPLICIT_TREND)
prop.label = "hit packets trend"
prop.isOper = True
prop.isStats = True
meta.props.add("pktsTr", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "revPktsAvg", "revPktsAvg", 24231, PropCategory.IMPLICIT_AVG)
prop.label = "reverse hit packets average value"
prop.isOper = True
prop.isStats = True
meta.props.add("revPktsAvg", prop)
prop = PropMeta("str", "revPktsCum", "revPktsCum", 24227, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "reverse hit packets cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("revPktsCum", prop)
prop = PropMeta("str", "revPktsMax", "revPktsMax", 24230, PropCategory.IMPLICIT_MAX)
prop.label = "reverse hit packets maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("revPktsMax", prop)
prop = PropMeta("str", "revPktsMin", "revPktsMin", 24229, PropCategory.IMPLICIT_MIN)
prop.label = "reverse hit packets minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("revPktsMin", prop)
prop = PropMeta("str", "revPktsPer", "revPktsPer", 24228, PropCategory.IMPLICIT_PERIODIC)
prop.label = "reverse hit packets periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("revPktsPer", prop)
prop = PropMeta("str", "revPktsRate", "revPktsRate", 24235, PropCategory.IMPLICIT_RATE)
prop.label = "reverse hit packets rate"
prop.isOper = True
prop.isStats = True
meta.props.add("revPktsRate", prop)
prop = PropMeta("str", "revPktsSpct", "revPktsSpct", 24232, PropCategory.IMPLICIT_SUSPECT)
prop.label = "reverse hit packets suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("revPktsSpct", prop)
prop = PropMeta("str", "revPktsThr", "revPktsThr", 24233, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "reverse hit packets thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("revPktsThr", prop)
prop = PropMeta("str", "revPktsTr", "revPktsTr", 24234, PropCategory.IMPLICIT_TREND)
prop.label = "reverse hit packets trend"
prop.isOper = True
prop.isStats = True
meta.props.add("revPktsTr", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "index"))
meta.namingProps.append(getattr(meta.props, "nodeId"))
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("ATgToGraphInst", "Graph Instances", "cobra.model.vns.GraphInst"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("AEPgToVirtualMachines", "Virtual Machines", "cobra.model.comp.Vm"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("MgmtInstPToNode", "External Management Network EPG to Node", "cobra.model.fv.Locale"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("OoBToNode", "Out-of-band Management EPG to Node", "cobra.model.fv.Locale"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("InBToNode", "Node", "cobra.model.fv.Locale"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("EPgToNwIf", "Interface", "cobra.model.nw.If"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("CtxToNwIf", "Private Network to Interface", "cobra.model.nw.If"))
def __init__(self, parentMoOrDn, index, nodeId, markDirty=True, **creationProps):
namingVals = [index, nodeId]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
]
| |
7a48086a1a8ce564a8b688393160f9619cc2d920 | bfb6ccbcb2707bca5eb44f2b64c0084aa6561b5a | /docs/examples/textbook/stellar_minimal.py | 0eb338ca801befefe4cf1b76f358451e7f1a3879 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | cgroeneveld/amuse | 79c8ece558f484df4494609e95274cffd5c37c60 | 9684fd22ce8293b837d2c78f56948e3ec3d04032 | refs/heads/master | 2020-08-16T16:44:46.702465 | 2019-10-14T19:16:04 | 2019-10-14T19:16:04 | 215,526,071 | 0 | 0 | Apache-2.0 | 2019-10-16T10:57:34 | 2019-10-16T10:57:34 | null | UTF-8 | Python | false | false | 1,591 | py | """
Minimal routine for running a stellar evolution code
"""
###BOOKLISTSTART###
from amuse.lab import *
def main(m, z, model_time):
stellar = MESA()
stellar.parameters.metallicity = z
stellar.particles.add_particle(Particle(mass=m))
initial_luminosity = stellar.particles.luminosity
dt = 1 | units.Myr
while stellar.model_time < model_time:
stellar.evolve_model(stellar.model_time+dt)
print "at T=", stellar.model_time.in_(units.Myr), \
"L(t=0)=", initial_luminosity, \
", L (t=", stellar.particles.age.in_(units.Myr), \
")=", stellar.particles.luminosity.in_(units.LSun), \
", m=", stellar.particles.mass.in_(units.MSun), \
", R=", stellar.particles.radius.in_(units.RSun)
stellar.stop()
###BOOKLISTSTOP###
###BOOKLISTSTART2###
def new_option_parser():
from amuse.units.optparse import OptionParser
result = OptionParser()
result.add_option("-m", unit=units.MSun,
dest="m", type="float", default=1.0|units.MSun,
help="stellar mass [%default]")
result.add_option("-t", unit=units.Myr,
dest="model_time", type="float",
default=4700.0|units.Myr,
help="end time of the simulation [%default]")
result.add_option("-z", dest="z", type="float",
default=0.02, help="metallicity [%default]")
return result
if __name__ in ('__main__', '__plot__'):
o, arguments = new_option_parser().parse_args()
main(**o.__dict__)
###BOOKLISTSTOP2###
| [
"[email protected]"
]
| |
52fc3c13de1c7ab9d2f4226fda0f15568c18f21f | b7d01719eb6d5c504747674227df3410b22f75fa | /scripts/ibm/get_activation.py | aa337733f12853b6f6b01afe8d12ac631e6c42f4 | []
| no_license | lee212/FaaS-Evaluation | c16c0d5b16ae8ac5d45d6cf6638ba60e712094b1 | 38876242553d0bf36e262e91da244fd1bdd55121 | refs/heads/master | 2021-09-23T02:36:41.500656 | 2018-09-20T01:41:18 | 2018-09-20T01:41:18 | 103,572,487 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,573 | py | import sys
import json
import requests
import os
import argparse
url = 'https://openwhisk.ng.bluemix.net/api/v1/namespaces/_/activations/'
auth_string = os.environ['IBM_OPENWHISK_AUTH_STRING']
def argument_parser():
parser = argparse.ArgumentParser("IBM OpenWhisk Activation Results")
parser.add_argument("fname", help="filename to obtain activation ids")
args = parser.parse_args()
return args
def collect_activation_ids(fname):
with open(fname) as f:
r = json.load(f)
try:
actlist = r.keys()
except:
actlist = []
for i in r:
tmp = i['result'].keys()
actlist = actlist + list(tmp)
return actlist
def read_activation_through_rest(actlist):
actdict = {}
for i in actlist:
# curl -H 'Authorization: Basic
# NTNjNmY5ZDctM2JhYy00YjQ1LWI3N2ItNGVhMDMzYzg5YmUwOmNjTWxnaW5GU1VtZENuNGI0aWwxb0RaMVI2RlRNdm9QNUdtaUdlc3A3d25ucDR4QjdKQjZzUVpFQzBkTlZjclI='
# -L
# 'https://openwhisk.ng.bluemix.net/api/v1/namespaces/_/activations/2cd2e85819ba4a9592e85819ba5a957e'
headers = {'Authorization': auth_string}
rdata = requests.get(url + i, headers=headers)
rdict = rdata.json()
actdict[i] = rdict
return actdict
def to_file(fname, data):
with open(fname, "w") as f:
json.dump(data, f, indent=4)
if __name__ == "__main__":
args = argument_parser()
actids = collect_activation_ids(args.fname)
actdict = read_activation_through_rest(actids)
to_file("{}.activation".format(args.fname), actdict)
| [
"[email protected]"
]
| |
3da489896f1d4b8481f999e4ab87ffaca902aa69 | dfc2c18053b8e7576f88e7b2524d7ca3a8f47282 | /ch09/session4/21.py | 22f613217b0cbb380b591a0d5e7f7acd6804e7a8 | []
| no_license | Xoozi/tchomework | a6eed3bbf697ff12af8d42249ec58a139aed0c4c | 627c98b0b652ef20fd93025a17341bba76fbfce6 | refs/heads/master | 2021-01-23T21:18:15.793703 | 2018-10-21T11:05:55 | 2018-10-21T11:05:55 | 57,583,655 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 448 | py |
#燃烧的箭
#例3中的火炬台直径12英尺. 用方程5和例3c, 求燃烧的箭通过边沿之间的水平距离需要多长时间
#箭在边沿上方时有多高
#由例3c v0 cos α = 90*32/sqrt(68*64)
#半程时间为t/2
#期间竖直方向位移为-g(t/2)²
v0h = 90.0*32.0/sqrt(68.0*64)
t = 12.0/v0h
g = 32.15
h = 74-g*(t/2.0)**2
print 't:%f, h:%f' % (t, h)
#xoozi 答案是通过时间1.9秒, 这绝对是错了, 我的答案合理
| [
"[email protected]"
]
| |
8809f892e839ecb6296a61b91988e02177d6fd1e | ef90992dc00640f42ec615075a9b030b771f81e4 | /python-machine-learning/ch04/ch04-6/mushroom-download.py | cc4bf516b5258e4e5b4c8be4642807012331b96c | []
| no_license | korea7030/pythonwork | 88f5e67b33e9143eb40f6c10311a29e08317b77e | 70741acb0477c9348ad3f1ea07a183dda82a5402 | refs/heads/master | 2023-01-08T01:47:15.141471 | 2020-09-09T13:28:20 | 2020-09-09T13:28:20 | 54,378,053 | 0 | 0 | null | 2022-12-26T20:25:43 | 2016-03-21T10:00:07 | Jupyter Notebook | UTF-8 | Python | false | false | 523 | py | # -*- coding: utf-8 -*-
import urllib.request as req
local = "mushroom.csv"
url = "https://archive.ics.uci.edu/ml/machine-learning-databases/mushroom/agaricus-lepiota.data"
req.urlretrieve(url, local)
print("ok")
"""
첫번째 열 : 독의 유무(독: p / 식용 : e)
두번째 열 : 버섯머리모양(벨 : b / 혹 : k / 오목 : s/ 평평한 : f)
네번째 열 : 머리색(갈색 : n / 황갈색 : b / 연한 갈색 : c/ 회색 : g/ 녹색 : r/분홍색 : p/보라색 : u/ 붉은색 : c / 흰색 : w/노란색 : y)
"""
| [
"[email protected]"
]
| |
f1c1c9c0f8af90cec5c9def77c68d395a9021266 | 3e54ca2ad9146c1eaeee8a2497483187d660289c | /Python/dict05.py | 6b74a88786ac65d9efdba980ff5b74049c7e199a | []
| no_license | ParvathyGS/My-learnings | 438e096dc45d73ac5808a7b77316317dd90942ec | 653dac3dc3b46803ab89f07b7c14435f15b3164f | refs/heads/master | 2023-01-13T02:53:58.223103 | 2020-08-09T08:09:08 | 2020-08-09T08:09:08 | 227,995,341 | 0 | 1 | null | 2023-01-07T20:48:22 | 2019-12-14T09:09:53 | HTML | UTF-8 | Python | false | false | 82 | py | x = ('key1', 'key2', 'key3')
#y = 0
thisdict = dict.fromkeys(x)
print(thisdict) | [
"[email protected]"
]
| |
17fec2fc6f5b0c5fd1cd55adb95430ecf92baa2a | 9f6ca792f8ef4fac137ddab6b5af3ae4629759d8 | /realize_bj_2941.py | 18991a5526a58b5a875b70f7f4f0e667d86f1cdf | []
| no_license | pjhq2/Baekjoon | d01374eca0fc0b4d68d2209fc57d83db3349c89c | 921822f0cdaca1456f167d271c9efe84ddee2bd4 | refs/heads/main | 2023-08-11T11:48:30.364066 | 2021-09-22T12:43:07 | 2021-09-22T12:43:07 | 386,900,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py | word = input()
cro = ['c=', 'c-', 'dz=', 'd-', 'lj', 'nj', 's=', 'z=']
result = len(word)
for c in cro:
result -= word.count(c)
print(result) | [
"[email protected]"
]
| |
c9aa4f532ea1ce1fd202c31db44912e2f536d887 | b3e147ac438246d60644725fa93c16c9bae7fa7e | /Django Social Authentication/msg/communities/migrations/0001_initial.py | deadef4e706daa8a0473ca511f741db8917598ed | []
| no_license | Ehsan-Molavi/teamtreehouse | c55180b3d8eac8c18c03f335056fae1088c769e4 | cbe90b2eff0708e4c95a6909d7edec494ddd9615 | refs/heads/master | 2020-09-09T05:54:49.828487 | 2017-04-25T16:09:19 | 2017-04-25T16:09:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,824 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-08-18 21:44
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Community',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('slug', models.SlugField(allow_unicode=True, unique=True)),
('description', models.TextField(blank=True, default='')),
],
options={
'verbose_name_plural': 'communities',
'ordering': ['name'],
},
),
migrations.CreateModel(
name='CommunityMember',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('role', models.IntegerField(choices=[(0, 'banned'), (1, 'member'), (2, 'moderator'), (3, 'admin')], default=1)),
('community', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='communities.Community')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='communities', to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterUniqueTogether(
name='communitymember',
unique_together=set([('community', 'user')]),
),
]
| [
"[email protected]"
]
| |
202f31bd6101c6c46f9d68cdc9939ee4c7eeb028 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_108/ch159_2020_04_29_20_03_29_896486.py | 802b623ad51afbeffc4e2f040a58867cd5216d33 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | import json
with open("estoque.json","r") as arquivo:
print([prod for prod in json.loads(arquivo.read())]) | [
"[email protected]"
]
| |
ad16a47079c9d48d939186dd69f791eb8776e562 | eba3e4a3935d6422d1ed85aaf69337f5ba15fc74 | /tg-build/WebApp/bbwi/bbwi/commands.py | bc1be16efe88e2d957500026b277c85035637373 | []
| no_license | arianepaola/tg2jython | 2ae74250ca43b021323ef0951a9763712c2eb3d6 | 971b9c3eb8ca941d1797bb4b458f275bdca5a2cb | refs/heads/master | 2021-01-21T12:07:48.815690 | 2009-03-27T02:38:11 | 2009-03-27T02:38:11 | 160,242 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,737 | py | # -*- coding: utf-8 -*-
"""This module contains functions called from console script entry points."""
import sys
from os import getcwd
from os.path import dirname, exists, join
import pkg_resources
pkg_resources.require("TurboGears>=1.0.5")
pkg_resources.require("SQLObject>=0.7.1")
import cherrypy
import turbogears
cherrypy.lowercase_api = True
class ConfigurationError(Exception):
pass
def start():
"""Start the CherryPy application server."""
setupdir = dirname(dirname(__file__))
curdir = getcwd()
# First look on the command line for a desired config file,
# if it's not on the command line, then look for 'setup.py'
# in the current directory. If there, load configuration
# from a file called 'dev.cfg'. If it's not there, the project
# is probably installed and we'll look first for a file called
# 'prod.cfg' in the current directory and then for a default
# config file called 'default.cfg' packaged in the egg.
if len(sys.argv) > 1:
configfile = sys.argv[1]
elif exists(join(setupdir, "setup.py")):
configfile = join(setupdir, "dev.cfg")
elif exists(join(curdir, "prod.cfg")):
configfile = join(curdir, "prod.cfg")
else:
try:
configfile = pkg_resources.resource_filename(
pkg_resources.Requirement.parse("bbwi"),
"config/default.cfg")
except pkg_resources.DistributionNotFound:
raise ConfigurationError("Could not find default configuration.")
turbogears.update_config(configfile=configfile,
modulename="bbwi.config")
from bbwi.controllers import Root
turbogears.start_server(Root())
| [
"[email protected]"
]
| |
7026e3a545655b602456d212833fb1d8827ea6eb | df7f13ec34591fe1ce2d9aeebd5fd183e012711a | /hata/discord/message/message/tests/test__validate_reactions.py | 9cefa7864a1e54b584a1deb4686712a2d416e78a | [
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | HuyaneMatsu/hata | 63e2f6a2d7a7539fd8f18498852d9d3fe5c41d2e | 53f24fdb38459dc5a4fd04f11bdbfee8295b76a4 | refs/heads/master | 2023-08-20T15:58:09.343044 | 2023-08-20T13:09:03 | 2023-08-20T13:09:03 | 163,677,173 | 3 | 3 | Apache-2.0 | 2019-12-18T03:46:12 | 2018-12-31T14:59:47 | Python | UTF-8 | Python | false | false | 1,431 | py | import vampytest
from ....core import BUILTIN_EMOJIS
from ....emoji import ReactionMapping
from ....user import User
from ..fields import validate_reactions
def test__validate_reactions__0():
"""
Tests whether ``validate_reactions`` works as intended.
Case: passing.
"""
reactions = ReactionMapping()
for input_value, expected_output in (
(None, None),
(reactions, reactions),
):
output = validate_reactions(input_value)
vampytest.assert_is(output, expected_output)
def test__validate_reactions__1():
"""
Tests whether ``validate_reactions`` works as intended.
Case: `TypeError`.
"""
for input_value in (
12.6,
):
with vampytest.assert_raises(TypeError):
validate_reactions(input_value)
def test__validate_reactions__2():
"""
Tests whether ``validate_reactions`` works as intended.
Case: Successful conversion.
"""
emoji_1 = BUILTIN_EMOJIS['heart']
emoji_2 = BUILTIN_EMOJIS['x']
user_id_0 = 202305010021
user_id_1 = 202305010022
user_0 = User.precreate(user_id_0)
user_1 = User.precreate(user_id_1)
input_value = {
emoji_1: [user_0, user_1],
emoji_2: [user_1]
}
expected_output = ReactionMapping(input_value)
output = validate_reactions(input_value)
vampytest.assert_eq(output, expected_output)
| [
"[email protected]"
]
| |
eca9d2184d08829e57c6e53dc200435a62c27dca | 077c91b9d5cb1a6a724da47067483c622ce64be6 | /fuzz_pyretic_mesh_proactive_firewall_no_close_check_loop_mcs_with_max_replays_5/interreplay_131_l_4/replay_config.py | 81dd578b4a0ae58eed76c481e6a8fe40e4d236b9 | []
| no_license | Spencerx/experiments | 0edd16398725f6fd9365ddbb1b773942e4878369 | aaa98b0f67b0d0c0c826b8a1565916bf97ae3179 | refs/heads/master | 2020-04-03T10:11:40.671606 | 2014-06-11T23:55:11 | 2014-06-11T23:55:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,226 | py |
from config.experiment_config_lib import ControllerConfig
from sts.topology import *
from sts.control_flow import Replayer
from sts.simulation_state import SimulationConfig
from sts.input_traces.input_logger import InputLogger
simulation_config = SimulationConfig(controller_configs=[ControllerConfig(start_cmd='./pyretic.py -m p0 pyretic.examples.firewall_for_sts_no_close', label='c1', address='127.0.0.1', cwd='../pyretic', kill_cmd='ps aux | grep -e pox -e pyretic | grep -v simulator | cut -c 9-15 | xargs kill -9')],
topology_class=MeshTopology,
topology_params="num_switches=3",
patch_panel_class=BufferedPatchPanel,
multiplex_sockets=False,
kill_controllers_on_exit=True)
control_flow = Replayer(simulation_config, "experiments/fuzz_pyretic_mesh_proactive_firewall_no_close_check_loop_mcs/interreplay_131_l_4/events.trace",
input_logger=InputLogger(),
wait_on_deterministic_values=False,
allow_unexpected_messages=False,
delay_flow_mods=False,
pass_through_whitelisted_messages=True)
# Invariant check: 'None'
| [
"[email protected]"
]
| |
681d130884fa46cdcf49b775e65fe554d56cf5c8 | b5d6219ac738ed05485439540f38d63d21694c51 | /DAT/ED6_DT01/T2510.主楼 社会系教室.py | 4942a79778d13311086f2c26ebe8eb5ad498883d | []
| no_license | otoboku/ED6-FC-Steam-CN | f87ffb2ff19f9272b986fa32a91bec360c21dffa | c40d9bc5aaea9446dda27e7b94470d91cb5558c5 | refs/heads/master | 2021-01-21T02:37:30.443986 | 2015-11-27T07:41:41 | 2015-11-27T07:41:41 | 46,975,651 | 1 | 0 | null | 2015-11-27T10:58:43 | 2015-11-27T10:58:42 | null | UTF-8 | Python | false | false | 110,639 | py | from ED6ScenarioHelper import *
def main():
# 主楼 社会系教室
CreateScenaFile(
FileName = 'T2510 ._SN',
MapName = 'Ruan',
Location = 'T2510.x',
MapIndex = 1,
MapDefaultBGM = "ed60014",
Flags = 0,
EntryFunctionIndex = 0xFFFF,
Reserved = 0,
IncludedScenario = [
'',
'',
'',
'',
'',
'',
'',
''
],
)
BuildStringList(
'@FileName', # 8
'科林兹校长', # 9
'乔儿', # 10
'汉斯', # 11
'男学生', # 12
'男学生', # 13
'男学生', # 14
'女学生', # 15
'女教师', # 16
'珐奥娜', # 17
'拉迪奥老师', # 18
'碧欧拉老师', # 19
'米丽亚老师', # 20
'艾福托老师', # 21
'罗迪', # 22
'坎诺', # 23
'雅莉丝', # 24
'黛拉', # 25
'帕布尔', # 26
'罗基克', # 27
'罗伊斯', # 28
'莫妮卡', # 29
'塞尔玛', # 30
'基诺奇奥', # 31
'妮吉塔', # 32
'梅贝尔市长', # 33
'戴尔蒙市长', # 34
'CH22000', # 35
)
DeclEntryPoint(
Unknown_00 = 0,
Unknown_04 = 0,
Unknown_08 = 6000,
Unknown_0C = 4,
Unknown_0E = 0,
Unknown_10 = 0,
Unknown_14 = 9500,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 2800,
Unknown_2C = 262,
Unknown_30 = 45,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 0,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
AddCharChip(
'ED6_DT07/CH02600 ._CH', # 00
'ED6_DT07/CH02393 ._CH', # 01
'ED6_DT07/CH02553 ._CH', # 02
'ED6_DT07/CH01360 ._CH', # 03
'ED6_DT07/CH01370 ._CH', # 04
'ED6_DT07/CH01430 ._CH', # 05
'ED6_DT07/CH02490 ._CH', # 06
'ED6_DT07/CH01660 ._CH', # 07
'ED6_DT07/CH01210 ._CH', # 08
'ED6_DT07/CH01430 ._CH', # 09
'ED6_DT07/CH01460 ._CH', # 0A
'ED6_DT07/CH01360 ._CH', # 0B
'ED6_DT07/CH01580 ._CH', # 0C
'ED6_DT07/CH01590 ._CH', # 0D
'ED6_DT07/CH01370 ._CH', # 0E
'ED6_DT07/CH01090 ._CH', # 0F
'ED6_DT07/CH01080 ._CH', # 10
'ED6_DT07/CH01580 ._CH', # 11
'ED6_DT07/CH02360 ._CH', # 12
'ED6_DT07/CH00003 ._CH', # 13
'ED6_DT07/CH00013 ._CH', # 14
'ED6_DT07/CH00043 ._CH', # 15
'ED6_DT07/CH01363 ._CH', # 16
'ED6_DT07/CH01083 ._CH', # 17
'ED6_DT07/CH01583 ._CH', # 18
'ED6_DT07/CH01373 ._CH', # 19
'ED6_DT07/CH01663 ._CH', # 1A
'ED6_DT07/CH01213 ._CH', # 1B
'ED6_DT07/CH01433 ._CH', # 1C
'ED6_DT07/CH01463 ._CH', # 1D
'ED6_DT07/CH01593 ._CH', # 1E
'ED6_DT07/CH01093 ._CH', # 1F
'ED6_DT07/CH02603 ._CH', # 20
'ED6_DT06/CH20021 ._CH', # 21
)
AddCharChipPat(
'ED6_DT07/CH02600P._CP', # 00
'ED6_DT07/CH02393P._CP', # 01
'ED6_DT07/CH02553P._CP', # 02
'ED6_DT07/CH01360P._CP', # 03
'ED6_DT07/CH01370P._CP', # 04
'ED6_DT07/CH01210P._CP', # 05
'ED6_DT07/CH02490P._CP', # 06
'ED6_DT07/CH01660P._CP', # 07
'ED6_DT07/CH01210P._CP', # 08
'ED6_DT07/CH01430P._CP', # 09
'ED6_DT07/CH01460P._CP', # 0A
'ED6_DT07/CH01360P._CP', # 0B
'ED6_DT07/CH01580P._CP', # 0C
'ED6_DT07/CH01590P._CP', # 0D
'ED6_DT07/CH01370P._CP', # 0E
'ED6_DT07/CH01090P._CP', # 0F
'ED6_DT07/CH01080P._CP', # 10
'ED6_DT07/CH01580P._CP', # 11
'ED6_DT07/CH02360P._CP', # 12
'ED6_DT07/CH00003P._CP', # 13
'ED6_DT07/CH00013P._CP', # 14
'ED6_DT07/CH00043P._CP', # 15
'ED6_DT07/CH01363P._CP', # 16
'ED6_DT07/CH01083P._CP', # 17
'ED6_DT07/CH01583P._CP', # 18
'ED6_DT07/CH01373P._CP', # 19
'ED6_DT07/CH01663P._CP', # 1A
'ED6_DT07/CH01213P._CP', # 1B
'ED6_DT07/CH01433P._CP', # 1C
'ED6_DT07/CH01463P._CP', # 1D
'ED6_DT07/CH01593P._CP', # 1E
'ED6_DT07/CH01093P._CP', # 1F
'ED6_DT07/CH02603P._CP', # 20
'ED6_DT06/CH20021P._CP', # 21
)
DeclNpc(
X = 116010,
Z = 200,
Y = 4800,
Direction = 180,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x115,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 3,
)
DeclNpc(
X = 30700,
Z = 0,
Y = 55110,
Direction = 270,
Unknown2 = 0,
Unknown3 = 1,
ChipIndex = 0x1,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 29460,
Z = 0,
Y = 53800,
Direction = 0,
Unknown2 = 0,
Unknown3 = 2,
ChipIndex = 0x2,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 29460,
Z = 0,
Y = 53800,
Direction = 0,
Unknown2 = 0,
Unknown3 = 16,
ChipIndex = 0x10,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 29460,
Z = 0,
Y = 53800,
Direction = 0,
Unknown2 = 0,
Unknown3 = 12,
ChipIndex = 0xC,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 29460,
Z = 0,
Y = 53800,
Direction = 0,
Unknown2 = 0,
Unknown3 = 3,
ChipIndex = 0x3,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 29460,
Z = 0,
Y = 53800,
Direction = 0,
Unknown2 = 0,
Unknown3 = 4,
ChipIndex = 0x4,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 29460,
Z = 0,
Y = 53800,
Direction = 0,
Unknown2 = 0,
Unknown3 = 8,
ChipIndex = 0x8,
NpcIndex = 0x181,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 41400,
Z = 0,
Y = 7500,
Direction = 180,
Unknown2 = 0,
Unknown3 = 6,
ChipIndex = 0x6,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 5,
)
DeclNpc(
X = 84450,
Z = 250,
Y = 1030,
Direction = 90,
Unknown2 = 0,
Unknown3 = 7,
ChipIndex = 0x7,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 6,
)
DeclNpc(
X = 87700,
Z = 0,
Y = 2800,
Direction = 270,
Unknown2 = 0,
Unknown3 = 8,
ChipIndex = 0x8,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 7,
)
DeclNpc(
X = 84450,
Z = 250,
Y = 2790,
Direction = 90,
Unknown2 = 0,
Unknown3 = 9,
ChipIndex = 0x9,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 8,
)
DeclNpc(
X = 87540,
Z = 250,
Y = 2770,
Direction = 270,
Unknown2 = 0,
Unknown3 = 29,
ChipIndex = 0x1D,
NpcIndex = 0x155,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = 0,
TalkScenaIndex = 9,
)
DeclNpc(
X = 0,
Z = 0,
Y = 3100,
Direction = 270,
Unknown2 = 0,
Unknown3 = 11,
ChipIndex = 0xB,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 10,
)
DeclNpc(
X = -2800,
Z = 0,
Y = 4000,
Direction = 90,
Unknown2 = 0,
Unknown3 = 12,
ChipIndex = 0xC,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 11,
)
DeclNpc(
X = -700,
Z = 0,
Y = 4000,
Direction = 270,
Unknown2 = 0,
Unknown3 = 13,
ChipIndex = 0xD,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 12,
)
DeclNpc(
X = 3500,
Z = 0,
Y = 2000,
Direction = 90,
Unknown2 = 0,
Unknown3 = 14,
ChipIndex = 0xE,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 13,
)
DeclNpc(
X = -3100,
Z = 0,
Y = 5400,
Direction = 180,
Unknown2 = 0,
Unknown3 = 15,
ChipIndex = 0xF,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 14,
)
DeclNpc(
X = 4490,
Z = 250,
Y = 34880,
Direction = 270,
Unknown2 = 0,
Unknown3 = 16,
ChipIndex = 0x10,
NpcIndex = 0x105,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 15,
)
DeclNpc(
X = 4790,
Z = 250,
Y = -1130,
Direction = 90,
Unknown2 = 0,
Unknown3 = 12,
ChipIndex = 0xC,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 16,
)
DeclNpc(
X = 5400,
Z = 300,
Y = 30500,
Direction = 0,
Unknown2 = 0,
Unknown3 = 14,
ChipIndex = 0xE,
NpcIndex = 0x191,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 17,
)
DeclNpc(
X = 3040,
Z = 0,
Y = 35050,
Direction = 90,
Unknown2 = 0,
Unknown3 = 15,
ChipIndex = 0xF,
NpcIndex = 0x101,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 18,
)
DeclNpc(
X = 85800,
Z = 0,
Y = 30000,
Direction = 270,
Unknown2 = 0,
Unknown3 = 16,
ChipIndex = 0x10,
NpcIndex = 0x181,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 19,
)
DeclNpc(
X = 84080,
Z = 0,
Y = 30000,
Direction = 90,
Unknown2 = 0,
Unknown3 = 13,
ChipIndex = 0xD,
NpcIndex = 0x181,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 20,
)
DeclNpc(
X = -3900,
Z = 0,
Y = 3100,
Direction = 270,
Unknown2 = 0,
Unknown3 = 18,
ChipIndex = 0x12,
NpcIndex = 0x181,
InitFunctionIndex = 0,
InitScenaIndex = 2,
TalkFunctionIndex = 0,
TalkScenaIndex = 21,
)
DeclNpc(
X = 0,
Z = 0,
Y = 0,
Direction = 0,
Unknown2 = 0,
Unknown3 = 0,
ChipIndex = 0x0,
NpcIndex = 0x185,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclNpc(
X = 85590,
Z = 700,
Y = 3050,
Direction = 0,
Unknown2 = 0,
Unknown3 = 1835041,
ChipIndex = 0x21,
NpcIndex = 0x166,
InitFunctionIndex = -1,
InitScenaIndex = -1,
TalkFunctionIndex = -1,
TalkScenaIndex = -1,
)
DeclEvent(
X = 51000,
Y = 0,
Z = 1400,
Range = 1000,
Unknown_10 = 0x7D0,
Unknown_14 = 0x0,
Unknown_18 = 0x41,
Unknown_1C = 27,
)
DeclEvent(
X = 59000,
Y = 0,
Z = 1400,
Range = 1000,
Unknown_10 = 0x7D0,
Unknown_14 = 0x0,
Unknown_18 = 0x41,
Unknown_1C = 28,
)
DeclEvent(
X = 33000,
Y = 0,
Z = 1400,
Range = 1000,
Unknown_10 = 0x7D0,
Unknown_14 = 0x0,
Unknown_18 = 0x41,
Unknown_1C = 29,
)
DeclEvent(
X = 58990,
Y = 0,
Z = 31330,
Range = 1000,
Unknown_10 = 0x7D0,
Unknown_14 = 0x0,
Unknown_18 = 0x41,
Unknown_1C = 30,
)
DeclEvent(
X = 33000,
Y = 0,
Z = 31400,
Range = 1000,
Unknown_10 = 0x7D0,
Unknown_14 = 0x0,
Unknown_18 = 0x41,
Unknown_1C = 31,
)
DeclActor(
TriggerX = 33000,
TriggerZ = 0,
TriggerY = 2190,
TriggerRange = 800,
ActorX = 33000,
ActorZ = 1000,
ActorY = 2190,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 24,
Unknown_22 = 0,
)
DeclActor(
TriggerX = 33000,
TriggerZ = 0,
TriggerY = 32200,
TriggerRange = 800,
ActorX = 33000,
ActorZ = 1000,
ActorY = 32200,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 24,
Unknown_22 = 0,
)
DeclActor(
TriggerX = 59000,
TriggerZ = 0,
TriggerY = 32000,
TriggerRange = 800,
ActorX = 59000,
ActorZ = 1000,
ActorY = 32000,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 24,
Unknown_22 = 0,
)
DeclActor(
TriggerX = 41200,
TriggerZ = 0,
TriggerY = 5490,
TriggerRange = 400,
ActorX = 41400,
ActorZ = 1500,
ActorY = 7500,
Flags = 0x7E,
TalkScenaIndex = 0,
TalkFunctionIndex = 4,
Unknown_22 = 0,
)
DeclActor(
TriggerX = 51020,
TriggerZ = 0,
TriggerY = 31860,
TriggerRange = 800,
ActorX = 51020,
ActorZ = 1500,
ActorY = 31860,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 25,
Unknown_22 = 0,
)
DeclActor(
TriggerX = 85590,
TriggerZ = 700,
TriggerY = 3400,
TriggerRange = 1000,
ActorX = 85590,
ActorZ = 1000,
ActorY = 3050,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 26,
Unknown_22 = 0,
)
ScpFunction(
"Function_0_692", # 00, 0
"Function_1_ADC", # 01, 1
"Function_2_B61", # 02, 2
"Function_3_CDE", # 03, 3
"Function_4_1520", # 04, 4
"Function_5_1525", # 05, 5
"Function_6_1F7F", # 06, 6
"Function_7_233C", # 07, 7
"Function_8_26EC", # 08, 8
"Function_9_2C99", # 09, 9
"Function_10_312F", # 0A, 10
"Function_11_332C", # 0B, 11
"Function_12_373F", # 0C, 12
"Function_13_39F7", # 0D, 13
"Function_14_3BA2", # 0E, 14
"Function_15_3C35", # 0F, 15
"Function_16_44FF", # 10, 16
"Function_17_45A6", # 11, 17
"Function_18_45F2", # 12, 18
"Function_19_479B", # 13, 19
"Function_20_48B5", # 14, 20
"Function_21_4B17", # 15, 21
"Function_22_4E76", # 16, 22
"Function_23_5672", # 17, 23
"Function_24_5A5E", # 18, 24
"Function_25_5B54", # 19, 25
"Function_26_5BB8", # 1A, 26
"Function_27_5C20", # 1B, 27
"Function_28_5C24", # 1C, 28
"Function_29_5C28", # 1D, 29
"Function_30_5C2C", # 1E, 30
"Function_31_5C30", # 1F, 31
)
# Map initialization (slot 0). Positions, poses or hides the map's NPCs
# depending on which story-progress scenario flag is set (each Jc branch
# corresponds to one story state), then fires one-shot entry events.
# Label names encode original bytecode offsets — do not rename.
def Function_0_692(): pass
label("Function_0_692")
# Branch 1: flag (0xA0, bit 0) set — seated/animated NPC layout.
# SetChrFlags(..., 0x4/0x10) plus OP_44 pins a character into a fixed
# chip pose; SetChrFlags(..., 0x80) hides a character entirely.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0xA0, 0)), scpexpr(EXPR_END)), "loc_7A9")
SetChrPos(0x11, 5320, 250, 2110, 270)
SetChrPos(0x16, -3060, 0, 3170, 45)
SetChrPos(0x15, 560, 100, 240, 90)
SetChrChipByIndex(0x15, 22)
SetChrFlags(0x15, 0x4)
SetChrFlags(0x15, 0x10)
OP_44(0x15, 0xFF)
SetChrPos(0x12, 5300, 250, 32080, 180)
SetChrFlags(0x12, 0x10)
SetChrPos(0x1B, -1100, 0, 32240, 270)
SetChrChipByIndex(0x1D, 31)
SetChrPos(0x1D, -2660, 100, 32180, 90)
SetChrFlags(0x1D, 0x4)
SetChrFlags(0x1D, 0x10)
OP_44(0x1D, 0xFF)
SetChrChipByIndex(0x1A, 23)
SetChrPos(0x1A, -5950, 100, 34220, 90)
SetChrFlags(0x1A, 0x4)
SetChrFlags(0x1A, 0x10)
OP_44(0x1A, 0xFF)
ClearChrFlags(0x1E, 0x80)
SetChrPos(0x1E, 86430, 0, 31990, 90)
ClearChrFlags(0x1F, 0x80)
SetChrPos(0x1F, 95400, 250, 31050, 90)
ClearChrFlags(0x1C, 0x80)
SetChrChipByIndex(0x13, 28)
SetChrFlags(0x13, 0x4)
SetChrFlags(0x13, 0x10)
OP_44(0x13, 0xFF)
SetChrFlags(0x14, 0x80)
SetChrFlags(0x17, 0x80)
SetChrFlags(0x19, 0x80)
SetChrChipByIndex(0x8, 32)
Jump("loc_A78")
label("loc_7A9")
# Branch 2: flag (0x86, bit 4) — most NPCs hidden.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 4)), scpexpr(EXPR_END)), "loc_7F4")
SetChrFlags(0x11, 0x80)
SetChrFlags(0x12, 0x80)
SetChrFlags(0x13, 0x80)
SetChrFlags(0x14, 0x80)
SetChrFlags(0x15, 0x80)
SetChrFlags(0x16, 0x80)
SetChrFlags(0x17, 0x80)
SetChrFlags(0x18, 0x80)
SetChrFlags(0x19, 0x80)
SetChrFlags(0x1A, 0x80)
SetChrFlags(0x1D, 0x80)
SetChrFlags(0x1B, 0x80)
SetChrChipByIndex(0x8, 32)
Jump("loc_A78")
label("loc_7F4")
# Branch 3: flag (0x86, bit 2) — alternative standing layout.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_886")
SetChrPos(0x11, 1710, 0, 4970, 180)
SetChrPos(0x12, -6910, 0, 33220, 90)
SetChrPos(0x13, 95370, 250, 34220, 225)
SetChrPos(0x8, 42950, 0, 1120, 270)
SetChrPos(0x16, -7060, 0, 1680, 90)
SetChrPos(0x17, 920, 0, -1500, 0)
SetChrPos(0x18, -1590, 0, 34700, 0)
SetChrPos(0x1A, 1300, 0, 28510, 90)
Jump("loc_A78")
label("loc_886")
# Branch 4: flag (0x86, bit 1) — variant of branch 3; also unhides chr 0x20.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_91B")
SetChrPos(0x11, 1710, 0, 4970, 180)
SetChrPos(0x12, -6910, 0, 33220, 90)
SetChrPos(0x13, 95370, 250, 34220, 225)
SetChrPos(0x8, 43470, 0, 5280, 225)
SetChrFlags(0x8, 0x10)
SetChrFlags(0x15, 0x80)
SetChrFlags(0x18, 0x80)
SetChrPos(0x16, -7060, 0, 1680, 90)
SetChrPos(0x17, 920, 0, -1500, 0)
SetChrPos(0x1A, 1300, 0, 28510, 90)
ClearChrFlags(0x20, 0x80)
Jump("loc_A78")
label("loc_91B")
# Branch 5: flag (0x86, bit 0).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 0)), scpexpr(EXPR_END)), "loc_976")
SetChrFlags(0x12, 0x80)
SetChrFlags(0x14, 0x80)
SetChrFlags(0x15, 0x80)
SetChrFlags(0x18, 0x80)
SetChrFlags(0x19, 0x80)
SetChrPos(0x16, -5200, 0, 2050, 0)
SetChrPos(0x17, 4500, 250, 4019, 270)
SetChrPos(0x1A, 790, 0, 34680, 0)
SetChrChipByIndex(0x8, 32)
Jump("loc_A78")
label("loc_976")
# Branch 6: flag (0x85, bit 7).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_A27")
SetChrPos(0x12, 5300, 250, 32080, 90)
SetChrFlags(0x14, 0x80)
SetChrFlags(0x15, 0x80)
SetChrFlags(0x18, 0x80)
SetChrFlags(0x19, 0x80)
ClearChrFlags(0x1E, 0x80)
ClearChrFlags(0x1F, 0x80)
SetChrChipByIndex(0x13, 28)
SetChrFlags(0x13, 0x4)
SetChrFlags(0x13, 0x10)
OP_44(0x13, 0xFF)
SetChrChipByIndex(0x11, 26)
SetChrFlags(0x11, 0x4)
SetChrFlags(0x11, 0x10)
OP_44(0x11, 0xFF)
SetChrChipByIndex(0x16, 24)
SetChrPos(0x16, -2650, 100, 4200, 90)
SetChrFlags(0x16, 0x4)
SetChrFlags(0x16, 0x10)
OP_44(0x16, 0xFF)
SetChrChipByIndex(0x1F, 30)
SetChrPos(0x1F, 84120, 100, 30200, 90)
SetChrFlags(0x1F, 0x4)
SetChrFlags(0x1F, 0x10)
OP_44(0x1F, 0xFF)
SetChrFlags(0x1B, 0x80)
SetChrChipByIndex(0x8, 32)
Jump("loc_A78")
label("loc_A27")
# Branch 7: flag (0x85, bit 6) — everyone hidden except chr 0x8, who is posed.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 6)), scpexpr(EXPR_END)), "loc_A78")
SetChrFlags(0x11, 0x80)
SetChrFlags(0x12, 0x80)
SetChrFlags(0x13, 0x80)
SetChrFlags(0x15, 0x80)
SetChrFlags(0x16, 0x80)
SetChrFlags(0x17, 0x80)
SetChrFlags(0x18, 0x80)
SetChrFlags(0x19, 0x80)
SetChrFlags(0x1A, 0x80)
SetChrFlags(0x1B, 0x80)
SetChrFlags(0x1D, 0x80)
OP_44(0x8, 0xFF)
SetChrFlags(0x8, 0x4)
SetChrFlags(0x8, 0x10)
SetChrChipByIndex(0x8, 32)
label("loc_A78")
# Entry-event dispatch: switches on value-index 0 (presumably the map the
# player arrived from — TODO confirm); value 114 may start Event 22 once.
Switch(
(scpexpr(EXPR_PUSH_VALUE_INDEX, 0x0), scpexpr(EXPR_END)),
(114, "loc_A84"),
(SWITCH_DEFAULT, "loc_AB5"),
)
label("loc_A84")
# Run only when (0x85,7) is clear AND (0x85,6) is set; OP_A2(0x42F) sets a
# flag so this one-shot setup is not repeated, then Event 22 plays.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 6)), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_AB2")
OP_A2(0x42F)
OP_71(0x1, 0x10)
OP_71(0x2, 0x10)
OP_71(0x3, 0x10)
OP_64(0x0, 0x1)
OP_64(0x1, 0x1)
OP_64(0x2, 0x1)
Event(0, 22)
label("loc_AB2")
Jump("loc_AB5")
label("loc_AB5")
# If flag (0x7F, bit 2) is set: clear flag 0x3FA, hide three NPCs,
# play Event 23 and switch the map ambience/BGM set to "t2510_n".
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x7F, 2)), scpexpr(EXPR_END)), "loc_ADB")
OP_A3(0x3FA)
SetChrFlags(0x1A, 0x80)
SetChrFlags(0x1B, 0x80)
SetChrFlags(0x1D, 0x80)
Event(0, 23)
OP_B1("t2510_n")
label("loc_ADB")
Return()
# Function_0_692 end
# Per-entry map setup (slot 1). Chooses between the "t2510_y" / "t2510_n"
# map resource variants from the story flags, sets door/portal states
# (OP_64/OP_65 toggle blockers, OP_71/OP_72 toggle object animation flags),
# and hides chr 0x22 depending on quest state checked via OP_29.
def Function_1_ADC(): pass
label("Function_1_ADC")
# Variant select: ((0x86,4) set and (0x87,4) clear) OR ((0x86,0) set and
# (0x86,1) clear) -> "t2510_y"; otherwise "t2510_n".
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 4)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 0)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_OR), scpexpr(EXPR_END)), "loc_AFD")
OP_B1("t2510_y")
Jump("loc_B06")
label("loc_AFD")
OP_B1("t2510_n")
label("loc_B06")
OP_64(0x0, 0x1)
OP_64(0x1, 0x1)
OP_64(0x2, 0x1)
# When (0x85,5) is set and (0x85,7) clear: reopen objects 1-3 and
# clear blockers 0-2.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 5)), scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_NEQUZ_I64), scpexpr(EXPR_END)), "loc_B39")
OP_72(0x1, 0x10)
OP_72(0x2, 0x10)
OP_72(0x3, 0x10)
OP_65(0x0, 0x1)
OP_65(0x1, 0x1)
OP_65(0x2, 0x1)
label("loc_B39")
# Blocker 5 tracks quest 0x27 progress bits (OP_29 tests quest state):
# bit 0x20 clears it, bit 0x80 re-sets it and also hides chr 0x22.
OP_64(0x5, 0x1)
Jc((scpexpr(EXPR_EXEC_OP, "OP_29(0x27, 0x1, 0x20)"), scpexpr(EXPR_END)), "loc_B4C")
OP_65(0x5, 0x1)
label("loc_B4C")
Jc((scpexpr(EXPR_EXEC_OP, "OP_29(0x27, 0x1, 0x80)"), scpexpr(EXPR_END)), "loc_B60")
OP_64(0x5, 0x1)
SetChrFlags(0x22, 0x80)
label("loc_B60")
Return()
# Function_1_ADC end
# Idle-animation driver. Picks a random value 0..13 (EXPR_RAND % 0xE) and
# plays one of 14 OP_99 animation calls (varying start frame and delay),
# presumably to desynchronize looping NPC animations — then enters an
# endless loop at loc_CC8 replaying OP_99(frame 0, delay 0x5DC) while the
# constant-true condition holds. This function never returns normally; it is
# run as a background behaviour (attached via OP_44 elsewhere).
def Function_2_B61(): pass
label("Function_2_B61")
RunExpression(0x0, (scpexpr(EXPR_RAND), scpexpr(EXPR_PUSH_LONG, 0xE), scpexpr(EXPR_IMOD), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
# 14-way dispatch on the random result stored in work register 0.
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x0), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_B86")
OP_99(0xFE, 0x0, 0x7, 0x672)
Jump("loc_CC8")
label("loc_B86")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_B9F")
OP_99(0xFE, 0x1, 0x7, 0x640)
Jump("loc_CC8")
label("loc_B9F")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x2), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_BB8")
OP_99(0xFE, 0x2, 0x7, 0x60E)
Jump("loc_CC8")
label("loc_BB8")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x3), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_BD1")
OP_99(0xFE, 0x3, 0x7, 0x5DC)
Jump("loc_CC8")
label("loc_BD1")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_BEA")
OP_99(0xFE, 0x4, 0x7, 0x5AA)
Jump("loc_CC8")
label("loc_BEA")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x5), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_C03")
OP_99(0xFE, 0x5, 0x7, 0x578)
Jump("loc_CC8")
label("loc_C03")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x6), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_C1C")
OP_99(0xFE, 0x6, 0x7, 0x546)
Jump("loc_CC8")
label("loc_C1C")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x7), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_C35")
OP_99(0xFE, 0x0, 0x7, 0x677)
Jump("loc_CC8")
label("loc_C35")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x8), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_C4E")
OP_99(0xFE, 0x1, 0x7, 0x645)
Jump("loc_CC8")
label("loc_C4E")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0x9), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_C67")
OP_99(0xFE, 0x2, 0x7, 0x613)
Jump("loc_CC8")
label("loc_C67")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0xA), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_C80")
OP_99(0xFE, 0x3, 0x7, 0x5E1)
Jump("loc_CC8")
label("loc_C80")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0xB), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_C99")
OP_99(0xFE, 0x4, 0x7, 0x5AF)
Jump("loc_CC8")
label("loc_C99")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0xC), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_CB2")
OP_99(0xFE, 0x5, 0x7, 0x57D)
Jump("loc_CC8")
label("loc_CB2")
Jc((scpexpr(EXPR_GET_RESULT, 0x0), scpexpr(EXPR_PUSH_LONG, 0xD), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_CC8")
OP_99(0xFE, 0x6, 0x7, 0x54B)
label("loc_CC8")
# while(true) idle loop: the Jc condition is the constant 1, so this
# repeats OP_99 forever (decompiled while-loop shape) — intentional.
Jc((scpexpr(EXPR_PUSH_LONG, 0x1), scpexpr(EXPR_END)), "loc_CDD")
OP_99(0xFE, 0x0, 0x7, 0x5DC)
Jump("loc_CC8")
label("loc_CDD")
Return()
# Function_2_B61 end
# Talk handler for NPC 0x8 (the school principal, judging from the dialogue).
# Dispatches on story-progress flags; each branch shows a different
# conversation. The scenario flag (0x0, bit 0) is a per-visit "already
# talked" marker: OP_A2(0x0) sets it after the long first-time dialogue so
# repeat talks show the shorter variant.
def Function_3_CDE(): pass
label("Function_3_CDE")
TalkBegin(0x8)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_EF3")
# Facing logic: read chr work 0x4 (presumably the angle toward the player
# after TurnDirection — TODO confirm) and pick the matching seated
# sub-sprite, then lock the pose again (OP_8C + flag 0x10).
ClearChrFlags(0xFE, 0x10)
TurnDirection(0xFE, 0x0, 0)
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x87), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_D0A")
SetChrSubChip(0xFE, 1)
Jump("loc_D25")
label("loc_D0A")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0xE1), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_D20")
SetChrSubChip(0xFE, 0)
Jump("loc_D25")
label("loc_D20")
SetChrSubChip(0xFE, 2)
label("loc_D25")
OP_8C(0xFE, 180, 0)
SetChrFlags(0xFE, 0x10)
# First talk in this state (flag (0x0,0) clear): long dialogue, then mark.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_E52")
OP_A2(0x0)
ChrTalk(
0xFE,
(
"#782F哦哦,是艾丝蒂尔和约修亚啊。\x02\x03",
"我和戴尔蒙市长交往多年了,\x01",
"自己也对这次事件深感震惊。\x01",
" \x02\x03",
"#783F他的行为的确难以原谅,\x01",
"而且也没有人会原谅他……\x02\x03",
"我祈祷他能对自己\x01",
"误入歧途犯下的罪行感到忏悔。\x02",
)
)
CloseMessageWindow()
Jump("loc_EEB")
label("loc_E52")
# Repeat talk: abbreviated version of the same dialogue.
ChrTalk(
0xFE,
(
"#783F他的行为的确难以原谅,\x01",
"而且也没有人会原谅他……\x02\x03",
"我祈祷他能对自己\x01",
"误入歧途犯下的罪行感到忏悔。\x02",
)
)
CloseMessageWindow()
label("loc_EEB")
SetChrSubChip(0xFE, 0)
Jump("loc_151C")
label("loc_EF3")
# State (0x86, bit 4): post-festival dialogue (same facing logic as above).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 4)), scpexpr(EXPR_END)), "loc_10FD")
ClearChrFlags(0xFE, 0x10)
TurnDirection(0xFE, 0x0, 0)
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x87), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_F1C")
SetChrSubChip(0xFE, 1)
Jump("loc_F37")
label("loc_F1C")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0xE1), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_F32")
SetChrSubChip(0xFE, 0)
Jump("loc_F37")
label("loc_F32")
SetChrSubChip(0xFE, 2)
label("loc_F37")
OP_8C(0xFE, 180, 0)
SetChrFlags(0xFE, 0x10)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1062")
OP_A2(0x0)
ChrTalk(
0xFE,
(
"#780F艾丝蒂尔、约修亚。\x01",
"这次实在是麻烦你们了。\x02\x03",
"舞台剧我看了哦,\x01",
"真的是十分精彩。\x02\x03",
"特别是约修亚饰演的塞茜莉亚公主,\x01",
"演技和扮相实在是太感人了。\x02\x03",
"下次有机会的话\x01",
"请务必再到我们学院来玩。\x02",
)
)
CloseMessageWindow()
Jump("loc_10F5")
label("loc_1062")
ChrTalk(
0xFE,
(
"#780F话说回来,\x01",
"能帮上特蕾莎老师实在是太好了。\x02\x03",
"那次纵火事件实在让孩子们受苦了啊。\x01",
" \x02",
)
)
CloseMessageWindow()
label("loc_10F5")
SetChrSubChip(0xFE, 0)
Jump("loc_151C")
label("loc_10FD")
# State (0x86, bit 2): festival-day dialogue.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_1211")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_11AC")
OP_A2(0x0)
ChrTalk(
0xFE,
(
"#780F哦,是你们。\x01",
"这次真是史无前例的盛况啊。\x02\x03",
"大家都很期待舞台剧,\x01",
"请务必让学园祭圆满成功。\x02",
)
)
CloseMessageWindow()
Jump("loc_120E")
label("loc_11AC")
ChrTalk(
0xFE,
(
"#780F大家都很期待舞台剧。\x01",
"请务必让学园祭圆满成功。\x02",
)
)
CloseMessageWindow()
label("loc_120E")
Jump("loc_151C")
label("loc_1211")
# State (0x86, bit 1): first-time branch is a two-speaker exchange with
# chr 0x21 and then unlocks 0x8's pose (ClearChrFlags 0x10).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_13D4")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_131A")
OP_A2(0x0)
ChrTalk(
0xFE,
(
"#780F戴尔蒙市长,\x01",
"自从去年的王国会议之后我们也好久不见了。\x02\x03",
"这段时间,你身体怎么样?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x21,
(
"#660F就像你看到的,结实着呢。\x01",
"科林兹校长也很精神嘛。\x02\x03",
"今天我打算好好放松一下。\x01",
" \x02",
)
)
CloseMessageWindow()
ClearChrFlags(0x8, 0x10)
Jump("loc_13D1")
label("loc_131A")
ChrTalk(
0xFE,
(
"#780F我还要找时间和市长先生谈谈\x01",
"关于学院运营的事情呢。\x01",
" \x02\x03",
"虽说是王立的教育机构,\x01",
"但也要重视地方上的建议。\x02",
)
)
CloseMessageWindow()
label("loc_13D1")
Jump("loc_151C")
label("loc_13D4")
# State (0x85, bit 7): lodging dialogue (facing logic repeated).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_151C")
ClearChrFlags(0xFE, 0x10)
TurnDirection(0xFE, 0x0, 0)
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x87), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_13FD")
SetChrSubChip(0xFE, 1)
Jump("loc_1418")
label("loc_13FD")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0xE1), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_1413")
SetChrSubChip(0xFE, 0)
Jump("loc_1418")
label("loc_1413")
SetChrSubChip(0xFE, 2)
label("loc_1418")
OP_8C(0xFE, 180, 0)
SetChrFlags(0xFE, 0x10)
ChrTalk(
0xFE,
(
"#780F你们住宿的地方我们已经给安排好了。\x01",
" \x02\x03",
"学院里也有食堂,\x01",
"你们就安心准备好舞台剧吧。\x02",
)
)
CloseMessageWindow()
SetChrSubChip(0xFE, 0)
label("loc_151C")
TalkEnd(0x8)
Return()
# Function_3_CDE end
# Talk-trigger entry point (DeclActor TalkFunctionIndex 4): simply forwards
# to Function 5, the receptionist's talk handler, so the counter trigger and
# direct NPC talk share one implementation.
def Function_4_1520(): pass
label("Function_4_1520")
Call(0, 5)
Return()
# Function_4_1520 end
# Talk handler for NPC 0x10 (the receptionist; the dialogue names her 珐奥娜/
# Faona). One branch per story-progress flag; flag (0x0, bit 1) is the
# per-visit "already talked" marker set by OP_A2(0x1) so repeat talks show
# a shorter variant. Several branches also involve party member 0x105
# (Kloe, per the #040F face tags — presumably; verify against chr table).
def Function_5_1525(): pass
label("Function_5_1525")
TalkBegin(0x10)
# State (0x87, bit 5): after-class chatter.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_1602")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_15B2")
OP_A2(0x1)
ChrTalk(
0x10,
(
"啊,怎么了?\x01",
"你们要找人吗。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"现在正好是\x01",
"上课结束的时间。\x01",
"我想大家都在校园里。\x02",
)
)
CloseMessageWindow()
Jump("loc_15FF")
label("loc_15B2")
ChrTalk(
0x10,
(
"现在正好是\x01",
"上课结束的时间。\x01",
"我想大家都在校园里。\x02",
)
)
CloseMessageWindow()
label("loc_15FF")
Jump("loc_1F7B")
label("loc_1602")
# State (0x86, bit 4): post-festival.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 4)), scpexpr(EXPR_END)), "loc_1692")
ChrTalk(
0x10,
"呵呵,学园祭很成功呢。\x02",
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"学生们正在\x01",
"礼堂那里庆祝胜利呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F7B")
label("loc_1692")
# State (0x86, bit 2): festival day.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_17B7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_175A")
OP_A2(0x1)
ChrTalk(
0x10,
(
"说起来\x01",
"真是出乎意料的盛况啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"有很多带孩子来的家长,\x01",
"我担心会有孩子走失。\x02",
)
)
CloseMessageWindow()
Jump("loc_17B4")
label("loc_175A")
ChrTalk(
0x10,
"请问您想找哪位呢?\x02",
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"我可以使用广播\x01",
"来帮您寻找想找的人。\x02",
)
)
CloseMessageWindow()
label("loc_17B4")
Jump("loc_1F7B")
label("loc_17B7")
# State (0x86, bit 1): festival opening info.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_192A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_18C6")
OP_A2(0x1)
ChrTalk(
0x10,
(
"各种活动都在\x01",
"校园和主楼里进行。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"下午礼堂那边\x01",
"预定要演出舞台剧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"食堂作为休息场所开放,\x01",
"你们可以好好利用。\x02",
)
)
CloseMessageWindow()
Jump("loc_1927")
label("loc_18C6")
ChrTalk(
0x10,
(
"为了以防万一,\x01",
"学园祭举行的时候\x01",
"宿舍楼都是锁住的。\x02",
)
)
CloseMessageWindow()
label("loc_1927")
Jump("loc_1F7B")
label("loc_192A")
# State (0x86, bit 0): day before the festival.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 0)), scpexpr(EXPR_END)), "loc_1A08")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_19C6")
OP_A2(0x1)
ChrTalk(
0x10,
(
"准备完成了吗?\x01",
"明天就要正式表演了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"到了明天\x01",
"就会有许多客人来参观了。\x02",
)
)
CloseMessageWindow()
Jump("loc_1A05")
label("loc_19C6")
ChrTalk(
0x10,
(
"准备完成了吗?\x01",
"明天就要正式表演了。\x02",
)
)
CloseMessageWindow()
label("loc_1A05")
Jump("loc_1F7B")
label("loc_1A08")
# State (0x85, bit 7): festival preparation period.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_1B22")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1ABA")
OP_A2(0x1)
ChrTalk(
0x10,
(
"一到下课时间,\x01",
"校园里就会变得热闹起来了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
"离学园祭没多久了……\x02",
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"同学们也都在\x01",
"拼命加油呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_1B1F")
label("loc_1ABA")
ChrTalk(
0x10,
"离学园祭没多久了……\x02",
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"同学们也都在\x01",
"拼命加油呢。\x02",
)
)
CloseMessageWindow()
label("loc_1B1F")
Jump("loc_1F7B")
label("loc_1B22")
# State (0x85, bit 6): exchange with party member 0x105 about the principal.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 6)), scpexpr(EXPR_END)), "loc_1CAA")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1C37")
OP_A2(0x1)
TurnDirection(0x10, 0x105, 0)
ChrTalk(
0x10,
"啊,科洛丝。\x02",
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#040F对不起,珐奥娜,\x01",
"我到现在才回来。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
"呵呵,回来就好。\x02",
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"要找校长的话,\x01",
"他就在办公室里。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"他也很担心\x01",
"科洛丝你呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#040F啊,是的。\x01",
"我们现在就过去。\x02",
)
)
CloseMessageWindow()
Jump("loc_1CA7")
label("loc_1C37")
TurnDirection(0x10, 0x105, 0)
ChrTalk(
0x10,
(
"要找校长的话,\x01",
"他就在办公室里。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"他也很担心\x01",
"科洛丝你呢。\x02",
)
)
CloseMessageWindow()
label("loc_1CA7")
Jump("loc_1F7B")
label("loc_1CAA")
# State (0x85, bit 0): errand-in-progress exchange with 0x105.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 0)), scpexpr(EXPR_END)), "loc_1DA7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1D82")
OP_A2(0x1)
ChrTalk(
0x10,
(
"啊,科洛丝。\x01",
"你外出回来了吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#040F啊,不是的……\x02\x03",
"对不起,\x01",
"我们还没有办完事呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"是吗。\x01",
"外出时请务必要小心。\x02",
)
)
CloseMessageWindow()
Jump("loc_1DA4")
label("loc_1D82")
ChrTalk(
0x10,
(
"科洛丝,\x01",
"外出时请务必要小心。\x02",
)
)
CloseMessageWindow()
label("loc_1DA4")
Jump("loc_1F7B")
label("loc_1DA7")
# State (0x83, bit 6): classes in session, no tour available.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x83, 6)), scpexpr(EXPR_END)), "loc_1E26")
ChrTalk(
0x10,
"啊,是想参观吗?\x02",
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"很抱歉,\x01",
"现在学生们正在上课,\x01",
"不能带您参观。\x02",
)
)
CloseMessageWindow()
Jump("loc_1F7B")
label("loc_1E26")
# State (0x82, bit 1): holiday exchange with 0x105.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x82, 1)), scpexpr(EXPR_END)), "loc_1F7B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_1F2A")
OP_A2(0x1)
ChrTalk(
0x10,
(
"啊,科洛丝。\x01",
"已经回来了吗?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#040F不是,\x01",
"我正要带这两位朋友去卢安呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x10,
(
"是吗,难得的假日,\x01",
"就好好地放松一下吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
"#040F嗯,谢谢了。\x02",
)
CloseMessageWindow()
Jump("loc_1F7B")
label("loc_1F2A")
ChrTalk(
0x10,
(
"科洛丝,\x01",
"难得的假日,\x01",
"就好好地放松一下吧。\x02",
)
)
CloseMessageWindow()
label("loc_1F7B")
TalkEnd(0x10)
Return()
# Function_5_1525 end
# Talk handler for NPC 0x11 (a teacher, per the dialogue). Branches on the
# same story-progress flags as the other handlers; flag (0x0, bit 2) is this
# NPC's per-visit "already talked" marker (set by OP_A2(0x2)).
def Function_6_1F7F(): pass
label("Function_6_1F7F")
TalkBegin(0x11)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_1FD0")
ChrTalk(
0xFE,
(
"课虽然上完了,\x01",
"但还有学生们的问题要回答。\x02",
)
)
CloseMessageWindow()
Jump("loc_2338")
label("loc_1FD0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_2046")
ChrTalk(
0xFE,
(
"唔,\x01",
"我们班的同学干劲热火朝天啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"大家做布景\x01",
"也非常地努力嘛。\x02",
)
)
CloseMessageWindow()
Jump("loc_2338")
label("loc_2046")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_20BC")
ChrTalk(
0xFE,
(
"学园祭的主角\x01",
"果然还是学生们啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"大家都比平时\x01",
"要活跃许多呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_2338")
label("loc_20BC")
# State (0x86, bit 0): first/repeat variants gated by flag (0x0, bit 2).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 0)), scpexpr(EXPR_END)), "loc_2225")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2193")
OP_A2(0x2)
ChrTalk(
0xFE,
(
"你们好像是\x01",
"从洛连特来的吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"其实我也是\x01",
"洛连特出身的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"说起来我父母\x01",
"也要来参观学园祭呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"……我要是能招待他们就好了。\x02",
)
CloseMessageWindow()
Jump("loc_2222")
label("loc_2193")
ChrTalk(
0xFE,
(
"对了对了……\x01",
"舞台剧表演我也看了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"那天真是很开心啊。\x02",
)
CloseMessageWindow()
label("loc_2222")
Jump("loc_2338")
label("loc_2225")
# State (0x85, bit 7): faces the player before speaking. Reads chr work 0x4
# (presumably the facing angle — TODO confirm) to select a sub-sprite,
# then relocks the pose via OP_8C + flag 0x10.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_2338")
ClearChrFlags(0xFE, 0x10)
TurnDirection(0xFE, 0x0, 0)
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x2D), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_224E")
SetChrSubChip(0xFE, 1)
Jump("loc_227F")
label("loc_224E")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x87), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2264")
SetChrSubChip(0xFE, 0)
Jump("loc_227F")
label("loc_2264")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x10E), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_227A")
SetChrSubChip(0xFE, 2)
Jump("loc_227F")
label("loc_227A")
SetChrSubChip(0xFE, 1)
label("loc_227F")
OP_8C(0xFE, 90, 0)
SetChrFlags(0xFE, 0x10)
ChrTalk(
0xFE,
(
"学园祭快到了,\x01",
"同学们就连上课\x01",
"都开始坐不安定了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"呵呵,这也是没办法的呀。\x02",
)
CloseMessageWindow()
SetChrSubChip(0xFE, 0)
label("loc_2338")
TalkEnd(0x11)
Return()
# Function_6_1F7F end
# Talk handler for NPC 0x12 (a teacher; the dialogue names her 碧欧拉/Viola).
# Flag (0x0, bit 3) is the per-visit "already talked" marker (OP_A2(0x3)).
def Function_7_233C(): pass
label("Function_7_233C")
TalkBegin(0x12)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_23F0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_23BB")
OP_A2(0x3)
ChrTalk(
0xFE,
(
"唔唔,\x01",
"这个问题……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"………………………………\x02",
)
CloseMessageWindow()
# OP_62 shows an emote bubble over the NPC; OP_22 plays the matching SE.
OP_62(0x12, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
OP_22(0x31, 0x0, 0x64)
Sleep(1000)
ChrTalk(
0xFE,
"怎么做好呢?\x02",
)
CloseMessageWindow()
ClearChrFlags(0x12, 0x10)
Jump("loc_23ED")
label("loc_23BB")
ChrTalk(
0xFE,
(
"呼,这里的学生\x01",
"都很热心于学习呀。\x02",
)
)
CloseMessageWindow()
label("loc_23ED")
Jump("loc_26E8")
label("loc_23F0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_245B")
ChrTalk(
0xFE,
(
"下午终于要上演\x01",
"万众瞩目的舞台剧了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"拜托你们二位了!\x01",
"我相信一定能取得成功的。\x02",
)
)
CloseMessageWindow()
Jump("loc_26E8")
label("loc_245B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_2543")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2511")
OP_A2(0x3)
ChrTalk(
0xFE,
(
"嗯,\x01",
"我们班的同学相当认真呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"虽然我觉得\x01",
"研究发表什么的太朴素了……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"不过这样也好,\x01",
"有很多客人来看呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_2540")
label("loc_2511")
ChrTalk(
0xFE,
(
"决不能输给\x01",
"米丽亚的班级……\x02",
)
)
CloseMessageWindow()
label("loc_2540")
Jump("loc_26E8")
label("loc_2543")
# State (0x85, bit 7): first-time branch is an exchange with party member
# 0x105 about missed lessons; repeat branch is a one-liner.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_26E8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_26A0")
OP_A2(0x3)
TurnDirection(0xFE, 0x105, 0)
ChrTalk(
0xFE,
(
"啊,科洛丝。\x01",
"你回来了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#040F碧欧拉老师,\x01",
"我刚刚才回来。\x02\x03",
"对不起……\x01",
"我又没来上课。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"呵呵,没关系。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
"你不是有重要的事情吗?\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"有时间的话来一下办公室,\x01",
"我给你漏下的上课笔记。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
"#040F嗯,我过会儿就去。\x02",
)
CloseMessageWindow()
Jump("loc_26E8")
label("loc_26A0")
ChrTalk(
0xFE,
(
"我还是趁现在\x01",
"批改一下考试卷子吧。\x02",
)
)
CloseMessageWindow()
label("loc_26E8")
TalkEnd(0x12)
Return()
# Function_7_233C end
# Talk handler for NPC 0x13 (another teacher, per the dialogue). Flag
# (0x0, bit 4) is the per-visit "already talked" marker (OP_A2(0x4)).
# The (0x87,5) and (0x85,7) states reuse the four-way facing selection:
# read chr work 0x4 (presumably the angle after TurnDirection — TODO
# confirm), pick a sub-sprite, then relock the pose with OP_8C + flag 0x10.
def Function_8_26EC(): pass
label("Function_8_26EC")
TalkBegin(0x13)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_27E5")
ClearChrFlags(0xFE, 0x10)
TurnDirection(0xFE, 0x0, 0)
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x2D), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2718")
SetChrSubChip(0xFE, 1)
Jump("loc_2749")
label("loc_2718")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x87), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_272E")
SetChrSubChip(0xFE, 0)
Jump("loc_2749")
label("loc_272E")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x10E), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2744")
SetChrSubChip(0xFE, 2)
Jump("loc_2749")
label("loc_2744")
SetChrSubChip(0xFE, 1)
label("loc_2749")
OP_8C(0xFE, 90, 0)
SetChrFlags(0xFE, 0x10)
ChrTalk(
0xFE,
(
"我是今年\x01",
"入学考试的出题老师。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"呵呵……\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
"我已经跃跃欲试了。\x02",
)
CloseMessageWindow()
SetChrSubChip(0xFE, 0)
Jump("loc_2C95")
label("loc_27E5")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_28D7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2874")
OP_A2(0x4)
ChrTalk(
0xFE,
(
"为什么我们班的同学\x01",
"尽办些游戏和占卜的活动……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"维奥拉的班级\x01",
"都是很正经的东西呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_28D4")
label("loc_2874")
ChrTalk(
0xFE,
(
"那个班的老师不行,\x01",
"学生们却都很优秀。\x02",
)
)
CloseMessageWindow()
label("loc_28D4")
Jump("loc_2C95")
label("loc_28D7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_293D")
ChrTalk(
0xFE,
"人还真是多呀……\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
"大家都很闲吗?\x02",
)
CloseMessageWindow()
Jump("loc_2C95")
label("loc_293D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 0)), scpexpr(EXPR_END)), "loc_2A45")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_29F2")
OP_A2(0x4)
ChrTalk(
0xFE,
(
"嗯,明天就能好好看到\x01",
"同学们努力的成果了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"无论怎样,\x01",
"那天我可不能再啰嗦了。\x02",
)
)
CloseMessageWindow()
Jump("loc_2A42")
label("loc_29F2")
ChrTalk(
0xFE,
(
"嗯,明天就能好好看到\x01",
"同学们努力的成果了。\x02",
)
)
CloseMessageWindow()
label("loc_2A42")
Jump("loc_2C95")
label("loc_2A45")
# State (0x85, bit 7): same facing selection, then first/repeat dialogue.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_2C95")
ClearChrFlags(0xFE, 0x10)
TurnDirection(0xFE, 0x0, 0)
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x2D), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2A6E")
SetChrSubChip(0xFE, 1)
Jump("loc_2A9F")
label("loc_2A6E")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x87), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2A84")
SetChrSubChip(0xFE, 0)
Jump("loc_2A9F")
label("loc_2A84")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x10E), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2A9A")
SetChrSubChip(0xFE, 2)
Jump("loc_2A9F")
label("loc_2A9A")
SetChrSubChip(0xFE, 1)
label("loc_2A9F")
OP_8C(0xFE, 90, 0)
SetChrFlags(0xFE, 0x10)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2BC6")
OP_A2(0x4)
ChrTalk(
0xFE,
(
"在学园祭的准备期间,\x01",
"大家学习都提不起精神来呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"就算在课上\x01",
"也开始不愿动脑筋了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"要不要明天\x01",
"来次突击测验呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_2C90")
label("loc_2BC6")
ChrTalk(
0xFE,
(
"在学园祭的准备期间,\x01",
"大家学习都提不起精神来呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"要不要明天\x01",
"来次突击测验呢。\x02",
)
)
CloseMessageWindow()
label("loc_2C90")
SetChrSubChip(0xFE, 0)
label("loc_2C95")
TalkEnd(0x13)
Return()
# Function_8_26EC end
# Talk handler for NPC 0x14 (the PE teacher, per the dialogue). Unlike the
# other handlers, the facing/sub-sprite selection runs unconditionally at
# the top (final facing 270). Flag (0x0, bit 5) is the per-visit "already
# talked" marker (OP_A2(0x5)); the (0x86,1) state instead branches on
# quest 0x27 progress bit 0x8000 via OP_29.
def Function_9_2C99(): pass
label("Function_9_2C99")
TalkBegin(0x14)
ClearChrFlags(0xFE, 0x10)
TurnDirection(0xFE, 0x0, 0)
# Select sub-sprite from chr work 0x4 (presumably the facing angle — TODO
# confirm), then relock the pose facing 270.
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x5A), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2CBE")
SetChrSubChip(0xFE, 2)
Jump("loc_2CEF")
label("loc_2CBE")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0xE1), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2CD4")
SetChrSubChip(0xFE, 1)
Jump("loc_2CEF")
label("loc_2CD4")
Jc((scpexpr(EXPR_GET_CHR_WORK, 0xFE, 0x4), scpexpr(EXPR_PUSH_LONG, 0x13B), scpexpr(EXPR_LSS), scpexpr(EXPR_END)), "loc_2CEA")
SetChrSubChip(0xFE, 0)
Jump("loc_2CEF")
label("loc_2CEA")
SetChrSubChip(0xFE, 2)
label("loc_2CEF")
OP_8C(0xFE, 270, 0)
SetChrFlags(0xFE, 0x10)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_2D79")
ChrTalk(
0xFE,
"嗯,差不多该去巡视了。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我要看看\x01",
"有没有同学太过懒散了。\x02",
)
)
CloseMessageWindow()
Jump("loc_3126")
label("loc_2D79")
# State (0x86, bit 1): dialogue depends on quest 0x27 bit 0x8000 (OP_29).
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_2EE6")
Jc((scpexpr(EXPR_EXEC_OP, "OP_29(0x27, 0x1, 0x8000)"), scpexpr(EXPR_END)), "loc_2E20")
ChrTalk(
0xFE,
"哦,昨天真是辛苦你们了。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
"我真是个不称职的老师啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"为了防止再发生突发事件,\x01",
"我在这里待命。\x02",
)
)
CloseMessageWindow()
Jump("loc_2EE3")
label("loc_2E20")
ChrTalk(
0xFE,
(
"昨天,\x01",
"有学生说在旧校舍看到了魔兽。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"为了慎重起见,\x01",
"我把旧校舍的门锁紧了。\x01",
"不过一会儿还是再去看看吧。\x02",
)
)
CloseMessageWindow()
label("loc_2EE3")
Jump("loc_3126")
label("loc_2EE6")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 6)), scpexpr(EXPR_END)), "loc_3089")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_2FDE")
OP_A2(0x5)
ChrTalk(
0xFE,
(
"这个学园一共设立了\x01",
"三个方向的专业。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我教的科目则是\x01",
"所有专业都必修的科目——体育。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"现在这个时候我没有课,\x01",
"就来整理一下教案了。\x02",
)
)
CloseMessageWindow()
Jump("loc_3086")
label("loc_2FDE")
ChrTalk(
0xFE,
(
"我教的科目则是\x01",
"所有专业都必修的科目——体育。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"现在这个时候我没有课,\x01",
"就来整理一下教案了。\x02",
)
)
CloseMessageWindow()
label("loc_3086")
Jump("loc_3126")
label("loc_3089")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 0)), scpexpr(EXPR_END)), "loc_3126")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 5)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_30FC")
OP_A2(0x5)
ChrTalk(
0xFE,
(
"唔,怎么,\x01",
"你们是哪个班的学生?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"现在正在上课哦。\x01",
"要有外出许可证\x01",
"才能出去哦。\x02",
)
)
CloseMessageWindow()
Jump("loc_3126")
label("loc_30FC")
ChrTalk(
0xFE,
(
"要有外出许可证\x01",
"才能出去哦。\x02",
)
)
CloseMessageWindow()
label("loc_3126")
SetChrSubChip(0xFE, 0)
TalkEnd(0x14)
Return()
# Function_9_2C99 end
# Talk handler for NPC 0x15 (a student, per the dialogue). Flag (0x0, bit 6)
# is the per-visit "already talked" marker (OP_A2(0x6)); only the (0x87,5)
# and (0x86,2) story states have dialogue here.
def Function_10_312F(): pass
label("Function_10_312F")
TalkBegin(0x15)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_31F1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_31B4")
OP_A2(0x6)
ChrTalk(
0xFE,
(
"呼~\x01",
"今天的课总算上完了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"下午的课\x01",
"一定会睡着的……\x02",
)
)
CloseMessageWindow()
Jump("loc_31EE")
label("loc_31B4")
ChrTalk(
0xFE,
(
"下午的课\x01",
"一定会睡着的……\x02",
)
)
CloseMessageWindow()
label("loc_31EE")
Jump("loc_3328")
label("loc_31F1")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_3328")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_32A5")
OP_A2(0x6)
ChrTalk(
0xFE,
(
"我一直在照顾\x01",
"我们社团的店面呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"班里的活动\x01",
"就没办法参加了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"嗯,感觉真是很充实呢。\x02",
)
CloseMessageWindow()
Jump("loc_3328")
label("loc_32A5")
ChrTalk(
0xFE,
(
"我一直在照顾\x01",
"我们社团的店面呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"班里的活动\x01",
"就没办法参加了。\x02",
)
)
CloseMessageWindow()
label("loc_3328")
TalkEnd(0x15)
Return()
# Function_10_312F end
def Function_11_332C(): pass
label("Function_11_332C")
TalkBegin(0x16)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_338F")
ChrTalk(
0xFE,
"那么,该去社团活动了。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"今天要把\x01",
"画到一半的绘画完成!\x02",
)
)
CloseMessageWindow()
Jump("loc_373B")
label("loc_338F")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_3425")
ChrTalk(
0xFE,
(
"嗯,\x01",
"茶座还是要办成这样才对啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"辛苦也值得了。\x02",
)
CloseMessageWindow()
Jump("loc_373B")
label("loc_3425")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_34E7")
ChrTalk(
0xFE,
(
"嗯,不管怎么说\x01",
"准备工作还是赶上了……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"因为通宵工作,\x01",
"现在好困啊……\x02",
)
)
CloseMessageWindow()
Jump("loc_373B")
label("loc_34E7")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 0)), scpexpr(EXPR_END)), "loc_35D8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_357E")
OP_A2(0x7)
ChrTalk(
0xFE,
(
"唔哇哇!\x01",
"怎么回事!!\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"呆在这里\x01",
"会来不及准备的。\x02",
)
)
CloseMessageWindow()
Jump("loc_35D5")
label("loc_357E")
ChrTalk(
0xFE,
(
"……难道说\x01",
"这样下去要通宵赶工了?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"呼,\x01",
"做衣服花了太多时间了。\x02",
)
)
CloseMessageWindow()
label("loc_35D5")
Jump("loc_373B")
label("loc_35D8")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_373B")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x0, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_36DA")
OP_A2(0x7)
ChrTalk(
0xFE,
"啦啦啦~~⊙\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我正在做\x01",
"摆摊时穿的衣服。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"唔~就是要在\x01",
"这种时候集中精力!\x02",
)
)
CloseMessageWindow()
Jump("loc_373B")
label("loc_36DA")
ChrTalk(
0xFE,
(
"因为做这种东西\x01",
"是我最喜欢干的事情了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"对了对了,\x01",
"接下来还要做房间的装饰。\x02",
)
)
CloseMessageWindow()
label("loc_373B")
TalkEnd(0x16)
Return()
# Function_11_332C end
def Function_12_373F(): pass
label("Function_12_373F")
TalkBegin(0x17)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_3784")
ChrTalk(
0xFE,
"欢迎光临~\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"如果需要的话,\x01",
"我可以帮你们找空位。\x02",
)
)
CloseMessageWindow()
Jump("loc_39F3")
label("loc_3784")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_37DF")
ChrTalk(
0xFE,
"嘿嘿,这件制服很可爱吧。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
"坎诺还为我准备了好多呢。\x02",
)
CloseMessageWindow()
Jump("loc_39F3")
label("loc_37DF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 0)), scpexpr(EXPR_END)), "loc_38B9")
ChrTalk(
0xFE,
(
"一想时间还很充裕\x01",
"就不由自主地松懈了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"不过应该还来得及。\x01",
"努力把店面打扮得漂亮一些吧。\x02",
)
)
CloseMessageWindow()
Jump("loc_39F3")
label("loc_38B9")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_39F3")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_397D")
OP_A2(0x8)
ChrTalk(
0xFE,
(
"坎诺君的手\x01",
"可巧啦……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"这次他缝了个\x01",
"布娃娃给我呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_39F3")
label("loc_397D")
ChrTalk(
0xFE,
(
"就算是演出用的女佣服装\x01",
"也是他自己做的。\x02",
)
)
CloseMessageWindow()
label("loc_39F3")
TalkEnd(0x17)
Return()
# Function_12_373F end
def Function_13_39F7(): pass
label("Function_13_39F7")
TalkBegin(0x18)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_3A43")
ChrTalk(
0xFE,
(
"就算是再微小的问题,\x01",
"拉迪奥老师也会\x01",
"很仔细地给我讲解。\x02",
)
)
CloseMessageWindow()
Jump("loc_3B9E")
label("loc_3A43")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_3B9E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3B3D")
OP_A2(0x9)
ChrTalk(
0xFE,
(
"社会系各位的作品\x01",
"都是研究成果发表啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"哇……真是厉害啊……\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我们系的同学\x01",
"只会办茶座或者\x01",
"鬼怪屋什么的……\x02",
)
)
CloseMessageWindow()
Jump("loc_3B9E")
label("loc_3B3D")
ChrTalk(
0xFE,
(
"社会系各位的作品\x01",
"都是研究成果发表啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"哇……真是厉害啊……\x02",
)
CloseMessageWindow()
label("loc_3B9E")
TalkEnd(0x18)
Return()
# Function_13_39F7 end
# NPC talk handler for character 0x19 (decompiled scenario bytecode).
# Branches on scenario flags 0x86/2 and 0x86/1 to pick festival-day dialogue.
def Function_14_3BA2(): pass

label("Function_14_3BA2")

TalkBegin(0x19)
# Flag 0x86 bit 2 set -> cafe is open: greeting line.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_3BD6")
ChrTalk(
    0xFE,
    (
        "欢迎光临。\x01",
        "这里是我们的茶座『芳塔娜』。\x02",
    )
)
CloseMessageWindow()
Jump("loc_3C31")

label("loc_3BD6")

# Flag 0x86 bit 1 set -> pre-opening costume line.
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_3C31")
ChrTalk(
    0xFE,
    (
        "穿成这个样子\x01",
        "虽然有点不好意思,\x01",
        "但为了学园祭,忍了吧。\x02",
    )
)
CloseMessageWindow()

label("loc_3C31")

TalkEnd(0x19)
Return()

# Function_14_3BA2 end
def Function_15_3C35(): pass
label("Function_15_3C35")
TalkBegin(0x1A)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_3C71")
ChrTalk(
0xFE,
(
"唔,\x01",
"今天也是很有意义的一课啊……\x02",
)
)
CloseMessageWindow()
Jump("loc_44FB")
label("loc_3C71")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 2)), scpexpr(EXPR_END)), "loc_3DEF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3D5E")
OP_A2(0xB)
ChrTalk(
0xFE,
(
"虽然办娱乐活动很有意思,\x01",
"不过让大家知道我们\x01",
"平日的研究成果也是很重要的。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"尤其是有很多前辈\x01",
"和市民们前来参观。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"……虽说如此,\x01",
"考试也不会得到更高的分数。\x02",
)
)
CloseMessageWindow()
Jump("loc_3DEC")
label("loc_3D5E")
ChrTalk(
0xFE,
(
"虽然办娱乐活动很有意思,\x01",
"不过让大家知道我们\x01",
"平日的研究成果也是很重要的。\x02",
)
)
CloseMessageWindow()
label("loc_3DEC")
Jump("loc_44FB")
label("loc_3DEF")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_421D")
Jc((scpexpr(EXPR_EXEC_OP, "OP_29(0x27, 0x1, 0x1000)"), scpexpr(EXPR_END)), "loc_3F8A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3F56")
OP_A2(0xB)
ChrTalk(
0xFE,
(
"我们社会系发表了\x01",
"从各种产业的经济指标上\x01",
"进行经济动向的预测的研究。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"而且也收集了\x01",
"通俗易懂的关于卢安地区\x01",
"历史和发展的资料。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"如果有兴趣的话就请看一下吧。\x02",
)
CloseMessageWindow()
Jump("loc_3F87")
label("loc_3F56")
ChrTalk(
0xFE,
"如果有兴趣的话就请看一下吧。\x02",
)
CloseMessageWindow()
label("loc_3F87")
Jump("loc_421A")
label("loc_3F8A")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_40F8")
OP_A2(0xB)
ChrTalk(
0xFE,
(
"我们社会系发表了\x01",
"从各种产业的经济指标上\x01",
"进行经济动向的预测的研究。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"而且也收集了\x01",
"通俗易懂的关于卢安地区\x01",
"历史和发展的资料。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"虽然有几份资料没到手,\x01",
"但在这么点时间里,\x01",
"能做成这么完善的内容也算不错了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"如果有兴趣的话就请看一下吧。\x02",
)
CloseMessageWindow()
Jump("loc_421A")
label("loc_40F8")
Jc((scpexpr(EXPR_EXEC_OP, "OP_29(0x27, 0x1, 0x20)"), scpexpr(EXPR_END)), "loc_41E9")
ChrTalk(
0xFE,
(
"虽然没赶上这次发表,\x01",
"但是《卢安经济史》是很贵重的资料。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"如果你们看到那三本书的话,\x01",
"麻烦帮我放回资料室的书架上。\x02",
)
)
CloseMessageWindow()
Jump("loc_421A")
label("loc_41E9")
ChrTalk(
0xFE,
"如果有兴趣的话就请看一下吧。\x02",
)
CloseMessageWindow()
label("loc_421A")
Jump("loc_44FB")
label("loc_421D")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 0)), scpexpr(EXPR_END)), "loc_4341")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_42EF")
OP_A2(0xB)
ChrTalk(
0xFE,
(
"唔,\x01",
"还是需要一些辅助研究的资料啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"时间不够了啊……\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"不过在有限的时间里,\x01",
"内容已经可算是做得很完善了。\x02",
)
)
CloseMessageWindow()
Jump("loc_433E")
label("loc_42EF")
ChrTalk(
0xFE,
(
"唔,\x01",
"还是需要一些辅助研究的资料啊。\x02",
)
)
CloseMessageWindow()
label("loc_433E")
Jump("loc_44FB")
label("loc_4341")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_44FB")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 3)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_44D2")
OP_A2(0xB)
TurnDirection(0xFE, 0x105, 0)
ChrTalk(
0xFE,
(
"啊,科洛丝。\x01",
"你终于回来了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我们班级的节目\x01",
"准备工作进展得很顺利啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"你们舞台剧方面怎么样了?\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"听说连主要演员\x01",
"都还没决定下来啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#040F呵呵,罗基克,\x01",
"那件事已经解决了。\x02\x03",
"舞台剧方面我们不会输的。\x01",
"敬请期待哦。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
"哦,是吗……\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
"那我们互相加油吧。\x02",
)
CloseMessageWindow()
Jump("loc_44FB")
label("loc_44D2")
TurnDirection(0xFE, 0x105, 0)
ChrTalk(
0xFE,
"科洛丝,我们互相加油吧。\x02",
)
CloseMessageWindow()
label("loc_44FB")
TalkEnd(0x1A)
Return()
# Function_15_3C35 end
# Unconditional NPC talk handler for character 0x1B (two dialogue boxes).
def Function_16_44FF(): pass

label("Function_16_44FF")

TalkBegin(0x1B)
ChrTalk(
    0xFE,
    (
        "这次的女王诞辰庆典上\x01",
        "要召开武术大会。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0xFE,
    (
        "我们的击剑部\x01",
        "也好想参加啊。\x02",
    )
)
CloseMessageWindow()
TalkEnd(0x1B)
Return()

# Function_16_44FF end
# Unconditional NPC talk handler for character 0x1C.
def Function_17_45A6(): pass

label("Function_17_45A6")

TalkBegin(0x1C)
ChrTalk(
    0xFE,
    "啊,老师,是这里。\x02",
)
CloseMessageWindow()
ChrTalk(
    0xFE,
    (
        "从这里开始\x01",
        "就完全不明白了。\x02",
    )
)
CloseMessageWindow()
TalkEnd(0x1C)
Return()

# Function_17_45A6 end
def Function_18_45F2(): pass
label("Function_18_45F2")
TalkBegin(0x1D)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_4670")
ChrTalk(
0xFE,
(
"啊~\x01",
"今天是弓道部的练习日。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"一直在准备学园祭,\x01",
"好久没有休息了。\x02",
)
)
CloseMessageWindow()
Jump("loc_4797")
label("loc_4670")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x1, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4719")
OP_A2(0xE)
TurnDirection(0xFE, 0x105, 0)
ChrTalk(
0xFE,
(
"啊,科洛丝。\x01",
"你回来了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"我们差不多\x01",
"该开始装饰教室了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"先从可以着手的地方\x01",
"开始进行吧……\x02",
)
)
CloseMessageWindow()
Jump("loc_4797")
label("loc_4719")
ChrTalk(
0xFE,
(
"我们差不多\x01",
"该开始装饰教室了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"先从可以着手的地方\x01",
"开始进行吧……\x02",
)
)
CloseMessageWindow()
label("loc_4797")
TalkEnd(0x1D)
Return()
# Function_18_45F2 end
# NPC talk handler for character 0x1E; dialogue selected by scenario
# flags 0x87/5 (after class) and 0x85/7 (festival preparation).
def Function_19_479B(): pass

label("Function_19_479B")

TalkBegin(0x1E)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_4838")
ChrTalk(
    0xFE,
    "今天的课上完了。\x02",
)
CloseMessageWindow()
ChrTalk(
    0xFE,
    (
        "我也没参加社团活动,\x01",
        "那就快点回家去吧。\x02",
    )
)
CloseMessageWindow()
Jump("loc_48B1")

label("loc_4838")

Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_48B1")
ChrTalk(
    0xFE,
    (
        "嗯,\x01",
        "首先是要去采购呀。\x02",
    )
)
CloseMessageWindow()
ChrTalk(
    0xFE,
    (
        "只要把列表上的东西\x01",
        "都买过来就行了吧?\x02",
    )
)
CloseMessageWindow()

label("loc_48B1")

TalkEnd(0x1E)
Return()

# Function_19_479B end
def Function_20_48B5(): pass
label("Function_20_48B5")
TalkBegin(0x1F)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x87, 5)), scpexpr(EXPR_END)), "loc_49C9")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x2, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4972")
OP_A2(0x10)
ChrTalk(
0xFE,
(
"唔,我正想要问老师\x01",
"没听明白的地方呢……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"米丽亚老师一上完课\x01",
"就马上回办公室了。\x02",
)
)
CloseMessageWindow()
Jump("loc_49C6")
label("loc_4972")
ChrTalk(
0xFE,
(
"我答应今天要去\x01",
"姐姐的店里帮忙,\x01",
"必须快点回去了……\x02",
)
)
CloseMessageWindow()
label("loc_49C6")
Jump("loc_4B13")
label("loc_49C9")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x85, 7)), scpexpr(EXPR_END)), "loc_4B13")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x2, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4AC4")
OP_A2(0x10)
ChrTalk(
0xFE,
(
"真是的,\x01",
"为什么你老是那么草率啊。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"大家都很忙,\x01",
"人手也不足,\x01",
"你提高点效率好不好?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"为了买东西在卢安\x01",
"和学园之间往返了好几次呢。\x02",
)
)
CloseMessageWindow()
Jump("loc_4B13")
label("loc_4AC4")
ChrTalk(
0xFE,
(
"呼,\x01",
"基诺奇奥做事真是很粗心呀。\x02",
)
)
CloseMessageWindow()
label("loc_4B13")
TalkEnd(0x1F)
Return()
# Function_20_48B5 end
def Function_21_4B17(): pass
label("Function_21_4B17")
TalkBegin(0x20)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x86, 1)), scpexpr(EXPR_END)), "loc_4E72")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x2, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_4DE7")
OP_A2(0x11)
ChrTalk(
0x101,
"#000F啊,梅贝尔市长?\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
"#610F啊,是艾丝蒂尔和约修亚!\x02",
)
CloseMessageWindow()
ChrTalk(
0x101,
(
"#000F您为什么会在这里呢?\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"#610F呵呵,其实我是\x01",
"这个学院的毕业生。\x02\x03",
"每年的学园祭都要来出席的。\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F哦,是这样啊。\x02",
)
CloseMessageWindow()
ChrTalk(
0xFE,
(
"#610F那么你们俩是为什么来这儿的啊?\x01",
" \x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F嘿嘿,其实呢……\x02",
)
CloseMessageWindow()
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"艾丝蒂尔\x01",
"向梅贝尔市长说明了事情的经过。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
ChrTalk(
0xFE,
(
"#610F哦,是协助演出啊。\x02\x03",
"我也认为演出是很考功夫的。\x01",
" \x02\x03",
"呵呵,连艾丝蒂尔\x01",
"和约修亚也参加演出的话,\x01",
"那我真要好好看看才行呢。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x102,
(
"#010F(唉,真不想让\x01",
"认识的人看到啊……)\x02",
)
)
Jump("loc_4E72")
label("loc_4DE7")
ChrTalk(
0xFE,
(
"#610F我也认为演出是很考功夫的。\x01",
" \x02\x03",
"呵呵,连艾丝蒂尔\x01",
"和约修亚也参加演出的话,\x01",
"那我真要好好看看才行呢。\x02",
)
)
CloseMessageWindow()
label("loc_4E72")
TalkEnd(0x20)
Return()
# Function_21_4B17 end
def Function_22_4E76(): pass
label("Function_22_4E76")
EventBegin(0x0)
ClearMapFlags(0x1)
OP_6D(116280, 0, 2160, 0)
SetChrPos(0x101, 117450, 0, -1700, 0)
SetChrPos(0x102, 116510, 0, -1950, 0)
SetChrPos(0x105, 117000, 0, -1020, 0)
FadeToBright(1000, 0)
OP_0D()
ChrTalk(
0x105,
(
"#040F校长,您好。\x01",
"我已经回来了。\x02",
)
)
CloseMessageWindow()
def lambda_4EF3():
OP_6D(117230, 0, 4590, 2000)
ExitThread()
QueueWorkItem(0x105, 2, lambda_4EF3)
def lambda_4F0B():
OP_8E(0xFE, 0x1C890, 0x0, 0x690, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x105, 1, lambda_4F0B)
Sleep(500)
def lambda_4F2B():
OP_8E(0xFE, 0x1CB10, 0x0, 0x690, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x101, 1, lambda_4F2B)
Sleep(300)
def lambda_4F4B():
OP_8E(0xFE, 0x1C64C, 0x0, 0x5D2, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x102, 1, lambda_4F4B)
WaitChrThread(0x105, 0x1)
def lambda_4F6B():
OP_8E(0xFE, 0x1C58E, 0x0, 0x9F6, 0x7D0, 0x0)
ExitThread()
QueueWorkItem(0x105, 1, lambda_4F6B)
WaitChrThread(0x105, 0x1)
OP_8C(0x105, 0, 400)
WaitChrThread(0x101, 0x1)
TurnDirection(0x101, 0x105, 400)
WaitChrThread(0x102, 0x1)
ChrTalk(
0x8,
(
"#780F#1P科洛丝,你回来了啊。\x02\x03",
"哎哟?这两位是……\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#000F初次见面,校长。\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F我们是游击士协会的人。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#780F#1P呵呵,如此年轻就成为游击士,\x01",
"的确是后生可畏啊。\x02\x03",
"听说孤儿院发生了火灾,\x01",
"莫非你们是为那件事而来的?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
"#049F#4P是的,其实……\x02",
)
CloseMessageWindow()
FadeToDark(300, 0, 100)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"科洛丝向科林兹校长\x01",
"说明了包括纵火事件在内的一系列事情。\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
ChrTalk(
0x8,
(
"#780F#1P是吗……\x01",
"那事情可就严重了。\x02\x03",
"要是我们也能以什么方式\x01",
"给院长和孩子们帮上忙就好了……\x02\x03",
"…………………………\x02\x03",
"那么首先,一定要办好学园祭,\x01",
"不能辜负那些孩子对我们的期待……\x02\x03",
"而且也只能从这里做起了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
(
"#047F是……\x02\x03",
"#040F校长,有件事想和您说说。\x01",
"这次我想请艾丝蒂尔和约修亚\x01",
"来协助参演今年的舞台剧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#781F#1P这想法不错嘛。\x02\x03",
"#780F艾丝蒂尔、约修亚。\x01",
"这次的舞台剧就拜托你们了。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#006F啊,是!\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#010F我们愿尽绵薄之力。\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#780F#1P与舞台剧相关的工作\x01",
"是由学生会长乔儿全权负责的。\x02\x03",
"导演也由她担任,\x01",
"所以详细情形向她请教就行了。\x02\x03",
"而我这里就……\x01",
"帮你们两位安排宿舍吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#004F哎……?\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#014F宿舍?\x02",
)
CloseMessageWindow()
ChrTalk(
0x8,
(
"#780F#1P毕竟学园祭\x01",
"已经迫在眉睫了啊。\x02\x03",
"恐怕每天都需要\x01",
"排练到很晚呢。\x02\x03",
"这样一来,\x01",
"就需要有个住的地方对吧?\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#501F啊,原来是这样啊……\x02",
)
CloseMessageWindow()
ChrTalk(
0x102,
"#019F这样的确方便多了。\x02",
)
CloseMessageWindow()
OP_22(0x8A, 0x0, 0x64)
Sleep(1000)
OP_62(0x101, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_62(0x102, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_62(0x105, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
OP_62(0x8, 0x0, 2000, 0x26, 0x26, 0xFA, 0x1)
Sleep(1000)
ChrTalk(
0x8,
(
"#780F#1P刚好也下课了。\x02\x03",
"科洛丝,你就马上\x01",
"把他们介绍给学生会长吧。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x105,
"#041F好的。\x02",
)
CloseMessageWindow()
OP_8C(0x105, 135, 400)
ChrTalk(
0x105,
(
"#040F#1P艾丝蒂尔、约修亚。\x01",
"接下来我带你们去学生会室吧。\x02\x03",
"这座主楼的右边是社团大楼,\x01",
"而学生会室就在大楼的第二层。\x02",
)
)
CloseMessageWindow()
ChrTalk(
0x101,
"#006F嗯,那我们走吧。\x02",
)
CloseMessageWindow()
OP_28(0x3D, 0x1, 0x8)
OP_28(0x3D, 0x1, 0x10)
EventEnd(0x0)
Return()
# Function_22_4E76 end
def Function_23_5672(): pass
label("Function_23_5672")
EventBegin(0x0)
OP_77(0xFF, 0xC8, 0x96, 0x0, 0x0)
OP_6D(-1190, 0, 33250, 0)
ClearChrFlags(0x9, 0x80)
ClearChrFlags(0xA, 0x80)
ClearChrFlags(0xB, 0x80)
ClearChrFlags(0xC, 0x80)
ClearChrFlags(0xD, 0x80)
ClearChrFlags(0xE, 0x80)
ClearChrFlags(0xF, 0x80)
SetChrFlags(0x9, 0x4)
SetChrFlags(0xA, 0x4)
SetChrFlags(0xB, 0x4)
SetChrFlags(0xC, 0x4)
SetChrFlags(0xD, 0x4)
SetChrFlags(0xE, 0x4)
SetChrFlags(0x105, 0x4)
SetChrFlags(0x101, 0x4)
SetChrFlags(0x102, 0x4)
SetChrChipByIndex(0x101, 19)
SetChrChipByIndex(0x102, 20)
SetChrChipByIndex(0x105, 21)
SetChrChipByIndex(0xB, 23)
SetChrChipByIndex(0xC, 24)
SetChrChipByIndex(0xD, 22)
SetChrChipByIndex(0xE, 25)
SetChrPos(0x101, 500, 200, 32060, 90)
SetChrPos(0x102, 500, 200, 29980, 90)
SetChrPos(0x105, 520, 200, 34100, 90)
SetChrPos(0xA, -2750, 200, 30010, 90)
SetChrPos(0x9, -2750, 200, 32060, 90)
SetChrPos(0xC, -2750, 100, 34060, 90)
SetChrPos(0xB, -5900, 100, 30010, 90)
SetChrPos(0xD, -5900, 100, 34160, 90)
SetChrPos(0xE, -5900, 100, 31920, 90)
SetChrPos(0xF, 5300, 250, 32119, 90)
def lambda_57B1():
OP_6D(3580, 0, 33240, 5000)
ExitThread()
QueueWorkItem(0x101, 1, lambda_57B1)
FadeToBright(2000, 0)
OP_8E(0xF, 0x150E, 0xFA, 0x76FC, 0x3E8, 0x0)
OP_8C(0xF, 90, 400)
Sleep(500)
OP_8E(0xF, 0x14FA, 0xFA, 0x7CD8, 0x3E8, 0x0)
OP_8C(0xF, 90, 400)
Sleep(500)
OP_8E(0xF, 0x150E, 0xFA, 0x76FC, 0x3E8, 0x0)
OP_8C(0xF, 270, 400)
Sleep(1000)
def lambda_5832():
OP_6D(2000, 0, 33250, 1500)
ExitThread()
QueueWorkItem(0xF, 1, lambda_5832)
OP_8E(0xF, 0xD2A, 0x0, 0x7710, 0x7D0, 0x0)
OP_8E(0xF, 0xA0A, 0x0, 0x7B70, 0x7D0, 0x0)
TurnDirection(0xF, 0x101, 400)
SetChrSubChip(0x102, 1)
Sleep(100)
SetChrSubChip(0x105, 2)
OP_62(0xF, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_63(0xF)
Sleep(500)
OP_62(0x101, 0x0, 2000, 0x28, 0x2B, 0x64, 0x3)
Sleep(1500)
OP_62(0x101, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_63(0x101)
Sleep(500)
OP_62(0xF, 0x0, 2000, 0x10, 0x13, 0xFA, 0x1)
Sleep(1500)
Sleep(500)
TurnDirection(0xF, 0x102, 400)
SetChrSubChip(0x101, 2)
Sleep(500)
OP_62(0xF, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1500)
OP_63(0xF)
Sleep(500)
OP_62(0x102, 0x0, 2000, 0x18, 0x1B, 0xFA, 0x0)
Sleep(1200)
OP_63(0x102)
Sleep(500)
OP_62(0xF, 0x0, 2000, 0x8, 0x9, 0xFA, 0x2)
Sleep(1000)
Sleep(500)
OP_62(0x101, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
OP_62(0x105, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
OP_62(0x9, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
OP_62(0xA, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
OP_62(0xB, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
OP_62(0xC, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
OP_62(0xD, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
OP_62(0xE, 0x0, 2000, 0x26, 0x27, 0xFA, 0x2)
Sleep(1500)
FadeToDark(1000, 0, -1)
AnonymousTalk(
(
scpstr(SCPSTR_CODE_COLOR, 0x5),
"上午,他们和其他学生一起\x01",
"在老师的教导下接受正统的课程教育……\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
OP_A2(0x3FA)
NewScene("ED6_DT01/T2511 ._SN", 100, 0, 0)
IdleLoop()
Return()
# Function_23_5672 end
# Map-exit guard dialogue: if scena value index 0xA equals 4 the party
# includes Kloe (0x105) who speaks; otherwise Joshua (0x102) does.
def Function_24_5A5E(): pass

label("Function_24_5A5E")

Jc((scpexpr(EXPR_PUSH_VALUE_INDEX, 0xA), scpexpr(EXPR_PUSH_LONG, 0x4), scpexpr(EXPR_EQU), scpexpr(EXPR_END)), "loc_5AF6")
TurnDirection(0x105, 0x1, 400)
ChrTalk(
    0x105,
    (
        "#040F对不起,\x01",
        "现在正在上课。\x02\x03",
        "我们先去校长办公室吧。\x01",
        "就在这个建筑物的一楼走廊里面。\x02",
    )
)
CloseMessageWindow()
Jump("loc_5B50")

label("loc_5AF6")

TurnDirection(0x102, 0x101, 400)
ChrTalk(
    0x102,
    (
        "#010F现在好像在上课。\x02\x03",
        "先去校长办公室吧。\x02",
    )
)
CloseMessageWindow()

label("loc_5B50")

TalkEnd(0xFF)
Return()

# Function_24_5A5E end
# Examine handler for a hallway sign: fades the screen, shows the sign
# text (laid out vertically via padded strings), then restores the view.
def Function_25_5B54(): pass

label("Function_25_5B54")

FadeToDark(300, 0, 100)
SetChrName("")
SetMessageWindowPos(-1, -1, -1, -1)
AnonymousTalk(
    (
        scpstr(SCPSTR_CODE_COLOR, 0x5),
        " 走 \x01",
        " 廊 \x01",
        " 里 \x01",
        " 请 \x01",
        " 保 \x01",
        " 学持 \x01",
        " 生安 \x01",
        " 指静 \x01",
        " 导! \x01",
        " 部 \x02",
    )
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
# Restore the default message-window rectangle.
SetMessageWindowPos(72, 320, 56, 3)
TalkEnd(0xFF)
Return()

# Function_25_5B54 end
# Item-pickup handler: plays a sound, hides the book object (0x22),
# shows the "obtained" message, adds item 0x33E and sets quest flag 0x80
# on quest 0x27.
def Function_26_5BB8(): pass

label("Function_26_5BB8")

OP_22(0x11, 0x0, 0x64)
SetChrFlags(0x22, 0x80)
OP_64(0x5, 0x1)
FadeToDark(300, 0, 100)
SetChrName("")
SetMessageWindowPos(-1, -1, -1, -1)
AnonymousTalk(
    (
        scpstr(SCPSTR_CODE_COLOR, 0x0),
        "得到了\x07\x02",
        "卢安经济史·中\x07\x00",
        "。\x02",
    )
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
SetMessageWindowPos(72, 320, 56, 3)
OP_3E(0x33E, 1)
OP_28(0x27, 0x1, 0x80)
TalkEnd(0xFF)
Return()

# Function_26_5BB8 end
# Place-name callbacks: each sets the on-screen area name by table index.
# NOTE(review): the decompiler emitted the same name comment for five
# distinct indices (0x6F/0x5E/0x6E/0x74/0x73) - likely a decompiler
# artifact; verify against the game's place-name table.
def Function_27_5C20(): pass

label("Function_27_5C20")

SetPlaceName(0x6F)  # decompiler note: "Main building - social studies classroom"
Return()

# Function_27_5C20 end


def Function_28_5C24(): pass

label("Function_28_5C24")

SetPlaceName(0x5E)  # decompiler note: "Main building - social studies classroom"
Return()

# Function_28_5C24 end


def Function_29_5C28(): pass

label("Function_29_5C28")

SetPlaceName(0x6E)  # decompiler note: "Main building - social studies classroom"
Return()

# Function_29_5C28 end


def Function_30_5C2C(): pass

label("Function_30_5C2C")

SetPlaceName(0x74)  # decompiler note: "Main building - social studies classroom"
Return()

# Function_30_5C2C end


def Function_31_5C30(): pass

label("Function_31_5C30")

SetPlaceName(0x73)  # decompiler note: "Main building - social studies classroom"
Return()

# Function_31_5C30 end
# Scenario entry: presumably serializes the assembled scena data and runs
# `main` under the toolchain's Try wrapper - TODO confirm against the
# decompiler runtime that defines SaveToFile/Try.
SaveToFile()

Try(main)
| [
"[email protected]"
]
| |
13690714d2c28a995bb0fdc95c5e71261ffd971b | 091a301b966f3141fc6020c754916ca1828216f9 | /exercises/05_basic_scripts/task_5_1c.py | 0f175b39c372c5c8c949a367a202937c462f389a | []
| no_license | netproglogic/pyneng-examples-exercises-en | f9d6a9b04873fc79ef6d3362844fb6464715cd3d | 299676a575f1d97922d8e60e8773ad9ea0301ce5 | refs/heads/main | 2023-06-28T23:12:02.153433 | 2021-07-27T02:38:51 | 2021-07-27T02:38:51 | 389,825,380 | 0 | 0 | null | 2021-07-27T02:30:05 | 2021-07-27T02:30:04 | null | UTF-8 | Python | false | false | 1,413 | py | # -*- coding: utf-8 -*-
"""
Task 5.1c
Copy and modify the script from task 5.1b so that when you request a parameter
that is not in the device dictionary, the message 'There is no such parameter' is displayed.
The assignment applies only to the parameters of the devices, not to the devices themselves.
> Try typing a non-existent parameter, to see what the result will be. And then complete the task.
If an existing parameter is selected, print information about the corresponding parameter.
An example of script execution:
$ python task_5_1c.py
Enter device name: r1
Enter parameter name (ios, model, vendor, location, ip): ips
There is no such parameter
Restriction: You cannot modify the london_co dictionary.
All tasks must be completed using only the topics covered. That is, this task can be
solved without using the if condition.
"""
# Device inventory keyed by hostname; each value maps parameter name to
# its value. The task text above forbids modifying this dictionary.
london_co = {
    "r1": {
        "location": "21 New Globe Walk",
        "vendor": "Cisco",
        "model": "4451",
        "ios": "15.4",
        "ip": "10.255.0.1",
    },
    "r2": {
        "location": "21 New Globe Walk",
        "vendor": "Cisco",
        "model": "4451",
        "ios": "15.4",
        "ip": "10.255.0.2",
    },
    "sw1": {
        "location": "21 New Globe Walk",
        "vendor": "Cisco",
        "model": "3850",
        "ios": "3.6.XE",
        "ip": "10.255.0.101",
        "vlans": "10,20,30",
        "routing": True,
    },
}
| [
"[email protected]"
]
| |
35a312ce72be62ae3e48ff98179b21d0e9debdf3 | 425db5a849281d333e68c26a26678e7c8ce11b66 | /LeetCodeSolutions/LeetCode_0159.py | 84026e92728dd9d32811d5f23f0d556a9e0d1ec8 | [
"MIT"
]
| permissive | lih627/python-algorithm-templates | e8092b327a02506086414df41bbfb2af5d6b06dc | a61fd583e33a769b44ab758990625d3381793768 | refs/heads/master | 2021-07-23T17:10:43.814639 | 2021-01-21T17:14:55 | 2021-01-21T17:14:55 | 238,456,498 | 29 | 8 | null | null | null | null | UTF-8 | Python | false | false | 681 | py | class Solution:
def lengthOfLongestSubstringTwoDistinct(self, s: str) -> int:
ret, l, r = 0, 0, 0
cnt = dict()
hash_set = set()
while r < len(s):
cs = s[r]
# print(hash_set, cnt, l, r)
if cs not in cnt:
cnt[cs] = 1
else:
cnt[cs] += 1
hash_set.add(cs)
if len(hash_set) > 2:
while cnt[s[l]] != 1:
cnt[s[l]] -= 1
l += 1
cnt[s[l]] -= 1
hash_set.remove(s[l])
l += 1
ret = max(ret, r - l + 1)
r += 1
return ret
| [
"[email protected]"
]
| |
bebca619b51381ed8388bff56f2aa99999713b2d | 1515e55e6695bf6e385da86e489fddbbe64a667f | /Tree Based/617. Merge Two Binary Trees.py | f66b02c3bde3a1c74686589f056d8aa1764c7a0d | []
| no_license | Stella2019/leetcode_stella | f9d9789ef6815c05feb04587718fb528d1c0331d | 253a5cc51394d3c15c64d398af5442ccc65ae7aa | refs/heads/master | 2022-12-11T18:18:47.685881 | 2020-09-07T00:24:23 | 2020-09-07T00:24:23 | 293,383,967 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,362 | py | """
把两个树重叠,重叠部分求和,不重叠部分是两个树不空的节点
"""
"""
题目大意
将两个二叉树进行merge操作。操作方式是把两个树进行重叠,如果重叠部分都有值,那么这个新节点是他们的值的和;如果重叠部分没有值,那么新的节点就是他们两个当中不为空的节点。
解题方法
递归
如果两个树都有节点的话就把两个相加,左右孩子为两者的左右孩子。
否则选不是空的节点当做子节点。
时间复杂度是O(N1+N2),空间复杂度O(N)。N = t1 的 t2交集。
"""
class Solution:
    def mergeTrees(self, t1, t2):
        """
        :type t1: TreeNode
        :type t2: TreeNode
        :rtype: TreeNode

        Overlay the two trees: wherever both have a node the merged node
        carries the sum of their values; wherever only one does, that
        existing subtree is reused as-is.
        """
        # Guard clause: if either side is missing there is nothing to sum,
        # so hand back whichever subtree exists (None when both are empty).
        if not (t1 and t2):
            return t1 or t2
        merged = TreeNode(t1.val + t2.val)
        merged.left = self.mergeTrees(t1.left, t2.left)
        merged.right = self.mergeTrees(t1.right, t2.right)
        return merged
# Alternative formulation of the same merge; no behavioral difference:
class Solution:
    def mergeTrees(self, t1, t2):
        """Recursively overlay t2 onto t1, summing overlapping node values."""
        if not t1:
            return t2  # only t2 (possibly None) contributes on this path
        if not t2:
            return t1  # only t1 contributes on this path
        node = TreeNode(t1.val + t2.val)
        node.left = self.mergeTrees(t1.left, t2.left)
        node.right = self.mergeTrees(t1.right, t2.right)
        return node
| [
"[email protected]"
]
| |
c515f19470336115e247a67213a7c931353d56b5 | 551b75f52d28c0b5c8944d808a361470e2602654 | /huaweicloud-sdk-dns/huaweicloudsdkdns/v2/dns_client.py | 86e64e5dcbcd762c38bfab5246a7c5b18639d697 | [
"Apache-2.0"
]
| permissive | wuchen-huawei/huaweicloud-sdk-python-v3 | 9d6597ce8ab666a9a297b3d936aeb85c55cf5877 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | refs/heads/master | 2023-05-08T21:32:31.920300 | 2021-05-26T08:54:18 | 2021-05-26T08:54:18 | 370,898,764 | 0 | 0 | NOASSERTION | 2021-05-26T03:50:07 | 2021-05-26T03:50:07 | null | UTF-8 | Python | false | false | 99,284 | py | # coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class DnsClient(Client):
"""
:param configuration: .Configuration object for this client
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long,
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
    def __init__(self):
        """Initialize the DNS client and register its model package."""
        super(DnsClient, self).__init__()
        # Imported by module path so response deserialization can resolve
        # model classes by name at runtime.
        self.model_package = importlib.import_module("huaweicloudsdkdns.v2.model")
        self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
    @classmethod
    def new_builder(cls, clazz=None):
        """Return a ClientBuilder for this client.

        :param clazz: optional client class; must be DnsClient itself.
        :raises TypeError: if a different client class is supplied.
        """
        if clazz is None:
            return ClientBuilder(cls)

        if clazz.__name__ != "DnsClient":
            raise TypeError("client type error, support client type is DnsClient")

        return ClientBuilder(clazz)
    def create_custom_line(self, request):
        """Create a single custom (user-defined) resolution line.

        Thin wrapper delegating to create_custom_line_with_http_info.

        :param CreateCustomLineRequest request
        :return: CreateCustomLineResponse
        """
        return self.create_custom_line_with_http_info(request)
    def create_custom_line_with_http_info(self, request):
        """Create a single custom resolution line (full HTTP plumbing).

        Serializes *request* into a JSON body and POSTs it to
        /v2.1/customlines via the base client's call_api.

        :param CreateCustomLineRequest request
        :return: CreateCustomLineResponse
        """
        # NOTE(review): generated code - `all_params` is collected but not
        # used for validation in this SDK version.
        all_params = ['create_custom_lines']
        local_var_params = {}
        # Copy every attribute actually set on the request object.
        for attr in request.attribute_map:
            if hasattr(request, attr):
                local_var_params[attr] = getattr(request, attr)

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # Streaming uploads bypass the serialized body.
        if isinstance(request, SdkStreamRequest):
            body_params = request.get_file_stream()

        response_headers = []

        header_params['Content-Type'] = http_utils.select_header_content_type(
            ['application/json'])

        auth_settings = []

        return self.call_api(
            resource_path='/v2.1/customlines',
            method='POST',
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            body=body_params,
            post_params=form_params,
            response_type='CreateCustomLineResponse',
            response_headers=response_headers,
            auth_settings=auth_settings,
            collection_formats=collection_formats,
            request_type=request.__class__.__name__)
    def delete_custom_line(self, request):
        """Delete a single custom (user-defined) resolution line.

        Thin wrapper delegating to delete_custom_line_with_http_info.

        :param DeleteCustomLineRequest request
        :return: DeleteCustomLineResponse
        """
        return self.delete_custom_line_with_http_info(request)
    def delete_custom_line_with_http_info(self, request):
        """Delete a single custom resolution line (full HTTP plumbing).

        Sends DELETE /v2.1/customlines/{line_id}, taking ``line_id`` from
        the request object.

        :param DeleteCustomLineRequest request
        :return: DeleteCustomLineResponse
        """
        all_params = ['line_id']
        local_var_params = {}
        # Copy every attribute actually set on the request object.
        for attr in request.attribute_map:
            if hasattr(request, attr):
                local_var_params[attr] = getattr(request, attr)

        collection_formats = {}

        path_params = {}
        if 'line_id' in local_var_params:
            path_params['line_id'] = local_var_params['line_id']

        query_params = []

        header_params = {}

        form_params = {}

        body_params = None
        if isinstance(request, SdkStreamRequest):
            body_params = request.get_file_stream()

        response_headers = []

        header_params['Content-Type'] = http_utils.select_header_content_type(
            ['application/json'])

        auth_settings = []

        return self.call_api(
            resource_path='/v2.1/customlines/{line_id}',
            method='DELETE',
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            body=body_params,
            post_params=form_params,
            response_type='DeleteCustomLineResponse',
            response_headers=response_headers,
            auth_settings=auth_settings,
            collection_formats=collection_formats,
            request_type=request.__class__.__name__)
    def list_api_versions(self, request):
        """List all API versions of the DNS service.

        Thin wrapper delegating to list_api_versions_with_http_info.

        :param ListApiVersionsRequest request
        :return: ListApiVersionsResponse
        """
        return self.list_api_versions_with_http_info(request)
    def list_api_versions_with_http_info(self, request):
        """List all API versions of the DNS service (full HTTP plumbing).

        Sends GET / (service root); takes no parameters from the request.

        :param ListApiVersionsRequest request
        :return: ListApiVersionsResponse
        """
        all_params = []
        local_var_params = {}
        # Copy every attribute actually set on the request object.
        for attr in request.attribute_map:
            if hasattr(request, attr):
                local_var_params[attr] = getattr(request, attr)

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = {}

        body_params = None
        if isinstance(request, SdkStreamRequest):
            body_params = request.get_file_stream()

        response_headers = []

        header_params['Content-Type'] = http_utils.select_header_content_type(
            ['application/json'])

        auth_settings = []

        return self.call_api(
            resource_path='/',
            method='GET',
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            body=body_params,
            post_params=form_params,
            response_type='ListApiVersionsResponse',
            response_headers=response_headers,
            auth_settings=auth_settings,
            collection_formats=collection_formats,
            request_type=request.__class__.__name__)
def list_custom_line(self, request):
    """Query custom resolution lines.

    :param ListCustomLineRequest request
    :return: ListCustomLineResponse
    """
    return self.list_custom_line_with_http_info(request)

def list_custom_line_with_http_info(self, request):
    """Query custom resolution lines (HTTP-level implementation).

    :param ListCustomLineRequest request
    :return: ListCustomLineResponse
    """
    all_params = ['line_id', 'name', 'limit', 'offset', 'show_detail']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {}
    # Keep the generated parameter order on the query string.
    query_params = [
        (key, local_var_params[key])
        for key in ('line_id', 'name', 'limit', 'offset', 'show_detail')
        if key in local_var_params
    ]
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2.1/customlines',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListCustomLineResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_name_servers(self, request):
    """List name servers.

    :param ListNameServersRequest request
    :return: ListNameServersResponse
    """
    return self.list_name_servers_with_http_info(request)

def list_name_servers_with_http_info(self, request):
    """List name servers (HTTP-level implementation).

    :param ListNameServersRequest request
    :return: ListNameServersResponse
    """
    all_params = ['type', 'region']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {}
    query_params = [
        (key, local_var_params[key])
        for key in ('type', 'region')
        if key in local_var_params
    ]
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/nameservers',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListNameServersResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_api_info(self, request):
    """Query a specific DNS API version.

    :param ShowApiInfoRequest request
    :return: ShowApiInfoResponse
    """
    return self.show_api_info_with_http_info(request)

def show_api_info_with_http_info(self, request):
    """Query a specific DNS API version (HTTP-level implementation).

    :param ShowApiInfoRequest request
    :return: ShowApiInfoResponse
    """
    all_params = ['version']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('version',)
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/{version}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowApiInfoResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_custom_line(self, request):
    """Update a single custom resolution line.

    :param UpdateCustomLineRequest request
    :return: UpdateCustomLineResponse
    """
    return self.update_custom_line_with_http_info(request)

def update_custom_line_with_http_info(self, request):
    """Update a single custom resolution line (HTTP-level implementation).

    :param UpdateCustomLineRequest request
    :return: UpdateCustomLineResponse
    """
    all_params = ['line_id', 'update_customs_line_req']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('line_id',)
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    # Request body, overridden by the raw stream for streaming requests.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2.1/customlines/{line_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateCustomLineResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_eip_record_set(self, request):
    """Set the PTR record of an elastic IP (EIP).

    :param CreateEipRecordSetRequest request
    :return: CreateEipRecordSetResponse
    """
    return self.create_eip_record_set_with_http_info(request)

def create_eip_record_set_with_http_info(self, request):
    """Set the PTR record of an elastic IP (HTTP-level implementation).

    :param CreateEipRecordSetRequest request
    :return: CreateEipRecordSetResponse
    """
    all_params = ['region', 'floatingip_id', 'create_ptr_req']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('region', 'floatingip_id')
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/reverse/floatingips/{region}:{floatingip_id}',
        method='PATCH',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateEipRecordSetResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_ptr_records(self, request):
    """List PTR records of the tenant's elastic IPs.

    :param ListPtrRecordsRequest request
    :return: ListPtrRecordsResponse
    """
    return self.list_ptr_records_with_http_info(request)

def list_ptr_records_with_http_info(self, request):
    """List PTR records of the tenant's elastic IPs (HTTP-level implementation).

    :param ListPtrRecordsRequest request
    :return: ListPtrRecordsResponse
    """
    all_params = ['marker', 'limit', 'offset', 'enterprise_project_id', 'tags', 'status']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {}
    # Keep the generated parameter order on the query string.
    query_params = [
        (key, local_var_params[key])
        for key in ('marker', 'limit', 'offset',
                    'enterprise_project_id', 'tags', 'status')
        if key in local_var_params
    ]
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/reverse/floatingips',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListPtrRecordsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def restore_ptr_record(self, request):
    """Restore the PTR record of an elastic IP to its default value.

    :param RestorePtrRecordRequest request
    :return: RestorePtrRecordResponse
    """
    return self.restore_ptr_record_with_http_info(request)

def restore_ptr_record_with_http_info(self, request):
    """Restore the PTR record of an elastic IP to its default value
    (HTTP-level implementation).

    :param RestorePtrRecordRequest request
    :return: RestorePtrRecordResponse
    """
    all_params = ['region', 'floatingip_id', 'restore_ptr_req']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('region', 'floatingip_id')
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/reverse/floatingips/{region}:{floatingip_id}',
        method='PATCH',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='RestorePtrRecordResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_ptr_record_set(self, request):
    """Query the PTR record of a single elastic IP.

    :param ShowPtrRecordSetRequest request
    :return: ShowPtrRecordSetResponse
    """
    return self.show_ptr_record_set_with_http_info(request)

def show_ptr_record_set_with_http_info(self, request):
    """Query the PTR record of a single elastic IP (HTTP-level implementation).

    :param ShowPtrRecordSetRequest request
    :return: ShowPtrRecordSetResponse
    """
    all_params = ['region', 'floatingip_id']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('region', 'floatingip_id')
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/reverse/floatingips/{region}:{floatingip_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowPtrRecordSetResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_ptr_record(self, request):
    """Modify the PTR record of an elastic IP.

    :param UpdatePtrRecordRequest request
    :return: UpdatePtrRecordResponse
    """
    return self.update_ptr_record_with_http_info(request)

def update_ptr_record_with_http_info(self, request):
    """Modify the PTR record of an elastic IP (HTTP-level implementation).

    :param UpdatePtrRecordRequest request
    :return: UpdatePtrRecordResponse
    """
    all_params = ['region', 'floatingip_id', 'update_ptr_req']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('region', 'floatingip_id')
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/reverse/floatingips/{region}:{floatingip_id}',
        method='PATCH',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdatePtrRecordResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_record_set(self, request):
    """Create a single record set.

    :param CreateRecordSetRequest request
    :return: CreateRecordSetResponse
    """
    return self.create_record_set_with_http_info(request)

def create_record_set_with_http_info(self, request):
    """Create a single record set (HTTP-level implementation).

    :param CreateRecordSetRequest request
    :return: CreateRecordSetResponse
    """
    all_params = ['zone_id', 'create_record_set_req']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('zone_id',)
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/zones/{zone_id}/recordsets',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateRecordSetResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_record_set_with_line(self, request):
    """Create a single record set (public DNS only).

    :param CreateRecordSetWithLineRequest request
    :return: CreateRecordSetWithLineResponse
    """
    return self.create_record_set_with_line_with_http_info(request)

def create_record_set_with_line_with_http_info(self, request):
    """Create a single record set, public DNS only (HTTP-level implementation).

    :param CreateRecordSetWithLineRequest request
    :return: CreateRecordSetWithLineResponse
    """
    all_params = ['zone_id', 'create_record_set_with_line_req']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('zone_id',)
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2.1/zones/{zone_id}/recordsets',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateRecordSetWithLineResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_record_set(self, request):
    """Delete a single record set.

    :param DeleteRecordSetRequest request
    :return: DeleteRecordSetResponse
    """
    return self.delete_record_set_with_http_info(request)

def delete_record_set_with_http_info(self, request):
    """Delete a single record set (HTTP-level implementation).

    :param DeleteRecordSetRequest request
    :return: DeleteRecordSetResponse
    """
    all_params = ['zone_id', 'recordset_id']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('zone_id', 'recordset_id')
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/zones/{zone_id}/recordsets/{recordset_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteRecordSetResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_record_sets(self, request):
    """Delete a single record set (v2.1 endpoint).

    :param DeleteRecordSetsRequest request
    :return: DeleteRecordSetsResponse
    """
    return self.delete_record_sets_with_http_info(request)

def delete_record_sets_with_http_info(self, request):
    """Delete a single record set, v2.1 endpoint (HTTP-level implementation).

    :param DeleteRecordSetsRequest request
    :return: DeleteRecordSetsResponse
    """
    all_params = ['zone_id', 'recordset_id']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('zone_id', 'recordset_id')
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2.1/zones/{zone_id}/recordsets/{recordset_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteRecordSetsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_record_sets(self, request):
    """List the tenant's record sets.

    :param ListRecordSetsRequest request
    :return: ListRecordSetsResponse
    """
    return self.list_record_sets_with_http_info(request)

def list_record_sets_with_http_info(self, request):
    """List the tenant's record sets (HTTP-level implementation).

    :param ListRecordSetsRequest request
    :return: ListRecordSetsResponse
    """
    all_params = ['zone_type', 'marker', 'limit', 'offset', 'tags', 'status', 'type', 'name', 'id', 'records', 'sort_key', 'sort_dir']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {}
    # Keep the generated parameter order on the query string.
    query_params = [
        (key, local_var_params[key])
        for key in ('zone_type', 'marker', 'limit', 'offset', 'tags',
                    'status', 'type', 'name', 'id', 'records',
                    'sort_key', 'sort_dir')
        if key in local_var_params
    ]
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/recordsets',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListRecordSetsResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_record_sets_by_zone(self, request):
    """List record sets in a single zone.

    :param ListRecordSetsByZoneRequest request
    :return: ListRecordSetsByZoneResponse
    """
    return self.list_record_sets_by_zone_with_http_info(request)

def list_record_sets_by_zone_with_http_info(self, request):
    """List record sets in a single zone (HTTP-level implementation).

    :param ListRecordSetsByZoneRequest request
    :return: ListRecordSetsByZoneResponse
    """
    all_params = ['zone_id', 'marker', 'limit', 'offset', 'tags', 'status', 'type', 'name', 'id', 'sort_key', 'sort_dir']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('zone_id',)
        if key in local_var_params
    }
    # Keep the generated parameter order on the query string.
    query_params = [
        (key, local_var_params[key])
        for key in ('marker', 'limit', 'offset', 'tags', 'status',
                    'type', 'name', 'id', 'sort_key', 'sort_dir')
        if key in local_var_params
    ]
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/zones/{zone_id}/recordsets',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListRecordSetsByZoneResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_record_sets_with_line(self, request):
    """List the tenant's record sets (line-aware v2.1 endpoint).

    :param ListRecordSetsWithLineRequest request
    :return: ListRecordSetsWithLineResponse
    """
    return self.list_record_sets_with_line_with_http_info(request)

def list_record_sets_with_line_with_http_info(self, request):
    """List the tenant's record sets, line-aware v2.1 endpoint
    (HTTP-level implementation).

    :param ListRecordSetsWithLineRequest request
    :return: ListRecordSetsWithLineResponse
    """
    all_params = ['zone_type', 'marker', 'limit', 'offset', 'line_id', 'tags', 'status', 'type', 'name', 'id', 'records', 'sort_key', 'sort_dir', 'health_check_id', 'search_mode']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {}
    # Keep the generated parameter order on the query string.
    query_params = [
        (key, local_var_params[key])
        for key in ('zone_type', 'marker', 'limit', 'offset', 'line_id',
                    'tags', 'status', 'type', 'name', 'id', 'records',
                    'sort_key', 'sort_dir', 'health_check_id', 'search_mode')
        if key in local_var_params
    ]
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2.1/recordsets',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListRecordSetsWithLineResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def set_record_sets_status(self, request):
    """Set the status of a record set.

    :param SetRecordSetsStatusRequest request
    :return: SetRecordSetsStatusResponse
    """
    return self.set_record_sets_status_with_http_info(request)

def set_record_sets_status_with_http_info(self, request):
    """Set the status of a record set (HTTP-level implementation).

    :param SetRecordSetsStatusRequest request
    :return: SetRecordSetsStatusResponse
    """
    all_params = ['recordset_id', 'set_record_sets_status_req']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('recordset_id',)
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2.1/recordsets/{recordset_id}/statuses/set',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='SetRecordSetsStatusResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_record_set(self, request):
    """Query a single record set.

    :param ShowRecordSetRequest request
    :return: ShowRecordSetResponse
    """
    return self.show_record_set_with_http_info(request)

def show_record_set_with_http_info(self, request):
    """Query a single record set (HTTP-level implementation).

    :param ShowRecordSetRequest request
    :return: ShowRecordSetResponse
    """
    all_params = ['zone_id', 'recordset_id']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('zone_id', 'recordset_id')
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2/zones/{zone_id}/recordsets/{recordset_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowRecordSetResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_record_set_by_zone(self, request):
    """List record sets in a single zone (v2.1 endpoint).

    :param ShowRecordSetByZoneRequest request
    :return: ShowRecordSetByZoneResponse
    """
    return self.show_record_set_by_zone_with_http_info(request)

def show_record_set_by_zone_with_http_info(self, request):
    """List record sets in a single zone, v2.1 endpoint
    (HTTP-level implementation).

    :param ShowRecordSetByZoneRequest request
    :return: ShowRecordSetByZoneResponse
    """
    all_params = ['zone_id']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('zone_id',)
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2.1/zones/{zone_id}/recordsets',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowRecordSetByZoneResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_record_set_with_line(self, request):
    """Query a single record set (public DNS only).

    :param ShowRecordSetWithLineRequest request
    :return: ShowRecordSetWithLineResponse
    """
    return self.show_record_set_with_line_with_http_info(request)

def show_record_set_with_line_with_http_info(self, request):
    """Query a single record set, public DNS only (HTTP-level implementation).

    :param ShowRecordSetWithLineRequest request
    :return: ShowRecordSetWithLineResponse
    """
    all_params = ['zone_id', 'recordset_id']
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}
    path_params = {
        key: local_var_params[key]
        for key in ('zone_id', 'recordset_id')
        if key in local_var_params
    }
    query_params = []
    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    return self.call_api(
        resource_path='/v2.1/zones/{zone_id}/recordsets/{recordset_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowRecordSetWithLineResponse',
        response_headers=[],
        auth_settings=[],
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_record_set(self, request):
"""修改单个Record Set
修改单个Record Set
:param UpdateRecordSetRequest request
:return: UpdateRecordSetResponse
"""
return self.update_record_set_with_http_info(request)
def update_record_set_with_http_info(self, request):
"""修改单个Record Set
修改单个Record Set
:param UpdateRecordSetRequest request
:return: UpdateRecordSetResponse
"""
all_params = ['zone_id', 'recordset_id', 'update_record_set_req']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
if 'recordset_id' in local_var_params:
path_params['recordset_id'] = local_var_params['recordset_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}/recordsets/{recordset_id}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateRecordSetResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_record_sets(self, request):
"""修改单个Record Set
修改单个Record Set
:param UpdateRecordSetsRequest request
:return: UpdateRecordSetsResponse
"""
return self.update_record_sets_with_http_info(request)
def update_record_sets_with_http_info(self, request):
"""修改单个Record Set
修改单个Record Set
:param UpdateRecordSetsRequest request
:return: UpdateRecordSetsResponse
"""
all_params = ['zone_id', 'recordset_id', 'update_record_sets_req']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
if 'recordset_id' in local_var_params:
path_params['recordset_id'] = local_var_params['recordset_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2.1/zones/{zone_id}/recordsets/{recordset_id}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateRecordSetsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def batch_create_tag(self, request):
"""为指定实例批量添加或删除标签
为指定实例批量添加或删除标签
:param BatchCreateTagRequest request
:return: BatchCreateTagResponse
"""
return self.batch_create_tag_with_http_info(request)
def batch_create_tag_with_http_info(self, request):
"""为指定实例批量添加或删除标签
为指定实例批量添加或删除标签
:param BatchCreateTagRequest request
:return: BatchCreateTagResponse
"""
all_params = ['resource_type', 'resource_id', 'batch_hand_tags']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
if 'resource_id' in local_var_params:
path_params['resource_id'] = local_var_params['resource_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/{resource_type}/{resource_id}/tags/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='BatchCreateTagResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_tag(self, request):
"""为指定实例添加标签
为指定实例添加标签
:param CreateTagRequest request
:return: CreateTagResponse
"""
return self.create_tag_with_http_info(request)
def create_tag_with_http_info(self, request):
"""为指定实例添加标签
为指定实例添加标签
:param CreateTagRequest request
:return: CreateTagResponse
"""
all_params = ['resource_type', 'resource_id', 'create_tag_req']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
if 'resource_id' in local_var_params:
path_params['resource_id'] = local_var_params['resource_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/{resource_type}/{resource_id}/tags',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateTagResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_tag(self, request):
"""删除资源标签
删除资源标签
:param DeleteTagRequest request
:return: DeleteTagResponse
"""
return self.delete_tag_with_http_info(request)
def delete_tag_with_http_info(self, request):
"""删除资源标签
删除资源标签
:param DeleteTagRequest request
:return: DeleteTagResponse
"""
all_params = ['resource_type', 'resource_id', 'key']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
if 'resource_id' in local_var_params:
path_params['resource_id'] = local_var_params['resource_id']
if 'key' in local_var_params:
path_params['key'] = local_var_params['key']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/{resource_type}/{resource_id}/tags/{key}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteTagResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_tag(self, request):
"""使用标签查询资源实例
使用标签查询资源实例
:param ListTagRequest request
:return: ListTagResponse
"""
return self.list_tag_with_http_info(request)
def list_tag_with_http_info(self, request):
"""使用标签查询资源实例
使用标签查询资源实例
:param ListTagRequest request
:return: ListTagResponse
"""
all_params = ['resource_type', 'list_tag_req']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/{resource_type}/resource_instances/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListTagResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_tags(self, request):
"""查询指定实例类型的所有标签集合
查询指定实例类型的所有标签集合
:param ListTagsRequest request
:return: ListTagsResponse
"""
return self.list_tags_with_http_info(request)
def list_tags_with_http_info(self, request):
"""查询指定实例类型的所有标签集合
查询指定实例类型的所有标签集合
:param ListTagsRequest request
:return: ListTagsResponse
"""
all_params = ['resource_type']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/{resource_type}/tags',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListTagsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_resource_tag(self, request):
"""查询指定实例的标签信息
查询指定实例的标签信息
:param ShowResourceTagRequest request
:return: ShowResourceTagResponse
"""
return self.show_resource_tag_with_http_info(request)
def show_resource_tag_with_http_info(self, request):
"""查询指定实例的标签信息
查询指定实例的标签信息
:param ShowResourceTagRequest request
:return: ShowResourceTagResponse
"""
all_params = ['resource_type', 'resource_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
if 'resource_id' in local_var_params:
path_params['resource_id'] = local_var_params['resource_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/{resource_type}/{resource_id}/tags',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowResourceTagResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def associate_router(self, request):
"""在内网Zone上关联VPC
在内网Zone上关联VPC
:param AssociateRouterRequest request
:return: AssociateRouterResponse
"""
return self.associate_router_with_http_info(request)
def associate_router_with_http_info(self, request):
"""在内网Zone上关联VPC
在内网Zone上关联VPC
:param AssociateRouterRequest request
:return: AssociateRouterResponse
"""
all_params = ['zone_id', 'associate_router_req']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}/associaterouter',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='AssociateRouterResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_private_zone(self, request):
"""创建单个内网Zone
创建单个内网Zone
:param CreatePrivateZoneRequest request
:return: CreatePrivateZoneResponse
"""
return self.create_private_zone_with_http_info(request)
def create_private_zone_with_http_info(self, request):
"""创建单个内网Zone
创建单个内网Zone
:param CreatePrivateZoneRequest request
:return: CreatePrivateZoneResponse
"""
all_params = ['create_private_zone_req']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreatePrivateZoneResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_public_zone(self, request):
"""创建单个公网Zone
创建单个公网Zone
:param CreatePublicZoneRequest request
:return: CreatePublicZoneResponse
"""
return self.create_public_zone_with_http_info(request)
def create_public_zone_with_http_info(self, request):
"""创建单个公网Zone
创建单个公网Zone
:param CreatePublicZoneRequest request
:return: CreatePublicZoneResponse
"""
all_params = ['create_public_zone']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreatePublicZoneResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_private_zone(self, request):
"""删除单个内网Zone
删除单个内网Zone
:param DeletePrivateZoneRequest request
:return: DeletePrivateZoneResponse
"""
return self.delete_private_zone_with_http_info(request)
def delete_private_zone_with_http_info(self, request):
"""删除单个内网Zone
删除单个内网Zone
:param DeletePrivateZoneRequest request
:return: DeletePrivateZoneResponse
"""
all_params = ['zone_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeletePrivateZoneResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_public_zone(self, request):
"""删除单个公网Zone
删除单个公网Zone
:param DeletePublicZoneRequest request
:return: DeletePublicZoneResponse
"""
return self.delete_public_zone_with_http_info(request)
def delete_public_zone_with_http_info(self, request):
"""删除单个公网Zone
删除单个公网Zone
:param DeletePublicZoneRequest request
:return: DeletePublicZoneResponse
"""
all_params = ['zone_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeletePublicZoneResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def disassociate_router(self, request):
"""在Private Zone上解关联VPC
在Private Zone上解关联VPC
:param DisassociateRouterRequest request
:return: DisassociateRouterResponse
"""
return self.disassociate_router_with_http_info(request)
def disassociate_router_with_http_info(self, request):
"""在Private Zone上解关联VPC
在Private Zone上解关联VPC
:param DisassociateRouterRequest request
:return: DisassociateRouterResponse
"""
all_params = ['zone_id', 'disassociaterouter_req']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}/disassociaterouter',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DisassociateRouterResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_private_zones(self, request):
"""查询内网Zone的列表
查询内网Zone的列表
:param ListPrivateZonesRequest request
:return: ListPrivateZonesResponse
"""
return self.list_private_zones_with_http_info(request)
def list_private_zones_with_http_info(self, request):
"""查询内网Zone的列表
查询内网Zone的列表
:param ListPrivateZonesRequest request
:return: ListPrivateZonesResponse
"""
all_params = ['type', 'limit', 'marker', 'offset', 'tags', 'name', 'status', 'enterprise_project_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'type' in local_var_params:
query_params.append(('type', local_var_params['type']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
if 'marker' in local_var_params:
query_params.append(('marker', local_var_params['marker']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'tags' in local_var_params:
query_params.append(('tags', local_var_params['tags']))
if 'name' in local_var_params:
query_params.append(('name', local_var_params['name']))
if 'status' in local_var_params:
query_params.append(('status', local_var_params['status']))
if 'enterprise_project_id' in local_var_params:
query_params.append(('enterprise_project_id', local_var_params['enterprise_project_id']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListPrivateZonesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_public_zones(self, request):
"""查询公网Zone的列表
查询公网Zone的列表
:param ListPublicZonesRequest request
:return: ListPublicZonesResponse
"""
return self.list_public_zones_with_http_info(request)
def list_public_zones_with_http_info(self, request):
"""查询公网Zone的列表
查询公网Zone的列表
:param ListPublicZonesRequest request
:return: ListPublicZonesResponse
"""
all_params = ['type', 'limit', 'marker', 'offset', 'tags', 'name', 'status', 'enterprise_project_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'type' in local_var_params:
query_params.append(('type', local_var_params['type']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
if 'marker' in local_var_params:
query_params.append(('marker', local_var_params['marker']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'tags' in local_var_params:
query_params.append(('tags', local_var_params['tags']))
if 'name' in local_var_params:
query_params.append(('name', local_var_params['name']))
if 'status' in local_var_params:
query_params.append(('status', local_var_params['status']))
if 'enterprise_project_id' in local_var_params:
query_params.append(('enterprise_project_id', local_var_params['enterprise_project_id']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListPublicZonesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_private_zone(self, request):
"""查询单个内网Zone
查询单个内网Zone
:param ShowPrivateZoneRequest request
:return: ShowPrivateZoneResponse
"""
return self.show_private_zone_with_http_info(request)
def show_private_zone_with_http_info(self, request):
"""查询单个内网Zone
查询单个内网Zone
:param ShowPrivateZoneRequest request
:return: ShowPrivateZoneResponse
"""
all_params = ['zone_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowPrivateZoneResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_private_zone_name_server(self, request):
"""查询内网Zone的名称服务器
查询内网Zone的列表
:param ShowPrivateZoneNameServerRequest request
:return: ShowPrivateZoneNameServerResponse
"""
return self.show_private_zone_name_server_with_http_info(request)
def show_private_zone_name_server_with_http_info(self, request):
"""查询内网Zone的名称服务器
查询内网Zone的列表
:param ShowPrivateZoneNameServerRequest request
:return: ShowPrivateZoneNameServerResponse
"""
all_params = ['zone_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}/nameservers',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowPrivateZoneNameServerResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_public_zone(self, request):
"""查询单个公网Zone
查询单个公网Zone
:param ShowPublicZoneRequest request
:return: ShowPublicZoneResponse
"""
return self.show_public_zone_with_http_info(request)
def show_public_zone_with_http_info(self, request):
"""查询单个公网Zone
查询单个公网Zone
:param ShowPublicZoneRequest request
:return: ShowPublicZoneResponse
"""
all_params = ['zone_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowPublicZoneResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_public_zone_name_server(self, request):
"""查询单个公网Zone的名称服务器
查询单个公网Zone的名称服务器
:param ShowPublicZoneNameServerRequest request
:return: ShowPublicZoneNameServerResponse
"""
return self.show_public_zone_name_server_with_http_info(request)
def show_public_zone_name_server_with_http_info(self, request):
"""查询单个公网Zone的名称服务器
查询单个公网Zone的名称服务器
:param ShowPublicZoneNameServerRequest request
:return: ShowPublicZoneNameServerResponse
"""
all_params = ['zone_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}/nameservers',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowPublicZoneNameServerResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_private_zone(self, request):
"""修改单个Zone
修改单个Zone
:param UpdatePrivateZoneRequest request
:return: UpdatePrivateZoneResponse
"""
return self.update_private_zone_with_http_info(request)
def update_private_zone_with_http_info(self, request):
"""修改单个Zone
修改单个Zone
:param UpdatePrivateZoneRequest request
:return: UpdatePrivateZoneResponse
"""
all_params = ['zone_id', 'update_private_zone_info_req']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}',
method='PATCH',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdatePrivateZoneResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_public_zone(self, request):
"""修改单个Zone
修改单个Zone
:param UpdatePublicZoneRequest request
:return: UpdatePublicZoneResponse
"""
return self.update_public_zone_with_http_info(request)
def update_public_zone_with_http_info(self, request):
"""修改单个Zone
修改单个Zone
:param UpdatePublicZoneRequest request
:return: UpdatePublicZoneResponse
"""
all_params = ['zone_id', 'update_public_zone_info']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}',
method='PATCH',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdatePublicZoneResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_public_zone_status(self, request):
"""设置单个公网Zone状态,支持暂停、启用Zone
设置单个公网Zone状态,支持暂停、启用Zone
:param UpdatePublicZoneStatusRequest request
:return: UpdatePublicZoneStatusResponse
"""
return self.update_public_zone_status_with_http_info(request)
def update_public_zone_status_with_http_info(self, request):
"""设置单个公网Zone状态,支持暂停、启用Zone
设置单个公网Zone状态,支持暂停、启用Zone
:param UpdatePublicZoneStatusRequest request
:return: UpdatePublicZoneStatusResponse
"""
all_params = ['zone_id', 'update_public_zone_status']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'zone_id' in local_var_params:
path_params['zone_id'] = local_var_params['zone_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/zones/{zone_id}/statuses',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdatePublicZoneStatusResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
post_params=None, response_type=None, response_headers=None, auth_settings=None,
collection_formats=None, request_type=None):
"""Makes the HTTP request and returns deserialized data.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response_type: Response data type.
:param response_headers: Header should be added to response data.
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param request_type: Request data type.
:return:
Return the response directly.
"""
return self.do_http_request(
method=method,
resource_path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body,
post_params=post_params,
response_type=response_type,
response_headers=response_headers,
collection_formats=collection_formats,
request_type=request_type)
| [
"[email protected]"
]
| |
8875cb041f81b6df9792c3edb27badd2bb779332 | 23ec6adce704bff40d04cd6fc0ba446375405b68 | /firstBadVersion.py | f20f5f8d9fe87eb060c05515325ef8b18688ed84 | []
def bs(a, l, r, key):
    """Binary-search ``a[l..r]`` for the leftmost element equal to ``key``.

    Assumes the searched range is partitioned so that all elements equal to
    ``key`` form a contiguous suffix (e.g. [True, ..., True, False, ..., False]
    searched for False -- the "first bad version" pattern).

    Returns a tuple ``(index, a[index])``. If ``key`` is absent, the returned
    element will not equal ``key`` -- callers must check for that case.
    """
    lo, hi = l, r
    while lo < hi:
        mid = (lo + hi) // 2  # same midpoint as l + (r - l) // 2 for ints
        if a[mid] != key:
            # Midpoint is still in the "good" prefix; discard it.
            lo = mid + 1
        else:
            # Midpoint matches; it may be the first match, so keep it.
            hi = mid
    return lo, a[lo]
# Demo: ten "good" versions followed by six "bad" ones; the first bad
# version is at index 10, so this prints (10, False).
a = [True] * 10 + [False] * 6
n = len(a)
print(bs(a, 0, n - 1, False))
"[email protected]"
]
| |
9c7d9f922f699e4c8a86df35403e79935cb104eb | 4dcf1fe21a730b704a16e7991926510a43ba3f02 | /google/cloud/security/privateca_v1beta1/types/resources.py | 91f4ca29e8c6e26ede4c9292348dfe3066ddd280 | [
"Apache-2.0"
]
| permissive | Global19-atlassian-net/python-security-private-ca | 9142f8cd0f4604b46714de0fb3c96777841f86c5 | bbe17c13e7af03c68f933bc2405522f227c0522e | refs/heads/master | 2022-12-21T02:12:53.951027 | 2020-10-02T22:36:02 | 2020-10-02T22:36:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 58,446 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import duration_pb2 as duration # type: ignore
from google.protobuf import timestamp_pb2 as timestamp # type: ignore
from google.protobuf import wrappers_pb2 as wrappers # type: ignore
# Proto-plus module descriptor: registers every message and enum defined in
# this file under the given proto package so they can be resolved by name
# (e.g. the string references used in proto.Field(..., message="...")).
__protobuf__ = proto.module(
    package="google.cloud.security.privateca.v1beta1",
    manifest={
        "RevocationReason",
        "CertificateAuthority",
        "CertificateRevocationList",
        "Certificate",
        "ReusableConfig",
        "ReusableConfigValues",
        "ReusableConfigWrapper",
        "SubordinateConfig",
        "PublicKey",
        "CertificateConfig",
        "CertificateDescription",
        "ObjectId",
        "X509Extension",
        "KeyUsage",
        "Subject",
        "SubjectAltNames",
    },
)
class RevocationReason(proto.Enum):
    r"""A
    [RevocationReason][google.cloud.security.privateca.v1beta1.RevocationReason]
    indicates whether a
    [Certificate][google.cloud.security.privateca.v1beta1.Certificate]
    has been revoked, and the reason for revocation. These are standard
    revocation reasons from RFC 5280.
    """
    # NOTE(review): the names mirror the CRLReason identifiers from RFC 5280;
    # the integers below are proto enum numbers and are not guaranteed to
    # equal the RFC's numeric reason codes -- confirm before relying on them.
    REVOCATION_REASON_UNSPECIFIED = 0
    KEY_COMPROMISE = 1
    CERTIFICATE_AUTHORITY_COMPROMISE = 2
    AFFILIATION_CHANGED = 3
    SUPERSEDED = 4
    CESSATION_OF_OPERATION = 5
    CERTIFICATE_HOLD = 6
    PRIVILEGE_WITHDRAWN = 7
    ATTRIBUTE_AUTHORITY_COMPROMISE = 8
class CertificateAuthority(proto.Message):
r"""A
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
represents an individual Certificate Authority. A
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
can be used to create
[Certificates][google.cloud.security.privateca.v1beta1.Certificate].
Attributes:
name (str):
Output only. The resource name for this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
in the format
``projects/*/locations/*/certificateAuthorities/*``.
type_ (~.resources.CertificateAuthority.Type):
Required. Immutable. The
[Type][google.cloud.security.privateca.v1beta1.CertificateAuthority.Type]
of this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
tier (~.resources.CertificateAuthority.Tier):
Required. Immutable. The
[Tier][google.cloud.security.privateca.v1beta1.CertificateAuthority.Tier]
of this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
config (~.resources.CertificateConfig):
Required. Immutable. The config used to
create a self-signed X.509 certificate or CSR.
lifetime (~.duration.Duration):
Required. The desired lifetime of the CA certificate. Used
to create the "not_before_time" and "not_after_time" fields
inside an X.509 certificate.
key_spec (~.resources.CertificateAuthority.KeyVersionSpec):
Required. Immutable. Used when issuing certificates for this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
If this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
is a self-signed CertificateAuthority, this key is also used
to sign the self-signed CA certificate. Otherwise, it is
used to sign a CSR.
certificate_policy (~.resources.CertificateAuthority.CertificateAuthorityPolicy):
Optional. The
[CertificateAuthorityPolicy][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy]
to enforce when issuing
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
from this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
issuing_options (~.resources.CertificateAuthority.IssuingOptions):
Optional. The
[IssuingOptions][google.cloud.security.privateca.v1beta1.CertificateAuthority.IssuingOptions]
to follow when issuing
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
from this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
subordinate_config (~.resources.SubordinateConfig):
Optional. If this is a subordinate
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority],
this field will be set with the subordinate configuration,
which describes its issuers. This may be updated, but this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
must continue to validate.
state (~.resources.CertificateAuthority.State):
Output only. The
[State][google.cloud.security.privateca.v1beta1.CertificateAuthority.State]
for this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
pem_ca_certificates (Sequence[str]):
Output only. This
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]'s
certificate chain, including the current
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]'s
certificate. Ordered such that the root issuer is the final
element (consistent with RFC 5246). For a self-signed CA,
this will only list the current
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]'s
certificate.
ca_certificate_descriptions (Sequence[~.resources.CertificateDescription]):
Output only. A structured description of this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]'s
CA certificate and its issuers. Ordered as self-to-root.
gcs_bucket (str):
Immutable. The name of a Cloud Storage bucket where this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
will publish content, such as the CA certificate and CRLs.
This must be a bucket name, without any prefixes (such as
``gs://``) or suffixes (such as ``.googleapis.com``). For
example, to use a bucket named ``my-bucket``, you would
simply specify ``my-bucket``. If not specified, a managed
bucket will be created.
access_urls (~.resources.CertificateAuthority.AccessUrls):
Output only. URLs for accessing content
published by this CA, such as the CA certificate
and CRLs.
create_time (~.timestamp.Timestamp):
Output only. The time at which this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
was created.
update_time (~.timestamp.Timestamp):
Output only. The time at which this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
was updated.
delete_time (~.timestamp.Timestamp):
Output only. The time at which this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
will be deleted, if scheduled for deletion.
labels (Sequence[~.resources.CertificateAuthority.LabelsEntry]):
Optional. Labels with user-defined metadata.
"""
class Type(proto.Enum):
r"""The type of a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority],
indicating its issuing chain.
"""
TYPE_UNSPECIFIED = 0
SELF_SIGNED = 1
SUBORDINATE = 2
class Tier(proto.Enum):
r"""The tier of a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority],
indicating its supported functionality and/or billing SKU.
"""
TIER_UNSPECIFIED = 0
ENTERPRISE = 1
DEVOPS = 2
class State(proto.Enum):
r"""The state of a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority],
indicating if it can be used.
"""
STATE_UNSPECIFIED = 0
ENABLED = 1
DISABLED = 2
PENDING_ACTIVATION = 3
PENDING_DELETION = 4
class SignHashAlgorithm(proto.Enum):
r"""The algorithm of a Cloud KMS CryptoKeyVersion of a
[CryptoKey][google.cloud.kms.v1.CryptoKey] with the
[CryptoKeyPurpose][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose]
value ``ASYMMETRIC_SIGN``. These values correspond to the
[CryptoKeyVersionAlgorithm][google.cloud.kms.v1.CryptoKey.CryptoKeyVersion.CryptoKeyVersionAlgorithm].
values.
"""
SIGN_HASH_ALGORITHM_UNSPECIFIED = 0
RSA_PSS_2048_SHA_256 = 1
RSA_PSS_3072_SHA_256 = 2
RSA_PSS_4096_SHA_256 = 3
EC_P256_SHA256 = 4
EC_P384_SHA384 = 5
class IssuingOptions(proto.Message):
r"""Options that affect all certificates issued by a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
Attributes:
include_ca_cert_url (bool):
Required. When true, includes a URL to the
issuing CA certificate in the "authority
information access" X.509 extension.
include_crl_access_url (bool):
Required. When true, includes a URL to the CRL corresponding
to certificates issued from a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
CRLs will expire 7 days from their creation. However, we
will rebuild daily. CRLs are also rebuilt shortly after a
certificate is revoked.
"""
include_ca_cert_url = proto.Field(proto.BOOL, number=1)
include_crl_access_url = proto.Field(proto.BOOL, number=2)
class CertificateAuthorityPolicy(proto.Message):
r"""The issuing policy for a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
will not be successfully issued from this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
if they violate the policy.
Attributes:
allowed_config_list (~.resources.CertificateAuthority.CertificateAuthorityPolicy.AllowedConfigList):
Optional. All
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
issued by the
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
must match at least one listed
[ReusableConfigWrapper][google.cloud.security.privateca.v1beta1.ReusableConfigWrapper]
in the list.
overwrite_config_values (~.resources.ReusableConfigWrapper):
Optional. All
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
issued by the
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
will use the provided configuration values, overwriting any
requested configuration values.
allowed_locations_and_organizations (Sequence[~.resources.Subject]):
Optional. If any
[Subject][google.cloud.security.privateca.v1beta1.Subject]
is specified here, then all
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
issued by the
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
must match at least one listed
[Subject][google.cloud.security.privateca.v1beta1.Subject].
If a
[Subject][google.cloud.security.privateca.v1beta1.Subject]
has an empty field, any value will be allowed for that
field.
allowed_common_names (Sequence[str]):
Optional. If any value is specified here, then all
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
issued by the
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
must match at least one listed value. If no value is
specified, all values will be allowed for this fied. Glob
patterns are also supported.
allowed_sans (~.resources.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames):
Optional. If a
[AllowedSubjectAltNames][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames]
is specified here, then all
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
issued by the
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
must match
[AllowedSubjectAltNames][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames].
If no value or an empty value is specified, any value will
be allowed for the
[SubjectAltNames][google.cloud.security.privateca.v1beta1.SubjectAltNames]
field.
maximum_lifetime (~.duration.Duration):
Optional. The maximum lifetime allowed by the
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
Note that if the any part if the issuing chain expires
before a
[Certificate][google.cloud.security.privateca.v1beta1.Certificate]'s
requested maximum_lifetime, the effective lifetime will be
explicitly truncated.
allowed_issuance_modes (~.resources.CertificateAuthority.CertificateAuthorityPolicy.IssuanceModes):
Optional. If specified, then only methods allowed in the
[IssuanceModes][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.IssuanceModes]
may be used to issue
[Certificates][google.cloud.security.privateca.v1beta1.Certificate].
"""
class AllowedConfigList(proto.Message):
r"""
Attributes:
allowed_config_values (Sequence[~.resources.ReusableConfigWrapper]):
Required. All
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
issued by the
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
must match at least one listed
[ReusableConfigWrapper][google.cloud.security.privateca.v1beta1.ReusableConfigWrapper].
If a
[ReusableConfigWrapper][google.cloud.security.privateca.v1beta1.ReusableConfigWrapper]
has an empty field, any value will be allowed for that
field.
"""
allowed_config_values = proto.RepeatedField(
proto.MESSAGE, number=1, message="ReusableConfigWrapper",
)
class AllowedSubjectAltNames(proto.Message):
r"""[AllowedSubjectAltNames][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames]
specifies the allowed values for
[SubjectAltNames][google.cloud.security.privateca.v1beta1.SubjectAltNames]
by the
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
when issuing
[Certificates][google.cloud.security.privateca.v1beta1.Certificate].
Attributes:
allowed_dns_names (Sequence[str]):
Optional. Contains valid, fully-qualified host names. Glob
patterns are also supported. To allow an explicit wildcard
certificate, escape with backlash (i.e. "*"). E.g. for
globbed entries: '*bar.com' will allow foo.bar.com, but not
*.bar.com, unless the
[allow_globbing_dns_wildcards][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allow_globbing_dns_wildcards]
field is set. E.g. for wildcard entries: '*.bar.com' will
allow '*.bar.com', but not 'foo.bar.com'.
allowed_uris (Sequence[str]):
Optional. Contains valid RFC 3986 URIs. Glob patterns are
also supported. To match across path seperators (i.e. '/')
use the double star glob pattern (i.e. '**').
allowed_email_addresses (Sequence[str]):
Optional. Contains valid RFC 2822 E-mail
addresses. Glob patterns are also supported.
allowed_ips (Sequence[str]):
Optional. Contains valid 32-bit IPv4
addresses and subnet ranges or RFC 4291 IPv6
addresses and subnet ranges. Subnet ranges are
specified using the '/' notation (e.g.
10.0.0.0/8, 2001:700:300:1800::/64). Glob
patterns are supported only for ip address
entries (i.e. not for subnet ranges).
allow_globbing_dns_wildcards (bool):
Optional. Specifies if glob patterns used for
[allowed_dns_names][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames.allowed_dns_names]
allows wildcard certificates.
allow_custom_sans (bool):
Optional. Specifies if to allow custom
X509Extension values.
"""
allowed_dns_names = proto.RepeatedField(proto.STRING, number=1)
allowed_uris = proto.RepeatedField(proto.STRING, number=2)
allowed_email_addresses = proto.RepeatedField(proto.STRING, number=3)
allowed_ips = proto.RepeatedField(proto.STRING, number=4)
allow_globbing_dns_wildcards = proto.Field(proto.BOOL, number=5)
allow_custom_sans = proto.Field(proto.BOOL, number=6)
class IssuanceModes(proto.Message):
r"""[IssuanceModes][google.cloud.security.privateca.v1beta1.CertificateAuthority.CertificateAuthorityPolicy.IssuanceModes]
specifies the allowed ways in which
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
may be requested from this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
Attributes:
allow_csr_based_issuance (bool):
Required. When true, allows callers to create
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
by specifying a CSR.
allow_config_based_issuance (bool):
Required. When true, allows callers to create
[Certificates][google.cloud.security.privateca.v1beta1.Certificate]
by specifying a
[CertificateConfig][google.cloud.security.privateca.v1beta1.CertificateConfig].
"""
allow_csr_based_issuance = proto.Field(proto.BOOL, number=1)
allow_config_based_issuance = proto.Field(proto.BOOL, number=2)
allowed_config_list = proto.Field(
proto.MESSAGE,
number=1,
oneof="config_policy",
message="CertificateAuthority.CertificateAuthorityPolicy.AllowedConfigList",
)
overwrite_config_values = proto.Field(
proto.MESSAGE,
number=2,
oneof="config_policy",
message="ReusableConfigWrapper",
)
allowed_locations_and_organizations = proto.RepeatedField(
proto.MESSAGE, number=3, message="Subject",
)
allowed_common_names = proto.RepeatedField(proto.STRING, number=4)
allowed_sans = proto.Field(
proto.MESSAGE,
number=5,
message="CertificateAuthority.CertificateAuthorityPolicy.AllowedSubjectAltNames",
)
maximum_lifetime = proto.Field(
proto.MESSAGE, number=6, message=duration.Duration,
)
allowed_issuance_modes = proto.Field(
proto.MESSAGE,
number=8,
message="CertificateAuthority.CertificateAuthorityPolicy.IssuanceModes",
)
class AccessUrls(proto.Message):
r"""URLs where a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
will publish content.
Attributes:
ca_certificate_access_url (str):
The URL where this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]'s
CA certificate is published. This will only be set for CAs
that have been activated.
crl_access_url (str):
The URL where this
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]'s
CRLs are published. This will only be set for CAs that have
been activated.
"""
ca_certificate_access_url = proto.Field(proto.STRING, number=1)
crl_access_url = proto.Field(proto.STRING, number=2)
class KeyVersionSpec(proto.Message):
r"""A Cloud KMS key configuration that a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
will use.
Attributes:
cloud_kms_key_version (str):
Required. The resource name for an existing Cloud KMS
CryptoKeyVersion in the
format\ ``projects/*/locations/*/keyRings/*/cryptoKeys/*/cryptoKeyVersions/*``.
This option enables full flexibility in the key's
capabilities and properties.
algorithm (~.resources.CertificateAuthority.SignHashAlgorithm):
Required. The algorithm to use for creating a managed Cloud
KMS key for a for a simplified experience. All managed keys
will be have their
[ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] as
``HSM``.
"""
cloud_kms_key_version = proto.Field(proto.STRING, number=1, oneof="KeyVersion")
algorithm = proto.Field(
proto.ENUM,
number=2,
oneof="KeyVersion",
enum="CertificateAuthority.SignHashAlgorithm",
)
name = proto.Field(proto.STRING, number=1)
type_ = proto.Field(proto.ENUM, number=2, enum=Type,)
tier = proto.Field(proto.ENUM, number=3, enum=Tier,)
config = proto.Field(proto.MESSAGE, number=4, message="CertificateConfig",)
lifetime = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,)
key_spec = proto.Field(proto.MESSAGE, number=6, message=KeyVersionSpec,)
certificate_policy = proto.Field(
proto.MESSAGE, number=7, message=CertificateAuthorityPolicy,
)
issuing_options = proto.Field(proto.MESSAGE, number=8, message=IssuingOptions,)
subordinate_config = proto.Field(
proto.MESSAGE, number=19, message="SubordinateConfig",
)
state = proto.Field(proto.ENUM, number=10, enum=State,)
pem_ca_certificates = proto.RepeatedField(proto.STRING, number=9)
ca_certificate_descriptions = proto.RepeatedField(
proto.MESSAGE, number=12, message="CertificateDescription",
)
gcs_bucket = proto.Field(proto.STRING, number=13)
access_urls = proto.Field(proto.MESSAGE, number=14, message=AccessUrls,)
create_time = proto.Field(proto.MESSAGE, number=15, message=timestamp.Timestamp,)
update_time = proto.Field(proto.MESSAGE, number=16, message=timestamp.Timestamp,)
delete_time = proto.Field(proto.MESSAGE, number=17, message=timestamp.Timestamp,)
labels = proto.MapField(proto.STRING, proto.STRING, number=18)
# Machine-generated proto-plus binding. The number= arguments below are
# protobuf field numbers and form the wire contract -- never renumber or
# reuse them.
class CertificateRevocationList(proto.Message):
    r"""A
    [CertificateRevocationList][google.cloud.security.privateca.v1beta1.CertificateRevocationList]
    corresponds to a signed X.509 certificate Revocation List (CRL). A
    CRL contains the serial numbers of certificates that should no
    longer be trusted.
    Attributes:
        name (str):
            Output only. The resource path for this
            [CertificateRevocationList][google.cloud.security.privateca.v1beta1.CertificateRevocationList]
            in the format
            ``projects/*/locations/*/certificateAuthorities/*/ certificateRevocationLists/*``.
        sequence_number (int):
            Output only. The CRL sequence number that appears in
            pem_crl.
        revoked_certificates (Sequence[~.resources.CertificateRevocationList.RevokedCertificate]):
            Output only. The revoked serial numbers that appear in
            pem_crl.
        pem_crl (str):
            Output only. The PEM-encoded X.509 CRL.
        access_url (str):
            Output only. The location where 'pem_crl' can be accessed.
        state (~.resources.CertificateRevocationList.State):
            Output only. The
            [State][google.cloud.security.privateca.v1beta1.CertificateRevocationList.State]
            for this
            [CertificateRevocationList][google.cloud.security.privateca.v1beta1.CertificateRevocationList].
        create_time (~.timestamp.Timestamp):
            Output only. The time at which this
            [CertificateRevocationList][google.cloud.security.privateca.v1beta1.CertificateRevocationList]
            was created.
        update_time (~.timestamp.Timestamp):
            Output only. The time at which this
            [CertificateRevocationList][google.cloud.security.privateca.v1beta1.CertificateRevocationList]
            was updated.
        labels (Sequence[~.resources.CertificateRevocationList.LabelsEntry]):
            Optional. Labels with user-defined metadata.
    """
    class State(proto.Enum):
        r"""The state of a
        [CertificateRevocationList][google.cloud.security.privateca.v1beta1.CertificateRevocationList],
        indicating if it is current.
        """
        STATE_UNSPECIFIED = 0
        ACTIVE = 1
        SUPERSEDED = 2
    class RevokedCertificate(proto.Message):
        r"""Describes a revoked
        [Certificate][google.cloud.security.privateca.v1beta1.Certificate].
        Attributes:
            certificate (str):
                The resource path for the
                [Certificate][google.cloud.security.privateca.v1beta1.Certificate]
                in the format
                ``projects/*/locations/*/certificateAuthorities/*/certificates/*``.
            hex_serial_number (str):
                The serial number of the
                [Certificate][google.cloud.security.privateca.v1beta1.Certificate].
            revocation_reason (~.resources.RevocationReason):
                The reason the
                [Certificate][google.cloud.security.privateca.v1beta1.Certificate]
                was revoked.
        """
        certificate = proto.Field(proto.STRING, number=1)
        hex_serial_number = proto.Field(proto.STRING, number=2)
        revocation_reason = proto.Field(proto.ENUM, number=3, enum="RevocationReason",)
    name = proto.Field(proto.STRING, number=1)
    sequence_number = proto.Field(proto.INT64, number=2)
    revoked_certificates = proto.RepeatedField(
        proto.MESSAGE, number=3, message=RevokedCertificate,
    )
    pem_crl = proto.Field(proto.STRING, number=4)
    access_url = proto.Field(proto.STRING, number=5)
    state = proto.Field(proto.ENUM, number=6, enum=State,)
    create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,)
    update_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,)
    labels = proto.MapField(proto.STRING, proto.STRING, number=9)
class Certificate(proto.Message):
r"""A [Certificate][google.cloud.security.privateca.v1beta1.Certificate]
corresponds to a signed X.509 certificate issued by a
[CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
Attributes:
name (str):
Output only. The resource path for this
[Certificate][google.cloud.security.privateca.v1beta1.Certificate]
in the format
``projects/*/locations/*/certificateAuthorities/*/certificates/*``.
pem_csr (str):
Immutable. A pem-encoded X.509 certificate
signing request (CSR).
config (~.resources.CertificateConfig):
Immutable. A description of the certificate
and key that does not require X.509 or ASN.1.
lifetime (~.duration.Duration):
Required. The desired lifetime of a certificate. Used to
create the "not_before_time" and "not_after_time" fields
inside an X.509 certificate. Note that the lifetime may be
truncated if it would extend past the life of any
certificate authority in the issuing chain.
revocation_details (~.resources.Certificate.RevocationDetails):
Output only. Details regarding the revocation of this
[Certificate][google.cloud.security.privateca.v1beta1.Certificate].
This
[Certificate][google.cloud.security.privateca.v1beta1.Certificate]
is considered revoked if and only if this field is present.
pem_certificate (str):
Output only. The pem-encoded, signed X.509
certificate.
certificate_description (~.resources.CertificateDescription):
Output only. A structured description of the
issued X.509 certificate.
pem_certificate_chain (Sequence[str]):
Output only. The chain that may be used to
verify the X.509 certificate. Expected to be in
issuer-to-root order according to RFC 5246.
create_time (~.timestamp.Timestamp):
Output only. The time at which this
[Certificate][google.cloud.security.privateca.v1beta1.Certificate]
was created.
update_time (~.timestamp.Timestamp):
Output only. The time at which this
[Certificate][google.cloud.security.privateca.v1beta1.Certificate]
was updated.
labels (Sequence[~.resources.Certificate.LabelsEntry]):
Optional. Labels with user-defined metadata.
"""
class RevocationDetails(proto.Message):
    r"""Describes fields that are relevant to the revocation of a
    [Certificate][google.cloud.security.privateca.v1beta1.Certificate].

    Attributes:
        revocation_state (~.resources.RevocationReason):
            Indicates why a
            [Certificate][google.cloud.security.privateca.v1beta1.Certificate]
            was revoked.
        revocation_time (~.timestamp.Timestamp):
            The time at which this
            [Certificate][google.cloud.security.privateca.v1beta1.Certificate]
            was revoked.
    """

    # Field numbers are part of the protobuf wire format; do not renumber.
    revocation_state = proto.Field(proto.ENUM, number=1, enum="RevocationReason",)
    revocation_time = proto.Field(
        proto.MESSAGE, number=2, message=timestamp.Timestamp,
    )
# Field declarations of the ``Certificate`` message (class header above).
# ``pem_csr`` and ``config`` are members of the ``certificate_config``
# oneof: at most one of them may be set on a given message.
name = proto.Field(proto.STRING, number=1)
pem_csr = proto.Field(proto.STRING, number=2, oneof="certificate_config")
config = proto.Field(
    proto.MESSAGE,
    number=3,
    oneof="certificate_config",
    message="CertificateConfig",
)
lifetime = proto.Field(proto.MESSAGE, number=4, message=duration.Duration,)
revocation_details = proto.Field(
    proto.MESSAGE, number=5, message=RevocationDetails,
)
# Output-only fields populated by the service (see the Attributes docs above).
pem_certificate = proto.Field(proto.STRING, number=6)
certificate_description = proto.Field(
    proto.MESSAGE, number=7, message="CertificateDescription",
)
pem_certificate_chain = proto.RepeatedField(proto.STRING, number=8)
create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,)
update_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,)
labels = proto.MapField(proto.STRING, proto.STRING, number=11)
class ReusableConfig(proto.Message):
    r"""A
    [ReusableConfig][google.cloud.security.privateca.v1beta1.ReusableConfig]
    refers to a managed
    [ReusableConfigValues][google.cloud.security.privateca.v1beta1.ReusableConfigValues].
    Those, in turn, are used to describe certain fields of an X.509
    certificate, such as the key usage fields, fields specific to CA
    certificates, certificate policy extensions and custom extensions.

    Attributes:
        name (str):
            Output only. The resource path for this
            [ReusableConfig][google.cloud.security.privateca.v1beta1.ReusableConfig]
            in the format ``projects/*/locations/*/reusableConfigs/*``.
        values (~.resources.ReusableConfigValues):
            Required. The config values.
        description (str):
            Optional. A human-readable description of
            scenarios these ReusableConfigValues may be
            compatible with.
        create_time (~.timestamp.Timestamp):
            Output only. The time at which this
            [ReusableConfig][google.cloud.security.privateca.v1beta1.ReusableConfig]
            was created.
        update_time (~.timestamp.Timestamp):
            Output only. The time at which this
            [ReusableConfig][google.cloud.security.privateca.v1beta1.ReusableConfig]
            was updated.
        labels (Sequence[~.resources.ReusableConfig.LabelsEntry]):
            Optional. Labels with user-defined metadata.
    """

    # Field numbers are part of the protobuf wire format; do not renumber.
    name = proto.Field(proto.STRING, number=1)
    values = proto.Field(proto.MESSAGE, number=2, message="ReusableConfigValues",)
    description = proto.Field(proto.STRING, number=3)
    create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
    update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,)
    labels = proto.MapField(proto.STRING, proto.STRING, number=6)
class ReusableConfigValues(proto.Message):
    r"""A
    [ReusableConfigValues][google.cloud.security.privateca.v1beta1.ReusableConfigValues]
    is used to describe certain fields of an X.509 certificate, such as
    the key usage fields, fields specific to CA certificates,
    certificate policy extensions and custom extensions.

    Attributes:
        key_usage (~.resources.KeyUsage):
            Optional. Indicates the intended use for keys
            that correspond to a certificate.
        ca_options (~.resources.ReusableConfigValues.CaOptions):
            Optional. Describes options in this
            [ReusableConfigValues][google.cloud.security.privateca.v1beta1.ReusableConfigValues]
            that are relevant in a CA certificate.
        policy_ids (Sequence[~.resources.ObjectId]):
            Optional. Describes the X.509 certificate
            policy object identifiers, per
            https://tools.ietf.org/html/rfc5280#section-4.2.1.4
        aia_ocsp_servers (Sequence[str]):
            Optional. Describes Online Certificate Status
            Protocol (OCSP) endpoint addresses that appear
            in the "Authority Information Access" extension
            in the certificate.
        additional_extensions (Sequence[~.resources.X509Extension]):
            Optional. Describes custom X.509 extensions.
    """

    class CaOptions(proto.Message):
        r"""Describes values that are relevant in a CA certificate.

        Attributes:
            is_ca (~.wrappers.BoolValue):
                Optional. Refers to the "CA" X.509 extension,
                which is a boolean value. When this value is
                missing, the extension will be omitted from the
                CA certificate.
            max_issuer_path_length (~.wrappers.Int32Value):
                Optional. Refers to the path length
                restriction X.509 extension. For a CA
                certificate, this value describes the depth of
                subordinate CA certificates that are allowed.
                If this value is less than 0, the request will
                fail. If this value is missing, the max path
                length will be omitted from the CA certificate.
        """

        # Wrapper types are used so that "unset" is distinguishable from
        # false / zero.
        is_ca = proto.Field(proto.MESSAGE, number=1, message=wrappers.BoolValue,)
        max_issuer_path_length = proto.Field(
            proto.MESSAGE, number=2, message=wrappers.Int32Value,
        )

    key_usage = proto.Field(proto.MESSAGE, number=1, message="KeyUsage",)
    ca_options = proto.Field(proto.MESSAGE, number=2, message=CaOptions,)
    policy_ids = proto.RepeatedField(proto.MESSAGE, number=3, message="ObjectId",)
    aia_ocsp_servers = proto.RepeatedField(proto.STRING, number=4)
    additional_extensions = proto.RepeatedField(
        proto.MESSAGE, number=5, message="X509Extension",
    )
class ReusableConfigWrapper(proto.Message):
    r"""A
    [ReusableConfigWrapper][google.cloud.security.privateca.v1beta1.ReusableConfigWrapper]
    describes values that may assist in creating an X.509 certificate,
    or a reference to a pre-defined set of values.

    Attributes:
        reusable_config (str):
            Required. A resource path to a
            [ReusableConfig][google.cloud.security.privateca.v1beta1.ReusableConfig]
            in the format ``projects/*/locations/*/reusableConfigs/*``.
        reusable_config_values (~.resources.ReusableConfigValues):
            Required. A user-specified inline
            [ReusableConfigValues][google.cloud.security.privateca.v1beta1.ReusableConfigValues].
    """

    # The two fields form the ``config_values`` oneof: exactly one may be set
    # (a resource reference, or inline values).
    reusable_config = proto.Field(proto.STRING, number=1, oneof="config_values")
    reusable_config_values = proto.Field(
        proto.MESSAGE, number=2, oneof="config_values", message=ReusableConfigValues,
    )
class SubordinateConfig(proto.Message):
    r"""Describes a subordinate CA's issuers. This is either a resource path
    to a known issuing
    [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority],
    or a PEM issuer certificate chain.

    Attributes:
        certificate_authority (str):
            Required. This can refer to a
            [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
            in the same project that was used to create a subordinate
            [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority].
            This field is used for information and usability purposes
            only. The resource name is in the format
            ``projects/*/locations/*/certificateAuthorities/*``.
        pem_issuer_chain (~.resources.SubordinateConfig.SubordinateConfigChain):
            Required. Contains the PEM certificate chain for the issuers
            of this
            [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority],
            but not pem certificate for this CA itself.
    """

    class SubordinateConfigChain(proto.Message):
        r"""This message describes a subordinate CA's issuer certificate
        chain. This wrapper exists for compatibility reasons.

        Attributes:
            pem_certificates (Sequence[str]):
                Required. Expected to be in leaf-to-root
                order according to RFC 5246.
        """

        pem_certificates = proto.RepeatedField(proto.STRING, number=1)

    # ``certificate_authority`` and ``pem_issuer_chain`` form the
    # ``subordinate_config`` oneof: exactly one of them may be set.
    certificate_authority = proto.Field(
        proto.STRING, number=1, oneof="subordinate_config"
    )
    pem_issuer_chain = proto.Field(
        proto.MESSAGE,
        number=2,
        oneof="subordinate_config",
        message=SubordinateConfigChain,
    )
class PublicKey(proto.Message):
    r"""A [PublicKey][google.cloud.security.privateca.v1beta1.PublicKey]
    describes a public key.

    Attributes:
        type_ (~.resources.PublicKey.KeyType):
            Required. The type of public key.
        key (bytes):
            Required. A public key. Padding and encoding
            varies by 'KeyType' and is described along with
            the KeyType values.
    """

    class KeyType(proto.Enum):
        r"""Types of public keys that are supported. At a minimum, we support
        RSA and ECDSA, for the key sizes or curves listed:
        https://cloud.google.com/kms/docs/algorithms#asymmetric_signing_algorithms
        """
        KEY_TYPE_UNSPECIFIED = 0
        PEM_RSA_KEY = 1
        PEM_EC_KEY = 2

    # ``type_`` carries a trailing underscore because ``type`` would shadow
    # the Python builtin.
    type_ = proto.Field(proto.ENUM, number=1, enum=KeyType,)
    key = proto.Field(proto.BYTES, number=2)
class CertificateConfig(proto.Message):
    r"""A
    [CertificateConfig][google.cloud.security.privateca.v1beta1.CertificateConfig]
    describes an X.509 certificate or CSR that is to be created, as an
    alternative to using ASN.1.

    Attributes:
        subject_config (~.resources.CertificateConfig.SubjectConfig):
            Required. Specifies some of the values in a
            certificate that are related to the subject.
        reusable_config (~.resources.ReusableConfigWrapper):
            Required. Describes how some of the technical
            fields in a certificate should be populated.
        public_key (~.resources.PublicKey):
            Optional. The public key that corresponds to this config.
            This is, for example, used when issuing
            [Certificates][google.cloud.security.privateca.v1beta1.Certificate],
            but not when creating a self-signed
            [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
            or
            [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority]
            CSR.
    """

    class SubjectConfig(proto.Message):
        r"""These values are used to create the distinguished name and
        subject alternative name fields in an X.509 certificate.

        Attributes:
            subject (~.resources.Subject):
                Required. Contains distinguished name fields
                such as the location and organization.
            common_name (str):
                Optional. The "common name" of the
                distinguished name.
            subject_alt_name (~.resources.SubjectAltNames):
                Optional. The subject alternative name
                fields.
        """

        subject = proto.Field(proto.MESSAGE, number=1, message="Subject",)
        common_name = proto.Field(proto.STRING, number=2)
        subject_alt_name = proto.Field(
            proto.MESSAGE, number=3, message="SubjectAltNames",
        )

    subject_config = proto.Field(proto.MESSAGE, number=1, message=SubjectConfig,)
    reusable_config = proto.Field(
        proto.MESSAGE, number=2, message=ReusableConfigWrapper,
    )
    public_key = proto.Field(proto.MESSAGE, number=3, message=PublicKey,)
class CertificateDescription(proto.Message):
    r"""A
    [CertificateDescription][google.cloud.security.privateca.v1beta1.CertificateDescription]
    describes an X.509 certificate or CSR that has been issued, as an
    alternative to using ASN.1 / X.509.

    Attributes:
        subject_description (~.resources.CertificateDescription.SubjectDescription):
            Describes some of the values in a certificate
            that are related to the subject and lifetime.
        config_values (~.resources.ReusableConfigValues):
            Describes some of the technical fields in a
            certificate.
        public_key (~.resources.PublicKey):
            The public key that corresponds to an issued
            certificate.
        subject_key_id (~.resources.CertificateDescription.KeyId):
            Provides a means of identifying certificates
            that contain a particular public key, per
            https://tools.ietf.org/html/rfc5280#section-4.2.1.2.
        authority_key_id (~.resources.CertificateDescription.KeyId):
            Identifies the subject_key_id of the parent certificate, per
            https://tools.ietf.org/html/rfc5280#section-4.2.1.1
        crl_distribution_points (Sequence[str]):
            Describes a list of locations to obtain CRL
            information, i.e. the DistributionPoint.fullName
            described by
            https://tools.ietf.org/html/rfc5280#section-4.2.1.13
        aia_issuing_certificate_urls (Sequence[str]):
            Describes lists of issuer CA certificate URLs
            that appear in the "Authority Information
            Access" extension in the certificate.
        cert_fingerprint (~.resources.CertificateDescription.CertificateFingerprint):
            The hash of the x.509 certificate.
    """

    class SubjectDescription(proto.Message):
        r"""These values describe fields in an issued X.509 certificate
        such as the distinguished name, subject alternative names,
        serial number, and lifetime.

        Attributes:
            subject (~.resources.Subject):
                Contains distinguished name fields such as
                the location and organization.
            common_name (str):
                The "common name" of the distinguished name.
            subject_alt_name (~.resources.SubjectAltNames):
                The subject alternative name fields.
            hex_serial_number (str):
                The serial number encoded in lowercase
                hexadecimal.
            lifetime (~.duration.Duration):
                For convenience, the actual lifetime of an issued
                certificate. Corresponds to 'not_after_time' -
                'not_before_time'.
            not_before_time (~.timestamp.Timestamp):
                The time at which the certificate becomes
                valid.
            not_after_time (~.timestamp.Timestamp):
                The time at which the certificate expires.
        """

        subject = proto.Field(proto.MESSAGE, number=1, message="Subject",)
        common_name = proto.Field(proto.STRING, number=2)
        subject_alt_name = proto.Field(
            proto.MESSAGE, number=3, message="SubjectAltNames",
        )
        hex_serial_number = proto.Field(proto.STRING, number=4)
        lifetime = proto.Field(proto.MESSAGE, number=5, message=duration.Duration,)
        not_before_time = proto.Field(
            proto.MESSAGE, number=6, message=timestamp.Timestamp,
        )
        not_after_time = proto.Field(
            proto.MESSAGE, number=7, message=timestamp.Timestamp,
        )

    class KeyId(proto.Message):
        r"""A KeyId identifies a specific public key, usually by hashing
        the public key.

        Attributes:
            key_id (str):
                Optional. The value of this KeyId encoded in
                lowercase hexadecimal. This is most likely the
                160 bit SHA-1 hash of the public key.
        """

        key_id = proto.Field(proto.STRING, number=1)

    class CertificateFingerprint(proto.Message):
        r"""A group of fingerprints for the x509 certificate.

        Attributes:
            sha256_hash (str):
                The SHA 256 hash, encoded in hexadecimal, of
                the DER x509 certificate.
        """

        sha256_hash = proto.Field(proto.STRING, number=1)

    subject_description = proto.Field(
        proto.MESSAGE, number=1, message=SubjectDescription,
    )
    config_values = proto.Field(proto.MESSAGE, number=2, message=ReusableConfigValues,)
    public_key = proto.Field(proto.MESSAGE, number=3, message=PublicKey,)
    subject_key_id = proto.Field(proto.MESSAGE, number=4, message=KeyId,)
    authority_key_id = proto.Field(proto.MESSAGE, number=5, message=KeyId,)
    crl_distribution_points = proto.RepeatedField(proto.STRING, number=6)
    aia_issuing_certificate_urls = proto.RepeatedField(proto.STRING, number=7)
    cert_fingerprint = proto.Field(
        proto.MESSAGE, number=8, message=CertificateFingerprint,
    )
class ObjectId(proto.Message):
    r"""An [ObjectId][google.cloud.security.privateca.v1beta1.ObjectId]
    specifies an object identifier (OID). These provide context and
    describe types in ASN.1 messages.

    Attributes:
        object_id_path (Sequence[int]):
            Required. The parts of an OID path. The most
            significant parts of the path come first.
    """

    # Example: the OID 1.3.6.1 would be represented as [1, 3, 6, 1].
    object_id_path = proto.RepeatedField(proto.INT32, number=1)
class X509Extension(proto.Message):
    r"""An
    [X509Extension][google.cloud.security.privateca.v1beta1.X509Extension]
    specifies an X.509 extension, which may be used in different parts
    of X.509 objects like certificates, CSRs, and CRLs.

    Attributes:
        object_id (~.resources.ObjectId):
            Required. The OID for this X.509 extension.
        critical (bool):
            Required. Indicates whether or not this
            extension is critical (i.e., if the client does
            not know how to handle this extension, the
            client should consider this to be an error).
        value (bytes):
            Required. The value of this X.509 extension.
    """

    object_id = proto.Field(proto.MESSAGE, number=1, message=ObjectId,)
    critical = proto.Field(proto.BOOL, number=2)
    value = proto.Field(proto.BYTES, number=3)
class KeyUsage(proto.Message):
    r"""A [KeyUsage][google.cloud.security.privateca.v1beta1.KeyUsage]
    describes key usage values that may appear in an X.509 certificate.

    Attributes:
        base_key_usage (~.resources.KeyUsage.KeyUsageOptions):
            Describes high-level ways in which a key may
            be used.
        extended_key_usage (~.resources.KeyUsage.ExtendedKeyUsageOptions):
            Detailed scenarios in which a key may be
            used.
        unknown_extended_key_usages (Sequence[~.resources.ObjectId]):
            Used to describe extended key usages that are not listed in
            the
            [KeyUsage.ExtendedKeyUsageOptions][google.cloud.security.privateca.v1beta1.KeyUsage.ExtendedKeyUsageOptions]
            message.
    """

    class KeyUsageOptions(proto.Message):
        r"""[KeyUsage.KeyUsageOptions][google.cloud.security.privateca.v1beta1.KeyUsage.KeyUsageOptions]
        corresponds to the key usage values described in
        https://tools.ietf.org/html/rfc5280#section-4.2.1.3.

        Attributes:
            digital_signature (bool):
                The key may be used for digital signatures.
            content_commitment (bool):
                The key may be used for cryptographic
                commitments. Note that this may also be referred
                to as "non-repudiation".
            key_encipherment (bool):
                The key may be used to encipher other keys.
            data_encipherment (bool):
                The key may be used to encipher data.
            key_agreement (bool):
                The key may be used in a key agreement
                protocol.
            cert_sign (bool):
                The key may be used to sign certificates.
            crl_sign (bool):
                The key may be used to sign certificate
                revocation lists.
            encipher_only (bool):
                The key may be used to encipher only.
            decipher_only (bool):
                The key may be used to decipher only.
        """

        digital_signature = proto.Field(proto.BOOL, number=1)
        content_commitment = proto.Field(proto.BOOL, number=2)
        key_encipherment = proto.Field(proto.BOOL, number=3)
        data_encipherment = proto.Field(proto.BOOL, number=4)
        key_agreement = proto.Field(proto.BOOL, number=5)
        cert_sign = proto.Field(proto.BOOL, number=6)
        crl_sign = proto.Field(proto.BOOL, number=7)
        encipher_only = proto.Field(proto.BOOL, number=8)
        decipher_only = proto.Field(proto.BOOL, number=9)

    class ExtendedKeyUsageOptions(proto.Message):
        r"""[KeyUsage.ExtendedKeyUsageOptions][google.cloud.security.privateca.v1beta1.KeyUsage.ExtendedKeyUsageOptions]
        has fields that correspond to certain common OIDs that could be
        specified as an extended key usage value.

        Attributes:
            server_auth (bool):
                Corresponds to OID 1.3.6.1.5.5.7.3.1.
                Officially described as "TLS WWW server
                authentication", though regularly used for
                non-WWW TLS.
            client_auth (bool):
                Corresponds to OID 1.3.6.1.5.5.7.3.2.
                Officially described as "TLS WWW client
                authentication", though regularly used for
                non-WWW TLS.
            code_signing (bool):
                Corresponds to OID 1.3.6.1.5.5.7.3.3.
                Officially described as "Signing of downloadable
                executable code client authentication".
            email_protection (bool):
                Corresponds to OID 1.3.6.1.5.5.7.3.4.
                Officially described as "Email protection".
            time_stamping (bool):
                Corresponds to OID 1.3.6.1.5.5.7.3.8.
                Officially described as "Binding the hash of an
                object to a time".
            ocsp_signing (bool):
                Corresponds to OID 1.3.6.1.5.5.7.3.9.
                Officially described as "Signing OCSP
                responses".
        """

        server_auth = proto.Field(proto.BOOL, number=1)
        client_auth = proto.Field(proto.BOOL, number=2)
        code_signing = proto.Field(proto.BOOL, number=3)
        email_protection = proto.Field(proto.BOOL, number=4)
        time_stamping = proto.Field(proto.BOOL, number=5)
        ocsp_signing = proto.Field(proto.BOOL, number=6)

    base_key_usage = proto.Field(proto.MESSAGE, number=1, message=KeyUsageOptions,)
    extended_key_usage = proto.Field(
        proto.MESSAGE, number=2, message=ExtendedKeyUsageOptions,
    )
    unknown_extended_key_usages = proto.RepeatedField(
        proto.MESSAGE, number=3, message=ObjectId,
    )
class Subject(proto.Message):
    r"""[Subject][google.cloud.security.privateca.v1beta1.Subject] describes
    parts of a distinguished name that, in turn, describes the subject
    of the certificate.

    Attributes:
        country_code (str):
            The country code of the subject.
        organization (str):
            The organization of the subject.
        organizational_unit (str):
            The organizational_unit of the subject.
        locality (str):
            The locality or city of the subject.
        province (str):
            The province, territory, or regional state of
            the subject.
        street_address (str):
            The street address of the subject.
        postal_code (str):
            The postal code of the subject.
    """

    country_code = proto.Field(proto.STRING, number=1)
    organization = proto.Field(proto.STRING, number=2)
    organizational_unit = proto.Field(proto.STRING, number=3)
    locality = proto.Field(proto.STRING, number=4)
    province = proto.Field(proto.STRING, number=5)
    street_address = proto.Field(proto.STRING, number=6)
    postal_code = proto.Field(proto.STRING, number=7)
class SubjectAltNames(proto.Message):
    r"""[SubjectAltNames][google.cloud.security.privateca.v1beta1.SubjectAltNames]
    corresponds to a more modern way of listing what the asserted
    identity is in a certificate (i.e., compared to the "common name" in
    the distinguished name).

    Attributes:
        dns_names (Sequence[str]):
            Contains only valid, fully-qualified host
            names.
        uris (Sequence[str]):
            Contains only valid RFC 3986 URIs.
        email_addresses (Sequence[str]):
            Contains only valid RFC 2822 E-mail
            addresses.
        ip_addresses (Sequence[str]):
            Contains only valid 32-bit IPv4 addresses or
            RFC 4291 IPv6 addresses.
        custom_sans (Sequence[~.resources.X509Extension]):
            Contains additional subject alternative name
            values.
    """

    dns_names = proto.RepeatedField(proto.STRING, number=1)
    uris = proto.RepeatedField(proto.STRING, number=2)
    email_addresses = proto.RepeatedField(proto.STRING, number=3)
    ip_addresses = proto.RepeatedField(proto.STRING, number=4)
    custom_sans = proto.RepeatedField(proto.MESSAGE, number=5, message=X509Extension,)
# Re-export every message/enum registered in this module's protobuf manifest.
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"[email protected]"
]
| |
3732d7628f4b49ef7cd4f940283873df9defe2b4 | f0b741f24ccf8bfe9bd1950425d83b6291d21b10 | /components/aws/sagemaker/workteam/src/sagemaker_workteam_spec.py | 5b53cf8749125007f9bca1eb2cf8f0c92c1fadd4 | [
"PSF-2.0",
"LicenseRef-scancode-python-cwi",
"GPL-1.0-or-later",
"LicenseRef-scancode-proprietary-license",
"MIT",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-other-copyleft",
"Unlicense",
"Python-2.0",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"LicenseRef-scancode-protobuf"
]
| permissive | kubeflow/pipelines | e678342b8a325559dec0a6e1e484c525fdcc8ce8 | 3fb199658f68e7debf4906d9ce32a9a307e39243 | refs/heads/master | 2023-09-04T11:54:56.449867 | 2023-09-01T19:07:33 | 2023-09-01T19:12:27 | 133,100,880 | 3,434 | 1,675 | Apache-2.0 | 2023-09-14T20:19:06 | 2018-05-12T00:31:47 | Python | UTF-8 | Python | false | false | 3,485 | py | """Specification for the SageMaker workteam component."""
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from typing import List
from common.sagemaker_component_spec import (
SageMakerComponentSpec,
SageMakerComponentBaseOutputs,
)
from common.spec_input_parsers import SpecInputParsers
from common.common_inputs import (
COMMON_INPUTS,
SageMakerComponentCommonInputs,
SageMakerComponentInput as Input,
SageMakerComponentOutput as Output,
SageMakerComponentInputValidator as InputValidator,
SageMakerComponentOutputValidator as OutputValidator,
)
@dataclass(frozen=True)
class SageMakerWorkteamInputs(SageMakerComponentCommonInputs):
    """Defines the set of inputs for the workteam component."""

    # Name of the work team (required; see INPUTS validators below).
    team_name: Input
    # Human-readable description of the work team (required).
    description: Input
    # Amazon Cognito user pool identifier (optional).
    user_pool: Input
    # Comma-separated list of user-group identifiers (optional).
    user_groups: Input
    # Amazon Cognito app client identifier (optional).
    client_id: Input
    # ARN of the SNS topic that receives notifications (optional).
    sns_topic: Input
@dataclass
class SageMakerWorkteamOutputs(SageMakerComponentBaseOutputs):
    """Defines the set of outputs for the workteam component."""

    # Amazon Resource Name (ARN) of the created work team.
    workteam_arn: Output
class SageMakerWorkteamSpec(
    SageMakerComponentSpec[SageMakerWorkteamInputs, SageMakerWorkteamOutputs]
):
    """Argument specification for the SageMaker workteam component.

    Declares the validator for every workteam-specific input/output and
    wires them into the generic ``SageMakerComponentSpec`` machinery.
    """

    # Validator table for the workteam inputs. The component-wide shared
    # inputs are merged in via ``**vars(COMMON_INPUTS)``.
    INPUTS: SageMakerWorkteamInputs = SageMakerWorkteamInputs(
        team_name=InputValidator(
            input_type=str, required=True, description="The name of your work team."
        ),
        description=InputValidator(
            input_type=str, required=True, description="A description of the work team."
        ),
        user_pool=InputValidator(
            input_type=str,
            required=False,
            description="An identifier for a user pool. The user pool must be in the same region as the service that you are calling.",
        ),
        user_groups=InputValidator(
            input_type=str,
            required=False,
            description="A list of identifiers for user groups separated by commas.",
            default="",
        ),
        client_id=InputValidator(
            input_type=str,
            required=False,
            description="An identifier for an application client. You must create the app client ID using Amazon Cognito.",
        ),
        sns_topic=InputValidator(
            input_type=str,
            required=False,
            description="The ARN for the SNS topic to which notifications should be published.",
            default="",
        ),
        **vars(COMMON_INPUTS),
    )

    OUTPUTS = SageMakerWorkteamOutputs(
        workteam_arn=OutputValidator(description="The ARN of the workteam."),
    )

    def __init__(self, arguments: List[str]):
        """Parse ``arguments`` against the workteam input/output specs."""
        super().__init__(arguments, SageMakerWorkteamInputs, SageMakerWorkteamOutputs)

    @property
    def inputs(self) -> SageMakerWorkteamInputs:
        """Parsed input values (populated by the base spec)."""
        return self._inputs

    @property
    def outputs(self) -> SageMakerWorkteamOutputs:
        """Output value holders (populated by the base spec)."""
        return self._outputs

    @property
    def output_paths(self) -> SageMakerWorkteamOutputs:
        # NOTE(review): presumably the filesystem paths where each output
        # value is written — confirm against SageMakerComponentSpec.
        return self._output_paths
| [
"[email protected]"
]
| |
78f8e900199661a6db3b3bf59925c91f5e739119 | ebc00ddf4c8c5f5076471e8b8d56c2b634c51230 | /test/functional/wallet_multiwallet.py | 79e6ea8665ac6abc878f3b4bab25435d9008730a | [
"MIT"
]
| permissive | BlockMechanic/rain | 584a9e245cfb7ab5fb1add97b699b86833bfbc5b | e8818b75240ff9277b0d14d38769378f05d0b525 | refs/heads/master | 2021-07-03T03:48:53.977665 | 2021-03-04T01:28:20 | 2021-03-04T01:28:20 | 228,412,343 | 0 | 0 | MIT | 2019-12-16T15:03:28 | 2019-12-16T15:03:27 | null | UTF-8 | Python | false | false | 17,250 | py | #!/usr/bin/env python3
# Copyright (c) 2017-2019 The Rain Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiwallet.
Verify that a raind node can load multiple wallet files
"""
import os
import shutil
import time
from test_framework.test_framework import RainTestFramework
from test_framework.test_node import ErrorMatch
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class MultiWalletTest(RainTestFramework):
def set_test_params(self):
    """Configure the framework before the nodes are started.

    Two nodes are run, starting from a clean (empty) regtest chain so
    that wallet files are created fresh. ``supports_cli`` is a
    framework flag; presumably it allows the test to run through the
    CLI wrapper as well as RPC — confirm against the test framework.
    """
    self.setup_clean_chain = True
    self.num_nodes = 2
    self.supports_cli = True
def skip_test_if_missing_module(self):
    """Skip the whole test when the node binary lacks wallet support."""
    self.skip_if_no_wallet()
def run_test(self):
node = self.nodes[0]
data_dir = lambda *p: os.path.join(node.datadir, 'regtest', *p)
wallet_dir = lambda *p: data_dir('wallets', *p)
wallet = lambda name: node.get_wallet_rpc(name)
def wallet_file(name):
if os.path.isdir(wallet_dir(name)):
return wallet_dir(name, "wallet.dat")
return wallet_dir(name)
assert_equal(self.nodes[0].listwalletdir(), { 'wallets': [{ 'name': '' }] })
# check wallet.dat is created
self.stop_nodes()
assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True)
# create symlink to verify wallet directory path can be referenced
# through symlink
os.mkdir(wallet_dir('w7'))
os.symlink('w7', wallet_dir('w7_symlink'))
# rename wallet.dat to make sure plain wallet file paths (as opposed to
# directory paths) can be loaded
os.rename(wallet_dir("wallet.dat"), wallet_dir("w8"))
# create another dummy wallet for use in testing backups later
self.start_node(0, [])
self.stop_nodes()
empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat')
os.rename(wallet_dir("wallet.dat"), empty_wallet)
# restart node with a mix of wallet names:
# w1, w2, w3 - to verify new wallets created when non-existing paths specified
# w - to verify wallet name matching works when one wallet path is prefix of another
# sub/w5 - to verify relative wallet path is created correctly
# extern/w6 - to verify absolute wallet path is created correctly
# w7_symlink - to verify symlinked wallet path is initialized correctly
# w8 - to verify existing wallet file is loaded correctly
# '' - to verify default wallet file is created correctly
wallet_names = ['w1', 'w2', 'w3', 'w', 'sub/w5', os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', '']
extra_args = ['-wallet={}'.format(n) for n in wallet_names]
self.start_node(0, extra_args)
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8'])
assert_equal(set(node.listwallets()), set(wallet_names))
# check that all requested wallets were created
self.stop_node(0)
for wallet_name in wallet_names:
assert_equal(os.path.isfile(wallet_file(wallet_name)), True)
# should not initialize if wallet path can't be created
exp_stderr = "boost::filesystem::create_directory:"
self.nodes[0].assert_start_raises_init_error(['-wallet=wallet.dat/bad'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" does not exist')
self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" is a relative path', cwd=data_dir())
self.nodes[0].assert_start_raises_init_error(['-walletdir=debug.log'], 'Error: Specified -walletdir "debug.log" is not a directory', cwd=data_dir())
# should not initialize if there are duplicate wallets
self.nodes[0].assert_start_raises_init_error(['-wallet=w1', '-wallet=w1'], 'Error: Error loading wallet w1. Duplicate -wallet filename specified.')
# should not initialize if one wallet is a copy of another
shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy'))
exp_stderr = "BerkeleyBatch: Can't open database w8_copy \(duplicates fileid \w+ from w8\)"
self.nodes[0].assert_start_raises_init_error(['-wallet=w8', '-wallet=w8_copy'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
# should not initialize if wallet file is a symlink
os.symlink('w8', wallet_dir('w8_symlink'))
self.nodes[0].assert_start_raises_init_error(['-wallet=w8_symlink'], 'Error: Invalid -wallet path \'w8_symlink\'\. .*', match=ErrorMatch.FULL_REGEX)
# should not initialize if the specified walletdir does not exist
self.nodes[0].assert_start_raises_init_error(['-walletdir=bad'], 'Error: Specified -walletdir "bad" does not exist')
# should not initialize if the specified walletdir is not a directory
not_a_dir = wallet_dir('notadir')
open(not_a_dir, 'a', encoding="utf8").close()
self.nodes[0].assert_start_raises_init_error(['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory')
self.log.info("Do not allow -zapwallettxes with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.log.info("Do not allow -salvagewallet with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-salvagewallet', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-salvagewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
self.log.info("Do not allow -upgradewallet with multiwallet")
self.nodes[0].assert_start_raises_init_error(['-upgradewallet', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-upgradewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
# if wallets/ doesn't exist, datadir should be the default wallet dir
wallet_dir2 = data_dir('walletdir')
os.rename(wallet_dir(), wallet_dir2)
self.start_node(0, ['-wallet=w4', '-wallet=w5'])
assert_equal(set(node.listwallets()), {"w4", "w5"})
w5 = wallet("w5")
node.generatetoaddress(nblocks=1, address=w5.getnewaddress())
# now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded
os.rename(wallet_dir2, wallet_dir())
self.restart_node(0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()])
assert_equal(set(node.listwallets()), {"w4", "w5"})
w5 = wallet("w5")
w5_info = w5.getwalletinfo()
assert_equal(w5_info['immature_balance'], 50)
competing_wallet_dir = os.path.join(self.options.tmpdir, 'competing_walletdir')
os.mkdir(competing_wallet_dir)
self.restart_node(0, ['-walletdir=' + competing_wallet_dir])
exp_stderr = "Error: Error initializing wallet database environment \"\S+competing_walletdir\"!"
self.nodes[1].assert_start_raises_init_error(['-walletdir=' + competing_wallet_dir], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
self.restart_node(0, extra_args)
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy'])
wallets = [wallet(w) for w in wallet_names]
wallet_bad = wallet("bad")
# check wallet names and balances
node.generatetoaddress(nblocks=1, address=wallets[0].getnewaddress())
for wallet_name, wallet in zip(wallet_names, wallets):
info = wallet.getwalletinfo()
assert_equal(info['immature_balance'], 50 if wallet is wallets[0] else 0)
assert_equal(info['walletname'], wallet_name)
# accessing invalid wallet fails
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", wallet_bad.getwalletinfo)
# accessing wallet RPC without using wallet endpoint fails
assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo)
w1, w2, w3, w4, *_ = wallets
node.generatetoaddress(nblocks=101, address=w1.getnewaddress())
assert_equal(w1.getbalance(), 100)
assert_equal(w2.getbalance(), 0)
assert_equal(w3.getbalance(), 0)
assert_equal(w4.getbalance(), 0)
w1.sendtoaddress(w2.getnewaddress(), 1)
w1.sendtoaddress(w3.getnewaddress(), 2)
w1.sendtoaddress(w4.getnewaddress(), 3)
node.generatetoaddress(nblocks=1, address=w1.getnewaddress())
assert_equal(w2.getbalance(), 1)
assert_equal(w3.getbalance(), 2)
assert_equal(w4.getbalance(), 3)
batch = w1.batch([w1.getblockchaininfo.get_request(), w1.getwalletinfo.get_request()])
assert_equal(batch[0]["result"]["chain"], "regtest")
assert_equal(batch[1]["result"]["walletname"], "w1")
self.log.info('Check for per-wallet settxfee call')
assert_equal(w1.getwalletinfo()['paytxfee'], 0)
assert_equal(w2.getwalletinfo()['paytxfee'], 0)
w2.settxfee(4.0)
assert_equal(w1.getwalletinfo()['paytxfee'], 0)
assert_equal(w2.getwalletinfo()['paytxfee'], 4.0)
self.log.info("Test dynamic wallet loading")
self.restart_node(0, ['-nowallet'])
assert_equal(node.listwallets(), [])
assert_raises_rpc_error(-32601, "Method not found", node.getwalletinfo)
self.log.info("Load first wallet")
loadwallet_name = node.loadwallet(wallet_names[0])
assert_equal(loadwallet_name['name'], wallet_names[0])
assert_equal(node.listwallets(), wallet_names[0:1])
node.getwalletinfo()
w1 = node.get_wallet_rpc(wallet_names[0])
w1.getwalletinfo()
self.log.info("Load second wallet")
loadwallet_name = node.loadwallet(wallet_names[1])
assert_equal(loadwallet_name['name'], wallet_names[1])
assert_equal(node.listwallets(), wallet_names[0:2])
assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo)
w2 = node.get_wallet_rpc(wallet_names[1])
w2.getwalletinfo()
self.log.info("Load remaining wallets")
for wallet_name in wallet_names[2:]:
loadwallet_name = self.nodes[0].loadwallet(wallet_name)
assert_equal(loadwallet_name['name'], wallet_name)
assert_equal(set(self.nodes[0].listwallets()), set(wallet_names))
# Fail to load if wallet doesn't exist
assert_raises_rpc_error(-18, 'Wallet wallets not found.', self.nodes[0].loadwallet, 'wallets')
# Fail to load duplicate wallets
assert_raises_rpc_error(-4, 'Wallet file verification failed: Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
# Fail to load duplicate wallets by different ways (directory and filepath)
assert_raises_rpc_error(-4, "Wallet file verification failed: Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
# Fail to load if one wallet is a copy of another
assert_raises_rpc_error(-1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
# Fail to load if one wallet is a copy of another, test this twice to make sure that we don't re-introduce #14304
assert_raises_rpc_error(-1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
# Fail to load if wallet file is a symlink
assert_raises_rpc_error(-4, "Wallet file verification failed: Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink')
# Fail to load if a directory is specified that doesn't contain a wallet
os.mkdir(wallet_dir('empty_wallet_dir'))
assert_raises_rpc_error(-18, "Directory empty_wallet_dir does not contain a wallet.dat file", self.nodes[0].loadwallet, 'empty_wallet_dir')
self.log.info("Test dynamic wallet creation.")
# Fail to create a wallet if it already exists.
assert_raises_rpc_error(-4, "Wallet w2 already exists.", self.nodes[0].createwallet, 'w2')
# Successfully create a wallet with a new name
loadwallet_name = self.nodes[0].createwallet('w9')
assert_equal(loadwallet_name['name'], 'w9')
w9 = node.get_wallet_rpc('w9')
assert_equal(w9.getwalletinfo()['walletname'], 'w9')
assert 'w9' in self.nodes[0].listwallets()
# Successfully create a wallet using a full path
new_wallet_dir = os.path.join(self.options.tmpdir, 'new_walletdir')
new_wallet_name = os.path.join(new_wallet_dir, 'w10')
loadwallet_name = self.nodes[0].createwallet(new_wallet_name)
assert_equal(loadwallet_name['name'], new_wallet_name)
w10 = node.get_wallet_rpc(new_wallet_name)
assert_equal(w10.getwalletinfo()['walletname'], new_wallet_name)
assert new_wallet_name in self.nodes[0].listwallets()
self.log.info("Test dynamic wallet unloading")
# Test `unloadwallet` errors
assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].unloadwallet)
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", self.nodes[0].unloadwallet, "dummy")
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", node.get_wallet_rpc("dummy").unloadwallet)
assert_raises_rpc_error(-8, "Cannot unload the requested wallet", w1.unloadwallet, "w2"),
# Successfully unload the specified wallet name
self.nodes[0].unloadwallet("w1")
assert 'w1' not in self.nodes[0].listwallets()
# Successfully unload the wallet referenced by the request endpoint
# Also ensure unload works during walletpassphrase timeout
w2.encryptwallet('test')
w2.walletpassphrase('test', 1)
w2.unloadwallet()
time.sleep(1.1)
assert 'w2' not in self.nodes[0].listwallets()
# Successfully unload all wallets
for wallet_name in self.nodes[0].listwallets():
self.nodes[0].unloadwallet(wallet_name)
assert_equal(self.nodes[0].listwallets(), [])
assert_raises_rpc_error(-32601, "Method not found (wallet method is disabled because no wallet is loaded)", self.nodes[0].getwalletinfo)
# Successfully load a previously unloaded wallet
self.nodes[0].loadwallet('w1')
assert_equal(self.nodes[0].listwallets(), ['w1'])
assert_equal(w1.getwalletinfo()['walletname'], 'w1')
assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy', 'w9'])
# Test backing up and restoring wallets
self.log.info("Test wallet backup")
self.restart_node(0, ['-nowallet'])
for wallet_name in wallet_names:
self.nodes[0].loadwallet(wallet_name)
for wallet_name in wallet_names:
rpc = self.nodes[0].get_wallet_rpc(wallet_name)
addr = rpc.getnewaddress()
backup = os.path.join(self.options.tmpdir, 'backup.dat')
rpc.backupwallet(backup)
self.nodes[0].unloadwallet(wallet_name)
shutil.copyfile(empty_wallet, wallet_file(wallet_name))
self.nodes[0].loadwallet(wallet_name)
assert_equal(rpc.getaddressinfo(addr)['ismine'], False)
self.nodes[0].unloadwallet(wallet_name)
shutil.copyfile(backup, wallet_file(wallet_name))
self.nodes[0].loadwallet(wallet_name)
assert_equal(rpc.getaddressinfo(addr)['ismine'], True)
# Test .walletlock file is closed
self.start_node(1)
wallet = os.path.join(self.options.tmpdir, 'my_wallet')
self.nodes[0].createwallet(wallet)
assert_raises_rpc_error(-4, "Error initializing wallet database environment", self.nodes[1].loadwallet, wallet)
self.nodes[0].unloadwallet(wallet)
self.nodes[1].loadwallet(wallet)
if __name__ == '__main__':
    # Entry point: run the functional test through the framework harness.
    MultiWalletTest().main()
| [
"[email protected]"
]
| |
1f73e465808ce7005cd026a05ec7c9f9142b53dc | 4d1039cb53135c002dbee160c13e6a89bf57e57d | /main/migrations/0010_auto_20200901_1742.py | faacead814c375600d0625c3082f60b71c5ad347 | []
| no_license | nova-sangeeth/drix-url | 235c59c3692c294f84d94bb4bcd633bf20172aaf | 1fe32b45397f853e406a3641a23bdd5bb128d346 | refs/heads/master | 2023-08-06T07:40:55.387540 | 2021-09-24T04:48:01 | 2021-09-24T04:48:01 | 275,612,983 | 0 | 0 | null | 2021-09-24T04:48:02 | 2020-06-28T15:25:46 | HTML | UTF-8 | Python | false | false | 762 | py | # Generated by Django 3.0.7 on 2020-09-01 17:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds a creation timestamp and an owning user
    # to the `short_urls` model.

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('main', '0009_user_created_url'),
    ]

    operations = [
        migrations.AddField(
            model_name='short_urls',
            name='created_time',
            # editable=False keeps the timestamp out of model forms/admin.
            field=models.DateTimeField(editable=False, null=True),
        ),
        migrations.AddField(
            model_name='short_urls',
            name='user',
            # Deleting a user cascades to their shortened URLs.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"[email protected]"
]
| |
e17ffa077e98c9701b914e8fd82a3c615792b1f2 | 1e17d31c1e8e6db8e42b9c1358b1cbd7d58b1eb9 | /examples/readme.py | d559b18c0a0c60d197f7baf168e4fce4fa9656c0 | [
"MIT"
]
| permissive | pedrogclp/MerossIot | 69970526734b7b4762da888d1bcf09567ca8fd3f | 5913704375a4a20e7626519dd05d20143001adaf | refs/heads/0.4.X.X | 2023-05-27T22:56:48.291245 | 2021-03-27T10:16:21 | 2021-03-27T10:16:21 | 371,523,608 | 0 | 0 | MIT | 2021-05-27T22:58:37 | 2021-05-27T22:58:36 | null | UTF-8 | Python | false | false | 1,214 | py | import asyncio
import os
from meross_iot.http_api import MerossHttpClient
from meross_iot.manager import MerossManager
EMAIL = os.environ.get('MEROSS_EMAIL') or "YOUR_MEROSS_CLOUD_EMAIL"
PASSWORD = os.environ.get('MEROSS_PASSWORD') or "YOUR_MEROSS_CLOUD_PASSWORD"
async def main():
    """Log in to the Meross cloud, list every discovered device, then clean up."""
    # Setup the HTTP client API from user-password
    http_api_client = await MerossHttpClient.async_from_user_password(email=EMAIL, password=PASSWORD)

    # Setup and start the device manager
    manager = MerossManager(http_client=http_api_client)
    await manager.async_init()

    # Discover devices.
    await manager.async_device_discovery()
    meross_devices = manager.find_devices()

    # Print them
    print("I've found the following devices:")
    for dev in meross_devices:
        print(f"- {dev.name} ({dev.type}): {dev.online_status}")

    # Close the manager and logout from http_api
    manager.close()
    await http_api_client.async_logout()
if __name__ == '__main__':
    # On Windows + Python 3.8, you should uncomment the following
    # asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    # Drive the async entry point to completion, then release the loop.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
    loop.close()
| [
"[email protected]"
]
| |
2a9d0b5364557a152ab30182bfae2f7f02da9b62 | 9bbb00c09aaaa19565d3fb8091af568decb5820f | /3_Intro_To_Importing_Data_Python/1_Introduction_To_Flat_Files/7_Importing_with_Mixed_DataTypes.py | eb846cc9f26170ad1f3cd993191f6acb801e650f | []
| no_license | PeterL64/UCDDataAnalytics | 4417fdeda9c64c2f350a5ba53b2a01b4bdc36fc7 | d6ff568e966caf954323ecf641769b7c79ccb83a | refs/heads/master | 2023-06-14T04:10:41.575025 | 2021-07-07T15:23:50 | 2021-07-07T15:23:50 | 349,780,719 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 437 | py | # Importing with Mixed Data Types 2
# Similar to the np.genfromtxt() function there is another function, np.recfromcsv() for CSV files.
# np.recfromcsv has some defaults, so you do not need to enter them. dtype=None, delimiter=',' and names=True
# Import NumPy locally so the snippet runs standalone (the original exercise
# assumed `np` was already bound in the interactive session).
import numpy as np

# Assign the filename: file
file = 'titanic.csv'

# Import file using np.recfromcsv and assign it the variable: d
# np.recfromcsv defaults to dtype=None, delimiter=',' and names=True, so
# mixed column types are inferred automatically.
# NOTE: np.recfromcsv was removed in NumPy 2.0; on modern NumPy use
# np.genfromtxt(file, delimiter=',', names=True, dtype=None) instead.
d = np.recfromcsv(file)

# Print out first three entries of d
print(d[:3])
| [
"[email protected]"
]
| |
881c471feea196f62aaf175c1faf4a7fcfe47c31 | 9a1e2f17f4a7677f07a2a062dcfa18659b2429a2 | /models/adaCNN.py | b123316d7573a4896ed80ca6f8f330c4329e05be | []
| no_license | greentfrapp/cond-shift-neurons | 4b3f78096a2adeba17b0af7b98a344be9f98ca39 | 7a8d14a6b3d736f5daf8c71d85022065fe5a0ac8 | refs/heads/master | 2020-03-21T16:01:41.857665 | 2018-07-08T06:35:01 | 2018-07-08T06:35:01 | 138,746,556 | 29 | 4 | null | null | null | null | UTF-8 | Python | false | false | 8,194 | py | """
models - adaCNN
"""
import tensorflow as tf
import numpy as np
from models.utils import BaseModel, MemoryValueModel
# CNN used in adaCNN
class adaCNNNet(object):
    """Small conv stack whose pre-activations can be additively shifted by
    conditionally shifted neuron (CSN) values supplied per layer."""

    def __init__(self, name, inputs, layers, output_dim, parent, is_training, csn):
        # csn: dict mapping layer name -> additive shift tensor, or None to
        # build the plain (unshifted) network.
        super(adaCNNNet, self).__init__()
        self.name = name
        self.inputs = inputs
        self.is_training = is_training
        self.csn = csn
        # gradients["conv_i"] caches d(relu)/d(conv); used by the parent model
        # when constructing CSN memory values.
        self.gradients = dict()
        with tf.variable_scope(self.name, reuse=tf.AUTO_REUSE):
            self.build_model(layers, output_dim)

    def build_model(self, layers, output_dim):
        """Stack `layers` conv/relu/maxpool blocks, then a dense logits layer."""
        running_output = self.inputs
        for i in np.arange(layers):
            conv = tf.layers.conv2d(
                inputs=running_output,
                filters=32,
                kernel_size=(3, 3),
                padding="same",
                activation=None,
                name="conv_{}".format(i),
                reuse=tf.AUTO_REUSE,
            )
            # Apply the layer's CSN shift to the pre-activation when provided.
            if self.csn is not None and self.csn["conv_{}".format(i)] is not None:
                conv += self.csn["conv_{}".format(i)]
            relu = tf.nn.relu(conv)
            self.gradients["conv_{}".format(i)] = tf.gradients(relu, conv)
            # pool_size 2 with stride 1 shrinks each spatial dim by 1 per
            # layer, which is why the reshape below uses (28 - layers).
            maxpool = tf.layers.max_pooling2d(
                inputs=relu,
                pool_size=(2, 2),
                strides=(1, 1),
            )
            running_output = maxpool
        # assumes 28x28 inputs -- the flattened size is (28 - layers)^2 * 32
        self.output = tf.layers.dense(
            inputs=tf.reshape(running_output, [-1, (28 - layers) * (28 - layers) * 32]),
            units=output_dim,
            activation=None,
            name="logits",
            reuse=tf.AUTO_REUSE,
        )
        self.logits = self.output
        # Shift the final logits as well when CSN values are supplied.
        if self.csn is not None:
            self.logits += self.csn["logits"]
class adaCNNModel(BaseModel):
    """Few-shot classifier: a base CNN plus a key/value memory that produces
    conditionally shifted neuron (CSN) values applied on the test pass."""

    def __init__(self, name, num_classes=5, input_tensors=None, lr=1e-4, logdir=None, prefix='', is_training=None, num_test_classes=None):
        super(adaCNNModel, self).__init__()
        self.name = name
        # Use a mask to test on tasks with fewer classes than training tasks
        self.num_test_classes = num_test_classes
        if self.num_test_classes is not None:
            # Only the first num_test_classes logits participate in argmax.
            self.logit_mask = np.zeros([1, num_classes])
            for i in np.arange(num_test_classes):
                self.logit_mask[0, i] = 1
        else:
            self.logit_mask = np.ones([1, num_classes])
            self.num_test_classes = num_classes
        with tf.variable_scope(self.name, reuse=tf.AUTO_REUSE):
            self.build_model(num_classes, input_tensors, lr, is_training)
        # Save only this model's trainable variables.
        variables = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.name)
        self.saver = tf.train.Saver(var_list=variables, max_to_keep=3)
        if logdir is not None:
            self.writer = tf.summary.FileWriter(logdir + prefix)
            self.summary = tf.summary.merge([
                tf.summary.scalar("loss", self.test_loss, family=prefix),
                tf.summary.scalar("accuracy", self.test_accuracy, family=prefix),
            ])

    def build_model(self, num_classes, input_tensors=None, lr=1e-4, is_training=None):
        """Build the full graph: train-path CNN, key/value CSN memory, and
        the CSN-shifted test-path CNN with its loss/optimizer."""
        if input_tensors is None:
            # Stand-alone placeholders for 28x28 single-channel images.
            self.train_inputs = tf.placeholder(
                shape=(None, 28, 28, 1),
                dtype=tf.float32,
                name="train_inputs",
            )
            self.train_labels = tf.placeholder(
                shape=(None, num_classes),
                dtype=tf.float32,
                name="train_labels",
            )
            self.test_inputs = tf.placeholder(
                shape=(None, 28, 28, 1),
                dtype=tf.float32,
                name="test_inputs"
            )
            self.test_labels = tf.placeholder(
                shape=(None, num_classes),
                dtype=tf.float32,
                name="test_labels",
            )
        else:
            self.train_inputs = tf.reshape(input_tensors['train_inputs'], [-1, 28, 28, 1])
            self.test_inputs = tf.reshape(input_tensors['test_inputs'], [-1, 28, 28, 1])
            # NOTE(review): tf.shape(...)[-1] is a Tensor, so this Python `!=`
            # comparison against an int is evaluated eagerly (not in-graph) and
            # appears to always take the first branch -- confirm intended.
            if tf.shape(input_tensors['train_labels'])[-1] != self.num_test_classes:
                self.train_labels = tf.reshape(tf.one_hot(tf.argmax(input_tensors['train_labels'], axis=2), depth=num_classes), [-1, num_classes])
                self.test_labels = tf.reshape(tf.one_hot(tf.argmax(input_tensors['test_labels'], axis=2), depth=num_classes), [-1, num_classes])
            else:
                self.train_labels = tf.reshape(input_tensors['train_labels'], [-1, num_classes])
                self.test_labels = tf.reshape(input_tensors['test_labels'], [-1, num_classes])
        if is_training is None:
            self.is_training = tf.placeholder(
                shape=(None),
                dtype=tf.bool,
                name="is_training",
            )
        else:
            self.is_training = is_training
        # NOTE(review): this line assumes input_tensors is not None; the
        # placeholder-only path above would fail here -- confirm.
        batch_size = tf.shape(input_tensors['train_inputs'])[0]
        self.inputs = tf.concat([self.train_inputs, self.test_inputs], axis=0)
        self.labels = tf.concat([self.train_labels, self.test_labels], axis=0)
        # CNN (training path, no CSN shifts)
        self.cnn_train = adaCNNNet(
            name="cnn",
            inputs=self.train_inputs,
            layers=4,
            output_dim=num_classes,
            parent=self,
            is_training=self.is_training,
            csn=None
        )
        # Need to calculate training loss per task
        self.train_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=tf.reshape(self.train_labels, [batch_size, -1, num_classes]), logits=tf.reshape(self.cnn_train.logits, [batch_size, -1, num_classes])), axis=1)
        # Preshift accuracy for logging
        self.train_predictions = tf.argmax(self.cnn_train.logits, axis=1)
        self.train_accuracy = tf.contrib.metrics.accuracy(labels=tf.argmax(self.train_labels, axis=1), predictions=self.train_predictions)
        # CSN Memory Matrix
        # - Keys: a smaller 2-layer CNN embeds train+test inputs into 32 dims.
        self.memory_key_model = adaCNNNet(
            name="key_model",
            inputs=self.inputs,
            layers=2,
            output_dim=32,
            parent=self,
            is_training=self.is_training,
            csn=None,
        )
        keys = tf.split(
            self.memory_key_model.output,
            [tf.shape(self.train_inputs)[0], tf.shape(self.test_inputs)[0]],
            axis=0,
        )
        self.train_keys = train_keys = tf.reshape(keys[0], [batch_size, -1, 32])
        self.test_keys = test_keys = tf.reshape(keys[1], [batch_size, -1, 32])
        # - Values: built from per-layer relu/conv gradients scaled by the
        #   loss gradient w.r.t. the train logits.
        csn_gradients = {
            "conv_1": tf.reshape(self.cnn_train.gradients["conv_1"][0], [-1, 27 * 27 * 32, 1]) * tf.expand_dims(tf.gradients(self.train_loss, self.cnn_train.logits)[0], axis=1),
            "conv_2": tf.reshape(self.cnn_train.gradients["conv_2"][0], [-1, 26 * 26 * 32, 1]) * tf.expand_dims(tf.gradients(self.train_loss, self.cnn_train.logits)[0], axis=1),
            "conv_3": tf.reshape(self.cnn_train.gradients["conv_3"][0], [-1, 25 * 25 * 32, 1]) * tf.expand_dims(tf.gradients(self.train_loss, self.cnn_train.logits)[0], axis=1),
            # "conv_4": tf.reshape(self.cnn_train.gradients["conv_4"][0], [-1, 24 * 24 * 32, 1]) * tf.expand_dims(tf.gradients(self.train_loss, self.cnn_train.logits)[0], axis=1),
            "logits": tf.expand_dims(tf.gradients(self.train_loss, self.cnn_train.logits)[0], axis=2) * tf.expand_dims(tf.gradients(self.train_loss, self.cnn_train.logits)[0], axis=1),
        }
        self.train_values = train_values = {
            "conv_1": tf.reshape(MemoryValueModel(csn_gradients["conv_1"], self).outputs, [batch_size, -1, 27 * 27 * 32]),
            "conv_2": tf.reshape(MemoryValueModel(csn_gradients["conv_2"], self).outputs, [batch_size, -1, 26 * 26 * 32]),
            "conv_3": tf.reshape(MemoryValueModel(csn_gradients["conv_3"], self).outputs, [batch_size, -1, 25 * 25 * 32]),
            # "conv_4": tf.reshape(MemoryValueModel(csn_gradients["conv_4"], self).outputs, [batch_size, -1, 24 * 24 * 32]),
            "logits": tf.reshape(MemoryValueModel(csn_gradients["logits"], self).outputs, [batch_size, -1, num_classes]),
        }
        # Calculating Value for Test Key: softmax attention of test keys over
        # train keys, then attention-weighted mix of the stored values.
        dotp = tf.matmul(test_keys, train_keys, transpose_b=True)
        self.attention_weights = attention_weights = tf.nn.softmax(dotp)
        csn = dict(zip(train_values.keys(), [tf.matmul(attention_weights, value) for value in train_values.values()]))
        self.csn = {
            "conv_0": None,
            "conv_1": tf.reshape(csn["conv_1"], [-1, 27, 27, 32]),
            "conv_2": tf.reshape(csn["conv_2"], [-1, 26, 26, 32]),
            "conv_3": tf.reshape(csn["conv_3"], [-1, 25, 25, 32]),
            # "conv_4": tf.reshape(csn["conv_4"], [-1, 24, 24, 32]),
            "logits": tf.reshape(csn["logits"], [-1, num_classes]),
        }
        # Finally, pass CSN values to adaCNNNet
        self.cnn_test = adaCNNNet(
            name="cnn",
            inputs=self.test_inputs,
            layers=4,
            output_dim=num_classes,
            parent=self,
            is_training=self.is_training,
            csn=self.csn,
        )
        self.test_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=self.test_labels, logits=self.cnn_test.logits))
        self.optimize = tf.train.AdamOptimizer(learning_rate=lr).minimize(self.test_loss)
        # logit_mask hides the classes not used by the (smaller) test task.
        self.test_predictions = tf.argmax(self.cnn_test.logits * self.logit_mask, axis=1)
        self.test_accuracy = tf.contrib.metrics.accuracy(labels=tf.argmax(self.test_labels, axis=1), predictions=self.test_predictions)
| [
"[email protected]"
]
| |
e04dffd49d4e6b770fadf40d765cf2c417a9b93a | 6f255449d5790a1124ca56bec0e3dc457c1b3958 | /quzzi/quiz-5/[email protected]/quiz_5.py | 3e5b2471c2d36032c333902037e21a3206dfc3af | []
| no_license | tomtang110/comp9021 | ac8995f3f558ffdfff7af76a08c67e208fe26aa4 | 6c9e6404f515a72bc94a185c1c98d5aba49266c8 | refs/heads/master | 2020-03-23T18:56:41.177586 | 2018-07-24T04:56:23 | 2018-07-24T04:56:23 | 141,943,053 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,211 | py | # Randomly fills a grid of size 10 x 10 with 0s and 1s and computes:
# - the size of the largest homogenous region starting from the top left corner,
# so the largest region consisting of connected cells all filled with 1s or
# all filled with 0s, depending on the value stored in the top left corner;
# - the size of the largest area with a checkers pattern.
#
# Written by *** and Eric Martin for COMP9021
import sys
from random import seed, randint
dim = 10
grid = [[None] * dim for _ in range(dim)]
def display_grid():
    """Print the grid one row per line, rendering every non-zero cell as 1."""
    for row in grid:
        rendered = ' '.join(str(int(cell != 0)) for cell in row)
        print(' ', rendered)
# Possibly define other functions
# Read and validate the two parameters: the RNG seed and the density that
# controls how likely a cell is to hold 0.
try:
    arg_for_seed, density = input('Enter two nonnegative integers: ').split()
except ValueError:
    print('Incorrect input, giving up.')
    sys.exit()
try:
    arg_for_seed, density = int(arg_for_seed), int(density)
    if arg_for_seed < 0 or density < 0:
        raise ValueError
except ValueError:
    print('Incorrect input, giving up.')
    sys.exit()
seed(arg_for_seed)
# We fill the grid with randomly generated 0s and 1s,
# with for every cell, a probability of 1/(density + 1) to generate a 0.
for i in range(dim):
    for j in range(dim):
        grid[i][j] = int(randint(0, density) != 0)
print('Here is the grid that has been generated:')
display_grid()

size_of_largest_homogenous_region_from_top_left_corner = 0
from copy import deepcopy

# Keep a pristine copy: count_1() below destructively marks cells in `grid`.
grid1 = deepcopy(grid)
def count_1(i, j, R):
    """Flood-fill from (i, j): overwrite with '*' every cell of the
    4-connected region whose cells all hold the value R.

    Mutates the module-level `grid` in place.
    """
    global grid
    pending = [(i, j)]
    while pending:
        r, c = pending.pop()
        if grid[r][c] != R:
            continue
        grid[r][c] = '*'
        if r:
            pending.append((r - 1, c))
        if r < dim - 1:
            pending.append((r + 1, c))
        if c:
            pending.append((r, c - 1))
        if c < dim - 1:
            pending.append((r, c + 1))
# Question 1: flood-fill from the top-left corner using whichever value
# (0 or 1) that corner holds, then count the marked cells.
if grid[0][0] == 1:
    count_1(0,0,1)
elif grid[0][0] == 0:
    count_1(0,0,0)
# count_1 replaced every cell of the corner region with '*'.
size_top_lef=sum(i.count('*') for i in grid)
size_of_largest_homogenous_region_from_top_left_corner += size_top_lef
print('The size_of the largest homogenous region from the top left corner is '
      f'{size_of_largest_homogenous_region_from_top_left_corner}.'
      )
max_size_of_region_with_checkers_structure = 0

def count_2(i,j,grid,grid1,emp_list):
    """Recursively grow a checkers-pattern region from (i, j).

    grid     -- working copy; member cells are overwritten with '*'
    grid1    -- untouched reference grid used for neighbour comparisons
    emp_list -- coordinates already visited during this exploration
    """
    ab=(i,j)
    if ab not in emp_list:
        emp_list.append(ab)
        grid[i][j] = '*'
        # Recurse into each 4-neighbour whose original value differs from
        # this cell's, i.e. the checkerboard alternation continues there.
        if i:
            if grid1[i][j] != grid1[i-1][j]:
                count_2(i-1,j,grid,grid1,emp_list)
        if i<dim - 1:
            if grid1[i][j] != grid1[i+1][j]:
                count_2(i+1,j,grid,grid1,emp_list)
        if j:
            if grid1[i][j] != grid1[i][j-1]:
                count_2(i,j-1,grid,grid1,emp_list)
        if j<dim - 1:
            if grid1[i][j] != grid1[i][j+1]:
                count_2(i,j+1,grid,grid1,emp_list)
q2=[]
# Question 2: try every cell as a starting point; the answer is the largest
# checkers-pattern region over all starts.
for i in range(len(grid1)):
    for j in range(len(grid1)):
        # Fresh working copy each time, since count_2 marks cells with '*'.
        grid=deepcopy(grid1)
        count_2(i,j,grid,grid1,[])
        answer = sum(k.count('*') for k in grid)
        q2.append(answer)
max_size_of_region_with_checkers_structure += max(q2)
print('The size of the largest area with a checkers structure is '
      f'{max_size_of_region_with_checkers_structure}.'
      )
| [
"[email protected]"
]
| |
38a5a0665f4b5838c1c02d622affa06df9ded96a | 4191b25485148f003193d556a34b8d3cca2f2e27 | /code_testing/visualization.py | 893b8df97219af3de336af1efe0f42d194b0f4cf | []
| no_license | anilkunwar/FEA-Net | 429b7077d3d4c3a5e8a5edde52c049f2a985b5e7 | 858d3e3aed8f851082ac6f95756f382118e97908 | refs/heads/master | 2022-03-17T03:01:59.483998 | 2019-12-01T17:42:11 | 2019-12-01T17:42:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 818 | py | import matplotlib.pyplot as plt
import numpy as np
# Shared font sizes used to configure matplotlib below.
SMALL_SIZE = 8
MEDIUM_SIZE = 10
BIGGER_SIZE = 12

# Apply a consistent font-size scheme to every figure in this module.
plt.rc('font', size=SMALL_SIZE)          # controls default text sizes
plt.rc('axes', titlesize=SMALL_SIZE)     # fontsize of the axes title
plt.rc('axes', labelsize=MEDIUM_SIZE)    # fontsize of the x and y labels
plt.rc('xtick', labelsize=SMALL_SIZE)    # fontsize of the tick labels
plt.rc('ytick', labelsize=SMALL_SIZE)    # fontsize of the tick labels
plt.rc('legend', fontsize=SMALL_SIZE)    # legend fontsize
plt.rc('figure', titlesize=BIGGER_SIZE)  # fontsize of the figure title
def fmg_net_complexity():
    """Plot FMG-NET LU-block count against problem size (log-scaled x axis)."""
    # Grid edge lengths; the x axis below is edge**2 (degrees of freedom).
    x=np.asarray([64,128,256,512,1024,2048,4096,8192])
    # Corresponding LU-block counts -- presumably measured elsewhere; source
    # not shown in this file.
    y=np.asarray([16,27,36,49,84,101,120,141])
    plt.semilogx(x*x,y,'o-')
    plt.xlabel('DOF')
    plt.ylabel('# of LU block in FMG-NET')
| [
"[email protected]"
]
| |
a77a1d75e259fc62a385e27de488bc5836d93512 | bc7d019d3c88cfb637fdcc15ec800ed6e7db565c | /voyage_prevu/migrations/0035_aeroport_port.py | ef2ac653179f3370811641731034e92a65b4d838 | []
| no_license | Zaenma/paiement-django | dec9fca85b2cad0c62b7ec3fa416b34420dea31f | 7dfb777425320daf5a165dcc36ec7c0df898aa34 | refs/heads/master | 2023-03-19T07:31:53.409214 | 2020-11-09T14:58:51 | 2020-11-09T14:58:51 | 311,372,216 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 905 | py | # Generated by Django 3.1.2 on 2020-11-06 18:13
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: introduces the Aeroport and Port lookup
    # models, each storing a city name where an airport/port is located.

    dependencies = [
        ('voyage_prevu', '0034_auto_20201105_2023'),
    ]

    operations = [
        migrations.CreateModel(
            name='Aeroport',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): field is named 'non' -- presumably 'nom' was
                # intended; renaming would require a follow-up migration.
                ('non', models.CharField(max_length=50, verbose_name='Les villes où se situent les aéroports')),
            ],
        ),
        migrations.CreateModel(
            name='Port',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('non', models.CharField(max_length=50, verbose_name='Les villes où se situent les ports')),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
784027b7fe592ac2a4ca726e8c9251121601027f | 07306da5b68336715383a705f85f90aefeccfe96 | /tests/test_stream_protocol.py | 7f9a7d0ce1b20d3e43cd386f75a3f12224c7dcc2 | [
"Apache-2.0"
]
| permissive | datarobot/aiohttp | ebbb9e3c2af3c1b8a7fddc888195c09ad7ab55f9 | 19d95a5ad84cf1453b8091b9bd8640317831e15f | refs/heads/master | 2021-05-13T17:29:51.844981 | 2018-06-12T15:24:36 | 2018-06-12T15:24:36 | 116,824,901 | 0 | 1 | Apache-2.0 | 2018-06-12T15:32:02 | 2018-01-09T14:15:15 | Python | UTF-8 | Python | false | false | 961 | py | from unittest import mock
from aiohttp import parsers
def test_connection_made(loop):
    """connection_made() must store the transport on the protocol."""
    proto = parsers.StreamProtocol(loop=loop)
    assert proto.transport is None
    transport = mock.Mock()
    proto.connection_made(transport)
    assert proto.transport is transport
def test_connection_lost(loop):
    """A clean close (exc=None) resets transport/writer and marks reader EOF."""
    proto = parsers.StreamProtocol(loop=loop)
    proto.connection_made(mock.Mock())
    proto.connection_lost(None)
    assert proto.transport is None
    assert proto.writer is None
    assert proto.reader._eof
def test_connection_lost_exc(loop):
    """A close carrying an exception must surface it on the reader."""
    proto = parsers.StreamProtocol(loop=loop)
    proto.connection_made(mock.Mock())
    error = ValueError()
    proto.connection_lost(error)
    assert proto.reader.exception() is error
def test_data_received(loop):
    """Incoming bytes must be forwarded to the reader via feed_data()."""
    proto = parsers.StreamProtocol(loop=loop)
    proto.connection_made(mock.Mock())
    reader = mock.Mock()
    proto.reader = reader
    proto.data_received(b'data')
    reader.feed_data.assert_called_with(b'data')
| [
"[email protected]"
]
| |
03fef4d8423aa08251d98b642634ce53a2e4542a | 607dc8df19fc5248f6289cdda97857b5d58ca16f | /smac/runner/__init__.py | 4bda4cb5ebae36ef239b127e1fa675d9cb31b1b9 | [
"BSD-3-Clause",
"LicenseRef-scancode-philippe-de-muyter",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | automl/SMAC3 | 7dce243a33023c52d6819deff966f7b502e90ed0 | 541ee7e0383b491b86d1a23dcff669f2efad616d | refs/heads/main | 2023-08-31T17:36:06.067579 | 2023-08-01T13:02:51 | 2023-08-01T13:02:51 | 65,900,469 | 943 | 259 | NOASSERTION | 2023-09-11T02:36:57 | 2016-08-17T10:58:05 | Python | UTF-8 | Python | false | false | 472 | py | from smac.runner.abstract_runner import AbstractRunner
from smac.runner.dask_runner import DaskParallelRunner
from smac.runner.exceptions import (
FirstRunCrashedException,
TargetAlgorithmAbortException,
)
from smac.runner.target_function_runner import TargetFunctionRunner
# Names re-exported as the public API of ``smac.runner``.
__all__ = [
    # Runner
    "AbstractRunner",
    "TargetFunctionRunner",
    "DaskParallelRunner",
    # Exceptions
    "TargetAlgorithmAbortException",
    "FirstRunCrashedException",
]
| [
"[email protected]"
]
| |
beb4ebfba4d36ed804bf6dbd3b0fe7eb3512b978 | 1d61bf0b287533c9eb89bf71e217ead8cffb7811 | /System/Tester/tester.py | 1463da9e5ee33340fd14192bc1502171fc16b93f | []
| no_license | chukotka12/PP4E-GitHub | 2f6bf5e431a211beb9e1b6aa56b495770f07e6e4 | c9347ffa20f598b8c469082788a964549cd5df2b | refs/heads/master | 2020-05-30T04:43:30.570872 | 2019-09-10T14:33:14 | 2019-09-10T14:33:14 | 189,545,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,056 | py | """
##############################################################################
Тестирует сценарии Python в каталоге, передает им аргументы командной строки,
выполняет перенаправление stdin, перехватывает stdout, stderr и код завершения,
чтобы определить наличие ошибок и отклонений от предыдущих результатов
выполнения. Запуск сценариев и управление потоками ввода-вывода производится
с помощью переносимого модуля subprocess (как это делает функция os.popen3
в Python 2.X). Потоки ввода-вывода всегда интерпретируются модулем subprocess
как двоичные. Стандартный ввод, аргументы, стандартный вывод и стандартный вывод
ошибок отображаются в файлы, находящиеся в подкаталогах.
Этот сценарий командной строки позволяет указать имя тестируемого каталога
и флаг принудительной генерации выходного файла. Этот программный код можно было
бы упаковать в функцию, однако то обстоятельство, что результатами сценария
являются сообщения и выходные файлы, снижает практическую пользу модели вызов/
возвращаемое значение.
Дополнительные возможные расширения: можно было бы реализовать по несколько
наборов аргументов командной строки и/или входных файлов для каждого
тестируемого сценария и запускать их по несколько раз (использовать функцию glob
для выборки нескольких файлов “.in*” в каталоге Inputs).
Возможно, было бы проще хранить все файлы, необходимые для проведения тестов,
в одном и том же каталоге, но с различными расширениями, однако с течением
времени их объем мог бы оказаться слишком большим.
В случае ошибок можно было бы сохранять содержимое потоков вывода stderr
и stdout в подкаталоге Errors, но я предпочитаю иметь ожидаемый/фактический
вывод в подкаталоге Outputs.
##############################################################################
"""
import os, sys, glob, time
from subprocess import Popen, PIPE
# configuration arguments: optional test-directory path and a flag that
# forces regeneration of the expected-output files
testdir = sys.argv[1] if len(sys.argv) > 1 else os.curdir
forcegen = len(sys.argv) > 2
print('Start tester:', time.asctime())
print('in', os.path.abspath(testdir))
def verbose(*args):
    """Tracing hook: print a separator line, then each argument on its own line."""
    print('-' * 80)
    for item in args:
        print(item)
def quiet(*args): pass
trace = quiet
# trace = verbose
# collect the scripts to be tested
testpatt = os.path.join(testdir, 'Scripts', '*.py')
testfiles = glob.glob(testpatt)
testfiles.sort()
trace(os.getcwd(), *testfiles)
numfail = 0
for testpath in testfiles:
    testname = os.path.basename(testpath)
    # fetch the input file and command-line arguments for the script under test
    infile = testname.replace('.py', '.in')
    inpath = os.path.join(testdir, 'Inputs', infile)
    indata = open(inpath, 'rb').read() if os.path.exists(inpath) else b''
    argfile = testname.replace('.py', '.args')
    argpath = os.path.join(testdir, 'Args', argfile)
    argdata = open(argpath).read() if os.path.exists(argpath) else ''
    # locations of the files used to capture stdout and stderr;
    # remove results left over from previous runs
    outfile = testname.replace('.py', '.out')
    outpath = os.path.join(testdir, 'Outputs', outfile)
    outpathbad = outpath + '.bad'
    if os.path.exists(outpathbad): os.remove(outpathbad)
    errfile = testname.replace('.py', '.err')
    errpath = os.path.join(testdir, 'Errors', errfile)
    if os.path.exists(errpath): os.remove(errpath)
    # run the script under test with its standard streams redirected
    # NOTE(review): reading stdout fully before stderr can deadlock if the
    # child fills the stderr pipe first; Popen.communicate() would avoid that
    pypath = sys.executable
    command = '%s %s %s' % (pypath, testpath, argdata)
    trace(command, indata)
    process = Popen(command, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    process.stdin.write(indata)
    process.stdin.close()
    outdata = process.stdout.read()
    errdata = process.stderr.read()
    exitstatus = process.wait()
    trace(outdata, errdata, exitstatus)
    # analyze the results: nonzero exit status or any stderr output counts
    # as a failure; otherwise compare stdout against the saved expected file
    if exitstatus != 0:
        print('ERROR status:', testname, exitstatus)
    if errdata:
        print('ERROR stream:', testname, errpath)
        open(errpath, 'wb').write(errdata)
    if exitstatus or errdata:
        numfail += 1
        open(outpathbad, 'wb').write(outdata)
    elif not os.path.exists(outpath) or forcegen:
        print('generating:', outpath)
        open(outpath, 'wb').write(outdata)
    else:
        priorout = open(outpath, 'rb').read()
        if priorout == outdata:
            print('passed:', testname)
        else:
            numfail += 1
            print('FAILED output:', testname, outpathbad)
            open(outpathbad, 'wb').write(outdata)
print('Finished:', time.asctime())
print('%s tests were run, %s tests failed.' % (len(testfiles), numfail))
| [
"[email protected]"
]
| |
bab250c929a45a4303cf4458404b5564c91d3e7e | 7374204324f6326663d12b3dd1fecc5bebb6854e | /offer/21.py | 92c2b908ffa610098c7214b6d91323a1b69d4e39 | []
| no_license | KevinChen1994/leetcode-algorithm | c18b58df398027078b0c0f468c4c873e9419433b | 1bcf3206cd3acc428ec690cb883c612aaf708aac | refs/heads/master | 2023-02-07T11:35:47.747207 | 2023-01-29T11:08:49 | 2023-01-29T11:08:49 | 230,386,123 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 466 | py | # !usr/bin/env python
# -*- coding:utf-8 _*-
# author:chenmeng
# datetime:2020/8/27 22:54
class Solution:
    """In-place partition: move every odd number ahead of the even ones."""

    def exchange(self, nums):
        """Swap all odd values to the front of ``nums`` (mutated in place) and return it."""
        boundary = 0  # index of the first slot not yet holding an odd number
        for idx in range(len(nums)):
            if nums[idx] % 2 != 0:
                nums[idx], nums[boundary] = nums[boundary], nums[idx]
                boundary += 1
        return nums
if __name__ == '__main__':
    # Quick manual check: all odd numbers end up ahead of the evens.
    solution = Solution()
    nums = [1, 3, 8, 4, 2, 2, 3, 5, 90, 2, 1]
    print(solution.exchange(nums))
| [
"[email protected]"
]
| |
72edb51cbaee18ff1b5ed216dfe4334d588619c4 | 7bf617f77a55d8ec23fa8156c1380b563a5ac7f6 | /CG/SciPy/7900_08_04.py | 347dafa320f3570a8b98232c11f7348e03708ca2 | []
| no_license | anyatran/school | c06da0e08b148e3d93aec0e76329579bddaa85d5 | 24bcfd75f4a6fe9595d790808f8fca4f9bf6c7ec | refs/heads/master | 2021-06-17T10:45:47.648361 | 2017-05-26T12:57:23 | 2017-05-26T12:57:23 | 92,509,148 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 725 | py | #!/usr/bin/python
# select a non-GUI backend
import matplotlib
matplotlib.use('Agg')
# import plotting module
import matplotlib.pyplot as plt
# used to 'fake' file writing
from cStringIO import StringIO
# used to generate the graph
import numpy as np
# function called by mod_python upon request on this file
def index(req):
    """mod_python handler: render the demo curve as a PNG HTTP response."""
    # start from clean axes so repeated requests do not accumulate lines
    plt.cla()
    # generate the graph
    xs = np.arange(0, 6, .01)
    plt.plot(xs, np.sin(xs)**3 + 0.5*np.cos(xs))
    # render the figure into an in-memory buffer instead of a disk file
    png_buffer = StringIO()
    plt.savefig(png_buffer)
    # hand the PNG bytes back as the HTTP response
    req.content_type = "image/png"
    req.write(png_buffer.getvalue())
| [
"[email protected]"
]
| |
a60335fd3c0161a7831115494dd5523f809e1519 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_8/ndxkee009/question3.py | 5bbb84a08292c8d8d7b0d5a8882340aedd384aa1 | []
| no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | """Keegan Naidoo
NDXKEE009
4 May 2014"""
text=input("Enter a message:\n")
def Encrypt(text):
    """Recursively shift each lowercase letter forward by one ('z' wraps to 'a').

    Spaces, any character with ord < 97 (uppercase letters, digits, most
    punctuation) and non-alphabetic characters are copied through unchanged.
    """
    if not text:
        return ""
    head = text[0]
    rest = Encrypt(text[1:])
    if head == " ":
        return " " + rest
    if ord(head) < 97:
        return head + rest
    if head == 'z':
        return "a" + rest
    if not head.isalpha():
        return head + rest
    return chr(ord(head) + 1) + rest
x=Encrypt(text)
print("Encrypted message:\n"+str(x)) | [
"[email protected]"
]
| |
2ebf12f787421a9fff6f50523fcb9c583a664292 | 680185d233bdc0a1b2f404923d69e1d2e5b94d9d | /rambler/net/compareExpressions.py | 6dc3058383d1cf10c04989605ccfdad5836882f0 | []
| no_license | pombredanne/rambler.net | e0c5d36d3495d85fa07edbaa2c52c6ce69c2ae70 | 065d5ec4d1eee086b0b37910e3a6887ae748d83e | refs/heads/master | 2020-12-31T02:49:31.666102 | 2012-04-12T18:24:58 | 2012-04-12T18:24:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,909 | py | import cStringIO
# characters that count as regular-expression metacharacters
REGULAR_EXPRESSION_CHARS='.*+?'
# grouping characters that do not affect an expression's precision
IGNORE_CHARS='()'
def compareExpressions(exp1,exp2):
    """Given two strings which represent regular expressions, compare how greedy the expressions are:
    -1 if exp1 is more precise than exp2
    0 if exp1 is as precise as exp2
    1 if exp1 is less precise than exp2
    This function is useful when you want to test a string against a series of regular expressions
    and you want more precise expressions to have a chance at evaluating a string.
    For example consider the following regular expressions
    >>> expressions = ['.*', 'foo.*']
    If we were to evaluate a string against each expression and stop
    on the first match like this code
    >>> import re
    >>> for expr in expressions:
    ...     if re.match(expr, 'foo'):
    ...         break
    The second expression 'foo.*' would never be tested because '.*'
    matches everything.
    >>> expr == '.*'
    True
    Therefore we want the more specific match to run first, which
    means we need to sort the list.
    >>> expressions.sort(compareExpressions)
    >>> expressions
    ['foo.*', '.*']
    NOTE: ``cmp`` and ``list.sort(comparison_function)`` are Python 2 only.
    """
    # We deliberately flip exp2 and exp1 when calling cmp() because we
    # want higher precision expressions to come first, not last.
    return cmp(expressionPrecision(exp2), expressionPrecision(exp1))
def expressionPrecision(expStr):
    r"""Return the precision of an expression: the number of literal
    (non regular-expression) characters at the start of the string,
    counted up to the first unescaped metacharacter.

    For example

    >>> expressionPrecision('blah')
    4

    Because 'blah' contains no regular expression characters.
    In this next example the precision is 2 because the expression can
    match "blah", "bloh", "blue herring", ...

    >>> expressionPrecision('bl(.*)h')
    2

    Grouping characters have no impact on the precision:

    >>> expressionPrecision('bl(a.*)h')
    3

    Escaped regular expression characters count as normal characters:

    >>> expressionPrecision('blah\.')
    5
    """
    precision = 0
    pos = 0
    size = len(expStr)
    while pos < size:
        ch = expStr[pos]
        if ch == '\\':
            # an escaped character counts as a single ordinary character
            pos += 1
            precision += 1
        elif ch in IGNORE_CHARS:
            # grouping characters are neutral: neither literal nor meta
            pass
        elif ch not in REGULAR_EXPRESSION_CHARS:
            precision += 1
        else:
            # first real metacharacter ends the literal prefix
            break
        pos += 1
    return precision
if __name__ == "__main__":
    # Run the doctests embedded in this module's docstrings.
    import sys, doctest
    mod = sys.modules[__name__]
    doctest.testmod(mod)
| [
"[email protected]"
]
| |
7b764c9a795922647c4f3acaf33d0114c1f2c22c | 4cdc9ba739f90f6ac4bcd6f916ba194ada77d68c | /剑指offer/第四遍/19.正则表达式匹配.py | e839a9c0ddfc157700ff3f37508a06178b532c62 | []
| no_license | leilalu/algorithm | bee68690daf836cc5807c3112c2c9e6f63bc0a76 | 746d77e9bfbcb3877fefae9a915004b3bfbcc612 | refs/heads/master | 2020-09-30T15:56:28.224945 | 2020-05-30T03:28:39 | 2020-05-30T03:28:39 | 227,313,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,457 | py | """
请实现一个函数用来匹配包含'. '和'*'的正则表达式。模式中的字符'.'表示任意一个字符,而'*'表示它前面的字符可以出现任意次(含0次)。在本题中,匹配是指字符串的所有字符匹配整个模式。例如,字符串"aaa"与模式"a.a"和"ab*ac*a"匹配,但与"aa.a"和"ab*a"均不匹配。
示例 1:
输入:
s = "aa"
p = "a"
输出: false
解释: "a" 无法匹配 "aa" 整个字符串。
示例 2:
输入:
s = "aa"
p = "a*"
输出: true
解释: 因为 '*' 代表可以匹配零个或多个前面的那一个元素, 在这里前面的元素就是 'a'。因此,字符串 "aa" 可被视为 'a' 重复了一次。
示例 3:
输入:
s = "ab"
p = ".*"
输出: true
解释: ".*" 表示可匹配零个或多个('*')任意字符('.')。
示例 4:
输入:
s = "aab"
p = "c*a*b"
输出: true
解释: 因为 '*' 表示零个或多个,这里 'c' 为 0 个, 'a' 被重复一次。因此可以匹配字符串 "aab"。
示例 5:
输入:
s = "mississippi"
p = "mis*is*p*."
输出: false
s 可能为空,且只包含从 a-z 的小写字母。
p 可能为空,且只包含从 a-z 的小写字母,以及字符 . 和 *。
"""
class Solution:
    """Regular-expression matching with '.' and '*' via dynamic programming."""

    def isMatch(self, s, p):
        """Return True if pattern ``p`` matches the ENTIRE string ``s``.

        '.' matches any single character; '*' means the preceding character
        may repeat zero or more times.
        """
        # Prepend a sentinel so dp indices line up with 1-based prefixes.
        s, p = '#' + s, '#' + p
        m, n = len(s), len(p)
        # dp[i][j]: do the first i chars of s match the first j chars of p?
        dp = [[False] * n for _ in range(m)]
        # base case: two empty strings match
        dp[0][0] = True
        # iterate over prefixes of the string ...
        for i in range(m):
            # ... and prefixes of the pattern
            for j in range(1, n):
                # i == 0: matching the empty string against p[1..j]; only
                # possible when p[j] is '*' dropping its base char (dp[i][j-2]).
                if i == 0:
                    dp[i][j] = j > 1 and p[j] == '*' and dp[i][j - 2]
                # current characters match directly ('.' matches anything):
                # inherit from both prefixes shortened by one.
                elif p[j] in [s[i], '.']:
                    dp[i][j] = dp[i - 1][j - 1]
                # '*': either drop the "x*" pair entirely (dp[i][j-2]) or, if
                # its base char matches s[i], consume one s char (dp[i-1][j]).
                # NOTE: this line relies on 'and' binding tighter than 'or'.
                elif p[j] == '*':
                    dp[i][j] = j > 1 and dp[i][j - 2] or (p[j - 1] in [s[i], '.'] and dp[i - 1][j])
                else:
                    dp[i][j] = False
        return dp[-1][-1]
if __name__ == '__main__':
    # Stress case: many 'a*' groups against a long run of 'a's (expects True).
    s = "aaaaaaaaaaaaab"
    p = "a*a*a*a*a*a*a*a*a*a*a*a*b"
    res = Solution().isMatch(s, p)
    print(res)
| [
"[email protected]"
]
| |
4291138f901a7718cffecda75a832431563fc9a5 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_2453486_0/Python/yakirsudry/solution.py | ea43b0bf5caa42220edc99eeee8ce197f20cdefd | []
| no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,058 | py | import os
# input/output file paths for the Code Jam data set
# NOTE(review): 'input' shadows the builtin of the same name
input = r"A-small-attempt0.in"
output = r"testoutput.txt"
def get_solution_str(table):
    """Classify a 4x4 board of 'X'/'O'/'T'/'.' cells.

    'T' is a wildcard counting for either player.  Returns one of
    "O won", "X won", "Draw" or "Game has not completed".  Rows and
    columns are checked first, then the two diagonals, preserving the
    original scan order (O is always tested before X).
    """
    o_good_chars = ["O", "T"]
    x_good_chars = ["X", "T"]

    def line_won(cells, good):
        # True when all four cells belong to the given player (or are 'T').
        return (cells[0] in good and cells[1] in good
                and cells[2] in good and cells[3] in good)

    for i in xrange(4):
        row = [table[i][0], table[i][1], table[i][2], table[i][3]]
        col = [table[0][i], table[1][i], table[2][i], table[3][i]]
        if line_won(row, o_good_chars):
            return "O won"
        if line_won(row, x_good_chars):
            return "X won"
        if line_won(col, o_good_chars):
            return "O won"
        if line_won(col, x_good_chars):
            return "X won"
    diagonal = [table[0][0], table[1][1], table[2][2], table[3][3]]
    anti_diagonal = [table[0][3], table[1][2], table[2][1], table[3][0]]
    if line_won(diagonal, o_good_chars):
        return "O won"
    if line_won(diagonal, x_good_chars):
        return "X won"
    if line_won(anti_diagonal, o_good_chars):
        return "O won"
    if line_won(anti_diagonal, x_good_chars):
        return "X won"
    for i in xrange(4):
        if "." in table[i]:
            return "Game has not completed"
    return "Draw"
def decode(line):
    """Split a board line into a list of its individual characters."""
    return list(line)
# Read every 4-line board from the input file, classify it and write one
# "Case #k: <result>" line per case; a blank line separates boards.
lines = open(input, "r").readlines()
num_cases = int(lines[0])
out = open(output, "w")
cur_line = 1
table = [""] * 4
cur_case = 1
for i in xrange(num_cases):
    for i in xrange(4):
        table[i] = decode(lines[cur_line][:-1])
        cur_line += 1
    cur_line += 1
    out.write("Case #%d: %s\n" %(cur_case, get_solution_str(table)))
    cur_case += 1
print table | [
"[email protected]"
]
| |
b6ee2ce9b062a51c42f418699bc88c5290f2d92e | 8d3835e39cbc2c74d8535b809686d6ab3033c0d0 | /ecommerce/carts/migrations/0001_initial.py | ae9854cb961e0bd4a352d399116cca10ab49b218 | []
| no_license | gayatribasude/GayatrisWorld | 125698955cd8b98a5aa2377331293587a57f2911 | 552ea2ef946e95f5bccc4e51d4030484ab0bc438 | refs/heads/master | 2023-06-25T19:45:03.232059 | 2021-08-02T16:43:47 | 2021-08-02T16:43:47 | 384,343,617 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,069 | py | # Generated by Django 2.1.3 on 2018-12-06 16:23
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the carts app: a Cart optionally owned by a user,
    # with a many-to-many relation to products and a running total.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('products', '0003_auto_20181202_0848'),
    ]
    operations = [
        migrations.CreateModel(
            name='Cart',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('total', models.DecimalField(decimal_places=2, default=0.0, max_digits=30)),
                ('updated', models.DateTimeField(auto_now=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('products', models.ManyToManyField(blank=True, to='products.Product')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"gayatribasude"
]
| gayatribasude |
6b38251500d3171ba1a90aee8bf9ea0e04f55017 | faf45ce5687f45b1c1a9aad272dcec6c2543db24 | /imaginet/evaluate.py | 2c82c70e8b09db334924e7f825b1d587f5660e18 | [
"MIT"
]
| permissive | gchrupala/reimaginet | b8b7ee73bfdbf5d4d293bd5704e554fb9800ac1e | f583b62877a62d8c06e2dcd1e39363f4cc4976f9 | refs/heads/master | 2020-04-12T06:15:43.473654 | 2017-05-22T15:47:38 | 2017-05-22T15:47:38 | 37,725,869 | 6 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,646 | py | # encoding: utf-8
# Copyright (c) 2015 Grzegorz Chrupała
from __future__ import division
import theano
import theano.tensor as T
import numpy
from scipy.spatial.distance import cdist
def paraphrase_ranking(vectors, group):
    """Rank sentences by projection and return evaluation metrics.

    Each sentence is ranked against all others (itself excluded) and
    scored at the single cutoff n=4 via :func:`ranking`.
    """
    return ranking(vectors, vectors, group, ns=[4], exclude_self=True)
def ranking(candidates, vectors, correct, ns=(1, 5, 10), exclude_self=False):
    """Rank `candidates` in order of similarity for each vector and return evaluation metrics.

    `correct[i][j]` indicates whether for vector i the candidate j is correct.

    :param candidates: matrix of candidate vectors, one per row
    :param vectors: matrix of query vectors, one per row
    :param correct: boolean matrix of correctness flags per (query, candidate)
    :param ns: cutoff ranks at which precision/recall/overlap are computed
        (an immutable tuple default replaces the original mutable list)
    :param exclude_self: drop candidate j when ranking query j (used when
        candidates == vectors, e.g. paraphrase ranking)
    :return: dict with per-query 'ranks' (1-based rank of the first correct
        candidate) and per-cutoff 'precision', 'recall', 'overlap' lists
    """
    # Batched cosine distances (theano graph under the hood).
    distances = Cdist(batch_size=2**13)(vectors, candidates)
    result = {'ranks': [], 'precision': {}, 'recall': {}, 'overlap': {}}
    for n in ns:
        result['precision'][n] = []
        result['recall'][n] = []
        result['overlap'][n] = []
    for j, row in enumerate(distances):
        ranked = numpy.argsort(row)
        if exclude_self:
            ranked = ranked[ranked != j]
        # Positions (within the ranking) of the correct candidates.
        id_correct = numpy.where(correct[j][ranked])[0]
        rank1 = id_correct[0] + 1
        for n in ns:
            id_topn = ranked[:n]
            overlap = len(set(id_topn).intersection(set(ranked[id_correct])))
            result['precision'][n].append(overlap / n)
            result['recall'][n].append(overlap / len(id_correct))
            result['overlap'][n].append(overlap)
        result['ranks'].append(rank1)
    return result
class Cdist():
    """Return cosine distances between two sets of vectors."""
    def __init__(self, batch_size=None):
        # When batch_size is set, the first matrix is processed in row
        # chunks of that size to bound memory use.
        self.batch_size = batch_size
        # Symbolic theano graph: L2-normalize both matrices row-wise, then
        # take the dot product, yielding pairwise cosine similarities.
        self.U = T.matrix('U')
        self.V = T.matrix('V')
        self.U_norm = self.U / self.U.norm(2, axis=1).reshape((self.U.shape[0], 1))
        self.V_norm = self.V / self.V.norm(2, axis=1).reshape((self.V.shape[0], 1))
        self.W = T.dot(self.U_norm, self.V_norm.T)
        self.cosine = theano.function([self.U, self.V], self.W)
    def __call__(self, A, B):
        # distance = 1 - cosine similarity; A may be split into batches
        if self.batch_size is None:
            chunks = [A]
        else:
            chunks = numpy.split(A, [i for i
                                     in range(self.batch_size, A.shape[0], self.batch_size) ])
        cosines = numpy.vstack([self.cosine(chunk, B) for chunk in chunks])
        return 1 - cosines
import json
import imaginet.defn.visual as visual
from imaginet.simple_data import phonemes
from scipy.spatial.distance import cosine
def eval_bestimg(modelpath, testpath, tokenize=phonemes):
    """Accuracy at picking the annotated best image for each test sentence.

    Loads a visual model, caches scaled features for every COCO image, then
    for each test row predicts an embedding and selects the candidate image
    with the smallest cosine distance; returns the fraction of rows where
    that 1-based choice equals the annotated response.
    NOTE(review): ``dp`` is never imported in this module, so this function
    raises NameError as written — confirm the intended data-provider import
    against the original repository.
    """
    rows = [ json.loads(line) for line in open(testpath)]
    model = visual.load(path=modelpath)
    scaler = model.scaler
    batcher = model.batcher
    mapper = batcher.mapper
    img_fs = {}
    sent_ids = {}
    prov = dp.getDataProvider('coco', root='/home/gchrupala/repos/reimaginet')
    for split in ['val','test','restval']:
        for img in prov.iterImages(split=split):
            img_fs[img['cocoid']] = scaler.transform([ img['feat'] ])[0]
            for sent in img['sentences']:
                sent_ids[sent['sentid']]=sent
    def response(row):
        # 1-based index of the candidate image closest to the prediction.
        sent = sent_ids[row['meta']['id']]
        inputs = list(mapper.transform([tokenize(sent) ]))
        pred = model.Visual.predict(batcher.batch_inp(inputs))[0]
        return 1+numpy.argmin([ cosine(pred, img_fs[cocoid]) for cocoid in row['meta']['candidates']])
    preds = numpy.array([ response(row) for row in rows ])
    target = numpy.array([ row['meta']['response'] for row in rows])
    return numpy.mean(preds==target)
| [
"[email protected]"
]
| |
ec03231406e390198411099c19d5d3824fc7495c | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/GEM/YW_CYBMM_SZSJ_143.py | 13ea7b42f76855712e5a8114bc47a20da2331362 | []
| no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,065 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
# Make the xtp_test framework packages importable before the star-imports
# below; each appended directory hosts one group of helpers.
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from ServiceConfig import *
from mainService import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_CYBMM_SZSJ_143(xtp_test_case):
    # Test case YW_CYBMM_SZSJ_143: ChiNext sell order with the
    # best-5-or-cancel price type; expects the order to be rejected.
    def test_YW_CYBMM_SZSJ_143(self):
        title = '交易日五档即成转撤销卖-非最后一次卖为非100的倍数'
        # Expected values for this test case.
        # Possible expected states: initial / not traded / partly traded /
        # fully traded / partial-cancel reported / partly canceled /
        # reported pending cancel / canceled / rejected / cancel rejected /
        # internally canceled.
        # xtp_ID and cancel_xtpID default to 0 and need no change.
        case_goal = {
            '期望状态': '废单',
            'errorID': 11010123,
            'errorMSG': queryOrderErrorMsg(11010123),
            '是否生成报单': '是',
            '是否是撤废': '否',
            'xtp_ID': 0,
            'cancel_xtpID': 0,
        }
        logger.warning(title)
        # Define the order parameters ------------------------------------------
        # Arguments: ticker, market, security type, security status,
        # trading status, side (B buy / S sell), expected state, Api.
        stkparm = QueryStkPriceQty('300130', '2', '2', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters failed, the test case fails.
        if stkparm['返回结果'] is False:
            rs = {
                '用例测试结果': stkparm['返回结果'],
                '测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
            }
            self.assertEqual(rs['用例测试结果'], True)
        else:
            wt_reqs = {
                'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
                'order_client_id':2,
                'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
                'ticker': stkparm['证券代码'],
                'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
                'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
                'price': stkparm['涨停价'],
                'quantity': 88,
                'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
            }
            ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
            CaseParmInsertMysql(case_goal, wt_reqs)
            rs = serviceTest(Api, case_goal, wt_reqs)
            logger.warning('执行结果为' + str(rs['用例测试结果']) + ','
                           + str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
            self.assertEqual(rs['用例测试结果'], True)  # 0
if __name__ == '__main__':
    # Allow running this single case standalone via unittest's CLI runner.
    unittest.main()
| [
"[email protected]"
]
| |
e94b34f7a4b61d2270b468fc4f32388eb0bd1a4b | a8d5cb55b80f4c160323b6c6fbe094c6e6634c75 | /users/migrations/0002_user_date_validated.py | f66d0dab256b23098a3db314f10101d8bcfdba4b | []
| no_license | birkoss/mtg-achievements-api | 00d5d954e3c5bbf919a44ef49bde9feaf3ceee61 | 133806c668f92b4b94d0f731edec5c08041010cc | refs/heads/master | 2023-03-28T18:16:06.077949 | 2021-04-03T13:34:15 | 2021-04-03T13:34:15 | 347,626,595 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | # Generated by Django 3.1.7 on 2021-03-14 13:09
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the nullable ``date_validated`` timestamp to the user model
    # (presumably set once the account has been validated — confirm
    # against the application logic).
    dependencies = [
        ('users', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='user',
            name='date_validated',
            field=models.DateTimeField(null=True),
        ),
    ]
| [
"[email protected]"
]
| |
c432544fd2dcaf1b9c22fc5905cc20ab2b72813e | 88bbf27deb0b2a1b96985c0a94ff0b7a3d012820 | /hq/wsgi.py | 30b1d7a4018f35aa170b75ea1e1d0f1b1cca4721 | []
| no_license | Code-Community99/Hiq-django | e8efb7d63bd4fc0bc8e2af193fdec9aaab0975b0 | af62622648ad88f6e8d94e86a8dc5d6660e3bbe2 | refs/heads/master | 2022-12-14T01:12:45.218318 | 2020-05-18T23:29:35 | 2020-05-18T23:29:35 | 233,811,384 | 2 | 1 | null | 2022-12-08T03:34:53 | 2020-01-14T10:02:55 | JavaScript | UTF-8 | Python | false | false | 163 | py | import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings, then expose the WSGI callable
# that application servers (gunicorn, mod_wsgi, ...) look up.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'hq.settings')
application = get_wsgi_application()
| [
"[email protected]"
]
| |
11e3806d74fcbc6ee3de46136854f7d5e113f04a | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_199/2374.py | daaadf5cd464dc0f520aa0689946e9fa40c58e1b | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,584 | py | global flipcounter
def flip(line,start,k):
    # Invert the k symbols starting at ``start`` in place ('+' <-> '-')
    # and record one flip in the module-level counter.
    global flipcounter
    for t in range(k):
        if(line[start + t]) == '+':
            line[start + t] = '-'
        else:
            line[start + t] = '+'
    flipcounter= flipcounter +1
def myfunc(line,k):
    # Greedy left-to-right pass: flip a k-wide window at every '-' found,
    # then retry on the reversed line.  Returns None on success or
    # "impossible" otherwise; ``flipcounter`` holds the number of flips.
    global flipcounter
    flipcounter = 0;
    pluscounter =0
    for i in range(len(line)-k+1):
        if(line[i] == '+'):
            pluscounter = pluscounter + 1
            continue
        else:
            flip(line,i,k)
    # NOTE(review): pluscounter only covered the first len(line)-k+1
    # positions above, so this early success check looks suspicious.
    if(pluscounter == len(line)):
        return
    pluscounter =0;
    for i in range(len(line)):
        if(line[i] == '+'):
            pluscounter = pluscounter + 1
    if(pluscounter == len(line)):
        return
    # second attempt on the reversed line
    # NOTE(review): no success check after this loop — it always reports
    # "impossible" even if the reversed pass fixed the line; verify.
    line = list(reversed(line))
    for i in range(len(line)-k+1):
        if(line[i] == '+'):
            pluscounter = pluscounter + 1
            continue
        else:
            flip(line,i,k)
    return "impossible"
# Read every case ("<line> <k>") from the Code Jam input and write one
# answer line each: the flip count or IMPOSSIBLE.
myfile = open("A-large.in", "r")
noofcases = myfile.readline();
outfile = open("outputlarge", 'w')
for i in range(int(noofcases)):
    myinput = myfile.readline();
    mylist = myinput.split( );
    returnval = myfunc (list(mylist[0]),int(mylist[1]),)
    if(returnval == "impossible"):
        outfile.write("Case #" + str(i+1) + ": IMPOSSIBLE\n" ) # python will convert \n to os.linesep
    else:
        outfile.write("Case #" + str(i+1) + ": " + str(flipcounter)+"\n" ) # python will convert \n to os.linesep
outfile.close()
| [
"[email protected]"
]
| |
67f81d788c523b73d542f8546f97f8bef336fb9f | 13faa0d553ed6c6a57791db3dfdb2a0580a1695b | /CodeChef/Long/August 2017/MATDW.py | 45224dc97400725d10522949e774031fc7ef9126 | []
| no_license | kautsiitd/Competitive_Programming | ba968a4764ba7b5f2531d03fb9c53dc1621c2d44 | a0d8ae16646d73c346d9ce334e5b5b09bff67f67 | refs/heads/master | 2021-01-17T13:29:52.407558 | 2017-10-01T09:58:23 | 2017-10-01T09:58:23 | 59,496,650 | 0 | 0 | null | 2017-05-20T17:27:18 | 2016-05-23T15:56:55 | HTML | UTF-8 | Python | false | false | 1,639 | py | import sys
def printIt(s):
    # Emit one interactive-protocol line and flush immediately so the
    # judge sees it right away (Python 2 print statement).
    print s
    sys.stdout.flush()
def bestDisk(diskInfo):
    """Sort key: diskInfo[0] per unit of diskInfo[3] as a float.

    Presumably cost per unit of capacity (diskInfo[3] is used as the disk
    capacity elsewhere in this script); lower values rank first.
    """
    return float(diskInfo[0]) / diskInfo[3]
# Interactive CodeChef MATDW driver: each tick, greedily buy the disk with
# the best bestDisk() score when the previous one was used, read one judge
# query and answer it (0 = store, 1 = increment, otherwise = disk erased).
n,h = map(int,raw_input().split())
hardDisksInfo = sorted([map(int,raw_input().split())+[i] for i in range(h)], key=bestDisk)
penalty = input()
numberOfHardDisk = 0
currentHardDisk = -1
userMap = {}
hardDiskMap = {}
exceed = False
hardDiskUsed = True
for _ in range(n):
    # Buying hardDisk
    currentCapacity = 0
    if numberOfHardDisk < 1050 and hardDiskUsed:
        printIt("p b "+str(hardDisksInfo[0][-1]))
        hardDiskUsed = False
        numberOfHardDisk += 1
        currentHardDisk += 1
        currentCapacity = hardDisksInfo[0][3]
    else:
        exceed = True
    # Asking query
    printIt("g")
    q = map(int,raw_input().split())
    qType = q[0]
    if qType == 0:
        userId = q[1]
        userData = q[2]
        if userData < currentCapacity and not(exceed):
            userMap[userId] = currentHardDisk
            hardDiskUsed = True
            hardDiskMap[currentHardDisk] = userId
            hardDiskMap[currentHardDisk-1] = userId
            printIt("p s "+str(currentHardDisk)+" 0")
        else:
            printIt("p s -1 -1")
    elif qType == 1:
        userId = q[1]
        userData = q[2]
        if userId in userMap:
            printIt("p i "+str(userMap[userId])+" "+str(userData))
        else:
            printIt("p i -1 -1")
    else:
        erasedHardDisk = q[1]
        if erasedHardDisk in hardDiskMap:
            erasedUser = hardDiskMap[erasedHardDisk]
            if erasedUser in userMap:
                del userMap[erasedUser]
print "end"
| [
"[email protected]"
]
| |
53cc9c9f7fc41839ec9ba889a34855ff10ac768b | e5a044708032b853f1cdf8906da63502716fd410 | /test/test_payment_tokenization_error_response.py | e86d56dc5ffac418b05c1b6b00de8fdc69fafcad | []
| no_license | GBSEcom/Python | 4b93bab80476051fc99f379f018ac9fa109a8a6a | 5fa37dba8d0c3853686fdc726f863743376060c9 | refs/heads/master | 2021-12-04T12:55:29.605843 | 2021-11-19T22:01:03 | 2021-11-19T22:01:03 | 136,058,345 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,236 | py | # coding: utf-8
"""
Payment Gateway API Specification.
The documentation here is designed to provide all of the technical guidance required to consume and integrate with our APIs for payment processing. To learn more about our APIs please visit https://docs.firstdata.com/org/gateway. # noqa: E501
The version of the OpenAPI document: 21.5.0.20211029.001
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import openapi_client
from openapi_client.models.payment_tokenization_error_response import PaymentTokenizationErrorResponse # noqa: E501
from openapi_client.rest import ApiException
class TestPaymentTokenizationErrorResponse(unittest.TestCase):
    """PaymentTokenizationErrorResponse unit test stubs (auto-generated)."""
    def setUp(self):
        # No fixtures are required for the generated stub.
        pass
    def tearDown(self):
        pass
    def testPaymentTokenizationErrorResponse(self):
        """Test PaymentTokenizationErrorResponse"""
        # FIXME: construct object with mandatory attributes with example values
        # model = openapi_client.models.payment_tokenization_error_response.PaymentTokenizationErrorResponse() # noqa: E501
        pass
if __name__ == '__main__':
    # Run the generated stubs with unittest's CLI runner.
    unittest.main()
| [
"[email protected]"
]
| |
2fd7d116a398cf706f0f19d4f65963b0537f9d24 | 0323561eacf19846e7e293d9cbc0b5e0e2de1d91 | /Step_8/A3C_NETWORK.py | 0203c47f042512a2d9d7c3992d69d133fb3ece7a | [
"Apache-2.0"
]
| permissive | LeeDaeil/Process_A3C | 77cb3760f579fb1d80e191871bf853e27089bff9 | 1876fbe1b928e13b9c8766095b2d13abfda94019 | refs/heads/master | 2020-03-31T15:39:23.567327 | 2019-07-10T09:40:25 | 2019-07-10T09:40:25 | 152,345,573 | 1 | 2 | Apache-2.0 | 2018-12-25T02:34:36 | 2018-10-10T01:39:29 | Python | UTF-8 | Python | false | false | 4,228 | py | from keras.layers import Dense, Input, LSTM
from keras.models import Model
from keras.layers.wrappers import TimeDistributed
from keras.optimizers import RMSprop
from keras import backend as K
from Step_6.Parameter import PARA
class A3C_net_model:
    """Builds the actor/critic Keras models used by the A3C agents.

    The input layout is selected by the global PARA.Model switch:
    'LSTM' expects a (timesteps, features) sequence, 'DNN' a flat vector.
    """
    def __init__(self):
        if PARA.Model == 'LSTM':
            # self.input_shape = (10, 2) # basic LSTM (1, 2, 3) shape
            self.input_shape = (10, 3) # basic LSTM (1, 2, 3) shape
        elif PARA.Model == 'DNN':
            self.input_shape = (3,) # basic DNN (1, 3) shape
        # Number of discrete actions produced by the policy head.
        self.action_size = 3
    def _make_model(self):
        """Return (actor, critic) Keras models sharing one hidden trunk."""
        in_put = Input(shape=self.input_shape)
        if PARA.Model == 'LSTM':
            # Per-timestep dense projection, then two stacked LSTMs; only the
            # final LSTM output (return_sequences=False) feeds both heads.
            hidden_layer = TimeDistributed(Dense(64), input_shape=self.input_shape)(in_put)
            hidden_layer = LSTM(32, return_sequences=True)(hidden_layer)
            hidden_layer = LSTM(16)(hidden_layer)
        elif PARA.Model == 'DNN':
            hidden_layer = Dense(64, activation='relu')(in_put)
            hidden_layer = Dense(32, activation='relu')(hidden_layer)
        # Policy head: softmax over actions. Critic head: scalar state value.
        policy = Dense(self.action_size, activation='softmax')(hidden_layer)
        critic = Dense(1, activation='linear')(hidden_layer)
        actor = Model(inputs=in_put, outputs=policy)
        cric = Model(inputs=in_put, outputs=critic)
        return actor, cric
class A3C_shared_network:
    """Global (shared) actor-critic network plus its Keras training functions."""
    def __init__(self):
        print('Main_net')
        self.A3C_net_model = A3C_net_model()
        self.actor, self.cric = self._make_actor_critic_network()
        # optimizer[0] trains the actor, optimizer[1] the critic.
        self.optimizer = [self._actor_optimizer(), self._critic_optimizer()]
        self.conter = 0
    def _make_actor_critic_network(self):
        # Build the actor/critic networks as follows.
        actor, cric = self.A3C_net_model._make_model()
        actor._make_predict_function()
        cric._make_predict_function()
        if PARA.show_model:
            actor.summary()
            cric.summary()
        return actor, cric
    def _actor_optimizer(self):
        """Return a K.function that performs one policy-gradient update."""
        action = K.placeholder(shape=[None, self.A3C_net_model.action_size])
        advantage = K.placeholder(shape=[None, ])
        policy = self.actor.output
        # Policy cross-entropy loss (weighted by the advantage).
        action_prob = K.sum(action * policy, axis=1)
        cross_entropy = K.log(action_prob + 1e-10) * advantage
        cross_entropy = -K.sum(cross_entropy)
        # Entropy term to keep the policy exploring.
        entropy = K.sum(policy * K.log(policy + 1e-10), axis=1)
        entropy = K.sum(entropy)
        # Sum both terms into the final loss.
        loss = cross_entropy + 0.01 * entropy
        optimizer = RMSprop(lr=2.5e-4, rho=0.99, epsilon=0.01)
        updates = optimizer.get_updates(self.actor.trainable_weights, [], loss)
        train = K.function([self.actor.input, action, advantage], [loss], updates=updates)
        return train
    def _critic_optimizer(self):
        """Return a K.function that performs one value-function update."""
        discount_prediction = K.placeholder(shape=(None,))
        value = self.cric.output
        # Squared error between the discounted return and the predicted value.
        loss = K.mean(K.square(discount_prediction - value))
        optimizer = RMSprop(lr=2.5e-4, rho=0.99, epsilon=0.01)
        updates = optimizer.get_updates(self.cric.trainable_weights, [], loss)
        train = K.function([self.cric.input, discount_prediction], [loss], updates=updates)
        return train
class A3C_local_network:
    """Per-worker copy of the actor/critic, initialised from the shared net."""
    def __init__(self, shared_net_actor, shared_net_cric):
        print('Local_net')
        self.A3C_net_model = A3C_net_model()
        self.local_actor, self.local_cric = self._make_local_actor_critic_network(shared_net_actor, shared_net_cric)
    def _make_local_actor_critic_network(self, shared_net_actor, shared_net_cric):
        """Build fresh models and sync their weights from the shared network."""
        local_actor, local_cric = self.A3C_net_model._make_model()
        local_actor._make_predict_function()
        local_cric._make_predict_function()
        # Start the worker from the current global weights.
        local_cric.set_weights(shared_net_cric.get_weights())
        local_actor.set_weights(shared_net_actor.get_weights())
        if PARA.show_model:
            local_actor.summary()
            local_cric.summary()
return local_actor, local_cric | [
"[email protected]"
]
| |
4a6e24f762958195452038e9fed1de89efdbd298 | c1bd12405d244c5924a4b069286cd9baf2c63895 | /azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/connectivity_hop_py3.py | 1ab3ca4895e26853a60e072edcb9b2dcc90c301e | [
"MIT"
]
| permissive | lmazuel/azure-sdk-for-python | 972708ad5902778004680b142874582a284a8a7c | b40e0e36cc00a82b7f8ca2fa599b1928240c98b5 | refs/heads/master | 2022-08-16T02:32:14.070707 | 2018-03-29T17:16:15 | 2018-03-29T17:16:15 | 21,287,134 | 1 | 3 | MIT | 2019-10-25T15:56:00 | 2014-06-27T19:40:56 | Python | UTF-8 | Python | false | false | 2,122 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConnectivityHop(Model):
    """Information about a hop between the source and the destination.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar type: The type of the hop.
    :vartype type: str
    :ivar id: The ID of the hop.
    :vartype id: str
    :ivar address: The IP address of the hop.
    :vartype address: str
    :ivar resource_id: The ID of the resource corresponding to this hop.
    :vartype resource_id: str
    :ivar next_hop_ids: List of next hop identifiers.
    :vartype next_hop_ids: list[str]
    :ivar issues: List of issues.
    :vartype issues:
     list[~azure.mgmt.network.v2017_08_01.models.ConnectivityIssue]
    """

    # AutoRest-generated: every field is readonly, i.e. server-populated;
    # msrest ignores client-supplied values for these.
    _validation = {
        'type': {'readonly': True},
        'id': {'readonly': True},
        'address': {'readonly': True},
        'resource_id': {'readonly': True},
        'next_hop_ids': {'readonly': True},
        'issues': {'readonly': True},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest types.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'address': {'key': 'address', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'next_hop_ids': {'key': 'nextHopIds', 'type': '[str]'},
        'issues': {'key': 'issues', 'type': '[ConnectivityIssue]'},
    }

    def __init__(self, **kwargs) -> None:
        # All attributes start as None and are filled in by deserialization.
        super(ConnectivityHop, self).__init__(**kwargs)
        self.type = None
        self.id = None
        self.address = None
        self.resource_id = None
        self.next_hop_ids = None
        self.issues = None
| [
"[email protected]"
]
| |
3cd1756d135e7014c2c7cfd9d5f9e2379b777769 | b44ae8c215c7577616ce94bbddda57d46ff46577 | /experiments/sparsity/sameK_20repeats_movielens_100K/gaussian_gaussian_univariate.py | 4a00a17770a6dc4e784f85d4bedd6be94cbc77c7 | []
| no_license | changchunli/BMF_Priors | 06a74d89198b11c0c3ba673a1d4869986cd7bc2d | 15b20537eefd36347ed84617882eeea1c453e162 | refs/heads/master | 2020-03-21T07:50:08.081910 | 2018-06-10T10:22:04 | 2018-06-10T10:22:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,398 | py | '''
Measure sparsity experiment on the MovieLens 100K dataset, with
the All Gaussian model (univariate posterior).
'''
import sys, os
project_location = os.path.dirname(__file__)+"/../../../../"
sys.path.append(project_location)
from BMF_Priors.code.models.bmf_gaussian_gaussian_univariate import BMF_Gaussian_Gaussian_univariate
from BMF_Priors.data.movielens.load_data import load_movielens_100K
from BMF_Priors.experiments.sparsity.sparsity_experiment import sparsity_experiment
import matplotlib.pyplot as plt
''' Run the experiment. '''
# Load the MovieLens 100K rating matrix R and its observation mask M.
R, M = load_movielens_100K()
model_class = BMF_Gaussian_Gaussian_univariate
n_repeats = 20
stratify_rows = False
# Known fractions range from 7% down to 2%; the experiment is driven by
# the complementary unknown fractions.
fractions_known = [0.07, 0.06, 0.05, 0.04, 0.03, 0.02]
fractions_unknown = [1. - v for v in fractions_known]
# Gibbs-sampler settings: K latent factors, 250 iterations with a burn-in
# of 200 and no thinning.
settings = {
    'R': R,
    'M': M,
    'K': 5,
    'hyperparameters': { 'alpha':1., 'beta':1., 'lamb':0.1 },
    'init': 'random',
    'iterations': 250,
    'burn_in': 200,
    'thinning': 1,
}
fout = './results/performances_gaussian_gaussian_univariate.txt'
average_performances, all_performances = sparsity_experiment(
    n_repeats=n_repeats, fractions_unknown=fractions_unknown, stratify_rows=stratify_rows,
    model_class=model_class, settings=settings, fout=fout)
''' Plot the performance. '''
plt.figure()
plt.title("Sparsity performances")
plt.plot(fractions_unknown, average_performances['MSE'])
plt.ylim(0,4) | [
"[email protected]"
]
| |
abc95fb4dae6cd5138c5a60bba257d3f577757ee | 41c64b0495902c111ab627b71a6a5e54a8e12bcf | /test/subfolder2/module2.py | 457f0237bbe47531bf5169f6990b0f8a7550f163 | []
| no_license | ZhangRui111/Rui_utils | 80c335e3fcfa8e7e88accf7af6079ed7b7b30a25 | 5e1a303cf2dab26f2ec092d0953f09354a787079 | refs/heads/master | 2021-10-08T16:03:13.636857 | 2021-10-08T05:01:20 | 2021-10-08T05:01:20 | 160,489,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42 | py | def func2():
print("This is func2()")
| [
"[email protected]"
]
| |
2cb0388f30962d3fce1d5759446ab5384374b8a0 | ff4302db00bc503ab86ae02f170b078e1cda2312 | /daili/tiantian_cart/urls.py | b8ce4472c71db93b77dd44ee8ad49601cd8f9f76 | []
| no_license | pythonIn/dali | 28f3eab4f747fa0fc885d21b650e3111a0a88da2 | f37ed9a4d3e52cf1dabe454cb434abf736eb04bf | refs/heads/master | 2020-03-16T15:48:36.869406 | 2018-06-01T18:43:57 | 2018-06-01T18:43:57 | 132,759,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | from django.conf.urls import url, include
import views
# URL routes for the tiantian_cart app: the cart page plus add / amend /
# delete endpoints. The numeric capture groups are positional args to the
# views -- presumably (sku_id, count); verify against the view signatures.
urlpatterns = [
    url(r'^cart$',views.cart),
    url(r"^add(\d+)_(\d+)$", views.add),
    url(r'^mend(\d+)_(\d+)$', views.mend),
    url(r'^cart_del(\d+)$', views.cart_del)
]
| [
"[email protected]"
]
| |
e1156b9394dfbc82e1105de1b18c7c019647151c | 10920b11a22a20f9a7f63157818327f3c4e41888 | /jibby_opencv/Object Recognition/image.py | 51ebc065790ef203d5025db954510b6b13e3513e | []
| no_license | dsall/computerv | e331b3d025c8cec0119b789107d1fef18d08f02a | 40671d618c31ad9d9b20fc902a218a8e281098bc | refs/heads/master | 2021-09-15T09:33:08.495580 | 2018-05-29T23:41:42 | 2018-05-29T23:41:42 | 135,363,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,968 | py | import urllib.request
import cv2
import numpy as np
import os
def store_raw_images():
    """Download negative-sample images from an ImageNet synset URL list.

    Each URL is fetched into neg/<n>.jpg, converted to grayscale and
    resized to 100x100 (negatives should be larger than the positive
    samples so a positive can be overlaid on them). Failures are printed
    and skipped; pic_num only advances on success.
    """
    neg_images_link = 'http://image-net.org/api/text/imagenet.synset.geturls?wnid=n07942152'
    neg_image_urls = urllib.request.urlopen(neg_images_link).read().decode()
    # Starts at 877 so new downloads continue after previously saved files.
    pic_num = 877
    if not os.path.exists('neg'):
        os.makedirs('neg')
    for i in neg_image_urls.split('\n'):
        try:
            print(i)
            urllib.request.urlretrieve(i, "neg/"+str(pic_num)+".jpg")
            img = cv2.imread("neg/"+str(pic_num)+".jpg",cv2.IMREAD_GRAYSCALE)
            # should be larger than samples / pos pic (so we can place our image on it)
            resized_image = cv2.resize(img, (100, 100))
            cv2.imwrite("neg/"+str(pic_num)+".jpg",resized_image)
            pic_num += 1
        except Exception as e:
            print(str(e))
#store_raw_images()
def find_uglies():
    """Delete images in neg/ that are pixel-identical to any placeholder
    image stored in uglies/ (e.g. "photo unavailable" stubs).

    Improvements over the original: the unused ``match`` flag is removed,
    each template in uglies/ is decoded once instead of once per candidate,
    each candidate is decoded once instead of once per template, and the
    inner loop stops after the file has been deleted. Behaviour stays
    best-effort: errors are printed and the scan continues.
    """
    # Pre-decode the templates; unreadable files decode to None and are skipped.
    ugly_templates = []
    for ugly_name in os.listdir('uglies'):
        template = cv2.imread('uglies/' + str(ugly_name))
        if template is not None:
            ugly_templates.append(template)
    for file_type in ['neg']:
        for img in os.listdir(file_type):
            current_image_path = str(file_type) + '/' + str(img)
            try:
                question = cv2.imread(current_image_path)
                for ugly in ugly_templates:
                    # Identical shape and zero XOR difference => same image.
                    if ugly.shape == question.shape and not(np.bitwise_xor(ugly, question).any()):
                        print('That is one ugly pic! Deleting!')
                        print(current_image_path)
                        os.remove(current_image_path)
                        break  # file is gone; no point checking other templates
            except Exception as e:
                print(str(e))
#find_uglies()
def create_pos_n_neg():
    """Append one 'neg/<filename>' line per negative image to bg.txt,
    the background-description file used by OpenCV cascade training."""
    for folder in ['neg']:
        for image_name in os.listdir(folder):
            if folder != 'neg':
                continue
            entry = folder + '/' + image_name + '\n'
            with open('bg.txt', 'a') as descriptor:
                descriptor.write(entry)
create_pos_n_neg() | [
"[email protected]"
]
| |
63acd57aecf7e88812cea81b5035339c8601159b | 50c2f7d7292bf020bbc70e34b16bebb94eafb208 | /django_minishop/settings.py | 4cfb4aa72a0488dd7a81fefafd7c60494c2a91e4 | []
| no_license | sakkhar/django_minishop | 8178a71562a92ac56c37f81200c52661ced22ebe | 0427c1519ec18013a0846d642e7f780daec8216a | refs/heads/master | 2021-08-11T07:51:21.023070 | 2017-11-13T10:53:50 | 2017-11-13T10:53:50 | 110,529,039 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,203 | py | """
Django settings for django_minishop project.
Generated by 'django-admin startproject' using Django 1.11.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- rotate it and
# load it from the environment before any real deployment.
SECRET_KEY = '4eaa)_ybmk^hs$9)z%u*im(yuchx4mi6xz-=m0l#3(&tof0u8@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty is fine while DEBUG=True; must list served hostnames in production.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'minishop.apps.MinishopConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_minishop.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')]
        ,
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'django_minishop.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
a261fb52ce97eb8401acfc01dd3dd7cc0ab4a979 | a0cbae33d175fdf0299eddc775a1b4b84c0addcf | /orquesta/tests/unit/specs/mistral/test_base_spec.py | 1559a000d680dcef5ba76c42537c582ccfa6a8bf | [
"Apache-2.0"
]
| permissive | batk0/orquesta | 240ff95c76c610c52518ee7d2e3eee11b6594a73 | f03f3f2f3820bf111a9277f4f6c5d6c83a89d004 | refs/heads/master | 2020-04-17T10:48:48.016607 | 2019-01-19T15:40:05 | 2019-01-19T15:40:05 | 166,514,957 | 0 | 0 | Apache-2.0 | 2019-01-19T06:37:39 | 2019-01-19T06:37:39 | null | UTF-8 | Python | false | false | 1,958 | py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from orquesta.specs import loader
from orquesta.specs import mistral
from orquesta.specs.mistral import v2 as mistral_v2
class SpecTest(unittest.TestCase):
    """Checks that the 'mistral' spec module is wired up consistently."""

    def setUp(self):
        super(SpecTest, self).setUp()
        self.spec_module_name = 'mistral'

    def test_get_module(self):
        """The loader resolves the catalog name to the mistral package."""
        self.assertEqual(loader.get_spec_module(self.spec_module_name), mistral)

    def test_get_spec(self):
        """The loaded module exposes the package's WorkflowSpec class."""
        module = loader.get_spec_module(self.spec_module_name)
        self.assertEqual(module.WorkflowSpec, mistral.WorkflowSpec)

    def test_spec_catalog(self):
        """WorkflowSpec reports 'mistral' as its catalog name."""
        module = loader.get_spec_module(self.spec_module_name)
        self.assertEqual(module.WorkflowSpec.get_catalog(), self.spec_module_name)

    def test_spec_version(self):
        """Both the package and the v2 submodule advertise version 2.0."""
        self.assertEqual('2.0', mistral_v2.VERSION)
        self.assertEqual('2.0', mistral.VERSION)

    def test_workflow_spec_imports(self):
        """The package-level WorkflowSpec is the v2 implementation."""
        self.assertEqual(mistral.WorkflowSpec, mistral_v2.workflows.WorkflowSpec)

    def test_task_spec_imports(self):
        """The package-level task specs are the v2 implementations."""
        self.assertEqual(mistral.TaskDefaultsSpec, mistral_v2.tasks.TaskDefaultsSpec)
        self.assertEqual(mistral.TaskSpec, mistral_v2.tasks.TaskSpec)
"[email protected]"
]
| |
df4fecd88d5cd9e582134ca7ea60cbda93a24e83 | 1dc0de033e5f4e2471fb0ecdf55cc955e9287836 | /lines_bars_and_markers/120-绘制条形图.py | 2dacb6031c45e979a4e0de31749212506fa89c36 | []
| no_license | weiyinfu/learnMatplotlib | 5db2337723751a10d5dc6f077c97bef0fb919c0d | 3b173161f96d7b419c1b1be65f09e267141fa385 | refs/heads/master | 2022-08-29T20:46:26.222042 | 2022-08-02T02:22:33 | 2022-08-02T02:22:33 | 147,894,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 695 | py | # 导入绘图模块
import matplotlib.pyplot as plt
# Build the data: GDP of the four municipalities.
GDP = [12406.8, 13908.57, 9386.87, 9143.64]
# Configure fonts so Chinese labels render correctly and minus signs display.
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
# Draw the bar chart.
plt.bar(range(4), GDP, align='center', color='steelblue', alpha=0.8)
# Add the y-axis label.
plt.ylabel('GDP')
# Add the title ("GDP showdown of the four municipalities").
plt.title('四个直辖市GDP大比拼')
# Add the tick labels (Beijing, Shanghai, Tianjin, Chongqing).
plt.xticks(range(4), ['北京市', '上海市', '天津市', '重庆市'])
# Set the y-axis range.
plt.ylim([5000, 15000])
# Annotate each bar with its value.
for x, y in enumerate(GDP):
    plt.text(x, y + 100, '%s' % round(y, 1), ha='center')
# Show the figure.
plt.show()
| [
"[email protected]"
]
| |
58c292c8901934720c90bda9409969ed438bc743 | b1c403ad1211221427dddc80a7f15956da498175 | /0x03-caching/0-basic_cache.py | d6dab650de3d655f0aa8fac8482cca3dc26d9587 | []
| no_license | oumaymabg/holbertonschool-web_back_end | 246dd47b9abdb277d6ef539c9bc38a8f0509554a | dce7ff683d8bce9ad986d72948c9e75ca7b80d2a | refs/heads/master | 2023-09-05T22:27:28.961336 | 2021-11-15T21:05:53 | 2021-11-15T21:05:53 | 389,440,104 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | #!/usr/bin/python3
""" Basic dictionary """
from base_caching import BaseCaching
class BasicCache(BaseCaching):
    """Unbounded caching system: a thin dict wrapper with no eviction."""

    def put(self, key, item):
        """Store item under key; do nothing if either is None.

        Uses explicit ``is not None`` checks so falsy-but-valid values
        (0, '', [], False) are still cached -- the previous
        ``if key and item`` silently dropped them.
        """
        if key is not None and item is not None:
            self.cache_data[key] = item

    def get(self, key):
        """Return the value linked to key, or None if key is None/missing."""
        # dict.get already yields None for a missing (or None) key,
        # so no separate membership test is needed.
        return self.cache_data.get(key)
| [
"[email protected]"
]
| |
5e1ab4fade27d72a59bf02fb6ddccf1dfefb8530 | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.2/tests/regressiontests/views/tests/specials.py | e2a2e4d4bb4e431d00cc394218d62573be10e7f9 | []
| no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.2/tests/regressiontests/views/tests/specials.py | [
"[email protected]"
]
| |
ec5cac4f337e78f90c3b2276496c1e93b970c87c | 14baf43101ef0d805bfe287a1498106f9b89580f | /lol.py | 77acc4b5075751f88bc3a0b7d258b4fe2d8e00c3 | []
| no_license | justinembawomye/python-fun | e10358e1825697d725dd9d11b94bbcc920965800 | edabbff26a39fefe36b9800b784fee438fa1b2c8 | refs/heads/master | 2023-08-27T13:20:56.258556 | 2021-10-28T20:17:49 | 2021-10-28T20:17:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py |
# creating the main function
def main():
    """Entry point: invoke meow() three times."""
    for _ in range(3):
        meow()
def meow():
    """Print the cat catchphrase."""
    greeting = 'Hey cat!! .....Y.O.L.O'
    print(greeting)
main() | [
"[email protected]"
]
| |
c95518e76732e60e2498accc86ec780469776dd4 | 1e7673cf975dbdafd57cf040b3df00bf62da2f2a | /final_PhonoCi2.0/makegif.py | 63954ff3e0d581008d002f990733acea4847b393 | []
| no_license | pondjames007/RWET | 9771058767e45392537f20d701b772d4ec776ff2 | 5fbfc46ab3f98336cfa7bd3c1789097a7f8e16b9 | refs/heads/master | 2021-05-04T00:53:03.212648 | 2018-05-04T20:42:15 | 2018-05-04T20:42:15 | 120,354,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,059 | py | import requests
from bs4 import BeautifulSoup
import os
import sys
import random
from PIL import Image, ImageDraw, ImageFont
import textwrap
def download_file(url, local_filename=None):
    """Stream *url* to disk and return the local path.

    If ``local_filename`` is omitted, the last URL path segment is used.
    Protocol-relative URLs (``//host/...``) are prefixed with ``http:``.
    """
    if local_filename is None:
        local_filename = url.split('/')[-1]
    # if os.path.exists(local_filename):
    #     return local_filename
    if not url.startswith('http'):
        url = 'http:' + url
    # NOTE the stream=True parameter: the body is read in chunks instead of
    # all at once. The context manager guarantees the underlying connection
    # is released even on error (the original leaked streamed responses).
    with requests.get(url, stream=True) as r:
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
    return local_filename
def get_images(query, i):
    """Scrape a Shutterstock search for *query*, download one random result
    to frames/<i>.jpg and caption it with the query text.

    Returns True on success, False when no images were found or the
    download/edit failed.
    """
    url = "https://www.shutterstock.com/search?searchterm=" + query
    html = requests.get(url).text
    soup = BeautifulSoup(html, 'html.parser')
    # Thumbnails live in .img-wrap > img elements on the results page.
    images = soup.select('.img-wrap > img')
    print(len(images))
    if len(images) == 0:
        return False
    else:
        # Pick a random hit so repeated calls give varied frames.
        img_url = random.choice(images).get("src")
        savedname = 'frames/' + str(i) + '.jpg'
        try:
            raw_image = download_file(img_url, savedname)
            print(raw_image)
            edit_image(raw_image, query)
            return True
        except Exception as e:
            print(e)
            return False
def edit_image(imagename, words):
    """Overlay *words*, centered on a translucent band, onto the image at
    *imagename* (resized to 400x400) and save it back in place.

    NOTE(review): the font path is macOS-specific; fails elsewhere.
    """
    image = Image.open(imagename)
    image = image.resize((400,400))
    # print(image.size)
    canvas = ImageDraw.Draw(image, 'RGBA')
    useFont = "/Library/Fonts/Verdana.ttf"
    font = ImageFont.truetype(useFont, 30)
    # lines = textwrap.wrap(words, width=15)
    # y_height = 0
    # for line in lines:
    # Measure the caption, draw a semi-transparent strip across the middle,
    # then center the text on it.
    w, h = canvas.textsize(words, font=font)
    canvas.rectangle([0, (image.size[1]-h)/2, image.size[0], (image.size[1]+h)/2], fill=(0, 0, 0, 30))
    canvas.text(((image.size[0]-w)/2, (image.size[1]-h)/2) , words, font=font, fill=(255,255,255))
    # y_height += h
    out_image_name = imagename
    image.save(out_image_name)
# edit_image("frames/1.jpg", "lololololol") | [
"[email protected]"
]
| |
bf76b641e40e5ae38de03980d78eb5ec4c5cea4e | 1fe8d4133981e53e88abf633046060b56fae883e | /venv/lib/python3.8/site-packages/tensorflow/python/data/__init__.py | fdef948f260e00288a0fd67087f3b7bd58136b7c | []
| no_license | Akira331/flask-cifar10 | 6c49db8485038731ce67d23f0972b9574746c7a7 | 283e7a2867c77d4b6aba7aea9013bf241d35d76c | refs/heads/master | 2023-06-14T16:35:06.384755 | 2021-07-05T14:09:15 | 2021-07-05T14:09:15 | 382,864,970 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:d2c61947afcd5ecdc7b2c49e834ad9feeeed830ece0335e0f29375f7da626b17
size 1506
| [
"[email protected]"
]
| |
f4167843167e81ed9d67f4244251831321f65ce7 | d8a9b88f4087ebfe97b462e589071222e2261e47 | /189. Rotate Array.py | e4112871d60ef56e0896ddb710f0ccab9299cc03 | []
| no_license | rohitpatwa/leetcode | a7a4e8a109ace53a38d613b5f898dd81d4771b1b | f4826763e8f154cac9134d53b154b8299acd39a8 | refs/heads/master | 2021-07-07T12:40:30.424243 | 2021-03-31T00:21:30 | 2021-03-31T00:21:30 | 235,003,084 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 611 | py | # Create 2 parts of the array of size n-k and k. Reverse these two parts individually and then reverse the whole array.
class Solution:
    def reverse(self, nums, start, end):
        """Reverse nums[start:end+1] in place."""
        lo, hi = start, end
        while lo < hi:
            nums[lo], nums[hi] = nums[hi], nums[lo]
            lo, hi = lo + 1, hi - 1

    def rotate(self, nums: List[int], k: int) -> None:
        """Rotate nums right by k positions in place (returns nothing).

        Reverse the whole array, then reverse the first k and the
        remaining n-k elements separately.
        """
        size = len(nums)
        shift = k % size
        self.reverse(nums, 0, size - 1)
        self.reverse(nums, 0, shift - 1)
        self.reverse(nums, shift, size - 1)
"[email protected]"
]
| |
909cc6f34cdde8891f13255f83a6b221376d03b9 | 27e18001bd40f6fe5b9f675130e359147ce3519a | /20.py | e2bcaa0ba31c60aa60680cbbbe23bd056d298725 | []
| no_license | jsomers/project-euler | 6934a5d4eb2c116b08face308a010ddb74e0c123 | 61cc4cd7978deeed9d071f678c786f991e05d8a7 | refs/heads/master | 2021-01-01T05:39:39.568380 | 2014-08-21T04:05:10 | 2014-08-21T04:05:10 | 10,680,061 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | # Give the sum of the digits in 100!
def mul(x, y):
    """Return the product of x and y (binary operator for reduce)."""
    product = x * y
    return product
def fact(n):
    """Return n! computed iteratively.

    Fixes two issues with the original list+reduce version: ``reduce`` is
    not a builtin on Python 3, and ``fact(0)`` crashed because ``reduce``
    was called on an empty sequence with no initial value. 0! and 1! both
    correctly return 1.
    """
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result
# Python 2 print statement: display 100! so its digits can be pasted below.
print fact(100)
# Hard-coded decimal expansion of 100! (copied from the output above).
string = '93326215443944152681699238856266700490715968264381621468592963895217599993229915608941463976156518286253697920827223758251185210916864000000000000000000000000'
numbers = []
# Convert each digit character to an int so the digits can be summed.
for l in string:
    numbers.append(int(l))
print sum(numbers) | [
"[email protected]"
]
| |
b1c6f8d3d478e51f7e01b828c0c84f8246095111 | 25b9a5040f6458877f7eb992af9d5fc11769258d | /honahlee/profile_template/appdata/config.py | 66d3ed9938809b0a01f37fa0b4c8cc5aeebb7b25 | [
"BSD-3-Clause"
]
| permissive | volundmush/honahlee | 612399adf21ffaf2dca15e8de8853085a4308939 | da536fb3813621b624f6b3d8b79fe5e7cb73dbc8 | refs/heads/master | 2023-05-31T02:08:04.231953 | 2021-05-12T14:31:37 | 2021-05-12T14:31:37 | 270,115,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84 | py | from honahlee.core import LauncherConfig
class Launcher(LauncherConfig):
    """Profile-template launcher configuration.

    Inherits all behavior unchanged from honahlee's LauncherConfig;
    exists so the profile template can customize it later.
    """
    pass
| [
"[email protected]"
]
| |
7a7281413d8c98326a014edb808a6254dad9ba1e | 919e74f05976d9ea5f28d5dcf0a3e9311a4d22b2 | /conans/test/functional/toolchains/gnu/autotools/test_apple_toolchain.py | c9217eac77c70998abec202599110b89c924298c | [
"MIT"
]
| permissive | thorsten-klein/conan | 1801b021a66a89fc7d83e32100a6a44e98d4e567 | 7cf8f384b00ba5842886e39b2039963fc939b00e | refs/heads/develop | 2023-09-01T12:04:28.975538 | 2023-07-26T10:55:02 | 2023-07-26T10:55:02 | 150,574,910 | 0 | 0 | MIT | 2023-08-22T14:45:06 | 2018-09-27T11:16:48 | Python | UTF-8 | Python | false | false | 4,435 | py | import os
import textwrap
import platform
import pytest
from conans.client.tools.apple import to_apple_arch
from conans.test.assets.autotools import gen_makefile
from conans.test.assets.sources import gen_function_h, gen_function_cpp
from conans.test.utils.tools import TestClient
makefile = gen_makefile(apps=["app"], libs=["hello"])
conanfile_py = textwrap.dedent("""
from conans import ConanFile, tools
from conan.tools.gnu import Autotools
class App(ConanFile):
settings = "os", "arch", "compiler", "build_type"
options = {"shared": [True, False], "fPIC": [True, False]}
default_options = {"shared": False, "fPIC": True}
generators = "AutotoolsToolchain"
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def build(self):
env_build = Autotools(self)
env_build.make()
""")
@pytest.mark.skipif(platform.system() != "Darwin", reason="Only OSX")
@pytest.mark.parametrize("config", [("x86_64", "Macos", "10.14"),
                                    ("armv8", "iOS", "10.0"),
                                    ("armv7", "iOS", "10.0"),
                                    ("x86", "iOS", "10.0"),
                                    ("x86_64", "iOS", "10.0"),
                                    ("armv8", "Macos", "10.14") # M1
                                    ])
def test_makefile_arch(config):
    """Cross-build the Makefile project for each Apple (arch, os, version)
    and check with `lipo -info` that both the static lib and the app were
    actually produced for the requested architecture."""
    arch, os_, os_version = config
    # Host profile describing the Apple target; build profile stays default.
    profile = textwrap.dedent("""
        include(default)
        [settings]
        os = {os}
        os.version = {os_version}
        arch = {arch}
        """).format(os=os_, arch=arch, os_version=os_version)
    t = TestClient()
    hello_h = gen_function_h(name="hello")
    hello_cpp = gen_function_cpp(name="hello")
    main_cpp = gen_function_cpp(name="main", includes=["hello"], calls=["hello"])
    t.save({"Makefile": makefile,
            "hello.h": hello_h,
            "hello.cpp": hello_cpp,
            "app.cpp": main_cpp,
            "conanfile.py": conanfile_py,
            "profile": profile})
    t.run("install . --profile:host=profile --profile:build=default")
    t.run("build .")
    libhello = os.path.join(t.current_folder, "libhello.a")
    app = os.path.join(t.current_folder, "app")
    assert os.path.isfile(libhello)
    assert os.path.isfile(app)
    # lipo reports the architectures baked into each Mach-O binary.
    expected_arch = to_apple_arch(arch)
    t.run_command('lipo -info "%s"' % libhello)
    assert "architecture: %s" % expected_arch in t.out
    t.run_command('lipo -info "%s"' % app)
    assert "architecture: %s" % expected_arch in t.out
@pytest.mark.skipif(platform.system() != "Darwin", reason="Only OSX")
@pytest.mark.parametrize("arch", ["x86_64", "armv8"])
def test_catalyst(arch):
    """Build for Mac Catalyst (iOS app running on macOS) and verify via the
    TARGET_OS_MACCATALYST preprocessor macro that the toolchain really
    targeted Catalyst; on x86_64 the binary is also executed."""
    # Catalyst target: macOS SDK with the catalyst subsystem and iOS 13.1 API.
    profile = textwrap.dedent("""
        include(default)
        [settings]
        os = Macos
        os.version = 13.0
        os.sdk = macosx
        os.subsystem = catalyst
        os.subsystem.ios_version = 13.1
        arch = {arch}
        """).format(arch=arch)
    t = TestClient()
    hello_h = gen_function_h(name="hello")
    hello_cpp = gen_function_cpp(name="hello")
    # The #error branch makes the compile itself fail if the Catalyst
    # macro is not defined, so a successful build is already a check.
    main_cpp = textwrap.dedent("""
        #include "hello.h"
        #include <TargetConditionals.h>
        #include <iostream>
        int main()
        {
        #if TARGET_OS_MACCATALYST
            std::cout << "running catalyst " << __IPHONE_OS_VERSION_MIN_REQUIRED << std::endl;
        #else
        #error "not building for Apple Catalyst"
        #endif
        }
        """)
    t.save({"Makefile": makefile,
            "hello.h": hello_h,
            "hello.cpp": hello_cpp,
            "app.cpp": main_cpp,
            "conanfile.py": conanfile_py,
            "profile": profile})
    t.run("install . --profile:host=profile --profile:build=default")
    t.run("build .")
    libhello = os.path.join(t.current_folder, "libhello.a")
    app = os.path.join(t.current_folder, "app")
    assert os.path.isfile(libhello)
    assert os.path.isfile(app)
    expected_arch = to_apple_arch(arch)
    t.run_command('lipo -info "%s"' % libhello)
    assert "architecture: %s" % expected_arch in t.out
    t.run_command('lipo -info "%s"' % app)
    assert "architecture: %s" % expected_arch in t.out
    # Only the native arch can actually run on the test machine.
    if arch == "x86_64":
        t.run_command('"%s"' % app)
        assert "running catalyst 130100" in t.out
| [
"[email protected]"
]
| |
d7dc9495b41fb3a5a17dc56d9095b637106ff390 | 3aab11d445011f4a0de1376886dd3899aba44e68 | /opps/db/models/fields/__init__.py | e3f983a62a7edb7d7eff888dad217bb39a40c82c | [
"MIT"
]
| permissive | opps/opps | 4ba6a08ac5aa31be48c245b2e8f9d9a714a5e473 | 5552924fa34ea40d24febeac5046bd59f62e0e4f | refs/heads/master | 2023-08-24T21:09:23.489540 | 2023-05-22T20:07:33 | 2023-05-22T20:07:33 | 7,712,379 | 166 | 76 | MIT | 2022-01-06T22:53:23 | 2013-01-20T03:56:15 | Python | UTF-8 | Python | false | false | 90 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .jsonf import JSONField, JSONCharField
| [
"[email protected]"
]
| |
f149b88f3ad064c0087ca3f578642aea3bc5c4ed | c4af67db4c523d20f2d55aef90ba77db1fb53c38 | /Archetypes/tests/test_construction.py | 5e11425921baaa08285bc4c4a19308a46ef93964 | [
"BSD-3-Clause"
]
| permissive | dtgit/dtedu | e59b16612d7d9ea064026bf80a44657082ef45a3 | d787885fe7ed0de6f9e40e9b05d852a0e9d60677 | refs/heads/master | 2020-04-06T05:22:50.025074 | 2009-04-08T20:13:20 | 2009-04-08T20:13:20 | 171,351 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 779 | py | import unittest
from Products.Archetypes.tests.atsitetestcase import ATSiteTestCase
class FactoryTest(ATSiteTestCase):
    """Checks that invokeFactory-created content reports its portal type."""
    def testSimplePortalType(self):
        self.folder.invokeFactory(id="dummy", type_name="SimpleType")
        self.assertEqual(self.folder.dummy.getPtype(), "Simple Type")
    # The XXX prefix keeps this test from being collected (disabled).
    def XXXtestCopiedFTIPortalType(self):
        # A known bug where `default_method` doesn't have the correct
        # portal type available. For a discussion, see
        # https://dev.plone.org/plone/ticket/6734
        self.folder.invokeFactory(id="dummy", type_name="MySimpleType")
        self.assertEqual(self.folder.dummy.getPtype(), "My Simple Type")
def test_suite():
    """Build the unittest suite for the Zope test runner."""
    suite = unittest.TestSuite()
    # unittest.makeSuite is deprecated (removed in Python 3.13); the
    # TestLoader API is the supported equivalent and exists back to 2.7.
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(FactoryTest))
    return suite
| [
"[email protected]"
]
| |
560b1cb677c6f79c4127fb6c7433e86b2f01752a | 6320fef2ea7376c2b35f97f1a5af004e90f09098 | /1-2주차 실습(복습)/venv/Lib/site-packages/bleach/utils.py | 1163bd3e732017cd74fb3402832980ffcaa22fa2 | []
| no_license | Dplo1514/ploaistudy | 7aa08d7f71653748a9e32dcc09ee8f6cec0aaed9 | e35e42b1e5f0c90cc1e2a59993a1ef73d8872d0c | refs/heads/master | 2023-09-03T00:45:55.601651 | 2021-10-24T12:19:38 | 2021-10-24T12:19:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 634 | py | from collections import OrderedDict
def _attr_key(attr):
"""Returns appropriate key for sorting attribute names
Attribute names are a tuple of ``(namespace, name)`` where namespace can be
``None`` or a string. These can't be compared in Python 3, so we conver the
``None`` to an empty string.
"""
key = (attr[0][0] or ""), attr[0][1]
return key
def alphabetize_attributes(attrs):
"""Takes a dict of attributes (or None) and returns them alphabetized"""
if not attrs:
return attrs
return OrderedDict([(k, v) for k, v in sorted(attrs.items(), key=_attr_key)])
| [
"[email protected]"
]
| |
b3caaba13db1b1269a90e25161e1dace19a05ba5 | f7d0c32b8d29dcff788d439c2b7051734afbbfc6 | /meiduo1/apps/user/utils.py | 3f1156f45390d1d7a949de53e45f59034719129f | [
"MIT"
]
| permissive | woobrain/nginx-uwsgi-web | dcf5159ba3f3332108c2d351ef3dac0cc504ade7 | 5b3ca1fba8205c2c0a2b91d951f812f1c30e12ae | refs/heads/master | 2022-12-22T21:08:00.758841 | 2019-11-13T12:31:20 | 2019-11-13T12:31:20 | 221,447,696 | 0 | 0 | MIT | 2022-12-11T19:51:54 | 2019-11-13T11:52:19 | JavaScript | UTF-8 | Python | false | false | 664 | py |
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from meiduo1 import settings
def generic_active_email_url(id, email):
    """Build a signed, time-limited (1 hour) e-mail activation URL.

    The user id and email are serialized and signed with the project
    SECRET_KEY so the link cannot be forged, and expires after 3600 s.
    """
    # Instantiate the signing serializer (1 hour expiry)
    s = Serializer(secret_key=settings.SECRET_KEY,expires_in=3600)
    # Assemble the payload to sign
    data = {
        "id":id,
        "email":email
    }
    # Serialize and sign the payload
    serect_data = s.dumps(data)
    return 'http://www.meiduo.site:8000/emailsactive/?token=%s' % serect_data.decode()
def check_active_email_url(token_id):
    """Verify a signed activation token.

    Returns the decoded payload dict on success, or None when the token
    is invalid or expired.
    """
    s = Serializer(secret_key=settings.SECRET_KEY,expires_in=3600)
    try:
        token_id = s.loads(token_id)
    # NOTE(review): bare except swallows every error, not just bad/expired
    # signatures; consider catching itsdangerous.BadData explicitly.
    except:
        return None
    return token_id
| [
"[email protected]"
]
| |
f8eec1a3c5a02a18416e5676f7d9647bf0fd199e | 35ff4e124ea73cd2630ddf25dfe019b4b4e3f5d6 | /200_NumberOfIslands/200_NumberOfIslands_1.py | 88b82c01ab92a45cbcc835225055f670d848f1c6 | []
| no_license | H-Cong/LeetCode | 0a2084a4845b5d7fac67c89bd72a2adf49f90c3d | d00993a88c6b34fcd79d0a6580fde5c523a2741d | refs/heads/master | 2023-03-19T15:22:00.971461 | 2021-03-11T00:33:00 | 2021-03-11T00:33:00 | 303,265,129 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,435 | py | class Solution:
def numIslands(self, grid: List[List[str]]) -> int:
'''
BFS
'''
row = len(grid)
col = len(grid[0])
count = 0
for i in range(row):
for j in range(col):
if grid[i][j] == "1":
count += 1
self.bfs(grid, i, j)
return count
def bfs(self, grid, i, j):
row = len(grid)
col = len(grid[0])
queue = collections.deque()
queue.append((i,j))
while queue:
x, y = queue.pop()
directions = [(0, 1), (0, -1), (1, 0), (-1, 0)]
for d in directions:
a = x + d[0]
b = y + d[1]
if 0 <= a < row and 0 <= b < col and grid[a][b] == "1":
queue.appendleft((a, b))
grid[a][b] = "0"
# TC: O(m*n)
# visit all elements of grid once
# SC: O(min(m, n))
# min(m,n) as the space taken by queue
# It is worthy to mention that when you iterate a matrix from any corener,
# the length of queue is smaller or equal to min(m,n) as queue is a FIFO
# data structure. However, if you star from middle of the matrix, the length
# of queue can certainly be longer than min(m,n). It can be smh like
# min(m, n)*3+b I think, but the dominating factor still is min(m,n).
| [
"[email protected]"
]
| |
46737b82df4a556766e1e833e4e748b0474f551c | 2136701f48ad131084b331039d864f85988cf451 | /spider/work/media_huadong/somenew/spiders/zhongguojiangxiwang.py | a15ce477adf94d9e61ca22cfee9bc5f1b8e94dbf | []
| no_license | cuiyulin77/other | 9d374a47d482f1c3f9ef0f3ac4429487643b04b9 | c00cafaf7607452966fa523c4d0b04edb7f153e6 | refs/heads/master | 2020-05-18T04:24:26.095929 | 2019-04-30T06:37:53 | 2019-04-30T06:37:53 | 184,169,488 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,443 | py | # -*- coding: utf-8 -*-
import scrapy
from somenew.items import SomenewItem
import hashlib
import datetime
class DezhouxinwenSpider(scrapy.Spider):
    """Scrapes article pages from jxnews.com.cn (China Jiangxi Network)
    into SomenewItem records."""
    # China Jiangxi Network
    name = 'zhongguojiangxiwang'
    allowed_domains = ['jxnews.com.cn']
    start_urls = ['http://www.jxnews.com.cn/']
    # start_urls = ['http://fc.jxnews.com.cn/system/2019/03/14/017417180.shtml']
    custom_settings = {'DOWNLOAD_DELAY': 0.8}
    def parse(self, response):
        """Collect candidate article links from the landing page and follow
        each, skipping anchors, forum (bbs) and some section links."""
        # print(response.url)
        xp = '//*[@id="Cen_Ri_R"]/div/table/tr/td/a/@href|//*[@id="PageOneRighLine"]/div[16]/ul/table/tr/td/a/@href|/html/body/div[32]/div[4]/div[1]/div/table/tr/td/@href|/html/body/div[32]/div[4]/div[2]/div/ul/li/a/@href|//*[@id="jsbbs"]/div/ul/li/a/@href|//div/div/ul/li/a/@href'
        res = response.xpath(xp).extract()
        print(res)
        for url in res:
            if '#' not in url and 'jiangxi' not in url and 'wenz'not in url and 'bbs' not in url:
                yield scrapy.Request(url, callback=self.get_detail)
    def get_detail(self,response):
        """Parse one article page into a SomenewItem; yields nothing when
        either the title or the body text is missing."""
        print(response.url,'响应的url')
        item = SomenewItem()
        item['title'] = response.xpath("//h1/a/text()|//div[@class=\"biaoti\"]/*/text()|//h1/text()|//div[@class=\"BiaoTi\"]/text()").extract_first()
        item['time'] = response.xpath("/html/body/div[5]/div[1]/div[1]/h5/text()|//*[@id=\"pubtime_baidu\"]/text()|//div[@class=\"xbt\"]/span[1]/text()|//div[@class=\"text1t\"]/h5/text()").extract_first()
        item['content'] = response.xpath('//*[@id="content"]/p/text()|//p/text()').extract()
        item['come_from'] ='中国江西网'
        # Join paragraphs and strip ideographic/non-breaking spaces and newlines.
        item['content']= ''.join(item['content']).replace('\u3000', u' ').replace(u'\xa0', u' ').\
            replace('\n', '').replace( '\u2002', '').replace( '\r', '').replace( '\r\n', '').strip()
        if item['content'] and item['title']:
            item['url'] = response.url
            # md5(url) serves as a stable de-duplication key for articles.
            m = hashlib.md5()
            m.update(str(item['url']).encode('utf8'))
            item['article_id'] = m.hexdigest()
            item['media'] = '中国江西网'
            item['create_time'] = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
            item['comm_num'] = "0"
            item['fav_num'] = '0'
            item['read_num'] = '0'
            item['env_num'] = '0'
            item['media_type'] = '网媒'
            item['addr_province'] = '江西'
            print(item)
            yield item
| [
"[email protected]"
]
| |
121ee74bb7d8df6823b01dac06b94931eb309d1a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03043/s556042691.py | 4e6f577175da4a7f55616baa41faeb4da62a38fa | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | n,k=map(int, input().split())
ans=0
import math
for i in range(1,n+1):
sikou=math.ceil(math.log((k/i),2))
if sikou<0:
sikou=0
ans+=(1/n)*0.5**sikou
print(ans) | [
"[email protected]"
]
| |
afd23e3eaa95e013ad89a9c3b5e91281ab263ef3 | 3147bb3457617842c24210c9c7e5a0d07fc548a6 | /guardianCrossword.py | 3e8b9aa7b1dcf1ebf5c095422550004651768d48 | []
| no_license | rashley2712/tools | 4378a062276e4c5c52563842d7a077e3a5e84f42 | e361abce89bfd4cf9e11d0e575c5d12c2c530e13 | refs/heads/master | 2023-01-10T12:52:50.744164 | 2022-12-21T14:19:01 | 2022-12-21T14:19:01 | 40,479,653 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,040 | py | #!/usr/bin/env python3
import argparse
import datetime
import datetime
import sys
import urllib.request
import os
# Google Cloud libraries
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from apiclient.http import MediaFileUpload
def getDriveListing():
    """Print the first 10 files visible to the Drive v3 API.

    NOTE(review): relies on a global name `service` that is never defined
    at module scope in this file -- calling this as-is raises NameError.
    Presumably a leftover from before `service` was built locally in
    uploadToDrive(); confirm and pass the service object in.
    """
    # Call the Drive v3 API
    results = service.files().list(
        pageSize=10, fields="nextPageToken, files(id, name)").execute()
    items = results.get('files', [])
    if not items:
        print('No files found.')
    else:
        print('Recent 10 files:')
        for item in items:
            print(u'{0} ({1})'.format(item['name'], item['id']))
def checkForExistingFile(service, name):
    """Return True if Drive holds a non-trashed file called `name`.

    `service` is a googleapiclient Drive v3 service object. All matches
    (including trashed ones) are printed; trashed matches are ignored
    for the return value.
    """
    print("Searching for a file called:", name)
    # NOTE: `name` is spliced into the query string; a name containing a
    # single quote would break (or alter) the Drive query.
    results = service.files().list(
        q="name = '" + name + "'", spaces="drive",
        fields="files(id, name, parents, trashed)").execute()
    matches = results.get('files', [])
    if matches:
        print("file found!")
        for f in matches:
            print(f.get('name'), f.get('id'), f.get('parents'), f.get('trashed'))
            if not f.get('trashed'):
                return True
    return False
def uploadToDrive(crosswordFile):
    """Upload `crosswordFile` (a PDF path) to a fixed Google Drive folder.

    Reads cached OAuth2 credentials from the global `cloudtokenFile`
    (set in the __main__ block). Returns early -- without raising -- if
    the token file is absent or a non-trashed file with the same name
    already exists in Drive.
    """
    SCOPES = ['https://www.googleapis.com/auth/drive']
    creds = None
    if os.path.exists(cloudtokenFile):
        creds = Credentials.from_authorized_user_file(cloudtokenFile, SCOPES)
    else:
        print("No token.json file. Exiting")
        return
    service = build('drive', 'v3', credentials=creds)
    # Use only the basename of the local path as the Drive file name.
    name = crosswordFile.split('/')[-1]
    if checkForExistingFile(service, name): return
    # Hard-coded Drive folder id of the destination crosswords folder.
    fileMetadata = { 'name': name, "parents" : ["1Kwy3lson-RWAntRkxO67NV-Mo6l8jYzw"]}
    media = MediaFileUpload(crosswordFile, mimetype='application/pdf')
    results = service.files().create(body=fileMetadata, media_body = media).execute()
    print("File Name: %s ID: %s"%(name, results.get('id')))
def getWriteCrossword(fullURL, outputFilename):
    """Download the PDF at `fullURL` and save it to `outputFilename`.

    Exits the process (sys.exit) if the request fails or the server does
    not return a PDF. Returns `outputFilename` on success.
    """
    try:
        response = urllib.request.urlopen(fullURL)
    except urllib.error.HTTPError as e:
        print("We got an error of:", e.code)
        sys.exit()
    except urllib.error.URLError as e:
        print(e.reason)
        sys.exit()
    # Parse the MIME type properly instead of slicing the raw header text;
    # the old find('Content-Type')-based slicing broke when the header
    # carried parameters (e.g. "; charset=...") or different casing.
    contentType = response.headers.get_content_type()
    if contentType != 'application/pdf':
        print("The server did not return a PDF object.")
        sys.exit()
    pdfData = response.read()
    print("Fetched the data ok ... Writing to %s" % outputFilename)
    # `with` guarantees the file handle is closed even if the write fails.
    with open(outputFilename, 'wb') as outputFile:
        outputFile.write(pdfData)
    print("Written the file to:", outputFilename)
    return outputFilename
if __name__ == "__main__":
    # Lookup tables for pretty-printing dates and building archive folders.
    days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
    months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
    testBaseURL = "http://www.devicecloud.co.uk/crosswords/"
    baseURL = "http://crosswords-static.guim.co.uk/"
    homeDIR = os.getenv("HOME")
    crosswordPath = homeDIR + "/Crosswords/"
    namePrefix = "gdn.quick."
    nameSuffix = ".pdf"
    cloudtokenFile = homeDIR + "/bin/token.json"
    parser = argparse.ArgumentParser(description='Downloads the Guardian Quick crosswords and saves (and archives) them to a Dropbox folder.')
    parser.add_argument('--date', default = 'today', type=str, help='Date for the crossword (default: today)')
    parser.add_argument('-g', '--get', action='store_true', help='\'Get\' directive. Asks the script to get the crossword.')
    parser.add_argument('--test', action='store_true', help='Use the test URL instead of the real Guardian URL.')
    parser.add_argument('--archive', action = 'store_true', help='Clean up the Drive directory.')
    parser.add_argument('-u', '--upload', action = 'store_true', help='Upload the crossword to a Google Drive folder.')
    arg = parser.parse_args()
    print(arg)
    if arg.test:
        baseURL = testBaseURL
    # Resolve the requested date (defaults to today); reject bad formats.
    todaysDate = datetime.datetime.now()
    requestedDate = todaysDate
    if arg.date!='today':
        try:
            inputDate = datetime.datetime.strptime(arg.date, '%Y-%m-%d')
            requestedDate = inputDate
        except ValueError:
            print("I am not able to understand the date input, please use YYYY-MM-DD")
            sys.exit()
    todayYear = todaysDate.year
    todayMonth = todaysDate.month
    todayDay = todaysDate.day
    todayDayOfWeek = todaysDate.weekday()
    requestedYear = requestedDate.year
    requestedDay = requestedDate.day
    requestedMonth = requestedDate.month
    requestedDayOfWeek = requestedDate.weekday()
    dayDifference = todaysDate - requestedDate
    print("Today is: %d-%02d-%02d %s"%(todayYear, todayMonth, todayDay, days[todayDayOfWeek]))
    print("You have asked for: %d-%02d-%02d %s"%(requestedYear, requestedMonth, requestedDay, days[requestedDayOfWeek]))
    if dayDifference.days<0:
        print("Your requested date is in the future, no crossword yet.")
        sys.exit()
    if dayDifference.days>0:
        print('Your date was %d days ago'%dayDifference.days)
    # weekday() == 6 is Sunday: the Guardian publishes no Sunday quick crossword.
    if requestedDayOfWeek == 6:
        print("You are requesting a crossword for a Sunday. Try the Observer.")
        sys.exit()
    dateString = "%d%02d%02d"%(requestedYear, requestedMonth, requestedDay)
    fullURL = baseURL + namePrefix + dateString + nameSuffix
    print("Ready to fetch: ", fullURL)
    outputFilename = crosswordPath + namePrefix + dateString + nameSuffix
    if (arg.get):
        crosswordFile = getWriteCrossword(fullURL, outputFilename)
    else:
        print("You did not specify the 'get' directive, so not really fetching the crossword.")
    # NOTE(review): `crosswordFile` is only bound when --get was given;
    # running with --upload alone raises NameError here.
    if (arg.upload):
        uploadToDrive(crosswordFile)
    if (arg.archive):
        # Move crosswords older than 7 days into YYYY-MonthName subfolders.
        files = os.listdir(crosswordPath)
        crosswordFilenames = []
        dates = []
        for f in files:
            if f.find('gdn.quick.')!=-1:
                crosswordFilenames.append(f)
                # Filename layout is gdn.quick.YYYYMMDD.pdf -> chars 10:18.
                dateString = f[10:18]
                dates.append(dateString)
        print("Crosswords found in root folder...")
        print(crosswordFilenames)
        daysOld = []
        for d in dates:
            date = datetime.datetime.strptime(d, '%Y%m%d')
            # NOTE(review): this rebinds `days`, shadowing the weekday-name
            # list defined above (harmless here, but fragile).
            days = (todaysDate - date).days
            daysOld.append(days)
        oldCrosswords = []
        oldCrosswordDates = []
        for index, f in enumerate(crosswordFilenames):
            if daysOld[index] > 7:
                oldCrosswords.append(f)
                oldCrosswordDates.append(dates[index])
        print("Crosswords older than 7 days...")
        print(oldCrosswords)
        for index, filename in enumerate(oldCrosswords):
            date = datetime.datetime.strptime(oldCrosswordDates[index], '%Y%m%d')
            print(filename,date)
            month = date.month
            monthString = months[month-1]
            year = date.year
            print(year, monthString)
            directory = str(year) + "-" + monthString
            # NOTE(review): `dropboxPath` is never defined anywhere in this
            # file -- the --archive branch raises NameError. Presumably it
            # should be `crosswordPath` after the Dropbox -> Drive migration;
            # confirm before fixing.
            fullDirectory = dropboxPath + "/" + directory
            if not os.path.exists(fullDirectory):
                print("Creating the directory: " + fullDirectory)
                os.mkdir(fullDirectory)
            oldFilename = dropboxPath + "/" + filename
            newFilename = dropboxPath + "/" + directory + "/" + filename
            print(oldFilename, newFilename)
            os.rename(oldFilename, newFilename)
    print('Completed successfully')
| [
"[email protected]"
]
| |
2c064571b166c16d2e2162eb550f4acddc9755a2 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/artificial/transf_BoxCox/trend_Lag1Trend/cycle_0/ar_12/test_artificial_128_BoxCox_Lag1Trend_0_12_100.py | 69be51d0ac616d981af16e76ed8ffa0a59aeca1f | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 263 | py | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 0, transform = "BoxCox", sigma = 0.0, exog_count = 100, ar_order = 12); | [
"[email protected]"
]
| |
5cbd9b759b68b1c777c5a8035820707a318f8074 | dd449ad8388847779b265f49f2339c9681376c60 | /rl-lapan-book/chap6_dqn_pong/model.py | 12c593bc41d537299d8fb3ee85ce5990e5c10f0d | []
| no_license | whoji/training-ground | 478d76a8c274050eb910b28729ca1d1cdb47eae9 | b107cc47c4a04bb8868c410ab207bacab5a86e4c | refs/heads/master | 2020-05-16T16:13:26.788156 | 2019-12-04T01:56:01 | 2019-12-04T01:56:01 | 183,154,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,301 | py | import torch
import torch.nn as nn
import numpy as np
class DQN(nn.Module):
    """Convolutional Q-network: a conv feature extractor followed by a
    fully connected head that emits one Q-value per action."""

    def __init__(self, input_shape, n_actions):
        """``input_shape`` is CHW, e.g. (4, 84, 84)."""
        super(DQN, self).__init__()
        self.conv = nn.Sequential(
            nn.Conv2d(input_shape[0], 32, kernel_size=8, stride=4),
            nn.ReLU(),
            nn.Conv2d(32, 64, kernel_size=4, stride=2),
            nn.ReLU(),
            nn.Conv2d(64, 128, kernel_size=3, stride=1),
            nn.ReLU(),
        )
        flat_features = self._get_conv_out(input_shape)
        self.fc = nn.Sequential(
            nn.Linear(flat_features, 512),
            nn.ReLU(),
            nn.Linear(512, n_actions),
        )

    def _get_conv_out(self, shape):
        """Number of features self.conv yields for one CHW input
        (e.g. 128 * 7 * 7 = 6272 for an 84x84 input)."""
        probe = self.conv(torch.zeros(1, *shape))
        return int(np.prod(probe.size()))

    def forward(self, x):
        """Map a BCHW batch to a (B, n_actions) tensor of Q-values."""
        features = self.conv(x)
        return self.fc(features.view(features.size(0), -1))
if __name__ == '__main__':
    # Smoke check: build a network for 100x100 inputs and print its layers.
    m = DQN((4,100,100), 5)
    print(m)
| [
"[email protected]"
]
| |
e1ac6ff44e858e2564178ff72f096851718b1ac2 | 7f8b266b1b175f62a5d9608a0028f2265f294233 | /guild/commands/operations.py | 0e51d3f6f25d165f93eb42a4dc169732d4f1fe81 | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
]
| permissive | jukiewiczm/guildai | 7d1cd4e324e1f2bd70a90156e254e9e4d00c6e4c | 478cc29cb102a8bd0bed693ce9626fe4949257a2 | refs/heads/master | 2020-10-01T22:47:39.595549 | 2019-12-12T15:38:06 | 2019-12-12T15:38:06 | 227,639,644 | 0 | 0 | Apache-2.0 | 2019-12-12T15:37:39 | 2019-12-12T15:37:38 | null | UTF-8 | Python | false | false | 1,875 | py | # Copyright 2017-2019 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import click
from guild import click_util
@click.command(name="operations, ops")
@click.argument("filters", metavar="[FILTER]...", required=False, nargs=-1)
@click.option(
    "-a", "--all", is_flag=True,
    help="Show all operations including those designated as private.")
@click.option(
    "-i", "--installed", is_flag=True,
    help=(
        "Include operations installed from packages when running "
        "command from a project directory."))
@click.option("-v", "--verbose", help="Show operation details.", is_flag=True)
@click_util.use_args
def operations(args):
    """Show model operations.
    If the current directory is a project directory (i.e. contains a
    Guild file), the command shows operations defined for the
    project. Otherwise, the command shows operations defined in
    installed packages.
    Note that operations defined in packages are always available to
    run, even when running within a project directory. To always show
    installed operations, use the `--installed` option.
    Use one or more `FILTER` arguments to show only operations with
    names or models that match the specified values.
    """
    # Imported lazily so the CLI stays fast to start; the heavy lifting
    # lives in operations_impl.
    from . import operations_impl
    operations_impl.main(args)
| [
"[email protected]"
]
| |
c612b7d7805ae1cdcd762fbafa0515f4f0c61507 | 7d8f344949755664850cf50c070e0442fecc3e89 | /glynt/apps/company/admin.py | 3c3aae49ccc3c059c5fa6793e2f9b1c147bc59ba | []
| no_license | rosscdh/glynt | 7ac24c74c3ddeee2f6af584f54f548c662a13f18 | a6a083704f300ed2f9fa4cd4e666d07199f52d1e | refs/heads/master | 2020-04-15T00:33:04.930549 | 2016-06-14T08:37:30 | 2016-06-14T08:37:30 | 4,831,016 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py | # -*- coding: UTF-8 -*-
from django.contrib import admin
from models import Company
# Expose Company in the Django admin (register accepts a list of models).
admin.site.register([Company])
| [
"[email protected]"
]
| |
e43043403f1e2a603fa3fac290b06c7c435e8f11 | fa0ee3daeed8edb10e98b6772fa39923243d53c5 | /algorithms/hard_all_pairs_that_sum_to_number.py | 56976bc27cc4888ec8cdbc2a9df5029dab6ab0f5 | [
"Apache-2.0"
]
| permissive | ppinko/python_exercises | 8a93664b6a8fc75d1088d67cd8e5f5ba3f39364a | d4ef2ddb28834ef49ac8060ce16f6b1446b6713e | refs/heads/master | 2022-07-21T13:39:39.694515 | 2022-07-06T18:46:49 | 2022-07-06T18:46:49 | 254,176,710 | 1 | 0 | null | 2020-04-08T19:13:45 | 2020-04-08T19:05:39 | Python | UTF-8 | Python | false | false | 836 | py | """
https://edabit.com/challenge/8LZdBwmpBiLJ5Sobt
"""
def all_pairs(lst: list, num: int) -> list:
    """Return every pair [a, b] (a <= b) drawn from `lst` whose sum equals
    `num`, ordered by the smaller element. Sorts `lst` in place."""
    lst.sort()
    pairs = []
    for left in range(len(lst)):
        for right in range(left + 1, len(lst)):
            if lst[left] + lst[right] == num:
                pairs.append([lst[left], lst[right]])
    return pairs
# Edabit challenge examples (plus duplicate-handling cases) as smoke tests.
assert all_pairs([2, 4, 5, 3], 7) == [[2, 5], [3, 4]]
assert all_pairs([5, 3, 9, 2, 1], 3) == [[1, 2]]
assert all_pairs([4, 5, 1, 3, 6, 8], 9) == [[1, 8], [3, 6], [4, 5]]
assert all_pairs([5, 2], 14) == []
assert all_pairs([5, 5, 2], 14) == []
assert all_pairs([8, 7, 7, 2, 4, 6], 14) == [[6, 8], [7, 7]]
assert all_pairs([8, 7, 2, 4, 6], 14) == [[6, 8]]
assert all_pairs([1, 3, 5, 4, 0], 4) == [[0, 4], [1, 3]]
assert all_pairs([1, 3, 5, 4, 0, 2], 4) == [[0, 4], [1, 3]]
assert all_pairs([1, 3, 5, 4, 0, 2, 2], 4) == [[0, 4], [1, 3], [2, 2]]
print('Success')
"[email protected]"
]
| |
699cfc5f42f72a4fd6018d7cf3cb617c55697fac | 953fc3064e82231d1c5b7fb3be6563113e3c1483 | /examples/dfa/authentication/create_dfa_client_without_yaml.py | 3cc547e12209647629f680aa25f4f657827a493f | [
"Apache-2.0"
]
| permissive | hshore29/googleads-python-lib | 08a3dc093b76b16c7b84026360e89793a1a5e4c9 | 14de75b8400aa4c1c0920d8edda6bb6e46b858cc | refs/heads/master | 2021-01-15T12:50:43.278667 | 2014-03-20T14:47:01 | 2014-03-20T14:47:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,108 | py | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Initializes a DfaClient without using yaml-cached credentials.
While our LoadFromStorage method provides a useful shortcut to instantiate a
client if you regularly use just one set of credentials, production applications
may need to swap out users. This example shows you how to create an OAuth 2.0
client and a DfaClient without relying on a yaml file.
"""
__author__ = 'Joseph DiLallo'
from googleads import dfa
from googleads import oauth2
# OAuth 2.0 credential information. In a real application, you'd probably be
# pulling these values from a credential storage.
# Replace every INSERT_*_HERE placeholder before running this example.
CLIENT_ID = 'INSERT_CLIENT_ID_HERE'
CLIENT_SECRET = 'INSERT_CLIENT_SECRET_HERE'
REFRESH_TOKEN = 'INSERT_REFRESH_TOKEN_HERE'
# DFA API information.
USER_PROFILE_NAME = 'INSERT_USER_PROFILE_NAME_HERE'
APPLICATION_NAME = 'INSERT_APPLICATION_NAME_HERE'
def main(client_id, client_secret, refresh_token, user_profile_name,
         application_name):
  """Build a DfaClient from raw OAuth2 credentials and list campaigns.

  Demonstrates instantiating the client without relying on yaml-cached
  credentials; prints each campaign found for the user profile.
  """
  oauth2_client = oauth2.GoogleRefreshTokenClient(
      client_id, client_secret, refresh_token)
  dfa_client = dfa.DfaClient(user_profile_name, oauth2_client, application_name)
  results = dfa_client.GetService('CampaignService').getCampaignsByCriteria({})
  if results['records']:
    for campaign in results['records']:
      print ('Campaign with name \'%s\' and ID \'%s\' was found.'
             % (campaign['name'], campaign['id']))
if __name__ == '__main__':
  # Run the example using the placeholder constants defined above.
  main(CLIENT_ID, CLIENT_SECRET, REFRESH_TOKEN, USER_PROFILE_NAME,
       APPLICATION_NAME)
| [
"[email protected]"
]
| |
0bac18c144a8f3e3cda2c0e7f688d90bd8534da6 | 8189650954de2c4cc78f67cc60300b51cd4bccfa | /gpu_frites/benchmark.py | d313c6c783d23d1bffc538deebc002a5a98daf1e | []
| no_license | EtienneCmb/gpu_frites | 7a890cb9de9e7b309233d770f82dafa864a453f6 | b9325c4b1d995a849fb716837f830b1b73d45198 | refs/heads/main | 2023-01-21T01:03:45.556346 | 2020-12-04T14:52:22 | 2020-12-04T14:52:22 | 317,906,684 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,916 | py | """Benchmarking CPU and GPU codes."""
import xarray as xr
from mne.utils import ProgressBar
from time import time as tst
# profiling function
def tmt(method, n_loops=100):
    """Return a wrapper around `method` that reports its mean runtime.

    The wrapper performs one untimed dry run (to absorb warm-up costs
    such as kernel/JIT compilation), then calls `method` `n_loops` times
    and returns the average elapsed time per call in seconds. The return
    value of `method` itself is discarded.
    """
    from functools import wraps  # local import: keeps module deps unchanged

    @wraps(method)  # preserve the timed function's name/doc for reporting
    def timed(*args, **kw):
        # dry run (warm-up, not timed)
        method(*args, **kw)
        # timing run
        start = tst()
        for _ in range(n_loops):
            method(*args, **kw)
        return (tst() - start) / n_loops
    return timed
###############################################################################
###############################################################################
# I(C; C)
###############################################################################
###############################################################################
def test_mi_1d_gg_equals():
    """Check the GPU Gaussian-Gaussian MI against the CPU reference.

    Bug fix: the original imported ``mi_model_1d_gpu_gd`` (the
    Gaussian/discrete model MI) as the GPU counterpart of ``mi_1d_gg``;
    the matching GPU implementation is ``mi_1d_gpu_gg`` (both inputs are
    continuous here, as the random `y` arrays show).
    """
    import numpy as np
    import cupy as cp
    from frites.core import mi_1d_gg
    from gpu_frites.core import mi_1d_gpu_gg
    x_dims = [(1, 20), (10, 100), (10, 100), (1, 100)]
    y_dims = [(1, 20), (10, 100), (1, 100), (10, 100)]
    for x_dim, y_dim in zip(x_dims, y_dims):
        x = np.random.rand(*x_dim)
        y = np.random.rand(*y_dim)
        # mi on cpu
        mi_cpu = mi_1d_gg(x, y)
        # mi on gpu
        mi_gpu = cp.asnumpy(mi_1d_gpu_gg(cp.asarray(x), cp.asarray(y)))
        # testing equality
        np.testing.assert_array_almost_equal(mi_cpu, mi_gpu)
def test_mi_gg_timing(target='cpu', ndims='1d', n_loops=100, n_trials=600):
    """Time I(C; C) (Gaussian-Gaussian MI) over a grid of (#mv, #times).

    Returns an xarray.DataArray of mean runtimes (seconds) with dims
    ('mv', 'times'); provenance (method, target, ndims, ...) is stored
    in ``.attrs``.
    """
    # get cpu / gpu resources
    import xfrites
    import numpy as np
    _, cp = xfrites.utils.get_cupy(target=target)
    # Select the implementation matching (target, ndims).
    if (target == 'cpu') and (ndims == '1d'):
        from frites.core import mi_1d_gg
        fcn = mi_1d_gg
    elif (target == 'cpu') and (ndims == 'nd'):
        from frites.core import mi_nd_gg
        fcn = mi_nd_gg
    elif (target == 'gpu') and (ndims == '1d'):
        from gpu_frites.core import mi_1d_gpu_gg
        fcn = mi_1d_gpu_gg
    elif (target == 'gpu') and (ndims == 'nd'):
        from gpu_frites.core import mi_nd_gpu_gg
        fcn = mi_nd_gpu_gg
    mesg = (f"Profiling I(C; C) (fcn={fcn.__name__}; target={target}; "
            f"ndims={ndims})")
    n_times = np.arange(1500, 4000, 100)
    n_mv = np.arange(1, 20, 1)
    # generate the data (allocated once at the largest size, then sliced)
    x = cp.random.rand(int(n_times[-1]), int(n_mv[-1]), n_trials)
    y = cp.random.rand(int(n_times[-1]), int(n_mv[-1]), n_trials)
    # function to time
    def _time_loop(a, b):
        if ndims == '1d':
            for n_t in range(a.shape[0]):
                fcn(a[n_t, ...], b[n_t, ...])
        elif ndims == 'nd':
            fcn(a, b, mvaxis=-2, traxis=-1, shape_checking=False)
    fcn_tmt = tmt(_time_loop, n_loops=n_loops)
    pbar = ProgressBar(range(int(len(n_times) * len(n_mv))), mesg=mesg)
    esti = xr.DataArray(np.zeros((len(n_mv), len(n_times))),
                        dims=('mv', 'times'), coords=(n_mv, n_times))
    for n_m in range(len(n_mv)):
        for n_t in range(len(n_times)):
            # NOTE(review): the time slice uses the loop index (0:n_t + 1),
            # i.e. at most len(n_times)=25 time points -- not the
            # n_times[n_t] sizes the coordinates suggest; presumably it
            # should be 0:n_times[n_t]. Confirm intent before fixing.
            esti[n_m, n_t] = fcn_tmt(
                x[0:n_t + 1, 0:n_m + 1, :], y[0:n_t + 1, 0:n_m + 1, :])
            pbar.update_with_increment_value(1)
    esti.attrs['method'] = fcn.__name__
    esti.attrs['target'] = target
    esti.attrs['ndims'] = ndims
    esti.attrs['n_loops'] = n_loops
    esti.attrs['n_trials'] = n_trials
    return esti
###############################################################################
###############################################################################
# I(C; D)
###############################################################################
###############################################################################
def test_mi_gd_timing(target='cpu', ndims='1d', n_loops=100, n_trials=600):
    """Time I(C; D) (Gaussian/discrete model MI) over a grid of (#mv, #times).

    Returns an xarray.DataArray of mean runtimes (seconds) with dims
    ('mv', 'times'); provenance is stored in ``.attrs``.
    """
    # get cpu / gpu resources
    import xfrites
    import numpy as np
    _, cp = xfrites.utils.get_cupy(target=target)
    # Select the implementation matching (target, ndims).
    if (target == 'cpu') and (ndims == '1d'):
        from frites.core import mi_model_1d_gd
        fcn = mi_model_1d_gd
    elif (target == 'cpu') and (ndims == 'nd'):
        from frites.core import mi_model_nd_gd
        fcn = mi_model_nd_gd
    elif (target == 'gpu') and (ndims == '1d'):
        from gpu_frites.core import mi_model_1d_gpu_gd
        fcn = mi_model_1d_gpu_gd
    elif (target == 'gpu') and (ndims == 'nd'):
        from gpu_frites.core import mi_model_nd_gpu_gd
        fcn = mi_model_nd_gpu_gd
    mesg = (f"Profiling I(C; D) (fcn={fcn.__name__}; target={target}; "
            f"ndims={ndims})")
    n_times = np.arange(1500, 4000, 100)
    n_mv = np.arange(1, 20, 1)
    # generate the data (y is a discrete regressor with 3 classes)
    x = cp.random.rand(int(n_times[-1]), int(n_mv[-1]), n_trials)
    y = cp.random.randint(0, 3, size=(n_trials,))
    # function to time
    def _time_loop(a, b):
        if ndims == '1d':
            for n_t in range(a.shape[0]):
                fcn(a[n_t, ...], b)
        elif ndims == 'nd':
            fcn(a, b, mvaxis=-2, traxis=-1, shape_checking=False)
    fcn_tmt = tmt(_time_loop, n_loops=n_loops)
    pbar = ProgressBar(range(int(len(n_times) * len(n_mv))), mesg=mesg)
    esti = xr.DataArray(np.zeros((len(n_mv), len(n_times))),
                        dims=('mv', 'times'), coords=(n_mv, n_times))
    for n_m in range(len(n_mv)):
        for n_t in range(len(n_times)):
            # NOTE(review): same slicing issue as test_mi_gg_timing -- the
            # time slice 0:n_t + 1 does not match the n_times coordinates;
            # presumably should be 0:n_times[n_t]. Confirm intent.
            esti[n_m, n_t] = fcn_tmt(x[0:n_t + 1, 0:n_m + 1, :], y)
            pbar.update_with_increment_value(1)
    esti.attrs['method'] = fcn.__name__
    esti.attrs['target'] = target
    esti.attrs['ndims'] = ndims
    esti.attrs['n_loops'] = n_loops
    esti.attrs['n_trials'] = n_trials
    return esti
###############################################################################
###############################################################################
# I(C; D)
###############################################################################
###############################################################################
def test_mi_ggg_timing(target='cpu', ndims='1d', n_loops=100, n_trials=600):
    """Time I(C; C | C) (Gaussian conditional MI) over a grid of (#mv, #times).

    Returns an xarray.DataArray of mean runtimes (seconds) with dims
    ('mv', 'times'); provenance is stored in ``.attrs``.
    """
    # get cpu / gpu resources
    import xfrites
    import numpy as np
    _, cp = xfrites.utils.get_cupy(target=target)
    # Select the implementation matching (target, ndims).
    if (target == 'cpu') and (ndims == '1d'):
        from frites.core import cmi_1d_ggg
        fcn = cmi_1d_ggg
    elif (target == 'cpu') and (ndims == 'nd'):
        from frites.core import cmi_nd_ggg
        fcn = cmi_nd_ggg
    elif (target == 'gpu') and (ndims == '1d'):
        from gpu_frites.core import cmi_1d_gpu_ggg
        fcn = cmi_1d_gpu_ggg
    elif (target == 'gpu') and (ndims == 'nd'):
        from gpu_frites.core import cmi_nd_gpu_ggg
        fcn = cmi_nd_gpu_ggg
    mesg = (f"Profiling I(C; C | C) (fcn={fcn.__name__}; target={target}; "
            f"ndims={ndims})")
    n_times = np.arange(1500, 4000, 100)
    n_mv = np.arange(1, 20, 1)
    # generate the data (x, y and conditioning variable z)
    x = cp.random.rand(int(n_times[-1]), int(n_mv[-1]), n_trials)
    y = cp.random.rand(int(n_times[-1]), int(n_mv[-1]), n_trials)
    z = cp.random.rand(int(n_times[-1]), int(n_mv[-1]), n_trials)
    # function to time
    def _time_loop(a, b, c):
        if ndims == '1d':
            for n_t in range(a.shape[0]):
                fcn(a[n_t, ...], b[n_t, ...], c[n_t, ...])
        elif ndims == 'nd':
            fcn(a, b, c, mvaxis=-2, traxis=-1, shape_checking=False)
    fcn_tmt = tmt(_time_loop, n_loops=n_loops)
    pbar = ProgressBar(range(int(len(n_times) * len(n_mv))), mesg=mesg)
    esti = xr.DataArray(np.zeros((len(n_mv), len(n_times))),
                        dims=('mv', 'times'), coords=(n_mv, n_times))
    for n_m in range(len(n_mv)):
        for n_t in range(len(n_times)):
            # NOTE(review): same slicing issue as test_mi_gg_timing -- the
            # time slice 0:n_t + 1 does not match the n_times coordinates;
            # presumably should be 0:n_times[n_t]. Confirm intent.
            esti[n_m, n_t] = fcn_tmt(
                x[0:n_t + 1, 0:n_m + 1, :], y[0:n_t + 1, 0:n_m + 1, :],
                z[0:n_t + 1, 0:n_m + 1, :])
            pbar.update_with_increment_value(1)
    esti.attrs['method'] = fcn.__name__
    esti.attrs['target'] = target
    esti.attrs['ndims'] = ndims
    esti.attrs['n_loops'] = n_loops
    esti.attrs['n_trials'] = n_trials
    return esti
def run_benchmark(save_to=None, n_trials=600, n_loops=100):
    """Run every MI timing benchmark on cpu+gpu, in 1d and nd variants.

    Returns an xarray.Dataset keyed '<estimator>_<target>_<ndims>'. When
    `save_to` is a directory path, the dataset is also written there as a
    timestamped NetCDF file.
    """
    bmk = {}
    kw = dict(n_loops=n_loops, n_trials=n_trials)
    for target in ['cpu', 'gpu']:
        for ndim in ['1d', 'nd']:
            kw_b ={'target': target, 'ndims': ndim, **kw}
            bmk[f'gg_{target}_{ndim}'] = test_mi_gg_timing(**kw_b)
            bmk[f'gd_{target}_{ndim}'] = test_mi_gd_timing(**kw_b)
            bmk[f'ggg_{target}_{ndim}'] = test_mi_ggg_timing(**kw_b)
    # final xarray conversion
    bmk = xr.Dataset(bmk)
    if isinstance(save_to, str):
        from datetime import datetime
        import os
        now = datetime.now()
        # File name pattern: DD_MM_YYYY_HHh_MMmin_SSs.nc
        dt_string = now.strftime("%d_%m_%Y_%Hh_%Mmin_%Ss.nc")
        save_as = os.path.join(save_to, dt_string)
        bmk.to_netcdf(save_as)
    return bmk
| [
"[email protected]"
]
| |
c02e1231cabab17604214697637c98b264de8add | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_396/ch54_2019_09_23_14_18_10_668876.py | 7c95795017287e26da8a69a6e6f64707dd17edcc | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | def junta_nome_sobrenome(x, y):
i = 0
lis = []
while i < len(x):
lis.append(x[i] + " " + y[i])
return lis | [
"[email protected]"
]
| |
0c3fd2bef7b4cd1a6f18d706cdf63c1576257b8d | 6c8a590bac5d39d95034127f0f47a84e71d298a7 | /game_element.py | e3dcefbe2fc5a99445c3f617772db566b601c6ac | []
| no_license | UO-CIS211/FiveTwelve | 3f57c40d401588f89370676679dd03f5ce35f42e | eca5d58e4c09189b873a6ce04fe109b6ac711a2e | refs/heads/master | 2022-01-12T18:58:30.185373 | 2022-01-11T23:11:09 | 2022-01-11T23:11:09 | 117,500,226 | 1 | 13 | null | 2020-10-01T17:16:05 | 2018-01-15T05:16:11 | Python | UTF-8 | Python | false | false | 2,087 | py | """
Board game element: Relates the model component
of a grid game to the view component.
Neither the game logic (in the model component)
nor the display logic (n the view component) is
defined here; this is the notification logic
for sending events from the model component
to the view component.
The 'model' component will inherit from the
GameListener class and generate EventKind events.
"""
from enum import Enum
class EventKind(Enum):
"""All the kinds of events that we may notify listeners of"""
tile_created = 1
tile_updated = 2
tile_removed = 3
class GameEvent(object):
"""An event that may need to be depicted
"""
def __init__(self, kind: EventKind, tile: "Tile"):
self.kind = kind
self.tile = tile
def __repr__(self):
return f"GameEvent({self.kind}, {self.tile})"
class GameListener(object):
"""Abstract base class for objects that listen to
game events in a model-view-controller pattern.
Each listener must implement a 'notify' method.
"""
def notify(self, event: GameEvent):
raise NotImplementedError("Game Listener classes must implement 'notify'")
# -------------------------------------------
class GameElement(object):
"""Base class for game elements, especially to support
depiction through Model-View-Controller.
"""
def __init__(self):
"""Each game element can have zero or more listeners.
Listeners are view components that react to notifications.
"""
self._listeners = []
def add_listener(self, listener: GameListener):
self._listeners.append(listener)
def notify_all(self, event: GameEvent):
"""Instead of handling graphics in the model component,
we notify view components of each significant event and let
the view component decide how to adjust the graphical view.
When additional information must be packaged with an event,
it goes in the optional 'data' parameter.
"""
for listener in self._listeners:
listener.notify(event)
| [
"[email protected]"
]
| |
116f54ce54861411c14fc2fd70b69e2b5b4e04c4 | 6d4f60b000d3b00561c439c5faceaa9931e20a9d | /fast_arrow/resources/option_order.py | de9ab50509c11638544e980be5ecac9f567d2ab9 | [
"MIT"
]
| permissive | Jiacli/fast_arrow | 25485c6712e3e2f5e8dea7de7cc40e932d372381 | beb95fc402fca12670a6c39cdb4abe524937d321 | refs/heads/master | 2020-03-28T03:12:59.549434 | 2018-09-06T02:45:37 | 2018-09-06T02:45:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 906 | py | from fast_arrow import util
from fast_arrow.resources.option import Option
class OptionOrder(object):
@classmethod
def all(cls, client):
"""
fetch all option positions
"""
url = 'https://api.robinhood.com/options/orders/'
data = client.get(url)
results = data["results"]
while data["next"]:
data = get(data["next"], token)
results.extend(data["results"])
return results
@classmethod
def humanize_numbers(cls, option_orders):
results = []
for oo in option_orders:
keys_to_humanize = ["processed_premium"]
coef = (1.0 if oo["direction"] == "credit" else -1.0)
for k in keys_to_humanize:
if oo[k] == None:
continue
oo[k] = float(oo[k]) * coef
results.append(oo)
return results
| [
"[email protected]"
]
| |
d041fcbf0b0deb2cb6fe89389f6891c487588d7a | 8d402df39c18eba7e1c86c762f205c944357c5df | /www/src/Lib/test/test_email/test_message.py | 4c754bf40fc300826bf2fbbdd206c3b94f8e8477 | [
"BSD-3-Clause"
]
| permissive | brython-dev/brython | 87cc023e25550dec9ce459ba68774189f33712b6 | b33958bff0e8c7a280babc30232dc389a2500a7a | refs/heads/master | 2023-09-04T04:49:29.156209 | 2023-09-01T06:36:08 | 2023-09-01T06:36:08 | 24,046,239 | 6,569 | 625 | BSD-3-Clause | 2023-07-05T06:13:32 | 2014-09-15T06:58:21 | Python | UTF-8 | Python | false | false | 34,896 | py | import unittest
import textwrap
from email import policy, message_from_string
from email.message import EmailMessage, MIMEPart
from test.test_email import TestEmailBase, parameterize
# Helper.
def first(iterable):
return next(filter(lambda x: x is not None, iterable), None)
class Test(TestEmailBase):
policy = policy.default
def test_error_on_setitem_if_max_count_exceeded(self):
m = self._str_msg("")
m['To'] = 'abc@xyz'
with self.assertRaises(ValueError):
m['To'] = 'xyz@abc'
def test_rfc2043_auto_decoded_and_emailmessage_used(self):
m = message_from_string(textwrap.dedent("""\
Subject: Ayons asperges pour le =?utf-8?q?d=C3=A9jeuner?=
From: =?utf-8?q?Pep=C3=A9?= Le Pew <[email protected]>
To: "Penelope Pussycat" <"[email protected]">
MIME-Version: 1.0
Content-Type: text/plain; charset="utf-8"
sample text
"""), policy=policy.default)
self.assertEqual(m['subject'], "Ayons asperges pour le déjeuner")
self.assertEqual(m['from'], "Pepé Le Pew <[email protected]>")
self.assertIsInstance(m, EmailMessage)
@parameterize
class TestEmailMessageBase:
policy = policy.default
# The first argument is a triple (related, html, plain) of indices into the
# list returned by 'walk' called on a Message constructed from the third.
# The indices indicate which part should match the corresponding part-type
# when passed to get_body (ie: the "first" part of that type in the
# message). The second argument is a list of indices into the 'walk' list
# of the attachments that should be returned by a call to
# 'iter_attachments'. The third argument is a list of indices into 'walk'
# that should be returned by a call to 'iter_parts'. Note that the first
# item returned by 'walk' is the Message itself.
message_params = {
'empty_message': (
(None, None, 0),
(),
(),
""),
'non_mime_plain': (
(None, None, 0),
(),
(),
textwrap.dedent("""\
To: [email protected]
simple text body
""")),
'mime_non_text': (
(None, None, None),
(),
(),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: image/jpg
bogus body.
""")),
'plain_html_alternative': (
(None, 2, 1),
(),
(1, 2),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/alternative; boundary="==="
preamble
--===
Content-Type: text/plain
simple body
--===
Content-Type: text/html
<p>simple body</p>
--===--
""")),
'plain_html_mixed': (
(None, 2, 1),
(),
(1, 2),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="==="
preamble
--===
Content-Type: text/plain
simple body
--===
Content-Type: text/html
<p>simple body</p>
--===--
""")),
'plain_html_attachment_mixed': (
(None, None, 1),
(2,),
(1, 2),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="==="
--===
Content-Type: text/plain
simple body
--===
Content-Type: text/html
Content-Disposition: attachment
<p>simple body</p>
--===--
""")),
'html_text_attachment_mixed': (
(None, 2, None),
(1,),
(1, 2),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="==="
--===
Content-Type: text/plain
Content-Disposition: AtTaChment
simple body
--===
Content-Type: text/html
<p>simple body</p>
--===--
""")),
'html_text_attachment_inline_mixed': (
(None, 2, 1),
(),
(1, 2),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="==="
--===
Content-Type: text/plain
Content-Disposition: InLine
simple body
--===
Content-Type: text/html
Content-Disposition: inline
<p>simple body</p>
--===--
""")),
# RFC 2387
'related': (
(0, 1, None),
(2,),
(1, 2),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/related; boundary="==="; type=text/html
--===
Content-Type: text/html
<p>simple body</p>
--===
Content-Type: image/jpg
Content-ID: <image1>
bogus data
--===--
""")),
# This message structure will probably never be seen in the wild, but
# it proves we distinguish between text parts based on 'start'. The
# content would not, of course, actually work :)
'related_with_start': (
(0, 2, None),
(1,),
(1, 2),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/related; boundary="==="; type=text/html;
start="<body>"
--===
Content-Type: text/html
Content-ID: <include>
useless text
--===
Content-Type: text/html
Content-ID: <body>
<p>simple body</p>
<!--#include file="<include>"-->
--===--
""")),
'mixed_alternative_plain_related': (
(3, 4, 2),
(6, 7),
(1, 6, 7),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="==="
--===
Content-Type: multipart/alternative; boundary="+++"
--+++
Content-Type: text/plain
simple body
--+++
Content-Type: multipart/related; boundary="___"
--___
Content-Type: text/html
<p>simple body</p>
--___
Content-Type: image/jpg
Content-ID: <image1@cid>
bogus jpg body
--___--
--+++--
--===
Content-Type: image/jpg
Content-Disposition: attachment
bogus jpg body
--===
Content-Type: image/jpg
Content-Disposition: AttacHmenT
another bogus jpg body
--===--
""")),
# This structure suggested by Stephen J. Turnbull...may not exist/be
# supported in the wild, but we want to support it.
'mixed_related_alternative_plain_html': (
(1, 4, 3),
(6, 7),
(1, 6, 7),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="==="
--===
Content-Type: multipart/related; boundary="+++"
--+++
Content-Type: multipart/alternative; boundary="___"
--___
Content-Type: text/plain
simple body
--___
Content-Type: text/html
<p>simple body</p>
--___--
--+++
Content-Type: image/jpg
Content-ID: <image1@cid>
bogus jpg body
--+++--
--===
Content-Type: image/jpg
Content-Disposition: attachment
bogus jpg body
--===
Content-Type: image/jpg
Content-Disposition: attachment
another bogus jpg body
--===--
""")),
# Same thing, but proving we only look at the root part, which is the
# first one if there isn't any start parameter. That is, this is a
# broken related.
'mixed_related_alternative_plain_html_wrong_order': (
(1, None, None),
(6, 7),
(1, 6, 7),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="==="
--===
Content-Type: multipart/related; boundary="+++"
--+++
Content-Type: image/jpg
Content-ID: <image1@cid>
bogus jpg body
--+++
Content-Type: multipart/alternative; boundary="___"
--___
Content-Type: text/plain
simple body
--___
Content-Type: text/html
<p>simple body</p>
--___--
--+++--
--===
Content-Type: image/jpg
Content-Disposition: attachment
bogus jpg body
--===
Content-Type: image/jpg
Content-Disposition: attachment
another bogus jpg body
--===--
""")),
'message_rfc822': (
(None, None, None),
(),
(),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: message/rfc822
To: [email protected]
From: [email protected]
this is a message body.
""")),
'mixed_text_message_rfc822': (
(None, None, 1),
(2,),
(1, 2),
textwrap.dedent("""\
To: [email protected]
MIME-Version: 1.0
Content-Type: multipart/mixed; boundary="==="
--===
Content-Type: text/plain
Your message has bounced, sir.
--===
Content-Type: message/rfc822
To: [email protected]
From: [email protected]
this is a message body.
--===--
""")),
}
def message_as_get_body(self, body_parts, attachments, parts, msg):
m = self._str_msg(msg)
allparts = list(m.walk())
expected = [None if n is None else allparts[n] for n in body_parts]
related = 0; html = 1; plain = 2
self.assertEqual(m.get_body(), first(expected))
self.assertEqual(m.get_body(preferencelist=(
'related', 'html', 'plain')),
first(expected))
self.assertEqual(m.get_body(preferencelist=('related', 'html')),
first(expected[related:html+1]))
self.assertEqual(m.get_body(preferencelist=('related', 'plain')),
first([expected[related], expected[plain]]))
self.assertEqual(m.get_body(preferencelist=('html', 'plain')),
first(expected[html:plain+1]))
self.assertEqual(m.get_body(preferencelist=['related']),
expected[related])
self.assertEqual(m.get_body(preferencelist=['html']), expected[html])
self.assertEqual(m.get_body(preferencelist=['plain']), expected[plain])
self.assertEqual(m.get_body(preferencelist=('plain', 'html')),
first(expected[plain:html-1:-1]))
self.assertEqual(m.get_body(preferencelist=('plain', 'related')),
first([expected[plain], expected[related]]))
self.assertEqual(m.get_body(preferencelist=('html', 'related')),
first(expected[html::-1]))
self.assertEqual(m.get_body(preferencelist=('plain', 'html', 'related')),
first(expected[::-1]))
self.assertEqual(m.get_body(preferencelist=('html', 'plain', 'related')),
first([expected[html],
expected[plain],
expected[related]]))
def message_as_iter_attachment(self, body_parts, attachments, parts, msg):
m = self._str_msg(msg)
allparts = list(m.walk())
attachments = [allparts[n] for n in attachments]
self.assertEqual(list(m.iter_attachments()), attachments)
def message_as_iter_parts(self, body_parts, attachments, parts, msg):
def _is_multipart_msg(msg):
return 'Content-Type: multipart' in msg
m = self._str_msg(msg)
allparts = list(m.walk())
parts = [allparts[n] for n in parts]
iter_parts = list(m.iter_parts()) if _is_multipart_msg(msg) else []
self.assertEqual(iter_parts, parts)
class _TestContentManager:
def get_content(self, msg, *args, **kw):
return msg, args, kw
def set_content(self, msg, *args, **kw):
self.msg = msg
self.args = args
self.kw = kw
def test_get_content_with_cm(self):
m = self._str_msg('')
cm = self._TestContentManager()
self.assertEqual(m.get_content(content_manager=cm), (m, (), {}))
msg, args, kw = m.get_content('foo', content_manager=cm, bar=1, k=2)
self.assertEqual(msg, m)
self.assertEqual(args, ('foo',))
self.assertEqual(kw, dict(bar=1, k=2))
def test_get_content_default_cm_comes_from_policy(self):
p = policy.default.clone(content_manager=self._TestContentManager())
m = self._str_msg('', policy=p)
self.assertEqual(m.get_content(), (m, (), {}))
msg, args, kw = m.get_content('foo', bar=1, k=2)
self.assertEqual(msg, m)
self.assertEqual(args, ('foo',))
self.assertEqual(kw, dict(bar=1, k=2))
def test_set_content_with_cm(self):
m = self._str_msg('')
cm = self._TestContentManager()
m.set_content(content_manager=cm)
self.assertEqual(cm.msg, m)
self.assertEqual(cm.args, ())
self.assertEqual(cm.kw, {})
m.set_content('foo', content_manager=cm, bar=1, k=2)
self.assertEqual(cm.msg, m)
self.assertEqual(cm.args, ('foo',))
self.assertEqual(cm.kw, dict(bar=1, k=2))
def test_set_content_default_cm_comes_from_policy(self):
cm = self._TestContentManager()
p = policy.default.clone(content_manager=cm)
m = self._str_msg('', policy=p)
m.set_content()
self.assertEqual(cm.msg, m)
self.assertEqual(cm.args, ())
self.assertEqual(cm.kw, {})
m.set_content('foo', bar=1, k=2)
self.assertEqual(cm.msg, m)
self.assertEqual(cm.args, ('foo',))
self.assertEqual(cm.kw, dict(bar=1, k=2))
# outcome is whether xxx_method should raise ValueError error when called
# on multipart/subtype. Blank outcome means it depends on xxx (add
# succeeds, make raises). Note: 'none' means there are content-type
# headers but payload is None...this happening in practice would be very
# unusual, so treating it as if there were content seems reasonable.
# method subtype outcome
subtype_params = (
('related', 'no_content', 'succeeds'),
('related', 'none', 'succeeds'),
('related', 'plain', 'succeeds'),
('related', 'related', ''),
('related', 'alternative', 'raises'),
('related', 'mixed', 'raises'),
('alternative', 'no_content', 'succeeds'),
('alternative', 'none', 'succeeds'),
('alternative', 'plain', 'succeeds'),
('alternative', 'related', 'succeeds'),
('alternative', 'alternative', ''),
('alternative', 'mixed', 'raises'),
('mixed', 'no_content', 'succeeds'),
('mixed', 'none', 'succeeds'),
('mixed', 'plain', 'succeeds'),
('mixed', 'related', 'succeeds'),
('mixed', 'alternative', 'succeeds'),
('mixed', 'mixed', ''),
)
def _make_subtype_test_message(self, subtype):
m = self.message()
payload = None
msg_headers = [
('To', '[email protected]'),
('From', '[email protected]'),
]
if subtype != 'no_content':
('content-shadow', 'Logrus'),
msg_headers.append(('X-Random-Header', 'Corwin'))
if subtype == 'text':
payload = ''
msg_headers.append(('Content-Type', 'text/plain'))
m.set_payload('')
elif subtype != 'no_content':
payload = []
msg_headers.append(('Content-Type', 'multipart/' + subtype))
msg_headers.append(('X-Trump', 'Random'))
m.set_payload(payload)
for name, value in msg_headers:
m[name] = value
return m, msg_headers, payload
def _check_disallowed_subtype_raises(self, m, method_name, subtype, method):
with self.assertRaises(ValueError) as ar:
getattr(m, method)()
exc_text = str(ar.exception)
self.assertIn(subtype, exc_text)
self.assertIn(method_name, exc_text)
def _check_make_multipart(self, m, msg_headers, payload):
count = 0
for name, value in msg_headers:
if not name.lower().startswith('content-'):
self.assertEqual(m[name], value)
count += 1
self.assertEqual(len(m), count+1) # +1 for new Content-Type
part = next(m.iter_parts())
count = 0
for name, value in msg_headers:
if name.lower().startswith('content-'):
self.assertEqual(part[name], value)
count += 1
self.assertEqual(len(part), count)
self.assertEqual(part.get_payload(), payload)
def subtype_as_make(self, method, subtype, outcome):
m, msg_headers, payload = self._make_subtype_test_message(subtype)
make_method = 'make_' + method
if outcome in ('', 'raises'):
self._check_disallowed_subtype_raises(m, method, subtype, make_method)
return
getattr(m, make_method)()
self.assertEqual(m.get_content_maintype(), 'multipart')
self.assertEqual(m.get_content_subtype(), method)
if subtype == 'no_content':
self.assertEqual(len(m.get_payload()), 0)
self.assertEqual(m.items(),
msg_headers + [('Content-Type',
'multipart/'+method)])
else:
self.assertEqual(len(m.get_payload()), 1)
self._check_make_multipart(m, msg_headers, payload)
def subtype_as_make_with_boundary(self, method, subtype, outcome):
# Doing all variation is a bit of overkill...
m = self.message()
if outcome in ('', 'raises'):
m['Content-Type'] = 'multipart/' + subtype
with self.assertRaises(ValueError) as cm:
getattr(m, 'make_' + method)()
return
if subtype == 'plain':
m['Content-Type'] = 'text/plain'
elif subtype != 'no_content':
m['Content-Type'] = 'multipart/' + subtype
getattr(m, 'make_' + method)(boundary="abc")
self.assertTrue(m.is_multipart())
self.assertEqual(m.get_boundary(), 'abc')
def test_policy_on_part_made_by_make_comes_from_message(self):
for method in ('make_related', 'make_alternative', 'make_mixed'):
m = self.message(policy=self.policy.clone(content_manager='foo'))
m['Content-Type'] = 'text/plain'
getattr(m, method)()
self.assertEqual(m.get_payload(0).policy.content_manager, 'foo')
class _TestSetContentManager:
def set_content(self, msg, content, *args, **kw):
msg['Content-Type'] = 'text/plain'
msg.set_payload(content)
def subtype_as_add(self, method, subtype, outcome):
m, msg_headers, payload = self._make_subtype_test_message(subtype)
cm = self._TestSetContentManager()
add_method = 'add_attachment' if method=='mixed' else 'add_' + method
if outcome == 'raises':
self._check_disallowed_subtype_raises(m, method, subtype, add_method)
return
getattr(m, add_method)('test', content_manager=cm)
self.assertEqual(m.get_content_maintype(), 'multipart')
self.assertEqual(m.get_content_subtype(), method)
if method == subtype or subtype == 'no_content':
self.assertEqual(len(m.get_payload()), 1)
for name, value in msg_headers:
self.assertEqual(m[name], value)
part = m.get_payload()[0]
else:
self.assertEqual(len(m.get_payload()), 2)
self._check_make_multipart(m, msg_headers, payload)
part = m.get_payload()[1]
self.assertEqual(part.get_content_type(), 'text/plain')
self.assertEqual(part.get_payload(), 'test')
if method=='mixed':
self.assertEqual(part['Content-Disposition'], 'attachment')
elif method=='related':
self.assertEqual(part['Content-Disposition'], 'inline')
else:
# Otherwise we don't guess.
self.assertIsNone(part['Content-Disposition'])
class _TestSetRaisingContentManager:
def set_content(self, msg, content, *args, **kw):
raise Exception('test')
def test_default_content_manager_for_add_comes_from_policy(self):
cm = self._TestSetRaisingContentManager()
m = self.message(policy=self.policy.clone(content_manager=cm))
for method in ('add_related', 'add_alternative', 'add_attachment'):
with self.assertRaises(Exception) as ar:
getattr(m, method)('')
self.assertEqual(str(ar.exception), 'test')
def message_as_clear(self, body_parts, attachments, parts, msg):
m = self._str_msg(msg)
m.clear()
self.assertEqual(len(m), 0)
self.assertEqual(list(m.items()), [])
self.assertIsNone(m.get_payload())
self.assertEqual(list(m.iter_parts()), [])
def message_as_clear_content(self, body_parts, attachments, parts, msg):
m = self._str_msg(msg)
expected_headers = [h for h in m.keys()
if not h.lower().startswith('content-')]
m.clear_content()
self.assertEqual(list(m.keys()), expected_headers)
self.assertIsNone(m.get_payload())
self.assertEqual(list(m.iter_parts()), [])
def test_is_attachment(self):
m = self._make_message()
self.assertFalse(m.is_attachment())
m['Content-Disposition'] = 'inline'
self.assertFalse(m.is_attachment())
m.replace_header('Content-Disposition', 'attachment')
self.assertTrue(m.is_attachment())
m.replace_header('Content-Disposition', 'AtTachMent')
self.assertTrue(m.is_attachment())
m.set_param('filename', 'abc.png', 'Content-Disposition')
self.assertTrue(m.is_attachment())
def test_iter_attachments_mutation(self):
# We had a bug where iter_attachments was mutating the list.
m = self._make_message()
m.set_content('arbitrary text as main part')
m.add_related('more text as a related part')
m.add_related('yet more text as a second "attachment"')
orig = m.get_payload().copy()
self.assertEqual(len(list(m.iter_attachments())), 2)
self.assertEqual(m.get_payload(), orig)
class TestEmailMessage(TestEmailMessageBase, TestEmailBase):
message = EmailMessage
def test_set_content_adds_MIME_Version(self):
m = self._str_msg('')
cm = self._TestContentManager()
self.assertNotIn('MIME-Version', m)
m.set_content(content_manager=cm)
self.assertEqual(m['MIME-Version'], '1.0')
class _MIME_Version_adding_CM:
def set_content(self, msg, *args, **kw):
msg['MIME-Version'] = '1.0'
def test_set_content_does_not_duplicate_MIME_Version(self):
m = self._str_msg('')
cm = self._MIME_Version_adding_CM()
self.assertNotIn('MIME-Version', m)
m.set_content(content_manager=cm)
self.assertEqual(m['MIME-Version'], '1.0')
def test_as_string_uses_max_header_length_by_default(self):
m = self._str_msg('Subject: long line' + ' ab'*50 + '\n\n')
self.assertEqual(len(m.as_string().strip().splitlines()), 3)
def test_as_string_allows_maxheaderlen(self):
m = self._str_msg('Subject: long line' + ' ab'*50 + '\n\n')
self.assertEqual(len(m.as_string(maxheaderlen=0).strip().splitlines()),
1)
self.assertEqual(len(m.as_string(maxheaderlen=34).strip().splitlines()),
6)
def test_as_string_unixform(self):
m = self._str_msg('test')
m.set_unixfrom('From foo@bar Thu Jan 1 00:00:00 1970')
self.assertEqual(m.as_string(unixfrom=True),
'From foo@bar Thu Jan 1 00:00:00 1970\n\ntest')
self.assertEqual(m.as_string(unixfrom=False), '\ntest')
def test_str_defaults_to_policy_max_line_length(self):
m = self._str_msg('Subject: long line' + ' ab'*50 + '\n\n')
self.assertEqual(len(str(m).strip().splitlines()), 3)
def test_str_defaults_to_utf8(self):
m = EmailMessage()
m['Subject'] = 'unicöde'
self.assertEqual(str(m), 'Subject: unicöde\n\n')
def test_folding_with_utf8_encoding_1(self):
# bpo-36520
#
# Fold a line that contains UTF-8 words before
# and after the whitespace fold point, where the
# line length limit is reached within an ASCII
# word.
m = EmailMessage()
m['Subject'] = 'Hello Wörld! Hello Wörld! ' \
'Hello Wörld! Hello Wörld!Hello Wörld!'
self.assertEqual(bytes(m),
b'Subject: Hello =?utf-8?q?W=C3=B6rld!_Hello_W'
b'=C3=B6rld!_Hello_W=C3=B6rld!?=\n'
b' Hello =?utf-8?q?W=C3=B6rld!Hello_W=C3=B6rld!?=\n\n')
def test_folding_with_utf8_encoding_2(self):
# bpo-36520
#
# Fold a line that contains UTF-8 words before
# and after the whitespace fold point, where the
# line length limit is reached at the end of an
# encoded word.
m = EmailMessage()
m['Subject'] = 'Hello Wörld! Hello Wörld! ' \
'Hello Wörlds123! Hello Wörld!Hello Wörld!'
self.assertEqual(bytes(m),
b'Subject: Hello =?utf-8?q?W=C3=B6rld!_Hello_W'
b'=C3=B6rld!_Hello_W=C3=B6rlds123!?=\n'
b' Hello =?utf-8?q?W=C3=B6rld!Hello_W=C3=B6rld!?=\n\n')
def test_folding_with_utf8_encoding_3(self):
# bpo-36520
#
# Fold a line that contains UTF-8 words before
# and after the whitespace fold point, where the
# line length limit is reached at the end of the
# first word.
m = EmailMessage()
m['Subject'] = 'Hello-Wörld!-Hello-Wörld!-Hello-Wörlds123! ' \
'Hello Wörld!Hello Wörld!'
self.assertEqual(bytes(m), \
b'Subject: =?utf-8?q?Hello-W=C3=B6rld!-Hello-W'
b'=C3=B6rld!-Hello-W=C3=B6rlds123!?=\n'
b' Hello =?utf-8?q?W=C3=B6rld!Hello_W=C3=B6rld!?=\n\n')
def test_folding_with_utf8_encoding_4(self):
# bpo-36520
#
# Fold a line that contains UTF-8 words before
# and after the fold point, where the first
# word is UTF-8 and the fold point is within
# the word.
m = EmailMessage()
m['Subject'] = 'Hello-Wörld!-Hello-Wörld!-Hello-Wörlds123!-Hello' \
' Wörld!Hello Wörld!'
self.assertEqual(bytes(m),
b'Subject: =?utf-8?q?Hello-W=C3=B6rld!-Hello-W'
b'=C3=B6rld!-Hello-W=C3=B6rlds123!?=\n'
b' =?utf-8?q?-Hello_W=C3=B6rld!Hello_W=C3=B6rld!?=\n\n')
def test_folding_with_utf8_encoding_5(self):
# bpo-36520
#
# Fold a line that contains a UTF-8 word after
# the fold point.
m = EmailMessage()
m['Subject'] = '123456789 123456789 123456789 123456789 123456789' \
' 123456789 123456789 Hello Wörld!'
self.assertEqual(bytes(m),
b'Subject: 123456789 123456789 123456789 123456789'
b' 123456789 123456789 123456789\n'
b' Hello =?utf-8?q?W=C3=B6rld!?=\n\n')
def test_folding_with_utf8_encoding_6(self):
# bpo-36520
#
# Fold a line that contains a UTF-8 word before
# the fold point and ASCII words after
m = EmailMessage()
m['Subject'] = '123456789 123456789 123456789 123456789 Hello Wörld!' \
' 123456789 123456789 123456789 123456789 123456789' \
' 123456789'
self.assertEqual(bytes(m),
b'Subject: 123456789 123456789 123456789 123456789'
b' Hello =?utf-8?q?W=C3=B6rld!?=\n 123456789 '
b'123456789 123456789 123456789 123456789 '
b'123456789\n\n')
def test_folding_with_utf8_encoding_7(self):
# bpo-36520
#
# Fold a line twice that contains UTF-8 words before
# and after the first fold point, and ASCII words
# after the second fold point.
m = EmailMessage()
m['Subject'] = '123456789 123456789 Hello Wörld! Hello Wörld! ' \
'123456789-123456789 123456789 Hello Wörld! 123456789' \
' 123456789'
self.assertEqual(bytes(m),
b'Subject: 123456789 123456789 Hello =?utf-8?q?'
b'W=C3=B6rld!_Hello_W=C3=B6rld!?=\n'
b' 123456789-123456789 123456789 Hello '
b'=?utf-8?q?W=C3=B6rld!?= 123456789\n 123456789\n\n')
def test_folding_with_utf8_encoding_8(self):
# bpo-36520
#
# Fold a line twice that contains UTF-8 words before
# the first fold point, and ASCII words after the
# first fold point, and UTF-8 words after the second
# fold point.
m = EmailMessage()
m['Subject'] = '123456789 123456789 Hello Wörld! Hello Wörld! ' \
'123456789 123456789 123456789 123456789 123456789 ' \
'123456789-123456789 123456789 Hello Wörld! 123456789' \
' 123456789'
self.assertEqual(bytes(m),
b'Subject: 123456789 123456789 Hello '
b'=?utf-8?q?W=C3=B6rld!_Hello_W=C3=B6rld!?=\n 123456789 '
b'123456789 123456789 123456789 123456789 '
b'123456789-123456789\n 123456789 Hello '
b'=?utf-8?q?W=C3=B6rld!?= 123456789 123456789\n\n')
def test_get_body_malformed(self):
"""test for bpo-42892"""
msg = textwrap.dedent("""\
Message-ID: <[email protected]>
Date: Wed, 08 Nov 2017 08:50:22 +0700
From: Foo Bar <[email protected]>
MIME-Version: 1.0
To: [email protected] <[email protected]>
Subject: Python Email
Content-Type: multipart/mixed;
boundary="------------879045806563892972123996"
X-Global-filter:Messagescannedforspamandviruses:passedalltests
This is a multi-part message in MIME format.
--------------879045806563892972123996
Content-Type: text/plain; charset=ISO-8859-1; format=flowed
Content-Transfer-Encoding: 7bit
Your message is ready to be sent with the following file or link
attachments:
XU89 - 08.11.2017
""")
m = self._str_msg(msg)
# In bpo-42892, this would raise
# AttributeError: 'str' object has no attribute 'is_attachment'
m.get_body()
class TestMIMEPart(TestEmailMessageBase, TestEmailBase):
# Doing the full test run here may seem a bit redundant, since the two
# classes are almost identical. But what if they drift apart? So we do
# the full tests so that any future drift doesn't introduce bugs.
message = MIMEPart
def test_set_content_does_not_add_MIME_Version(self):
m = self._str_msg('')
cm = self._TestContentManager()
self.assertNotIn('MIME-Version', m)
m.set_content(content_manager=cm)
self.assertNotIn('MIME-Version', m)
def test_string_payload_with_multipart_content_type(self):
msg = message_from_string(textwrap.dedent("""\
Content-Type: multipart/mixed; charset="utf-8"
sample text
"""), policy=policy.default)
attachments = msg.iter_attachments()
self.assertEqual(list(attachments), [])
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
aba91413f1139d347b5032c4f60dae2b8e713d84 | 2ac77ba91b0972516aea272d24faad85a53e229a | /ProfileReviews.py | e31c1ed97473faafbc7c570b420f4a925172d989 | [
"MIT"
]
| permissive | Blue-IT-Marketing/cloud-jobs | f98cdb80f61a1b85230025696dab432b423f7c21 | 99f577fe963aeaaaad6a6c6139563648a1fb3b03 | refs/heads/master | 2020-03-18T19:28:22.313288 | 2019-02-27T07:28:39 | 2019-02-27T07:28:39 | 135,156,777 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,548 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Justice Ndou'
__website__ = 'http://jobcloud.freelancing-seo.com/'
__email__ = '[email protected]'
# Copyright 2014 Freelancing Solutions.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datatypes import Reference
from ConstantsAndErrorCodes import MyConstants, ErrorCodes, isGoogleServer
from google.appengine.ext import db
import logging
class ProfileReviews(db.Expando, MyConstants, ErrorCodes):
PReviewRef = db.StringProperty()
indexReference = db.ReferenceProperty(reference_class=Reference,collection_name='profile_reviews') # to the owner of the Profile
UReferenceNum = db.StringProperty() # A Reference Number from the Profile Owner Reference Class
# A Reference Number to the user who placed the Review
RReferenceNum = db.StringProperty()
Firstname = db.StringProperty()
Email = db.EmailProperty()
Subject = db.StringProperty()
Rating = db.StringProperty()
Review = db.StringProperty()
DateTimeCreated = db.DateTimeProperty(auto_now_add=True)
def createProfileReview(self, inIndex, inUReference, inRReference,inFirstname,inEmail,inSubject,inRating,inReview):
try:
if self.writeIndexReference(strinput=inIndex) and self.writeUReferenceNum(strinput=inUReference) \
and self.writeFirstname(strinput=inFirstname) and self.writeEmail(strinput=inEmail) and \
self.writeSubject(strinput=inSubject) and self.writeRating(strinput=inRating) and \
self.writeReview(strinput=inReview) and self.writeRReferenceNum(strinput=inRReference):
logging.info('PROFILE REVIEW INITIAL PASSED')
self.PReviewRef = str(self.put())
self.put()
return True
else:
logging.info('SIMPLE FAILURE IN CREATING PROFILE REVIEWS')
return False
except:
logging.info('SERIOUS FAILURE IN CREATING PROFILE REVIEWS')
return self._generalError
def readRReferenceNum(self):
try:
temp = str(self.RReferenceNum)
if len(temp) > 0:
return temp
else:
return self.undefined
except:
return self._generalError
def writeRReferenceNum(self, strinput):
try:
strinput = str(strinput)
if len(strinput) > 0:
self.RReferenceNum = strinput
return True
else:
return False
except:
return self._generalError
def readIndexReference(self):
try:
temp = self.indexReference
if not(temp == self.undefined):
return temp
else:
return self.undefined
except:
return self._generalError
def writeIndexReference(self, strinput):
try:
temp = str(strinput)
if len(temp) > 0:
self.indexReference = strinput
logging.info('WRITE INDEX FOR PROFILE REVIEW PASSED')
return True
else:
return False
except:
return self._generalError
def readUReferenceNum(self):
    """Return the profile-owner reference number, or ``self.undefined`` if empty.

    Returns ``self._generalError`` on unexpected failure.
    """
    try:
        temp = str(self.UReferenceNum)
        if temp:
            return temp
        return self.undefined
    except Exception:  # narrowed from bare except
        return self._generalError
def writeUReferenceNum(self, strinput):
    """Store *strinput* (stringified) as the profile-owner reference number.

    Returns True on success, False for empty input, and
    ``self._generalError`` on unexpected failure.
    """
    try:
        strinput = str(strinput)
        if strinput:
            self.UReferenceNum = strinput
            logging.info('WRITE UREFERENCE PASSED ON PROFILE REVIEWS')
            return True
        return False
    except Exception:  # narrowed from bare except
        return self._generalError
def readFirstname(self):
    """Return the reviewer's first name, or ``self.undefined`` if empty.

    Returns ``self._generalError`` on unexpected failure.
    """
    try:
        temp = str(self.Firstname)
        if temp:
            return temp
        return self.undefined
    except Exception:  # narrowed from bare except
        return self._generalError
def writeFirstname(self, strinput):
    """Store *strinput* (stringified) as the reviewer's first name.

    Returns True on success, False for empty input, and
    ``self._generalError`` on unexpected failure.
    """
    try:
        strinput = str(strinput)
        if strinput:
            self.Firstname = strinput
            logging.info('WRITE FIRST NAME PASSED ON PROFILE REVIEWS')
            return True
        return False
    except Exception:  # narrowed from bare except
        return self._generalError
def readEmail(self):
    """Return the reviewer's e-mail address, or ``self.undefined`` if empty.

    Returns ``self._generalError`` on unexpected failure.
    """
    try:
        temp = str(self.Email)
        if temp:
            return temp
        return self.undefined
    except Exception:  # narrowed from bare except
        return self._generalError
def writeEmail(self, strinput):
    """Store *strinput* (stringified) as the reviewer's e-mail address.

    No format validation is performed here beyond non-emptiness.
    Returns True on success, False for empty input, and
    ``self._generalError`` on unexpected failure.
    """
    try:
        strinput = str(strinput)
        if strinput:
            self.Email = strinput
            logging.info('WRITE EMAIL PASSED ON PROFILE REVIEWS')
            return True
        return False
    except Exception:  # narrowed from bare except
        return self._generalError
def readSubject(self):
    """Return the review subject line, or ``self.undefined`` if empty.

    Returns ``self._generalError`` on unexpected failure.
    """
    try:
        temp = str(self.Subject)
        if temp:
            return temp
        return self.undefined
    except Exception:  # narrowed from bare except
        return self._generalError
def writeSubject(self, strinput):
    """Store *strinput* (stringified) as the review subject line.

    Returns True on success, False for empty input, and
    ``self._generalError`` on unexpected failure.
    """
    try:
        strinput = str(strinput)
        if strinput:
            self.Subject = strinput
            logging.info('WRITE SUBJECT PASSED ON PROFILE REVIEWS')
            return True
        return False
    except Exception:  # narrowed from bare except
        return self._generalError
def readRating(self):
    """Return the rating as an int in 1..10, or ``self.undefined`` if out of range.

    Returns ``self._generalError`` when the stored value is not numeric or
    on any other unexpected failure.
    """
    try:
        temp = int(self.Rating)
        if 0 < temp < 11:
            return temp
        return self.undefined
    except Exception:  # narrowed from bare except; also catches ValueError
        return self._generalError
def writeRating(self, strinput):
    """Validate and store a rating in 1..10 (stored as a string).

    Non-digit input (including negatives) is treated as 0 and rejected.
    Returns True on success, False when the value is not in range, and
    ``self._generalError`` on unexpected failure.
    """
    try:
        strinput = str(strinput).strip()
        # isdigit() rejects signs and decimals, so tRating stays 0 for them.
        tRating = int(strinput) if strinput.isdigit() else 0
        if 0 < tRating < 11:
            self.Rating = str(tRating)
            logging.info('WRITE RATING PASSED ON PROFILE REVIEWS')
            return True
        return False
    except Exception:  # narrowed from bare except
        return self._generalError
def readReview(self):
    """Return the review body text, or ``self.undefined`` if empty.

    Returns ``self._generalError`` on unexpected failure.
    """
    try:
        temp = str(self.Review)
        if temp:
            return temp
        return self.undefined
    except Exception:  # narrowed from bare except
        return self._generalError
def writeReview(self, strinput):
    """Store *strinput* (stringified) as the review body text.

    Returns True on success, False for empty input, and
    ``self._generalError`` on unexpected failure.
    """
    try:
        strinput = str(strinput)
        if strinput:
            self.Review = strinput
            return True
        return False
    except Exception:  # narrowed from bare except
        return self._generalError
| [
"[email protected]"
]
| |
0651a0df3619054f72a838d5ca3e1adf32cbab8d | 7889f7f0532db6a7f81e6f8630e399c90438b2b9 | /1.5.0/examples/user_interfaces/embedding_in_qt5.py | 6f35d7745d3b7dae338f30a786f1cece1338a65e | []
| no_license | matplotlib/matplotlib.github.com | ef5d23a5bf77cb5af675f1a8273d641e410b2560 | 2a60d39490941a524e5385670d488c86083a032c | refs/heads/main | 2023-08-16T18:46:58.934777 | 2023-08-10T05:07:57 | 2023-08-10T05:08:30 | 1,385,150 | 25 | 59 | null | 2023-08-30T15:59:50 | 2011-02-19T03:27:35 | null | UTF-8 | Python | false | false | 4,328 | py | #!/usr/bin/env python
# embedding_in_qt5.py --- Simple Qt5 application embedding matplotlib canvases
#
# Copyright (C) 2005 Florent Rougon
# 2006 Darren Dale
# 2015 Jens H Nielsen
#
# This file is an example program for matplotlib. It may be used and
# modified with no restriction; raw copies as well as modified versions
# may be distributed without limitation.
from __future__ import unicode_literals
import sys
import os
import random
import matplotlib
# Make sure that we are using QT5
matplotlib.use('Qt5Agg')
from PyQt5 import QtGui, QtCore, QtWidgets
from numpy import arange, sin, pi
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
progname = os.path.basename(sys.argv[0])
progversion = "0.1"
class MyMplCanvas(FigureCanvas):
    """Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.)."""

    def __init__(self, parent=None, width=5, height=4, dpi=100):
        figure = Figure(figsize=(width, height), dpi=dpi)
        self.axes = figure.add_subplot(111)
        # We want the axes cleared every time plot() is called.
        self.axes.hold(False)

        self.compute_initial_figure()

        FigureCanvas.__init__(self, figure)
        self.setParent(parent)

        expanding = QtWidgets.QSizePolicy.Expanding
        FigureCanvas.setSizePolicy(self, expanding, expanding)
        FigureCanvas.updateGeometry(self)

    def compute_initial_figure(self):
        """Hook for subclasses to draw their initial plot; no-op here."""
        pass
class MyStaticMplCanvas(MyMplCanvas):
    """Simple canvas with a sine plot."""

    def compute_initial_figure(self):
        # One period-and-a-half of sin(2*pi*t) over t in [0, 3).
        t = arange(0.0, 3.0, 0.01)
        self.axes.plot(t, sin(2 * pi * t))
class MyDynamicMplCanvas(MyMplCanvas):
    """A canvas that updates itself every second with a new plot."""

    def __init__(self, *args, **kwargs):
        MyMplCanvas.__init__(self, *args, **kwargs)
        refresh = QtCore.QTimer(self)
        refresh.timeout.connect(self.update_figure)
        refresh.start(1000)  # fire once per second

    def compute_initial_figure(self):
        self.axes.plot([0, 1, 2, 3], [1, 2, 0, 4], 'r')

    def update_figure(self):
        # Build a list of 4 random integers between 0 and 10 (both inclusive)
        samples = [random.randint(0, 10) for _ in range(4)]
        self.axes.plot([0, 1, 2, 3], samples, 'r')
        self.draw()
class ApplicationWindow(QtWidgets.QMainWindow):
    """Main window hosting one static and one dynamic matplotlib canvas."""

    def __init__(self):
        QtWidgets.QMainWindow.__init__(self)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        self.setWindowTitle("application main window")

        # File menu with a Ctrl+Q quit shortcut.
        self.file_menu = QtWidgets.QMenu('&File', self)
        self.file_menu.addAction('&Quit', self.fileQuit,
                                 QtCore.Qt.CTRL + QtCore.Qt.Key_Q)
        self.menuBar().addMenu(self.file_menu)

        self.help_menu = QtWidgets.QMenu('&Help', self)
        self.menuBar().addSeparator()
        self.menuBar().addMenu(self.help_menu)

        self.help_menu.addAction('&About', self.about)

        # Central widget holds both canvases stacked vertically.
        self.main_widget = QtWidgets.QWidget(self)

        l = QtWidgets.QVBoxLayout(self.main_widget)
        sc = MyStaticMplCanvas(self.main_widget, width=5, height=4, dpi=100)
        dc = MyDynamicMplCanvas(self.main_widget, width=5, height=4, dpi=100)
        l.addWidget(sc)
        l.addWidget(dc)

        self.main_widget.setFocus()
        self.setCentralWidget(self.main_widget)

        self.statusBar().showMessage("All hail matplotlib!", 2000)

    def fileQuit(self):
        self.close()

    def closeEvent(self, ce):
        # Route window-close events through the same quit path as File > Quit.
        self.fileQuit()

    def about(self):
        # FIX: in PyQt5, QMessageBox lives in QtWidgets, not QtGui;
        # QtGui.QMessageBox raises AttributeError at runtime.
        QtWidgets.QMessageBox.about(self, "About",
                                    """embedding_in_qt5.py example
Copyright 2005 Florent Rougon, 2006 Darren Dale, 2015 Jens H Nielsen
This program is a simple example of a Qt5 application embedding matplotlib
canvases.
It may be used and modified with no restriction; raw copies as well as
modified versions may be distributed without limitation.
This is modified from the embedding in qt4 example to show the difference
between qt4 and qt5"""
                                    )
# Bootstrap the Qt application, show the main window, and enter the event
# loop; sys.exit propagates Qt's exit code to the shell.
qApp = QtWidgets.QApplication(sys.argv)

aw = ApplicationWindow()
aw.setWindowTitle("%s" % progname)
aw.show()
sys.exit(qApp.exec_())
#qApp.exec_()
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.