Dataset schema (one row per source file; dtypes and value ranges as reported by the viewer):

| column | dtype | min | max |
| --- | --- | --- | --- |
| blob_id | string (length) | 40 | 40 |
| directory_id | string (length) | 40 | 40 |
| path | string (length) | 3 | 616 |
| content_id | string (length) | 40 | 40 |
| detected_licenses | list (length) | 0 | 112 |
| license_type | string (2 classes) | | |
| repo_name | string (length) | 5 | 115 |
| snapshot_id | string (length) | 40 | 40 |
| revision_id | string (length) | 40 | 40 |
| branch_name | string (777 classes) | | |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 | 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 | 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 | 2023-09-06 01:08:06 |
| github_id | int64, nullable (⌀) | 4.92k | 681M |
| star_events_count | int64 | 0 | 209k |
| fork_events_count | int64 | 0 | 110k |
| gha_license_id | string (22 classes) | | |
| gha_event_created_at | timestamp[us], nullable (⌀) | 2012-06-04 01:52:49 | 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us], nullable (⌀) | 2008-05-22 07:58:19 | 2023-08-21 12:35:19 |
| gha_language | string (149 classes) | | |
| src_encoding | string (26 classes) | | |
| language | string (1 class) | | |
| is_vendor | bool (2 classes) | | |
| is_generated | bool (2 classes) | | |
| length_bytes | int64 | 3 | 10.2M |
| extension | string (188 classes) | | |
| content | string (length) | 3 | 10.2M |
| authors | list (length) | 1 | 1 |
| author_id | string (length) | 1 | 132 |

The records below are pipe-separated rows in this schema; each row's content cell holds the file verbatim.
44be92edbd4686e354ce691569964fad2a11f50e | b54ba6923597138d744c890c21f09ceb768ea0ac | /Python 8 to 9AM/BasicExamples/Demo11.py | eb5a3414b5d2f196b98720855020d0c7a65b179c | [] | no_license | ashokreddy7013/python | ad3893e598a827bf520a97ed63e7441ce5ebea62 | e9032166ad32001e5fc89903b62d419d6d68b6d0 | refs/heads/master | 2020-05-01T08:54:19.106039 | 2019-03-24T08:19:18 | 2019-03-24T08:19:18 | 171,410,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py |
class Employee:
def __init__(self):
print("I am Default Const")
Employee()
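# A sketch of what running this script does: instantiating Employee above
# invokes the no-argument __init__, so the console prints "I am Default Const".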
| [
"[email protected]"
] | |
dc757c59fdf8ee93aa9bf252e1823c13c8c21ba9 | 9bd82e484b3d32c4059ef57dec70e64bced8dde7 | /OTB/OTB/pipelines.py | 693eeb3a38760be718dbda4b5478b7a5cebc9419 | [
"MIT"
] | permissive | houzw/knowledge-base-data | 8a4df88558edcc5fcc25bac82788c6eb96119854 | 60771e8bf300227e1a26c9e77f56b09d23acd64a | refs/heads/master | 2021-06-02T02:28:09.624790 | 2020-10-21T07:02:26 | 2020-10-21T07:02:26 | 148,170,033 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
class OtbPipeline(object):
def process_item(self, item, spider):
return item
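# A hedged sketch of how this pipeline would be enabled (the dotted path is
# inferred from this repo's OTB/OTB/pipelines.py layout, and 300 is just the
# conventional example priority, as the Scrapy comment above suggests):
#   ITEM_PIPELINES = {'OTB.pipelines.OtbPipeline': 300}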
| [
"[email protected]"
] | |
e16b8c9808ebc38687cf672a338a6f901cd42936 | 9f1039075cc611198a988034429afed6ec6d7408 | /tensorflow-stubs/contrib/framework/python/framework/checkpoint_utils.pyi | e6e501dad7d3fcdf5bcb59bd42acdb9afc2b5d9c | [] | no_license | matangover/tensorflow-stubs | 9422fbb1cb3a3638958d621461291c315f9c6ec2 | 664bd995ef24f05ba2b3867d979d23ee845cb652 | refs/heads/master | 2020-05-23T12:03:40.996675 | 2019-05-15T06:21:43 | 2019-05-15T06:21:43 | 186,748,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 650 | pyi | # Stubs for tensorflow.contrib.framework.python.framework.checkpoint_utils (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from tensorflow.python.ops import io_ops as io_ops, state_ops as state_ops, variables as variables
from tensorflow.python.platform import gfile as gfile
from tensorflow.python.training import checkpoint_management as checkpoint_management
from typing import Any as Any
def load_checkpoint(filepattern: Any): ...
def load_variable(checkpoint_dir: Any, name: Any): ...
def list_variables(checkpoint_dir: Any): ...
def init_from_checkpoint(checkpoint_dir: Any, assignment_map: Any): ...
| [
"[email protected]"
] | |
0478b50dada5ebde6ba5cc4ed593631ad0a7f72d | 00ee6a3c859362bbc20342c568a27ea2a493e427 | /src/x007007007/djapp/raspberry/net/models/__init__.py | df1ebdbcd68a29e15c124791e2bb900e5e47a5bf | [
"MIT"
] | permissive | x007007007/raspberrypi | 7721b1fde2763fd28db579ca65217b81ee2193ae | 9dfe49666c029b8bb617830a5c5a873a6106d853 | refs/heads/master | 2022-10-04T04:51:29.974216 | 2022-10-03T16:36:00 | 2022-10-03T16:36:00 | 56,951,270 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19 | py | from .. import hook | [
"[email protected]"
] | |
22a0ca8e1e08f8516eb0a7d34b276c7390c35474 | c15a28ae62eb94dbf3ed13e2065195e572a9988e | /Cook book/src/8/lazily_computed_attributes/example1.py | a872f26e188323bd9e96e4b786016ffff9d9d6d8 | [] | no_license | xuyuchends1/python | 10798c92840a1a59d50f5dc5738b2881e65f7865 | 545d950a3d2fee799902658e8133e3692939496b | refs/heads/master | 2021-01-25T07:07:04.812140 | 2020-02-28T09:25:15 | 2020-02-28T09:25:15 | 93,647,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 704 | py | class lazyproperty:
def __init__(self, func):
self.func = func
def __get__(self, instance, cls):
if instance is None:
return self
else:
value = self.func(instance)
setattr(instance, self.func.__name__, value)
return value
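            # How the laziness works: setattr() above stores the computed value
            # on the instance under the decorated function's name; because
            # lazyproperty defines no __set__, it is a non-data descriptor, so
            # the instance attribute now shadows it and __get__ never runs
            # again for this object.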
if __name__ == '__main__':
import math
class Circle:
def __init__(self, radius):
self.radius = radius
@lazyproperty
def area(self):
print('Computing area')
return math.pi * self.radius ** 2
@lazyproperty
def perimeter(self):
print('Computing perimeter')
return 2 * math.pi * self.radius
| [
"[email protected]"
] | |
66ba6597d61378a91d5ee82df352bcc2eba93876 | d8d95b609a103454b408634bc3a61e4c1fb72dd6 | /6주차/my_SIFT for student.py | d29df1b53fb430f6b303991b82775340a443ecd1 | [] | no_license | sglee487/ComputerGraphics | b7d8cb26a93c91bcfa8515807dce5b09a5bf4384 | 5468b807d98589fda5c9effc64740f1963d7550b | refs/heads/master | 2020-07-31T10:03:54.900052 | 2020-04-04T08:07:01 | 2020-04-04T08:07:01 | 210,547,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,154 | py | import cv2
import numpy as np
def get_extrema(DoG, ext):
for i in range(1, 4):
for j in range(1, DoG.shape[0]-1):
for k in range(1, DoG.shape[1]-1):
                # Treat points that are the maximum or minimum over the 3x3x3 neighborhood as extrema.
DoG1localMax = np.max(DoG[j-1:j+2,k-1:k+2,i-1])
DoG1localMin = np.min(DoG[j-1:j+2,k-1:k+2,i-1])
DoG2localMax = np.max(DoG[j-1:j+2,k-1:k+2,i])
DoG2localMin = np.min(DoG[j-1:j+2,k-1:k+2,i])
DoG3localMax = np.max(DoG[j-1:j+2,k-1:k+2,i+1])
DoG3localMin = np.min(DoG[j-1:j+2,k-1:k+2,i+1])
allLocalMax = max(DoG1localMax,DoG2localMax,DoG3localMax)
allLocalMin = min(DoG1localMin,DoG2localMin,DoG3localMin)
if ((allLocalMax == DoG[j][k][i]) or (allLocalMin == DoG[j][k][i])):
                    # Differentiate to obtain xhat and D(xhat).
dDdx = (DoG[j,k+1,i]-DoG[j,k-1,i])/2
dDdy = (DoG[j+1,k,i]-DoG[j-1,k,i])/2
dDds = (DoG[j,k,i+1]-DoG[j,k,i-1])/2
                    # Second derivatives via central differences: f'' ~ f(x+1) + f(x-1) - 2 f(x)
                    d2Ddx2 = DoG[j,k+1,i] + DoG[j,k-1,i] - 2 * DoG[j,k,i]
                    d2Ddy2 = DoG[j+1,k,i] + DoG[j-1,k,i] - 2 * DoG[j,k,i]
                    d2Dds2 = DoG[j,k,i+1] + DoG[j,k,i-1] - 2 * DoG[j,k,i]
                    # Mixed second derivatives via central differences
                    d2Ddxy = ((DoG[j+1,k+1,i] - DoG[j+1,k-1,i]) - (DoG[j-1,k+1,i] - DoG[j-1,k-1,i])) / 4
                    d2Ddxs = ((DoG[j,k+1,i+1] - DoG[j,k-1,i+1]) - (DoG[j,k+1,i-1] - DoG[j,k-1,i-1])) / 4
                    d2Ddys = ((DoG[j+1,k,i+1] - DoG[j+1,k,i-1]) - (DoG[j-1,k,i+1] - DoG[j-1,k,i-1])) / 4
                    # Hessian of D in (x, y, sigma)
                    H = [[d2Ddx2, d2Ddxy, d2Ddxs], [d2Ddxy, d2Ddy2, d2Ddys], [d2Ddxs, d2Ddys, d2Dds2]]
dD = np.transpose([dDdx,dDdy,dDds])
xhat = np.linalg.lstsq(np.dot(-1,H), dD, rcond=-1)[0]
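                    # Subpixel refinement as in Lowe's SIFT: solving -H xhat = dD
                    # gives xhat = -H^{-1} dD, the offset of the interpolated
                    # extremum from the sampled (x, y, sigma) grid point.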
target = DoG[j,k,i]
Dxhat = target + 0.5 * np.dot(dD.transpose(), xhat)
                    # Thresholding: keep only high-contrast, well-localized points in the ext array.
                    # Reject low-contrast points, and points whose offset exceeds 0.5 in any dimension.
                    if (np.abs(Dxhat) < thresh or np.max(np.abs(xhat)) > 0.5):
continue
Hpart = np.array([[d2Ddx2,d2Ddxy],[d2Ddxy,d2Ddy2]])
traceHpartsquare = np.trace(Hpart) ** 2
detHpart = np.linalg.det(Hpart)
rc = ((r + 1) ** 2)/r
if (detHpart<0 or (traceHpartsquare/detHpart) > rc):
continue
ext[j,k,i-1] = 1
return ext
def SIFT(src, thresh, r):
    s = 1.3  # initial sigma
    a = 3.  # number of images per octave in which extrema are searched
k = 2. ** (1/a) # scale step
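    # Worked check: with a = 3, k = 2**(1/3) ~ 1.26, so after three scale steps
    # sigma doubles (s * k**3 = 2 * s), i.e. each set of sigmas spans one octave.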
    lv1sigma = np.array([s , s * k, s * (k**2), s * (k**3), s * (k**4), s * (k**5)])  # sigmas applied to the doubled image
lv2sigma = np.array([s * (k**3) , s * (k**4), s * (k**5), s * (k**6), s * (k**7), s * (k**8) ]) #Original size image #start : 2 * sigma
lv3sigma = np.array([s * (k**6) , s * (k**7), s * (k**8), s * (k**9), s * (k**10), s * (k**11) ]) #half size image #start : 4 * sigma
lv4sigma = np.array([s * (k**9) , s * (k**10), s * (k**11), s * (k**12), s * (k**13), s * (k**14) ]) #quater size image #start : 8 * sigma
#image resize
    doubled = cv2.resize(src, None, fx=2, fy=2, interpolation=cv2.INTER_LINEAR)  # 2x the original; cv2.INTER_LINEAR or cv2.INTER_NEAREST both work
    normal = src  # same as the original
    half = cv2.resize(src, None, fx=0.5, fy=0.5, interpolation=cv2.INTER_LINEAR)  # 1/2 in each dimension
    quarter = cv2.resize(src, None, fx=0.25, fy=0.25, interpolation=cv2.INTER_LINEAR)  # 1/4 in each dimension
    # 3-D arrays holding the Gaussian pyramids
lv1py = np.zeros((doubled.shape[0], doubled.shape[1], 6))
lv2py = np.zeros((normal.shape[0], normal.shape[1], 6))
lv3py = np.zeros((half.shape[0], half.shape[1], 6))
lv4py = np.zeros((quarter.shape[0], quarter.shape[1], 6))
print('make gaussian pyr')
    # Compute the Gaussians
    # ksize = 2 * int(4 * sigma + 0.5) + 1
    for i in range(6):
        # Build the Gaussian pyramids; in this example each level (octave)
        # stores 6 Gaussian images.
ksize = 2 * int(4 * lv1sigma[i] + 0.5) + 1
lv1py[:,:,i] = cv2.GaussianBlur(doubled, (ksize, ksize), lv1sigma[i])
ksize = 2 * int(4 * lv2sigma[i] + 0.5) + 1
lv2py[:,:,i] = cv2.GaussianBlur(normal,(ksize,ksize),lv2sigma[i])
ksize = 2 * int(4 * lv3sigma[i] + 0.5) + 1
lv3py[:,:,i] = cv2.GaussianBlur(half,(ksize,ksize),lv3sigma[i])
ksize = 2 * int(4 * lv4sigma[i] + 0.5) + 1
lv4py[:,:,i] = cv2.GaussianBlur(quarter,(ksize,ksize),lv4sigma[i])
    # 3-D arrays holding the DoG pyramids
DoGlv1 = np.zeros((doubled.shape[0], doubled.shape[1], 5))
DoGlv2 = np.zeros((normal.shape[0], normal.shape[1], 5))
DoGlv3 = np.zeros((half.shape[0], half.shape[1], 5))
DoGlv4 = np.zeros((quarter.shape[0], quarter.shape[1], 5))
print('calc DoG')
    # Compute the DoG
    for i in range(5):
        # Difference-of-Gaussian image pyramids.
DoGlv1[:,:,i] = cv2.subtract(lv1py[:,:,i],lv1py[:,:,i+1])
DoGlv2[:,:,i] = cv2.subtract(lv2py[:,:,i],lv2py[:,:,i+1])
DoGlv3[:,:,i] = cv2.subtract(lv3py[:,:,i],lv3py[:,:,i+1])
DoGlv4[:,:,i] = cv2.subtract(lv4py[:,:,i],lv4py[:,:,i+1])
    # 3-D arrays marking the extrema locations
extPy1 = np.zeros((doubled.shape[0], doubled.shape[1], 3))
extPy2 = np.zeros((normal.shape[0], normal.shape[1], 3))
extPy3 = np.zeros((half.shape[0], half.shape[1], 3))
extPy4 = np.zeros((quarter.shape[0], quarter.shape[1], 3))
    # Locate the extrema
print('find extrema')
extPy1 = get_extrema(DoGlv1, extPy1)
extPy2 = get_extrema(DoGlv2, extPy2)
extPy3 = get_extrema(DoGlv3, extPy3)
extPy4 = get_extrema(DoGlv4, extPy4)
extr_sum = extPy1.sum() + extPy2.sum() + extPy3.sum() + extPy4.sum()
extr_sum = extr_sum.astype(np.int)
    keypoints = np.zeros((extr_sum, 3))  # a full SIFT keypoint carries more fields, but only the Y coordinate, X coordinate, and scale are stored here
    # Store the keypoint values
    count = 0  # number of keypoints written so far
for i in range(3):
for j in range(doubled.shape[0]):
for k in range(doubled.shape[1]):
#Lv1
                # Store this keypoint's info in the keypoints array (could be factored into a helper function).
if (extPy1[j,k,i] == 1):
keypoints[count,0] = j * 0.5
keypoints[count,1] = k * 0.5
keypoints[count,2] = i
count += 1
for i in range(3):
for j in range(normal.shape[0]):
for k in range(normal.shape[1]):
#Lv2
                # Store this keypoint's info in the keypoints array.
if (extPy2[j,k,i] == 1):
keypoints[count,0] = j
keypoints[count,1] = k
keypoints[count,2] = i
count += 1
for i in range(3):
for j in range(half.shape[0]):
for k in range(half.shape[1]):
#Lv3
                # Store this keypoint's info in the keypoints array.
if (extPy3[j,k,i] == 1):
keypoints[count,0] = j * 2
keypoints[count,1] = k * 2
keypoints[count,2] = i
count += 1
for i in range(3):
for j in range(quarter.shape[0]):
for k in range(quarter.shape[1]):
#Lv4
                # Store this keypoint's info in the keypoints array.
if (extPy4[j,k,i] == 1):
keypoints[count,0] = j * 4
keypoints[count,1] = k * 4
keypoints[count,2] = i
count += 1
return keypoints
if __name__ == '__main__':
src = cv2.imread('./building.jpg')
gray = cv2.cvtColor(src, cv2.COLOR_BGR2GRAY)
gray = gray.astype(np.double)
gray /= 255.
thresh = 0.03
    r = 10.  # the original SIFT paper uses r = 10
keypoints = SIFT(gray, thresh = thresh, r = r)
for i in range(len(keypoints)):
        cv2.circle(src, (int(keypoints[i,1]), int(keypoints[i,0])), int(1 * keypoints[i,2]), (0, 0, 255), 1)  # draw a circle at each keypoint location
src2 = cv2.imread('./building_temp.jpg')
gray2 = cv2.cvtColor(src2, cv2.COLOR_BGR2GRAY)
gray2 = gray2.astype(np.double) / 255.
keypoints2 = SIFT(gray2, thresh=thresh, r=r)
for i in range(len(keypoints2)):
        cv2.circle(src2, (int(keypoints2[i,1]), int(keypoints2[i,0])), int(1 * keypoints2[i,2]), (0, 0, 255), 1)  # draw a circle at each keypoint location
cv2.imshow('src', src)
cv2.imshow('src2', src2)
cv2.waitKey()
cv2.destroyAllWindows() | [
"[email protected]"
] | |
f1fae0f924c00a9486c0b986eec4af374aa7f501 | ea83e60e2be606813005081a9f1b9516de018c7d | /language/realm/retrieval.py | 7962c9d8ad6fdb8180adf7d556fcee0ec5c34d37 | [
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | optimopium/language | 1562a1f150cf4374cf8d2e6a0b7ab4a44c5b8961 | bcc90d312aa355f507ed128e39b7f6ea4b709537 | refs/heads/master | 2022-04-03T03:51:28.831387 | 2022-03-16T21:41:17 | 2022-03-16T22:50:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,555 | py | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Utilities for performing retrieval."""
import abc
from concurrent import futures
import time
from absl import logging
from language.realm import featurization
from language.realm import parallel
from language.realm import profile
import numpy as np
import tensorflow.compat.v1 as tf
import tensorflow_hub as hub
class Retriever(abc.ABC):
"""Retrieves documents for a query."""
@abc.abstractmethod
def retrieve(self, query_batch):
"""Retrieves candidates for a batch of queries.
Args:
query_batch (list[Query]): a list of queries.
Returns:
a batch of lists, where each list is a list of Documents for the
corresponding query.
"""
raise NotImplementedError()
class DummyRetriever(Retriever):
"""Dummy retriever for testing."""
def __init__(self, num_neighbors):
self._num_neighbors = num_neighbors
self.total_candidates = 13353718
self.embed_dim = 128
with tf.device('/CPU:0'):
self._doc_embeds = tf.zeros((self.total_candidates, self.embed_dim))
def retrieve(self, query_batch):
# [batch_size, embed_dim]
query_embeds = tf.zeros((len(query_batch), self.embed_dim))
with tf.device('/CPU:0'):
# [batch_size, total_candidates]
cand_scores = tf.matmul(query_embeds, self._doc_embeds, transpose_b=True)
_, top_ids_batch = tf.math.top_k(cand_scores, k=self._num_neighbors)
title_ids = np.zeros(10, dtype=np.int32)
body_ids = np.zeros(280, dtype=np.int32)
retrievals_batch = []
for top_ids in top_ids_batch:
retrievals = [
featurization.Document(0, title_ids, body_ids) for i in top_ids
]
retrievals_batch.append(retrievals)
return retrievals_batch
class BruteForceRetriever(Retriever):
"""Retrieves documents using brute force matrix multiplication."""
def __init__(self, query_embedder, documents, doc_embeds_or_path,
num_neighbors):
"""Constructs BruteForceRetriever.
Args:
query_embedder: an instance of QueryEmbedder.
documents: a list of Document objects.
doc_embeds_or_path: either a [num_docs, embed_dim] TF Tensor, or a path to
load it.
num_neighbors: number of neighbors to retrieve.
"""
total_candidates = len(documents)
self._query_embedder = query_embedder
self._num_neighbors = num_neighbors
self._documents = documents
# Load embeddings.
if isinstance(doc_embeds_or_path, str):
with tf.device('/CPU:0'):
ckpt_reader = tf.train.load_checkpoint(doc_embeds_or_path)
self._doc_embeds = ckpt_reader.get_tensor('block_emb')
else:
self._doc_embeds = doc_embeds_or_path
logging.info('Loaded document embeddings.')
# Check shapes.
if self._doc_embeds.shape[0] != total_candidates:
raise ValueError('Did not load the right number of embeddings.')
@profile.profiled_function
def retrieve(self, query_batch):
# [batch_size, embed_dim]
query_embeds = self._query_embedder.embed(query_batch)
with tf.device('/CPU:0'):
# [batch_size, total_candidates]
cand_scores = tf.matmul(query_embeds, self._doc_embeds, transpose_b=True)
_, top_ids_batch = tf.math.top_k(cand_scores, k=self._num_neighbors)
retrievals_batch = []
for top_ids in top_ids_batch:
retrievals = [self._documents[i] for i in top_ids]
retrievals_batch.append(retrievals)
return retrievals_batch
def count_tf_records(file_path):
"""Counts the number of records in a GZIP'd TFRecord file."""
gzip_option = tf.python_io.TFRecordOptions(
tf.python_io.TFRecordCompressionType.GZIP)
count = 0
for _ in tf.python_io.tf_record_iterator(file_path, gzip_option):
count += 1
return count
def count_tf_records_parallel_helper(args):
"""Just a helper function for count_tf_records_parallel."""
file_idx, file_path = args
return (file_idx, count_tf_records(file_path))
def count_tf_records_parallel(file_paths, num_processes=None):
"""Counts number of records in TFRecord files in parallel.
Args:
file_paths: a list of paths, where each path points to a GZIP-ed TFRecord
file.
num_processes: number of Python processes to use in parallel. If None, will
use all available CPUs.
Returns:
shard_sizes: a list of ints.
"""
num_files = len(file_paths)
with parallel.Executor(
create_worker=lambda: count_tf_records_parallel_helper,
queue_size=num_files,
num_workers=num_processes) as executor:
for file_idx, file_path in enumerate(file_paths):
executor.submit((file_idx, file_path))
counts = [None] * num_files
results = executor.results(max_to_yield=num_files)
for i, (file_idx, count) in enumerate(results):
counts[file_idx] = count
logging.info('Counted %d / %d files.', i + 1, num_files)
return counts
def load_documents(path):
"""Loads Documents from a GZIP-ed TFRecords file into a Python list."""
gzip_option = tf.python_io.TFRecordOptions(
tf.python_io.TFRecordCompressionType.GZIP)
def get_bytes_feature(ex, name):
return list(ex.features.feature[name].bytes_list.value)
def get_ints_feature(ex, name):
# 32-bit Numpy arrays are more memory-efficient than Python lists.
return np.array(ex.features.feature[name].int64_list.value, dtype=np.int32)
docs = []
for val in tf.python_io.tf_record_iterator(path, gzip_option):
ex = tf.train.Example.FromString(val)
title = get_bytes_feature(ex, 'title')[0]
body = get_bytes_feature(ex, 'body')[0]
doc_uid = featurization.get_document_uid(title, body)
title_token_ids = get_ints_feature(ex, 'title_token_ids')
body_token_ids = get_ints_feature(ex, 'body_token_ids')
doc = featurization.Document(
uid=doc_uid,
title_token_ids=title_token_ids,
body_token_ids=body_token_ids)
docs.append(doc)
return docs
def load_documents_from_shard(args):
"""A helper function for load_documents_from_shards."""
shard_idx, shard_path = args
docs = load_documents(shard_path)
return (shard_idx, docs)
@profile.profiled_function
def load_documents_from_shards(shard_paths, num_processes=None):
"""Loads Documents from a sharded, GZIP-ed TFRecords file into a Python list.
Uses multiple processes to perform IO in parallel.
Args:
shard_paths: a list of paths, where each path points to a GZIP-ed TFRecords
file. Documents loaded from each shard will be concatenated in the order
of shard_paths.
num_processes: number of Python processes to use in parallel. If None, will
use all available CPUs.
Returns:
a list of Document instances.
"""
num_shards = len(shard_paths)
with parallel.Executor(
create_worker=lambda: load_documents_from_shard,
queue_size=num_shards,
num_workers=num_processes) as executor:
for shard_idx, shard_path in enumerate(shard_paths):
executor.submit((shard_idx, shard_path))
results = []
for shard_idx, docs in executor.results(max_to_yield=num_shards):
results.append((shard_idx, docs))
logging.info('Loaded %d of %d document shards.', len(results), num_shards)
# Sorts results by shard_idx.
results.sort()
logging.info('Combining data from all document shards.')
all_docs = []
for shard_idx, docs in results:
all_docs.extend(docs)
logging.info('Finished loading all shards.')
return all_docs
class QueryEmbedder(object):
"""Embeds queries."""
def __init__(self, embedder_model_or_path, featurizer):
if isinstance(embedder_model_or_path, str):
# Assume it is a path to a SavedModel
self._model = tf.saved_model.load_v2(embedder_model_or_path, tags={})
else:
# Assume it is an already loaded SavedModel
self._model = embedder_model_or_path
logging.info('Loaded query embedder.')
self._featurizer = featurizer
def embed(self, query_batch):
"""Embeds a batch of queries.
Args:
query_batch: a list of Query instances.
Returns:
embeds: a [batch_size, embed_dim] float Tensor.
"""
with profile.Timer('embed_featurize'):
feature_dicts = [self._featurizer.featurize_query(q) for q in query_batch]
# Concatenate features into a single dict with the following structure:
# input_ids: [batch_size, seq_len] <int32>
# input_mask: [batch_size, seq_len] <int32>
# segment_ids: [batch_size, seq_len] <int32>
model_inputs = featurization.batch_feature_dicts(feature_dicts)
with profile.Timer('embed_tf'):
return self._model.signatures['projected'](**model_inputs)['default']
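# A hypothetical usage sketch of QueryEmbedder (the path and variable names are
# illustrative, not from this codebase):
#   embedder = QueryEmbedder('/tmp/query_embedder_saved_model', featurizer)
#   query_embeds = embedder.embed(queries)  # [len(queries), embed_dim] Tensor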
class DocumentEmbedder(object):
"""Embeds documents using TF Estimator.
Note: this only works with the REALM Hub modules. An ICT Hub module won't work
because it has a different set of signatures.
"""
def __init__(self, hub_module_spec, featurizer, use_tpu, run_config=None):
"""Constructs the DocumentEmbedder."""
if run_config is None:
if use_tpu:
raise ValueError('Must supply a RunConfig if use_tpu.')
else:
run_config = tf.estimator.tpu.RunConfig() # Just supply a default.
self._hub_module_spec = hub_module_spec
self._featurizer = featurizer
self._use_tpu = use_tpu
self._run_config = run_config
self._log_interval = 10 # When embedding, log every 10 seconds.
def embed(self, get_documents_dataset, total_docs, batch_size):
"""Embeds a Dataset of documents using Estimator.
Args:
get_documents_dataset: a function that returns a TF Dataset, where each
element is a dict with attributes described below.
total_docs: total number of documents in the Dataset.
batch_size: number of documents to embed in each batch. Each element in
the Dataset returned by get_documents_dataset should be a dict with the
attributes described below.
get_documents_dataset should return a Dataset over dicts, each containing at
least the following attributes:
- title_token_ids: a 1-D int Tensor.
- body_token_ids: a 1-D int Tensor.
Yields:
a [embed_dim] Numpy array, one for each document.
"""
if total_docs < 1:
raise ValueError('Must embed at least 1 document.')
# These hyperparams are passed to Estimator.
params = {
'vocab_path':
self._featurizer.tokenizer.vocab_path,
'do_lower_case':
self._featurizer.tokenizer.do_lower_case,
'query_seq_len':
self._featurizer.query_seq_len,
'candidate_seq_len':
self._featurizer.candidate_seq_len,
'num_candidates':
self._featurizer.num_candidates,
'max_masks':
self._featurizer.max_masks,
'separate_candidate_segments':
self._featurizer.separate_candidate_segments,
}
def input_fn(params):
"""Constructs the dataset fed to Estimator."""
# We cannot access self._featurizer via closure, because this function is
# passed to another device. Hence, we need to reconstruct the featurizer
# from its hyerparameters (passed through `params`).
tokenizer = featurization.Tokenizer(
vocab_path=params['vocab_path'],
do_lower_case=params['do_lower_case'])
featurizer = featurization.Featurizer(
query_seq_len=params['query_seq_len'],
candidate_seq_len=params['candidate_seq_len'],
num_candidates=params['num_candidates'],
max_masks=params['max_masks'],
tokenizer=tokenizer,
separate_candidate_segments=params['separate_candidate_segments'])
dataset = get_documents_dataset()
def featurize(doc_dict):
return featurizer.featurize_document_tf(doc_dict['title_token_ids'],
doc_dict['body_token_ids'])
dataset = dataset.map(
featurize, num_parallel_calls=tf.data.experimental.AUTOTUNE)
# Add a document index variable.
dataset = dataset.enumerate()
def _enumerate_to_dict(result_idx, tensor_dict):
return dict(tensor_dict, result_idx=result_idx)
dataset = dataset.map(
_enumerate_to_dict, num_parallel_calls=tf.data.experimental.AUTOTUNE)
# Pad the end of the dataset with one full extra batch.
# This ensures that we don't drop the remainder.
if total_docs % batch_size != 0:
# Pad using the first value of the dataset, repeated batch_size times.
pad_vals = dataset.take(1).repeat(batch_size)
dataset = dataset.concatenate(pad_vals)
# Batch the dataset.
dataset = dataset.batch(batch_size, drop_remainder=True)
dataset = dataset.prefetch(2) # Prefetch for efficiency.
return dataset
def model_fn(features, labels, mode, params):
"""Constructs the model used by Estimator."""
del labels, params
embedder_module = hub.Module(
spec=self._hub_module_spec, name='embedder', trainable=False)
# Remove the result_idx before feeding features to the module.
result_idx = features.pop('result_idx')
# [batch_size, embed_dim]
embeds = embedder_module(inputs=features, signature='projected')
return tf.estimator.tpu.TPUEstimatorSpec(
mode=mode, predictions={
'embeds': embeds,
'result_idx': result_idx
})
estimator = tf.estimator.tpu.TPUEstimator(
use_tpu=self._use_tpu,
model_fn=model_fn,
model_dir=None, # Don't persist model.
config=self._run_config,
params=params,
train_batch_size=batch_size,
predict_batch_size=batch_size)
logging.info('Embedding %d documents total.', total_docs)
predictions = estimator.predict(
input_fn=input_fn, yield_single_examples=True)
for result in yield_predictions_from_estimator(
predictions, total=total_docs, log_interval=self._log_interval):
yield result['embeds']
def yield_predictions_from_estimator(predictions, total, log_interval=10):
"""Yields predictions from Estimator.predict, with added error correction.
This function handles the case of Estimator.predict occasionally restarting,
causing results to be yielded out of order.
Args:
predictions: the return value of Estimator.predict. An iterable of dicts.
Each dict MUST have a 'result_idx' attribute, used to track result order.
total (int): total expected number of elements to yield from predictions.
log_interval: log every this many seconds.
Yields:
the same dicts yielded from Estimator.predict, but in the right order. The
result_idx element is removed from every dict.
"""
predictions_iter = iter(predictions)
total_yielded = 0
start_time = time.time()
last_log_timestamp = time.time()
while total_yielded < total:
try:
result = next(predictions_iter)
except StopIteration:
raise ValueError(
'Estimator.predict terminated before we got all results.')
result_idx = result.pop('result_idx')
if result_idx == total_yielded:
# If results are always emitted from Estimator.predict in the same
# order that they were fed into the Estimator, then we should always
# expect result_idx to equal total_yielded. However, this does not always
# happen, so we handle that in the `else` case below.
yield result
total_yielded += 1
# Log progress.
current_time = time.time()
if current_time - last_log_timestamp > log_interval:
total_time = current_time - start_time
log_msg = 'Yielded {} results in {:.2f} secs.'.format(
total_yielded, total_time)
logging.info(log_msg)
last_log_timestamp = current_time
else:
# If results start to arrive out of order, something has gone wrong.
if result_idx < total_yielded:
# This can happen if the TPU worker dies, causing Estimator.predict to
# restart from the beginning. In this case, we just don't yield
# anything on this step. Instead, we keep pulling things from the
# iterator until we are back to where we were.
if result_idx == 0:
logging.warning('TPU worker seems to have restarted.')
elif result_idx > total_yielded:
# Something has gone really wrong.
raise ValueError('Estimator.predict has somehow missed a result.')
def embed_documents_using_multiple_tpu_workers(
shard_paths, shard_sizes, hub_module_spec,
featurizer, tpu_workers,
batch_size, num_tpu_cores_per_worker):
"""Embeds documents using multiple TPU workers.
Args:
shard_paths: a list of file paths, each specifying a GZIP'd TFRecord file
containing documents stored as TF Examples. Doc embeddings will be
concatenated in the order of shard_paths.
shard_sizes: a list parallel to shard_paths, specifying the number of
documents in each shard.
hub_module_spec: path to the Hub module that will be used to embed the
documents.
featurizer: a Featurizer used to convert documents into Tensor features.
tpu_workers: list of addresses of available TPU workers.
batch_size: each TPU worker embeds documents in batches of this size.
num_tpu_cores_per_worker: number of cores to use on each TPU worker.
Returns:
a [total_docs, embed_dim] Numpy array.
"""
num_shards = len(shard_paths)
num_tpu_workers = len(tpu_workers)
tpu_config = tf.estimator.tpu.TPUConfig(
iterations_per_loop=1, # This seems to be ignored by predict().
num_shards=num_tpu_cores_per_worker)
# Distribute the data shards as evenly as possible among the workers.
num_shards_per_worker = [num_shards // num_tpu_workers] * num_tpu_workers
for worker_idx in range(num_shards % num_tpu_workers):
num_shards_per_worker[worker_idx] += 1
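  # Worked example: 10 shards across 4 workers gives [2, 2, 2, 2] from the
  # floor division, and the 10 % 4 = 2 extras go to the first two: [3, 3, 2, 2].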
worker_kwargs = []
shards_assigned = 0
for k, num_shards_k in enumerate(num_shards_per_worker):
worker_kwargs.append({
'tpu_run_config':
tf.estimator.tpu.RunConfig(
master=tpu_workers[k], tpu_config=tpu_config),
'shard_paths':
shard_paths[shards_assigned:shards_assigned + num_shards_k],
'shard_sizes':
shard_sizes[shards_assigned:shards_assigned + num_shards_k],
'hub_module_spec': hub_module_spec,
'featurizer': featurizer,
'batch_size': batch_size,
})
shards_assigned += num_shards_k
# All shards should be assigned.
assert shards_assigned == num_shards
# Run all workers in parallel via separate threads.
with futures.ThreadPoolExecutor(max_workers=num_tpu_workers) as executor:
# A list of [num_docs_per_worker, embed_dim] Numpy arrays.
embeds_list = list(
executor.map(lambda kwargs: embed_documents(**kwargs), worker_kwargs))
# A [total_docs, embed_dim] Numpy array.
embeds = np.concatenate(embeds_list, axis=0)
return embeds
def embed_documents(
shard_paths,
shard_sizes,
hub_module_spec,
featurizer,
batch_size,
tpu_run_config = None):
"""Embeds documents either locally (CPU/GPU) or with a TPU worker.
Note: TPUEstimator.predict currently requires the TPU worker to have a single
"host" (a machine running TensorFlow that is physically connected to the TPU
chips). This is not true for all TPU topologies -- some have multiple hosts.
Args:
shard_paths: a list of file paths, each specifying a GZIP'd TFRecord file
containing documents stored as TF Examples. Doc embeddings will be
concatenated in the order of shard_paths.
shard_sizes: a list parallel to shard_paths, specifying the number of
documents in each shard.
hub_module_spec: path to the Hub module that will be used to embed the
documents.
featurizer: a Featurizer used to convert documents into Tensor features.
batch_size: embed documents in batches of this size.
tpu_run_config: configures the TPU worker. If None, run on CPU/GPU.
Returns:
a [total_docs, embed_dim] Numpy array.
"""
embedder = DocumentEmbedder(
hub_module_spec=hub_module_spec,
featurizer=featurizer,
use_tpu=(tpu_run_config is not None),
run_config=tpu_run_config)
def parse_tf_example(serialized):
# FixedLenSequenceFeature requires allow_missing to be True, even though we
# can't actually handle those cases.
feature_spec = {
'title':
tf.FixedLenSequenceFeature([], tf.string, allow_missing=True),
'text':
tf.FixedLenSequenceFeature([], tf.string, allow_missing=True),
'title_token_ids':
tf.FixedLenSequenceFeature([], tf.int64, allow_missing=True),
'body_token_ids':
tf.FixedLenSequenceFeature([], tf.int64, allow_missing=True),
}
features = tf.parse_single_example(serialized, feature_spec)
# tf.Example only supports tf.int64, but the TPU only supports tf.int32.
# So cast all int64 to int32.
for name in list(features.keys()):
tensor = features[name]
if tensor.dtype == tf.int64:
tensor = tf.cast(tensor, tf.int32)
features[name] = tensor
return features
def get_documents_dataset():
# Note: num_parallel_reads should be None to guarantee that shard_paths
# are visited sequentially, not in parallel.
dataset = tf.data.TFRecordDataset(
shard_paths,
compression_type='GZIP',
buffer_size=8 * 1024 * 1024,
num_parallel_reads=None)
return dataset.map(
parse_tf_example, num_parallel_calls=tf.data.experimental.AUTOTUNE)
embeds = embedder.embed(
get_documents_dataset=get_documents_dataset,
total_docs=sum(shard_sizes),
batch_size=batch_size)
# A list of [embed_dim] Numpy arrays.
embeds_list = list(embeds)
# A [total_docs, embed_dim] Numpy array.
return np.stack(embeds_list, axis=0)
| [
"[email protected]"
] | |
82303825a36ae127081f7c965f2fa948b36e6fcc | d7ae8db44b31de83eabaf0e286b1452d4ada24ff | /IoT_Domain_Analyst_ECE_3502/Lab_3/Linear_Regression.py | 524229a56949837e42e249dd6a58236604882ea0 | [
"CC0-1.0"
] | permissive | eshan5/VIT-Labs | ae4c6719b86fb5e2f30e0f5a023171597cf33d42 | 5a20b9571a10b4550b886d588969592e595dac1d | refs/heads/main | 2023-08-24T06:50:23.888426 | 2021-10-09T10:18:32 | 2021-10-09T10:18:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | import numpy as np
from sklearn.linear_model import LinearRegression
x = np.array([5, 15, 25, 35, 45, 55]).reshape((-1, 1))
y = np.array([5, 20, 14, 32, 22, 38])
print(x)
print(y)
model = LinearRegression().fit(x, y)
r_sq = model.score(x, y)
print('coefficient of determination:', r_sq)
print('intercept:', model.intercept_)
print('slope:', model.coef_)
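# For this dataset the closed-form least-squares fit is y ~ 5.633 + 0.54 * x
# (slope = 945 / 1750 = 0.54), and the R^2 printed above is about 0.716.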
"""new_model = LinearRegression().fit(x, y.reshape((-1, 1)))
print('intercept:', new_model.intercept_)
print('slope:', new_model.coef_)
y_pred = model.predict(x)
print('predicted response:', y_pred, sep='\n')
y_pred = model.intercept_ + model.coef_ * x
print('predicted response:', y_pred, sep='\n')
x_new = np.arange(5).reshape((-1, 1))
print(" First few points of the line :")
print(x_new)
y_new = model.predict(x_new)
print(y_new)"""
| [
"[email protected]"
] | |
1882e6bd42af8f728c9d7796b25c44164b46c8a0 | d2915ef6ee9c1ea01f47d3468bba8e320a8f5914 | /design_patterns/behavioural/template_method.py | b4d81ca7739be92fcbe5d16b25c44164b46c8a0 | [] | no_license | asing177/python_basics | a269adbaf166fb760d2692874601528ef230bbbd | 48ce7d5d6356edbd9bc21f8ebb55ec95787d4340 | refs/heads/main | 2023-01-11T12:11:44.155102 | 2020-11-13T07:24:54 | 2020-11-13T07:24:54 | 300,806,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,123 | py | from abc import ABC, abstractmethod
class AbstractClass(ABC):
"""
The Abstract Class defines a template method that contains a skeleton of
some algorithm, composed of calls to (usually) abstract primitive
operations.
Concrete subclasses should implement these operations, but leave the
template method itself intact.
"""
def template_method(self) -> None:
"""
The template method defines the skeleton of an algorithm.
"""
self.base_operation1()
self.required_operations1()
self.base_operation2()
self.hook1()
self.required_operations2()
self.base_operation3()
self.hook2()
# These operations already have implementations.
def base_operation1(self) -> None:
print("AbstractClass says: I am doing the bulk of the work")
def base_operation2(self) -> None:
print("AbstractClass says: But I let subclasses override some operations")
def base_operation3(self) -> None:
print("AbstractClass says: But I am doing the bulk of the work anyway")
# These operations have to be implemented in subclasses.
@abstractmethod
def required_operations1(self) -> None:
pass
@abstractmethod
def required_operations2(self) -> None:
pass
# These are "hooks." Subclasses may override them, but it's not mandatory
# since the hooks already have default (but empty) implementation. Hooks
# provide additional extension points in some crucial places of the
# algorithm.
def hook1(self) -> None:
pass
def hook2(self) -> None:
pass
class ConcreteClass1(AbstractClass):
"""
Concrete classes have to implement all abstract operations of the base
class. They can also override some operations with a default implementation.
"""
def required_operations1(self) -> None:
print("ConcreteClass1 says: Implemented Operation1")
def required_operations2(self) -> None:
print("ConcreteClass1 says: Implemented Operation2")
class ConcreteClass2(AbstractClass):
"""
Usually, concrete classes override only a fraction of base class'
operations.
"""
def required_operations1(self) -> None:
print("ConcreteClass2 says: Implemented Operation1")
def required_operations2(self) -> None:
print("ConcreteClass2 says: Implemented Operation2")
def hook1(self) -> None:
print("ConcreteClass2 says: Overridden Hook1")
def client_code(abstract_class: AbstractClass) -> None:
"""
The client code calls the template method to execute the algorithm. Client
code does not have to know the concrete class of an object it works with, as
long as it works with objects through the interface of their base class.
"""
# ...
abstract_class.template_method()
# ...
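# A sketch of the expected trace for the ConcreteClass2 call below: the three
# base operations print in template order, Hook1 prints its overridden message,
# and hook2 stays silent because its default implementation is empty.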
if __name__ == "__main__":
print("Same client code can work with different subclasses:")
client_code(ConcreteClass1())
print("")
print("Same client code can work with different subclasses:")
client_code(ConcreteClass2()) | [
"[email protected]"
] | |
1d4a7962f047e1507edd5b010afde2fc751120b8 | e400d4a141f35bc4240293253048535f1e737d4e | /src/03_IPhreeqcPy/02_phreeqc_mixing_CSH.py | 0ef9294bb86ad1f2be65ad009b6c572debf6e331 | [] | no_license | annavarzina/carbonation | 94416935f92cdfb1874c61407c8d1909178bd6c9 | 030b222f000d79538e9890fb9047d57ced7bad2d | refs/heads/master | 2021-06-23T07:33:20.147869 | 2021-03-02T13:29:34 | 2021-03-02T13:29:34 | 193,922,887 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,356 | py | import numpy as np
import matplotlib.pylab as plt
from mixing import PhreeqcMixing
from kinetics import PhreeqcKinetics
class PhreeqcMixingCSH(PhreeqcMixing):
def __init__(self, n, fraction, csh, database):
self.phase = csh['name']
self.csh = csh
self.steps = n
self.fraction = fraction
self.database = database
self.phrqc_input = []
self.selected_output = []
self.phrqc_string = ''
self.simulation_time = 0
def generate_phrqc_string(self):
self.phases()
self.solution_1()
self.solution_2()
for i in np.arange(0, self.steps):
self.mix_2()
self.selected_output_1()
self.user_punch()
self.mix_3()
self.phrqc_string = '\n'.join(self.phrqc_input)
def phases(self):
phrqc_input = []
# CSH stochiometry
s = self.csh['stochiometry']
h = s['H+']
h2o = s['H2O'] + s['H+'] - s['Ca']
sign1 = '+'
if h < 0:
sign1 = '-'
h *= -1
sign2 = '+'
if h2o < 0:
sign2 = '-'
h2o *= -1
# input
phrqc_input.append('PHASES')
phrqc_input.append(self.phase)
phrqc_input.append('\t(CaO)' + str(s['Ca']) +'(SiO2)'+ str(s['Si']) + \
'(H2O)' + str(s['H2O']) + ' ' + sign1 + ' ' + str(h) + 'H+ = ' + \
str(s['Ca']) + 'Ca+2 + ' + str(s['Si']) + 'SiO2 ' + sign2 +\
' ' + str(h2o) + ' H2O')
#phrqc_input.append('\t-Vm\t' + str(csh['vm']) )
phrqc_input.append('\t-log_K\t' + str(self.csh['log_k']) + '\n')
self.phrqc_input += phrqc_input
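        # For the csh dict defined below (Ca 1.67, Si 1.0, H2O 4.34, H+ 3.34),
        # h = 3.34 and h2o = 4.34 + 3.34 - 1.67 = 6.01, so the emitted reaction
        # is roughly: (CaO)1.67(SiO2)1.0(H2O)4.34 + 3.34H+ = 1.67Ca+2 + 1.0SiO2 + 6.01 H2O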
def user_punch(self):
phrqc_input = []
phrqc_input.append('USER_PUNCH')
phrqc_input.append('\t-headings\tCa\t' + self.phase)
phrqc_input.append('\t-start')
phrqc_input.append('\t10\tpunch\ttot("Ca")')
phrqc_input.append('\t15\tpunch\ttot("Si")')
phrqc_input.append('\t20\tpunch\ttot("' + self.phase + '")')
phrqc_input.append('\t30\tpunch')
phrqc_input.append('\t-end')
phrqc_input.append('END')
self.phrqc_input += phrqc_input
#%% PARAMETERS
database = 'C:\Anaconda2\lib\site-packages\databases\cemdata18.dat'
csh = {'name':'CSH', 'stochiometry':{'Ca':1.67, 'Si':1.0, 'H2O':4.34, 'H+':3.34}, 'log_k':29.133,}
n = 400000 # time should be ~10 minutes
krate = 10**(-8.0) #1e-7
s = 800#scale factor
fraction = krate * s
print('Kinetic rate = ' + str(krate))
print('Mixing fraction = ' + str(fraction))
#%% RUN
pm = PhreeqcMixingCSH(n, fraction, csh, database)
pm.run_phreeqc()
print('Mixing fraction simulation time = ' + str(pm.simulation_time))
#%% PLOT
h = 1
t = range(1, n+1)
t = [i/3600. for i in t]
ca_m = []
si_m = []
for i in range(len(pm.selected_output)):
if pm.selected_output[i][0]==3:
ca_m.append(pm.selected_output[i][1])
si_m.append(pm.selected_output[i][2])
plt.figure()
plt.plot(t, ca_m, label = "mix")
plt.xlabel('time (h)')
plt.ylabel('Ca (mol/l)')
plt.legend()
plt.figure()
plt.plot(t, si_m, label = "mix")
plt.xlabel('time (h)')
plt.ylabel('Si (mol/l)')
plt.legend()
plt.show()
| [
"[email protected]"
] | |
913e406199d7adf3fcacb33850752f52a57881fa | 69e5f24fa12346f892b1c907e802286045b3641f | /train.py | c17b2ccaaaaae82b11f58306d9b719d7f6098609 | [] | no_license | hope-yao/failed_adversarial_training | 0cf9d05333767756134db1eb8ea2424ace8449c9 | be87e05b59aaeecec9001c1d6ae69afcf9382c1d | refs/heads/master | 2020-04-01T19:04:32.433080 | 2018-10-17T22:39:48 | 2018-10-17T22:39:48 | 153,532,414 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,181 | py | """Trains a model, saving checkpoints and tensorboard summaries along
the way."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from datetime import datetime
import json
import os
import shutil
from timeit import default_timer as timer
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
from model import Model
from pgd_attack import LinfPGDAttack
with open('config.json') as config_file:
config = json.load(config_file)
# Setting up training parameters
tf.set_random_seed(config['random_seed'])
max_num_training_steps = config['max_num_training_steps']
num_output_steps = config['num_output_steps']
num_summary_steps = config['num_summary_steps']
num_checkpoint_steps = config['num_checkpoint_steps']
batch_size = config['training_batch_size']
# Setting up the data and the model
mnist = input_data.read_data_sets('MNIST_data', one_hot=False)
global_step = tf.contrib.framework.get_or_create_global_step()
model = Model()
# Setting up the optimizer
train_step = tf.train.AdamOptimizer(1e-4).minimize(model.xent,
global_step=global_step)
# Set up adversary
attack = LinfPGDAttack(model,
config['epsilon'],
config['k'],
config['a'],
config['random_start'],
config['loss_func'])
# Setting up the Tensorboard and checkpoint outputs
model_dir = config['model_dir']
if not os.path.exists(model_dir):
os.makedirs(model_dir)
# We add accuracy and xent twice so we can easily make three types of
# comparisons in Tensorboard:
# - train vs eval (for a single run)
# - train of different runs
# - eval of different runs
saver = tf.train.Saver(max_to_keep=3)
tf.summary.scalar('accuracy adv train', model.accuracy)
tf.summary.scalar('accuracy adv', model.accuracy)
tf.summary.scalar('xent adv train', model.xent / batch_size)
tf.summary.scalar('xent adv', model.xent / batch_size)
tf.summary.image('images adv train', model.x_image)
merged_summaries = tf.summary.merge_all()
shutil.copy('config.json', model_dir)
with tf.Session() as sess:
# Initialize the summary writer, global variables, and our time counter.
summary_writer = tf.summary.FileWriter(model_dir, sess.graph)
sess.run(tf.global_variables_initializer())
# saver.restore(sess,'/home/hope-yao/Documents/madrys_code/mnist_challenge/models/a_very_robust_model_run2/checkpoint-99900')
training_time = 0.0
# Main training loop
for ii in range(max_num_training_steps):
if ii%10000 == 0:
num_adv_batch = 1000
x_pool_nat = np.zeros((num_adv_batch * batch_size, 784))
x_pool_adv = np.zeros((num_adv_batch * batch_size, 784))
y_pool = np.zeros((num_adv_batch * batch_size))
from tqdm import tqdm
for jj in tqdm(range(num_adv_batch)):
x_batch, y_batch = mnist.train.next_batch(batch_size)
x_batch_adv = attack.perturb(x_batch, y_batch, sess)
x_pool_nat[jj * batch_size:(jj + 1) * batch_size] = x_batch
x_pool_adv[jj * batch_size:(jj + 1) * batch_size] = x_batch_adv
y_pool[jj * batch_size:(jj + 1) * batch_size] = y_batch
np.save('x_pool_adv_itr{}'.format(ii), x_pool_adv)
np.save('x_pool_nat_itr{}'.format(ii), x_pool_nat)
np.save('y_pool_itr{}'.format(ii), y_pool)
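      # The pool built above caches num_adv_batch (1000) adversarial batches;
      # the ii % 1000 indexing below cycles through it, so each cached batch is
      # reused about 10 times before the pool is rebuilt at the next
      # 10000-step boundary; this trades attack freshness for training speed.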
# x_batch, y_batch = mnist.train.next_batch(batch_size)
# # Compute Adversarial Perturbations
# start = timer()
# x_batch_adv = attack.perturb(x_batch, y_batch, sess)
# end = timer()
# training_time += end - start
x_batch = x_pool_nat[ii%1000 * batch_size:(ii%1000 + 1) * batch_size]
x_batch_adv = x_pool_adv[ii%1000 * batch_size:(ii%1000 + 1) * batch_size]
y_batch = y_pool[ii%1000 * batch_size:(ii%1000 + 1) * batch_size]
nat_dict = {model.x_input: x_batch,
model.y_input: y_batch}
adv_dict = {model.x_input: x_batch_adv,
model.y_input: y_batch}
# Output to stdout
if ii % num_output_steps == 0:
nat_acc = sess.run(model.accuracy, feed_dict=nat_dict)
adv_acc = sess.run(model.accuracy, feed_dict=adv_dict)
print('Step {}: ({})'.format(ii, datetime.now()))
print(' training nat accuracy {:.4}%'.format(nat_acc * 100))
print(' training adv accuracy {:.4}%'.format(adv_acc * 100))
if ii != 0:
print(' {} examples per second'.format(
num_output_steps * batch_size / training_time))
training_time = 0.0
# Tensorboard summaries
if ii % num_summary_steps == 0:
summary = sess.run(merged_summaries, feed_dict=adv_dict)
summary_writer.add_summary(summary, global_step.eval(sess))
# Write a checkpoint
if ii % num_checkpoint_steps == 0:
saver.save(sess,
os.path.join(model_dir, 'checkpoint'),
global_step=global_step)
# Actual training step
start = timer()
for jj in range(5):
sess.run(train_step, feed_dict=adv_dict)
end = timer()
training_time += end - start
| [
"[email protected]"
] | |
03fa270be63af49d803b50f06e2f566610bf1159 | 1c962341f3b580f2be0529a2d5804d49804470f6 | /judge_2152.py | a4cdbad8d0c1c0d6e41d3a7a54609469d3035777 | [] | no_license | andersonmarquees/-uri_python | 7bc14b50198bd238f9594b37a86553ecfb277f76 | 379518cd17433725d6a859526de356162b26aa40 | refs/heads/master | 2020-05-05T09:08:51.483638 | 2019-04-14T16:42:24 | 2019-04-14T16:42:24 | 179,892,376 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,090 | py | n = int(input())
while n > 0:
    hour, minute, event = map(int, input().split())
    # event 1 means the door opened, 0 that it closed; the strings stay in
    # Portuguese because the judge compares the output verbatim
    status = "A porta abriu!" if event == 1 else "A porta fechou!"
    # {:02d} zero-pads values below 10, covering all eight original branches
    print("{:02d}:{:02d} - {}".format(hour, minute, status))
    n -= 1
| [
"[email protected]"
] | |
b189d011d6657ef5e6f9b4e1061f09ba5eb4c1a7 | 3a891a79be468621aae43defd9a5516f9763f36e | /apps/beeswax/gen-py/TCLIService/TCLIService.py | 3bf8cee42c53891d5ab95aa26926dd4078f58c7f | [
"Apache-2.0"
] | permissive | oyorooms/hue | b53eb87f805063a90f957fd2e1733f21406269aa | 4082346ef8d5e6a8365b05752be41186840dc868 | refs/heads/master | 2020-04-15T20:31:56.931218 | 2019-01-09T19:02:21 | 2019-01-09T19:05:36 | 164,998,117 | 4 | 2 | Apache-2.0 | 2019-01-10T05:47:36 | 2019-01-10T05:47:36 | null | UTF-8 | Python | false | true | 104,941 | py | #
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface(object):
def OpenSession(self, req):
"""
Parameters:
- req
"""
pass
def CloseSession(self, req):
"""
Parameters:
- req
"""
pass
def GetInfo(self, req):
"""
Parameters:
- req
"""
pass
def ExecuteStatement(self, req):
"""
Parameters:
- req
"""
pass
def GetTypeInfo(self, req):
"""
Parameters:
- req
"""
pass
def GetCatalogs(self, req):
"""
Parameters:
- req
"""
pass
def GetSchemas(self, req):
"""
Parameters:
- req
"""
pass
def GetTables(self, req):
"""
Parameters:
- req
"""
pass
def GetTableTypes(self, req):
"""
Parameters:
- req
"""
pass
def GetColumns(self, req):
"""
Parameters:
- req
"""
pass
def GetFunctions(self, req):
"""
Parameters:
- req
"""
pass
def GetOperationStatus(self, req):
"""
Parameters:
- req
"""
pass
def CancelOperation(self, req):
"""
Parameters:
- req
"""
pass
def CloseOperation(self, req):
"""
Parameters:
- req
"""
pass
def GetResultSetMetadata(self, req):
"""
Parameters:
- req
"""
pass
def FetchResults(self, req):
"""
Parameters:
- req
"""
pass
def GetDelegationToken(self, req):
"""
Parameters:
- req
"""
pass
def CancelDelegationToken(self, req):
"""
Parameters:
- req
"""
pass
def RenewDelegationToken(self, req):
"""
Parameters:
- req
"""
pass
def GetLog(self, req):
"""
Parameters:
- req
"""
pass
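# A minimal, hypothetical client sketch (host, port, and request wiring are
# illustrative; only the class names come from this module and thrift):
#   from thrift.transport import TSocket
#   from TCLIService.ttypes import TOpenSessionReq
#   transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 10000))
#   protocol = TBinaryProtocol.TBinaryProtocol(transport)
#   client = Client(protocol)
#   transport.open()
#   resp = client.OpenSession(TOpenSessionReq())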
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def OpenSession(self, req):
"""
Parameters:
- req
"""
self.send_OpenSession(req)
return self.recv_OpenSession()
def send_OpenSession(self, req):
self._oprot.writeMessageBegin('OpenSession', TMessageType.CALL, self._seqid)
args = OpenSession_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_OpenSession(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = OpenSession_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "OpenSession failed: unknown result");
def CloseSession(self, req):
"""
Parameters:
- req
"""
self.send_CloseSession(req)
return self.recv_CloseSession()
def send_CloseSession(self, req):
self._oprot.writeMessageBegin('CloseSession', TMessageType.CALL, self._seqid)
args = CloseSession_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_CloseSession(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = CloseSession_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "CloseSession failed: unknown result");
def GetInfo(self, req):
"""
Parameters:
- req
"""
self.send_GetInfo(req)
return self.recv_GetInfo()
def send_GetInfo(self, req):
self._oprot.writeMessageBegin('GetInfo', TMessageType.CALL, self._seqid)
args = GetInfo_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetInfo(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetInfo_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetInfo failed: unknown result");
def ExecuteStatement(self, req):
"""
Parameters:
- req
"""
self.send_ExecuteStatement(req)
return self.recv_ExecuteStatement()
def send_ExecuteStatement(self, req):
self._oprot.writeMessageBegin('ExecuteStatement', TMessageType.CALL, self._seqid)
args = ExecuteStatement_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ExecuteStatement(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = ExecuteStatement_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "ExecuteStatement failed: unknown result");
def GetTypeInfo(self, req):
"""
Parameters:
- req
"""
self.send_GetTypeInfo(req)
return self.recv_GetTypeInfo()
def send_GetTypeInfo(self, req):
self._oprot.writeMessageBegin('GetTypeInfo', TMessageType.CALL, self._seqid)
args = GetTypeInfo_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetTypeInfo(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetTypeInfo_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTypeInfo failed: unknown result");
def GetCatalogs(self, req):
"""
Parameters:
- req
"""
self.send_GetCatalogs(req)
return self.recv_GetCatalogs()
def send_GetCatalogs(self, req):
self._oprot.writeMessageBegin('GetCatalogs', TMessageType.CALL, self._seqid)
args = GetCatalogs_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetCatalogs(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetCatalogs_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetCatalogs failed: unknown result");
def GetSchemas(self, req):
"""
Parameters:
- req
"""
self.send_GetSchemas(req)
return self.recv_GetSchemas()
def send_GetSchemas(self, req):
self._oprot.writeMessageBegin('GetSchemas', TMessageType.CALL, self._seqid)
args = GetSchemas_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetSchemas(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetSchemas_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetSchemas failed: unknown result");
def GetTables(self, req):
"""
Parameters:
- req
"""
self.send_GetTables(req)
return self.recv_GetTables()
def send_GetTables(self, req):
self._oprot.writeMessageBegin('GetTables', TMessageType.CALL, self._seqid)
args = GetTables_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetTables(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetTables_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTables failed: unknown result");
def GetTableTypes(self, req):
"""
Parameters:
- req
"""
self.send_GetTableTypes(req)
return self.recv_GetTableTypes()
def send_GetTableTypes(self, req):
self._oprot.writeMessageBegin('GetTableTypes', TMessageType.CALL, self._seqid)
args = GetTableTypes_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetTableTypes(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetTableTypes_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetTableTypes failed: unknown result")
def GetColumns(self, req):
"""
Parameters:
- req
"""
self.send_GetColumns(req)
return self.recv_GetColumns()
def send_GetColumns(self, req):
self._oprot.writeMessageBegin('GetColumns', TMessageType.CALL, self._seqid)
args = GetColumns_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetColumns(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetColumns_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetColumns failed: unknown result")
def GetFunctions(self, req):
"""
Parameters:
- req
"""
self.send_GetFunctions(req)
return self.recv_GetFunctions()
def send_GetFunctions(self, req):
self._oprot.writeMessageBegin('GetFunctions', TMessageType.CALL, self._seqid)
args = GetFunctions_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetFunctions(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetFunctions_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetFunctions failed: unknown result")
def GetOperationStatus(self, req):
"""
Parameters:
- req
"""
self.send_GetOperationStatus(req)
return self.recv_GetOperationStatus()
def send_GetOperationStatus(self, req):
self._oprot.writeMessageBegin('GetOperationStatus', TMessageType.CALL, self._seqid)
args = GetOperationStatus_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetOperationStatus(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetOperationStatus_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetOperationStatus failed: unknown result")
def CancelOperation(self, req):
"""
Parameters:
- req
"""
self.send_CancelOperation(req)
return self.recv_CancelOperation()
def send_CancelOperation(self, req):
self._oprot.writeMessageBegin('CancelOperation', TMessageType.CALL, self._seqid)
args = CancelOperation_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_CancelOperation(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = CancelOperation_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "CancelOperation failed: unknown result")
def CloseOperation(self, req):
"""
Parameters:
- req
"""
self.send_CloseOperation(req)
return self.recv_CloseOperation()
def send_CloseOperation(self, req):
self._oprot.writeMessageBegin('CloseOperation', TMessageType.CALL, self._seqid)
args = CloseOperation_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_CloseOperation(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = CloseOperation_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "CloseOperation failed: unknown result")
def GetResultSetMetadata(self, req):
"""
Parameters:
- req
"""
self.send_GetResultSetMetadata(req)
return self.recv_GetResultSetMetadata()
def send_GetResultSetMetadata(self, req):
self._oprot.writeMessageBegin('GetResultSetMetadata', TMessageType.CALL, self._seqid)
args = GetResultSetMetadata_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetResultSetMetadata(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetResultSetMetadata_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetResultSetMetadata failed: unknown result")
def FetchResults(self, req):
"""
Parameters:
- req
"""
self.send_FetchResults(req)
return self.recv_FetchResults()
def send_FetchResults(self, req):
self._oprot.writeMessageBegin('FetchResults', TMessageType.CALL, self._seqid)
args = FetchResults_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_FetchResults(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = FetchResults_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "FetchResults failed: unknown result")
def GetDelegationToken(self, req):
"""
Parameters:
- req
"""
self.send_GetDelegationToken(req)
return self.recv_GetDelegationToken()
def send_GetDelegationToken(self, req):
self._oprot.writeMessageBegin('GetDelegationToken', TMessageType.CALL, self._seqid)
args = GetDelegationToken_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetDelegationToken(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetDelegationToken_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetDelegationToken failed: unknown result")
def CancelDelegationToken(self, req):
"""
Parameters:
- req
"""
self.send_CancelDelegationToken(req)
return self.recv_CancelDelegationToken()
def send_CancelDelegationToken(self, req):
self._oprot.writeMessageBegin('CancelDelegationToken', TMessageType.CALL, self._seqid)
args = CancelDelegationToken_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_CancelDelegationToken(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = CancelDelegationToken_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "CancelDelegationToken failed: unknown result")
def RenewDelegationToken(self, req):
"""
Parameters:
- req
"""
self.send_RenewDelegationToken(req)
return self.recv_RenewDelegationToken()
def send_RenewDelegationToken(self, req):
self._oprot.writeMessageBegin('RenewDelegationToken', TMessageType.CALL, self._seqid)
args = RenewDelegationToken_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_RenewDelegationToken(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = RenewDelegationToken_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "RenewDelegationToken failed: unknown result")
def GetLog(self, req):
"""
Parameters:
- req
"""
self.send_GetLog(req)
return self.recv_GetLog()
def send_GetLog(self, req):
self._oprot.writeMessageBegin('GetLog', TMessageType.CALL, self._seqid)
args = GetLog_args()
args.req = req
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_GetLog(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = GetLog_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "GetLog failed: unknown result")
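# Illustrative client wiring (a minimal sketch, not part of the generated
# module; the host, port, and request fields below are assumptions):
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#
#   socket = TSocket.TSocket('localhost', 10000)       # assumed HiveServer2 endpoint
#   transport = TTransport.TBufferedTransport(socket)
#   client = Client(TBinaryProtocol.TBinaryProtocol(transport))
#   transport.open()
#   resp = client.OpenSession(TOpenSessionReq())
#   client.CloseSession(TCloseSessionReq(sessionHandle=resp.sessionHandle))
#   transport.close()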
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["OpenSession"] = Processor.process_OpenSession
self._processMap["CloseSession"] = Processor.process_CloseSession
self._processMap["GetInfo"] = Processor.process_GetInfo
self._processMap["ExecuteStatement"] = Processor.process_ExecuteStatement
self._processMap["GetTypeInfo"] = Processor.process_GetTypeInfo
self._processMap["GetCatalogs"] = Processor.process_GetCatalogs
self._processMap["GetSchemas"] = Processor.process_GetSchemas
self._processMap["GetTables"] = Processor.process_GetTables
self._processMap["GetTableTypes"] = Processor.process_GetTableTypes
self._processMap["GetColumns"] = Processor.process_GetColumns
self._processMap["GetFunctions"] = Processor.process_GetFunctions
self._processMap["GetOperationStatus"] = Processor.process_GetOperationStatus
self._processMap["CancelOperation"] = Processor.process_CancelOperation
self._processMap["CloseOperation"] = Processor.process_CloseOperation
self._processMap["GetResultSetMetadata"] = Processor.process_GetResultSetMetadata
self._processMap["FetchResults"] = Processor.process_FetchResults
self._processMap["GetDelegationToken"] = Processor.process_GetDelegationToken
self._processMap["CancelDelegationToken"] = Processor.process_CancelDelegationToken
self._processMap["RenewDelegationToken"] = Processor.process_RenewDelegationToken
self._processMap["GetLog"] = Processor.process_GetLog
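# process() reads the next message header, looks the method name up in
# _processMap, and replies with an UNKNOWN_METHOD exception frame when the
# name is not registered; otherwise the matching process_X method below
# deserializes the X_args wrapper, invokes the handler, and writes a REPLY.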
def process(self, iprot, oprot):
(name, mtype, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_OpenSession(self, seqid, iprot, oprot):
args = OpenSession_args()
args.read(iprot)
iprot.readMessageEnd()
result = OpenSession_result()
result.success = self._handler.OpenSession(args.req)
oprot.writeMessageBegin("OpenSession", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_CloseSession(self, seqid, iprot, oprot):
args = CloseSession_args()
args.read(iprot)
iprot.readMessageEnd()
result = CloseSession_result()
result.success = self._handler.CloseSession(args.req)
oprot.writeMessageBegin("CloseSession", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetInfo(self, seqid, iprot, oprot):
args = GetInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetInfo_result()
result.success = self._handler.GetInfo(args.req)
oprot.writeMessageBegin("GetInfo", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_ExecuteStatement(self, seqid, iprot, oprot):
args = ExecuteStatement_args()
args.read(iprot)
iprot.readMessageEnd()
result = ExecuteStatement_result()
result.success = self._handler.ExecuteStatement(args.req)
oprot.writeMessageBegin("ExecuteStatement", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetTypeInfo(self, seqid, iprot, oprot):
args = GetTypeInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetTypeInfo_result()
result.success = self._handler.GetTypeInfo(args.req)
oprot.writeMessageBegin("GetTypeInfo", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetCatalogs(self, seqid, iprot, oprot):
args = GetCatalogs_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetCatalogs_result()
result.success = self._handler.GetCatalogs(args.req)
oprot.writeMessageBegin("GetCatalogs", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetSchemas(self, seqid, iprot, oprot):
args = GetSchemas_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetSchemas_result()
result.success = self._handler.GetSchemas(args.req)
oprot.writeMessageBegin("GetSchemas", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetTables(self, seqid, iprot, oprot):
args = GetTables_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetTables_result()
result.success = self._handler.GetTables(args.req)
oprot.writeMessageBegin("GetTables", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetTableTypes(self, seqid, iprot, oprot):
args = GetTableTypes_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetTableTypes_result()
result.success = self._handler.GetTableTypes(args.req)
oprot.writeMessageBegin("GetTableTypes", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetColumns(self, seqid, iprot, oprot):
args = GetColumns_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetColumns_result()
result.success = self._handler.GetColumns(args.req)
oprot.writeMessageBegin("GetColumns", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetFunctions(self, seqid, iprot, oprot):
args = GetFunctions_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetFunctions_result()
result.success = self._handler.GetFunctions(args.req)
oprot.writeMessageBegin("GetFunctions", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetOperationStatus(self, seqid, iprot, oprot):
args = GetOperationStatus_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetOperationStatus_result()
result.success = self._handler.GetOperationStatus(args.req)
oprot.writeMessageBegin("GetOperationStatus", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_CancelOperation(self, seqid, iprot, oprot):
args = CancelOperation_args()
args.read(iprot)
iprot.readMessageEnd()
result = CancelOperation_result()
result.success = self._handler.CancelOperation(args.req)
oprot.writeMessageBegin("CancelOperation", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_CloseOperation(self, seqid, iprot, oprot):
args = CloseOperation_args()
args.read(iprot)
iprot.readMessageEnd()
result = CloseOperation_result()
result.success = self._handler.CloseOperation(args.req)
oprot.writeMessageBegin("CloseOperation", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetResultSetMetadata(self, seqid, iprot, oprot):
args = GetResultSetMetadata_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetResultSetMetadata_result()
result.success = self._handler.GetResultSetMetadata(args.req)
oprot.writeMessageBegin("GetResultSetMetadata", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_FetchResults(self, seqid, iprot, oprot):
args = FetchResults_args()
args.read(iprot)
iprot.readMessageEnd()
result = FetchResults_result()
result.success = self._handler.FetchResults(args.req)
oprot.writeMessageBegin("FetchResults", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetDelegationToken(self, seqid, iprot, oprot):
args = GetDelegationToken_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetDelegationToken_result()
result.success = self._handler.GetDelegationToken(args.req)
oprot.writeMessageBegin("GetDelegationToken", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_CancelDelegationToken(self, seqid, iprot, oprot):
args = CancelDelegationToken_args()
args.read(iprot)
iprot.readMessageEnd()
result = CancelDelegationToken_result()
result.success = self._handler.CancelDelegationToken(args.req)
oprot.writeMessageBegin("CancelDelegationToken", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_RenewDelegationToken(self, seqid, iprot, oprot):
args = RenewDelegationToken_args()
args.read(iprot)
iprot.readMessageEnd()
result = RenewDelegationToken_result()
result.success = self._handler.RenewDelegationToken(args.req)
oprot.writeMessageBegin("RenewDelegationToken", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_GetLog(self, seqid, iprot, oprot):
args = GetLog_args()
args.read(iprot)
iprot.readMessageEnd()
result = GetLog_result()
result.success = self._handler.GetLog(args.req)
oprot.writeMessageBegin("GetLog", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
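# Illustrative server wiring (a minimal sketch under assumed names; the
# handler class and port below are not part of this module):
#
#   from thrift.server import TServer
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#
#   handler = MyTCLIServiceHandler()   # hypothetical Iface implementation
#   server = TServer.TSimpleServer(Processor(handler),
#                                  TSocket.TServerSocket(port=10000),
#                                  TTransport.TBufferedTransportFactory(),
#                                  TBinaryProtocol.TBinaryProtocolFactory())
#   server.serve()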
# HELPER FUNCTIONS AND STRUCTURES
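# Each RPC X gets a pair of wrapper structs: X_args holds the request under
# field id 1 and X_result holds the response under field id 0 ('success').
# Every thrift_spec entry is a tuple of (field id, wire type, field name,
# nested type spec, default value); index 0 is padded with None in the *_args
# specs because Thrift field ids start at 1.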
class OpenSession_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TOpenSessionReq, TOpenSessionReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
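# Fast path: when the accelerated binary protocol, a C-readable transport,
# and the compiled fastbinary extension are all available, decode the whole
# struct in one native call; otherwise fall through to the generic
# field-by-field loop below.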
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TOpenSessionReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('OpenSession_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
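# The __repr__/__eq__/__ne__ helpers above are stamped onto every generated
# struct in this module; note that __dict__.iteritems() ties the file to
# Python 2.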
class OpenSession_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TOpenSessionResp, TOpenSessionResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TOpenSessionResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('OpenSession_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CloseSession_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TCloseSessionReq, TCloseSessionReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TCloseSessionReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CloseSession_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CloseSession_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TCloseSessionResp, TCloseSessionResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TCloseSessionResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CloseSession_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetInfo_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetInfoReq, TGetInfoReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetInfoReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetInfo_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetInfo_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetInfoResp, TGetInfoResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetInfoResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ExecuteStatement_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TExecuteStatementReq, TExecuteStatementReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TExecuteStatementReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ExecuteStatement_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ExecuteStatement_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TExecuteStatementResp, TExecuteStatementResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TExecuteStatementResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ExecuteStatement_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetTypeInfo_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetTypeInfoReq, TGetTypeInfoReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetTypeInfoReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetTypeInfo_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetTypeInfo_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetTypeInfoResp, TGetTypeInfoResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetTypeInfoResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetTypeInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetCatalogs_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetCatalogsReq, TGetCatalogsReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetCatalogsReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetCatalogs_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetCatalogs_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetCatalogsResp, TGetCatalogsResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetCatalogsResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetCatalogs_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetSchemas_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetSchemasReq, TGetSchemasReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetSchemasReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetSchemas_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetSchemas_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetSchemasResp, TGetSchemasResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetSchemasResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetSchemas_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetTables_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetTablesReq, TGetTablesReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetTablesReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetTables_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetTables_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetTablesResp, TGetTablesResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetTablesResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetTables_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetTableTypes_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetTableTypesReq, TGetTableTypesReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetTableTypesReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetTableTypes_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetTableTypes_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetTableTypesResp, TGetTableTypesResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetTableTypesResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetTableTypes_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetColumns_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetColumnsReq, TGetColumnsReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetColumnsReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetColumns_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetColumns_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetColumnsResp, TGetColumnsResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetColumnsResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetColumns_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetFunctions_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetFunctionsReq, TGetFunctionsReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetFunctionsReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetFunctions_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetFunctions_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetFunctionsResp, TGetFunctionsResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetFunctionsResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetFunctions_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetOperationStatus_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetOperationStatusReq, TGetOperationStatusReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetOperationStatusReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetOperationStatus_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetOperationStatus_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetOperationStatusResp, TGetOperationStatusResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetOperationStatusResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetOperationStatus_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CancelOperation_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TCancelOperationReq, TCancelOperationReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TCancelOperationReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CancelOperation_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CancelOperation_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TCancelOperationResp, TCancelOperationResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TCancelOperationResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CancelOperation_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CloseOperation_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TCloseOperationReq, TCloseOperationReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TCloseOperationReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CloseOperation_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CloseOperation_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TCloseOperationResp, TCloseOperationResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TCloseOperationResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CloseOperation_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetResultSetMetadata_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetResultSetMetadataReq, TGetResultSetMetadataReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetResultSetMetadataReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetResultSetMetadata_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetResultSetMetadata_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetResultSetMetadataResp, TGetResultSetMetadataResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetResultSetMetadataResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetResultSetMetadata_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class FetchResults_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TFetchResultsReq, TFetchResultsReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TFetchResultsReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('FetchResults_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class FetchResults_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TFetchResultsResp, TFetchResultsResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TFetchResultsResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('FetchResults_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetDelegationToken_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetDelegationTokenReq, TGetDelegationTokenReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetDelegationTokenReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetDelegationToken_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetDelegationToken_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetDelegationTokenResp, TGetDelegationTokenResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetDelegationTokenResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetDelegationToken_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CancelDelegationToken_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TCancelDelegationTokenReq, TCancelDelegationTokenReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TCancelDelegationTokenReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CancelDelegationToken_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CancelDelegationToken_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TCancelDelegationTokenResp, TCancelDelegationTokenResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TCancelDelegationTokenResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CancelDelegationToken_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class RenewDelegationToken_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TRenewDelegationTokenReq, TRenewDelegationTokenReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TRenewDelegationTokenReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('RenewDelegationToken_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class RenewDelegationToken_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TRenewDelegationTokenResp, TRenewDelegationTokenResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TRenewDelegationTokenResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('RenewDelegationToken_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetLog_args(object):
"""
Attributes:
- req
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'req', (TGetLogReq, TGetLogReq.thrift_spec), None, ), # 1
)
def __init__(self, req=None,):
self.req = req
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.req = TGetLogReq()
self.req.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetLog_args')
if self.req is not None:
oprot.writeFieldBegin('req', TType.STRUCT, 1)
self.req.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class GetLog_result(object):
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TGetLogResp, TGetLogResp.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TGetLogResp()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('GetLog_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
| [
"[email protected]"
] | |
284deb502b460d18389044ea5103890c7f6686d0 | 01a8c5ecea9cb4d40d3e26a1ca08cb1ccc17e98a | /common/prep_terrain_data.py | a35188d77e0eab7d0ba710f5dbfa6d1addca21c6 | [] | no_license | pelinbalci/intro_to_ml | fe570cfe5a556cdd55fccabd1f7096b42124a7a7 | 450ba3cff7d3f2009d94a526527ed76fee6e1fdf | refs/heads/master | 2022-11-15T04:22:29.372686 | 2020-07-12T10:13:05 | 2020-07-12T10:13:05 | 277,359,558 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,825 | py | #!/usr/bin/python
import random
def makeTerrainData(n_points=1000):
"""make the toy dataset """
random.seed(42)
grade = [random.random() for i in range(0, n_points)] #[0.63, 0.025, 0.275, 0.223, 0.736, 0.676, 0.89, 0.085, 0.42, 0.029]
bumpy = [random.random() for i in range(0, n_points)] #[0.218, 0.50, 0.026, 0.19, 0.649, 0.54, 0.22, 0.58, 0.809, 0.006]
error = [random.random() for i in range(0, n_points)]
y = [round(grade[i]*bumpy[i]+0.3+0.1*error[i]) for i in range(0, n_points)] #[1, 0, 0, 0, 1, 1, 1, 0, 1, 0]
for i in range(0, len(y)):
if grade[i] > 0.8 or bumpy[i] > 0.8:
y[i] = 1.0 # <class 'list'>: [1, 0, 0, 0, 1, 1, 1.0, 0, 1.0, 0]
# split into train/test sets
X = [[gg, ss] for gg, ss in zip(grade, bumpy)]
split = int(0.75 * n_points)
X_train = X[0:split] # [[0.63, 0.218], [0.025, 0.50] ... ]
X_test = X[split:]
y_train = y[0:split] # [1, 0, 0, 0, 1, 1, 1.0]
y_test = y[split:]
grade_sig = [X_train[i][0] for i in range(0, len(X_train)) if y_train[i] == 0]
bumpy_sig = [X_train[i][1] for i in range(0, len(X_train)) if y_train[i] == 0]
grade_bkg = [X_train[i][0] for i in range(0, len(X_train)) if y_train[i] == 1]
bumpy_bkg = [X_train[i][1] for i in range(0, len(X_train)) if y_train[i] == 1]
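    # note: the four training-split lists above are immediately overwritten by the
    # test-split versions below, so only the test-split values reach test_data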
grade_sig = [X_test[i][0] for i in range(0, len(X_test)) if y_test[i] == 0]
bumpy_sig = [X_test[i][1] for i in range(0, len(X_test)) if y_test[i] == 0]
grade_bkg = [X_test[i][0] for i in range(0, len(X_test)) if y_test[i] == 1]
bumpy_bkg = [X_test[i][1] for i in range(0, len(X_test)) if y_test[i] == 1]
test_data = {"fast": {"grade": grade_sig, "bumpiness": bumpy_sig},
"slow": {"grade": grade_bkg, "bumpiness": bumpy_bkg}}
return X, y, X_train, y_train, X_test, y_test | [
"[email protected]"
] | |
a307d190864c688240df95388d0101710746d094 | 16eff60c29062849d7d2fc035a9fbb4a3f93e206 | /crnn_model/cnn_basenet.py | 4bc2c25734ae536b5105871a303e2fb722471c70 | [] | no_license | marjeylee/text_recognization | 3144d3f3903918d1c9a9e75b14597288b92af8cd | efc7982198cbdea8f330de2f758583be6ba3c23f | refs/heads/master | 2020-03-23T18:09:57.215751 | 2018-07-22T13:07:57 | 2018-07-22T13:07:57 | 141,893,083 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,465 | py | # -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: cnn_basenet
Description :
Author : 'li'
date: 2018/7/22
-------------------------------------------------
Change Activity:
2018/7/22:
-------------------------------------------------
"""
__author__ = 'li'
import tensorflow as tf
import numpy as np
from abc import ABCMeta
"""
Library of basic CNN building-block methods
"""
class CNNBaseModel(metaclass=ABCMeta):
"""
    Base class collecting common CNN layer helpers as static methods
"""
def __init__(self):
pass
@staticmethod
def conv2d(inputdata, out_channel, kernel_size, padding='SAME', stride=1, w_init=None, b_init=None,
nl=tf.identity, split=1, use_bias=True, data_format='NHWC', name=None):
"""
        2D convolution operation.
        :param name: op name
        :param inputdata: a 4D tensorflow tensor which must have a known number of
        channels, but can have other unknown dimensions.
        :param out_channel: number of output channels.
        :param kernel_size: int (square kernel) or [height, width] list
        :param padding: 'VALID' or 'SAME'
        :param stride: int (same stride in both dimensions) or [height, width] list
        :param w_init: initializer for convolution weights
        :param b_init: initializer for bias
        :param nl: a tensorflow op applied to the output (defaults to tf.identity)
        :param split: number of channel groups, as in AlexNet (mainly to save GPU memory)
        :param use_bias: whether to use bias.
        :param data_format: 'NHWC' (tensorflow default) or 'NCHW'
        :return: tf.Tensor named ``output``
"""
with tf.variable_scope(name):
in_shape = inputdata.get_shape().as_list()
channel_axis = 3 if data_format == 'NHWC' else 1
in_channel = in_shape[channel_axis]
assert in_channel is not None, "[Conv2D] Input cannot have unknown channel!"
assert in_channel % split == 0
assert out_channel % split == 0
padding = padding.upper()
if isinstance(kernel_size, list):
filter_shape = [kernel_size[0], kernel_size[1]] + [in_channel / split, out_channel]
else:
filter_shape = [kernel_size, kernel_size] + [in_channel / split, out_channel]
if isinstance(stride, list):
strides = [1, stride[0], stride[1], 1] if data_format == 'NHWC' else [1, 1, stride[0], stride[1]]
else:
strides = [1, stride, stride, 1] if data_format == 'NHWC' else [1, 1, stride, stride]
if w_init is None:
w_init = tf.contrib.layers.variance_scaling_initializer()
if b_init is None:
b_init = tf.constant_initializer()
w = tf.get_variable('W', filter_shape, initializer=w_init)
b = None
if use_bias:
b = tf.get_variable('b', [out_channel], initializer=b_init)
if split == 1:
conv = tf.nn.conv2d(inputdata, w, strides, padding, data_format=data_format)
else:
inputs = tf.split(inputdata, split, channel_axis)
kernels = tf.split(w, split, 3)
outputs = [tf.nn.conv2d(i, k, strides, padding, data_format=data_format)
for i, k in zip(inputs, kernels)]
conv = tf.concat(outputs, channel_axis)
ret = nl(tf.nn.bias_add(conv, b, data_format=data_format) if use_bias else conv, name=name)
return ret
@staticmethod
def relu(input_data, name=None):
"""
        ReLU activation.
:param name:
:param input_data:
:return:
"""
return tf.nn.relu(features=input_data, name=name)
@staticmethod
def sigmoid(inputdata, name=None):
"""
        Sigmoid activation.
:param name:
:param inputdata:
:return:
"""
return tf.nn.sigmoid(x=inputdata, name=name)
@staticmethod
def maxpooling(inputdata, kernel_size, stride=None, padding='VALID', data_format='NHWC', name=None):
"""
        Max-pooling operation.
:param name:
:param inputdata:
:param kernel_size:
:param stride:
:param padding:
:param data_format:
:return:
"""
padding = padding.upper()
if stride is None:
stride = kernel_size
if isinstance(kernel_size, list):
kernel = [1, kernel_size[0], kernel_size[1], 1] if data_format == 'NHWC' else \
[1, 1, kernel_size[0], kernel_size[1]]
else:
kernel = [1, kernel_size, kernel_size, 1] if data_format == 'NHWC' else [1, 1, kernel_size, kernel_size]
if isinstance(stride, list):
strides = [1, stride[0], stride[1], 1] if data_format == 'NHWC' else [1, 1, stride[0], stride[1]]
else:
strides = [1, stride, stride, 1] if data_format == 'NHWC' else [1, 1, stride, stride]
return tf.nn.max_pool(value=inputdata, ksize=kernel, strides=strides, padding=padding,
data_format=data_format, name=name)
@staticmethod
def avgpooling(inputdata, kernel_size, stride=None, padding='VALID', data_format='NHWC', name=None):
"""
        Average-pooling operation.
:param name:
:param inputdata:
:param kernel_size:
:param stride:
:param padding:
:param data_format:
:return:
"""
if stride is None:
stride = kernel_size
kernel = [1, kernel_size, kernel_size, 1] if data_format == 'NHWC' else [1, 1, kernel_size, kernel_size]
strides = [1, stride, stride, 1] if data_format == 'NHWC' else [1, 1, stride, stride]
return tf.nn.avg_pool(value=inputdata, ksize=kernel, strides=strides, padding=padding,
data_format=data_format, name=name)
@staticmethod
def globalavgpooling(inputdata, data_format='NHWC', name=None):
"""
        Global average-pooling operation.
:param name:
:param inputdata:
:param data_format:
:return:
"""
assert inputdata.shape.ndims == 4
assert data_format in ['NHWC', 'NCHW']
axis = [1, 2] if data_format == 'NHWC' else [2, 3]
return tf.reduce_mean(input_tensor=inputdata, axis=axis, name=name)
@staticmethod
def layernorm(inputdata, epsilon=1e-5, use_bias=True, use_scale=True, data_format='NHWC', name=None):
"""
        Layer normalization operation (statistics are computed over all non-batch dimensions).
:param name:
:param inputdata:
:param epsilon: epsilon to avoid divide-by-zero.
:param use_bias: whether to use the extra affine transformation or not.
:param use_scale: whether to use the extra affine transformation or not.
:param data_format:
:return:
"""
shape = inputdata.get_shape().as_list()
ndims = len(shape)
assert ndims in [2, 4]
mean, var = tf.nn.moments(inputdata, list(range(1, len(shape))), keep_dims=True)
if data_format == 'NCHW':
            channel = shape[1]
            new_shape = [1, channel, 1, 1]
        else:
            channel = shape[-1]
            new_shape = [1, 1, 1, channel]
        if ndims == 2:
            new_shape = [1, channel]
        if use_bias:
            beta = tf.get_variable('beta', [channel], initializer=tf.constant_initializer())
            beta = tf.reshape(beta, new_shape)
        else:
            beta = tf.zeros([1] * ndims, name='beta')
        if use_scale:
            gamma = tf.get_variable('gamma', [channel], initializer=tf.constant_initializer(1.0))
gamma = tf.reshape(gamma, new_shape)
else:
gamma = tf.ones([1] * ndims, name='gamma')
return tf.nn.batch_normalization(inputdata, mean, var, beta, gamma, epsilon, name=name)
@staticmethod
def instancenorm(inputdata, epsilon=1e-5, data_format='NHWC', use_affine=True, name=None):
"""
        Instance normalization operation (per-sample statistics over the spatial dimensions).
        :param name:
:param inputdata:
:param epsilon:
:param data_format:
:param use_affine:
:return:
"""
shape = inputdata.get_shape().as_list()
if len(shape) != 4:
raise ValueError("Input data of instancebn layer has to be 4D tensor")
if data_format == 'NHWC':
axis = [1, 2]
ch = shape[3]
new_shape = [1, 1, 1, ch]
else:
axis = [2, 3]
ch = shape[1]
new_shape = [1, ch, 1, 1]
if ch is None:
raise ValueError("Input of instancebn require known channel!")
mean, var = tf.nn.moments(inputdata, axis, keep_dims=True)
if not use_affine:
return tf.divide(inputdata - mean, tf.sqrt(var + epsilon), name='output')
beta = tf.get_variable('beta', [ch], initializer=tf.constant_initializer())
beta = tf.reshape(beta, new_shape)
gamma = tf.get_variable('gamma', [ch], initializer=tf.constant_initializer(1.0))
gamma = tf.reshape(gamma, new_shape)
return tf.nn.batch_normalization(inputdata, mean, var, beta, gamma, epsilon, name=name)
@staticmethod
def dropout(inputdata, keep_prob, noise_shape=None, name=None):
"""
dropout
:param name:
:param inputdata:
:param keep_prob:
:param noise_shape:
:return:
"""
return tf.nn.dropout(inputdata, keep_prob=keep_prob, noise_shape=noise_shape, name=name)
@staticmethod
def fullyconnect(inputdata, out_dim, w_init=None, b_init=None, nl=tf.identity, use_bias=True, name=None):
"""
        Fully connected layer.
:param inputdata: a tensor to be flattened except for the first dimension.
:param out_dim: output dimension
:param w_init: initializer for w. Defaults to `variance_scaling_initializer`.
:param b_init: initializer for b. Defaults to zero
:param nl: a nonlinearity function
:param use_bias: whether to use bias.
:param name:
:return: tf.Tensor: a NC tensor named ``output`` with attribute `variables`.
"""
shape = inputdata.get_shape().as_list()[1:]
if None not in shape:
inputdata = tf.reshape(inputdata, [-1, int(np.prod(shape))])
else:
inputdata = tf.reshape(inputdata, tf.stack([tf.shape(inputdata)[0], -1]))
if w_init is None:
w_init = tf.contrib.layers.variance_scaling_initializer()
if b_init is None:
b_init = tf.constant_initializer()
ret = tf.layers.dense(inputs=inputdata, activation=lambda x: nl(x, name='output'), use_bias=use_bias, name=name,
kernel_initializer=w_init, bias_initializer=b_init, trainable=True, units=out_dim)
return ret
@staticmethod
def layerbn(inputdata, is_training):
"""
batch norm
:param inputdata:
:param is_training:
:return:
"""
output = tf.contrib.layers.batch_norm(inputdata, scale=True, is_training=is_training, updates_collections=None)
return output
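    # Note: layernorm above computes per-sample statistics at run time, while
    # layerbn wraps tf.contrib's batch normalization, which tracks moving
    # averages across training batches.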
@staticmethod
def squeeze(inputdata, axis=None, name=None):
"""
        Remove all size-1 dimensions from the tensor (or only those listed in `axis`).
:param inputdata:
:param axis:
:param name:
:return:
"""
return tf.squeeze(input=inputdata, axis=axis, name=name)
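# Minimal usage sketch (illustration only, not part of the original module; the
# names below are hypothetical and assume a TensorFlow 1.x environment matching
# the tf.contrib calls above):
#
#     images = tf.placeholder(tf.float32, [None, 32, 100, 3])
#     net = CNNBaseModel.conv2d(images, out_channel=64, kernel_size=3, name='conv1')
#     net = CNNBaseModel.relu(net, name='relu1')
#     net = CNNBaseModel.maxpooling(net, kernel_size=2, stride=2, name='pool1')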
| [
"[email protected]"
] | |
c39cc5dda80f909656f9411ff1e0ff395f66ea2f | 9da0798b6f309d2274c65077efa81c3766b78051 | /SearchQuery.py | 398bb743fac51257f35f8c955e13f286be2efd41 | [] | no_license | theriley106/RandomSearchQuery | 09b37c23c3798b873c45db529158b326410d759e | e084a1a63279994fe06ef8dd594d2bc8e1d7b445 | refs/heads/master | 2021-01-13T04:57:56.583001 | 2017-02-07T05:04:07 | 2017-02-07T05:04:07 | 81,155,360 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | import random
import csv
QueryList = open('QueryList.csv', 'r')
QueryList = csv.reader(QueryList)
QueryList = [row for row in QueryList]
QueryList = [l[0] for l in QueryList]
def Random():
return random.choice(QueryList)
def Multi():
return QueryList | [
"[email protected]"
] | |
4a13e69ae72231f2bbbeccfef203a95165134ed0 | 98fd3275aa34c90c26d1f43d70983ae762c69064 | /floor_division.py | 1f092bde43f669fac26bc9d825a9243c8393537d | [] | no_license | hasnatosman/problem_solving | 62b5eaf6a418ae7f75d187b2c8e1e4b0ab4750fd | 1f33acc6289d322a9e950b6e39185a505159b7e2 | refs/heads/main | 2023-06-17T05:34:33.908078 | 2021-07-15T06:36:45 | 2021-07-15T06:36:45 | 383,810,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | """
PROBLEM 4:
Find the floor division of two numbers.
HINTS:
Just use two division symbols (//) instead of one (/).
"""
num1 = int(input('Enter the first number: '))
num2 = int(input('Enter the second number: '))
result = num1 // num2
print("Result is: ", result)
"""
Explanation:
When you divide one number by another you get two things: the quotient (the integer part of the division)
and the remainder.
To get just the quotient (the result without the remainder), use the floor-division operator,
written with two division symbols: //.
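For example, 7 / 2 gives 3.5, while 7 // 2 gives 3 and 7 % 2 gives the remainder, 1.
Note that floor division rounds toward negative infinity, so -7 // 2 is -4, not -3.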
"""
"""
import math
result = math.floor(3.4)
print(result)
""" | [
"[email protected]"
] | |
c52b322c1c1fb0464674ec1211c34b90dcd6b4b1 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/eqptdiag/entity.py | 86ae5c47a213aa5ec2540a9fb905cafd8a5403b0 | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 6,254 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class Entity(Mo):
"""
Diag-related entity information
"""
meta = ClassMeta("cobra.model.eqptdiag.Entity")
meta.moClassName = "eqptdiagEntity"
meta.rnFormat = "diag"
meta.category = MoCategory.REGULAR
meta.label = "Equipment Diagnostics Entity"
meta.writeAccessMask = 0x880080000000001
meta.readAccessMask = 0x880080000000001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.eqptdiagp.GrpTests")
meta.childClasses.add("cobra.model.eqptdiag.Rule")
meta.childNamesAndRnPrefix.append(("cobra.model.eqptdiagp.GrpTests", "grptests-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.childNamesAndRnPrefix.append(("cobra.model.eqptdiag.Rule", "rule-"))
meta.parentClasses.add("cobra.model.top.System")
meta.superClasses.add("cobra.model.nw.Conn")
meta.superClasses.add("cobra.model.nw.CpEntity")
meta.superClasses.add("cobra.model.nw.Item")
meta.superClasses.add("cobra.model.nw.GEp")
meta.rnPrefixes = [
('diag', False),
]
prop = PropMeta("str", "adminSt", "adminSt", 3670, PropCategory.REGULAR)
prop.label = "Admin State"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "enabled"
prop._addConstant("disabled", "disabled", 2)
prop._addConstant("enabled", "enabled", 1)
meta.props.add("adminSt", prop)
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "monPolDn", "monPolDn", 14498, PropCategory.REGULAR)
prop.label = "Monitoring policy attached to this observable object"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("monPolDn", prop)
prop = PropMeta("str", "name", "name", 3669, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.range = [(1, 128)]
meta.props.add("name", prop)
prop = PropMeta("str", "operErr", "operErr", 3672, PropCategory.REGULAR)
prop.label = "Operational Errors Qualifier"
prop.isOper = True
prop._addConstant("feature-unsupported", "feature-unsupported", 64)
prop._addConstant("init-err", "initialization-error", 1)
prop._addConstant("int-err", "internal-error", 8)
prop._addConstant("ipc-err", "ipc-error", 4)
prop._addConstant("mem-err", "memory-error", 2)
prop._addConstant("proto-err", "protocol-error", 32)
prop._addConstant("sock-err", "socket-error", 16)
meta.props.add("operErr", prop)
prop = PropMeta("str", "operSt", "operSt", 3671, PropCategory.REGULAR)
prop.label = "Operational State"
prop.isOper = True
prop.defaultValue = 1
prop.defaultValueStr = "enabled"
prop._addConstant("disabled", "disabled", 2)
prop._addConstant("enabled", "enabled", 1)
prop._addConstant("failed", "failed", 4)
prop._addConstant("initializing", "initializing", 3)
prop._addConstant("unknown", "unknown", 0)
meta.props.add("operSt", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
615006d06bcf4d93bc93ed9798d73762df416462 | 947fa6a4a6155ffce0038b11f4d743603418ad68 | /.c9/metadata/environment/clean_code/clean_code_submissions/clean_code_assignment_004/fb_post/utils/reply_to_comment.py | bf2b3191ba7a9a31d3d72f9ff3c6587ad9dd601e | [] | no_license | bharathi151/bharathi_diyyala | bd75e10639d7d22b332d5ce677e7799402dc4984 | 99f8657d010c790a0e4e4c9d6b57f81814784eb0 | refs/heads/master | 2022-11-21T12:43:48.401239 | 2020-07-23T09:05:52 | 2020-07-23T09:05:52 | 281,903,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,914 | py | {"filter":false,"title":"reply_to_comment.py","tooltip":"/clean_code/clean_code_submissions/clean_code_assignment_004/fb_post/utils/reply_to_comment.py","undoManager":{"mark":57,"position":57,"stack":[[{"start":{"row":21,"column":45},"end":{"row":22,"column":0},"action":"insert","lines":["",""],"id":2},{"start":{"row":22,"column":0},"end":{"row":22,"column":8},"action":"insert","lines":[" "]}],[{"start":{"row":22,"column":4},"end":{"row":22,"column":8},"action":"remove","lines":[" "],"id":3},{"start":{"row":22,"column":0},"end":{"row":22,"column":4},"action":"remove","lines":[" "]}],[{"start":{"row":21,"column":8},"end":{"row":21,"column":45},"action":"remove","lines":["is_valid_reply_content(reply_content)"],"id":4}],[{"start":{"row":21,"column":4},"end":{"row":21,"column":8},"action":"remove","lines":[" "],"id":5},{"start":{"row":21,"column":0},"end":{"row":21,"column":4},"action":"remove","lines":[" "]},{"start":{"row":20,"column":53},"end":{"row":21,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":18,"column":66},"end":{"row":19,"column":0},"action":"insert","lines":["",""],"id":6},{"start":{"row":19,"column":0},"end":{"row":19,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":19,"column":4},"end":{"row":19,"column":41},"action":"insert","lines":["is_valid_reply_content(reply_content)"],"id":7}],[{"start":{"row":19,"column":41},"end":{"row":20,"column":0},"action":"insert","lines":["",""],"id":8},{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"remove","lines":[" "],"id":9},{"start":{"row":19,"column":41},"end":{"row":20,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":19,"column":41},"end":{"row":20,"column":0},"action":"insert","lines":["",""],"id":10},{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"insert","lines":[" "]}],[{"start":{"row":8,"column":49},"end":{"row":9,"column":0},"action":"insert","lines":["",""],"id":11},{"start":{"row":9,"column":0},"end":{"row":9,"column":12},"action":"insert","lines":[" "]}],[{"start":{"row":9,"column":8},"end":{"row":9,"column":12},"action":"remove","lines":[" "],"id":12},{"start":{"row":9,"column":4},"end":{"row":9,"column":8},"action":"remove","lines":[" "]},{"start":{"row":9,"column":0},"end":{"row":9,"column":4},"action":"remove","lines":[" "]},{"start":{"row":8,"column":49},"end":{"row":9,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":8,"column":34},"end":{"row":8,"column":35},"action":"insert","lines":["\\"],"id":13}],[{"start":{"row":8,"column":35},"end":{"row":9,"column":0},"action":"insert","lines":["",""],"id":14},{"start":{"row":9,"column":0},"end":{"row":9,"column":8},"action":"insert","lines":[" "]}],[{"start":{"row":9,"column":8},"end":{"row":9,"column":12},"action":"insert","lines":[" 
"],"id":15}],[{"start":{"row":9,"column":12},"end":{"row":9,"column":16},"action":"insert","lines":[" "],"id":16}],[{"start":{"row":9,"column":16},"end":{"row":9,"column":20},"action":"insert","lines":[" "],"id":17}],[{"start":{"row":9,"column":20},"end":{"row":9,"column":24},"action":"insert","lines":[" "],"id":18}],[{"start":{"row":10,"column":48},"end":{"row":10,"column":49},"action":"remove","lines":[" "],"id":19},{"start":{"row":10,"column":44},"end":{"row":10,"column":48},"action":"remove","lines":[" "]},{"start":{"row":10,"column":40},"end":{"row":10,"column":44},"action":"remove","lines":[" "]}],[{"start":{"row":10,"column":46},"end":{"row":11,"column":0},"action":"insert","lines":["",""],"id":20},{"start":{"row":11,"column":0},"end":{"row":11,"column":40},"action":"insert","lines":[" "]}],[{"start":{"row":11,"column":36},"end":{"row":11,"column":40},"action":"remove","lines":[" "],"id":21},{"start":{"row":11,"column":32},"end":{"row":11,"column":36},"action":"remove","lines":[" "]},{"start":{"row":11,"column":28},"end":{"row":11,"column":32},"action":"remove","lines":[" "]},{"start":{"row":11,"column":24},"end":{"row":11,"column":28},"action":"remove","lines":[" "]}],[{"start":{"row":18,"column":37},"end":{"row":19,"column":0},"action":"insert","lines":["",""],"id":22},{"start":{"row":19,"column":0},"end":{"row":19,"column":8},"action":"insert","lines":[" "]},{"start":{"row":19,"column":8},"end":{"row":19,"column":9},"action":"insert","lines":["r"]},{"start":{"row":19,"column":9},"end":{"row":19,"column":10},"action":"insert","lines":["e"]}],[{"start":{"row":19,"column":10},"end":{"row":19,"column":11},"action":"insert","lines":["r"],"id":23},{"start":{"row":19,"column":11},"end":{"row":19,"column":12},"action":"insert","lines":["u"]}],[{"start":{"row":19,"column":11},"end":{"row":19,"column":12},"action":"remove","lines":["u"],"id":24},{"start":{"row":19,"column":10},"end":{"row":19,"column":11},"action":"remove","lines":["r"]}],[{"start":{"row":19,"column":10},"end":{"row":19,"column":11},"action":"insert","lines":["t"],"id":25},{"start":{"row":19,"column":11},"end":{"row":19,"column":12},"action":"insert","lines":["u"]},{"start":{"row":19,"column":12},"end":{"row":19,"column":13},"action":"insert","lines":["r"]},{"start":{"row":19,"column":13},"end":{"row":19,"column":14},"action":"insert","lines":["n"]}],[{"start":{"row":19,"column":14},"end":{"row":20,"column":0},"action":"insert","lines":["",""],"id":26},{"start":{"row":20,"column":0},"end":{"row":20,"column":8},"action":"insert","lines":[" "]},{"start":{"row":20,"column":4},"end":{"row":20,"column":8},"action":"remove","lines":[" "]}],[{"start":{"row":20,"column":0},"end":{"row":20,"column":4},"action":"remove","lines":[" "],"id":27}],[{"start":{"row":20,"column":0},"end":{"row":21,"column":0},"action":"insert","lines":["",""],"id":28}],[{"start":{"row":19,"column":8},"end":{"row":19,"column":14},"action":"remove","lines":["return"],"id":29},{"start":{"row":19,"column":4},"end":{"row":19,"column":8},"action":"remove","lines":[" "]}],[{"start":{"row":13,"column":8},"end":{"row":15,"column":29},"action":"remove","lines":["new_comment_id = comment_creation(user_id, comment_id,"," reply_content, comment)"," return new_comment_id"],"id":30}],[{"start":{"row":13,"column":4},"end":{"row":13,"column":8},"action":"remove","lines":[" "],"id":31},{"start":{"row":13,"column":0},"end":{"row":13,"column":4},"action":"remove","lines":[" "]}],[{"start":{"row":17,"column":0},"end":{"row":17,"column":4},"action":"remove","lines":[" 
"],"id":32}],[{"start":{"row":18,"column":0},"end":{"row":18,"column":4},"action":"insert","lines":[" "],"id":33}],[{"start":{"row":18,"column":4},"end":{"row":20,"column":29},"action":"insert","lines":["new_comment_id = comment_creation(user_id, comment_id,"," reply_content, comment)"," return new_comment_id"],"id":34}],[{"start":{"row":20,"column":6},"end":{"row":20,"column":7},"action":"remove","lines":[" "],"id":35}],[{"start":{"row":20,"column":6},"end":{"row":20,"column":7},"action":"remove","lines":[" "],"id":36},{"start":{"row":20,"column":5},"end":{"row":20,"column":6},"action":"remove","lines":[" "]},{"start":{"row":20,"column":4},"end":{"row":20,"column":5},"action":"remove","lines":[" "]}],[{"start":{"row":13,"column":0},"end":{"row":14,"column":0},"action":"remove","lines":["",""],"id":37},{"start":{"row":12,"column":0},"end":{"row":13,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":10,"column":36},"end":{"row":10,"column":40},"action":"remove","lines":[" "],"id":38}],[{"start":{"row":10,"column":36},"end":{"row":10,"column":37},"action":"insert","lines":[" "],"id":39},{"start":{"row":10,"column":37},"end":{"row":10,"column":38},"action":"insert","lines":[" "]},{"start":{"row":10,"column":38},"end":{"row":10,"column":39},"action":"insert","lines":[" "]},{"start":{"row":10,"column":39},"end":{"row":10,"column":40},"action":"insert","lines":[" "]}],[{"start":{"row":10,"column":36},"end":{"row":10,"column":40},"action":"remove","lines":[" "],"id":40}],[{"start":{"row":10,"column":36},"end":{"row":10,"column":37},"action":"insert","lines":[" "],"id":41},{"start":{"row":10,"column":37},"end":{"row":10,"column":38},"action":"insert","lines":[" "]},{"start":{"row":10,"column":38},"end":{"row":10,"column":39},"action":"insert","lines":[" "]}],[{"start":{"row":11,"column":20},"end":{"row":11,"column":24},"action":"remove","lines":[" "],"id":42}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":20},"action":"remove","lines":[" "],"id":43},{"start":{"row":11,"column":12},"end":{"row":11,"column":16},"action":"remove","lines":[" "]}],[{"start":{"row":11,"column":12},"end":{"row":11,"column":16},"action":"insert","lines":[" "],"id":44}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":20},"action":"insert","lines":[" "],"id":45}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":20},"action":"remove","lines":[" "],"id":46}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":17},"action":"insert","lines":[" "],"id":47}],[{"start":{"row":11,"column":16},"end":{"row":11,"column":17},"action":"remove","lines":[" "],"id":48},{"start":{"row":11,"column":12},"end":{"row":11,"column":16},"action":"remove","lines":[" "]},{"start":{"row":11,"column":8},"end":{"row":11,"column":12},"action":"remove","lines":[" "]},{"start":{"row":11,"column":4},"end":{"row":11,"column":8},"action":"remove","lines":[" "]},{"start":{"row":11,"column":0},"end":{"row":11,"column":4},"action":"remove","lines":[" "]}],[{"start":{"row":10,"column":45},"end":{"row":11,"column":0},"action":"remove","lines":["",""],"id":49},{"start":{"row":10,"column":44},"end":{"row":10,"column":45},"action":"remove","lines":["'"]}],[{"start":{"row":10,"column":44},"end":{"row":10,"column":45},"action":"insert","lines":["'"],"id":50}],[{"start":{"row":10,"column":38},"end":{"row":10,"column":39},"action":"remove","lines":[" "],"id":51},{"start":{"row":10,"column":37},"end":{"row":10,"column":38},"action":"remove","lines":[" 
"]},{"start":{"row":10,"column":36},"end":{"row":10,"column":37},"action":"remove","lines":[" "]},{"start":{"row":10,"column":32},"end":{"row":10,"column":36},"action":"remove","lines":[" "]},{"start":{"row":10,"column":28},"end":{"row":10,"column":32},"action":"remove","lines":[" "]},{"start":{"row":10,"column":24},"end":{"row":10,"column":28},"action":"remove","lines":[" "]},{"start":{"row":10,"column":20},"end":{"row":10,"column":24},"action":"remove","lines":[" "]},{"start":{"row":10,"column":16},"end":{"row":10,"column":20},"action":"remove","lines":[" "]},{"start":{"row":10,"column":12},"end":{"row":10,"column":16},"action":"remove","lines":[" "]}],[{"start":{"row":10,"column":8},"end":{"row":10,"column":12},"action":"remove","lines":[" "],"id":52},{"start":{"row":10,"column":4},"end":{"row":10,"column":8},"action":"remove","lines":[" "]},{"start":{"row":10,"column":0},"end":{"row":10,"column":4},"action":"remove","lines":[" "]},{"start":{"row":9,"column":56},"end":{"row":10,"column":0},"action":"remove","lines":["",""]}],[{"start":{"row":9,"column":56},"end":{"row":9,"column":57},"action":"insert","lines":[" "],"id":53}],[{"start":{"row":9,"column":39},"end":{"row":10,"column":0},"action":"insert","lines":["",""],"id":54},{"start":{"row":10,"column":0},"end":{"row":10,"column":28},"action":"insert","lines":[" "]}],[{"start":{"row":10,"column":52},"end":{"row":11,"column":0},"action":"insert","lines":["",""],"id":55},{"start":{"row":11,"column":0},"end":{"row":11,"column":28},"action":"insert","lines":[" "]}],[{"start":{"row":11,"column":24},"end":{"row":11,"column":28},"action":"remove","lines":[" "],"id":56}],[{"start":{"row":17,"column":41},"end":{"row":17,"column":42},"action":"remove","lines":[" "],"id":57},{"start":{"row":17,"column":40},"end":{"row":17,"column":41},"action":"remove","lines":[" "]},{"start":{"row":17,"column":36},"end":{"row":17,"column":40},"action":"remove","lines":[" "]}],[{"start":{"row":17,"column":36},"end":{"row":17,"column":37},"action":"insert","lines":[" "],"id":58},{"start":{"row":17,"column":37},"end":{"row":17,"column":38},"action":"insert","lines":[" "]}],[{"start":{"row":23,"column":0},"end":{"row":23,"column":4},"action":"remove","lines":[" "],"id":59}]]},"ace":{"folds":[],"scrolltop":199.04411764705884,"scrollleft":0,"selection":{"start":{"row":4,"column":0},"end":{"row":38,"column":0},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":{"row":10,"state":"start","mode":"ace/mode/python"}},"timestamp":1588050830706,"hash":"97b5a1bf73781825fea117ab0236a3a13f64600e"} | [
"[email protected]"
] | |
7f78ff3bbfee0ec659df2d2fe6639af9fe66f59b | 72b00923d4aa11891f4a3038324c8952572cc4b2 | /python/datastruct/dd_oob/pgm06_13.txt | 68d0a171c8d350cdcfdc58f5ebe0b45790150e1e | [] | no_license | taowuwen/codec | 3698110a09a770407e8fb631e21d86ba5a885cd5 | d92933b07f21dae950160a91bb361fa187e26cd2 | refs/heads/master | 2022-03-17T07:43:55.574505 | 2022-03-10T05:20:44 | 2022-03-10T05:20:44 | 87,379,261 | 0 | 0 | null | 2019-03-25T15:40:27 | 2017-04-06T02:50:54 | C | UTF-8 | Python | false | false | 1,058 | txt | #
# This file contains the Python code from Program 6.13 of
# "Data Structures and Algorithms
# with Object-Oriented Design Patterns in Python"
# by Bruno R. Preiss.
#
# Copyright (c) 2003 by Bruno R. Preiss, P.Eng. All rights reserved.
#
# http://www.brpreiss.com/books/opus7/programs/pgm06_13.txt
#
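# Note: in the book's opus7 package the Queue base class and the container
# exceptions are defined in other files; minimal stand-ins are assumed here so
# that this excerpt can run on its own.
class Queue(object):
    pass
class ContainerEmpty(Exception):
    pass
class ContainerFull(Exception):
    pass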
class QueueAsArray(Queue):
def getHead(self):
if self._count == 0:
raise ContainerEmpty
return self._array[self._head]
def enqueue(self, obj):
if self._count == len(self._array):
raise ContainerFull
self._tail = self._tail + 1
if self._tail == len(self._array):
self._tail = 0
self._array[self._tail] = obj
self._count += 1
def dequeue(self):
if self._count == 0:
raise ContainerEmpty
result = self._array[self._head]
self._array[self._head] = None
self._head = self._head + 1
if self._head == len(self._array):
self._head = 0
self._count -= 1
return result
# ...
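# Hedged usage sketch (not from the book): the book's constructor is elided
# above, so the instance is built directly with the fields the methods expect
# (an empty queue has head at 0, tail just behind it, and a zero count).
if __name__ == '__main__':
    q = object.__new__(QueueAsArray)
    q._array, q._head, q._tail, q._count = [None] * 4, 0, -1, 0
    q.enqueue('a')
    q.enqueue('b')
    assert q.getHead() == 'a'
    assert q.dequeue() == 'a'
    assert q.dequeue() == 'b'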
| [
"[email protected]"
] | |
1ddbfb75321c4a6c9628325701f965d26cc4ace3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03254/s765346849.py | 33081586b65e211540fb56775a50c4be338f79f8 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | n, x = map(int, input().split())
a = list(map(int, input().split()))
a.sort()
ans = 0
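# Greedy over the sorted values: hand out a[i] while x still covers it; the
# final element is only counted when the remaining x matches it exactly.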
for i in range(n-1):
if x >= a[i]:
x -= a[i]
ans += 1
if x == a[-1]:
ans += 1
print(ans) | [
"[email protected]"
] | |
4219a4b68fda829e5ffe9f53e3fc479e6f4e4f2f | 26f6313772161851b3b28b32a4f8d255499b3974 | /Python/PseudoPalindromicPathsinaBinaryTree.py | f55438ead603aea16a74885f9461cc385a4c486d | [] | no_license | here0009/LeetCode | 693e634a3096d929e5c842c5c5b989fa388e0fcd | f96a2273c6831a8035e1adacfa452f73c599ae16 | refs/heads/master | 2023-06-30T19:07:23.645941 | 2021-07-31T03:38:51 | 2021-07-31T03:38:51 | 266,287,834 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,315 | py | """
Given a binary tree where node values are digits from 1 to 9. A path in the binary tree is said to be pseudo-palindromic if at least one permutation of the node values in the path is a palindrome.
Return the number of pseudo-palindromic paths going from the root node to leaf nodes.
Example 1:
Input: root = [2,3,1,3,1,null,1]
Output: 2
Explanation: The figure above represents the given binary tree. There are three paths going from the root node to leaf nodes: the red path [2,3,3], the green path [2,1,1], and the path [2,3,1]. Among these paths only red path and green path are pseudo-palindromic paths since the red path [2,3,3] can be rearranged in [3,2,3] (palindrome) and the green path [2,1,1] can be rearranged in [1,2,1] (palindrome).
Example 2:
Input: root = [2,1,1,1,3,null,null,null,null,null,1]
Output: 1
Explanation: The figure above represents the given binary tree. There are three paths going from the root node to leaf nodes: the green path [2,1,1], the path [2,1,3,1], and the path [2,1]. Among these paths only the green path is pseudo-palindromic since [2,1,1] can be rearranged in [1,2,1] (palindrome).
Example 3:
Input: root = [9]
Output: 1
Constraints:
The given binary tree will have between 1 and 10^5 nodes.
Node values are digits from 1 to 9.
"""
# Definition for a binary tree node.
from collections import Counter
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
def pseudoPalindromicPaths(self, root: TreeNode) -> int:
        def ispseudoPalindrome(string):
            """
            Return whether the digit string is a pseudo-palindrome:
            some permutation of it is a palindrome iff at most one
            digit occurs an odd number of times.
            """
            c_string = Counter(string)
            odds = sum(v % 2 for v in c_string.values())
            return odds < 2
        def dfs(node, string):
            nonlocal res  # dfs updates the counter defined in the enclosing scope
            if node:
                string += str(node.val)
                if not node.left and not node.right:
                    res += int(ispseudoPalindrome(string))
dfs(node.left, string)
dfs(node.right, string)
res = 0
dfs(root, '')
return res
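# Minimal sanity check (not part of the original solution): the tree from
# Example 1, [2,3,1,3,1,null,1], has exactly two pseudo-palindromic root-to-leaf
# paths, so the assertion below should hold.
if __name__ == '__main__':
    example_root = TreeNode(2,
                            TreeNode(3, TreeNode(3), TreeNode(1)),
                            TreeNode(1, None, TreeNode(1)))
    assert Solution().pseudoPalindromicPaths(example_root) == 2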
| [
"[email protected]"
] | |
4b1e6ff8dcab39ce71d92053b69511dbb5cc419d | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/APPIAN-STRATUM-MIB.py | 06de6f8880fa3548a8b448db96e72c44e32cc272 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 19,741 | py | #
# PySNMP MIB module APPIAN-STRATUM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/APPIAN-STRATUM-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:23:58 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
acChassisCurrentTime, acChassisRingId = mibBuilder.importSymbols("APPIAN-CHASSIS-MIB", "acChassisCurrentTime", "acChassisRingId")
acOsap, AcOpStatus, AcNodeId = mibBuilder.importSymbols("APPIAN-SMI-MIB", "acOsap", "AcOpStatus", "AcNodeId")
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
IpAddress, ModuleIdentity, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, TimeTicks, Counter64, Gauge32, ObjectIdentity, Counter32, MibIdentifier, Integer32, iso, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "ModuleIdentity", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "TimeTicks", "Counter64", "Gauge32", "ObjectIdentity", "Counter32", "MibIdentifier", "Integer32", "iso", "Unsigned32")
TruthValue, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "DisplayString", "TextualConvention")
acStratum = ModuleIdentity((1, 3, 6, 1, 4, 1, 2785, 2, 9))
acStratum.setRevisions(('1900-08-22 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: acStratum.setRevisionsDescriptions(('Draft MIB for Engineering use only.',))
if mibBuilder.loadTexts: acStratum.setLastUpdated('0008220000Z')
if mibBuilder.loadTexts: acStratum.setOrganization('Appian Communications, Inc.')
if mibBuilder.loadTexts: acStratum.setContactInfo('Brian Johnson')
if mibBuilder.loadTexts: acStratum.setDescription('Appian Communications Stratum MIB contains the definitions for the configuration and control of Stratum Clock module hardware information and status.')
acStratumTable = MibTable((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1), )
if mibBuilder.loadTexts: acStratumTable.setStatus('current')
if mibBuilder.loadTexts: acStratumTable.setDescription('This table contains two rows for access and control of the Stratum-3 clock modules.')
acStratumEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1), ).setIndexNames((0, "APPIAN-STRATUM-MIB", "acStratumNodeId"))
if mibBuilder.loadTexts: acStratumEntry.setStatus('current')
if mibBuilder.loadTexts: acStratumEntry.setDescription('A row within the Stratum table containing access control and status information relating to the operation of the Stratum-3 clock module.')
acStratumNodeId = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 1), AcNodeId()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: acStratumNodeId.setStatus('current')
if mibBuilder.loadTexts: acStratumNodeId.setDescription("The unique node identification number representing a chassis within a ring of OSAP's.")
acStratumClockSource = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("internal", 1), ("bits", 2), ("line", 3))).clone('internal')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumClockSource.setStatus('current')
if mibBuilder.loadTexts: acStratumClockSource.setDescription('This attribute determines the clock source.')
acStratumOpStatusModuleA = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 3), AcOpStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: acStratumOpStatusModuleA.setStatus('current')
if mibBuilder.loadTexts: acStratumOpStatusModuleA.setDescription('This field indicates the current operational status for the clock card in slot 16, module A . Only the following values are applicable to the module: operational, offline, initializing, selfTesting, upgrading, standby, shuttingDown, failed, and hw not present.')
acStratumOpStatusModuleB = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 4), AcOpStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: acStratumOpStatusModuleB.setStatus('current')
if mibBuilder.loadTexts: acStratumOpStatusModuleB.setDescription('This field indicates the current operational status for the clock card in slot 16, module B . Only the following values are applicable to the module: operational, offline, initializing, selfTesting, upgrading, standby, shuttingDown, failed, and hw not present.')
acStratumAlarmStatusModuleA = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: acStratumAlarmStatusModuleA.setStatus('current')
if mibBuilder.loadTexts: acStratumAlarmStatusModuleA.setDescription('This attribute contains the current status of the clock alarms. The acStratumAlarmStatus is a bit map represented as a sum. Normal may only be set if and only if no other alarms are set. The various bit positions are: 1 normal No alarm present 2 los Loss of Signal 4 lof Loss of Frame ')
acStratumAlarmStatusModuleB = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 6))).setMaxAccess("readonly")
if mibBuilder.loadTexts: acStratumAlarmStatusModuleB.setStatus('current')
if mibBuilder.loadTexts: acStratumAlarmStatusModuleB.setDescription('This attribute contains the current status of the clock alarms. The acStratumAlarmStatus is a bit map represented as a sum. Normal must be set if and only if no other flash is set. The various bit positions are: 1 normal No alarm present 2 los Loss of Signal 4 lof Loss of Frame ')
acStratumCurrentClockSourceModuleA = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("unknown", 0), ("none", 1), ("bits-a", 2), ("bits-b", 3), ("line-slot1-port1", 4), ("line-slot1-port2", 5), ("line-slot2-port1", 6), ("line-slot2-port2", 7), ("holdover", 8), ("internal", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: acStratumCurrentClockSourceModuleA.setStatus('current')
if mibBuilder.loadTexts: acStratumCurrentClockSourceModuleA.setDescription('This attribute displays the current source that the clock card is selecting.')
acStratumCurrentClockSourceModuleB = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("unknown", 0), ("none", 1), ("bits-a", 2), ("bits-b", 3), ("line-slot1-port1", 4), ("line-slot1-port2", 5), ("line-slot2-port1", 6), ("line-slot2-port2", 7), ("holdover", 8), ("internal", 9)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: acStratumCurrentClockSourceModuleB.setStatus('current')
if mibBuilder.loadTexts: acStratumCurrentClockSourceModuleB.setDescription('This attribute displays the current source that the clock card is selecting.')
acStratumLockoutReference = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumLockoutReference.setStatus('current')
if mibBuilder.loadTexts: acStratumLockoutReference.setDescription('This attribute is a bit mask of clock references that should be locked out from selection for the clock source. None can only be selected when no other lockout references are selected. The various bit positions are: 0 none No clock references are locked out from selection. 1 bits-a BITS source from clock module A is locked out. 2 bits-b BITS source from clock module B is locked out. 4 line-slot1 LINE timing source from SONET slot 1 is locked out. 8 line-slot2 LINE timing source from SONET slot 2 is locked out. 16 holdover-a Holdover from clock module A is locked out. 32 holdover-b Holdover from clock module B is locked out. ')
acStratumManualSwitch = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 0), ("bits-a", 1), ("bits-b", 2), ("line-slot1", 3), ("line-slot2", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumManualSwitch.setStatus('current')
if mibBuilder.loadTexts: acStratumManualSwitch.setDescription('This attribute will manually switch the clock references. If the clock reference does not exist, is locked out, or the reference has failed, the switch will not take place.')
acStratumForcedSwitch = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 0), ("bits-a", 1), ("bits-b", 2), ("line-slot1", 3), ("line-slot2", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumForcedSwitch.setStatus('current')
if mibBuilder.loadTexts: acStratumForcedSwitch.setDescription('This attribute will force switch the clock references. If the clock reference does not exist or is locked out, the switch will not take place.')
acStratumRevertiveRefSwitchEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 12), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumRevertiveRefSwitchEnabled.setStatus('current')
if mibBuilder.loadTexts: acStratumRevertiveRefSwitchEnabled.setDescription('Setting of this attribute to true(1) will the reference to revert back to the original reference when that reference become ready again.')
acStratumClearAlarms = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 13), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumClearAlarms.setStatus('current')
if mibBuilder.loadTexts: acStratumClearAlarms.setDescription('Setting of this attribute to true(1) will cause the alarm contacts to clear. Reading this attribute will always return false.')
acStratumLineTimingPortSlot1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumLineTimingPortSlot1.setStatus('current')
if mibBuilder.loadTexts: acStratumLineTimingPortSlot1.setDescription('When configured for line timing, this value describes which port on the SONET card will be used to drive the line. This value is not applicable when not configured for line timing.')
acStratumLineTimingPortSlot2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2)).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumLineTimingPortSlot2.setStatus('current')
if mibBuilder.loadTexts: acStratumLineTimingPortSlot2.setDescription('When configured for line timing, this value describes which port on the SONET card will be used to drive the line. This value is not applicable when not configured for line timing.')
acStratumBITSFramingType = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("esf", 1), ("d4", 2))).clone('esf')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: acStratumBITSFramingType.setStatus('current')
if mibBuilder.loadTexts: acStratumBITSFramingType.setDescription('When configured for BITS timing, this value describes the type of framing that will be used on the BITS interface. This value is not applicable when not configured for BITS timing.')
acStratumCurrentClockSourceSystem = MibTableColumn((1, 3, 6, 1, 4, 1, 2785, 2, 9, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))).clone(namedValues=NamedValues(("unknown", 0), ("bits-a", 1), ("bits-b", 2), ("line-slot1-port1", 3), ("line-slot1-port2", 4), ("line-slot2-port1", 5), ("line-slot2-port2", 6), ("holdover-clock-a", 7), ("holdover-clock-b", 8), ("internal-clock-a", 9), ("internal-clock-b", 10), ("internal-sonet-slot1", 11), ("internal-sonet-slot2", 12)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: acStratumCurrentClockSourceSystem.setStatus('current')
if mibBuilder.loadTexts: acStratumCurrentClockSourceSystem.setDescription('This attribute displays the current clock source that the system is selecting.')
acStratumTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0))
acStratumFailedModuleATrap = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 1)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"))
if mibBuilder.loadTexts: acStratumFailedModuleATrap.setStatus('current')
if mibBuilder.loadTexts: acStratumFailedModuleATrap.setDescription('The stratum clock module failed.')
acStratumFailedModuleBTrap = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 2)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"))
if mibBuilder.loadTexts: acStratumFailedModuleBTrap.setStatus('current')
if mibBuilder.loadTexts: acStratumFailedModuleBTrap.setDescription('The stratum clock module failed.')
acStratumClockFailureModuleATrap = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 3)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"), ("APPIAN-STRATUM-MIB", "acStratumAlarmStatusModuleA"))
if mibBuilder.loadTexts: acStratumClockFailureModuleATrap.setStatus('current')
if mibBuilder.loadTexts: acStratumClockFailureModuleATrap.setDescription('Stratum clock agent has detected a clock timing failure.')
acStratumClockFailureModuleBTrap = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 4)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"), ("APPIAN-STRATUM-MIB", "acStratumAlarmStatusModuleB"))
if mibBuilder.loadTexts: acStratumClockFailureModuleBTrap.setStatus('current')
if mibBuilder.loadTexts: acStratumClockFailureModuleBTrap.setDescription('Stratum clock agent has detected a clock timing failure.')
acStratumRemovalModuleATrap = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 5)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"))
if mibBuilder.loadTexts: acStratumRemovalModuleATrap.setStatus('current')
if mibBuilder.loadTexts: acStratumRemovalModuleATrap.setDescription('The stratum clock module has been removed from the system.')
acStratumRemovalModuleBTrap = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 6)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"))
if mibBuilder.loadTexts: acStratumRemovalModuleBTrap.setStatus('current')
if mibBuilder.loadTexts: acStratumRemovalModuleBTrap.setDescription('The stratum clock module has been removed from the system.')
acStratumInsertedModuleATrap = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 7)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"))
if mibBuilder.loadTexts: acStratumInsertedModuleATrap.setStatus('current')
if mibBuilder.loadTexts: acStratumInsertedModuleATrap.setDescription('A stratum clock module has been inserted into the system.')
acStratumInsertedModuleBTrap = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 8)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"))
if mibBuilder.loadTexts: acStratumInsertedModuleBTrap.setStatus('current')
if mibBuilder.loadTexts: acStratumInsertedModuleBTrap.setDescription('A stratum clock module has been inserted into the system.')
acStratumClockModuleAOk = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 9)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"), ("APPIAN-STRATUM-MIB", "acStratumAlarmStatusModuleA"))
if mibBuilder.loadTexts: acStratumClockModuleAOk.setStatus('current')
if mibBuilder.loadTexts: acStratumClockModuleAOk.setDescription('Stratum clock agent has recovered clock timing.')
acStratumClockModuleBOk = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 10)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"), ("APPIAN-STRATUM-MIB", "acStratumAlarmStatusModuleB"))
if mibBuilder.loadTexts: acStratumClockModuleBOk.setStatus('current')
if mibBuilder.loadTexts: acStratumClockModuleBOk.setDescription('Stratum clock agent has recovered clock timing.')
acStratumSystemClockSourceChange = NotificationType((1, 3, 6, 1, 4, 1, 2785, 2, 9, 0, 11)).setObjects(("APPIAN-CHASSIS-MIB", "acChassisCurrentTime"), ("APPIAN-CHASSIS-MIB", "acChassisRingId"), ("APPIAN-STRATUM-MIB", "acStratumNodeId"), ("APPIAN-STRATUM-MIB", "acStratumCurrentClockSourceSystem"))
if mibBuilder.loadTexts: acStratumSystemClockSourceChange.setStatus('current')
if mibBuilder.loadTexts: acStratumSystemClockSourceChange.setDescription('Stratum clock source has changed to acStratumCurrentClockSourceSystem.')
mibBuilder.exportSymbols("APPIAN-STRATUM-MIB", acStratumClockFailureModuleATrap=acStratumClockFailureModuleATrap, acStratumManualSwitch=acStratumManualSwitch, acStratumClockModuleBOk=acStratumClockModuleBOk, acStratumRemovalModuleBTrap=acStratumRemovalModuleBTrap, acStratumBITSFramingType=acStratumBITSFramingType, acStratumTable=acStratumTable, acStratumRevertiveRefSwitchEnabled=acStratumRevertiveRefSwitchEnabled, acStratumRemovalModuleATrap=acStratumRemovalModuleATrap, acStratumFailedModuleBTrap=acStratumFailedModuleBTrap, acStratumLineTimingPortSlot2=acStratumLineTimingPortSlot2, acStratumInsertedModuleATrap=acStratumInsertedModuleATrap, acStratumFailedModuleATrap=acStratumFailedModuleATrap, acStratumTraps=acStratumTraps, acStratumAlarmStatusModuleA=acStratumAlarmStatusModuleA, acStratumNodeId=acStratumNodeId, acStratumClockModuleAOk=acStratumClockModuleAOk, acStratumOpStatusModuleB=acStratumOpStatusModuleB, acStratumForcedSwitch=acStratumForcedSwitch, acStratumCurrentClockSourceModuleA=acStratumCurrentClockSourceModuleA, acStratumAlarmStatusModuleB=acStratumAlarmStatusModuleB, acStratumCurrentClockSourceSystem=acStratumCurrentClockSourceSystem, acStratumClockSource=acStratumClockSource, acStratumCurrentClockSourceModuleB=acStratumCurrentClockSourceModuleB, PYSNMP_MODULE_ID=acStratum, acStratum=acStratum, acStratumLineTimingPortSlot1=acStratumLineTimingPortSlot1, acStratumSystemClockSourceChange=acStratumSystemClockSourceChange, acStratumEntry=acStratumEntry, acStratumOpStatusModuleA=acStratumOpStatusModuleA, acStratumClearAlarms=acStratumClearAlarms, acStratumLockoutReference=acStratumLockoutReference, acStratumClockFailureModuleBTrap=acStratumClockFailureModuleBTrap, acStratumInsertedModuleBTrap=acStratumInsertedModuleBTrap)
| [
"[email protected]"
] | |
1b43082d768a96c889d523cd9c34162a613e63b8 | 5c883c87f337be7ffd52f49f0a4e6c72bbd58932 | /apps/almacenes/migrations/0026_auto_20170322_1009.py | 6bc93dfdfc7667e77ff7d1173f2e6f96fe4acf6f | [] | no_license | DARKDEYMON/Tesis-2-Vidaurre-J.C. | f1b0d8e8a593a9d4a585bdd14b21d4809d55ce9f | 4299cea2e990ee798b02724849d747bfd558b97d | refs/heads/master | 2021-06-20T09:25:53.273225 | 2017-05-25T22:20:31 | 2017-05-25T22:20:31 | 65,408,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,348 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-22 14:09
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('almacenes', '0025_auto_20161029_1535'),
]
operations = [
migrations.AlterField(
model_name='herramientas',
name='decripcion',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='insumos',
name='decripcion',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='maquinaria_equipo',
name='decripcion',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='material',
name='decripcion',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='proveedor',
name='rason_social',
field=models.CharField(max_length=100, unique=True),
),
migrations.AlterField(
model_name='tipoactivo',
name='tipo',
field=models.CharField(max_length=60, unique=True),
),
]
| [
"[email protected]"
] | |
a7a10c869e455f85d0277f3c8391df0683381241 | 742f8aa424b5ef4d9865dee98bebbd5f741a3831 | /tests/test_pregel.py | 8c876136c50ef8db82da2cb79530357b615bc4f3 | [
"MIT"
] | permissive | TZubiri/python-arango | a8be86f2cf9190c2d74d99eb2ef8f5f48b9f45c6 | 232c2d09c7bf9b5e0b71b7ab16fbce6682db383d | refs/heads/master | 2020-04-04T22:24:03.898075 | 2018-11-06T03:59:54 | 2018-11-06T03:59:54 | 156,322,851 | 0 | 0 | null | 2018-11-06T03:51:04 | 2018-11-06T03:51:03 | null | UTF-8 | Python | false | false | 1,823 | py | from __future__ import absolute_import, unicode_literals
from six import string_types
from arango.exceptions import (
PregelJobCreateError,
PregelJobGetError,
PregelJobDeleteError
)
from tests.helpers import (
assert_raises,
generate_string
)
def test_pregel_attributes(db, username):
assert db.pregel.context in ['default', 'async', 'batch', 'transaction']
assert db.pregel.username == username
assert db.pregel.db_name == db.name
assert repr(db.pregel) == '<Pregel in {}>'.format(db.name)
def test_pregel_management(db, graph):
# Test create pregel job
job_id = db.pregel.create_job(
graph.name,
'pagerank',
store=False,
max_gss=100,
thread_count=1,
async_mode=False,
result_field='result',
algorithm_params={'threshold': 0.000001}
)
assert isinstance(job_id, int)
# Test create pregel job with unsupported algorithm
with assert_raises(PregelJobCreateError) as err:
db.pregel.create_job(graph.name, 'invalid')
assert err.value.error_code == 10
# Test get existing pregel job
job = db.pregel.job(job_id)
assert isinstance(job['state'], string_types)
assert isinstance(job['aggregators'], dict)
assert isinstance(job['gss'], int)
assert isinstance(job['received_count'], int)
assert isinstance(job['send_count'], int)
assert isinstance(job['total_runtime'], float)
# Test delete existing pregel job
assert db.pregel.delete_job(job_id) is True
with assert_raises(PregelJobGetError) as err:
db.pregel.job(job_id)
assert err.value.error_code == 10
# Test delete missing pregel job
with assert_raises(PregelJobDeleteError) as err:
db.pregel.delete_job(generate_string())
assert err.value.error_code == 10
| [
"[email protected]"
] | |
6244ec064900b8dd809f7c79a459e071ac1fbc06 | cfa26ab2d83f25f88c61b040e385a8e2b80fad49 | /cmsplugin_cascade/cms_plugins.py | 8f455e4e6ff33669d4cff5e3df130c47f22dc72d | [
"MIT"
] | permissive | jrief/djangocms-cascade | e952ed65c5f8ec14a2d81b424b0797bc5a87413d | 6e4d5ec7d5cbcc076aa1ea9e16b7c55c07f0ef25 | refs/heads/master | 2023-07-07T07:40:20.368478 | 2022-09-13T14:52:53 | 2022-09-13T14:52:53 | 12,973,900 | 143 | 95 | MIT | 2022-05-11T08:16:45 | 2013-09-20T13:20:48 | Python | UTF-8 | Python | false | false | 1,088 | py | import sys
from importlib import import_module
from django.core.exceptions import ImproperlyConfigured
from . import app_settings
for module in app_settings.CASCADE_PLUGINS:
try:
# if a module was specified, load all plugins in module settings
module_settings = import_module('{}.settings'.format(module))
module_plugins = getattr(module_settings, 'CASCADE_PLUGINS', [])
for p in module_plugins:
try:
import_module('{}.{}'.format(module, p))
except ImportError as err:
traceback = sys.exc_info()[2]
msg = "Plugin {} as specified in {}.settings.CASCADE_PLUGINS could not be loaded: {}"
raise ImproperlyConfigured(msg.format(p, module, err.with_traceback(traceback)))
except ImportError:
try:
# otherwise try with cms_plugins in the named module
import_module('{}.cms_plugins'.format(module))
except ImportError:
# otherwise just use the named module as plugin
import_module('{}'.format(module))
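# Illustrative (hypothetical) example of the configuration consumed above; the
# concrete setting name that app_settings maps into CASCADE_PLUGINS is an
# assumption here, not taken from this file:
#
#     CMSPLUGIN_CASCADE_PLUGINS = ['cmsplugin_cascade.bootstrap4', 'myshop.plugins']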
| [
"[email protected]"
] | |
8bdad359dcf597e9a4a118fba408d6d99665be07 | 0102d0999e74deada2aacb8ccfcdc5896a2064a8 | /_request.py | 813f53258f15457ff6a4976a513774c2ea72de36 | [
"Apache-2.0"
] | permissive | CashWin2020/VideoCrawlerEngine | 1b09921add00bb492c8b01dcb0569f5d20c7bed1 | 175bb488dbf29cb0a7d7d15a93536889d022d1fb | refs/heads/master | 2022-10-20T22:24:12.450461 | 2020-06-16T14:32:28 | 2020-06-16T14:32:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,112 | py | from functools import wraps, partial
from inspect import getfullargspec, iscoroutinefunction
from context import impl_ctx
from utils import current_time
from worker import get_worker
from traceback import format_exc
import threading
from queue import Queue
import re
class Request:
""" Request 请求对象是用来描述从脚本的开始到完成过程中的处理方式。
name: 请求名称
"""
name = None
WEIGHT = 1
__simple__ = None
@property
def progress(self):
return self.__progress__
def start_request(self, context=None):
if context is None:
context = {}
context = impl_ctx(context)
self.progress.enqueue()
return get_worker(self.name).submit(self, context)
def end_request(self):
""" 结束请求。"""
raise NotImplementedError
def subrequest(self):
""" 返回该请求的子请求。 """
return []
def error_handler(self, exception):
""" 异常处理。"""
self.progress.error(format_exc())
def getresponse(self):
""" 返回响应 """
return self.__progress__.details()
def get_data(self, name, default=None):
return self.__progress__.data.get(name, default)
def sketch(self):
sketch = self.__progress__.sketch()
sketch.update({
'name': self.name,
})
return sketch
def details(self, log=False):
return self.__progress__.details(log)
def stop(self):
return self.progress.stop()
def __repr__(self):
return f'<{self.__class__.__name__}>'
def __new__(cls, *args, **kwargs):
inst = object.__new__(cls)
inst.__progress__ = RequestProgress()
return inst
def requester(request_name,
weight=1,
sketch_data=(),
bases_cls=None,
root=False,
auto_search=True):
""" 简单请求构建器。
Args:
request_name: 请求者名称
weight: 当前请求器在百分比percent中所占的权重
sketch_data: 上传upload的数据中被sketch()返回的数据字段组成的列表。
bases_cls:
root:
auto_search:
"""
def wrapper(func):
nonlocal bases_cls
argnames, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations = getfullargspec(func)
@wraps(func)
def wrapped(*args, **kwargs):
_worker = partial(inner_worker, *args, **kwargs)
kws = {}
            # fill in defaults for positional parameters
for i, v in enumerate(argnames[len(argnames) - len(defaults or ()):]):
kws[v] = defaults[i]
narg = min(len(args), len(argnames))
            # bind positional arguments
for i in range(narg):
kws[argnames[i]] = args[i]
            # move keyword arguments that name positional parameters
for k in tuple(kwargs):
if k in argnames:
kws[k] = kwargs.pop(k)
            # fill in defaults for keyword-only parameters
for k in kwonlyargs:
kws[k] = kwargs.pop(k, kwonlydefaults[k])
            # collect the remaining undeclared arguments
kws.update({
'args': args[narg:],
'kwargs': kwargs
})
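            # 'result' is the Request subclass assembled below with type()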
req = result(**kws)
req.end_request = _worker
if callable(initializer):
initializer(req)
if auto_search:
subs = _search_request(args)
subs.extend(_search_request(kwargs))
req.__subrequest__ = tuple(subs)
return req
initializer = None
def wrapped_init(init_func):
nonlocal initializer
initializer = init_func
return init_func
wrapped.initializer = wrapped_init
if iscoroutinefunction(func):
async def inner_worker(*args, **kwargs):
return await func(*args, **kwargs)
else:
def inner_worker(*args, **kwargs):
return func(*args, **kwargs)
def __init__(self, **kwargs):
self.args = ()
self.kwargs = {}
            for k, v in kwargs.items():
                setattr(self, k, v)
def __repr__(self):
return f'<{__name__}>'
def subrequest(self):
return self.__subrequest__
if sketch_data:
def sketch(self):
sk = Request.sketch(self)
for k in sketch_data:
sk[k] = self.get_data(k)
return sk
else:
sketch = Request.sketch
__name__ = f'{request_name.title()}Request'
__slots__ = tuple(list(argnames) + kwonlyargs + ['args', 'kwargs'])
class_namespace = {
'name': request_name,
'subrequest': subrequest,
'sketch': sketch,
'WEIGHT': weight,
'__slots__': __slots__,
'__init__': __init__,
'__repr__': __repr__,
'__doc__': func.__doc__,
'__subrequest__': (),
'__simple__': wrapped,
}
if bases_cls is None:
bases_cls = []
if root:
bases = (RootRequest,)
else:
bases = (Request,)
if bases[0] not in bases_cls:
bases_cls = bases + tuple(bases_cls)
result = type(__name__, bases_cls, class_namespace)
return wrapped
return wrapper
def get_requester(name):
""" 返回指定名称的请求器。
Args:
name: 请求器名称
"""
for req_cls in Request.__subclasses__():
if name == req_cls.name:
if req_cls.__simple__:
return req_cls.__simple__
else:
return req_cls
return None
def _is_related_types(obj):
return isinstance(obj, (Request, Option, Optional))
def _search_request(arg):
def _list_tuple_set(o):
for v in o:
if _is_related_types(v):
rs.append(v)
else:
_do(v)
def _dict(o):
for k, v in o.items():
if _is_related_types(k):
rs.append(k)
else:
_do(k)
if _is_related_types(v):
rs.append(v)
else:
_do(v)
def _do(o):
typ = type(o)
if typ in (list, tuple, set):
_list_tuple_set(o)
elif typ is dict:
_dict(o)
elif _is_related_types(o):
rs.append(o)
rs = []
_do(arg)
return rs
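# illustrative: _search_request([req1, {'key': req2}, (1, 2)]) returns [req1, req2]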
class RequestProgress:
EXPORT_ATTR = frozenset({
'percent', 'speed', 'timeleft', 'status'
})
EXPORT_METH = frozenset({
'upload', 'upload_default', 'start', 'close', 'task_done', 'get_data',
'error', 'success', 'info', 'warning', 'report', 'sketch', 'details', 'add_stopper'
})
    __slots__ = ('data', 'logs', '_status', '_percent', '_speed', '_timeleft',
                 '__worker__', '_stoppers', '_closed', '_lock', '_started')
def __init__(self):
self.data = {}
self.logs = []
self._status = REQ_READY
self._percent = 0
self._speed = 0
self._timeleft = float('inf')
self.__worker__ = None
self._stoppers = Queue()
self._lock = threading.Lock()
self._closed = False
self._started = False
@property
def status(self):
status = self._status
return status() if callable(status) else status
@status.setter
def status(self, value):
self._status = value
@property
def percent(self):
percent = self._percent
return percent() if callable(percent) else percent
@percent.setter
def percent(self, value):
self._percent = value
@property
def speed(self):
speed = self._speed
return speed() if callable(speed) else speed
@speed.setter
def speed(self, value):
self._speed = value
@property
def timeleft(self):
timeleft = self._timeleft
return timeleft() if callable(timeleft) else timeleft
@timeleft.setter
def timeleft(self, value):
self._timeleft = value
def sketch(self):
return {
'percent': self.percent,
'status': self.status,
'speed': self.speed,
'timeleft': self.timeleft,
'latest': (self.logs and self.logs[-1]) or ''
}
def details(self, log=False):
data = {k: v() if callable(v) else v for k, v in self.data.items()}
info = self.sketch()
info.update({
'data': data,
})
if log:
info['logs'] = self.logs
return info
def get_data(self, key, default=None):
return self.data.get(key, default)
def upload(self, **kwargs):
""" 上传数据。
:param
**kwargs: 描述信息
"""
for k, v in kwargs.items():
self.data[k] = v
def upload_default(self, key, default):
if key not in self.data:
self.data[key] = default
def enqueue(self, message=''):
self._status = REQ_QUEUING
self.percent = 0
self.report('ENQUEUE:' + message)
def start(self, worker=None):
with self._lock:
self._started = True
self._status = REQ_RUNNING
self.percent = 0
self.timeleft = float('inf')
self.report('START:')
self.__worker__ = worker
def stop(self):
self._status = REQ_STOPPED
with self._lock:
if self._started:
if self._closed:
return False
while True:
stopper = self._stoppers.get()
if stopper is None:
break
try:
stopper()
except:
pass
def close(self, *args, **kwargs):
self._stoppers.put(None)
def add_stopper(self, func):
self._stoppers.put(func)
def task_done(self, message=''):
if self.status == REQ_RUNNING:
self._status = REQ_DONE
self.percent = 100
self.timeleft = 0
self.report('TASK DONE:' + message)
def error(self, message):
self._status = REQ_ERROR
self.report('ERROR: ' + message)
def success(self, message):
self.report('SUCCESS: ' + message)
def info(self, message):
self.report('INFO: ' + message)
def warning(self, message):
self.report('WARNING: ' + message)
def report(self, message):
message = current_time() + ' ' + message
self.logs.append(message)
class Optional:
""" 可选请求列表 """
__slots__ = '_options', '_selected'
def __init__(self, options):
"""
:param
list: 可选择的项列表
sort_key: 项目排序的key
"""
self._options = options
self._selected = None
def __iter__(self):
return iter(self._options)
@property
def selected(self):
""" 返回被选择的项。"""
if self._selected is None:
raise ValueError('未选择的列表。')
return self._selected
def select(self, rule):
""" 根据rule来选择最恰当的选项。
:param
rule: 选择规则
- high: 最高质量 100
- middle: 中等质量 50
- low: 最低质量 1
- %d: 1-100 [1,100] - (注意: 倾向于高质量。)
"""
if rule == 'high':
rule = 100
elif rule == 'low':
rule = 1
elif rule == 'middle':
rule = 50
if isinstance(rule, int) and 1 <= rule <= 100:
selected = self._options[max(0, int((100-rule) * len(self._options) / 100) - 1)]
else:
selected = self._options[0]
self._selected = selected
return selected
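    # e.g. select('high') keeps options[0]; the index formula deliberately
    # rounds toward the front of the list, assuming options are ordered best-first.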
def __getattr__(self, item):
return getattr(self._selected, item)
def __repr__(self):
return repr(self._selected)
class Option:
""" 可选列表的选项 """
__slots__ = '_content', 'descriptions'
def __init__(self, content, descriptions=None):
self._content = content
if descriptions is None:
descriptions = {}
self.descriptions = descriptions
def __repr__(self):
return str(self._content)
def __getattr__(self, item):
return getattr(self._content, item)
@property
def content(self):
return self._content
class Response:
def __init__(self, request, **desc):
self.__name = request.name
desc.update(request.progress.data)
self.__datadict = desc
def __getattr__(self, item):
return self.__datadict[item]
def __repr__(self):
return '<%s %s>' % (self.__name, str(self.__dict__))
REQ_READY = 0
REQ_QUEUING = 1
REQ_RUNNING = 2
REQ_STOPPED = 3
REQ_WARNING = 4
REQ_ERROR = -1
REQ_DONE = 5
RE_VALID_PATHNAME = re.compile(r'[\\/:*?"<>|\r\n]+')
class RootRequest(Request):
name = 'root'
discard_next = False
def end_request(self):
raise NotImplementedError
def _all_status(iteration):
status = REQ_DONE
for i in iteration:
_b = i.status()
if _b == REQ_ERROR:
status = REQ_ERROR
break
elif _b == REQ_STOPPED:
status = REQ_STOPPED
break
elif _b == REQ_RUNNING:
status = REQ_RUNNING
break
elif _b != REQ_DONE:
status = REQ_QUEUING
break
return status
def requester_info():
return_dict = {}
for v in Request.__subclasses__():
return_dict[v.name] = {
'weight': v.WEIGHT
}
return return_dict
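# Hedged usage sketch (not part of this module): how the decorator is meant to
# be wired up. It only runs inside the full project, because execution depends
# on the worker/context modules imported at the top; the names below are
# illustrative assumptions, not taken from this file.
#
#     @requester('download', weight=2, sketch_data=('filename',))
#     def download(url, *, timeout=30):
#         ...  # fetch url; the generated DownloadRequest wraps this function
#
#     req = download('http://example.com/a.mp4')  # builds a DownloadRequest
#     req.start_request()                         # submits it to the 'download' worker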
| [
"[email protected]"
] | |
7ec4112133d33b3aff667aac27a9a4b8451f92f9 | fbb53a3366a0f10a7eb8070620cacec5101459fb | /company/m-solutions2019/c.py | 16ee9cebc54f31595a786fb0932d2b433b17b306 | [] | no_license | penicillin0/atcoder | 272bf0b9f211907c9f7f2491335f0d34f2dcd43b | 827d5cdc03531d48a44e021bd702f80b305f64d6 | refs/heads/master | 2023-08-05T09:43:50.114694 | 2021-09-20T09:21:07 | 2021-09-20T09:21:07 | 256,395,305 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,161 | py | N = int(input())
par = [-1] * N  # a root stores -(size of its set)
if N == 1:
print(0)
# find the root of the set that x belongs to
def find(x):
    if par[x] < 0:
        return x
    else:
        par[x] = find(par[x])  # path compression
        return par[x]
# size of the set containing x
def size(x):
return -par[find(x)]
# merge the sets containing x and y
def unite(x, y):
    # find both roots
    x, y = find(x), find(y)
    # already in the same set
    if x == y:
        return
    # attach the smaller set to the larger one
    if size(x) < size(y):
        x, y = y, x
    # update the size stored at the root x
    par[x] += par[y]
    # make x the parent of y
    par[y] = x
# check whether x and y belong to the same set
def same(x, y):
return find(x) == find(y)
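# e.g. after unite(0, 1) and unite(1, 2): same(0, 2) is True and size(0) == 3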
AB = [list(map(int, input().split())) for _ in range(N - 1)]
C = list(map(int, input().split()))
for ab in AB:
a, b = ab
a, b = a - 1, b - 1
if same(a, b):
continue
else:
unite(a, b)
n = find(0)
# print(n)
ret = sum(C) - max(C)
print(ret)
m = C.index(max(C))
if n != m:
C[n], C[m] = C[m], C[n]
C = list(map(str, C))
print(' '.join(C))
| [
"[email protected]"
] | |
3623f7dea2f82a675fd99637d86022f3c7006302 | 41986b7a1b95784f0a6256ae24d5942c70ced4d7 | /prod/google-cloud-sdk/lib/googlecloudsdk/third_party/apis/gkehub/v1/gkehub_v1_messages.py | a3f4adacead73fa41eb1423f39268a2fe43b0d51 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | wakabayashi-seiya/terraform_gcp | ed829a5a21d5d19d6663804ee5d5f7f3d23b4ec4 | f757e56779f33c2fabd8a8eed9c51ff0b897a38f | refs/heads/master | 2021-07-07T21:51:35.993317 | 2020-03-11T05:42:57 | 2020-03-11T05:42:57 | 239,411,772 | 0 | 1 | null | 2021-04-30T21:05:04 | 2020-02-10T02:32:04 | Python | UTF-8 | Python | false | false | 44,251 | py | """Generated message classes for gkehub version v1.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'gkehub'
class AuditConfig(_messages.Message):
r"""Specifies the audit configuration for a service. The configuration
determines which permission types are logged, and what identities, if any,
are exempted from logging. An AuditConfig must have one or more
AuditLogConfigs. If there are AuditConfigs for both `allServices` and a
specific service, the union of the two AuditConfigs is used for that
service: the log_types specified in each AuditConfig are enabled, and the
exempted_members in each AuditLogConfig are exempted. Example Policy with
multiple AuditConfigs: { "audit_configs": [ {
"service": "allServices" "audit_log_configs": [ {
"log_type": "DATA_READ", "exempted_members": [
"user:[email protected]" ] }, {
"log_type": "DATA_WRITE", }, {
"log_type": "ADMIN_READ", } ] }, {
"service": "sampleservice.googleapis.com" "audit_log_configs": [
{ "log_type": "DATA_READ", }, {
"log_type": "DATA_WRITE", "exempted_members": [
"user:[email protected]" ] } ] }
] } For sampleservice, this policy enables DATA_READ, DATA_WRITE and
ADMIN_READ logging. It also exempts [email protected] from DATA_READ logging,
and [email protected] from DATA_WRITE logging.
Fields:
auditLogConfigs: The configuration for logging of each type of permission.
service: Specifies a service that will be enabled for audit logging. For
example, `storage.googleapis.com`, `cloudsql.googleapis.com`.
`allServices` is a special value that covers all services.
"""
auditLogConfigs = _messages.MessageField('AuditLogConfig', 1, repeated=True)
service = _messages.StringField(2)
class AuditLogConfig(_messages.Message):
r"""Provides the configuration for logging a type of permissions. Example:
{ "audit_log_configs": [ { "log_type": "DATA_READ",
"exempted_members": [ "user:[email protected]" ]
}, { "log_type": "DATA_WRITE", } ] }
This enables 'DATA_READ' and 'DATA_WRITE' logging, while exempting
[email protected] from DATA_READ logging.
Enums:
LogTypeValueValuesEnum: The log type that this config enables.
Fields:
exemptedMembers: Specifies the identities that do not cause logging for
this type of permission. Follows the same format of Binding.members.
logType: The log type that this config enables.
"""
class LogTypeValueValuesEnum(_messages.Enum):
r"""The log type that this config enables.
Values:
LOG_TYPE_UNSPECIFIED: Default case. Should never be this.
ADMIN_READ: Admin reads. Example: CloudIAM getIamPolicy
DATA_WRITE: Data writes. Example: CloudSQL Users create
DATA_READ: Data reads. Example: CloudSQL Users list
"""
LOG_TYPE_UNSPECIFIED = 0
ADMIN_READ = 1
DATA_WRITE = 2
DATA_READ = 3
exemptedMembers = _messages.StringField(1, repeated=True)
logType = _messages.EnumField('LogTypeValueValuesEnum', 2)
class Binding(_messages.Message):
r"""Associates `members` with a `role`.
Fields:
condition: The condition that is associated with this binding. NOTE: An
unsatisfied condition will not allow user access via current binding.
Different bindings, including their conditions, are examined
independently.
members: Specifies the identities requesting access for a Cloud Platform
resource. `members` can have the following values: * `allUsers`: A
special identifier that represents anyone who is on the internet;
with or without a Google account. * `allAuthenticatedUsers`: A special
identifier that represents anyone who is authenticated with a Google
account or a service account. * `user:{emailid}`: An email address that
represents a specific Google account. For example,
`[email protected]` . * `serviceAccount:{emailid}`: An email address
that represents a service account. For example, `my-other-
[email protected]`. * `group:{emailid}`: An email address
that represents a Google group. For example, `[email protected]`. *
`deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique
identifier) representing a user that has been recently deleted. For
example, `[email protected]?uid=123456789012345678901`. If the user is
recovered, this value reverts to `user:{emailid}` and the recovered user
retains the role in the binding. *
`deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address
(plus unique identifier) representing a service account that has been
recently deleted. For example, `my-other-
[email protected]?uid=123456789012345678901`. If the
service account is undeleted, this value reverts to
`serviceAccount:{emailid}` and the undeleted service account retains the
role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An
email address (plus unique identifier) representing a Google group
that has been recently deleted. For example,
`[email protected]?uid=123456789012345678901`. If the group is
recovered, this value reverts to `group:{emailid}` and the recovered
group retains the role in the binding. * `domain:{domain}`: The G
Suite domain (primary) that represents all the users of that domain.
For example, `google.com` or `example.com`.
role: Role that is assigned to `members`. For example, `roles/viewer`,
`roles/editor`, or `roles/owner`.
"""
condition = _messages.MessageField('Expr', 1)
members = _messages.StringField(2, repeated=True)
role = _messages.StringField(3)
class CancelOperationRequest(_messages.Message):
r"""The request message for Operations.CancelOperation."""
class ConnectAgentResource(_messages.Message):
r"""ConnectAgentResource represents a Kubernetes resource manifest for
  connect agent deployment.
Fields:
manifest: YAML manifest of the resource.
type: Kubernetes type of the resource.
"""
manifest = _messages.StringField(1)
type = _messages.MessageField('TypeMeta', 2)
class Empty(_messages.Message):
r"""A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to use it as the request
or the response type of an API method. For instance: service Foo {
rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
JSON representation for `Empty` is empty JSON object `{}`.
"""
class Expr(_messages.Message):
r"""Represents a textual expression in the Common Expression Language (CEL)
syntax. CEL is a C-like expression language. The syntax and semantics of CEL
are documented at https://github.com/google/cel-spec. Example (Comparison):
title: "Summary size limit" description: "Determines if a summary is
less than 100 chars" expression: "document.summary.size() < 100"
Example (Equality): title: "Requestor is owner" description:
"Determines if requestor is the document owner" expression:
"document.owner == request.auth.claims.email" Example (Logic): title:
"Public documents" description: "Determine whether the document should
be publicly visible" expression: "document.type != 'private' &&
document.type != 'internal'" Example (Data Manipulation): title:
"Notification string" description: "Create a notification string with a
timestamp." expression: "'New message received at ' +
string(document.create_time)" The exact variables and functions that may be
referenced within an expression are determined by the service that evaluates
it. See the service documentation for additional information.
Fields:
description: Optional. Description of the expression. This is a longer
text which describes the expression, e.g. when hovered over it in a UI.
expression: Textual representation of an expression in Common Expression
Language syntax.
location: Optional. String indicating the location of the expression for
error reporting, e.g. a file name and a position in the file.
title: Optional. Title for the expression, i.e. a short string describing
its purpose. This can be used e.g. in UIs which allow to enter the
expression.
"""
description = _messages.StringField(1)
expression = _messages.StringField(2)
location = _messages.StringField(3)
title = _messages.StringField(4)
class GenerateConnectManifestResponse(_messages.Message):
r"""Response message for `GkeHubService.GenerateConnectManifest` method.
Fields:
manifest: The ordered list of Kubernetes resources that need to be applied
to the cluster for GKE Connect agent installation/upgrade.
"""
manifest = _messages.MessageField('ConnectAgentResource', 1, repeated=True)
class GkeCluster(_messages.Message):
r"""GkeCluster represents a k8s cluster on GKE.
Fields:
resourceLink: Self-link of the GCP resource for the GKE cluster. For
example: //container.googleapis.com/v1/projects/my-project/zones/us-
west1-a/clusters/my-cluster It can be at the most 1000 characters in
length.
"""
resourceLink = _messages.StringField(1)
class GkehubProjectsLocationsGetRequest(_messages.Message):
r"""A GkehubProjectsLocationsGetRequest object.
Fields:
name: Resource name for the location.
"""
name = _messages.StringField(1, required=True)
class GkehubProjectsLocationsListRequest(_messages.Message):
r"""A GkehubProjectsLocationsListRequest object.
Fields:
filter: The standard list filter.
name: The resource that owns the locations collection, if applicable.
pageSize: The standard list page size.
pageToken: The standard list page token.
"""
filter = _messages.StringField(1)
name = _messages.StringField(2, required=True)
pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(4)
class GkehubProjectsLocationsMembershipsCreateRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsCreateRequest object.
Fields:
membership: A Membership resource to be passed as the request body.
membershipId: Required. Client chosen ID for the membership. The ID must
be a valid RFC 1123 compliant DNS label. In particular, the ID must be:
1. At most 63 characters in length 2. It must consist of lower case
alphanumeric characters or `-` 3. It must start and end with an
alphanumeric character I.e. ID must match the regex:
`[a-z0-9]([-a-z0-9]*[a-z0-9])?` with at most 63 characters.
parent: Required. The parent in whose context the membership is created.
The parent value is in the format:
`projects/[project_id]/locations/global`.
"""
membership = _messages.MessageField('Membership', 1)
membershipId = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class GkehubProjectsLocationsMembershipsDeleteRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsDeleteRequest object.
Fields:
name: Required. The membership resource name in the format:
`projects/[project_id]/locations/global/memberships/[membership_id]`
"""
name = _messages.StringField(1, required=True)
class GkehubProjectsLocationsMembershipsGenerateConnectManifestRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsGenerateConnectManifestRequest
object.
Fields:
imagePullSecretContent: Optional. The image pull secret content for the
registry, if not public.
isUpgrade: Optional. If true, generate the resources for upgrade only.
Some resources (e.g. secrets) generated for installation will be
excluded.
name: Required. The membership resource the connect agent is associated
with.
`projects/[project_id]/locations/global/memberships/[membership_id]`.
namespace: Optional. Namespace for GKE Connect agent resources. If empty,
uses 'gke-connect'.
proxy: Optional. URI of a proxy if connectivity from the agent to
gkeconnect.googleapis.com requires the use of a proxy. Format must be in
the form http(s)://{proxy_address}, depending on the HTTP/HTTPS protocol
supported by the proxy. This will direct the connect agent's outbound
traffic through a HTTP(S) proxy.
registry: Optional. The registry to fetch connect agent image; default to
gcr.io/gkeconnect.
version: Optional. The version to use for connect agent. If empty, the
current default version will be used.
"""
imagePullSecretContent = _messages.BytesField(1)
isUpgrade = _messages.BooleanField(2)
name = _messages.StringField(3, required=True)
namespace = _messages.StringField(4)
proxy = _messages.BytesField(5)
registry = _messages.StringField(6)
version = _messages.StringField(7)
class GkehubProjectsLocationsMembershipsGetIamPolicyRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsGetIamPolicyRequest object.
Fields:
options_requestedPolicyVersion: Optional. The policy format version to be
returned. Valid values are 0, 1, and 3. Requests specifying an invalid
value will be rejected. Requests for policies with any conditional
bindings must specify version 3. Policies without any conditional
bindings may specify any valid value or leave the field unset.
resource: REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this
field.
"""
options_requestedPolicyVersion = _messages.IntegerField(1, variant=_messages.Variant.INT32)
resource = _messages.StringField(2, required=True)
class GkehubProjectsLocationsMembershipsGetRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsGetRequest object.
Fields:
name: Required. The Membership resource name in the format:
`projects/[project_id]/locations/global/memberships/[membership_id]`
"""
name = _messages.StringField(1, required=True)
class GkehubProjectsLocationsMembershipsListRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsListRequest object.
Fields:
filter: Optional. Lists the Memberships that match the filter expression.
A filter expression filters the resources listed in the response. The
expression must be of the form `{field} {operator} {value}` where
operators: `<`, `>`, `<=`,`>=`, `!=`, `=`, `:` are supported (colon `:`
represents a HAS operator which is roughly synonymous with equality).
`{field}` can refer to a proto or JSON field, or a synthetic field.
Field names can be camelCase or snake_case. Examples: - Filter by name:
name = "projects/foo-proj/locations/global/membership/bar - Filter by
labels: - Resources that have a key called `foo` labels.foo:* -
Resources that have a key called `foo` whose value is `bar`
labels.foo = bar - Filter by state: - Members in CREATING state.
state = CREATING
orderBy: Optional. Field to use to sort the list.
pageSize: Optional. When requesting a 'page' of resources, `page_size`
specifies number of resources to return. If unspecified or set to 0, all
resources will be returned.
pageToken: Optional. Token returned by previous call to `ListMemberships`
which specifies the position in the list from where to continue listing
the resources.
parent: Required. The parent in whose context the memberships are listed.
The parent value is in the format:
`projects/[project_id]/locations/global`.
"""
filter = _messages.StringField(1)
orderBy = _messages.StringField(2)
pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(4)
parent = _messages.StringField(5, required=True)
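# A filter following the grammar documented above might be built like this
# (sketch; the project id and label value are assumptions):
#   req = GkehubProjectsLocationsMembershipsListRequest(
#       parent='projects/my-proj/locations/global',
#       filter='labels.foo = bar',
#       pageSize=50)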
class GkehubProjectsLocationsMembershipsPatchRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsPatchRequest object.
Fields:
membership: A Membership resource to be passed as the request body.
name: Required. The membership resource name in the format:
`projects/[project_id]/locations/global/memberships/[membership_id]`
updateMask: Required. Mask of fields to update. At least one field path
must be specified in this mask.
"""
membership = _messages.MessageField('Membership', 1)
name = _messages.StringField(2, required=True)
updateMask = _messages.StringField(3)
class GkehubProjectsLocationsMembershipsSetIamPolicyRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsSetIamPolicyRequest object.
Fields:
resource: REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this
field.
setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
request body.
"""
resource = _messages.StringField(1, required=True)
setIamPolicyRequest = _messages.MessageField('SetIamPolicyRequest', 2)
class GkehubProjectsLocationsMembershipsTestIamPermissionsRequest(_messages.Message):
r"""A GkehubProjectsLocationsMembershipsTestIamPermissionsRequest object.
Fields:
resource: REQUIRED: The resource for which the policy detail is being
requested. See the operation documentation for the appropriate value for
this field.
testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
passed as the request body.
"""
resource = _messages.StringField(1, required=True)
testIamPermissionsRequest = _messages.MessageField('TestIamPermissionsRequest', 2)
class GkehubProjectsLocationsOperationsCancelRequest(_messages.Message):
r"""A GkehubProjectsLocationsOperationsCancelRequest object.
Fields:
cancelOperationRequest: A CancelOperationRequest resource to be passed as
the request body.
name: The name of the operation resource to be cancelled.
"""
cancelOperationRequest = _messages.MessageField('CancelOperationRequest', 1)
name = _messages.StringField(2, required=True)
class GkehubProjectsLocationsOperationsDeleteRequest(_messages.Message):
r"""A GkehubProjectsLocationsOperationsDeleteRequest object.
Fields:
name: The name of the operation resource to be deleted.
"""
name = _messages.StringField(1, required=True)
class GkehubProjectsLocationsOperationsGetRequest(_messages.Message):
r"""A GkehubProjectsLocationsOperationsGetRequest object.
Fields:
name: The name of the operation resource.
"""
name = _messages.StringField(1, required=True)
class GkehubProjectsLocationsOperationsListRequest(_messages.Message):
r"""A GkehubProjectsLocationsOperationsListRequest object.
Fields:
filter: The standard list filter.
name: The name of the operation's parent resource.
pageSize: The standard list page size.
pageToken: The standard list page token.
"""
filter = _messages.StringField(1)
name = _messages.StringField(2, required=True)
pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(4)
class GoogleRpcStatus(_messages.Message):
r"""The `Status` type defines a logical error model that is suitable for
different programming environments, including REST APIs and RPC APIs. It is
used by [gRPC](https://github.com/grpc). Each `Status` message contains
three pieces of data: error code, error message, and error details. You can
find out more about this error model and how to work with it in the [API
Design Guide](https://cloud.google.com/apis/design/errors).
Messages:
DetailsValueListEntry: A DetailsValueListEntry object.
Fields:
code: The status code, which should be an enum value of google.rpc.Code.
details: A list of messages that carry the error details. There is a
common set of message types for APIs to use.
message: A developer-facing error message, which should be in English. Any
user-facing error message should be localized and sent in the
google.rpc.Status.details field, or localized by the client.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class DetailsValueListEntry(_messages.Message):
r"""A DetailsValueListEntry object.
Messages:
AdditionalProperty: An additional property for a DetailsValueListEntry
object.
Fields:
additionalProperties: Properties of the object. Contains field @type
with type URL.
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a DetailsValueListEntry object.
Fields:
key: Name of the additional property.
value: A extra_types.JsonValue attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('extra_types.JsonValue', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
code = _messages.IntegerField(1, variant=_messages.Variant.INT32)
details = _messages.MessageField('DetailsValueListEntry', 2, repeated=True)
message = _messages.StringField(3)
class ListLocationsResponse(_messages.Message):
r"""The response message for Locations.ListLocations.
Fields:
locations: A list of locations that matches the specified filter in the
request.
nextPageToken: The standard List next-page token.
"""
locations = _messages.MessageField('Location', 1, repeated=True)
nextPageToken = _messages.StringField(2)
class ListMembershipsResponse(_messages.Message):
r"""Response message for the `GkeHub.ListMemberships` method.
Fields:
nextPageToken: A token to request the next page of resources from the
`ListMemberships` method. The value of an empty string means that there
are no more resources to return.
resources: The list of Memberships contained within the parent.
unreachable: List of locations that could not be reached while fetching
this list.
"""
nextPageToken = _messages.StringField(1)
resources = _messages.MessageField('Membership', 2, repeated=True)
unreachable = _messages.StringField(3, repeated=True)
class ListOperationsResponse(_messages.Message):
r"""The response message for Operations.ListOperations.
Fields:
nextPageToken: The standard List next-page token.
operations: A list of operations that matches the specified filter in the
request.
"""
nextPageToken = _messages.StringField(1)
operations = _messages.MessageField('Operation', 2, repeated=True)
class Location(_messages.Message):
r"""A resource that represents Google Cloud Platform location.
Messages:
LabelsValue: Cross-service attributes for the location. For example
{"cloud.googleapis.com/region": "us-east1"}
MetadataValue: Service-specific metadata. For example the available
capacity at the given location.
Fields:
displayName: The friendly name for this location, typically a nearby city
name. For example, "Tokyo".
labels: Cross-service attributes for the location. For example
{"cloud.googleapis.com/region": "us-east1"}
locationId: The canonical id for this location. For example: `"us-east1"`.
metadata: Service-specific metadata. For example the available capacity at
the given location.
name: Resource name for the location, which may vary between
implementations. For example: `"projects/example-project/locations/us-
east1"`
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
r"""Cross-service attributes for the location. For example
{"cloud.googleapis.com/region": "us-east1"}
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class MetadataValue(_messages.Message):
r"""Service-specific metadata. For example the available capacity at the
given location.
Messages:
AdditionalProperty: An additional property for a MetadataValue object.
Fields:
additionalProperties: Properties of the object. Contains field @type
with type URL.
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a MetadataValue object.
Fields:
key: Name of the additional property.
value: A extra_types.JsonValue attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('extra_types.JsonValue', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
displayName = _messages.StringField(1)
labels = _messages.MessageField('LabelsValue', 2)
locationId = _messages.StringField(3)
metadata = _messages.MessageField('MetadataValue', 4)
name = _messages.StringField(5)
class Membership(_messages.Message):
r"""Membership contains information about a member cluster.
Messages:
LabelsValue: Optional. GCP labels for this membership.
Fields:
createTime: Output only. Timestamp for when the Membership was created.
deleteTime: Output only. Timestamp for when the Membership was deleted.
description: Output only. Description of this membership, limited to 63
characters. It will match the regex: `a-zA-Z0-9*` This field is present
for legacy purposes.
endpoint: Optional. Endpoint information to reach this member.
externalId: Optional. An externally-generated and managed ID for this
Membership. This ID may still be modified after creation but it is not
recommended to do so. The ID must match the regex: `a-zA-Z0-9*`
labels: Optional. GCP labels for this membership.
lastConnectionTime: Output only. For clusters using Connect, the timestamp
of the most recent connection established with Google Cloud. This time
is updated every several minutes, not continuously. For clusters that do
not use GKE Connect, or that have never connected successfully, this
field will be unset.
name: Output only. The unique name of this domain resource in the format:
`projects/[project_id]/locations/global/memberships/[membership_id]`.
`membership_id` can only be set at creation time using the
`membership_id` field in the creation request. `membership_id` must be a
valid RFC 1123 compliant DNS label. In particular, it must be: 1. At
most 63 characters in length 2. It must consist of lower case
alphanumeric characters or `-` 3. It must start and end with an
alphanumeric character I.e. `membership_id` must match the regex:
`[a-z0-9]([-a-z0-9]*[a-z0-9])?` with at most 63 characters.
state: Output only. State of the Membership resource.
updateTime: Output only. Timestamp for when the Membership was last
updated.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
r"""Optional. GCP labels for this membership.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
createTime = _messages.StringField(1)
deleteTime = _messages.StringField(2)
description = _messages.StringField(3)
endpoint = _messages.MessageField('MembershipEndpoint', 4)
externalId = _messages.StringField(5)
labels = _messages.MessageField('LabelsValue', 6)
lastConnectionTime = _messages.StringField(7)
name = _messages.StringField(8)
state = _messages.MessageField('MembershipState', 9)
updateTime = _messages.StringField(10)
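# The `membership_id` rule documented above can be pre-checked client-side
# (illustrative sketch, not part of the generated module):
#   import re
#   mid = 'my-cluster-1'  # candidate membership_id (assumed)
#   ok = len(mid) <= 63 and re.match(r'^[a-z0-9]([-a-z0-9]*[a-z0-9])?$', mid)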
class MembershipEndpoint(_messages.Message):
r"""MembershipEndpoint contains the information to reach a member.
Fields:
gkeCluster: If this Membership is a Kubernetes API server hosted on GKE,
this is a self link to its GCP resource.
"""
gkeCluster = _messages.MessageField('GkeCluster', 1)
class MembershipState(_messages.Message):
r"""State of the Membership resource.
Enums:
CodeValueValuesEnum: Code indicating the state of the Membership resource.
Fields:
code: Code indicating the state of the Membership resource.
description: Human readable description of the issue.
updateTime: The last update time of this state by the controllers
"""
class CodeValueValuesEnum(_messages.Enum):
r"""Code indicating the state of the Membership resource.
Values:
CODE_UNSPECIFIED: Not set.
CREATING: CREATING indicates the cluster is being registered.
READY: READY indicates the cluster is registered.
DELETING: DELETING indicates that the cluster is being unregistered.
UPDATING: UPDATING indicates that the cluster registration is being
updated.
"""
CODE_UNSPECIFIED = 0
CREATING = 1
READY = 2
DELETING = 3
UPDATING = 4
code = _messages.EnumField('CodeValueValuesEnum', 1)
description = _messages.StringField(2)
updateTime = _messages.StringField(3)
class Operation(_messages.Message):
r"""This resource represents a long-running operation that is the result of
a network API call.
Messages:
MetadataValue: Service-specific metadata associated with the operation.
It typically contains progress information and common metadata such as
create time. Some services might not provide such metadata. Any method
that returns a long-running operation should document the metadata type,
if any.
ResponseValue: The normal response of the operation in case of success.
If the original method returns no data on success, such as `Delete`, the
response is `google.protobuf.Empty`. If the original method is standard
`Get`/`Create`/`Update`, the response should be the resource. For other
methods, the response should have the type `XxxResponse`, where `Xxx` is
the original method name. For example, if the original method name is
`TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
Fields:
done: If the value is `false`, it means the operation is still in
progress. If `true`, the operation is completed, and either `error` or
`response` is available.
error: The error result of the operation in case of failure or
cancellation.
metadata: Service-specific metadata associated with the operation. It
typically contains progress information and common metadata such as
create time. Some services might not provide such metadata. Any method
that returns a long-running operation should document the metadata type,
if any.
name: The server-assigned name, which is only unique within the same
service that originally returns it. If you use the default HTTP mapping,
the `name` should be a resource name ending with
`operations/{unique_id}`.
response: The normal response of the operation in case of success. If the
original method returns no data on success, such as `Delete`, the
response is `google.protobuf.Empty`. If the original method is standard
`Get`/`Create`/`Update`, the response should be the resource. For other
methods, the response should have the type `XxxResponse`, where `Xxx` is
the original method name. For example, if the original method name is
`TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class MetadataValue(_messages.Message):
r"""Service-specific metadata associated with the operation. It typically
contains progress information and common metadata such as create time.
Some services might not provide such metadata. Any method that returns a
long-running operation should document the metadata type, if any.
Messages:
AdditionalProperty: An additional property for a MetadataValue object.
Fields:
additionalProperties: Properties of the object. Contains field @type
with type URL.
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a MetadataValue object.
Fields:
key: Name of the additional property.
value: A extra_types.JsonValue attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('extra_types.JsonValue', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class ResponseValue(_messages.Message):
r"""The normal response of the operation in case of success. If the
original method returns no data on success, such as `Delete`, the response
is `google.protobuf.Empty`. If the original method is standard
`Get`/`Create`/`Update`, the response should be the resource. For other
methods, the response should have the type `XxxResponse`, where `Xxx` is
the original method name. For example, if the original method name is
`TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
Messages:
AdditionalProperty: An additional property for a ResponseValue object.
Fields:
additionalProperties: Properties of the object. Contains field @type
with type URL.
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a ResponseValue object.
Fields:
key: Name of the additional property.
value: A extra_types.JsonValue attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('extra_types.JsonValue', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
done = _messages.BooleanField(1)
error = _messages.MessageField('GoogleRpcStatus', 2)
metadata = _messages.MessageField('MetadataValue', 3)
name = _messages.StringField(4)
response = _messages.MessageField('ResponseValue', 5)
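# Typical polling loop implied by `done`/`error`/`response` above (sketch; the
# `client` object and its Get call are assumptions):
#   while not op.done:
#       op = client.Get(GkehubProjectsLocationsOperationsGetRequest(name=op.name))
#   outcome = op.error if op.error else op.response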
class Policy(_messages.Message):
r"""An Identity and Access Management (IAM) policy, which specifies access
controls for Google Cloud resources. A `Policy` is a collection of
`bindings`. A `binding` binds one or more `members` to a single `role`.
Members can be user accounts, service accounts, Google groups, and domains
(such as G Suite). A `role` is a named list of permissions; each `role` can
be an IAM predefined role or a user-created custom role. Optionally, a
`binding` can specify a `condition`, which is a logical expression that
allows access to a resource only if the expression evaluates to `true`. A
condition can add constraints based on attributes of the request, the
resource, or both. **JSON example:** { "bindings": [ {
"role": "roles/resourcemanager.organizationAdmin", "members": [
"user:[email protected]", "group:[email protected]",
"domain:google.com", "serviceAccount:my-project-
[email protected]" ] }, {
"role": "roles/resourcemanager.organizationViewer", "members":
["user:[email protected]"], "condition": { "title":
"expirable access", "description": "Does not grant access after
Sep 2020", "expression": "request.time <
timestamp('2020-10-01T00:00:00.000Z')", } } ],
"etag": "BwWWja0YfJA=", "version": 3 } **YAML example:**
bindings: - members: - user:[email protected] -
group:[email protected] - domain:google.com - serviceAccount
:[email protected] role:
roles/resourcemanager.organizationAdmin - members: -
user:[email protected] role: roles/resourcemanager.organizationViewer
condition: title: expirable access description: Does not
grant access after Sep 2020 expression: request.time <
timestamp('2020-10-01T00:00:00.000Z') - etag: BwWWja0YfJA= -
version: 3 For a description of IAM and its features, see the [IAM
documentation](https://cloud.google.com/iam/docs/).
Fields:
auditConfigs: Specifies cloud audit logging configuration for this policy.
bindings: Associates a list of `members` to a `role`. Optionally, may
specify a `condition` that determines how and when the `bindings` are
applied. Each of the `bindings` must contain at least one member.
etag: `etag` is used for optimistic concurrency control as a way to help
prevent simultaneous updates of a policy from overwriting each other. It
is strongly suggested that systems make use of the `etag` in the read-
modify-write cycle to perform policy updates in order to avoid race
conditions: An `etag` is returned in the response to `getIamPolicy`, and
systems are expected to put that etag in the request to `setIamPolicy`
to ensure that their change will be applied to the same version of the
policy. **Important:** If you use IAM Conditions, you must include the
`etag` field whenever you call `setIamPolicy`. If you omit this field,
then IAM allows you to overwrite a version `3` policy with a version `1`
policy, and all of the conditions in the version `3` policy are lost.
version: Specifies the format of the policy. Valid values are `0`, `1`,
and `3`. Requests that specify an invalid value are rejected. Any
operation that affects conditional role bindings must specify version
`3`. This requirement applies to the following operations: * Getting a
policy that includes a conditional role binding * Adding a conditional
role binding to a policy * Changing a conditional role binding in a
policy * Removing any role binding, with or without a condition, from a
policy that includes conditions **Important:** If you use IAM
Conditions, you must include the `etag` field whenever you call
`setIamPolicy`. If you omit this field, then IAM allows you to overwrite
a version `3` policy with a version `1` policy, and all of the
conditions in the version `3` policy are lost. If a policy does not
include any conditions, operations on that policy may specify any valid
version or leave the field unset.
"""
auditConfigs = _messages.MessageField('AuditConfig', 1, repeated=True)
bindings = _messages.MessageField('Binding', 2, repeated=True)
etag = _messages.BytesField(3)
version = _messages.IntegerField(4, variant=_messages.Variant.INT32)
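# The etag read-modify-write cycle described above, as a sketch (`client`,
# `get_request` and `new_binding` are assumptions):
#   policy = client.GetIamPolicy(get_request)
#   policy.bindings.append(new_binding)  # modify under the fetched etag
#   client.SetIamPolicy(SetIamPolicyRequest(policy=policy))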
class SetIamPolicyRequest(_messages.Message):
r"""Request message for `SetIamPolicy` method.
Fields:
policy: REQUIRED: The complete policy to be applied to the `resource`. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
updateMask: OPTIONAL: A FieldMask specifying which fields of the policy to
modify. Only the fields in the mask will be modified. If no mask is
provided, the following default mask is used: paths: "bindings, etag"
This field is only used by Cloud IAM.
"""
policy = _messages.MessageField('Policy', 1)
updateMask = _messages.StringField(2)
class StandardQueryParameters(_messages.Message):
r"""Query parameters accepted by all methods.
Enums:
FXgafvValueValuesEnum: V1 error format.
AltValueValuesEnum: Data format for response.
Fields:
f__xgafv: V1 error format.
access_token: OAuth access token.
alt: Data format for response.
callback: JSONP
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters.
trace: A tracing token of the form "token:<tokenid>" to include in api
requests.
uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
"""
class AltValueValuesEnum(_messages.Enum):
r"""Data format for response.
Values:
json: Responses with Content-Type of application/json
media: Media download with context-dependent Content-Type
proto: Responses with Content-Type of application/x-protobuf
"""
json = 0
media = 1
proto = 2
class FXgafvValueValuesEnum(_messages.Enum):
r"""V1 error format.
Values:
_1: v1 error format
_2: v2 error format
"""
_1 = 0
_2 = 1
f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
access_token = _messages.StringField(2)
alt = _messages.EnumField('AltValueValuesEnum', 3, default=u'json')
callback = _messages.StringField(4)
fields = _messages.StringField(5)
key = _messages.StringField(6)
oauth_token = _messages.StringField(7)
prettyPrint = _messages.BooleanField(8, default=True)
quotaUser = _messages.StringField(9)
trace = _messages.StringField(10)
uploadType = _messages.StringField(11)
upload_protocol = _messages.StringField(12)
class TestIamPermissionsRequest(_messages.Message):
r"""Request message for `TestIamPermissions` method.
Fields:
permissions: The set of permissions to check for the `resource`.
Permissions with wildcards (such as '*' or 'storage.*') are not allowed.
For more information see [IAM
Overview](https://cloud.google.com/iam/docs/overview#permissions).
"""
permissions = _messages.StringField(1, repeated=True)
class TestIamPermissionsResponse(_messages.Message):
r"""Response message for `TestIamPermissions` method.
Fields:
permissions: A subset of `TestPermissionsRequest.permissions` that the
caller is allowed.
"""
permissions = _messages.StringField(1, repeated=True)
class TypeMeta(_messages.Message):
r"""TypeMeta is the type information needed for content unmarshalling of the
Kubernetes resources in the manifest.
Fields:
apiVersion: APIVersion of the resource (e.g. v1).
kind: Kind of the resource (e.g. Deployment).
"""
apiVersion = _messages.StringField(1)
kind = _messages.StringField(2)
encoding.AddCustomJsonFieldMapping(
StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
| [
"[email protected]"
] | |
f151e8badd6b1cb50965d9bd65e92835c2ea1db8 | e5abf2028b9e0b39a5bf905f14c401d3645bdb9a | /display.py | 2bcbbfdf5758468c37a0db038d2334e6b808bfba | [
"MIT"
] | permissive | vieirafrancisco/car-adventure | 2d2723e44fcb216f2ea37c1b35a1ec5f6f6fba8a | 79a86d830699f131fd4e4aa2031969aa7eae1a50 | refs/heads/master | 2020-03-30T00:01:11.845899 | 2018-09-28T22:27:57 | 2018-09-28T22:27:57 | 150,501,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | import pygame
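# Usage sketch for the class below (assumed driver code, not in the original):
#   pygame.init()
#   display = DisplaySurface(800, 600)
#   screen = display.get_display_surface()
#   pygame.display.flip()  # present the double-buffered surface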
class DisplaySurface:
def __init__(self, width, height):
self.width = width
self.height = height
self._size = (self.width, self.height)
self._display_surface = pygame.display.set_mode(self._size, pygame.HWSURFACE | pygame.DOUBLEBUF)
def get_display_surface(self):
return self._display_surface
def get_size(self):
return self._size | [
"[email protected]"
] | |
6f310f436ac9574a69159a506b99a3faa814ef2b | f9b6c56cec99eb2147777c4448b4b8ad757ff074 | /longest_harmounious_subsequence.py | 1f2b1f59cd7bdf90c4e192bd21e008bf7b4f26d3 | [] | no_license | zhrmrz/longest_harmounious_subsequence | 268676d4c1d7f76cddb10fcaa42fb8718689f3c6 | 71ddac4edd4d3948d462aae430ba7154f4aa921f | refs/heads/master | 2020-08-29T03:39:23.468859 | 2019-10-27T20:34:52 | 2019-10-27T20:34:52 | 217,913,179 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | from collections import Counter
class Sol:
def longest_harmounious_subsequence(self,nums):
max_subarr=0
freq=Counter(nums)
for num,count in freq.items():
if num+1 in freq:
                max_subarr=max(max_subarr,count+freq[num+1])
        return max_subarr
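# Example (hypothetical check, not in the original file):
#   Sol().longest_harmounious_subsequence([1, 3, 2, 2, 5, 2, 3, 7]) == 5
#   (the longest harmonious subsequence here is [3, 2, 2, 2, 3])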
"[email protected]"
] | |
611a2b09ca927db5d34e83c7de96170e37583a7a | fdc0b72a3782a06952df4d723783dfa1bae65753 | /admin_request_for_information/models/__init__.py | a840dc8f0a87fc036e57720fc0f08c1b21d27938 | [] | no_license | Denbho/vendor_portal | 0878ad82bf3c40d38f6e123f6b25a358bfebce4f | 341a7ca77cbd310f3835d4b43de5012354a307c5 | refs/heads/main | 2023-04-19T21:26:56.115346 | 2021-05-17T04:16:53 | 2021-05-17T04:16:53 | 364,744,567 | 2 | 0 | null | 2021-05-06T04:34:51 | 2021-05-06T00:52:09 | Python | UTF-8 | Python | false | false | 62 | py | # -*- coding: utf-8 -*-
from . import request_for_information
| [
"[email protected]"
] | |
81ca32d7661a077e47039a5f78868c9fc5d381a8 | 66fda6586a902f8043b1f5e9532699babc7b591a | /lib_openshift/models/v1_build_config_status.py | fd78c8cbca2d5966a2f4258b5b0d00f8861062a6 | [
"Apache-2.0"
] | permissive | chouseknecht/lib_openshift | 86eff74b4659f05dfbab1f07d2d7f42b21e2252d | 02b0e4348631e088e72a982a55c214b30a4ab9d9 | refs/heads/master | 2020-12-11T05:23:17.081794 | 2016-07-28T20:15:39 | 2016-07-28T20:15:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,610 | py | # coding: utf-8
"""
OpenAPI spec version:
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class V1BuildConfigStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
operations = [
]
def __init__(self, last_version=None):
"""
V1BuildConfigStatus - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'last_version': 'int'
}
self.attribute_map = {
'last_version': 'lastVersion'
}
self._last_version = last_version
@property
def last_version(self):
"""
Gets the last_version of this V1BuildConfigStatus.
LastVersion is used to inform about number of last triggered build.
:return: The last_version of this V1BuildConfigStatus.
:rtype: int
"""
return self._last_version
@last_version.setter
def last_version(self, last_version):
"""
Sets the last_version of this V1BuildConfigStatus.
LastVersion is used to inform about number of last triggered build.
:param last_version: The last_version of this V1BuildConfigStatus.
:type: int
"""
self._last_version = last_version
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
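# Minimal usage sketch (assumed caller code, not part of the generated client):
#   status = V1BuildConfigStatus(last_version=4)
#   status.to_dict()  # -> {'last_version': 4}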
| [
"[email protected]"
] | |
b13f38f3e8d8a5795b2d0d326e3fc93575f01d54 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02577/s751792218.py | eea7ba3706c9f7076b7627d00c4c3aa5626f695a | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | N = input()
N_list = []
for i in N:
N_list.append(int(i))
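# An integer is divisible by 9 exactly when its digit sum is, e.g. for
# N = "576": 5 + 7 + 6 = 18 and 18 % 9 == 0, so the answer is "Yes".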
if sum(N_list) % 9 == 0:
print("Yes")
else: print("No") | [
"[email protected]"
] | |
d080a80308e02553e9baac9420d73834f92a2979 | c026581b6c3855c75e7c9f9c6397acadc7833fb7 | /idm_core/name/urls.py | 5778785362e85f4443b71c0f79b76a31eb6f7cbe | [] | no_license | mans0954/idm-core | 5734fd08a3c8c5deaec62167c9470336f0c6c6ef | 2a3cf326e0bb3db469e2b318b122033a7dd92b83 | refs/heads/master | 2021-07-24T04:13:47.021951 | 2017-11-02T22:09:25 | 2017-11-02T22:09:25 | 109,317,967 | 1 | 0 | null | 2017-11-02T20:56:01 | 2017-11-02T20:55:58 | null | UTF-8 | Python | false | false | 745 | py | from django.conf.urls import url
from . import views
uuid_re = '[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}'
urlpatterns = [
url(r'^name/$',
views.NameListView.as_view(), name='name-list-self'),
url(r'^(?P<identity_type>[a-z-]+)/(?P<identity_id>' + uuid_re + ')/name/$',
views.NameListView.as_view(), name='name-list'),
url(r'^name/(?P<pk>[1-9][0-9]*)/$', views.NameDetailView.as_view(), name='name-detail'),
url(r'^name/new:(?P<context>[\w-]+)/$',
views.NameCreateView.as_view(), name='name-create-self'),
url(r'^(?P<identity_type>[a-z-]+)/(?P<identity_id>' + uuid_re + ')/name/new:(?P<context>[\w-]+)/$',
views.NameCreateView.as_view(), name='name-create'),
]
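# Example reversal (sketch; assumes this urlconf is included at the site root):
#   reverse('name-detail', kwargs={'pk': 3})  # -> '/name/3/'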
| [
"[email protected]"
] | |
37d18cddc7cd04f237cb183c58d0244a8489f42e | a9c3c0c958ed33646a6acfe97780d4939e1e0308 | /tensorflow/contrib/distribute/python/estimator_training_test.py | bd643bdbb4f4793433f41577484ae6545ba7d1bf | [
"Apache-2.0"
] | permissive | therladbsgh/tensorflow | 458fa3d34a48449845ded366cc8243fd177bfe49 | 9d5d35bf74c2dd4b65303a76b817fd1cf060df9b | refs/heads/master | 2020-05-15T00:33:30.533332 | 2019-04-18T01:15:45 | 2019-04-18T01:30:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,119 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests that show Distribute Coordinator works with Estimator."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import glob
import json
import os
import sys
import tempfile
from absl.testing import parameterized
import numpy as np
from tensorflow.contrib.distribute.python import collective_all_reduce_strategy
from tensorflow.contrib.distribute.python import mirrored_strategy
from tensorflow.contrib.distribute.python import parameter_server_strategy
from tensorflow.contrib.optimizer_v2 import adagrad
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import cross_device_ops as cross_device_ops_lib
from tensorflow.python.distribute import distribute_coordinator as dc
from tensorflow.python.distribute import estimator_training as dc_training
from tensorflow.python.distribute import multi_worker_test_base
from tensorflow.python.distribute.distribute_config import DistributeConfig
from tensorflow.python.eager import context
from tensorflow.python.estimator import exporter as exporter_lib
from tensorflow.python.estimator import run_config as run_config_lib
from tensorflow.python.estimator import training as estimator_training
from tensorflow.python.estimator.canned import dnn_linear_combined
from tensorflow.python.estimator.canned import prediction_keys
from tensorflow.python.estimator.export import export as export_lib
from tensorflow.python.feature_column import feature_column_lib as feature_column
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.summary import summary_iterator
from tensorflow.python.summary.writer import writer_cache
from tensorflow.python.training import session_manager
BATCH_SIZE = 10
LABEL_DIMENSION = 2
DATA = np.linspace(
0., 2., BATCH_SIZE * LABEL_DIMENSION, dtype=np.float32).reshape(
BATCH_SIZE, LABEL_DIMENSION)
EVAL_NAME = "foo"
EXPORTER_NAME = "saved_model_exporter"
MAX_STEPS = 10
CHIEF = dc._TaskType.CHIEF
EVALUATOR = dc._TaskType.EVALUATOR
WORKER = dc._TaskType.WORKER
PS = dc._TaskType.PS
original_run_std_server = dc._run_std_server
class DistributeCoordinatorIntegrationTest(
multi_worker_test_base.IndependentWorkerTestBase, parameterized.TestCase):
@classmethod
def setUpClass(cls):
"""Create a local cluster with 2 workers."""
super(DistributeCoordinatorIntegrationTest, cls).setUpClass()
cls._cluster_spec = multi_worker_test_base.create_in_process_cluster(
num_workers=3, num_ps=2, has_eval=True)
def setUp(self):
self._model_dir = tempfile.mkdtemp()
super(DistributeCoordinatorIntegrationTest, self).setUp()
def dataset_input_fn(self, x, y, batch_size, shuffle):
def input_fn():
dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
if shuffle:
dataset = dataset.shuffle(batch_size)
dataset = dataset.repeat(100).batch(batch_size)
return dataset
return input_fn
def _get_exporter(self, name, fc):
feature_spec = feature_column.make_parse_example_spec(fc)
serving_input_receiver_fn = (
export_lib.build_parsing_serving_input_receiver_fn(feature_spec))
return exporter_lib.LatestExporter(
name, serving_input_receiver_fn=serving_input_receiver_fn)
def _extract_loss_and_global_step(self, event_folder):
"""Returns the loss and global step in last event."""
event_paths = glob.glob(os.path.join(event_folder, "events*"))
self.assertNotEmpty(
event_paths, msg="Event file not found in dir %s" % event_folder)
loss = None
global_step_count = None
for e in summary_iterator.summary_iterator(event_paths[-1]):
current_loss = None
for v in e.summary.value:
if v.tag == "loss":
current_loss = v.simple_value
# If loss is not found, global step is meaningless.
if current_loss is None:
continue
current_global_step = e.step
if global_step_count is None or current_global_step > global_step_count:
global_step_count = current_global_step
loss = current_loss
return (loss, global_step_count)
def _get_estimator(self,
train_distribute,
eval_distribute,
remote_cluster=None):
input_dimension = LABEL_DIMENSION
linear_feature_columns = [
feature_column.numeric_column("x", shape=(input_dimension,))
]
dnn_feature_columns = [
feature_column.numeric_column("x", shape=(input_dimension,))
]
return dnn_linear_combined.DNNLinearCombinedRegressor(
linear_feature_columns=linear_feature_columns,
dnn_hidden_units=(2, 2),
dnn_feature_columns=dnn_feature_columns,
label_dimension=LABEL_DIMENSION,
model_dir=self._model_dir,
dnn_optimizer=adagrad.AdagradOptimizer(0.001),
linear_optimizer=adagrad.AdagradOptimizer(0.001),
config=run_config_lib.RunConfig(
experimental_distribute=DistributeConfig(
train_distribute=train_distribute,
eval_distribute=eval_distribute,
remote_cluster=remote_cluster)))
def _complete_flow(self,
train_distribute,
eval_distribute,
remote_cluster=None,
use_train_and_evaluate=True):
estimator = self._get_estimator(train_distribute, eval_distribute,
remote_cluster)
input_dimension = LABEL_DIMENSION
train_input_fn = self.dataset_input_fn(
x={"x": DATA},
y=DATA,
batch_size=BATCH_SIZE // train_distribute.num_replicas_in_sync,
shuffle=True)
if eval_distribute:
eval_batch_size = BATCH_SIZE // eval_distribute.num_replicas_in_sync
else:
eval_batch_size = BATCH_SIZE
eval_input_fn = self.dataset_input_fn(
x={"x": DATA}, y=DATA, batch_size=eval_batch_size, shuffle=False)
linear_feature_columns = [
feature_column.numeric_column("x", shape=(input_dimension,))
]
dnn_feature_columns = [
feature_column.numeric_column("x", shape=(input_dimension,))
]
feature_columns = linear_feature_columns + dnn_feature_columns
eval_spec = estimator_training.EvalSpec(
name=EVAL_NAME,
input_fn=eval_input_fn,
steps=None,
exporters=self._get_exporter(EXPORTER_NAME, feature_columns),
start_delay_secs=0,
throttle_secs=1)
if use_train_and_evaluate:
estimator_training.train_and_evaluate(
estimator,
estimator_training.TrainSpec(train_input_fn, max_steps=MAX_STEPS),
eval_spec)
else:
estimator.train(train_input_fn, max_steps=MAX_STEPS)
latest_ckpt_path = estimator.latest_checkpoint()
metrics = estimator.evaluate(eval_input_fn,
checkpoint_path=latest_ckpt_path,
name=EVAL_NAME)
# Export the eval result to files.
eval_result = estimator_training._EvalResult(
status=estimator_training._EvalStatus.EVALUATED,
metrics=metrics,
checkpoint_path=latest_ckpt_path)
evaluator = estimator_training._TrainingExecutor._Evaluator(estimator,
eval_spec,
None)
evaluator._export_eval_result(eval_result, True)
return estimator
def _inspect_train_and_eval_events(self, estimator):
# Make sure nothing is stuck in limbo.
writer_cache.FileWriterCache.clear()
# Examine the training events. Use a range to check global step to avoid
    # flakiness due to a global step race condition.
training_loss, _ = self._extract_loss_and_global_step(self._model_dir)
self.assertIsNotNone(training_loss)
# Examine the eval events. The global step should be accurate.
eval_dir = os.path.join(self._model_dir, "eval_" + EVAL_NAME)
eval_loss, eval_global_step = self._extract_loss_and_global_step(
event_folder=eval_dir)
self.assertIsNotNone(eval_loss)
self.assertGreaterEqual(eval_global_step, MAX_STEPS)
# Examine the export folder.
export_dir = os.path.join(
os.path.join(self._model_dir, "export"), EXPORTER_NAME)
self.assertTrue(gfile.Exists(export_dir))
# Examine the ckpt for predict.
def predict_input_fn():
return dataset_ops.Dataset.from_tensor_slices({
"x": DATA
}).batch(BATCH_SIZE)
predicted_proba = np.array([
x[prediction_keys.PredictionKeys.PREDICTIONS]
for x in estimator.predict(predict_input_fn)
])
self.assertAllEqual((BATCH_SIZE, LABEL_DIMENSION), predicted_proba.shape)
def _make_cross_device_ops(self, num_gpus_per_worker):
return cross_device_ops_lib.MultiWorkerAllReduce(
["/job:worker/task:0", "/job:worker/task:1", "/job:worker/task:2"],
num_gpus_per_worker)
def _get_strategy_object(self, strategy_cls):
if strategy_cls == mirrored_strategy.CoreMirroredStrategy:
return strategy_cls(
cross_device_ops=self._make_cross_device_ops(
num_gpus_per_worker=context.num_gpus()))
elif strategy_cls == mirrored_strategy.MirroredStrategy:
return strategy_cls(
num_gpus_per_worker=context.num_gpus(),
cross_device_ops=self._make_cross_device_ops(
num_gpus_per_worker=context.num_gpus()))
else:
return strategy_cls(num_gpus_per_worker=context.num_gpus())
@combinations.generate(
combinations.combine(
mode=["graph"],
train_distribute_cls=[
collective_all_reduce_strategy.CollectiveAllReduceStrategy,
mirrored_strategy.MirroredStrategy,
mirrored_strategy.CoreMirroredStrategy,
parameter_server_strategy.ParameterServerStrategy
],
eval_distribute_cls=[
None,
mirrored_strategy.MirroredStrategy,
mirrored_strategy.CoreMirroredStrategy,
parameter_server_strategy.ParameterServerStrategy,
collective_all_reduce_strategy.CollectiveAllReduceStrategy,
],
required_gpus=[0, 1]))
def test_complete_flow_standalone_client(self, train_distribute_cls,
eval_distribute_cls):
train_distribute = self._get_strategy_object(train_distribute_cls)
if eval_distribute_cls:
eval_distribute = self._get_strategy_object(eval_distribute_cls)
else:
eval_distribute = None
cluster_spec = copy.deepcopy(self._cluster_spec)
if (train_distribute_cls !=
parameter_server_strategy.ParameterServerStrategy):
cluster_spec.pop("ps", None)
estimator = self._complete_flow(train_distribute, eval_distribute,
cluster_spec)
self._inspect_train_and_eval_events(estimator)
@combinations.generate(
combinations.combine(
mode=["graph"],
eval_distribute_class=[
None,
mirrored_strategy.MirroredStrategy,
mirrored_strategy.CoreMirroredStrategy,
parameter_server_strategy.ParameterServerStrategy,
],
required_gpus=[0, 1]))
def test_complete_flow_standalone_client_collective_nccl(
self, eval_distribute_class):
train_distribute = (
collective_all_reduce_strategy.CollectiveAllReduceStrategy(
num_gpus_per_worker=context.num_gpus(),
communication=cross_device_ops_lib.CollectiveCommunication.NCCL))
if eval_distribute_class:
eval_distribute = self._get_strategy_object(eval_distribute_class)
else:
eval_distribute = None
cluster_spec = copy.deepcopy(self._cluster_spec)
cluster_spec.pop("ps", None)
estimator = self._complete_flow(train_distribute, eval_distribute,
cluster_spec)
self._inspect_train_and_eval_events(estimator)
@combinations.generate(
combinations.combine(
mode=["graph"],
train_distribute_cls=[
mirrored_strategy.MirroredStrategy,
mirrored_strategy.CoreMirroredStrategy,
],
eval_distribute_cls=[
None,
mirrored_strategy.MirroredStrategy,
mirrored_strategy.CoreMirroredStrategy,
],
required_gpus=[0, 1]))
def test_estimator_standalone_client(self, train_distribute_cls,
eval_distribute_cls):
train_distribute = self._get_strategy_object(train_distribute_cls)
if eval_distribute_cls:
eval_distribute = self._get_strategy_object(eval_distribute_cls)
else:
eval_distribute = None
# We use the whole cluster for evaluation.
cluster = copy.deepcopy(self._cluster_spec)
cluster.pop("evaluator", None)
estimator = self._complete_flow(
train_distribute, eval_distribute, remote_cluster=cluster,
use_train_and_evaluate=False)
self._inspect_train_and_eval_events(estimator)
def _mock_run_std_server(self, *args, **kwargs):
ret = original_run_std_server(*args, **kwargs)
# Wait for all std servers to be brought up in order to reduce the chance of
# remote sessions taking local ports that have been assigned to std servers.
self._barrier.wait()
return ret
def _independent_worker_fn(
self,
train_distribute,
eval_distribute,
):
with test.mock.patch.object(dc, "_run_std_server",
self._mock_run_std_server):
self._complete_flow(train_distribute, eval_distribute)
@combinations.generate(
combinations.combine(
mode=["graph"],
train_distribute_cls=[
collective_all_reduce_strategy.CollectiveAllReduceStrategy,
parameter_server_strategy.ParameterServerStrategy,
],
eval_distribute_cls=[
None,
mirrored_strategy.MirroredStrategy,
mirrored_strategy.CoreMirroredStrategy,
parameter_server_strategy.ParameterServerStrategy,
collective_all_reduce_strategy.CollectiveAllReduceStrategy,
],
required_gpus=[0, 1]))
def test_complete_flow_independent_worker_between_graph(
self, train_distribute_cls, eval_distribute_cls):
if (context.num_gpus() < 2 and eval_distribute_cls ==
collective_all_reduce_strategy.CollectiveAllReduceStrategy):
self.skipTest("`CollectiveAllReduceStrategy` needs at least two towers.")
train_distribute = self._get_strategy_object(train_distribute_cls)
if eval_distribute_cls:
eval_distribute = self._get_strategy_object(eval_distribute_cls)
else:
eval_distribute = None
if (train_distribute_cls == parameter_server_strategy
.ParameterServerStrategy):
cluster_spec = multi_worker_test_base.create_cluster_spec(
num_workers=3, num_ps=2, has_eval=True)
# 3 workers, 2 ps and 1 evaluator.
self._barrier = dc._Barrier(6)
else:
cluster_spec = multi_worker_test_base.create_cluster_spec(
num_workers=3, num_ps=0, has_eval=True)
# 3 workers and 1 evaluator.
self._barrier = dc._Barrier(4)
threads = self.run_multiple_tasks_in_threads(self._independent_worker_fn,
cluster_spec, train_distribute,
eval_distribute)
threads_to_join = []
for task_type, ts in threads.items():
if task_type == PS:
continue
for t in ts:
threads_to_join.append(t)
self.join_independent_workers(threads_to_join)
estimator = self._get_estimator(train_distribute, eval_distribute)
self._inspect_train_and_eval_events(estimator)
@combinations.generate(
combinations.combine(
mode=["graph"],
train_distribute_cls=[
mirrored_strategy.MirroredStrategy,
mirrored_strategy.CoreMirroredStrategy
],
eval_distribute_cls=[
None,
mirrored_strategy.MirroredStrategy,
mirrored_strategy.CoreMirroredStrategy
],
required_gpus=[0, 1]))
def test_complete_flow_independent_worker_in_graph(self, train_distribute_cls,
eval_distribute_cls):
train_distribute = self._get_strategy_object(train_distribute_cls)
if eval_distribute_cls:
eval_distribute = self._get_strategy_object(eval_distribute_cls)
else:
eval_distribute = None
cluster_spec = multi_worker_test_base.create_cluster_spec(
num_workers=3, num_ps=0, has_eval=True)
# 3 workers and 1 evaluator.
self._barrier = dc._Barrier(4)
threads = self.run_multiple_tasks_in_threads(self._independent_worker_fn,
cluster_spec, train_distribute,
eval_distribute)
self.join_independent_workers([threads[WORKER][0], threads[EVALUATOR][0]])
estimator = self._get_estimator(train_distribute, eval_distribute)
self._inspect_train_and_eval_events(estimator)
TF_CONFIG_WITH_CHIEF = {
"cluster": {
"chief": ["fake_chief"],
},
"task": {
"type": "chief",
"index": 0
}
}
TF_CONFIG_WITH_MASTER = {
"cluster": {
"master": ["fake_master"],
},
"task": {
"type": "master",
"index": 0
}
}
TF_CONFIG_WITHOUT_TASK = {"cluster": {"chief": ["fake_worker"]}}
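# The dicts above mirror the TF_CONFIG environment variable that the distribute
# coordinator integration parses for cluster membership; the tests below
# install them with, e.g.:
#   test.mock.patch.dict("os.environ",
#                        {"TF_CONFIG": json.dumps(TF_CONFIG_WITH_CHIEF)})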
class RunConfigTest(test.TestCase):
def test_previously_unexpected_cluster_spec(self):
with test.mock.patch.dict(
"os.environ", {"TF_CONFIG": json.dumps(TF_CONFIG_WITHOUT_TASK)}):
run_config_lib.RunConfig(
experimental_distribute=DistributeConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy(
["/device:GPU:0", "/device:GPU:1"])))
def test_should_run_distribute_coordinator(self):
"""Tests that should_run_distribute_coordinator return a correct value."""
# We don't use distribute coordinator for local training.
self.assertFalse(
dc_training.should_run_distribute_coordinator(
run_config_lib.RunConfig()))
# When `train_distribute` is not specified, don't use distribute
# coordinator.
with test.mock.patch.dict("os.environ",
{"TF_CONFIG": json.dumps(TF_CONFIG_WITH_CHIEF)}):
self.assertFalse(
dc_training.should_run_distribute_coordinator(
run_config_lib.RunConfig()))
# When `train_distribute` is specified and TF_CONFIG is detected, use
# distribute coordinator.
with test.mock.patch.dict("os.environ",
{"TF_CONFIG": json.dumps(TF_CONFIG_WITH_CHIEF)}):
config_with_train_distribute = run_config_lib.RunConfig(
experimental_distribute=DistributeConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy(
["/device:GPU:0", "/device:GPU:1"])))
config_with_eval_distribute = run_config_lib.RunConfig(
experimental_distribute=DistributeConfig(
eval_distribute=mirrored_strategy.CoreMirroredStrategy(
["/device:GPU:0", "/device:GPU:1"])))
self.assertTrue(
dc_training.should_run_distribute_coordinator(
config_with_train_distribute))
self.assertFalse(
dc_training.should_run_distribute_coordinator(
config_with_eval_distribute))
# With a master in the cluster, don't run distribute coordinator.
with test.mock.patch.dict("os.environ",
{"TF_CONFIG": json.dumps(TF_CONFIG_WITH_MASTER)}):
config = run_config_lib.RunConfig(
experimental_distribute=DistributeConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy(
["/device:GPU:0", "/device:GPU:1"])))
self.assertFalse(dc_training.should_run_distribute_coordinator(config))
def test_init_run_config_duplicate_distribute(self):
with self.assertRaises(ValueError):
run_config_lib.RunConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy(),
experimental_distribute=DistributeConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy()))
with self.assertRaises(ValueError):
run_config_lib.RunConfig(
eval_distribute=mirrored_strategy.CoreMirroredStrategy(),
experimental_distribute=DistributeConfig(
eval_distribute=mirrored_strategy.CoreMirroredStrategy()))
def test_init_run_config_none_distribute_coordinator_mode(self):
# We don't use distribute coordinator for local training.
config = run_config_lib.RunConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy())
dc_training.init_run_config(config, {})
self.assertIsNone(config._distribute_coordinator_mode)
# With a master in the cluster, don't run distribute coordinator.
with test.mock.patch.dict("os.environ",
{"TF_CONFIG": json.dumps(TF_CONFIG_WITH_MASTER)}):
config = run_config_lib.RunConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy())
self.assertIsNone(config._distribute_coordinator_mode)
# When `train_distribute` is not specified, don't use distribute
# coordinator.
with test.mock.patch.dict("os.environ",
{"TF_CONFIG": json.dumps(TF_CONFIG_WITH_CHIEF)}):
config = run_config_lib.RunConfig()
self.assertFalse(hasattr(config, "_distribute_coordinator_mode"))
def test_init_run_config_independent_worker(self):
# When `train_distribute` is specified and TF_CONFIG is detected, use
# distribute coordinator with INDEPENDENT_WORKER mode.
with test.mock.patch.dict("os.environ",
{"TF_CONFIG": json.dumps(TF_CONFIG_WITH_CHIEF)}):
config = run_config_lib.RunConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy())
self.assertEqual(config._distribute_coordinator_mode,
dc.CoordinatorMode.INDEPENDENT_WORKER)
def test_init_run_config_standalone_client(self):
# When `train_distribute` is specified, TF_CONFIG is detected and
# `experimental.remote_cluster` is set use distribute coordinator with
# STANDALONE_CLIENT mode.
config = run_config_lib.RunConfig(
train_distribute=mirrored_strategy.CoreMirroredStrategy(),
experimental_distribute=DistributeConfig(
remote_cluster={"chief": ["fake_worker"]}))
self.assertEqual(config._distribute_coordinator_mode,
dc.CoordinatorMode.STANDALONE_CLIENT)
if __name__ == "__main__":
# Reduce `recovery_wait_secs` from 30 seconds so the test completes quickly.
orig_init = session_manager.SessionManager.__init__
def new_init(*args, **kwargs):
kwargs.pop("recovery_wait_secs", None)
kwargs["recovery_wait_secs"] = 0.5
orig_init(*args, **kwargs)
session_manager.SessionManager.__init__ = new_init
with test.mock.patch.object(sys, "exit", os._exit):
test.main()
| [
"[email protected]"
] | |
5ad0428c695af2b019eeb2f0663b66e863d03a50 | c11c27b07086e97c633a833d37787474724bd2d2 | /src/ResNeXt/concateFeature.py | 6d8f98b7b69a10e8dff467812e4cacb8108ba6ef | [
"MIT"
] | permissive | willyspinner/High-Performance-Face-Recognition | d1826a73653dede6b43799439e4fb692f119c70b | c5caad61be97fd20f9c47a727278ff938dc5cc8f | refs/heads/master | 2020-06-22T16:36:29.663302 | 2019-07-19T09:41:47 | 2019-07-19T09:41:47 | 197,746,624 | 0 | 0 | MIT | 2019-07-19T09:42:00 | 2019-07-19T09:41:59 | null | UTF-8 | Python | false | false | 1,953 | py | import scipy.io as sio
import pickle
import numpy as np
import os
from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
from scipy import spatial
from sklearn.externals import joblib
import time
reducedDim = 2048
pca = PCA(n_components = reducedDim, whiten = True)
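# Whitened PCA maps each descriptor to a reducedDim-dimensional vector, e.g.
# fit_transform on an (N, 6144) matrix would yield (N, 2048); the 6144 input
# width is an illustrative assumption for the concatenated features.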
path = "/media/zhaojian/6TB/data/extra_general_model_feature/"
with open(path + "NovelSet_List/NovelSet_1.txt", 'r') as f:
lines = f.readlines()
vggFeatures = np.loadtxt(path + 'NovelSet_Fea/VGG_NOVELSET_1.txt')
print "vggFeatures.shape: ", vggFeatures.shape
inputFeaturePath = "extracted_feature/NovelSet_1IdentityFeature/"
outputFeaturePath = "extracted_feature/NovelSet_1IdentityFeaturePCA2048/"
features = []
labelList = []
for index in range(len(lines)):
print index
line = lines[index]
ID = line.split("/")[-2]
print ID
labelList.append(ID)
    vggFeature = vggFeatures[index].flatten()
print "vggFeature.shape", vggFeature.shape
# caffeFeature = sio.loadmat(inputFeaturePath + ID + ".mat")["identityFeature"].flatten()
# print "caffeFeature.shape", caffeFeature.shape
#
# identityFeature = np.concatenate((caffeFeature, vggFeature), axis = 0)
# print "identityFeature.shape: ", identityFeature.shape
identityFeature = vggFeature
features.append(identityFeature)
features = np.asarray(features)
print "features..shape: ", features.shape
# sio.savemat("concatenateFeatures", {"identityFeature": features})
# sio.savemat("vggNovelSet_1_Features", {"identityFeature": features})
features = sio.loadmat("vggNovelSet_1_Features")['identityFeature']
#
# features = pca.fit_transform(features)
#
print "features..shape: ", features.shape
#
#
for index in range(len(features)):
identityFeature = features[index]
print "identityFeature.shape: ", identityFeature.shape
label = labelList[index]
# print index
# print label
sio.savemat(outputFeaturePath + label, {"identityFeature": identityFeature})
| [
"[email protected]"
] | |
35f3e6fc87bf0e774aa1fc4dd0a9fff46bc4aee3 | bd4dcd90d41aa228f0384c9ba03edd105a93d7ec | /products/migrations/0101_auto_20200221_2128.py | 40b496b06fcc159e8132ad5c55c7e06b1c94a954 | [] | no_license | deganoth/mu-shop | 0be0bb0cfa635986b37edbe371daf8373f09aefd | dc1a77ecf6217286c005d762b559fe3f61ef2f6d | refs/heads/master | 2023-02-17T08:23:36.339586 | 2023-01-10T17:51:21 | 2023-01-10T17:51:21 | 243,972,792 | 0 | 1 | null | 2023-02-15T23:10:09 | 2020-02-29T13:22:02 | Python | UTF-8 | Python | false | false | 5,567 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.24 on 2020-02-21 21:28
from __future__ import unicode_literals
from django.db import migrations
import djmoney.models.fields
class Migration(migrations.Migration):
dependencies = [
('products', '0100_auto_20200221_2125'),
]
operations = [
migrations.AlterField(
model_name='product',
name='price_currency',
field=djmoney.models.fields.CurrencyField(choices=[('XUA', 'ADB Unit of Account'), ('AFN', 'Afghani'), ('DZD', 'Algerian Dinar'), ('ARS', 'Argentine Peso'), ('AMD', 'Armenian Dram'), ('AWG', 'Aruban Guilder'), ('AUD', 'Australian Dollar'), ('AZN', 'Azerbaijanian Manat'), ('BSD', 'Bahamian Dollar'), ('BHD', 'Bahraini Dinar'), ('THB', 'Baht'), ('PAB', 'Balboa'), ('BBD', 'Barbados Dollar'), ('BYN', 'Belarussian Ruble'), ('BYR', 'Belarussian Ruble'), ('BZD', 'Belize Dollar'), ('BMD', 'Bermudian Dollar (customarily known as Bermuda Dollar)'), ('BTN', 'Bhutanese ngultrum'), ('VEF', 'Bolivar Fuerte'), ('BOB', 'Boliviano'), ('XBA', 'Bond Markets Units European Composite Unit (EURCO)'), ('BRL', 'Brazilian Real'), ('BND', 'Brunei Dollar'), ('BGN', 'Bulgarian Lev'), ('BIF', 'Burundi Franc'), ('XOF', 'CFA Franc BCEAO'), ('XAF', 'CFA franc BEAC'), ('XPF', 'CFP Franc'), ('CAD', 'Canadian Dollar'), ('CVE', 'Cape Verde Escudo'), ('KYD', 'Cayman Islands Dollar'), ('CLP', 'Chilean peso'), ('XTS', 'Codes specifically reserved for testing purposes'), ('COP', 'Colombian peso'), ('KMF', 'Comoro Franc'), ('CDF', 'Congolese franc'), ('BAM', 'Convertible Marks'), ('NIO', 'Cordoba Oro'), ('CRC', 'Costa Rican Colon'), ('HRK', 'Croatian Kuna'), ('CUP', 'Cuban Peso'), ('CUC', 'Cuban convertible peso'), ('CZK', 'Czech Koruna'), ('GMD', 'Dalasi'), ('DKK', 'Danish Krone'), ('MKD', 'Denar'), ('DJF', 'Djibouti Franc'), ('STD', 'Dobra'), ('DOP', 'Dominican Peso'), ('VND', 'Dong'), ('XCD', 'East Caribbean Dollar'), ('EGP', 'Egyptian Pound'), ('SVC', 'El Salvador Colon'), ('ETB', 'Ethiopian Birr'), ('EUR', 'Euro'), ('XBB', 'European Monetary Unit (E.M.U.-6)'), ('XBD', 'European Unit of Account 17(E.U.A.-17)'), ('XBC', 'European Unit of Account 9(E.U.A.-9)'), ('FKP', 'Falkland Islands Pound'), ('FJD', 'Fiji Dollar'), ('HUF', 'Forint'), ('GHS', 'Ghana Cedi'), ('GIP', 'Gibraltar Pound'), ('XAU', 'Gold'), ('XFO', 'Gold-Franc'), ('PYG', 'Guarani'), ('GNF', 'Guinea Franc'), ('GYD', 'Guyana Dollar'), ('HTG', 'Haitian gourde'), ('HKD', 'Hong Kong Dollar'), ('UAH', 'Hryvnia'), ('ISK', 'Iceland Krona'), ('INR', 'Indian Rupee'), ('IRR', 'Iranian Rial'), ('IQD', 'Iraqi Dinar'), ('IMP', 'Isle of Man Pound'), ('JMD', 'Jamaican Dollar'), ('JOD', 'Jordanian Dinar'), ('KES', 'Kenyan Shilling'), ('PGK', 'Kina'), ('LAK', 'Kip'), ('KWD', 'Kuwaiti Dinar'), ('AOA', 'Kwanza'), ('MMK', 'Kyat'), ('GEL', 'Lari'), ('LVL', 'Latvian Lats'), ('LBP', 'Lebanese Pound'), ('ALL', 'Lek'), ('HNL', 'Lempira'), ('SLL', 'Leone'), ('LSL', 'Lesotho loti'), ('LRD', 'Liberian Dollar'), ('LYD', 'Libyan Dinar'), ('SZL', 'Lilangeni'), ('LTL', 'Lithuanian Litas'), ('MGA', 'Malagasy Ariary'), ('MWK', 'Malawian Kwacha'), ('MYR', 'Malaysian Ringgit'), ('TMM', 'Manat'), ('MUR', 'Mauritius Rupee'), ('MZN', 'Metical'), ('MXV', 'Mexican Unidad de Inversion (UDI)'), ('MXN', 'Mexican peso'), ('MDL', 'Moldovan Leu'), ('MAD', 'Moroccan Dirham'), ('BOV', 'Mvdol'), ('NGN', 'Naira'), ('ERN', 'Nakfa'), ('NAD', 'Namibian Dollar'), ('NPR', 'Nepalese Rupee'), ('ANG', 'Netherlands Antillian Guilder'), ('ILS', 'New Israeli Sheqel'), ('RON', 'New Leu'), ('TWD', 'New Taiwan Dollar'), ('NZD', 'New Zealand Dollar'), ('KPW', 'North Korean Won'), ('NOK', 'Norwegian Krone'), ('PEN', 'Nuevo Sol'), ('MRO', 'Ouguiya'), ('TOP', 'Paanga'), ('PKR', 'Pakistan Rupee'), ('XPD', 'Palladium'), ('MOP', 'Pataca'), ('PHP', 'Philippine Peso'), ('XPT', 'Platinum'), ('GBP', 'Pound Sterling'), ('BWP', 'Pula'), ('QAR', 'Qatari Rial'), ('GTQ', 'Quetzal'), ('ZAR', 'Rand'), ('OMR', 'Rial Omani'), ('KHR', 'Riel'), 
('MVR', 'Rufiyaa'), ('IDR', 'Rupiah'), ('RUB', 'Russian Ruble'), ('RWF', 'Rwanda Franc'), ('XDR', 'SDR'), ('SHP', 'Saint Helena Pound'), ('SAR', 'Saudi Riyal'), ('RSD', 'Serbian Dinar'), ('SCR', 'Seychelles Rupee'), ('XAG', 'Silver'), ('SGD', 'Singapore Dollar'), ('SBD', 'Solomon Islands Dollar'), ('KGS', 'Som'), ('SOS', 'Somali Shilling'), ('TJS', 'Somoni'), ('SSP', 'South Sudanese Pound'), ('LKR', 'Sri Lanka Rupee'), ('XSU', 'Sucre'), ('SDG', 'Sudanese Pound'), ('SRD', 'Surinam Dollar'), ('SEK', 'Swedish Krona'), ('CHF', 'Swiss Franc'), ('SYP', 'Syrian Pound'), ('BDT', 'Taka'), ('WST', 'Tala'), ('TZS', 'Tanzanian Shilling'), ('KZT', 'Tenge'), ('XXX', 'The codes assigned for transactions where no currency is involved'), ('TTD', 'Trinidad and Tobago Dollar'), ('MNT', 'Tugrik'), ('TND', 'Tunisian Dinar'), ('TRY', 'Turkish Lira'), ('TMT', 'Turkmenistan New Manat'), ('TVD', 'Tuvalu dollar'), ('AED', 'UAE Dirham'), ('XFU', 'UIC-Franc'), ('USD', 'US Dollar'), ('USN', 'US Dollar (Next day)'), ('UGX', 'Uganda Shilling'), ('CLF', 'Unidad de Fomento'), ('COU', 'Unidad de Valor Real'), ('UYI', 'Uruguay Peso en Unidades Indexadas (URUIURUI)'), ('UYU', 'Uruguayan peso'), ('UZS', 'Uzbekistan Sum'), ('VUV', 'Vatu'), ('CHE', 'WIR Euro'), ('CHW', 'WIR Franc'), ('KRW', 'Won'), ('YER', 'Yemeni Rial'), ('JPY', 'Yen'), ('CNY', 'Yuan Renminbi'), ('ZMK', 'Zambian Kwacha'), ('ZMW', 'Zambian Kwacha'), ('ZWD', 'Zimbabwe Dollar A/06'), ('ZWN', 'Zimbabwe dollar A/08'), ('ZWL', 'Zimbabwe dollar A/09'), ('PLN', 'Zloty')], default=None, editable=False, max_length=3, null=True),
),
]
| [
"[email protected]"
] | |
5762741a29ba36f2c36980cbe7c87cd3d2f89121 | a01e7f87a0088965e2e0a02476d2df12a49a1a18 | /package/tfi_helper/dhcp/hapack/dhcpparser.py | dea3a1526ea3c35f8b80c04e697d0a60a841bed7 | [] | no_license | gsrr/IFT_jerry | 0456a8a1fb98f84ad5c26dc36bdf32e2d85c750c | 4c2f6900dfd7ae7f6b3cc2150b1c1be236b4c95c | refs/heads/master | 2020-04-04T05:30:10.544252 | 2019-08-22T09:12:03 | 2019-08-22T09:12:03 | 48,145,836 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 686 | py | import argparse
class DHCPParser:
def __init__(self):
self.cmds = ['dhcp_test']
self.parser_dhcp = argparse.ArgumentParser(prog="dhcp", add_help=False)
self.parser_dhcp_test = argparse.ArgumentParser(prog="dhcp_test", add_help=False)
self.parser_dhcp_test.add_argument("-z", nargs="?", required=True)
def find(self, args):
cnt = 0
cmd = "dhcp"
while cnt < len(args):
cmd += ("_" + args[cnt])
if cmd in self.cmds:
break
cnt += 1
args = args[cnt+1:]
namespace = getattr(self, "parser" + "_" + cmd).parse_args(args).__dict__
return cmd, namespace
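
# Illustrative usage sketch (assumption: module run directly; the args mirror a
# CLI call such as "dhcp test -z eth0", with the leading "dhcp" already consumed):
if __name__ == "__main__":
    cmd, ns = DHCPParser().find(["test", "-z", "eth0"])
    print(cmd, ns)  # dhcp_test {'z': 'eth0'}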
| [
"[email protected]"
] | |
d3173858f10737bbb574b5291c639096bd42fdb8 | 1ebe5a07e7f6260c2c2ceb6ca00dcf2a0341e544 | /op_impl/built-in/ai_core/tbe/impl/power.py | e29e5eed1d10da730e4062ba4a475b68b162ebd6 | [] | no_license | gekowa/ascend-opp | f5e09905336d85f9974d555d03d37a75cb8185c1 | 5c28a2faf9d2a117ea6f0923efe35fcd53904dd2 | refs/heads/master | 2023-04-09T12:14:40.337104 | 2021-04-19T23:00:59 | 2021-04-19T23:00:59 | 359,620,865 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,024 | py | # Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
power
"""
# pylint: disable=redefined-outer-name
import math
from functools import reduce
import te.lang.cce
from te import tvm
from te.platform.fusion_manager import fusion_manager
from te import platform as tbe_platform
from te.utils.op_utils import *
from topi import generic
from topi.cce import util
def positive_compute(base, power, version, input_dtype):
"""
calculate power for positive elements of base tensor
Parameters
----------
base: the base tensor
power: attr power
version: the product version
input_dtype: dtype of input
Returns
----------
res: the result tensor
"""
base_cast = base
if input_dtype == "float16" and \
tbe_platform.cce_conf.api_check_support("te.lang.cce.vexp", "float32") and \
tbe_platform.cce_conf.api_check_support("te.lang.cce.vlog", "float32"):
base_cast = te.lang.cce.cast_to(base, "float32")
log_val = te.lang.cce.vlog(base_cast)
mul_val = te.lang.cce.vmuls(log_val, power)
exp_val = te.lang.cce.vexp(mul_val)
if exp_val.dtype.lower() != input_dtype:
exp_val = te.lang.cce.cast_to(exp_val, input_dtype)
return exp_val
def negative_compute(base, power, nan_values, version, input_dtype):
"""
calculate power for negative elements of base tensor
Parameters
----------
base: the base tensor
power: attr power
nan_values: a tensor with nan values
version: the product version
input_dtype: dtype of input
Returns
----------
res: the result tensor
"""
if float(power).is_integer():
base_cast = base
if input_dtype == "float16" and \
tbe_platform.cce_conf.api_check_support("te.lang.cce.vexp", "float32") and \
tbe_platform.cce_conf.api_check_support("te.lang.cce.vlog", "float32"):
base_cast = te.lang.cce.cast_to(base, "float32")
sign_value = math.pow(-1, power)
abs_base_value = te.lang.cce.vabs(base_cast)
log_value = te.lang.cce.vlog(abs_base_value)
mul_value = te.lang.cce.vmuls(log_value, power)
exp_value = te.lang.cce.vexp(mul_value)
res = te.lang.cce.vmuls(exp_value, sign_value)
if res.dtype.lower() != input_dtype:
res = te.lang.cce.cast_to(res, input_dtype)
return res
return nan_values
def zero_compute(power, nan_values, zero_values):
"""
calculate power for zero elements of base tensor
Parameters
----------
power: attr power
nan_values: a tensor with nan values
zero_values: a tensor with zero values
Returns
----------
res: the result tensor
"""
if power > 0.0:
return zero_values
return nan_values
def power_scalar(input_x, base, power):
"""
    calculate power when attr scale is 0.0 and attr power is not 0.0
Parameters
----------
input_x: placeholder of input
base: the base value, equals attr shift
power: attr power
Returns
----------
res: the result when attr scale is 0.0 and attr power is not
"""
tmp_zero = te.lang.cce.vmuls(input_x, 0)
ones = te.lang.cce.vadds(tmp_zero, 1)
zeros = tmp_zero
if base > 0.0:
res = te.lang.cce.vmuls(ones, math.pow(base, power))
return res
if base < 0.0:
if float(power).is_integer():
res = te.lang.cce.vmuls(ones, math.pow(base, power))
return res
# return abnormal value
res = te.lang.cce.vrec(zeros)
return res
if power > 0:
return zeros
# return abnormal value
res = te.lang.cce.vrec(zeros)
return res
def zero_diff_scale_compute(input_x, shift, power):
"""
calculate power when power*scale is 0.0
Parameters
----------
input_x: placeholder of input
shift: attr shift
power: attr power
Returns
----------
res: the result when power*scale is 0.0
"""
if power == 0.0:
tmp_zero = te.lang.cce.vmuls(input_x, 0)
res = te.lang.cce.vadds(tmp_zero, 1)
return res
res = power_scalar(input_x, shift, power)
return res
# pylint: disable=locally-disabled,unused-argument,too-many-arguments
@fusion_manager.register("power")
def power_compute(input_x, output_y, power=1.0, scale=1.0,
shift=0.0, kernel_name="power"):
"""
calculate power according to different cases
Parameters
----------
input_x: placeholder of input
power: attr power
scale: attr scale
shift: attr shift
Returns
----------
res: result of power
"""
cce_product = tbe_platform.cce_conf.get_soc_spec("SOC_VERSION")
input_dtype = input_x.dtype.lower()
diff_scale = power * scale
if diff_scale == 0.0:
res = zero_diff_scale_compute(input_x, shift, power)
return res
shift_scaled_x = te.lang.cce.vmuls(input_x, scale)
shift_scaled_x = te.lang.cce.vadds(shift_scaled_x, shift)
tmp_zero = te.lang.cce.vmuls(input_x, 0)
zeros = tmp_zero
nan_value = te.lang.cce.vrec(zeros)
if power == 1.0:
res = shift_scaled_x
return res
if power == 2.0:
res = te.lang.cce.vmul(shift_scaled_x, shift_scaled_x)
return res
if power == 3.0:
res = te.lang.cce.vmul(shift_scaled_x, shift_scaled_x)
res = te.lang.cce.vmul(res, shift_scaled_x)
return res
positive_pow_val = \
positive_compute(shift_scaled_x, power, cce_product, input_dtype)
negative_pow_val = \
        negative_compute(shift_scaled_x, power,
nan_value, cce_product, input_dtype)
zero_pow_val = zero_compute(power, nan_value, zeros)
res = te.lang.cce.vcmpsel(shift_scaled_x, zeros,
'gt', positive_pow_val, negative_pow_val)
res = te.lang.cce.vcmpsel(shift_scaled_x, zeros,
'eq', zero_pow_val, res)
return res
# pylint: disable=redefined-outer-name, too-many-arguments, unused-variable
@check_op_params(REQUIRED_INPUT, REQUIRED_OUTPUT, OPTION_ATTR_FLOAT,
OPTION_ATTR_FLOAT, OPTION_ATTR_FLOAT, KERNEL_NAME)
def power(input_x, output_y, power=1.0, scale=1.0,
shift=0.0, kernel_name="power"):
"""
calculate power of input tensor according to
y = (x * scale + shift) ** power
Parameters
----------
input_x: dict of input, include shape and
dtype, dtype support float16, float32
output_y: dict of output, include shape and
dtype, dtype support float16, float32
power: attr power, default value is 1.0
scale: attr scale, default value is 1.0
shift: attr shift, default value is 0.0
kernel_name: cce kernel name, default value is "power"
Returns
----------
None
"""
shape = input_x.get("shape")
input_dtype = input_x.get("dtype").lower()
check_shape(shape, param_name="x")
type_tuple = ("float16", "float32")
check_dtype(input_dtype, type_tuple, param_name="x")
fuseshape = [1]
fuseshape[0] = reduce(lambda x, y: x*y, shape)
data_input = tvm.placeholder(fuseshape, name="data_input", dtype=input_dtype)
cur_cce_product = tbe_platform.cce_conf.get_soc_spec("SOC_VERSION")
if cur_cce_product in ("Ascend310", "Hi3796CV300ES", "Hi3796CV300CS"):
if input_dtype == "float32":
error_info = {}
error_info['errCode'] = 'E80008'
error_info['param_name'] = 'input_x'
error_info['op_name'] = 'power'
error_info['expect_value'] = "float16"
error_info['real_value'] = input_dtype
raise RuntimeError(error_info, "In op[%s], the parameter[%s]'s dtype "
"should be [%s], but actually is [%s]."
% (error_info['op_name'], error_info['param_name'],
error_info['expect_value'], error_info['real_value']))
res = power_compute(data_input, output_y, power, scale, shift, kernel_name)
else:
res = power_compute(data_input, output_y, power, scale, shift, kernel_name)
with tvm.target.cce():
sch = generic.auto_schedule(res)
config = {"name": kernel_name,
"tensor_list": [data_input, res],
"print_ir": True}
te.lang.cce.cce_build_code(sch, config)
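
# --- reference-semantics sketch (assumption: NumPy only; illustrative, not
# part of the compiled TBE kernel above) ---
def _power_reference(x, power=1.0, scale=1.0, shift=0.0):
    """Pure-NumPy sketch of y = (x*scale + shift) ** power mirroring the
    vcmpsel branching above: NaN for a negative base with a non-integer power,
    and NaN for a zero base with a non-positive power."""
    import numpy as np
    base = np.asarray(x, dtype=np.float64) * scale + shift
    out = np.full_like(base, np.nan)
    pos, zero, neg = base > 0, base == 0, base < 0
    out[pos] = np.exp(power * np.log(base[pos]))
    if power > 0:
        out[zero] = 0.0
    if float(power).is_integer():
        out[neg] = ((-1.0) ** power) * np.exp(power * np.log(np.abs(base[neg])))
    return out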
| [
"[email protected]"
] | |
9d82a9d1425b1deae0c45fc833fe73e80449e0b6 | 2b7c7e9b00ed9b2dbbac943ee4b79865a96d10de | /Figure_script/Figure_1.py | 7caa0f0d7080d155e2572b49ddd294af94fa11d9 | [] | no_license | YaojieLu/Plant_traits_inversion | ad973e60bb32717d9d718f774c2ec77433c38ced | ec83642ae2a2e6ef96502e58f8074bffdadfefe8 | refs/heads/master | 2021-06-21T15:22:00.225498 | 2020-12-13T22:12:21 | 2020-12-13T22:12:21 | 140,017,309 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,680 | py | import pickle
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from scipy import stats
# load traces
ts = pickle.load(open("../Data/45.pickle", "rb"))
params = ['alpha', 'c', 'g1', 'kxmax', 'p50', 'L']
true_values = [0.02, 16, 50, 7, -4.5, 2]
# figure
labels = ['$\\alpha$', '$\\mathit{c}$', '$\\mathit{g_{1}}$',
'$\\mathit{k_{xmax}}$', '$\\psi_{x50}$', '$\\mathit{L}$']
ranges = [[0.001, 0.2], [2, 20], [10, 100], [1, 10], [-10, -0.1], [0.5, 5]]
fig, axs = plt.subplots(nrows=2, ncols=3, figsize=(30, 20))
for i, row in enumerate(axs):
for j, col in enumerate(row):
idx = i*3+j
param = params[idx]
df = pd.DataFrame({param: ts[param]}).iloc[:, 0]
col.hist(df, range=[ranges[idx][0], ranges[idx][1]], bins=100)
# kde = stats.gaussian_kde(df)
# param_range = np.linspace(ranges[idx][0], ranges[idx][1], 1000)
# col.plot(param_range, kde(param_range), linewidth=2.5, color='blue')
mean, std = df.mean(), df.std()
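        # RSD = relative standard deviation, |std/mean| (coefficient of variation)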
cv = abs(round(std/mean, 2))
col.set_title('RSD = {}'.format(cv), fontsize=30)
col.axvline(x=true_values[idx], c='black',
label='True value', linestyle='dashed')
col.axes.get_yaxis().set_visible(False)
col.tick_params(labelsize=30)
col.set_xlabel(labels[idx], fontsize=30)
if idx == 0:
col.legend([Line2D([0], [0], linestyle='dashed', color='black')],
['True value'], loc='upper right', fontsize=30, framealpha=0)
plt.subplots_adjust(hspace=0.25, wspace=0.1)
plt.savefig('../Figures/Figure 45.png', bbox_inches = 'tight')
| [
"="
] | = |
8f28ab12e6205691d69253b9b16c31e06f857774 | b5cc6d7b5f7ccea36fce4eab961979404414f8b0 | /kent-report/py/beam_distances.py | 2cc89895ad6d3fed6c27470bb32f1dfd505d8989 | [] | no_license | MiroK/cutFEM-beam | adf0c925dbe64b370dab48e82335617450675f5d | 2fb3686804e836d4031fbf231a36a0f9ac8a3012 | refs/heads/master | 2021-01-21T23:54:32.868307 | 2015-02-14T13:14:59 | 2015-02-14T13:14:59 | 25,625,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,537 | py | from __future__ import division
from sympy import sin, cos, pi, sqrt, symbols, lambdify, legendre  # legendre is used by shen_basis
from sympy.mpmath import quad
import numpy as np
x, y, s = symbols('x, y, s')
def eigen_basis(n):
'''
Return first n eigenfunctions of Laplacian over biunit interval with homog.
Dirichlet bcs. at endpoints -1, 1. Functions of x.
'''
k = 0
functions = []
while k < n:
alpha = pi/2 + k*pi/2
if k % 2 == 0:
functions.append(cos(alpha*x))
else:
functions.append(sin(alpha*x))
k += 1
return functions
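
# Sanity note (illustrative): each eigenfunction above vanishes at the
# endpoints, e.g. eigen_basis(2) == [cos(pi*x/2), sin(pi*x)] and both are 0 at
# x = 1 and x = -1.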
def shen_basis(n):
'''
Return first n Shen basis functions. Special polynomials made of Legendre
polynomials that have 0 values at -1, 1. Functions of x.
'''
k = 0
functions = []
while k < n:
weight = 1/sqrt(4*k + 6)
functions.append(weight*(legendre(k+2, x) - legendre(k, x)))
k += 1
return functions
def beam_restrict(A, B, u):
'''
Restict function(s) u of x, y to beam = {(x, y)=0.5*A*(1-s) + 0.5*B*(1+s)}.
'''
if isinstance(u, list):
return [beam_restrict(A, B, v) for v in u]
else:
assert x in u.atoms() and y in u.atoms()
ux = u.subs(x, A[0]/2*(1-s) + B[0]/2*(1+s))
u = ux.subs(y, A[1]/2*(1-s) + B[1]/2*(1+s))
return u
def L2_distance(f, g):
'L2 norm over [-1, 1] of f-g.'
d = f-g
d = lambdify(s, d)
return sqrt(quad(lambda s: d(s)**2, [-1, 1]))
def H10_distance(f, g):
'H10 norm over [-1, 1] of f-g.'
d = (f-g).diff(s, 1)
d = lambdify(s, d)
return sqrt(quad(lambda s: d(s)**2, [-1, 1]))
def distance_matrices(A, B, Vp, Vb, Q, norm):
'''
Given beam specified by A, B return two matrices. The first matrix has
norm(u-q) where u are functions from Vp restricted to beam and q are
functions from Q. The other matrix is norm(p-q) for p in Vb and Q in
Q.
'''
if norm == 'L2':
distance = L2_distance
elif norm == 'H10':
distance = H10_distance
else:
raise ValueError
m, n, r = len(Vp), len(Vb), len(Q)
mat0 = np.zeros((m, r))
# First do the restriction
Vp = beam_restrict(A, B, Vp)
for i, u in enumerate(Vp):
for j, q in enumerate(Q):
mat0[i, j] = distance(u, q)
mat1 = np.zeros((n, r))
for i, p in enumerate(Vb):
for j, q in enumerate(Q):
mat1[i, j] = distance(p, q)
return mat0, mat1
# -----------------------------------------------------------------------------
if __name__ == '__main__':
import matplotlib.pyplot as plt
from itertools import product
# Number of plate function in 1d, number of beam functions and number of
# functions for Lagrange multiplier space
m, n, r = 20, 20, 20
# Vp basis - functions of x, y
Vp = [fx*fy.subs(x, y) for fx, fy in product(eigen_basis(m), eigen_basis(m))]
# Vb basis - functions of s
Vb = [f.subs(x, s) for f in eigen_basis(n)]
# Q basis - functions of s
Q = [f.subs(x, s) for f in eigen_basis(r)]
# Sample beam
A = np.array([0, 0])
B = np.array([1, 1])
for norm in ['L2', 'H10']:
matBp, matBb = distance_matrices(A, B, Vp, Vb, Q, norm)
plt.figure()
plt.title(norm)
plt.pcolor(matBp)
plt.xlabel('$Q$')
plt.ylabel('$V_p$')
plt.colorbar()
plt.figure()
plt.title(norm)
plt.pcolor(matBb)
plt.xlabel('$Q$')
plt.ylabel('$V_b$')
plt.colorbar()
plt.show()
| [
"[email protected]"
] | |
1bd68140d32eb41f4a7e8552136f8d5ef1080f18 | 1ab7b3f2aa63de8488ce7c466a67d367771aa1f2 | /Ricardo_OS/Python_backend/venv/lib/python3.8/site-packages/pandas/tests/indexing/test_partial.py | 337ec683ee745d97ace410b4d302af252d40ba04 | [
"MIT"
] | permissive | icl-rocketry/Avionics | 9d39aeb11aba11115826fd73357b415026a7adad | 95b7a061eabd6f2b607fba79e007186030f02720 | refs/heads/master | 2022-07-30T07:54:10.642930 | 2022-07-10T12:19:10 | 2022-07-10T12:19:10 | 216,184,670 | 9 | 1 | MIT | 2022-06-27T10:17:06 | 2019-10-19T09:57:07 | C++ | UTF-8 | Python | false | false | 23,869 | py | """
test setting *parts* of objects both positionally and label based
TODO: these should be split among the indexer tests
"""
import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Index, Period, Series, Timestamp, date_range, period_range
import pandas._testing as tm
class TestPartialSetting:
def test_partial_setting(self):
# GH2578, allow ix and friends to partially set
# series
s_orig = Series([1, 2, 3])
s = s_orig.copy()
s[5] = 5
expected = Series([1, 2, 3, 5], index=[0, 1, 2, 5])
tm.assert_series_equal(s, expected)
s = s_orig.copy()
s.loc[5] = 5
expected = Series([1, 2, 3, 5], index=[0, 1, 2, 5])
tm.assert_series_equal(s, expected)
s = s_orig.copy()
s[5] = 5.0
expected = Series([1, 2, 3, 5.0], index=[0, 1, 2, 5])
tm.assert_series_equal(s, expected)
s = s_orig.copy()
s.loc[5] = 5.0
expected = Series([1, 2, 3, 5.0], index=[0, 1, 2, 5])
tm.assert_series_equal(s, expected)
# iloc/iat raise
s = s_orig.copy()
msg = "iloc cannot enlarge its target object"
with pytest.raises(IndexError, match=msg):
s.iloc[3] = 5.0
msg = "index 3 is out of bounds for axis 0 with size 3"
with pytest.raises(IndexError, match=msg):
s.iat[3] = 5.0
# ## frame ##
df_orig = DataFrame(
np.arange(6).reshape(3, 2), columns=["A", "B"], dtype="int64"
)
# iloc/iat raise
df = df_orig.copy()
msg = "iloc cannot enlarge its target object"
with pytest.raises(IndexError, match=msg):
df.iloc[4, 2] = 5.0
msg = "index 2 is out of bounds for axis 0 with size 2"
with pytest.raises(IndexError, match=msg):
df.iat[4, 2] = 5.0
# row setting where it exists
expected = DataFrame(dict({"A": [0, 4, 4], "B": [1, 5, 5]}))
df = df_orig.copy()
df.iloc[1] = df.iloc[2]
tm.assert_frame_equal(df, expected)
expected = DataFrame(dict({"A": [0, 4, 4], "B": [1, 5, 5]}))
df = df_orig.copy()
df.loc[1] = df.loc[2]
tm.assert_frame_equal(df, expected)
# like 2578, partial setting with dtype preservation
expected = DataFrame(dict({"A": [0, 2, 4, 4], "B": [1, 3, 5, 5]}))
df = df_orig.copy()
df.loc[3] = df.loc[2]
tm.assert_frame_equal(df, expected)
# single dtype frame, overwrite
expected = DataFrame(dict({"A": [0, 2, 4], "B": [0, 2, 4]}))
df = df_orig.copy()
df.loc[:, "B"] = df.loc[:, "A"]
tm.assert_frame_equal(df, expected)
# mixed dtype frame, overwrite
expected = DataFrame(dict({"A": [0, 2, 4], "B": Series([0, 2, 4])}))
df = df_orig.copy()
df["B"] = df["B"].astype(np.float64)
df.loc[:, "B"] = df.loc[:, "A"]
tm.assert_frame_equal(df, expected)
# single dtype frame, partial setting
expected = df_orig.copy()
expected["C"] = df["A"]
df = df_orig.copy()
df.loc[:, "C"] = df.loc[:, "A"]
tm.assert_frame_equal(df, expected)
# mixed frame, partial setting
expected = df_orig.copy()
expected["C"] = df["A"]
df = df_orig.copy()
df.loc[:, "C"] = df.loc[:, "A"]
tm.assert_frame_equal(df, expected)
# GH 8473
dates = date_range("1/1/2000", periods=8)
df_orig = DataFrame(
np.random.randn(8, 4), index=dates, columns=["A", "B", "C", "D"]
)
expected = pd.concat(
[df_orig, DataFrame({"A": 7}, index=dates[-1:] + dates.freq)], sort=True
)
df = df_orig.copy()
df.loc[dates[-1] + dates.freq, "A"] = 7
tm.assert_frame_equal(df, expected)
df = df_orig.copy()
df.at[dates[-1] + dates.freq, "A"] = 7
tm.assert_frame_equal(df, expected)
exp_other = DataFrame({0: 7}, index=dates[-1:] + dates.freq)
expected = pd.concat([df_orig, exp_other], axis=1)
df = df_orig.copy()
df.loc[dates[-1] + dates.freq, 0] = 7
tm.assert_frame_equal(df, expected)
df = df_orig.copy()
df.at[dates[-1] + dates.freq, 0] = 7
tm.assert_frame_equal(df, expected)
def test_partial_setting_mixed_dtype(self):
# in a mixed dtype environment, try to preserve dtypes
# by appending
df = DataFrame([[True, 1], [False, 2]], columns=["female", "fitness"])
s = df.loc[1].copy()
s.name = 2
expected = df.append(s)
df.loc[2] = df.loc[1]
tm.assert_frame_equal(df, expected)
# columns will align
df = DataFrame(columns=["A", "B"])
df.loc[0] = Series(1, index=range(4))
tm.assert_frame_equal(df, DataFrame(columns=["A", "B"], index=[0]))
# columns will align
df = DataFrame(columns=["A", "B"])
df.loc[0] = Series(1, index=["B"])
exp = DataFrame([[np.nan, 1]], columns=["A", "B"], index=[0], dtype="float64")
tm.assert_frame_equal(df, exp)
# list-like must conform
df = DataFrame(columns=["A", "B"])
msg = "cannot set a row with mismatched columns"
with pytest.raises(ValueError, match=msg):
df.loc[0] = [1, 2, 3]
# TODO: #15657, these are left as object and not coerced
df = DataFrame(columns=["A", "B"])
df.loc[3] = [6, 7]
exp = DataFrame([[6, 7]], index=[3], columns=["A", "B"], dtype="object")
tm.assert_frame_equal(df, exp)
def test_series_partial_set(self):
# partial set with new index
# Regression from GH4825
ser = Series([0.1, 0.2], index=[1, 2])
# loc equiv to .reindex
expected = Series([np.nan, 0.2, np.nan], index=[3, 2, 3])
with pytest.raises(KeyError, match="with any missing labels"):
result = ser.loc[[3, 2, 3]]
result = ser.reindex([3, 2, 3])
tm.assert_series_equal(result, expected, check_index_type=True)
expected = Series([np.nan, 0.2, np.nan, np.nan], index=[3, 2, 3, "x"])
with pytest.raises(KeyError, match="with any missing labels"):
result = ser.loc[[3, 2, 3, "x"]]
result = ser.reindex([3, 2, 3, "x"])
tm.assert_series_equal(result, expected, check_index_type=True)
expected = Series([0.2, 0.2, 0.1], index=[2, 2, 1])
result = ser.loc[[2, 2, 1]]
tm.assert_series_equal(result, expected, check_index_type=True)
expected = Series([0.2, 0.2, np.nan, 0.1], index=[2, 2, "x", 1])
with pytest.raises(KeyError, match="with any missing labels"):
result = ser.loc[[2, 2, "x", 1]]
result = ser.reindex([2, 2, "x", 1])
tm.assert_series_equal(result, expected, check_index_type=True)
        # raises as nothing is in the index
msg = (
r"\"None of \[Int64Index\(\[3, 3, 3\], dtype='int64'\)\] are "
r"in the \[index\]\""
)
with pytest.raises(KeyError, match=msg):
ser.loc[[3, 3, 3]]
expected = Series([0.2, 0.2, np.nan], index=[2, 2, 3])
with pytest.raises(KeyError, match="with any missing labels"):
ser.loc[[2, 2, 3]]
result = ser.reindex([2, 2, 3])
tm.assert_series_equal(result, expected, check_index_type=True)
s = Series([0.1, 0.2, 0.3], index=[1, 2, 3])
expected = Series([0.3, np.nan, np.nan], index=[3, 4, 4])
with pytest.raises(KeyError, match="with any missing labels"):
s.loc[[3, 4, 4]]
result = s.reindex([3, 4, 4])
tm.assert_series_equal(result, expected, check_index_type=True)
s = Series([0.1, 0.2, 0.3, 0.4], index=[1, 2, 3, 4])
expected = Series([np.nan, 0.3, 0.3], index=[5, 3, 3])
with pytest.raises(KeyError, match="with any missing labels"):
s.loc[[5, 3, 3]]
result = s.reindex([5, 3, 3])
tm.assert_series_equal(result, expected, check_index_type=True)
s = Series([0.1, 0.2, 0.3, 0.4], index=[1, 2, 3, 4])
expected = Series([np.nan, 0.4, 0.4], index=[5, 4, 4])
with pytest.raises(KeyError, match="with any missing labels"):
s.loc[[5, 4, 4]]
result = s.reindex([5, 4, 4])
tm.assert_series_equal(result, expected, check_index_type=True)
s = Series([0.1, 0.2, 0.3, 0.4], index=[4, 5, 6, 7])
expected = Series([0.4, np.nan, np.nan], index=[7, 2, 2])
with pytest.raises(KeyError, match="with any missing labels"):
s.loc[[7, 2, 2]]
result = s.reindex([7, 2, 2])
tm.assert_series_equal(result, expected, check_index_type=True)
s = Series([0.1, 0.2, 0.3, 0.4], index=[1, 2, 3, 4])
expected = Series([0.4, np.nan, np.nan], index=[4, 5, 5])
with pytest.raises(KeyError, match="with any missing labels"):
s.loc[[4, 5, 5]]
result = s.reindex([4, 5, 5])
tm.assert_series_equal(result, expected, check_index_type=True)
# iloc
expected = Series([0.2, 0.2, 0.1, 0.1], index=[2, 2, 1, 1])
result = ser.iloc[[1, 1, 0, 0]]
tm.assert_series_equal(result, expected, check_index_type=True)
def test_series_partial_set_with_name(self):
# GH 11497
idx = Index([1, 2], dtype="int64", name="idx")
ser = Series([0.1, 0.2], index=idx, name="s")
# loc
with pytest.raises(KeyError, match="with any missing labels"):
ser.loc[[3, 2, 3]]
with pytest.raises(KeyError, match="with any missing labels"):
ser.loc[[3, 2, 3, "x"]]
exp_idx = Index([2, 2, 1], dtype="int64", name="idx")
expected = Series([0.2, 0.2, 0.1], index=exp_idx, name="s")
result = ser.loc[[2, 2, 1]]
tm.assert_series_equal(result, expected, check_index_type=True)
with pytest.raises(KeyError, match="with any missing labels"):
ser.loc[[2, 2, "x", 1]]
        # raises as nothing is in the index
msg = (
r"\"None of \[Int64Index\(\[3, 3, 3\], dtype='int64', "
r"name='idx'\)\] are in the \[index\]\""
)
with pytest.raises(KeyError, match=msg):
ser.loc[[3, 3, 3]]
with pytest.raises(KeyError, match="with any missing labels"):
ser.loc[[2, 2, 3]]
idx = Index([1, 2, 3], dtype="int64", name="idx")
with pytest.raises(KeyError, match="with any missing labels"):
Series([0.1, 0.2, 0.3], index=idx, name="s").loc[[3, 4, 4]]
idx = Index([1, 2, 3, 4], dtype="int64", name="idx")
with pytest.raises(KeyError, match="with any missing labels"):
Series([0.1, 0.2, 0.3, 0.4], index=idx, name="s").loc[[5, 3, 3]]
idx = Index([1, 2, 3, 4], dtype="int64", name="idx")
with pytest.raises(KeyError, match="with any missing labels"):
Series([0.1, 0.2, 0.3, 0.4], index=idx, name="s").loc[[5, 4, 4]]
idx = Index([4, 5, 6, 7], dtype="int64", name="idx")
with pytest.raises(KeyError, match="with any missing labels"):
Series([0.1, 0.2, 0.3, 0.4], index=idx, name="s").loc[[7, 2, 2]]
idx = Index([1, 2, 3, 4], dtype="int64", name="idx")
with pytest.raises(KeyError, match="with any missing labels"):
Series([0.1, 0.2, 0.3, 0.4], index=idx, name="s").loc[[4, 5, 5]]
# iloc
exp_idx = Index([2, 2, 1, 1], dtype="int64", name="idx")
expected = Series([0.2, 0.2, 0.1, 0.1], index=exp_idx, name="s")
result = ser.iloc[[1, 1, 0, 0]]
tm.assert_series_equal(result, expected, check_index_type=True)
def test_partial_set_invalid(self):
# GH 4940
# allow only setting of 'valid' values
orig = tm.makeTimeDataFrame()
df = orig.copy()
# don't allow not string inserts
msg = "cannot insert DatetimeArray with incompatible label"
with pytest.raises(TypeError, match=msg):
df.loc[100.0, :] = df.iloc[0]
with pytest.raises(TypeError, match=msg):
df.loc[100, :] = df.iloc[0]
# allow object conversion here
df = orig.copy()
df.loc["a", :] = df.iloc[0]
exp = orig.append(Series(df.iloc[0], name="a"))
tm.assert_frame_equal(df, exp)
tm.assert_index_equal(df.index, Index(orig.index.tolist() + ["a"]))
assert df.index.dtype == "object"
def test_partial_set_empty_series(self):
# GH5226
# partially set with an empty object series
s = Series(dtype=object)
s.loc[1] = 1
tm.assert_series_equal(s, Series([1], index=[1]))
s.loc[3] = 3
tm.assert_series_equal(s, Series([1, 3], index=[1, 3]))
s = Series(dtype=object)
s.loc[1] = 1.0
tm.assert_series_equal(s, Series([1.0], index=[1]))
s.loc[3] = 3.0
tm.assert_series_equal(s, Series([1.0, 3.0], index=[1, 3]))
s = Series(dtype=object)
s.loc["foo"] = 1
tm.assert_series_equal(s, Series([1], index=["foo"]))
s.loc["bar"] = 3
tm.assert_series_equal(s, Series([1, 3], index=["foo", "bar"]))
s.loc[3] = 4
tm.assert_series_equal(s, Series([1, 3, 4], index=["foo", "bar", 3]))
def test_partial_set_empty_frame(self):
# partially set with an empty object
# frame
df = DataFrame()
msg = "cannot set a frame with no defined columns"
with pytest.raises(ValueError, match=msg):
df.loc[1] = 1
with pytest.raises(ValueError, match=msg):
df.loc[1] = Series([1], index=["foo"])
msg = "cannot set a frame with no defined index and a scalar"
with pytest.raises(ValueError, match=msg):
df.loc[:, 1] = 1
# these work as they don't really change
# anything but the index
# GH5632
expected = DataFrame(columns=["foo"], index=Index([], dtype="object"))
def f():
df = DataFrame(index=Index([], dtype="object"))
df["foo"] = Series([], dtype="object")
return df
tm.assert_frame_equal(f(), expected)
def f():
df = DataFrame()
df["foo"] = Series(df.index)
return df
tm.assert_frame_equal(f(), expected)
def f():
df = DataFrame()
df["foo"] = df.index
return df
tm.assert_frame_equal(f(), expected)
expected = DataFrame(columns=["foo"], index=Index([], dtype="int64"))
expected["foo"] = expected["foo"].astype("float64")
def f():
df = DataFrame(index=Index([], dtype="int64"))
df["foo"] = []
return df
tm.assert_frame_equal(f(), expected)
def f():
df = DataFrame(index=Index([], dtype="int64"))
df["foo"] = Series(np.arange(len(df)), dtype="float64")
return df
tm.assert_frame_equal(f(), expected)
def f():
df = DataFrame(index=Index([], dtype="int64"))
df["foo"] = range(len(df))
return df
expected = DataFrame(columns=["foo"], index=Index([], dtype="int64"))
expected["foo"] = expected["foo"].astype("float64")
tm.assert_frame_equal(f(), expected)
df = DataFrame()
tm.assert_index_equal(df.columns, Index([], dtype=object))
df2 = DataFrame()
df2[1] = Series([1], index=["foo"])
df.loc[:, 1] = Series([1], index=["foo"])
tm.assert_frame_equal(df, DataFrame([[1]], index=["foo"], columns=[1]))
tm.assert_frame_equal(df, df2)
# no index to start
expected = DataFrame({0: Series(1, index=range(4))}, columns=["A", "B", 0])
df = DataFrame(columns=["A", "B"])
df[0] = Series(1, index=range(4))
df.dtypes
str(df)
tm.assert_frame_equal(df, expected)
df = DataFrame(columns=["A", "B"])
df.loc[:, 0] = Series(1, index=range(4))
df.dtypes
str(df)
tm.assert_frame_equal(df, expected)
def test_partial_set_empty_frame_row(self):
# GH5720, GH5744
# don't create rows when empty
expected = DataFrame(columns=["A", "B", "New"], index=Index([], dtype="int64"))
expected["A"] = expected["A"].astype("int64")
expected["B"] = expected["B"].astype("float64")
expected["New"] = expected["New"].astype("float64")
df = DataFrame({"A": [1, 2, 3], "B": [1.2, 4.2, 5.2]})
y = df[df.A > 5]
y["New"] = np.nan
tm.assert_frame_equal(y, expected)
expected = DataFrame(columns=["a", "b", "c c", "d"])
expected["d"] = expected["d"].astype("int64")
df = DataFrame(columns=["a", "b", "c c"])
df["d"] = 3
tm.assert_frame_equal(df, expected)
tm.assert_series_equal(df["c c"], Series(name="c c", dtype=object))
# reindex columns is ok
df = DataFrame({"A": [1, 2, 3], "B": [1.2, 4.2, 5.2]})
y = df[df.A > 5]
result = y.reindex(columns=["A", "B", "C"])
expected = DataFrame(columns=["A", "B", "C"], index=Index([], dtype="int64"))
expected["A"] = expected["A"].astype("int64")
expected["B"] = expected["B"].astype("float64")
expected["C"] = expected["C"].astype("float64")
tm.assert_frame_equal(result, expected)
def test_partial_set_empty_frame_set_series(self):
# GH 5756
# setting with empty Series
df = DataFrame(Series(dtype=object))
tm.assert_frame_equal(df, DataFrame({0: Series(dtype=object)}))
df = DataFrame(Series(name="foo", dtype=object))
tm.assert_frame_equal(df, DataFrame({"foo": Series(dtype=object)}))
def test_partial_set_empty_frame_empty_copy_assignment(self):
# GH 5932
# copy on empty with assignment fails
df = DataFrame(index=[0])
df = df.copy()
df["a"] = 0
expected = DataFrame(0, index=[0], columns=["a"])
tm.assert_frame_equal(df, expected)
def test_partial_set_empty_frame_empty_consistencies(self):
# GH 6171
# consistency on empty frames
df = DataFrame(columns=["x", "y"])
df["x"] = [1, 2]
expected = DataFrame(dict(x=[1, 2], y=[np.nan, np.nan]))
tm.assert_frame_equal(df, expected, check_dtype=False)
df = DataFrame(columns=["x", "y"])
df["x"] = ["1", "2"]
expected = DataFrame(dict(x=["1", "2"], y=[np.nan, np.nan]), dtype=object)
tm.assert_frame_equal(df, expected)
df = DataFrame(columns=["x", "y"])
df.loc[0, "x"] = 1
expected = DataFrame(dict(x=[1], y=[np.nan]))
tm.assert_frame_equal(df, expected, check_dtype=False)
@pytest.mark.parametrize(
"idx,labels,expected_idx",
[
(
period_range(start="2000", periods=20, freq="D"),
["2000-01-04", "2000-01-08", "2000-01-12"],
[
Period("2000-01-04", freq="D"),
Period("2000-01-08", freq="D"),
Period("2000-01-12", freq="D"),
],
),
(
date_range(start="2000", periods=20, freq="D"),
["2000-01-04", "2000-01-08", "2000-01-12"],
[
Timestamp("2000-01-04", freq="D"),
Timestamp("2000-01-08", freq="D"),
Timestamp("2000-01-12", freq="D"),
],
),
(
pd.timedelta_range(start="1 day", periods=20),
["4D", "8D", "12D"],
[pd.Timedelta("4 day"), pd.Timedelta("8 day"), pd.Timedelta("12 day")],
),
],
)
def test_loc_with_list_of_strings_representing_datetimes(
self, idx, labels, expected_idx
):
# GH 11278
s = Series(range(20), index=idx)
df = DataFrame(range(20), index=idx)
expected_value = [3, 7, 11]
expected_s = Series(expected_value, expected_idx)
expected_df = DataFrame(expected_value, expected_idx)
tm.assert_series_equal(expected_s, s.loc[labels])
tm.assert_series_equal(expected_s, s[labels])
tm.assert_frame_equal(expected_df, df.loc[labels])
@pytest.mark.parametrize(
"idx,labels",
[
(
period_range(start="2000", periods=20, freq="D"),
["2000-01-04", "2000-01-30"],
),
(
date_range(start="2000", periods=20, freq="D"),
["2000-01-04", "2000-01-30"],
),
(pd.timedelta_range(start="1 day", periods=20), ["3 day", "30 day"]),
],
)
def test_loc_with_list_of_strings_representing_datetimes_missing_value(
self, idx, labels
):
# GH 11278
s = Series(range(20), index=idx)
df = DataFrame(range(20), index=idx)
msg = r"with any missing labels"
with pytest.raises(KeyError, match=msg):
s.loc[labels]
with pytest.raises(KeyError, match=msg):
s[labels]
with pytest.raises(KeyError, match=msg):
df.loc[labels]
@pytest.mark.parametrize(
"idx,labels,msg",
[
(
period_range(start="2000", periods=20, freq="D"),
["4D", "8D"],
(
r"None of \[Index\(\['4D', '8D'\], dtype='object'\)\] "
r"are in the \[index\]"
),
),
(
date_range(start="2000", periods=20, freq="D"),
["4D", "8D"],
(
r"None of \[Index\(\['4D', '8D'\], dtype='object'\)\] "
r"are in the \[index\]"
),
),
(
pd.timedelta_range(start="1 day", periods=20),
["2000-01-04", "2000-01-08"],
(
r"None of \[Index\(\['2000-01-04', '2000-01-08'\], "
r"dtype='object'\)\] are in the \[index\]"
),
),
],
)
def test_loc_with_list_of_strings_representing_datetimes_not_matched_type(
self, idx, labels, msg
):
# GH 11278
s = Series(range(20), index=idx)
df = DataFrame(range(20), index=idx)
with pytest.raises(KeyError, match=msg):
s.loc[labels]
with pytest.raises(KeyError, match=msg):
s[labels]
with pytest.raises(KeyError, match=msg):
df.loc[labels]
def test_indexing_timeseries_regression(self):
# Issue 34860
arr = date_range("1/1/2008", "1/1/2009")
result = arr.to_series()["2008"]
rng = date_range(start="2008-01-01", end="2008-12-31")
expected = Series(rng, index=rng)
tm.assert_series_equal(result, expected)
def test_index_name_empty(self):
# GH 31368
df = pd.DataFrame({}, index=pd.RangeIndex(0, name="df_index"))
series = pd.Series(1.23, index=pd.RangeIndex(4, name="series_index"))
df["series"] = series
expected = pd.DataFrame(
{"series": [1.23] * 4}, index=pd.RangeIndex(4, name="df_index")
)
tm.assert_frame_equal(df, expected)
# GH 36527
df = pd.DataFrame()
series = pd.Series(1.23, index=pd.RangeIndex(4, name="series_index"))
df["series"] = series
expected = pd.DataFrame(
{"series": [1.23] * 4}, index=pd.RangeIndex(4, name="series_index")
)
tm.assert_frame_equal(df, expected)
| [
"[email protected]"
] | |
a728bf9ae2c46a9eeba638b54da02ebb8ac8ddca | a35b24c8c3c5bdf861f3cda9396f2fa6795ec929 | /abc/abc037/a/main.py | bb4c99a18af37578e976b0d53202738d5e7c3592 | [] | no_license | Msksgm/atcoder_msksgm_practice | 92a19e2d6c034d95e1cfaf963aff5739edb4ab6e | 3ae2dcb7d235a480cdfdfcd6a079e183936979b4 | refs/heads/master | 2021-08-18T16:08:08.551718 | 2020-09-24T07:01:11 | 2020-09-24T07:01:11 | 224,743,360 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | def main():
a, b, c = map(int, input().split())
min_price = min(a, b)
max_price = max(a, b)
ans = (c // min_price)
ans += (c % min_price) // max_price
print(ans)
if __name__ == "__main__":
main()
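
# Worked example (assumed input "100 50 120"): min_price = 50 gives
# ans = 120 // 50 = 2; the remainder (20) is always < min_price <= max_price,
# so the second term contributes 0.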
| [
"[email protected]"
] | |
79f7198200be4d319c47ef26eb3c57f5f1be53d5 | 5cd0807f442e6d3890167c5d9c4715c32ee4dfcc | /Hello/product/admin.py | 0ac66c87b29eec67df5f5a9cf675bd28596b0216 | [] | no_license | udoy382/PythonForBeginners | 592b2890a71e6895c2db43dbaf39f08156ef5826 | 686f5e982ae40f149688a76ded53b90c6f17af8a | refs/heads/main | 2023-03-27T05:14:08.705961 | 2021-03-25T14:35:19 | 2021-03-25T14:35:19 | 351,468,393 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | from django.contrib import admin
from .models import Product
admin.site.register(Product) | [
"[email protected]"
] | |
9815087e80c3f0b15bf9a109a3263889e8a125ae | ae2c75fd7f9e86860ee013c8c05416fa9c688f1d | /manage.py | 94f12bedc46b2d514400712c428e7aefc1760406 | [] | no_license | crowdbotics-apps/new-app-chetna-soni--4500 | 733fc277bb085256867f1a8af29d1b444aa9aa86 | 50b03200b0fe8fa6b2b72214634a3beb6c5bf192 | refs/heads/master | 2023-05-27T22:30:32.627736 | 2020-05-13T13:57:29 | 2020-05-13T13:57:29 | 263,645,180 | 0 | 0 | null | 2021-06-12T18:03:30 | 2020-05-13T13:56:59 | Python | UTF-8 | Python | false | false | 645 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'new_app_chetna_soni__4500.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
13590eb83cedf7e78563f292ee34f03b3d739622 | a0a288a9563ed4519cfe9f9c24ecc41237753dbc | /thechronic/strange.py | 417876af3a2960256bd2b445292b60da0c62abbd | [
"MIT"
] | permissive | iluxonchik/the-chronic | 99b236456efb9c32dfb9e3978f9e2cc28910a03c | 4dd41ea1a96e4c5cb1741de02d55cf09b2e78979 | refs/heads/master | 2021-04-28T22:40:51.993595 | 2018-04-02T13:38:04 | 2018-04-02T13:38:04 | 77,719,263 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | class Strange(object):
"""
    Wrapper around the built-in range() function, which returns str instead of
    int on iteration.
    Just like a range object, an instance of Strange can be iterated over
    multiple times.
"""
def __init__(self, start, stop=None, step=1):
if stop is None:
stop = start
start = 0
self._range = range(start, stop, step)
self._iter = iter(self._range)
def __iter__(self):
return self
def __next__(self):
try:
str_num = str(next(self._iter))
except StopIteration as err:
self._iter = iter(self._range)
raise err
return str_num
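
# Minimal demo (assumption: run as a script):
if __name__ == "__main__":
    s = Strange(3)
    print(list(s))  # ['0', '1', '2']
    print(list(s))  # ['0', '1', '2'] -- reusable, unlike a bare iterator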
| [
"[email protected]"
] | |
8a7f44524ce9a081def3a9a9ada89f66644202d9 | 05e634a232574f676434dfa8e4183f3d0a1a4bc9 | /tutorials/mobilenetv3_prod/Step1-5/mobilenetv3_ref/torchvision/transforms/autoaugment.py | d2317602b1e7662fe828258ffcda461867fe541f | [
"Apache-2.0"
] | permissive | PaddlePaddle/models | 67ac00d93c5255ac64a9d80ae5be2e8927e47cee | 8042c21b690ffc0162095e749a41b94dd38732da | refs/heads/release/2.4 | 2023-09-04T15:23:59.543625 | 2023-07-20T11:54:16 | 2023-07-20T11:54:16 | 88,868,842 | 7,633 | 3,597 | Apache-2.0 | 2023-09-05T23:23:54 | 2017-04-20T13:30:15 | Python | UTF-8 | Python | false | false | 12,501 | py | import math
import torch
from enum import Enum
from torch import Tensor
from typing import List, Tuple, Optional
from . import functional as F, InterpolationMode
__all__ = ["AutoAugmentPolicy", "AutoAugment"]
class AutoAugmentPolicy(Enum):
"""AutoAugment policies learned on different datasets.
Available policies are IMAGENET, CIFAR10 and SVHN.
"""
IMAGENET = "imagenet"
CIFAR10 = "cifar10"
SVHN = "svhn"
def _get_transforms(policy: AutoAugmentPolicy):
if policy == AutoAugmentPolicy.IMAGENET:
return [
(("Posterize", 0.4, 8), ("Rotate", 0.6, 9)),
(("Solarize", 0.6, 5), ("AutoContrast", 0.6, None)),
(("Equalize", 0.8, None), ("Equalize", 0.6, None)),
(("Posterize", 0.6, 7), ("Posterize", 0.6, 6)),
(("Equalize", 0.4, None), ("Solarize", 0.2, 4)),
(("Equalize", 0.4, None), ("Rotate", 0.8, 8)),
(("Solarize", 0.6, 3), ("Equalize", 0.6, None)),
(("Posterize", 0.8, 5), ("Equalize", 1.0, None)),
(("Rotate", 0.2, 3), ("Solarize", 0.6, 8)),
(("Equalize", 0.6, None), ("Posterize", 0.4, 6)),
(("Rotate", 0.8, 8), ("Color", 0.4, 0)),
(("Rotate", 0.4, 9), ("Equalize", 0.6, None)),
(("Equalize", 0.0, None), ("Equalize", 0.8, None)),
(("Invert", 0.6, None), ("Equalize", 1.0, None)),
(("Color", 0.6, 4), ("Contrast", 1.0, 8)),
(("Rotate", 0.8, 8), ("Color", 1.0, 2)),
(("Color", 0.8, 8), ("Solarize", 0.8, 7)),
(("Sharpness", 0.4, 7), ("Invert", 0.6, None)),
(("ShearX", 0.6, 5), ("Equalize", 1.0, None)),
(("Color", 0.4, 0), ("Equalize", 0.6, None)),
(("Equalize", 0.4, None), ("Solarize", 0.2, 4)),
(("Solarize", 0.6, 5), ("AutoContrast", 0.6, None)),
(("Invert", 0.6, None), ("Equalize", 1.0, None)),
(("Color", 0.6, 4), ("Contrast", 1.0, 8)),
(("Equalize", 0.8, None), ("Equalize", 0.6, None)),
]
elif policy == AutoAugmentPolicy.CIFAR10:
return [
(("Invert", 0.1, None), ("Contrast", 0.2, 6)),
(("Rotate", 0.7, 2), ("TranslateX", 0.3, 9)),
(("Sharpness", 0.8, 1), ("Sharpness", 0.9, 3)),
(("ShearY", 0.5, 8), ("TranslateY", 0.7, 9)),
(("AutoContrast", 0.5, None), ("Equalize", 0.9, None)),
(("ShearY", 0.2, 7), ("Posterize", 0.3, 7)),
(("Color", 0.4, 3), ("Brightness", 0.6, 7)),
(("Sharpness", 0.3, 9), ("Brightness", 0.7, 9)),
(("Equalize", 0.6, None), ("Equalize", 0.5, None)),
(("Contrast", 0.6, 7), ("Sharpness", 0.6, 5)),
(("Color", 0.7, 7), ("TranslateX", 0.5, 8)),
(("Equalize", 0.3, None), ("AutoContrast", 0.4, None)),
(("TranslateY", 0.4, 3), ("Sharpness", 0.2, 6)),
(("Brightness", 0.9, 6), ("Color", 0.2, 8)),
(("Solarize", 0.5, 2), ("Invert", 0.0, None)),
(("Equalize", 0.2, None), ("AutoContrast", 0.6, None)),
(("Equalize", 0.2, None), ("Equalize", 0.6, None)),
(("Color", 0.9, 9), ("Equalize", 0.6, None)),
(("AutoContrast", 0.8, None), ("Solarize", 0.2, 8)),
(("Brightness", 0.1, 3), ("Color", 0.7, 0)),
(("Solarize", 0.4, 5), ("AutoContrast", 0.9, None)),
(("TranslateY", 0.9, 9), ("TranslateY", 0.7, 9)),
(("AutoContrast", 0.9, None), ("Solarize", 0.8, 3)),
(("Equalize", 0.8, None), ("Invert", 0.1, None)),
(("TranslateY", 0.7, 9), ("AutoContrast", 0.9, None)),
]
elif policy == AutoAugmentPolicy.SVHN:
return [
(("ShearX", 0.9, 4), ("Invert", 0.2, None)),
(("ShearY", 0.9, 8), ("Invert", 0.7, None)),
(("Equalize", 0.6, None), ("Solarize", 0.6, 6)),
(("Invert", 0.9, None), ("Equalize", 0.6, None)),
(("Equalize", 0.6, None), ("Rotate", 0.9, 3)),
(("ShearX", 0.9, 4), ("AutoContrast", 0.8, None)),
(("ShearY", 0.9, 8), ("Invert", 0.4, None)),
(("ShearY", 0.9, 5), ("Solarize", 0.2, 6)),
(("Invert", 0.9, None), ("AutoContrast", 0.8, None)),
(("Equalize", 0.6, None), ("Rotate", 0.9, 3)),
(("ShearX", 0.9, 4), ("Solarize", 0.3, 3)),
(("ShearY", 0.8, 8), ("Invert", 0.7, None)),
(("Equalize", 0.9, None), ("TranslateY", 0.6, 6)),
(("Invert", 0.9, None), ("Equalize", 0.6, None)),
(("Contrast", 0.3, 3), ("Rotate", 0.8, 4)),
(("Invert", 0.8, None), ("TranslateY", 0.0, 2)),
(("ShearY", 0.7, 6), ("Solarize", 0.4, 8)),
(("Invert", 0.6, None), ("Rotate", 0.8, 4)),
(("ShearY", 0.3, 7), ("TranslateX", 0.9, 3)),
(("ShearX", 0.1, 6), ("Invert", 0.6, None)),
(("Solarize", 0.7, 2), ("TranslateY", 0.6, 7)),
(("ShearY", 0.8, 4), ("Invert", 0.8, None)),
(("ShearX", 0.7, 9), ("TranslateY", 0.8, 3)),
(("ShearY", 0.8, 5), ("AutoContrast", 0.7, None)),
(("ShearX", 0.7, 2), ("Invert", 0.1, None)),
]
def _get_magnitudes():
_BINS = 10
return {
# name: (magnitudes, signed)
"ShearX": (torch.linspace(0.0, 0.3, _BINS), True),
"ShearY": (torch.linspace(0.0, 0.3, _BINS), True),
"TranslateX": (torch.linspace(0.0, 150.0 / 331.0, _BINS), True),
"TranslateY": (torch.linspace(0.0, 150.0 / 331.0, _BINS), True),
"Rotate": (torch.linspace(0.0, 30.0, _BINS), True),
"Brightness": (torch.linspace(0.0, 0.9, _BINS), True),
"Color": (torch.linspace(0.0, 0.9, _BINS), True),
"Contrast": (torch.linspace(0.0, 0.9, _BINS), True),
"Sharpness": (torch.linspace(0.0, 0.9, _BINS), True),
"Posterize": (torch.tensor([8, 8, 7, 7, 6, 6, 5, 5, 4, 4]), False),
"Solarize": (torch.linspace(256.0, 0.0, _BINS), False),
"AutoContrast": (None, None),
"Equalize": (None, None),
"Invert": (None, None),
}
class AutoAugment(torch.nn.Module):
r"""AutoAugment data augmentation method based on
`"AutoAugment: Learning Augmentation Strategies from Data" <https://arxiv.org/pdf/1805.09501.pdf>`_.
If the image is torch Tensor, it should be of type torch.uint8, and it is expected
to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
If img is PIL Image, it is expected to be in mode "L" or "RGB".
Args:
policy (AutoAugmentPolicy): Desired policy enum defined by
:class:`torchvision.transforms.autoaugment.AutoAugmentPolicy`. Default is ``AutoAugmentPolicy.IMAGENET``.
interpolation (InterpolationMode): Desired interpolation enum defined by
:class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
fill (sequence or number, optional): Pixel fill value for the area outside the transformed
image. If given a number, the value is used for all bands respectively.
"""
def __init__(self,
policy: AutoAugmentPolicy=AutoAugmentPolicy.IMAGENET,
interpolation: InterpolationMode=InterpolationMode.NEAREST,
fill: Optional[List[float]]=None):
super().__init__()
self.policy = policy
self.interpolation = interpolation
self.fill = fill
self.transforms = _get_transforms(policy)
if self.transforms is None:
raise ValueError(
"The provided policy {} is not recognized.".format(policy))
self._op_meta = _get_magnitudes()
@staticmethod
def get_params(transform_num: int) -> Tuple[int, Tensor, Tensor]:
"""Get parameters for autoaugment transformation
Returns:
params required by the autoaugment transformation
"""
policy_id = torch.randint(transform_num, (1, )).item()
probs = torch.rand((2, ))
signs = torch.randint(2, (2, ))
return policy_id, probs, signs
def _get_op_meta(self,
name: str) -> Tuple[Optional[Tensor], Optional[bool]]:
return self._op_meta[name]
def forward(self, img: Tensor):
"""
img (PIL Image or Tensor): Image to be transformed.
Returns:
PIL Image or Tensor: AutoAugmented image.
"""
fill = self.fill
if isinstance(img, Tensor):
if isinstance(fill, (int, float)):
fill = [float(fill)] * F._get_image_num_channels(img)
elif fill is not None:
fill = [float(f) for f in fill]
transform_id, probs, signs = self.get_params(len(self.transforms))
for i, (op_name, p,
magnitude_id) in enumerate(self.transforms[transform_id]):
if probs[i] <= p:
magnitudes, signed = self._get_op_meta(op_name)
magnitude = float(magnitudes[magnitude_id].item()) \
if magnitudes is not None and magnitude_id is not None else 0.0
if signed is not None and signed and signs[i] == 0:
magnitude *= -1.0
if op_name == "ShearX":
img = F.affine(
img,
angle=0.0,
translate=[0, 0],
scale=1.0,
shear=[math.degrees(magnitude), 0.0],
interpolation=self.interpolation,
fill=fill)
elif op_name == "ShearY":
img = F.affine(
img,
angle=0.0,
translate=[0, 0],
scale=1.0,
shear=[0.0, math.degrees(magnitude)],
interpolation=self.interpolation,
fill=fill)
elif op_name == "TranslateX":
img = F.affine(
img,
angle=0.0,
translate=[
int(F._get_image_size(img)[0] * magnitude), 0
],
scale=1.0,
interpolation=self.interpolation,
shear=[0.0, 0.0],
fill=fill)
elif op_name == "TranslateY":
img = F.affine(
img,
angle=0.0,
translate=[
0, int(F._get_image_size(img)[1] * magnitude)
],
scale=1.0,
interpolation=self.interpolation,
shear=[0.0, 0.0],
fill=fill)
elif op_name == "Rotate":
img = F.rotate(
img,
magnitude,
interpolation=self.interpolation,
fill=fill)
elif op_name == "Brightness":
img = F.adjust_brightness(img, 1.0 + magnitude)
elif op_name == "Color":
img = F.adjust_saturation(img, 1.0 + magnitude)
elif op_name == "Contrast":
img = F.adjust_contrast(img, 1.0 + magnitude)
elif op_name == "Sharpness":
img = F.adjust_sharpness(img, 1.0 + magnitude)
elif op_name == "Posterize":
img = F.posterize(img, int(magnitude))
elif op_name == "Solarize":
img = F.solarize(img, magnitude)
elif op_name == "AutoContrast":
img = F.autocontrast(img)
elif op_name == "Equalize":
img = F.equalize(img)
elif op_name == "Invert":
img = F.invert(img)
else:
raise ValueError(
"The provided operator {} is not recognized.".format(
op_name))
return img
def __repr__(self):
return self.__class__.__name__ + '(policy={}, fill={})'.format(
self.policy, self.fill)
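
# Illustrative usage sketch (assumption: this module is imported through its
# package, e.g. `from torchvision.transforms.autoaugment import AutoAugment,
# AutoAugmentPolicy`, and Pillow is available):
#
#     from PIL import Image
#     augmenter = AutoAugment(policy=AutoAugmentPolicy.CIFAR10)
#     out = augmenter(Image.new("RGB", (32, 32)))  # augmented PIL image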
| [
"[email protected]"
] | |
4baf087e4e4c72d03eb1f4f5b7f52fbbaa305d56 | b71e91d4eb55b6826dbe378180aa7b2b8a717bdf | /Capitulo1/exerc4_3_v5.py | 1058a9e307d85293890de1402b022dd0572ac930 | [] | no_license | gustavopierre/think_python | 49a9ceb50f760b41f6fbac54a07f6b394aa8d637 | a3ad6e660db4e6ce2aa105f5084e585f95936867 | refs/heads/main | 2023-03-24T23:48:29.415573 | 2021-03-15T22:15:30 | 2021-03-15T22:15:30 | 348,137,048 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | import turtle
import math
def arc(t, r, angle):
n = int(2*math.pi*r/10)
x = int(n*angle/360)
for count in range(x):
t.fd(10)
t.lt(360/n)
print(f'r = {r}')
print(f'angle = {angle}')
print(f'n = {n}')
print(f'x = {x}')
bob = turtle.Turtle()
print(bob)
arc(bob, 100, 270)
turtle.mainloop()
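# Worked numbers for the call above (illustrative): arc(bob, 100, 270) gives
# n = int(2*pi*100/10) = 62 segments per full circle and draws
# x = int(62*270/360) = 46 of them.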
| [
"[email protected]"
] | |
6e02a4cd2c2891c084f93dad75871c179905debf | b54097ce251925a82e591a08ae625fa884500b9c | /tests/test_github.py | e942b6bfaabe6db425870e1377356785c841cac2 | [
"BSD-3-Clause"
] | permissive | johnnoone/aiovault | b45b576cfb30570b1bbe9ab018a3247156dbefea | 03e1bfb6f0404dcf97ce87a98c539027c4e78a37 | refs/heads/master | 2021-01-10T19:56:50.715283 | 2015-07-10T21:15:21 | 2015-07-10T21:15:21 | 35,452,083 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,199 | py | from aiovault import Vault, LoginError
from conftest import async_test
import pytest
@async_test
def test_github_raw_loading(dev_server):
client = Vault(dev_server.addr, token=dev_server.root_token)
response = yield from client.read('/sys/auth/github/login',
params={"help": 1})
data = yield from response.json()
print(data['help'])
# low level create/delete
response = yield from client.write('/sys/auth/github',
json={"type": "github"})
assert response.status == 204, 'Must add github auth backend'
response = yield from client.delete('/sys/auth/github')
assert response.status == 204, 'Must delete github auth backend'
# high level create/delete
response = yield from client.auth.enable('github')
assert response.type == 'github', 'Must add github auth backend'
response = yield from client.auth.disable('github')
assert response is True, 'Must delete github auth backend'
@async_test
def test_help(dev_server):
client = Vault(dev_server.addr, token=dev_server.root_token)
response = yield from client.read('/sys/auth/github',
params={"help": 1})
data = yield from response.json()
assert 'help' in data
@async_test
def test_github_loading(dev_server, env):
try:
github_org = env.GITHUB_ORG
github_token = env.GITHUB_TOKEN
except AttributeError:
        pytest.skip('GITHUB_ORG or GITHUB_TOKEN missing')
client = Vault(dev_server.addr, token=dev_server.root_token)
backend1 = backend = yield from client.auth.enable('github')
configured = yield from backend.configure(organization=github_org)
assert configured
configured = yield from backend.write_team('test', policies='foo')
assert configured
client = Vault(dev_server.addr)
backend = client.auth.load('github')
dummy_token = '1111111111111111111111111111111111111111'
with pytest.raises(LoginError):
yield from backend.login(github_token=dummy_token)
yield from backend.login(github_token=github_token)
disabled = yield from backend1.disable()
assert disabled
| [
"[email protected]"
] | |
d36a0fd44877c71c01b65bf4986938b78a9d64dc | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/remove_invalid_submissions.py | bea3c87a9227a23ef93473ae7d1cd253ca981bc3 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 2,261 | py | import os
from collections import Counter
import shutil
import sys
import argparse
import tempfile
# for example call this script on hamming from exercism_data directory like: python remove_invalid_submissions.py "ruby/hamming/" "filtered-submissions/" "compute"
parser = argparse.ArgumentParser(description='Filter and clean data and put in src directory')
parser.add_argument('data_directory', help='Directory for original data')
parser.add_argument('filtered_submissions', help='Directory to store filtered data')
parser.add_argument('method', help='Method to extract')
args = parser.parse_args()
data_directory = args.data_directory
filtered_submissions =args.filtered_submissions
method = args.method
mapfile = open("mapping.csv", "w")
count = 0
for f in os.listdir(data_directory):
count+=1
print 'starting with submissions: ' + str(count)
if not os.path.isdir(filtered_submissions):
os.mkdir(filtered_submissions)
else:
shutil.rmtree(filtered_submissions)
os.mkdir(filtered_submissions)
if not os.path.isdir('src'):
os.mkdir('src')
else:
shutil.rmtree('src')
os.mkdir('src')
for f in os.listdir(data_directory):
t = tempfile.NamedTemporaryFile(mode="r+")
with open(data_directory+f) as filename:
for line in filename:
line = line.partition('#')[0]
line = line.rstrip() + "\n"
t.write(line)
t.seek(0)
data = t.read()
#if not (data.count('def') == 1 or data.find('def word_count') == -1 or data.find('def initialize') == -1):
if data.count('def') == 1 and (data.find('def self.' + str(method)) != -1 or data.find('def ' + str(method)) != -1):
data = data.replace('def self.' + str(method), 'def ' + str(method))
num_ends_to_strip = data.count('class') + data.count('module')
data = data[data.find('def ' + str(method)):]
for i in range(num_ends_to_strip):
data = data[:data.rfind('end')]
data = data.rstrip()
out = open(filtered_submissions+f, "w+")
out.write(data)
t.close()
count = 0
for f in os.listdir(filtered_submissions):
submission_id = f.strip(".rb")
index_id = len(os.listdir('src'))
shutil.copyfile(filtered_submissions+f, 'src/'+str(index_id)+'.rb')
mapfile.write(str(submission_id) + ' : ' + str(index_id) + '\n')
count += 1
print 'filtered to submissions: ' + str(count)
| [
"[email protected]"
] | |
b80cff1f63fc85e2e367363d8d4217c52f1bcb9c | 3e3741d9ea06f1dcd560e27145256bd3177bed14 | /01_py基础/第2周/day01/test05.py | 0b0877d5b07aa6ba958a21bc84e6b7a6d5a0890e | [] | no_license | Lousm/Python | 778bc730db09ab135bf53c7b62af29df2407199a | d3f19600012b3576cd5d58df510c17590fcaec14 | refs/heads/master | 2020-03-26T16:40:01.188306 | 2018-11-06T03:56:20 | 2018-11-06T03:56:20 | 145,116,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | a = [i*10 for i in range(1, 13)]
b = [i*2 for i in range(1, 13)]
def sy(a, b):
    # element-wise sum of two equal-length lists
    c = []
    for i in range(len(a)):
        c.append(a[i]+b[i])
    return c
c = sy(a, b)
print(c)
| [
"[email protected]"
] | |
1b6b591a2a8ad31a5c1bd110be072f800865522b | e838ea567fe5216bd83b72d5cc549363a666ac3d | /community/migrations/0001_initial.py | 756f713d6f763cc2a681b7383369ae2c3dc63f28 | [] | no_license | iuriramos/swim-registry | f7ffee9a57b92021e7066820249092d1558a944d | 7c71d294b5aa7cb40e01ed559e2fcb81d2e1f43a | refs/heads/master | 2021-09-13T20:22:29.624535 | 2018-05-03T21:30:26 | 2018-05-03T21:30:26 | 85,312,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,544 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-18 18:20
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('registry', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Participant',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=255)),
('description', models.TextField(null=True)),
('image', models.ImageField(default='participants/images/profiles/none/default.jpg', upload_to='participants/images/profiles/')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ParticipantCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(choices=[('AIRSPACE COMPANY', 'Airspace company'), ('RESEARCH ORGANIZATION', 'Research Organization'), ('AIRPORT', 'Airport'), ('AERODROME', 'Aerodrome'), ('RESEARCH INSTITUTION', 'Research Institution'), ('PUBLIC AGENCY', 'Public Agency'), ('OTHER', 'Other')], max_length=50, unique=True)),
],
options={
'verbose_name_plural': 'participant categories',
'verbose_name': 'participant category',
},
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('notification_frequency', models.CharField(choices=[('NEVER', 'Never'), ('IMMEDIATE', 'Immediate'), ('DAILY', 'Daily'), ('WEEKLY', 'Weekly')], default='NEVER', max_length=10)),
('following_organizations', models.ManyToManyField(related_name='followers', to='community.Participant')),
('subscriptions_activity', models.ManyToManyField(related_name='profiles', to='registry.ActivityCategory')),
('subscriptions_content_type', models.ManyToManyField(related_name='profiles', to='registry.SubscriptionContentType')),
('subscriptions_data', models.ManyToManyField(related_name='profiles', to='registry.DataCategory')),
('subscriptions_flight_phase', models.ManyToManyField(related_name='profiles', to='registry.FlightPhaseCategory')),
('subscriptions_region', models.ManyToManyField(related_name='profiles', to='registry.RegionCategory')),
('subscriptions_stakeholder', models.ManyToManyField(related_name='profiles', to='registry.StakeholderCategory')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='RegistrationRequest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('first_name', models.CharField(max_length=255)),
('last_name', models.CharField(max_length=255)),
('email', models.EmailField(max_length=254)),
('organization', models.CharField(max_length=255)),
('role', models.CharField(max_length=255)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='participant',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='participants', to='community.ParticipantCategory'),
),
]
| [
"[email protected]"
] | |
b46422a949e65f9e0a70a77877dc9dc67c456dbf | ff439153e551f58850ecfb32d1b067c9a5d7405d | /krcal/core/ltmaps_test.py | 9a01f3bf67925c9760722ab399af3e8c2dc0ae63 | [] | no_license | jmhaefner/ICAROS | 79d18b9f372e0d8cd180d9c4ba7572d9ec1efcf3 | 4477157abd9ab5df42a28012c167d9e0a9a1ce5a | refs/heads/master | 2021-08-06T14:27:50.179501 | 2021-06-24T19:31:29 | 2021-06-24T19:31:29 | 167,604,724 | 0 | 1 | null | 2019-01-25T19:50:55 | 2019-01-25T19:50:54 | null | UTF-8 | Python | false | false | 8,249 | py | import os
import pandas as pd
import numpy as np
from invisible_cities.io.dst_io import load_dsts
from krcal.core.core_functions import time_delta_from_time
from krcal.core.analysis_functions import kr_event
from krcal.core.analysis_functions import event_map
from krcal.core.analysis_functions import select_xy_sectors
from krcal.core.analysis_functions import select_rphi_sectors
from krcal.core.fit_lt_functions import get_time_series
from krcal.core.fit_lt_functions import time_fcs
from krcal.core.fit_lt_functions import fit_fcs_in_xy_bin
from krcal.core.fit_lt_functions import fit_fcs_in_rphi_sectors
from krcal.core.fit_lt_functions import fit_map_xy
from krcal.core.rphi_maps_functions import rphi_sector_map_def
from krcal.core.rphi_maps_functions import define_rphi_sectors
from krcal.core.kr_types import FitType, KrSector, MapType
import warnings
import pytest
@pytest.fixture(scope='session')
def DST(dst_filenames_path):
dst = load_dsts(dst_filenames_path, "DST", "Events")
dst_time = dst.sort_values('event')
T = dst_time.time.values
DT = time_delta_from_time(T)
kge = kr_event(dst, DT, dst.S2e, dst.S2q)
return dst, DT, kge
@pytest.fixture(scope='session')
def time_series(DST):
nt = 10
dst, DT, kge = DST
ts, masks = get_time_series(nt, (DT[0],DT[-1]), kge)
return nt, ts, masks
@pytest.fixture(scope='session')
def kBins():
return np.array([-200., -120., -40., 40., 120., 200.])
def test_get_time_series(time_series, DST):
dst, DT, kge = DST
nt, ts, masks = time_series
lengths = [len(mask)for mask in masks]
assert len(masks) == len(ts) == nt
assert len(masks[0]) == len(kge.X) == len(dst)
assert np.equal(lengths, len(dst) * np.ones(len(lengths))).all()
def test_time_fcs(time_series, DST):
dst, DT, kge = DST
nt, ts, masks = time_series
fps = time_fcs(ts, masks, kge,
nbins_z = 10,
nbins_e = 25,
range_z = (50, 550),
range_e = (5000, 13500),
energy = 'S2e',
fit = FitType.profile)
fpu = time_fcs(ts, masks, kge,
nbins_z = 10,
nbins_e = 25,
range_z = (50, 550),
range_e = (5000, 13500),
energy = 'S2e',
fit = FitType.unbined)
assert np.allclose(fps.e0, fpu.e0, rtol=1e-02)
assert np.allclose(fps.lt, fpu.lt, rtol=1e-02)
def test_fit_fcs_in_xy_bin(DST, kBins):
dst, DT, kge = DST
KRE = select_xy_sectors(dst, DT, dst.S2e.values, dst.S2q.values, kBins, kBins)
neM = event_map(KRE)
fps_p = fit_fcs_in_xy_bin (xybin = (2,2),
selection_map = KRE,
event_map = neM,
n_time_bins = 1,
time_diffs = DT,
nbins_z = 25,
nbins_e = 50,
range_z =(50, 550),
range_e = (5000, 13500),
energy = 'S2e',
fit = FitType.profile,
n_min = 100)
fps_u = fit_fcs_in_xy_bin (xybin = (2,2),
selection_map = KRE,
event_map = neM,
n_time_bins = 1,
time_diffs = DT,
nbins_z = 25,
nbins_e = 50,
range_z =(50, 550),
range_e = (5000, 13500),
energy = 'S2e',
fit = FitType.unbined,
n_min = 100)
np.allclose(fps_p.e0 / fps_u.e0, 1, rtol=1e-02)
np.allclose(fps_p.lt / fps_u.lt, 1, rtol=1e-02)
def test_fit_fcs_in_rphi_sectors(DST, kBins):
dst, DT, kge = DST
rpsmf = rphi_sector_map_def(nSectors =4, rmax =200, sphi =90)
rps = define_rphi_sectors(rpsmf)
KRES = select_rphi_sectors(dst, DT, dst.S2e, dst.S2q, rps)
neM = event_map(KRES)
fcs_u = fit_fcs_in_rphi_sectors(sector = 0,
selection_map = KRES,
event_map = neM,
n_time_bins =1,
time_diffs =DT,
nbins_z = 25,
nbins_e = 50,
range_z =(50, 550),
range_e = (5000, 13500),
energy = 'S2e',
fit = FitType.unbined,
n_min = 100)
fcs_p = fit_fcs_in_rphi_sectors(sector = 0,
selection_map = KRES,
event_map = neM,
n_time_bins =1,
time_diffs =DT,
nbins_z = 25,
nbins_e = 50,
range_z =(50, 550),
range_e = (5000, 13500),
energy = 'S2e',
fit = FitType.profile,
n_min = 100)
for i in range(4):
np.allclose(fcs_u[i].e0 / fcs_p[i].e0, 1, rtol=1e-02)
np.allclose(fcs_u[i].lt / fcs_p[i].lt, 1, rtol=1e-02)
def test_select_xy_sectors(DST, kBins):
dst, DT, kge = DST
KRE = select_xy_sectors(dst, DT, dst.S2e.values, dst.S2q.values, kBins, kBins)
neM = event_map(KRE)
l = ((neM[0]/neM[4]).values > 0.8).all()
r = ((neM[0]/neM[4]).values < 1.1).all()
assert l & r
def test_fit_xy_map(DST, kBins):
dst, DT, kge = DST
def get_maps_t0(fmxy):
pE0 = {}
pLT = {}
pC2 = {}
for nx in fmxy.keys():
pE0[nx] = [fmxy[nx][ny].e0[0] for ny in range(len(fmxy[nx]))] # notice [0] ts bin
pLT[nx] = [fmxy[nx][ny].lt[0] for ny in range(len(fmxy[nx]))]
pC2[nx] = [fmxy[nx][ny].c2[0] for ny in range(len(fmxy[nx]))]
return (pd.DataFrame.from_dict(pE0),
pd.DataFrame.from_dict(pLT),
pd.DataFrame.from_dict(pC2))
KRE = select_xy_sectors(dst, DT, dst.S2e.values, dst.S2q.values, kBins, kBins)
neM = event_map(KRE)
fpmxy = fit_map_xy(selection_map = KRE,
event_map = neM,
n_time_bins = 1,
time_diffs = DT,
nbins_z = 25,
nbins_e = 50,
range_z =(50, 550),
range_e = (5000, 13500),
energy = 'S2e',
fit = FitType.profile,
n_min = 100)
mE0p, mLTp, mC2p = get_maps_t0(fpmxy)
fumxy = fit_map_xy(selection_map = KRE,
event_map = neM,
n_time_bins = 1,
time_diffs = DT,
nbins_z = 25,
nbins_e = 50,
range_z =(50, 550),
range_e = (5000, 13500),
energy = 'S2e',
fit = FitType.unbined,
n_min = 100)
mE0u, mLTu, mC2u = get_maps_t0(fumxy)
r1 = (mLTp / mLTu).values
l1 = np.allclose(r1, 1, rtol=1e-01)
r2 = mE0p / mE0u
l2 = np.allclose(r2, 1, rtol=1e-02)
assert l1 & l2
| [
"[email protected]"
] | |
e67dd18e17853bde0845ae57c5ee63c25d10828b | a657283ae5208611351606f35b05f46f63581d5c | /website/routes.py | 83404e7b3b86c06c28d0c50b12f5eb7115140b6e | [] | no_license | rrkas/handwriting-generation-flask | e17c71f0335231a6157c728c78ce4c30d7d6df61 | 049091b1a3d341af0ce50e07d484c1bbf98fd3d8 | refs/heads/master | 2023-07-14T22:12:56.482115 | 2021-08-29T10:14:56 | 2021-08-29T10:14:56 | 391,993,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,975 | py | import io
import os
import uuid
import pywhatkit as kit
from flask import *
from werkzeug.datastructures import FileStorage
from website import app
output_dir = os.path.join('website', 'static', 'output')
if not os.path.exists(output_dir):
os.mkdir(output_dir)
allowed_file_ext = ['txt']
def allowed_file(filename):
return filename.split('.')[-1] in allowed_file_ext
def generate_signature(file):
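    # Render text (from an uploaded .txt file or a raw string) into a
    # handwriting-style PNG via pywhatkit. Returns (output_filename, True)
    # on success, or (error_message, False) on failure.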
try:
output_filename = str(uuid.uuid4().hex)
if isinstance(file, FileStorage):
if os.path.exists('pywhatkit_dbs.txt'):
os.remove('pywhatkit_dbs.txt')
file.save(os.path.join(output_dir, output_filename + '.txt'))
with open(os.path.join(output_dir, output_filename + '.txt'), 'r') as f:
text = f.read()
os.remove(os.path.join(output_dir, output_filename + '.txt'))
else:
text = file
kit.text_to_handwriting(
string=text,
rgb=(0, 0, 0),
save_to=os.path.join(output_dir, output_filename + '.png'),
)
return output_filename, True
except BaseException as e:
print(e)
return str(e), False
@app.route('/', methods=['POST', 'GET'])
def home():
if request.method == 'POST':
# print("request", request)
print("form", request.form)
if request.form.get('inputtype') == 'file':
if 'file' not in request.files:
flash('No file part!')
return redirect(request.url)
file = request.files.get('file')
if not allowed_file(file.filename):
flash('Invalid File!')
return redirect(request.url)
else:
file = request.form.get('text')
img_name, valid = generate_signature(file)
if valid:
flash('Image Generated Successfully!', 'success')
else:
flash('Something went wrong! Please try again!!', 'error')
return redirect(request.url)
return redirect(url_for('home', img_name=img_name))
filename = request.args.get('img_name')
result_path = os.path.join(output_dir, str(filename) + '.png')
if filename and not os.path.exists(result_path):
abort(404)
return render_template('home.html', img_name=request.args.get('img_name'))
@app.route('/download/<string:filename>', methods=['GET', 'POST'])
def download(filename):
result_path = os.path.join(output_dir, filename + '.png')
if not os.path.exists(result_path):
abort(404)
return_data = io.BytesIO()
with open(result_path, 'rb') as fo:
return_data.write(fo.read())
return_data.seek(0)
os.remove(result_path)
return send_file(
return_data,
mimetype='image/png',
as_attachment=True,
attachment_filename='txt2handwriting.png'
)
@app.errorhandler(404)
def error_404(error):
return render_template('errors/404.html')
| [
"[email protected]"
] | |
611500bc11e4bf0093b270c1e76a4ec33c642061 | e61e664d95af3b93150cda5b92695be6551d2a7c | /vega/security/load_pickle.py | 206511004faf87ad32800052b332f62e12f296b8 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | huawei-noah/vega | 44aaf8bb28b45f707ed6cd4e871ba70fc0c04846 | 12e37a1991eb6771a2999fe0a46ddda920c47948 | refs/heads/master | 2023-09-01T20:16:28.746745 | 2023-02-15T09:36:59 | 2023-02-15T09:36:59 | 273,667,533 | 850 | 184 | NOASSERTION | 2023-02-15T09:37:01 | 2020-06-20T08:20:06 | Python | UTF-8 | Python | false | false | 1,812 | py | # Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Load pickle."""
import pickle
__all__ = ["restricted_loads"]
safe_builtins = {
'vega',
'torch',
'torchvision',
'functools',
'timm',
'mindspore',
'tensorflow',
'numpy',
'imageio',
'collections',
'apex',
'ascend_automl'
}
class RestrictedUnpickler(pickle.Unpickler):
"""Restrict unpickler."""
def __init__(self, file, fix_imports, encoding, errors, security):
super(RestrictedUnpickler, self).__init__(file=file, fix_imports=fix_imports, encoding=encoding, errors=errors)
self.security = security
def find_class(self, module, name):
"""Find class."""
_class = super().find_class(module, name)
if self.security:
if module.split('.')[0] in safe_builtins:
return _class
raise pickle.UnpicklingError(f"global '{module}' is forbidden")
else:
return _class
def restricted_loads(file, fix_imports=True, encoding="ASCII", errors="strict", security=False):
"""Load obj."""
return RestrictedUnpickler(file, fix_imports=fix_imports, encoding=encoding, errors=errors,
security=security).load()
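

# Minimal usage sketch (illustrative only, not part of the original module):
# round-trip a plain dict through the restricted loader. Built-in containers
# and scalars deserialize without calling find_class, so this succeeds even
# with security=True.
if __name__ == "__main__":
    import io
    payload = pickle.dumps({"answer": 42})
    print(restricted_loads(io.BytesIO(payload), security=True))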
| [
"[email protected]"
] | |
486752af90a81014c8a2c8b798d2c1b5fc1c35eb | 9dbe507104b03275b1ed5dc91a4aaa2ae6af4f51 | /hearthbreaker/cards/minions/shaman.py | 800985cec4d413a7eaac702479486e1dcdcc24bf | [
"MIT"
] | permissive | bussiere/hearthbreaker | 55fc7c77d8ffb37cda2b5d9afb7ccd44c250702c | 074e20de3498d078877e77b3603580b511e8522b | refs/heads/master | 2021-01-16T22:13:32.110626 | 2014-12-17T13:37:32 | 2014-12-17T13:37:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,946 | py | from hearthbreaker.tags.action import ChangeAttack, Draw, ChangeHealth, Damage, Give, Windfury
from hearthbreaker.tags.base import Aura, Effect, Battlecry
from hearthbreaker.tags.condition import Adjacent, HasOverload
from hearthbreaker.tags.event import TurnEnded, CardPlayed
from hearthbreaker.tags.selector import MinionSelector, SelfSelector, PlayerSelector, CharacterSelector, BothPlayer, \
UserPicker
from hearthbreaker.constants import CHARACTER_CLASS, CARD_RARITY, MINION_TYPE
from hearthbreaker.game_objects import MinionCard, Minion
class AlAkirTheWindlord(MinionCard):
def __init__(self):
super().__init__("Al'Akir the Windlord", 8, CHARACTER_CLASS.SHAMAN, CARD_RARITY.LEGENDARY)
def create_minion(self, player):
return Minion(3, 5, windfury=True, charge=True, divine_shield=True, taunt=True)
class DustDevil(MinionCard):
def __init__(self):
super().__init__("Dust Devil", 1, CHARACTER_CLASS.SHAMAN, CARD_RARITY.COMMON, overload=2)
def create_minion(self, player):
return Minion(3, 1, windfury=True)
class EarthElemental(MinionCard):
def __init__(self):
super().__init__("Earth Elemental", 5, CHARACTER_CLASS.SHAMAN, CARD_RARITY.EPIC, overload=3)
def create_minion(self, player):
return Minion(7, 8, taunt=True)
class FireElemental(MinionCard):
def __init__(self):
super().__init__("Fire Elemental", 6, CHARACTER_CLASS.SHAMAN, CARD_RARITY.COMMON,
battlecry=Battlecry(Damage(3), CharacterSelector(players=BothPlayer(), picker=UserPicker())))
def create_minion(self, player):
return Minion(6, 5)
class FlametongueTotem(MinionCard):
def __init__(self):
super().__init__("Flametongue Totem", 2, CHARACTER_CLASS.SHAMAN, CARD_RARITY.COMMON, MINION_TYPE.TOTEM)
def create_minion(self, player):
return Minion(0, 3, auras=[Aura(ChangeAttack(2), MinionSelector(Adjacent()))])
class ManaTideTotem(MinionCard):
def __init__(self):
super().__init__("Mana Tide Totem", 3, CHARACTER_CLASS.SHAMAN, CARD_RARITY.RARE, MINION_TYPE.TOTEM)
def create_minion(self, player):
return Minion(0, 3, effects=[Effect(TurnEnded(), Draw(), PlayerSelector())])
class UnboundElemental(MinionCard):
def __init__(self):
super().__init__("Unbound Elemental", 3, CHARACTER_CLASS.SHAMAN, CARD_RARITY.COMMON)
def create_minion(self, player):
return Minion(2, 4, effects=[Effect(CardPlayed(HasOverload()), ChangeAttack(1), SelfSelector()),
Effect(CardPlayed(HasOverload()), ChangeHealth(1), SelfSelector())])
class Windspeaker(MinionCard):
def __init__(self):
super().__init__("Windspeaker", 4, CHARACTER_CLASS.SHAMAN, CARD_RARITY.COMMON,
battlecry=Battlecry(Give(Windfury()), MinionSelector(picker=UserPicker())))
def create_minion(self, player):
return Minion(3, 3)
| [
"[email protected]"
] | |
8277d4f471be2dee3c2676a6bf9cbd30cf236a64 | c0ad282ab743a315e2f252a627933cb168434c1d | /models/agreement/type_prior.py | bde43251219d85d73f078c3d0ba4fad4980ae25c | [
"MIT"
] | permissive | AlexKuhnle/ShapeWorld | 6d1e16adc94e860abae99ade869f72575f573bc4 | e720bf46e57fc01326d04d639fa6133d9c12158f | refs/heads/master | 2021-07-09T00:02:33.808969 | 2021-04-19T11:10:52 | 2021-04-19T11:10:52 | 80,815,972 | 58 | 28 | MIT | 2021-04-19T11:10:53 | 2017-02-03T09:40:19 | Python | UTF-8 | Python | false | false | 2,722 | py | from models.TFMacros.tf_macros import *
def model(model, inputs, dataset_parameters):
caption = Input(name='caption_rpn', shape=dataset_parameters['caption_rpn_shape'], dtype='int', tensor=inputs.get('caption_rpn'))()
caption_length = Input(name='caption_rpn_length', shape=(), dtype='int', tensor=inputs.get('caption_rpn_length'))
agreement = Input(name='agreement', shape=(), dtype='float', tensor=inputs.get('agreement'))()
agreement = (
(caption, caption_length, agreement) >>
SuffixPrior(suffix_length=1, vocabulary_size=dataset_parameters['rpn_vocabulary_size']) >>
Binary(name='agreement', binary_transform=False, tensor=agreement)
)
return agreement
class SuffixPrior(Unit):
num_in = 3
num_out = 1
def __init__(self, suffix_length, vocabulary_size):
super(SuffixPrior, self).__init__()
self.suffix_length = suffix_length
self.vocabulary_size = vocabulary_size
def initialize(self, caption, caption_length, agreement):
super(SuffixPrior, self).initialize(caption, caption_length, agreement)
shape = tuple(self.vocabulary_size for _ in range(self.suffix_length)) + (2,)
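        # suffix_agreement_counts[s_1, ..., s_k, a] counts how many observed
        # captions ending with RPN suffix (s_1, ..., s_k) carried agreement
        # label a (0 = disagree, 1 = agree).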
self.suffix_agreement_counts = tf.get_variable(name='suffix-agreement-counts', shape=shape, dtype=tf.int32, initializer=tf.zeros_initializer(dtype=tf.int32), trainable=False)
def forward(self, caption, caption_length, agreement):
super(SuffixPrior, self).forward(caption, caption_length, agreement)
batch_size = tf.shape(input=caption)[0]
slice_indices = [tf.stack(values=(tf.range(batch_size), caption_length - (self.suffix_length - n)), axis=1) for n in range(self.suffix_length)]
suffix = tf.stack(values=[tf.gather_nd(params=caption, indices=indices) for indices in slice_indices], axis=1)
agreement_counts = tf.gather_nd(params=self.suffix_agreement_counts, indices=suffix)
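        # Majority vote over the accumulated counts: predict 0 when past
        # disagreements dominate, 1 when agreements dominate, 0.5 on a tie.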
prior = tf.where(
condition=(agreement_counts[:, 0] > agreement_counts[:, 1]),
x=tf.zeros(shape=(batch_size,)),
y=tf.where(
condition=(agreement_counts[:, 0] < agreement_counts[:, 1]),
x=tf.ones(shape=(batch_size,)),
y=(tf.ones(shape=(batch_size,)) * 0.5)
)
)
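        # Record the observed (suffix, agreement) pair by incrementing its
        # counter; the scatter-add is registered as the model's "optimization"
        # op, since this prior baseline has no trainable weights.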
agreement = tf.expand_dims(input=tf.cast(x=agreement, dtype=Model.dtype('int')), axis=1)
indices = tf.concat(values=(suffix, agreement), axis=1)
updates = tf.ones(shape=(batch_size,), dtype=Model.dtype('int'))
assert Model.current.optimization is None
Model.current.optimization = tf.scatter_nd_add(ref=self.suffix_agreement_counts, indices=indices, updates=updates)
return prior
| [
"[email protected]"
] | |
952ecae4e414db6b616a055126571c0e7b129cdf | 8b427d0a012d7dbd3b49eb32c279588f9ebd4e6e | /05 排序和搜索/binary_search.py | 33fe499fccf320c9b9bcc7589236e441fdbcd076 | [] | no_license | chenyang929/Problem-Solving-with-Algorithms-and-Data-Structures-using-Python-Notes | e9f1b324d86963333edaf855fdb9e126e59e8542 | aed976e020147fe30a8e0bb708dfbe4bab4c15f7 | refs/heads/master | 2020-03-18T02:46:12.385967 | 2018-07-24T08:24:41 | 2018-07-24T08:24:41 | 134,206,437 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,023 | py | # binary_search.py
# Iterative version
def binary_search(lst, item):
    low = 0
    high = len(lst) - 1
    while low <= high:
        mid = (low + high) // 2
        guess = lst[mid]
        if item == guess:
            return mid
        elif item > guess:
            low = mid + 1    # discard the lower half
        else:
            high = mid - 1   # discard the upper half
    return -1


# Recursive version (higher memory overhead from the call stack)
def binary_search1(lst, item, low=0, high=None):
    if high is None:
        high = len(lst) - 1
    if low > high:
        return -1
    mid = (low + high) // 2
    guess = lst[mid]
    if item == guess:
        return mid
    elif item > guess:
        return binary_search1(lst, item, mid + 1, high)
    else:
        return binary_search1(lst, item, low, mid - 1)
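

# For comparison -- a minimal sketch, not part of the original exercise:
# the standard-library bisect module gives the same O(log n) lookup.
# bisect_left returns an insertion point, so membership is checked explicitly.
import bisect

def binary_search2(lst, item):
    i = bisect.bisect_left(lst, item)
    if i < len(lst) and lst[i] == item:
        return i
    return -1
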
if __name__ == '__main__':
l = [1, 3, 4, 7, 9, 12, 14]
print(binary_search(l, 12)) # 5
print(binary_search1(l, 12)) # 5
print(binary_search(l, 5)) # -1
print(binary_search1(l, 5)) # -1
| [
"[email protected]"
] | |
1c18a6ddc3944da8e2ba5f5ef396825ac6423869 | 6e13f7fdae0144dd0397031c59397b0372f0872a | /horch/layers/_se.py | 43f7469184fbc7b507af7080e15eb8071fc1c974 | [] | no_license | sbl1996/horch | 02e9996f764748c62648464d58318ceff92c87ed | 50d4f4da241a5727e3924a36fbc730dc61284904 | refs/heads/master | 2020-03-20T05:00:43.444092 | 2018-07-27T00:14:45 | 2018-07-27T00:14:45 | 137,201,939 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 671 | py | import horch as H
from ._linear import Linear
from ._module import Module
class SE(Module):
"""Squeeze and Excitation Module
"""
def __init__(self, channels, reduction_ratio=16):
super().__init__()
self.channels = channels
self.reduction_ratio = reduction_ratio
reduced_channels = channels // reduction_ratio
self.fc1 = Linear(channels, reduced_channels)
self.fc2 = Linear(reduced_channels, channels)
def forward(self, x):
z = H.mean(x, axis=(2,3))
z = self.fc1(z)
z = H.relu(z)
z = self.fc2(z)
s = H.sigmoid(z)
n, c = s.shape
s = s.reshape(n, c, 1, 1)
x = x * s
return x | [
"[email protected]"
] | |
ffd7b753f12a8fff9b52468226a6155d9a60a7c9 | 7bd9be7f25be80791f9220b62025f06170273293 | /end-plugins/pycerebro/examples/excel_export.py | 7ccaaa79c69f3f70111f28d64cfa01c407d3180a | [] | no_license | cerebrohq/cerebro-plugins | ab46b4844adcb12c51d14e21f2c0d8b758b0bb57 | e2e0f97b548ef22957e13d614200027ba89215e0 | refs/heads/master | 2021-11-12T16:25:48.228521 | 2021-10-22T11:25:58 | 2021-10-22T11:25:58 | 143,178,631 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 7,156 | py | # -*- coding: utf-8 -*-
"""
Пример экспорта задачи(проекта) со всеми вложенными задачами в Excel.
Этот пример демонстрирует экспорт свойств задачи в Excel.
Для записи в формат Excel используется сторонний пакет xlsxwriter (https://xlsxwriter.readthedocs.org/)
Для преобразования текста в формате html используется сторонний пакет html2text (http://www.aaronsw.com/2002/html2text/).
В модуле используются следующие функции:
do_export - Функция экспорта. Принимает параметры: Имя пользователя, Пароль пользователя, Путь до задачи, Путь к файлу Excel.
write - Функция, которая записывает свойства задачи и всех вложенных задач в файл Excel.
connect_db - Функция для соединения с базой данных Cerebro.
write_info, write_error - Функции для логирования.
Пример использования:
do_export('Имя_пользователя', 'Пароль_пользователя', '/Путь/к/Задаче', 'C:/путь/к/файлу.xlsx')
"""
# Имена колонок Excel
columns = { 0: "Задача", 1: "Описание", 2: "Назначено", 3: "Начало", 4: "Окончание", 5: "Запланировано"}
# Ширина колонок
columns_w = { 0: 50, 1: 50, 2: 10, 3: 10, 4: 10, 5: 15}
# Высота строк
row_h = 50
import sys
import os
local_dir = os.path.realpath(__file__).replace('\\', '/').rsplit('/', 1)[0]
backend_dir = local_dir + '/../..'
sys.path.append(backend_dir)
import xlsxwriter
from pycerebro import database, dbtypes
import html2text
import datetime
def do_export(db_user, db_password, task, file_name):
"""
Функция экспорта.
Параметры db_user и db_password - логин и пароль пользователя Cerebro.
task - тектовый локатор(путь) до задачи.
Формат локатора: '/Проект/Задача 1/Задача 2', то есть по сути путь до задачи.
Примечание: Имена задач регистрозависимы!
Пример вызова функции:
::
import excel_export
excel_export.do_export('user', 'password', '/Проект/Задача 1/Задача 2', 'c:/temp/export.xlsx')
::
"""
    # Connect to the database
    db = connect_db(db_user, db_password)
    if (db):
        # Create the Excel file
        wb = xlsxwriter.Workbook(file_name)
        if (wb):
            # Add a worksheet
            ws = wb.add_worksheet()
            if (ws):
                # Create a format for the header row
                format = wb.add_format()
                format.set_bold(True)  # bold font
                format.set_align('center_across')  # centered
                for col in columns:
                    # Set the column widths
                    ws.set_column(col, col, columns_w[col])
                    # Write the header cells
                    ws.write(0, col, columns[col], format)
                # Get the identifier of the task (project)
                task_id = db.task_by_url(task)[0]
                if (task_id):
                    write(db, task_id, ws, wb.add_format())
            wb.close()
            write_info('Export finished!')
    else:
        write_error('Can not connect to db')
_i = 0
def write(db, task_id, ws, format):
"""
Функция для записи свойств задачи и вложенных задач в файл Excel.
db - переменная для работы с базой данных.
task_id - идентификатор задачи.
ws - лист Excel.
format - переменная форматирования рабочей кники Excel.
"""
global _i
_i += 1
    # Create a format aligned to the top of the cell, with word wrap
    format_top_text_wrap = format
    format_top_text_wrap.set_align('top')
    format_top_text_wrap.set_text_wrap()
    # Set the row height
    ws.set_row(_i, row_h)
    # Get the task by its identifier
    task = db.task(task_id)
    # Get the task definition
    task_def = db.task_definition(task_id)
    # Get the full path to the task
    name = task[dbtypes.TASK_DATA_PARENT_URL] + task[dbtypes.TASK_DATA_NAME]
    # Write the full path to the task
    ws.write(_i, 0, name, format_top_text_wrap)
    # If the task has a definition, write it to the file
    if (task_def):
        ws.write(_i, 1, html2text.html2text(task_def[dbtypes.MESSAGE_DATA_TEXT]), format_top_text_wrap)
    # Get the list of users assigned to the task
    user_name = task[dbtypes.TASK_DATA_ALLOCATED]
    # If any users are assigned to the task, save them to the file
    if (user_name):
        ws.write(_i, 2, user_name, format_top_text_wrap)
    # Get the epoch that dates are counted from
    datetime_2000 = datetime.datetime(2000, 1, 1)
    # Get the task start date
    datetime_start = datetime_2000 + datetime.timedelta(task[dbtypes.TASK_DATA_OFFSET])
    # Save the start date to the file
    ws.write(_i, 3, datetime_start.strftime('%d.%m.%y %H:%M'), format_top_text_wrap)
    # Get the task end date
    datetime_stop = datetime_start + datetime.timedelta(task[dbtypes.TASK_DATA_DURATION])
    # Save the end date to the file
    ws.write(_i, 4, datetime_stop.strftime('%d.%m.%y %H:%M'), format_top_text_wrap)
    # Save the planned time
    ws.write(_i, 5, task[dbtypes.TASK_DATA_PLANNED], format_top_text_wrap)
    # If the task has nested tasks, save them to the file as well
for child in db.task_children(task_id):
write(db, child[dbtypes.TASK_DATA_ID], ws, format)
def connect_db(user, password):
"""
Функция для соединения с базой данных.
user - имя пользователя cerebro.
password - пароль пользователя cerebro.
"""
# Создаем объект базы данных
db = database.Database()
# Соединяемся с базой данных
db.connect(user, password)
return db
def write_info(text):
"""
Функция для логирования информационных сообщений.
text - текст сообщения.
"""
print('info: ' + text)
def write_error(text):
"""
Функция для логирования ошибок.
text - текст сообщения.
"""
print('error: ' + text)
| [
"[email protected]"
] | |
c478c0d6aed9b07eae4b4ea4776e7c073d3b4ace | e6a5fce33aad4fcba37842e135a51ba441b06f48 | /Python/Errors and Exceptions/Exceptions.py | 5f5278b64b8e82541685d00cec1a244dd307ddce | [
"MIT"
] | permissive | pavstar619/HackerRank | 6710ddd450b06fbb69da5abad9f570e5e26bbbc0 | 697ee46b6e621ad884a064047461d7707b1413cd | refs/heads/master | 2020-06-18T18:53:53.421685 | 2020-02-18T09:35:48 | 2020-02-18T09:35:48 | 196,408,726 | 0 | 0 | MIT | 2019-07-11T14:18:16 | 2019-07-11T14:18:16 | null | UTF-8 | Python | false | false | 438 | py | class Main:
def __init__(self):
self.t = int(input())
for i in range(0, self.t):
try:
self.n, self.m = map(int, input().split())
print(self.n // self.m)
except ZeroDivisionError as e:
print("Error Code:", e)
except ValueError as e:
print("Error Code:", e)
if __name__ == '__main__':
obj = Main()
| [
"[email protected]"
] | |
7c9e8b7bfead44bee572fa7070396b90066e9a6e | 746a9c1f65674cd5bcdce6dbd1971b6a16345f9d | /account/forms.py | e907a732e6e142794a14079dcb07a70bcd7fc718 | [] | no_license | mazulo/bookmarks | 4dc25dc09772663c65698d3cc9f5b653fd409ba9 | 5c2ce3c3ad811466c63f7b0f3a21bf33a6a28f5e | refs/heads/master | 2021-01-10T07:23:37.185414 | 2016-03-23T06:40:53 | 2016-03-23T05:40:53 | 54,158,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,002 | py | from django import forms
from django.contrib.auth.models import User
from .models import Profile
class LoginForm(forms.Form):
username = forms.CharField()
password = forms.CharField(widget=forms.PasswordInput)
class UserRegistrationForm(forms.ModelForm):
password = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(
label='Repeat password', widget=forms.PasswordInput
)
class Meta:
model = User
fields = ('username', 'first_name', 'email')
def clean_password2(self):
cd = self.cleaned_data
if cd['password'] != cd['password2']:
raise forms.ValidationError('Passwords don\'t match.')
return cd['password2']
class UserEditForm(forms.ModelForm):
class Meta:
model = User
fields = ('first_name', 'last_name', 'email')
class ProfileEditForm(forms.ModelForm):
class Meta:
model = Profile
fields = ('date_of_birth', 'photo')
| [
"[email protected]"
] | |
0ae53c0486a272162874ca46f41ecb302e6654f0 | a9d65a3fb6e990c5bb250cfde44220182f6cd430 | /indra/tools/analyze_ontology.py | 2f8bda343d488ebd0f277e9aedab4e858bfe2742 | [
"BSD-2-Clause",
"BSD-2-Clause-Views"
] | permissive | dianakolusheva/indra | 66e6c69b762922d4f79757e388b693f76b3fcd56 | 205a719c5b1ff2333e415476b4136e8c57c22949 | refs/heads/master | 2022-03-14T23:10:57.718762 | 2022-02-11T14:58:12 | 2022-02-11T14:58:12 | 170,338,649 | 0 | 0 | BSD-2-Clause | 2019-02-12T15:09:36 | 2019-02-12T15:09:30 | Python | UTF-8 | Python | false | false | 1,402 | py | from collections import Counter, defaultdict
import networkx
from indra.ontology.bio import bio_ontology
def plot_problem(problem):
import matplotlib.pyplot as plt
plt.ion()
plt.figure()
G = bio_ontology.subgraph(problem)
pos = networkx.spring_layout(G)
networkx.draw_networkx(G, pos, node_color='pink')
edge_labels = networkx.get_edge_attributes(G, 'source')
networkx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)
plt.show()
if __name__ == '__main__':
bio_ontology.initialize()
xrefs = [(e[0], e[1]) for e in bio_ontology.edges(data=True) if
e[2]['type'] == 'xref']
xrefg = bio_ontology.edge_subgraph(xrefs)
comps = networkx.algorithms.strongly_connected_components(xrefg)
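    # A strongly connected component of xref edges that contains two or more
    # nodes from the same namespace indicates conflicting cross-references.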
problems = []
for comp in comps:
namespaces = [bio_ontology.get_ns(node) for node in comp]
cnt = Counter(namespaces)
if any(v > 1 for k, v in cnt.items()):
problems.append(comp)
print('Found %d problems in total' % len(problems))
problems_by_ns = defaultdict(list)
for problem in problems:
nscnt = Counter([bio_ontology.get_ns(n) for n in problem])
namespaces = [ns for ns, cnt in nscnt.items() if cnt > 1]
for ns in namespaces:
problems_by_ns[ns].append(problem)
for ns, problems_ns in problems_by_ns.items():
print(ns, len(problems_ns)) | [
"[email protected]"
] | |
d89ebcab0f970fc4bf36d21e6205b3316c25af4a | a8079efec61894fb6082986e66c4c146757fc895 | /src/__main__.py | 03f618d7b50266b668d1b471f5e62afc4edc2244 | [] | no_license | gsdlab/ClaferSMT | aaa5bd0c0c72f6a9b156529a871cced40e006cba | d8240b4503107641d62f7f913ebe50a88182d9a3 | refs/heads/master | 2021-01-16T21:23:22.838308 | 2015-08-20T00:24:54 | 2015-08-20T00:24:54 | 9,037,961 | 2 | 1 | null | 2018-08-21T13:48:02 | 2013-03-26T19:00:12 | TeX | UTF-8 | Python | false | false | 127 | py | '''
Created on Jul 10, 2014
@author: ezulkosk
'''
from front import ClaferRun
if __name__ == '__main__':
ClaferRun.main() | [
"[email protected]"
] | |
2e92fd124092bb606c4af2b949bf986b8f0f39e0 | 33c6b5d9f1852ac878aada3eb646ac2eabc6b705 | /junk/users2/signals.py | 490388c9433b9bb4b0c0de06090ea7a91ecbb2b2 | [] | no_license | detalikota/trading2 | 3c0e4308f45a620fe4c034403143cf68dd16db9c | 1aaafb6bf7d304be1896efb5ea0963fc40893b23 | refs/heads/master | 2023-02-04T14:03:10.860410 | 2020-12-17T14:28:12 | 2020-12-17T14:28:12 | 313,858,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.dispatch import receiver
from .models import Profile2
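
# Note: these receivers only fire if this module is imported at startup --
# typically from the app's AppConfig.ready() hook. A minimal sketch (the
# config class name is an assumption):
#
#   # users2/apps.py
#   from django.apps import AppConfig
#
#   class Users2Config(AppConfig):
#       name = 'users2'
#
#       def ready(self):
#           import users2.signals  # noqa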
@receiver(post_save, sender=User) #when the user is saved - send this signal
def create_profile(sender, instance, created, **kwargs):
if created:
Profile2.objects.create(user=instance)
@receiver(post_save, sender=User) # save the profile
def save_profile(sender, instance, **kwargs):
instance.profile.save() | [
"[email protected]"
] | |
0b34b5732a18165fbcd70164b4c2648ea5eaeeb0 | 661b6c3d0d2344f86ed126d9b4f6f10c0d9c630b | /track/admin.py | 1d2bcc9fdd717c1e4f90db9346add4da60c66ec6 | [] | no_license | Sababa123/activity_tracker | 69eae58dbbf7523dcc144d3f05f6952cc4e4225b | 7c5e2c83e5fc76f8c9a2c5f58569ed92c9eb4421 | refs/heads/master | 2020-06-25T17:43:14.321638 | 2019-08-11T21:45:16 | 2019-08-11T21:45:16 | 199,380,581 | 0 | 0 | null | 2019-07-29T04:53:25 | 2019-07-29T04:53:24 | null | UTF-8 | Python | false | false | 175 | py | from django.contrib import admin
from .models import Activity, ActivityTracker
# Register your models here.
admin.site.register(Activity)
admin.site.register(ActivityTracker) | [
"[email protected]"
] | |
7b15c3a5d2060f2149607d1ee7ea040fb35c2eb7 | 913fb9ec1e709a5140676ba7b2371b1976afca72 | /seqPeaks/mirrorPeaks.py | db673bb1b55f332087c53b7b17d7dc5e614e6de3 | [] | no_license | cgreer/ResearchScripts | 171cfe9555ea06fdeb91084c12d07d1b45a2335c | 1107803bb1459d6b6e1dfb1a89679d2b6fd49062 | refs/heads/master | 2016-09-05T10:43:19.090247 | 2012-04-12T21:38:11 | 2012-04-12T21:38:11 | 1,673,080 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | import cgPeaks
import compareData as compare
import math
import bioLibCG as cg
knowns = compare.tccFileToList('mouseKnownMirs.tcc', 0)
eLevels = []
for known in knowns:
chrom, strand, start, end = cg.tccSplit(known, True) #text...
if strand == '1':
strand = '-1'
else:
strand = '1'
oppTcc = cg.makeTcc(chrom, strand, start, end)
knownStretch = cgPeaks.stretch(known)
knownStretch.createPeaks(1,20)
kPos = knownStretch.getHighestPeak()
if kPos: eLevels.append(knownStretch.profile[kPos])
oppStretch = cgPeaks.stretch(oppTcc)
oppStretch.createPeaks(1,20)
oPos = oppStretch.getHighestPeak()
if oPos and kPos:
#determine if they are close enough to be considered mirrored...
if math.fabs(int(kPos) - int(oPos)) < 12:
print known, oPos, kPos, oppStretch.profile[oPos], knownStretch.profile[kPos]
print eLevels
| [
"[email protected]"
] | |
113f2aeb9ba582a085e977d64df0240587c81645 | 5c5e7b03c3373e6217665842f542ca89491290ff | /2016/day18.py | b5a92f12f811a0ccb9b0c88bae32c9802f1ce21c | [] | no_license | incnone/AdventOfCode | 9c35214e338e176b6252e52a25a0141a01e290c8 | 29eac5d42403141fccef3c3ddbb986e01c89a593 | refs/heads/master | 2022-12-21T21:54:02.058024 | 2022-12-15T17:33:58 | 2022-12-15T17:33:58 | 229,338,789 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 970 | py | from getinput import get_input
def parse_input(s):
return [True if c == '^' else False for c in s]
def next_trap_row(s):
next_row = [s[1]]
next_row += [(s1 and not s2) or (not s1 and s2) for s1, s2 in zip(s, s[2:])]
next_row += [s[-2]]
return next_row
def generate_traps(init_row, num_rows):
traps = [init_row]
for _ in range(num_rows - 1):
traps.append(next_trap_row(traps[-1]))
return traps
def trap_str(traps):
return '\n'.join(''.join('^' if c else '.' for c in line) for line in traps)
def part_1(trap_row):
traps = generate_traps(trap_row, 40)
return sum(sum(1 for x in line if not x) for line in traps)
def part_2(trap_row):
traps = generate_traps(trap_row, 400000)
return sum(sum(1 for x in line if not x) for line in traps)
if __name__ == "__main__":
the_trap_list = parse_input(get_input(18))
print('Part 1:', part_1(the_trap_list))
print('Part 2:', part_2(the_trap_list))
| [
"[email protected]"
] | |
f1da8fb43bb78b4f502b576a1f67c671e6e1abed | 1a4bc1a11fdb3f714f22f5e0e826b47aa0569de2 | /lab/lab04/tests/q1_3.py | 79768f8de727ed291ba49dffc59b456e772584a8 | [] | no_license | taylorgibson/ma4110-fa21 | 201af7a044fd7d99140c68c48817306c18479610 | a306e1b6e7516def7de968781f6c8c21deebeaf5 | refs/heads/main | 2023-09-05T21:31:44.259079 | 2021-11-18T17:42:15 | 2021-11-18T17:42:15 | 395,439,687 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | test = { 'name': 'q1_3',
'points': None,
'suites': [{'cases': [{'code': ">>> disemvowel('datasceince')\n'dtscnc'", 'hidden': False, 'locked': False}], 'scored': True, 'setup': '', 'teardown': '', 'type': 'doctest'}]}
| [
"[email protected]"
] | |
7e2a67db930a6ea17d2bd8cc7a88bf10d46aef32 | 0fde82bf8a529c1045595b0f451dc7005a9d90cf | /generate/test.py | 23d7846f745c166c7e2e9d872e50a0103ee75942 | [
"MIT"
] | permissive | cosinekitty/astronomy | 18ccec1d62bbae24a8424b3eb807f4b3b97c04a1 | 056076b85aa591ba756ef8a24fc017ddb3b3029b | refs/heads/master | 2023-09-04T18:00:34.676849 | 2023-08-28T19:20:08 | 2023-08-28T19:20:08 | 180,022,816 | 305 | 56 | MIT | 2023-08-28T19:20:10 | 2019-04-07T20:55:20 | C | UTF-8 | Python | false | false | 159,205 | py | #!/usr/bin/env python3
import sys
import math
import re
import os
from itertools import chain
sys.path.append('../source/python')
import astronomy
#-----------------------------------------------------------------------------------------------------------
Verbose = False
SECONDS_PER_DAY = 86400.0
MINUTES_PER_DAY = 1440.0
def Debug(text):
if Verbose:
print(text)
def Pass(funcname):
print('PY {}: PASS'.format(funcname))
return 0
def Fail(funcname, reason):
print('PY {} FAIL: {}'.format(funcname, reason))
return 1
def v(x):
# Verify that a number is really numeric
if not isinstance(x, (int, float)):
raise Exception('Not a numeric type: {}'.format(x))
if not math.isfinite(x):
raise Exception('Not a finite numeric value: {}'.format(x))
return x
def vabs(x):
return abs(v(x))
def vmax(a, b):
return max(v(a), v(b))
def vmin(a, b):
return min(v(a), v(b))
def sqrt(x):
return v(math.sqrt(v(x)))
def AssertGoodTime(text, correct):
time = astronomy.Time.Parse(text)
check = str(time)
if check != correct:
print('Python AssertGoodTime FAILURE: parsed "{}", got "{}", expected "{}"'.format(text, check, correct))
sys.exit(1)
Debug('PY AssertGoodTime: "{}" OK'.format(text))
def AssertBadTime(text):
try:
astronomy.Time.Parse(text)
except astronomy.DateTimeFormatError:
Debug('PY AssertBadTime: "{}" OK'.format(text))
else:
print('PY AssertBadTime FAILURE: should not have parsed "{}"'.format(text))
sys.exit(1)
def CalendarCase(year, month, day, hour, minute, second):
# Convert to Astronomy Engine Time object.
time = astronomy.Time.Make(year, month, day, hour, minute, second)
# Convert to back calendar date tuple.
(cyear, cmonth, cday, chour, cminute, csecond) = time.Calendar()
if (cyear, cmonth, cday) != (year, month, day):
return Fail('CalendarCase', 'Expected {:06d}-{:02d}-{:02d} but found {:06d}-{:02d}-{:02d}'.format(
year, month, day,
cyear, cmonth, cday
))
expectedMillis = 1000.0*(second + 60.0*(minute + 60.0*hour))
calcMillis = 1000.0*(csecond + 60.0*(cminute + 60.0*chour))
diffMillis = vabs(calcMillis - expectedMillis)
if diffMillis > 4.0:
return Fail('CalendarCase', 'EXCESSIVE millisecond error = {:0.6f} for {:06d}-{:02d}-{:02d}'.format(
diffMillis, year, month, day
))
return 0
def AstroTime():
expected_ut = 6910.270978506945
expected_tt = 6910.271800214368
time = astronomy.Time.Make(2018, 12, 2, 18, 30, 12.543)
diff = time.ut - expected_ut
if vabs(diff) > 1.0e-12:
print('PY AstroTime: excessive UT error {}'.format(diff))
return 1
diff = time.tt - expected_tt
if vabs(diff) > 1.0e-12:
print('PY AstroTime: excessive TT error {}'.format(diff))
return 1
s = str(time.Utc())
if s != '2018-12-02 18:30:12.543000':
print('PY AstroTime: Utc() returned incorrect string "{}"'.format(s))
return 1
time = astronomy.Time.Make(2018, 12, 31, 23, 59, 59.9999)
expected = '2018-12-31T23:59:59.999Z'
s = str(time)
if s != expected:
print('PY AstroTime: expected {} but found {}'.format(expected, s))
return 1
print('PY Current time =', astronomy.Time.Now())
AssertGoodTime('2015-12-31T23:45Z', '2015-12-31T23:45:00.000Z')
AssertGoodTime('2015-01-02T23:45:17Z', '2015-01-02T23:45:17.000Z')
AssertGoodTime('1971-03-17T03:30:55.976Z', '1971-03-17T03:30:55.976Z')
AssertBadTime('')
AssertBadTime('1971-13-01')
AssertBadTime('1971-12-32')
AssertBadTime('1971-12-31T24:00:00Z')
AssertBadTime('1971-12-31T23:60:00Z')
AssertBadTime('1971-12-31T23:00:60Z')
AssertBadTime('1971-03-17T03:30:55.976')
# Extreme year values...
AssertGoodTime('-4172-12-02T14:30:45.123Z', '-004172-12-02T14:30:45.123Z')
AssertGoodTime('-4173-12-02T14:30:45.123Z', '-004173-12-02T14:30:45.123Z')
AssertGoodTime('-4174-12-02T14:30:45.123Z', '-004174-12-02T14:30:45.123Z')
AssertGoodTime('-4175-12-02T14:30:45.123Z', '-004175-12-02T14:30:45.123Z')
AssertGoodTime('-4176-12-02T14:30:45.123Z', '-004176-12-02T14:30:45.123Z')
AssertGoodTime('-2300-12-19T16:22:26.325Z', '-002300-12-19T16:22:26.325Z')
AssertGoodTime('-2300-12-19T16:22:26.325Z', '-002300-12-19T16:22:26.325Z')
AssertGoodTime('+12345-12-11T13:30:10.041Z', '+012345-12-11T13:30:10.040Z')
AssertGoodTime('+12346-12-11T13:30:10.041Z', '+012346-12-11T13:30:10.040Z')
AssertGoodTime('+12347-12-11T13:30:10.041Z', '+012347-12-11T13:30:10.040Z')
AssertGoodTime('+12348-12-11T13:30:10.041Z', '+012348-12-11T13:30:10.040Z')
AssertGoodTime('-123456-01-14T22:55:12.000Z', '-123456-01-14T22:55:11.999Z')
AssertGoodTime('+123456-01-14T22:55:12.000Z', '+123456-01-14T22:55:11.999Z')
AssertGoodTime('-999995-01-14T22:55:12.297Z', '-999995-01-14T22:55:12.297Z')
AssertGoodTime('-999996-01-14T22:55:12.297Z', '-999996-01-14T22:55:12.297Z')
AssertGoodTime('-999997-01-14T22:55:12.297Z', '-999997-01-14T22:55:12.297Z')
AssertGoodTime('-999998-01-14T22:55:12.297Z', '-999998-01-14T22:55:12.297Z')
AssertGoodTime('-999999-01-14T22:55:12.000Z', '-999999-01-14T22:55:11.998Z')
AssertGoodTime('+999999-01-14T22:55:12.000Z', '+999999-01-14T22:55:11.998Z')
nyears = 0
for year in chain(range(-999999, -995999), range(-3000, 3001), range(+996000, +1000000)):
# Check just before and after each potential leap day.
if CalendarCase(year, 2, 28, 14, 45, 28.321):
return 1
if CalendarCase(year, 3, 1, 14, 45, 28.321):
return 1
nyears += 1
return Pass('AstroTime({} calendar years)'.format(nyears))
#-----------------------------------------------------------------------------------------------------------
def GeoMoon():
time = astronomy.Time.Make(2019, 6, 24, 15, 45, 37)
vec = astronomy.GeoMoon(time)
print('PY GeoMoon: vec = {:0.16f}, {:0.16f}, {:0.16f}'.format(vec.x, vec.y, vec.z))
# Correct values obtained from C version of GeoMoon calculation
cx, cy, cz = +0.002674037026701135, -0.0001531610316600666, -0.0003150159927069429
dx, dy, dz = vec.x - cx, vec.y - cy, vec.z - cz
diff = sqrt(dx*dx + dy*dy + dz*dz)
print('PY GeoMoon: diff = {}'.format(diff))
if diff > 4.34e-19:
print('PY GeoMoon: EXCESSIVE ERROR')
return 1
return 0
#-----------------------------------------------------------------------------------------------------------
def SelectJupiterMoon(jm, mindex):
return [jm.io, jm.europa, jm.ganymede, jm.callisto][mindex]
def AstroCheck(printflag):
time = astronomy.Time.Make(1700, 1, 1, 0, 0, 0)
stop = astronomy.Time.Make(2200, 1, 1, 0, 0, 0)
observer = astronomy.Observer(29, -81, 10)
if printflag:
print('o {:0.6f} {:0.6f} {:0.6f}'.format(observer.latitude, observer.longitude, observer.height))
    dt = 10 + math.pi/100   # slightly irrational step so samples fall at varying times of day
bodylist = [
astronomy.Body.Sun, astronomy.Body.Moon, astronomy.Body.Mercury, astronomy.Body.Venus,
astronomy.Body.Earth, astronomy.Body.Mars, astronomy.Body.Jupiter, astronomy.Body.Saturn,
astronomy.Body.Uranus, astronomy.Body.Neptune, astronomy.Body.Pluto,
astronomy.Body.SSB, astronomy.Body.EMB
]
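    # Each printed line starts with a tag identifying the record type:
    # 'o' = observer, 'v' = heliocentric vector, 's' = topocentric sky coords,
    # 'j' = Jupiter moon state vector, 'n' = nutation angles,
    # 'm' = geocentric ecliptic Moon.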
while time.tt < stop.tt:
for body in bodylist:
name = body.name
if body != astronomy.Body.Moon:
pos = astronomy.HelioVector(body, time)
if printflag:
print('v {} {:0.18e} {:0.18e} {:0.18e} {:0.18e}'.format(name, pos.t.tt, pos.x, pos.y, pos.z))
if body != astronomy.Body.Earth and body != astronomy.Body.EMB and body != astronomy.Body.SSB:
j2000 = astronomy.Equator(body, time, observer, False, False)
ofdate = astronomy.Equator(body, time, observer, True, True)
hor = astronomy.Horizon(time, observer, ofdate.ra, ofdate.dec, astronomy.Refraction.Airless)
if printflag:
print('s {} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e}'.format(name, time.tt, time.ut, j2000.ra, j2000.dec, j2000.dist, hor.azimuth, hor.altitude))
pos = astronomy.GeoMoon(time)
if printflag:
print('v GM {:0.18e} {:0.18e} {:0.18e} {:0.18e}'.format(pos.t.tt, pos.x, pos.y, pos.z))
j2000 = astronomy.Equator(astronomy.Body.Moon, time, observer, False, False)
ofdate = astronomy.Equator(astronomy.Body.Moon, time, observer, True, True)
hor = astronomy.Horizon(time, observer, ofdate.ra, ofdate.dec, astronomy.Refraction.Airless)
if printflag:
print('s GM {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e}'.format(time.tt, time.ut, j2000.ra, j2000.dec, j2000.dist, hor.azimuth, hor.altitude))
jm = astronomy.JupiterMoons(time)
if printflag:
for mindex in range(4):
moon = SelectJupiterMoon(jm, mindex)
print('j {:d} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e} {:0.18e}'.format(mindex, time.tt, time.ut, moon.x, moon.y, moon.z, moon.vx, moon.vy, moon.vz))
if printflag:
# Nutation calculations
print('n {:0.18e} {:0.18e}'.format(time._et.dpsi, time._et.deps))
sphere = astronomy.EclipticGeoMoon(time)
if printflag:
print('m {:0.18f} {:0.18f} {:0.18f}'.format(sphere.lat, sphere.lon, sphere.dist))
time = time.AddDays(dt)
return 0
#-----------------------------------------------------------------------------------------------------------
def Seasons(filename = 'seasons/seasons.txt'):
with open(filename, 'rt') as infile:
lnum = 0
current_year = 0
mar_count = sep_count = jun_count = dec_count = 0
max_minutes = 0.0
for line in infile:
lnum += 1
line = line.strip()
m = re.match(r'^(\d+)-(\d+)-(\d+)T(\d+):(\d+)Z\s+([A-Za-z]+)$', line)
if not m:
print('PY Seasons: Invalid data on line {} of file {}'.format(lnum, filename))
return 1
year = int(m.group(1))
month = int(m.group(2))
day = int(m.group(3))
hour = int(m.group(4))
minute = int(m.group(5))
name = m.group(6)
if year != current_year:
current_year = year
seasons = astronomy.Seasons(year)
correct_time = astronomy.Time.Make(year, month, day, hour, minute, 0)
if name == 'Equinox':
if month == 3:
calc_time = seasons.mar_equinox
mar_count += 1
elif month == 9:
calc_time = seasons.sep_equinox
sep_count += 1
else:
print('PY Seasons: {} line {}: Invalid equinox date in test data'.format(filename, lnum))
return 1
elif name == 'Solstice':
if month == 6:
calc_time = seasons.jun_solstice
jun_count += 1
elif month == 12:
calc_time = seasons.dec_solstice
dec_count += 1
else:
print('PY Seasons: {} line {}: Invalid solstice date in test data'.format(filename, lnum))
return 1
elif name == 'Aphelion':
continue # not yet calculated
elif name == 'Perihelion':
continue # not yet calculated
else:
print('PY Seasons: {} line {}: unknown event type {}'.format(filename, lnum, name))
return 1
# Verify that the calculated time matches the correct time for this event.
diff_minutes = (24.0 * 60.0) * vabs(calc_time.tt - correct_time.tt)
if diff_minutes > max_minutes:
max_minutes = diff_minutes
if diff_minutes > 2.37:
print('PY Seasons: {} line {}: excessive error ({}): {} minutes.'.format(filename, lnum, name, diff_minutes))
return 1
print('PY Seasons: verified {} lines from file {} : max error minutes = {:0.3f}'.format(lnum, filename, max_minutes))
print('PY Seasons: Event counts: mar={}, jun={}, sep={}, dec={}'.format(mar_count, jun_count, sep_count, dec_count))
return 0
def SeasonsIssue187():
# This is a regression test for:
# https://github.com/cosinekitty/astronomy/issues/187
# For years far from the present, the seasons search was sometimes failing.
for year in range(1, 9999, 1):
try:
astronomy.Seasons(year)
except astronomy.InternalError:
print('PY SeasonsIssue187: FAIL - internal error for year {}'.format(year))
return 1
print('PY SeasonsIssue187: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
def MoonPhase(filename = 'moonphase/moonphases.txt'):
threshold_seconds = 90.0 # max tolerable prediction error in seconds
max_arcmin = 0.0
maxdiff = 0.0
quarter_count = 0
with open(filename, 'rt') as infile:
lnum = 0
prev_year = 0
for line in infile:
lnum += 1
line = line.strip()
m = re.match(r'^([0-3]) (\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+\.\d+)Z$', line)
if not m:
print('PY MoonPhase: invalid data format in {} line {}'.format(filename, lnum))
return 1
quarter = int(m.group(1))
year = int(m.group(2))
month = int(m.group(3))
day = int(m.group(4))
hour = int(m.group(5))
minute = int(m.group(6))
second = float(m.group(7))
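            # Quarter index 0..3 corresponds to an ecliptic phase angle of
            # 0, 90, 180, or 270 degrees (new, first quarter, full, third quarter).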
expected_elong = 90.0 * quarter
expected_time = astronomy.Time.Make(year, month, day, hour, minute, second)
angle = astronomy.MoonPhase(expected_time)
degree_error = vabs(angle - expected_elong)
if degree_error > 180.0:
degree_error = 360.0 - degree_error
arcmin = 60.0 * degree_error
if arcmin > 1.0:
print('PY MoonPhase({} line {}): EXCESSIVE ANGULAR ERROR: {} arcmin'.format(filename, lnum, arcmin))
return 1
max_arcmin = vmax(max_arcmin, arcmin)
if year != prev_year:
prev_year = year
# The test data contains a single year's worth of data for every 10 years.
# Every time we see the year value change, it breaks continuity of the phases.
# Start the search over again.
start_time = astronomy.Time.Make(year, 1, 1, 0, 0, 0.0)
mq = astronomy.SearchMoonQuarter(start_time)
else:
# Yet another lunar quarter in the same year.
expected_quarter = (1 + mq.quarter) % 4
mq = astronomy.NextMoonQuarter(mq)
# Expect the next consecutive quarter.
if expected_quarter != mq.quarter:
print('PY MoonPhase({} line {}): SearchMoonQuarter returned quarter {}, but expected {}.'.format(filename, lnum, mq.quarter, expected_quarter))
return 1
quarter_count += 1
# Make sure the time matches what we expect.
diff_seconds = vabs(mq.time.tt - expected_time.tt) * SECONDS_PER_DAY
if diff_seconds > threshold_seconds:
print('PY MoonPhase({} line {}): excessive time error {:0.3f} seconds.'.format(filename, lnum, diff_seconds))
return 1
maxdiff = vmax(maxdiff, diff_seconds)
print('PY MoonPhase: passed {} lines for file {} : max_arcmin = {:0.6f}, maxdiff = {:0.3f} seconds, {} quarters.'
.format(lnum, filename, max_arcmin, maxdiff, quarter_count))
return 0
#-----------------------------------------------------------------------------------------------------------
def TestElongFile(filename, targetRelLon):
with open(filename, 'rt') as infile:
lnum = 0
for line in infile:
lnum += 1
line = line.strip()
m = re.match(r'^(\d+)-(\d+)-(\d+)T(\d+):(\d+)Z ([A-Za-z]+)$', line)
if not m:
print('PY TestElongFile({} line {}): invalid data format'.format(filename, lnum))
return 1
year = int(m.group(1))
month = int(m.group(2))
day = int(m.group(3))
hour = int(m.group(4))
minute = int(m.group(5))
name = m.group(6)
body = astronomy.BodyCode(name)
if body.value == astronomy.Body.Invalid:
print('PY TestElongFile({} line {}): invalid body name "{}"'.format(filename, lnum, name))
return 1
search_time = astronomy.Time.Make(year, 1, 1, 0, 0, 0)
expected_time = astronomy.Time.Make(year, month, day, hour, minute, 0)
found_time = astronomy.SearchRelativeLongitude(body, targetRelLon, search_time)
if found_time is None:
print('PY TestElongFile({} line {}): SearchRelativeLongitude failed.'.format(filename, lnum))
return 1
diff_minutes = (24.0 * 60.0) * (found_time.tt - expected_time.tt)
Debug('PY TestElongFile: {:<7s} error = {:6.3} minutes'.format(name, diff_minutes))
if vabs(diff_minutes) > 6.8:
print('PY TestElongFile({} line {}): EXCESSIVE ERROR.'.format(filename, lnum))
return 1
print('PY TestElongFile: passed {} rows of data'.format(lnum))
return 0
def TestPlanetLongitudes(body, outFileName, zeroLonEventName):
startYear = 1700
stopYear = 2200
rlon = 0.0
sum_diff = 0.0
count = 0
name = body.name
with open(outFileName, 'wt') as outfile:
time = astronomy.Time.Make(startYear, 1, 1, 0, 0, 0)
stopTime = astronomy.Time.Make(stopYear, 1, 1, 0, 0, 0)
while time.tt < stopTime.tt:
count += 1
event = zeroLonEventName if rlon == 0.0 else 'sup'
found_time = astronomy.SearchRelativeLongitude(body, rlon, time)
if found_time is None:
                print('PY TestPlanetLongitudes({}): SearchRelativeLongitude failed'.format(name))
return 1
if count >= 2:
# Check for consistent intervals.
# Mainly I don't want to skip over an event!
day_diff = found_time.tt - time.tt
sum_diff += day_diff
if count == 2:
min_diff = max_diff = day_diff
else:
min_diff = vmin(min_diff, day_diff)
max_diff = vmax(max_diff, day_diff)
geo = astronomy.GeoVector(body, found_time, True)
dist = geo.Length()
outfile.write('e {} {} {:0.16f} {:0.16f}\n'.format(name, event, found_time.tt, dist))
            # Search for the opposite longitude event next time.
time = found_time
rlon = 180.0 - rlon
if body == astronomy.Body.Mercury:
thresh = 1.65
elif body == astronomy.Body.Mars:
thresh = 1.30
else:
thresh = 1.07
ratio = max_diff / min_diff
Debug('PY TestPlanetLongitudes({:<7s}): {:5d} events, ratio={:5.3f}, file: {}'.format(name, count, ratio, outFileName))
if ratio > thresh:
print('PY TestPlanetLongitudes({}): EXCESSIVE EVENT INTERVAL RATIO'.format(name))
return 1
return 0
ElongTestData = [
# Max elongation data obtained from:
# http://www.skycaramba.com/greatest_elongations.shtml
( astronomy.Body.Mercury, "2010-01-17T05:22Z", "2010-01-27T05:22Z", 24.80, 'morning' ),
( astronomy.Body.Mercury, "2010-05-16T02:15Z", "2010-05-26T02:15Z", 25.10, 'morning' ),
( astronomy.Body.Mercury, "2010-09-09T17:24Z", "2010-09-19T17:24Z", 17.90, 'morning' ),
( astronomy.Body.Mercury, "2010-12-30T14:33Z", "2011-01-09T14:33Z", 23.30, 'morning' ),
( astronomy.Body.Mercury, "2011-04-27T19:03Z", "2011-05-07T19:03Z", 26.60, 'morning' ),
( astronomy.Body.Mercury, "2011-08-24T05:52Z", "2011-09-03T05:52Z", 18.10, 'morning' ),
( astronomy.Body.Mercury, "2011-12-13T02:56Z", "2011-12-23T02:56Z", 21.80, 'morning' ),
( astronomy.Body.Mercury, "2012-04-08T17:22Z", "2012-04-18T17:22Z", 27.50, 'morning' ),
( astronomy.Body.Mercury, "2012-08-06T12:04Z", "2012-08-16T12:04Z", 18.70, 'morning' ),
( astronomy.Body.Mercury, "2012-11-24T22:55Z", "2012-12-04T22:55Z", 20.60, 'morning' ),
( astronomy.Body.Mercury, "2013-03-21T22:02Z", "2013-03-31T22:02Z", 27.80, 'morning' ),
( astronomy.Body.Mercury, "2013-07-20T08:51Z", "2013-07-30T08:51Z", 19.60, 'morning' ),
( astronomy.Body.Mercury, "2013-11-08T02:28Z", "2013-11-18T02:28Z", 19.50, 'morning' ),
( astronomy.Body.Mercury, "2014-03-04T06:38Z", "2014-03-14T06:38Z", 27.60, 'morning' ),
( astronomy.Body.Mercury, "2014-07-02T18:22Z", "2014-07-12T18:22Z", 20.90, 'morning' ),
( astronomy.Body.Mercury, "2014-10-22T12:36Z", "2014-11-01T12:36Z", 18.70, 'morning' ),
( astronomy.Body.Mercury, "2015-02-14T16:20Z", "2015-02-24T16:20Z", 26.70, 'morning' ),
( astronomy.Body.Mercury, "2015-06-14T17:10Z", "2015-06-24T17:10Z", 22.50, 'morning' ),
( astronomy.Body.Mercury, "2015-10-06T03:20Z", "2015-10-16T03:20Z", 18.10, 'morning' ),
( astronomy.Body.Mercury, "2016-01-28T01:22Z", "2016-02-07T01:22Z", 25.60, 'morning' ),
( astronomy.Body.Mercury, "2016-05-26T08:45Z", "2016-06-05T08:45Z", 24.20, 'morning' ),
( astronomy.Body.Mercury, "2016-09-18T19:27Z", "2016-09-28T19:27Z", 17.90, 'morning' ),
( astronomy.Body.Mercury, "2017-01-09T09:42Z", "2017-01-19T09:42Z", 24.10, 'morning' ),
( astronomy.Body.Mercury, "2017-05-07T23:19Z", "2017-05-17T23:19Z", 25.80, 'morning' ),
( astronomy.Body.Mercury, "2017-09-02T10:14Z", "2017-09-12T10:14Z", 17.90, 'morning' ),
( astronomy.Body.Mercury, "2017-12-22T19:48Z", "2018-01-01T19:48Z", 22.70, 'morning' ),
( astronomy.Body.Mercury, "2018-04-19T18:17Z", "2018-04-29T18:17Z", 27.00, 'morning' ),
( astronomy.Body.Mercury, "2018-08-16T20:35Z", "2018-08-26T20:35Z", 18.30, 'morning' ),
( astronomy.Body.Mercury, "2018-12-05T11:34Z", "2018-12-15T11:34Z", 21.30, 'morning' ),
( astronomy.Body.Mercury, "2019-04-01T19:40Z", "2019-04-11T19:40Z", 27.70, 'morning' ),
( astronomy.Body.Mercury, "2019-07-30T23:08Z", "2019-08-09T23:08Z", 19.00, 'morning' ),
( astronomy.Body.Mercury, "2019-11-18T10:31Z", "2019-11-28T10:31Z", 20.10, 'morning' ),
( astronomy.Body.Mercury, "2010-03-29T23:32Z", "2010-04-08T23:32Z", 19.40, 'evening' ),
( astronomy.Body.Mercury, "2010-07-28T01:03Z", "2010-08-07T01:03Z", 27.40, 'evening' ),
( astronomy.Body.Mercury, "2010-11-21T15:42Z", "2010-12-01T15:42Z", 21.50, 'evening' ),
( astronomy.Body.Mercury, "2011-03-13T01:07Z", "2011-03-23T01:07Z", 18.60, 'evening' ),
( astronomy.Body.Mercury, "2011-07-10T04:56Z", "2011-07-20T04:56Z", 26.80, 'evening' ),
( astronomy.Body.Mercury, "2011-11-04T08:40Z", "2011-11-14T08:40Z", 22.70, 'evening' ),
( astronomy.Body.Mercury, "2012-02-24T09:39Z", "2012-03-05T09:39Z", 18.20, 'evening' ),
( astronomy.Body.Mercury, "2012-06-21T02:00Z", "2012-07-01T02:00Z", 25.70, 'evening' ),
( astronomy.Body.Mercury, "2012-10-16T21:59Z", "2012-10-26T21:59Z", 24.10, 'evening' ),
( astronomy.Body.Mercury, "2013-02-06T21:24Z", "2013-02-16T21:24Z", 18.10, 'evening' ),
( astronomy.Body.Mercury, "2013-06-02T16:45Z", "2013-06-12T16:45Z", 24.30, 'evening' ),
( astronomy.Body.Mercury, "2013-09-29T09:59Z", "2013-10-09T09:59Z", 25.30, 'evening' ),
( astronomy.Body.Mercury, "2014-01-21T10:00Z", "2014-01-31T10:00Z", 18.40, 'evening' ),
( astronomy.Body.Mercury, "2014-05-15T07:06Z", "2014-05-25T07:06Z", 22.70, 'evening' ),
( astronomy.Body.Mercury, "2014-09-11T22:20Z", "2014-09-21T22:20Z", 26.40, 'evening' ),
( astronomy.Body.Mercury, "2015-01-04T20:26Z", "2015-01-14T20:26Z", 18.90, 'evening' ),
( astronomy.Body.Mercury, "2015-04-27T04:46Z", "2015-05-07T04:46Z", 21.20, 'evening' ),
( astronomy.Body.Mercury, "2015-08-25T10:20Z", "2015-09-04T10:20Z", 27.10, 'evening' ),
( astronomy.Body.Mercury, "2015-12-19T03:11Z", "2015-12-29T03:11Z", 19.70, 'evening' ),
( astronomy.Body.Mercury, "2016-04-08T14:00Z", "2016-04-18T14:00Z", 19.90, 'evening' ),
( astronomy.Body.Mercury, "2016-08-06T21:24Z", "2016-08-16T21:24Z", 27.40, 'evening' ),
( astronomy.Body.Mercury, "2016-12-01T04:36Z", "2016-12-11T04:36Z", 20.80, 'evening' ),
( astronomy.Body.Mercury, "2017-03-22T10:24Z", "2017-04-01T10:24Z", 19.00, 'evening' ),
( astronomy.Body.Mercury, "2017-07-20T04:34Z", "2017-07-30T04:34Z", 27.20, 'evening' ),
( astronomy.Body.Mercury, "2017-11-14T00:32Z", "2017-11-24T00:32Z", 22.00, 'evening' ),
( astronomy.Body.Mercury, "2018-03-05T15:07Z", "2018-03-15T15:07Z", 18.40, 'evening' ),
( astronomy.Body.Mercury, "2018-07-02T05:24Z", "2018-07-12T05:24Z", 26.40, 'evening' ),
( astronomy.Body.Mercury, "2018-10-27T15:25Z", "2018-11-06T15:25Z", 23.30, 'evening' ),
( astronomy.Body.Mercury, "2019-02-17T01:23Z", "2019-02-27T01:23Z", 18.10, 'evening' ),
( astronomy.Body.Mercury, "2019-06-13T23:14Z", "2019-06-23T23:14Z", 25.20, 'evening' ),
( astronomy.Body.Mercury, "2019-10-10T04:00Z", "2019-10-20T04:00Z", 24.60, 'evening' ),
( astronomy.Body.Venus, "2010-12-29T15:57Z", "2011-01-08T15:57Z", 47.00, 'morning' ),
( astronomy.Body.Venus, "2012-08-05T08:59Z", "2012-08-15T08:59Z", 45.80, 'morning' ),
( astronomy.Body.Venus, "2014-03-12T19:25Z", "2014-03-22T19:25Z", 46.60, 'morning' ),
( astronomy.Body.Venus, "2015-10-16T06:57Z", "2015-10-26T06:57Z", 46.40, 'morning' ),
( astronomy.Body.Venus, "2017-05-24T13:09Z", "2017-06-03T13:09Z", 45.90, 'morning' ),
( astronomy.Body.Venus, "2018-12-27T04:24Z", "2019-01-06T04:24Z", 47.00, 'morning' ),
( astronomy.Body.Venus, "2010-08-10T03:19Z", "2010-08-20T03:19Z", 46.00, 'evening' ),
( astronomy.Body.Venus, "2012-03-17T08:03Z", "2012-03-27T08:03Z", 46.00, 'evening' ),
( astronomy.Body.Venus, "2013-10-22T08:00Z", "2013-11-01T08:00Z", 47.10, 'evening' ),
( astronomy.Body.Venus, "2015-05-27T18:46Z", "2015-06-06T18:46Z", 45.40, 'evening' ),
( astronomy.Body.Venus, "2017-01-02T13:19Z", "2017-01-12T13:19Z", 47.10, 'evening' ),
( astronomy.Body.Venus, "2018-08-07T17:02Z", "2018-08-17T17:02Z", 45.90, 'evening' )
]
def TestMaxElong(body, searchText, eventText, angle, visibility):
name = body.name
searchTime = astronomy.Time.Parse(searchText)
eventTime = astronomy.Time.Parse(eventText)
evt = astronomy.SearchMaxElongation(body, searchTime)
if evt is None:
print('PY TestMaxElong({} {}): SearchMaxElongation failed.'.format(name, searchText))
return 1
if evt.visibility != visibility:
print('PY TestMaxElong({} {}): SearchMaxElongation returned visibility {}, but expected {}'.format(name, searchText, evt.visibility.name, visibility.name))
return 1
hour_diff = 24.0 * vabs(evt.time.tt - eventTime.tt)
arcmin_diff = 60.0 * vabs(evt.elongation - angle)
Debug('PY TestMaxElong: {:<7s} {:<7s} elong={:5.2f} ({:4.2f} arcmin, {:5.3f} hours)'.format(name, visibility.name, evt.elongation, arcmin_diff, hour_diff))
if hour_diff > 0.6:
print('PY TestMaxElong({} {}): EXCESSIVE HOUR ERROR.'.format(name, searchText))
return 1
if arcmin_diff > 3.4:
print('PY TestMaxElong({} {}): EXCESSIVE ARCMIN ERROR.'.format(name, searchText))
return 1
return 0
def SearchElongTest():
for (body, searchText, eventText, angle, visibility) in ElongTestData:
if 0 != TestMaxElong(body, searchText, eventText, angle, astronomy.Visibility[visibility.title()]):
return 1
return 0
def Elongation():
return (
TestElongFile('longitude/opposition_2018.txt', 0.0) or
TestPlanetLongitudes(astronomy.Body.Mercury, "temp/py_longitude_Mercury.txt", "inf") or
TestPlanetLongitudes(astronomy.Body.Venus, "temp/py_longitude_Venus.txt", "inf") or
TestPlanetLongitudes(astronomy.Body.Mars, "temp/py_longitude_Mars.txt", "opp") or
TestPlanetLongitudes(astronomy.Body.Jupiter, "temp/py_longitude_Jupiter.txt", "opp") or
TestPlanetLongitudes(astronomy.Body.Saturn, "temp/py_longitude_Saturn.txt", "opp") or
TestPlanetLongitudes(astronomy.Body.Uranus, "temp/py_longitude_Uranus.txt", "opp") or
TestPlanetLongitudes(astronomy.Body.Neptune, "temp/py_longitude_Neptune.txt", "opp") or
TestPlanetLongitudes(astronomy.Body.Pluto, "temp/py_longitude_Pluto.txt", "opp") or
SearchElongTest() or
Pass('Elongation')
)
#-----------------------------------------------------------------------------------------------------------
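# Helpers for parsing dates out of JPL Horizons ephemeris text output.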
def MonthNumber(mtext):
return 1 + ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'].index(mtext)
def ParseJplHorizonsDateTime(line):
m = re.match(r'^\s*(\d{4})-(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)-(\d{2})\s(\d{2}):(\d{2})\s+(.*)$', line)
if not m:
return None, None
year = int(m.group(1))
month = MonthNumber(m.group(2))
day = int(m.group(3))
hour = int(m.group(4))
minute = int(m.group(5))
rest = m.group(6)
time = astronomy.Time.Make(year, month, day, hour, minute, 0)
return time, rest
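# Compare magnitudes from astronomy.Illumination() against JPL Horizons
# reference data, tracking the min/max/rms differences over the whole file.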
def CheckMagnitudeData(body, filename):
limit = 0.012
sum_squared_diff = 0.0
with open(filename, 'rt') as infile:
count = lnum = 0
for line in infile:
lnum += 1
line = line.strip()
(time, rest) = ParseJplHorizonsDateTime(line)
if (time is not None) and (rest is not None) and not ('n.a.' in rest):
data = [float(t) for t in rest.split()]
if len(data) != 7:
print('PY CheckMagnitudeData({} line {}): invalid data format'.format(filename, lnum))
return 1
(mag, sbrt, dist, rdot, delta, deldot, phase_angle) = data
illum = astronomy.Illumination(body, time)
diff = illum.mag - mag
if vabs(diff) > limit:
print('PY CheckMagnitudeData({} line {}): EXCESSIVE ERROR: correct mag={}, calc mag={}'.format(filename, lnum, mag, illum.mag))
return 1
sum_squared_diff += diff * diff
if count == 0:
diff_lo = diff_hi = diff
else:
diff_lo = vmin(diff_lo, diff)
diff_hi = vmax(diff_hi, diff)
count += 1
if count == 0:
print('PY CheckMagnitudeData: Did not find any data in file: {}'.format(filename))
return 1
rms = sqrt(sum_squared_diff / count)
Debug('PY CheckMagnitudeData: {:<21s} {:5d} rows diff_lo={:0.4f} diff_hi={:0.4f} rms={:0.4f}'.format(filename, count, diff_lo, diff_hi, rms))
return 0
def CheckSaturn():
# JPL Horizons does not include Saturn's rings in its magnitude models.
# I still don't have authoritative test data for Saturn's magnitude.
# For now, I just test for consistency with Paul Schlyter's formulas at:
# http://www.stjarnhimlen.se/comp/ppcomp.html#15
data = [
( "1972-01-01T00:00Z", -0.31725492, +24.43386475 ),
( "1980-01-01T00:00Z", +0.85796177, -1.72627324 ),
( "2009-09-04T00:00Z", +1.01932560, +0.01834451 ),
( "2017-06-15T00:00Z", -0.12303373, -26.60068380 ),
( "2019-05-01T00:00Z", +0.33124502, -23.47173574 ),
( "2025-09-25T00:00Z", +0.50543708, +1.69118986 ),
( "2032-05-15T00:00Z", -0.04649573, +26.95238680 )
]
error = 0
for (dtext, mag, tilt) in data:
time = astronomy.Time.Parse(dtext)
illum = astronomy.Illumination(astronomy.Body.Saturn, time)
Debug('PY Saturn: date={} calc mag={:12.8f} ring_tilt={:12.8f}'.format(dtext, illum.mag, illum.ring_tilt))
mag_diff = vabs(illum.mag - mag)
if mag_diff > 1.0e-4:
print('PY CheckSaturn: Excessive magnitude error {}'.format(mag_diff))
error = 1
tilt_diff = vabs(illum.ring_tilt - tilt)
if (tilt_diff > 3.0e-5):
print('PY CheckSaturn: Excessive ring tilt error {}'.format(tilt_diff))
error = 1
return error
def TestMaxMag(body, filename):
# Example of input data:
#
# 2001-02-21T08:00Z 2001-02-27T08:00Z 23.17 19.53 -4.84
#
# JPL Horizons test data has limited floating point precision in the magnitude values.
# There is a pair of dates for the beginning and end of the max magnitude period,
# given the limited precision. We pick the point halfway between as the supposed max magnitude time.
with open(filename, 'rt') as infile:
lnum = 0
search_time = astronomy.Time.Make(2001, 1, 1, 0, 0, 0)
for line in infile:
lnum += 1
line = line.strip()
tokenlist = line.split()
if len(tokenlist) == 5:
time1 = astronomy.Time.Parse(tokenlist[0])
time2 = astronomy.Time.Parse(tokenlist[1])
if time1 and time2:
center_time = time1.AddDays(0.5*(time2.ut - time1.ut))
correct_mag = float(tokenlist[4])
illum = astronomy.SearchPeakMagnitude(body, search_time)
mag_diff = vabs(illum.mag - correct_mag)
hours_diff = 24.0 * vabs(illum.time.ut - center_time.ut)
Debug('PY TestMaxMag: mag_diff={:0.3f}, hours_diff={:0.3f}'.format(mag_diff, hours_diff))
if hours_diff > 7.1:
print('PY TestMaxMag({} line {}): EXCESSIVE TIME DIFFERENCE.'.format(filename, lnum))
return 1
if mag_diff > 0.005:
print('PY TestMaxMag({} line {}): EXCESSIVE MAGNITUDE DIFFERENCE.'.format(filename, lnum))
return 1
search_time = time2
Debug('PY TestMaxMag: processed {} lines from file {}'.format(lnum, filename))
return 0
def Magnitude():
nfailed = 0
nfailed += CheckMagnitudeData(astronomy.Body.Sun, 'magnitude/Sun.txt')
nfailed += CheckMagnitudeData(astronomy.Body.Moon, 'magnitude/Moon.txt')
nfailed += CheckMagnitudeData(astronomy.Body.Mercury, 'magnitude/Mercury.txt')
nfailed += CheckMagnitudeData(astronomy.Body.Venus, 'magnitude/Venus.txt')
nfailed += CheckMagnitudeData(astronomy.Body.Mars, 'magnitude/Mars.txt')
nfailed += CheckMagnitudeData(astronomy.Body.Jupiter, 'magnitude/Jupiter.txt')
nfailed += CheckSaturn()
nfailed += CheckMagnitudeData(astronomy.Body.Uranus, 'magnitude/Uranus.txt')
nfailed += CheckMagnitudeData(astronomy.Body.Neptune, 'magnitude/Neptune.txt')
nfailed += CheckMagnitudeData(astronomy.Body.Pluto, 'magnitude/Pluto.txt')
nfailed += TestMaxMag(astronomy.Body.Venus, 'magnitude/maxmag_Venus.txt')
if nfailed == 0:
print('PY Magnitude: PASS')
else:
print('PY Magnitude: failed {} test(s).'.format(nfailed))
return 1
return 0
#-----------------------------------------------------------------------------------------------------------
def ToggleDir(dir):
return astronomy.Direction(-dir.value)
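# Given two consecutive rise (or set) times ut1 and ut2, verify that a
# backward search from any time between them finds ut1, and a forward
# search finds ut2, for evenly spaced sample times in between.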
def RiseSetSlot(ut1, ut2, direction, observer):
maxDiff = 0.0
nslots = 100
for i in range(1, nslots):
ut = ut1 + (i / nslots)*(ut2 - ut1)
time = astronomy.Time(ut)
result = astronomy.SearchRiseSet(astronomy.Body.Sun, observer, direction, time, -1.0)
if not result:
print('PY RiseSetSlot: backward slot search failed for {} before {}'.format(direction, time))
return 1
diff = SECONDS_PER_DAY * vabs(result.ut - ut1)
maxDiff = max(maxDiff, diff)
result = astronomy.SearchRiseSet(astronomy.Body.Sun, observer, direction, time, +1.0)
if not result:
print('PY RiseSetSlot: forward slot search failed for {} after {}'.format(direction, time))
return 1
diff = SECONDS_PER_DAY * vabs(result.ut - ut2)
maxDiff = max(maxDiff, diff)
if maxDiff > 0.13:
print('PY RiseSetSlot: EXCESSIVE {} slot-test discrepancy = {:0.6f} seconds.'.format(direction, maxDiff))
return 1
Debug('PY RiseSetSlot: {} slot-test discrepancy = {:0.6f} seconds.'.format(direction, maxDiff))
return 0
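# Find thousands of alternating sunrise/sunset events in forward
# chronological order, then search for the same events in reverse, and
# confirm both passes agree to within a fraction of a second.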
def RiseSetReverse():
nsamples = 5000
nudge = 0.1
utList = []
observer = astronomy.Observer(30.5, -90.7, 0.0)
dtMin = +1000.0
dtMax = -1000.0
maxDiff = 0.0
# Find alternating sunrise/sunset events in forward chronological order.
dir = astronomy.Direction.Rise
time = astronomy.Time.Make(2022, 1, 1, 0, 0, 0)
for i in range(nsamples):
result = astronomy.SearchRiseSet(astronomy.Body.Sun, observer, dir, time, +1.0)
if not result:
print('PY RiseSetReverse: cannot find {} event after {}'.format(dir, time))
return 1
utList.append(result.ut)
if i > 0:
# Check the time between consecutive sunrise/sunset events.
# These will vary considerably with the seasons, so just make sure we don't miss any entirely.
dt = v(utList[i] - utList[i-1])
dtMin = min(dtMin, dt)
dtMax = max(dtMax, dt)
dir = ToggleDir(dir)
time = result.AddDays(+nudge)
Debug('PY RiseSetReverse: dtMin={:0.6f} days, dtMax={:0.6f} days.'.format(dtMin, dtMax))
if (dtMin < 0.411) or (dtMax > 0.589):
print('PY RiseSetReverse: Invalid intervals between sunrise/sunset.')
return 1
# Perform the same search in reverse. Verify we get consistent rise/set times.
for i in range(nsamples-1, -1, -1):
dir = ToggleDir(dir)
result = astronomy.SearchRiseSet(astronomy.Body.Sun, observer, dir, time, -1.0)
if not result:
            print('PY RiseSetReverse: cannot find {} event before {}.'.format(dir, time))
return 1
diff = SECONDS_PER_DAY * vabs(utList[i] - result.ut)
maxDiff = max(maxDiff, diff)
time = result.AddDays(-nudge)
if maxDiff > 0.1:
print('PY RiseSetReverse: EXCESSIVE forward/backward discrepancy = {:0.6f} seconds.'.format(maxDiff))
return 1
Debug('PY RiseSetReverse: forward/backward discrepancy = {:0.6f} seconds.'.format(maxDiff))
# All even indexes in utList hold sunrise times.
# All odd indexes in utList hold sunset times.
# Verify that forward/backward searches for consecutive sunrises/sunsets
# resolve correctly for 100 time slots between them.
k = (nsamples // 2) & ~1
return (
RiseSetSlot(utList[k], utList[k+2], astronomy.Direction.Rise, observer) or
RiseSetSlot(utList[k+1], utList[k+3], astronomy.Direction.Set, observer) or
Pass('RiseSetReverse')
)
#-----------------------------------------------------------------------------------------------------------
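# Walk a file of known rise/set times for various bodies and observers,
# alternating rise and set searches and verifying each event matches the
# expected direction and time.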
def RiseSet(filename = 'riseset/riseset.txt'):
sum_minutes = 0.0
max_minutes = 0.0
nudge_days = 0.01
observer = None
current_body = None
a_dir = 0
b_dir = 0
with open(filename, 'rt') as infile:
lnum = 0
for line in infile:
lnum += 1
line = line.strip()
# Moon 103 -61 1944-01-02T17:08Z s
# Moon 103 -61 1944-01-03T05:47Z r
m = re.match(r'^([A-Za-z]+)\s+(-?[0-9\.]+)\s+(-?[0-9\.]+)\s+(\d+)-(\d+)-(\d+)T(\d+):(\d+)Z\s+([sr])$', line)
if not m:
print('PY RiseSet({} line {}): invalid data format'.format(filename, lnum))
return 1
name = m.group(1)
longitude = float(m.group(2))
latitude = float(m.group(3))
year = int(m.group(4))
month = int(m.group(5))
day = int(m.group(6))
hour = int(m.group(7))
minute = int(m.group(8))
kind = m.group(9)
correct_time = astronomy.Time.Make(year, month, day, hour, minute, 0)
direction = astronomy.Direction.Rise if kind == 'r' else astronomy.Direction.Set
body = astronomy.BodyCode(name)
if body == astronomy.Body.Invalid:
print('PY RiseSet({} line {}): invalid body name "{}"'.format(filename, lnum, name))
return 1
# Every time we see a new geographic location, start a new iteration
# of finding all rise/set times for that UTC calendar year.
if (observer is None) or (observer.latitude != latitude) or (observer.longitude != longitude) or (current_body != body):
current_body = body
observer = astronomy.Observer(latitude, longitude, 0)
r_search_date = s_search_date = astronomy.Time.Make(year, 1, 1, 0, 0, 0)
b_evt = None
Debug('PY RiseSet: {:<7s} lat={:0.1f} lon={:0.1f}'.format(name, latitude, longitude))
if b_evt is not None:
# Recycle the second event from the previous iteration as the first event.
a_evt = b_evt
a_dir = b_dir
b_evt = None
else:
r_evt = astronomy.SearchRiseSet(body, observer, astronomy.Direction.Rise, r_search_date, 366.0)
if r_evt is None:
print('PY RiseSet({} line {}): rise search failed'.format(filename, lnum))
return 1
s_evt = astronomy.SearchRiseSet(body, observer, astronomy.Direction.Set, s_search_date, 366.0)
if s_evt is None:
print('PY RiseSet({} line {}): set search failed'.format(filename, lnum))
return 1
# Expect the current event to match the earlier of the found times.
if r_evt.tt < s_evt.tt:
a_evt = r_evt
b_evt = s_evt
a_dir = astronomy.Direction.Rise
b_dir = astronomy.Direction.Set
else:
a_evt = s_evt
b_evt = r_evt
a_dir = astronomy.Direction.Set
b_dir = astronomy.Direction.Rise
# Nudge the event times forward a tiny amount.
r_search_date = r_evt.AddDays(nudge_days)
s_search_date = s_evt.AddDays(nudge_days)
if a_dir != direction:
print('PY RiseSet({} line {}): expected dir={} but found {}'.format(filename, lnum, direction, a_dir))
return 1
error_minutes = (24.0 * 60.0) * vabs(a_evt.tt - correct_time.tt)
sum_minutes += error_minutes ** 2
max_minutes = vmax(max_minutes, error_minutes)
if error_minutes > 1.18:
print('PY RiseSet({} line {}): excessive prediction time error = {} minutes.'.format(filename, lnum, error_minutes))
print(' correct = {}, calculated = {}'.format(correct_time, a_evt))
return 1
rms_minutes = sqrt(sum_minutes / lnum)
print('PY RiseSet: passed {} lines: time errors in minutes: rms={:0.4f}, max={:0.4f}'.format(lnum, rms_minutes, max_minutes))
return 0
#-----------------------------------------------------------------------------------------------------------
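# Walk a file of known lunar perigee/apogee events, using SearchLunarApsis
# for the first event and NextLunarApsis for each subsequent one, checking
# both the predicted times and distances.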
def LunarApsis(filename = 'apsides/moon.txt'):
max_minutes = 0.0
max_km = 0.0
with open(filename, 'rt') as infile:
start_time = astronomy.Time.Make(2001, 1, 1, 0, 0, 0)
lnum = 0
for line in infile:
lnum += 1
if lnum == 1:
apsis = astronomy.SearchLunarApsis(start_time)
else:
apsis = astronomy.NextLunarApsis(apsis)
tokenlist = line.split()
if len(tokenlist) != 3:
print('PY LunarApsis({} line {}): invalid data format'.format(filename, lnum))
return 1
correct_time = astronomy.Time.Parse(tokenlist[1])
if not correct_time:
print('PY LunarApsis({} line {}): invalid time'.format(filename, lnum))
return 1
kind = astronomy.ApsisKind(int(tokenlist[0]))
if apsis.kind != kind:
print('PY LunarApsis({} line {}): Expected kind {} but found {}'.format(filename, lnum, kind, apsis.kind))
return 1
dist_km = float(tokenlist[2])
diff_minutes = (24.0 * 60.0) * vabs(apsis.time.ut - correct_time.ut)
diff_km = vabs(apsis.dist_km - dist_km)
if diff_minutes > 35.0:
print('PY LunarApsis({} line {}): Excessive time error = {} minutes.'.format(filename, lnum, diff_minutes))
return 1
if diff_km > 25.0:
print('PY LunarApsis({} line {}): Excessive distance error = {} km.'.format(filename, lnum, diff_km))
return 1
max_minutes = vmax(max_minutes, diff_minutes)
max_km = vmax(max_km, diff_km)
print('PY LunarApsis: found {} events, max time error = {:0.3f} minutes, max distance error = {:0.3f} km.'.format(lnum, max_minutes, max_km))
return 0
#-----------------------------------------------------------------------------------------------------------
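# Helpers that compare rotation matrices and vectors component by component,
# aborting the test run on any excessive difference.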
def CompareMatrices(caller, a, b, tolerance):
for i in range(3):
for j in range(3):
diff = vabs(a.rot[i][j] - b.rot[i][j])
if diff > tolerance:
print('PY CompareMatrices ERROR({}): matrix[{}][{}] = {}, expected {}, diff {}'.format(caller, i, j, a.rot[i][j], b.rot[i][j], diff))
sys.exit(1)
def CompareVectors(caller, a, b, tolerance):
diff = vabs(a.x - b.x)
if diff > tolerance:
print('PY CompareVectors ERROR({}): vector x = {}, expected {}, diff {}'.format(caller, a.x, b.x, diff))
sys.exit(1)
diff = vabs(a.y - b.y)
if diff > tolerance:
print('PY CompareVectors ERROR({}): vector y = {}, expected {}, diff {}'.format(caller, a.y, b.y, diff))
sys.exit(1)
diff = vabs(a.z - b.z)
if diff > tolerance:
print('PY CompareVectors ERROR({}): vector z = {}, expected {}, diff {}'.format(caller, a.z, b.z, diff))
sys.exit(1)
def Rotation_MatrixInverse():
a = astronomy.RotationMatrix([
[1, 4, 7],
[2, 5, 8],
[3, 6, 9]
])
v = astronomy.RotationMatrix([
[1, 2, 3],
[4, 5, 6],
[7, 8, 9]
])
b = astronomy.InverseRotation(a)
CompareMatrices('Rotation_MatrixInverse', b, v, 0)
def Rotation_MatrixMultiply():
a = astronomy.RotationMatrix([
[1, 4, 7],
[2, 5, 8],
[3, 6, 9]
])
b = astronomy.RotationMatrix([
[10, 13, 16],
[11, 14, 17],
[12, 15, 18]
])
v = astronomy.RotationMatrix([
[84, 201, 318],
[90, 216, 342],
[96, 231, 366]
])
c = astronomy.CombineRotation(b, a)
CompareMatrices('Rotation_MatrixMultiply', c, v, 0)
def VectorDiff(a, b):
dx = a.x - b.x
dy = a.y - b.y
dz = a.z - b.z
return sqrt(dx*dx + dy*dy + dz*dz)
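# Compare the EQJ-to-galactic rotation matrix against reference galactic
# coordinates computed by NOVAS, one (ra, dec, glon, glat) row per line.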
def Test_GAL_EQJ_NOVAS(filename):
THRESHOLD_SECONDS = 8.8
rot = astronomy.Rotation_EQJ_GAL()
time = astronomy.Time(0.0) # placeholder time - value does not matter
with open(filename, 'rt') as infile:
lnum = 0
max_diff = 0.0
for line in infile:
lnum += 1
token = line.split()
if len(token) != 4:
print('PY Test_GAL_EQJ_NOVAS({} line {}): Wrong number of tokens.'.format(filename, lnum))
sys.exit(1)
ra = float(token[0])
dec = float(token[1])
glon = float(token[2])
glat = float(token[3])
eqj_sphere = astronomy.Spherical(dec, 15.0*ra, 1.0)
eqj_vec = astronomy.VectorFromSphere(eqj_sphere, time)
gal_vec = astronomy.RotateVector(rot, eqj_vec)
gal_sphere = astronomy.SphereFromVector(gal_vec)
dlat = gal_sphere.lat - glat
dlon = math.cos(math.radians(glat)) * (gal_sphere.lon - glon)
diff = 3600.0 * math.hypot(dlon, dlat)
if diff > THRESHOLD_SECONDS:
print('PY Test_GAL_EQJ_NOVAS({} line {}): EXCESSIVE ERROR = {:0.3f}'.format(filename, lnum, diff))
sys.exit(1)
if diff > max_diff:
max_diff = diff
Debug('PY Test_GAL_EQJ_NOVAS: PASS. max_diff = {:0.3f} arcseconds.'.format(max_diff))
return 0
def Test_EQJ_EQD(body):
# Verify conversion of equatorial J2000 to equatorial of-date, and back.
# Use established functions to calculate spherical coordinates for the body, in both EQJ and EQD.
time = astronomy.Time.Make(2019, 12, 8, 20, 50, 0)
observer = astronomy.Observer(+35, -85, 0)
eq2000 = astronomy.Equator(body, time, observer, False, True)
eqdate = astronomy.Equator(body, time, observer, True, True)
# Convert EQJ spherical coordinates to vector.
v2000 = eq2000.vec
# Find rotation matrix.
r = astronomy.Rotation_EQJ_EQD(time)
# Rotate EQJ vector to EQD vector.
vdate = astronomy.RotateVector(r, v2000)
# Convert vector back to angular equatorial coordinates.
equcheck = astronomy.EquatorFromVector(vdate)
# Compare the result with the eqdate.
ra_diff = vabs(equcheck.ra - eqdate.ra)
dec_diff = vabs(equcheck.dec - eqdate.dec)
dist_diff = vabs(equcheck.dist - eqdate.dist)
Debug('PY Test_EQJ_EQD: {} ra={}, dec={}, dist={}, ra_diff={}, dec_diff={}, dist_diff={}'.format(
body.name, eqdate.ra, eqdate.dec, eqdate.dist, ra_diff, dec_diff, dist_diff
))
if ra_diff > 1.0e-14 or dec_diff > 1.0e-14 or dist_diff > 4.0e-15:
print('PY Test_EQJ_EQD: EXCESSIVE ERROR')
sys.exit(1)
# Perform the inverse conversion back to equatorial J2000 coordinates.
ir = astronomy.Rotation_EQD_EQJ(time)
t2000 = astronomy.RotateVector(ir, vdate)
diff = VectorDiff(t2000, v2000)
Debug('PY Test_EQJ_EQD: {} inverse diff = {}'.format(body.name, diff))
if diff > 5.0e-15:
print('PY Test_EQJ_EQD: EXCESSIVE INVERSE ERROR')
sys.exit(1)
def Test_EQD_HOR(body):
# Use existing functions to calculate horizontal coordinates of the body for the time+observer.
time = astronomy.Time.Make(1970, 12, 13, 5, 15, 0)
observer = astronomy.Observer(-37, +45, 0)
eqd = astronomy.Equator(body, time, observer, True, True)
Debug('PY Test_EQD_HOR {}: OFDATE ra={}, dec={}'.format(body.name, eqd.ra, eqd.dec))
hor = astronomy.Horizon(time, observer, eqd.ra, eqd.dec, astronomy.Refraction.Normal)
# Calculate the position of the body as an equatorial vector of date.
vec_eqd = eqd.vec
# Calculate rotation matrix to convert equatorial J2000 vector to horizontal vector.
rot = astronomy.Rotation_EQD_HOR(time, observer)
# Rotate the equator of date vector to a horizontal vector.
vec_hor = astronomy.RotateVector(rot, vec_eqd)
# Convert the horizontal vector to horizontal angular coordinates.
xsphere = astronomy.HorizonFromVector(vec_hor, astronomy.Refraction.Normal)
diff_alt = vabs(xsphere.lat - hor.altitude)
diff_az = vabs(xsphere.lon - hor.azimuth)
Debug('PY Test_EQD_HOR {}: trusted alt={}, az={}; test alt={}, az={}; diff_alt={}, diff_az={}'.format(
body.name, hor.altitude, hor.azimuth, xsphere.lat, xsphere.lon, diff_alt, diff_az))
if diff_alt > 4.0e-14 or diff_az > 1.2e-13:
print('PY Test_EQD_HOR: EXCESSIVE HORIZONTAL ERROR.')
sys.exit(1)
# Confirm that we can convert back to horizontal vector.
check_hor = astronomy.VectorFromHorizon(xsphere, time, astronomy.Refraction.Normal)
diff = VectorDiff(check_hor, vec_hor)
Debug('PY Test_EQD_HOR {}: horizontal recovery: diff = {}'.format(body.name, diff))
if diff > 3.0e-15:
print('PY Test_EQD_HOR: EXCESSIVE ERROR IN HORIZONTAL RECOVERY.')
sys.exit(1)
# Verify the inverse translation from horizontal vector to equatorial of-date vector.
irot = astronomy.Rotation_HOR_EQD(time, observer)
check_eqd = astronomy.RotateVector(irot, vec_hor)
diff = VectorDiff(check_eqd, vec_eqd)
Debug('PY Test_EQD_HOR {}: OFDATE inverse rotation diff = {}'.format(body.name, diff))
if diff > 2.7e-15:
print('PY Test_EQD_HOR: EXCESSIVE OFDATE INVERSE HORIZONTAL ERROR.')
sys.exit(1)
# Exercise HOR to EQJ translation.
eqj = astronomy.Equator(body, time, observer, False, True)
vec_eqj = eqj.vec
yrot = astronomy.Rotation_HOR_EQJ(time, observer)
check_eqj = astronomy.RotateVector(yrot, vec_hor)
diff = VectorDiff(check_eqj, vec_eqj)
Debug('PY Test_EQD_HOR {}: J2000 inverse rotation diff = {}'.format(body.name, diff))
if diff > 5.0e-15:
print('PY Test_EQD_HOR: EXCESSIVE J2000 INVERSE HORIZONTAL ERROR.')
sys.exit(1)
# Verify the inverse translation: EQJ to HOR.
zrot = astronomy.Rotation_EQJ_HOR(time, observer)
another_hor = astronomy.RotateVector(zrot, vec_eqj)
diff = VectorDiff(another_hor, vec_hor)
Debug('PY Test_EQD_HOR {}: EQJ inverse rotation diff = {}'.format(body.name, diff))
if diff > 6.0e-15:
print('PY Test_EQD_HOR: EXCESSIVE EQJ INVERSE HORIZONTAL ERROR.')
sys.exit(1)
IdentityMatrix = astronomy.RotationMatrix([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
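# Verify that two rotation matrices are inverses of each other:
# combining them must produce the identity matrix.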
def CheckInverse(aname, bname, arot, brot):
crot = astronomy.CombineRotation(arot, brot)
caller = 'CheckInverse({},{})'.format(aname, bname)
CompareMatrices(caller, crot, IdentityMatrix, 2.0e-15)
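# Verify a three-step rotation cycle: the combination of the first two
# rotations must equal the inverse of the third, so that applying all
# three in sequence returns to the starting orientation.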
def CheckCycle(cyclename, arot, brot, crot):
xrot = astronomy.CombineRotation(arot, brot)
irot = astronomy.InverseRotation(xrot)
CompareMatrices(cyclename, crot, irot, 2.0e-15)
def Test_RotRoundTrip():
# In each round trip, calculate a forward rotation and a backward rotation.
# Verify the two are inverse matrices.
time = astronomy.Time.Make(2067, 5, 30, 14, 45, 0)
observer = astronomy.Observer(+28, -82, 0)
# Round trip #1: EQJ <==> EQD.
eqj_eqd = astronomy.Rotation_EQJ_EQD(time)
eqd_eqj = astronomy.Rotation_EQD_EQJ(time)
CheckInverse('eqj_eqd', 'eqd_eqj', eqj_eqd, eqd_eqj)
# Round trip #2: EQJ <==> ECL.
eqj_ecl = astronomy.Rotation_EQJ_ECL()
ecl_eqj = astronomy.Rotation_ECL_EQJ()
CheckInverse('eqj_ecl', 'ecl_eqj', eqj_ecl, ecl_eqj)
# Round trip #3: EQJ <==> HOR.
eqj_hor = astronomy.Rotation_EQJ_HOR(time, observer)
hor_eqj = astronomy.Rotation_HOR_EQJ(time, observer)
CheckInverse('eqj_hor', 'hor_eqj', eqj_hor, hor_eqj)
# Round trip #4: EQD <==> HOR.
eqd_hor = astronomy.Rotation_EQD_HOR(time, observer)
hor_eqd = astronomy.Rotation_HOR_EQD(time, observer)
CheckInverse('eqd_hor', 'hor_eqd', eqd_hor, hor_eqd)
# Round trip #5: EQD <==> ECL.
eqd_ecl = astronomy.Rotation_EQD_ECL(time)
ecl_eqd = astronomy.Rotation_ECL_EQD(time)
CheckInverse('eqd_ecl', 'ecl_eqd', eqd_ecl, ecl_eqd)
# Round trip #6: HOR <==> ECL.
hor_ecl = astronomy.Rotation_HOR_ECL(time, observer)
ecl_hor = astronomy.Rotation_ECL_HOR(time, observer)
CheckInverse('hor_ecl', 'ecl_hor', hor_ecl, ecl_hor)
# Round trip #7: EQD <==> ECT
eqd_ect = astronomy.Rotation_EQD_ECT(time)
ect_eqd = astronomy.Rotation_ECT_EQD(time)
CheckInverse('eqd_ect', 'ect_eqd', eqd_ect, ect_eqd)
# Round trip #8: EQJ <==> ECT
eqj_ect = astronomy.Rotation_EQJ_ECT(time)
ect_eqj = astronomy.Rotation_ECT_EQJ(time)
CheckInverse('eqj_ect', 'ect_eqj', eqj_ect, ect_eqj)
# Verify that combining different sequences of rotations result
# in the expected combination.
# For example, (EQJ ==> HOR ==> ECL) must be the same matrix as (EQJ ==> ECL).
CheckCycle('eqj_ecl, ecl_eqd, eqd_eqj', eqj_ecl, ecl_eqd, eqd_eqj)
CheckCycle('eqj_hor, hor_ecl, ecl_eqj', eqj_hor, hor_ecl, ecl_eqj)
CheckCycle('eqj_hor, hor_eqd, eqd_eqj', eqj_hor, hor_eqd, eqd_eqj)
CheckCycle('ecl_eqd, eqd_hor, hor_ecl', ecl_eqd, eqd_hor, hor_ecl)
CheckCycle('eqj_eqd, eqd_ect, ect_eqj', eqj_eqd, eqd_ect, ect_eqj)
Debug('PY Test_RotRoundTrip: PASS')
def Rotation_Pivot():
tolerance = 1.0e-15
# Start with an identity matrix.
ident = astronomy.IdentityMatrix()
# Pivot 90 degrees counterclockwise around the z-axis.
r = astronomy.Pivot(ident, 2, +90.0)
# Put the expected answer in 'a'.
a = astronomy.RotationMatrix([
[ 0, +1, 0],
[-1, 0, 0],
[ 0, 0, +1],
])
# Compare actual 'r' with expected 'a'.
CompareMatrices('Rotation_Pivot #1', r, a, tolerance)
# Pivot again, -30 degrees around the x-axis.
r = astronomy.Pivot(r, 0, -30.0)
# Pivot a third time, 180 degrees around the y-axis.
r = astronomy.Pivot(r, 1, +180.0)
# Use the 'r' matrix to rotate a vector.
v1 = astronomy.Vector(1, 2, 3, astronomy.Time(0))
v2 = astronomy.RotateVector(r, v1)
# Initialize the expected vector 've'.
ve = astronomy.Vector(+2.0, +2.3660254037844390, -2.0980762113533156, v1.t)
CompareVectors('Rotation_Pivot #2', v2, ve, tolerance)
Debug('PY Rotation_Pivot: PASS')
def Test_EQD_ECT():
time = astronomy.Time.Make(1900, 1, 1, 0, 0, 0.0)
stopTime = astronomy.Time.Make(2100, 1, 1, 0, 0, 0.0)
count = 0
max_diff = 0.0
while time.ut <= stopTime.ut:
# Get Moon's geocentric position in EQJ.
eqj = astronomy.GeoMoon(time)
# Convert EQJ to EQD.
eqj_eqd = astronomy.Rotation_EQJ_EQD(time)
eqd = astronomy.RotateVector(eqj_eqd, eqj)
# Convert EQD to ECT.
eqd_ect = astronomy.Rotation_EQD_ECT(time)
ect = astronomy.RotateVector(eqd_ect, eqd)
# Independently get the Moon's spherical coordinates in ECT.
sphere = astronomy.EclipticGeoMoon(time)
# Convert spherical coordinates to ECT vector.
check_ect = astronomy.VectorFromSphere(sphere, time)
# Verify the two ECT vectors are identical, within tolerance.
max_diff = max(max_diff, VectorDiff(ect, check_ect))
time = time.AddDays(10.0)
count += 1
if max_diff > 3.743e-18:
print('PY Test_EQD_ECT: excessive vector diff = {:0.6e} au.'.format(max_diff))
sys.exit(1)
Debug('PY Test_EQD_ECT: PASS: count = {}, max_diff = {:0.6e} au.'.format(count, max_diff))
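# Verify that astronomy.Ecliptic() output is self-consistent (angles match
# the vector) and agrees with EclipticGeoMoon() for the Moon over two centuries.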
def Ecliptic():
time = astronomy.Time.Make(1900, 1, 1, 0, 0, 0.0)
stopTime = astronomy.Time.Make(2100, 1, 1, 0, 0, 0.0)
count = 0
    max_vec_diff = 0.0
max_angle_diff = 0.0
while time.ut <= stopTime.ut:
# Get Moon's geocentric position in EQJ.
eqj = astronomy.GeoMoon(time)
# Convert EQJ to ECT.
eclip = astronomy.Ecliptic(eqj)
# Confirm that the ecliptic angles and ecliptic vector are consistent.
check_sphere = astronomy.Spherical(eclip.elat, eclip.elon, eclip.vec.Length())
check_vec = astronomy.VectorFromSphere(check_sphere, time)
max_angle_diff = max(max_angle_diff, VectorDiff(eclip.vec, check_vec))
# Independently get the Moon's spherical coordinates in ECT.
sphere = astronomy.EclipticGeoMoon(time)
# Convert spherical coordinates to ECT vector.
check_ect = astronomy.VectorFromSphere(sphere, time)
# Verify the two ECT vectors are identical, within tolerance.
max_vec_diff = max(max_vec_diff, VectorDiff(eclip.vec, check_ect))
time = time.AddDays(10.0)
count += 1
if max_vec_diff > 3.388e-18:
return Fail('Ecliptic', 'EXCESSIVE VECTOR DIFF = {:0.6e} au.'.format(max_vec_diff))
if max_angle_diff > 3.007e-18:
return Fail('Ecliptic', 'EXCESSIVE ANGLE DIFF = {:0.6e} au.'.format(max_angle_diff))
print('PY Ecliptic: PASS: count = {:d}, max_vec_diff = {:0.6e} au, max_angle_diff = {:0.6e} au.'.format(count, max_vec_diff, max_angle_diff))
return 0
def Rotation():
Rotation_MatrixInverse()
Rotation_MatrixMultiply()
Rotation_Pivot()
Test_GAL_EQJ_NOVAS('temp/galeqj.txt')
Test_EQJ_EQD(astronomy.Body.Mercury)
Test_EQJ_EQD(astronomy.Body.Venus)
Test_EQJ_EQD(astronomy.Body.Mars)
Test_EQJ_EQD(astronomy.Body.Jupiter)
Test_EQJ_EQD(astronomy.Body.Saturn)
Test_EQD_HOR(astronomy.Body.Mercury)
Test_EQD_HOR(astronomy.Body.Venus)
Test_EQD_HOR(astronomy.Body.Mars)
Test_EQD_HOR(astronomy.Body.Jupiter)
Test_EQD_HOR(astronomy.Body.Saturn)
Test_EQD_ECT()
Test_RotRoundTrip()
print('PY Rotation: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
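# Round-trip check across the whole altitude range: applying refraction
# and then inverse refraction must recover the original altitude.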
def Refraction():
alt = -90.1
while alt <= +90.1:
refr = astronomy.RefractionAngle(astronomy.Refraction.Normal, alt)
corrected = alt + refr
inv_refr = astronomy.InverseRefractionAngle(astronomy.Refraction.Normal, corrected)
check_alt = corrected + inv_refr
diff = vabs(check_alt - alt)
if diff > 2.0e-14:
print('PY Refraction: ERROR - excessive error: alt={}, refr={}, diff={}'.format(alt, refr, diff))
return 1
alt += 0.001
print('PY Refraction: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
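# For each planet Mercury through Pluto, walk a file of known
# perihelion/aphelion events, checking the apsis kind, the time error
# (expressed as a fraction of the orbital period), and the distance.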
def PlanetApsis():
start_time = astronomy.Time.Make(1700, 1, 1, 0, 0, 0)
body = astronomy.Body.Mercury
while body.value <= astronomy.Body.Pluto.value:
count = 1
period = astronomy.PlanetOrbitalPeriod(body)
filename = os.path.join('apsides', 'apsis_{}.txt'.format(body.value))
min_interval = -1.0
max_diff_days = 0.0
max_dist_ratio = 0.0
apsis = astronomy.SearchPlanetApsis(body, start_time)
with open(filename, 'rt') as infile:
for line in infile:
token = line.split()
if len(token) != 3:
print('PY PlanetApsis({} line {}): Invalid data format: {} tokens'.format(filename, count, len(token)))
return 1
expected_kind = astronomy.ApsisKind(int(token[0]))
expected_time = astronomy.Time.Parse(token[1])
expected_distance = float(token[2])
if apsis.kind != expected_kind:
print('PY PlanetApsis({} line {}): WRONG APSIS KIND: expected {}, found {}'.format(filename, count, expected_kind, apsis.kind))
return 1
diff_days = vabs(expected_time.tt - apsis.time.tt)
max_diff_days = vmax(max_diff_days, diff_days)
diff_degrees = (diff_days / period) * 360
degree_threshold = 0.1
if diff_degrees > degree_threshold:
print('PY PlanetApsis: FAIL - {} exceeded angular threshold ({} vs {} degrees)'.format(body.name, diff_degrees, degree_threshold))
return 1
diff_dist_ratio = vabs(expected_distance - apsis.dist_au) / expected_distance
max_dist_ratio = vmax(max_dist_ratio, diff_dist_ratio)
if diff_dist_ratio > 1.05e-4:
print('PY PlanetApsis({} line {}): distance ratio {} is too large.'.format(filename, count, diff_dist_ratio))
return 1
# Calculate the next apsis.
prev_time = apsis.time
apsis = astronomy.NextPlanetApsis(body, apsis)
count += 1
interval = apsis.time.tt - prev_time.tt
if min_interval < 0.0:
min_interval = max_interval = interval
else:
min_interval = vmin(min_interval, interval)
max_interval = vmax(max_interval, interval)
if count < 2:
print('PY PlanetApsis: FAILED to find apsides for {}'.format(body))
return 1
Debug('PY PlanetApsis: {:4d} apsides for {:<9s} -- intervals: min={:0.2f}, max={:0.2f}, ratio={:0.6f}; max day={:0.3f}, degrees={:0.3f}, dist ratio={:0.6f}'.format(
count,
body.name,
min_interval, max_interval, max_interval / min_interval,
max_diff_days,
(max_diff_days / period) * 360.0,
max_dist_ratio
))
body = astronomy.Body(body.value + 1)
print('PY PlanetApsis: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
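# Verify that Constellation() returns the expected constellation symbol
# for each star listed in the test input file.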
def Constellation():
inFileName = 'constellation/test_input.txt'
lnum = 0
failcount = 0
with open(inFileName, 'rt') as infile:
for line in infile:
lnum += 1
m = re.match(r'^\s*(\d+)\s+(\S+)\s+(\S+)\s+([A-Z][a-zA-Z]{2})\s*$', line)
if not m:
print('PY Constellation: invalid line {} in file {}'.format(lnum, inFileName))
return 1
id = int(m.group(1))
ra = float(m.group(2))
dec = float(m.group(3))
symbol = m.group(4)
constel = astronomy.Constellation(ra, dec)
if constel.symbol != symbol:
print('Star {:6d}: expected {}, found {} at B1875 RA={:10.6f}, DEC={:10.6f}'.format(id, symbol, constel.symbol, constel.ra1875, constel.dec1875))
failcount += 1
if failcount > 0:
print('PY Constellation: {} failures'.format(failcount))
return 1
print('PY Constellation: PASS (verified {})'.format(lnum))
return 0
#-----------------------------------------------------------------------------------------------------------
def LunarEclipseIssue78():
# https://github.com/cosinekitty/astronomy/issues/78
eclipse = astronomy.SearchLunarEclipse(astronomy.Time.Make(2020, 12, 19, 0, 0, 0))
expected_peak = astronomy.Time.Make(2021, 5, 26, 11, 18, 42) # https://www.timeanddate.com/eclipse/lunar/2021-may-26
dt = (expected_peak.tt - eclipse.peak.tt) * SECONDS_PER_DAY
if vabs(dt) > 40.0:
print('LunarEclipseIssue78: Excessive prediction error = {} seconds.'.format(dt))
return 1
if eclipse.kind != astronomy.EclipseKind.Total:
print('Expected total eclipse; found: {}'.format(eclipse.kind))
return 1
print('PY LunarEclipseIssue78: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
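# Walk a file of known lunar eclipse peak times and semidurations,
# advancing through eclipses with NextLunarEclipse and validating each
# prediction against the expected values.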
def LunarEclipse():
filename = 'eclipse/lunar_eclipse.txt'
with open(filename, 'rt') as infile:
eclipse = astronomy.SearchLunarEclipse(astronomy.Time.Make(1701, 1, 1, 0, 0, 0))
lnum = 0
skip_count = 0
diff_count = 0
sum_diff_minutes = 0.0
max_diff_minutes = 0.0
diff_limit = 2.0
for line in infile:
lnum += 1
# Make sure numeric data are finite numbers.
v(eclipse.obscuration)
v(eclipse.sd_partial)
v(eclipse.sd_penum)
v(eclipse.sd_total)
if len(line) < 17:
print('PY LunarEclipse({} line {}): line is too short.'.format(filename, lnum))
return 1
time_text = line[0:17]
peak_time = astronomy.Time.Parse(time_text)
token = line[17:].split()
if len(token) != 2:
print('PY LunarEclipse({} line {}): wrong number of tokens.'.format(filename, lnum))
return 1
partial_minutes = float(token[0])
total_minutes = float(token[1])
sd_valid = False
frac_valid = False
# Verify that the calculated eclipse semi-durations are consistent with the kind.
# Verify that obscurations also make sense for the kind.
if eclipse.kind == astronomy.EclipseKind.Penumbral:
sd_valid = (eclipse.sd_penum > 0.0) and (eclipse.sd_partial == 0.0) and (eclipse.sd_total == 0.0)
frac_valid = (eclipse.obscuration == 0.0)
elif eclipse.kind == astronomy.EclipseKind.Partial:
sd_valid = (eclipse.sd_penum > 0.0) and (eclipse.sd_partial > 0.0) and (eclipse.sd_total == 0.0)
frac_valid = (0.0 < eclipse.obscuration < 1.0)
elif eclipse.kind == astronomy.EclipseKind.Total:
sd_valid = (eclipse.sd_penum > 0.0) and (eclipse.sd_partial > 0.0) and (eclipse.sd_total > 0.0)
frac_valid = (eclipse.obscuration == 1.0)
else:
print('PY LunarEclipse({} line {}): invalid eclipse kind {}.'.format(filename, lnum, eclipse.kind))
return 1
if not sd_valid:
print('PY LunarEclipse({} line {}): invalid semidurations.'.format(filename, lnum))
return 1
if not frac_valid:
                print('PY LunarEclipse({} line {}): invalid obscuration {:0.8f} for eclipse kind {}.'.format(filename, lnum, eclipse.obscuration, eclipse.kind))
                return 1
# Check eclipse peak time.
diff_days = eclipse.peak.ut - peak_time.ut
# Tolerate missing penumbral eclipses - skip to next input line without calculating next eclipse.
if partial_minutes == 0.0 and diff_days > 20.0:
skip_count += 1
continue
diff_minutes = (24.0 * 60.0) * vabs(diff_days)
sum_diff_minutes += diff_minutes
diff_count += 1
if diff_minutes > diff_limit:
print("PY LunarEclipse expected center: {}".format(peak_time))
print("PY LunarEclipse found center: {}".format(eclipse.peak))
print("PY LunarEclipse({} line {}): EXCESSIVE center time error = {} minutes ({} days).".format(filename, lnum, diff_minutes, diff_days))
return 1
if diff_minutes > max_diff_minutes:
max_diff_minutes = diff_minutes
# check partial eclipse duration
diff_minutes = vabs(partial_minutes - eclipse.sd_partial)
sum_diff_minutes += diff_minutes
diff_count += 1
if diff_minutes > diff_limit:
print("PY LunarEclipse({} line {}): EXCESSIVE partial eclipse semiduration error: {} minutes".format(filename, lnum, diff_minutes))
return 1
if diff_minutes > max_diff_minutes:
max_diff_minutes = diff_minutes
# check total eclipse duration
diff_minutes = vabs(total_minutes - eclipse.sd_total)
sum_diff_minutes += diff_minutes
diff_count += 1
if diff_minutes > diff_limit:
print("PY LunarEclipse({} line {}): EXCESSIVE total eclipse semiduration error: {} minutes".format(filename, lnum, diff_minutes))
return 1
if diff_minutes > max_diff_minutes:
max_diff_minutes = diff_minutes
# calculate for next iteration
eclipse = astronomy.NextLunarEclipse(eclipse.peak)
print("PY LunarEclipse: PASS (verified {}, skipped {}, max_diff_minutes = {}, avg_diff_minutes = {})".format(lnum, skip_count, max_diff_minutes, (sum_diff_minutes / diff_count)))
return 0
#-----------------------------------------------------------------------------------------------------------
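# Convert a (lat, lon) pair in degrees to a unit vector, so AngleDiff can
# measure the great-circle angle between two geographic positions.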
def VectorFromAngles(lat, lon):
rlat = math.radians(v(lat))
rlon = math.radians(v(lon))
coslat = math.cos(rlat)
return [
math.cos(rlon) * coslat,
math.sin(rlon) * coslat,
math.sin(rlat)
]
def AngleDiff(alat, alon, blat, blon):
a = VectorFromAngles(alat, alon)
b = VectorFromAngles(blat, blon)
dot = a[0]*b[0] + a[1]*b[1] + a[2]*b[2]
if dot <= -1.0:
return 180.0
if dot >= +1.0:
return 0.0
return v(math.degrees(math.acos(dot)))
def KindFromChar(typeChar):
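    # Map the one-letter eclipse type codes used in the test data.
    # 'H' (hybrid) maps to Total, since a hybrid eclipse is total near its peak.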
return {
'P': astronomy.EclipseKind.Partial,
'A': astronomy.EclipseKind.Annular,
'T': astronomy.EclipseKind.Total,
'H': astronomy.EclipseKind.Total,
}[typeChar]
def GlobalSolarEclipse():
expected_count = 1180
max_minutes = 0.0
max_angle = 0.0
skip_count = 0
eclipse = astronomy.SearchGlobalSolarEclipse(astronomy.Time.Make(1701, 1, 1, 0, 0, 0))
filename = 'eclipse/solar_eclipse.txt'
with open(filename, 'rt') as infile:
lnum = 0
for line in infile:
lnum += 1
# 1889-12-22T12:54:15Z -6 T -12.7 -12.8
token = line.split()
if len(token) != 5:
print('PY GlobalSolarEclipse({} line {}): invalid token count = {}'.format(filename, lnum, len(token)))
return 1
peak = astronomy.Time.Parse(token[0])
expected_kind = KindFromChar(token[2])
lat = float(token[3])
lon = float(token[4])
diff_days = eclipse.peak.tt - peak.tt
# Sometimes we find marginal eclipses that aren't listed in the test data.
# Ignore them if the distance between the Sun/Moon shadow axis and the Earth's center is large.
while diff_days < -25.0 and eclipse.distance > 9000.0:
skip_count += 1
eclipse = astronomy.NextGlobalSolarEclipse(eclipse.peak)
diff_days = eclipse.peak.ut - peak.ut
# Validate the eclipse prediction.
diff_minutes = (24 * 60) * vabs(diff_days)
if diff_minutes > 7.56:
print('PY GlobalSolarEclipse({} line {}): EXCESSIVE TIME ERROR = {} minutes'.format(filename, lnum, diff_minutes))
return 1
if diff_minutes > max_minutes:
max_minutes = diff_minutes
# Validate the eclipse kind, but only when it is not a "glancing" eclipse.
if (eclipse.distance < 6360) and (eclipse.kind != expected_kind):
print('PY GlobalSolarEclipse({} line {}): WRONG ECLIPSE KIND: expected {}, found {}'.format(filename, lnum, expected_kind, eclipse.kind))
return 1
if eclipse.kind == astronomy.EclipseKind.Total or eclipse.kind == astronomy.EclipseKind.Annular:
# When the distance between the Moon's shadow ray and the Earth's center is beyond 6100 km,
# it creates a glancing blow whose geographic coordinates are excessively sensitive to
# slight changes in the ray. Therefore, it is unreasonable to count large errors there.
if eclipse.distance < 6100.0:
diff_angle = AngleDiff(lat, lon, eclipse.latitude, eclipse.longitude)
if diff_angle > 0.247:
print('PY GlobalSolarEclipse({} line {}): EXCESSIVE GEOGRAPHIC LOCATION ERROR = {} degrees'.format(filename, lnum, diff_angle))
return 1
if diff_angle > max_angle:
max_angle = diff_angle
# Verify the obscuration value is consistent with the eclipse kind.
if eclipse.kind == astronomy.EclipseKind.Partial:
if eclipse.obscuration is not None:
print('PY GlobalSolarEclipse({} line {}): Expected obscuration = None for partial eclipse, but found {}'.format(filename, lnum, eclipse.obscuration))
return 1
elif eclipse.kind == astronomy.EclipseKind.Annular:
if not (0.8 < v(eclipse.obscuration) < 1.0):
print('PY GlobalSolarEclipse({} line {}): Invalid obscuration = {:0.8f} for annular eclipse.'.format(filename, lnum, eclipse.obscuration))
return 1
elif eclipse.kind == astronomy.EclipseKind.Total:
if v(eclipse.obscuration) != 1.0:
print('PY GlobalSolarEclipse({} line {}): Invalid obscuration = {:0.8f} for total eclipse.'.format(filename, lnum, eclipse.obscuration))
return 1
else:
print('PY GlobalSolarEclipse({} line {}): Unhandled eclipse kind {}'.format(filename, lnum, eclipse.kind))
return 1
eclipse = astronomy.NextGlobalSolarEclipse(eclipse.peak)
if lnum != expected_count:
print('PY GlobalSolarEclipse: WRONG LINE COUNT = {}, expected {}'.format(lnum, expected_count))
return 1
if skip_count > 2:
print('PY GlobalSolarEclipse: EXCESSIVE SKIP COUNT = {}'.format(skip_count))
return 1
print('PY GlobalSolarEclipse: PASS ({} verified, {} skipped, max minutes = {}, max angle = {})'.format(lnum, skip_count, max_minutes, max_angle))
return 0
#-----------------------------------------------------------------------------------------------------------
def LocalSolarEclipse1():
expected_count = 1180
max_minutes = 0.0
skip_count = 0
filename = 'eclipse/solar_eclipse.txt'
with open(filename, 'rt') as infile:
lnum = 0
for line in infile:
lnum += 1
funcname = 'LocalSolarEclipse({} line {})'.format(filename, lnum)
# 1889-12-22T12:54:15Z -6 T -12.7 -12.8
token = line.split()
if len(token) != 5:
return Fail(funcname, 'invalid token count = {}'.format(len(token)))
peak = astronomy.Time.Parse(token[0])
#typeChar = token[2]
lat = float(token[3])
lon = float(token[4])
observer = astronomy.Observer(lat, lon, 0.0)
# Start the search 20 days before we know the eclipse should peak.
search_start = peak.AddDays(-20)
eclipse = astronomy.SearchLocalSolarEclipse(search_start, observer)
# Validate the predicted eclipse peak time.
diff_days = eclipse.peak.time.tt - peak.tt
if diff_days > 20:
skip_count += 1
continue
diff_minutes = (24 * 60) * vabs(diff_days)
if diff_minutes > 7.737:
return Fail(funcname, 'EXCESSIVE TIME ERROR = {} minutes'.format(diff_minutes))
if diff_minutes > max_minutes:
max_minutes = diff_minutes
# Verify obscuration makes sense for this kind of eclipse.
v(eclipse.obscuration)
if eclipse.kind in [astronomy.EclipseKind.Annular, astronomy.EclipseKind.Partial]:
frac_valid = (0.0 < eclipse.obscuration < 1.0)
elif eclipse.kind == astronomy.EclipseKind.Total:
frac_valid = (eclipse.obscuration == 1.0)
else:
return Fail(funcname, 'Invalid eclipse kind {}'.format(eclipse.kind))
if not frac_valid:
return Fail(funcname, 'Invalid eclipse obscuration {:0.8f} for {} eclipse.'.format(eclipse.obscuration, eclipse.kind))
funcname = 'LocalSolarEclipse1({})'.format(filename)
if lnum != expected_count:
return Fail(funcname, 'WRONG LINE COUNT = {}, expected {}'.format(lnum, expected_count))
if skip_count > 6:
return Fail(funcname, 'EXCESSIVE SKIP COUNT = {}'.format(skip_count))
print('PY LocalSolarEclipse1: PASS ({} verified, {} skipped, max minutes = {})'.format(lnum, skip_count, max_minutes))
return 0
def TrimLine(line):
# Treat '#' as a comment character.
poundIndex = line.find('#')
if poundIndex >= 0:
line = line[:poundIndex]
return line.strip()
def ParseEvent(time_str, alt_str, required):
if required:
time = astronomy.Time.Parse(time_str)
altitude = float(alt_str)
return astronomy.EclipseEvent(time, altitude)
if time_str != '-':
raise Exception('Expected event time to be "-" but found "{}"'.format(time_str))
return None
def LocalSolarEclipse2():
# Test ability to calculate local solar eclipse conditions away from
# the peak position on the Earth.
filename = 'eclipse/local_solar_eclipse.txt'
lnum = 0
verify_count = 0
max_minutes = 0.0
max_degrees = 0.0
def CheckEvent(calc, expect):
nonlocal max_minutes, max_degrees
diff_minutes = (24 * 60) * vabs(expect.time.ut - calc.time.ut)
if diff_minutes > max_minutes:
max_minutes = diff_minutes
if diff_minutes > 1.0:
raise Exception('CheckEvent({} line {}): EXCESSIVE TIME ERROR: {} minutes.'.format(filename, lnum, diff_minutes))
# Ignore discrepancies for negative altitudes, because of quirky and irrelevant differences in refraction models.
if expect.altitude >= 0.0:
diff_alt = vabs(expect.altitude - calc.altitude)
if diff_alt > max_degrees:
max_degrees = diff_alt
if diff_alt > 0.5:
raise Exception('CheckEvent({} line {}): EXCESSIVE ALTITUDE ERROR: {} degrees.'.format(filename, lnum, diff_alt))
with open(filename, 'rt') as infile:
for line in infile:
lnum += 1
line = TrimLine(line)
if line == '':
continue
token = line.split()
if len(token) != 13:
print('PY LocalSolarEclipse2({} line {}): Incorrect token count = {}'.format(filename, lnum, len(token)))
return 1
latitude = float(token[0])
longitude = float(token[1])
observer = astronomy.Observer(latitude, longitude, 0)
expected_kind = KindFromChar(token[2])
is_umbral = (expected_kind != astronomy.EclipseKind.Partial)
p1 = ParseEvent(token[3], token[4], True)
t1 = ParseEvent(token[5], token[6], is_umbral)
peak = ParseEvent(token[7], token[8], True)
t2 = ParseEvent(token[9], token[10], is_umbral)
p2 = ParseEvent(token[11], token[12], True)
search_time = p1.time.AddDays(-20)
eclipse = astronomy.SearchLocalSolarEclipse(search_time, observer)
if eclipse.kind != expected_kind:
print('PY LocalSolarEclipse2({} line {}): expected eclipse kind "{}" but found "{}".'.format(
filename, lnum, expected_kind, eclipse.kind
))
return 1
CheckEvent(eclipse.peak, peak)
CheckEvent(eclipse.partial_begin, p1)
CheckEvent(eclipse.partial_end, p2)
if is_umbral:
CheckEvent(eclipse.total_begin, t1)
CheckEvent(eclipse.total_end, t2)
verify_count += 1
print('PY LocalSolarEclipse2: PASS ({} verified, max_minutes = {}, max_degrees = {})'.format(verify_count, max_minutes, max_degrees))
return 0
def LocalSolarEclipse():
return (
LocalSolarEclipse1() or
LocalSolarEclipse2()
)
#-----------------------------------------------------------------------------------------------------------
def GlobalAnnularCase(year, month, day, obscuration):
# Search for the first solar eclipse that occurs after the given date.
time = astronomy.Time.Make(year, month, day, 0, 0, 0.0)
eclipse = astronomy.SearchGlobalSolarEclipse(time)
funcname = 'GlobalAnnularCase({:04d}-{:02d}-{:02d})'.format(year, month, day)
# Verify the eclipse is within 1 day after the search basis time.
dt = v(eclipse.peak.ut - time.ut)
if not (0.0 <= dt <= 1.0):
return Fail(funcname, 'found eclipse {:0.4f} days after search time.'.format(dt))
# Verify we found an annular solar eclipse.
if eclipse.kind != astronomy.EclipseKind.Annular:
return Fail(funcname, 'expected annular eclipse but found {}'.format(eclipse.kind))
# Check how accurately we calculated obscuration.
diff = v(eclipse.obscuration - obscuration)
if abs(diff) > 0.0000904:
return Fail(funcname, 'excessive obscuration error = {:0.8f}, expected = {:0.8f}, actual = {:0.8f}'.format(diff, obscuration, eclipse.obscuration))
Debug('{}: obscuration error = {:11.8f}'.format(funcname, diff))
return 0
def LocalSolarCase(year, month, day, latitude, longitude, kind, obscuration, tolerance):
funcname = 'LocalSolarCase({:04d}-{:02d}-{:02d})'.format(year, month, day)
time = astronomy.Time.Make(year, month, day, 0, 0, 0.0)
observer = astronomy.Observer(latitude, longitude, 0.0)
eclipse = astronomy.SearchLocalSolarEclipse(time, observer)
dt = v(eclipse.peak.time.ut - time.ut)
if not (0.0 <= dt <= 1.0):
return Fail(funcname, 'eclipse found {:0.4f} days after search date'.format(dt))
if eclipse.kind != kind:
return Fail(funcname, 'expected {} eclipse, but found {}.'.format(kind, eclipse.kind))
diff = v(eclipse.obscuration - obscuration)
if abs(diff) > tolerance:
return Fail(funcname, 'obscuration diff = {:0.8f}, expected = {:0.8f}, actual = {:0.8f}'.format(diff, obscuration, eclipse.obscuration))
Debug('{}: obscuration diff = {:11.8f}'.format(funcname, diff))
return 0
def SolarFraction():
return (
# Verify global solar eclipse obscurations for annular eclipses only.
# This is because they are the only nontrivial values for global solar eclipses.
# The trivial values are all validated exactly by GlobalSolarEclipseTest().
GlobalAnnularCase(2023, 10, 14, 0.90638) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2023Oct14Aprime.html
GlobalAnnularCase(2024, 10, 2, 0.86975) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2024Oct02Aprime.html
GlobalAnnularCase(2027, 2, 6, 0.86139) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2027Feb06Aprime.html
GlobalAnnularCase(2028, 1, 26, 0.84787) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2028Jan26Aprime.html
GlobalAnnularCase(2030, 6, 1, 0.89163) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2030Jun01Aprime.html
# Verify obscuration values for specific locations on the Earth.
# Local solar eclipse calculations include obscuration for all types of eclipse, not just annular and total.
LocalSolarCase(2023, 10, 14, 11.3683, -83.1017, astronomy.EclipseKind.Annular, 0.90638, 0.000080) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2023Oct14Aprime.html
LocalSolarCase(2023, 10, 14, 25.78, -80.22, astronomy.EclipseKind.Partial, 0.578, 0.000023) or # https://aa.usno.navy.mil/calculated/eclipse/solar?eclipse=22023&lat=25.78&lon=-80.22&label=Miami%2C+FL&height=0&submit=Get+Data
LocalSolarCase(2023, 10, 14, 30.2666, -97.7000, astronomy.EclipseKind.Partial, 0.8867, 0.001016) or # http://astro.ukho.gov.uk/eclipse/0332023/Austin_TX_United_States_2023Oct14.png
LocalSolarCase(2024, 4, 8, 25.2900, -104.1383, astronomy.EclipseKind.Total, 1.0, 0.0 ) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2024Apr08Tprime.html
LocalSolarCase(2024, 4, 8, 37.76, -122.44, astronomy.EclipseKind.Partial, 0.340, 0.000604) or # https://aa.usno.navy.mil/calculated/eclipse/solar?eclipse=12024&lat=37.76&lon=-122.44&label=San+Francisco%2C+CA&height=0&submit=Get+Data
LocalSolarCase(2024, 10, 2, -21.9533, -114.5083, astronomy.EclipseKind.Annular, 0.86975, 0.000061) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2024Oct02Aprime.html
LocalSolarCase(2024, 10, 2, -33.468, -70.636, astronomy.EclipseKind.Partial, 0.436, 0.000980) or # https://aa.usno.navy.mil/calculated/eclipse/solar?eclipse=22024&lat=-33.468&lon=-70.636&label=Santiago%2C+Chile&height=0&submit=Get+Data
LocalSolarCase(2030, 6, 1, 56.525, 80.0617, astronomy.EclipseKind.Annular, 0.89163, 0.000067) or # https://www.eclipsewise.com/solar/SEprime/2001-2100/SE2030Jun01Aprime.html
LocalSolarCase(2030, 6, 1, 40.388, 49.914, astronomy.EclipseKind.Partial, 0.67240, 0.000599) or # http://xjubier.free.fr/en/site_pages/SolarEclipseCalc_Diagram.html
LocalSolarCase(2030, 6, 1, 40.3667, 49.8333, astronomy.EclipseKind.Partial, 0.6736, 0.001464) or # http://astro.ukho.gov.uk/eclipse/0132030/Baku_Azerbaijan_2030Jun01.png
Pass('SolarFraction')
)
#-----------------------------------------------------------------------------------------------------------
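# TransitFile verifies Mercury/Venus transit predictions against reference files.
# Per the sample line shown inside the loop below, each record holds: the start
# time (hh:mm), the peak time (a full ISO 8601 timestamp), the finish time
# (hh:mm), and the angular separation between the planet and the Sun's center
# at peak, in arcminutes.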
def TransitFile(body, filename, limit_minutes, limit_sep):
lnum = 0
max_minutes = 0
max_sep = 0
with open(filename, 'rt') as infile:
transit = astronomy.SearchTransit(body, astronomy.Time.Make(1600, 1, 1, 0, 0, 0))
for line in infile:
lnum += 1
token = line.strip().split()
# 22:17 1881-11-08T00:57Z 03:38 3.8633
if len(token) != 4:
print('PY TransitFile({} line {}): bad data format.'.format(filename, lnum))
return 1
textp = token[1]
text1 = textp[0:11] + token[0] + 'Z'
text2 = textp[0:11] + token[2] + 'Z'
timep = astronomy.Time.Parse(textp)
time1 = astronomy.Time.Parse(text1)
time2 = astronomy.Time.Parse(text2)
separation = float(token[3])
# If the start time is after the peak time, it really starts on the previous day.
if time1.ut > timep.ut:
time1 = time1.AddDays(-1.0)
            # If the finish time is before the peak time, it really ends on the following day.

if time2.ut < timep.ut:
time2 = time2.AddDays(+1.0)
diff_start = (24.0 * 60.0) * vabs(time1.ut - transit.start.ut )
diff_peak = (24.0 * 60.0) * vabs(timep.ut - transit.peak.ut )
diff_finish = (24.0 * 60.0) * vabs(time2.ut - transit.finish.ut)
diff_sep = vabs(separation - transit.separation)
max_minutes = vmax(max_minutes, diff_start)
max_minutes = vmax(max_minutes, diff_peak)
max_minutes = vmax(max_minutes, diff_finish)
if max_minutes > limit_minutes:
print('PY TransitFile({} line {}): EXCESSIVE TIME ERROR = {} minutes.'.format(filename, lnum, max_minutes))
return 1
max_sep = vmax(max_sep, diff_sep)
if max_sep > limit_sep:
print('PY TransitFile({} line {}): EXCESSIVE SEPARATION ERROR = {} arcminutes.'.format(filename, lnum, max_sep))
return 1
transit = astronomy.NextTransit(body, transit.finish)
print('PY TransitFile({}): PASS - verified {}, max minutes = {}, max sep arcmin = {}'.format(filename, lnum, max_minutes, max_sep))
return 0
def Transit():
if 0 != TransitFile(astronomy.Body.Mercury, 'eclipse/mercury.txt', 10.710, 0.2121):
return 1
if 0 != TransitFile(astronomy.Body.Venus, 'eclipse/venus.txt', 9.109, 0.6772):
return 1
return 0
#-----------------------------------------------------------------------------------------------------------
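# PlutoCheckDate compares HelioVector's Pluto position against a reference
# position (x, y, z) in AU. The angular error uses a small-angle approximation:
# dividing the positional difference by the distance (minus roughly 1 AU, to
# approximate the distance as seen from the Earth rather than the Sun) gives
# radians, and the factor 180*60/pi converts radians to arcminutes.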
def PlutoCheckDate(ut, arcmin_tolerance, x, y, z):
time = astronomy.Time(ut)
try:
timeText = str(time)
except OverflowError:
timeText = "???"
Debug('PY PlutoCheck: {} = {} UT = {} TT'.format(timeText, time.ut, time.tt))
vector = astronomy.HelioVector(astronomy.Body.Pluto, time)
dx = v(vector.x - x)
dy = v(vector.y - y)
dz = v(vector.z - z)
diff = sqrt(dx*dx + dy*dy + dz*dz)
dist = sqrt(x*x + y*y + z*z) - 1.0
arcmin = (diff / dist) * (180.0 * 60.0 / math.pi)
Debug('PY PlutoCheck: calc pos = [{}, {}, {}]'.format(vector.x, vector.y, vector.z))
Debug('PY PlutoCheck: ref pos = [{}, {}, {}]'.format(x, y, z))
Debug('PY PlutoCheck: del pos = [{}, {}, {}]'.format(vector.x - x, vector.y - y, vector.z - z))
Debug('PY PlutoCheck: diff = {} AU, {} arcmin'.format(diff, arcmin))
if v(arcmin) > arcmin_tolerance:
print('PY PlutoCheck: EXCESSIVE ERROR')
return 1
Debug('')
return 0
def PlutoCheck():
if PlutoCheckDate( +18250.0, 0.089, +37.4377303523676090, -10.2466292454075898, -14.4773101310875809): return 1
if PlutoCheckDate( -856493.0, 4.067, +23.4292113199166252, +42.1452685817740829, +6.0580908436642940): return 1
if PlutoCheckDate( +435633.0, 0.016, -27.3178902095231813, +18.5887022581070305, +14.0493896259306936): return 1
if PlutoCheckDate( 0.0, 8e-9, -9.8753673425269000, -27.9789270580402771, -5.7537127596369588): return 1
if PlutoCheckDate( +800916.0, 2.286, -29.5266052645301365, +12.0554287322176474, +12.6878484911631091): return 1
print("PY PlutoCheck: PASS")
return 0
#-----------------------------------------------------------------------------------------------------------
def GeoidTestCase(time, observer, ofdate):
topo_moon = astronomy.Equator(astronomy.Body.Moon, time, observer, ofdate, False)
surface = astronomy.ObserverVector(time, observer, ofdate)
geo_moon = astronomy.GeoVector(astronomy.Body.Moon, time, False)
if ofdate:
# GeoVector() returns J2000 coordinates. Convert to equator-of-date coordinates.
rot = astronomy.Rotation_EQJ_EQD(time)
geo_moon = astronomy.RotateVector(rot, geo_moon)
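    # The topocentric Moon vector should equal the geocentric Moon vector
    # minus the observer's geocentric position; any residual is position error.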
dx = astronomy.KM_PER_AU * v((geo_moon.x - surface.x) - topo_moon.vec.x)
dy = astronomy.KM_PER_AU * v((geo_moon.y - surface.y) - topo_moon.vec.y)
dz = astronomy.KM_PER_AU * v((geo_moon.z - surface.z) - topo_moon.vec.z)
diff = sqrt(dx*dx + dy*dy + dz*dz)
Debug('PY GeoidTestCase: ofdate={}, time={}, obs={}, surface=({}, {}, {}), diff = {} km'.format(
ofdate,
time,
observer,
astronomy.KM_PER_AU * surface.x,
astronomy.KM_PER_AU * surface.y,
astronomy.KM_PER_AU * surface.z,
diff
))
# Require 1 millimeter accuracy! (one millionth of a kilometer).
if diff > 1.0e-6:
print('PY GeoidTestCase: EXCESSIVE POSITION ERROR.')
return 1
# Verify that we can convert the surface vector back to an observer.
vobs = astronomy.VectorObserver(surface, ofdate)
lat_diff = vabs(vobs.latitude - observer.latitude)
# Longitude is meaningless at the poles, so don't bother checking it there.
if -89.99 <= observer.latitude <= +89.99:
lon_diff = vabs(vobs.longitude - observer.longitude)
if lon_diff > 180.0:
lon_diff = 360.0 - lon_diff
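        # Scale the longitude error by cos(latitude): a fixed difference in
        # longitude spans a smaller physical distance near the poles.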
        lon_diff = vabs(lon_diff * math.cos(math.radians(observer.latitude)))
if lon_diff > 1.0e-6:
print('PY GeoidTestCase: EXCESSIVE longitude check error = {}'.format(lon_diff))
return 1
else:
lon_diff = 0.0
h_diff = vabs(vobs.height - observer.height)
Debug('PY GeoidTestCase: vobs={}, lat_diff={}, lon_diff={}, h_diff={}'.format(vobs, lat_diff, lon_diff, h_diff))
if lat_diff > 1.0e-6:
print('PY GeoidTestCase: EXCESSIVE latitude check error = {}'.format(lat_diff))
return 1
if h_diff > 0.001:
print('PY GeoidTestCase: EXCESSIVE height check error = {}'.format(h_diff))
return 1
return 0
def Geoid():
time_list = [
astronomy.Time.Parse('1066-09-27T18:00:00Z'),
astronomy.Time.Parse('1970-12-13T15:42:00Z'),
astronomy.Time.Parse('1970-12-13T15:43:00Z'),
astronomy.Time.Parse('2015-03-05T02:15:45Z')
]
observer_list = [
astronomy.Observer( 0.0, 0.0, 0.0),
astronomy.Observer( +1.5, +2.7, 7.4),
astronomy.Observer( -1.5, -2.7, 7.4),
astronomy.Observer(-53.7, +141.7, 100.0),
astronomy.Observer(+30.0, -85.2, -50.0),
astronomy.Observer(+90.0, +45.0, -50.0),
astronomy.Observer(-90.0, -180.0, 0.0),
astronomy.Observer(-89.0, -81.0, 1234.0),
astronomy.Observer(+89.0, -103.4, 279.8),
astronomy.Observer(+48.2, 24.5, 2019.0),
astronomy.Observer(+28.5, -82.3, -3.4)
]
# Test hand-crafted locations.
for observer in observer_list:
for time in time_list:
if GeoidTestCase(time, observer, False):
return 1
if GeoidTestCase(time, observer, True):
return 1
# More exhaustive tests for a single time value across many different geographic coordinates.
# Solving for latitude is the most complicated part of VectorObserver, so
# I test for every 1-degree increment of latitude, but with 5-degree increments for longitude.
time = astronomy.Time.Parse('2021-06-20T15:08:00Z')
lat = -90
while lat <= +90:
lon = -175
while lon <= +180:
observer = astronomy.Observer(lat, lon, 0.0)
if GeoidTestCase(time, observer, True):
return 1
lon += 5
lat += 1
print('PY GeoidTest: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
def JupiterMoons_CheckJpl(mindex, tt, pos, vel):
pos_tolerance = 9.0e-4
vel_tolerance = 9.0e-4
time = astronomy.Time.FromTerrestrialTime(tt)
jm = astronomy.JupiterMoons(time)
moon = SelectJupiterMoon(jm, mindex)
dx = v(pos[0] - moon.x)
dy = v(pos[1] - moon.y)
dz = v(pos[2] - moon.z)
mag = sqrt(pos[0]*pos[0] + pos[1]*pos[1] + pos[2]*pos[2])
pos_diff = sqrt(dx*dx + dy*dy + dz*dz) / mag
if pos_diff > pos_tolerance:
print('PY JupiterMoons_CheckJpl(mindex={}, tt={}): excessive position error {}'.format(mindex, tt, pos_diff))
return 1
dx = v(vel[0] - moon.vx)
dy = v(vel[1] - moon.vy)
dz = v(vel[2] - moon.vz)
mag = sqrt(vel[0]*vel[0] + vel[1]*vel[1] + vel[2]*vel[2])
vel_diff = sqrt(dx*dx + dy*dy + dz*dz) / mag
if vel_diff > vel_tolerance:
print('PY JupiterMoons_CheckJpl(mindex={}, tt={}): excessive velocity error {}'.format(mindex, tt, vel_diff))
return 1
Debug('PY JupiterMoons_CheckJpl: mindex={}, tt={}, pos_diff={}, vel_diff={}'.format(mindex, tt, pos_diff, vel_diff))
return 0
def JupiterMoons():
for mindex in range(4):
filename = 'jupiter_moons/horizons/jm{}.txt'.format(mindex)
with open(filename, 'rt') as infile:
lnum = 0
found = False
part = -1
expected_count = 5001
count = 0
for line in infile:
line = line.rstrip()
lnum += 1
if not found:
if line == '$$SOE':
found = True
part = 0
elif line.startswith('Revised:'):
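                        # Horizons numbers the Galilean moons 501 (Io) through 504 (Callisto),
                        # so subtracting 501 recovers the zero-based moon index.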
check_mindex = int(line[76:]) - 501
if mindex != check_mindex:
print('PY JupiterMoons({} line {}): moon index does not match: check={}, mindex={}'.format(filename, lnum, check_mindex, mindex))
return 1
elif line == '$$EOE':
break
else:
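                    # Each Horizons record spans three lines, cycled by 'part':
                    # (0) epoch as a Julian date, (1) position vector, (2) velocity vector.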
if part == 0:
# 2446545.000000000 = A.D. 1986-Apr-24 12:00:00.0000 TDB
tt = float(line.split()[0]) - 2451545.0 # convert JD to J2000 TT
elif part == 1:
# X = 1.134408131605554E-03 Y =-2.590904586750408E-03 Z =-7.490427225904720E-05
match = re.match(r'\s*X =\s*(\S+) Y =\s*(\S+) Z =\s*(\S+)', line)
if not match:
print('PY JupiterMoons({} line {}): cannot parse position vector.'.format(filename, lnum))
return 1
pos = [ float(match.group(1)), float(match.group(2)), float(match.group(3)) ]
else: # part == 2
# VX= 9.148038778472862E-03 VY= 3.973823407182510E-03 VZ= 2.765660368640458E-04
match = re.match(r'\s*VX=\s*(\S+) VY=\s*(\S+) VZ=\s*(\S+)', line)
if not match:
print('PY JupiterMoons({} line {}): cannot parse velocity vector.'.format(filename, lnum))
return 1
vel = [ float(match.group(1)), float(match.group(2)), float(match.group(3)) ]
if JupiterMoons_CheckJpl(mindex, tt, pos, vel):
print('PY JupiterMoons({} line {}): FAILED VERIFICATION.'.format(filename, lnum))
return 1
count += 1
part = (part + 1) % 3
if count != expected_count:
print('PY JupiterMoons: expected {} test cases, but found {}'.format(expected_count, count))
return 1
print('PY JupiterMoons: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
def Issue103():
# https://github.com/cosinekitty/astronomy/issues/103
observer = astronomy.Observer(29, -81, 10)
ut = -8.817548982869034808e+04
time = astronomy.Time(ut)
body = astronomy.Body.Venus
ofdate = astronomy.Equator(body, time, observer, True, True)
hor = astronomy.Horizon(time, observer, ofdate.ra, ofdate.dec, astronomy.Refraction.Airless)
print('tt = {:23.16f}'.format(time.tt))
print('az = {:23.16f}'.format(hor.azimuth))
print('alt = {:23.16f}'.format(hor.altitude))
return 0
#-----------------------------------------------------------------------------------------------------------
class _bary_stats_t:
def __init__(self):
self.max_rdiff = 0.0
self.max_vdiff = 0.0
def StateVectorDiff(relative, vec, x, y, z):
dx = v(vec[0] - x)
dy = v(vec[1] - y)
dz = v(vec[2] - z)
diff_squared = dx*dx + dy*dy + dz*dz
if relative:
diff_squared /= (vec[0]*vec[0] + vec[1]*vec[1] + vec[2]*vec[2])
return sqrt(diff_squared)
#-----------------------------------------------------------------------------------------------------------
def VerifyState(func, stats, filename, lnum, time, pos, vel, r_thresh, v_thresh):
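    # Threshold sign convention: a positive threshold checks relative error,
    # while a negative threshold checks absolute error (see StateVectorDiff).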
state = func.Eval(time)
rdiff = StateVectorDiff((r_thresh > 0.0), pos, state.x, state.y, state.z)
if rdiff > stats.max_rdiff:
stats.max_rdiff = rdiff
vdiff = StateVectorDiff((v_thresh > 0.0), vel, state.vx, state.vy, state.vz)
if vdiff > stats.max_vdiff:
stats.max_vdiff = vdiff
if rdiff > abs(r_thresh):
print('PY VerifyState({} line {}): EXCESSIVE position error = {:0.4e}'.format(filename, lnum, rdiff))
return 1
if vdiff > abs(v_thresh):
print('PY VerifyState({} line {}): EXCESSIVE velocity error = {:0.4e}'.format(filename, lnum, vdiff))
return 1
return 0
class JplStateRecord:
def __init__(self, lnum, state):
self.lnum = lnum
self.state = state
def JplHorizonsStateVectors(filename):
with open(filename, 'rt') as infile:
lnum = 0
part = 0
found_begin = False
for line in infile:
line = line.rstrip()
lnum += 1
if not found_begin:
if line == '$$SOE':
found_begin = True
elif line == '$$EOE':
break
else:
if part == 0:
# 2446545.000000000 = A.D. 1986-Apr-24 12:00:00.0000 TDB
tt = float(line.split()[0]) - 2451545.0 # convert JD to J2000 TT
time = astronomy.Time.FromTerrestrialTime(tt)
elif part == 1:
# X = 1.134408131605554E-03 Y =-2.590904586750408E-03 Z =-7.490427225904720E-05
match = re.match(r'\s*X =\s*(\S+) Y =\s*(\S+) Z =\s*(\S+)', line)
if not match:
print('PY JplHorizonsStateVectors({} line {}): cannot parse position vector.'.format(filename, lnum))
return 1
rx, ry, rz = float(match.group(1)), float(match.group(2)), float(match.group(3))
else: # part == 2
# VX= 9.148038778472862E-03 VY= 3.973823407182510E-03 VZ= 2.765660368640458E-04
match = re.match(r'\s*VX=\s*(\S+) VY=\s*(\S+) VZ=\s*(\S+)', line)
if not match:
print('PY JplHorizonsStateVectors({} line {}): cannot parse velocity vector.'.format(filename, lnum))
return 1
vx, vy, vz = float(match.group(1)), float(match.group(2)), float(match.group(3))
yield JplStateRecord(lnum, astronomy.StateVector(rx, ry, rz, vx, vy, vz, time))
part = (part + 1) % 3
return 0
def VerifyStateBody(func, filename, r_thresh, v_thresh):
stats = _bary_stats_t()
count = 0
for rec in JplHorizonsStateVectors(filename):
time = rec.state.t
pos = [rec.state.x, rec.state.y, rec.state.z]
vel = [rec.state.vx, rec.state.vy, rec.state.vz]
if VerifyState(func, stats, filename, rec.lnum, time, pos, vel, r_thresh, v_thresh):
print('PY VerifyStateBody({} line {}): FAILED VERIFICATION.'.format(filename, rec.lnum))
return 1
count += 1
Debug('PY VerifyStateBody({}): PASS - Tested {} cases. max rdiff={:0.3e}, vdiff={:0.3e}'.format(filename, count, stats.max_rdiff, stats.max_vdiff))
return 0
#-----------------------------------------------------------------------------------------------------------
# Constants for use inside unit tests only; they don't make sense for public consumption.
_Body_GeoMoon = -100
_Body_Geo_EMB = -101
class BaryStateFunc:
def __init__(self, body):
self.body = body
def Eval(self, time):
if self.body == _Body_GeoMoon:
return astronomy.GeoMoonState(time)
if self.body == _Body_Geo_EMB:
return astronomy.GeoEmbState(time)
return astronomy.BaryState(self.body, time)
def BaryState():
if VerifyStateBody(BaryStateFunc(astronomy.Body.Sun), 'barystate/Sun.txt', -1.224e-05, -1.134e-07): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Mercury), 'barystate/Mercury.txt', 1.672e-04, 2.698e-04): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Venus), 'barystate/Venus.txt', 4.123e-05, 4.308e-05): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Earth), 'barystate/Earth.txt', 2.296e-05, 6.359e-05): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Mars), 'barystate/Mars.txt', 3.107e-05, 5.550e-05): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Jupiter), 'barystate/Jupiter.txt', 7.389e-05, 2.471e-04): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Saturn), 'barystate/Saturn.txt', 1.067e-04, 3.220e-04): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Uranus), 'barystate/Uranus.txt', 9.035e-05, 2.519e-04): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Neptune), 'barystate/Neptune.txt', 9.838e-05, 4.446e-04): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Pluto), 'barystate/Pluto.txt', 4.259e-05, 7.827e-05): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.Moon), "barystate/Moon.txt", 2.354e-05, 6.604e-05): return 1
if VerifyStateBody(BaryStateFunc(astronomy.Body.EMB), "barystate/EMB.txt", 2.353e-05, 6.511e-05): return 1
if VerifyStateBody(BaryStateFunc(_Body_GeoMoon), "barystate/GeoMoon.txt", 4.086e-05, 5.347e-05): return 1
if VerifyStateBody(BaryStateFunc(_Body_Geo_EMB), "barystate/GeoEMB.txt", 4.076e-05, 5.335e-05): return 1
print('PY BaryState: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
class HelioStateFunc:
def __init__(self, body):
self.body = body
def Eval(self, time):
return astronomy.HelioState(self.body, time)
def HelioState():
if VerifyStateBody(HelioStateFunc(astronomy.Body.SSB), 'heliostate/SSB.txt', -1.209e-05, -1.125e-07): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Mercury), 'heliostate/Mercury.txt', 1.481e-04, 2.756e-04): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Venus), 'heliostate/Venus.txt', 3.528e-05, 4.485e-05): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Earth), 'heliostate/Earth.txt', 1.476e-05, 6.105e-05): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Mars), 'heliostate/Mars.txt', 3.154e-05, 5.603e-05): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Jupiter), 'heliostate/Jupiter.txt', 7.455e-05, 2.562e-04): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Saturn), 'heliostate/Saturn.txt', 1.066e-04, 3.150e-04): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Uranus), 'heliostate/Uranus.txt', 9.034e-05, 2.712e-04): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Neptune), 'heliostate/Neptune.txt', 9.834e-05, 4.534e-04): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Pluto), 'heliostate/Pluto.txt', 4.271e-05, 1.198e-04): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.Moon), 'heliostate/Moon.txt', 1.477e-05, 6.195e-05): return 1
if VerifyStateBody(HelioStateFunc(astronomy.Body.EMB), 'heliostate/EMB.txt', 1.476e-05, 6.106e-05): return 1
print('PY HelioState: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
class TopoStateFunc:
def __init__(self, body):
self.body = body
def Eval(self, time):
observer = astronomy.Observer(30.0, -80.0, 1000.0)
observer_state = astronomy.ObserverState(time, observer, False)
if self.body == _Body_Geo_EMB:
state = astronomy.GeoEmbState(time)
elif self.body == astronomy.Body.Earth:
state = astronomy.StateVector(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, time)
else:
            raise Exception('PY TopoStateFunc: unsupported body {}'.format(self.body))
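        # Topocentric state = geocentric state of the body minus the
        # geocentric state vector of the observer.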
state.x -= observer_state.x
state.y -= observer_state.y
state.z -= observer_state.z
state.vx -= observer_state.vx
state.vy -= observer_state.vy
state.vz -= observer_state.vz
return state
def TopoState():
if VerifyStateBody(TopoStateFunc(astronomy.Body.Earth), 'topostate/Earth_N30_W80_1000m.txt', 2.108e-04, 2.430e-04): return 1
if VerifyStateBody(TopoStateFunc(_Body_Geo_EMB), 'topostate/EMB_N30_W80_1000m.txt', 7.197e-04, 2.497e-04): return 1
print('PY TopoState: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
def Aberration():
THRESHOLD_SECONDS = 0.453
filename = 'equatorial/Mars_j2000_ofdate_aberration.txt'
count = 0
with open(filename, 'rt') as infile:
lnum = 0
found_begin = False
max_diff_seconds = 0.0
for line in infile:
lnum += 1
line = line.rstrip()
if not found_begin:
if line == '$$SOE':
found_begin = True
elif line == '$$EOE':
break
else:
# 2459371.500000000 * 118.566080210 22.210647456 118.874086738 22.155784122
token = line.split()
if len(token) < 5:
print('PY Aberration({} line {}): not enough tokens'.format(filename, lnum))
return 1
jd = float(token[0])
jra = float(token[-4])
jdec = float(token[-3])
dra = float(token[-2])
ddec = float(token[-1])
# Convert julian day value to AstroTime.
time = astronomy.Time(jd - 2451545.0)
# Convert EQJ angular coordinates (jra, jdec) to an EQJ vector.
                # Make the magnitude of the vector the speed of light,
# to prepare for aberration correction.
eqj_sphere = astronomy.Spherical(jdec, jra, astronomy.C_AUDAY)
eqj_vec = astronomy.VectorFromSphere(eqj_sphere, time)
# Aberration correction: calculate the Earth's barycentric
# velocity vector in EQJ coordinates.
eqj_earth = astronomy.BaryState(astronomy.Body.Earth, time)
# Use non-relativistic approximation: add light vector to Earth velocity vector.
                # This gives the aberration-corrected apparent position of the star in EQJ.
eqj_vec.x += eqj_earth.vx
eqj_vec.y += eqj_earth.vy
eqj_vec.z += eqj_earth.vz
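                # The sum c*u + v points along the apparent (aberrated) direction;
                # only its direction matters here, so no renormalization is needed.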
# Calculate the rotation matrix that converts J2000 coordinates to of-date coordinates.
rot = astronomy.Rotation_EQJ_EQD(time)
# Use the rotation matrix to re-orient the EQJ vector to an EQD vector.
eqd_vec = astronomy.RotateVector(rot, eqj_vec)
# Convert the EQD vector back to spherical angular coordinates.
eqd_sphere = astronomy.SphereFromVector(eqd_vec)
# Calculate the differences in RA and DEC between expected and calculated values.
factor = math.cos(math.radians(v(eqd_sphere.lat))) # RA errors are less important toward the poles.
xra = factor * vabs(eqd_sphere.lon - dra)
xdec = vabs(eqd_sphere.lat - ddec)
diff_seconds = 3600.0 * sqrt(xra*xra + xdec*xdec)
Debug('PY Aberration({} line {}): xra={:0.6f} deg, xdec={:0.6f} deg, diff_seconds={:0.3f}.'.format(filename, lnum, xra, xdec, diff_seconds))
if diff_seconds > THRESHOLD_SECONDS:
                    print('PY Aberration({} line {}): EXCESSIVE ANGULAR ERROR = {:0.3f} seconds.'.format(filename, lnum, diff_seconds))
return 1
if diff_seconds > max_diff_seconds:
max_diff_seconds = diff_seconds
# We have completed one more test case.
count += 1
print('PY AberrationTest({}): PASS - Tested {} cases. max_diff_seconds = {:0.3f}'.format(filename, count, max_diff_seconds))
return 0
#-----------------------------------------------------------------------------------------------------------
def Twilight():
tolerance_seconds = 60.0
max_diff = 0.0
filename = 'riseset/twilight.txt'
with open(filename, 'rt') as infile:
lnum = 0
for line in infile:
lnum += 1
tokens = line.split()
if len(tokens) != 9:
print('PY Twilight({} line {}): incorrect number of tokens = {}'.format(filename, lnum, len(tokens)))
return 1
observer = astronomy.Observer(float(tokens[0]), float(tokens[1]), 0.0)
searchDate = astronomy.Time.Parse(tokens[2])
correctTimes = [astronomy.Time.Parse(t) for t in tokens[3:]]
calcTimes = [
astronomy.SearchAltitude(astronomy.Body.Sun, observer, astronomy.Direction.Rise, searchDate, 1.0, -18.0), # astronomical dawn
astronomy.SearchAltitude(astronomy.Body.Sun, observer, astronomy.Direction.Rise, searchDate, 1.0, -12.0), # nautical dawn
astronomy.SearchAltitude(astronomy.Body.Sun, observer, astronomy.Direction.Rise, searchDate, 1.0, -6.0), # civil dawn
astronomy.SearchAltitude(astronomy.Body.Sun, observer, astronomy.Direction.Set, searchDate, 1.0, -6.0), # civil dusk
astronomy.SearchAltitude(astronomy.Body.Sun, observer, astronomy.Direction.Set, searchDate, 1.0, -12.0), # nautical dusk
astronomy.SearchAltitude(astronomy.Body.Sun, observer, astronomy.Direction.Set, searchDate, 1.0, -18.0) # astronomical dusk
]
for i in range(6):
correct = correctTimes[i]
calc = calcTimes[i]
diff = SECONDS_PER_DAY * vabs(calc.ut - correct.ut)
if diff > tolerance_seconds:
print('PY Twilight({} line {}): EXCESSIVE ERROR = {} seconds for case {}'.format(filename, lnum, diff, i))
return 1
if diff > max_diff:
max_diff = diff
print('PY Twilight: PASS ({} test cases, max error = {} seconds)'.format(lnum, max_diff))
return 0
#-----------------------------------------------------------------------------------------------------------
def LibrationFile(filename):
max_diff_elon = 0.0
max_diff_elat = 0.0
max_diff_distance = 0.0
max_diff_diam = 0.0
max_eclip_lon = -900.0
count = 0
with open(filename, 'rt') as infile:
lnum = 0
for line in infile:
lnum += 1
token = line.split()
if lnum == 1:
if line != " Date Time Phase Age Diam Dist RA Dec Slon Slat Elon Elat AxisA\n":
print('PY LibrationFile({} line {}): unexpected header line'.format(filename, lnum))
return 1
else:
if len(token) != 16:
print('PY LibrationFile({} line {}): expected 16 tokens, found {}'.format(filename, lnum, len(token)))
return 1
day = int(token[0])
month = MonthNumber(token[1])
year = int(token[2])
hmtoken = token[3].split(':')
if len(hmtoken) != 2:
print('PY LibrationFile({} line {}): expected hh:mm but found "{}"'.format(filename, lnum, hmtoken))
return 1
hour = int(hmtoken[0])
minute = int(hmtoken[1])
time = astronomy.Time.Make(year, month, day, hour, minute, 0.0)
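                # Per the indices used below: token[7] is the apparent diameter
                # in arcseconds (converted to degrees), token[8] is the distance
                # in km, and token[13]/token[14] are the ecliptic libration
                # longitude/latitude in degrees.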
diam = float(token[7]) / 3600.0
dist = float(token[8])
elon = float(token[13])
elat = float(token[14])
lib = astronomy.Libration(time)
diff_elon = 60.0 * vabs(lib.elon - elon)
if diff_elon > max_diff_elon:
max_diff_elon = diff_elon
diff_elat = 60.0 * vabs(lib.elat - elat)
if diff_elat > max_diff_elat:
max_diff_elat = diff_elat
diff_distance = vabs(lib.dist_km - dist)
if diff_distance > max_diff_distance:
max_diff_distance = diff_distance
diff_diam = vabs(lib.diam_deg - diam)
if diff_diam > max_diff_diam:
max_diff_diam = diff_diam
if lib.mlon > max_eclip_lon:
max_eclip_lon = lib.mlon
if diff_elon > 0.1304:
print('PY LibrationFile({} line {}): EXCESSIVE diff_elon = {}'.format(filename, lnum, diff_elon))
return 1
if diff_elat > 1.6476:
print('PY LibrationFile({} line {}): EXCESSIVE diff_elat = {}'.format(filename, lnum, diff_elat))
return 1
if diff_distance > 54.377:
print('PY LibrationFile({} line {}): EXCESSIVE diff_distance = {}'.format(filename, lnum, diff_distance))
return 1
if diff_diam > 0.00009:
print('PY LibrationFile({} line {}): EXCESSIVE diff_diam = {}'.format(filename, lnum, diff_diam))
return 1
count += 1
if not (359.0 < max_eclip_lon < 360.0):
print('PY LibrationFile({}): INVALID max ecliptic longitude = {:0.3f}'.format(filename, max_eclip_lon))
return 1
print('PY LibrationFile({}): PASS ({} test cases, max_diff_elon = {} arcmin, max_diff_elat = {} arcmin, max_diff_distance = {} km, max_diff_diam = {} deg)'.format(
filename, count, max_diff_elon, max_diff_elat, max_diff_distance, max_diff_diam
))
return 0
def Libration():
return (
LibrationFile('libration/mooninfo_2020.txt') or
LibrationFile('libration/mooninfo_2021.txt') or
LibrationFile('libration/mooninfo_2022.txt')
)
#-----------------------------------------------------------------------------------------------------------
def Axis():
if AxisTestBody(astronomy.Body.Sun, 'axis/Sun.txt', 0.0) : return 1
if AxisTestBody(astronomy.Body.Mercury, 'axis/Mercury.txt', 0.074340) : return 1
if AxisTestBody(astronomy.Body.Venus, 'axis/Venus.txt', 0.0) : return 1
if AxisTestBody(astronomy.Body.Earth, 'axis/Earth.txt', 0.002032) : return 1
if AxisTestBody(astronomy.Body.Moon, 'axis/Moon.txt', 0.264845) : return 1
if AxisTestBody(astronomy.Body.Mars, 'axis/Mars.txt', 0.075323) : return 1
if AxisTestBody(astronomy.Body.Jupiter, 'axis/Jupiter.txt', 0.000324) : return 1
if AxisTestBody(astronomy.Body.Saturn, 'axis/Saturn.txt', 0.000304) : return 1
if AxisTestBody(astronomy.Body.Uranus, 'axis/Uranus.txt', 0.0) : return 1
if AxisTestBody(astronomy.Body.Neptune, 'axis/Neptune.txt', 0.000464) : return 1
if AxisTestBody(astronomy.Body.Pluto, 'axis/Pluto.txt', 0.0) : return 1
print('PY AxisTest: PASS')
return 0
def AxisTestBody(body, filename, arcmin_tolerance):
max_arcmin = 0
lnum = 0
count = 0
found_data = False
with open(filename, 'rt') as infile:
for line in infile:
line = line.strip()
lnum += 1
if not found_data:
if line == '$$SOE':
found_data = True
else:
if line == '$$EOE':
break
token = line.split()
# [ '1970-Jan-01', '00:00', '2440587.500000000', '281.01954', '61.41577' ]
jd = float(token[2])
ra = float(token[3])
dec = float(token[4])
time = astronomy.Time(jd - 2451545.0)
axis = astronomy.RotationAxis(body, time)
sphere = astronomy.Spherical(dec, ra, 1.0)
north = astronomy.VectorFromSphere(sphere, time)
arcmin = 60.0 * astronomy.AngleBetween(north, axis.north)
if arcmin > max_arcmin:
max_arcmin = arcmin
count += 1
Debug('PY AxisTestBody({}): {} test cases, max arcmin error = {}.'.format(body, count, max_arcmin))
if max_arcmin > arcmin_tolerance:
print('PY AxisTestBody({}): EXCESSIVE ERROR = {}'.format(body, max_arcmin))
return 1
return 0
#-----------------------------------------------------------------------------------------------------------
def MoonNodes():
filename = 'moon_nodes/moon_nodes.txt'
with open(filename, 'rt') as infile:
max_angle = 0.0
max_minutes = 0.0
prev_kind = '?'
lnum = 0
for line in infile:
line = line.strip()
lnum += 1
token = line.split()
if len(token) != 4:
print('PY MoonNodes({} line {}): syntax error'.format(filename, lnum))
return 1
kind = token[0]
if kind not in 'AD':
print('PY MoonNodes({} line {}): invalid node kind'.format(filename, lnum))
return 1
if kind == prev_kind:
print('PY MoonNodes({} line {}): duplicate ascending/descending node'.format(filename, lnum))
return 1
time = astronomy.Time.Parse(token[1])
ra = float(token[2])
dec = float(token[3])
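            # RA in the file is in sidereal hours; convert to degrees (15 degrees per hour).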
sphere = astronomy.Spherical(dec, 15.0 * ra, 1.0)
vec_test = astronomy.VectorFromSphere(sphere, time)
# Calculate EQD coordinates of the Moon. Verify against input file.
vec_eqj = astronomy.GeoMoon(time)
rot = astronomy.Rotation_EQJ_EQD(time)
vec_eqd = astronomy.RotateVector(rot, vec_eqj)
angle = astronomy.AngleBetween(vec_test, vec_eqd)
diff_angle = 60.0 * abs(angle)
if diff_angle > max_angle:
max_angle = diff_angle
            if diff_angle > 1.54:
                print('PY MoonNodes({} line {}): EXCESSIVE equatorial error = {:0.3f} arcmin'.format(filename, lnum, diff_angle))
                return 1
if lnum == 1:
# The very first time, so search for the first node in the series.
# Back up a few days to make sure we really are finding it ourselves.
earlier = time.AddDays(-6.5472) # back up by a weird amount of time
node = astronomy.SearchMoonNode(earlier)
else:
# Use the previous node to find the next node.
node = astronomy.NextMoonNode(node)
# Verify the ecliptic latitude is very close to zero at the alleged node.
ecl = astronomy.EclipticGeoMoon(node.time)
diff_lat = 60.0 * abs(ecl.lat)
if diff_lat > 8.1e-4:
print('PY MoonNodes({} line {}): found node has excessive latitude = {:0.4f} arcmin.'.format(filename, lnum, diff_lat))
return 1
# Verify the time agrees with Espenak's time to within a few minutes.
diff_minutes = (24.0 * 60.0) * abs(node.time.tt - time.tt)
if diff_minutes > max_minutes:
max_minutes = diff_minutes
# Verify the kind of node matches what Espenak says (ascending or descending).
if kind == 'A' and node.kind != astronomy.NodeEventKind.Ascending:
print('PY MoonNodes({} line {}): did not find ascending node as expected.'.format(filename, lnum))
return 1
if kind == 'D' and node.kind != astronomy.NodeEventKind.Descending:
print('PY MoonNodes({} line {}): did not find descending node as expected.'.format(filename, lnum))
return 1
prev_kind = kind
if max_minutes > 3.681:
print('PY MoonNodes: EXCESSIVE time prediction error = {:0.3f} minutes.'.format(max_minutes))
return 1
print('PY MoonNodes: PASS ({} nodes, max equ error = {:0.3f} arcmin, max time error = {:0.3f} minutes.)'.format(lnum, max_angle, max_minutes))
return 0
#-----------------------------------------------------------------------------------------------------------
def MoonReversePhase(longitude):
# Verify that SearchMoonPhase works both forward and backward in time.
nphases = 5000
utList = []
dtMin = +1000.0
dtMax = -1000.0
    # Search forward in time from 1800 to find consecutive phase events.
time = astronomy.Time.Make(1800, 1, 1, 0, 0, 0.0)
for i in range(nphases):
result = astronomy.SearchMoonPhase(longitude, time, +40.0)
if result is None:
print('PY MoonReversePhase(lon={}, i={}): failed to find event after {}'.format(longitude, i, time))
return 1
utList.append(result.ut)
if i > 0:
# Verify that consecutive events are reasonably close to the synodic period (29.5 days) apart.
dt = v(utList[i] - utList[i-1])
if dt < dtMin:
dtMin = dt
if dt > dtMax:
dtMax = dt
time = result.AddDays(+0.1)
Debug('PY MoonReversePhase({}): dtMin={:0.6f} days, dtMax={:0.6f} days.'.format(longitude, dtMin, dtMax))
if (dtMin < 29.175) or (dtMax > 29.926):
print('PY MoonReversePhase({}): Time between consecutive events is suspicious.'.format(longitude))
return 1
# Do a reverse chronological search and make sure the results are consistent with the forward search.
time = time.AddDays(20.0)
maxDiff = 0.0
for i in range(nphases-1, -1, -1):
result = astronomy.SearchMoonPhase(longitude, time, -40.0)
if result is None:
print('PY MoonReversePhase(lon={}, i={}): failed to find event before {}'.format(longitude, i, time))
return 1
diff = SECONDS_PER_DAY * vabs(result.ut - utList[i])
if diff > maxDiff:
maxDiff = diff
time = result.AddDays(-0.1)
Debug('PY MoonReversePhase({}): Maximum discrepancy in reverse search = {:0.6f} seconds.'.format(longitude, maxDiff))
if maxDiff > 0.164:
print('PY MoonReversePhase({}): EXCESSIVE DISCREPANCY in reverse search.'.format(longitude))
return 1
# Pick a pair of consecutive events from the middle of the list.
# Verify forward and backward searches work correctly from many intermediate times.
nslots = 100
k = nphases // 2
ut1 = utList[k]
ut2 = utList[k+1]
for i in range(1, nslots):
ut = ut1 + (i/nslots)*(ut2 - ut1)
time = astronomy.Time(ut)
before = astronomy.SearchMoonPhase(longitude, time, -40.0)
if before is None:
print('PY MoonReversePhase(lon={}, time={}): backward search failed'.format(longitude, time))
return 1
diff = SECONDS_PER_DAY * vabs(before.ut - ut1)
if diff > 0.07:
print('PY MoonReversePhase(lon={}, time={}): backward search error = {:0.4e} seconds'.format(longitude, time, diff))
return 1
after = astronomy.SearchMoonPhase(longitude, time, +40.0)
if after is None:
print('PY MoonReversePhase(lon={}, time={}): forward search failed'.format(longitude, time))
return 1
diff = SECONDS_PER_DAY * vabs(after.ut - ut2)
if diff > 0.07:
print('PY MoonReversePhase(lon={}, time={}): forward search error = {:0.4e} seconds'.format(longitude, time, diff))
return 1
print('PY MoonReversePhase({}): PASS'.format(longitude))
return 0
def MoonReverse():
return (
MoonReversePhase(0.0) or
MoonReversePhase(90.0) or
MoonReversePhase(180.0) or
MoonReversePhase(270.0)
)
#-----------------------------------------------------------------------------------------------------------
class LagrangeFunc:
def __init__(self, point, major_body, minor_body):
self.point = point
self.major_body = major_body
self.minor_body = minor_body
def Eval(self, time):
return astronomy.LagrangePoint(self.point, time, self.major_body, self.minor_body)
def VerifyStateLagrange(major_body, minor_body, point, filename, r_thresh, v_thresh):
func = LagrangeFunc(point, major_body, minor_body)
return VerifyStateBody(func, filename, r_thresh, v_thresh)
def Lagrange():
# Test Sun/EMB Lagrange points.
if VerifyStateLagrange(astronomy.Body.Sun, astronomy.Body.EMB, 1, 'lagrange/semb_L1.txt', 1.33e-5, 6.13e-5): return 1
if VerifyStateLagrange(astronomy.Body.Sun, astronomy.Body.EMB, 2, 'lagrange/semb_L2.txt', 1.33e-5, 6.13e-5): return 1
if VerifyStateLagrange(astronomy.Body.Sun, astronomy.Body.EMB, 4, 'lagrange/semb_L4.txt', 3.75e-5, 5.28e-5): return 1
if VerifyStateLagrange(astronomy.Body.Sun, astronomy.Body.EMB, 5, 'lagrange/semb_L5.txt', 3.75e-5, 5.28e-5): return 1
# Test Earth/Moon Lagrange points.
if VerifyStateLagrange(astronomy.Body.Earth, astronomy.Body.Moon, 1, 'lagrange/em_L1.txt', 3.79e-5, 5.06e-5): return 1
if VerifyStateLagrange(astronomy.Body.Earth, astronomy.Body.Moon, 2, 'lagrange/em_L2.txt', 3.79e-5, 5.06e-5): return 1
if VerifyStateLagrange(astronomy.Body.Earth, astronomy.Body.Moon, 4, 'lagrange/em_L4.txt', 3.79e-5, 1.59e-3): return 1
if VerifyStateLagrange(astronomy.Body.Earth, astronomy.Body.Moon, 5, 'lagrange/em_L5.txt', 3.79e-5, 1.59e-3): return 1
print('PY Lagrange: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
def SiderealTime():
correct = 9.3983699280076483
time = astronomy.Time.Make(2022, 3, 15, 21, 50, 0)
gast = astronomy.SiderealTime(time)
diff = abs(gast - correct)
print('PY SiderealTime: gast={:0.15f}, correct={:0.15f}, diff={:0.3e}'.format(gast, correct, diff))
if diff > 1.0e-15:
print('PY SiderealTime: EXCESSIVE ERROR')
return 1
print('PY SiderealTime: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
def Repr():
time = astronomy.Time.Make(2022, 3, 31, 21, 4, 45.123)
if str(time) != '2022-03-31T21:04:45.123Z':
print('PY Repr: FAIL str(time)')
return 1
if repr(time) != "Time('2022-03-31T21:04:45.123Z')":
print('PY Repr: FAIL repr(time)')
return 1
vec = astronomy.Vector(-1.8439088914585775, 1.51657508881031, 0.8366600265340756, time)
if repr(vec) != "Vector(-1.8439088914585775, 1.51657508881031, 0.8366600265340756, Time('2022-03-31T21:04:45.123Z'))":
print('PY Repr: FAIL repr(vec)')
return 1
state = astronomy.StateVector(vec.x, vec.y, vec.z, -vec.x/3, -vec.y/3, -vec.z/3, vec.t)
if repr(state) != "StateVector(x=-1.8439088914585775, y=1.51657508881031, z=0.8366600265340756, vx=0.6146362971528592, vy=-0.5055250296034367, vz=-0.27888667551135854, t=Time('2022-03-31T21:04:45.123Z'))":
print('PY Repr: FAIL repr(state)')
return 1
observer = astronomy.Observer(32.1, 45.6, 98.765)
if repr(observer) != 'Observer(latitude=32.1, longitude=45.6, height=98.765)':
print('PY Repr: FAIL repr(observer)')
return 1
rot = astronomy.Rotation_EQJ_ECL()
if repr(rot) != 'RotationMatrix([[1, 0, 0], [0, 0.9174821430670688, -0.3977769691083922], [0, 0.3977769691083922, 0.9174821430670688]])':
print('PY Repr: FAIL repr(rot)')
return 1
sph = astronomy.Spherical(lat=-27.3, lon=85.2, dist=2.54)
if repr(sph) != 'Spherical(lat=-27.3, lon=85.2, dist=2.54)':
print('PY Repr: FAIL repr(sph)')
return 1
equ = astronomy.Equatorial(8.54, -23.753, 2.986, vec)
if repr(equ) != "Equatorial(ra=8.54, dec=-23.753, dist=2.986, vec=Vector(-1.8439088914585775, 1.51657508881031, 0.8366600265340756, Time('2022-03-31T21:04:45.123Z')))":
print('PY Repr: FAIL repr(equ)')
return 1
print('PY Repr: PASS')
return 0
#-----------------------------------------------------------------------------------------------------------
def GravSimTest():
Debug("")
if 0 != GravSimEmpty("barystate/Sun.txt", astronomy.Body.SSB, astronomy.Body.Sun, 0.0269, 1.9635): return 1
if 0 != GravSimEmpty("barystate/Mercury.txt", astronomy.Body.SSB, astronomy.Body.Mercury, 0.5725, 0.9332): return 1
if 0 != GravSimEmpty("barystate/Venus.txt", astronomy.Body.SSB, astronomy.Body.Venus, 0.1433, 0.1458): return 1
if 0 != GravSimEmpty("barystate/Earth.txt", astronomy.Body.SSB, astronomy.Body.Earth, 0.0651, 0.2098): return 1
if 0 != GravSimEmpty("barystate/Mars.txt", astronomy.Body.SSB, astronomy.Body.Mars, 0.1150, 0.1896): return 1
if 0 != GravSimEmpty("barystate/Jupiter.txt", astronomy.Body.SSB, astronomy.Body.Jupiter, 0.2546, 0.8831): return 1
if 0 != GravSimEmpty("barystate/Saturn.txt", astronomy.Body.SSB, astronomy.Body.Saturn, 0.3660, 1.0818): return 1
if 0 != GravSimEmpty("barystate/Uranus.txt", astronomy.Body.SSB, astronomy.Body.Uranus, 0.3107, 0.9321): return 1
if 0 != GravSimEmpty("barystate/Neptune.txt", astronomy.Body.SSB, astronomy.Body.Neptune, 0.3382, 1.5586): return 1
if 0 != GravSimEmpty("heliostate/Mercury.txt", astronomy.Body.Sun, astronomy.Body.Mercury, 0.5087, 0.9473): return 1
if 0 != GravSimEmpty("heliostate/Venus.txt", astronomy.Body.Sun, astronomy.Body.Venus, 0.1214, 0.1543): return 1
if 0 != GravSimEmpty("heliostate/Earth.txt", astronomy.Body.Sun, astronomy.Body.Earth, 0.0508, 0.2099): return 1
if 0 != GravSimEmpty("heliostate/Mars.txt", astronomy.Body.Sun, astronomy.Body.Mars, 0.1085, 0.1927): return 1
if 0 != GravSimEmpty("heliostate/Jupiter.txt", astronomy.Body.Sun, astronomy.Body.Jupiter, 0.2564, 0.8805): return 1
if 0 != GravSimEmpty("heliostate/Saturn.txt", astronomy.Body.Sun, astronomy.Body.Saturn, 0.3664, 1.0826): return 1
if 0 != GravSimEmpty("heliostate/Uranus.txt", astronomy.Body.Sun, astronomy.Body.Uranus, 0.3106, 0.9322): return 1
if 0 != GravSimEmpty("heliostate/Neptune.txt", astronomy.Body.Sun, astronomy.Body.Neptune, 0.3381, 1.5584): return 1
Debug("")
nsteps = 20
if 0 != GravSimFile("barystate/Ceres.txt", astronomy.Body.SSB, nsteps, 0.6640, 0.6226): return 1
if 0 != GravSimFile("barystate/Pallas.txt", astronomy.Body.SSB, nsteps, 0.4687, 0.3474): return 1
if 0 != GravSimFile("barystate/Vesta.txt", astronomy.Body.SSB, nsteps, 0.5806, 0.5462): return 1
if 0 != GravSimFile("barystate/Juno.txt", astronomy.Body.SSB, nsteps, 0.6760, 0.5750): return 1
if 0 != GravSimFile("barystate/Bennu.txt", astronomy.Body.SSB, nsteps, 3.7444, 2.6581): return 1
if 0 != GravSimFile("barystate/Halley.txt", astronomy.Body.SSB, nsteps, 0.0539, 0.0825): return 1
if 0 != GravSimFile("heliostate/Ceres.txt", astronomy.Body.Sun, nsteps, 0.0445, 0.0355): return 1
if 0 != GravSimFile("heliostate/Pallas.txt", astronomy.Body.Sun, nsteps, 0.1062, 0.0854): return 1
if 0 != GravSimFile("heliostate/Vesta.txt", astronomy.Body.Sun, nsteps, 0.1432, 0.1308): return 1
if 0 != GravSimFile("heliostate/Juno.txt", astronomy.Body.Sun, nsteps, 0.1554, 0.1328): return 1
if 0 != GravSimFile("geostate/Ceres.txt", astronomy.Body.Earth, nsteps, 6.5689, 6.4797): return 1
if 0 != GravSimFile("geostate/Pallas.txt", astronomy.Body.Earth, nsteps, 9.3288, 7.3533): return 1
if 0 != GravSimFile("geostate/Vesta.txt", astronomy.Body.Earth, nsteps, 3.2980, 3.8863): return 1
if 0 != GravSimFile("geostate/Juno.txt", astronomy.Body.Earth, nsteps, 6.0962, 7.7147): return 1
Debug("")
print("PY GravSimTest: PASS")
return 0
def GravSimEmpty(filename, origin, body, rthresh, vthresh):
max_rdiff = 0.0
max_vdiff = 0.0
sim = None
for rec in JplHorizonsStateVectors(filename):
if sim is None:
sim = astronomy.GravitySimulator(origin, rec.state.t, [])
sim.Update(rec.state.t)
calc = sim.SolarSystemBodyState(body)
if origin == astronomy.Body.SSB and body == astronomy.Body.Sun:
rdiff = SsbArcminPosError(rec.state, calc)
else:
rdiff = ArcminPosError(rec.state, calc)
if rdiff > rthresh:
print('PY GravSimEmpty({} line {}): excessive position error = {} arcmin.'.format(filename, rec.lnum, rdiff))
return 1
if rdiff > max_rdiff:
max_rdiff = rdiff
vdiff = ArcminVelError(rec.state, calc)
if vdiff > vthresh:
print('PY GravSimEmpty({} line {}): excessive velocity error = {} arcmin.'.format(filename, rec.lnum, vdiff))
return 1
if vdiff > max_vdiff:
max_vdiff = vdiff
Debug('PY GravSimEmpty({:22s}): PASS - max pos error = {:0.4f} arcmin, max vel error = {:0.4f} arcmin.'.format(filename, max_rdiff, max_vdiff))
return 0
def GravSimFile(filename, originBody, nsteps, rthresh, vthresh):
sim = None
max_rdiff = 0.0
max_vdiff = 0.0
for rec in JplHorizonsStateVectors(filename):
if sim is None:
sim = astronomy.GravitySimulator(originBody, rec.state.t, [rec.state])
time = rec.state.t
smallBodyArray = sim.Update(time)
else:
tt1 = prev.state.t.tt
tt2 = rec.state.t.tt
dt = (tt2 - tt1) / nsteps
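            # Advance the simulator through nsteps equal TT substeps between
            # consecutive Horizons records, so the integrator is exercised with
            # a realistic step size rather than one giant leap per record.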
for k in range(1, nsteps+1):
time = astronomy.Time.FromTerrestrialTime(tt1 + k*dt)
smallBodyArray = sim.Update(time)
if len(smallBodyArray) != 1:
print('PY GravSimFile({} line {}): unexpected smallBodyArray.length = {}'.format(filename, rec.lnum, len(smallBodyArray)))
return 1
if time.tt != sim.GetTime().tt:
print('PY GravSimFile({} line {}): expected {} but simulator reports {}'.format(filename, rec.lnum, time, sim.GetTime()))
return 1
rdiff = ArcminPosError(rec.state, smallBodyArray[0])
if rdiff > rthresh:
print('PY GravSimFile({} line {}): excessive position error = {}'.format(filename, rec.lnum, rdiff))
return 1
if rdiff > max_rdiff:
max_rdiff = rdiff
vdiff = ArcminVelError(rec.state, smallBodyArray[0])
if vdiff > vthresh:
            print('PY GravSimFile({} line {}): excessive velocity error = {}'.format(filename, rec.lnum, vdiff))
return 1
if vdiff > max_vdiff:
max_vdiff = vdiff
prev = rec
Debug('PY GravSimFile({:22s}): PASS - max pos error = {:0.4f} arcmin, max vel error = {:0.4f} arcmin.'.format(filename, max_rdiff, max_vdiff))
return 0
def SsbArcminPosError(correct, calc):
# Scale the SSB based on 1 AU, not on its absolute magnitude, which can become very close to zero.
dx = calc.x - correct.x
dy = calc.y - correct.y
dz = calc.z - correct.z
diffSquared = dx*dx + dy*dy + dz*dz
radians = sqrt(diffSquared)
return 60.0 * math.degrees(radians)
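# ArcminPosError treats the relative position error as a small angle in radians.
# For example, a relative error of 2.9e-4 corresponds to about
# 60 * degrees(2.9e-4) ~= 1.0 arcminute.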
def ArcminPosError(correct, calc):
dx = calc.x - correct.x
dy = calc.y - correct.y
dz = calc.z - correct.z
diffSquared = dx*dx + dy*dy + dz*dz
magSquared = correct.x*correct.x + correct.y*correct.y + correct.z*correct.z
radians = sqrt(diffSquared / magSquared)
return 60.0 * math.degrees(radians)
def ArcminVelError(correct, calc):
dx = calc.vx - correct.vx
dy = calc.vy - correct.vy
dz = calc.vz - correct.vz
diffSquared = dx*dx + dy*dy + dz*dz
magSquared = correct.vx*correct.vx + correct.vy*correct.vy + correct.vz*correct.vz
radians = sqrt(diffSquared / magSquared)
return 60.0 * math.degrees(radians)
#-----------------------------------------------------------------------------------------------------------
def CheckDecemberSolstice(year, expected):
si = astronomy.Seasons(year)
actual = str(si.dec_solstice)
if actual != expected:
print('PY DatesIssue250: FAIL: year {}, expected [{}], actual [{}]'.format(year, expected, actual))
return 1
return 0
def DatesIssue250():
# Make sure we can handle dates outside the range supported by System.DateTime.
# https://github.com/cosinekitty/astronomy/issues/250
return (
CheckDecemberSolstice( 2022, "2022-12-21T21:47:54.455Z") or
CheckDecemberSolstice(-2300, "-002300-12-19T16:22:27.929Z") or
CheckDecemberSolstice(12345, "+012345-12-11T13:30:10.276Z") or
Pass('DatesIssue250')
)
#-----------------------------------------------------------------------------------------------------------
def LunarFractionCase(year, month, day, obscuration):
time = astronomy.Time.Make(year, month, day, 0, 0, 0.0)
eclipse = astronomy.SearchLunarEclipse(time)
# This should be a partial lunar eclipse.
if eclipse.kind != astronomy.EclipseKind.Partial:
print('PY LunarFractionCase({:04d}-{:02d}-{:02d}) FAIL: expected partial eclipse, but found {}.'.format(year, month, day, eclipse.kind))
return 1
# The partial eclipse should always happen within 24 hours of the given date.
dt = v(eclipse.peak.ut - time.ut)
if dt < 0.0 or dt > 1.0:
print('PY LunarFractionCase({:04d}-{:02d}-{:02d}) FAIL: eclipse occurs {:0.4f} days after predicted date.'.format(year, month, day, dt))
return 1
diff = v(eclipse.obscuration - obscuration)
if abs(diff) > 0.00763:
print('PY LunarFractionCase({:04d}-{:02d}-{:02d}) FAIL: excessive obscuration diff = {:0.8f}, expected = {:0.8f}, actual = {:0.8f}'.format(year, month, day, diff, obscuration, eclipse.obscuration))
return 1
Debug('PY LunarFractionCase({:04d}-{:02d}-{:02d}): obscuration diff = {:11.8f}'.format(year, month, day, diff))
return 0
def LunarFraction():
# Verify calculation of the fraction of the Moon's disc covered by the Earth's umbra during a partial eclipse.
# Data for this is more tedious to gather, because Espenak data does not contain it.
# We already verify fraction=0.0 for penumbral eclipses and fraction=1.0 for total eclipses in LunarEclipseTest.
return (
LunarFractionCase(2010, 6, 26, 0.506) or # https://www.timeanddate.com/eclipse/lunar/2010-june-26
LunarFractionCase(2012, 6, 4, 0.304) or # https://www.timeanddate.com/eclipse/lunar/2012-june-4
LunarFractionCase(2013, 4, 25, 0.003) or # https://www.timeanddate.com/eclipse/lunar/2013-april-25
LunarFractionCase(2017, 8, 7, 0.169) or # https://www.timeanddate.com/eclipse/lunar/2017-august-7
LunarFractionCase(2019, 7, 16, 0.654) or # https://www.timeanddate.com/eclipse/lunar/2019-july-16
LunarFractionCase(2021, 11, 19, 0.991) or # https://www.timeanddate.com/eclipse/lunar/2021-november-19
LunarFractionCase(2023, 10, 28, 0.060) or # https://www.timeanddate.com/eclipse/lunar/2023-october-28
LunarFractionCase(2024, 9, 18, 0.035) or # https://www.timeanddate.com/eclipse/lunar/2024-september-18
LunarFractionCase(2026, 8, 28, 0.962) or # https://www.timeanddate.com/eclipse/lunar/2026-august-28
LunarFractionCase(2028, 1, 12, 0.024) or # https://www.timeanddate.com/eclipse/lunar/2028-january-12
LunarFractionCase(2028, 7, 6, 0.325) or # https://www.timeanddate.com/eclipse/lunar/2028-july-6
LunarFractionCase(2030, 6, 15, 0.464) or # https://www.timeanddate.com/eclipse/lunar/2030-june-15
Pass('LunarFraction')
)
#-----------------------------------------------------------------------------------------------------------
def StarRiseSetCulmCase(starName, ra, dec, distLy, observer, year, month, day, riseHour, riseMinute, culmHour, culmMinute, setHour, setMinute):
func = 'StarRiseSetCulmCase({})'.format(starName)
# Calculate expected event times.
expectedRiseTime = astronomy.Time.Make(year, month, day, riseHour, riseMinute, 0.0)
expectedCulmTime = astronomy.Time.Make(year, month, day, culmHour, culmMinute, 0.0)
expectedSetTime = astronomy.Time.Make(year, month, day, setHour, setMinute, 0.0)
# Define a custom star object.
astronomy.DefineStar(astronomy.Body.Star1, ra, dec, distLy)
# Use Astronomy Engine to search for event times.
searchTime = astronomy.Time.Make(year, month, day, 0, 0, 0.0)
rise = astronomy.SearchRiseSet(astronomy.Body.Star1, observer, astronomy.Direction.Rise, searchTime, 1.0)
if rise is None:
return Fail(func, 'Star rise search failed.')
culm = astronomy.SearchHourAngle(astronomy.Body.Star1, observer, 0.0, searchTime, +1)
if culm is None:
return Fail(func, 'Star culmination search failed.')
set = astronomy.SearchRiseSet(astronomy.Body.Star1, observer, astronomy.Direction.Set, searchTime, 1.0)
if set is None:
return Fail(func, 'Star set search failed.')
# Compare expected times with calculated times.
rdiff = MINUTES_PER_DAY * vabs(expectedRiseTime.ut - rise.ut)
cdiff = MINUTES_PER_DAY * vabs(expectedCulmTime.ut - culm.time.ut)
sdiff = MINUTES_PER_DAY * vabs(expectedSetTime.ut - set.ut)
Debug("{}: minutes rdiff = {:0.4f}, cdiff = {:0.4f}, sdiff = {:0.4f}".format(func, rdiff, cdiff, sdiff))
if rdiff > 0.5: return Fail(func, "excessive rise time error = {:0.4f} minutes.".format(rdiff))
if cdiff > 0.5: return Fail(func, "excessive culm time error = {:0.4f} minutes.".format(cdiff))
if sdiff > 0.5: return Fail(func, "excessive set time error = {:0.4f} minutes.".format(sdiff))
return 0
def StarRiseSetCulm():
observer = astronomy.Observer(+25.77, -80.19, 0.0)
return (
StarRiseSetCulmCase("Sirius", 6.7525, -16.7183, 8.6, observer, 2022, 11, 21, 2, 37, 8, 6, 13, 34) or
StarRiseSetCulmCase("Sirius", 6.7525, -16.7183, 8.6, observer, 2022, 11, 25, 2, 22, 7, 50, 13, 18) or
StarRiseSetCulmCase("Canopus", 6.3992, -52.6956, 310.0, observer, 2022, 11, 21, 4, 17, 7, 44, 11, 11) or
StarRiseSetCulmCase("Canopus", 6.3992, -52.6956, 310.0, observer, 2022, 11, 25, 4, 1, 7, 28, 10, 56) or
Pass("StarRiseSetCulm")
)
#-----------------------------------------------------------------------------------------------------------
class HourAngleTester:
def __init__(self):
self.cases = 0
self.maxdiff = 0.0
def Case(self, latitude, longitude, hourAngle):
threshold = 0.1 / 3600 # SearchHourAngle() accuracy: 0.1 seconds converted to hours
observer = astronomy.Observer(latitude, longitude, 0)
startTime = astronomy.Time.Make(2023, 2, 11, 0, 0, 0)
search = astronomy.SearchHourAngle(astronomy.Body.Sun, observer, hourAngle, startTime, +1)
calc = astronomy.HourAngle(astronomy.Body.Sun, search.time, observer)
diff = vabs(calc - hourAngle)
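        # Hour angles wrap at 24 hours, so measure the difference the short way
        # around the circle (e.g. |23.9 - 0.1| = 23.8 wraps to 0.2 hours).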
if diff > 12.0:
            diff = 24.0 - diff
if diff > self.maxdiff:
self.maxdiff = diff
self.cases += 1
if diff > threshold:
print('PY HourAngleCase: EXCESSIVE ERROR = {:0.6e}, calc HA = {:0.16f}, for hourAngle={:0.1f}'.format(diff, calc, hourAngle))
return False
Debug('PY HourAngleCase: Hour angle = {:4.1f}, longitude = {:6.1f}, diff = {:9.4e}'.format(hourAngle, longitude, diff))
return True
def Pass(self):
print('PY HourAngle ({:d} cases, maxdiff = {:9.4e}): PASS'.format(self.cases, self.maxdiff))
return 0
def HourAngle():
tester = HourAngleTester()
latitude = 35
longitude = -170
while longitude <= 180:
hour = 0
while hour < 24:
if not tester.Case(latitude, longitude, hour):
return 1
hour += 1
longitude += 5
return tester.Pass()
#-----------------------------------------------------------------------------------------------------------
def Atmosphere():
filename = 'riseset/atmosphere.csv'
maxdiff = 0.0
ncases = 0
tolerance = 8.8e-11
with open(filename, 'rt') as infile:
lnum = 0
for line in infile:
line = line.strip()
lnum += 1
if lnum == 1:
if line != 'elevation,temperature,pressure,density,relative_density':
return Fail('Atmosphere', 'Expected header line but found [{}]'.format(line))
else:
tokens = line.split(',')
if len(tokens) != 5:
return Fail('Atmosphere({} line {})'.format(filename, lnum), 'expected 5 numeric tokens but found {}'.format(len(tokens)))
elevation = v(float(tokens[0]))
temperature = v(float(tokens[1]))
pressure = v(float(tokens[2]))
# ignore tokens[3] = absolute_density
relative_density = v(float(tokens[4]))
atmos = astronomy.Atmosphere(elevation)
diff = vabs(atmos.temperature - temperature)
maxdiff = max(maxdiff, diff)
if diff > tolerance:
return Fail('Atmosphere', 'EXCESSIVE temperature difference = {}'.format(diff))
diff = vabs(atmos.pressure - pressure)
maxdiff = max(maxdiff, diff)
if diff > tolerance:
return Fail('Atmosphere', 'EXCESSIVE pressure difference = {}'.format(diff))
diff = vabs(atmos.density - relative_density)
maxdiff = max(maxdiff, diff)
if diff > tolerance:
return Fail('Atmosphere', 'EXCESSIVE density difference = {}'.format(diff))
ncases += 1
if ncases != 34:
return Fail('Atmosphere', 'expected 34 cases but found {}'.format(ncases))
return Pass('Atmosphere')
#-----------------------------------------------------------------------------------------------------------
def RiseSetElevationBodyCase(body, observer, direction, metersAboveGround, startTime, eventOffsetDays):
time = astronomy.SearchRiseSet(body, observer, direction, startTime, 2.0, metersAboveGround)
if not time:
return Fail('RiseSetElevationBodyCase {} {}: search failed.'.format(body, direction))
diff = v(time.ut - (startTime.ut + eventOffsetDays))
if diff > 0.5:
diff -= 1.0 # assume event actually takes place on the next day
diff = vabs(MINUTES_PER_DAY * diff) # convert signed days to absolute minutes
if diff > 0.5:
return Fail('RiseSetElevationBodyCase {} {}: EXCESSIVE diff = {}.'.format(body, direction, diff))
return 0
def RiseSetElevation():
regex = re.compile(r'^(\d+)-(\d+)-(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\d+):(\d+)\s+(\d+):(\d+)\s+(\d+):(\d+)\s+(\d+):(\d+)\s+(\S+)\s*$')
filename = 'riseset/elevation.txt'
with open(filename, 'rt') as infile:
lnum = 0
for line in infile:
lnum += 1
if line.startswith('#'):
continue
m = regex.match(line)
if not m:
return Fail('RiseSetElevation({} line {})'.format(filename, lnum), 'Invalid data format')
year = int(m.group(1))
month = int(m.group(2))
day = int(m.group(3))
latitude = v(float(m.group(4)))
longitude = v(float(m.group(5)))
height = v(float(m.group(6)))
metersAboveGround = v(float(m.group(7)))
srh = int(m.group( 8))
srm = int(m.group( 9))
ssh = int(m.group(10))
ssm = int(m.group(11))
mrh = int(m.group(12))
mrm = int(m.group(13))
msh = int(m.group(14))
msm = int(m.group(15))
# Get search origin time
time = astronomy.Time.Make(year, month, day, 0, 0, 0.0)
# Convert scanned values into sunrise, sunset, moonrise, moonset day offsets.
sr = (srh + srm/60.0) / 24.0
ss = (ssh + ssm/60.0) / 24.0
mr = (mrh + mrm/60.0) / 24.0
ms = (msh + msm/60.0) / 24.0
observer = astronomy.Observer(latitude, longitude, height)
if (0 != RiseSetElevationBodyCase(astronomy.Body.Sun, observer, astronomy.Direction.Rise, metersAboveGround, time, sr) or
0 != RiseSetElevationBodyCase(astronomy.Body.Sun, observer, astronomy.Direction.Set, metersAboveGround, time, ss) or
0 != RiseSetElevationBodyCase(astronomy.Body.Moon, observer, astronomy.Direction.Rise, metersAboveGround, time, mr) or
0 != RiseSetElevationBodyCase(astronomy.Body.Moon, observer, astronomy.Direction.Set, metersAboveGround, time, ms)):
return 1
return Pass('RiseSetElevation')
#-----------------------------------------------------------------------------------------------------------
UnitTests = {
'aberration': Aberration,
'atmosphere': Atmosphere,
'axis': Axis,
'barystate': BaryState,
'constellation': Constellation,
'dates250': DatesIssue250,
'ecliptic': Ecliptic,
'elongation': Elongation,
'geoid': Geoid,
'global_solar_eclipse': GlobalSolarEclipse,
'gravsim': GravSimTest,
'heliostate': HelioState,
'hour_angle': HourAngle,
'issue_103': Issue103,
'jupiter_moons': JupiterMoons,
'lagrange': Lagrange,
'libration': Libration,
'local_solar_eclipse': LocalSolarEclipse,
'lunar_apsis': LunarApsis,
'lunar_eclipse': LunarEclipse,
'lunar_eclipse_78': LunarEclipseIssue78,
'lunar_fraction': LunarFraction,
'magnitude': Magnitude,
'moon': GeoMoon,
'moon_nodes': MoonNodes,
'moon_reverse': MoonReverse,
'moonphase': MoonPhase,
'planet_apsis': PlanetApsis,
'pluto': PlutoCheck,
'refraction': Refraction,
'repr': Repr,
'riseset': RiseSet,
'riseset_elevation': RiseSetElevation,
'riseset_reverse': RiseSetReverse,
'rotation': Rotation,
'seasons': Seasons,
'seasons187': SeasonsIssue187,
'sidereal': SiderealTime,
'solar_fraction': SolarFraction,
'star_risesetculm': StarRiseSetCulm,
'time': AstroTime,
'topostate': TopoState,
'transit': Transit,
'twilight': Twilight,
}
#-----------------------------------------------------------------------------------------------------------
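# Command-line usage, as implied by the dispatch logic below:
#   python3 test.py hour_angle      # run a single unit test by name
#   python3 test.py -v riseset      # same, with Debug output enabled
#   python3 test.py all             # run every registered unit test
#   python3 test.py astro_check    # run the AstroCheck consistency pass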
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == '-v':
sys.argv = sys.argv[1:]
Verbose = True
if len(sys.argv) == 2:
name = sys.argv[1]
if name in UnitTests:
sys.exit(UnitTests[name]())
if name in ['astro_check', 'astro_profile']:
sys.exit(AstroCheck(sys.argv[1] == 'astro_check'))
if name == 'all':
for name in sorted(UnitTests.keys()):
func = UnitTests[name]
Debug('test.py: Starting test "{}"'.format(name))
rc = func()
Debug('test.py: Test "{}" returned {}'.format(name, rc))
if rc != 0:
sys.exit(1)
print('test.py: ALL PASS')
sys.exit(0)
print('test.py: Invalid command line arguments.')
sys.exit(1)
| [
"[email protected]"
] | |
44c7ea4c09cf76c997f45dc2c463741f3ba3af03 | 5f6425e9d83b57b864e48f227e1dc58356a555c0 | /utils/palettes/personalized/piotr_kozlowski.py | 8f12ab59b92e28ce4bf7bd066c55b145ec16a2f9 | [
"MIT"
] | permissive | jan-warchol/selenized | b374fa7822f281b16aa8b52e34bd1e585db75904 | df1c7f1f94f22e2c717f8224158f6f4097c5ecbe | refs/heads/master | 2023-06-22T09:37:02.962677 | 2022-09-12T20:24:40 | 2022-09-12T20:24:40 | 45,570,283 | 663 | 58 | MIT | 2023-04-18T09:33:22 | 2015-11-04T22:00:52 | Emacs Lisp | UTF-8 | Python | false | false | 217 | py | import selenized_base
name = 'Piotr Kozlowski'
palette = selenized_base.generate_palette(
background=(97, 0, 8),
foreground=(25, -6, -6),
saturation=1.4,
accent_offset=5,
accent_l_spread=30,
)
| [
"[email protected]"
] | |
dac13f5e5e05cde9a0e794a85a9b310f0cb35528 | bc5f2a7cce6093ba1f5785b02215df34d7e48a68 | /src/YYOwnBlog/settings.py | e80d47fc13792931076af62f165026ea519a18bb | [] | no_license | congyingTech/YYOwnBlog | 5b84bfd1cb7884f323ec76a67ec26bb1afc8a406 | 57718ba3363385ac4da0840f56d7cd15f903d512 | refs/heads/master | 2021-01-20T20:24:43.736682 | 2016-06-17T03:20:36 | 2016-06-17T03:20:36 | 61,342,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,290 | py | """
Django settings for YYOwnBlog project.
Generated by 'django-admin startproject' using Django 1.9.6.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '(d0dmfin1@=g8-16bs&r&-4u-cbd@aj_kf6ak0n=*uc%qf$!_q'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
    'blog',  # don't forget to register the blog app in INSTALLED_APPS
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'YYOwnBlog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates').replace('\\', '/')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'YYOwnBlog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
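# Sketch of a PostgreSQL backend for production use (commented out;
# credentials are placeholders and psycopg2 must be installed):
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.postgresql',
#         'NAME': 'yyownblog',
#         'USER': 'blog',
#         'PASSWORD': '<password>',
#         'HOST': 'localhost',
#         'PORT': '5432',
#     }
# }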
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
452f266344d14193f6028c183b1c1184c8728329 | fab14fae2b494068aa793901d76464afb965df7e | /benchmarks/ltl_maxplus/f3/maxplus_24_91.py | 1be82cb21b86b50ca49efd9cc197df337fc8d3d4 | [
"MIT"
] | permissive | teodorov/F3 | 673f6f9ccc25acdfdecbfc180f439253474ba250 | c863215c318d7d5f258eb9be38c6962cf6863b52 | refs/heads/master | 2023-08-04T17:37:38.771863 | 2021-09-16T07:38:28 | 2021-09-16T07:38:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60,763 | py |
from collections.abc import Iterable  # moved to collections.abc in Python 3.3
from mathsat import msat_term, msat_env
from mathsat import msat_make_true, msat_make_false
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_rational_type
from mathsat import msat_make_and as _msat_make_and
from mathsat import msat_make_or as _msat_make_or
from mathsat import msat_make_not
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next
def msat_make_and(menv: msat_env, *args):
if len(args) == 0:
return msat_make_true(menv)
if len(args) == 1:
return args[0]
res = _msat_make_and(menv, args[0], args[1])
for arg in args[2:]:
res = _msat_make_and(menv, res, arg)
return res
def msat_make_or(menv: msat_env, *args):
if len(args) == 0:
return msat_make_false(menv)
if len(args) == 1:
return args[0]
res = _msat_make_or(menv, args[0], args[1])
for arg in args[2:]:
res = _msat_make_or(menv, res, arg)
return res
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
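    # Only plus/times (not minus) are imported from the MathSAT bindings
    # above, so subtraction is encoded as arg0 + (-1 * arg1).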
n_m1 = msat_make_number(menv, "-1")
arg1 = msat_make_times(menv, arg1, n_m1)
return msat_make_plus(menv, arg0, arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
geq = msat_make_geq(menv, arg0, arg1)
return msat_make_not(menv, geq)
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
leq = msat_make_leq(menv, arg0, arg1)
return msat_make_not(menv, leq)
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
n_arg0 = msat_make_not(menv, arg0)
return msat_make_or(menv, n_arg0, arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
real_type = msat_get_rational_type(menv)
names = ["x_0", "x_1", "x_2", "x_3", "x_4", "x_5", "x_6", "x_7", "x_8", "x_9", "x_10", "x_11", "x_12", "x_13", "x_14", "x_15", "x_16", "x_17", "x_18", "x_19", "x_20", "x_21", "x_22", "x_23"]
xs = [msat_declare_function(menv, name, real_type)
for name in names]
xs = [msat_make_constant(menv, x) for x in xs]
x_xs = [msat_declare_function(menv, name_next(name), real_type)
for name in names]
x_xs = [msat_make_constant(menv, x_x) for x_x in x_xs]
curr2next = {x: x_x for x, x_x in zip(xs, x_xs)}
n_10_0 = msat_make_number(menv, "10.0")
n_11_0 = msat_make_number(menv, "11.0")
n_12_0 = msat_make_number(menv, "12.0")
n_13_0 = msat_make_number(menv, "13.0")
n_14_0 = msat_make_number(menv, "14.0")
n_15_0 = msat_make_number(menv, "15.0")
n_16_0 = msat_make_number(menv, "16.0")
n_17_0 = msat_make_number(menv, "17.0")
n_18_0 = msat_make_number(menv, "18.0")
n_19_0 = msat_make_number(menv, "19.0")
n_1_0 = msat_make_number(menv, "1.0")
n_20_0 = msat_make_number(menv, "20.0")
n_2_0 = msat_make_number(menv, "2.0")
n_3_0 = msat_make_number(menv, "3.0")
n_4_0 = msat_make_number(menv, "4.0")
n_5_0 = msat_make_number(menv, "5.0")
n_6_0 = msat_make_number(menv, "6.0")
n_7_0 = msat_make_number(menv, "7.0")
n_8_0 = msat_make_number(menv, "8.0")
n_9_0 = msat_make_number(menv, "9.0")
init = msat_make_true(menv)
trans = msat_make_true(menv)
# transitions
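    # Max-plus dynamics: each next-state variable x_i' is constrained to
    # equal max_j (x_j + c_ij) over twelve predecessor terms. The geq
    # conjunction makes x_i' an upper bound on every term, and the
    # equality disjunction forces the bound to be attained by some term.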
expr0 = msat_make_plus(menv, xs[3], n_9_0)
expr1 = msat_make_plus(menv, xs[4], n_5_0)
expr2 = msat_make_plus(menv, xs[6], n_16_0)
expr3 = msat_make_plus(menv, xs[9], n_15_0)
expr4 = msat_make_plus(menv, xs[10], n_8_0)
expr5 = msat_make_plus(menv, xs[12], n_10_0)
expr6 = msat_make_plus(menv, xs[14], n_9_0)
expr7 = msat_make_plus(menv, xs[15], n_18_0)
expr8 = msat_make_plus(menv, xs[17], n_4_0)
expr9 = msat_make_plus(menv, xs[18], n_6_0)
expr10 = msat_make_plus(menv, xs[21], n_4_0)
expr11 = msat_make_plus(menv, xs[22], n_18_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[0], expr0),
msat_make_geq(menv, x_xs[0], expr1),
msat_make_geq(menv, x_xs[0], expr2),
msat_make_geq(menv, x_xs[0], expr3),
msat_make_geq(menv, x_xs[0], expr4),
msat_make_geq(menv, x_xs[0], expr5),
msat_make_geq(menv, x_xs[0], expr6),
msat_make_geq(menv, x_xs[0], expr7),
msat_make_geq(menv, x_xs[0], expr8),
msat_make_geq(menv, x_xs[0], expr9),
msat_make_geq(menv, x_xs[0], expr10),
msat_make_geq(menv, x_xs[0], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[0], expr0),
msat_make_equal(menv, x_xs[0], expr1),
msat_make_equal(menv, x_xs[0], expr2),
msat_make_equal(menv, x_xs[0], expr3),
msat_make_equal(menv, x_xs[0], expr4),
msat_make_equal(menv, x_xs[0], expr5),
msat_make_equal(menv, x_xs[0], expr6),
msat_make_equal(menv, x_xs[0], expr7),
msat_make_equal(menv, x_xs[0], expr8),
msat_make_equal(menv, x_xs[0], expr9),
msat_make_equal(menv, x_xs[0], expr10),
msat_make_equal(menv, x_xs[0], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_7_0)
expr1 = msat_make_plus(menv, xs[2], n_19_0)
expr2 = msat_make_plus(menv, xs[3], n_11_0)
expr3 = msat_make_plus(menv, xs[6], n_12_0)
expr4 = msat_make_plus(menv, xs[8], n_14_0)
expr5 = msat_make_plus(menv, xs[11], n_11_0)
expr6 = msat_make_plus(menv, xs[12], n_18_0)
expr7 = msat_make_plus(menv, xs[14], n_17_0)
expr8 = msat_make_plus(menv, xs[15], n_10_0)
expr9 = msat_make_plus(menv, xs[16], n_8_0)
expr10 = msat_make_plus(menv, xs[18], n_11_0)
expr11 = msat_make_plus(menv, xs[22], n_9_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[1], expr0),
msat_make_geq(menv, x_xs[1], expr1),
msat_make_geq(menv, x_xs[1], expr2),
msat_make_geq(menv, x_xs[1], expr3),
msat_make_geq(menv, x_xs[1], expr4),
msat_make_geq(menv, x_xs[1], expr5),
msat_make_geq(menv, x_xs[1], expr6),
msat_make_geq(menv, x_xs[1], expr7),
msat_make_geq(menv, x_xs[1], expr8),
msat_make_geq(menv, x_xs[1], expr9),
msat_make_geq(menv, x_xs[1], expr10),
msat_make_geq(menv, x_xs[1], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[1], expr0),
msat_make_equal(menv, x_xs[1], expr1),
msat_make_equal(menv, x_xs[1], expr2),
msat_make_equal(menv, x_xs[1], expr3),
msat_make_equal(menv, x_xs[1], expr4),
msat_make_equal(menv, x_xs[1], expr5),
msat_make_equal(menv, x_xs[1], expr6),
msat_make_equal(menv, x_xs[1], expr7),
msat_make_equal(menv, x_xs[1], expr8),
msat_make_equal(menv, x_xs[1], expr9),
msat_make_equal(menv, x_xs[1], expr10),
msat_make_equal(menv, x_xs[1], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_18_0)
expr1 = msat_make_plus(menv, xs[2], n_7_0)
expr2 = msat_make_plus(menv, xs[3], n_1_0)
expr3 = msat_make_plus(menv, xs[4], n_13_0)
expr4 = msat_make_plus(menv, xs[6], n_9_0)
expr5 = msat_make_plus(menv, xs[7], n_8_0)
expr6 = msat_make_plus(menv, xs[8], n_18_0)
expr7 = msat_make_plus(menv, xs[10], n_7_0)
expr8 = msat_make_plus(menv, xs[13], n_2_0)
expr9 = msat_make_plus(menv, xs[15], n_18_0)
expr10 = msat_make_plus(menv, xs[18], n_14_0)
expr11 = msat_make_plus(menv, xs[21], n_8_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[2], expr0),
msat_make_geq(menv, x_xs[2], expr1),
msat_make_geq(menv, x_xs[2], expr2),
msat_make_geq(menv, x_xs[2], expr3),
msat_make_geq(menv, x_xs[2], expr4),
msat_make_geq(menv, x_xs[2], expr5),
msat_make_geq(menv, x_xs[2], expr6),
msat_make_geq(menv, x_xs[2], expr7),
msat_make_geq(menv, x_xs[2], expr8),
msat_make_geq(menv, x_xs[2], expr9),
msat_make_geq(menv, x_xs[2], expr10),
msat_make_geq(menv, x_xs[2], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[2], expr0),
msat_make_equal(menv, x_xs[2], expr1),
msat_make_equal(menv, x_xs[2], expr2),
msat_make_equal(menv, x_xs[2], expr3),
msat_make_equal(menv, x_xs[2], expr4),
msat_make_equal(menv, x_xs[2], expr5),
msat_make_equal(menv, x_xs[2], expr6),
msat_make_equal(menv, x_xs[2], expr7),
msat_make_equal(menv, x_xs[2], expr8),
msat_make_equal(menv, x_xs[2], expr9),
msat_make_equal(menv, x_xs[2], expr10),
msat_make_equal(menv, x_xs[2], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_4_0)
expr1 = msat_make_plus(menv, xs[2], n_2_0)
expr2 = msat_make_plus(menv, xs[3], n_9_0)
expr3 = msat_make_plus(menv, xs[5], n_7_0)
expr4 = msat_make_plus(menv, xs[6], n_5_0)
expr5 = msat_make_plus(menv, xs[9], n_10_0)
expr6 = msat_make_plus(menv, xs[11], n_8_0)
expr7 = msat_make_plus(menv, xs[13], n_14_0)
expr8 = msat_make_plus(menv, xs[15], n_13_0)
expr9 = msat_make_plus(menv, xs[18], n_7_0)
expr10 = msat_make_plus(menv, xs[20], n_2_0)
expr11 = msat_make_plus(menv, xs[22], n_8_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[3], expr0),
msat_make_geq(menv, x_xs[3], expr1),
msat_make_geq(menv, x_xs[3], expr2),
msat_make_geq(menv, x_xs[3], expr3),
msat_make_geq(menv, x_xs[3], expr4),
msat_make_geq(menv, x_xs[3], expr5),
msat_make_geq(menv, x_xs[3], expr6),
msat_make_geq(menv, x_xs[3], expr7),
msat_make_geq(menv, x_xs[3], expr8),
msat_make_geq(menv, x_xs[3], expr9),
msat_make_geq(menv, x_xs[3], expr10),
msat_make_geq(menv, x_xs[3], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[3], expr0),
msat_make_equal(menv, x_xs[3], expr1),
msat_make_equal(menv, x_xs[3], expr2),
msat_make_equal(menv, x_xs[3], expr3),
msat_make_equal(menv, x_xs[3], expr4),
msat_make_equal(menv, x_xs[3], expr5),
msat_make_equal(menv, x_xs[3], expr6),
msat_make_equal(menv, x_xs[3], expr7),
msat_make_equal(menv, x_xs[3], expr8),
msat_make_equal(menv, x_xs[3], expr9),
msat_make_equal(menv, x_xs[3], expr10),
msat_make_equal(menv, x_xs[3], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_20_0)
expr1 = msat_make_plus(menv, xs[3], n_4_0)
expr2 = msat_make_plus(menv, xs[4], n_12_0)
expr3 = msat_make_plus(menv, xs[5], n_3_0)
expr4 = msat_make_plus(menv, xs[6], n_16_0)
expr5 = msat_make_plus(menv, xs[11], n_6_0)
expr6 = msat_make_plus(menv, xs[14], n_16_0)
expr7 = msat_make_plus(menv, xs[15], n_9_0)
expr8 = msat_make_plus(menv, xs[16], n_1_0)
expr9 = msat_make_plus(menv, xs[18], n_6_0)
expr10 = msat_make_plus(menv, xs[21], n_3_0)
expr11 = msat_make_plus(menv, xs[22], n_12_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[4], expr0),
msat_make_geq(menv, x_xs[4], expr1),
msat_make_geq(menv, x_xs[4], expr2),
msat_make_geq(menv, x_xs[4], expr3),
msat_make_geq(menv, x_xs[4], expr4),
msat_make_geq(menv, x_xs[4], expr5),
msat_make_geq(menv, x_xs[4], expr6),
msat_make_geq(menv, x_xs[4], expr7),
msat_make_geq(menv, x_xs[4], expr8),
msat_make_geq(menv, x_xs[4], expr9),
msat_make_geq(menv, x_xs[4], expr10),
msat_make_geq(menv, x_xs[4], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[4], expr0),
msat_make_equal(menv, x_xs[4], expr1),
msat_make_equal(menv, x_xs[4], expr2),
msat_make_equal(menv, x_xs[4], expr3),
msat_make_equal(menv, x_xs[4], expr4),
msat_make_equal(menv, x_xs[4], expr5),
msat_make_equal(menv, x_xs[4], expr6),
msat_make_equal(menv, x_xs[4], expr7),
msat_make_equal(menv, x_xs[4], expr8),
msat_make_equal(menv, x_xs[4], expr9),
msat_make_equal(menv, x_xs[4], expr10),
msat_make_equal(menv, x_xs[4], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_10_0)
expr1 = msat_make_plus(menv, xs[3], n_7_0)
expr2 = msat_make_plus(menv, xs[5], n_14_0)
expr3 = msat_make_plus(menv, xs[6], n_9_0)
expr4 = msat_make_plus(menv, xs[7], n_11_0)
expr5 = msat_make_plus(menv, xs[9], n_18_0)
expr6 = msat_make_plus(menv, xs[10], n_10_0)
expr7 = msat_make_plus(menv, xs[11], n_18_0)
expr8 = msat_make_plus(menv, xs[15], n_20_0)
expr9 = msat_make_plus(menv, xs[17], n_2_0)
expr10 = msat_make_plus(menv, xs[18], n_17_0)
expr11 = msat_make_plus(menv, xs[21], n_8_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[5], expr0),
msat_make_geq(menv, x_xs[5], expr1),
msat_make_geq(menv, x_xs[5], expr2),
msat_make_geq(menv, x_xs[5], expr3),
msat_make_geq(menv, x_xs[5], expr4),
msat_make_geq(menv, x_xs[5], expr5),
msat_make_geq(menv, x_xs[5], expr6),
msat_make_geq(menv, x_xs[5], expr7),
msat_make_geq(menv, x_xs[5], expr8),
msat_make_geq(menv, x_xs[5], expr9),
msat_make_geq(menv, x_xs[5], expr10),
msat_make_geq(menv, x_xs[5], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[5], expr0),
msat_make_equal(menv, x_xs[5], expr1),
msat_make_equal(menv, x_xs[5], expr2),
msat_make_equal(menv, x_xs[5], expr3),
msat_make_equal(menv, x_xs[5], expr4),
msat_make_equal(menv, x_xs[5], expr5),
msat_make_equal(menv, x_xs[5], expr6),
msat_make_equal(menv, x_xs[5], expr7),
msat_make_equal(menv, x_xs[5], expr8),
msat_make_equal(menv, x_xs[5], expr9),
msat_make_equal(menv, x_xs[5], expr10),
msat_make_equal(menv, x_xs[5], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_18_0)
expr1 = msat_make_plus(menv, xs[3], n_14_0)
expr2 = msat_make_plus(menv, xs[4], n_2_0)
expr3 = msat_make_plus(menv, xs[5], n_5_0)
expr4 = msat_make_plus(menv, xs[6], n_8_0)
expr5 = msat_make_plus(menv, xs[9], n_7_0)
expr6 = msat_make_plus(menv, xs[11], n_17_0)
expr7 = msat_make_plus(menv, xs[15], n_5_0)
expr8 = msat_make_plus(menv, xs[16], n_6_0)
expr9 = msat_make_plus(menv, xs[19], n_16_0)
expr10 = msat_make_plus(menv, xs[20], n_20_0)
expr11 = msat_make_plus(menv, xs[22], n_10_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[6], expr0),
msat_make_geq(menv, x_xs[6], expr1),
msat_make_geq(menv, x_xs[6], expr2),
msat_make_geq(menv, x_xs[6], expr3),
msat_make_geq(menv, x_xs[6], expr4),
msat_make_geq(menv, x_xs[6], expr5),
msat_make_geq(menv, x_xs[6], expr6),
msat_make_geq(menv, x_xs[6], expr7),
msat_make_geq(menv, x_xs[6], expr8),
msat_make_geq(menv, x_xs[6], expr9),
msat_make_geq(menv, x_xs[6], expr10),
msat_make_geq(menv, x_xs[6], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[6], expr0),
msat_make_equal(menv, x_xs[6], expr1),
msat_make_equal(menv, x_xs[6], expr2),
msat_make_equal(menv, x_xs[6], expr3),
msat_make_equal(menv, x_xs[6], expr4),
msat_make_equal(menv, x_xs[6], expr5),
msat_make_equal(menv, x_xs[6], expr6),
msat_make_equal(menv, x_xs[6], expr7),
msat_make_equal(menv, x_xs[6], expr8),
msat_make_equal(menv, x_xs[6], expr9),
msat_make_equal(menv, x_xs[6], expr10),
msat_make_equal(menv, x_xs[6], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_3_0)
expr1 = msat_make_plus(menv, xs[3], n_18_0)
expr2 = msat_make_plus(menv, xs[4], n_10_0)
expr3 = msat_make_plus(menv, xs[5], n_9_0)
expr4 = msat_make_plus(menv, xs[7], n_20_0)
expr5 = msat_make_plus(menv, xs[10], n_3_0)
expr6 = msat_make_plus(menv, xs[12], n_4_0)
expr7 = msat_make_plus(menv, xs[14], n_16_0)
expr8 = msat_make_plus(menv, xs[18], n_20_0)
expr9 = msat_make_plus(menv, xs[20], n_14_0)
expr10 = msat_make_plus(menv, xs[21], n_18_0)
expr11 = msat_make_plus(menv, xs[22], n_6_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[7], expr0),
msat_make_geq(menv, x_xs[7], expr1),
msat_make_geq(menv, x_xs[7], expr2),
msat_make_geq(menv, x_xs[7], expr3),
msat_make_geq(menv, x_xs[7], expr4),
msat_make_geq(menv, x_xs[7], expr5),
msat_make_geq(menv, x_xs[7], expr6),
msat_make_geq(menv, x_xs[7], expr7),
msat_make_geq(menv, x_xs[7], expr8),
msat_make_geq(menv, x_xs[7], expr9),
msat_make_geq(menv, x_xs[7], expr10),
msat_make_geq(menv, x_xs[7], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[7], expr0),
msat_make_equal(menv, x_xs[7], expr1),
msat_make_equal(menv, x_xs[7], expr2),
msat_make_equal(menv, x_xs[7], expr3),
msat_make_equal(menv, x_xs[7], expr4),
msat_make_equal(menv, x_xs[7], expr5),
msat_make_equal(menv, x_xs[7], expr6),
msat_make_equal(menv, x_xs[7], expr7),
msat_make_equal(menv, x_xs[7], expr8),
msat_make_equal(menv, x_xs[7], expr9),
msat_make_equal(menv, x_xs[7], expr10),
msat_make_equal(menv, x_xs[7], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_10_0)
expr1 = msat_make_plus(menv, xs[2], n_2_0)
expr2 = msat_make_plus(menv, xs[3], n_14_0)
expr3 = msat_make_plus(menv, xs[4], n_6_0)
expr4 = msat_make_plus(menv, xs[10], n_8_0)
expr5 = msat_make_plus(menv, xs[11], n_3_0)
expr6 = msat_make_plus(menv, xs[12], n_14_0)
expr7 = msat_make_plus(menv, xs[14], n_4_0)
expr8 = msat_make_plus(menv, xs[16], n_15_0)
expr9 = msat_make_plus(menv, xs[18], n_5_0)
expr10 = msat_make_plus(menv, xs[20], n_3_0)
expr11 = msat_make_plus(menv, xs[23], n_8_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[8], expr0),
msat_make_geq(menv, x_xs[8], expr1),
msat_make_geq(menv, x_xs[8], expr2),
msat_make_geq(menv, x_xs[8], expr3),
msat_make_geq(menv, x_xs[8], expr4),
msat_make_geq(menv, x_xs[8], expr5),
msat_make_geq(menv, x_xs[8], expr6),
msat_make_geq(menv, x_xs[8], expr7),
msat_make_geq(menv, x_xs[8], expr8),
msat_make_geq(menv, x_xs[8], expr9),
msat_make_geq(menv, x_xs[8], expr10),
msat_make_geq(menv, x_xs[8], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[8], expr0),
msat_make_equal(menv, x_xs[8], expr1),
msat_make_equal(menv, x_xs[8], expr2),
msat_make_equal(menv, x_xs[8], expr3),
msat_make_equal(menv, x_xs[8], expr4),
msat_make_equal(menv, x_xs[8], expr5),
msat_make_equal(menv, x_xs[8], expr6),
msat_make_equal(menv, x_xs[8], expr7),
msat_make_equal(menv, x_xs[8], expr8),
msat_make_equal(menv, x_xs[8], expr9),
msat_make_equal(menv, x_xs[8], expr10),
msat_make_equal(menv, x_xs[8], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_7_0)
expr1 = msat_make_plus(menv, xs[3], n_15_0)
expr2 = msat_make_plus(menv, xs[4], n_2_0)
expr3 = msat_make_plus(menv, xs[5], n_1_0)
expr4 = msat_make_plus(menv, xs[10], n_19_0)
expr5 = msat_make_plus(menv, xs[11], n_12_0)
expr6 = msat_make_plus(menv, xs[13], n_10_0)
expr7 = msat_make_plus(menv, xs[15], n_14_0)
expr8 = msat_make_plus(menv, xs[18], n_16_0)
expr9 = msat_make_plus(menv, xs[19], n_14_0)
expr10 = msat_make_plus(menv, xs[20], n_8_0)
expr11 = msat_make_plus(menv, xs[21], n_12_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[9], expr0),
msat_make_geq(menv, x_xs[9], expr1),
msat_make_geq(menv, x_xs[9], expr2),
msat_make_geq(menv, x_xs[9], expr3),
msat_make_geq(menv, x_xs[9], expr4),
msat_make_geq(menv, x_xs[9], expr5),
msat_make_geq(menv, x_xs[9], expr6),
msat_make_geq(menv, x_xs[9], expr7),
msat_make_geq(menv, x_xs[9], expr8),
msat_make_geq(menv, x_xs[9], expr9),
msat_make_geq(menv, x_xs[9], expr10),
msat_make_geq(menv, x_xs[9], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[9], expr0),
msat_make_equal(menv, x_xs[9], expr1),
msat_make_equal(menv, x_xs[9], expr2),
msat_make_equal(menv, x_xs[9], expr3),
msat_make_equal(menv, x_xs[9], expr4),
msat_make_equal(menv, x_xs[9], expr5),
msat_make_equal(menv, x_xs[9], expr6),
msat_make_equal(menv, x_xs[9], expr7),
msat_make_equal(menv, x_xs[9], expr8),
msat_make_equal(menv, x_xs[9], expr9),
msat_make_equal(menv, x_xs[9], expr10),
msat_make_equal(menv, x_xs[9], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_15_0)
expr1 = msat_make_plus(menv, xs[2], n_14_0)
expr2 = msat_make_plus(menv, xs[3], n_4_0)
expr3 = msat_make_plus(menv, xs[4], n_20_0)
expr4 = msat_make_plus(menv, xs[5], n_3_0)
expr5 = msat_make_plus(menv, xs[8], n_18_0)
expr6 = msat_make_plus(menv, xs[9], n_5_0)
expr7 = msat_make_plus(menv, xs[10], n_20_0)
expr8 = msat_make_plus(menv, xs[11], n_10_0)
expr9 = msat_make_plus(menv, xs[12], n_1_0)
expr10 = msat_make_plus(menv, xs[14], n_15_0)
expr11 = msat_make_plus(menv, xs[15], n_20_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[10], expr0),
msat_make_geq(menv, x_xs[10], expr1),
msat_make_geq(menv, x_xs[10], expr2),
msat_make_geq(menv, x_xs[10], expr3),
msat_make_geq(menv, x_xs[10], expr4),
msat_make_geq(menv, x_xs[10], expr5),
msat_make_geq(menv, x_xs[10], expr6),
msat_make_geq(menv, x_xs[10], expr7),
msat_make_geq(menv, x_xs[10], expr8),
msat_make_geq(menv, x_xs[10], expr9),
msat_make_geq(menv, x_xs[10], expr10),
msat_make_geq(menv, x_xs[10], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[10], expr0),
msat_make_equal(menv, x_xs[10], expr1),
msat_make_equal(menv, x_xs[10], expr2),
msat_make_equal(menv, x_xs[10], expr3),
msat_make_equal(menv, x_xs[10], expr4),
msat_make_equal(menv, x_xs[10], expr5),
msat_make_equal(menv, x_xs[10], expr6),
msat_make_equal(menv, x_xs[10], expr7),
msat_make_equal(menv, x_xs[10], expr8),
msat_make_equal(menv, x_xs[10], expr9),
msat_make_equal(menv, x_xs[10], expr10),
msat_make_equal(menv, x_xs[10], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_11_0)
expr1 = msat_make_plus(menv, xs[4], n_3_0)
expr2 = msat_make_plus(menv, xs[5], n_16_0)
expr3 = msat_make_plus(menv, xs[7], n_10_0)
expr4 = msat_make_plus(menv, xs[9], n_20_0)
expr5 = msat_make_plus(menv, xs[10], n_13_0)
expr6 = msat_make_plus(menv, xs[11], n_6_0)
expr7 = msat_make_plus(menv, xs[13], n_10_0)
expr8 = msat_make_plus(menv, xs[15], n_8_0)
expr9 = msat_make_plus(menv, xs[19], n_20_0)
expr10 = msat_make_plus(menv, xs[21], n_19_0)
expr11 = msat_make_plus(menv, xs[22], n_14_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[11], expr0),
msat_make_geq(menv, x_xs[11], expr1),
msat_make_geq(menv, x_xs[11], expr2),
msat_make_geq(menv, x_xs[11], expr3),
msat_make_geq(menv, x_xs[11], expr4),
msat_make_geq(menv, x_xs[11], expr5),
msat_make_geq(menv, x_xs[11], expr6),
msat_make_geq(menv, x_xs[11], expr7),
msat_make_geq(menv, x_xs[11], expr8),
msat_make_geq(menv, x_xs[11], expr9),
msat_make_geq(menv, x_xs[11], expr10),
msat_make_geq(menv, x_xs[11], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[11], expr0),
msat_make_equal(menv, x_xs[11], expr1),
msat_make_equal(menv, x_xs[11], expr2),
msat_make_equal(menv, x_xs[11], expr3),
msat_make_equal(menv, x_xs[11], expr4),
msat_make_equal(menv, x_xs[11], expr5),
msat_make_equal(menv, x_xs[11], expr6),
msat_make_equal(menv, x_xs[11], expr7),
msat_make_equal(menv, x_xs[11], expr8),
msat_make_equal(menv, x_xs[11], expr9),
msat_make_equal(menv, x_xs[11], expr10),
msat_make_equal(menv, x_xs[11], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_16_0)
expr1 = msat_make_plus(menv, xs[4], n_18_0)
expr2 = msat_make_plus(menv, xs[6], n_13_0)
expr3 = msat_make_plus(menv, xs[7], n_4_0)
expr4 = msat_make_plus(menv, xs[8], n_10_0)
expr5 = msat_make_plus(menv, xs[9], n_8_0)
expr6 = msat_make_plus(menv, xs[12], n_4_0)
expr7 = msat_make_plus(menv, xs[15], n_9_0)
expr8 = msat_make_plus(menv, xs[19], n_1_0)
expr9 = msat_make_plus(menv, xs[20], n_12_0)
expr10 = msat_make_plus(menv, xs[22], n_4_0)
expr11 = msat_make_plus(menv, xs[23], n_19_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[12], expr0),
msat_make_geq(menv, x_xs[12], expr1),
msat_make_geq(menv, x_xs[12], expr2),
msat_make_geq(menv, x_xs[12], expr3),
msat_make_geq(menv, x_xs[12], expr4),
msat_make_geq(menv, x_xs[12], expr5),
msat_make_geq(menv, x_xs[12], expr6),
msat_make_geq(menv, x_xs[12], expr7),
msat_make_geq(menv, x_xs[12], expr8),
msat_make_geq(menv, x_xs[12], expr9),
msat_make_geq(menv, x_xs[12], expr10),
msat_make_geq(menv, x_xs[12], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[12], expr0),
msat_make_equal(menv, x_xs[12], expr1),
msat_make_equal(menv, x_xs[12], expr2),
msat_make_equal(menv, x_xs[12], expr3),
msat_make_equal(menv, x_xs[12], expr4),
msat_make_equal(menv, x_xs[12], expr5),
msat_make_equal(menv, x_xs[12], expr6),
msat_make_equal(menv, x_xs[12], expr7),
msat_make_equal(menv, x_xs[12], expr8),
msat_make_equal(menv, x_xs[12], expr9),
msat_make_equal(menv, x_xs[12], expr10),
msat_make_equal(menv, x_xs[12], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_6_0)
expr1 = msat_make_plus(menv, xs[5], n_16_0)
expr2 = msat_make_plus(menv, xs[6], n_9_0)
expr3 = msat_make_plus(menv, xs[7], n_13_0)
expr4 = msat_make_plus(menv, xs[8], n_14_0)
expr5 = msat_make_plus(menv, xs[9], n_13_0)
expr6 = msat_make_plus(menv, xs[11], n_7_0)
expr7 = msat_make_plus(menv, xs[13], n_9_0)
expr8 = msat_make_plus(menv, xs[15], n_1_0)
expr9 = msat_make_plus(menv, xs[19], n_19_0)
expr10 = msat_make_plus(menv, xs[21], n_7_0)
expr11 = msat_make_plus(menv, xs[23], n_14_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[13], expr0),
msat_make_geq(menv, x_xs[13], expr1),
msat_make_geq(menv, x_xs[13], expr2),
msat_make_geq(menv, x_xs[13], expr3),
msat_make_geq(menv, x_xs[13], expr4),
msat_make_geq(menv, x_xs[13], expr5),
msat_make_geq(menv, x_xs[13], expr6),
msat_make_geq(menv, x_xs[13], expr7),
msat_make_geq(menv, x_xs[13], expr8),
msat_make_geq(menv, x_xs[13], expr9),
msat_make_geq(menv, x_xs[13], expr10),
msat_make_geq(menv, x_xs[13], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[13], expr0),
msat_make_equal(menv, x_xs[13], expr1),
msat_make_equal(menv, x_xs[13], expr2),
msat_make_equal(menv, x_xs[13], expr3),
msat_make_equal(menv, x_xs[13], expr4),
msat_make_equal(menv, x_xs[13], expr5),
msat_make_equal(menv, x_xs[13], expr6),
msat_make_equal(menv, x_xs[13], expr7),
msat_make_equal(menv, x_xs[13], expr8),
msat_make_equal(menv, x_xs[13], expr9),
msat_make_equal(menv, x_xs[13], expr10),
msat_make_equal(menv, x_xs[13], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_19_0)
expr1 = msat_make_plus(menv, xs[6], n_4_0)
expr2 = msat_make_plus(menv, xs[7], n_8_0)
expr3 = msat_make_plus(menv, xs[9], n_11_0)
expr4 = msat_make_plus(menv, xs[10], n_14_0)
expr5 = msat_make_plus(menv, xs[11], n_20_0)
expr6 = msat_make_plus(menv, xs[12], n_16_0)
expr7 = msat_make_plus(menv, xs[15], n_15_0)
expr8 = msat_make_plus(menv, xs[17], n_15_0)
expr9 = msat_make_plus(menv, xs[21], n_10_0)
expr10 = msat_make_plus(menv, xs[22], n_7_0)
expr11 = msat_make_plus(menv, xs[23], n_17_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[14], expr0),
msat_make_geq(menv, x_xs[14], expr1),
msat_make_geq(menv, x_xs[14], expr2),
msat_make_geq(menv, x_xs[14], expr3),
msat_make_geq(menv, x_xs[14], expr4),
msat_make_geq(menv, x_xs[14], expr5),
msat_make_geq(menv, x_xs[14], expr6),
msat_make_geq(menv, x_xs[14], expr7),
msat_make_geq(menv, x_xs[14], expr8),
msat_make_geq(menv, x_xs[14], expr9),
msat_make_geq(menv, x_xs[14], expr10),
msat_make_geq(menv, x_xs[14], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[14], expr0),
msat_make_equal(menv, x_xs[14], expr1),
msat_make_equal(menv, x_xs[14], expr2),
msat_make_equal(menv, x_xs[14], expr3),
msat_make_equal(menv, x_xs[14], expr4),
msat_make_equal(menv, x_xs[14], expr5),
msat_make_equal(menv, x_xs[14], expr6),
msat_make_equal(menv, x_xs[14], expr7),
msat_make_equal(menv, x_xs[14], expr8),
msat_make_equal(menv, x_xs[14], expr9),
msat_make_equal(menv, x_xs[14], expr10),
msat_make_equal(menv, x_xs[14], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_8_0)
expr1 = msat_make_plus(menv, xs[1], n_11_0)
expr2 = msat_make_plus(menv, xs[3], n_7_0)
expr3 = msat_make_plus(menv, xs[4], n_11_0)
expr4 = msat_make_plus(menv, xs[5], n_6_0)
expr5 = msat_make_plus(menv, xs[8], n_15_0)
expr6 = msat_make_plus(menv, xs[9], n_5_0)
expr7 = msat_make_plus(menv, xs[11], n_9_0)
expr8 = msat_make_plus(menv, xs[13], n_17_0)
expr9 = msat_make_plus(menv, xs[14], n_18_0)
expr10 = msat_make_plus(menv, xs[15], n_10_0)
expr11 = msat_make_plus(menv, xs[19], n_3_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[15], expr0),
msat_make_geq(menv, x_xs[15], expr1),
msat_make_geq(menv, x_xs[15], expr2),
msat_make_geq(menv, x_xs[15], expr3),
msat_make_geq(menv, x_xs[15], expr4),
msat_make_geq(menv, x_xs[15], expr5),
msat_make_geq(menv, x_xs[15], expr6),
msat_make_geq(menv, x_xs[15], expr7),
msat_make_geq(menv, x_xs[15], expr8),
msat_make_geq(menv, x_xs[15], expr9),
msat_make_geq(menv, x_xs[15], expr10),
msat_make_geq(menv, x_xs[15], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[15], expr0),
msat_make_equal(menv, x_xs[15], expr1),
msat_make_equal(menv, x_xs[15], expr2),
msat_make_equal(menv, x_xs[15], expr3),
msat_make_equal(menv, x_xs[15], expr4),
msat_make_equal(menv, x_xs[15], expr5),
msat_make_equal(menv, x_xs[15], expr6),
msat_make_equal(menv, x_xs[15], expr7),
msat_make_equal(menv, x_xs[15], expr8),
msat_make_equal(menv, x_xs[15], expr9),
msat_make_equal(menv, x_xs[15], expr10),
msat_make_equal(menv, x_xs[15], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_11_0)
expr1 = msat_make_plus(menv, xs[5], n_19_0)
expr2 = msat_make_plus(menv, xs[8], n_2_0)
expr3 = msat_make_plus(menv, xs[9], n_6_0)
expr4 = msat_make_plus(menv, xs[10], n_6_0)
expr5 = msat_make_plus(menv, xs[13], n_18_0)
expr6 = msat_make_plus(menv, xs[14], n_17_0)
expr7 = msat_make_plus(menv, xs[15], n_2_0)
expr8 = msat_make_plus(menv, xs[17], n_11_0)
expr9 = msat_make_plus(menv, xs[20], n_18_0)
expr10 = msat_make_plus(menv, xs[21], n_14_0)
expr11 = msat_make_plus(menv, xs[23], n_2_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[16], expr0),
msat_make_geq(menv, x_xs[16], expr1),
msat_make_geq(menv, x_xs[16], expr2),
msat_make_geq(menv, x_xs[16], expr3),
msat_make_geq(menv, x_xs[16], expr4),
msat_make_geq(menv, x_xs[16], expr5),
msat_make_geq(menv, x_xs[16], expr6),
msat_make_geq(menv, x_xs[16], expr7),
msat_make_geq(menv, x_xs[16], expr8),
msat_make_geq(menv, x_xs[16], expr9),
msat_make_geq(menv, x_xs[16], expr10),
msat_make_geq(menv, x_xs[16], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[16], expr0),
msat_make_equal(menv, x_xs[16], expr1),
msat_make_equal(menv, x_xs[16], expr2),
msat_make_equal(menv, x_xs[16], expr3),
msat_make_equal(menv, x_xs[16], expr4),
msat_make_equal(menv, x_xs[16], expr5),
msat_make_equal(menv, x_xs[16], expr6),
msat_make_equal(menv, x_xs[16], expr7),
msat_make_equal(menv, x_xs[16], expr8),
msat_make_equal(menv, x_xs[16], expr9),
msat_make_equal(menv, x_xs[16], expr10),
msat_make_equal(menv, x_xs[16], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_13_0)
expr1 = msat_make_plus(menv, xs[3], n_15_0)
expr2 = msat_make_plus(menv, xs[4], n_10_0)
expr3 = msat_make_plus(menv, xs[6], n_8_0)
expr4 = msat_make_plus(menv, xs[9], n_2_0)
expr5 = msat_make_plus(menv, xs[11], n_19_0)
expr6 = msat_make_plus(menv, xs[12], n_5_0)
expr7 = msat_make_plus(menv, xs[13], n_7_0)
expr8 = msat_make_plus(menv, xs[14], n_8_0)
expr9 = msat_make_plus(menv, xs[18], n_9_0)
expr10 = msat_make_plus(menv, xs[21], n_7_0)
expr11 = msat_make_plus(menv, xs[22], n_9_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[17], expr0),
msat_make_geq(menv, x_xs[17], expr1),
msat_make_geq(menv, x_xs[17], expr2),
msat_make_geq(menv, x_xs[17], expr3),
msat_make_geq(menv, x_xs[17], expr4),
msat_make_geq(menv, x_xs[17], expr5),
msat_make_geq(menv, x_xs[17], expr6),
msat_make_geq(menv, x_xs[17], expr7),
msat_make_geq(menv, x_xs[17], expr8),
msat_make_geq(menv, x_xs[17], expr9),
msat_make_geq(menv, x_xs[17], expr10),
msat_make_geq(menv, x_xs[17], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[17], expr0),
msat_make_equal(menv, x_xs[17], expr1),
msat_make_equal(menv, x_xs[17], expr2),
msat_make_equal(menv, x_xs[17], expr3),
msat_make_equal(menv, x_xs[17], expr4),
msat_make_equal(menv, x_xs[17], expr5),
msat_make_equal(menv, x_xs[17], expr6),
msat_make_equal(menv, x_xs[17], expr7),
msat_make_equal(menv, x_xs[17], expr8),
msat_make_equal(menv, x_xs[17], expr9),
msat_make_equal(menv, x_xs[17], expr10),
msat_make_equal(menv, x_xs[17], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_6_0)
expr1 = msat_make_plus(menv, xs[3], n_10_0)
expr2 = msat_make_plus(menv, xs[4], n_1_0)
expr3 = msat_make_plus(menv, xs[5], n_14_0)
expr4 = msat_make_plus(menv, xs[7], n_8_0)
expr5 = msat_make_plus(menv, xs[8], n_17_0)
expr6 = msat_make_plus(menv, xs[9], n_2_0)
expr7 = msat_make_plus(menv, xs[15], n_3_0)
expr8 = msat_make_plus(menv, xs[20], n_8_0)
expr9 = msat_make_plus(menv, xs[21], n_10_0)
expr10 = msat_make_plus(menv, xs[22], n_17_0)
expr11 = msat_make_plus(menv, xs[23], n_1_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[18], expr0),
msat_make_geq(menv, x_xs[18], expr1),
msat_make_geq(menv, x_xs[18], expr2),
msat_make_geq(menv, x_xs[18], expr3),
msat_make_geq(menv, x_xs[18], expr4),
msat_make_geq(menv, x_xs[18], expr5),
msat_make_geq(menv, x_xs[18], expr6),
msat_make_geq(menv, x_xs[18], expr7),
msat_make_geq(menv, x_xs[18], expr8),
msat_make_geq(menv, x_xs[18], expr9),
msat_make_geq(menv, x_xs[18], expr10),
msat_make_geq(menv, x_xs[18], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[18], expr0),
msat_make_equal(menv, x_xs[18], expr1),
msat_make_equal(menv, x_xs[18], expr2),
msat_make_equal(menv, x_xs[18], expr3),
msat_make_equal(menv, x_xs[18], expr4),
msat_make_equal(menv, x_xs[18], expr5),
msat_make_equal(menv, x_xs[18], expr6),
msat_make_equal(menv, x_xs[18], expr7),
msat_make_equal(menv, x_xs[18], expr8),
msat_make_equal(menv, x_xs[18], expr9),
msat_make_equal(menv, x_xs[18], expr10),
msat_make_equal(menv, x_xs[18], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_15_0)
expr1 = msat_make_plus(menv, xs[3], n_7_0)
expr2 = msat_make_plus(menv, xs[6], n_10_0)
expr3 = msat_make_plus(menv, xs[8], n_7_0)
expr4 = msat_make_plus(menv, xs[10], n_17_0)
expr5 = msat_make_plus(menv, xs[11], n_3_0)
expr6 = msat_make_plus(menv, xs[12], n_20_0)
expr7 = msat_make_plus(menv, xs[15], n_8_0)
expr8 = msat_make_plus(menv, xs[17], n_7_0)
expr9 = msat_make_plus(menv, xs[18], n_4_0)
expr10 = msat_make_plus(menv, xs[19], n_20_0)
expr11 = msat_make_plus(menv, xs[20], n_15_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[19], expr0),
msat_make_geq(menv, x_xs[19], expr1),
msat_make_geq(menv, x_xs[19], expr2),
msat_make_geq(menv, x_xs[19], expr3),
msat_make_geq(menv, x_xs[19], expr4),
msat_make_geq(menv, x_xs[19], expr5),
msat_make_geq(menv, x_xs[19], expr6),
msat_make_geq(menv, x_xs[19], expr7),
msat_make_geq(menv, x_xs[19], expr8),
msat_make_geq(menv, x_xs[19], expr9),
msat_make_geq(menv, x_xs[19], expr10),
msat_make_geq(menv, x_xs[19], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[19], expr0),
msat_make_equal(menv, x_xs[19], expr1),
msat_make_equal(menv, x_xs[19], expr2),
msat_make_equal(menv, x_xs[19], expr3),
msat_make_equal(menv, x_xs[19], expr4),
msat_make_equal(menv, x_xs[19], expr5),
msat_make_equal(menv, x_xs[19], expr6),
msat_make_equal(menv, x_xs[19], expr7),
msat_make_equal(menv, x_xs[19], expr8),
msat_make_equal(menv, x_xs[19], expr9),
msat_make_equal(menv, x_xs[19], expr10),
msat_make_equal(menv, x_xs[19], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[5], n_17_0)
expr1 = msat_make_plus(menv, xs[6], n_6_0)
expr2 = msat_make_plus(menv, xs[7], n_5_0)
expr3 = msat_make_plus(menv, xs[9], n_12_0)
expr4 = msat_make_plus(menv, xs[10], n_13_0)
expr5 = msat_make_plus(menv, xs[12], n_3_0)
expr6 = msat_make_plus(menv, xs[15], n_14_0)
expr7 = msat_make_plus(menv, xs[16], n_17_0)
expr8 = msat_make_plus(menv, xs[17], n_10_0)
expr9 = msat_make_plus(menv, xs[19], n_3_0)
expr10 = msat_make_plus(menv, xs[22], n_18_0)
expr11 = msat_make_plus(menv, xs[23], n_1_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[20], expr0),
msat_make_geq(menv, x_xs[20], expr1),
msat_make_geq(menv, x_xs[20], expr2),
msat_make_geq(menv, x_xs[20], expr3),
msat_make_geq(menv, x_xs[20], expr4),
msat_make_geq(menv, x_xs[20], expr5),
msat_make_geq(menv, x_xs[20], expr6),
msat_make_geq(menv, x_xs[20], expr7),
msat_make_geq(menv, x_xs[20], expr8),
msat_make_geq(menv, x_xs[20], expr9),
msat_make_geq(menv, x_xs[20], expr10),
msat_make_geq(menv, x_xs[20], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[20], expr0),
msat_make_equal(menv, x_xs[20], expr1),
msat_make_equal(menv, x_xs[20], expr2),
msat_make_equal(menv, x_xs[20], expr3),
msat_make_equal(menv, x_xs[20], expr4),
msat_make_equal(menv, x_xs[20], expr5),
msat_make_equal(menv, x_xs[20], expr6),
msat_make_equal(menv, x_xs[20], expr7),
msat_make_equal(menv, x_xs[20], expr8),
msat_make_equal(menv, x_xs[20], expr9),
msat_make_equal(menv, x_xs[20], expr10),
msat_make_equal(menv, x_xs[20], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_15_0)
expr1 = msat_make_plus(menv, xs[4], n_17_0)
expr2 = msat_make_plus(menv, xs[5], n_19_0)
expr3 = msat_make_plus(menv, xs[6], n_2_0)
expr4 = msat_make_plus(menv, xs[8], n_19_0)
expr5 = msat_make_plus(menv, xs[9], n_2_0)
expr6 = msat_make_plus(menv, xs[15], n_10_0)
expr7 = msat_make_plus(menv, xs[16], n_17_0)
expr8 = msat_make_plus(menv, xs[17], n_9_0)
expr9 = msat_make_plus(menv, xs[18], n_9_0)
expr10 = msat_make_plus(menv, xs[20], n_9_0)
expr11 = msat_make_plus(menv, xs[23], n_12_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[21], expr0),
msat_make_geq(menv, x_xs[21], expr1),
msat_make_geq(menv, x_xs[21], expr2),
msat_make_geq(menv, x_xs[21], expr3),
msat_make_geq(menv, x_xs[21], expr4),
msat_make_geq(menv, x_xs[21], expr5),
msat_make_geq(menv, x_xs[21], expr6),
msat_make_geq(menv, x_xs[21], expr7),
msat_make_geq(menv, x_xs[21], expr8),
msat_make_geq(menv, x_xs[21], expr9),
msat_make_geq(menv, x_xs[21], expr10),
msat_make_geq(menv, x_xs[21], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[21], expr0),
msat_make_equal(menv, x_xs[21], expr1),
msat_make_equal(menv, x_xs[21], expr2),
msat_make_equal(menv, x_xs[21], expr3),
msat_make_equal(menv, x_xs[21], expr4),
msat_make_equal(menv, x_xs[21], expr5),
msat_make_equal(menv, x_xs[21], expr6),
msat_make_equal(menv, x_xs[21], expr7),
msat_make_equal(menv, x_xs[21], expr8),
msat_make_equal(menv, x_xs[21], expr9),
msat_make_equal(menv, x_xs[21], expr10),
msat_make_equal(menv, x_xs[21], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_10_0)
expr1 = msat_make_plus(menv, xs[2], n_19_0)
expr2 = msat_make_plus(menv, xs[5], n_12_0)
expr3 = msat_make_plus(menv, xs[6], n_14_0)
expr4 = msat_make_plus(menv, xs[7], n_20_0)
expr5 = msat_make_plus(menv, xs[8], n_3_0)
expr6 = msat_make_plus(menv, xs[9], n_1_0)
expr7 = msat_make_plus(menv, xs[10], n_8_0)
expr8 = msat_make_plus(menv, xs[11], n_9_0)
expr9 = msat_make_plus(menv, xs[13], n_10_0)
expr10 = msat_make_plus(menv, xs[16], n_16_0)
expr11 = msat_make_plus(menv, xs[21], n_9_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[22], expr0),
msat_make_geq(menv, x_xs[22], expr1),
msat_make_geq(menv, x_xs[22], expr2),
msat_make_geq(menv, x_xs[22], expr3),
msat_make_geq(menv, x_xs[22], expr4),
msat_make_geq(menv, x_xs[22], expr5),
msat_make_geq(menv, x_xs[22], expr6),
msat_make_geq(menv, x_xs[22], expr7),
msat_make_geq(menv, x_xs[22], expr8),
msat_make_geq(menv, x_xs[22], expr9),
msat_make_geq(menv, x_xs[22], expr10),
msat_make_geq(menv, x_xs[22], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[22], expr0),
msat_make_equal(menv, x_xs[22], expr1),
msat_make_equal(menv, x_xs[22], expr2),
msat_make_equal(menv, x_xs[22], expr3),
msat_make_equal(menv, x_xs[22], expr4),
msat_make_equal(menv, x_xs[22], expr5),
msat_make_equal(menv, x_xs[22], expr6),
msat_make_equal(menv, x_xs[22], expr7),
msat_make_equal(menv, x_xs[22], expr8),
msat_make_equal(menv, x_xs[22], expr9),
msat_make_equal(menv, x_xs[22], expr10),
msat_make_equal(menv, x_xs[22], expr11),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_5_0)
expr1 = msat_make_plus(menv, xs[1], n_10_0)
expr2 = msat_make_plus(menv, xs[3], n_11_0)
expr3 = msat_make_plus(menv, xs[4], n_19_0)
expr4 = msat_make_plus(menv, xs[9], n_2_0)
expr5 = msat_make_plus(menv, xs[10], n_13_0)
expr6 = msat_make_plus(menv, xs[11], n_18_0)
expr7 = msat_make_plus(menv, xs[15], n_14_0)
expr8 = msat_make_plus(menv, xs[16], n_3_0)
expr9 = msat_make_plus(menv, xs[19], n_9_0)
expr10 = msat_make_plus(menv, xs[20], n_1_0)
expr11 = msat_make_plus(menv, xs[22], n_17_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[23], expr0),
msat_make_geq(menv, x_xs[23], expr1),
msat_make_geq(menv, x_xs[23], expr2),
msat_make_geq(menv, x_xs[23], expr3),
msat_make_geq(menv, x_xs[23], expr4),
msat_make_geq(menv, x_xs[23], expr5),
msat_make_geq(menv, x_xs[23], expr6),
msat_make_geq(menv, x_xs[23], expr7),
msat_make_geq(menv, x_xs[23], expr8),
msat_make_geq(menv, x_xs[23], expr9),
msat_make_geq(menv, x_xs[23], expr10),
msat_make_geq(menv, x_xs[23], expr11),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[23], expr0),
msat_make_equal(menv, x_xs[23], expr1),
msat_make_equal(menv, x_xs[23], expr2),
msat_make_equal(menv, x_xs[23], expr3),
msat_make_equal(menv, x_xs[23], expr4),
msat_make_equal(menv, x_xs[23], expr5),
msat_make_equal(menv, x_xs[23], expr6),
msat_make_equal(menv, x_xs[23], expr7),
msat_make_equal(menv, x_xs[23], expr8),
msat_make_equal(menv, x_xs[23], expr9),
msat_make_equal(menv, x_xs[23], expr10),
msat_make_equal(menv, x_xs[23], expr11),))
trans = msat_make_and(menv, trans, _t)
# ltl property: (X (F (G (F (x_3 - x_20 >= 1)))))
ltl = enc.make_X(enc.make_F(enc.make_G(enc.make_F(msat_make_geq(menv, msat_make_minus(menv, xs[3], xs[20]), msat_make_number(menv, "1"))))))
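    # Equivalently: x_3 - x_20 >= 1 holds infinitely often, since F G F
    # collapses to G F and the leading X is absorbed by G F.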
return TermMap(curr2next), init, trans, ltl
| [
"[email protected]"
] | |
45297b843b717fd571b9a542906a15e1a9b43bb3 | 8b3ca44ee3d990233e74655b7131d616094f70c2 | /experiments/sparsity/methylation_gm/gaussian_truncatednormal_hierarchical.py | 427b535bd7e7c6d03bb77c738acc2c5ee7ee563c | [] | no_license | zshwuhan/BMF_Priors | 8b8c54271285a72d2085a56a9475c0756f375e67 | 6a600da1c41f1ccde2f2ba99298b40e68fb9910a | refs/heads/master | 2021-05-13T19:10:07.203215 | 2017-12-01T13:30:21 | 2017-12-01T13:30:21 | 116,883,181 | 1 | 0 | null | 2018-01-09T23:36:13 | 2018-01-09T23:36:13 | null | UTF-8 | Python | false | false | 1,480 | py | '''
Measure sparsity experiment on the methylation GM dataset, with
the Gaussian + Truncated Normal + hierarchical model.
'''
project_location = "/Users/thomasbrouwer/Documents/Projects/libraries/"
import sys
sys.path.append(project_location)
from BMF_Priors.code.models.bmf_gaussian_truncatednormal_hierarchical import BMF_Gaussian_TruncatedNormal_Hierarchical
from BMF_Priors.data.methylation.load_data import load_gene_body_methylation_integer
from BMF_Priors.experiments.sparsity.sparsity_experiment import sparsity_experiment
import matplotlib.pyplot as plt
''' Run the experiment. '''
R, M = load_gene_body_methylation_integer()
model_class = BMF_Gaussian_TruncatedNormal_Hierarchical
n_repeats = 10
stratify_rows = False
fractions_unknown = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.99]
settings = {
'R': R,
'M': M,
'K': 5,
'hyperparameters': { 'alpha':1., 'beta':1., 'mu_mu':0., 'tau_mu':0.1, 'a':1., 'b':1. },
'init': 'random',
'iterations': 250,
'burn_in': 200,
'thinning': 2,
}
fout = './results/performances_gaussian_truncatednormal_hierarchical.txt'
average_performances, all_performances = sparsity_experiment(
n_repeats=n_repeats, fractions_unknown=fractions_unknown, stratify_rows=stratify_rows,
model_class=model_class, settings=settings, fout=fout)
''' Plot the performance. '''
plt.figure()
plt.title("Sparsity performances")
plt.plot(fractions_unknown, average_performances['MSE'])
plt.ylim(0,10) | [
"[email protected]"
] | |
4e6151e08844e9f992a2200fb36b50f34d975023 | 9bff711e7e28f5c3b59d3faa9c0f851719dff4e6 | /blair/__init__.py | cfe408230df1f0ee7935709afce62772cba2fc14 | [] | no_license | crzeller11/Debiasing-Word-Embeddings | e61c46adc7440927030d8436ea9568710423d03a | 70de33a89be147a0091f6fb75c7e688f2db99564 | refs/heads/master | 2020-03-28T16:28:55.082077 | 2019-03-29T17:57:13 | 2019-03-29T17:57:13 | 148,699,831 | 0 | 1 | null | 2019-03-29T17:57:14 | 2018-09-13T21:23:45 | Python | UTF-8 | Python | false | false | 107 | py | from .evaluate import read_dataset_directory, score_embedding
from .src.embeddings import WrappedEmbedding
| [
"[email protected]"
] | |
250692130016bd8b68bba47f883404dbe047de02 | c7b1d4037804c809687b6bd839c45f7da0ccaac8 | /proplot/config.py | c218b25feb1503dfe66b98466efde282dd829fa1 | [
"MIT"
] | permissive | gepcel/proplot | af03d4302d6a8cbaf88bc2032368d8240c7d19d3 | afeb7da4cd52f83e34bf3e2f8e781efa1dd2b763 | refs/heads/master | 2022-11-16T00:26:52.142575 | 2020-07-10T03:38:36 | 2020-07-10T03:38:36 | 278,497,577 | 0 | 0 | MIT | 2020-07-10T00:09:13 | 2020-07-10T00:09:12 | null | UTF-8 | Python | false | false | 62,496 | py | #!/usr/bin/env python3
"""
Tools for setting up ProPlot and configuring global settings.
See the :ref:`configuration guide <ug_config>` for details.
"""
# NOTE: The matplotlib analogue to this file is actually __init__.py
# but it makes more sense to have all the setup actions in a separate file
# so the namespace of the top-level module is unpolluted.
# NOTE: Why also load colormaps and cycles in this file and not colors.py?
# Because I think it makes sense to have all the code that "runs" (i.e. not
# just definitions) in the same place, and I was having issues with circular
# dependencies and where import order of __init__.py was affecting behavior.
import logging
import os
import re
from collections import namedtuple
import cycler
import matplotlib as mpl
import matplotlib.cbook as cbook
import matplotlib.colors as mcolors
import matplotlib.font_manager as mfonts
import matplotlib.mathtext # noqa
import matplotlib.rcsetup as msetup
import matplotlib.style.core as mstyle
import numpy as np
from . import colors as pcolors
from .internals import ic # noqa: F401
from .internals import _not_none, docstring, rcsetup, timers, warnings
from .utils import to_xyz, units
try:
from IPython import get_ipython
except ImportError:
def get_ipython():
return
__all__ = [
'rc', 'RcConfigurator',
'register_cmaps', 'register_cycles', 'register_colors', 'register_fonts',
'config_inline_backend', 'use_style',
'inline_backend_fmt', 'rc_configurator', # deprecated
]
logger = logging.getLogger('matplotlib.mathtext')
logger.setLevel(logging.ERROR) # suppress warnings!
# Dictionaries used to track custom proplot settings
rc_proplot = rcsetup._rc_proplot_default.copy()
rc_matplotlib = mpl.rcParams # PEP8 4 lyfe
RcParams = mpl.RcParams # the special class
_RcContext = namedtuple('RcContext', ('mode', 'kwargs', 'rc_new', 'rc_old'))
# Misc constants
# TODO: Use explicit validators for specific settings like matplotlib.
REGEX_STRING = re.compile('\\A(\'.*\'|".*")\\Z')
REGEX_POINTS = re.compile(
r'\A(?!colorbar|subplots|pdf|ps).*(width|space|size|pad|len)\Z'
)
ALWAYS_ADD = (
*( # common fancy names or natural names
'charcoal', 'tomato', 'burgundy', 'maroon', 'burgundy', 'lavendar',
'taupe', 'ocre', 'sand', 'stone', 'earth', 'sand brown', 'sienna',
'terracotta', 'moss', 'crimson', 'mauve', 'rose', 'teal', 'forest',
'grass', 'sage', 'pine', 'vermillion', 'russet', 'cerise', 'avocado',
'wine', 'brick', 'umber', 'mahogany', 'puce', 'grape', 'blurple',
'cranberry', 'sand', 'aqua', 'jade', 'coral', 'olive', 'magenta',
'turquoise', 'sea blue', 'royal blue', 'slate blue', 'slate grey',
'baby blue', 'salmon', 'beige', 'peach', 'mustard', 'lime', 'indigo',
'cornflower', 'marine', 'cloudy blue', 'tangerine', 'scarlet', 'navy',
'cool grey', 'warm grey', 'chocolate', 'raspberry', 'denim',
'gunmetal', 'midnight', 'chartreuse', 'ivory', 'khaki', 'plum',
'silver', 'tan', 'wheat', 'buff', 'bisque', 'cerulean',
),
*( # common combos
'red orange', 'yellow orange', 'yellow green',
'blue green', 'blue violet', 'red violet',
),
*( # common names
prefix + color
for color in (
'red', 'orange', 'yellow', 'green', 'blue', 'indigo', 'violet',
'brown', 'grey'
)
for prefix in (
'', 'light ', 'dark ', 'medium ', 'pale ',
)
)
)
ALWAYS_REMOVE = ( # filter these out, let's try to be professional here...
'shit', 'poop', 'poo', 'pee', 'piss', 'puke', 'vomit', 'snot',
'booger', 'bile', 'diarrhea',
)
TRANSLATE_COLORS = ( # prevent registering similar-sounding names
('/', ' '),
("'s", ''),
('forrest', 'forest'), # typo?
('reddish', 'red'), # remove 'ish'
('purplish', 'purple'),
('bluish', 'blue'),
('ish ', ' '),
('grey', 'gray'),
('pinky', 'pink'),
('greeny', 'green'),
('bluey', 'blue'),
('purply', 'purple'),
('purpley', 'purple'),
('yellowy', 'yellow'),
('robin egg', 'robins egg'),
('egg blue', 'egg'),
('bluegray', 'blue gray'),
('grayblue', 'gray blue'),
('lightblue', 'light blue'),
('yellowgreen', 'yellow green'),
('yelloworange', 'yellow orange'),
)
OPEN_COLORS = {} # populated during register_colors
XKCD_COLORS = {} # populated during register_colors
BASE_COLORS = {
**mcolors.BASE_COLORS, # shorthand names like 'r', 'g', etc.
'blue': (0, 0, 1),
'green': (0, 0.5, 0),
'red': (1, 0, 0),
'cyan': (0, 0.75, 0.75),
'magenta': (0.75, 0, 0.75),
'yellow': (0.75, 0.75, 0),
'black': (0, 0, 0),
'white': (1, 1, 1),
}
_config_docstring = """
user : bool, optional
Whether to reload user {name}. Default is ``True``.
default : bool, optional
Whether to reload default proplot {name}. Default is ``False``.
"""
docstring.snippets['register_cmaps.params'] = _config_docstring.format(name='colormaps')
docstring.snippets['register_cycles.params'] = _config_docstring.format(name='cycles')
docstring.snippets['register_colors.params'] = _config_docstring.format(name='colors')
docstring.snippets['rc.params'] = """
local : bool, optional
Whether to reload ``.proplotrc`` settings in this directory and parent
directories. Default is ``True``.
user : bool, optional
Whether to reload ``~/.proplotrc`` user settings. Default is ``True``.
default : bool, optional
Whether to reload default proplot settings. Default is ``True``.
"""
docstring.snippets['register.ext_table'] = """
Valid file extensions are as follows:
================== =====================================================================================================================================================================================================================
Extension Description
================== =====================================================================================================================================================================================================================
``.hex`` List of HEX strings in any format (comma-separated, separate lines, with double quotes... anything goes).
``.xml``           XML files with ``<Point .../>`` tags specifying ``x``, ``r``, ``g``, ``b``, and (optionally) ``o`` parameters, where ``x`` is the coordinate and the rest are the red, green, blue, and opacity channel values.
``.rgb``, ``.txt`` 3-4 column table of red, green, blue, and (optionally) opacity channel values, delimited by commas or spaces. If values larger than 1 are detected, they are assumed to be on the 0-255 scale and are divided by 255.
================== =====================================================================================================================================================================================================================
""" # noqa: E501
def _get_data_paths(subfolder, user=True, default=True, reverse=False):
"""
Return data folder paths in reverse order of precedence.
"""
# When loading colormaps, cycles, and colors, files in the latter
# directories overwrite files in the former directories. When loading
# fonts, the resulting paths need to be *reversed*.
paths = []
    if default:
        paths.append(os.path.join(os.path.dirname(__file__), subfolder))
    if user:
        paths.append(os.path.join(os.path.expanduser('~'), '.proplot', subfolder))
if reverse:
paths = paths[::-1]
return paths
def _iter_data_paths(subfolder, **kwargs):
"""
Iterate over all files in the data paths. Also yield an index indicating
whether these are default ProPlot files or user files.
"""
for i, path in enumerate(_get_data_paths(subfolder, **kwargs)):
for dirname, dirnames, filenames in os.walk(path):
for filename in filenames:
if filename[0] == '.': # UNIX-style hidden files
continue
yield i, dirname, filename
class RcConfigurator(object):
"""
Magical abstract class for managing matplotlib's `builtin settings <rc_matplotlib>`_
and ProPlot's :ref:`added settings <rc_proplot>`.
When ProPlot is imported, this class is instantiated as the `rc` object
and the ProPlot default settings and ``.proplotrc`` user overrides
are applied. To modify these settings, use the `rc` object.
See the :ref:`configuration guide <ug_config>` for details.
"""
def __repr__(self):
rcdict = type('rc', (dict,), {})({ # encapsulate params in temporary class
key: value for key, value in rc_proplot.items()
if '.' not in key # show short names
})
string = type(rc_matplotlib).__repr__(rcdict)
return string.strip()[:-2] + ',\n ... <rcParams> ...\n })'
def __str__(self):
rcdict = type('rc', (dict,), {})({
key: value for key, value in rc_proplot.items()
if '.' not in key # show short names
})
string = type(rc_matplotlib).__str__(rcdict)
return string + '\n... <rcParams> ...'
def __iter__(self): # lets us build dict
"""
Iterate over keys and values of matplotlib and proplot settings.
"""
for key in sorted((*rc_proplot, *rc_matplotlib)):
yield key, self[key]
def __contains__(self, key):
"""
Test whether key exists as matplotlib or proplot setting.
"""
return key in rc_proplot or key in rc_matplotlib
@docstring.add_snippets
def __init__(self, local=True, user=True, default=True):
"""
Parameters
----------
%(rc.params)s
"""
self._context = []
self.reset(local=local, user=user, default=default)
def __enter__(self):
"""
Apply settings from the most recent context block.
"""
if not self._context:
raise RuntimeError(
'rc object must be initialized for context block '
'using rc.context().'
)
context = self._context[-1]
kwargs = context.kwargs
rc_new = context.rc_new # used for context-based _get_item
rc_old = context.rc_old # used to re-apply settings without copying whole dict
for key, value in kwargs.items():
kw_proplot, kw_matplotlib = self._get_synced_params(key, value)
for rc_dict, kw_new in zip(
(rc_proplot, rc_matplotlib),
(kw_proplot, kw_matplotlib),
):
for key, value in kw_new.items():
rc_old[key] = rc_dict[key]
rc_new[key] = rc_dict[key] = value
def __exit__(self, *args): # noqa: U100
"""
Restore settings from the most recent context block.
"""
if not self._context:
raise RuntimeError(
'rc object must be initialized for context block '
'using rc.context().'
)
context = self._context[-1]
for key, value in context.rc_old.items():
kw_proplot, kw_matplotlib = self._get_synced_params(key, value)
rc_proplot.update(kw_proplot)
rc_matplotlib.update(kw_matplotlib)
del self._context[-1]
    def __delitem__(self, item):  # noqa: U100
"""
Raise an error. This enforces pseudo-immutability.
"""
raise RuntimeError('rc settings cannot be deleted.')
    def __delattr__(self, item):  # noqa: U100
"""
Raise an error. This enforces pseudo-immutability.
"""
raise RuntimeError('rc settings cannot be deleted.')
def __getattr__(self, attr):
"""
Pass the attribute to `~RcConfigurator.__getitem__` and return
the result.
"""
if attr[:1] == '_':
return super().__getattribute__(attr)
else:
return self[attr]
def __getitem__(self, key):
"""
Return a `builtin matplotlib setting <rc_matplotlib>`_
or a ProPlot :ref:`added setting <rc_proplot>`.
"""
key = self._sanitize_key(key)
        if key is None:  # means key was *removed*; a warning was issued
return None
for kw in (rc_proplot, rc_matplotlib):
try:
return kw[key]
except KeyError:
continue
raise KeyError(f'Invalid setting name {key!r}.')
def __setattr__(self, attr, value):
"""
Pass the attribute and value to `~RcConfigurator.__setitem__`.
"""
if attr[:1] == '_':
super().__setattr__(attr, value)
else:
self.__setitem__(attr, value)
def __setitem__(self, key, value):
"""
Modify a `builtin matplotlib setting <rc_matplotlib>`_ or
a ProPlot :ref:`added setting <rc_proplot>`.
"""
kw_proplot, kw_matplotlib = self._get_synced_params(key, value)
rc_proplot.update(kw_proplot)
rc_matplotlib.update(kw_matplotlib)
def _get_context_mode(self):
"""
Return lowest (most permissive) context mode.
"""
return min((context.mode for context in self._context), default=0)
def _get_item(self, key, mode=None):
"""
As with `~RcConfigurator.__getitem__` but the search is limited
based on the context mode and ``None`` is returned if the key is not
found in the dictionaries.
"""
if mode is None:
mode = self._get_context_mode()
cache = tuple(context.rc_new for context in self._context)
if mode == 0:
rcdicts = (*cache, rc_proplot, rc_matplotlib)
elif mode == 1:
rcdicts = (*cache, rc_proplot) # custom only!
elif mode == 2:
rcdicts = (*cache,)
else:
raise KeyError(f'Invalid caching mode {mode!r}.')
for rcdict in rcdicts:
if not rcdict:
continue
try:
return rcdict[key]
except KeyError:
continue
if mode == 0:
raise KeyError(f'Invalid setting name {key!r}.')
else:
return
def _get_synced_params(self, key, value):
"""
Return dictionaries for updating the `rc_proplot`
and `rc_matplotlib` properties associated with this key.
"""
key = self._sanitize_key(key)
if key is None: # means setting was removed
            return {}, {}
keys = (key,) + rcsetup._rc_children.get(key, ()) # settings to change
value = self._sanitize_value(value)
kw_proplot = {} # custom properties that global setting applies to
kw_matplotlib = {} # builtin properties that global setting applies to
        # Permit arbitrary units for builtin matplotlib params
# See: https://matplotlib.org/users/customizing.html, props matching
# the below strings use the units 'points'.
# TODO: Incorporate into more sophisticated validation system
if any(REGEX_POINTS.match(_) for _ in keys):
try:
self._scale_font(value) # *validate* but do not translate
except KeyError:
value = units(value, 'pt')
# Special key: configure inline backend
if key == 'inlinefmt':
config_inline_backend(value)
# Special key: apply stylesheet
elif key == 'style':
if value is not None:
kw_matplotlib, kw_proplot = _get_style_dicts(value, infer=True)
# Cycler
elif key == 'cycle':
colors = _get_cycle_colors(value)
kw_matplotlib['patch.facecolor'] = 'C0'
kw_matplotlib['axes.prop_cycle'] = cycler.cycler('color', colors)
# Zero linewidth almost always means zero tick length
# TODO: Document this feature
elif key == 'linewidth' and value == 0:
ikw_proplot, ikw_matplotlib = self._get_synced_params('ticklen', 0)
kw_proplot.update(ikw_proplot)
kw_matplotlib.update(ikw_matplotlib)
# Tick length/major-minor tick length ratio
elif key in ('tick.len', 'tick.lenratio'):
if key == 'tick.len':
ticklen = value
ratio = rc_proplot['tick.lenratio']
else:
ticklen = rc_proplot['tick.len']
ratio = value
kw_matplotlib['xtick.minor.size'] = ticklen * ratio
kw_matplotlib['ytick.minor.size'] = ticklen * ratio
# Spine width/major-minor tick width ratio
elif key in ('linewidth', 'tick.ratio'):
if key == 'linewidth':
tickwidth = value
ratio = rc_proplot['tick.ratio']
else:
tickwidth = rc_proplot['linewidth']
ratio = value
kw_matplotlib['xtick.minor.width'] = tickwidth * ratio
kw_matplotlib['ytick.minor.width'] = tickwidth * ratio
# Gridline width
elif key in ('grid.linewidth', 'grid.ratio'):
if key == 'grid.linewidth':
gridwidth = value
ratio = rc_proplot['grid.ratio']
else:
gridwidth = rc_matplotlib['grid.linewidth']
ratio = value
kw_proplot['gridminor.linewidth'] = gridwidth * ratio
# Gridline toggling, complicated because of the clunky way this is
# implemented in matplotlib. There should be a gridminor setting!
elif key in ('grid', 'gridminor'):
b = value
ovalue = rc_matplotlib['axes.grid']
owhich = rc_matplotlib['axes.grid.which']
# Instruction is to turn off gridlines
if not value:
# Gridlines are already off, or they are on for the particular
# ones that we want to turn off. Instruct to turn both off.
if (
not ovalue
or key == 'grid' and owhich == 'major'
or key == 'gridminor' and owhich == 'minor'
):
which = 'both' # disable both sides
# Gridlines are currently on for major and minor ticks, so we
# instruct to turn on gridlines for the one we *don't* want off
elif owhich == 'both': # and ovalue is True, as already tested
# if gridminor=False, enable major, and vice versa
b = True
which = 'major' if key == 'gridminor' else 'minor'
# Gridlines are on for the ones that we *didn't* instruct to
# turn off, and off for the ones we do want to turn off. This
# just re-asserts the ones that are already on.
else:
b = True
which = owhich
# Instruction is to turn on gridlines
else:
# Gridlines are already both on, or they are off only for the
# ones that we want to turn on. Turn on gridlines for both.
if (
owhich == 'both'
or key == 'grid' and owhich == 'minor'
or key == 'gridminor' and owhich == 'major'
):
which = 'both'
# Gridlines are off for both, or off for the ones that we
# don't want to turn on. We can just turn on these ones.
else:
which = owhich
# Finally apply settings
kw_matplotlib['axes.grid'] = b
kw_matplotlib['axes.grid.which'] = which
# Update original setting and linked settings
for key in keys:
if key in rc_proplot:
kw_proplot[key] = value
elif key in rc_matplotlib:
kw_matplotlib[key] = value
else:
raise KeyError(f'Invalid rc key {key!r}.')
return kw_proplot, kw_matplotlib
@staticmethod
def _get_local_paths():
"""
Return locations of local proplotrc files in this directory
and in parent directories.
"""
idir = os.getcwd()
paths = []
while idir: # not empty string
ipath = os.path.join(idir, '.proplotrc')
if os.path.exists(ipath):
paths.append(ipath)
ndir = os.path.dirname(idir)
if ndir == idir: # root
break
idir = ndir
        return paths[::-1]  # sort from decreasing to increasing importance
@staticmethod
def _get_user_path():
"""
Return location of user proplotrc file.
"""
return os.path.join(os.path.expanduser('~'), '.proplotrc')
@staticmethod
def _sanitize_key(key):
"""
Ensure string and convert keys with omitted dots.
"""
if not isinstance(key, str):
raise KeyError(f'Invalid key {key!r}. Must be string.')
# Translate from nodots to 'full' version
if '.' not in key:
key = rcsetup._rc_nodots.get(key, key)
# Handle deprecations
if key in rcsetup._rc_removed:
alternative, version = rcsetup._rc_removed[key]
message = f'rc setting {key!r} was removed in version {version}.'
if alternative: # provide an alternative
message = f'{message} {alternative}'
            warnings._warn_proplot(message)
key = None
if key in rcsetup._rc_renamed:
key_new, version = rcsetup._rc_renamed[key]
warnings._warn_proplot(
f'rc setting {key!r} was renamed to {key_new} in version {version}.'
)
key = key_new
        return None if key is None else key.lower()
@staticmethod
def _sanitize_value(value):
"""
Convert numpy ndarray to list.
"""
if isinstance(value, np.ndarray):
if value.size <= 1:
value = value.item()
else:
value = value.tolist()
return value
@staticmethod
def _scale_font(size):
"""
Translate font size to numeric.
"""
# NOTE: Critical this remains KeyError so except clause
# in _get_synced_params works.
if isinstance(size, str):
try:
scale = mfonts.font_scalings[size]
except KeyError:
raise KeyError(
f'Invalid font scaling {size!r}. Options are: '
+ ', '.join(
f'{key!r} ({value})'
for key, value in mfonts.font_scalings.items()
) + '.'
)
else:
size = rc_matplotlib['font.size'] * scale
return size
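    # Example (hypothetical values): with rc_matplotlib['font.size'] = 10.0,
    # _scale_font('large') returns 10.0 * mfonts.font_scalings['large'],
    # i.e. 12.0, while numeric input like _scale_font(11) passes through.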
def category(self, cat, *, trimcat=True, context=False):
"""
Return a dictionary of settings beginning with the substring
``cat + '.'``.
Parameters
----------
cat : str, optional
The `rc` setting category.
trimcat : bool, optional
Whether to trim ``cat`` from the key names in the output
dictionary. Default is ``True``.
context : bool, optional
If ``True``, then each category setting that is not found in the
context mode dictionaries is omitted from the output dictionary.
See `~RcConfigurator.context`.
"""
if cat not in rcsetup._rc_categories:
raise ValueError(
f'Invalid rc category {cat!r}. Valid categories are '
                + ', '.join(map(repr, rcsetup._rc_categories)) + '.'
)
kw = {}
mode = 0 if not context else None
for rcdict in (rc_proplot, rc_matplotlib):
for key in rcdict:
if not re.match(fr'\A{cat}\.[^.]+\Z', key):
continue
value = self._get_item(key, mode)
if value is None:
continue
if trimcat:
key = re.sub(fr'\A{cat}\.', '', key)
kw[key] = value
return kw
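    # Example (illustrative output): rc.category('tick') might return
    # {'len': 4.0, 'labelsize': 9.0, ...}; with trimcat=False the keys would
    # keep their 'tick.' prefix.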
def context(self, *args, mode=0, file=None, **kwargs):
"""
Temporarily modify the rc settings in a "with as" block.
Parameters
----------
*args
Dictionaries of `rc` names and values.
file : str, optional
Filename from which settings should be loaded.
**kwargs
`rc` names and values passed as keyword arguments. If the
name has dots, simply omit them.
Other parameters
----------------
mode : {0, 1, 2}, optional
The context mode. Dictates the behavior of `~RcConfigurator.get`,
`~RcConfigurator.fill`, and `~RcConfigurator.category` within a
"with as" block when called with ``context=True``.
The options are as follows:
0. Matplotlib's `builtin settings <rc_matplotlib>`_ and ProPlot's
:ref:`added settings <rc_proplot>` are all returned,
whether or not `~RcConfigurator.context` has changed them.
1. *Unchanged* `matplotlib settings <rc_matplotlib>`_ return ``None``.
All of ProPlot's :ref:`added settings <rc_proplot>` are returned
whether or not `~RcConfigurator.context` has changed them.
This is used in the `~proplot.axes.Axes.__init__` call to
`~proplot.axes.Axes.format`. When a lookup returns ``None``,
`~proplot.axes.Axes.format` does not apply it.
2. All unchanged settings return ``None``. This is used during
user calls to `~proplot.axes.Axes.format`.
Note
----
This is used by ProPlot internally but may also be useful for power users.
It was invented to prevent successive calls to `~proplot.axes.Axes.format`
from constantly looking up and re-applying unchanged settings. These
gratuitous lookups increased runtime significantly, and resulted in successive
calls to `~proplot.axes.Axes.format` overwriting the previous calls.
Example
-------
The below applies settings to axes in a specific figure using
`~RcConfigurator.context`.
>>> import proplot as plot
>>> with plot.rc.context(linewidth=2, ticklen=5):
>>> fig, ax = plot.subplots()
>>> ax.plot(data)
The below applies settings to a specific axes using `~proplot.axes.Axes.format`,
which uses `~RcConfigurator.context` internally.
>>> import proplot as plot
>>> fig, ax = plot.subplots()
>>> ax.format(linewidth=2, ticklen=5)
"""
# Add input dictionaries
for arg in args:
if not isinstance(arg, dict):
                raise ValueError(f'Non-dictionary argument {arg!r}.')
kwargs.update(arg)
# Add settings from file
        # TODO: Incorporate with matplotlib 'stylesheets'
if file is not None:
kw_proplot, kw_matplotlib = self._load_file(file)
kwargs.update(kw_proplot)
kwargs.update(kw_matplotlib)
# Activate context object
if mode not in range(3):
raise ValueError(f'Invalid mode {mode!r}.')
context = _RcContext(mode=mode, kwargs=kwargs, rc_new={}, rc_old={})
self._context.append(context)
return self
def get(self, key, *, context=False):
"""
Return a single setting.
Parameters
----------
key : str
The setting name.
context : bool, optional
If ``True``, then ``None`` is returned if the setting is not found
in the context mode dictionaries. See `~RcConfigurator.context`.
"""
mode = 0 if not context else None
return self._get_item(key, mode)
def fill(self, props, *, context=False):
"""
Return a dictionary filled with settings whose names match the
string values in the input dictionary.
Parameters
----------
props : dict-like
Dictionary whose values are `rc` setting names.
context : bool, optional
If ``True``, then each setting that is not found in the
context mode dictionaries is omitted from the output dictionary.
See `~RcConfigurator.context`.
"""
kw = {}
mode = 0 if not context else None
for key, value in props.items():
item = self._get_item(value, mode)
if item is not None:
kw[key] = item
return kw
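    # Example (illustrative): rc.fill({'lw': 'linewidth'}) -> {'lw': 0.6},
    # i.e. each dictionary value is treated as a setting name and looked up.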
def update(self, *args, **kwargs):
"""
Update several settings at once with a dictionary and/or
keyword arguments.
Parameters
----------
*args : str, dict, or (str, dict), optional
A dictionary containing `rc` keys and values. You can also
pass a "category" name as the first argument, in which case all
settings are prepended with ``'category.'``. For example,
``rc.update('axes', labelsize=20, titlesize=20)`` changes the
:rcraw:`axes.labelsize` and :rcraw:`axes.titlesize` properties.
**kwargs, optional
`rc` keys and values passed as keyword arguments. If the
name has dots, simply omit them.
"""
# Parse args
kw = {}
prefix = ''
if len(args) > 2:
raise ValueError(
f'rc.update() accepts 1-2 arguments, got {len(args)}. Usage '
'is rc.update(kw), rc.update(category, kw), '
'rc.update(**kwargs), or rc.update(category, **kwargs).'
)
elif len(args) == 2:
prefix = args[0]
kw = args[1]
elif len(args) == 1:
if isinstance(args[0], str):
prefix = args[0]
else:
kw = args[0]
# Apply settings
if prefix:
prefix = prefix + '.'
kw.update(kwargs)
for key, value in kw.items():
self.__setitem__(prefix + key, value)
@docstring.add_snippets
def reset(self, local=True, user=True, default=True):
"""
Reset the configurator to its initial state.
Parameters
----------
%(rc.params)s
"""
# Always remove context objects
self._context.clear()
# Update from default settings
# NOTE: see _remove_blacklisted_style_params bugfix
if default:
rc_matplotlib.update(_get_style_dicts('original', filter=False))
rc_matplotlib.update(rcsetup._rc_matplotlib_default)
rc_proplot.update(rcsetup._rc_proplot_default)
for key, value in rc_proplot.items():
kw_proplot, kw_matplotlib = self._get_synced_params(key, value)
rc_matplotlib.update(kw_matplotlib)
rc_proplot.update(kw_proplot)
# Update from user home
user_path = None
if user:
user_path = self._get_user_path()
if os.path.isfile(user_path):
self.load_file(user_path)
# Update from local paths
if local:
local_paths = self._get_local_paths()
for path in local_paths:
if path == user_path: # local files always have precedence
continue
self.load_file(path)
def _load_file(self, path):
"""
Return dictionaries of proplot and matplotlib settings loaded from the file.
"""
added = set()
path = os.path.expanduser(path)
kw_proplot = {}
kw_matplotlib = {}
with open(path, 'r') as fh:
for cnt, line in enumerate(fh):
# Parse line and ignore comments
stripped = line.split('#', 1)[0].strip()
if not stripped:
continue
pair = stripped.split(':', 1)
if len(pair) != 2:
warnings._warn_proplot(
                        f'Illegal line #{cnt + 1} in file {path!r}:\n{line!r}'
)
continue
# Get key value pair
key, val = pair
key = key.strip()
val = val.strip()
if key in added:
warnings._warn_proplot(
f'Duplicate key {key!r} on line #{cnt + 1} in file {path!r}.'
)
added.add(key)
# *Very primitive* type conversion system for proplot settings.
# Matplotlib does this automatically.
if REGEX_STRING.match(val): # also do this for matplotlib settings
val = val[1:-1] # remove quotes from string
if key in rc_proplot:
if not val:
val = None # older proplot versions supported this
elif val in ('True', 'False', 'None'):
val = eval(val) # rare case where eval is o.k.
else:
try:
val = float(val) if '.' in val else int(val)
except ValueError:
pass
# Add to dictionaries
try:
ikw_proplot, ikw_matplotlib = self._get_synced_params(key, val)
kw_proplot.update(ikw_proplot)
kw_matplotlib.update(ikw_matplotlib)
except KeyError:
warnings._warn_proplot(
                        f'Invalid key {key!r} on line #{cnt + 1} in file {path!r}.'
)
return kw_proplot, kw_matplotlib
def load_file(self, path):
"""
Load settings from the specified file.
Parameters
----------
path : str
The file path.
"""
kw_proplot, kw_matplotlib = self._load_file(path)
rc_proplot.update(kw_proplot)
rc_matplotlib.update(kw_matplotlib)
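    # A minimal .proplotrc accepted by load_file (values are illustrative):
    #     cycle: colorblind
    #     linewidth: 0.8
    #     axes.facecolor: 'w'   # quotes are stripped by REGEX_STRING above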
@staticmethod
def _save_rst(path):
"""
Used internally to create table for online docs.
"""
string = rcsetup._gen_rst_table()
with open(path, 'w') as fh:
fh.write(string)
@staticmethod
def _save_proplotrc(path, comment=False):
"""
Used internally to create initial proplotrc file and file for online docs.
"""
self = object() # self is unused when 'user' is False
RcConfigurator.save(self, path, user=False, backup=False, comment=comment)
def save(self, path=None, user=True, comment=None, backup=True, description=False):
"""
Save the current settings to a ``.proplotrc`` file. This writes
the default values commented out plus the values that *differ*
from the defaults at the top of the file.
Parameters
----------
path : str, optional
The path name. The default file name is ``.proplotrc`` and the default
directory is the home directory. Use ``path=''`` to save to the current
directory.
user : bool, optional
If ``True`` (the default), the settings you changed since importing
proplot are shown uncommented at the very top of the file.
backup : bool, optional
If the file already exists and this is set to ``True``, it is moved
to a backup file with the suffix ``.bak``.
comment : bool, optional
Whether to comment out the default settings. Default is the
value of `user`.
description : bool, optional
Whether to include descriptions of each setting as comments.
Default is ``False``.
"""
if path is None:
path = '~'
path = os.path.abspath(os.path.expanduser(path))
if os.path.isdir(path):
path = os.path.join(path, '.proplotrc')
if os.path.isfile(path) and backup:
os.rename(path, path + '.bak')
warnings._warn_proplot(
f'Existing proplotrc file {path!r} was moved to {path + ".bak"!r}.'
)
# Generate user-specific table, ignoring non-style related
# settings that may be changed from defaults like 'backend'
rc_user = ()
if user:
# Changed settings
rcdict = {
key: value for key, value in self
if value != rcsetup._get_default_param(key)
}
# Special handling for certain settings
# TODO: For now not sure how to detect if prop cycle changed since
# we cannot load it from _cmap_database in rcsetup.
rcdict.pop('interactive', None) # changed by backend
rcdict.pop('axes.prop_cycle', None)
# Filter and get table
rcdict = _get_filtered_dict(rcdict, warn=False)
rc_user_table = rcsetup._gen_yaml_table(rcdict, comment=False)
rc_user = ('# Settings changed by user', rc_user_table, '') # + blank line
# Generate tables and write
comment = _not_none(comment, user)
rc_proplot_table = rcsetup._gen_yaml_table(
rcsetup._rc_proplot, comment=comment, description=description,
)
rc_matplotlib_table = rcsetup._gen_yaml_table(
rcsetup._rc_matplotlib_default, comment=comment
)
with open(path, 'w') as fh:
fh.write('\n'.join((
'#--------------------------------------------------------------------',
'# Use this file to change the default proplot and matplotlib settings',
'# The syntax is identical to matplotlibrc syntax. For details see:',
'# https://proplot.readthedocs.io/en/latest/configuration.html',
'# https://matplotlib.org/3.1.1/tutorials/introductory/customizing',
'#--------------------------------------------------------------------',
*rc_user, # includes blank line
'# ProPlot settings',
rc_proplot_table,
'\n# Matplotlib settings',
rc_matplotlib_table,
)))
def items(self):
"""
Return an iterator that loops over all setting names and values.
Same as `dict.items`.
"""
for key in self:
yield key, self[key]
def keys(self):
"""
Return an iterator that loops over all setting names.
Same as `dict.keys`.
"""
for key in self:
yield key
def values(self):
"""
Return an iterator that loops over all setting values.
Same as `dict.values`.
"""
for key in self:
yield self[key]
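# Usage sketch for the `rc` instance created at the bottom of this module
# (hypothetical session; setting names and values are illustrative):
#     import proplot as plot
#     plot.rc['linewidth'] = 1.2           # __setitem__ syncs child settings
#     plot.rc.cycle = 'colorblind'         # attribute-style shorthand
#     plot.rc.update('axes', grid=True)    # category-prefixed update
#     with plot.rc.context(ticklen=5):
#         fig, ax = plot.subplots()        # old values restored on exit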
def config_inline_backend(fmt=None):
"""
Set up the `ipython inline backend \
<https://ipython.readthedocs.io/en/stable/interactive/magics.html#magic-matplotlib>`__
format and ensure that inline figures always look the same as saved
figures. This runs the following ipython magic commands:
.. code-block:: ipython
%config InlineBackend.figure_formats = rc['inlinefmt']
%config InlineBackend.rc = {} # never override rc settings
%config InlineBackend.close_figures = True \
# cells start with no active figures
%config InlineBackend.print_figure_kwargs = {'bbox_inches': None} \
# never override rc settings
When the inline backend is inactive or unavailable, this has no effect.
This function is called when you modify the :rcraw:`inlinefmt` property.
Parameters
----------
fmt : str or list of str, optional
The inline backend file format(s). Default is :rc:`inlinefmt`.
Valid formats include ``'jpg'``, ``'png'``, ``'svg'``, ``'pdf'``,
and ``'retina'``.
"""
# Note if inline backend is unavailable this will fail silently
ipython = get_ipython()
if ipython is None:
return
fmt = _not_none(fmt, rc_proplot['inlinefmt'])
if isinstance(fmt, str):
fmt = [fmt]
elif np.iterable(fmt):
fmt = list(fmt)
else:
raise ValueError(
f'Invalid inline backend format {fmt!r}. Must be string or list thereof.'
)
ipython.magic('config InlineBackend.figure_formats = ' + repr(fmt))
ipython.magic('config InlineBackend.rc = {}')
ipython.magic('config InlineBackend.close_figures = True')
ipython.magic("config InlineBackend.print_figure_kwargs = {'bbox_inches': None}")
def _get_cycle_colors(cycle):
"""
Update the color cycle.
"""
try:
colors = pcolors._cmap_database[cycle].colors
except (KeyError, AttributeError):
cycles = sorted(
name for name, cmap in pcolors._cmap_database.items()
if isinstance(cmap, pcolors.ListedColormap)
)
raise ValueError(
f'Invalid cycle name {cycle!r}. Options are: '
+ ', '.join(map(repr, cycles)) + '.'
)
return colors
def _get_default_dict():
"""
Get the default rc parameters dictionary with deprecated parameters filtered.
"""
# NOTE: Use RcParams update to filter and translate deprecated settings
# before actually applying them to rcParams down pipeline. This way we can
# suppress warnings for deprecated default params but still issue warnings
# when user-supplied stylesheets have deprecated params.
# WARNING: Some deprecated rc params remain in dictionary as None so we
# filter them out. Beware if hidden attribute changes.
rcdict = _get_filtered_dict(mpl.rcParamsDefault, warn=False)
with cbook._suppress_matplotlib_deprecation_warning():
rcdict = dict(RcParams(rcdict))
for attr in ('_deprecated_remain_as_none', '_deprecated_set'):
if hasattr(mpl, attr): # _deprecated_set is in matplotlib before v3
for deprecated in getattr(mpl, attr):
rcdict.pop(deprecated, None)
return rcdict
def _get_filtered_dict(rcdict, warn=True):
"""
Filter out blacklisted style parameters.
"""
# NOTE: This implements bugfix: https://github.com/matplotlib/matplotlib/pull/17252
# This fix is *critical* for proplot because we always run style.use()
# when the configurator is made. Without fix backend is reset every time
# you import proplot in jupyter notebooks. So apply retroactively.
rcdict_filtered = {}
for key in rcdict:
if key in mstyle.STYLE_BLACKLIST:
if warn:
warnings._warn_proplot(
f'Dictionary includes a parameter, {key!r}, that is not related '
'to style. Ignoring.'
)
else:
rcdict_filtered[key] = rcdict[key]
return rcdict_filtered
def _get_style_dicts(style, infer=False, filter=True):
"""
Return a dictionary of settings belonging to the requested style(s). If `infer`
is ``True``, two dictionaries are returned, where the second contains custom
ProPlot settings "inferred" from the matplotlib settings. If `filter` is ``True``,
invalid style parameters like `backend` are filtered out.
"""
# NOTE: This is adapted from matplotlib source for the following changes:
# 1. Add 'original' option. Like rcParamsOrig except we also *reload*
# from user matplotlibrc file.
# 2. When the style is changed we reset to the *default* state ignoring
# matplotlibrc. Matplotlib applies styles on top of current state
# (including matplotlibrc changes and runtime rcParams changes) but
# IMO the word 'style' implies a *rigid* static format.
# 3. Add a separate function that returns lists of style dictionaries so
# that we can modify the active style in a context block. ProPlot context
# is more conservative than matplotlib's rc_context because it gets
# called a lot (e.g. every time you make an axes and every format() call).
# Instead of copying the entire rcParams dict we just track the keys
# that were changed.
style_aliases = {
'538': 'fivethirtyeight',
'mpl20': 'default',
'mpl15': 'classic',
'original': mpl.matplotlib_fname(),
}
# Always apply the default style *first* so styles are rigid
kw_matplotlib = _get_default_dict()
    if style == 'default' or style is mpl.rcParamsDefault:
        if infer:  # keep the (kw_matplotlib, kw_proplot) return contract
            return kw_matplotlib, _infer_added_params(kw_matplotlib)
        return kw_matplotlib
# Apply "pseudo" default properties. Pretend some proplot settings are part of
# the matplotlib specification so they propagate to other styles.
kw_matplotlib['font.family'] = 'sans-serif'
kw_matplotlib['font.sans-serif'] = rcsetup._rc_matplotlib_default['font.sans-serif']
# Apply user input style(s) one by one
# NOTE: Always use proplot fonts if style does not explicitly set them.
if isinstance(style, str) or isinstance(style, dict):
styles = [style]
else:
styles = style
for style in styles:
if isinstance(style, dict):
kw = style
elif isinstance(style, str):
style = style_aliases.get(style, style)
if style in mstyle.library:
kw = mstyle.library[style]
else:
try:
kw = mpl.rc_params_from_file(style, use_default_template=False)
except IOError:
raise IOError(
f'Style {style!r} not found in the style library and input is '
'not a valid URL or path. Available styles are: '
+ ', '.join(map(repr, mstyle.available)) + '.'
)
else:
raise ValueError(f'Invalid style {style!r}. Must be string or dictionary.')
if filter:
kw = _get_filtered_dict(kw, warn=True)
kw_matplotlib.update(kw)
# Infer proplot params from stylesheet params
if infer:
kw_proplot = _infer_added_params(kw_matplotlib)
return kw_matplotlib, kw_proplot
else:
return kw_matplotlib
def _infer_added_params(kw_params):
"""
Infer values for proplot's "added" parameters from stylesheets.
"""
kw_proplot = {}
mpl_to_proplot = {
'font.size': ('tick.labelsize',),
'axes.titlesize': (
'abc.size', 'suptitle.size', 'title.size',
'leftlabel.size', 'rightlabel.size',
'toplabel.size', 'bottomlabel.size',
),
'text.color': (
'abc.color', 'suptitle.color', 'tick.labelcolor', 'title.color',
'leftlabel.color', 'rightlabel.color',
'toplabel.color', 'bottomlabel.color',
),
}
for key, params in mpl_to_proplot.items():
if key in kw_params:
value = kw_params[key]
for param in params:
kw_proplot[param] = value
return kw_proplot
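# For instance (hypothetical stylesheet fragment):
#     _infer_added_params({'font.size': 9.0, 'text.color': 'k'})
#     -> {'tick.labelsize': 9.0, 'abc.color': 'k', 'suptitle.color': 'k', ...}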
def use_style(style):
"""
Apply the `matplotlib style(s) \
<https://matplotlib.org/tutorials/introductory/customizing.html>`__
with `matplotlib.style.use`. This function is
called when you modify the :rcraw:`style` property.
Parameters
----------
style : str, dict, or list thereof
The matplotlib style name(s) or stylesheet filename(s), or dictionary(s)
of settings. Use ``'default'`` to apply matplotlib default settings and
``'original'`` to include settings from your user ``matplotlibrc`` file.
"""
# NOTE: This function is not really necessary but makes proplot's
# stylesheet-supporting features obvious. Plus changing the style does
# so much *more* than changing rc params or quick settings, so it is
# nice to have dedicated function instead of just another rc_param name.
kw_matplotlib, kw_proplot = _get_style_dicts(style, infer=True)
rc_matplotlib.update(kw_matplotlib)
rc_proplot.update(kw_proplot)
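# Example (style names ship with matplotlib; dicts of settings also work):
#     use_style('fivethirtyeight')
#     use_style({'axes.grid': True})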
@docstring.add_snippets
def register_cmaps(user=True, default=False):
"""
Register colormaps packaged with ProPlot or saved to the
``~/.proplot/cmaps`` folder. This is called on import.
Colormaps are registered according to their filenames -- for example,
``name.xyz`` will be registered as ``'name'``.
%(register.ext_table)s
To visualize the registered colormaps, use `~proplot.demos.show_cmaps`.
Parameters
----------
%(register_cmaps.params)s
"""
for i, dirname, filename in _iter_data_paths('cmaps', user=user, default=default):
path = os.path.join(dirname, filename)
cmap = pcolors.LinearSegmentedColormap.from_file(path, warn_on_failure=True)
if not cmap:
continue
if i == 0 and cmap.name.lower() in (
'phase', 'graycycle', 'romao', 'broco', 'corko', 'viko',
):
cmap.set_cyclic(True)
pcolors._cmap_database[cmap.name] = cmap
@docstring.add_snippets
def register_cycles(user=True, default=False):
"""
Register color cycles packaged with ProPlot or saved to the
``~/.proplot/cycles`` folder. This is called on import. Color cycles
are registered according to their filenames -- for example, ``name.hex``
will be registered as ``'name'``.
%(register.ext_table)s
To visualize the registered color cycles, use `~proplot.demos.show_cycles`.
Parameters
----------
%(register_cycles.params)s
"""
for _, dirname, filename in _iter_data_paths('cycles', user=user, default=default):
path = os.path.join(dirname, filename)
cmap = pcolors.ListedColormap.from_file(path, warn_on_failure=True)
if not cmap:
continue
pcolors._cmap_database[cmap.name] = cmap
@docstring.add_snippets
def register_colors(user=True, default=False, space='hcl', margin=0.10):
"""
Register the `open-color <https://yeun.github.io/open-color/>`_ colors,
XKCD `color survey <https://xkcd.com/color/rgb/>`_ colors, and colors
saved to the ``~/.proplot/colors`` folder. This is called on import.
The color survey colors are filtered to a subset that is "perceptually
distinct" in the HCL colorspace. The user color names are loaded from
``.txt`` files saved in ``~/.proplot/colors``. Each file should contain
one line per color in the format ``name : hex``. Whitespace is ignored.
To visualize the registered colors, use `~proplot.demos.show_colors`.
Parameters
----------
%(register_colors.params)s
space : {'hcl', 'hsl', 'hpl'}, optional
The colorspace used to detect "perceptually distinct" colors.
margin : float, optional
The margin by which a color's normalized hue, saturation, and
luminance channel values must differ from the normalized channel
values of the other colors to be deemed "perceptually distinct."
"""
# Reset native colors dictionary
mcolors.colorConverter.colors.clear() # clean out!
mcolors.colorConverter.cache.clear() # clean out!
# Add in base colors and CSS4 colors so user has no surprises
for name, dict_ in (('base', BASE_COLORS), ('css', mcolors.CSS4_COLORS)):
mcolors.colorConverter.colors.update(dict_)
# Load colors from file and get their HCL values
# NOTE: Colors that come *later* overwrite colors that come earlier.
hex = re.compile(rf'\A{pcolors.HEX_PATTERN}\Z') # match each string
for i, dirname, filename in _iter_data_paths('colors', user=user, default=default):
path = os.path.join(dirname, filename)
cat, ext = os.path.splitext(filename)
if ext != '.txt':
raise ValueError(
f'Unknown color data file extension ({path!r}). '
'All files in this folder should have extension .txt.'
)
# Read data
loaded = {}
with open(path, 'r') as fh:
for cnt, line in enumerate(fh):
# Load colors from file
stripped = line.strip()
if not stripped or stripped[0] == '#':
continue
pair = tuple(
item.strip().lower() for item in line.split(':')
)
if len(pair) != 2 or not hex.match(pair[1]):
warnings._warn_proplot(
f'Illegal line #{cnt + 1} in file {path!r}:\n'
f'{line!r}\n'
f'Lines must be formatted as "name: hexcolor".'
)
continue
# Never overwrite "base" colors with xkcd colors.
# Only overwrite with user colors.
name, color = pair
if i == 0 and name in BASE_COLORS:
continue
loaded[name] = color
# Add every user color and every opencolor color and ensure XKCD
# colors are "perceptually distinct".
if i == 1:
mcolors.colorConverter.colors.update(loaded)
elif cat == 'opencolor':
mcolors.colorConverter.colors.update(loaded)
OPEN_COLORS.update(loaded)
elif cat == 'xkcd':
# Always add these colors, but make sure not to add other
# colors too close to them.
hcls = []
filtered = []
for name in ALWAYS_ADD:
color = loaded.pop(name, None)
if color is None:
continue
if 'grey' in name:
name = name.replace('grey', 'gray')
hcls.append(to_xyz(color, space=space))
filtered.append((name, color))
mcolors.colorConverter.colors[name] = color
XKCD_COLORS[name] = color
# Get locations of "perceptually distinct" colors
# WARNING: Unique axis argument requires numpy version >=1.13
for name, color in loaded.items():
for string, replace in TRANSLATE_COLORS:
if string in name:
name = name.replace(string, replace)
if any(string in name for string in ALWAYS_REMOVE):
                    continue  # remove "unprofessional" names
hcls.append(to_xyz(color, space=space))
filtered.append((name, color)) # category name pair
hcls = np.asarray(hcls)
if not hcls.size:
continue
hcls = hcls / np.array([360, 100, 100])
hcls = np.round(hcls / margin).astype(np.int64)
_, idxs = np.unique(hcls, return_index=True, axis=0)
# Register "distinct" colors
for idx in idxs:
name, color = filtered[idx]
mcolors.colorConverter.colors[name] = color
XKCD_COLORS[name] = color
else:
raise ValueError(f'Unknown proplot color database {path!r}.')
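# Example of a line accepted in ~/.proplot/colors/*.txt (illustrative color):
#     ice blue: #d6fffa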
def register_fonts():
"""
Add fonts packaged with ProPlot or saved to the ``~/.proplot/fonts``
folder, if they are not already added. Detects ``.ttf`` and ``.otf`` files
-- see `this link \
<https://gree2.github.io/python/2015/04/27/python-change-matplotlib-font-on-mac>`__
for a guide on converting various other font file types to ``.ttf`` and
``.otf`` for use with matplotlib.
To visualize the registered fonts, use `~proplot.demos.show_fonts`.
"""
# Find proplot fonts
# WARNING: If you include a font file with an unrecognized style,
# matplotlib may use that font instead of the 'normal' one! Valid styles:
# 'ultralight', 'light', 'normal', 'regular', 'book', 'medium', 'roman',
# 'semibold', 'demibold', 'demi', 'bold', 'heavy', 'extra bold', 'black'
# https://matplotlib.org/api/font_manager_api.html
# For macOS the only fonts with 'Thin' in one of the .ttf file names
# are Helvetica Neue and .SF NS Display Condensed. Never try to use these!
paths_proplot = _get_data_paths('fonts', reverse=True)
fnames_proplot = set(mfonts.findSystemFonts(paths_proplot))
# Detect user-input ttc fonts and issue warning
fnames_proplot_ttc = {
file for file in fnames_proplot if os.path.splitext(file)[1] == '.ttc'
}
if fnames_proplot_ttc:
warnings._warn_proplot(
'Ignoring the following .ttc fonts because they cannot be '
'saved into PDF or EPS files (see matplotlib issue #3135): '
+ ', '.join(map(repr, sorted(fnames_proplot_ttc)))
+ '. Please consider expanding them into separate .ttf files.'
)
# Rebuild font cache only if necessary! Can be >50% of total import time!
fnames_all = {font.fname for font in mfonts.fontManager.ttflist}
fnames_proplot -= fnames_proplot_ttc
if not fnames_all >= fnames_proplot:
warnings._warn_proplot('Rebuilding font cache.')
if hasattr(mfonts.fontManager, 'addfont'):
# New API lets us add font files manually
for fname in fnames_proplot:
mfonts.fontManager.addfont(fname)
mfonts.json_dump(mfonts.fontManager, mfonts._fmcache)
else:
# Old API requires us to modify TTFPATH
# NOTE: Previously we tried to modify TTFPATH before importing
# font manager with hope that it would load proplot fonts on
# initialization. But 99% of the time font manager just imports
# the FontManager from cache, so this doesn't work.
paths = ':'.join(paths_proplot)
if 'TTFPATH' not in os.environ:
os.environ['TTFPATH'] = paths
elif paths not in os.environ['TTFPATH']:
os.environ['TTFPATH'] += ':' + paths
mfonts._rebuild()
# Remove ttc files and 'Thin' fonts *after* rebuild
# NOTE: 'Thin' filter is ugly kludge but without this matplotlib picks up on
# Roboto thin ttf files installed on the RTD server when compiling docs.
mfonts.fontManager.ttflist = [
font for font in mfonts.fontManager.ttflist
if os.path.splitext(font.fname)[1] != '.ttc'
        and 'Thin' not in os.path.basename(font.fname)
]
def _patch_validators():
"""
Fix the fontsize validators to allow for new font scalings.
"""
# First define valdiators
# NOTE: In the future will subclass RcParams directly and control the
# validators ourselves.
def _validate_fontsize(s):
fontsizes = list(mfonts.font_scalings)
if isinstance(s, str):
s = s.lower()
if s in fontsizes:
return s
try:
return float(s)
except ValueError:
raise ValueError(
f'{s!r} is not a valid font size. Valid sizes are: '
                    + ', '.join(map(repr, fontsizes))
)
def _validate_fontsize_None(s):
if s is None or s == 'None':
return None
else:
return _validate_fontsize(s)
_validate_fontsizelist = None
if hasattr(msetup, '_listify_validator'):
_validate_fontsizelist = msetup._listify_validator(_validate_fontsize)
# Apply new functions
validate = RcParams.validate
for key in list(validate): # modify in-place
validator = validate[key]
if validator is msetup.validate_fontsize:
validate[key] = _validate_fontsize
elif validator is getattr(msetup, 'validate_fontsize_None', None):
validate[key] = _validate_fontsize_None
elif validator is getattr(msetup, 'validate_fontsizelist', None):
if _validate_fontsizelist is not None:
validate[key] = _validate_fontsizelist
# Initialize .proplotrc file
_user_rc_file = os.path.join(os.path.expanduser('~'), '.proplotrc')
if not os.path.exists(_user_rc_file):
RcConfigurator._save_proplotrc(_user_rc_file, comment=True)
# Initialize customization folders
_rc_folder = os.path.join(os.path.expanduser('~'), '.proplot')
if not os.path.isdir(_rc_folder):
os.mkdir(_rc_folder)
for _rc_sub in ('cmaps', 'cycles', 'colors', 'fonts'):
_rc_sub = os.path.join(_rc_folder, _rc_sub)
if not os.path.isdir(_rc_sub):
os.mkdir(_rc_sub)
# Add custom font scalings to font_manager and monkey patch rcParams validator
# NOTE: This is because we prefer large sizes
if hasattr(mfonts, 'font_scalings'):
mfonts.font_scalings['med-small'] = 0.9
mfonts.font_scalings['med-large'] = 1.1
_patch_validators()
# Convert colormaps that *should* be LinearSegmented from Listed
for _name in ('viridis', 'plasma', 'inferno', 'magma', 'cividis', 'twilight'):
_cmap = pcolors._cmap_database.get(_name, None)
if _cmap and isinstance(_cmap, pcolors.ListedColormap):
del pcolors._cmap_database[_name]
pcolors._cmap_database[_name] = pcolors.LinearSegmentedColormap.from_list(
_name, _cmap.colors, cyclic=(_name == 'twilight')
)
# Register objects and configure settings
with timers._benchmark('cmaps'):
register_cmaps(default=True)
with timers._benchmark('cycles'):
register_cycles(default=True)
with timers._benchmark('colors'):
register_colors(default=True)
with timers._benchmark('fonts'):
register_fonts()
with timers._benchmark('rc'):
_ = RcConfigurator()
#: Instance of `RcConfigurator`. This is used to change global settings.
#: See the :ref:`configuration guide <ug_config>` for details.
rc = _
# Modify N of existing colormaps because ProPlot settings may have changed
# image.lut. We have to register colormaps and cycles first so that the 'cycle'
# property accepts named cycles registered by ProPlot. No performance hit here.
lut = rc['image.lut']
for cmap in pcolors._cmap_database.values():
if isinstance(cmap, mcolors.LinearSegmentedColormap):
cmap.N = lut
# Deprecated
inline_backend_fmt, rc_configurator = warnings._rename_objs(
'0.6',
inline_backend_fmt=config_inline_backend,
    rc_configurator=RcConfigurator,
)
| [
"[email protected]"
] | |
77eae7de5545c636d596feec9e0fe110b7b5700a | bc441bb06b8948288f110af63feda4e798f30225 | /architecture_view_sdk/model/flowable_service/bpmn_end_event_pb2.py | 3c2b2fa877b300c5b615c5a2704c5007ff77e7ce | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 3,097 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: bpmn_end_event.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from architecture_view_sdk.model.flowable_service import bpmn_links_pb2 as architecture__view__sdk_dot_model_dot_flowable__service_dot_bpmn__links__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='bpmn_end_event.proto',
package='flowable_service',
syntax='proto3',
serialized_options=_b('ZJgo.easyops.local/contracts/protorepo-models/easyops/model/flowable_service'),
serialized_pb=_b('\n\x14\x62pmn_end_event.proto\x12\x10\x66lowable_service\x1a=architecture_view_sdk/model/flowable_service/bpmn_links.proto\"F\n\x0c\x42PMNEndEvent\x12\n\n\x02id\x18\x01 \x01(\t\x12*\n\x05links\x18\x02 \x01(\x0b\x32\x1b.flowable_service.BPMNLinksBLZJgo.easyops.local/contracts/protorepo-models/easyops/model/flowable_serviceb\x06proto3')
,
dependencies=[architecture__view__sdk_dot_model_dot_flowable__service_dot_bpmn__links__pb2.DESCRIPTOR,])
_BPMNENDEVENT = _descriptor.Descriptor(
name='BPMNEndEvent',
full_name='flowable_service.BPMNEndEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='flowable_service.BPMNEndEvent.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='links', full_name='flowable_service.BPMNEndEvent.links', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=105,
serialized_end=175,
)
_BPMNENDEVENT.fields_by_name['links'].message_type = architecture__view__sdk_dot_model_dot_flowable__service_dot_bpmn__links__pb2._BPMNLINKS
DESCRIPTOR.message_types_by_name['BPMNEndEvent'] = _BPMNENDEVENT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
BPMNEndEvent = _reflection.GeneratedProtocolMessageType('BPMNEndEvent', (_message.Message,), {
'DESCRIPTOR' : _BPMNENDEVENT,
'__module__' : 'bpmn_end_event_pb2'
# @@protoc_insertion_point(class_scope:flowable_service.BPMNEndEvent)
})
_sym_db.RegisterMessage(BPMNEndEvent)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
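# Usage sketch (hypothetical; the nested `links` field takes a BPMNLinks
# message from bpmn_links_pb2):
#     event = BPMNEndEvent(id='endEvent_1')
#     payload = event.SerializeToString()
#     clone = BPMNEndEvent.FromString(payload)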
| [
"[email protected]"
] | |
03bca080a7ade2f5c8e31f19c07701b55b95c6aa | 8f7c595f2b9d075a89417760b7fbf9abb1fecb72 | /tele_twitter.py | 8b5573134e70b914312a9c197b1313e688781062 | [
"MIT"
] | permissive | MainakMaitra/trading-utils | 555ed240a20b26d4876f1490fc8a2d9273231fc5 | 3e73091b4d3432e74c385a9677b7f7ca4192c67f | refs/heads/main | 2023-07-04T09:19:40.122188 | 2021-08-08T09:01:37 | 2021-08-08T09:01:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,825 | py | """
Twitter -> Telegram
"""
import logging
import os
import time
from argparse import ArgumentParser
from peewee import *
from common import uuid_gen
from common.environment import GROUP_CHAT_ID
from common.logger import init_logging
from common.tele_notifier import send_message_to_telegram
from common.twitter_api import get_twitter_home_timeline
home_dir = os.getenv("HOME")
db = SqliteDatabase(home_dir + "/tele_twitter.db")
class TweetData(Model):
id = UUIDField(primary_key=True)
twitter_handle = CharField()
timestamp = BigIntegerField()
tweet_id = CharField()
tweet = CharField()
posted_at = DateTimeField(null=True)
class Meta:
database = db
@staticmethod
def save_from(twitter_handle, tweet, tweet_id, posted_at):
entity = dict(
id=uuid_gen(),
timestamp=time.time(),
twitter_handle=twitter_handle,
tweet_id=tweet_id,
tweet=tweet,
posted_at=posted_at,
)
TweetData.insert(entity).execute()
TweetData.create_table()
def save_data(tweet_data):
TweetData.save_from(**tweet_data)
def tweet_already_processed(current_tweet_id):
selected_tweet = TweetData.get_or_none(TweetData.tweet_id == current_tweet_id)
return selected_tweet is not None
def extract_tweet_id(new_tweet):
return new_tweet.id
def extract_tweet_time(recent_tweet):
return recent_tweet.created_at
def main(poll_freq_in_secs):
home_timeline = get_twitter_home_timeline()
logging.info("==> Found tweets {}".format(len(home_timeline)))
for tweet in home_timeline:
tweet_author_name = tweet.author.name
tweet_author_screen_name = tweet.author.screen_name
tweet_id = tweet.id
tweet_posted_date = tweet.created_at
formatted_posted_dt = tweet_posted_date.strftime("%H:%M(%d %B)")
tweet_text = tweet.text
if tweet_already_processed(tweet_id):
logging.warning(
"Old Tweet from {} at {} -> {} - already processed".format(
tweet_author_screen_name, tweet_posted_date, tweet_id
)
)
continue
else:
entity = dict(
twitter_handle=tweet_author_screen_name,
tweet=tweet_text,
tweet_id=tweet_id,
posted_at=tweet_posted_date,
)
save_data(entity)
if tweet_text.startswith("RT"):
continue
try:
header = f"""👀 {tweet_author_name} at [{formatted_posted_dt}](https://twitter.com/{tweet_author_screen_name}/status/{tweet_id})"""
send_message_to_telegram(
header, disable_web_preview=False, override_chat_id=GROUP_CHAT_ID
)
        except Exception:
send_message_to_telegram(
"🚨 Something went wrong trying to process {}".format(tweet)
)
logging.info(f"⏱ Sleeping for {poll_freq_in_secs}")
time.sleep(poll_freq_in_secs)
def parse_args():
parser = ArgumentParser(description=__doc__)
parser.add_argument(
"-w",
"--wait-in-seconds",
type=int,
help="Wait between sending tweets in seconds",
default=30,
)
parser.add_argument(
"-r", "--run-once", action="store_true", default=False, help="Run once"
)
return parser.parse_args()
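# Example invocation (assumes the `common` helpers are configured with valid
# Twitter and Telegram credentials):
#     python tele_twitter.py --wait-in-seconds 60
#     python tele_twitter.py -r    # single pass, then exit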
if __name__ == "__main__":
args = parse_args()
init_logging()
poll_freq_in_secs = args.wait_in_seconds
run_once = args.run_once
while True:
try:
main(poll_freq_in_secs)
if run_once:
logging.info("Running once => Exit")
break
except Exception:
logging.exception("🚨🚨🚨 Something is wrong")
| [
"[email protected]"
] | |
3bcd4da0f4a0652b9ceae41db83ea03b41ab9201 | 99bfa15723593ea351191d82fac80e36ab25aab1 | /LeetCode/merge_interval.py | 602b47cb4dc68d07416d23bb801d695654ec3578 | [] | no_license | Kartavya-verma/Python-Projects | f23739ef29eab67a8e25569e3f7bf110e42576cb | 02ffe926a7ed82bc783e4c4034a2fa53d4d1a870 | refs/heads/master | 2023-06-22T07:59:39.595084 | 2021-07-18T15:51:55 | 2021-07-18T15:51:55 | 387,139,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | # interval = [[1,3],[2,6],[8,10],[15,18]]
interval = [[1,4],[4,5]]
# sort by start so overlapping intervals end up adjacent
intervals = sorted(interval)
print(intervals)
merged = []
for cur in intervals:
    # current interval touches or overlaps the last merged one -> extend it
    if merged and cur[0] <= merged[-1][1]:
        merged[-1][1] = max(merged[-1][1], cur[1])
    else:
        merged.append(cur)
print(merged)
# l = list()
# for i in interval:
# for j in range(i[0], i[1]+1):
# l.append(j)
# print(l) | [
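# Expected results for the two sample inputs above (worked examples):
#     [[1,3],[2,6],[8,10],[15,18]] -> [[1, 6], [8, 10], [15, 18]]
#     [[1,4],[4,5]]                -> [[1, 5]]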
"[email protected]"
] | |
4df3ceca1f9d06815d43914cad8c76bf3d206085 | cd78d84441e69c1fc40b6a6e9e235e7cf6882454 | /python/48.rotate_image.py | 75ea51649a271ec6a48d11d85c9fa7c4a00e2bc0 | [] | no_license | buy/leetcode | 53a12d4e0298284a5a2034c88353d0dc195aa66c | da0e834e3f2e3016396fffc96ef943ab9ec58ea4 | refs/heads/master | 2021-01-13T01:48:01.176632 | 2015-06-14T06:17:17 | 2015-06-14T06:17:17 | 31,863,627 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 821 | py | # You are given an n x n 2D matrix representing an image.
# Rotate the image by 90 degrees (clockwise).
# Follow up:
# Could you do this in-place?
# /*
# * clockwise rotate
# * first reverse up to down, then swap the symmetry
# * 1 2 3 7 8 9 7 4 1
# * 4 5 6 => 4 5 6 => 8 5 2
# * 7 8 9 1 2 3 9 6 3
# */
class Solution:
# @param matrix, a list of lists of integers
# @return a list of lists of integers
def rotate(self, matrix):
matrix.reverse()
for i in range(len(matrix)):
for j in range(i):
matrix[i][j], matrix[j][i] = matrix[j][i], matrix[i][j]
# /*
# * anticlockwise rotate
# * first reverse left to right, then swap the symmetry
# * 1 2 3 3 2 1 3 6 9
# * 4 5 6 => 6 5 4 => 2 5 8
# * 7 8 9 9 8 7 1 4 7
# */
| [
"[email protected]"
] | |
2a7ecd17534e9ce6ebfd36b4b2168cfe3d21c7a2 | 03d1982e2d594f13567afb37f2a5cea2f0d631b6 | /setup.py | cde1cd7244e468fc53e94c6fb1355245c8ab6099 | [
"Apache-2.0"
] | permissive | maartendraijer/django-fluent-dashboard | e26f29d434528d3b11360549c6452812176e4ecb | 8a00fa810f001d1a778eada88b8a390f495f9994 | refs/heads/master | 2020-04-03T04:22:38.353890 | 2012-09-26T19:55:18 | 2012-09-26T19:58:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,478 | py | #!/usr/bin/env python
from setuptools import setup, find_packages
from os.path import dirname, join
import sys, os
# When creating the sdist, make sure the django.mo file also exists:
try:
os.chdir('fluent_dashboard')
from django.core.management.commands.compilemessages import compile_messages
compile_messages(sys.stderr)
finally:
os.chdir('..')
setup(
name='django-fluent-dashboard',
version='0.4.0dev',
license='Apache License, Version 2.0',
install_requires=[
'django-admin-tools>=0.4.1', # 0.4.1 is the first release with Django 1.3 support.
],
extras_require = {
'cachestatus': ['dashboardmods>=0.2.2'],
},
description='An improved django-admin-tools dashboard for Django projects',
long_description=open(join(dirname(__file__), 'README.rst')).read(),
author='Diederik van der Boor',
author_email='[email protected]',
url='https://github.com/edoburu/django-fluent-dashboard',
download_url='https://github.com/edoburu/django-fluent-dashboard/zipball/master',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
]
)
| [
"[email protected]"
] | |
65bb65eef08655b1bc9f00fecef269efb447b5c5 | 8e115fc8273fd7123438fa8cb85cd7b7992246f5 | /App_Login/migrations/0003_follow.py | 1e2b7e845c904aeaa10db6c63664e1517b698f1c | [] | no_license | tasim313/Social_Media_django_project | 35160f83fa278acd616f9f952ac5acd3ec6430e6 | 78cf24305a32dfe937d7fcb031ed2f78649a4775 | refs/heads/main | 2023-06-16T00:45:39.025388 | 2021-07-11T06:38:38 | 2021-07-11T06:38:38 | 384,453,963 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | # Generated by Django 2.2.5 on 2021-07-07 13:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('App_Login', '0002_auto_20210706_1237'),
]
operations = [
migrations.CreateModel(
name='Follow',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_date', models.DateTimeField(auto_now_add=True)),
('follower', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follower', to=settings.AUTH_USER_MODEL)),
('following', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='following', to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
c321533388cef074c4a7501847d5ddca0b9ae10e | fcc88521f63a3c22c81a9242ae3b203f2ea888fd | /Python3/0838-Push-Dominoes/soln-1.py | b7c9ed30d6e1a5c6ea1654056f23001653264cab | [
"MIT"
] | permissive | wyaadarsh/LeetCode-Solutions | b5963e3427aa547d485d3a2cb24e6cedc72804fd | 3719f5cb059eefd66b83eb8ae990652f4b7fd124 | refs/heads/master | 2022-12-06T15:50:37.930987 | 2020-08-30T15:49:27 | 2020-08-30T15:49:27 | 291,811,790 | 0 | 1 | MIT | 2020-08-31T19:57:35 | 2020-08-31T19:57:34 | null | UTF-8 | Python | false | false | 1,479 | py | class Solution:
def pushDominoes(self, dominoes: str) -> str:
leftRs = []
left_R = None
n = len(dominoes)
ans = [None] * n
for i, d in enumerate(dominoes):
if d == 'R':
left_R = 0
ans[i] = 'R'
elif d == 'L':
left_R = None
ans[i] = 'L'
else:
if left_R is not None:
left_R += 1
ans[i] = 'R'
else:
ans[i] = '.'
leftRs.append(left_R)
right_L = None
for i in reversed(range(n)):
d = dominoes[i]
if d == 'L':
right_L = 0
ans[i] = 'L'
elif d == 'R':
right_L = None
ans[i] = 'R'
else:
if right_L is not None:
right_L += 1
if leftRs[i] is None:
ans[i] = 'L'
else:
if leftRs[i] < right_L:
ans[i] = 'R'
elif leftRs[i] == right_L:
ans[i] = '.'
else:
ans[i] = 'L'
else:
if leftRs[i] is not None:
ans[i] = 'R'
else:
ans[i] = '.'
return ''.join(ans)
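# Quick sanity check (illustrative usage, not part of the submission):
# Solution().pushDominoes(".L.R...LR..L..") should return "LL.RR.LLRRLL.."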
| [
"[email protected]"
] | |
06ebdcbe79fc30e8f7dbeae6f53b24398009b675 | 6df0d7a677129e9b325d4fdb4bbf72d512dd08b2 | /PycharmProjects/my_practice/untitled/1.py | 851d7606c9479fbaebdefa185ba912d562a89abd | [] | no_license | yingxingtianxia/python | 01265a37136f2ad73fdd142f72d70f7c962e0241 | 3e1a7617a4b6552bce4a7e15a182f30e1bae221e | refs/heads/master | 2021-06-14T15:48:00.939472 | 2019-12-13T05:57:36 | 2019-12-13T05:57:36 | 152,200,507 | 0 | 0 | null | 2021-06-10T20:54:26 | 2018-10-09T06:40:10 | Python | UTF-8 | Python | false | false | 102 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
f = open('1.txt', 'a')
f.write('this is test\n')
f.close() | [
"[email protected]"
] | |
76c8f94b2f1120d880d69b6121372442259a80bc | a08409f712dc0b1045f695fd2ffee2bb6cc7835b | /math/0x02-calculus/17-integrate.py~ | 07a15858a238dab1c6d0b0de4984d551527088f4 | [] | no_license | mohsenabedelaal/holbertonschool-machine_learning | d3f2137761e10d620472ca6e5f3288c45898381d | 2765a09ba3064168b024952d18b1a2471952c8a2 | refs/heads/main | 2023-06-02T16:11:55.600921 | 2021-06-10T19:08:13 | 2021-06-10T19:08:13 | 318,244,087 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 754 | #!/usr/bin/env python3
"""derivative poly"""
def poly_integral(poly, C=0):
    """Return the integral of a polynomial given as a coefficient list"""
    if not isinstance(poly, list) or len(poly) == 0:
        return None
    if not isinstance(C, (int, float)):
        return None
    integral = [C]
    for i in range(0, len(poly)):
        if not isinstance(poly[i], (int, float)):
            return None
        result = poly[i] / (i + 1)
        # store whole-number coefficients as ints
        if result == int(result):
            result = int(result)
        integral.append(result)
    # drop trailing zero coefficients so the result stays minimal
    while len(integral) > 1 and integral[-1] == 0:
        integral.pop()
    return integral
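# Worked example (illustrative): integrating 5 + 3x + x^3 gives
# poly_integral([5, 3, 0, 1]) -> [0, 5, 1.5, 0, 0.25]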
| [
"[email protected]"
] | ||
6562ceefb580fe6394f1e927b79291c2063a56c7 | 5692e8a3357f7afe6284b43c4a9770d81957a511 | /student/migrations/0015_auto_20201119_1605.py | 8fc7c2324f4ec87bd9f70cdb6eabaa98d7202789 | [] | no_license | OmarFateh/student-management-system | 49bcfbdf15a631cf7f64ff200d530a44a44409ac | 2c53f81a55fe631406b642365a68de19501c0f17 | refs/heads/master | 2023-07-16T00:02:54.796428 | 2021-08-25T01:54:02 | 2021-08-25T01:54:02 | 355,033,774 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | # Generated by Django 3.1.2 on 2020-11-19 14:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('student', '0014_sessionyear_date_range'),
]
operations = [
migrations.AlterModelOptions(
name='student',
options={'ordering': ['user__full_name'], 'verbose_name': 'Student', 'verbose_name_plural': 'Students'},
),
]
| [
"[email protected]"
] | |
89509c3ebcab0d26460f14bf5810e0a088b1aa54 | 1fa16c1df35fd8247f9274b26a70523a514210f0 | /dependencies/amitools-0.1.0/amitools/vamos/lib/dos/FileManager.py | c9ed60eea544afb45ca4756963bba90fd9cacfbe | [
"GPL-1.0-or-later",
"MIT"
] | permissive | limi/AGSImager | 25a816b1c5b9ce8553cd6e3a47babce574f15119 | d3771800308e61a7a07df4a9b361e5bd5ba9e409 | refs/heads/master | 2023-01-10T11:19:52.248376 | 2020-04-14T19:59:23 | 2020-04-14T19:59:23 | 255,813,813 | 0 | 0 | MIT | 2020-04-15T05:26:57 | 2020-04-15T05:26:56 | null | UTF-8 | Python | false | false | 8,753 | py | import sys
import os.path
import os
import logging
import errno
import stat
from amitools.vamos.Log import log_file
from amitools.vamos.AccessStruct import AccessStruct
from DosStruct import DosPacketDef
from amitools.vamos.lib.lexec.ExecStruct import MessageDef
from Error import *
from DosProtection import DosProtection
from FileHandle import FileHandle
class FileManager:
def __init__(self, path_mgr, alloc, mem):
self.path_mgr = path_mgr
self.alloc = alloc
self.mem = mem
self.files_by_b_addr = {}
# get current umask
self.umask = os.umask(0)
os.umask(self.umask)
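    # (os.umask only reports the previous mask when setting a new one,
    # so set 0 and immediately restore the original value)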
def setup(self, fs_handler_port):
self.fs_handler_port = fs_handler_port
# setup std input/output
self.std_input = FileHandle(sys.stdin,'<STDIN>','',need_close=False)
self.std_output = FileHandle(sys.stdout,'<STDOUT>','',need_close=False)
self._register_file(self.std_input)
self._register_file(self.std_output)
def finish(self,have_native_shell):
if not have_native_shell: #the Shell otherwise closes the streams for us
self._unregister_file(self.std_input)
self._unregister_file(self.std_output)
def get_fs_handler_port(self):
return self.fs_handler_port
def _register_file(self, fh):
baddr = fh.alloc_fh(self.alloc, self.fs_handler_port)
self.files_by_b_addr[baddr] = fh
log_file.info("registered: %s" % fh)
def _unregister_file(self,fh):
if fh.b_addr in self.files_by_b_addr:
check = self.files_by_b_addr[fh.b_addr]
if check != fh:
raise ValueError("Invalid File to unregister: %s" % fh)
else:
raise ValueError("Invalid File to unregister: %s" % fh)
del self.files_by_b_addr[fh.b_addr]
log_file.info("unregistered: %s"% fh)
fh.free_fh(self.alloc)
def get_input(self):
return self.std_input
def get_output(self):
return self.std_output
def open(self, lock, ami_path, f_mode):
try:
# special names
uname = ami_path.upper()
# thor: NIL: and CONSOLE: also work as device names
# and the file names behind are ignored.
if uname.startswith('NIL:'):
sys_name = "/dev/null"
if f_mode == "rwb+":
f_mode = "rb+"
fobj = open(sys_name, f_mode)
fh = FileHandle(fobj, ami_path, sys_name, is_nil = True)
elif uname == '*' or uname.startswith('CONSOLE:'):
sys_name = ''
fh = FileHandle(sys.stdout,'*','',need_close=False)
else:
# map to system path
sys_path = self.path_mgr.ami_to_sys_path(lock,ami_path,searchMulti=True)
if sys_path == None:
log_file.info("file not found: '%s' -> '%s'" % (ami_path, sys_path))
return None
# make some checks on existing file
if os.path.exists(sys_path):
# if not writeable -> no append mode
if f_mode == "rwb+":
f_mode = "rb+"
if not os.access(sys_path, os.W_OK):
if f_mode[-1] == '+':
f_mode = f_mode[:-1]
else:
# if the file does not exist, but the mode is MODE_READWRITE, create it.
if f_mode == "rwb+":
f_mode = "wb+"
log_file.debug("opening file: '%s' -> '%s' f_mode=%s" % (ami_path, sys_path, f_mode))
fobj = open(sys_path, f_mode)
fh = FileHandle(fobj, ami_path, sys_path)
self._register_file(fh)
return fh
except IOError as e:
log_file.info("error opening: '%s' -> '%s' f_mode=%s -> %s" % (ami_path, sys_path, f_mode, e))
return None
def close(self, fh):
fh.close()
self._unregister_file(fh)
def get_by_b_addr(self, b_addr, for_writing = None):
if b_addr == 0:
return None
if b_addr in self.files_by_b_addr:
fh = self.files_by_b_addr[b_addr]
# AmigaDos has no problem reading from an output console handle
# or writing to the input handle for the console.
if for_writing == True and fh.obj == sys.stdin:
return self.std_output
elif for_writing == False and fh.obj == sys.stdout:
return self.std_input
return fh
else:
addr = b_addr << 2
raise ValueError("Invalid File Handle at b@%06x = %06x" % (b_addr, addr))
def delete(self, lock, ami_path):
sys_path = self.path_mgr.ami_to_sys_path(lock,ami_path)
if sys_path == None or not os.path.exists(sys_path):
log_file.info("file to delete not found: '%s'" % (ami_path))
return ERROR_OBJECT_NOT_FOUND
try:
if os.path.isdir(sys_path):
os.rmdir(sys_path)
else:
os.remove(sys_path)
return 0
except OSError as e:
if e.errno == errno.ENOTEMPTY: # Directory not empty
log_file.info("can't delete directory: '%s' -> not empty!" % (ami_path))
return ERROR_DIRECTORY_NOT_EMPTY
else:
log_file.info("can't delete file: '%s' -> %s" % (ami_path, e))
return ERROR_OBJECT_IN_USE
def rename(self, lock, old_ami_path, new_ami_path):
old_sys_path = self.path_mgr.ami_to_sys_path(lock,old_ami_path)
new_sys_path = self.path_mgr.ami_to_sys_path(lock,new_ami_path)
if old_sys_path == None or not os.path.exists(old_sys_path):
log_file.info("old file to rename not found: '%s'" % old_ami_path)
return ERROR_OBJECT_NOT_FOUND
if new_sys_path == None:
log_file.info("new file to rename not found: '%s'" % new_ami_path)
return ERROR_OBJECT_NOT_FOUND
try:
os.rename(old_sys_path, new_sys_path)
return 0
except OSError as e:
log_file.info("can't rename file: '%s','%s' -> %s" % (old_ami_path, new_ami_path, e))
return ERROR_OBJECT_IN_USE
def is_file_system(self, lock, name):
uname = name.upper()
if uname.startswith('NIL:'):
return False
elif uname == '*' or uname.startswith('CONSOLE:'):
return False
# Everything else is a file system here, we don't support any
# other devices.
return True
def set_protection(self, lock, ami_path, mask):
sys_path = self.path_mgr.ami_to_sys_path(lock, ami_path)
if sys_path == None or not os.path.exists(sys_path):
      log_file.info("file to set protection not found: '%s'", ami_path)
return ERROR_OBJECT_NOT_FOUND
prot = DosProtection(mask)
posix_mask = 0
if prot.is_e():
posix_mask |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
if prot.is_w():
posix_mask |= stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
if prot.is_r():
posix_mask |= stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
posix_mask &= ~self.umask
log_file.info("set protection: '%s': %s -> '%s': posix_mask=%03o umask=%03o", ami_path, prot, sys_path, posix_mask, self.umask)
try:
os.chmod(sys_path, posix_mask)
return NO_ERROR
except OSError:
return ERROR_OBJECT_WRONG_TYPE
def create_dir(self, lock, ami_path):
sys_path = self.path_mgr.ami_to_sys_path(lock, ami_path)
try:
os.mkdir(sys_path)
return NO_ERROR
except OSError:
return ERROR_OBJECT_EXISTS
# ----- Direct Handler Access -----
# callback from port manager for fs handler port
# -> Async I/O
def put_msg(self, port_mgr, msg_addr):
msg = AccessStruct(self.mem,MessageDef,struct_addr=msg_addr)
dos_pkt_addr = msg.r_s("mn_Node.ln_Name")
dos_pkt = AccessStruct(self.mem,DosPacketDef,struct_addr=dos_pkt_addr)
reply_port_addr = dos_pkt.r_s("dp_Port")
pkt_type = dos_pkt.r_s("dp_Type")
log_file.info("DosPacket: msg=%06x -> pkt=%06x: reply_port=%06x type=%06x", msg_addr, dos_pkt_addr, reply_port_addr, pkt_type)
# handle packet
if pkt_type == ord('R'): # read
fh_b_addr = dos_pkt.r_s("dp_Arg1")
buf_ptr = dos_pkt.r_s("dp_Arg2")
size = dos_pkt.r_s("dp_Arg3")
# get fh and read
fh = self.get_by_b_addr(fh_b_addr)
data = fh.read(size)
self.mem.access.w_data(buf_ptr, data)
got = len(data)
log_file.info("DosPacket: Read fh_b_addr=%06x buf=%06x len=%06x -> got=%06x fh=%s", fh_b_addr, buf_ptr, size, got, fh)
dos_pkt.w_s("dp_Res1", got)
elif pkt_type == ord('W'): # write
fh_b_addr = dos_pkt.r_s("dp_Arg1")
buf_ptr = dos_pkt.r_s("dp_Arg2")
size = dos_pkt.r_s("dp_Arg3")
fh = self.get_by_b_addr(fh_b_addr)
data = self.mem.access.r_data(buf_ptr, size)
fh.write(data)
put = len(data)
log_file.info("DosPacket: Write fh=%06x buf=%06x len=%06x -> put=%06x fh=%s", fh_b_addr, buf_ptr, size, put, fh)
dos_pkt.w_s("dp_Res1", put)
else:
raise UnsupportedFeatureError("Unsupported DosPacket: type=%d" % pkt_type)
# do reply
if not port_mgr.has_port(reply_port_addr):
port_mgr.register_port(reply_port_addr)
port_mgr.put_msg(reply_port_addr, msg_addr)
| [
"[email protected]"
] | |
91a80f36411eaa0287c81fe0a4414a82d2b3022a | a7104434e0ddb4575ef0a6cd467bac6620570de8 | /hunter108.py | ff44819a2ca4401163bea362d9ae1cf41d6bc5c3 | [] | no_license | GauthamAjayKannan/GUVI-1 | 7b276eef3195bec9671eec8bb6bcc588cb5c970e | fafabab93df55abcc399f6e2664286ed511fd683 | refs/heads/master | 2020-06-25T07:38:08.465414 | 2019-05-17T11:24:53 | 2019-05-17T11:24:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | x = list(input())
list1 = []
out = 0
if len(x) == 1:
out = int(x[0]) * int(x[0])
else:
for i in x:
list1.append(int(i))
    for i in range(len(list1) - 1):
        out += list1[i] ** list1[i + 1]
    out += list1[-1] ** list1[0]
print(out)
| [
"[email protected]"
] | |
def3dce5cc56dc5116d525765b8c6bc66cb2e7fa | a44cfbdacdb9d695533f425ee72da86f904232c1 | /bin/summarize-days | 3a7583591a34ce37d84e1baec23fe453d926fdf1 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | sofie-lu/quac | 434d1481949ad0a229e10b7ccc725f54740c2d44 | 03e3bd9691dddd819f629aba628e9fe6d45c2d3b | refs/heads/master | 2020-04-08T09:33:54.217874 | 2014-05-15T20:32:00 | 2014-05-15T20:32:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 744 | #!/usr/bin/env python
'''Parse the given metadata pickle file and print, TSV format, a summary of
each day's metadata on stdout. Column order matches the metadata field
documentation.'''
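# Output columns, tab-separated: day, count, count_geotag, min_id, max_id.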
# Copyright (c) 2012-2013 Los Alamos National Security, LLC, and others.
import argparse
import sys
import quacpath
import pickle_glue
import tsv_glue
import u
ap = argparse.ArgumentParser()
ap.add_argument('file', metavar='METADATA_FILE')
args = u.parse_args(ap)
tsv = tsv_glue.Writer(sys.stdout.fileno())
for (day, md) in sorted(pickle_glue.File(args.file).data['days'].items()):
tsv.writerow([str(day),
md['count'] or 0,
md['count_geotag'],
md['min_id'],
md['max_id']])
| [
"[email protected]"
] | ||
25933a755301dda6561a58f195d7462cdc9f384c | a9e3f3ad54ade49c19973707d2beb49f64490efd | /Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/api_admin/api/filters.py | 63d4bf6ec7132ca2326fad9c709142a6713249fd | [
"MIT",
"AGPL-3.0-only",
"AGPL-3.0-or-later"
] | permissive | luque/better-ways-of-thinking-about-software | 8c3dda94e119f0f96edbfe5ba60ca6ec3f5f625d | 5809eaca7079a15ee56b0b7fcfea425337046c97 | refs/heads/master | 2021-11-24T15:10:09.785252 | 2021-11-22T12:14:34 | 2021-11-22T12:14:34 | 163,850,454 | 3 | 1 | MIT | 2021-11-22T12:12:31 | 2019-01-02T14:21:30 | JavaScript | UTF-8 | Python | false | false | 432 | py | """
Filters for api_admin api
"""
from rest_framework import filters
class IsOwnerOrStaffFilterBackend(filters.BaseFilterBackend):
"""
    Filter that only allows users to see their own objects, or all objects for staff users.
"""
def filter_queryset(self, request, queryset, view):
if request.user.is_staff:
return queryset
else:
return queryset.filter(user=request.user)
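# Illustrative wiring (the view class is hypothetical, not part of this app):
# class ApiAccessRequestListView(ListAPIView):
#     filter_backends = (IsOwnerOrStaffFilterBackend,)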
| [
"[email protected]"
] | |
76ea21e0cd0bb9f8f9684fc16048be3713d1df62 | 1cc54d31a4a443230668ca063bcd27179ff096c2 | /store/urls.py | fbf6972755a0e56ef5d0ca947869dfff8b9f903d | [] | no_license | akhmadakhmedov/e-commerce | 8d84f0ae7acd4dc80c8afbe3ab55ed13873ef631 | 6708aa62dec08be9b18fae15125eeef266d869e3 | refs/heads/main | 2023-05-29T21:36:40.354231 | 2021-06-14T13:23:22 | 2021-06-14T13:23:22 | 370,982,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 350 | py | from django.urls import path
from . import views
urlpatterns = [
path('', views.store, name='store'),
path('category/<slug:category_slug>/', views.store, name='products_by_category'),
path('category/<slug:category_slug>/<product_slug>/', views.product_detail, name='product_detail'),
path('search/', views.search, name='search'),
]
| [
"[email protected]"
] | |
53aa1b2409b3fe45fb8cacb3d6c9abc63b5229eb | f6f3ade5a59fcb904a147fa3cf1933a1b225338f | /src/gate_timer.py | 536a3cef3cb573db60d205d844c69d50ccab9872 | [] | no_license | HajimeKawahara/autobop | 3b559011f9dceba68b02e47cd95fdef4fa9ef41e | 2c99625895206d24587db90a2ac03d1e536eb9ca | refs/heads/master | 2021-01-16T01:03:01.351588 | 2018-03-18T09:15:53 | 2018-03-18T09:15:53 | 107,845,791 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 607 | py | #!/usr/bin/python
import sys
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import pylab
import argparse
#import chord_probability as cp
import rest
def gate_stop(mnow, counter, finger=1, width=5.0, c=65.0):
    if c == np.inf:
        counter = counter + 1
        return mnow, counter
    numrest = rest.get_numrest()
    # stop probability (sigmoid type)
    stopp = 1.0 / (1 + np.exp(-(1.0 / width) * (float(counter) - c)))
    j = np.random.random()
    if j < stopp:
        mnow = numrest * np.ones(finger, dtype=int)
        counter = -1
    else:
        counter = counter + 1
    return mnow, counter
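# Sanity check of the sigmoid above (illustrative): at counter == c the stop
# probability is 1 / (1 + exp(0)) = 0.5, rising toward 1 as counter grows,
# so longer runs stop with ever higher probability.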
| [
"[email protected]"
] | |
55e4a9778ff59c0161d9877f8b727552e30befcb | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5738606668808192_0/Python/Nihilant/p3.py | 9a79afbff433b6e056e4bf1c99769fccfd98c045 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,096 | py | import sys, functools, math
def jc(n, j):
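    # Emits j "jamcoins" of length n: 0/1 strings (first and last digit 1)
    # that are composite when read in every base 2..10; one nontrivial
    # divisor per base is printed alongside each coin as proof.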
coin_n = 2**(n-1) + 1
for i in range(j):
test = True
while test:
coin = bin(coin_n)[2:]
sol = []
for base in range(2, 11):
num = int(coin, base=base)
k = -1
limit = int(math.sqrt(num))
                for div in range(2, limit + 1):  # include the root itself
if num % div == 0:
k = div
break
if k == -1:
coin_n = coin_n + 2
break
else:
sol.append(k)
if len(sol) == 9:
coin_n = coin_n + 2
print(coin, ' '.join(map(str, sol)))
test = False
if __name__ == "__main__":
f = sys.stdin
if len(sys.argv) >= 2:
fn = sys.argv[1]
if fn != '-':
f = open(fn)
T = int(f.readline())
for i in range(T):
N, J = f.readline().strip('\n').split(" ")
print("Case #{0}:".format(i + 1))
jc(int(N), int(J)) | [
"[email protected]"
] | |
3f1ce17c7e56aa343e288281207e4e0013191cf9 | ec53949dafa4b6ad675d679b05ed7c83fef2c69a | /DataStructuresAndAlgo/DynamicProgramming/FibonacciTabulation.py | d691cd1c6b7a1de451aa33b49df8d84df1b3b17e | [] | no_license | tpotjj/Python | 9a5a20a53cd7a6ec14386c1db8ce155e0fc9ab8a | ca73c116ada4d05c0c565508163557744c86fc76 | refs/heads/master | 2023-07-11T16:37:10.039522 | 2021-08-14T11:17:55 | 2021-08-14T11:17:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | def fibTab(n):
tb = [0, 1]
for i in range(2, n):
tb.append(tb[i-1] + tb[i-2])
return tb[n-1]
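# fibTab(6) returns 5: the table fills to [0, 1, 1, 2, 3, 5] and the last
# entry is returned. The function assumes n >= 1; smaller values would
# index the table from the wrong end.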
print(fibTab(6)) | [
"[email protected]"
] | |
5bcb0760c6c64e527ed4a662ff790c3cb71afad6 | b1ff576cdde5adf698b98446538e0b56d18f070f | /grading/apps.py | b507c75018b33f6f0904ff9ce425d1006d934d9a | [] | no_license | DUMBALINYOLO/gbc_oms | e3cfba17a12f3600b6503fc70cc9f3dcab5cc0e2 | cdea6fd81333088b2db9911140681fec9577132a | refs/heads/main | 2023-08-20T11:48:36.418990 | 2021-10-11T23:25:35 | 2021-10-11T23:25:35 | 322,593,446 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | from django.apps import AppConfig
class GradingConfig(AppConfig):
name = 'grading'
# def ready(self):
# import grading.signals
| [
"[email protected]"
] | |
3cf0e063b91a5be11fd48040ca02637fab5c720d | cb1d0dd68b1136b8a371f7d2b423e45171e98ab7 | /src/xsd_trips/urls.py | 4d153c6902a452d9f38693e3b2a855184692fcd9 | [] | no_license | ScubaJimmE/xSACdb | 86640ab791327392f88eb4993c858aa6d340c758 | 1996ab286ee0446b0a0e38882104bbf8904d8bdc | refs/heads/develop | 2021-07-15T04:47:11.279138 | 2016-05-25T01:44:05 | 2016-05-25T01:44:05 | 62,212,226 | 0 | 0 | null | 2021-03-20T00:40:24 | 2016-06-29T09:09:50 | Python | UTF-8 | Python | false | false | 256 | py | from django.conf.urls import patterns, include, url
from django.conf import settings
from views import *
urlpatterns = patterns('',
url(r'^$', TripList.as_view(), name='TripList'),
url(r'^new/$', TripCreate.as_view(), name='TripCreate'),
)
| [
"[email protected]"
] | |
da9e00f2af1599c983cb133c32b539da17ece7fe | 155fa6aaa4ef31cc0dbb54b7cf528f36743b1663 | /Static and Class Methods/Gym/subscription.py | c93a716815e0c338d34e9dadac30833811a61828 | [] | no_license | GBoshnakov/SoftUni-OOP | efe77b5e1fd7d3def19338cc7819f187233ecab0 | 0145abb760b7633ca326d06a08564fad3151e1c5 | refs/heads/main | 2023-07-13T18:54:39.761133 | 2021-08-27T08:31:07 | 2021-08-27T08:31:07 | 381,711,275 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 467 | py | class Subscription:
_id = 0
def __init__(self, date, customer_id, trainer_id, exercise_id):
Subscription._id += 1
self.date = date
self.customer_id = customer_id
self.trainer_id = trainer_id
self.exercise_id = exercise_id
self.id = Subscription._id
@staticmethod
def get_next_id():
return Subscription._id + 1
def __repr__(self):
return f"Subscription <{self.id}> on {self.date}"
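    # Illustrative usage (not part of the original exercise):
    #   s = Subscription("14.05.2021", 1, 2, 3)  # first instance gets id 1
    #   Subscription.get_next_id()               # -> 2
    #   print(s)                                 # Subscription <1> on 14.05.2021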
| [
"[email protected]"
] | |
cf0bc4a4904ba5ea0b1a33c31385a14eaf269f3c | 02546f99c91e46d66055ba8022f00619dbf9edcf | /lungsc/figures_immune_paper/fig4.py | 7d3f5ff2df49c1e027f182ebf3521fba76bec829 | [
"MIT"
] | permissive | iosonofabio/lung_neonatal_immune | 84a137492242a3946873e567db9eea531a90ecd6 | d0f12d4c24a778d0b7b8febf7accbc46adb7c162 | refs/heads/master | 2022-11-15T15:11:57.574654 | 2020-07-08T00:56:54 | 2020-07-08T00:56:54 | 258,082,671 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,274 | py | # vim: fdm=indent
'''
author: Fabio Zanini
date: 12/07/19
content: Plot panels for Fig 4.
'''
import os
import sys
import glob
import gzip
import pickle
import subprocess as sp
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.lines as mlines
import seaborn as sns
from lungsc.pilots.load_dataset import DatasetLung, versions
ctms = ['Mac I', 'Mac II', 'Mac III', 'Mac IV', 'Mac V']
fig_fdn = '../../figures/immune_paper_figs/immune_paper_figure_4/'
if __name__ == '__main__':
ds0 = DatasetLung.load(preprocess=True, version=versions[-2])
ds0.query_samples_by_metadata(
'(cellType == "immune") & (doublet == 0)', inplace=True)
ds = ds0.query_samples_by_metadata('cellSubtype in @ctms', local_dict=locals())
if False:
print('Feature selection')
features = ds.feature_selection.overdispersed_within_groups('Mousename', inplace=False)
dsf = ds.query_features_by_name(features)
print('PCA')
dsc = dsf.dimensionality.pca(n_dims=25, robust=False, return_dataset='samples')
print('tSNE')
vs = dsc.dimensionality.tsne(perplexity=30)
print('Load tSNE from file')
vs = pd.read_csv(
'../../data/sequencing/datasets/all_{:}/tsne_immune.tsv'.format(versions[-2]),
sep='\t',
index_col=0,
)
vs = vs.loc[ds.samplenames]
if True:
print('Plot tSNE with Gal, Car4, Itgax, C1qa, Plac8, Ifitm6')
genes = ['Gal', 'H2-Eb1', 'Itgax', 'Car4', 'C1qa', 'Plac8', 'Ifitm6']
for gene in genes:
fig, ax = plt.subplots(figsize=(4.8, 4.2))
ds.plot.scatter_reduced_samples(
vs,
ax=ax,
s=12,
alpha=0.30,
cmap='viridis',
color_by=gene,
color_log=True,
)
ax.grid(False)
ax.set_axis_off()
fig.tight_layout()
if True:
for ext in ['svg', 'pdf', ['png', 600]]:
if isinstance(ext, list):
ext, dpi = ext
fig.savefig(fig_fdn+'immune_tsne_{:}.{:}'.format(
gene, ext),
dpi=dpi)
else:
fig.savefig(fig_fdn+'immune_tsne_{:}.{:}'.format(
gene, ext))
if True:
print('Make table with top DE genes within macrophages')
fn_comp = '../../data/gene_lists/immune_DEGs_macros.pkl'
if not os.path.isfile(fn_comp):
comps = {}
for cst in ctms:
print('DE for {:}'.format(cst))
ds.samplesheet['is_focal'] = ds.samplesheet['cellSubtype'] == cst
dsp = ds.split('is_focal')
# Subsample
for key in dsp:
if dsp[key].n_samples > 300:
dsp[key].subsample(300, inplace=True)
comp = dsp[True].compare(dsp[False])
comp['log2_fc'] = np.log2(dsp[True].counts.mean(axis=1) + 0.1) - np.log2(dsp[False].counts.mean(axis=1) + 0.1)
comp.name = cst
comps[cst] = comp
del ds.samplesheet['is_focal']
with open(fn_comp, 'wb') as f:
pickle.dump(comps, f)
else:
with open(fn_comp, 'rb') as f:
comps = pickle.load(f)
if False:
print('Save tables to file')
tops = {}
for cst in ctms:
            fn_comp_tsv = '../../data/gene_lists/immune_DEGs_{:}.tsv'.format(cst)
comp = comps[cst]
top = comp.loc[comp['log2_fc'] > 0].nlargest(50, 'statistic')
tops[cst] = top
top.to_csv(fn_comp_tsv, sep='\t', index=True)
top_sum = pd.DataFrame([], index=np.arange(50))
for mp, top in tops.items():
top_sum[mp] = top.index
fn_comp_tsv_sum = '../../data/gene_lists/immune_DEGs_macros_summary.tsv'
top_sum.to_csv(fn_comp_tsv_sum, sep='\t', index=False)
if True:
print('Plot heatmap with single top DE genes')
tops = {}
for cst, comp in comps.items():
tops[cst] = comp.loc[comp['log2_fc'] > 0].nlargest(5, 'statistic').index.tolist()
genes = sum(tops.values(), [])
genes = [
# Common
'Ptprc',
'Cd68',
'Axl',
'Dab2',
# Mac I
'Gal',
'Mcm5',
'Mcm2',
'Mcm3',
'Mcm4',
'Mcm6',
'Bub1',
'Plk1',
'Top2a',
'Mki67',
# Mac II,
'Car4',
'Atp6v0d2',
'Mgll',
'Krt19',
'Slc39a2',
'Coro6',
'Marco',
'Siglecf',
'Gpnmb',
'Ear1',
'Cd200r4',
'Ctsk',
'Ly75',
'Bhlhe41',
'Slc7a2',
'Cdh1',
'Pex11a',
# Mac III
'Itgax',
'Adgrl3',
# Mac IV
'Fcrls',
'Pf4',
'C1qa',
'C1qb',
'C1qc',
'C3ar1',
'Tmem176b',
'Cxcl12',
'Ccl12',
'Cxcl16',
'Stab1',
'Ms4a7',
'Ms4a4a',
'Igfbp4',
'Apoe',
'Lgmn',
'Maf',
# Mac V
'Pla2g7',
'Ifitm2',
'Ifitm3',
'Ifitm6',
'Plac8',
'Pglyrp1',
'Serpinb10',
'Adgre4',
'Adgre5',
'Napsa',
'Rnase6',
'Fyb',
'Clec4a1',
'Itga4',
'Samhd1',
]
data = pd.DataFrame([], index=genes)
for cst in ctms:
dsi = ds.query_samples_by_metadata(
'cellSubtype == @cst',
local_dict=locals(),
)
mat = np.log10(0.1 + dsi.counts.loc[genes]).mean(axis=1)
data[cst] = mat
# Normalize by max expression of that gene
data += 1
data = (data.T / data.max(axis=1)).T
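        # (zero counts give log10(0.1) = -1, so the +1 shift maps "absent"
        # to 0 before each gene row is scaled to a maximum of 1)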
fig, ax = plt.subplots(figsize=(3, 10.5))
sns.heatmap(
data,
ax=ax,
cmap='plasma',
vmin=0,
vmax=1,
fmt='.1f',
xticklabels=True,
yticklabels=True,
cbar=False,
)
for tk in ax.get_yticklabels():
tk.set_rotation(0)
for tk in ax.get_xticklabels():
tk.set_rotation(90)
ax.set_xlim(0, 5)
ax.set_ylim(len(genes), 0)
fig.tight_layout()
if True:
fig.savefig(fig_fdn+'heatmap_single_genes_full.png')
if True:
print('Plot heatmap with pathways')
pathways = [
('cell cycle', ['Ncapd2', 'Mcm5', 'Mcm7', 'Cdca8', 'Smc2']),
('glycolysis', ['Cluh', 'Dbi', 'Eno1', 'Ldha', 'Pkm']),
('lipid metabolism', ['Lpl', 'Lipa', 'Abcg1', 'Sdc4', 'Abca9', 'Abca1']),
('matrix\nremodelling', ['Crispld2', 'Spint1', 'Tgm2']),
('angiogenesis', ['Fn1', 'Il18', 'Axl', 'Gas6', 'Pf4', 'Apoe']),
('alveolar', ['Adgrl3', 'Clec4n', 'Pparg', 'Ear2', 'Itgax', 'Car4', 'Bhlhe41', 'Trim29']),
('cell migration', ['Ccr2', 'Ccr5', 'Cx3cr1', 'Cxcl16', 'Cxcl2']),
('antibacterial', ['Acp5', 'Mpeg1', 'Plac8', 'Rnase6', 'Lyz2']),
('complement', ['C1qc', 'C1qa', 'C1qb', 'C3ar1']),
('alternative\nactivation', ['Ms4a8a', 'Axl', 'Il18', 'Maf', 'Lgmn']),
('type-I IFN', ['Adgrl3', 'Ifitm6', 'Ifitm3', 'Ifi27l2a', 'Ifitm2']),
('neg reg of\ninflammation', ['Cd200r4', 'Gpnmb', 'Il1rn', 'Dapk1', 'Dok2', 'Cd300a', 'Nr4a1', 'Lst1']),
]
genes = sum((x[1] for x in pathways), [])
data = pd.DataFrame([], index=genes)
for cst in ctms:
dsi = ds.query_samples_by_metadata(
'cellSubtype == @cst',
local_dict=locals(),
)
mat = np.log10(0.1 + dsi.counts.loc[genes]).mean(axis=1)
data[cst] = mat
# Normalize by max expression of that gene
data += 1
data = (data.T / data.max(axis=1)).T
fig, axs = plt.subplots(
2, 1, figsize=(11, 4), sharex=True,
gridspec_kw={'height_ratios': [1, 20]})
sns.heatmap(
data.iloc[:, :5].T,
ax=axs[1],
cmap='plasma',
vmin=0,
vmax=1,
fmt='.1f',
xticklabels=True,
yticklabels=True,
cbar=False,
)
for tk in axs[1].get_yticklabels():
tk.set_rotation(0)
for tk in axs[1].get_xticklabels():
tk.set_rotation(90)
tk.set_fontsize(8)
axs[1].set_ylim(5, 0)
axs[1].set_xlim(0, len(genes))
i = 0
for ipw, (pw, gns) in enumerate(pathways):
if i != 0:
axs[1].plot([i] * 2, [0, len(genes)], lw=2, color='lightgrey', alpha=0.9)
i += len(gns)
# Legend
labels = ['none', 'low', 'mid', 'high']
sfun = plt.cm.plasma
handles = [
axs[1].scatter([], [], marker='s', s=50, color=sfun(0)),
axs[1].scatter([], [], marker='s', s=50, color=sfun(0.33)),
axs[1].scatter([], [], marker='s', s=50, color=sfun(0.67)),
axs[1].scatter([], [], marker='s', s=50, color=sfun(1.0)),
]
leg = axs[1].legend(
handles, labels,
title='Gene\nexpression:',
bbox_to_anchor=(1.01, 0.99),
loc='upper left',
)
axs[0].set_ylim(0, 1)
axs[0].set_xlim(0, len(genes))
color_d = dict(zip(
(x[0] for x in pathways),
sns.color_palette('muted', n_colors=len(pathways)),
))
i = 0
for ipw, (pw, gns) in enumerate(pathways):
w = len(gns)
rect = plt.Rectangle(
(i, 0), w, 1,
facecolor=color_d[pw],
edgecolor='none',
lw=0,
)
axs[0].add_artist(rect)
wt = i + 0.5 * w - 0.15 * w * (pw == 'lipid metabolism')
ht = 2 + 1.5 * (ipw % 2)
axs[0].text(
wt, ht, pw, ha='center', va='bottom',
fontsize=10,
clip_on=False,
)
if ipw % 2:
axs[0].plot(
[wt] * 2, [ht - 0.2, 1.2], lw=1, color='k',
clip_on=False,
)
i += w
axs[0].set_axis_off()
fig.tight_layout(h_pad=0.01)
if True:
fig.savefig(fig_fdn+'heatmap_pathways.png')
plt.ion()
plt.show()
| [
"[email protected]"
] | |
5690b8a65b35121276c3493f5273eae7f9e1b7fb | 609ee4aad38036c29456581f821a9bad4d6b729a | /tests/test_pay.py | 37b2629d535646bc20848fbf05772211f9a8c3b2 | [] | no_license | sdkwe/pywe-pay | 32f14d218b0f8c029fb08a54df99ba70b90374b4 | daf1699c7dafd0960359b0c3f570f32cc906dc5f | refs/heads/master | 2020-05-29T15:13:05.371833 | 2020-04-27T07:32:54 | 2020-04-27T07:32:54 | 62,115,428 | 5 | 4 | null | null | null | null | UTF-8 | Python | false | false | 677 | py | # -*- coding: utf-8 -*-
import time
from pywe_pay import WeChatPay
from local_config_example import WechatPayConfig
class TestPayCommands(object):
def test_native_unifiedorder(self):
native = WechatPayConfig.get('JSAPI', {})
wxpay = WeChatPay(native.get('appID'), native.get('apiKey'), native.get('mchID'))
result = wxpay.order.create(body=u'支付测试', notify_url='https://a.com', out_trade_no=int(time.time() * 1000), total_fee=1, trade_type='NATIVE')
assert isinstance(result, dict)
assert result.get('return_code') == 'SUCCESS'
assert result.get('result_code') == 'SUCCESS'
assert result.get('code_url')
| [
"[email protected]"
] |