import time
from neomodel import (
StructuredNode,
StringProperty,
IntegerProperty,
BooleanProperty,
)
from authlib.oauth2.rfc6749 import (
TokenMixin,
AuthorizationCodeMixin,
)
class OAuth2AuthorizationCodeMixin(AuthorizationCodeMixin):
code = StringProperty(max_length=120, unique_index=True, required=True)
client_id = StringProperty(max_length=48)
redirect_uri = StringProperty(default="")
response_type = StringProperty(default="")
scope = StringProperty(default="")
nonce = StringProperty()
auth_time = IntegerProperty(required=True, default=lambda: int(time.time()))
code_challenge = StringProperty()
code_challenge_method = StringProperty(max_length=48)
def is_expired(self):
return self.auth_time + 300 < time.time()
def get_redirect_uri(self):
return self.redirect_uri
def get_scope(self):
return self.scope
def get_auth_time(self):
return self.auth_time
def get_nonce(self):
return self.nonce
class OAuth2TokenMixin(TokenMixin):
client_id = StringProperty(max_length=48)
token_type = StringProperty(max_length=40)
access_token = StringProperty(
max_length=255, unique_index=True, required=True
)
refresh_token = StringProperty(max_length=255, index=True)
scope = StringProperty(default="")
revoked = BooleanProperty(default=False)
issued_at = IntegerProperty(required=True, default=lambda: int(time.time()))
expires_in = IntegerProperty(required=True, default=0)
def get_client_id(self):
return self.client_id
def get_scope(self):
return self.scope
def get_expires_in(self):
return self.expires_in
def get_expires_at(self):
return self.issued_at + self.expires_in
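# A minimal sketch (not part of the original module; the class name is illustrative):
# a concrete, persistable node could combine neomodel's StructuredNode, imported above,
# with one of these mixins.
class OAuth2Token(StructuredNode, OAuth2TokenMixin):
    pass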
|
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Copy a directory and ignore some paths.
This copies directories and files without access stats.
"""
import os
import shutil
import sys
# This is python's implementation of shutil.copytree with some modifications
# and simplifications. Under PSF license:
# https://docs.python.org/2/library/shutil.html#copytree-example
# Changes: Removed symlink option, don't call copystat on directories and use
# copy instead of copy2 to not copy file stats.
def copytree(src, dst, ignore):
names = os.listdir(src)
ignored_names = ignore(src, names)
os.makedirs(dst)
errors = []
for name in names:
if name in ignored_names:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if os.path.isdir(srcname):
copytree(srcname, dstname, ignore)
else:
shutil.copy(srcname, dstname)
except (IOError, os.error) as why:
errors.append((srcname, dstname, str(why)))
except shutil.Error as err:
errors.extend(err.args[0])
if errors:
raise shutil.Error(errors)
def ignore(p, files):
return [
f for path in sys.argv[3:]
for f in files
if (os.path.abspath(os.path.join(p, f)) == path or
f == path)]
copytree(sys.argv[1], sys.argv[2], ignore)
|
'''
Generate the merkle tree root hash in Python.
I use https://github.com/bitcoin/bitcoin as reference:
BlockBuildMerkleTree --> Satoshi implementation
BlockMerkleRoot ---> new bitcoin core implementation
'''
import pandas as pd
from hashlib import sha256
from io import StringIO
# h( h(1) + h(2) )
# 0df4085b3a65bd26ca6ab608c0f70c41213f77e56bc5b33bd9899db5d39a7cd8
# h( h(3) + h(4) )
# b26c7b49a69fe9a789facdaaad0af0bac4cd588db345d297f03359a5e40d73d2
# h( h( h(1) + h(2) ) + h( h(3) + h(4) ) )
# 93b46a24b0a418c5f6c31b4058dc5d0f3338a30951d3b4b5a74e9072f145c766
dataset = StringIO("""\
transaction1_serialized_A_B_3
transaction2_serialized_B_C_1
transaction3_serialized_D_E_2
transaction4_serialized_E_B_1
transaction5_serialized_C_B_2
transaction6_serialized_D_A_1
""")
df = pd.read_csv(dataset, encoding='utf-8', header=None)
hashes = df.iloc[:, 0].apply(lambda x: sha256(x.encode('utf-8')).hexdigest()).tolist()
while len(hashes) > 1:
if len(hashes) % 2 != 0:
hashes.append(hashes[-1])
i = 0
j = 0
while i + 1 < len(hashes):
hashes[j] = sha256(str(hashes[i] + hashes[i + 1]).encode('utf-8')).hexdigest()
i += 2
j += 1
hashes = hashes[:int(len(hashes) / 2)]
# tree condensed in a hash
print(hashes[0])
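# A reusable sketch of the same pairwise fold used above (illustrative, not part of the
# original script): odd-sized levels duplicate their last hash, exactly as in the loop above.
def merkle_root(leaf_hashes):
    level = list(leaf_hashes)
    while len(level) > 1:
        if len(level) % 2 != 0:
            level.append(level[-1])
        level = [sha256((level[i] + level[i + 1]).encode('utf-8')).hexdigest()
                 for i in range(0, len(level), 2)]
    return level[0]
# Calling merkle_root(...) on the same list of leaf hashes reproduces the value printed above.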
|
# -*- coding:utf-8 -*-
# https://mp.weixin.qq.com/s/C6o6T9ju34vAxNBg5zobWw
import math
import os
def JudgeEvenOrOddNumber(num):
if num % 2 == 1:
print("{0} is Odd.".format(num))
else:
print("{0} is Even.".format(num))
if num & 1 == 1:
print("{0} is Odd.".format(num))
else:
print("{0} is Even.".format(num))
pass
def SwapTwoNumber(x, y):
print("Before Swap X:{0},Y:{1}.".format(x, y))
# Triditional Method
temp = x
x = y
y = temp
print("After Swap X:{0},Y:{1}.".format(x, y))
# Use Bit Operation
x = x ^ y
y = x ^ y
x = x ^ y
print("After Swap X:{0},Y:{1}.".format(x, y))
pass
def FindNoRepeatNumber(arr):
# X ^ X = 0
a = 0
i = 0
while i < len(arr):
a = a ^ arr[i]
i += 1
print("Not Repeat Number is {0}.".format(a))
pass
def Pow(m, n):
# m^n
i = 0
    r = 1  # start from 1 so the repeated multiplication accumulates correctly
while i < n:
r = r * m
i += 1
print("Traditional{0}^{1} is {2}.".format(m, n, r))
#
sum = 1
tmp = m
nt = n
    while nt != 0:
        if nt & 1 == 1:
            sum *= tmp
        tmp *= tmp
        nt = nt >> 1
    print("Bit {0}^{1} is {2}.".format(m, n, sum))
pass
def PowderTwoNotLargeN(n):
i = 0
while i < 5:
        # Shift amounts 1, 2, 4, 8, 16 spread the highest set bit into the lower bits
        t = 1 << i
i += 1
n |= n >> t
return (n + 1) >> 1
def FindN(n):
pass
if __name__ == "__main__":
pass
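    # Example calls (an illustrative sketch, not in the original):
    # JudgeEvenOrOddNumber(7)              # reports 7 as odd, via modulo and via the bitwise check
    # SwapTwoNumber(3, 5)                  # swaps with a temp variable, then again with XOR
    # FindNoRepeatNumber([1, 1, 2, 9, 2])  # XOR-folds the list and prints 9
    # Pow(2, 10)                           # naive loop vs. square-and-multiply, both print 1024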
|
# This file was automatically created by FeynRules $Revision: 535 $
# Mathematica version: 7.0 for Mac OS X x86 (64-bit) (November 11, 2008)
# Date: Fri 18 Mar 2011 18:40:51
from object_library import all_couplings, Coupling
from function_library import complexconjugate, re, im, csc, sec, acsc, asec
################
# R2 couplings #
################
R2_3Gq = Coupling(name = 'R2_3Gq',
value = '2.0*G**3/(48.0*cmath.pi**2)',
order = {'QCD':3})
R2_3Gg = Coupling(name = 'R2_3Gg',
value = 'Ncol*G**3/(48.0*cmath.pi**2)*(7.0/4.0+lhv)',
order = {'QCD':3})
#=============================================================================================
# 4-gluon R2 couplings
#=============================================================================================
# Gluon contribution to it
GC_4GR2_Gluon_delta5 = Coupling(name = 'GC_4GR2_Gluon_delta5',
value = '-4.0*complex(0,1)*RGR2*(2.0*lhv+5.0)',
order = {'QCD':4})
GC_4GR2_Gluon_delta7 = Coupling(name = 'GC_4GR2_Gluon_delta7',
value = '2.0*complex(0,1)*RGR2*(2.0*lhv+7.0)',
order = {'QCD':4})
GC_4GR2_2Struct = Coupling(name = 'GC_4GR2_2Struct',
value = '2.0*complex(0,1)*RGR2*Ncol*(lhv+3.0)',
order = {'QCD':4})
GC_4GR2_4Struct = Coupling(name = 'GC_4GR2_4Struct',
value = '-complex(0,1)*RGR2*Ncol*(4.0*lhv+11.0)',
order = {'QCD':4})
# Fermion contribution to it
GC_4GR2_Fermion_delta5 = Coupling(name = 'GC_4GR2_Fermion_delta5',
value = '(2.0/Ncol)*5.0*complex(0,1)*RGR2',
order = {'QCD':4})
GC_4GR2_Fermion_delta11 = Coupling(name = 'GC_4GR2_Fermion_delta11',
value = '-(2.0/Ncol)*11.0*complex(0,1)*RGR2',
order = {'QCD':4})
GC_4GR2_5Struct = Coupling(name = 'GC_4GR2_5Struct',
value = '5.0*complex(0,1)*RGR2',
order = {'QCD':4})
GC_4GR2_11Struct = Coupling(name = 'GC_4GR2_11Struct',
value = '-11.0*complex(0,1)*RGR2',
order = {'QCD':4})
#=============================================================================================
R2_GQQ = Coupling(name = 'R2_GQQ',
value = '-complex(0,1)*G**3/(16.0*cmath.pi**2)*((Ncol**2-1)/(2.0*Ncol))*(1.0+lhv)',
order = {'QCD':3})
R2_GGq = Coupling(name = 'R2_GGq',
value = 'complex(0,1)*G**2/(48.0*cmath.pi**2)',
order = {'QCD':2})
R2_GGb = Coupling(name = 'R2_GGb',
value = 'complex(0,1)*G**2*(-6.0*MB**2)/(48.0*cmath.pi**2)',
order = {'QCD':2})
R2_GGt = Coupling(name = 'R2_GGt',
value = 'complex(0,1)*G**2*(-6.0*MT**2)/(48.0*cmath.pi**2)',
order = {'QCD':2})
R2_GGg_1 = Coupling(name = 'R2_GGg_1',
value = 'complex(0,1)*G**2*Ncol/(48.0*cmath.pi**2)*(1.0/2.0+lhv)',
order = {'QCD':2})
R2_GGg_2 = Coupling(name = 'R2_GGg_2',
value = '-complex(0,1)*G**2*Ncol/(48.0*cmath.pi**2)*lhv',
order = {'QCD':2})
R2_QQq = Coupling(name = 'R2_QQq',
value = 'complex(0,1)*G**2*(Ncol**2-1)/(32.0*cmath.pi**2*Ncol)',
order = {'QCD':2})
R2_QQb = Coupling(name = 'R2_QQb',
value = 'complex(0,1)*G**2*(Ncol**2-1)*(-2.0*MB)/(32.0*cmath.pi**2*Ncol)',
order = {'QCD':2})
R2_QQt = Coupling(name = 'R2_QQt',
value = 'complex(0,1)*G**2*(Ncol**2-1)*(-2.0*MT)/(32.0*cmath.pi**2*Ncol)',
order = {'QCD':2})
################
# UV couplings #
################
UV_3Gg = Coupling(name = 'UV_3Gg',
value = '-G_UVg*G',
order = {'QCD':3})
UV_3Gq = Coupling(name = 'UV_3Gq',
value = '-G_UVq*G',
order = {'QCD':3})
UV_3Gb = Coupling(name = 'UV_3Gb',
value = '-G_UVb*G',
order = {'QCD':3})
UV_3Gt = Coupling(name = 'UV_3Gt',
value = '-G_UVt*G',
order = {'QCD':3})
UV_4Gg = Coupling(name = 'UV_4Gg',
value = '2.0*complex(0,1)*G_UVg*(G**2)',
order = {'QCD':4})
UV_4Gq = Coupling(name = 'UV_4Gq',
value = '2.0*complex(0,1)*G_UVq*(G**2)',
order = {'QCD':4})
UV_4Gb = Coupling(name = 'UV_4Gb',
value = '2.0*complex(0,1)*G_UVb*(G**2)',
order = {'QCD':4})
UV_4Gt = Coupling(name = 'UV_4Gt',
value = '2.0*complex(0,1)*G_UVt*(G**2)',
order = {'QCD':4})
UV_GQQg = Coupling(name = 'UV_GQQg',
value = 'complex(0,1)*G_UVg*G',
order = {'QCD':3})
UV_GQQq = Coupling(name = 'UV_GQQq',
value = 'complex(0,1)*G_UVq*G',
order = {'QCD':3})
UV_GQQb = Coupling(name = 'UV_GQQb',
value = 'complex(0,1)*G_UVb*G',
order = {'QCD':3})
UV_GQQt = Coupling(name = 'UV_GQQt',
value = 'complex(0,1)*G_UVt*G',
order = {'QCD':3})
UV_bMass = Coupling(name = 'UV_bMass',
value = 'bMass_UV',
order = {'QCD':2})
UV_tMass = Coupling(name = 'UV_tMass',
value = 'tMass_UV',
order = {'QCD':2})
|
import gdal
import numpy as np
class RSImage(object):
def __init__(self, file_path):
self.img_path = file_path
self.img_metaInfo = None
self.projection = None
self.dataTypeName = None
self.geoTransform = None
self.bandCount = 1
self.dataset = None
self.img_arr = None
self.read_info()
self.raster_X = self.dataset.RasterXSize
self.raster_Y = self.dataset.RasterYSize
self.bandCount = self.dataset.RasterCount
self.read_data()
def read_info(self):
self.dataset = gdal.Open(self.img_path)
self.img_metaInfo = self.dataset.GetMetadata()
self.projection = self.dataset.GetProjection()
self.geoTransform = self.dataset.GetGeoTransform()
def read_data(self):
self.img_arr = np.zeros((self.raster_Y, self.raster_X,
self.bandCount), 'uint8')
for i in range(self.bandCount):
self.img_arr[..., i] = self.dataset.GetRasterBand(i + 1).ReadAsArray()
def save(self, dst_filename, input_arr):
geotransform = self.geoTransform
geoprojection = self.projection
driver = self.dataset.GetDriver()
dst_ds = driver.Create(dst_filename, xsize=self.raster_X, ysize=self.raster_Y,
bands=self.bandCount, eType=gdal.GDT_Byte)
dst_ds.SetGeoTransform(geotransform)
dst_ds.SetProjection(geoprojection)
for i in range(self.bandCount):
# read the data of one band
raster = input_arr[:, :, i]
dst_ds.GetRasterBand(i+1).WriteArray(raster)
print("band " + str(i + 1) + " has been processed")
def unit_test():
rsObj = RSImage('./data/nudt2017-08-18/nudt2017-08-18.tif')
print(rsObj.img_metaInfo)
print(type(rsObj.img_arr))
print(rsObj.img_arr.shape)
print(rsObj.dataTypeName)
rsObj.save('./data/save.tif', rsObj.img_arr)
if __name__ == '__main__':
unit_test()
|
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE 蓝鲸基础平台 available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE 蓝鲸基础平台 is licensed under the MIT License.
License for BK-BASE 蓝鲸基础平台:
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
JOBNAVI_TASK_PORT_MIN = "jobnavi.task.port.min"
JOBNAVI_TASK_PORT_MIN_DEFAULT = 20000
JOBNAVI_TASK_PORT_MAX = "jobnavi.runner.task.port.max"
JOBNAVI_TASK_PORT_MAX_DEFAULT = 30000
JOBNAVI_TASK_PORT_MAX_RETRY = "jobnavi.runner.task.port.max.retry"
JOBNAVI_TASK_PORT_MAX_RETRY_DEFAULT = 50
JOBNAVI_TASK_RPC_MAX_RETRY = "jobnavi.runner.task.rpc.max.retry"
JOBNAVI_TASK_RPC_MAX_RETRY_DEFAULT = 3
JOBNAVI_SCHEDULER_ADDRESS = "jobnavi.scheduler.address"
JOBNAVI_HA_FAILOVER_RETRY = "jobnavi.ha.failover.retry"
JOBNAVI_HA_FAILOVER_RETRY_DEFAULT = 15
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
@File : parser_test.py
@Author:
@Date : 2019-02-23 16:37
@Desc : parser test function.
"""
import logging
from dataclasses import dataclass
log = logging.getLogger("test")
log.setLevel(level=logging.ERROR)
# Test:common function
def test_f(x, y):
return x - y
def test_if(x, y):
if x:
z = x + y
else:
z = y * y
return z
def test_ifexp(x, y):
z = (x + y) if x else y * y
return z
def test_if_nested(x, y, t):
if x:
z = x * x
if y:
z = z + y
else:
z = z * z
else:
if t:
z = t * t
else:
z = t + x
return z
def test_while(x, y):
z = x + y
while z:
z = x + x
return z
def test_for(x, y):
z = y
for index in x:
z = z + index
return z
def test_compare_lt(x, y):
z = 0
if x < y:
z = x
else:
z = y
return z
def test_compare_gt(x, y):
z = 0
if x > y:
z = x
else:
z = y
return z
def test_compare_ge(x, y):
z = 0
if x >= y:
z = x
else:
z = y
return z
def test_compare_le(x, y):
z = 0
if x <= y:
z = x
else:
z = y
return z
def test_compare_eq(x, y):
z = 0
if x == y:
z = x
else:
z = y
return z
def test_compare_ne(x, y):
z = 0
if x != y:
z = x
else:
z = y
return z
def test_boolop_two_and(x, y):
if x and y:
t = x + y
else:
t = 0
return t
def test_boolop_three_and(x, y, z):
if x and y and z:
t = x + y
else:
t = z
return t
def test_boolop_two_or(x, y):
if x or y:
t = x + y
else:
t = 0
return t
def test_boolop_three_or(x, y, z):
if x or y or z:
t = x + y
else:
t = z
return t
def test_boolop_mix_and_or(x, y, z):
if x and y or z:
t = x + y
else:
t = z
return t
def test_lambda(x, y):
l = lambda x, y: x * y
t = l(x, y)
return t
def test_funcdef(x, y):
def mymax(a, b):
if a > b:
return a
return b
t = mymax(x, y)
return t
def test_tuple_fn(y):
l = (1, 2, 3, 5, 7)
l = l + l[y]
return l
def test_list_fn(y):
l = [1, 2, 3, 5, 7]
l = l + l[y]
return l
# Test:resolve function
def get_resolve_fn(x, y):
return test_f(x, y)
# Test:no return function
# pylint: disable=pointless-statement
def get_no_return_fn(x, y):
x + y
def testDoNum():
return 1
def testDoStr():
return "str"
def testDoNamedConstTrue():
return True
def testDoNamedConstFalse():
return False
# Test_Class_type
@dataclass
class TestFoo:
x: float
y: int
def inf(self):
return self.x
def test_class_fn(x):
foo = TestFoo(x, 1)
return foo.inf()
# custom test function
def test_custom(x, y, z):
def g(x1, y1):
def h(x2):
return x2 + y1 + z
return h(x1)
return g(x, y)
def test_simple_closure(a, b):
"""Test some trivial closures."""
z = 1
def f():
return a + z
def g():
return b + 2.0
return f() * g()
def test_assign_tuple():
a = 1
b = 2
t = a, b
c, d = t
return c + d
def test_unary(x, y):
a = -x
z = a + y
return z
def f1(x, y):
return x + y
def test_reslove_closure(x):
z = x
def in_f2(x, y):
x = f1(x, y)
return x + y + z
return in_f2
def test_augassign(x, y):
x += x
y -= x
return y
def test_parse_undefined_var(x, y):
a = x + y + Undef
return a
# test system call
def test_sys_call(x, y):
a = len(x) + len(y)
return a
def test_bool_not(x, y):
z = x and y
return not z
def test_call_fn_use_tuple(y):
log.info("the y is :%r", y)
log.info("y type is :%r", type(y))
z = len(y)
for i in y:
log.info("The parameter is: %r", i)
return z
def test_subscript_setitem():
t = [1, 2, 5, 6]
t[2] = t[2] + 7
t[3] = t[3] + 1
return t
def test_dict():
ret = {"a": 1, "b": 2}
return ret
def func_call(x, y, *var, a=0, b=1, **kwargs):
return x + y + var[0] + a + b + kwargs["z"]
# pylint: disable=repeated-keyword
def test_call_variable():
t = (1, 2, 3)
d = {"z": 10, "e": 11}
return func_call(0, 1, 2, *t, b=6, a=5, c=5, z=10, **d)
|
import matplotlib.pyplot as plt
import numpy as np
PM_times = []
cap_range = [10, 100, 250, 500]
for i in range(len(cap_range)):
filename = "../../results/outputs/Experiment_Binary_Cap/outBU_" + str(cap_range[i])
f = open(filename, "r")
PM_times += [[]]
for line in f.readlines():
words = line.strip().split(" ")
if words[0] == "PriorityMatch:":
PM_times[i] += [float(words[2])]
else:
print("Incorrect key: " + words[0])
PM_avgs = []
for i in range(len(cap_range)):
PM_avgs += [sum(PM_times[i]) / len(PM_times[i])]
linewidth = 2
markersize = 3
plt.plot(cap_range, PM_avgs, label='SmartPriorityMatch runtime', marker='o', markerfacecolor='orange', markersize=markersize, color='orange', linewidth=linewidth, linestyle='solid')
plt.gcf().subplots_adjust(bottom=0.15, left=0.15)
plt.rcParams.update({'font.size': 14})
plt.xticks(cap_range, fontsize=14)
plt.yticks(fontsize=14)
plt.xlabel("Applicant capacity", fontsize=18)
plt.ylabel("Runtime (sec)", fontsize=18)
plt.savefig('./capplt.png')
|
# Author : @Moglten
# Fizz , Buzz and Fizzbuzz
# from 1 to 100
def printFizzBuzz(n) :
for x in range(1,n+1) : print(x) if print_FizzBuzz(x) else None
def print_FizzBuzz(n):
if n % 5 == n % 3 == 0:
print( "FizzBuzz" )
return False
else: return print_Buzz( n )
def print_Buzz(n) :
if n % 5 == 0:
print( "Buzz" )
return False
else : return print_Fizz(n)
def print_Fizz(n) :
if n % 3 == 0:
print( "Fizz" )
return False
else : return True
if __name__ == '__main__':
n = 100
printFizzBuzz(n)
|
#!/bin/python
# title: run_monitor.py
# description: This script is meant for monitoring Linux computers.
# author: Michael Muyakwa - IT8g
# created: 2019-11-02
# updated: 2019-11-22
# version: 1.5
# license: MIT
# Imports of the libraries used.
import os  # operating system information
import sys  # system information
import platform  # platform information
import psutil  # to get information from the system (unfortunately not 100% reliable)
import configparser  # library for INI files
import datetime  # parsing and formatting dates
import smtplib  # for sending the alert email when thresholds are exceeded
import email.utils  # helper library for emails
from email.mime.text import MIMEText  # MIME type for the email body
from email.header import Header  # header for the email
# Set up variables that are used later on.
# (Only variables that work on every platform.)
# Paths to the files that are used.
pathname = os.path.dirname(os.path.realpath(__file__))  # path of the folder containing this script
iniFile = os.path.abspath(pathname) + '/settings.ini'  # path to the INI file
logFile = os.path.abspath(pathname) + '/log.txt'  # path to the log file
# Handler for the INI file.
config = configparser.ConfigParser()  # object for working with the INI file
config.read(iniFile)  # read the INI file
hostname = platform.node()  # hostname
# Variable for logging
log_str = str('################\n' + hostname + ': ' + (datetime.datetime.now()).strftime("%Y-%m-%d %H:%M:%S"))
alarm = False  # set to True when a configured threshold has been exceeded
# Functions
# Log function that writes to the log file.
def writeLog(logStr):
    f = open(logFile, "a+")  # open the log file in append mode
    f.write(logStr)  # write to the log file
    f.write('\n')  # add an empty new line at the end of the log entry
    f.close()  # release the log file again
# Function that writes both to the log and to the console.
def splitPL(termStr):
    print(termStr)  # print to the console
    termStr = str(log_str + '\n' + termStr)  # assemble the string for the log file
    writeLog(termStr)  # write the log file
# Function for Linux clients.
def mon_linux():
    print("This is a Linux-Environment.")  # report that this is a Linux environment
    # Set up variables that are used below (Linux only).
    cpu_p = (round(float(
        os.popen('''grep 'cpu ' /proc/stat | awk '{usage=($2+$4)*100/($2+$4+$5)} END {print usage }' ''').readline()),
        2))  # CPU usage in percent (Linux only)
    v_memory = psutil.virtual_memory()[2]  # virtual memory
    hdd_p = psutil.disk_usage('/')[3]  # used disk space in percent
    ds = os.statvfs('/')  # fetch further information about the disk space
    tot_m, used_m, free_m = map(int, os.popen('free -t -m').readlines()[-1].split()[1:])  # memory (RAM) broken down
    # Print information about the disk.
    disk_str = {"Used": ((ds.f_blocks - ds.f_bfree) * ds.f_frsize) / 10 ** 9,
                "Unused": (ds.f_bavail * ds.f_frsize) / 10 ** 9}
    # num_processes = psutil.Process().children()  # does not return a reliable value
    # Build the output for the log and the console.
    Ausgabe = ('CPU: %s' % cpu_p)
    # Ausgabe += (' - %s processes are running' % num_processes)
    Ausgabe += ('\n%s Ram' % v_memory)
    Ausgabe += ('\n%s' % hdd_p)
    Ausgabe += (' - %s' % disk_str)
    mem_p = (used_m / tot_m * 100)  # memory in percent
    values = psutil.virtual_memory()
    total_size = get_human_readable_size(values.total)  # total amount of RAM installed
    Ausgabe += (' - Virtual Memory: %s' % (total_size))
    Ausgabe += (' - Total Mem:%s Used Mem:%s Free Mem:%s - %.2f percent in use.' % (tot_m, used_m, free_m, mem_p))
    splitPL(Ausgabe)  # write to log and console
    checkAlarm(cpu_p, hdd_p, mem_p)  # check whether thresholds have been exceeded
# Function that checks whether thresholds have been exceeded.
def checkAlarm(cpu_p, hdd_p, mem_p):
    cpu_p = float(cpu_p)  # make sure the value is handled as a float
    hdd_p = float(hdd_p)  # make sure the value is handled as a float
    mem_p = float(mem_p)  # make sure the value is handled as a float
    alarm = False  # local alarm flag
    # Check whether a threshold from the INI file has been exceeded.
    if (cpu_p > (float(config['SCHWELLENWERTE']['CPU_P']))) or (hdd_p > (float(config['SCHWELLENWERTE']['HDD_P']))) or (mem_p > (float(config['SCHWELLENWERTE']['MEM_P']))):
        alarm = True
    # If a threshold has been exceeded, raise the alarm via email.
    if alarm:
        writeLog('Alarm! A threshold has been exceeded.')  # write to the log file
        runAlarm(cpu_p, hdd_p, mem_p)  # generate the alert email
# Function that raises the alarm when a threshold has been exceeded.
def runAlarm(cpu_p, hdd_p, mem_p):
    # Fetch the required settings from the INI file.
    mail_host = config['EMAIL']['Smtp']
    mail_user = config['EMAIL']['Username']
    mail_pass = config['EMAIL']['Password']
    sender = config['EMAIL']['Absender']
    receivers = config['EMAIL']['Empfaenfger']
    daten = 'An alarm has been triggered. \nCPU: %s, HDD: %s, Mem: %s' % (cpu_p, hdd_p, mem_p)
    message = MIMEText(daten, 'plain', 'utf-8')
    message['From'] = Header(sender, 'utf-8')
    message['To'] = Header(receivers, 'utf-8')
    message['Subject'] = Header('An alarm has been triggered.', 'utf-8')
    port = config['EMAIL']['Port']
    # Build the SMTP object.
    smtp_obj = smtplib.SMTP(mail_host, port)
    smtp_obj.set_debuglevel(True)
    try:
        # Contact the SMTP server to allow encryption via TLS. First handshake.
        # Query the SMTP server's capabilities (expect a reply saying whether TLS is supported).
        smtp_obj.ehlo()
        # If the server supports TLS, continue with TLS encryption.
        if smtp_obj.has_extn('STARTTLS'):
            smtp_obj.starttls()  # start encryption
            smtp_obj.ehlo()  # greet the server again, now WITH TLS encryption
        smtp_obj.login(mail_user, mail_pass)  # pass the SMTP credentials
        smtp_obj.sendmail(sender, receivers, message.as_string())  # hand over the email content
        info_str = 'Alert email has been sent.'  # string for the log file
        splitPL(info_str)  # write to log and console
    except smtplib.SMTPException:
        err_str = 'An error occurred while sending the email.'
        splitPL(err_str)  # write to log and console
    finally:
        smtp_obj.quit()  # terminate the SMTP connection (close the stream)
# Converts values from psutil into readable (rounded) sizes.
def get_human_readable_size(num):
    # The different size units in one variable.
    exp_str = [ (0, 'B'), (10, 'KB'),(20, 'MB'),(30, 'GB'),(40, 'TB'), (50, 'PB'),]
    i = 0
    # The while loop checks whether the next larger unit is a better fit.
    while i+1 < len(exp_str) and num >= (2 ** exp_str[i+1][0]):
        i += 1
    rounded_val = round(float(num) / 2 ** exp_str[i][0], 2)
    return '%s %s' % (int(rounded_val), exp_str[i][1])
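# Examples (illustrative): get_human_readable_size(512) returns '512 B',
# get_human_readable_size(3 * 2 ** 30) returns '3 GB'.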
# Main function.
def main():
    # Check the operating system (Lin/Win).
    if platform.system() == "Linux":
        mon_linux()
    # The script could be extended for Windows here.
    elif platform.system() == "Windows":
        print("You are not running this Script on a Linux-Environment.")
        writeLog('This System is not a Linux-System.')
        sys.exit(1)
    # The script could be extended for Mac here with 'elif platform.system() == "Darwin":'.
    elif platform.system() == "Darwin":
        print("You are not running this Script on a Linux-Environment.")
        writeLog('This System is not a Linux-System.')
        sys.exit(1)
    # In case a completely unknown system is found (NOT "Lin/Win/Mac").
    else:
        print("You are not running this Script on a Linux-Environment.")
        writeLog('This System is not a Linux-System.')
        sys.exit(1)
# Script execution starts from here.
if __name__ == '__main__':
main()
|
import xclim.indicators.atmos
from finch.processes.wps_base import make_xclim_indicator_process
from finch.processes.wps_xclim_indices import XclimIndicatorBase
def test_locales_simple():
base_class = XclimIndicatorBase
indicator = make_xclim_indicator_process(
xclim.indicators.atmos.cold_spell_days, "_suffix", base_class
)
assert "fr" in indicator.translations
assert "title" in indicator.translations["fr"]
|
# SPDX-License-Identifier: MIT
# Left blank for now.
|
import random
import numpy as np
import pytest
import torch
import hivemind
import hivemind.averaging.averager
from hivemind.averaging.allreduce import AveragingMode
from hivemind.averaging.key_manager import GroupKeyManager
from hivemind.averaging.load_balancing import load_balance_peers
from hivemind.averaging.partition import AllreduceException
from hivemind.p2p import PeerID
from test_utils.dht_swarms import launch_dht_instances
@pytest.mark.forked
@pytest.mark.asyncio
async def test_key_manager():
dht = hivemind.DHT(start=True)
key_manager = GroupKeyManager(
dht,
prefix="test_averaging",
initial_group_bits="10110",
target_group_size=2,
)
alice = dht.peer_id
bob = PeerID(b"bob")
t = hivemind.get_dht_time()
key = key_manager.current_key
await key_manager.declare_averager(key, alice, expiration_time=t + 60)
await key_manager.declare_averager(key, bob, expiration_time=t + 61)
q1 = await key_manager.get_averagers(key, only_active=True)
await key_manager.declare_averager(key, alice, expiration_time=t + 66)
q2 = await key_manager.get_averagers(key, only_active=True)
await key_manager.declare_averager(key, bob, expiration_time=t + 61, looking_for_group=False)
q3 = await key_manager.get_averagers(key, only_active=True)
q4 = await key_manager.get_averagers(key, only_active=False)
q5 = await key_manager.get_averagers("nonexistent_key.0b0101", only_active=False)
assert len(q1) == 2 and (alice, t + 60) in q1 and (bob, t + 61) in q1
assert len(q2) == 2 and (alice, t + 66) in q2 and (bob, t + 61) in q2
assert len(q3) == 1 and (alice, t + 66) in q3
    assert len(q4) == 2 and (alice, t + 66) in q4 and (bob, t + 61) in q4
assert len(q5) == 0
dht.shutdown()
def _test_allreduce_once(n_clients, n_aux):
n_peers = 4
modes = (
[AveragingMode.CLIENT] * n_clients
+ [AveragingMode.AUX] * n_aux
+ [AveragingMode.NODE] * (n_peers - n_clients - n_aux)
)
random.shuffle(modes)
tensors1 = [torch.randn(123), torch.zeros(3)]
tensors2 = [torch.rand(123), torch.ones(3)]
tensors3 = [-torch.rand(123), torch.arange(3).to(torch.float32)]
tensors4 = [torch.randn(123) ** 3, torch.arange(3).to(torch.float32) / 2]
peer_tensors = [tensors1, tensors2, tensors3, tensors4]
reference = [
sum(tensors[i] for tensors, mode in zip(peer_tensors, modes) if mode != AveragingMode.AUX)
/ max(1, n_peers - n_aux)
for i in range(len(tensors1))
]
dht_instances = launch_dht_instances(len(peer_tensors))
averagers = [
hivemind.averaging.DecentralizedAverager(
tensors,
dht=dht,
target_group_size=4,
averaging_expiration=15,
prefix="mygroup",
client_mode=mode == AveragingMode.CLIENT,
auxiliary=mode == AveragingMode.AUX,
start=True,
)
for tensors, dht, mode in zip(peer_tensors, dht_instances, modes)
]
futures = []
for averager in averagers:
futures.append(averager.step(wait=False))
for future in futures:
result = future.result()
for averager in averagers:
assert averager.peer_id in result
for averager in averagers:
if averager.mode != AveragingMode.AUX:
with averager.get_tensors() as averaged_tensors:
for ref, our in zip(reference, averaged_tensors):
assert torch.allclose(ref, our, atol=1e-6)
for process in averagers + dht_instances:
process.shutdown()
@pytest.mark.forked
@pytest.mark.parametrize("n_clients", [0, 1, 2])
@pytest.mark.parametrize("n_aux", [0, 1, 2])
def test_allreduce_once(n_clients, n_aux):
_test_allreduce_once(n_clients, n_aux)
@pytest.mark.forked
@pytest.mark.parametrize("n_clients, n_aux", [(0, 4), (1, 3), (0, 3)])
def test_allreduce_once_edge_cases(n_clients, n_aux):
_test_allreduce_once(n_clients, n_aux)
@pytest.mark.forked
def test_allreduce_weighted(n_client_mode_peers: int = 2):
n_peers = 4
client_modes = [True] * n_client_mode_peers + [False] * (n_peers - n_client_mode_peers)
random.shuffle(client_modes)
tensors1 = [torch.randn(123), torch.zeros(3)]
tensors2 = [torch.rand(123), torch.ones(3)]
tensors3 = [-torch.rand(123), torch.arange(3).to(torch.float32)]
tensors4 = [torch.randn(123) ** 3, torch.arange(3).to(torch.float32) / 2]
dht_instances = launch_dht_instances(4)
averagers = [
hivemind.averaging.DecentralizedAverager(
tensors,
dht=dht,
target_group_size=4,
averaging_expiration=15,
prefix="mygroup",
client_mode=client_mode,
start=True,
)
for tensors, dht, client_mode in zip([tensors1, tensors2, tensors3, tensors4], dht_instances, client_modes)
]
weights = list(map(float, np.random.rand(len(averagers)) * 10 + 0.01))
reference = [
(tensors1[i] * weights[0] + tensors2[i] * weights[1] + tensors3[i] * weights[2] + tensors4[i] * weights[3])
/ sum(weights)
for i in range(len(tensors1))
]
futures = []
for averager, weight in zip(averagers, weights):
futures.append(averager.step(weight=weight, wait=False))
for future in futures:
future.result()
for future, averager in zip(futures, averagers):
with averager.get_tensors() as averaged_tensors:
for ref, our in zip(reference, averaged_tensors):
assert torch.allclose(ref, our, atol=1e-6)
for process in averagers + dht_instances:
process.shutdown()
def compute_mean_std(averagers, unbiased=True):
results = []
for averager in averagers:
with averager.get_tensors() as tensors:
results.append([tensor.clone() for tensor in tensors])
results_stacked_per_tensor = list(map(torch.stack, zip(*results)))
means = [stack.mean(dim=0) for stack in results_stacked_per_tensor]
stds = [stack.std(dim=0, unbiased=unbiased) for stack in results_stacked_per_tensor]
return means, stds
@pytest.mark.forked
def test_allreduce_grid():
dht_instances = launch_dht_instances(8)
averagers = [
hivemind.averaging.DecentralizedAverager(
averaged_tensors=[torch.randn(3)],
dht=dht,
target_group_size=2,
prefix="mygroup",
initial_group_bits=bin(i // 2)[2:].rjust(2, "0"),
start=True,
)
for i, dht in enumerate(dht_instances)
]
[means0], [stds0] = compute_mean_std(averagers)
assert not torch.allclose(stds0, torch.zeros_like(stds0))
prev_means, prev_stds = means0, stds0
for i in range(5):
step_futures = [averager.step(wait=False) for averager in averagers]
groups = [future.result() for future in step_futures]
[means], [stds] = compute_mean_std(averagers)
assert torch.allclose(means, prev_means, atol=1e-6, rtol=0)
assert all(len(group) == 2 for group in groups)
if i <= 2:
assert torch.all(torch.le(stds, prev_stds))
else:
assert torch.allclose(stds, torch.zeros_like(stds), atol=1e-6, rtol=0)
for process in averagers + dht_instances:
process.shutdown()
@pytest.mark.forked
def test_allgather(n_averagers=8, target_group_size=4):
dht_instances = launch_dht_instances(n_averagers)
averagers = [
hivemind.averaging.DecentralizedAverager(
[torch.ones(1)],
dht=dht,
target_group_size=target_group_size,
averaging_expiration=15,
prefix="mygroup",
initial_group_bits="000",
start=True,
)
for dht in dht_instances
]
futures = []
for i, averager in enumerate(averagers):
futures.append(averager.step(wait=False, gather=dict(batch_size=123 + i, foo="bar")))
reference_metadata = {
averager.peer_id: dict(batch_size=123 + i, foo="bar") for i, averager in enumerate(averagers)
}
for future in futures:
gathered = future.result()
assert len(gathered) == target_group_size
for peer_id in gathered:
assert gathered[peer_id] == reference_metadata[peer_id]
for process in averagers + dht_instances:
process.shutdown()
def get_cost(vector_size, partitions, bandwidths):
return max(
(vector_size - partitions[i] + (len(partitions) - 1) * partitions[i]) / max(bandwidths[i], 1e-9)
for i in range(len(partitions))
)
def check_optimality(vector_size, bandwidths, ref_partitions):
partitions = list(load_balance_peers(vector_size, bandwidths))
assert get_cost(vector_size, partitions, bandwidths) <= get_cost(vector_size, ref_partitions, bandwidths)
@pytest.mark.forked
def test_load_balancing():
check_optimality(60, np.array([0.25, 0.25, 0.25, 0.25]), [15, 15, 15, 15])
check_optimality(1024, np.array([0.3, 0.5, 0.9]), [0, 255, 769])
check_optimality(60, np.array([0.44, 0.33, 0.22]), [42, 18, 0])
check_optimality(60, np.array([0.55, 0.44, 0.40]), [35, 16, 9])
check_optimality(1024 * 1024, np.array([0.3, 0.5, 0.9, 0.6]), [0, 169327, 602629, 276620])
check_optimality(1024 * 1024, np.array([0.0, 0.5, 0.0, 0.6]), [0, 428963, 0, 619613])
assert load_balance_peers(60, np.array([0.55, 0.44, 0.40]), min_size=10) == (41, 19, 0)
assert load_balance_peers(60, np.array([0.32, 0.55, 0.44]), min_size=10) == (0, 40, 20)
assert load_balance_peers(2, np.array([0.55, 0.20, 0.44]), min_size=10) == (1, 0, 1)
assert load_balance_peers(1, np.array([0.55, 0.20, 0.44]), min_size=10) == (1, 0, 0)
assert load_balance_peers(100, (None, None)) == (50, 50)
assert load_balance_peers(100, (None, None, None, None, None)) == (20, 20, 20, 20, 20)
assert load_balance_peers(100, (0, 0, 0, None, None)) == (0, 0, 0, 50, 50)
with pytest.raises(AssertionError):
load_balance_peers(100, (0, 0, 0))
for i in range(10):
vector_size = np.random.randint(1, 1024 ** 3)
num_peers = np.random.randint(1, 256)
scale = 1e-9 + np.random.rand() * 1e5
bandwidths = np.random.rand(num_peers) * scale + 1e-6
min_size = np.random.choice([0, np.random.randint(0, vector_size // 10)])
assignment = load_balance_peers(vector_size, bandwidths, min_size)
assert np.sum(assignment) == vector_size
assert np.min(assignment) >= 0
@pytest.mark.forked
def test_too_few_peers():
dht_instances = launch_dht_instances(4)
averagers = [
hivemind.averaging.DecentralizedAverager(
averaged_tensors=[torch.randn(3)],
dht=dht,
target_group_size=2,
averaging_expiration=1,
request_timeout=0.5,
prefix="mygroup",
initial_group_bits=bin(i)[2:].rjust(3, "0"),
start=True,
)
for i, dht in enumerate(dht_instances)
]
step_futures = [averager.step(wait=False, timeout=2) for averager in averagers]
for future in step_futures:
with pytest.raises(AllreduceException):
future.result()
for process in averagers + dht_instances:
process.shutdown()
@pytest.mark.skip(
reason="The current implementation of elasticity (multi-stage averaging when num_peers > ~3 * target_group_size) "
"is incorrect (TODO @justheuristic)"
)
@pytest.mark.forked
def test_overcrowded(num_peers=16):
dht_instances = launch_dht_instances(num_peers)
averagers = [
hivemind.averaging.DecentralizedAverager(
averaged_tensors=[torch.randn(3)],
dht=dht,
target_group_size=2,
averaging_expiration=1,
request_timeout=0.5,
prefix="mygroup",
initial_group_bits="",
start=True,
)
for dht in dht_instances
]
for _ in range(5):
step_futures = [averager.step(wait=False, timeout=5) for averager in averagers]
assert sum(len(future.result() or []) == 2 for future in step_futures) >= len(averagers) - 1
for process in averagers + dht_instances:
process.shutdown()
@pytest.mark.forked
def test_load_state_from_peers():
num_calls = 0
super_metadata = dict(x=123)
super_tensors = (torch.randn(3), torch.randint(0, 5, (3,)))
class TestAverager(hivemind.averaging.DecentralizedAverager):
def get_current_state(self):
"""
            Get current state and send it to a peer. Executed in the host process. Meant to be overridden.
:returns: a tuple of (serializable_small_metadata, sequence of torch tensors)
"""
nonlocal num_calls, super_metadata, super_tensors
num_calls += 1
return super_metadata, super_tensors
dht_instances = launch_dht_instances(2)
averager1 = TestAverager(
[torch.randn(3), torch.rand(5)],
dht=dht_instances[0],
start=True,
prefix="demo-run",
target_group_size=2,
)
dht_instances[1].get("demo-run.all_averagers")
averager2 = TestAverager(
[torch.randn(3), torch.rand(5)],
dht=dht_instances[1],
start=True,
prefix="demo-run",
target_group_size=2,
)
assert num_calls == 0
got_metadata, got_tensors = averager2.load_state_from_peers()
assert num_calls == 1
assert got_metadata == super_metadata
assert all(map(torch.allclose, got_tensors, super_tensors))
super_metadata["y"] = 123
super_tensors[1][2] = 9
assert num_calls == 1
assert got_metadata != super_metadata
assert not all(map(torch.allclose, got_tensors, super_tensors))
got_metadata, got_tensors = averager2.load_state_from_peers()
assert num_calls == 2
assert got_metadata == super_metadata
assert all(map(torch.allclose, got_tensors, super_tensors))
averager1.allow_state_sharing = False
assert averager2.load_state_from_peers() is None
averager1.allow_state_sharing = True
got_metadata, got_tensors = averager2.load_state_from_peers()
assert num_calls == 3
assert got_metadata == super_metadata
for instance in [averager1, averager2] + dht_instances:
instance.shutdown()
@pytest.mark.forked
def test_getset_bits():
dht = hivemind.DHT(start=True)
averager = hivemind.averaging.DecentralizedAverager(
[torch.randn(3)],
dht=dht,
start=True,
prefix="test_prefix",
target_group_size=2,
)
averager.set_group_bits("00101011101010")
assert averager.get_group_bits() == "00101011101010"
@pytest.mark.forked
def test_training_averager(n_steps: int = 10, n_dims: int = 16):
torch.manual_seed(42)
dht_instances = launch_dht_instances(2)
common_kwargs = {
"start": True,
"prefix": "demo-run",
"target_group_size": 2,
}
x1 = torch.randn(n_dims, requires_grad=True)
opt1 = torch.optim.Adam([x1], lr=0.05)
averager1 = hivemind.averaging.TrainingAverager(
opt1,
average_gradients=True,
average_parameters=True,
average_opt_statistics=["exp_avg_sq"],
dht=dht_instances[0],
**common_kwargs
)
x2 = torch.randn(n_dims, requires_grad=True)
opt2 = torch.optim.Adam([x2], lr=0.05)
averager2 = hivemind.averaging.TrainingAverager(
opt2,
average_gradients=True,
average_parameters=True,
average_opt_statistics=["exp_avg_sq"],
dht=dht_instances[1],
**common_kwargs
)
a = torch.ones(n_dims)
for i in range(n_steps):
opt1.zero_grad()
opt2.zero_grad()
(x1 - a).pow(2).sum().backward()
(x2 - a).pow(2).sum().backward()
opt1.step()
opt2.step()
with torch.no_grad():
x_avg = 0.5 * (x1 + x2)
grad_avg = 0.5 * (x1.grad + x2.grad)
stats_avg = 0.5 * (opt1.state[x1]["exp_avg_sq"] + opt2.state[x2]["exp_avg_sq"])
# we set wait=False in order to prevent deadlock, when averager1 locks and waits for averager2
f1 = averager1.step(wait=False)
f2 = averager2.step(wait=False)
f1.result()
f2.result()
assert torch.allclose(x1, x_avg)
assert torch.allclose(x2, x_avg)
assert torch.allclose(x1.grad, grad_avg)
assert torch.allclose(x2.grad, grad_avg)
assert torch.allclose(opt1.state[x1]["exp_avg_sq"], stats_avg)
assert torch.allclose(opt2.state[x2]["exp_avg_sq"], stats_avg)
for instance in [averager1, averager2] + dht_instances:
instance.shutdown()
|
"""empty message
Revision ID: 5dea293ee313
Revises: 84f11a2b5659
Create Date: 2021-07-29 14:45:35.873685
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import column, table
from sqlalchemy.sql.sqltypes import Boolean, String
# revision identifiers, used by Alembic.
revision = '5dea293ee313'
down_revision = '84f11a2b5659'
branch_labels = None
depends_on = None
def upgrade():
delivery_mode_table = table('DeliveryModes',
column('name',String),
column('description',String),
column('isactive',Boolean),
)
op.execute('Truncate table public."DeliveryModes" RESTART IDENTITY CASCADE;commit;')
op.bulk_insert(
delivery_mode_table,
[
{'name':'Secure File Transfer','description':'Secure File Transfer','isactive':True},
{'name':'In Person Pick up','description':'In Person Pick up','isactive':True}
]
)
def downgrade():
op.execute('Truncate table public."DeliveryModes" RESTART IDENTITY CASCADE;commit;')
|
############# Credits and version info #############
# Definition generated from Assembly XML tag def
# Date generated: 2018/12/03 04:56
#
# revision: 1 author: -DeToX-
# Created layout of plugin
# revision: 2 author: DeadCanadian
# naming tags
# revision: 3 author: Moses_of_Egypt
# Cleaned up and converted to SuPyr definition
#
####################################################
from ..common_descs import *
from .objs.tag import *
from supyr_struct.defs.tag_def import TagDef
bsdt_unknown0 = Struct("unknown0",
BytesRaw("unknown_0", SIZE=8, VISIBLE=False),
h3_rawdata_ref("unknown_1", VISIBLE=False),
BytesRaw("unknown_2", SIZE=8, VISIBLE=False),
VISIBLE=False,
ENDIAN=">", SIZE=36
)
bsdt_unknown1 = Struct("unknown1",
BytesRaw("unknown_0", SIZE=8, VISIBLE=False),
h3_rawdata_ref("unknown_1", VISIBLE=False),
BytesRaw("unknown_2", SIZE=8, VISIBLE=False),
VISIBLE=False,
ENDIAN=">", SIZE=36
)
bsdt_body = Struct("tagdata",
Float("maximum_vitality"),
h3_dependency("effect"),
h3_dependency("sound"),
BytesRaw("unknown_0", SIZE=16, VISIBLE=False),
h3_dependency("crack_bitmap"),
h3_dependency("hole_bitmap"),
BytesRaw("unknown_1", SIZE=36, VISIBLE=False),
h3_reflexive("unknown0", bsdt_unknown0),
BytesRaw("unknown_2", SIZE=12, VISIBLE=False),
h3_reflexive("unknown1", bsdt_unknown1),
BytesRaw("unknown_3", SIZE=4, VISIBLE=False),
ENDIAN=">", SIZE=160
)
def get():
return bsdt_def
bsdt_def = TagDef("bsdt",
h3_blam_header('bsdt'),
bsdt_body,
ext=".%s" % h3_tag_class_fcc_to_ext["bsdt"], endian=">", tag_cls=H3Tag
    )
|
# coding: utf-8
import socketserver
import email
from io import StringIO
import os
# Copyright 2013 Abram Hindle, Eddie Antonio Santos
#
# Changes made by Joshua Smith
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Furthermore it is derived from the Python documentation examples thus
# some of the code is Copyright © 2001-2013 Python Software
# Foundation; All Rights Reserved
#
# http://docs.python.org/2/library/socketserver.html
#
# run: python freetests.py
# try: curl -v -X GET http://127.0.0.1:8080/
class MyWebServer(socketserver.BaseRequestHandler):
def handle(self):
self.data = self.request.recv(1024).strip().decode("utf-8")
requestLine, headers = self.data.split("\r\n", 1)
message = email.message_from_file(StringIO(headers))
headers = dict(message.items())
method, path, typ = requestLine.split(' ', 2)
if method != 'GET':
r = "HTTP/1.1 405 OK\nContent-Type: text/plain\nContent-Length: 0\r\n"
else:
r = self.getFile(path)
self.request.sendall(bytearray(r,'utf-8'))
def getFile(self, requestPath):
pathParts = requestPath.split("/")
if len(pathParts) < 2:
return self.ret404()
i = 1
path = os.getcwd()+ "/www/"
while i < len(pathParts)-1:
if pathParts[i] == "..":
return self.ret404()
#go to next directory
if os.path.exists(path+pathParts[i]):
path = path + pathParts[i] +"/"
i+=1
#if doesn't exist return 404
else:
return self.ret404()
if pathParts[len(pathParts)-1] == '':
path = path + "index.html"
if os.path.isfile(path):
return self.getFileResponseHTML(path)
else:
return self.ret404()
path = path + pathParts[len(pathParts)-1]
if os.path.isfile(path):
_ ,mimeType = pathParts[len(pathParts)-1].split(".")
try:
if mimeType == "html":
return self.getFileResponseHTML(path)
elif mimeType == "css":
return self.getFileResponseCSS(path)
else:
return self.getFileResponseOther(path)
except:
pass
if os.path.exists(path):
path = path + "/index.html"
if os.path.isfile(path):
return self.getFileResponseHTML301(path)
else:
return self.ret404()
return self.ret404()
def getFileResponseHTML(self, path):
content = open(path, "r").read()
x = "HTTP/1.1 200 OK\nContent-Type: text/html; charset=iso-8859-1\nConnection: close\nContent-Length: 1000\r\n" + content
direct, _ = path.rsplit("/", 1)
for i in os.listdir(direct):
try:
_ ,mimeType = i.split(".")
if mimeType == "css":
x = x.replace(
'<link rel="stylesheet" type="text/css" href="' + i + '">',
"<style>" +(open((direct+"/"+i), "r").read()) + "</style>"
)
except:
pass
return x
def getFileResponseHTML301(self, path):
content = open(path, "r").read()
return "HTTP/1.1 301 Moved Permanently\nContent-Type: text/html\nContent-Length: 1000\r\n" + content
def getFileResponseCSS(self, path):
content = open(path, "r").read()
return "HTTP/1.1 200 OK\nContent-Type: text/css\nContent-Length: 1000\r\n" + content
def getFileResponseOther(self, path):
content = open(path, "r").read()
return "HTTP/1.1 200 OK\nContent-Type: text/plain\nContent-Length: 0\r\n" + content
def ret404(self):
return "HTTP/1.1 404 Not Found\nContent-Type: text/plain\nContent-Length: 0\r\n"
if __name__ == "__main__":
HOST, PORT = "localhost", 8080
socketserver.TCPServer.allow_reuse_address = True
# Create the server, binding to localhost on port 8080
server = socketserver.TCPServer((HOST, PORT), MyWebServer)
# Activate the server; this will keep running until you
# interrupt the program with Ctrl-C
server.serve_forever()
|
# -*- coding: utf-8 -*-
"""
Модуль графической диаграммы для сомпонентов Table, QTable
"""
# pylint: disable=line-too-long
import matplotlib.pyplot as plt
from pyss.pyss_const import *
from pyss import statisticalseries
from pyss.pyssownerobject import PyssOwnerObject
# Renders a plot for a Table component
class PlotTable(PyssOwnerObject):
"""Формирование графической диаграммы по данным из таблиц Table, QTable
Args:
ownerModel=None - объект модели-владельца
table=None - таблица
title - заголовок диаграммы
Пример см. test_enter_leave.py, test_preempt_return.py, test_queue.py, test_seize.py
"""
def __init__(self, ownerModel=None, table=None, title=None):
super(PlotTable, self).__init__(SEGMENT, label=None, owner=ownerModel)
# pylint: disable=too-many-arguments
self.tables = []
if table:
self.tables.append(tuple([table, title]))
ownerModel.addPlot(self)
def append(self, table, title=None):
num = len(self.tables) + 1
if title:
self.tables.append(tuple([table, "Table %d. " % num + title]))
elif TITLE in table:
self.tables.append(tuple([table, "Table %d. " % num + table[TITLE]]))
else:
self.tables.append(tuple([table, "Table %d" % num]))
def extend(self, tables):
for t in tables:
self.append(t, None)
def plotOn(self, subplot, table):
ss = statisticalseries.StatisticalSeries()
x = []
y = []
for z in table[LIST]:
x.append(z)
zz = table[INTERVALS][z]
y.append(zz)
ss.append(zz, 1)
# subplot.plot((x[0], x[-1]), (ss.mean(), ss.mean()), 'k--')
m = ss.mean()
subplot.axhline(y=ss.mean(), dashes=[3, 1], color='#880000')
subplot.annotate("Mean: %.3f" % (m),
xy=(x[0], m),
xycoords='data',
xytext=(0, 2),
textcoords='offset points',)
for xx, yy in zip(x, y):
subplot.axhline(y=yy, dashes=[1, 1], color='#dddddd')
if yy > 0.001:
subplot.annotate("%.3f\n%.3f" % (xx, yy),
xy=(xx, yy),
xycoords='data',
xytext=(-2, 2),
textcoords='offset points',)
for xx, yy in zip(x, y):
subplot.bar(xx, yy,
width=0.8 * table[WIDTHINT],
align='center', color='#005500',
zorder=30)
# subplot.bar(x, y,
# width=0.8*table[WIDTHINT],
# align='center', color='#22bb22')
def plot(self):
f = 1
fig = plt.figure()
l = len(self.tables)
for (t, title) in self.tables:
subplot = fig.add_subplot(l, 1, f)
if title:
subplot.title.set_text(title)
self.plotOn(subplot, t)
f += 1
plt.show()
def plotOnFigure(self, figure, _ignore=None):
f = 1
l = len(self.tables)
for (t, title) in self.tables:
subplot = figure.add_subplot(l, 1, f)
if title:
subplot.title.set_text(title)
self.plotOn(subplot, t)
f += 1
if __name__ == '__main__':
pass
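    # Hypothetical usage sketch (the model and table names are illustrative, not from this module):
    #   plot = PlotTable(ownerModel=my_model, table=my_table, title="Queue length")
    #   plot.plot()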
|
import io
import os
from django.core.files.uploadedfile import SimpleUploadedFile
from PIL import Image
import pytest
from app.models import Photo
def generate_image(filename):
file = io.BytesIO()
image = Image.new('RGBA', size=(10, 10), color=(0, 0, 0))
image.save(file, 'png')
file.name = filename
file.seek(0)
return file
@pytest.fixture
def sample_photos():
photos = []
for filename in ['a.png', 'b.png', 'c.png']:
with generate_image(filename) as fp:
photo = Photo.objects.create(
title=f'Sample photo {filename}',
description='Sample description',
image=SimpleUploadedFile(name=filename, content=fp.read())
)
photos.append(photo)
yield photos
for photo in photos:
        photo.delete()
|
from django.test import TestCase
from garnett.context import set_field_language
from library_app.models import DefaultBook
class DefaultTestCase(TestCase):
"""Test setting of default on translated field"""
def test_default(self):
"""Test that default is returned by getter"""
book = DefaultBook.objects.create(number_of_pages=100)
self.assertEqual(book.title, "DEFAULT TITLE")
def test_language_default(self):
"""Test that default creates dict using current language"""
with set_field_language("fr"):
book = DefaultBook.objects.create(number_of_pages=100)
self.assertEqual(book.title, "DEFAULT TITLE")
self.assertEqual(book.title_tsall, {"fr": "DEFAULT TITLE"})
def test_default_function(self):
"""Test that default is returned by getter when inner default is function"""
book = DefaultBook.objects.create(number_of_pages=100)
self.assertEqual(book.author, "John Jimson")
def test_language_default_function(self):
"""Test that dict is correct when inner default is function"""
with set_field_language("fr"):
book = DefaultBook.objects.create(number_of_pages=100)
self.assertEqual(book.author, "John Jimson")
self.assertEqual(book.author_tsall, {"fr": "John Jimson"})
def test_default_deconstruct(self):
"""Make sure default callable is serialized properly"""
title = DefaultBook._meta.get_field("title")
kwargs = title.deconstruct()[3]
self.assertIn("default", kwargs)
self.assertTrue(callable(kwargs["default"]))
def test_default_empty_string(self):
"""Test default works when empty string"""
book = DefaultBook(number_of_pages=100)
self.assertEqual(book.description, "")
|
import os
import glob
from gooey import Gooey, GooeyParser
from auto_crop.image import Image
def _get_args():
parser = GooeyParser(
prog="autocrop", description="tool to automatically crop images based on shapes"
)
parser.add_argument(
"folder", help="the place with all the images", widget="DirChooser"
)
parser.add_argument(
"--glob", help="The glob used to find the images", default="IMG_*.JPG"
)
parser.add_argument(
"--thres",
help="The threshold for separating forground from background",
type=int,
default=150,
)
parser.add_argument(
"--quality", help="The JPEG quality", type=int, default=60,
)
return parser.parse_args()
@Gooey(target="autocrop", program_name="autocrop", default_size=(610, 570))
def main():
""" Entry-point for the autocrop command
"""
args = _get_args()
for filename in glob.glob(os.path.join(args.folder, args.glob)):
image = Image(filename)
image.find_contours(args.thres)
image.crop_by_contour(inplace=True)
base, ext = os.path.splitext(filename)
image.save(f"{base}_mod{ext}", jpg_quality=args.quality or None)
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
# Copyright 2018 The Blueoil Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import functools
from glob import glob
import imghdr
import os
import os.path
import numpy as np
import PIL.Image
from lmnet.datasets.base import Base, StoragePathCustomizable
from lmnet import data_processor
from lmnet.utils.random import shuffle, train_test_split
class ImageFolderBase(StoragePathCustomizable, Base):
"""Abstract class of dataset for loading image files stored in a folder.
structure like
$DATA_DIR/extend_dir/cat/0001.jpg
$DATA_DIR/extend_dir/cat/xxxa.jpeg
$DATA_DIR/extend_dir/cat/yyyb.png
$DATA_DIR/extend_dir/dog/123.jpg
$DATA_DIR/extend_dir/dog/023.jpg
$DATA_DIR/extend_dir/dog/wwww.jpg
    When a child class has `validation_extend_dir`, the `validation` subset consists of those folders:
$DATA_DIR/validation_extend_dir/cat/0001.jpg
$DATA_DIR/validation_extend_dir/cat/xxxa.png
"""
def __init__(
self,
is_shuffle=True,
*args,
**kwargs
):
super().__init__(*args, **kwargs)
self.is_shuffle = is_shuffle
self.element_counter = 0
@property
@functools.lru_cache(maxsize=None)
def classes(self):
"""Returns the classes list in the data set."""
classes = os.listdir(self.data_dir)
classes = [class_name for class_name in classes if class_name != ".DS_Store"]
classes.sort(key=lambda item: item.lower())
return classes
@property
def num_classes(self):
return len(self.classes)
@property
def num_per_epoch(self):
return len(self.data_files)
def _all_files(self):
all_image_files = []
for image_class in self.classes:
image_dir = os.path.join(self.data_dir, image_class)
for image_path in glob(os.path.join(image_dir, "*")):
if os.path.isfile(image_path) and imghdr.what(image_path) in ["jpeg", "png"]:
all_image_files.append(image_path)
return all_image_files
@property
@functools.lru_cache(maxsize=None)
def data_files(self):
all_image_files = self._all_files()
if self.validation_size > 0:
train_image_files, test_image_files = train_test_split(
all_image_files, test_size=self.validation_size, seed=1)
if self.subset == "train":
files = train_image_files
else:
files = test_image_files
return files
return all_image_files
def get_label(self, filename):
"""Returns label."""
class_name = os.path.basename(os.path.dirname(filename))
label = self.classes.index(class_name)
return label
def get_image(self, filename):
"""Returns numpy array of an image"""
image = PIL.Image.open(filename)
        # sometimes the image data is grayscale
image = image.convert("RGB")
image = np.array(image)
return image
@property
def feed_indices(self):
if not hasattr(self, "_feed_indices"):
if self.subset == "train" and self.is_shuffle:
self._feed_indices = shuffle(range(self.num_per_epoch), seed=self.seed)
else:
self._feed_indices = list(range(self.num_per_epoch))
return self._feed_indices
def _get_index(self, counter):
return self.feed_indices[counter]
def _shuffle(self):
if self.subset == "train" and self.is_shuffle:
self._feed_indices = shuffle(range(self.num_per_epoch), seed=self.seed)
print("Shuffle {} train dataset with random state {}.".format(self.__class__.__name__, self.seed))
self.seed = self.seed + 1
def _element(self):
"""Return an image and label."""
index = self._get_index(self.element_counter)
self.element_counter += 1
if self.element_counter == self.num_per_epoch:
self.element_counter = 0
self._shuffle()
target_file = self.data_files[index]
image = self.get_image(target_file)
label = self.get_label(target_file)
samples = {'image': image}
if callable(self.augmentor) and self.subset == "train":
samples = self.augmentor(**samples)
if callable(self.pre_processor):
samples = self.pre_processor(**samples)
image = samples['image']
return image, label
def feed(self):
"""Returns batch size numpy array of images and binarized labels."""
images, labels = zip(*[self._element() for _ in range(self.batch_size)])
labels = data_processor.binarize(labels, self.num_classes)
images = np.array(images)
if self.data_format == 'NCHW':
images = np.transpose(images, [0, 3, 1, 2])
return images, labels
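# A hedged usage sketch (added for illustration; the concrete class, directory
# names, and constructor keywords below are assumptions, not part of lmnet):
#
#     class CatsDogsDataset(ImageFolderBase):
#         extend_dir = "cats_dogs"                       # $DATA_DIR/cats_dogs/<class>/*.jpg
#         validation_extend_dir = "cats_dogs_validation"
#
#     dataset = CatsDogsDataset(subset="train", batch_size=32)
#     images, labels = dataset.feed()   # images: (32, H, W, 3), labels: one-hot (32, num_classes)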
|
# std
from typing import Optional
# external
from alembic import util
from alembic.config import Config
from alembic.script import ScriptDirectory
import pkg_resources
from .cli_utils import load_config
GLOBAL_VERSION_PATH = pkg_resources.resource_filename("molar", "migrations/versions")
def get_alembic_config(ctx, database: Optional[str] = None):
load_config(ctx, database=database)
data_dir = ctx.obj["data_dir"]
sql_url = ctx.obj["sql_url"]
alembic_config = Config()
version_locations = GLOBAL_VERSION_PATH
if data_dir:
version_locations = (
version_locations + " " + str(data_dir.resolve() / "migrations")
)
alembic_config.set_main_option("version_locations", version_locations)
alembic_config.set_main_option("script_location", "molar:migrations")
alembic_config.set_main_option("sqlalchemy.url", sql_url)
return alembic_config
def merge(
config, revisions, message=None, branch_labels=None, version_path=None, rev_id=None
):
"""
    Merge revisions, allowing a version_path to be specified.
"""
script = ScriptDirectory.from_config(config)
return script.generate_revision(
rev_id or util.rev_id(),
message,
refresh=True,
head=revisions,
branch_labels=branch_labels,
version_path=version_path,
config=config,
)
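# A hedged usage sketch (assumes a click-style ctx whose obj dict was populated
# by load_config with "data_dir" and "sql_url"; the revision ids are placeholders):
#
#     config = get_alembic_config(ctx, database="my_database")
#     merge(
#         config,
#         revisions=("abc123", "def456"),
#         message="merge user-data and core heads",
#         version_path=str(ctx.obj["data_dir"].resolve() / "migrations"),
#     )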
|
from .parser import *
from .line import *
from .obfuscator import *
from .simulator import *
from .generator import * |
from catalyst.__version__ import __version__ # noqa: F401
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from . import datasets, models, util
__all__ = ["models", "util", "datasets"]
|
# create_schema.py
import sqlite3
# connecting...
conn = sqlite3.connect('base.db')
# defining a cursor
cursor = conn.cursor()
# creating the table (schema)
cursor.execute("""
CREATE TABLE consultas (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
texto VARCHAR(200) NOT NULL,
tipo VARCHAR(50) NOT NULL,
criado_em DATE NOT NULL
);
""")
print('Table created successfully.')
# disconnecting...
conn.close() |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from eventlet import tpool
from nova import context
from nova import db
from nova import flags
from nova import log as logging
from nova import utils
from nova.virt.libvirt import netutils
LOG = logging.getLogger("nova.virt.libvirt.firewall")
FLAGS = flags.FLAGS
try:
import libvirt
except ImportError:
LOG.warn(_("Libvirt module could not be loaded. NWFilterFirewall will "
"not work correctly."))
class FirewallDriver(object):
def prepare_instance_filter(self, instance, network_info):
"""Prepare filters for the instance.
At this point, the instance isn't running yet."""
raise NotImplementedError()
def unfilter_instance(self, instance, network_info):
"""Stop filtering instance"""
raise NotImplementedError()
def apply_instance_filter(self, instance, network_info):
"""Apply instance filter.
Once this method returns, the instance should be firewalled
appropriately. This method should as far as possible be a
no-op. It's vastly preferred to get everything set up in
prepare_instance_filter.
"""
raise NotImplementedError()
def refresh_security_group_rules(self, security_group_id):
"""Refresh security group rules from data store
Gets called when a rule has been added to or removed from
the security group."""
raise NotImplementedError()
def refresh_security_group_members(self, security_group_id):
"""Refresh security group members from data store
Gets called when an instance gets added to or removed from
the security group."""
raise NotImplementedError()
def refresh_provider_fw_rules(self):
"""Refresh common rules for all hosts/instances from data store.
Gets called when a rule has been added to or removed from
the list of rules (via admin api).
"""
raise NotImplementedError()
def setup_basic_filtering(self, instance, network_info):
"""Create rules to block spoofing and allow dhcp.
This gets called when spawning an instance, before
:method:`prepare_instance_filter`.
"""
raise NotImplementedError()
def instance_filter_exists(self, instance, network_info):
"""Check nova-instance-instance-xxx exists"""
raise NotImplementedError()
class NWFilterFirewall(FirewallDriver):
"""
This class implements a network filtering mechanism versatile
enough for EC2 style Security Group filtering by leveraging
libvirt's nwfilter.
First, all instances get a filter ("nova-base-filter") applied.
This filter provides some basic security such as protection against
MAC spoofing, IP spoofing, and ARP spoofing.
This filter drops all incoming ipv4 and ipv6 connections.
Outgoing connections are never blocked.
Second, every security group maps to a nwfilter filter(*).
NWFilters can be updated at runtime and changes are applied
immediately, so changes to security groups can be applied at
runtime (as mandated by the spec).
Security group rules are named "nova-secgroup-<id>" where <id>
is the internal id of the security group. They're applied only on
hosts that have instances in the security group in question.
Updates to security groups are done by updating the data model
(in response to API calls) followed by a request sent to all
the nodes with instances in the security group to refresh the
security group.
Each instance has its own NWFilter, which references the above
mentioned security group NWFilters. This was done because
interfaces can only reference one filter while filters can
reference multiple other filters. This has the added benefit of
actually being able to add and remove security groups from an
instance at run time. This functionality is not exposed anywhere,
though.
Outstanding questions:
The name is unique, so would there be any good reason to sync
the uuid across the nodes (by assigning it from the datamodel)?
(*) This sentence brought to you by the redundancy department of
redundancy.
"""
def __init__(self, get_connection, **kwargs):
self._libvirt_get_connection = get_connection
self.static_filters_configured = False
self.handle_security_groups = False
def apply_instance_filter(self, instance, network_info):
"""No-op. Everything is done in prepare_instance_filter"""
pass
def _get_connection(self):
return self._libvirt_get_connection()
_conn = property(_get_connection)
def nova_dhcp_filter(self):
"""The standard allow-dhcp-server filter is an <ip> one, so it uses
ebtables to allow traffic through. Without a corresponding rule in
iptables, it'll get blocked anyway."""
return '''<filter name='nova-allow-dhcp-server' chain='ipv4'>
<uuid>891e4787-e5c0-d59b-cbd6-41bc3c6b36fc</uuid>
<rule action='accept' direction='out'
priority='100'>
<udp srcipaddr='0.0.0.0'
dstipaddr='255.255.255.255'
srcportstart='68'
dstportstart='67'/>
</rule>
<rule action='accept' direction='in'
priority='100'>
<udp srcipaddr='$DHCPSERVER'
srcportstart='67'
dstportstart='68'/>
</rule>
</filter>'''
def nova_ra_filter(self):
return '''<filter name='nova-allow-ra-server' chain='root'>
<uuid>d707fa71-4fb5-4b27-9ab7-ba5ca19c8804</uuid>
<rule action='accept' direction='inout'
priority='100'>
<icmpv6 srcipaddr='$RASERVER'/>
</rule>
</filter>'''
def setup_basic_filtering(self, instance, network_info):
"""Set up basic filtering (MAC, IP, and ARP spoofing protection)"""
logging.info('called setup_basic_filtering in nwfilter')
if self.handle_security_groups:
# No point in setting up a filter set that we'll be overriding
# anyway.
return
logging.info('ensuring static filters')
self._ensure_static_filters()
if instance['image_ref'] == str(FLAGS.vpn_image_id):
base_filter = 'nova-vpn'
else:
base_filter = 'nova-base'
for (network, mapping) in network_info:
nic_id = mapping['mac'].replace(':', '')
instance_filter_name = self._instance_filter_name(instance, nic_id)
self._define_filter(self._filter_container(instance_filter_name,
[base_filter]))
def _ensure_static_filters(self):
"""Static filters are filters that have no need to be IP aware.
There is no configuration or tuneability of these filters, so they
can be set up once and forgotten about.
"""
if self.static_filters_configured:
return
self._define_filter(self._filter_container('nova-base',
['no-mac-spoofing',
'no-ip-spoofing',
'no-arp-spoofing',
'allow-dhcp-server']))
self._define_filter(self._filter_container('nova-vpn',
['allow-dhcp-server']))
self._define_filter(self.nova_base_ipv4_filter)
self._define_filter(self.nova_base_ipv6_filter)
self._define_filter(self.nova_dhcp_filter)
self._define_filter(self.nova_ra_filter)
if FLAGS.allow_same_net_traffic:
self._define_filter(self.nova_project_filter)
if FLAGS.use_ipv6:
self._define_filter(self.nova_project_filter_v6)
self.static_filters_configured = True
def _filter_container(self, name, filters):
xml = '''<filter name='%s' chain='root'>%s</filter>''' % (
name,
''.join(["<filterref filter='%s'/>" % (f,) for f in filters]))
return xml
def nova_base_ipv4_filter(self):
retval = "<filter name='nova-base-ipv4' chain='ipv4'>"
for protocol in ['tcp', 'udp', 'icmp']:
for direction, action, priority in [('out', 'accept', 399),
('in', 'drop', 400)]:
retval += """<rule action='%s' direction='%s' priority='%d'>
<%s />
</rule>""" % (action, direction,
priority, protocol)
retval += '</filter>'
return retval
def nova_base_ipv6_filter(self):
retval = "<filter name='nova-base-ipv6' chain='ipv6'>"
for protocol in ['tcp-ipv6', 'udp-ipv6', 'icmpv6']:
for direction, action, priority in [('out', 'accept', 399),
('in', 'drop', 400)]:
retval += """<rule action='%s' direction='%s' priority='%d'>
<%s />
</rule>""" % (action, direction,
priority, protocol)
retval += '</filter>'
return retval
def nova_project_filter(self):
retval = "<filter name='nova-project' chain='ipv4'>"
for protocol in ['tcp', 'udp', 'icmp']:
retval += """<rule action='accept' direction='in' priority='200'>
<%s srcipaddr='$PROJNET' srcipmask='$PROJMASK' />
</rule>""" % protocol
retval += '</filter>'
return retval
def nova_project_filter_v6(self):
retval = "<filter name='nova-project-v6' chain='ipv6'>"
for protocol in ['tcp-ipv6', 'udp-ipv6', 'icmpv6']:
retval += """<rule action='accept' direction='inout'
priority='200'>
<%s srcipaddr='$PROJNETV6'
srcipmask='$PROJMASKV6' />
</rule>""" % (protocol)
retval += '</filter>'
return retval
def _define_filter(self, xml):
if callable(xml):
xml = xml()
# execute in a native thread and block current greenthread until done
tpool.execute(self._conn.nwfilterDefineXML, xml)
def unfilter_instance(self, instance, network_info):
"""Clear out the nwfilter rules."""
instance_name = instance.name
for (network, mapping) in network_info:
nic_id = mapping['mac'].replace(':', '')
instance_filter_name = self._instance_filter_name(instance, nic_id)
try:
self._conn.nwfilterLookupByName(instance_filter_name).\
undefine()
except libvirt.libvirtError:
LOG.debug(_('The nwfilter(%(instance_filter_name)s) '
'for %(instance_name)s is not found.') % locals())
instance_secgroup_filter_name =\
'%s-secgroup' % (self._instance_filter_name(instance))
try:
self._conn.nwfilterLookupByName(instance_secgroup_filter_name)\
.undefine()
except libvirt.libvirtError:
LOG.debug(_('The nwfilter(%(instance_secgroup_filter_name)s) '
'for %(instance_name)s is not found.') % locals())
def prepare_instance_filter(self, instance, network_info):
"""Creates an NWFilter for the given instance.
In the process, it makes sure the filters for the provider blocks,
security groups, and base filter are all in place.
"""
self.refresh_provider_fw_rules()
ctxt = context.get_admin_context()
instance_secgroup_filter_name = \
'%s-secgroup' % (self._instance_filter_name(instance))
instance_secgroup_filter_children = ['nova-base-ipv4',
'nova-base-ipv6',
'nova-allow-dhcp-server']
if FLAGS.use_ipv6:
networks = [network for (network, info) in network_info if
info['gateway6']]
if networks:
instance_secgroup_filter_children.\
append('nova-allow-ra-server')
for security_group in \
db.security_group_get_by_instance(ctxt, instance['id']):
self.refresh_security_group_rules(security_group['id'])
instance_secgroup_filter_children.append('nova-secgroup-%s' %
security_group['id'])
self._define_filter(
self._filter_container(instance_secgroup_filter_name,
instance_secgroup_filter_children))
network_filters = self.\
_create_network_filters(instance, network_info,
instance_secgroup_filter_name)
for (name, children) in network_filters:
self._define_filters(name, children)
def _create_network_filters(self, instance, network_info,
instance_secgroup_filter_name):
if instance['image_ref'] == str(FLAGS.vpn_image_id):
base_filter = 'nova-vpn'
else:
base_filter = 'nova-base'
result = []
for (_n, mapping) in network_info:
nic_id = mapping['mac'].replace(':', '')
instance_filter_name = self._instance_filter_name(instance, nic_id)
instance_filter_children = [base_filter, 'nova-provider-rules',
instance_secgroup_filter_name]
if FLAGS.allow_same_net_traffic:
instance_filter_children.append('nova-project')
if FLAGS.use_ipv6:
instance_filter_children.append('nova-project-v6')
result.append((instance_filter_name, instance_filter_children))
return result
def _define_filters(self, filter_name, filter_children):
self._define_filter(self._filter_container(filter_name,
filter_children))
def refresh_security_group_rules(self, security_group_id):
return self._define_filter(
self.security_group_to_nwfilter_xml(security_group_id))
def refresh_provider_fw_rules(self):
"""Update rules for all instances.
This is part of the FirewallDriver API and is called when the
provider firewall rules change in the database. In the
`prepare_instance_filter` we add a reference to the
'nova-provider-rules' filter for each instance's firewall, and
by changing that filter we update them all.
"""
xml = self.provider_fw_to_nwfilter_xml()
return self._define_filter(xml)
def security_group_to_nwfilter_xml(self, security_group_id):
security_group = db.security_group_get(context.get_admin_context(),
security_group_id)
rule_xml = ""
v6protocol = {'tcp': 'tcp-ipv6', 'udp': 'udp-ipv6', 'icmp': 'icmpv6'}
for rule in security_group.rules:
rule_xml += "<rule action='accept' direction='in' priority='300'>"
if rule.cidr:
version = netutils.get_ip_version(rule.cidr)
if(FLAGS.use_ipv6 and version == 6):
net, prefixlen = netutils.get_net_and_prefixlen(rule.cidr)
rule_xml += "<%s srcipaddr='%s' srcipmask='%s' " % \
(v6protocol[rule.protocol], net, prefixlen)
else:
net, mask = netutils.get_net_and_mask(rule.cidr)
rule_xml += "<%s srcipaddr='%s' srcipmask='%s' " % \
(rule.protocol, net, mask)
if rule.protocol in ['tcp', 'udp']:
rule_xml += "dstportstart='%s' dstportend='%s' " % \
(rule.from_port, rule.to_port)
elif rule.protocol == 'icmp':
LOG.info('rule.protocol: %r, rule.from_port: %r, '
'rule.to_port: %r', rule.protocol,
rule.from_port, rule.to_port)
if rule.from_port != -1:
rule_xml += "type='%s' " % rule.from_port
if rule.to_port != -1:
rule_xml += "code='%s' " % rule.to_port
rule_xml += '/>\n'
rule_xml += "</rule>\n"
xml = "<filter name='nova-secgroup-%s' " % security_group_id
if(FLAGS.use_ipv6):
xml += "chain='root'>%s</filter>" % rule_xml
else:
xml += "chain='ipv4'>%s</filter>" % rule_xml
return xml
def provider_fw_to_nwfilter_xml(self):
"""Compose a filter of drop rules from specified cidrs."""
rule_xml = ""
v6protocol = {'tcp': 'tcp-ipv6', 'udp': 'udp-ipv6', 'icmp': 'icmpv6'}
rules = db.provider_fw_rule_get_all(context.get_admin_context())
for rule in rules:
rule_xml += "<rule action='block' direction='in' priority='150'>"
version = netutils.get_ip_version(rule.cidr)
if(FLAGS.use_ipv6 and version == 6):
net, prefixlen = netutils.get_net_and_prefixlen(rule.cidr)
rule_xml += "<%s srcipaddr='%s' srcipmask='%s' " % \
(v6protocol[rule.protocol], net, prefixlen)
else:
net, mask = netutils.get_net_and_mask(rule.cidr)
rule_xml += "<%s srcipaddr='%s' srcipmask='%s' " % \
(rule.protocol, net, mask)
if rule.protocol in ['tcp', 'udp']:
rule_xml += "dstportstart='%s' dstportend='%s' " % \
(rule.from_port, rule.to_port)
elif rule.protocol == 'icmp':
LOG.info('rule.protocol: %r, rule.from_port: %r, '
'rule.to_port: %r', rule.protocol,
rule.from_port, rule.to_port)
if rule.from_port != -1:
rule_xml += "type='%s' " % rule.from_port
if rule.to_port != -1:
rule_xml += "code='%s' " % rule.to_port
rule_xml += '/>\n'
rule_xml += "</rule>\n"
xml = "<filter name='nova-provider-rules' "
if(FLAGS.use_ipv6):
xml += "chain='root'>%s</filter>" % rule_xml
else:
xml += "chain='ipv4'>%s</filter>" % rule_xml
return xml
def _instance_filter_name(self, instance, nic_id=None):
if not nic_id:
return 'nova-instance-%s' % (instance['name'])
return 'nova-instance-%s-%s' % (instance['name'], nic_id)
def instance_filter_exists(self, instance, network_info):
"""Check nova-instance-instance-xxx exists"""
for (network, mapping) in network_info:
nic_id = mapping['mac'].replace(':', '')
instance_filter_name = self._instance_filter_name(instance, nic_id)
try:
self._conn.nwfilterLookupByName(instance_filter_name)
except libvirt.libvirtError:
name = instance.name
LOG.debug(_('The nwfilter(%(instance_filter_name)s) for'
'%(name)s is not found.') % locals())
return False
return True
class IptablesFirewallDriver(FirewallDriver):
def __init__(self, execute=None, **kwargs):
from nova.network import linux_net
self.iptables = linux_net.iptables_manager
self.instances = {}
self.network_infos = {}
self.nwfilter = NWFilterFirewall(kwargs['get_connection'])
self.basicly_filtered = False
self.iptables.ipv4['filter'].add_chain('sg-fallback')
self.iptables.ipv4['filter'].add_rule('sg-fallback', '-j DROP')
self.iptables.ipv6['filter'].add_chain('sg-fallback')
self.iptables.ipv6['filter'].add_rule('sg-fallback', '-j DROP')
def setup_basic_filtering(self, instance, network_info):
"""Set up provider rules and basic NWFilter."""
self.nwfilter.setup_basic_filtering(instance, network_info)
if not self.basicly_filtered:
LOG.debug(_('iptables firewall: Setup Basic Filtering'))
self.refresh_provider_fw_rules()
self.basicly_filtered = True
def apply_instance_filter(self, instance, network_info):
"""No-op. Everything is done in prepare_instance_filter"""
pass
def unfilter_instance(self, instance, network_info):
if self.instances.pop(instance['id'], None):
# NOTE(vish): use the passed info instead of the stored info
self.network_infos.pop(instance['id'])
self.remove_filters_for_instance(instance)
self.iptables.apply()
self.nwfilter.unfilter_instance(instance, network_info)
else:
LOG.info(_('Attempted to unfilter instance %s which is not '
'filtered'), instance['id'])
def prepare_instance_filter(self, instance, network_info):
self.instances[instance['id']] = instance
self.network_infos[instance['id']] = network_info
self.add_filters_for_instance(instance)
self.iptables.apply()
def _create_filter(self, ips, chain_name):
return ['-d %s -j $%s' % (ip, chain_name) for ip in ips]
def _filters_for_instance(self, chain_name, network_info):
ips_v4 = [ip['ip'] for (_n, mapping) in network_info
for ip in mapping['ips']]
ipv4_rules = self._create_filter(ips_v4, chain_name)
ipv6_rules = []
if FLAGS.use_ipv6:
ips_v6 = [ip['ip'] for (_n, mapping) in network_info
for ip in mapping['ip6s']]
ipv6_rules = self._create_filter(ips_v6, chain_name)
return ipv4_rules, ipv6_rules
def _add_filters(self, chain_name, ipv4_rules, ipv6_rules):
for rule in ipv4_rules:
self.iptables.ipv4['filter'].add_rule(chain_name, rule)
if FLAGS.use_ipv6:
for rule in ipv6_rules:
self.iptables.ipv6['filter'].add_rule(chain_name, rule)
def add_filters_for_instance(self, instance):
network_info = self.network_infos[instance['id']]
chain_name = self._instance_chain_name(instance)
if FLAGS.use_ipv6:
self.iptables.ipv6['filter'].add_chain(chain_name)
self.iptables.ipv4['filter'].add_chain(chain_name)
ipv4_rules, ipv6_rules = self._filters_for_instance(chain_name,
network_info)
self._add_filters('local', ipv4_rules, ipv6_rules)
ipv4_rules, ipv6_rules = self.instance_rules(instance, network_info)
self._add_filters(chain_name, ipv4_rules, ipv6_rules)
def remove_filters_for_instance(self, instance):
chain_name = self._instance_chain_name(instance)
self.iptables.ipv4['filter'].remove_chain(chain_name)
if FLAGS.use_ipv6:
self.iptables.ipv6['filter'].remove_chain(chain_name)
def instance_rules(self, instance, network_info):
ctxt = context.get_admin_context()
ipv4_rules = []
ipv6_rules = []
# Always drop invalid packets
ipv4_rules += ['-m state --state ' 'INVALID -j DROP']
ipv6_rules += ['-m state --state ' 'INVALID -j DROP']
# Allow established connections
ipv4_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']
ipv6_rules += ['-m state --state ESTABLISHED,RELATED -j ACCEPT']
# Pass through provider-wide drops
ipv4_rules += ['-j $provider']
ipv6_rules += ['-j $provider']
dhcp_servers = [info['dhcp_server'] for (_n, info) in network_info]
for dhcp_server in dhcp_servers:
ipv4_rules.append('-s %s -p udp --sport 67 --dport 68 '
'-j ACCEPT' % (dhcp_server,))
#Allow project network traffic
if FLAGS.allow_same_net_traffic:
cidrs = [network['cidr'] for (network, _m) in network_info]
for cidr in cidrs:
ipv4_rules.append('-s %s -j ACCEPT' % (cidr,))
# We wrap these in FLAGS.use_ipv6 because they might cause
# a DB lookup. The other ones are just list operations, so
# they're not worth the clutter.
if FLAGS.use_ipv6:
# Allow RA responses
gateways_v6 = [mapping['gateway6'] for (_n, mapping) in
network_info]
for gateway_v6 in gateways_v6:
ipv6_rules.append(
'-s %s/128 -p icmpv6 -j ACCEPT' % (gateway_v6,))
#Allow project network traffic
if FLAGS.allow_same_net_traffic:
cidrv6s = [network['cidr_v6'] for (network, _m) in
network_info]
for cidrv6 in cidrv6s:
ipv6_rules.append('-s %s -j ACCEPT' % (cidrv6,))
security_groups = db.security_group_get_by_instance(ctxt,
instance['id'])
# then, security group chains and rules
for security_group in security_groups:
rules = db.security_group_rule_get_by_security_group(ctxt,
security_group['id'])
for rule in rules:
LOG.debug(_('Adding security group rule: %r'), rule)
if not rule.cidr:
version = 4
else:
version = netutils.get_ip_version(rule.cidr)
if version == 4:
fw_rules = ipv4_rules
else:
fw_rules = ipv6_rules
protocol = rule.protocol
if version == 6 and rule.protocol == 'icmp':
protocol = 'icmpv6'
args = ['-j ACCEPT']
if protocol:
args += ['-p', protocol]
if protocol in ['udp', 'tcp']:
if rule.from_port == rule.to_port:
args += ['--dport', '%s' % (rule.from_port,)]
else:
args += ['-m', 'multiport',
'--dports', '%s:%s' % (rule.from_port,
rule.to_port)]
elif protocol == 'icmp':
icmp_type = rule.from_port
icmp_code = rule.to_port
if icmp_type == -1:
icmp_type_arg = None
else:
icmp_type_arg = '%s' % icmp_type
if not icmp_code == -1:
icmp_type_arg += '/%s' % icmp_code
if icmp_type_arg:
if version == 4:
args += ['-m', 'icmp', '--icmp-type',
icmp_type_arg]
elif version == 6:
args += ['-m', 'icmp6', '--icmpv6-type',
icmp_type_arg]
if rule.cidr:
LOG.info('Using cidr %r', rule.cidr)
args += ['-s', rule.cidr]
fw_rules += [' '.join(args)]
else:
if rule['grantee_group']:
for instance in rule['grantee_group']['instances']:
LOG.info('instance: %r', instance)
ips = db.instance_get_fixed_addresses(ctxt,
instance['id'])
LOG.info('ips: %r', ips)
for ip in ips:
subrule = args + ['-s %s' % ip]
fw_rules += [' '.join(subrule)]
LOG.info('Using fw_rules: %r', fw_rules)
ipv4_rules += ['-j $sg-fallback']
ipv6_rules += ['-j $sg-fallback']
return ipv4_rules, ipv6_rules
def instance_filter_exists(self, instance, network_info):
"""Check nova-instance-instance-xxx exists"""
return self.nwfilter.instance_filter_exists(instance, network_info)
def refresh_security_group_members(self, security_group):
self.do_refresh_security_group_rules(security_group)
self.iptables.apply()
def refresh_security_group_rules(self, security_group):
self.do_refresh_security_group_rules(security_group)
self.iptables.apply()
@utils.synchronized('iptables', external=True)
def do_refresh_security_group_rules(self, security_group):
for instance in self.instances.values():
self.remove_filters_for_instance(instance)
self.add_filters_for_instance(instance)
def refresh_provider_fw_rules(self):
"""See class:FirewallDriver: docs."""
self._do_refresh_provider_fw_rules()
self.iptables.apply()
@utils.synchronized('iptables', external=True)
def _do_refresh_provider_fw_rules(self):
"""Internal, synchronized version of refresh_provider_fw_rules."""
self._purge_provider_fw_rules()
self._build_provider_fw_rules()
def _purge_provider_fw_rules(self):
"""Remove all rules from the provider chains."""
self.iptables.ipv4['filter'].empty_chain('provider')
if FLAGS.use_ipv6:
self.iptables.ipv6['filter'].empty_chain('provider')
def _build_provider_fw_rules(self):
"""Create all rules for the provider IP DROPs."""
self.iptables.ipv4['filter'].add_chain('provider')
if FLAGS.use_ipv6:
self.iptables.ipv6['filter'].add_chain('provider')
ipv4_rules, ipv6_rules = self._provider_rules()
for rule in ipv4_rules:
self.iptables.ipv4['filter'].add_rule('provider', rule)
if FLAGS.use_ipv6:
for rule in ipv6_rules:
self.iptables.ipv6['filter'].add_rule('provider', rule)
def _provider_rules(self):
"""Generate a list of rules from provider for IP4 & IP6."""
ctxt = context.get_admin_context()
ipv4_rules = []
ipv6_rules = []
rules = db.provider_fw_rule_get_all(ctxt)
for rule in rules:
LOG.debug(_('Adding provider rule: %s'), rule['cidr'])
version = netutils.get_ip_version(rule['cidr'])
if version == 4:
fw_rules = ipv4_rules
else:
fw_rules = ipv6_rules
protocol = rule['protocol']
if version == 6 and protocol == 'icmp':
protocol = 'icmpv6'
args = ['-p', protocol, '-s', rule['cidr']]
if protocol in ['udp', 'tcp']:
if rule['from_port'] == rule['to_port']:
args += ['--dport', '%s' % (rule['from_port'],)]
else:
args += ['-m', 'multiport',
'--dports', '%s:%s' % (rule['from_port'],
rule['to_port'])]
elif protocol == 'icmp':
icmp_type = rule['from_port']
icmp_code = rule['to_port']
if icmp_type == -1:
icmp_type_arg = None
else:
icmp_type_arg = '%s' % icmp_type
if not icmp_code == -1:
icmp_type_arg += '/%s' % icmp_code
if icmp_type_arg:
if version == 4:
args += ['-m', 'icmp', '--icmp-type',
icmp_type_arg]
elif version == 6:
args += ['-m', 'icmp6', '--icmpv6-type',
icmp_type_arg]
args += ['-j DROP']
fw_rules += [' '.join(args)]
return ipv4_rules, ipv6_rules
def _security_group_chain_name(self, security_group_id):
return 'nova-sg-%s' % (security_group_id,)
def _instance_chain_name(self, instance):
return 'inst-%s' % (instance['id'],)
|
"""Using `weakref` to create a cache."""
import gc
from weakref import WeakValueDictionary
import pytest
def test_weakref() -> None:
"""Use a `WeakValueDictionary` to cache large object."""
class BigImage:
"""Dummy class to simulate a large object."""
def __init__(self, value: int) -> None:
self.value = value
def __eq__(self, other: object) -> bool:
if not isinstance(other, BigImage):
return NotImplemented
return self.value == other.value
big_image = BigImage(10) # Create a reference
weak_dict = WeakValueDictionary()
weak_dict["big image"] = big_image
gc.collect()
assert weak_dict["big image"] is big_image
del big_image
gc.collect()
with pytest.raises(KeyError):
assert weak_dict["big image"]
|
class Person(object):
    def __init__(self, agent, past_traj, intention_getter, pos):
        self.agent = agent
        # initial_neighbor_from_set is an external helper kept from the original sketch.
        self.neighbor = initial_neighbor_from_set(pos)
        self.pos = pos
        self.intention = intention_getter(past_traj)
    def learn(self):
        # A network predicting the trajectory from the current neighbor,
        # position, and intention (left as a stub in the original).
        raise NotImplementedError
|
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: David C. Morrill
# Date: 11/30/2004
# Description: Plugin definition for the Traits 'View Editing Tool' (VET)
# ------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------
# Enthought library imports:
from enthought.envisage.core.runtime.extension import Plugin
# Plugin extension-point imports:
from enthought.envisage.core.runtime import Preferences
from enthought.envisage.ui import Action, Group, Menu, UIActions, \
UIViews, View
from enthought.envisage.ui.preference import PreferencePages, Page
#-------------------------------------------------------------------------------
# Extensions:
#-------------------------------------------------------------------------------
#--- Preferences ---------------------------------------------------------------
preferences = Preferences(
defaults = {
'explode_on_exit': True,
}
)
#--- Preference pages ----------------------------------------------------------
vet_preference_page = Page(
id = 'enthought.traits.vet.PreferencePage',
class_name = 'enthought.traits.vet.PreferencePage',
label = 'VET Preferences',
category = '',
)
preference_pages = PreferencePages(
pages = [ vet_preference_page ]
)
#--- Menus/Actions -------------------------------------------------------------
file_menu = Menu(
id = 'FileMenu',
label = 'File',
path = '',
groups = [
Group( name = 'AnExampleGroup' ),
Group( name = 'AnotherGroup' ),
]
)
sub_menu = Menu(
id = 'SubMenu',
label = 'Sub',
path = 'FileMenu/AnExampleGroup',
groups = [
Group( name = 'MainGroup' ),
Group( name = 'RadioGroup' ),
]
)
#do_it_action = Action(
# id = 'enthought.envisage.example.action.DoItAction',
# class_name = 'enthought.envisage.example.action.DoItAction',
# label = 'Do It!',
# description = "An action's description can appear in the status bar",
# icon = 'images/do_it.png',
# tooltip = 'A simple example action',
# menu_bar_path = 'FileMenu/SubMenu/MainGroup',
# tool_bar_path = 'additions',
# style = 'push',
#)
#
#higher_action = Action(
# id = 'enthought.envisage.example.action.HigherAction',
# class_name = 'enthought.envisage.example.action.DoItAction',
# label = 'Higher',
# description = "An action's description can appear in the status bar",
# icon = 'images/higher.png',
# tooltip = 'A simple example action',
# menu_bar_path = 'FileMenu/SubMenu/RadioGroup',
# tool_bar_path = 'RadioGroup',
# style = 'radio',
#)
#
#lower_action = Action(
# id = 'enthought.envisage.example.action.LowerAction',
# class_name = 'enthought.envisage.example.action.DoItAction',
# label = 'Lower',
# description = "An action's description can appear in the status bar",
# icon = 'images/lower.png',
# tooltip = 'A simple example action',
# menu_bar_path = 'FileMenu/SubMenu/RadioGroup',
# tool_bar_path = 'RadioGroup',
# style = 'radio',
#)
#
#overdrive_action = Action(
# id = 'enthought.envisage.example.action.OverdriveAction',
# class_name = 'enthought.envisage.example.action.DoItAction',
# label = 'Overdrive',
# description = "An action's description can appear in the status bar",
# icon = 'images/overdrive.png',
# tooltip = 'A simple example action',
# menu_bar_path = 'FileMenu/SubMenu/',
# tool_bar_path = 'additions',
# style = 'toggle',
#)
#
#ui_actions = UIActions(
# menus = [ file_menu, sub_menu ],
# actions = [ do_it_action, higher_action, lower_action, overdrive_action ]
#)
#--- Views ---------------------------------------------------------------------
ui_views = UIViews(
views = [
View(
name = 'VET Edit View',
icon = 'images/stuff_view.png',
id = 'enthought.traits.vet.EditView',
class_name = 'enthought.traits.vet.EditView',
position = 'left'
),
View(
name = 'VET Visual View',
icon = 'images/stuff_view.png',
id = 'enthought.traits.vet.VisualView',
class_name = 'enthought.traits.vet.VisualView',
position = 'top'
),
View(
name = 'VET Property View',
icon = 'images/stuff_view.png',
id = 'enthought.traits.vet.PropertyView',
class_name = 'enthought.traits.vet.PropertyView',
position = 'bottom'
),
]
)
#-------------------------------------------------------------------------------
# Plugin definitions:
#-------------------------------------------------------------------------------
plugin = Plugin(
# General information about the plugin:
id = 'enthought.traits.vet',
name = 'Traits View Editing Tool Plugin',
version = '1.0.0',
provider_name = 'Enthought, Inc',
provider_url = 'www.enthought.com',
autostart = True,
# The name of the class that implements the plugin:
class_name = 'enthought.traits.vet.VETPlugin',
# The Id's of the plugins that this plugin requires:
requires = [
'enthought.envisage.ui',
'enthought.envisage.ui.preference',
'enthought.envisage.ui.python_shell',
],
# The extension points offered by this plugin to allow other plugins to
# contribute to it:
extension_points = [],
# The contributions that this plugin makes to extension points offered by
# other plugins:
#extensions = [ ui_actions, ui_views, preferences, preference_pages ]
extensions = [ ui_views, preferences, preference_pages ]
)
|
from .base import X11BaseRecipe
class LibXxf86dgaRecipe(X11BaseRecipe):
def __init__(self, *args, **kwargs):
super(LibXxf86dgaRecipe, self).__init__(*args, **kwargs)
self.sha256 = '8eecd4b6c1df9a3704c04733c2f4fa93' \
'ef469b55028af5510b25818e2456c77e'
self.name = 'libXxf86dga'
self.version = '1.1.4'
self.depends = ['libX11', 'libXext']
|
"""Kotlin JS Rules"""
load("@io_bazel_rules_kotlin//kotlin:kotlin.bzl", _kt_js_import = "kt_js_import", _kt_js_library = "kt_js_library")
load("@io_bazel_rules_kotlin//kotlin/internal:defs.bzl", "KtJsInfo")
load("//third_party/bazel_rules/rules_kotlin/kotlin/js:impl.bzl", "kt_js_import_impl")
kt_js_library = _kt_js_library
kt_js_import = _kt_js_import
kt_js_import_fixed = rule(
attrs = {
"jars": attr.label_list(
allow_files = [".jar"],
mandatory = True,
),
"srcjar": attr.label(
mandatory = False,
allow_single_file = ["-sources.jar"],
),
"runtime_deps": attr.label_list(
default = [],
allow_files = [".jar"],
mandatory = False,
),
"module_name": attr.string(
doc = "internal attribute",
mandatory = False,
),
"module_root": attr.string(
doc = "internal attriubte",
mandatory = False,
),
"_importer": attr.label(
default = "//third_party/bazel_rules/rules_kotlin/kotlin/js:importer",
allow_files = True,
executable = True,
cfg = "host",
),
},
outputs = dict(
js = "%{module_name}.js",
js_map = "%{module_name}.js.map",
),
implementation = kt_js_import_impl,
provides = [KtJsInfo],
)
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import mxnet as mx
from mxnet.test_utils import *
def reldiff(a, b):
diff = np.sum(np.abs(a - b))
norm = np.sum(np.abs(a))
if diff == 0:
return 0
reldiff = diff / norm
return reldiff
def test_chain(ctx1=mx.cpu(0), ctx2=mx.cpu(1), dtype=np.float32):
n = 2
data1 = mx.sym.Variable('data1', dtype=dtype)
data2 = mx.sym.Variable('data2', dtype=dtype)
data3 = mx.sym.Variable('data3', dtype=dtype)
with mx.AttrScope(ctx_group='dev1'):
net = data1 + data2
net = net * dtype(3)
with mx.AttrScope(ctx_group='dev2'):
net = net + data3
arr = []
arr_grad = []
shape = (4, 5)
with mx.Context(ctx1):
for i in range(n):
arr.append(mx.nd.empty(shape, dtype=dtype))
arr_grad.append(mx.nd.empty(shape, dtype=dtype))
with mx.Context(ctx2):
arr.append(mx.nd.empty(shape, dtype=dtype))
arr_grad.append(mx.nd.empty(shape, dtype=dtype))
exec1 = net.bind(ctx1,
args=arr,
args_grad=arr_grad,
group2ctx={'dev1': ctx1, 'dev2': ctx2})
arr[0][:] = dtype(1)
arr[1][:] = dtype(2)
arr[2][:] = dtype(3)
arr2 = [a.copyto(ctx1) for a in arr]
arr_grad2 = [a.copyto(ctx1) for a in arr_grad]
exec2 = net.bind(ctx1,
args=arr2,
args_grad=arr_grad2)
# Show the execution plan that involves copynode
print(exec1.debug_str())
exec1.forward(is_train=True)
exec2.forward(is_train=True)
assert reldiff(exec1.outputs[0].asnumpy(), exec2.outputs[0].asnumpy()) < 1e-6
out_grad = mx.nd.empty(shape, ctx1)
out_grad[:] = dtype(1)
exec1.backward([out_grad])
exec2.backward([out_grad.copyto(ctx1)])
for a, b in zip(arr_grad, arr_grad2):
assert reldiff(a.asnumpy(), b.asnumpy()) < 1e-6
def test_chain_type_device():
ctx_pairs = [(mx.cpu(0), mx.cpu(1))]
if default_context().device_type == 'gpu':
ctx_pairs = ctx_pairs + [(mx.gpu(0), mx.gpu(0)), (mx.cpu(0), mx.gpu(0)), (mx.gpu(0), mx.cpu(0))]
for ctx1, ctx2 in ctx_pairs:
for dtype in [np.float16, np.float32, np.float64]:
test_chain(ctx1, ctx2, dtype)
if __name__ == '__main__':
test_chain_type_device()
|
import pandas as pd
import warnings
warnings.simplefilter("ignore")
import pickle
from sklearn.linear_model import LinearRegression
data = pd.read_csv(r'C:\Users\Prasanna\Desktop\model deployment\Admission_Predict.csv')
data.columns
X = data.drop('Chance of Admit ', axis = 1).copy()
y = data['Chance of Admit '].copy()
model= LinearRegression()
model.fit(X,y)
model.score(X,y)
pickle.dump(model,open('model.pkl','wb'))
# Loading model to compare the results
model = pickle.load(open('model.pkl','rb'))
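# A small illustrative check (added; not in the original script): confirm the
# pickle round-trip by scoring the reloaded model and predicting for the first
# training row. Column names follow the dataset loaded above.
print(model.score(X, y))
print(model.predict(X.iloc[[0]]))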
|
import pygame
#button class
class Button():
def __init__(self, x, y, image, scale):
width = image.get_width()
height = image.get_height()
self.X = x
self.Y = y
self.image = pygame.transform.scale(image, (int(width * scale), int(height * scale)))
self.rect = self.image.get_rect()
self.rect.topleft = (self.X, self.Y)
self.clicked = False
def draw(self, surface):
action = False
#get mouse position
pos = pygame.mouse.get_pos()
#check mouseover and clicked conditions
if self.rect.collidepoint(pos):
if pygame.mouse.get_pressed()[0] == 1 and self.clicked == False:
self.clicked = True
action = True
if pygame.mouse.get_pressed()[0] == 0:
self.clicked = False
#draw button on screen
surface.blit(self.image, self.rect.topleft)
return action |
import pprint
import numpy as np
from core.net_errors import NetIsNotInitialized
def calculate_average_neighboring(net_object):
if net_object.net is None:
raise NetIsNotInitialized()
net = net_object.net
zero_weights = np.zeros((net_object.config[0]))
weights = np.ma.array(np.reshape(net[-1]['w'], (net_object.m, net_object.n, zero_weights.shape[0])), mask=False)
weights = np.insert(weights, (0, weights.shape[1]), 0, axis=1)
weights = np.insert(weights, (0, weights.shape[0]), 0, axis=0)
weights.mask = True
weights.mask[1:-1, 1:-1] = False
result = np.zeros((net_object.m, net_object.n))
for i, j in np.ndindex(weights.shape[:2]):
if not weights.mask[i, j].all():
a = [[i - 1, i - 1, i, i, i + 1, i + 1], [j - 1, j, j - 1, j + 1, j - 1, j]]
w = weights[a]
d = []
for weight in w:
if not np.all(weight.mask):
d.append(net_object.d(weights[i, j], weight))
result[i - 1, j - 1] = np.nanmean(d)
return result
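# A hedged usage sketch (the SOM-like net_object below is an assumption inferred
# from the attributes this function reads: net[-1]['w'], config[0], m, n, and d):
#
#     som = build_trained_net(...)                    # hypothetical constructor/trainer
#     umatrix = calculate_average_neighboring(som)
#     print(umatrix.shape)                            # (som.m, som.n): mean distance to neighbours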
|
"""Exercício Python 064: Crie um programa que leia vários números inteiros pelo teclado.
O programa só vai parar quando o usuário digitar o valor 999, que é a condição de parada.
No final, mostre quantos números foram digitados e qual foi a soma entre eles (desconsiderando o flag)."""
n = int(input('1º Número: '))
soma = 0
c = 1
while n != 999:
c += 1
soma += n
n = int(input('{}º Número: '.format(c)))
print('Foram digitado {} números \nA soma é {}'.format(c - 1, soma))
|
from detectron2.data.datasets import load_cityscapes_instances
from detectron2.data.datasets.cityscapes import load_cityscapes_semantic, cityscapes_files_to_dict
from pycocotools.coco import COCO
import functools
import multiprocessing as mp
from detectron2.utils import logger
from detectron2.utils.comm import get_world_size
import io
import logging
import contextlib
import os
from fvcore.common.timer import Timer
from detectron2.structures import BoxMode
from fvcore.common.file_io import PathManager
from detectron2.data import MetadataCatalog, DatasetCatalog
"""
This file contains functions to parse COCO-format annotations into dicts in "Detectron2 format".
"""
logger = logging.getLogger(__name__)
try:
import cv2 # noqa
except ImportError:
# OpenCV is an optional dependency at the moment
pass
import json
# ==== Predefined splits for raw cityscapes images ===========
COCO_CATEGORIES = [
{"color": [220, 20, 60], "isthing": 1, "id": 1, "name": "person"},
]
_RAW_CITYSCAPES_SPLITS = {
"cityscapes_fine2_{task}_train": ("cityscape/leftImg8bit/train", "cityscape/gtFine/train"),
"cityscapes_fine2_{task}_val": ("cityscape/leftImg8bit/val", "cityscape/gtFine/val"),
"cityscapes_fine2_{task}_test": ("cityscape/leftImg8bit/test", "cityscape/gtFine/test"),
"cityscapes_fine2_{task}_sub_train": ("cityscape/leftImg8bit/sub_train", "cityscape/gtFine/sub_train"),
}
def _get_coco_instances_meta():
thing_ids = [k["id"] for k in COCO_CATEGORIES if k["isthing"] == 1]
thing_colors = [k["color"] for k in COCO_CATEGORIES if k["isthing"] == 1]
assert len(thing_ids) == 1, len(thing_ids)
# Mapping from the incontiguous COCO category id to an id in [0, 79]
thing_dataset_id_to_contiguous_id = {k: i for i, k in enumerate(thing_ids)}
thing_classes = [k["name"] for k in COCO_CATEGORIES if k["isthing"] == 1]
ret = {
"thing_dataset_id_to_contiguous_id": thing_dataset_id_to_contiguous_id,
"thing_classes": thing_classes,
"thing_colors": thing_colors,
}
return ret
def _get_builtin_metadata(dataset_name):
if dataset_name == "coco_person":
return _get_coco_instances_meta()
elif dataset_name == "cityscapes":
# fmt: off
CITYSCAPES_THING_CLASSES = [
"person", "rider", "car", "truck",
"bus", "train", "motorcycle", "bicycle",
]
CITYSCAPES_STUFF_CLASSES = [
"road", "sidewalk", "building", "wall", "fence", "pole", "traffic light",
"traffic sign", "vegetation", "terrain", "sky", "person", "rider", "car",
"truck", "bus", "train", "motorcycle", "bicycle", "license plate",
]
# fmt: on
return {
"thing_classes": CITYSCAPES_THING_CLASSES,
"stuff_classes": CITYSCAPES_STUFF_CLASSES,
}
def register_all_cityscapes(root):
for key, (image_dir, gt_dir) in _RAW_CITYSCAPES_SPLITS.items():
meta = _get_builtin_metadata("cityscapes")
image_dir = os.path.join(root, image_dir)
gt_dir = os.path.join(root, gt_dir)
inst_key = key.format(task="instance_seg")
DatasetCatalog.register(
inst_key,
lambda x=image_dir, y=gt_dir: load_cityscapes_instances(
x, y, from_json=True, to_polygons=True
),
)
MetadataCatalog.get(inst_key).set(
image_dir=image_dir, gt_dir=gt_dir, evaluator_type="cityscapes", **meta
)
sem_key = key.format(task="sem_seg")
DatasetCatalog.register(
sem_key, lambda x=image_dir, y=gt_dir: load_cityscapes_semantic(x, y)
)
MetadataCatalog.get(sem_key).set(
image_dir=image_dir, gt_dir=gt_dir, evaluator_type="sem_seg", **meta
)
def register_a_cityscapes(image_dir, gt_dir, dataset_name):
meta = _get_builtin_metadata("cityscapes")
DatasetCatalog.register(
dataset_name,
lambda x=image_dir, y=gt_dir: load_cityscapes_instances(
x, y, from_json=True, to_polygons=True
),
)
MetadataCatalog.get(dataset_name).set(
image_dir=image_dir, gt_dir=gt_dir, evaluator_type="cityscapes", **meta
)
def register_a_cityscapes_from_selected_image_files(image_dir, gt_dir, selected_image_files ,dataset_name):
meta = _get_builtin_metadata("cityscapes")
DatasetCatalog.register(
dataset_name,
lambda x=image_dir, y=gt_dir, z=selected_image_files: load_cityscapes_instances_from_selected_image_files(
x, y, z, from_json=True, to_polygons=True
),
)
MetadataCatalog.get(dataset_name).set(
image_dir=image_dir, gt_dir=gt_dir, evaluator_type="cityscapes", **meta
)
def load_cityscapes_instances_from_selected_image_files(image_dir, gt_dir, selected_image_files,from_json=True, to_polygons=True):
"""
Args:
image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train".
gt_dir (str): path to the raw annotations. e.g., "~/cityscapes/gtFine/train".
from_json (bool): whether to read annotations from the raw json file or the png files.
to_polygons (bool): whether to represent the segmentation as polygons
(COCO's format) instead of masks (cityscapes's format).
Returns:
list[dict]: a list of dicts in Detectron2 standard format. (See
`Using Custom Datasets </tutorials/datasets.html>`_ )
"""
if from_json:
assert to_polygons, (
"Cityscapes's json annotations are in polygon format. "
"Converting to mask format is not supported now."
)
files = []
for image_file in selected_image_files:
suffix = "leftImg8bit.png"
assert image_file.endswith(suffix)
prefix = image_dir
instance_file = gt_dir + image_file[len(prefix) : -len(suffix)] + "gtFine_instanceIds.png"
assert os.path.isfile(instance_file), instance_file
label_file = gt_dir + image_file[len(prefix) : -len(suffix)] + "gtFine_labelIds.png"
assert os.path.isfile(label_file), label_file
json_file = gt_dir + image_file[len(prefix) : -len(suffix)] + "gtFine_polygons.json"
files.append((image_file, instance_file, label_file, json_file))
assert len(files), "No images found in {}".format(image_dir)
logger = logging.getLogger(__name__)
logger.info("Preprocessing cityscapes annotations ...")
# This is still not fast: all workers will execute duplicate works and will
# take up to 10m on a 8GPU server.
pool = mp.Pool(processes=max(mp.cpu_count() // get_world_size() // 2, 4))
ret = pool.map(
functools.partial(cityscapes_files_to_dict, from_json=from_json, to_polygons=to_polygons),
files,
)
logger.info("Loaded {} images from {}".format(len(ret), image_dir))
# Map cityscape ids to contiguous ids
from cityscapesScripts.cityscapesscripts.helpers.labels import labels
labels = [l for l in labels if l.hasInstances and not l.ignoreInEval]
dataset_id_to_contiguous_id = {l.id: idx for idx, l in enumerate(labels)}
for dict_per_image in ret:
for anno in dict_per_image["annotations"]:
anno["category_id"] = dataset_id_to_contiguous_id[anno["category_id"]]
return ret
def get_coco_dicts_from_selected_image_files(json_file, image_root, selected_image_files,
dataset_name=None, extra_annotation_keys=None):
dataset_dicts = get_coco_person_dicts(json_file=json_file,
image_root=image_root,
dataset_name=dataset_name,
extra_annotation_keys=extra_annotation_keys)
dataset_dicts = [item for item in dataset_dicts if item['image_id'] in selected_image_files]
return dataset_dicts
def register_coco_instances_from_selected_image_files(name, json_file, image_root, selected_image_files):
"""
Register a dataset in COCO's json annotation format for
instance detection, instance segmentation and keypoint detection.
(i.e., Type 1 and 2 in http://cocodataset.org/#format-data.
`instances*.json` and `person_keypoints*.json` in the dataset).
This is an example of how to register a new dataset.
You can do something similar to this function, to register new datasets.
Args:
name (str): the name that identifies a dataset, e.g. "coco_2014_train".
metadata (dict): extra metadata associated with this dataset. You can
leave it as an empty dict.
json_file (str): path to the json instance annotation file.
image_root (str): directory which contains all the images.
"""
# 1. register a function which returns dicts
DatasetCatalog.register(name, lambda: get_coco_dicts_from_selected_image_files(json_file, image_root, selected_image_files, name))
# 2. Optionally, add metadata about this dataset,
# since they might be useful in evaluation, visualization or logging
metadata = _get_builtin_metadata('coco_person')
MetadataCatalog.get(name).set(
json_file=json_file, image_root=image_root, evaluator_type="coco", **metadata
)
def get_coco_person_dicts(json_file, image_root, dataset_name=None, extra_annotation_keys=None):
"""
    Get a list of dicts; each dict contains only person-class images and person annotations.
Args:
json_file (str): full path to the json file in COCO instances annotation format.
image_root (str): the directory where the images in this json file exists.
dataset_name (str): the name of the dataset (e.g., coco_2017_train).
If provided, this function will also put "thing_classes" into
the metadata associated with this dataset.
extra_annotation_keys (list[str]): list of per-annotation keys that should also be
loaded into the dataset dict (besides "iscrowd", "bbox", "keypoints",
"category_id", "segmentation"). The values for these keys will be returned as-is.
For example, the densepose annotations are loaded in this way.
Returns:
list[dict]: a list of dicts in Detectron2 standard format. (See
`Using Custom Datasets </tutorials/datasets.html>`_ )
Notes:
1. This function does not read the image files.
The results do not have the "image" field.
"""
from pycocotools.coco import COCO
timer = Timer()
json_file = PathManager.get_local_path(json_file)
with contextlib.redirect_stdout(io.StringIO()):
coco_api = COCO(json_file)
if timer.seconds() > 1:
logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds()))
id_map = None
if dataset_name is not None:
meta = MetadataCatalog.get(dataset_name)
# cat_ids = sorted(coco_api.getCatIds())
"""
fix the category as person
"""
cat_ids = coco_api.getCatIds('person')
cats = coco_api.loadCats(cat_ids)
# The categories in a custom json file may not be sorted.
thing_classes = [c["name"] for c in sorted(cats, key=lambda x: x["id"])]
meta.thing_classes = thing_classes
# In COCO, certain category ids are artificially removed,
# and by convention they are always ignored.
# We deal with COCO's id issue and translate
# the category ids to contiguous ids in [0, 80).
# It works by looking at the "categories" field in the json, therefore
# if users' own json also have incontiguous ids, we'll
# apply this mapping as well but print a warning.
if not (min(cat_ids) == 1 and max(cat_ids) == len(cat_ids)):
if "coco" not in dataset_name:
logger.warning(
"""
Category ids in annotations are not in [1, #categories]! We'll apply a mapping for you.
"""
)
id_map = {v: i for i, v in enumerate(cat_ids)}
meta.thing_dataset_id_to_contiguous_id = id_map
# sort indices for reproducible results
# img_ids = sorted(list(coco_api.imgs.keys()))
""" fix the img_ids and sort it
"""
img_ids = coco_api.getImgIds(catIds=cat_ids)
img_ids = sorted(img_ids)
# imgs is a list of dicts, each looks something like:
# {'license': 4,
# 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',
# 'file_name': 'COCO_val2014_000000001268.jpg',
# 'height': 427,
# 'width': 640,
# 'date_captured': '2013-11-17 05:57:24',
# 'id': 1268}
imgs = coco_api.loadImgs(img_ids)
# anns is a list[list[dict]], where each dict is an annotation
# record for an object. The inner list enumerates the objects in an image
# and the outer list enumerates over images. Example of anns[0]:
# [{'segmentation': [[192.81,
# 247.09,
# ...
# 219.03,
# 249.06]],
# 'area': 1035.749,
# 'iscrowd': 0,
# 'image_id': 1268,
# 'bbox': [192.81, 224.8, 74.73, 33.43],
# 'category_id': 16,
# 'id': 42986},
# ...]
anns = [coco_api.imgToAnns[img_id] for img_id in img_ids]
if "minival" not in json_file:
# The popular valminusminival & minival annotations for COCO2014 contain this bug.
# However the ratio of buggy annotations there is tiny and does not affect accuracy.
# Therefore we explicitly white-list them.
ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image]
assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique!".format(
json_file
)
imgs_anns = list(zip(imgs, anns))
logger.info("Loaded {} images in COCO format from {}".format(len(imgs_anns), json_file))
dataset_dicts = []
ann_keys = ["iscrowd", "bbox", "keypoints", "category_id"] + (extra_annotation_keys or [])
num_instances_without_valid_segmentation = 0
for (img_dict, anno_dict_list) in imgs_anns:
record = {}
record["file_name"] = os.path.join(image_root, img_dict["file_name"])
record["height"] = img_dict["height"]
record["width"] = img_dict["width"]
image_id = record["image_id"] = img_dict["id"]
objs = []
for anno in anno_dict_list:
if anno['category_id'] == 1:
# Check that the image_id in this annotation is the same as
# the image_id we're looking at.
# This fails only when the data parsing logic or the annotation file is buggy.
# The original COCO valminusminival2014 & minival2014 annotation files
# actually contains bugs that, together with certain ways of using COCO API,
# can trigger this assertion.
assert anno["image_id"] == image_id
assert anno.get("ignore", 0) == 0
obj = {key: anno[key] for key in ann_keys if key in anno}
segm = anno.get("segmentation", None)
if segm: # either list[list[float]] or dict(RLE)
if not isinstance(segm, dict):
# filter out invalid polygons (< 3 points)
segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]
if len(segm) == 0:
num_instances_without_valid_segmentation += 1
continue # ignore this instance
obj["segmentation"] = segm
keypts = anno.get("keypoints", None)
if keypts: # list[int]
for idx, v in enumerate(keypts):
if idx % 3 != 2:
# COCO's segmentation coordinates are floating points in [0, H or W],
# but keypoint coordinates are integers in [0, H-1 or W-1]
# Therefore we assume the coordinates are "pixel indices" and
# add 0.5 to convert to floating point coordinates.
keypts[idx] = v + 0.5
obj["keypoints"] = keypts
obj["bbox_mode"] = BoxMode.XYWH_ABS
if id_map:
obj["category_id"] = id_map[obj["category_id"]]
objs.append(obj)
record["annotations"] = objs
dataset_dicts.append(record)
if num_instances_without_valid_segmentation > 0:
logger.warn(
"Filtered out {} instances without valid segmentation. "
"There might be issues in your dataset generation process.".format(
num_instances_without_valid_segmentation
)
)
debug = 1
return dataset_dicts
# dataset_person_dicts = []
# for dataset_dict in dataset_dicts:
# if dataset_dict['image_id'] in person_img_ids:
# dataset_person_dicts.append(dataset_dict)
#
# assert len(person_img_ids) == len(dataset_person_dicts)
# debug = 1
# return dataset_person_dicts
def register_coco_instances(name, json_file, image_root):
"""
Register a dataset in COCO's json annotation format for
instance detection, instance segmentation and keypoint detection.
(i.e., Type 1 and 2 in http://cocodataset.org/#format-data.
`instances*.json` and `person_keypoints*.json` in the dataset).
This is an example of how to register a new dataset.
You can do something similar to this function, to register new datasets.
Args:
name (str): the name that identifies a dataset, e.g. "coco_2014_train".
metadata (dict): extra metadata associated with this dataset. You can
leave it as an empty dict.
json_file (str): path to the json instance annotation file.
image_root (str): directory which contains all the images.
"""
# 1. register a function which returns dicts
DatasetCatalog.register(name, lambda: get_coco_person_dicts(json_file, image_root, name))
# 2. Optionally, add metadata about this dataset,
# since they might be useful in evaluation, visualization or logging
metadata = _get_builtin_metadata('coco_person')
MetadataCatalog.get(name).set(
json_file=json_file, image_root=image_root, evaluator_type="coco", **metadata
)
def get_hw_dicts(image_id=None):
"""
image_id: list[int], if given image_id, the returned dict_list only contain corresponding dict.
:return: a list[dict], dict : {'file_name': str :'the/path/to/image/2345.jpg',
'height': int,
'width': int,
'image_id': int,
'annotations': list[dict]':
{ 'bbox': list[float],
'bbox_mode': int,
'category_id':int,
'segmentation':list[list[float]] each list[float] is one
simple polygon in the format of [x1, y1, ...,xn,yn]
}
"""
dict_list = []
if image_id is not None:
dict_list = [dic for dic in dict_list if dic['image_id'] in image_id]
return dict_list
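# Illustrative record shape only (hypothetical values, matching the docstring above):
# {"file_name": "the/path/to/image/2345.jpg", "height": 480, "width": 640, "image_id": 2345,
#  "annotations": [{"bbox": [10.0, 20.0, 100.0, 200.0], "bbox_mode": BoxMode.XYWH_ABS,
#                   "category_id": 0, "segmentation": [[10.0, 20.0, 110.0, 20.0, 60.0, 220.0]]}]}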
def register_hw_instances(name, image_id=None):
"""
:param image_id: see function get_hw_dicts
:param name:
:return:
"""
# 1. register a function which returns dicts
DatasetCatalog.register(name, lambda: get_hw_dicts(image_id))
MetadataCatalog.get(name).set(thing_classes=["person"])
|
from .user_urls import *
|
from behave import given, when, then
from acceptance_tests.features.pages import collection_exercise, collection_exercise_details
from common.browser_utilities import is_text_present_with_retry, wait_for
@given('the collection exercise is Scheduled')
def collection_exercise_exists_and_scheduled_displayed(context):
collection_exercise_details.go_to(context.short_name, context.period)
ce_state = collection_exercise_details.get_status()
assert collection_exercise.is_scheduled(ce_state), ce_state
@given('the collection exercise has a loaded sample and collection instruments')
def collection_exercise_exists_and_loaded_sample_cis(context):
collection_exercise_details.go_to(context.short_name, context.period)
ce_state = collection_exercise_details.get_status()
assert collection_exercise.is_scheduled(ce_state), ce_state
collection_exercise_details.load_sample('resources/sample_files/business-survey-sample-date.csv')
success_text = collection_exercise_details.get_sample_success_text()
assert success_text == 'Sample loaded successfully'
collection_exercise_details.load_collection_instrument(
test_file='resources/collection_instrument_files/064_201803_0001.xlsx')
success_text = collection_exercise_details.get_success_panel_text()
assert success_text == 'Collection instrument loaded'
@when('the user navigates to the survey details page')
def navigate_to_collection_exercise_details(context):
collection_exercise.go_to(context.short_name)
@then('the status of the collection exercise is Ready for Review')
def collection_exercise_is_ready_for_review(context):
collection_exercises = wait_for(collection_exercise.get_collection_exercises, 16, 2)
# Status updated async so wait until updated
assert is_text_present_with_retry('Ready for review', 10)
assert context.period in (ce['exercise_ref'] for ce in collection_exercises)
@given('the user has loaded the sample')
@when('the user loads the sample')
def load_sample(_):
collection_exercise_details.load_sample('resources/sample_files/business-survey-sample-date.csv')
success_text = collection_exercise_details.get_sample_success_text()
assert success_text == 'Sample loaded successfully'
@given('the user has loaded the collection instruments')
@when('the user loads the collection instruments')
def load_collection_instruments(_):
collection_exercise_details.load_collection_instrument(
test_file='resources/collection_instrument_files/064_201803_0001.xlsx')
success_text = collection_exercise_details.get_success_panel_text()
assert success_text == 'Collection instrument loaded'
@then('the collection exercise is Ready for Review')
def ce_details_state_is_ready_for_review(_):
assert is_text_present_with_retry('Ready for review', 10)
|
from django.urls import path,include
from rest_framework.routers import DefaultRouter
from loan_app import views
router = DefaultRouter()
router.register('approval',views.ApprovalViewSet)
urlpatterns = [
path('',include(router.urls)),
] |
from joecool import create_app
app = create_app() |
from hamcrest import assert_that, equal_to, is_
from marshmallow import Schema
from microcosm_flask.fields import TimestampField
from microcosm_flask.swagger.api import build_parameter
class TestSchema(Schema):
unix_timestamp = TimestampField()
iso_timestamp = TimestampField(use_isoformat=True)
def test_field_unix_timestamp():
parameter = build_parameter(TestSchema().fields["unix_timestamp"])
assert_that(parameter, is_(equal_to({
"type": "float",
"format": "timestamp",
})))
def test_field_iso_timestamp():
parameter = build_parameter(TestSchema().fields["iso_timestamp"])
assert_that(parameter, is_(equal_to({
"type": "string",
"format": "date-time",
})))
|
""" Usage:
trained_oie_extractor [--model=MODEL_DIR] --in=INPUT_FILE --out=OUTPUT_FILE [--tokenize] [--conll] [--beam=BEAM]
Options:
--beam=BEAM Beam search size [default: 1].
Run a trained OIE model on raw sentences.
MODEL_DIR - Pretrained RNN model folder (containing model.json and pretrained weights).
INPUT FILE - File where each row is a tokenized sentence to be parsed with OIE.
OUTPUT_FILE - File where the OIE tuples will be output.
tokenize - indicates that the input sentences are NOT tokenized.
conll - Print a CoNLL representation with probabilities
Format of OUTPUT_FILE:
Sent, prob, pred, arg1, arg2, ...
"""
from rnn.model import load_pretrained_rnn
from docopt import docopt
from functools import reduce  # reduce is used by the confidence lambdas below (Python 3)
import logging
import nltk
import re
import numpy as np
from collections import defaultdict
logging.basicConfig(level = logging.INFO)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
class Trained_oie:
"""
Compose OIE extractions given a pretrained RNN OIE model predicting classes per word
"""
def __init__(self, model, tokenize):
"""
model - pretrained supervised model
tokenize - instance-wide indication whether all of the functions should
tokenize their input
"""
self.model = model
self.tokenize = tokenize
def split_words(self, sent):
"""
Apply tokenization if needed, else just split by space
sent - string
"""
return nltk.word_tokenize(sent) if self.tokenize\
else re.split(r' +', sent) # Allow arbitrary number of spaces
def get_extractions(self, sent, beam=1):
"""
Returns a list of OIE extractions for a given sentence
sent - a list of tokens
"""
ret = []
avg_conf = lambda probs: np.average(probs)
prod_conf = lambda probs: reduce(lambda x, y: x * y, probs) + 0.001
for ((pred_ind, pred_word), labels) in self.model.predict_sentence_beamsearch(sent, k=beam):
cur_args = []
cur_arg = []
probs = []
# collect args
assert len(labels) == len(sent), '#labels should be equal to #tokens in the sentence'
for i, ((label, prob), word) in enumerate(zip(labels, sent)):
probs.append(prob)
# TODO: (1) only focus on argument (2) what if arguments are not in order
if label.startswith("A"):
cur_arg.append((word, i))
#probs.append(prob)
elif cur_arg:
cur_args.append(cur_arg)
cur_arg = []
# Create extraction
if cur_args:
ret.append(Extraction(sent,
(pred_word, pred_ind),
cur_args,
probs,
calc_prob=avg_conf,
))
return ret
def conll_with_prob(self, sent):
"""
Returns a conll representation of sentence
Format:
word index, word, pred_index, label, probability
"""
logger.debug("Parsing: {}".format(sent))
sent = self.split_words(sent)
ret = ""
for ((pred_ind, pred_word), labels) in self.model.predict_sentence(sent):
for (word_ind, ((label, prob), word)) in enumerate(zip(labels, sent)):
ret+= "\t".join(map(str,
[word_ind, word, pred_ind, label, prob]
)) + '\n'
ret += '\n'
return ret
def parse_sent(self, sent, beam=1):
"""
Returns a list of extractions for the given sentence
sent - a tokenized sentence
tokenize - boolean indicating whether the sentences should be tokenized first
"""
logger.debug("Parsing: {}".format(sent))
return self.get_extractions(self.split_words(sent), beam=beam)
def parse_sents(self, sents):
"""
Returns a list of extractions per sent in sents.
sents - list of tokenized sentences
tokenize - boolean indicating whether the sentences should be tokenized first
"""
return [self.parse_sent(sent)
for sent in sents]
class Extraction:
"""
Store and print an OIE extraction
"""
def __init__(self, sent, pred, args, probs,
calc_prob = lambda probs: 1.0 / (reduce(lambda x, y: x * y, probs) + 0.001)):
"""
sent - Tokenized sentence - list of strings
pred - Predicate word
args - List of arguments (each a tuple <string, position (zero based)>)
probs - list of float in [0,1] indicating the probability
of each of the items in the extraction
calc_prob - function which takes a list of probabilities for each of the
        items and computes a single probability for the joint occurrence of this extraction.
"""
self.sent = sent
self.calc_prob = calc_prob
self.probs = probs
self.prob = self.calc_prob(self.probs)
self.pred = pred
self.args = args
logger.debug(self)
def __old_str__(self):
"""
Format (tab separated):
Sent, prob, pred, arg1, arg2, ...
"""
return '\t'.join(map(str,
[' '.join(self.sent),
self.prob,
self.pred,
'\t'.join([' '.join(arg)
for arg in self.args])]))
def __str__(self):
'''
store both the word string and the start position in the original sentence.
'''
return '\t'.join(map(str,
[' '.join(self.sent),
self.prob,
'{}##{}'.format(*self.pred),
'\t'.join([' '.join(map(lambda x: x[0], arg)) + '##' + str(list(map(lambda x: x[1], arg))[0])
for arg in self.args])]))
class Mock_model:
"""
    Load a conll file annotated with labels and probabilities
    and present an external interface of a trained rnn model (through predict_sentence).
    This can be used to avoid re-running the trained model.
"""
def __init__(self, conll_file):
"""
conll_file - file from which to load the annotations
"""
self.conll_file = conll_file
logger.debug("loading file {}".format(self.conll_file))
self.dic, self.sents = self.load_annots(self.conll_file)
logger.debug("Done loading file")
def load_annots(self, conll_file):
"""
Updates internal state according to file
for ((pred_ind, pred_word), labels) in self.model.predict_sentence(sent):
for (label, prob), word in zip(labels, sent):
"""
cur_ex = []
cur_sent = []
pred_word = ''
ret = defaultdict(lambda: {})
sents = []
# Iterate over lines and populate return dictionary
for line_ind, line in enumerate(open(conll_file)):
if not (line_ind % pow(10,5)):
logger.debug(line_ind)
line = line.strip()
if not line:
if cur_ex:
assert(pred_word != '') # sanity check
cur_sent = " ".join(cur_sent)
                    # Guard against the duplicates bug --
                    # it isn't supposed to happen any more
ret[cur_sent][pred_word] = (((pred_ind, pred_word), cur_ex),)
sents.append(cur_sent)
cur_ex = []
pred_ind = -1
cur_sent = []
else:
word_ind, word, pred_ind, label, prob = line.split('\t')
prob = float(prob)
word_ind = int(word_ind)
pred_ind = int(pred_ind)
cur_sent.append(word)
if word_ind == pred_ind:
pred_word = word
cur_ex.append((label, prob))
return (self.flatten_ret_dic(ret, 1),
list(set(sents)))
def flatten_ret_dic(self, dic, num_of_dups):
"""
Given a dictionary of dictionaries, flatten it
to a dictionary of lists
"""
ret = defaultdict(lambda: [])
        for sent, preds_dic in dic.items():
            for pred, exs in preds_dic.items():
ret[sent].extend(exs * num_of_dups)
return ret
def predict_sentence(self, sent):
"""
Return a pre-predicted answer
"""
return self.dic[" ".join(sent)]
example_sent = "The Economist is an English language weekly magazine format newspaper owned by the Economist Group\
and edited at offices in London."
if __name__ == "__main__":
args = docopt(__doc__)
logger.debug(args)
model_dir = args["--model"]
input_fn = args["--in"]
output_fn = args["--out"]
tokenize = args["--tokenize"]
beam = int(args['--beam'])
if model_dir:
# If model dir is given, use it to load the model
model = load_pretrained_rnn(model_dir)
sents = [line.strip() for line in open(input_fn) if line.strip()]
else:
# If no model_dir is given, assume input file already contains annotations and probs
model = Mock_model(input_fn)
sents = model.sents
oie = Trained_oie(model,
tokenize = tokenize)
logger.debug("generating output for {} sentences".format(len(sents)))
# Iterate over all raw sentences
if args["--conll"]:
with open(output_fn, 'w') as fout:
fout.write('\n\n'.join([oie.conll_with_prob(sent.strip())
for sent in sents]))
else:
with open(output_fn, 'w') as fout:
fout.write('\n'.join([str(ex)
for sent in sents
for ex in oie.parse_sent(sent.strip(), beam=beam)
]))
|
import pathlib
import numpy as np
from math import log
from pudzu.sandbox.wikipage import *
from pudzu.sandbox.bamboo import *
# wikifame scraping (messy; also requires manual cleanup and verification at the moment)
def extract_births(year):
DATE_PATTERN = re.compile(r"^[_ 0-9]*(January|February|March|April|May|June|July|August|September|October|November|December)[ 0-9]*$")
wp = WikiPage.from_year(year)
h2_start = find_tags(wp.bs4, all_(string='Births'), parents_("h2"))
if len(h2_start) == 0: return pd.DataFrame(columns=("link", "year"))
h2_end = find_tags(h2_start, next_siblings_('h2', limit=1))
links = find_tags(wp.bs4, select_("#mw-content-text ul li"),
all_("a", href=re.compile(r"^/wiki"), title=re_exclude(DATE_PATTERN), limit=1),
exclude_(h2_end, is_after),
exclude_(h2_start, is_before))
links = remove_duplicate_tags(links)
return pd.DataFrame([{ "year": year, "link": WikiPage.title_from_url(a['href'])} for a in links])
def extract_people(title, section=None):
wp = WikiPage(title)
if section:
h2_start = find_tags(wp.bs4, all_(string=section), parents_("h2"))
h2_end = find_tags(h2_start, next_siblings_('h2', limit=1))
links = find_tags(wp.bs4, select_("#mw-content-text ul li"),
all_("a", href=re.compile(r"^/wiki"), title=re_exclude("(List|American)"), limit=1),
exclude_(h2_end, is_after),
exclude_(h2_start, is_before))
else:
links = find_tags(wp.bs4, select_("#mw-content-text ul li"),
all_("a", href=re.compile(r"^/wiki"), title=re_exclude("(List|American)"), limit=1))
links = remove_duplicate_tags(links)
return pd.DataFrame([{ "title": title, "link": WikiPage.title_from_url(a['href'])} for a in links])
harmonic_mean = optional_import_from('statistics', 'harmonic_mean', lambda data: len(data) / sum(1/x for x in data))
LIMITS = { 'length': 1500000, 'pageviews': 1000000, 'revisions': 25000 }
def score_people(df, lang="en", translate_from=None):
df = df.assign_rows(progressbar = True,
wp = ignoring_exceptions((lambda d: WikiPage(d['link'], lang=lang)) if translate_from is None else (lambda d: WikiPage(d['link'], lang=translate_from).to_wikidata().to_wikipedia(lang=lang))))
df = df.assign_rows(progressbar = True,
title=lambda d: '?' if d['wp'] is None else d['wp'].title,
length=lambda d: 1 if d['wp'] is None else len(d['wp'].response.content),
pageviews=lambda d: 1 if d['wp'] is None else int(np.median(([pv['views'] for pv in d['wp'].pageviews("20190101", "20200101")]+[0]*12)[:12])),
revisions=lambda d: 1 if d['wp'] is None else d['wp'].revision_count(),
disambiguation=lambda d: d['wp'] and bool(d['wp'].bs4.find(alt="Disambiguation icon")))
df = df.assign_rows(score=lambda d: harmonic_mean([log(max(d[k], 2)) / log(max_value) for k,max_value in LIMITS.items()]))
return df.loc[:,df.columns != 'wp'].sort_values("score", ascending=False)
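# Hedged worked example of the scoring formula above (numbers are hypothetical):
# for length=150000, pageviews=1000, revisions=250 the normalised components are
# log(150000)/log(1500000) ~= 0.84, log(1000)/log(1000000) = 0.50 and
# log(250)/log(25000) ~= 0.55, whose harmonic mean gives a score of roughly 0.60.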
def score_by_name(names, *args, **kwargs):
df = pd.DataFrame([{'link': name} for name in make_iterable(names)])
return score_people(df, *args, **kwargs)
def score_births(years):
dfs = [score_people(extract_births(year)) for year in tqdm.tqdm(years)]
df = pd.concat(dfs, ignore_index=True).sort_values('score', ascending=False)
df.to_csv("datasets/wikibirths/en/{}-{}.csv".format(min(years), max(years)), index=False, encoding="utf-8")
return df
def score_births_by_decade(decades):
for d in tqdm.tqdm(decades):
score_births(make_iterable(range(d*10,d*10+10)))
def rescore_decades(decades, langs=["de", "es", "fr", "ja", "ru", "zh"]):
for d in tqdm.tqdm(make_iterable(decades)):
df = pd.read_csv("datasets/wikibirths/en/{d}0-{d}9.csv".format(d=d))
for lang in tqdm.tqdm(make_iterable(langs)):
lpath = pathlib.Path("datasets/wikibirths/{l}/{d}0-{d}9.csv".format(l=lang, d=d))
if not lpath.parent.exists(): lpath.parent.mkdir()
ldf = score_people(df, lang=lang, translate_from="en").sort_values('score', ascending=False)
ldf.to_csv(str(lpath), index=False, encoding="utf-8")
def load_decades(decades=range(100,190), lang="en"):
return pd.concat([pd.read_csv("datasets/wikibirths/{l}/{d}0-{d}9.csv".format(l=lang, d=d)) for d in make_iterable(decades)], ignore_index=True)
def normalise_scores(df, using=None):
if using is None: using = df
limits = { k : using[k].max() for k in LIMITS.keys() }
return df.assign_rows(score=lambda d: harmonic_mean([log(max(d[k], 2)) / log(max_value) for k,max_value in limits.items()]))
def combine_scores(decades=range(100,190), langs=["en", "de", "es", "fr", "ja", "ru", "zh"]):
dfs = [load_decades(decades, lang) for lang in tqdm.tqdm(langs)]
dfs = [df.groupby('link').first() for df in dfs]
df = normalise_scores(sum(df[['length', 'pageviews']] for df in dfs)) # , 'revisions'
return pd.concat([df, dfs[0][['year', 'title']]], axis=1).sort_values("score", ascending=False)
def normalise_and_combine_scores(decades=range(100,190), langs=["en", "de", "es", "fr", "ja", "ru", "zh"]):
dfs = [normalise_scores(load_decades(decades, lang)) for lang in tqdm.tqdm(langs)]
dfs = [df.groupby('link').first() for df in dfs]
df = sum(df[['score']] for df in dfs) / len(langs)
df = df.sort_values('score', ascending=False)
return pd.concat([df, dfs[0][['year', 'title']]], axis=1).sort_values("score", ascending=False)
def score_and_normalise_by_name(names, langs=["en", "de", "es", "fr", "ja", "ru", "zh"]):
dfs = [normalise_scores(score_by_name(names, lang=lang, translate_from="en"), using=load_decades(range(100,190), lang=lang)) for lang in tqdm.tqdm(langs)]
dfs = [df.groupby('link').first() for df in dfs]
df = sum(df[['score']] for df in dfs) / len(langs)
df = df.sort_values('score', ascending=False)
return pd.concat([df, dfs[0][['year', 'title']]], axis=1).sort_values("score", ascending=False)
def top_per_x(df, x=10):
return df.reset_index(drop=True).groupby_rows(lambda r: r['year'] // x).first()
# extract countries of birth
def write_cob(df, file, append=False, **kwargs):
with open(file, "w" if not append else "a", encoding="utf-8") as f:
if not append: print("link,score,country", file=f)
for i in tqdm.tqdm(range(len(df))):
wd = WikiPage(df.iloc[i].link).to_wikidata()
cobs = wd.countries_of_birth
if not cobs: print("MISSING COB: {} ({})".format(df.iloc[i].title, i))
print('"{}",{},"{}"'.format(df.iloc[i].title, df.iloc[i].score, '|'.join(cob.name() for cob in cobs)), file=f)
f.flush()
# extract us state of birth (for dead people only)
def is_us_state(wd):
return any(x.get('id') in ["Q35657", 'Q1352230', 'Q783733'] for x in wd.property_values("P31", convert=False))
def state_of_place(wd):
carry_on = True
if is_us_state(wd): return wd.name()
for region in wd.property_values("P131"):
state = state_of_place(region)
if state: return state
elif region.id == "Q30": carry_on = False
return None if carry_on else wd.name()
def state_of_birth_or_death(name, living=False, birth=True):
american = False
wd = WikiPage(name).to_wikidata()
if living or wd.property_values(wd.DATE_OF_DEATH, convert=False):
for pob in (wd.places_of_birth if birth else wd.places_of_death):
for cob in pob.property_values(wd.COUNTRY, lambda qs: wd.END_TIME not in qs, convert=False):
if cob.get('id') == 'Q30':
american = True
state = state_of_place(pob)
if state: return state
return "US" if american else None
def write_states(df, file, append=False, **kwargs):
with open(file, "w" if not append else "a", encoding="utf-8") as f:
if not append: print("link,score,state", file=f)
for i in tqdm.tqdm(range(len(df))):
state = state_of_birth_or_death(df.iloc[i]['link'], **kwargs)
if state:
print("{},{},{}".format(df.iloc[i]['title'].replace(',',''),df.iloc[i]['score'],state), file=f)
f.flush()
# some more messing about
def extract_wikimarkup(markup, section=None):
if section:
r = re.compile(f"=+{re.escape(section)}=+")
m = r.search(markup)
r2 = re.compile("[^=]"+m.group(0).replace(section, "[^=]+") + "[^=]")
m2 = r2.search(markup, m.end())
markup = markup[m.start():m2 and m2.start()]
    links = re.findall(r"\[\[[^\]]*\]\]", markup)
def delink(s):
s = s.strip("[]")
if "|" in s:
link, name = s.split("|")
if name == "":
                name = re.sub(r" \([^)]+\)", "", link)
else:
name, link = s, s
return {"name": name, "link": link}
return pd.DataFrame(tmap(delink, links))
def score_wikimarkup(df):
df = df.assign_rows(progressbar = True,
wp = ignoring_exceptions((lambda d: WikiPage(d['link'], lang="en"))))
df = df.assign_rows(progressbar = True,
wd = ignoring_exceptions((lambda d: d['wp'].to_wikidata())))
df = df.assign_rows(progressbar = True,
title=lambda d: '?' if d['wp'] is None else d['wp'].title,
length=lambda d: 1 if d['wp'] is None else len(d['wp'].response.content),
                         pageviews=lambda d: 1 if d['wp'] is None else int(np.median(([pv['views'] for pv in d['wp'].pageviews("20190101", "20200101")]+[0]*12)[:12])),
disambiguation=lambda d: d['wp'] and bool(d['wp'].bs4.find(alt="Disambiguation icon")))
return df.sort_values("pageviews", ascending=False)
def birth_state(wd):
american = False
for pob in wd.places_of_birth:
for cob in pob.property_values(wd.COUNTRY, lambda qs: wd.END_TIME not in qs, convert=False):
if cob.get('id') == 'Q30':
american = True
return state_of_place(pob)
return "US" if american else None
|
from __future__ import annotations
import socket
import sys
from io import StringIO
from typing import Iterable
from .._exceptions import MarkerError, OfflineContractError, SilentContractError
from .._types import ExceptionType
KNOWN_MARKERS = frozenset({
# io markers
'io',
'network',
'read',
'stderr',
'stdin',
'stdout',
'syscall',
'write',
# non-io markers
'global',
'import',
'random',
'time',
# aliases
'input', # stdin
'nonlocal', # global
'print', # stdout
'socket', # network
})
NON_IO_MARKERS = frozenset({
'global',
'nonlocal',
'import',
'random',
'time',
})
class PatchedStringIO(StringIO):
__slots__ = ('exception',)
def __init__(self, exception: ExceptionType) -> None:
self.exception = exception
def write(self, *args, **kwargs):
raise self.exception
class PatchedSocket:
__slots__ = ('exception',)
def __init__(self, exception: ExceptionType) -> None:
self.exception = exception
def __call__(self, *args, **kwargs):
raise self.exception
class HasPatcher:
__slots__ = (
'markers',
'message',
'exception',
'true_socket',
'true_stdout',
'true_stderr',
)
markers: frozenset[str]
def __init__(
self,
markers: Iterable[str],
message: str | None = None,
exception: ExceptionType | None = None,
) -> None:
self.markers = frozenset(markers)
self.message = message
self.exception = exception or MarkerError
if message and isinstance(self.exception, type):
self.exception = self.exception(message)
@property
def exception_type(self) -> type[Exception]:
if isinstance(self.exception, Exception):
return type(self.exception)
return self.exception
@property
def has_network(self) -> bool:
if 'io' in self.markers:
return True
if 'network' in self.markers:
return True
if 'socket' in self.markers:
return True
return False
@property
def has_io(self) -> bool:
return bool(self.markers - NON_IO_MARKERS)
@property
def has_stdout(self) -> bool:
if 'io' in self.markers:
return True
if 'print' in self.markers:
return True
if 'stdout' in self.markers:
return True
return False
@property
def has_stderr(self) -> bool:
if 'io' in self.markers:
return True
return 'stderr' in self.markers
@property
def has_global(self) -> bool:
if 'global' in self.markers:
return True
if 'nonlocal' in self.markers:
return True
return False
@property
def has_read(self) -> bool:
if 'io' in self.markers:
return True
return 'read' in self.markers
@property
def has_stdin(self) -> bool:
if 'io' in self.markers:
return True
if 'input' in self.markers:
return True
if 'stdin' in self.markers:
return True
return False
@property
def has_write(self) -> bool:
if 'io' in self.markers:
return True
return 'write' in self.markers
# patching
def patch(self) -> None:
if not self.has_network:
self.true_socket = socket.socket
socket.socket = PatchedSocket( # type: ignore[assignment,misc]
exception=self._get_exception(OfflineContractError),
)
if not self.has_stdout:
self.true_stdout = sys.stdout
sys.stdout = PatchedStringIO(
exception=self._get_exception(SilentContractError),
)
if not self.has_stderr:
self.true_stderr = sys.stderr
sys.stderr = PatchedStringIO(
exception=self._get_exception(SilentContractError),
)
def unpatch(self) -> None:
if not self.has_network:
socket.socket = self.true_socket # type: ignore[misc]
if not self.has_stdout:
sys.stdout = self.true_stdout
if not self.has_stderr:
sys.stderr = self.true_stderr
def _get_exception(self, default: type[Exception]) -> ExceptionType:
if self.exception_type is MarkerError:
if self.message is None:
return default
return default(self.message)
return self.exception
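# Hedged usage sketch (not taken from this package's docs): patch I/O for a block
# of code whose only declared marker is 'read', then always restore the originals.
#     patcher = HasPatcher(markers=['read'])
#     patcher.patch()
#     try:
#         ...  # print() here would raise SilentContractError, sockets OfflineContractError
#     finally:
#         patcher.unpatch()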
|
from django.db import models
from django.db.models import PROTECT
from moneybird_accounting.models import MoneybirdReadWriteResourceModel
class LedgerAccount(MoneybirdReadWriteResourceModel):
class Meta:
verbose_name = "ledger account"
verbose_name_plural = "ledger accounts"
moneybird_resource_path_name = "ledger_accounts"
moneybird_resource_name = "ledger_account"
moneybird_data_fields = [
"name",
"account_type",
"account_id",
"parent_id",
] # TODO add allowed_document_types and limit foreign key choices
name = models.CharField(blank=True, null=True, max_length=100)
ACCOUNT_TYPE_NON_CURRENT_ASSETS = "non_current_assets"
ACCOUNT_TYPE_CURRENT_ASSETS = "current_assets"
ACCOUNT_TYPE_EQUITY = "equity"
ACCOUNT_TYPE_PROVISIONS = "provisions"
ACCOUNT_TYPE_NON_CURRENT_LIABILITIES = "non_current_liabilities"
ACCOUNT_TYPE_CURRENT_LIABILITIES = "current_liabilities"
ACCOUNT_TYPE_REVENUE = "revenue"
ACCOUNT_TYPE_DIRECT_COSTS = "direct_costs"
ACCOUNT_TYPE_EXPENSES = "expenses"
ACCOUNT_TYPE_OTHER_INCOME_EXPENSES = "other_income_expenses"
ACCOUNT_TYPE_CHOICES = (
(ACCOUNT_TYPE_NON_CURRENT_ASSETS, "Non-current assets"),
        (ACCOUNT_TYPE_CURRENT_ASSETS, "Current assets"),
(ACCOUNT_TYPE_EQUITY, "Equity"),
(ACCOUNT_TYPE_PROVISIONS, "Provisions"),
(ACCOUNT_TYPE_NON_CURRENT_LIABILITIES, "Non-current liabilities"),
(ACCOUNT_TYPE_CURRENT_LIABILITIES, "Current liabilities"),
(ACCOUNT_TYPE_REVENUE, "Revenue"),
(ACCOUNT_TYPE_DIRECT_COSTS, "Direct costs"),
(ACCOUNT_TYPE_EXPENSES, "Expenses"),
(ACCOUNT_TYPE_OTHER_INCOME_EXPENSES, "Other income or expenses"),
)
account_type = models.CharField(blank=True, null=True, choices=ACCOUNT_TYPE_CHOICES, max_length=50)
account_id = models.CharField(blank=True, null=True, max_length=10)
parent = models.ForeignKey("LedgerAccount", blank=True, null=True, on_delete=PROTECT, db_constraint=False)
def __str__(self):
return self.name
|
import random
import server.tca.cellaut as ca
class TCARule(ca.Rule):
vmax = 3
random_slow_p = 0.3
background = 0
change_lane_p = 0.2
class StatesRule(TCARule):
"""Rules for calculating new state of non-empty cells"""
def populate(self, map, address):
self.address = address
self.state = map.get(address)
self.front_gap = 0
self.street_id = address[0]
street = map.streets[address[0]]
self.consumer = street.consumer
self.generator = street.generator
self.street_length = street.height
self.street_front_id = street.front_id
for i, cell in enumerate(map.states(address, self.vmax)[0]):
if address[2] + i + 1 == street.height:
if street.light.color <= 0:
break
if cell == self.background:
self.front_gap += 1
else:
break
self.right_change_allowed = False
self.left_change_allowed = False
# verify if right cell is empty
if map.states(address, 1)[1][0] == self.background:
self.right_back_gap = 0
self.right_car_speed = 0
# verify if car speed < gap
for cell in map.states(address, self.vmax)[2]:
if cell == self.background:
self.right_back_gap += 1
elif cell is None:
break
else:
self.right_car_speed = cell.speed
break
            # Verify if the car is allowed to change lane
if self.right_car_speed < self.right_back_gap:
self.right_change_allowed = True
# verify if left cell is empty
if map.states(address, 1)[5][0] == self.background:
self.left_back_gap = 0
self.left_car_speed = 0
# verify if car speed < gap
for cell in map.states(address, self.vmax)[4]:
if cell == self.background:
self.left_back_gap += 1
elif cell is None:
break
else:
self.left_car_speed = cell.speed
break
            # Verify if the car is allowed to change lane
if self.left_car_speed < self.left_back_gap:
self.left_change_allowed = True
# can't change lane outside street width (intersection cases)
if address[1] + 1 >= map.streets[address[0]].width:
self.right_change_allowed = False
if address[1] - 1 < 0:
self.left_change_allowed = False
def apply(self):
# if background, no calculations needed
if self.state == self.background:
return self.background
if self.state.street != self.street_id:
return
if self.consumer and self.address[2] + 1 >= self.street_length:
return self.background
# if self.generator and self.address[2] == 0:
# if random.random() > 0.5:
# state = Car(street=self.street_id)
# state.next_street = self.street_front_id
# return state
self.state.change_lane_intention = 0
car = self.state.clone()
car.change_lane_intention = 0
# Nasch acceleration rule
car.speed = min(car.speed + 1, self.vmax)
# Nasch gap consideration rule
car.speed = min(car.speed, self.front_gap)
# Nasch randomly slowing of vehicle
if random.random() < car.probability['random_slow_p']:
car.speed = max(car.speed - 1, 0)
# TCA_GT changing lane intention
if random.random() < car.probability['change_lane_p']:
# Right allowed
if self.right_change_allowed and not self.left_change_allowed:
car.change_lane_intention = 1
# Left allowed
elif self.left_change_allowed and not self.right_change_allowed:
car.change_lane_intention = -1
# Both allowed
elif self.right_change_allowed and self.left_change_allowed:
if random.random() < 0.5:
car.change_lane_intention = 1
else:
car.change_lane_intention = -1
else:
car.change_lane_intention = 0
return car
class MovementRule(TCARule):
"""Rules for 'moving the cars' to their new positions"""
def populate(self, map, address):
self.state = map.get(address)
self.back_gap = 0
self.back_car = self.background
self.street_id = address[0]
self.front_street_id = map.streets[address[0]].front_id
self.address = address
for cell in map.states(address, self.vmax)[3]:
if cell == self.background:
self.back_gap += 1
else:
self.back_car = cell
break
self.left_car = self.background
self.right_car = self.background
# verify right lane
if map.states(address, 1)[1][0] != self.background and map.states(address, 1)[1][0] is not None:
if map.states(address, 1)[1][0].change_lane_intention == -1:
self.right_car = map.states(address, 1)[1][0]
# verify left lane
if map.states(address, 1)[5][0] != self.background and map.states(address, 1)[5][0] is not None:
if map.states(address, 1)[5][0].change_lane_intention == 1:
self.left_car = map.states(address, 1)[5][0]
def apply(self):
# if car is stopped on cell
if self.state != self.background and self.state.speed == 0 and self.state.change_lane_intention == 0:
return self.state
# if lane change allowed
if self.left_car != self.background and self.left_car is not None:
if self.left_car.street == self.street_id:
return self.left_car
if self.right_car != self.background and self.right_car is not None:
if self.right_car.street == self.street_id:
return self.right_car
# if back car will land on cell
if self.back_car != self.background and self.back_car is not None:
if self.back_car.speed == self.back_gap + 1 and self.back_car.change_lane_intention == 0:
if self.back_car.street == self.street_id:
return self.back_car
if self.back_car.next_street == self.street_id:
self.back_car.street = self.street_id
self.back_car.next_street = self.front_street_id
return self.back_car
# return background otherwise
return self.background |
from django.conf.urls import url
from django.views.decorators.csrf import csrf_exempt
from rest_framework_jwt.views import obtain_jwt_token
from rest_framework_jwt.views import refresh_jwt_token
from .views import PrivateGraphQLView
urlpatterns = [
url(r'^graphql', csrf_exempt(PrivateGraphQLView.as_view(graphiql=True))),
url(r'^refresh-token', refresh_jwt_token),
url(r'^login', obtain_jwt_token),
]
|
import torch
import gpytorch
from config.modelconfig import *
from tools.processdata import read_data, get_data, res_data, dwt_data_ca, dwt_data_cd
def train(config, is_res):
all_data = read_data(config.data_path)
if is_res:
train_x, train_y, test_x, test_y, draw_test_x, start_data = res_data(all_data, config.scale_train_test)
else:
train_x, train_y, test_x, test_y, draw_test_x = get_data(all_data, config.scale_train_test)
likelihood = gpytorch.likelihoods.GaussianLikelihood()
try:
model = config.get_model(train_x, train_y, likelihood)
except:
try:
model = config.get_model(train_x, train_y, likelihood, num_dims=1)
except:
model = config.get_model(train_x, train_y, likelihood, num_mixtures=50)
model.train()
likelihood.train()
for step in config.train_step:
optimizer = torch.optim.Adam(model.parameters(), lr=config.learning_rate)
config.learning_rate /= 10
mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, model)
for i in range(step):
# Zero gradients from previous iteration
optimizer.zero_grad()
# Output from model
output = model(train_x)
# Calc loss and backprop gradients
loss = -mll(output, train_y)
loss.backward()
if (i+1) % 100 == 0:
print('Iter %d/%d - Loss: %.3f' % (i + 1, step, loss.item()))
optimizer.step()
if is_res:
return model, likelihood, train_x, train_y, test_x, test_y, draw_test_x, start_data
else:
return model, likelihood, train_x, train_y, test_x, test_y, draw_test_x
def train_dwt_a(config):
all_data = read_data(config.data_path)
train_x, train_y, test_x, test_y, draw_test_x = dwt_data_ca(all_data, config.scale_train_test)
likelihood = gpytorch.likelihoods.GaussianLikelihood()
try:
model = config.get_model(train_x, train_y, likelihood)
except:
try:
model = config.get_model(train_x, train_y, likelihood, num_dims=1)
except:
model = config.get_model(train_x, train_y, likelihood, num_mixtures=50)
model.train()
likelihood.train()
for step in config.train_step:
optimizer = torch.optim.Adam(model.parameters(), lr=config.learning_rate)
config.learning_rate /= 10
mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, model)
for i in range(step):
# Zero gradients from previous iteration
optimizer.zero_grad()
# Output from model
output = model(train_x)
# Calc loss and backprop gradients
loss = -mll(output, train_y)
loss.backward()
if (i+1) % 100 == 0:
print('Iter %d/%d - Loss: %.3f' % (i + 1, step, loss.item()))
optimizer.step()
return model, likelihood, train_x, train_y, test_x, test_y, draw_test_x
def train_dwt_d(config):
all_data = read_data(config.data_path)
train_x, train_y, test_x, test_y, draw_test_x = dwt_data_cd(all_data, config.scale_train_test)
likelihood = gpytorch.likelihoods.GaussianLikelihood()
try:
model = config.get_model(train_x, train_y, likelihood)
except:
try:
model = config.get_model(train_x, train_y, likelihood, num_dims=1)
except:
model = config.get_model(train_x, train_y, likelihood, num_mixtures=50)
model.train()
likelihood.train()
for step in config.train_step:
optimizer = torch.optim.Adam(model.parameters(), lr=config.learning_rate)
config.learning_rate /= 10
mll = gpytorch.mlls.ExactMarginalLogLikelihood(likelihood, model)
for i in range(step):
# Zero gradients from previous iteration
optimizer.zero_grad()
# Output from model
output = model(train_x)
# Calc loss and backprop gradients
loss = -mll(output, train_y)
loss.backward()
if (i+1) % 100 == 0:
print('Iter %d/%d - Loss: %.3f' % (i + 1, step, loss.item()))
optimizer.step()
return model, likelihood, train_x, train_y, test_x, test_y, draw_test_x
# if __name__ == '__main__':
# config = RBFConfig()
# train(config)
|
from math import ceil
import redis
import json
import requests
import pymysql
from flask import g
from steem import Steem
from steem.amount import Amount
from pymongo import MongoClient
from dateutil.parser import parse
from datetime import datetime
from . import settings
_steem_connection = None
_mongo_connection = None
_redis_connection = None
def connect_db():
conn = pymysql.connect(*settings.DB_INFO, charset='utf8')
conn.cursorclass = pymysql.cursors.DictCursor
return conn
def get_db(new=False):
"""Opens a new database connection if there is none yet for the
current application context.
"""
if new:
return connect_db()
if not hasattr(g, 'mysql_db'):
g.mysql_db = connect_db()
return g.mysql_db
def get_steem_conn():
global _steem_connection
if not _steem_connection:
_steem_connection = Steem(nodes=settings.NODES)
return _steem_connection
def get_mongo_conn():
global _mongo_connection
if not _mongo_connection:
_mongo_connection = MongoClient('mongo1.steemdata.com',
username='steemit',
password='steemit',
authSource='SteemData',
authMechanism='SCRAM-SHA-1')
return _mongo_connection
def get_redis_conn():
global _redis_connection
if not _redis_connection:
_redis_connection = redis.StrictRedis(host='localhost', port=6379, db=0)
return _redis_connection
def prepare_witness_leaderboard():
s = get_steem_conn()
r = get_redis_conn()
witness_list = []
rank = 0
for witness in s.get_witnesses_by_vote("", 400):
active = True
if witness.get("signing_key") == "STM1111111111111111111111111111111114T1Anm":
active = False
price_uptodate = True
last_price_update = witness.get("last_sbd_exchange_update")
if last_price_update:
last_price_update = parse(last_price_update)
if (datetime.utcnow() - last_price_update).total_seconds() / 3600 > 12:
price_uptodate = False
rank += 1
witness.update({
"rank": rank,
"votes_in_mv": int(int(witness["votes"]) / 1000000000000),
"price_uptodate": price_uptodate,
"active": active,
})
price = "-"
if witness.get("sbd_exchange_rate", {}).get("base"):
price_in_float = Amount(witness.get("sbd_exchange_rate").get("base")).amount
price = "$%s" % price_in_float
witness.update({
"price": price,
})
witness_list.append(witness)
r.set("witnesses", json.dumps(witness_list))
def get_witness_list():
r = get_redis_conn()
return json.loads(r.get("witnesses"))
class Pagination(object):
def __init__(self, page, per_page, total_count):
self.page = page + 1
self.per_page = per_page
self.total_count = total_count
@property
def pages(self):
return int(ceil(self.total_count / float(self.per_page)))
@property
def has_prev(self):
return self.page > 1
@property
def has_next(self):
return self.page < self.pages
def iter_pages(self, left_edge=2, left_current=2,
right_current=5, right_edge=2):
last = 0
for num in range(1, self.pages + 1):
if num <= left_edge or \
(num > self.page - left_current - 1 and
num < self.page + right_current) or \
num > self.pages - right_edge:
if last + 1 != num:
yield None
yield num
last = num
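# Hedged usage sketch (values are hypothetical): for 200 rows, 10 per page and
# zero-based page 0 (stored internally as page 1), iter_pages() yields
# 1, 2, 3, 4, 5, None, 19, 20, where None marks a gap to render as an ellipsis.
#     pager = Pagination(page=0, per_page=10, total_count=200)
#     pages = list(pager.iter_pages())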
class Coins(object):
def request_coins(self, name):
base = "https://min-api.cryptocompare.com/data/price?fsym="
compare = "&tsyms=BTC,USD,EUR,ETH,LTC"
url = base+name+compare
c = (requests.get(url)).text
return json.loads(c)
def get_coin_price(self, name, price):
if name == "STEEM":
prices = self.request_coins("STEEM")
elif name == "SBD":
prices = self.request_coins("SBD")
return "%.5f" % prices[price]
def get_payout_from_rshares(rshares, reward_balance,
recent_claims, base_price):
fund_per_share = Amount(reward_balance).amount / float(recent_claims)
payout = float(rshares) * fund_per_share * Amount(base_price).amount
return payout
def vests_to_sp(vests, info):
steem_per_mvests = (
Amount(info["total_vesting_fund_steem"]).amount /
(Amount(info["total_vesting_shares"]).amount / 1e6)
)
return vests / 1e6 * steem_per_mvests
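# Hedged worked example (all figures hypothetical, only to show the arithmetic):
# with reward_balance "800000.000 STEEM", recent_claims 4e14 and base_price
# "1.000 SBD", fund_per_share = 800000 / 4e14 = 2e-9, so 1e9 rshares pay out
# roughly 2e-9 * 1e9 * 1.0 = 2.0 in get_payout_from_rshares.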
def get_curation_rewards(account, info, checkpoint_val=100):
total_reward_in_rshares = 0
total_reward_in_sp = 0
checkpoint = int(checkpoint_val)
increase_per_checkpoint = int(checkpoint_val)
checkpoints = []
history = account.history(filter_by=["curation_reward"])
for curation_reward in history:
curation_reward_rshares = Amount(curation_reward["reward"]).amount
total_reward_in_rshares += curation_reward_rshares
total_reward_in_sp += vests_to_sp(curation_reward_rshares, info)
if int(total_reward_in_sp) % checkpoint < 25 and \
int(total_reward_in_sp) >= checkpoint:
checkpoints.append({
"timestamp": curation_reward["timestamp"],
"block": curation_reward["block"],
"sub_total": round(total_reward_in_sp, 2),
})
checkpoint += increase_per_checkpoint
return total_reward_in_sp, total_reward_in_rshares, checkpoints
def hbytes(num):
for x in ['bytes', 'KB', 'MB', 'GB']:
if num < 1024.0:
return "%3.1f%s" % (num, x)
num /= 1024.0
return "%3.1f%s" % (num, 'TB')
op_types = [
"vote",
"comment",
"custom_json",
"transfer",
"delegate_vesting_shares",
"claim_reward_balance",
"account_witness_vote",
"author_reward",
"curation_reward",
"return_vesting_delegation",
"feed_publish",
"delete_comment",
"account_create_with_delegation",
]
|
##############################################################################
# Copyright (c) 2017 ZTE Corp and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
import pytest
from qtip.ansible_library.plugins.action import collect
@pytest.fixture
def string():
return """Lorem ipsum dolor sit amet,
consectetur adipiscing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
"""
@pytest.mark.parametrize("patterns,expected", [
('not exist', {}),
    (r'Lorem (\S+)', {}),
    (r'nisi ut (?P<name>\S+)', {'name': ['aliquip']}),
    (r'in\s(?P<in>\w+)', {'in': ['reprehenderit', 'voluptate', 'culpa']})
])
def test_collect(patterns, string, expected):
assert collect.collect(patterns, string) == expected
|
# This sample tests various assignment scenarios where
# there is an expected type, so bidirectional type
# inference is used.
# pyright: strict
from typing import Dict, Callable, Sequence, Tuple
AAA = float
BBB = int
CCC = str
DDD = str
AAATuple = Tuple[AAA, BBB, Callable[[Sequence[int], AAA], Sequence[float]]]
def foo():
var1: Dict[str, Tuple[AAA, BBB, CCC, DDD]] = {}
var2: Dict[str, AAATuple] = {}
for k, (var3, var4, _, _) in var1.items():
var2[k] = (var3, var4, lambda var5, var6: [v * var6 for v in var5])
|
##! python 3
'''
WHAT IS IT APP LAUNCHER developed by Mr Steven J walden
Sept. 2020
SAMROIYOD, PRACHUAP KIRI KHAN, THAILAND
[See License.txt file]
'''
#Gui's and Sprite classes for game
import sys
from PyQt5 import QtWidgets, QtGui, QtCore
import pygame as pg
from methods import *
class StartUpGui(QtWidgets.QWidget):
def __init__(self, parent=None):
super(StartUpGui, self).__init__(parent)
self.initUI()
def initUI(self):
#Set up GUI
self.resize(310, 208)
self.setMinimumSize(310, 208)
self.setMaximumSize(310, 208)
self.setWindowIcon(QtGui.QIcon("img/Ep_window_icon.ico"))
self.setWindowTitle("What is it?")
self.add_buttons()
self.tab_order()
def add_buttons(self):
bfont = QtGui.QFont()
bfont.setPointSize(14)
bfont.setBold(True)
bfont.setItalic(True)
self.EasyModeButton = QtWidgets.QPushButton(self)
self.EasyModeButton.setGeometry(10, 28, 90, 60)
self.EasyModeButton.setCheckable(True)
self.EasyModeButton.setChecked(True)
self.EasyModeButton.setFont(bfont)
self.EasyModeButton.setText("Easy\nMode")
self.MediumModeButton = QtWidgets.QPushButton(self)
self.MediumModeButton.setGeometry(110, 28, 90, 60)
self.MediumModeButton.setCheckable(True)
self.MediumModeButton.setFont(bfont)
self.MediumModeButton.setText("Medium\nMode")
self.HardModeButton = QtWidgets.QPushButton(self)
self.HardModeButton.setGeometry(210, 28, 90, 60)
self.HardModeButton.setCheckable(True)
self.HardModeButton.setFont(bfont)
self.HardModeButton.setText("Hard\nMode")
self.LoadImagesButton = QtWidgets.QPushButton(self)
self.LoadImagesButton.setGeometry(10, 98, 140, 60)
self.LoadImagesButton.setFont(bfont)
self.LoadImagesButton.setText("Load Images")
self.SelectFolderButton = QtWidgets.QPushButton(self)
self.SelectFolderButton.setGeometry(160, 98, 140, 60)
self.SelectFolderButton.setFont(bfont)
self.SelectFolderButton.setText("Select Folder")
        #Button for switching to dark mode
bfont.setPointSize(8)
bfont.setBold(False)
self.DarkModeButton = QtWidgets.QPushButton(self)
self.DarkModeButton.setGeometry(10, 4, 40, 20)
self.DarkModeButton.setFocusPolicy(QtCore.Qt.NoFocus)
self.DarkModeButton.setCheckable(True)
self.DarkModeButton.setFont(bfont)
self.DarkModeButton.setText("Dark")
#Button box setup for OKay and cancel buttons
self.StartGameButtonBox = QtWidgets.QDialogButtonBox(self)
self.StartGameButtonBox.setGeometry(142, 174, 156, 23)
self.StartGameButtonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Close|QtWidgets.QDialogButtonBox.Ok)
def tab_order(self):
self.setTabOrder(self.EasyModeButton, self.MediumModeButton)
self.setTabOrder(self.HardModeButton, self.LoadImagesButton)
self.setTabOrder(self.SelectFolderButton, self.StartGameButtonBox)
class Spritesheet:
def __init__(self, filename):
self.spritesheet = pg.image.load(filename).convert_alpha()
def get_image(self, x, y, width, height):
#Grab an image from the sheet
image = pg.Surface((width, height), pg.SRCALPHA)
image.blit(self.spritesheet, (0,0), (x, y, width, height))
return image
class NumberMobs(pg.sprite.Sprite):
def __init__(self, spritesheet, xpos, ypos, width, height):
pg.sprite.Sprite.__init__(self)
self.sprite_sheet = spritesheet
self.image = self.sprite_sheet.get_image(xpos, ypos, width, height)
self.rect = self.image.get_rect()
self.rect.x = xpos
self.rect.y = ypos
class WrongAnswer(pg.sprite.Sprite):
"""docstring for WrongAnswer"""
def __init__(self):
pg.sprite.Sprite.__init__(self)
self.img_num = 1
self.image = pg.image.load(path.join(IMG_FOLDER, f"Wrong{self.img_num}.png")).convert_alpha()
self.rect = self.image.get_rect()
self.rect.centerx = SCREENWIDTH / 2
self.rect.centery = SCREENHEIGHT / 2
self.frame_rate = 100
self.img_last_update = pg.time.get_ticks()
def update(self):
#Change image
img_now = pg.time.get_ticks()
if img_now - self.img_last_update >= self.frame_rate:
self.img_last_update = img_now
self.img_num += 1
if self.img_num > 13:
self.img_num = 13
self.kill()
self.image = pg.image.load(path.join(IMG_FOLDER, f"Wrong{self.img_num}.png")).convert_alpha()
self.rect = self.image.get_rect()
self.rect.centerx = SCREENWIDTH / 2
self.rect.centery = SCREENHEIGHT / 2
class RightAnswer(WrongAnswer):
"""Inherent class from WrongAnswer"""
def __init__(self, game_mode):
super(RightAnswer, self).__init__()
self.game_mode = game_mode
self.image = pg.image.load(path.join(IMG_FOLDER, f"{self.game_mode}_mode_image.png")).convert()
self.rect = self.image.get_rect()
self.rect.centerx = SCREENWIDTH / 2
self.rect.centery = SCREENHEIGHT / 2
self.frame_rate = 100
self.alpha_num = 255
def update(self):
#Change image alpha
img_now = pg.time.get_ticks()
if img_now - self.img_last_update >= self.frame_rate:
self.img_last_update = img_now
self.alpha_num -= 12
if self.alpha_num < 10:
self.alpha_num = 10
self.kill()
self.image.set_alpha(self.alpha_num)
self.rect = self.image.get_rect()
self.rect.centerx = SCREENWIDTH / 2
self.rect.centery = SCREENHEIGHT / 2
#Run Gui
# if __name__ == '__main__':
# app = QtWidgets.QApplication(sys.argv)
# main_app = StartUpGui()
# main_app.show()
# sys.exit(app.exec_())
|
from unittest.mock import MagicMock
import pytest
from auto_backup.argument_assigner import assign_arguments_to_self
class AssignArgumentsMock(object):
def __init__(self, water, earth="brown", *, wind, fire="purple"):
assign_arguments_to_self()
def values(self):
return self.water, self.earth, self.wind, self.fire
def assign_argument_function(self, a, b):
assign_arguments_to_self()
def no_self_function(a, b):
assign_arguments_to_self()
def test_assigns_all_provided_arguments():
instance = AssignArgumentsMock("blue", "red", wind="green", fire="yellow")
assert instance.values() == ("blue", "red", "green", "yellow")
def test_default_argument_gets_assigned():
instance = AssignArgumentsMock("blue", wind="pink")
assert instance.values() == ("blue", "brown", "pink", "purple")
def test_raises_an_exception_when_there_is_no_self_argument():
with pytest.raises(KeyError):
no_self_function(1, 2)
def test_assign_to_self_argument_of_arbitrary_function():
mock = MagicMock()
assign_argument_function(mock, "avalue", "bvalue")
assert (mock.a, mock.b) == ("avalue", "bvalue")
|
"""
Incorrect hook file
The hook file must be in the same directory as the package. This file
is in a directory named by ``hook_import_dirs``, and we check that it
is NOT found.
"""
|
# Project: MapServer
# Purpose: xUnit style Python mapscript tests of clusterObj
# Author: Seth Girvin
#
# ===========================================================================
# Copyright (c) 2019, Seth Girvin
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# ===========================================================================
import unittest
import mapscript
class ClusterObjTestCase(unittest.TestCase):
def testClusterObjUpdateFromString(self):
"""a cluster can be updated from a string"""
c = mapscript.clusterObj()
c.updateFromString("CLUSTER \n MAXDISTANCE 5 \n REGION \n 'rectangle' END")
assert c.maxdistance == 5
assert c.region == 'rectangle'
s = c.convertToString()
assert s == 'CLUSTER\n MAXDISTANCE 5\n REGION "rectangle"\nEND # CLUSTER\n'
def testClusterObjGetSetFilter(self):
"""a cluster filter can be set and read"""
c = mapscript.clusterObj()
filter = '[attr1] > 5'
c.setFilter(filter)
assert '"{}"'.format(filter) == c.getFilterString()
def testClusterObjGetSetGroup(self):
"""a cluster filter can be set and read"""
c = mapscript.clusterObj()
exp = '100' # TODO not sure what would be a relevant expression here
c.setGroup(exp)
assert '"{}"'.format(exp) == c.getGroupString()
if __name__ == '__main__':
unittest.main()
|
from hashlib import md5
import igql
from .constants import IG_URL
def set_instagram_gis(kwargs, rhx_gis):
if "variables" in kwargs["params"]:
kwargs["headers"]["x-instagram-gis"] = md5(
(f'{rhx_gis}:{kwargs["params"]["variables"]}').encode()
).hexdigest()
return kwargs
def get_shared_data(api, path="instagram"):
response = api.GET(url=f"{IG_URL}/{path}")
response = response.split("window._sharedData = ")[1]
response = response.split(";</script>")[0]
response = igql.InstagramGraphQL.loads(response)
return response
def paginator(api, data, keys, params):
yield data[keys[0]]["edges"]
has_next_page = data[keys[0]]["page_info"]["has_next_page"]
end_cursor = data[keys[0]]["page_info"]["end_cursor"]
while has_next_page:
if isinstance(params["variables"], str):
params["variables"] = igql.InstagramGraphQL.loads(params["variables"])
params["variables"]["after"] = end_cursor
params["variables"] = igql.InstagramGraphQL.dumps(params["variables"])
data = get_value_deep_key(api.query.GET(params=params), keys[1])
has_next_page = data[keys[0]]["page_info"]["has_next_page"]
end_cursor = data[keys[0]]["page_info"]["end_cursor"]
yield data[keys[0]]["edges"]
def get_value_deep_key(data, keys):
for key in keys:
data = data[key]
return data
|
import pandas
from sklearn import linear_model
def predict(csv_path: str, target: str, feature: str, day: int) -> float:
    """Fit a simple linear regression of `target` on `feature` and predict its value at `day`."""
    df = pandas.read_csv(csv_path)
    X = df[[feature]]
    y = df[[target]]
    model = linear_model.LinearRegression()
    model.fit(X, y)
    prediction = float(model.predict([[day]])[0][0])
    print(prediction)
    return prediction
us_cases = int(predict("us.csv", "cases", "Day", 15))
sa_cases = int(predict("SA.csv", "cases", "Day", 16))
uk_cases = int(predict("uk.csv", "cases", "Day", 16))
us_next_week = int(predict("us.csv", "cases", "Day", 23))
sa_next_week = int(predict("SA.csv", "cases", "Day", 23))
uk_next_week = int(predict("uk.csv", "cases", "Day", 23))
|
import os
from loguru import logger
from .common import retrieve_data, retrieve_data_gen, json_dump, mkdir_p
import codecs
def backup_issues(username, password, repo_cwd, repository, repos_template, since=None):
#has_issues_dir = os.path.isdir('{0}/issues/.git'.format(repo_cwd))
# if args.skip_existing and has_issues_dir:
# return
logger.info('Retrieving {0} issues'.format(repository['full_name']))
issue_cwd = os.path.join(repo_cwd, 'issues')
mkdir_p(repo_cwd, issue_cwd)
issues = {}
issues_skipped = 0
issues_skipped_message = ''
_issue_template = '{0}/{1}/issues'.format(repos_template,
repository['full_name'])
should_include_pulls = True
issue_states = ['open', 'closed']
for issue_state in issue_states:
query_args = {
'filter': 'all',
'state': issue_state
}
    # `since` is the timestamp after which everything will be scraped
if since:
query_args['since'] = since
_issues = retrieve_data(username, password,
_issue_template,
query_args=query_args)
for issue in _issues:
# skip pull requests which are also returned as issues
# if retrieving pull requests is requested as well
if 'pull_request' in issue:
issues_skipped += 1
continue
issues[issue['number']] = issue
if issues_skipped:
issues_skipped_message = ' (skipped {0} pull requests)'.format(
issues_skipped)
logger.info('Saving {0} issues to disk{1}'.format(
len(list(issues.keys())), issues_skipped_message))
comments_template = _issue_template + '/{0}/comments'
events_template = _issue_template + '/{0}/events'
for number, issue in list(issues.items()):
#if args.include_issue_comments or args.include_everything:
template = comments_template.format(number)
issues[number]['comment_data'] = retrieve_data(username, password, template)
#if args.include_issue_events or args.include_everything:
template = events_template.format(number)
issues[number]['event_data'] = retrieve_data(username, password, template)
issue_file = '{0}/{1}.json'.format(issue_cwd, number)
with codecs.open(issue_file, 'w', encoding='utf-8') as f:
json_dump(issue, f)
return
def backup_pulls(username, password, repo_cwd, repository, repos_template):
#has_pulls_dir = os.path.isdir('{0}/pulls/.git'.format(repo_cwd))
# if args.skip_existing and has_pulls_dir:
# return
logger.info(f"Retrieving {repository['full_name']} pull requests") # noqa
pulls_cwd = os.path.join(repo_cwd, 'pulls')
mkdir_p(repo_cwd, pulls_cwd)
pulls = {}
pulls_template = f"{repos_template}/{repository['full_name']}/pulls"
logger.info(f"Pull template is {pulls_template}")
query_args = {
'filter': 'all',
'state': 'all',
'sort': 'updated',
'direction': 'desc',
}
# if not args.include_pull_details:
# pull_states = ['open', 'closed']
# for pull_state in pull_states:
# query_args['state'] = pull_state
# _pulls = retrieve_data_gen(args,
# _pulls_template,
# query_args=query_args)
# for pull in _pulls:
# if args.since and pull['updated_at'] < args.since:
# break
# if not args.since or pull['updated_at'] >= args.since:
# pulls[pull['number']] = pull
# else:
_pulls = retrieve_data_gen(username, password,
pulls_template,
query_args=query_args)
for pull in _pulls:
# if args.since and pull['updated_at'] < args.since:
# break
# if not args.since or pull['updated_at'] >= args.since:
pulls[pull['number']] = retrieve_data(
username, password,
pulls_template + '/{}'.format(pull['number']),
single_request=True
)[0]
logger.info('Saving {0} pull requests to disk'.format(
len(list(pulls.keys()))))
comments_template = pulls_template + '/{0}/comments'
commits_template = pulls_template + '/{0}/commits'
for number, pull in list(pulls.items()):
# if args.include_pull_comments or args.include_everything:
template = comments_template.format(number)
pulls[number]['comment_data'] = retrieve_data(username, password, template)
#if args.include_pull_commits or args.include_everything:
template = commits_template.format(number)
pulls[number]['commit_data'] = retrieve_data(username, password, template)
pull_file = '{0}/{1}.json'.format(pulls_cwd, number)
with codecs.open(pull_file, 'w', encoding='utf-8') as f:
json_dump(pull, f)
def backup_milestones(username, password, repo_cwd, repository, repos_template):
milestone_cwd = os.path.join(repo_cwd, 'milestones')
# if args.skip_existing and os.path.isdir(milestone_cwd):
# return
logger.info(f"Retrieving {repository['full_name']} milestones")
mkdir_p(repo_cwd, milestone_cwd)
template = f"{repos_template}/{repository['full_name']}/milestones"
query_args = {
'state': 'all'
}
_milestones = retrieve_data(username, password, template, query_args=query_args)
milestones = {}
for milestone in _milestones:
milestones[milestone['number']] = milestone
    log_info(f'Saving {len(list(milestones.keys()))} milestones to disk')
for number, milestone in list(milestones.items()):
        milestone_file = f'{milestone_cwd}/{number}.json'
with codecs.open(milestone_file, 'w', encoding='utf-8') as f:
json_dump(milestone, f)
return
def backup_labels(username, password, repo_cwd, repository, repos_template):
label_cwd = os.path.join(repo_cwd, 'labels')
output_file = '{0}/labels.json'.format(label_cwd)
template = '{0}/{1}/labels'.format(repos_template,
repository['full_name'])
    _backup_data(username, password,
                 'labels',
                 template,
                 output_file,
                 label_cwd)
def backup_hooks(args, repo_cwd, repository, repos_template):
auth = get_auth(args)
if not auth:
log_info("Skipping hooks since no authentication provided")
return
hook_cwd = os.path.join(repo_cwd, 'hooks')
output_file = '{0}/hooks.json'.format(hook_cwd)
template = '{0}/{1}/hooks'.format(repos_template,
repository['full_name'])
try:
_backup_data(args,
'hooks',
template,
output_file,
hook_cwd)
except SystemExit:
log_info("Unable to read hooks, skipping")
def backup_releases(args, repo_cwd, repository, repos_template, include_assets=False):
repository_fullname = repository['full_name']
# give release files somewhere to live & log intent
release_cwd = os.path.join(repo_cwd, 'releases')
log_info('Retrieving {0} releases'.format(repository_fullname))
mkdir_p(repo_cwd, release_cwd)
query_args = {}
release_template = '{0}/{1}/releases'.format(repos_template, repository_fullname)
releases = retrieve_data(args, release_template, query_args=query_args)
# for each release, store it
log_info('Saving {0} releases to disk'.format(len(releases)))
for release in releases:
release_name = release['tag_name']
output_filepath = os.path.join(release_cwd, '{0}.json'.format(release_name))
with codecs.open(output_filepath, 'w+', encoding='utf-8') as f:
json_dump(release, f)
if include_assets:
assets = retrieve_data(args, release['assets_url'])
for asset in assets:
download_file(asset['url'], os.path.join(release_cwd, asset['name']), get_auth(args))
def backup_account(username, password, output_directory):
account_cwd = os.path.join(output_directory, 'account')
# if args.include_starred or args.include_everything:
    host = get_github_api_host()
output_file = f"{account_cwd}/starred.json"
template = f"https://{host}/users/{username}/starred"
_backup_data(username, password,
"starred repositories",
template,
output_file,
account_cwd)
# if args.include_watched or args.include_everything:
output_file = f'{account_cwd}/watched.json'
template = "https://{host}/users/{username}/subscriptions"
_backup_data(username, password,
"watched repositories",
template,
output_file,
account_cwd)
# if args.include_followers or args.include_everything:
output_file = f"{account_cwd}/followers.json"
template = "https://{host}/users/{usernamec}/followers"
_backup_data(username, password,
"followers",
template,
output_file,
account_cwd)
# if args.include_following or args.include_everything:
output_file = f"{account_cwd}/following.json"
template = "https://{host}/users/{usernamec}/following"
_backup_data(username, password,
"following",
template,
output_file,
account_cwd)
def _backup_data(username, password, name, template, output_file, output_directory, overwrite=True):
# skip_existing = args.skip_existing
if overwrite:
logger.info(f'Retrieving {username} {name}')
mkdir_p(output_directory)
data = retrieve_data(username, password, template)
logger.info(f'Writing {len(data)} {name} to disk')
with codecs.open(output_file, 'w', encoding='utf-8') as f:
json_dump(data, f) |
from pyverse import Pyverse
import re
import statistics
def count_letters(text):
count = 0
for char in text:
if char.isalpha():
count += 1
if count == 0:
return 1
else:
return count
def count_sentences(text):
text = text.replace("\n", "")
    sentence_end = re.compile(r'[.:;!?()]')
    sentences = sentence_end.split(text)
    sentences = list(filter(None, sentences))
    if len(sentences) == 0:
        return 1
    else:
        return len(sentences)
def numbers2words(text):
#e.g. 2 to two
    import nal  # converts a number to its word form (used below via nal.to_word)
new_text = []
for word in text.split():
        formato_numerico = re.compile(r"^-?[0-9]+(\.[0-9]+)?$")
        if re.match(formato_numerico, word):
            # convert the numeric string to int or float before spelling it out
            if '.' not in word:
                word = int(word)
            else:
                word = float(word)
            word = nal.to_word(word)
new_text.append(word.lower())
text = ' '.join(new_text)
return text
def count_words(text):
text = numbers2words(text)
text = ''.join(filter(lambda x: not x.isdigit(), text))
clean = re.compile('\W+')
text = clean.sub(' ', text).strip()
# Prevents zero division
if len(text.split()) == 0:
return 1
else:
return len(text.split())
def count_syllables(text):
text = numbers2words(text)
text = ''.join(filter(lambda x: not x.isdigit(), text))
syllables = Pyverse(text)
return syllables.count
if __name__ == '__main__':
# test
TextoDePrueba = "Tuvo muchas veces competencia con el cura de su lugar (que era hombre docto graduado en Sigüenza), sobre cuál había sido mejor caballero, Palmerín de Inglaterra o Amadís de Gaula; mas maese Nicolás, barbero del mismo pueblo, decía que ninguno llegaba al caballero del Febo, y que si alguno se le podía comparar, era don Galaor, hermano de Amadís de Gaula, porque tenía muy acomodada condición para todo; que no era caballero melindroso, ni tan llorón como su hermano, y que en lo de la valentía no le iba en zaga.\
En resolución, él se enfrascó tanto en su lectura, que se le pasaban las noches leyendo de claro en claro, y los días de turbio en turbio, y así, del poco dormir y del mucho leer, se le secó el cerebro, de manera que vino a perder el juicio. Llenósele la fantasía de todo aquello que leía en los libros, así de encantamientos, como de pendencias, batallas, desafíos, heridas, requiebros, amores, tormentas y disparates imposibles, y asentósele de tal modo en la imaginación que era verdad toda aquella máquina de aquellas soñadas invenciones que leía, que para él no había otra historia más cierta en el mundo."
total = count_syllables(TextoDePrueba)
print(total) |
import numpy as np
class PoseCreateLayer(object):
def __init__(self, num_joint, top, bottom, error_order):
self.num_joint_ = num_joint
self.top = top
self.bottom = bottom
self.error_order_ = error_order
if bottom[0].width() != self.num_joint_ * 2:
print("The bottom width and num of joint should have the same number.")
top[0].Reshape(bottom[1].num(), self.num_joint_,
self.bottom[1].height(), self.bottom[1].width())
def pose_create(self):
bottom_data_points = self.bottom[0]
top_data_points = self.top[0]
bottom_num = self.bottom[1].num()
bottom_height = self.bottom[1].height()
bottom_width = self.bottom[1].width()
sigma = 1.0 # 1.0
for idx in range(bottom_num):
for j in range(self.num_joint_):
center_x = int(bottom_data_points[j * 2])
center_y = int(bottom_data_points[j * 2 + 1])
for yy in range(bottom_height):
for xx in range(bottom_width):
index = (j * bottom_height + yy) * bottom_width + xx
if center_x == 0 and center_y == 0:
top_data_points[index] = 0
else:
gaussian = (1 / (sigma * np.sqrt(2 * np.pi))) * np.exp(-0.5 * (np.power(
yy - center_y, 2.0) + np.power(xx - center_x, 2.0)) * np.power(1 / sigma, 2.0))
gaussian = 4 * gaussian # /4
top_data_points[index] = gaussian
bottom_data_points += self.bottom[0].offset(1)
top_data_points += self.top[0].offset(1)
@staticmethod
def class_to_joint_first(cls_):
if cls_ == 1:
return 0
elif cls_ == 2:
return 0
elif cls_ == 4:
return 0
elif cls_ == 13:
return 0
elif cls_ == 5:
return 1
elif cls_ == 7:
return 1
elif cls_ == 11:
return 1
elif cls_ == 9:
return 2
elif cls_ == 12:
return 2
elif cls_ == 14:
return 3
elif cls_ == 15:
return 4
elif cls_ == 16:
return 5
elif cls_ == 17:
return 6
elif cls_ == 18:
return 7
elif cls_ == 19:
return 8
else:
return -1
@staticmethod
def class_to_joint_second(cls_):
if cls_ == 4:
return 0
elif cls_ == 3:
return 1
elif cls_ == 2:
return 2
else:
return -1
@staticmethod
def class_to_joint_third(cls_):
if cls_ == 1:
return 0
elif cls_ == 2:
return 1
else:
return -1
def select_joint(self, num_joint_, cls_):
if num_joint_ == 9:
return self.class_to_joint_first(cls_)
elif num_joint_ == 3:
return self.class_to_joint_second(cls_)
elif num_joint_ == 2:
return self.class_to_joint_third(cls_)
else:
print("Unexpected num_joint:", num_joint_)
def pose_evaluate(self):
bottom_data = self.bottom[0].cpu_data()
top_data = self.top[0].mutable_cpu_data()
num = self.bottom[0].num()
height = self.bottom[0].height()
width = self.bottom[0].width()
        for i in range(num):
            # collect the pixel coordinates assigned to each joint in this image
            x_sum_vector = [[] for _ in range(self.num_joint_)]
            y_sum_vector = [[] for _ in range(self.num_joint_)]
            for h in range(height):
                for w in range(width):
cls_ = bottom_data[h * width + w]
joint_id = self.select_joint(self.num_joint_, cls_)
if 0 <= joint_id < self.num_joint_:
                        x_sum_vector[joint_id].append(w)
                        y_sum_vector[joint_id].append(h)
for w in range(self.num_joint_ * 2):
top_data[w] = 0
for n in range(self.num_joint_):
                if len(x_sum_vector[n]) > 0 and len(y_sum_vector[n]) > 0:
                    # average the collected coordinates to estimate the joint position
                    ave_x = sum(x_sum_vector[n]) / len(x_sum_vector[n])
                    ave_y = sum(y_sum_vector[n]) / len(y_sum_vector[n])
# LOG(INFO) << "ave_x: " << ave_x << " ave_y:" << ave_y
top_data[n*2] = int(ave_x)
top_data[n*2+1] = int(ave_y)
# LOG(INFO) << "cls: " << n << " x: " << int(ave_x) << " y: " << int(ave_y)
bottom_data += self.bottom[0].offset(1)
top_data += self.top[0].offset(1)
def check_data(self):
        if self.bottom[0].num() != self.bottom[1].num():
            print("The bottom data should have the same number.")
        if self.bottom[0].channels() != self.bottom[1].channels():
            print("The bottom data should have the same channel.")
        if self.bottom[0].height() != self.bottom[1].height():
            print("The bottom data should have the same height.")
        if self.bottom[0].width() != self.bottom[1].width():
            print("The bottom data should have the same width.")
        if self.bottom[0].width() != self.num_joint_ * 2:
            print("The bottom data should have the same width as double num_joint_.")
self.top[0].Reshape(self.bottom[0].num(), 1, 1, 1)
def pose_error(self):
bottom_data_one = self.bottom[0].cpu_data()
bottom_data_two = self.bottom[1].cpu_data()
bottom_data_three = None
bottom_data_four = None
if self.error_order_ == 2:
bottom_data_three = self.bottom[2].cpu_data()
bottom_data_four = self.bottom[3].cpu_data()
top_data = self.top[0].mutable_cpu_data()
num = self.bottom[0].num()
x1, x2, y1, y2 = 0, 0, 0, 0
left_arm = 3
right_arm = 4
# left_leg = 5, right_leg = 6, left_shoe = 7, right_shoe = 8
for i in range(num):
total_distance = 0
for j in range(self.num_joint_):
x1 = bottom_data_one[j*2]
x2 = bottom_data_two[j*2]
y1 = bottom_data_one[j*2+1]
y2 = bottom_data_two[j*2+1]
total_distance += np.sqrt((x1-x2)*(x1-x2) + (y1-y2)*(y1-y2))
# LOG(INFO) << "dis of 2: " << total_distance
if self.error_order_ == 2:
x1 = bottom_data_three[left_arm*2]
x2 = bottom_data_four[left_arm*2]
y1 = bottom_data_three[left_arm*2+1]
y2 = bottom_data_four[left_arm*2+1]
total_distance += np.sqrt((x1-x2)*(x1-x2) + (y1-y2)*(y1-y2))
x1 = bottom_data_three[right_arm*2]
x2 = bottom_data_four[right_arm*2]
y1 = bottom_data_three[right_arm*2+1]
y2 = bottom_data_four[right_arm*2+1]
total_distance += np.sqrt((x1-x2)*(x1-x2) + (y1-y2)*(y1-y2))
# LOG(INFO) << "dis plus 1: " << total_distance
if self.error_order_ == 1:
total_distance /= 10
elif self.error_order_ == 2:
total_distance /= 8
elif self.error_order_ == 3:
total_distance /= 5
else:
print("Unexpected error_order: ", self.error_order_)
# if total_distance > 10: # total_distance = 10
#
top_data[0] = total_distance
# LOG(INFO) << "total_distance: " << total_distance
bottom_data_one += self.bottom[0].offset(1)
bottom_data_two += self.bottom[1].offset(1)
top_data += self.top[0].offset(1)
if self.error_order_ == 2:
bottom_data_three += self.bottom[2].offset(1)
bottom_data_four += self.bottom[3].offset(1)
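# Illustrative sketch (not part of the layer above): the per-pixel Gaussian that
# pose_create builds with nested loops can be expressed with vectorized NumPy.
# The sigma and the 4x scaling mirror the loop; this is an assumption-laden
# standalone helper, not a drop-in replacement for the blob-based interface.
def gaussian_heatmap(center_x, center_y, height, width, sigma=1.0):
    """Return a (height, width) heatmap peaked at (center_x, center_y)."""
    if center_x == 0 and center_y == 0:
        # missing joint annotation: all-zero map, as in pose_create above
        return np.zeros((height, width))
    ys, xs = np.mgrid[0:height, 0:width]
    dist_sq = (ys - center_y) ** 2.0 + (xs - center_x) ** 2.0
    heat = (1.0 / (sigma * np.sqrt(2 * np.pi))) * np.exp(-0.5 * dist_sq / sigma ** 2)
    return 4.0 * heat  # same scaling factor as the loop above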
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 25 00:58:52 2021
@author: 20210595
"""
from space_to_vector2 import export, positions, positionClassifier, positionPreprocess
from typing import Any
from gama.genetic_programming.components.individual import Individual
from gama.genetic_programming.compilers.scikitlearn import compile_individual
from gama.genetic_programming.components.primitive_node import PrimitiveNode
from gama.genetic_programming.components.primitive import Primitive
from gama.genetic_programming.components.terminal import Terminal
import numpy as np
from sklearn.pipeline import make_pipeline, Pipeline
from sklearn.naive_bayes import GaussianNB, BernoulliNB, MultinomialNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import (
ExtraTreesClassifier,
RandomForestClassifier,
GradientBoostingClassifier,
)
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import LinearSVC
from sklearn.linear_model import LogisticRegression
from sklearn.cluster import FeatureAgglomeration
from sklearn.preprocessing import (
MaxAbsScaler,
MinMaxScaler,
Normalizer,
PolynomialFeatures,
RobustScaler,
StandardScaler,
Binarizer,
)
from sklearn.kernel_approximation import Nystroem, RBFSampler
from sklearn.decomposition import PCA, FastICA
from sklearn.feature_selection import (
SelectFwe,
SelectPercentile,
f_classif,
VarianceThreshold,
)
class ValuesSearchSpace(object):
def __init__(self):
self.counter = 0
def get_individuals(self, x):
dictionary_pygmo = {}
dictionary_pygmo.update({'GaussianNB': x[0]})
dictionary_pygmo.update({'BernoulliNB': x[1]})
dictionary_pygmo.update({'BernoulliNB.alpha': x[2]})
dictionary_pygmo.update({'BernoulliNB.fit_prior': self._int_to_bool(round(x[3]))})
dictionary_pygmo.update({'MultinomialNB': x[4]})
dictionary_pygmo.update({'MultinomialNB.alpha': x[5]})
dictionary_pygmo.update({'MultinomialNB.fit_prior': self._int_to_bool(round(x[6]))})
dictionary_pygmo.update({'DecisionTreeClassifier': x[7]})
dictionary_pygmo.update({'DecisionTreeClassifier.criterion': self._int_to_string(round(x[8]), gini=0, entropy=1)})
dictionary_pygmo.update({'DecisionTreeClassifier.max_depth': round(x[9])})
dictionary_pygmo.update({'DecisionTreeClassifier.min_samples_split': round(x[10])})
dictionary_pygmo.update({'DecisionTreeClassifier.min_samples_leaf': round(x[11])})
dictionary_pygmo.update({'ExtraTreesClassifier': x[12]})
dictionary_pygmo.update({'ExtraTreesClassifier.n_estimators': round(x[13])})
dictionary_pygmo.update({'ExtraTreesClassifier.criterion': self._int_to_string(round(x[14]), gini=0, entropy=1)})
dictionary_pygmo.update({'ExtraTreesClassifier.max_features': x[15]})
dictionary_pygmo.update({'ExtraTreesClassifier.min_samples_split': round(x[16])})
dictionary_pygmo.update({'ExtraTreesClassifier.min_samples_leaf': round(x[17])})
dictionary_pygmo.update({'ExtraTreesClassifier.bootstrap': self._int_to_bool(round(x[18]))})
dictionary_pygmo.update({'RandomForestClassifier': x[19]})
dictionary_pygmo.update({'RandomForestClassifier.n_estimators': round(x[20])})
dictionary_pygmo.update({'RandomForestClassifier.criterion': self._int_to_string(round(x[21]), gini=0, entropy=1)})
dictionary_pygmo.update({'RandomForestClassifier.max_features': x[22]})
dictionary_pygmo.update({'RandomForestClassifier.min_samples_split': round(x[23])})
dictionary_pygmo.update({'RandomForestClassifier.min_samples_leaf': round(x[24])})
dictionary_pygmo.update({'RandomForestClassifier.bootstrap': self._int_to_bool(round(x[25]))})
dictionary_pygmo.update({'GradientBoostingClassifier': x[26]})
dictionary_pygmo.update({'GradientBoostingClassifier.n_estimators': round(x[27])})
dictionary_pygmo.update({'GradientBoostingClassifier.learning_rate': x[28]})
dictionary_pygmo.update({'GradientBoostingClassifier.max_depth': round(x[29])})
dictionary_pygmo.update({'GradientBoostingClassifier.min_samples_split': round(x[30])})
dictionary_pygmo.update({'GradientBoostingClassifier.min_samples_leaf': round(x[31])})
dictionary_pygmo.update({'GradientBoostingClassifier.subsample': x[32]})
dictionary_pygmo.update({'GradientBoostingClassifier.max_features': x[33]})
dictionary_pygmo.update({'KNeighborsClassifier': x[34]})
dictionary_pygmo.update({'KNeighborsClassifier.n_neighbors': round(x[35])})
dictionary_pygmo.update({'KNeighborsClassifier.weights': self._int_to_string(round(x[36]), uniform=0, distance=1)})
dictionary_pygmo.update({'KNeighborsClassifier.p': round(x[37])})
dictionary_pygmo.update({'LinearSVC': x[38]})
dictionary_pygmo.update({'LinearSVC.penalty': self._int_to_string(round(x[39]), l1=0, l2=1)})
dictionary_pygmo.update({'LinearSVC.loss': self._int_to_string(round(x[40]), hinge=0, squared_hinge=1)})
dictionary_pygmo.update({'LinearSVC.dual': self._int_to_bool(round(x[41]))})
dictionary_pygmo.update({'LinearSVC.tol': x[42]})
dictionary_pygmo.update({'LinearSVC.C': x[43]})
if dictionary_pygmo['LinearSVC.penalty'] == 'l1':
dictionary_pygmo['LinearSVC.loss'] = 'squared_hinge'
if (dictionary_pygmo['LinearSVC.penalty'] == 'l2') and (dictionary_pygmo['LinearSVC.loss'] == 'hinge') and (dictionary_pygmo['LinearSVC.dual'] == False):
            dictionary_pygmo['LinearSVC.dual'] = True
if (dictionary_pygmo['LinearSVC.penalty'] == 'l1') and (dictionary_pygmo['LinearSVC.loss'] == 'squared_hinge') and (dictionary_pygmo['LinearSVC.dual'] == True):
            dictionary_pygmo['LinearSVC.dual'] = False
dictionary_pygmo.update({'LogisticRegression': x[44]})
dictionary_pygmo.update({'LogisticRegression.penalty': self._int_to_string(round(x[45]), l2=0)})
dictionary_pygmo.update({'LogisticRegression.C': x[46]})
dictionary_pygmo.update({'LogisticRegression.dual': self._int_to_bool(round(x[47]))})
dictionary_pygmo.update({'LogisticRegression.solver': self._int_to_string(round(x[48]), lbfgs=0)})
dictionary_pygmo.update({'Binarizer': x[49]})
dictionary_pygmo.update({'Binarizer.threshold': x[50]})
dictionary_pygmo.update({'FastICA': x[51]})
dictionary_pygmo.update({'FastICA.tol': x[52]})
dictionary_pygmo.update({'FeatureAgglomeration': x[53]})
dictionary_pygmo.update({'FeatureAgglomeration.linkage': self._int_to_string(round(x[54]), ward=0, complete=1, average=2)})
dictionary_pygmo.update({'FeatureAgglomeration.affinity': self._int_to_string(round(x[55]), euclidean=0, l1=1, l2=2, manhattan=3, cosine=4, precomputed=5)})
if dictionary_pygmo['FeatureAgglomeration.linkage'] == 'ward':
dictionary_pygmo['FeatureAgglomeration.affinity'] = 'euclidean'
dictionary_pygmo.update({'MaxAbsScaler': x[56]})
dictionary_pygmo.update({'MinMaxScaler': x[57]})
dictionary_pygmo.update({'Normalizer': x[58]})
dictionary_pygmo.update({'Normalizer.norm': self._int_to_string(round(x[59]), l1=0, l2=1, max=2)})
dictionary_pygmo.update({'Nystroem': x[60]})
dictionary_pygmo.update({'Nystroem.kernel': self._int_to_string(round(x[61]), rbf=0, cosine=1, chi2=2, laplacian=3, polynomial=4, poly=5, linear=6, additive_chi2=7, sigmoid=8)})
dictionary_pygmo.update({'Nystroem.gamma': x[62]})
dictionary_pygmo.update({'Nystroem.n_components': round(x[63])})
dictionary_pygmo.update({'PCA': x[64]})
dictionary_pygmo.update({'PCA.svd_solver': self._int_to_string(round(x[65]), randomized=0)})
dictionary_pygmo.update({'PCA.iterated_power': round(x[66])})
dictionary_pygmo.update({'PolynomialFeatures': x[67]})
dictionary_pygmo.update({'PolynomialFeatures.degree': round(x[68])})
dictionary_pygmo.update({'PolynomialFeatures.include_bias': self._int_to_bool(round(x[69]))})
dictionary_pygmo.update({'PolynomialFeatures.interaction_only': self._int_to_bool(round(x[70]))})
dictionary_pygmo.update({'RBFSampler': x[71]})
dictionary_pygmo.update({'RBFSampler.gamma': x[72]})
dictionary_pygmo.update({'RobustScaler': x[73]})
dictionary_pygmo.update({'StandardScaler': x[74]})
dictionary_pygmo.update({'SelectFwe': x[75]})
dictionary_pygmo.update({'SelectFwe.alpha': x[76]})
#dictionary_pygmo.update({'SelectFwe.score_func': {f_classif: None}})
dictionary_pygmo.update({'SelectFwe.score_func': f_classif})
dictionary_pygmo.update({'SelectPercentile': x[77]})
dictionary_pygmo.update({'SelectPercentile.percentile': round(x[78])})
#dictionary_pygmo.update({'SelectPercentile.score_func': {f_classif: None}})
dictionary_pygmo.update({'SelectPercentile.score_func': f_classif})
dictionary_pygmo.update({'VarianceThreshold': x[79]})
dictionary_pygmo.update({'VarianceThreshold.threshold': x[80]})
newpositions = self._index_function(x, dictionary_pygmo)
#pipeline = self._create_pipeline(dictionary_values=dictionary_pygmo, position=newpositions[0])
return newpositions
def _int_to_string(self, value, **kwargs):
for element in kwargs:
if kwargs[element] == value:
return element
def _int_to_bool(self, value):
return True if value == 1 else False
def _index_function(self, x, dictionary_pos):
list_index_techniques_to_use_before = [i for i in positionPreprocess if x[i] > 90]
valuesPreprocess = [x[i] for i in list_index_techniques_to_use_before]
valuesPreprocess.sort(reverse=True)
list_index_techniques_to_use = []
for i in valuesPreprocess:
for j in range(len(x)):
if x[j] == i:
list_index_techniques_to_use.append(j)
valueIndicesClassifiers = [x[i] for i in positionClassifier]
max_value = max(valueIndicesClassifiers)
max_index = valueIndicesClassifiers.index(max_value)
indexClassifier = positionClassifier[max_index]
#The last index is the classifier
list_index_techniques_to_use.append(indexClassifier)
lista_of_estimators = [self._create_individual(dictionary_pos, i) for i in list_index_techniques_to_use]
#return ind, list_index_techniques_to_use
clf = Pipeline(lista_of_estimators)
return clf
def _create_individual(self, dictionary_values, position):
if position == 0:
estimator = ('GaussianNB', GaussianNB())
if position == 1:
estimator = ('BernoulliNB', BernoulliNB(alpha=dictionary_values['BernoulliNB.alpha'],
fit_prior = dictionary_values['BernoulliNB.fit_prior']))
if position == 4:
estimator = ('MultinomialNB', MultinomialNB(alpha=dictionary_values['MultinomialNB.alpha'],
fit_prior = dictionary_values['MultinomialNB.fit_prior']))
if position == 7:
estimator = ('DecisionTreeClassifier', DecisionTreeClassifier(criterion=dictionary_values['DecisionTreeClassifier.criterion'],
max_depth=dictionary_values['DecisionTreeClassifier.max_depth'],
min_samples_split=dictionary_values['DecisionTreeClassifier.min_samples_split'],
min_samples_leaf=dictionary_values['DecisionTreeClassifier.min_samples_leaf']))
if position == 12:
estimator = ('ExtraTreesClassifier', ExtraTreesClassifier(n_estimators=dictionary_values['ExtraTreesClassifier.n_estimators'],
criterion=dictionary_values['ExtraTreesClassifier.criterion'],
max_features=dictionary_values['ExtraTreesClassifier.max_features'],
min_samples_split=dictionary_values['ExtraTreesClassifier.min_samples_split'],
min_samples_leaf=dictionary_values['ExtraTreesClassifier.min_samples_leaf'],
bootstrap=dictionary_values['ExtraTreesClassifier.bootstrap']))
if position == 19:
estimator = ('RandomForestClassifier', RandomForestClassifier(n_estimators=dictionary_values['RandomForestClassifier.n_estimators'],
criterion=dictionary_values['RandomForestClassifier.criterion'],
max_features=dictionary_values['RandomForestClassifier.max_features'],
min_samples_split=dictionary_values['RandomForestClassifier.min_samples_split'],
min_samples_leaf=dictionary_values['RandomForestClassifier.min_samples_leaf'],
bootstrap=dictionary_values['RandomForestClassifier.bootstrap']))
if position == 26:
estimator = ('GradientBoostingClassifier', GradientBoostingClassifier(n_estimators=dictionary_values['GradientBoostingClassifier.n_estimators'],
learning_rate=dictionary_values['GradientBoostingClassifier.learning_rate'],
max_depth=dictionary_values['GradientBoostingClassifier.max_depth'],
min_samples_split=dictionary_values['GradientBoostingClassifier.min_samples_split'],
min_samples_leaf=dictionary_values['GradientBoostingClassifier.min_samples_leaf'],
subsample=dictionary_values['GradientBoostingClassifier.subsample'],
max_features=dictionary_values['GradientBoostingClassifier.max_features']))
if position == 34:
estimator = ('KNeighborsClassifier', KNeighborsClassifier(n_neighbors=dictionary_values['KNeighborsClassifier.n_neighbors'],
weights=dictionary_values['KNeighborsClassifier.weights'],
p=dictionary_values['KNeighborsClassifier.p']))
if position == 38:
estimator = ('LinearSVC', LinearSVC(penalty=dictionary_values['LinearSVC.penalty'],
loss=dictionary_values['LinearSVC.loss'],
dual=dictionary_values['LinearSVC.dual'],
tol=dictionary_values['LinearSVC.tol'],
C=dictionary_values['LinearSVC.C']))
if position == 44:
estimator = ('LogisticRegression', LogisticRegression(penalty=dictionary_values['LogisticRegression.penalty'],
C=dictionary_values['LogisticRegression.C'],
dual=dictionary_values['LogisticRegression.dual'],
solver=dictionary_values['LogisticRegression.solver']))
if position == 49:
estimator = ('Binarizer', Binarizer(threshold=dictionary_values['Binarizer.threshold']))
if position == 51:
estimator = ('FastICA', FastICA(tol=dictionary_values['FastICA.tol']))
if position == 53:
estimator = ('FeatureAgglomeration', FeatureAgglomeration(linkage=dictionary_values['FeatureAgglomeration.linkage'],
affinity=dictionary_values['FeatureAgglomeration.affinity']))
if position == 56:
estimator = ('MaxAbsScaler', MaxAbsScaler())
if position == 57:
estimator = ('MinMaxScaler', MinMaxScaler())
if position == 58:
estimator = ('Normalizer', Normalizer(norm=dictionary_values['Normalizer.norm']))
if position == 60:
estimator = ('Nystroem', Nystroem(kernel=dictionary_values['Nystroem.kernel'],
gamma=dictionary_values['Nystroem.gamma'],
n_components=dictionary_values['Nystroem.n_components']))
if position == 64:
estimator = ('PCA', PCA(svd_solver=dictionary_values['PCA.svd_solver'],
iterated_power=dictionary_values['PCA.iterated_power']))
if position == 67:
estimator = ('PolynomialFeatures', PolynomialFeatures(degree=dictionary_values['PolynomialFeatures.degree'],
include_bias=dictionary_values['PolynomialFeatures.include_bias'],
interaction_only=dictionary_values['PolynomialFeatures.interaction_only']))
if position == 71:
estimator = ('RBFSampler', RBFSampler(gamma=dictionary_values['RBFSampler.gamma']))
if position == 73:
estimator = ('RobustScaler', RobustScaler())
if position == 74:
estimator = ('StandardScaler', StandardScaler())
if position == 75:
estimator = ('SelectFwe', SelectFwe(alpha=dictionary_values['SelectFwe.alpha'],
score_func=dictionary_values['SelectFwe.score_func']))
if position == 77:
estimator = ('SelectPercentile', SelectPercentile(percentile=dictionary_values['SelectPercentile.percentile'],
score_func=dictionary_values['SelectPercentile.score_func']))
if position == 79:
estimator = ('VarianceThreshold', VarianceThreshold(threshold=dictionary_values['VarianceThreshold.threshold']))
return estimator
newInstance = ValuesSearchSpace()
for i in export:
result1 = newInstance.get_individuals(i)
print(result1) |
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_table
import plotly.express as px
import dash_bootstrap_components as dbc
from dash.dependencies import Input
from dash.dependencies import Output
from dash.dependencies import State
import pandas as pd
from dashboard_data import DataInit
# `init` supplies the dashboard data; a no-argument DataInit constructor is
# assumed here. `app` (the Dash application the callbacks below attach to) is
# expected to be defined or imported elsewhere in the project.
init = DataInit()
df_teams = init.team_names()
df_fixture_form = init.fixture_form_decending('Arsenal')
layout = html.Div([
dbc.Row([
dbc.Col(
dash_table.DataTable(
id='data-table-graph',
# editable=True,
data=df_fixture_form.to_dict('records'),
columns=[{'id': c, 'name': c} for c in df_fixture_form.columns],
style_cell_conditional=[
{
'if': {
'column_id': 'Club'},
'textAlign': 'left'
},
{
'if': {
'column_id': ['Played', 'Position']},
'textAlign': 'center'
},
],
style_cell={'padding': '5px'},
style_header={
'backgroundColor': 'white',
'fontWeight': 'bold',
},
style_as_list_view=True,
),
width=4,
),
dbc.Col(
id='league-table'
),
])
])
@app.callback(
Output('data-table-graph', 'columns'),
[Input(str(i), 'n_clicks') for i in df_teams['teams']]
)
def columns_form_five(*args):
changed_id = [p['prop_id'] for p in dash.callback_context.triggered][0]
team = changed_id.split('.')[0]
df = init.fixture_form_decending(team)
data=df.to_dict('records')
columns=[{'id': c, 'name': c} for c in df.columns]
return columns
@app.callback(
Output('data-table-graph', 'data'),
[Input(str(i), 'n_clicks') for i in df_teams['teams']]
)
def data_form_five(*args):
changed_id = [p['prop_id'] for p in dash.callback_context.triggered][0]
team = changed_id.split('.')[0]
df = init.fixture_form_decending(team)
data=df.to_dict('records')
columns=[{'id': c, 'name': c} for c in df.columns]
return data |
import pickle
import subprocess
import sys
import fire
import pandas as pd
import tensorflow as tf
import datetime
import os
CSV_COLUMNS = ['gender', 'SeniorCitizen', 'Partner', 'Dependents',
'tenure', 'PhoneService', 'MultipleLines', 'InternetService',
'OnlineSecurity', 'OnlineBackup', 'DeviceProtection',
'TechSupport', 'StreamingTV', 'StreamingMovies', 'Contract',
'PaperlessBilling', 'PaymentMethod', 'MonthlyCharges', 'TotalCharges', 'Churn']
LABEL_COLUMN = "Churn"
DEFAULTS = [['na'], ['na'], ['na'], ['na'], [0.0], ['na'], ['na'], ['na'],
['na'], ['na'], ['na'], ['na'], ['na'], ['na'], ['na'],
['na'], ['na'], [0.0], [0.0], ['na']]
AIP_MODEL_DIR = os.environ["AIP_MODEL_DIR"]
def features_and_labels(row_data):
cols = tf.io.decode_csv(row_data, record_defaults=DEFAULTS)
feats = {
'gender': tf.reshape(cols[0], [1,]),
'SeniorCitizen': tf.reshape(cols[1],[1,]),
'Partner': tf.reshape(cols[2],[1,]),
'Dependents': tf.reshape(cols[3],[1,]),
'tenure': tf.reshape(cols[4],[1,]),
'PhoneService': tf.reshape(cols[5],[1,]),
'MultipleLines': tf.reshape(cols[6],[1,]),
'InternetService': tf.reshape(cols[7],[1,]),
'OnlineSecurity': tf.reshape(cols[8],[1,]),
'OnlineBackup': tf.reshape(cols[9],[1,]),
'DeviceProtection': tf.reshape(cols[10],[1,]),
'TechSupport': tf.reshape(cols[11],[1,]),
'StreamingTV': tf.reshape(cols[12],[1,]),
'StreamingMovies': tf.reshape(cols[13],[1,]),
'Contract': tf.reshape(cols[14],[1,]),
'PaperlessBilling': tf.reshape(cols[15],[1,]),
'PaymentMethod': tf.reshape(cols[16],[1,]),
'MonthlyCharges': tf.reshape(cols[17],[1,]),
'TotalCharges': tf.reshape(cols[18],[1,]),
'Churn': cols[19]
}
label = feats.pop('Churn')
label_int = tf.case([(tf.math.equal(label,tf.constant(['No'])), lambda: 0),
(tf.math.equal(label,tf.constant(['Yes'])), lambda: 1)])
return feats, label_int
def load_dataset(pattern, batch_size=1, mode='eval'):
# Make a CSV dataset
filelist = tf.io.gfile.glob(pattern)
dataset = tf.data.TextLineDataset(filelist).skip(1)
dataset = dataset.map(features_and_labels)
# Shuffle and repeat for training
if mode == 'train':
dataset = dataset.shuffle(buffer_size=10*batch_size).batch(batch_size).repeat()
else:
dataset = dataset.batch(10)
return dataset
def train_evaluate(training_dataset_path, validation_dataset_path, batch_size, num_train_examples, num_evals):
inputs = {
'gender': tf.keras.layers.Input(name='gender',shape=[None],dtype='string'),
'SeniorCitizen': tf.keras.layers.Input(name='SeniorCitizen',shape=[None],dtype='string'),
'Partner': tf.keras.layers.Input(name='Partner',shape=[None],dtype='string'),
'Dependents': tf.keras.layers.Input(name='Dependents',shape=[None],dtype='string'),
'tenure': tf.keras.layers.Input(name='tenure',shape=[None],dtype='int32'),
'PhoneService': tf.keras.layers.Input(name='PhoneService',shape=[None],dtype='string'),
'MultipleLines': tf.keras.layers.Input(name='MultipleLines',shape=[None],dtype='string'),
'InternetService': tf.keras.layers.Input(name='InternetService',shape=[None],dtype='string'),
'OnlineSecurity': tf.keras.layers.Input(name='OnlineSecurity',shape=[None],dtype='string'),
'OnlineBackup': tf.keras.layers.Input(name='OnlineBackup',shape=[None],dtype='string'),
'DeviceProtection': tf.keras.layers.Input(name='DeviceProtection',shape=[None],dtype='string'),
'TechSupport': tf.keras.layers.Input(name='TechSupport',shape=[None],dtype='string'),
'StreamingTV': tf.keras.layers.Input(name='StreamingTV',shape=[None],dtype='string'),
'StreamingMovies': tf.keras.layers.Input(name='StreamingMovies',shape=[None],dtype='string'),
'Contract': tf.keras.layers.Input(name='Contract',shape=[None],dtype='string'),
'PaperlessBilling': tf.keras.layers.Input(name='PaperlessBilling',shape=[None],dtype='string'),
'PaymentMethod': tf.keras.layers.Input(name='PaymentMethod',shape=[None],dtype='string'),
'MonthlyCharges': tf.keras.layers.Input(name='MonthlyCharges',shape=[None],dtype='float'),
'TotalCharges': tf.keras.layers.Input(name='TotalCharges',shape=[None],dtype='float')
}
batch_size = int(batch_size)
num_train_examples = int(num_train_examples)
num_evals = int(num_evals)
feat_cols = {
'tenure': tf.feature_column.numeric_column('tenure'),
'TotalCharges': tf.feature_column.numeric_column('TotalCharges'),
'MonthlyCharges': tf.feature_column.numeric_column('MonthlyCharges'),
'SeniorCitizen': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='SeniorCitizen', hash_bucket_size=3
)
),
'gender': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='gender', hash_bucket_size=2
)
),
'Partner': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='Partner', hash_bucket_size=2
)
),
'Dependents': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='Dependents', hash_bucket_size=2
)
),
'PhoneService': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='PhoneService', hash_bucket_size=2
)
),
'MultipleLines': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='MultipleLines', hash_bucket_size=3
)
),
'InternetService': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='InternetService', hash_bucket_size=3
)
),
'OnlineSecurity': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='OnlineSecurity', hash_bucket_size=3
)
),
'OnlineBackup': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='OnlineBackup', hash_bucket_size=3
)
),
'DeviceProtection': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='DeviceProtection', hash_bucket_size=3
)
),
'TechSupport': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='TechSupport', hash_bucket_size=3
)
),
'StreamingTV': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='StreamingTV', hash_bucket_size=3
)
),
'StreamingMovies': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='StreamingMovies', hash_bucket_size=3
)
),
'Contract': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='Contract', hash_bucket_size=3
)
),
'PaperlessBilling': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='PaperlessBilling', hash_bucket_size=2
)
),
'PaymentMethod': tf.feature_column.indicator_column(
tf.feature_column.categorical_column_with_hash_bucket(
key='PaymentMethod', hash_bucket_size=3
)
)
}
dnn_inputs = tf.keras.layers.DenseFeatures(
feature_columns=feat_cols.values())(inputs)
h1 = tf.keras.layers.Dense(64, activation='relu')(dnn_inputs)
h2 = tf.keras.layers.Dense(128, activation='relu')(h1)
h3 = tf.keras.layers.Dense(64, activation='relu')(h2)
output = tf.keras.layers.Dense(1, activation='sigmoid')(h3)
model = tf.keras.models.Model(inputs=inputs,outputs=output)
model.compile(optimizer='adam',loss='binary_crossentropy',metrics=['accuracy'])
trainds = load_dataset(
pattern=training_dataset_path,
batch_size=batch_size,
mode='train')
evalds = load_dataset(
pattern=validation_dataset_path,
mode='eval')
steps_per_epoch = num_train_examples // (batch_size * num_evals)
history = model.fit(
trainds,
validation_data=evalds,
validation_steps=100,
epochs=num_evals,
steps_per_epoch=steps_per_epoch
)
#model_export_path = os.path.join(AIP_MODEL_DIR, "savedmodel")
model_export_path = os.path.join(AIP_MODEL_DIR)
tf.saved_model.save(
obj=model, export_dir=model_export_path) # with default serving function
print("Exported trained model to {}".format(model_export_path))
if __name__ == '__main__':
fire.Fire(train_evaluate)
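# Example invocation via python-fire (the script name and paths are hypothetical;
# flag names follow train_evaluate's signature):
#
#   python train.py \
#       --training_dataset_path="gs://my-bucket/churn/train*.csv" \
#       --validation_dataset_path="gs://my-bucket/churn/valid*.csv" \
#       --batch_size=32 --num_train_examples=5000 --num_evals=5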
|
import sys
import logging
from collections import OrderedDict
from substance import (SubProgram, Core)
class Box(SubProgram):
def __init__(self):
super(Box, self).__init__()
def setupCommands(self):
self.addCommand('ls', 'substance.command.box.ls')
self.addCommand('pull', 'substance.command.box.pull')
self.addCommand('delete', 'substance.command.box.delete')
return self
def getShellOptions(self, optparser):
return optparser
def getUsage(self):
return "substance box [options] COMMAND [command-options]"
def getHelpTitle(self):
return "Substance box management"
def initCommand(self, command):
command.core = self.core
return command
|
import numpy as np
import matplotlib.pyplot as plt
N = 5000 #number of steps to take
xo = 0.2 #initial position in m
vo = 0.0 #initial velocity
tau = 4.0 #total time for the simulation in s .
dt = tau/float(N) # time step
k = 42.0 #spring constant in N/m
m = 0.25 #mass in kg
g = 9.8 #in m/ s ^2
mu = 0.15 #friction coefficient
y = np.zeros([N,2])
#y is the vector of positions and velocities.
y[0,0] = xo #initial position
y[0,1] = vo #initial velocity
#This function defines the derivatives of the system.
def SpringMass(state,time) :
g0=state[1]
if g0 > 0 :
g1=-k/m*state[0]-g*mu
else:
g1=-k/m*state[0]+g*mu
return np.array([g0,g1])
#This is the basic step in the Euler Method for solving ODEs.
def euler (y,time,dt,derivs) :
k0 = dt*derivs(y,time)
ynext = y + k0
return ynext
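# A minimal sketch of a higher-order alternative: this midpoint (RK2) step uses
# the same derivs interface as euler() and could be swapped into the loop below.
# It is illustrative only and not part of the original exercise.
def midpoint(y, time, dt, derivs):
    k0 = dt * derivs(y, time)
    k1 = dt * derivs(y + 0.5 * k0, time + 0.5 * dt)
    return y + k1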
for j in range (N-1):
y[j+1] = euler(y[j],0,dt,SpringMass)
#Just to plot
time = np.linspace(0,tau,N)
plt.plot(time, y[:,0],'b',label="position")
plt.xlabel( "time" )
plt.ylabel( "position" )
plt.savefig('spring_mass.png') |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from example import application_pb2 as example_dot_application__pb2
class UserMortgageServiceStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Check = channel.unary_unary(
'/UserMortgageService/Check',
request_serializer=example_dot_application__pb2.SimpleMessage.SerializeToString,
response_deserializer=example_dot_application__pb2.SimpleMessage.FromString,
)
self.Check2 = channel.unary_unary(
'/UserMortgageService/Check2',
request_serializer=example_dot_application__pb2.SimpleMessage.SerializeToString,
response_deserializer=example_dot_application__pb2.SimpleMessage.FromString,
)
class UserMortgageServiceServicer(object):
# missing associated documentation comment in .proto file
pass
def Check(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Check2(self, request, context):
"""asd asd
WAWSdasDWDASWDaD sad asd
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_UserMortgageServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'Check': grpc.unary_unary_rpc_method_handler(
servicer.Check,
request_deserializer=example_dot_application__pb2.SimpleMessage.FromString,
response_serializer=example_dot_application__pb2.SimpleMessage.SerializeToString,
),
'Check2': grpc.unary_unary_rpc_method_handler(
servicer.Check2,
request_deserializer=example_dot_application__pb2.SimpleMessage.FromString,
response_serializer=example_dot_application__pb2.SimpleMessage.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'UserMortgageService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
|
# -*- coding: utf8 -*-
"Increment filter"
from .abstract import AbstractFilter
class Increment(AbstractFilter):
"Increment a variable"
name = 'Incrémenter variable'
description = "Incrémente une variable numérique"
parameters = [
{
'name': 'Variable',
'key': 'target',
'type': 'integer'
},
{
'name': 'Valeur',
'key': 'inc',
'type': 'integer'
}
]
def run(self):
"Execute the filter"
inc = self._param('inc')
target = self._model.config('target')
target_value = self._param('target')
self._registery.set(target, target_value + inc)
|
class Phrase:
"""A Phrase is a directed edge between one Paragraph and a second
Paragraph. These two Paragraphs can be the same.
The Phrase also has the ability to display text, but this should
typically be short, with longer description left to Paragraphs. To
identify themselves, Phrases have names that are unique with respect
to a given source Paragraph and destination Paragraph-layer combo, but
not necessarily unique globally. To clarify, this means that a
Paragraph can have two Phrases with the same name, but only if those
two Phrases are directed to Paragraphs sitting on different layers.
The traversal of a Phrase can alter a Reader's stats or can display
text, but does not have to do either one.
"""
def __init__(self):
"""Initialize this Phrase with an empty alteration function and
prompt"""
self.prompt = ""
def accept_reader(self, reader):
"""Prints out this Phrase's prompt and alters the reader."""
pass # TODO how to represent alteration data?
|
from django.contrib import admin
from applications.blog.models import Comments
from applications.blog.models import Post
@admin.register(Post)
class PostAdminModel(admin.ModelAdmin):
pass
@admin.register(Comments)
class CommentAdminModel(admin.ModelAdmin):
pass
# class Comment(admin.ModelAdmin):
# list_display = ('name', 'email', 'post', 'created', 'active')
# list_filter = ('active', 'created', 'updated')
# search_fields = ('name', 'email', 'body')
#
#
# admin.site.register(Comment, Comment)
|
import requests
from pyloggerhelper import log
base_schema_properties = {
"log_level": {
"type": "string",
"title": "l",
"description": "log等级",
"enum": ["DEBUG", "INFO", "WARN", "ERROR"],
"default": "DEBUG"
},
"base_url": {
"type": "string",
"title": "b",
"description": "portainer的根url"
},
"retry_max_times": {
"type": "integer",
"description": "重试次数",
},
"retry_interval_backoff_factor": {
"type": "number",
"description": "重试间隔时间,的参数,间隔时间位`{backoff factor} * (2 ** ({number of total retries} - 1))`",
"default": 0.1
}
}
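# Illustrative sketch (assumption: retry_max_times and
# retry_interval_backoff_factor from the schema above are meant to configure
# HTTP retries): a requests.Session wired up with urllib3's Retry, whose sleep
# follows `{backoff factor} * (2 ** ({number of total retries} - 1))`.
def make_session(retry_max_times: int = 3,
                 retry_interval_backoff_factor: float = 0.1) -> requests.Session:
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry
    retry = Retry(total=retry_max_times,
                  backoff_factor=retry_interval_backoff_factor,
                  status_forcelist=(500, 502, 503, 504))
    session = requests.Session()
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session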
class HttpCodeError(Exception):
"""http请求返回错误"""
pass
def get_jwt(rq: requests.Session, base_url: str, username: str, password: str) -> str:
"""获取jwt.
Args:
rq (requests.Session): 请求会话
base_url (str): portainer的根地址
username (str): portainer用户名
password (str): 用户的密码
Returns:
str: jwt的值
"""
res = rq.post(
base_url + "/api/auth",
json={
"Username": username,
"Password": password
}
)
if res.status_code != 200:
log.error("get jwt query get error",
base_url=base_url,
username=username,
status_code=res.status_code)
raise HttpCodeError("get jwt query get error")
try:
res_json = res.json()
except Exception as e:
log.error("get jwt query get json result error",
base_url=base_url,
username=username,
err=type(e),
err_msg=str(e),
exc_info=True,
stack_info=True)
raise e
else:
jwt = res_json.get("jwt")
if jwt:
return jwt
else:
log.error("get jwt query has no field jwt",
base_url=base_url,
username=username,
res_json=res_json)
raise AttributeError("get jwt query has no field jwt")
|
from .context import KerasTools
import pandas as pd
class TestRNN:
def setup(self):
self.sales_df = pd.read_csv('https://raw.githubusercontent.com/torch/demos/master/logistic-regression/example-logistic-regression.csv')
self.helper = ""
def test_util(self):
self.helper = KerasTools.keras_tools(self.sales_df, ts_n_y_vals = 28, debug=False)
self.helper.train_test_split(split_type='sequential')
assert self.helper.ts_n_y_vals == 28
### Tests
## train_test_split
# split_pct less than 0
# split_pct greater than 1
# val_split_pct less than 0
# val_split_pct greater than 1
## initialization
# ts_n_y_vals
# y_val as string
# y_val as df |
from setuptools import setup, find_packages
with open('README.md', 'r') as readme:
    long_description = readme.read()
setup(
name='gitcode',
version='0.1',
description='Interact with Git through Python',
long_description=long_description,
url='https://github.com/WindJackal/gitcode',
author='Angus Timothy Olivier',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Version Control',
],
keywords='git version control source development',
project_urls={
'Documentation': 'https://github.com/WindJackal/gitcode/README.md',
'Source': 'https://github.com/WindJackal/gitcode',
'Tracker': 'https://github.com/WindJackal/gitcode',
},
packages=find_packages(),
python_requires='>=3.5',
include_package_data=True,
) |
#!/usr/bin/python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Contains test runners for launching tests on simulators and devices."""
# pylint: disable=relative-import
import environment_setup
import collections
import errno
import fileinput
import json
import os
import shutil
import signal
import subprocess
import sys
import tempfile
import time
import xctest_utils
from common import gtest_utils
from slave import slave_utils
from slave.ios import find_xcode
from slave.ios import utils
class Error(Exception):
pass
class TestRunnerError(Error):
pass
class AppNotFoundError(TestRunnerError):
"""The app intended to be run was not found."""
def __init__(self, app_path):
super(AppNotFoundError, self).__init__(
'App does not exist: %s.' % app_path
)
class AppInstallationError(TestRunnerError):
"""There was an error installing the app."""
pass
class AppUninstallationError(TestRunnerError):
"""There was an error uninstalling the app."""
pass
class DeviceDetectionError(TestRunnerError):
"""There was an error concerning the number of detected devices."""
def __init__(self, num):
super(DeviceDetectionError, self).__init__(
'Expected 1 connected device, found %s.' % num)
class XcodeVersionNotFoundError(TestRunnerError):
"""The Xcode version intended to be used was not found."""
def __init__(self, xcode_version):
super(XcodeVersionNotFoundError, self).__init__(
'Xcode with the specified version not found: %s.' % xcode_version
)
class UnexpectedAppExtensionError(TestRunnerError):
"""The app had an unexpected or no extension."""
def __init__(self, app_path, valid_extensions):
if not valid_extensions:
valid_extensions = 'Expected no extension.'
elif len(valid_extensions) == 1:
valid_extensions = 'Expected extension: %s.' % valid_extensions[0]
else:
valid_extensions = 'Expected extension to be one of %s.' % ', '.join(
extension for extension in valid_extensions)
super(UnexpectedAppExtensionError, self).__init__(
'Unexpected app path: %s. %s' % (app_path, valid_extensions))
class SimulatorNotFoundError(TestRunnerError):
"""The iossim path was not found."""
def __init__(self, iossim_path):
super(SimulatorNotFoundError, self).__init__(
'Simulator does not exist: %s.' % iossim_path)
class AppLaunchError(TestRunnerError):
"""There was an error launching the app."""
pass
class TestRunner(object):
"""Base class containing common TestRunner functionality."""
def __init__(
self,
app_path,
xcode_version=None,
gs_bucket=None,
perf_bot_name=None,
perf_build_number=None,
perf_builder_name=None,
perf_master_name=None,
perf_revision=None,
perf_x_value=None,
test_args=None,
env_vars=None,
):
"""Initializes a new instance of the TestRunner class.
Args:
app_path: Full path to the compiled app to run.
xcode_version: Version of Xcode to use.
gs_bucket: Google Storage bucket to upload test data to, or None if the
test data should not be uploaded.
perf_bot_name: Name of this bot as indicated to the perf dashboard.
perf_build_number: Build number to indicate to the perf dashboard.
perf_builder_name: Name of this builder as indicated to the perf
dashboard.
perf_master_name: Name of the master as indicated to the perf dashboard.
perf_revision: Revision to indicate to the perf dashboard.
perf_x_value: Value to use on the x axis for all data uploaded to the
perf dashboard.
test_args: Arguments to pass when launching the test.
env_vars: Environment variables to set when launching the test.
Raises:
AppNotFoundError: If the specified app cannot be found.
UnexpectedAppExtensionError: If the app was not an .app or an .ipa.
"""
if not os.path.exists(app_path):
raise AppNotFoundError(app_path)
self.app_path = app_path
self.app_name, ext = os.path.splitext(os.path.split(app_path)[1])
if ext not in ('.app', '.ipa'):
raise UnexpectedAppExtensionError(app_path, ['.app', '.ipa'])
if xcode_version is not None:
xcode_summary = find_xcode.find_xcode(xcode_version)
if not xcode_summary['found']:
raise XcodeVersionNotFoundError(xcode_version)
self.env_vars = env_vars or []
self.gs_bucket = gs_bucket
self.perf_bot_name = perf_bot_name
self.perf_master_name = perf_master_name
self.perf_revision = perf_revision
self.perf_build_number = perf_build_number
self.perf_builder_name = perf_builder_name
self.perf_x_value = perf_x_value
self.test_args = test_args or []
self.xcode_version = xcode_version
self.summary = {
'links': collections.OrderedDict(),
'logs': collections.OrderedDict(),
}
@staticmethod
def Print(message, blank_lines=0, time_to_sleep=0):
"""Prints a message.
Args:
message: The message to print.
blank_lines: The number of blank lines to leave after the message.
time_to_sleep: The number of seconds to wait after printing the message.
"""
print '%s%s' % (message, ''.join(['\n' for _ in xrange(blank_lines)]))
sys.stdout.flush()
if time_to_sleep:
time.sleep(time_to_sleep)
def TearDown(self):
"""Performs post-test tear down."""
raise NotImplementedError
@staticmethod
def RequireTearDown(method):
"""Ensures TearDown is called after calling the specified method.
This decorator can be used to ensure that the tear down logic executes
regardless of how the decorated method exits.
Args:
method: The method to require a tear down for.
"""
def TearDownMethodCall(self, *args, **kwargs):
try:
return method(self, *args, **kwargs)
finally:
self.TearDown()
return TearDownMethodCall
@staticmethod
def GetKIFTestFilter(tests, blacklist):
"""Returns the KIF test filter to run or exclude only the given tests.
Args:
tests: The list of tests to run or exclude.
blacklist: Whether to run all except the given tests or not.
Returns:
A string which can be supplied to GKIF_SCENARIO_FILTER.
"""
if blacklist:
blacklist = '-'
else:
blacklist = ''
# For KIF tests, a pipe-separated list of tests will run just those tests.
# However, we also need to remove the "KIF." prefix from these tests.
# Using a single minus ahead of NAME will instead run everything other than
# the listed tests.
return '%sNAME:%s' % (
blacklist,
'|'.join(test.split('KIF.', 1)[-1] for test in tests),
)
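  # For example, tests=['KIF.testFoo', 'KIF.testBar'] yields
  # 'NAME:testFoo|testBar', or '-NAME:testFoo|testBar' when blacklist is True.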
@staticmethod
def GetGTestFilter(tests, blacklist):
"""Returns the GTest filter to run or exclude only the given tests.
Args:
tests: The list of tests to run or exclude.
blacklist: Whether to run all except the given tests or not.
Returns:
A string which can be supplied to --gtest_filter.
"""
if blacklist:
blacklist = '-'
else:
blacklist = ''
# For GTests, a colon-separated list of tests will run just those tests.
# Using a single minus at the beginning will instead run everything other
# than the listed tests.
return '%s%s' % (blacklist, ':'.join(test for test in tests))
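  # For example, tests=['Suite.TestA', 'Suite.TestB'] yields
  # 'Suite.TestA:Suite.TestB', or '-Suite.TestA:Suite.TestB' when blacklist is
  # True.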
def GetLaunchCommand(self, test_filter=None, blacklist=False):
"""Returns the command which is used to launch the test.
Args:
test_filter: A list of tests to filter by, or None to mean all.
blacklist: Whether to blacklist the elements of test_filter or not. Only
works when test_filter is not None.
Returns:
A list whose elements are the args representing the command.
"""
raise NotImplementedError
@staticmethod
def _Run(command, env=None):
"""Runs the specified command, parsing GTest output.
Args:
command: The shell command to execute, as a list of arguments.
Returns:
A GTestResult instance.
"""
result = utils.GTestResult(command)
print ' '.join(command)
print 'cwd:', os.getcwd()
sys.stdout.flush()
proc = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
)
parser = gtest_utils.GTestLogParser()
while True:
line = proc.stdout.readline()
if not line:
break
line = line.rstrip()
parser.ProcessLine(line)
print line
sys.stdout.flush()
proc.wait()
for test in parser.FailedTests(include_flaky=True):
# Tests are named as TestCase.TestName.
# A TestName starting with FLAKY_ should not turn the build red.
if '.' in test and test.split('.', 1)[1].startswith('FLAKY_'):
result.flaked_tests[test] = parser.FailureDescription(test)
else:
result.failed_tests[test] = parser.FailureDescription(test)
result.passed_tests.extend(parser.PassedTests(include_flaky=True))
print command[0], 'returned', proc.returncode
print
sys.stdout.flush()
# iossim can return 5 if it exits noncleanly, even if no tests failed.
# Therefore we can't rely on this exit code to determine success or failure.
result.finalize(proc.returncode, parser.CompletedWithoutFailure())
return result
def Launch(self):
"""Launches the test."""
raise NotImplementedError
def RunAllTests(self, result, *args, **kwargs):
"""Ensures all tests run, even if any crash the test app.
Args:
result: A GTestResult instance from having run the app.
Returns:
True if all tests were successful on the initial run.
Raises:
AppLaunchError: If the given result had crashed.
"""
if result.crashed and not result.crashed_test:
# If the app crashed without even starting, give up.
raise AppLaunchError
failed_tests = result.failed_tests
flaked_tests = result.flaked_tests
passed_tests = result.passed_tests
perf_links = result.perf_links
try:
while (result.crashed
and result.crashed_test
and not kwargs.get('retries')):
# If the app crashed on a specific test, then resume at the next test,
# except when 'retries' is nonzero. The 'retries' kwarg already forces
# the underlying gtest call to retry a fixed amount of times, and we
# don't want to conflict with this, because stability and memory tests
# rely on this behavior to run the same test on successive URLs.
self.Print(
'%s appears to have crashed during %s. Resuming at next test...' % (
self.app_name, result.crashed_test,
), blank_lines=2, time_to_sleep=5)
# Now run again, filtering out every test that ran. This is equivalent
# to starting at the next test.
result = self._Run(self.GetLaunchCommand(
test_filter=passed_tests + failed_tests.keys() + flaked_tests.keys(),
blacklist=True,
), *args, **kwargs)
# We are never overwriting any old data, because we aren't running any
# tests twice here.
failed_tests.update(result.failed_tests)
flaked_tests.update(result.flaked_tests)
passed_tests.extend(result.passed_tests)
perf_links.update(result.perf_links)
if failed_tests and not result.crashed and not kwargs.get('retries'):
# If the app failed without crashing, retry the failed tests in case of
# flake, except when 'retries' is nonzero.
msg = ['The following tests appear to have failed:']
msg.extend(failed_tests.keys())
msg.append('These tests will be retried, but their retry results will'
' not affect the outcome of this test step.')
msg.append('Retry results are purely for informational purposes.')
msg.append('Retrying...')
self.Print('\n'.join(msg), blank_lines=2, time_to_sleep=5)
self._Run(self.GetLaunchCommand(
test_filter=failed_tests.keys(),
), *args, **kwargs)
except OSError as e:
if e.errno == errno.E2BIG:
self.Print(
'Too many tests were found in this app to resume.',
blank_lines=1,
time_to_sleep=0,
)
else:
self.Print(
'Unexpected OSError: %s.' % e.errno, blank_lines=1, time_to_sleep=0)
self.InterpretResult(failed_tests, flaked_tests, passed_tests, perf_links)
# At this point, all the tests have run, so used failed_tests to determine
# the success/failure.
return not failed_tests
def InterpretResult(self, failed_tests, flaked_tests, passed_tests,
perf_links):
"""Interprets the given GTestResult.
Args:
failed_tests: A dict of failed test names mapping to lines of output.
flaked_tests: A dict of failed flaky test names mapping to lines of
output.
passed_tests: A list of passed test names.
perf_links: A dict of trace names mapping to perf dashboard URLs.
"""
for test, log_lines in failed_tests.iteritems():
self.summary['logs'][test] = log_lines
for test, log_lines in flaked_tests.iteritems():
self.summary['logs'][test] = log_lines
if passed_tests:
self.summary['logs']['passed tests'] = passed_tests
for trace, graph_url in perf_links.iteritems():
self.summary['links'][trace] = graph_url
class SimulatorTestRunner(TestRunner):
"""Class for running a test app on an iOS simulator."""
def __init__(
self,
app_path,
iossim_path,
platform,
version,
xcode_version=None,
gs_bucket=None,
perf_bot_name=None,
perf_build_number=None,
perf_builder_name=None,
perf_master_name=None,
perf_revision=None,
perf_x_value=None,
test_args=None,
env_vars=None,
):
"""Initializes an instance of the SimulatorTestRunner class.
Args:
app_path: Full path to the compiled app to run.
iossim_path: Full path to the iossim executable to launch.
platform: The platform to simulate. Supported values can be found by
running 'iossim -l'. e.g. 'iPhone 5', 'iPhone 5s'.
version: The iOS version the simulator should be running. Supported values
can be found by running 'iossim -l'. e.g. '8.0', '7.1'.
xcode_version: Version of Xcode to use.
gs_bucket: Google Storage bucket to upload test data to, or None if the
test data should not be uploaded.
perf_bot_name: Name of this bot as indicated to the perf dashboard.
perf_build_number: Build number to indicate to the perf dashboard.
perf_builder_name: Name of this builder as indicated to the perf
dashboard.
perf_master_name: Name of the master as indicated to the perf dashboard.
perf_revision: Revision to indicate to the perf dashboard.
perf_x_value: Value to use on the x axis for all data uploaded to the
perf dashboard.
test_args: Arguments to pass when launching the test.
env_vars: Environment variables to set when launching the test.
Raises:
SimulatorNotFoundError: If the given iossim path cannot be found.
"""
super(SimulatorTestRunner, self).__init__(
app_path,
env_vars=env_vars,
gs_bucket=gs_bucket,
perf_bot_name=perf_bot_name,
perf_build_number=perf_build_number,
perf_builder_name=perf_builder_name,
perf_master_name=perf_master_name,
perf_revision=perf_revision,
perf_x_value=perf_x_value,
test_args=test_args,
xcode_version=xcode_version,
)
if not os.path.exists(iossim_path):
raise SimulatorNotFoundError(iossim_path)
self.cfbundleid = utils.call(
utils.PLIST_BUDDY,
'-c', 'Print:CFBundleIdentifier',
os.path.join(self.app_path, 'Info.plist'),
).stdout[0]
self.iossim_path = iossim_path
self.platform = platform
self.version = version
self.timeout = '120'
self.homedir = ''
self.start_time = None
self.xcode_version = xcode_version
def SetStartTime(self):
"""Sets the start time, for finding crash reports during this run."""
# Crash reports have a timestamp in their filename, formatted as
# YYYY-MM-DD-HHMMSS.
self.start_time = time.strftime('%Y-%m-%d-%H%M%S', time.localtime())
def CreateNewHomeDirectory(self):
"""Creates a new home directory for the simulator."""
if self.xcode_version == '8.0':
cmd = [
self.iossim_path,
'-d', self.platform,
'-s', self.version,
'-w'
]
subprocess.check_output(cmd)
cmd = [
self.iossim_path,
'-d', self.platform,
'-s', self.version,
'-p'
]
self.homedir = subprocess.check_output(cmd).strip()
else:
self.homedir = tempfile.mkdtemp()
def RemoveHomeDirectory(self):
"""Recursively removes the home directory being used by the simulator."""
if self.xcode_version == '8.0':
cmd = [
self.iossim_path,
'-d', self.platform,
'-s', self.version,
'-w'
]
subprocess.check_output(cmd)
self.homedir = ''
else:
if os.path.exists(self.homedir):
shutil.rmtree(self.homedir, ignore_errors=True)
self.homedir = ''
def KillSimulators(self):
"""Forcibly kills any running iOS simulator instances."""
kill_cmd = [
'pkill',
'-9',
'-x',
# The iOS simulator has a different name depending on the Xcode version.
'iPhone Simulator', # Xcode 5
'iOS Simulator', # Xcode 6
'Simulator', # Xcode 7
# The simctl tool invoked by iossim may hang. https://crbug.com/637429.
'simctl',
]
# If a signal was sent, wait for the simulator to actually be killed.
if not utils.call(*kill_cmd).returncode:
time.sleep(5)
def SetUp(self):
self.KillSimulators()
self.CreateNewHomeDirectory()
self.SetStartTime()
def TearDown(self):
"""Forcibly kills any running iOS simulator instances."""
self.UploadTestData()
self.GetCrashReports()
self.KillSimulators()
self.RemoveHomeDirectory()
def FindTestDocumentsDirectory(self, apps_dir):
"""Finds the test's Documents directory in the given Applications directory.
Args:
apps_dir: The Applications directory, containing app ID directories.
Returns:
The Documents directory, or None if it doesn't exist.
"""
for appid_dir in os.listdir(apps_dir):
appid_dir = os.path.join(apps_dir, appid_dir)
app_bundle = os.path.join(appid_dir, '%s.app' % self.app_name)
metadata_plist = os.path.join(
appid_dir, '.com.apple.mobile_container_manager.metadata.plist')
docs_dir = os.path.join(appid_dir, 'Documents')
if os.path.exists(docs_dir):
# iOS 7 app ID directories contain the app bundle. iOS 8 app ID
# directories contain a metadata plist with the CFBundleIdentifier.
if os.path.exists(app_bundle):
return docs_dir
elif os.path.exists(metadata_plist) and utils.call(
utils.PLIST_BUDDY,
'-c', 'Print:MCMMetadataIdentifier',
metadata_plist,
).stdout[0] == self.cfbundleid:
return docs_dir
self.Print('Could not find %s on the simulator.' % self.app_name)
def UploadTestData(self):
"""Uploads the contents of the test's Documents directory.
Returns:
True if test data was uploaded, False otherwise.
"""
if not self.gs_bucket:
return False
apps_dir = ''
if self.xcode_version == '8.0':
apps_dir = os.path.join(
self.homedir,
'Containers',
'Data',
'Application',
)
else:
# [homedir]/Library/Developer/CoreSimulator/Devices contains UDID
# directories for each simulated platform started with this home dir.
# We'd expect just one such directory since we generate a unique home
# directory for each SimulatorTestRunner instance. Inside the device
# UDID directory is where we find the Applications directory.
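# For example (illustrative path only), the test's Documents directory ends
# up at something like:
#   <homedir>/Library/Developer/CoreSimulator/Devices/<UDID>/data/
#       Containers/Data/Application/<app id>/Documents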
udid_dir = os.path.join(
self.homedir,
'Library',
'Developer',
'CoreSimulator',
'Devices',
)
if os.path.exists(udid_dir):
udids = os.listdir(udid_dir)
if len(udids) == 1:
apps_dir = os.path.join(
udid_dir,
udids[0],
'data',
)
if self.version.startswith('7'):
# On iOS 7 the Applications directory is found right here.
apps_dir = os.path.join(apps_dir, 'Applications')
else:
# On iOS 8+ the Application (singular) directory is a little deeper.
apps_dir = os.path.join(
apps_dir,
'Containers',
'Data',
'Application',
)
else:
self.Print(
'Unexpected number of simulated device UDIDs in %s.' % udid_dir
)
docs_dir = None
if os.path.exists(apps_dir):
self.Print('Found Applications directory.')
docs_dir = self.FindTestDocumentsDirectory(apps_dir)
if docs_dir is not None and os.path.exists(docs_dir):
subprocess.check_call([
'screencapture',
os.path.join(docs_dir, 'desktop.png'),
])
self.summary['links']['test data'] = slave_utils.ZipAndUpload(
self.gs_bucket,
'%s.zip' % self.app_name,
docs_dir,
)
summary = os.path.join(docs_dir, 'summary.json')
if os.path.exists(summary):
self.HandleJsonFileWithPath(summary)
shutil.rmtree(docs_dir, ignore_errors=True)
return True
return False
def HandleJsonFileWithPath(self, summary):
"""Parse data in summarydir and send to perf dashboard."""
with open(summary) as jsonFile:
return json.load(jsonFile)
def GetCrashReports(self):
# A crash report's naming scheme is [app]_[timestamp]_[hostname].crash.
# e.g. net_unittests_2014-05-13-150900_vm1-a1.crash.
crash_reports_dir = os.path.expanduser(os.path.join(
'~',
'Library',
'Logs',
'DiagnosticReports',
))
if os.path.exists(crash_reports_dir):
for crash_report in os.listdir(crash_reports_dir):
report_name, ext = os.path.splitext(crash_report)
if report_name.startswith(self.app_name) and ext == '.crash':
report_time = report_name[len(self.app_name) + 1:].split('_')[0]
# Timestamps are big-endian and therefore comparable this way.
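# For example (illustrative values): '2014-05-13-150900' > '2014-05-13-145959'
# is True, so any report written after self.start_time compares greater.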
if report_time > self.start_time:
with open(os.path.join(crash_reports_dir, crash_report)) as f:
self.summary['logs']['crash report (%s)' % report_time] = (
f.read().splitlines())
def GetLaunchCommand(self, test_filter=None, blacklist=False):
"""Returns the iossim invocation which is used to run the test.
Args:
test_filter: A list of tests to filter by, or None to mean all.
blacklist: Whether to blacklist the elements of test_filter or not. Only
works when test_filter is not None.
Returns:
A list whose elements are the args representing the command.
"""
cmd = [
self.iossim_path,
'-d', self.platform,
'-s', self.version,
]
args = []
if self.xcode_version != '8.0':
cmd.extend([
'-t', self.timeout,
'-u', self.homedir
])
if test_filter is not None:
kif_filter = self.GetKIFTestFilter(test_filter, blacklist)
gtest_filter = self.GetGTestFilter(test_filter, blacklist)
cmd.extend([
'-e', 'GKIF_SCENARIO_FILTER=%s' % kif_filter,
])
if self.xcode_version == '8.0':
cmd.extend([
'-c', '--gtest_filter=%s' % gtest_filter,
])
else:
args.append('--gtest_filter=%s' % gtest_filter)
for env_var in self.env_vars:
cmd.extend(['-e', env_var])
cmd.append(self.app_path)
cmd.extend(self.test_args)
cmd.extend(args)
return cmd
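# For a pre-8.0 Xcode with a test filter, the assembled command looks roughly
# like this (illustrative values only, not from the original code):
#   iossim -d 'iPhone 5s' -s '8.1' -t 120 -u <homedir> \
#       -e GKIF_SCENARIO_FILTER=<kif filter> <app_path> --gtest_filter=<filter>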
@TestRunner.RequireTearDown
def Launch(self, *args, **kwargs):
"""Launches the test."""
self.SetUp()
result = self._Run(self.GetLaunchCommand(), *args, **kwargs)
if result.crashed and not result.crashed_test:
# If the app crashed, but there is no specific test which crashed,
# then the app must have failed to even start. Try one more time.
self.Print(
'%s appears to have crashed on startup. Retrying...' % self.app_name,
blank_lines=2,
time_to_sleep=5,
)
# Use a new home directory to launch a fresh simulator.
self.KillSimulators()
self.CreateNewHomeDirectory()
result = self._Run(self.GetLaunchCommand(), *args, **kwargs)
return self.RunAllTests(result, *args, **kwargs)
class XCTestRunner(TestRunner):
"""Base class containing common functionalities to run xctests."""
def __init__(
self,
app_path,
test_host,
test_project_dir,
xcode_version=None,
gs_bucket=None,
perf_bot_name=None,
perf_build_number=None,
perf_builder_name=None,
perf_master_name=None,
perf_revision=None,
perf_x_value=None,
test_args=None,
env_vars=None,
):
"""Initializes an instance of the SimulatorXCTestRunner class.
Args:
app_path: Full path to the compiled app to run.
test_host: Name of the compiled test host app to run tests.
test_project_dir: Directory of the dummy test project.
xcode_version: Version of Xcode to use.
gs_bucket: Google Storage bucket to upload test data to, or None if the
test data should not be uploaded.
perf_bot_name: Name of this bot as indicated to the perf dashboard.
perf_build_number: Build number to indicate to the perf dashboard.
perf_builder_name: Name of this builder as indicated to the perf
dashboard.
perf_master_name: Name of the master as indicated to the perf dashboard.
perf_revision: Revision to indicate to the perf dashboard.
perf_x_value: Value to use on the x axis for all data uploaded to the
perf dashboard.
test_args: Arguments to pass when launching the test.
env_vars: Environment variables to set when launching the test.
Raises:
AppNotFoundError: If the specified app cannot be found.
UnexpectedAppExtensionError: If the app was not an .app or an .ipa.
"""
super(XCTestRunner, self).__init__(
app_path,
env_vars=env_vars,
gs_bucket=gs_bucket,
perf_bot_name=perf_bot_name,
perf_build_number=perf_build_number,
perf_builder_name=perf_builder_name,
perf_master_name=perf_master_name,
perf_revision=perf_revision,
perf_x_value=perf_x_value,
test_args=test_args,
xcode_version=xcode_version,
)
self.app_path = os.path.abspath(app_path)
self.test_host_name = test_host
# Test target name is its host name without '_host' suffix.
self.test_target_name = test_host.rsplit('_', 1)[0]
self.test_project_dir = test_project_dir
self.timeout = '120'
self.homedir = ''
self.start_time = None
def TearDown(self):
"""Performs post-test tear down."""
raise NotImplementedError
def HandleJsonFileWithPath(self, summary):
"""Parse data in summarydir and send to perf dashboard."""
with open(summary) as jsonFile:
return json.load(jsonFile)
def GetLaunchEnvironment(self):
"""Returns the environment which is used to run the xctest.
"""
env = dict(os.environ, APP_TARGET_NAME=self.test_host_name,
TEST_TARGET_NAME=self.test_target_name,
NSUnbufferedIO='YES')
return env
def GetLaunchCommand(self, test_filter=None, blacklist=False):
"""Returns the command which is used to launch the test.
Args:
test_filter: A list of tests to filter by, or None to mean all.
blacklist: Whether to blacklist the elements of test_filter or not. Only
works when test_filter is not None.
Returns:
A list whose elements are the args representing the command.
"""
raise NotImplementedError
@staticmethod
def _Run(command, env=None):
"""Runs the specified command, parsing GTest output.
Args:
command: The shell command to execute, as a list of arguments.
Returns:
A GTestResult instance.
"""
result = utils.GTestResult(command)
print ' '.join(command)
print 'cwd:', os.getcwd()
sys.stdout.flush()
proc = subprocess.Popen(
command,
env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
)
parser = xctest_utils.XCTestLogParser()
while True:
line = proc.stdout.readline()
if not line:
break
line = line.rstrip()
parser.ProcessLine(line)
print line
sys.stdout.flush()
proc.wait()
for test in parser.FailedTests(include_flaky=True):
# Tests are named as TestCase.TestName.
# A TestName starting with FLAKY_ should not turn the build red.
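# e.g. (illustrative names, not from the original code):
#   'TabStripTest.FLAKY_DragTab' is recorded in flaked_tests, while
#   'TabStripTest.CloseTab' is recorded in failed_tests.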
if '.' in test and test.split('.', 1)[1].startswith('FLAKY_'):
result.flaked_tests[test] = parser.FailureDescription(test)
else:
result.failed_tests[test] = parser.FailureDescription(test)
result.passed_tests.extend(parser.PassedTests(include_flaky=True))
print command[0], 'returned', proc.returncode
print
sys.stdout.flush()
# iossim can return 5 if it exits noncleanly, even if no tests failed.
# Therefore we can't rely on this exit code to determine success or failure.
result.finalize(proc.returncode, parser.CompletedWithoutFailure())
print result
return result
def Launch(self):
"""Launches the test."""
raise NotImplementedError
def RunAllTests(self, result, *args, **kwargs):
"""Ensures all tests run, even if any crash the test app.
Args:
result: A GTestResult instance from having run the app.
Returns:
True if all tests were successful on the initial run.
Raises:
AppLaunchError: If the given result had crashed.
"""
if result.crashed and not result.crashed_test:
# If the app crashed without even starting, give up.
raise AppLaunchError
failed_tests = result.failed_tests
flaked_tests = result.flaked_tests
passed_tests = result.passed_tests
perf_links = result.perf_links
try:
if (result.crashed
and result.crashed_test
and not kwargs.get('retries')):
# If the app crashed on a specific test, then resume at the next test,
# except when 'retries' is nonzero. The 'retries' kwarg already forces
# the underlying gtest call to retry a fixed amount of times, and we
# don't want to conflict with this, because stability and memory tests
# rely on this behavior to run the same test on successive URLs.
self.Print(
'%s appears to have crashed during %s. Resuming at next test...' % (
self.app_name, result.crashed_test,
), blank_lines=2, time_to_sleep=5)
# Now run again, filtering out every test that ran. This is equivalent
# to starting at the next test.
result = self._Run(self.GetLaunchCommand(
test_filter=passed_tests + failed_tests.keys() + flaked_tests.keys(),
blacklist=True,
), *args, **kwargs)
# We are never overwriting any old data, because we aren't running any
# tests twice here.
failed_tests.update(result.failed_tests)
flaked_tests.update(result.flaked_tests)
passed_tests.extend(result.passed_tests)
perf_links.update(result.perf_links)
if failed_tests and not result.crashed and not kwargs.get('retries'):
# If the app failed without crashing, retry the failed tests in case of
# flake, except when 'retries' is nonzero.
msg = ['The following tests appear to have failed:']
msg.extend(failed_tests.keys())
msg.append('These tests will be retried, but their retry results will'
' not affect the outcome of this test step.')
msg.append('Retry results are purely for informational purposes.')
msg.append('Retrying...')
self.Print('\n'.join(msg), blank_lines=2, time_to_sleep=5)
self._Run(self.GetLaunchCommand(
test_filter=failed_tests.keys(),
), self.GetLaunchEnvironment(), *args, **kwargs)
except OSError as e:
if e.errno == errno.E2BIG:
self.Print(
'Too many tests were found in this app to resume.',
blank_lines=1,
time_to_sleep=0,
)
else:
self.Print(
'Unexpected OSError: %s.' % e.errno, blank_lines=1, time_to_sleep=0)
self.InterpretResult(failed_tests, flaked_tests, passed_tests, perf_links)
# At this point, all the tests have run, so use failed_tests to determine
# the success/failure.
return not failed_tests
class SimulatorXCTestRunner(XCTestRunner):
"""Class for running xctests on an iOS simulator."""
def __init__(
self,
app_path,
test_host,
test_project_dir,
platform,
version,
xcode_version=None,
gs_bucket=None,
perf_bot_name=None,
perf_build_number=None,
perf_builder_name=None,
perf_master_name=None,
perf_revision=None,
perf_x_value=None,
test_args=None,
env_vars=None,
):
"""Initializes an instance of the SimulatorXCTestRunner class.
Args:
app_path: Full path to the compiled app to run.
test_host: Name of the compiled test host app to run tests.
test_project_dir: Directory of the dummy test project.
platform: The platform to simulate. Supported values can be found by
running 'xcodebuild -list'. e.g. 'iPhone 5', 'iPhone 5s'.
version: The iOS version the simulator should be running. Supported values
can be found by running 'xcodebuild -list'. e.g. '8.0', '7.1'.
xcode_version: Version of Xcode to use.
gs_bucket: Google Storage bucket to upload test data to, or None if the
test data should not be uploaded.
perf_bot_name: Name of this bot as indicated to the perf dashboard.
perf_build_number: Build number to indicate to the perf dashboard.
perf_builder_name: Name of this builder as indicated to the perf
dashboard.
perf_master_name: Name of the master as indicated to the perf dashboard.
perf_revision: Revision to indicate to the perf dashboard.
perf_x_value: Value to use on the x axis for all data uploaded to the
perf dashboard.
test_args: Arguments to pass when launching the test.
env_vars: Environment variables to set when launching the test.
Raises:
SimulatorNotFoundError: If the given iossim path cannot be found.
"""
super(SimulatorXCTestRunner, self).__init__(
app_path,
test_host,
test_project_dir,
env_vars=env_vars,
gs_bucket=gs_bucket,
perf_bot_name=perf_bot_name,
perf_build_number=perf_build_number,
perf_builder_name=perf_builder_name,
perf_master_name=perf_master_name,
perf_revision=perf_revision,
perf_x_value=perf_x_value,
test_args=test_args,
xcode_version=xcode_version,
)
self.cfbundleid = utils.call(
utils.PLIST_BUDDY,
'-c', 'Print:CFBundleIdentifier',
os.path.join(self.app_path, 'Info.plist'),
).stdout[0]
self.platform = platform
self.version = version
self.built_dir = os.path.split(self.app_path)[0]
self.iossim_path = os.path.join(self.built_dir, 'iossim')
def UploadTestData(self):
"""Uploads the contents of the test's Documents directory.
Returns:
True if test data was uploaded, False otherwise.
"""
if not self.gs_bucket:
return False
apps_dir = os.path.join(
self.homedir,
'Containers',
'Data',
'Application',
)
docs_dir = None
if os.path.exists(apps_dir):
self.Print('Found Applications directory.')
docs_dir = self.FindTestDocumentsDirectory(apps_dir)
if docs_dir is not None and os.path.exists(docs_dir):
subprocess.check_call([
'screencapture',
os.path.join(docs_dir, 'desktop.png'),
])
self.summary['links']['test data'] = slave_utils.ZipAndUpload(
self.gs_bucket,
'%s.zip' % self.app_name,
docs_dir,
)
summary = os.path.join(docs_dir, 'summary.json')
if os.path.exists(summary):
self.HandleJsonFileWithPath(summary)
shutil.rmtree(docs_dir, ignore_errors=True)
return True
return False
def SetStartTime(self):
"""Sets the start time, for finding crash reports during this run."""
# Crash reports have a timestamp in their filename, formatted as
# YYYY-MM-DD-HHMMSS.
self.start_time = time.strftime('%Y-%m-%d-%H%M%S', time.localtime())
def CreateNewHomeDirectory(self):
"""Creates a new home directory for the simulator."""
cmd = [
self.iossim_path,
'-d', self.platform,
'-s', self.version,
'-w'
]
subprocess.check_output(cmd)
cmd = [
self.iossim_path,
'-d', self.platform,
'-s', self.version,
'-p'
]
self.homedir = subprocess.check_output(cmd).strip()
def RemoveHomeDirectory(self):
"""Recursively removes the home directory being used by the simulator."""
cmd = [
self.iossim_path,
'-d', self.platform,
'-s', self.version,
'-w'
]
subprocess.check_output(cmd)
self.homedir = ''
def KillSimulators(self):
"""Forcibly kills any running iOS simulator instances."""
kill_cmd = [
'pkill',
'-9',
'-x',
# The iOS simulator has a different name depending on the Xcode version.
'iPhone Simulator', # Xcode 5
'iOS Simulator', # Xcode 6
'Simulator', # Xcode 7
# The simctl tool invoked by iossim may hang. https://crbug.com/637429.
'simctl',
]
# If a signal was sent, wait for the simulator to actually be killed.
if not utils.call(*kill_cmd).returncode:
time.sleep(5)
def SetUp(self):
self.KillSimulators()
self.CreateNewHomeDirectory()
self.SetStartTime()
def TearDown(self):
"""Forcibly kills any running iOS simulator instances."""
self.UploadTestData()
self.GetCrashReports()
self.KillSimulators()
self.RemoveHomeDirectory()
def FindTestDocumentsDirectory(self, apps_dir):
"""Finds the test's Documents directory in the given Applications directory.
Args:
apps_dir: The Applications directory, containing app ID directories.
Returns:
The Documents directory, or None if it doesn't exist.
"""
for appid_dir in os.listdir(apps_dir):
appid_dir = os.path.join(apps_dir, appid_dir)
app_bundle = os.path.join(appid_dir, '%s.app' % self.app_name)
metadata_plist = os.path.join(
appid_dir, '.com.apple.mobile_container_manager.metadata.plist')
docs_dir = os.path.join(appid_dir, 'Documents')
if os.path.exists(docs_dir):
# iOS 7 app ID directories contain the app bundle. iOS 8 app ID
# directories contain a metadata plist with the CFBundleIdentifier.
if os.path.exists(app_bundle):
return docs_dir
elif os.path.exists(metadata_plist) and utils.call(
utils.PLIST_BUDDY,
'-c', 'Print:MCMMetadataIdentifier',
metadata_plist,
).stdout[0] == self.cfbundleid:
return docs_dir
self.Print('Could not find %s on the simulator.' % self.app_name)
def GetCrashReports(self):
# A crash report's naming scheme is [app]_[timestamp]_[hostname].crash.
# e.g. net_unittests_2014-05-13-150900_vm1-a1.crash.
crash_reports_dir = os.path.expanduser(os.path.join(
'~',
'Library',
'Logs',
'DiagnosticReports',
))
if os.path.exists(crash_reports_dir):
for crash_report in os.listdir(crash_reports_dir):
report_name, ext = os.path.splitext(crash_report)
if report_name.startswith(self.app_name) and ext == '.crash':
report_time = report_name[len(self.app_name) + 1:].split('_')[0]
# Timestamps are big-endian and therefore comparable this way.
if report_time > self.start_time:
with open(os.path.join(crash_reports_dir, crash_report)) as f:
self.summary['logs']['crash report (%s)' % report_time] = (
f.read().splitlines())
def GetLaunchCommand(self, test_filter=None, blacklist=False):
"""Returns the invocation command which is used to run the test.
Args:
test_filter: A list of tests to filter by, or None to mean all.
blacklist: Whether to blacklist the elements of test_filter or not. Only
works when test_filter is not None.
Returns:
A list whose elements are the args representing the command.
"""
app_path = os.path.join(self.built_dir, self.test_host_name + '.app/')
xctests_fullname = self.test_target_name + '.xctest'
xctest_path = os.path.join(app_path, 'PlugIns', xctests_fullname)
cmd = [
self.iossim_path,
'-d', self.platform,
'-s', self.version,
app_path,
xctest_path
]
for env_var in self.env_vars:
cmd.extend(['-e', env_var])
return cmd
@TestRunner.RequireTearDown
def Launch(self, *args, **kwargs):
"""Launches the test."""
self.SetUp()
result = self._Run(
self.GetLaunchCommand(), self.GetLaunchEnvironment(), *args, **kwargs)
if result.crashed and not result.crashed_test:
# If the app crashed, but there is no specific test which crashed,
# then the app must have failed to even start. Try one more time.
self.Print(
'%s appears to have crashed on startup. Retrying...' % self.app_name,
blank_lines=2,
time_to_sleep=5,
)
# Use a new home directory to launch a fresh simulator.
self.KillSimulators()
self.CreateNewHomeDirectory()
result = self._Run(
self.GetLaunchCommand(), self.GetLaunchEnvironment(), *args, **kwargs)
return self.RunAllTests(result, *args, **kwargs)
class DeviceXCTestRunner(XCTestRunner):
"""Class for running xctests on an iOS device."""
def __init__(
self,
app_path,
test_host,
test_project_dir,
xcode_version=None,
gs_bucket=None,
perf_bot_name=None,
perf_build_number=None,
perf_builder_name=None,
perf_master_name=None,
perf_revision=None,
perf_x_value=None,
test_args=None,
env_vars=None,
):
"""Initializes an instance of the SimulatorXCTestRunner class.
Args:
app_path: Full path to the compiled app to run.
test_host: Name of the compiled test host app to run tests.
test_project_dir: Directory of the dummy test project.
xcode_version: Version of Xcode to use.
gs_bucket: Google Storage bucket to upload test data to, or None if the
test data should not be uploaded.
perf_bot_name: Name of this bot as indicated to the perf dashboard.
perf_build_number: Build number to indicate to the perf dashboard.
perf_builder_name: Name of this builder as indicated to the perf
dashboard.
perf_master_name: Name of the master as indicated to the perf dashboard.
perf_revision: Revision to indicate to the perf dashboard.
perf_x_value: Value to use on the x axis for all data uploaded to the
perf dashboard.
test_args: Arguments to pass when launching the test.
env_vars: Environment variables to set when launching the test.
Raises:
DeviceDetectionError: If this machine does not have exactly one device
connected. Having more than one device connected causes problems when
trying to issue commands to any one device, which interfere with
installing and running the test app.
"""
super(DeviceXCTestRunner, self).__init__(
app_path,
test_host,
test_project_dir,
env_vars=env_vars,
gs_bucket=gs_bucket,
perf_bot_name=perf_bot_name,
perf_build_number=perf_build_number,
perf_builder_name=perf_builder_name,
perf_master_name=perf_master_name,
perf_revision=perf_revision,
perf_x_value=perf_x_value,
test_args=test_args,
xcode_version=xcode_version,
)
self.cfbundleid = utils.call(
utils.PLIST_BUDDY,
'-c', 'Print:CFBundleIdentifier',
os.path.join(self.app_path, 'Info.plist'),
).stdout[0]
call_result = utils.call('idevice_id', '--list')
if len(call_result.stdout) != 1:
raise DeviceDetectionError(len(call_result.stdout))
self.device_id = call_result.stdout[0]
def IsAppInstalled(self):
"""Returns True iff the app is installed on the device."""
# Prior to iOS 8, idevicefs would list apps with an @ prefix:
# e.g. $ idevicefs ls @
# @com.google.gtest.chromeiosunittests
# @com.google.gtest.ios_unittests
#
# On iOS 8, idevicefs omits the @:
# e.g. $ idevicefs ls @
# com.google.gtest.chromeiosunittests
# com.google.gtest.ios_unittests
return self.cfbundleid in [
app.lstrip('@') for app in utils.call('idevicefs', 'ls', '@').stdout]
def InstallApp(self):
"""Ensures the app is installed on the device."""
utils.call('ideviceinstaller', '--install', self.app_path)
if not self.IsAppInstalled():
raise AppInstallationError()
signal.signal(signal.SIGTERM, self.UninstallApp)
def UninstallApp(self, *args, **kwargs):
"""Ensures the app is removed from the device."""
utils.call('ideviceinstaller', '--uninstall', self.cfbundleid)
if self.IsAppInstalled():
raise AppUninstallationError()
signal.signal(signal.SIGTERM, signal.SIG_DFL)
def TearDown(self):
"""Uninstalls the app from the device."""
self.UploadTestData()
self.UninstallApp()
def GetLaunchCommand(self, test_filter=None, blacklist=False):
"""Returns the invocation command which is used to run the test.
Args:
test_filter: A list of tests to filter by, or None to mean all.
blacklist: Whether to blacklist the elements of test_filter or not. Only
works when test_filter is not None.
Returns:
A list whose elements are the args representing the command.
"""
built_dir = os.path.split(self.app_path)[0]
cmd = [
'xcodebuild', 'test-without-building',
'BUILT_PRODUCTS_DIR=%s' % built_dir,
'CONFIGURATION_BUILD_DIR=%s' % built_dir,
'-project', self.test_project_dir,
'-configuration', 'iphoneos',
'-scheme', 'TestProject',
'-destination','id=%s' % self.device_id,
'APP_TARGET_NAME=%s' % self.test_host_name,
'TEST_TARGET_NAME=%s' % self.test_target_name,
'NSUnbufferedIO=YES'
]
return cmd
@TestRunner.RequireTearDown
def Launch(self, *args, **kwargs):
"""Launches the test."""
self.InstallApp()
result = self._Run(
self.GetLaunchCommand(), self.GetLaunchEnvironment(), *args, **kwargs)
if result.crashed and not result.crashed_test:
# If the app crashed, but there is no specific test which crashed,
# then the app must have failed to even start. Try one more time.
self.Print(
'%s appears to have crashed on startup. Retrying...' % self.app_name,
blank_lines=2,
time_to_sleep=5,
)
# Uninstall and re-install the app.
self.UninstallApp()
self.InstallApp()
result = self._Run(
self.GetLaunchCommand(), self.GetLaunchEnvironment(), *args, **kwargs)
return self.RunAllTests(result, *args, **kwargs)
|
"""
pySlip demonstration program with user-selectable tiles.
Usage: pyslip_demo.py <options>
where <options> is zero or more of:
-d|--debug <level>
where <level> is either a numeric debug level in the range [0, 50] or
one of the symbolic debug level names:
NOTSET 0 nothing is logged (default)
DEBUG 10 everything is logged
INFO 20 less than DEBUG, informational debugging
WARNING 30 less than INFO, only non-fatal warnings
ERROR 40 less than WARNING
CRITICAL 50 less than ERROR
-h|--help
prints this help and stops
-x
turns on the wxPython InspectionTool
"""
import os
import sys
import copy
import getopt
import traceback
from functools import partial
try:
import wx
except ImportError:
msg = '*'*60 + '\nSorry, you must install wxPython\n' + '*'*60
print(msg)
sys.exit(1)
try:
import pyslip
import pyslip.gmt_local as tiles
import pyslip.log as log
except ImportError:
msg = '*'*60 + '\nSorry, you must install pySlip\n' + '*'*60
print(msg)
sys.exit(1)
# initialize the logging system
try:
log = log.Log('pyslip.log')
except AttributeError:
# already have a log file, ignore
pass
# get the bits of the demo program we need
from layer_control import LayerControl, EVT_ONOFF, EVT_SHOWONOFF, EVT_SELECTONOFF
from appstaticbox import AppStaticBox
from rotextctrl import ROTextCtrl
######
# Various demo constants
######
# demo name/version
DemoName = 'pySlip %s - Demonstration' % pyslip.__version__
DemoVersion = '4.0'
DemoWidth = 1000
DemoHeight = 800
# initial view level and position
InitViewLevel = 4
# this will eventually be selectable within the app
# a selection of cities, position from WikiPedia, etc
InitViewPosition = (0.0, 0.0) # "Null" Island
#InitViewPosition = (0.0, 51.48) # Greenwich, England
#InitViewPosition = (5.33, 60.389444) # Bergen, Norway
#InitViewPosition = (153.033333, -27.466667) # Brisbane, Australia
#InitViewPosition = (98.3786761, 7.8627326) # Phuket (ภูเก็ต), Thailand
#InitViewPosition = (151.209444, -33.859972) # Sydney, Australia
#InitViewPosition = (-77.036667, 38.895111) # Washington, DC, USA
#InitViewPosition = (132.455278, 34.385278) # Hiroshima, Japan
#InitViewPosition = (-8.008889, 31.63) # Marrakech (مراكش), Morocco
#InitViewPosition = (18.95, 69.65) # Tromsø, Norway
#InitViewPosition = (-70.933333, -53.166667) # Punta Arenas, Chile
#InitViewPosition = (168.3475, -46.413056) # Invercargill, New Zealand
#InitViewPosition = (-147.723056, 64.843611) # Fairbanks, AK, USA
#InitViewPosition = (103.851959, 1.290270) # Singapore
# levels on which various layers show
MRPointShowLevels = [3, 4]
MRImageShowLevels = [3, 4]
MRTextShowLevels = [3, 4]
MRPolyShowLevels = [3, 4]
MRPolylineShowLevels = [3, 4]
# the number of decimal places in a lon/lat display
LonLatPrecision = 3
# default deltas for various layer types
DefaultPointMapDelta = 40
DefaultPointViewDelta = 40
DefaultImageMapDelta = 40
DefaultImageViewDelta = 40
DefaultTextMapDelta = 40
DefaultTextViewDelta = 40
DefaultPolygonMapDelta = 40
DefaultPolygonViewDelta = 40
DefaultPolylineMapDelta = 40
DefaultPolylineViewDelta = 40
# image used for shipwrecks, glassy buttons, etc
ShipImg = 'graphics/shipwreck.png'
GlassyImg2 = 'graphics/glassy_button_2.png'
SelGlassyImg2 = 'graphics/selected_glassy_button_2.png'
GlassyImg3 = 'graphics/glassy_button_3.png'
SelGlassyImg3 = 'graphics/selected_glassy_button_3.png'
GlassyImg4 = 'graphics/glassy_button_4.png'
SelGlassyImg4 = 'graphics/selected_glassy_button_4.png'
GlassyImg5 = 'graphics/glassy_button_5.png'
SelGlassyImg5 = 'graphics/selected_glassy_button_5.png'
GlassyImg6 = 'graphics/glassy_button_6.png'
SelGlassyImg6 = 'graphics/selected_glassy_button_6.png'
# image used for the compass rose
CompassRoseGraphic = 'graphics/compass_rose.png'
# logging levels, symbolic to numeric mapping
LogSym2Num = {'CRITICAL': 50,
'ERROR': 40,
'WARNING': 30,
'INFO': 20,
'DEBUG': 10,
'NOTSET': 0}
# list of modules containing tile sources
# list of (<long_name>, <module_name>)
# the <long_name>s go into the Tileselect menu
Tilesets = [
('BlueMarble tiles', 'blue_marble'),
('GMT tiles', 'gmt_local'),
# ('ModestMaps tiles', 'modest_maps'), # can't access?
# ('MapQuest tiles', 'mapquest'), # can't access?
('OpenStreetMap tiles', 'open_street_map'),
('Stamen Toner tiles', 'stamen_toner'),
('Stamen Transport tiles', 'stamen_transport'),
('Stamen Watercolor tiles', 'stamen_watercolor'),
]
# index into Tilesets above to set default tileset: GMT tiles
DefaultTilesetIndex = 1
# some layout constants
HorizSpacer = 5
VertSpacer = 5
###############################################################################
# A small class to popup a moveable window.
###############################################################################
class DemoPopup(wx.PopupWindow):
"""A class for a simple popup window.
The popup window can be dragged with the left mouse button.
It is dismissed with a right mouse button click.
The basic idea comes from:
https://stackoverflow.com/questions/23415125/wxpython-popup-window-bound-to-a-wxbutton
"""
# padding size top/bottom/left/right
Padding = 20
def __init__(self, parent, style, text):
"""Constructor"""
super().__init__(parent, style)
panel = wx.Panel(self)
self.panel = panel
panel.SetBackgroundColour("LIGHT YELLOW")
st = wx.StaticText(panel, -1, text,
pos=(DemoPopup.Padding//2,DemoPopup.Padding//2))
sz = st.GetBestSize()
self.SetSize( (sz.width+DemoPopup.Padding, sz.height+DemoPopup.Padding) )
panel.SetSize( (sz.width+DemoPopup.Padding, sz.height+DemoPopup.Padding) )
panel.Bind(wx.EVT_LEFT_DOWN, self.OnMouseLeftDown)
panel.Bind(wx.EVT_MOTION, self.OnMouseMotion)
panel.Bind(wx.EVT_LEFT_UP, self.OnMouseLeftUp)
panel.Bind(wx.EVT_RIGHT_UP, self.OnRightUp)
st.Bind(wx.EVT_LEFT_DOWN, self.OnMouseLeftDown)
st.Bind(wx.EVT_MOTION, self.OnMouseMotion)
st.Bind(wx.EVT_LEFT_UP, self.OnMouseLeftUp)
st.Bind(wx.EVT_RIGHT_UP, self.OnRightUp)
wx.CallAfter(self.Refresh)
def OnMouseLeftDown(self, evt):
self.Refresh()
self.ldPos = evt.GetEventObject().ClientToScreen(evt.GetPosition())
self.wPos = self.ClientToScreen((0,0))
self.panel.CaptureMouse()
def OnMouseMotion(self, evt):
if evt.Dragging() and evt.LeftIsDown():
dPos = evt.GetEventObject().ClientToScreen(evt.GetPosition())
nPos = (self.wPos.x + (dPos.x - self.ldPos.x),
self.wPos.y + (dPos.y - self.ldPos.y))
self.Move(nPos)
def OnMouseLeftUp(self, evt):
if self.panel.HasCapture():
self.panel.ReleaseMouse()
def OnRightUp(self, evt):
self.Show(False)
self.Destroy()
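# A minimal usage sketch (illustrative only; 'frame' stands for any existing
# wx.Frame, it is not a name defined in this module):
#   popup = DemoPopup(frame, wx.SIMPLE_BORDER, 'Hello, pySlip!')
#   popup.Position(wx.GetMousePosition(), (0, 0))
#   popup.Show(True)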
###############################################################################
# A small class to manage tileset sources.
###############################################################################
class TilesetManager:
"""A class to manage multiple tileset objects.
ts = TilesetManager(source_list) # 'source_list' is list of tileset source modules
ts.get_tile_source(index) # 'index' into 'source_list' of source to use
Features 'lazy' importing, only imports when the tileset is used
the first time.
"""
def __init__(self, mod_list):
"""Create a set of tile sources.
mod_list list of module filenames to manage
The list is something like: ['open_street_map.py', 'gmt_local.py']
We can access tilesets using the index of the module in the 'mod_list'.
"""
self.modules = []
for fname in mod_list:
self.modules.append([fname, os.path.splitext(fname)[0], None])
def get_tile_source(self, mod_index):
"""Get an open tileset source for given name.
mod_index index into self.modules of tileset to use
"""
tileset_data = self.modules[mod_index]
(filename, modulename, tile_obj) = tileset_data
if not tile_obj:
# have never used this tileset, import and instantiate
obj = __import__('pyslip', globals(), locals(), [modulename])
tileset = getattr(obj, modulename)
tile_obj = tileset.Tiles()
tileset_data[2] = tile_obj
return tile_obj
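# A minimal usage sketch (illustrative only; the module names must be
# importable from the 'pyslip' package, as the Tilesets entries above are):
#   tsm = TilesetManager(['open_street_map', 'gmt_local'])
#   tiles = tsm.get_tile_source(1)   # first use imports pyslip.gmt_local
#   tiles = tsm.get_tile_source(1)   # later calls reuse the cached Tiles() object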
###############################################################################
# The main application frame
###############################################################################
class AppFrame(wx.Frame):
def __init__(self):
super().__init__(None, size=(DemoWidth, DemoHeight),
title='%s %s' % (DemoName, DemoVersion))
# set locale to ENGLISH, object must persist
self.locale = wx.Locale(wx.LANGUAGE_ENGLISH)
# initialize the tileset handler
self.tileset_manager = self.init_tiles()
self.tile_source = self.tileset_manager.get_tile_source(DefaultTilesetIndex)
# start the GUI
self.SetMinSize((DemoWidth, DemoHeight))
self.panel = wx.Panel(self, wx.ID_ANY)
self.panel.SetBackgroundColour(wx.WHITE)
self.panel.ClearBackground()
# build the GUI
self.make_gui(self.panel)
# do initialisation stuff - all the application stuff
self.initData()
# create tileset menuitems
self.initMenu()
# create select event dispatch directory
self.demo_select_dispatch = {}
# selected point, if not None
self.point_layer = None
# variables referencing various layers
self.sel_text_highlight = None
# finally, bind events to handlers
self.pyslip.Bind(pyslip.EVT_PYSLIP_LEVEL, self.level_change_event)
self.pyslip.Bind(pyslip.EVT_PYSLIP_POSITION, self.mouse_posn_event)
self.pyslip.Bind(pyslip.EVT_PYSLIP_SELECT, self.select_event)
self.pyslip.Bind(pyslip.EVT_PYSLIP_BOXSELECT, self.select_event)
# set the size of the demo window, etc
self.Centre()
self.Show()
# set initial view position
wx.CallLater(25, self.final_setup, InitViewLevel, InitViewPosition)
def onTilesetSelect(self, event):
"""User selected a tileset from the menu.
event the menu select event
"""
self.change_tileset(event.GetId())
#####
# Build the GUI
#####
def make_gui(self, parent):
"""Create application GUI."""
# start application layout
all_display = wx.BoxSizer(wx.HORIZONTAL)
parent.SetSizer(all_display)
# put map view in left of horizontal box
self.pyslip = pyslip.pySlip(parent, tile_src=self.tile_source,
style=wx.SIMPLE_BORDER)
all_display.Add(self.pyslip, proportion=1, flag=wx.EXPAND)
# add controls at right
controls = self.make_gui_controls(parent)
all_display.Add(controls, proportion=0)
parent.SetSizerAndFit(all_display)
def make_gui_controls(self, parent):
"""Build the 'controls' part of the GUI
parent reference to parent
Returns reference to containing sizer object.
Should really use GridBagSizer layout.
"""
# all controls in vertical box sizer
controls = wx.BoxSizer(wx.VERTICAL)
# put level and position into one 'controls' position
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
level = self.make_gui_level(parent)
tmp.Add(level, proportion=0, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
mouse = self.make_gui_mouse(parent)
tmp.Add(mouse, proportion=0, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for map-relative points layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
lc_point = self.make_gui_point(parent)
tmp.Add(lc_point, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for view-relative points layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
lc_point_v = self.make_gui_point_view(parent)
tmp.Add(lc_point_v, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for map-relative image layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
image = self.make_gui_image(parent)
tmp.Add(image, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for view-relative image layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
image_view = self.make_gui_image_view(parent)
tmp.Add(image_view, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for map-relative text layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
lc_text = self.make_gui_text(parent)
tmp.Add(lc_text, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for view-relative text layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
lc_text_v = self.make_gui_text_view(parent)
tmp.Add(lc_text_v, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for map-relative polygon layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
lc_poly = self.make_gui_poly(parent)
tmp.Add(lc_poly, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for view-relative polygon layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
lc_poly_v = self.make_gui_poly_view(parent)
tmp.Add(lc_poly_v, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for map-relative polyline layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
lc_poll = self.make_gui_polyline(parent)
tmp.Add(lc_poll, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
controls.AddSpacer(VertSpacer)
# controls for view-relative polyline layer
tmp = wx.BoxSizer(wx.HORIZONTAL)
tmp.AddSpacer(HorizSpacer)
lc_poll_v = self.make_gui_polyline_view(parent)
tmp.Add(lc_poll_v, proportion=1, flag=wx.EXPAND|wx.ALL)
tmp.AddSpacer(HorizSpacer)
controls.Add(tmp, proportion=0, flag=wx.EXPAND|wx.ALL)
return controls
def initMenu(self):
"""Add the 'Tilesets' menu to the app."""
# create tileset menuitems
menuBar = wx.MenuBar()
tile_menu = wx.Menu()
# this dict: id -> (display_name, module_name, tileset_obj)
self.id2tiledata = {}
self.default_tileset_name = None
# create the tileset menuitems, add to menu and connect to handler
for (tile_index, (name, module_name)) in enumerate(Tilesets):
new_id = wx.NewId()
tile_menu.Append(new_id, name, name, wx.ITEM_RADIO)
self.Bind(wx.EVT_MENU, self.onTilesetSelect, id=new_id)
self.id2tiledata[new_id] = (name, module_name, None)
# self.name2guiid[name] = new_id
if tile_index == DefaultTilesetIndex:
self.default_tileset_name = name
tile_menu.Check(new_id, True)
if self.default_tileset_name is None:
raise Exception('Bad DefaultTilesetIndex (%s) or Tilesets (%s)'
% (DefaultTilesetIndex, str(Tilesets)))
menuBar.Append(tile_menu, "&Tileset")
self.SetMenuBar(menuBar)
def init_tiles(self):
"""Initialize the tileset manager.
Return a reference to the manager object.
"""
modules = []
for (action_id, (name, module_name)) in enumerate(Tilesets):
modules.append(module_name)
return TilesetManager(modules)
def change_tileset(self, menu_id):
"""Handle a tileset selection.
menu_id the index in self.id2tiledata of the required tileset
"""
# get information for the required tileset
try:
(name, module_name, new_tile_obj) = self.id2tiledata[menu_id]
except KeyError:
# badly formed self.id2tiledata element
raise RuntimeError('self.id2tiledata is badly formed:\n%s'
% str(self.id2tiledata))
if new_tile_obj is None:
# haven't seen this tileset before, import and instantiate
obj = __import__('pyslip', globals(), locals(), [module_name])
tileset = getattr(obj, module_name)
tile_name = tileset.TilesetName
new_tile_obj = tileset.Tiles()
# update the self.id2tiledata element
self.id2tiledata[menu_id] = (name, module_name, new_tile_obj)
self.pyslip.ChangeTileset(new_tile_obj)
def onClose(self):
"""Application is closing."""
pass
#self.Close(True)
def make_gui_level(self, parent):
"""Build the control that shows the level.
parent reference to parent
Returns reference to containing sizer object.
"""
# create objects
txt = wx.StaticText(parent, wx.ID_ANY, 'Level: ')
self.map_level = ROTextCtrl(parent, '', size=(30,-1),
tooltip='Shows map zoom level')
# lay out the controls
sb = AppStaticBox(parent, 'Map level')
box = wx.StaticBoxSizer(sb, orient=wx.HORIZONTAL)
box.Add(txt, flag=(wx.ALIGN_CENTER_VERTICAL
|wx.ALIGN_RIGHT|wx.LEFT))
box.Add(self.map_level, proportion=0,
flag=wx.LEFT|wx.ALIGN_RIGHT|wx.ALIGN_CENTER_VERTICAL)
return box
def make_gui_mouse(self, parent):
"""Build the mouse part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create objects
txt = wx.StaticText(parent, wx.ID_ANY, 'Lon/Lat: ')
self.mouse_position = ROTextCtrl(parent, '', size=(120,-1),
tooltip=('Shows the mouse '
'longitude and latitude '
'on the map'))
# lay out the controls
sb = AppStaticBox(parent, 'Mouse position')
box = wx.StaticBoxSizer(sb, orient=wx.HORIZONTAL)
box.Add(txt, flag=(wx.ALIGN_CENTER_VERTICAL
|wx.ALIGN_RIGHT|wx.LEFT))
box.Add(self.mouse_position, proportion=0,
flag=wx.RIGHT|wx.TOP|wx.BOTTOM)
return box
def make_gui_point(self, parent):
"""Build the points part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
point_obj = LayerControl(parent, 'Points, map relative %s'
% str(MRPointShowLevels),
selectable=True)
# tie to event handler(s)
point_obj.Bind(EVT_ONOFF, self.pointOnOff)
point_obj.Bind(EVT_SHOWONOFF, self.pointShowOnOff)
point_obj.Bind(EVT_SELECTONOFF, self.pointSelectOnOff)
return point_obj
def make_gui_point_view(self, parent):
"""Build the view-relative points part of the GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
point_obj = LayerControl(parent, 'Points, view relative',
selectable=True)
# tie to event handler(s)
point_obj.Bind(EVT_ONOFF, self.pointViewOnOff)
point_obj.Bind(EVT_SHOWONOFF, self.pointViewShowOnOff)
point_obj.Bind(EVT_SELECTONOFF, self.pointViewSelectOnOff)
return point_obj
def make_gui_image(self, parent):
"""Build the image part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
image_obj = LayerControl(parent, 'Images, map relative %s'
% str(MRImageShowLevels),
selectable=True)
# tie to event handler(s)
image_obj.Bind(EVT_ONOFF, self.imageOnOff)
image_obj.Bind(EVT_SHOWONOFF, self.imageShowOnOff)
image_obj.Bind(EVT_SELECTONOFF, self.imageSelectOnOff)
return image_obj
def make_gui_image_view(self, parent):
"""Build the view-relative image part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
image_obj = LayerControl(parent, 'Images, view relative',
selectable=True)
# tie to event handler(s)
image_obj.Bind(EVT_ONOFF, self.imageViewOnOff)
image_obj.Bind(EVT_SHOWONOFF, self.imageViewShowOnOff)
image_obj.Bind(EVT_SELECTONOFF, self.imageViewSelectOnOff)
return image_obj
def make_gui_text(self, parent):
"""Build the map-relative text part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
text_obj = LayerControl(parent,
'Text, map relative %s' % str(MRTextShowLevels),
selectable=True, editable=False)
# tie to event handler(s)
text_obj.Bind(EVT_ONOFF, self.textOnOff)
text_obj.Bind(EVT_SHOWONOFF, self.textShowOnOff)
text_obj.Bind(EVT_SELECTONOFF, self.textSelectOnOff)
return text_obj
def make_gui_text_view(self, parent):
"""Build the view-relative text part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
text_view_obj = LayerControl(parent, 'Text, view relative',
selectable=True)
# tie to event handler(s)
text_view_obj.Bind(EVT_ONOFF, self.textViewOnOff)
text_view_obj.Bind(EVT_SHOWONOFF, self.textViewShowOnOff)
text_view_obj.Bind(EVT_SELECTONOFF, self.textViewSelectOnOff)
return text_view_obj
def make_gui_poly(self, parent):
"""Build the map-relative polygon part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
poly_obj = LayerControl(parent,
'Polygon, map relative %s'
% str(MRPolyShowLevels),
selectable=True)
# tie to event handler(s)
poly_obj.Bind(EVT_ONOFF, self.polyOnOff)
poly_obj.Bind(EVT_SHOWONOFF, self.polyShowOnOff)
poly_obj.Bind(EVT_SELECTONOFF, self.polySelectOnOff)
return poly_obj
def make_gui_poly_view(self, parent):
"""Build the view-relative polygon part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
poly_view_obj = LayerControl(parent, 'Polygon, view relative',
selectable=True)
# tie to event handler(s)
poly_view_obj.Bind(EVT_ONOFF, self.polyViewOnOff)
poly_view_obj.Bind(EVT_SHOWONOFF, self.polyViewShowOnOff)
poly_view_obj.Bind(EVT_SELECTONOFF, self.polyViewSelectOnOff)
return poly_view_obj
def make_gui_polyline(self, parent):
"""Build the map-relative polyline part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
poly_obj = LayerControl(parent,
'Polyline, map relative %s'
% str(MRPolylineShowLevels),
selectable=True)
# tie to event handler(s)
poly_obj.Bind(EVT_ONOFF, self.polylineOnOff)
poly_obj.Bind(EVT_SHOWONOFF, self.polylineShowOnOff)
poly_obj.Bind(EVT_SELECTONOFF, self.polylineSelectOnOff)
return poly_obj
def make_gui_polyline_view(self, parent):
"""Build the view-relative polyline part of the controls part of GUI.
parent reference to parent
Returns reference to containing sizer object.
"""
# create widgets
poly_view_obj = LayerControl(parent, 'Polyline, view relative',
selectable=True)
# tie to event handler(s)
poly_view_obj.Bind(EVT_ONOFF, self.polylineViewOnOff)
poly_view_obj.Bind(EVT_SHOWONOFF, self.polylineViewShowOnOff)
poly_view_obj.Bind(EVT_SELECTONOFF, self.polylineViewSelectOnOff)
return poly_view_obj
######
# demo control event handlers
######
##### map-relative point layer
def pointOnOff(self, event):
"""Handle OnOff event for point layer control."""
if event.state:
self.point_layer = \
self.pyslip.AddPointLayer(PointData, map_rel=True,
colour=PointDataColour, radius=3,
# offset points to exercise placement
offset_x=0, offset_y=0, visible=True,
show_levels=MRPointShowLevels,
delta=DefaultPointMapDelta,
placement='nw', # check placement
name='<pt_layer>')
else:
self.pyslip.DeleteLayer(self.point_layer)
self.point_layer = None
if self.sel_point_layer:
self.pyslip.DeleteLayer(self.sel_point_layer)
self.sel_point_layer = None
self.sel_point = None
def pointShowOnOff(self, event):
"""Handle ShowOnOff event for point layer control."""
if event.state:
self.pyslip.ShowLayer(self.point_layer)
if self.sel_point_layer:
self.pyslip.ShowLayer(self.sel_point_layer)
else:
self.pyslip.HideLayer(self.point_layer)
if self.sel_point_layer:
self.pyslip.HideLayer(self.sel_point_layer)
def pointSelectOnOff(self, event):
"""Handle SelectOnOff event for point layer control."""
layer = self.point_layer
if event.state:
self.add_select_handler(layer, self.pointSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def pointSelect(self, event):
"""Handle map-relative point select exception from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
layer_id ID of the layer the select occurred on
selection [list of] tuple (xgeo,ygeo) of selected point
(if None then no point(s) selected)
data userdata object of the selected point
button indicates the mouse button used
The selection could be a single or box select.
The point select is designed so that selecting point(s) turns them on, and
selecting the same point(s) again turns them off. Clicking away from the
already selected point doesn't remove previously selected point(s) if
nothing new is selected. We do this to show that selection/deselection of
point(s) is up to the user, not the widget.
This code also shows how to combine handling of EventSelect and
EventBoxSelect events.
"""
if event.selection == self.sel_point:
# already have point selected, just deselect it
self.pyslip.DeleteLayer(self.sel_point_layer)
self.sel_point_layer = None
self.sel_point = None
elif event.selection:
if event.button == pyslip.MouseLeft:
# some other point(s) selected, delete previous selection, if any
if self.sel_point_layer:
self.pyslip.DeleteLayer(self.sel_point_layer)
# remember selection (need copy as highlight modifies attributes)
self.sel_point = copy.deepcopy(event.selection)
# choose different highlight colour for different type of selection
selcolour = '#00ffff'
if event.type == pyslip.EventSelect:
selcolour = '#0000ff'
# get selected points into form for display layer
# delete 'colour' and 'radius' attributes as we want different values
highlight = []
for (x, y, d) in event.selection:
del d['colour'] # AddLayer...() ensures keys exist
del d['radius']
highlight.append((x, y, d))
# layer with highlight of selected points
self.sel_point_layer = \
self.pyslip.AddPointLayer(highlight, map_rel=True,
colour=selcolour,
radius=5, visible=True,
show_levels=MRPointShowLevels,
name='<sel_pt_layer>')
# make sure highlight layer is BELOW selected layer
self.pyslip.PlaceLayerBelowLayer(self.sel_point_layer,
self.point_layer)
elif event.button == pyslip.MouseMiddle:
log('SELECT event using middle mouse button at GEO coords (%.2f, %.2f)'
% (event.selection[0][0], event.selection[0][1]))
elif event.button == pyslip.MouseRight:
# RIGHT button, do a context popup, only a single point selected
msg = ('Point at GEO coords (%.2f, %.2f)'
% (event.selection[0][0], event.selection[0][1]))
self.show_popup(msg, event.vposn)
# else: we ignore an empty selection
return True
##### view-relative point layer
def pointViewOnOff(self, event):
"""Handle OnOff event for point view layer control."""
if event.state:
self.point_view_layer = \
self.pyslip.AddPointLayer(PointViewData, map_rel=False,
placement=PointViewDataPlacement,
colour=PointViewDataColour, radius=1,
delta=DefaultPointViewDelta,
visible=True,
name='<point_view_layer>')
else:
self.pyslip.DeleteLayer(self.point_view_layer)
self.point_view_layer = None
if self.sel_point_view_layer:
self.pyslip.DeleteLayer(self.sel_point_view_layer)
self.sel_point_view_layer = None
self.sel_point_view = None
def pointViewShowOnOff(self, event):
"""Handle ShowOnOff event for point view layer control."""
if event.state:
self.pyslip.ShowLayer(self.point_view_layer)
if self.sel_point_view_layer:
self.pyslip.ShowLayer(self.sel_point_view_layer)
else:
self.pyslip.HideLayer(self.point_view_layer)
if self.sel_point_view_layer:
self.pyslip.HideLayer(self.sel_point_view_layer)
def pointViewSelectOnOff(self, event):
"""Handle SelectOnOff event for point view layer control."""
layer = self.point_view_layer
if event.state:
self.add_select_handler(layer, self.pointViewSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def pointViewSelect(self, event):
"""Handle view-relative point select exception from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
selection [list of] tuple (xgeo,ygeo) of selected point
(if None then no point(s) selected)
data userdata object of the selected point
The selection could be a single or box select.
The point select is designed so that clicking a point turns it on; any other
select event then turns that point off, whether or not there is a new selection
and whether or not the same point is selected.
"""
# if there is a previous selection, remove it
if self.sel_point_view_layer:
self.pyslip.DeleteLayer(self.sel_point_view_layer)
self.sel_point_view_layer = None
if event.selection and event.selection != self.sel_point_view:
# it's a box selection
self.sel_point_view = event.selection
# get selected points into form for display layer
highlight = []
for (x, y, d) in event.selection:
del d['colour']
del d['radius']
highlight.append((x, y, d))
# assume a box selection
self.sel_point_view_layer = \
self.pyslip.AddPointLayer(highlight, map_rel=False,
placement='se',
colour='#0000ff',
radius=3, visible=True,
name='<sel_pt_view_layer>')
else:
self.sel_point_view = None
return True
##### map-relative image layer
def imageOnOff(self, event):
"""Handle OnOff event for map-relative image layer control."""
if event.state:
self.image_layer = \
self.pyslip.AddImageLayer(ImageData, map_rel=True,
visible=True,
delta=DefaultImageMapDelta,
show_levels=MRImageShowLevels,
name='<image_layer>')
else:
self.pyslip.DeleteLayer(self.image_layer)
self.image_layer = None
if self.sel_image_layer:
self.pyslip.DeleteLayer(self.sel_image_layer)
self.sel_image_layer = None
self.sel_image = None
def imageShowOnOff(self, event):
"""Handle ShowOnOff event for image layer control."""
if event.state:
self.pyslip.ShowLayer(self.image_layer)
if self.sel_image_layer:
self.pyslip.ShowLayer(self.sel_image_layer)
else:
self.pyslip.HideLayer(self.image_layer)
if self.sel_image_layer:
self.pyslip.HideLayer(self.sel_image_layer)
def imageSelectOnOff(self, event):
"""Handle SelectOnOff event for image layer control."""
layer = self.image_layer
if event.state:
self.add_select_handler(layer, self.imageSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def imageSelect(self, event):
"""Select event from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
selection [list of] tuple (xgeo,ygeo) of selected point
(if None then no point(s) selected)
data userdata object of the selected point
The selection could be a single or box select.
"""
#relsel = event.relsel
selection = event.selection
# select again, turn selection off
if selection == self.sel_image:
self.pyslip.DeleteLayer(self.sel_image_layer)
self.sel_image_layer = self.sel_image = None
elif selection:
# new image selected, show highlight
if self.sel_image_layer:
self.pyslip.DeleteLayer(self.sel_image_layer)
self.sel_image = selection
# get selected points into form for display layer
new_points = []
for (x, y, f, d) in selection:
del d['colour']
del d['radius']
new_points.append((x, y, d))
self.sel_image_layer = \
self.pyslip.AddPointLayer(new_points, map_rel=True,
colour='#0000ff',
radius=5, visible=True,
show_levels=[3,4],
name='<sel_pt_layer>')
self.pyslip.PlaceLayerBelowLayer(self.sel_image_layer,
self.image_layer)
return True
def imageBSelect(self, id, selection=None):
"""Select event from the widget."""
# remove any previous selection
if self.sel_image_layer:
self.pyslip.DeleteLayer(self.sel_image_layer)
self.sel_image_layer = None
if selection:
# get selected points into form for display layer
points = []
for (x, y, f, d) in selection:
del d['colour']
del d['radius']
points.append((x, y, d))
self.sel_image_layer = \
self.pyslip.AddPointLayer(points, map_rel=True,
colour='#e0e0e0',
radius=13, visible=True,
show_levels=[3,4],
name='<boxsel_img_layer>')
self.pyslip.PlaceLayerBelowLayer(self.sel_image_layer,
self.image_layer)
return True
##### view-relative image layer
def imageViewOnOff(self, event):
"""Handle OnOff event for view-relative image layer control.
event      the state of the layer control master checkbox
"""
if event.state:
self.image_view_layer = \
self.pyslip.AddImageLayer(ImageViewData, map_rel=False,
delta=DefaultImageViewDelta,
visible=True,
name='<image_view_layer>')
else:
self.pyslip.DeleteLayer(self.image_view_layer)
self.image_view_layer = None
if self.sel_image_view_layer:
self.pyslip.DeleteLayer(self.sel_image_view_layer)
self.sel_image_view_layer = None
if self.sel_imagepoint_view_layer:
self.pyslip.DeleteLayer(self.sel_imagepoint_view_layer)
self.sel_imagepoint_view_layer = None
def imageViewShowOnOff(self, event):
"""Handle ShowOnOff event for image layer control."""
if event.state:
self.pyslip.ShowLayer(self.image_view_layer)
if self.sel_image_view_layer:
self.pyslip.ShowLayer(self.sel_image_view_layer)
if self.sel_imagepoint_view_layer:
self.pyslip.ShowLayer(self.sel_imagepoint_view_layer)
else:
self.pyslip.HideLayer(self.image_view_layer)
if self.sel_image_view_layer:
self.pyslip.HideLayer(self.sel_image_view_layer)
if self.sel_imagepoint_view_layer:
self.pyslip.HideLayer(self.sel_imagepoint_view_layer)
def imageViewSelectOnOff(self, event):
"""Handle SelectOnOff event for image layer control."""
layer = self.image_view_layer
if event.state:
self.add_select_handler(layer, self.imageViewSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def imageViewSelect(self, event):
"""View-relative image select event from the widget.
event the event that contains these attributes:
selection [list of] tuple (xgeo,ygeo) of selected point
(if None then no point(s) selected)
relsel relative position of single point select,
None if box select
The selection could be a single or box select.
The selection mode is different here. An empty selection will remove
any current selection. This shows the flexibility that user code
can implement.
The code below doesn't assume a placement of the selected image; it
figures out the correct position of the 'highlight' layers. This helps
with debugging, as we can move the compass rose anywhere we like.
"""
selection = event.selection
relsel = event.relsel # None if box select
# only one image selectable, remove old selections (if any)
if self.sel_image_view_layer:
self.pyslip.DeleteLayer(self.sel_image_view_layer)
self.sel_image_view_layer = None
if self.sel_imagepoint_view_layer:
self.pyslip.DeleteLayer(self.sel_imagepoint_view_layer)
self.sel_imagepoint_view_layer = None
if selection:
# figure out compass rose attributes
attr_dict = ImageViewData[0][3]
img_placement = attr_dict['placement']
self.sel_imagepoint_view_layer = None
if relsel:
# unpack event relative selection point
(sel_x, sel_y) = relsel # select relative point in image
# FIXME This should be cleaner, user shouldn't have to know internal structure
# FIXME or fiddle with placement perturbations
# add selection point
CR_Height2 = CR_Height//2
CR_Width2 = CR_Width//2
point_place_coords = {'ne': '(sel_x - CR_Width, sel_y)',
'ce': '(sel_x - CR_Width, sel_y - CR_Height2)',
'se': '(sel_x - CR_Width, sel_y - CR_Height)',
'cs': '(sel_x - CR_Width2, sel_y - CR_Height)',
'sw': '(sel_x, sel_y - CR_Height)',
'cw': '(sel_x, sel_y - CR_Height/2.0)',
'nw': '(sel_x, sel_y)',
'cn': '(sel_x - CR_Width2, sel_y)',
'cc': '(sel_x - CR_Width2, sel_y - CR_Height2)',
'': '(sel_x, sel_y)',
None: '(sel_x, sel_y)',
}
for (key, code) in point_place_coords.items():
point_place_coords[key] = compile(code, '<string>', mode='eval')
point = eval(point_place_coords[img_placement])
self.sel_imagepoint_view_layer = \
self.pyslip.AddPointLayer((point,), map_rel=False,
colour='green',
radius=5, visible=True,
placement=img_placement,
name='<sel_image_view_point>')
# add polygon outline around image
p_dict = {'placement': img_placement, 'width': 3, 'colour': 'green', 'closed': True}
poly_place_coords = {'ne': '(((-CR_Width,0),(0,0),(0,CR_Height),(-CR_Width,CR_Height)),p_dict)',
'ce': '(((-CR_Width,-CR_Height2),(0,-CR_Height2),(0,CR_Height2),(-CR_Width,CR_Height2)),p_dict)',
'se': '(((-CR_Width,-CR_Height),(0,-CR_Height),(0,0),(-CR_Width,0)),p_dict)',
'cs': '(((-CR_Width2,-CR_Height),(CR_Width2,-CR_Height),(CR_Width2,0),(-CR_Width2,0)),p_dict)',
'sw': '(((0,-CR_Height),(CR_Width,-CR_Height),(CR_Width,0),(0,0)),p_dict)',
'cw': '(((0,-CR_Height2),(CR_Width,-CR_Height2),(CR_Width,CR_Height2),(0,CR_Height2)),p_dict)',
'nw': '(((0,0),(CR_Width,0),(CR_Width,CR_Height),(0,CR_Height)),p_dict)',
'cn': '(((-CR_Width2,0),(CR_Width2,0),(CR_Width2,CR_Height),(-CR_Width2,CR_Height)),p_dict)',
'cc': '(((-CR_Width2,-CR_Height2),(CR_Width2,-CR_Height2),(CR_Width2,CR_Height2),(-CR_Width2,CR_Height2)),p_dict)',
'': '(((x, y),(x+CR_Width,y),(x+CR_Width,y+CR_Height),(x,y+CR_Height)),p_dict)',
None: '(((x, y),(x+CR_Width,y),(x+CR_Width,y+CR_Height),(x,y+CR_Height)),p_dict)',
}
for (key, code) in poly_place_coords.items():
poly_place_coords[key] = compile(code, '<string>', mode='eval')
pdata = eval(poly_place_coords[img_placement])
self.sel_image_view_layer = \
self.pyslip.AddPolygonLayer((pdata,), map_rel=False,
name='<sel_image_view_outline>',
)
return True
##### map-relative text layer
def textOnOff(self, event):
"""Handle OnOff event for map-relative text layer control."""
if event.state:
self.text_layer = \
self.pyslip.AddTextLayer(TextData, map_rel=True,
name='<text_layer>', visible=True,
delta=DefaultTextMapDelta,
show_levels=MRTextShowLevels,
placement='ne')
else:
self.pyslip.DeleteLayer(self.text_layer)
if self.sel_text_layer:
self.pyslip.DeleteLayer(self.sel_text_layer)
self.sel_text_layer = None
self.sel_text_highlight = None
def textShowOnOff(self, event):
"""Handle ShowOnOff event for text layer control."""
if event.state:
if self.text_layer:
self.pyslip.ShowLayer(self.text_layer)
if self.sel_text_layer:
self.pyslip.ShowLayer(self.sel_text_layer)
else:
if self.text_layer:
self.pyslip.HideLayer(self.text_layer)
if self.sel_text_layer:
self.pyslip.HideLayer(self.sel_text_layer)
def textSelectOnOff(self, event):
"""Handle SelectOnOff event for text layer control."""
layer = self.text_layer
if event.state:
self.add_select_handler(layer, self.textSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def textSelect(self, event):
"""Map-relative text select event from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
selection [list of] tuple (xgeo,ygeo) of selected point
(if None then no point(s) selected)
The selection could be a single or box select.
The selection mode here is more standard: an empty select turns point(s)
off, and reselecting already-selected points leaves them selected.
"""
selection = event.selection
if self.sel_text_layer:
# turn previously selected point(s) off
self.pyslip.DeleteLayer(self.sel_text_layer)
self.sel_text_layer = None
if selection:
# get selected points into form for display layer
points = []
for (x, y, t, d) in selection:
del d['colour'] # remove point attributes, want different
del d['radius']
del d['offset_x'] # remove offsets, we want point not text
del d['offset_y']
points.append((x, y, d))
self.sel_text_layer = \
self.pyslip.AddPointLayer(points, map_rel=True,
colour='#0000ff',
radius=5, visible=True,
show_levels=MRTextShowLevels,
name='<sel_text_layer>')
self.pyslip.PlaceLayerBelowLayer(self.sel_text_layer,
self.text_layer)
return True
##### view-relative text layer
def textViewOnOff(self, event):
"""Handle OnOff event for view-relative text layer control."""
if event.state:
self.text_view_layer = \
self.pyslip.AddTextLayer(TextViewData, map_rel=False,
name='<text_view_layer>',
delta=DefaultTextViewDelta,
placement=TextViewDataPlace,
visible=True,
fontsize=24, textcolour='#0000ff',
offset_x=TextViewDataOffX,
offset_y=TextViewDataOffY)
else:
self.pyslip.DeleteLayer(self.text_view_layer)
self.text_view_layer = None
if self.sel_text_view_layer:
self.pyslip.DeleteLayer(self.sel_text_view_layer)
self.sel_text_view_layer = None
def textViewShowOnOff(self, event):
"""Handle ShowOnOff event for view text layer control."""
if event.state:
self.pyslip.ShowLayer(self.text_view_layer)
if self.sel_text_view_layer:
self.pyslip.ShowLayer(self.sel_text_view_layer)
else:
self.pyslip.HideLayer(self.text_view_layer)
if self.sel_text_view_layer:
self.pyslip.HideLayer(self.sel_text_view_layer)
def textViewSelectOnOff(self, event):
"""Handle SelectOnOff event for view text layer control."""
layer = self.text_view_layer
if event.state:
self.add_select_handler(layer, self.textViewSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def textViewSelect(self, event):
"""View-relative text select event from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
selection [list of] tuple (xgeo,ygeo) of selected point
(if None then no point(s) selected)
The selection could be a single or box select.
The selection mode here is more standard: an empty select turns point(s)
off, and reselecting already-selected points leaves them selected.
"""
selection = event.selection
# turn off any existing selection
if self.sel_text_view_layer:
self.pyslip.DeleteLayer(self.sel_text_view_layer)
self.sel_text_view_layer = None
if selection:
# get selected points into form for point display layer
points = []
for (x, y, t, d) in selection:
del d['colour'] # want to override colour, radius
del d['radius']
points.append((x, y, d))
self.sel_text_view_layer = \
self.pyslip.AddPointLayer(points, map_rel=False,
colour='black',
radius=5, visible=True,
show_levels=MRTextShowLevels,
name='<sel_text_view_layer>')
self.pyslip.PlaceLayerBelowLayer(self.sel_text_view_layer,
self.text_view_layer)
return True
##### map-relative polygon layer
def polyOnOff(self, event):
"""Handle OnOff event for map-relative polygon layer control."""
if event.state:
self.poly_layer = \
self.pyslip.AddPolygonLayer(PolyData, map_rel=True,
visible=True,
delta=DefaultPolygonMapDelta,
show_levels=MRPolyShowLevels,
name='<poly_layer>')
else:
self.pyslip.DeleteLayer(self.poly_layer)
self.poly_layer = None
if self.sel_poly_layer:
self.pyslip.DeleteLayer(self.sel_poly_layer)
self.sel_poly_layer = None
self.sel_poly_point = None
def polyShowOnOff(self, event):
"""Handle ShowOnOff event for polygon layer control."""
if event.state:
self.pyslip.ShowLayer(self.poly_layer)
if self.sel_poly_layer:
self.pyslip.ShowLayer(self.sel_poly_layer)
else:
self.pyslip.HideLayer(self.poly_layer)
if self.sel_poly_layer:
self.pyslip.HideLayer(self.sel_poly_layer)
def polySelectOnOff(self, event):
"""Handle SelectOnOff event for polygon layer control."""
layer = self.poly_layer
if event.state:
self.add_select_handler(layer, self.polySelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def polySelect(self, event):
"""Map- and view-relative polygon select event from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
selection [list of] tuple (xgeo,ygeo) of selected point
(if None then no point(s) selected)
The selection could be a single or box select.
Select a polygon to turn it on; selecting any other polygon turns
it off, unless the previous selection is selected again.
"""
# .selection: [(poly,attr), ...]
selection = event.selection
# turn any previous selection off
if self.sel_poly_layer:
self.pyslip.DeleteLayer(self.sel_poly_layer)
self.sel_poly_layer = None
# box OR single selection
if selection:
# get selected polygon points into form for point display layer
points = []
for (poly, d) in selection:
try:
del d['colour']
except KeyError:
pass
try:
del d['radius']
except KeyError:
pass
for (x, y) in poly:
points.append((x, y, d))
self.sel_poly_layer = \
self.pyslip.AddPointLayer(points, map_rel=True,
colour='#ff00ff',
radius=5, visible=True,
show_levels=[3,4],
name='<sel_poly>')
return True
##### view-relative polygon layer
def polyViewOnOff(self, event):
"""Handle OnOff event for map-relative polygon layer control."""
if event.state:
self.poly_view_layer = \
self.pyslip.AddPolygonLayer(PolyViewData, map_rel=False,
delta=DefaultPolygonViewDelta,
name='<poly_view_layer>',
placement='cn', visible=True,
fontsize=24, colour='#0000ff')
else:
self.pyslip.DeleteLayer(self.poly_view_layer)
self.poly_view_layer = None
if self.sel_poly_view_layer:
self.pyslip.DeleteLayer(self.sel_poly_view_layer)
self.sel_poly_view_layer = None
self.sel_poly_view_point = None
def polyViewShowOnOff(self, event):
"""Handle ShowOnOff event for polygon layer control."""
if event.state:
self.pyslip.ShowLayer(self.poly_view_layer)
if self.sel_poly_view_layer:
self.pyslip.ShowLayer(self.sel_poly_view_layer)
else:
self.pyslip.HideLayer(self.poly_view_layer)
if self.sel_poly_view_layer:
self.pyslip.HideLayer(self.sel_poly_view_layer)
def polyViewSelectOnOff(self, event):
"""Handle SelectOnOff event for polygon layer control."""
layer = self.poly_view_layer
if event.state:
self.add_select_handler(layer, self.polyViewSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def polyViewSelect(self, event):
"""View-relative polygon select event from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
selection tuple (sel, udata, None) defining the selected
polygon (if None then no point(s) selected)
The selection could be a single or box select.
"""
selection = event.selection
# point select, turn any previous selection off
if self.sel_poly_view_layer:
self.pyslip.DeleteLayer(self.sel_poly_view_layer)
self.sel_poly_view_layer = None
# for box OR single selection
if selection:
# get selected polygon points into form for point display layer
points = []
for (poly, d) in selection:
try:
del d['colour']
except KeyError:
pass
try:
del d['radius']
except KeyError:
pass
for (x, y) in poly:
points.append((x, y, d))
self.sel_poly_view_layer = \
self.pyslip.AddPointLayer(points, map_rel=False,
colour='#ff00ff',
radius=5, visible=True,
show_levels=[3,4],
name='<sel_view_poly>')
return True
##### map-relative polyline layer
def polylineOnOff(self, event):
"""Handle OnOff event for map-relative polyline layer control."""
if event.state:
self.polyline_layer = \
self.pyslip.AddPolylineLayer(PolylineData, map_rel=True,
visible=True,
delta=DefaultPolylineMapDelta,
show_levels=MRPolyShowLevels,
name='<polyline_layer>')
else:
self.pyslip.DeleteLayer(self.polyline_layer)
self.polyline_layer = None
if self.sel_polyline_layer:
self.pyslip.DeleteLayer(self.sel_polyline_layer)
self.sel_polyline_layer = None
self.sel_polyline_point = None
if self.sel_polyline_layer2:
self.pyslip.DeleteLayer(self.sel_polyline_layer2)
self.sel_polyline_layer2 = None
def polylineShowOnOff(self, event):
"""Handle ShowOnOff event for polycwlinegon layer control."""
if event.state:
self.pyslip.ShowLayer(self.polyline_layer)
if self.sel_polyline_layer:
self.pyslip.ShowLayer(self.sel_polyline_layer)
if self.sel_polyline_layer2:
self.pyslip.ShowLayer(self.sel_polyline_layer2)
else:
self.pyslip.HideLayer(self.polyline_layer)
if self.sel_polyline_layer:
self.pyslip.HideLayer(self.sel_polyline_layer)
if self.sel_polyline_layer2:
self.pyslip.HideLayer(self.sel_polyline_layer2)
def polylineSelectOnOff(self, event):
"""Handle SelectOnOff event for polyline layer control."""
layer = self.polyline_layer
if event.state:
self.add_select_handler(layer, self.polylineSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def polylineSelect(self, event):
"""Map- and view-relative polyline select event from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
selection [list of] tuple (xgeo,ygeo) of selected point
(if None then no point(s) selected)
relsel a tuple (p1,p2) of polyline segment
The selection could be a single or box select.
Select a polyline to turn it on; selecting any other polyline turns
it off, unless the previous selection is selected again.
"""
# .selection: [(poly,attr), ...]
selection = event.selection
relsel = event.relsel
# turn any previous selection off
if self.sel_polyline_layer:
self.pyslip.DeleteLayer(self.sel_polyline_layer)
self.sel_polyline_layer = None
if self.sel_polyline_layer2:
self.pyslip.DeleteLayer(self.sel_polyline_layer2)
self.sel_polyline_layer2 = None
# box OR single selection
if selection:
# show segment selected first, if any
if relsel:
self.sel_polyline_layer2 = \
self.pyslip.AddPointLayer(relsel, map_rel=True,
colour='#40ff40',
radius=5, visible=True,
show_levels=[3,4],
name='<sel_polyline2>')
# get selected polygon points into form for point display layer
points = []
for (poly, d) in selection:
try:
del d['colour']
except KeyError:
pass
try:
del d['radius']
except KeyError:
pass
for (x, y) in poly:
points.append((x, y, d))
self.sel_polyline_layer = \
self.pyslip.AddPointLayer(points, map_rel=True,
colour='#ff00ff',
radius=3, visible=True,
show_levels=[3,4],
name='<sel_polyline>')
return True
##### view-relative polyline layer
def polylineViewOnOff(self, event):
"""Handle OnOff event for map-relative polyline layer control."""
if event.state:
self.polyline_view_layer = \
self.pyslip.AddPolylineLayer(PolylineViewData, map_rel=False,
delta=DefaultPolylineViewDelta,
name='<polyline_view_layer>',
placement='cn', visible=True,
fontsize=24, colour='#0000ff')
else:
self.pyslip.DeleteLayer(self.polyline_view_layer)
self.polyline_view_layer = None
if self.sel_polyline_view_layer:
self.pyslip.DeleteLayer(self.sel_polyline_view_layer)
self.sel_polyline_view_layer = None
self.sel_polyline_view_point = None
if self.sel_polyline_view_layer2:
self.pyslip.DeleteLayer(self.sel_polyline_view_layer2)
self.sel_polyline_view_layer2 = None
def polylineViewShowOnOff(self, event):
"""Handle ShowOnOff event for polyline layer control."""
if event.state:
self.pyslip.ShowLayer(self.polyline_view_layer)
if self.sel_polyline_view_layer:
self.pyslip.ShowLayer(self.sel_polyline_view_layer)
if self.sel_polyline_view_layer2:
self.pyslip.ShowLayer(self.sel_polyline_view_layer2)
else:
self.pyslip.HideLayer(self.polyline_view_layer)
if self.sel_polyline_view_layer:
self.pyslip.HideLayer(self.sel_polyline_view_layer)
if self.sel_polyline_view_layer2:
self.pyslip.HideLayer(self.sel_polyline_view_layer2)
def polylineViewSelectOnOff(self, event):
"""Handle SelectOnOff event for polyline layer control."""
layer = self.polyline_view_layer
if event.state:
self.add_select_handler(layer, self.polylineViewSelect)
self.pyslip.SetLayerSelectable(layer, True)
else:
self.del_select_handler(layer)
self.pyslip.SetLayerSelectable(layer, False)
def polylineViewSelect(self, event):
"""View-relative polyline select event from the widget.
event the event that contains these attributes:
type the type of point selection: single or box
selection tuple (sel, udata, None) defining the selected
polyline (if None then no point(s) selected)
The selection could be a single or box select.
"""
selection = event.selection
relsel = event.relsel
# point select, turn any previous selection off
if self.sel_polyline_view_layer:
self.pyslip.DeleteLayer(self.sel_polyline_view_layer)
self.sel_polyline_view_layer = None
if self.sel_polyline_view_layer2:
self.pyslip.DeleteLayer(self.sel_polyline_view_layer2)
self.sel_polyline_view_layer2 = None
# for box OR single selection
if selection:
# first, display selected segment
if relsel:
# get original polyline attributes, get placement and offsets
(_, attributes) = PolylineViewData[0]
place = attributes.get('placement', None)
offset_x = attributes.get('offset_x', 0)
offset_y = attributes.get('offset_y', 0)
self.sel_polyline_view_layer2 = \
self.pyslip.AddPointLayer(relsel, map_rel=False,
placement=place,
offset_x=offset_x,
offset_y=offset_y,
colour='#4040ff',
radius=5, visible=True,
show_levels=[3,4],
name='<sel_view_polyline2>')
# get selected polyline points into form for point display layer
points = []
for (poly, d) in selection:
try:
del d['colour']
except KeyError:
pass
try:
del d['radius']
except KeyError:
pass
for (x, y) in poly:
points.append((x, y, d))
self.sel_polyline_view_layer = \
self.pyslip.AddPointLayer(points, map_rel=False,
colour='#ff00ff',
radius=3, visible=True,
show_levels=[3,4],
name='<sel_view_polyline>')
return True
def level_change_event(self, event):
"""Handle a "level change" event from the pySlipQt widget.
event.type the type of event
event.level the new map level
"""
self.map_level.SetValue(str(event.level))
def mouse_posn_event(self, event):
"""Handle a "mouse position" event from the pySlipQt widget.
The 'event' object has these attributes:
event.etype the type of event
event.mposn the new mouse position on the map (xgeo, ygeo)
event.vposn the new mouse position on the view (x, y)
"""
if event.mposn:
(lon, lat) = event.mposn
# we clamp the lon/lat to zero here since we don't want small
# negative values displaying as "-0.00"
if abs(lon) < 0.01:
lon = 0.0
if abs(lat) < 0.01:
lat = 0.0
self.mouse_position.SetValue('%.2f/%.2f' % (lon, lat))
else:
self.mouse_position.SetValue('')
def select_event(self, event):
"""Handle a single select click, any mouse button.
event.type the event type number
event.mposn select point tuple in map (geo) coordinates: (xgeo, ygeo)
event.vposn select point tuple in view coordinates: (xview, yview)
event.layer_id the ID of the layer containing the selected object (or None)
event.selection a tuple (x,y,attrib) defining the position of the object selected (or [] if no selection)
event.data the user-supplied data object for the selected object (or [] if no selection)
event.relsel relative selection point inside a single selected image (or [] if no selection)
event.button  one of pyslip.MouseLeft, pyslip.MouseMiddle or pyslip.MouseRight
Just look at 'event.layer_id' to decide what handler to call and pass
'event' through to the handler.
"""
self.demo_select_dispatch.get(event.layer_id, self.null_handler)(event)
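# For illustration: once selection is enabled on, say, the map-relative image
# and polygon layers, self.demo_select_dispatch holds something like
#     {self.image_layer: self.imageSelect, self.poly_layer: self.polySelect}
# (entries are added and removed by add_select_handler()/del_select_handler()),
# so the line above simply routes the event to the handler registered for the
# layer that produced it.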
######
# Small utility routines
######
def unimplemented(self, msg):
"""Issue an "Sorry, ..." message."""
self.pyslip.warn('Sorry, %s is not implemented at the moment.' % msg)
def dump_event(self, msg, event):
"""Dump an event to the log.
Print attributes and values for non_dunder attributes.
"""
log('dump_event: %s' % msg)
for attr in dir(event):
if not attr.startswith('__'):
log(' event.%s=%s' % (attr, getattr(event, attr)))
######
# Finish initialization of data, etc
######
def initData(self):
global PointData, PointDataColour, PointViewDataPlacement
global PointViewData, PointViewDataColour
global ImageData
global ImageViewData
global TextData
global TextViewData
global TextViewDataPlace, TextViewDataOffX, TextViewDataOffY
global PolyData, PolyViewData
global PolylineData, PolylineViewData
global CR_Width, CR_Height
# create PointData - lots of it to test handling
PointData = []
for lon in range(-70, 290+1, 5):
for lat in range(-65, 65+1, 5):
udata = 'point(%s,%s)' % (str(lon), str(lat))
PointData.append((lon, lat, {'data': udata}))
PointDataColour = '#ff000080' # semi-transparent
# create PointViewData - a point-rendition of 'PYSLIP'
PointViewData = [(-66,-14),(-66,-13),(-66,-12),(-66,-11),(-66,-10),
(-66,-9),(-66,-8),(-66,-7),(-66,-6),(-66,-5),(-66,-4),
(-66,-3),(-65,-7),(-64,-7),(-63,-7),(-62,-7),(-61,-8),
(-60,-9),(-60,-10),(-60,-11),(-60,-12),(-61,-13),
(-62,-14),(-63,-14),(-64,-14),(-65,-14), # P
(-59,-14),(-58,-13),(-57,-12),(-56,-11),(-55,-10),
(-53,-10),(-52,-11),(-51,-12),(-50,-13),(-49,-14),
(-54,-9),(-54,-8),(-54,-7),(-54,-6),(-54,-5),
(-54,-4),(-54,-3), # Y
(-41,-13),(-42,-14),(-43,-14),(-44,-14),(-45,-14),
(-46,-14),(-47,-13),(-48,-12),(-48,-11),(-47,-10),
(-46,-9),(-45,-9),(-44,-9),(-43,-9),(-42,-8),
(-41,-7),(-41,-6),(-41,-5),(-42,-4),(-43,-3),
(-44,-3),(-45,-3),(-46,-3),(-47,-3),(-48,-4), # S
(-39,-14),(-39,-13),(-39,-12),(-39,-11),(-39,-10),
(-39,-9),(-39,-8),(-39,-7),(-39,-6),(-39,-5),
(-39,-4),(-39,-3),(-38,-3),(-37,-3),(-36,-3),
(-35,-3),(-34,-3),(-33,-3),(-32,-3), # L
(-29,-14),(-29,-13),(-29,-12),
(-29,-11),(-29,-10),(-29,-9),(-29,-8),(-29,-7),
(-29,-6),(-29,-5),(-29,-4),(-29,-3), # I
(-26,-14),(-26,-13),(-26,-12),(-26,-11),(-26,-10),
(-26,-9),(-26,-8),(-26,-7),(-26,-6),(-26,-5),(-26,-4),
(-26,-3),(-25,-7),(-24,-7),(-23,-7),(-22,-7),(-21,-8),
(-20,-9),(-20,-10),(-20,-11),(-20,-12),(-21,-13),
(-22,-14),(-23,-14),(-24,-14),(-25,-14)] # P
PointViewDataColour = '#00000040' # transparent
PointViewDataPlacement = 'se'
# create image data - shipwrecks off the Australian east coast
ImageData = [# Agnes Napier - 1855
(160.0, -30.0, ShipImg, {'placement': 'cc'}),
# Venus - 1826
(145.0, -11.0, ShipImg, {'placement': 'ne'}),
# Wolverine - 1879
(156.0, -23.0, ShipImg, {'placement': 'nw'}),
# Thomas Day - 1884
(150.0, -15.0, ShipImg, {'placement': 'sw'}),
# Sybil - 1902
(165.0, -19.0, ShipImg, {'placement': 'se'}),
# Prince of Denmark - 1863
(158.55, -19.98, ShipImg),
# Moltke - 1911
(146.867525, -19.152185, ShipImg)
]
ImageData2 = []
ImageData3 = []
ImageData4 = []
ImageData5 = []
ImageData6 = []
self.map_level_2_img = {0: ImageData2,
1: ImageData3,
2: ImageData4,
3: ImageData5,
4: ImageData6}
self.map_level_2_selimg = {0: SelGlassyImg2,
1: SelGlassyImg3,
2: SelGlassyImg4,
3: SelGlassyImg5,
4: SelGlassyImg6}
self.current_layer_img_layer = None
ImageViewData = [(0, 0, CompassRoseGraphic, {'placement': 'ne',
'data': 'compass rose'})]
text_placement = {'placement': 'se'}
transparent_placement = {'placement': 'se', 'colour': '#00000040'}
capital = {'placement': 'se', 'fontsize': 14, 'colour': 'red',
'textcolour': 'red'}
capital_sw = {'placement': 'sw', 'fontsize': 14, 'colour': 'red',
'textcolour': 'red'}
TextData = [
(151.20, -33.85, 'Sydney', text_placement),
(144.95, -37.84, 'Melbourne', {'placement': 'ce'}),
(153.08, -27.48, 'Brisbane', text_placement),
(115.86, -31.96, 'Perth', transparent_placement),
(138.30, -35.52, 'Adelaide', text_placement),
(130.98, -12.61, 'Darwin', text_placement),
(147.31, -42.96, 'Hobart', text_placement),
(174.75, -36.80, 'Auckland', text_placement),
(174.75, -41.29, 'Wellington', capital),
(172.61, -43.51, 'Christchurch', text_placement),
(168.74, -45.01, 'Queenstown', text_placement),
(147.30, -09.41, 'Port Moresby', capital),
(143.1048, -5.4646, 'Porgera', text_placement),
(103.833333, 1.283333, 'Singapore', capital),
(101.683333, 3.133333, 'Kuala Lumpur', capital_sw),
(106.822922, -6.185451, 'Jakarta', capital),
(110.364444, -7.801389, 'Yogyakarta', text_placement),
(121.050, 14.600, 'Manila', capital),
(271.74, +40.11, 'Champaign', text_placement),
(160.0, -30.0, 'Agnes Napier - 1855',
{'placement': 'cw', 'offset_x': 20, 'colour': 'green'}),
(145.0, -11.0, 'Venus - 1826',
{'placement': 'sw', 'colour': 'green'}),
(156.0, -23.0, 'Wolverine - 1879',
{'placement': 'ce', 'colour': 'green'}),
(150.0, -15.0, 'Thomas Day - 1884',
{'colour': 'green'}),
(165.0, -19.0, 'Sybil - 1902',
{'placement': 'cw', 'colour': 'green'}),
(158.55, -19.98, 'Prince of Denmark - 1863',
{'placement': 'nw', 'offset_x': 20, 'colour': 'green'}),
(146.867525, -19.152182, 'Moltke - 1911',
{'placement': 'ce', 'offset_x': 20, 'colour': 'green'}),
]
if sys.platform != 'win32':
# TODO: check if this works under Windows
TextData.extend([
(110.490, 24.780, '阳朔县 (Yangshuo)', {'placement': 'sw'}),
(117.183333, 39.133333, '天津市 (Tianjin)', {'placement': 'sw'}),
(106.36, +10.36, 'Mỹ Tho', {'placement': 'ne'}),
(105.85, +21.033333, 'Hà Nội', capital),
(109.18333, 12.25, 'Nha Trang', {'placement': 'sw'}),
(106.681944, 10.769444, 'Thành phố Hồ Chí Minh',
{'placement': 'sw'}),
(132.47, +34.44, '広島市 (Hiroshima City)',
{'placement': 'nw'}),
(114.000, +22.450, '香港 (Hong Kong)', text_placement),
(98.392, 7.888, 'ภูเก็ต (Phuket)', text_placement),
( 96.16, +16.80, 'ရန်ကုန် (Yangon)', capital),
(104.93, +11.54, ' ភ្នំពេញ (Phnom Penh)', capital),
(100.49, +13.75, 'กรุงเทพมหานคร (Bangkok)', capital),
( 77.56, +34.09, 'གླེ་(Leh)', text_placement),
(84.991275, 24.695102, 'बोधगया (Bodh Gaya)', text_placement)
])
TextViewData = [(0, 0, '%s %s' % (DemoName, DemoVersion))]
TextViewDataPlace = 'cn'
TextViewDataOffX = 0
TextViewDataOffY = 3
PolyData = [(((150.0,10.0),(160.0,20.0),(170.0,10.0),(165.0,0.0),(155.0,0.0)),
{'width': 3, 'colour': 'blue', 'closed': True}),
(((165.0,-35.0),(175.0,-35.0),(175.0,-45.0),(165.0,-45.0)),
{'width': 10, 'colour': '#00ff00c0', 'filled': True,
'fillcolour': '#ffff0040'}),
(((190.0,-30.0),(220.0,-50.0),(220.0,-30.0),(190.0,-50.0)),
{'width': 3, 'colour': 'green', 'filled': True,
'fillcolour': 'yellow'}),
(((190.0,+50.0),(220.0,+65.0),(220.0,+50.0),(190.0,+65.0)),
{'width': 10, 'colour': '#00000040'})]
PolyViewData = [(((230,0),(230,40),(-230,40),(-230,0)),
{'width': 3, 'colour': '#00ff00ff', 'closed': True,
'placement': 'cn', 'offset_y': 1})]
PolylineData = [(((150.0,10.0),(160.0,20.0),(170.0,10.0),(165.0,0.0),(155.0,0.0)),
{'width': 3, 'colour': 'blue'}),
(((185.0,10.0),(185.0,20.0),(180.0,10.0),(175.0,0.0),(185.0,0.0)),
{'width': 3, 'colour': 'red'})]
PolylineViewData = [(((50,100),(100,50),(150,100),(100,150)),
{'width': 3, 'colour': '#00ffffff', 'placement': 'cn'}),
(((100,250),(50,300),(100,350),(150,300)),
{'width': 3, 'colour': '#0000ffff', 'placement': 'cn'})]
# define layer ID variables & sub-checkbox state variables
self.point_layer = None
self.sel_point_layer = None
self.sel_point = None
self.point_view_layer = None
self.sel_point_view_layer = None
self.sel_point_view = None
self.image_layer = None
self.sel_image_layer = None
self.sel_image = None
self.image_view_layer = None
self.sel_image_view_layer = None
self.sel_image_view = None
self.sel_imagepoint_view_layer = None
self.text_layer = None
self.sel_text_layer = None
self.sel_text = None
self.text_view_layer = None
self.sel_text_view_layer = None
self.poly_layer = None
self.sel_poly_layer = None
self.sel_poly = None
self.poly_view_layer = None
self.sel_poly_view_layer = None
self.sel_poly = None
self.polyline_layer = None
self.sel_polyline_layer = None
self.sel_polyline_layer2 = None
self.sel_polyline = None
self.polyline_view_layer = None
self.sel_polyline_view_layer = None
self.sel_polyline_view_layer2 = None
self.sel_polyline = None
# get width and height of the compass rose image
cr_img = wx.Image(CompassRoseGraphic, wx.BITMAP_TYPE_ANY)
cr_bmap = cr_img.ConvertToBitmap()
(CR_Width, CR_Height) = cr_bmap.GetSize()
# force pyslip initialisation
self.pyslip.OnSize() # required?
# set initial view position
self.map_level.SetLabel('%d' % InitViewLevel)
wx.CallLater(25, self.final_setup, InitViewLevel, InitViewPosition)
def final_setup(self, level, position):
"""Perform final setup.
level zoom level required
position position to be in centre of view
We do this in a CallLater() function for those operations that
must not be done while the GUI is "fluid".
"""
self.pyslip.GotoLevelAndPosition(level, position)
######
# Exception handlers
######
def null_handler(self, event):
"""Routine to handle unexpected events."""
print('ERROR: null_handler!?')
log('ERROR: null_handler!?')
######
# Handle adding/removing select handler functions.
######
def add_select_handler(self, id, handler):
"""Add handler for select in layer 'id'."""
self.demo_select_dispatch[id] = handler
def del_select_handler(self, id):
"""Remove handler for select in layer 'id'."""
del self.demo_select_dispatch[id]
######
# Popup a small window with some text.
######
def show_popup(self, text, posn):
"""Display a popup with some text.
text the text to display
posn position (x, y) of the top-left corner of the popup, view coords
Tries to always draw the popup fully on the widget.
"""
# create popup window, get size
popup = DemoPopup(self.GetTopLevelParent(), wx.SIMPLE_BORDER, text)
(pop_width, pop_height) = popup.GetSize()
# get pySlip widget size and app position on screen
(pyslip_width, pyslip_height) = self.pyslip.GetSize()
screen_posn = self.ClientToScreen((0, 0))
# assume the popup is displayed in the top-left quarter of the view
# we want the top-left popup corner over the click point
x_adjusted = posn.x # assume popup displays to right
y_adjusted = posn.y # assume popup displays down
if posn.x >= pyslip_width//2:
# click in right half of widget, popup goes to the left
x_adjusted = posn.x - pop_width
if posn.y >= pyslip_height//2:
# click in bottom half of widget, popup goes up
y_adjusted = posn.y - pop_height
popup.Position(screen_posn, (x_adjusted, y_adjusted))
# move popup to final position and show it
popup.Show(True)
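# Worked example with illustrative numbers (not taken from the demo): for an
# 800x600 pySlip widget and a 200x100 popup, a click at view position (700, 500)
# falls in the bottom-right quarter, so the popup is drawn up and to the left of
# the click point: x_adjusted = 700 - 200 = 500 and y_adjusted = 500 - 100 = 400.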
###############################################################################
# Main code
###############################################################################
def usage(msg=None):
if msg:
print(('*'*80 + '\n%s\n' + '*'*80) % msg)
print(__doc__)
# our own handler for uncaught exceptions
def excepthook(type, value, tback):
msg = '\n' + '=' * 80
msg += '\nUncaught exception:\n'
msg += ''.join(traceback.format_exception(type, value, tback))
msg += '=' * 80 + '\n'
log(msg)
print(msg)
sys.exit(1)
# plug our handler into the python system
sys.excepthook = excepthook
# parse the CLI params
argv = sys.argv[1:]
try:
(opts, args) = getopt.getopt(argv, 'd:hx',
['debug=', 'help', 'inspector'])
except getopt.error:
usage()
sys.exit(1)
debug = 10
inspector = False
for (opt, param) in opts:
if opt in ['-d', '--debug']:
debug = param
elif opt in ['-h', '--help']:
usage()
sys.exit(0)
elif opt == '-x':
inspector = True
# convert any symbolic debug level to a number
try:
debug = int(debug)
except ValueError:
# possibly a symbolic debug name
try:
debug = LogSym2Num[debug.upper()]
except KeyError:
usage('Unrecognized debug name: %s' % debug)
sys.exit(1)
log.set_level(debug)
# check to see if the GMT tiles directory exists in the right place
if not os.path.isdir(tiles.TilesDir):
home_dir = os.path.abspath(os.path.expanduser('~'))
msg = ("\nSorry, the GMT local tiles haven't been installed correctly.\n\n"
"You must copy the pySlip/pyslip/examples/gmt_tiles.tar.gz directory\n"
f"to your home directory ({home_dir}) and unpack it there.\n"
)
log(msg)
print(msg)
sys.exit(1)
# start wxPython app
app = wx.App()
app_frame = AppFrame()
app_frame.Show()
if inspector:
import wx.lib.inspection
wx.lib.inspection.InspectionTool().Show()
app.MainLoop()
|
from django.db import models
from django.utils.translation import gettext as _
from main.models import File, BaseModel
class History(BaseModel):
text = models.TextField()
class Meta:
verbose_name = _('History')
verbose_name_plural = _('Histories')
def __str__(self):
return self.text
class HistoryImages(models.Model):
history = models.ForeignKey(History, on_delete=models.CASCADE)
image = models.ForeignKey(File, on_delete=models.CASCADE)
class Meta:
verbose_name = _('History Image')
verbose_name_plural = _('History Images')
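# A minimal usage sketch (not part of the models module; it assumes the File
# model from main.models uses its default manager). In a view or a shell one
# might attach an existing uploaded file to a history entry like this:
#
#     history = History.objects.create(text='Company founded in 1990')
#     image = File.objects.first()
#     HistoryImages.objects.create(history=history, image=image)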
|
import cv2
import pafy
import numpy as np
import glob
from hitnet import HitNet, ModelType, draw_disparity, draw_depth, CameraConfig
# Initialize video
# cap = cv2.VideoCapture("video.mp4")
videoUrl = 'https://youtu.be/Yui48w71SG0'
videoPafy = pafy.new(videoUrl)
print(videoPafy.streams)
cap = cv2.VideoCapture(videoPafy.getbestvideo().url)
# Select model type
# model_type = ModelType.middlebury
# model_type = ModelType.flyingthings
model_type = ModelType.eth3d
if model_type == ModelType.middlebury:
model_path = "models/middlebury_d400/saved_model_480x640/model_float32.onnx"
elif model_type == ModelType.flyingthings:
model_path = "models/flyingthings_finalpass_xl/saved_model_480x640/model_float32.onnx"
elif model_type == ModelType.eth3d:
model_path = "models/eth3d/saved_model_480x640/model_float32.onnx"
# Store baseline (m) and focal length (pixel)
input_width = 640
camera_config = CameraConfig(0.1, 0.5*input_width) # 90 deg. FOV
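# The focal length above follows the pinhole model: f = (width / 2) / tan(hfov / 2).
# With a 90 degree horizontal FOV, tan(45 deg) = 1, so f = 0.5 * input_width pixels.
# Small helper for other fields of view (an illustration only, not used by the demo):
import math
def focal_px_from_hfov(width_px, hfov_deg):
    """Return the focal length in pixels for an image width and horizontal FOV."""
    return 0.5 * width_px / math.tan(math.radians(hfov_deg) / 2.0)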
max_distance = 5
# Initialize model
hitnet_depth = HitNet(model_path, model_type, camera_config)
cv2.namedWindow("Estimated depth", cv2.WINDOW_NORMAL)
while cap.isOpened():
try:
# Read frame from the video
ret, frame = cap.read()
if not ret:
break
except Exception:
continue
# Extract the left and right images
left_img = frame[:,:frame.shape[1]//3]
right_img = frame[:,frame.shape[1]//3:frame.shape[1]*2//3]
color_real_depth = frame[:,frame.shape[1]*2//3:]
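# The source video appears to be composed of three panels side by side:
# the left camera image, the right camera image, and a colour-coded
# reference depth map, which is why the frame is split into thirds above.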
# Estimate the depth
disparity_map = hitnet_depth(left_img, right_img)
depth_map = hitnet_depth.get_depth()
color_disparity = draw_disparity(disparity_map)
color_depth = draw_depth(depth_map, max_distance)
color_depth = cv2.resize(color_depth, (left_img.shape[1],left_img.shape[0]))
combined_image = np.hstack((left_img, color_real_depth, color_depth))
cv2.imshow("Estimated depth", combined_image)
# Press key q to stop
if cv2.waitKey(1) == ord('q'):
break
cap.release()
cv2.destroyAllWindows() |
'''
Created on Jul 6, 2018
@author: kumykov
Wrapper for common HUB API queries.
Upon initialization a Bearer token is obtained and used for all subsequent calls
Usage:
credentials and hub URL could be placed in the .restconfig.json file
{
"baseurl": "https://hub-hostname",
"username": "<username goes here>",
"password": "<password goes here>",
"insecure": true,
"debug": false
}
OR, using API Token
{
"baseurl": "https://hub-hostname",
"api_token": "<API token goes here>",
"insecure": true,
"debug": false
}
.restconfig.json should be present in the current directory.
from blackduck.HubRestApi import HubInstance
hub = HubInstance()
projects = hub.get_projects()
It is possible to generate the config file by initializing the API as follows:
from blackduck.HubRestApi import HubInstance
username="<username goes here>"
password="<password goes here>"
urlbase="https://hub-hostname"
hub = HubInstance(urlbase, username, password, insecure=True)
'''
import logging
import requests
import json
class CreateFailedAlreadyExists(Exception):
pass
class CreateFailedUnknown(Exception):
pass
class HubInstance(object):
'''
classdocs
'''
# TODO: What to do about the config file for thread-safety, concurrency
configfile = ".restconfig.json"
def __init__(self, *args, **kwargs):
# Config needs to be an instance variable for thread-safety, concurrent use of HubInstance()
self.config = {}
try:
self.config['baseurl'] = args[0]
api_token = kwargs.get('api_token', False)
if api_token:
self.config['api_token'] = api_token
else:
self.config['username'] = args[1]
self.config['password'] = args[2]
self.config['insecure'] = kwargs.get('insecure', False)
self.config['debug'] = kwargs.get('debug', False)
if kwargs.get('write_config_flag', True):
self.write_config()
except Exception:
self.read_config()
if self.config['insecure']:
requests.packages.urllib3.disable_warnings()
if self.config['debug']:
print(self.configfile)
self.token, self.csrf_token = self.get_auth_token()
def read_config(self):
with open('.restconfig.json','r') as f:
self.config = json.load(f)
def write_config(self):
with open(self.configfile,'w') as f:
json.dump(self.config, f, indent=3)
def get_auth_token(self):
api_token = self.config.get('api_token', False)
if api_token:
authendpoint = "/api/tokens/authenticate"
url = self.config['baseurl'] + authendpoint
session = requests.session()
response = session.post(
url,
data={},
headers={'Authorization': 'token {}'.format(api_token)},
verify=not self.config['insecure']
)
csrf_token = response.headers['X-CSRF-TOKEN']
bearer_token = json.loads(response.content.decode('utf-8'))['bearerToken']
return (bearer_token, csrf_token)
else:
authendpoint="/j_spring_security_check"
url = self.config['baseurl'] + authendpoint
session=requests.session()
credentials = dict()
credentials['j_username'] = self.config['username']
credentials['j_password'] = self.config['password']
response = session.post(url, credentials, verify= not self.config['insecure'])
cookie = response.headers['Set-Cookie']
token = cookie[cookie.index('=')+1:cookie.index(';')]
return (token, None)
def get_urlbase(self):
return self.config['baseurl']
def get_headers(self):
if self.config.get('api_token', False):
return {
'X-CSRF-TOKEN': self.csrf_token,
'Authorization': 'Bearer {}'.format(self.token),
'Content-Type': 'application/json'}
else:
return {"Authorization":"Bearer " + self.token}
def get_api_version(self):
url = self.get_urlbase() + '/api/current-version'
response = self.execute_get(url)
version = response.json().get('version', 'unknown')
return version
def _get_parameter_string(self, parameters={}):
parameter_string = "&".join(["{}={}".format(k,v) for k,v in parameters.items()])
return "?" + parameter_string
def _get_policy_url(self):
return self.config['baseurl'] + "/api/policy-rules"
def get_policies(self, parameters={}):
url = self._get_policy_url() + self._get_parameter_string(parameters)
response = self.execute_get(url)
return response.json()
def create_policy(self, policy_json):
url = self._get_policy_url()
location = self._create(url, policy_json)
return location
def get_policy_by_id(self, policy_id):
url = self._get_policy_url() + "/{}".format(policy_id)
return self.get_policy_by_url(url)
def get_policy_by_url(self, policy_url):
response = self.execute_get(policy_url)
jsondata = response.json()
return jsondata
def update_policy_by_id(self, policy_id, update_json):
url = self._get_policy_url() + "/{}".format(policy_id)
return self.update_policy_by_url(url, update_json)
def update_policy_by_url(self, policy_url, update_json):
return self.execute_put(policy_url, update_json)
def delete_policy_by_id(self, policy_id):
url = self._get_policy_url() + "/{}".format(policy_id)
return self.delete_policy_by_url(url)
def delete_policy_by_url(self, policy_url):
return self.execute_delete(policy_url)
def find_component_info_for_protex_component(self, protex_component_id, protex_component_release_id):
'''Will return the Hub component corresponding to the protex_component_id, and if a release (version) id
is given, the response will also include the component-version. Returns an empty list if there were
no components found.
'''
url = self.config['baseurl'] + "/api/components"
if protex_component_release_id:
query = "?q=bdsuite:{}%23{}&limit=9999".format(protex_component_id, protex_component_release_id)
else:
query = "?q=bdsuite:{}&limit=9999".format(protex_component_id)
with_query = url + query
logging.debug("Finding the Hub componet for Protex component id {}, release id {} using query/url {}".format(
protex_component_id, protex_component_release_id, with_query))
response = self.execute_get(with_query)
logging.debug("query results in status code {}, json data: {}".format(response.status_code, response.json()))
# TODO: Error checking and retry? For now, as POC just assuming it worked
component_list_d = response.json()
if component_list_d['totalCount'] >= 1:
return component_list_d['items'][0]
else:
return component_list_d['items']
def get_limit_paramstring(self, limit):
return "?limit={}".format(limit)
def get_apibase(self):
return self.config['baseurl'] + "/api"
def get_projects(self, limit=100):
headers = self.get_headers()
paramstring = self.get_limit_paramstring(limit)
url = self.config['baseurl'] + "/api/projects" + paramstring
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_project_by_id(self, project_id, limit=100):
headers = self.get_headers()
paramstring = self.get_limit_paramstring(limit)
url = self.config['baseurl'] + "/api/projects/" + project_id + paramstring
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_project_versions(self, project, limit=100):
paramstring = self.get_limit_paramstring(limit)
url = project['_meta']['href'] + "/versions" + paramstring
headers = self.get_headers()
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_version_by_id(self, project_id, version_id, limit=100):
headers = self.get_headers()
paramstring = self.get_limit_paramstring(limit)
url = self.config['baseurl'] + "/api/projects/" + project_id + "/versions/" + version_id
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_version_components(self, projectversion, limit=1000):
paramstring = self.get_limit_paramstring(limit)
url = projectversion['_meta']['href'] + "/components" + paramstring
headers = self.get_headers()
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_file_matches_for_component_no_version(self, project_id, version_id, component_id, limit=1000):
headers = self.get_headers()
paramstring = self.get_limit_paramstring(limit)
url = self.get_apibase() + \
"/projects/{}/versions/{}/components/{}/matched-files".format(project_id, version_id, component_id)
print("GET ", url)
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_file_bom_entries(self, hub_release_id, limit=100):
headers = self.get_headers()
paramstring = self.get_limit_paramstring(limit)
url = self.get_apibase() + \
"/v1/releases/{}/file-bom-entries".format(hub_release_id)
print("GET ", url)
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_file_matches_for_component_with_version(self, project_id, version_id, component_id, component_version_id, limit=1000):
headers = self.get_headers()
paramstring = self.get_limit_paramstring(limit)
url = self.get_apibase() + \
"/projects/{}/versions/{}/components/{}/versions/{}/matched-files".format(project_id, version_id, \
component_id, component_version_id)
print("GET ", url)
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_snippet_bom_entries(self, project_id, version_id, reviewed=False, included=False, limit=100, offset=0):
headers = self.get_headers()
paramstring = "?limit=" + str(limit) + "&offset=" + \
str(offset) + "&filter=bomReviewStatus:" + str(reviewed).lower() + "&filter=bomInclusion:" + str(included).lower()
path = self.get_apibase() + \
"/internal/projects/{}/versions/{}/snippet-bom-entries".format(project_id, version_id)
url = path + paramstring
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def ignore_snippet_bom_entry(self, hub_version_id, snippet_bom_entry):
headers = self.get_headers()
headers['Content-Type'] = "application/json"
url = self.get_apibase() + \
"/v1/releases/{}/snippet-bom-entries".format(hub_version_id)
body = self.get_ignore_snippet_json(snippet_bom_entry)
response = requests.put(url, json=body, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_ignore_snippet_json(self, snippet_bom_entry):
for cur_fileSnippetBomComponents in snippet_bom_entry['fileSnippetBomComponents']:
cur_fileSnippetBomComponents['ignored'] = True
return [snippet_bom_entry]
def compare_project_versions(self, version, compareTo):
apibase = self.config['baseurl'] + "/api"
paramstring = "?limit=1000&sortField=component.securityRiskProfile&ascending=false&offset=0"
cwhat = version['_meta']['href'].replace(apibase, '')
cto = compareTo['_meta']['href'].replace(apibase, '')
url = apibase + cwhat + "/compare" + cto + "/components" + paramstring
headers = self.get_headers()
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_version_codelocations(self, version, limit=100):
apibase = self.config['baseurl'] + "/api"
paramstring = "?limit=100&offset=0"
projectversion = version['_meta']['href']
url = projectversion + "/codelocations" + paramstring
headers = self.get_headers()
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_codelocations(self, limit=100):
paramstring = "?limit={}&offset=0".format(limit)
headers = self.get_headers()
url = self.get_apibase() + "/codelocations" + paramstring
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_codelocation_scan_summaries(self, code_location_id, limit=100):
paramstring = "?limit={}&offset=0".format(limit)
headers = self.get_headers()
url = self.get_apibase() + \
"/codelocations/{}/scan-summaries".format(code_location_id)
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def get_component_by_id(self, component_id):
url = self.config['baseurl'] + "/api/components/{}".format(component_id)
return self.get_component_by_url(url)
def get_component_by_url(self, component_url):
response = self.execute_get(component_url)
jsondata = response.json()
return jsondata
def get_scanlocations(self):
url = self.config['baseurl'] + "/api/v1/scanlocations"
headers = self.get_headers()
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def update_component_by_id(self, component_id, update_json):
url = self.config["baseurl"] + "/api/components/{}".format(component_id)
return self.update_component_by_url(url, update_json)
def update_component_by_url(self, component_url, update_json):
return self.execute_put(component_url, update_json)
def delete_codelocation(self, locationid):
url = self.config['baseurl'] + "/api/codelocations/" + locationid
headers = self.get_headers()
response = requests.delete(url, headers=headers, verify = not self.config['insecure'])
return response
def execute_delete(self, url):
headers = self.get_headers()
response = requests.delete(url, headers=headers, verify = not self.config['insecure'])
return response
def get_ldap_state(self):
url = self.config['baseurl'] + "/api/v1/ldap/state"
headers = self.get_headers()
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def enable_ldap(self):
url = self.config['baseurl'] + "/api/v1/ldap/state"
headers = self.get_headers()
payload = {}
payload['ldapEnabled'] = True
response = requests.post(url, headers=headers, verify = not self.config['insecure'], json=payload)
jsondata = response.json()
return jsondata
def disable_ldap(self):
url = self.config['baseurl'] + "/api/v1/ldap/state"
headers = self.get_headers()
payload = {}
payload['ldapEnabled'] = False
response = requests.post(url, headers=headers, verify = not self.config['insecure'], json=payload)
jsondata = response.json()
return jsondata
def get_ldap_configs(self):
url = self.config['baseurl'] + "/api/v1/ldap/configs"
headers = self.get_headers()
headers['Content-Type'] = "application/json"
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
jsondata = response.json()
return jsondata
def _validated_json_data(self, data_to_validate):
if isinstance(data_to_validate, dict):
json_data = json.dumps(data_to_validate)
else:
json_data = data_to_validate
return json_data
def execute_get(self, url):
headers = self.get_headers()
response = requests.get(url, headers=headers, verify = not self.config['insecure'])
return response
def execute_put(self, url, data):
data = self._validated_json_data(data)
headers = self.get_headers()
headers["Content-Type"] = "application/json"
response = requests.put(url, headers=headers, data=data, verify = not self.config['insecure'])
return response
def _create(self, url, json_body):
response = self.execute_post(url, json_body)
if response.status_code == 201 and "location" in response.headers:
return (response.headers["location"])
elif response.status_code == 412:
raise CreateFailedAlreadyExists("Failed to create the object because it already exists - url {}, body {}, response {}".format(url, json_body, response))
else:
raise CreateFailedUnknown("Failed to create the object for an unknown reason - url {}, body {}, response {}".format(url, json_body, response))
def execute_post(self, url, data):
data = self._validated_json_data(data)
headers = self.get_headers()
headers["Content-Type"] = "application/json"
response = requests.post(url, headers=headers, data=data, verify = not self.config['insecure'])
return response
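# Minimal usage sketch (illustrative only): the URL and token below are
# placeholders, and the 'items'/'name' fields assume the standard Hub project
# JSON returned by GET /api/projects.
if __name__ == "__main__":
    hub = HubInstance("https://hub-hostname", api_token="<API token goes here>",
                      insecure=True, write_config_flag=False)
    for project in hub.get_projects().get('items', []):
        print(project['name'])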
|
# Adapted for numpy/ma/cdms2 by convertcdms.py
import vcs
import cdms2 as cdms
import EzTemplate
import support
import os
def clear(*args, **kargs):
x = kargs['canvas']
x.clear()
def plot_ts(*args, **kargs):
x = kargs['canvas']
i = kargs['index_x']
j = kargs['index_y']
x.clear()
x.plot(s, t1)
ts = s[:, j, i]
x.plot(ts, t2)
def plot_lat_time(*args, **kargs):
x = kargs['canvas']
i = kargs['index_x']
x.clear()
x.plot(s, t1)
ts = s[:, :, i]
x.plot(ts, t2)
def plot_lon_time(*args, **kargs):
x = kargs['canvas']
j = kargs['index_y']
x.clear()
x.plot(s, t1)
ts = s[:, j]
x.plot(ts, t2)
if support.dogui:
x = vcs.init()
x.portrait()
y = vcs.init()
y.open()
y.portrait()
T = EzTemplate.Multi(rows=2, columns=1)
f = cdms.open(os.path.join(vcs.sample_data, 'clt.nc'))
global s, t2, t1
s = f('clt')
t1 = T.get()
t2 = T.get()
x.user_actions_names = ['Clear', 'Plot time series']
x.user_actions = [clear, plot_ts]
x.plot(s, t1)
y.user_actions_names = [
'Clear',
'Plot lat/time cross section',
'Plot lon/time cross section']
y.user_actions = [clear, plot_lat_time, plot_lon_time]
y.plot(s, t1)
    input("Press enter to end")
else:
    print('You need to run this one by hand (turn support.dogui to 1 first)')
|
"""底层的数据库引擎, 初期代码可能会比较丑陋"""
from typing import Dict
from peewee import MySQLDatabase, PostgresqlDatabase, SqliteDatabase, Model, CharField, FloatField, IntegerField, \
DateTimeField
from ctpbee import current_app
from ctpbee.exceptions import ConfigError
type_map = {
'sqlite': SqliteDatabase,
'mysql': MySQLDatabase,
'postgresql': PostgresqlDatabase
}
def generate_pointer():
tick_type = current_app.config.get('TICK_DATABASE_TYPE')
bar_type = current_app.config.get('BAR_DATABASE_TYPE')
if tick_type is None or bar_type is None:
        raise ConfigError(args=("Invalid configuration: please check that TICK_DATABASE_TYPE and BAR_DATABASE_TYPE are set",))
tick_pointer = type_map[tick_type](
current_app.config.get('TICK_DATABASE_NAME'),
user=current_app.config.get('TICK_DATABASE_USER'),
password=current_app.config.get('TICK_DATABASE_PWD'),
host=current_app.config.get('TICK_DATABASE_HOST'),
port=current_app.config.get('TICK_DATABASE_PORT')
)
    bar_pointer = type_map[bar_type](
database=current_app.config.get('BAR_DATABASE_NAME'),
user=current_app.config.get('BAR_DATABASE_USER'),
password=current_app.config.get('BAR_DATABASE_PWD'),
host=current_app.config.get('BAR_DATABASE_HOST'),
port=current_app.config.get('BAR_DATABASE_PORT')
)
return (tick_pointer, bar_pointer)
tick_pointer, bar_pointer = generate_pointer()
class TickDatabaseBase(Model):
class Meta: database = tick_pointer
class BarDatabaseBase(Model):
class Meta: database = bar_pointer
def set_attr(self, data: Dict):
for key, d in data.items():
        if not hasattr(self, key):
            raise ValueError("The target object has no attribute for this key")
setattr(self, key, d)
def generate_data_class():
"""generate orm class map"""
orm_map = {}
    subscribed_symbols = current_app.config.get('SUBSCRIBED_SYMBOL')
'''generate tick map and bar map'''
tfield = {
'symbol': CharField(),
'exchange': CharField(),
'vt_symbol': CharField(),
        'datetime': DateTimeField(),
'name': CharField(),
'volume': FloatField(),
'last_price': FloatField(),
'last_volume': FloatField(),
'limit_up': FloatField(),
'limit_down': FloatField(),
'open_interest': IntegerField(),
'average_price': FloatField(),
'open_price': FloatField(),
'high_price': FloatField(),
'low_price': FloatField(),
'pre_price': FloatField(),
'bid_price_1': FloatField(),
'bid_price_2': FloatField(),
'bid_price_3': FloatField(),
'bid_price_4': FloatField(),
'bid_price_5': FloatField(),
'ask_price_1': FloatField(),
'ask_price_2': FloatField(),
'ask_price_3': FloatField(),
'ask_price_4': FloatField(),
'ask_price_5': FloatField(),
'bid_volume_1': FloatField(),
'bid_volume_2': FloatField(),
'bid_volume_3': FloatField(),
'bid_volume_4': FloatField(),
'bid_volume_5': FloatField(),
'ask_volume_1': FloatField(),
'ask_volume_2': FloatField(),
'ask_volume_3': FloatField(),
'ask_volume_4': FloatField(),
'ask_volume_5': FloatField(),
'to': set_attr
}
bfield = {
'symbol': CharField(),
'exchange': CharField(),
'vt_symbol': CharField(),
        'datetime': DateTimeField(),
'volume': FloatField(),
'open_price': FloatField(),
'high_price': FloatField(),
'low_price': FloatField(),
'pre_price': FloatField(),
'interval': IntegerField(),
'to': set_attr
}
    for symbol in subscribed_symbols:
orm_map[f"t{symbol}"] = type(symbol, (TickDatabaseBase,), tfield)
orm_map[f"b{symbol}"] = type(symbol, (BarDatabaseBase,), bfield)
return orm_map
|
import tkinter as tk
from tkinter import filedialog
from tkinter import *
from PIL import ImageTk, Image
import numpy
#To classify sign load the trained model.
from keras.models import load_model
model = load_model('traffic_classifier.h5')
#dictionary for labelling all traffic signs classes.
classes = { 1:'Speed limit (20km/h)',
2:'Speed limit (30km/h)',
3:'Speed limit (50km/h)',
4:'Speed limit (60km/h)',
5:'Speed limit (70km/h)',
6:'Speed limit (80km/h)',
7:'End of speed limit (80km/h)',
8:'Speed limit (100km/h)',
9:'Speed limit (120km/h)',
10:'No passing',
11:'No passing veh over 3.5 tons',
12:'Right-of-way at intersection',
13:'Priority road',
14:'Yield',
15:'Stop',
16:'No vehicles',
17:'Veh > 3.5 tons prohibited',
18:'No entry',
19:'General caution',
20:'Dangerous curve left',
21:'Dangerous curve right',
22:'Double curve',
23:'Bumpy road',
24:'Slippery road',
25:'Road narrows on the right',
26:'Road work',
27:'Traffic signals',
28:'Pedestrians',
29:'Children crossing',
30:'Bicycles crossing',
31:'Beware of ice/snow',
32:'Wild animals crossing',
33:'End speed + passing limits',
34:'Turn right ahead',
35:'Turn left ahead',
36:'Ahead only',
37:'Go straight or right',
38:'Go straight or left',
39:'Keep right',
40:'Keep left',
41:'End no passing veh > 3.5 tons',
42:'Roundabout mandatory',
            43:'End of no passing' }
#initializing GUI
top=tk.Tk()
top.geometry('800x600')
top.title('Traffic Sign Recognition')
top.configure(background='#CDCDCD')
label=Label(top,background='#CDCDCD', font=('times new roman',30,'bold'))
sign_image = Label(top)
def classify(file_path):
global label_packed
image = Image.open(file_path)
image = image.resize((30,30))
image = numpy.expand_dims(image, axis=0)
image = numpy.array(image)
print(image.shape)
pred = model.predict_classes([image])[0]
sign = classes[pred+1]
print(sign)
label.configure(foreground='#011638', text=sign)
def show_classify_button(file_path):
classify_b=Button(top,text="Classify the Sign",command=lambda: classify(file_path),padx=10,pady=5)
classify_b.configure(background='#364156', foreground='white',font=('times new roman',30,'bold'))
classify_b.place(relx=0.79,rely=0.46)
def upload_image():
try:
file_path=filedialog.askopenfilename()
uploaded=Image.open(file_path)
uploaded.thumbnail(((top.winfo_width()/2.25),(top.winfo_height()/2.25)))
im=ImageTk.PhotoImage(uploaded)
sign_image.configure(image=im)
sign_image.image=im
label.configure(text='')
show_classify_button(file_path)
except:
pass
upload=Button(top,text="Upload the traffic sign for classification/recognition",command=upload_image,padx=10,pady=5)
upload.configure(background='#364156', foreground='white',font=('times new roman',30,'bold'))
upload.pack(side=BOTTOM,pady=50)
sign_image.pack(side=BOTTOM,expand=True)
label.pack(side=BOTTOM,expand=True)
heading = Label(top, text="Know The traffic Signs",pady=30, font=('times new roman',30,'bold'))
heading.configure(background='#CDCDCD',foreground='#364156')
heading.pack()
top.mainloop()
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import cv2
import tensorflow as tf
from PIL import Image
import os
from sklearn.model_selection import train_test_split
from keras.utils import to_categorical
from keras.models import Sequential, load_model
from keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout
data = []
labels = []
classes = 43
cur_path = os.getcwd()
#Images and their labels are retrieved in this block.
for i in range(classes):
path = os.path.join(cur_path,'train',str(i))
images = os.listdir(path)
for a in images:
try:
image = Image.open(path + '\\'+ a)
image = image.resize((30,30))
image = np.array(image)
#sim = Image.fromarray(image)
data.append(image)
labels.append(i)
except:
print("Error in loading image")
# Lists conversion into numpy arrays
data = np.array(data)
labels = np.array(labels)
print(data.shape, labels.shape)
#Splitting training and testing dataset
X_train, X_test, y_train, y_test = train_test_split(data, labels, test_size=0.2, random_state=42)
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)
#Converting the labels into one hot encoding
y_train = to_categorical(y_train, 43)
y_test = to_categorical(y_test, 43)
#In this block we will be building the model
model = Sequential()
model.add(Conv2D(filters=32, kernel_size=(5,5), activation='relu', input_shape=X_train.shape[1:]))
model.add(Conv2D(filters=32, kernel_size=(5,5), activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2)))
model.add(Dropout(rate=0.25))
model.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu'))
model.add(Conv2D(filters=64, kernel_size=(3, 3), activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2)))
model.add(Dropout(rate=0.25))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dropout(rate=0.5))
model.add(Dense(43, activation='softmax'))
#Model compilation
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
epochs = 15
history = model.fit(X_train, y_train, batch_size=32, epochs=epochs, validation_data=(X_test, y_test))
model.save("my_model.h5")
#To easily understand the acccuracy we will plot the graphs.
plt.figure(0)
plt.plot(history.history['accuracy'], label='training accuracy')
plt.plot(history.history['val_accuracy'], label='val accuracy')
plt.title('Accuracy')
plt.xlabel('epochs')
plt.ylabel('accuracy')
plt.legend()
plt.show()
plt.figure(1)
plt.plot(history.history['loss'], label='training loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.title('Loss')
plt.xlabel('epochs')
plt.ylabel('loss')
plt.legend()
plt.show()
#Here we will check the accuracy on the test dataset that is available
from sklearn.metrics import accuracy_score
test = pd.read_csv('Test.csv')
labels = test["ClassId"].values
imgs = test["Path"].values
data=[]
for img in imgs:
    image = Image.open(img)
    image = image.resize((30,30))
    data.append(np.array(image))
X_test = np.array(data)
pred = model.predict_classes(X_test)
#Getting accuracy from test dataset.
from sklearn.metrics import accuracy_score
print(accuracy_score(labels, pred))
|
import datetime
import os
from jinja2 import Environment, FileSystemLoader
from bin.contentctl_project.contentctl_core.domain.entities.security_content_object import SecurityContentObject
class ConfWriter():
@staticmethod
def writeConfFileHeader(output_path : str) -> None:
utc_time = datetime.datetime.utcnow().replace(microsecond=0).isoformat()
j2_env = Environment(
loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')),
trim_blocks=True)
template = j2_env.get_template('header.j2')
output = template.render(time=utc_time)
with open(output_path, 'w') as f:
output = output.encode('ascii', 'ignore').decode('ascii')
f.write(output)
@staticmethod
def writeConfFile(template_name : str, output_path : str, objects : list) -> None:
def custom_jinja2_enrichment_filter(string, object):
customized_string = string
for key in dir(object):
if type(key) is not str:
key = key.decode()
if not key.startswith('__') and not key == "_abc_impl" and not callable(getattr(object, key)):
if hasattr(object, key):
customized_string = customized_string.replace("%" + key + "%", str(getattr(object, key)))
for key in dir(object.tags):
if type(key) is not str:
key = key.decode()
if not key.startswith('__') and not key == "_abc_impl" and not callable(getattr(object.tags, key)):
if hasattr(object.tags, key):
customized_string = customized_string.replace("%" + key + "%", str(getattr(object.tags, key)))
return customized_string
j2_env = Environment(
loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), 'templates')),
trim_blocks=True)
j2_env.filters['custom_jinja2_enrichment_filter'] = custom_jinja2_enrichment_filter
template = j2_env.get_template(template_name)
output = template.render(objects=objects)
with open(output_path, 'a') as f:
output = output.encode('ascii', 'ignore').decode('ascii')
f.write(output)
|
import os
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
import matplotlib.style as style
def APA_usage(bamfile, APA_sitefile, celltype, gene):
""" Get the abundance for each cell barcode for each APA in the gene.
:param bamfile: method for the new :class:`Request` object.
:param APA_sitefile: URL for the new :class:`Request` object.
:param celltype: (optional) The celltype file genreated from cellranger
:type gene: string
Usage:
>>> import requests
>>> req = requests.request('GET', 'http://httpbin.org/get')
<Response [200]>
Returnsass:
Generate a heatmap;
Print the Read count;
"""
from baseq.bam import BAMTYPE
bam = BAMTYPE(bamfile)
#Read CellType Table...
if celltype:
df_type = pd.read_csv(celltype)
df_type["cell"] = [x.split("-")[0] for x in df_type.Barcode.tolist()]
df_type = df_type.drop("Barcode", axis=1)
df_type = df_type.set_index('cell')
#Read APA Site Table...
df_apa = pd.read_table(APA_sitefile)
df_gene = df_apa[df_apa.gene == gene]
sample_usage = []
#Get The Mapped Read Infos For Each Peak...
for idx, row in df_gene.iterrows():
chr = row['chr']
start = row['pos']-100
end = row['pos']+100
reads = bam.get_reads(chr, start, end)
for read in reads:
read_header = read[0].split("_")
sample_usage.append([read_header[1], read_header[2], str(idx)])
#Build a Table
df_counts = pd.DataFrame(sample_usage, columns=["sample", "UMI", "APA"])
df_counts['reads'] = 1
df_counts = df_counts.groupby(by=["sample", "UMI", "APA"]).sum().reset_index()
df_counts = df_counts.drop(["UMI"], axis=1)
df_counts = df_counts.groupby(by=["sample", "APA"]).count().reset_index()
df_counts = df_counts.pivot(index='sample', columns='APA', values='reads').fillna(0)
df_counts["total"] = df_counts.sum(axis=1)
df_counts = df_counts[df_counts.total>=1]
df_counts = df_counts.sort_values("total", ascending=False)
#Aggregate By Cell Type...
if celltype:
df = df_counts.join(df_type)
df = df.groupby("Cluster").sum()
print(df)
df = df.div(df.total/100, axis=0)
print(df)
#plot heatmap....
style.use('seaborn-poster')
plt.figure()
df_counts = df_counts.drop(["total"], axis=1)
sns.heatmap(df_counts.iloc[1:40, :], cmap="YlGnBu_r")
plt.savefig("hehe.png")
print("[info] Figure Export To {}".format("hehe.png")) |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mock
from nova import test
from nova.tests.virt.vmwareapi import stubs
from nova.virt.vmwareapi import driver
from nova.virt.vmwareapi import fake as vmwareapi_fake
from nova.virt.vmwareapi import volumeops
class VMwareVolumeOpsTestCase(test.NoDBTestCase):
def setUp(self):
def fake_del():
return
super(VMwareVolumeOpsTestCase, self).setUp()
vmwareapi_fake.reset()
stubs.set_stubs(self.stubs)
self._session = driver.VMwareAPISession()
self.stubs.Set(self._session, '__del__',
fake_del)
self._volumeops = volumeops.VMwareVolumeOps(self._session)
self.instance = {'name': 'fake_name', 'uuid': 'fake_uuid'}
def _test_detach_disk_from_vm(self, destroy_disk=False):
def fake_call_method(module, method, *args, **kwargs):
vmdk_detach_config_spec = kwargs.get('spec')
virtual_device_config = vmdk_detach_config_spec.deviceChange[0]
self.assertEqual('remove', virtual_device_config.operation)
self.assertEqual('ns0:VirtualDeviceConfigSpec',
virtual_device_config.obj_name)
if destroy_disk:
self.assertEqual('destroy',
virtual_device_config.fileOperation)
else:
self.assertFalse(hasattr(virtual_device_config,
'fileOperation'))
return 'fake_configure_task'
with contextlib.nested(
mock.patch.object(self._session, '_wait_for_task'),
mock.patch.object(self._session, '_call_method',
fake_call_method)
) as (_wait_for_task, _call_method):
fake_device = vmwareapi_fake.DataObject()
fake_device.backing = vmwareapi_fake.DataObject()
fake_device.backing.fileName = 'fake_path'
fake_device.key = 'fake_key'
self._volumeops.detach_disk_from_vm('fake_vm_ref', self.instance,
fake_device, destroy_disk)
_wait_for_task.assert_has_calls([
mock.call(self.instance['uuid'], 'fake_configure_task')])
def test_detach_with_destroy_disk_from_vm(self):
self._test_detach_disk_from_vm(destroy_disk=True)
def test_detach_without_destroy_disk_from_vm(self):
self._test_detach_disk_from_vm(destroy_disk=False)
|
# This is the practice on the web crawling book
from urllib.request import urlopen
import requests
from bs4 import BeautifulSoup
import re
import pymysql.cursors
import random
import datetime
#Seed the random number generator with the current system time, so every run of the program follows a brand-new random path through the Wikipedia articles
# random.seed(datetime.datetime.now())
#
#
# def get_links(article_url):
# html = urlopen("https://en.wikipedia.org/" + article_url)
# bs = BeautifulSoup(html, "html.parser")
#
# return bs.find("div", {"id": "bodyContent"}).findAll("a", href=re.compile("^(/wiki/)((?!:).)*$"))
#
# links = get_links("wiki/Kevin_Bacon")
#
# while len(links) > 0:
# newArticle = links[random.randint(0, len(links)-1)].attrs["href"]
# print(newArticle)
# links = get_links(newArticle)
# for link in bs.find("div", {"id": "bodyContent"}).findAll("a", href=re.compile("^(/wiki/)((?!:).)*$")):
# if "href" in link.attrs:
# print(link.attrs['href'])
#De-duplicate links
# pages = set()
#
#
# def getlinks(pageurl):
# global pages
# html1 = urlopen("https://en.wikipedia.org"+pageurl)
# bs4 = BeautifulSoup(html1, "html.parser")
# for link in bs4.findAll("a", href=re.compile("^(/wiki/)")):
# if "href" in link.attrs:
# if link.attrs['href'] not in pages:
#                 # We hit a new page
# newPage = link.attrs['href']
# print(newPage)
# pages.add(newPage)
# getlinks(newPage)
#
# getlinks("")
#Scrape page data
# pages = set()
# def getlinks(pageurl):
# global pages
# html = urlopen("https://en.wikipedia.org"+pageurl)
# bs4 = BeautifulSoup(html, 'html.parser')
# try:
# print(bs4.h1.get_text())
# print(bs4.find(id="mw-content-text").findAll("p")[0])
# print(bs4.find(id="ca-edit").find("span").attrs['href'])
# except AttributeError:
#         print("This page is missing some attributes!")
#
# for link in bs4.findAll("a", href=re.compile("^(/wiki/)")):
# if 'href' in link.attrs:
# if link.attrs['href'] not in pages:
# newpage = link.attrs['href']
# print("----------------\n"+newpage)
# pages.add(newpage)
# getlinks(newpage)
#
# getlinks("")
#Advanced web scraping
# def ngrams(input1, n):
# input1 = re.sub('\n+', " ", input1)
# input1 = re.sub(' +', " ", input1)
# input1 = bytes(content, "UTF-8")
# # input1 = input1.decode("ascii", "ignore")
# input1 = input1.split(" ")
# output = []
# for i in range(len(input1)-n+1):
# output.append(input1[i:i+n])
# return output
#
# html = urlopen("https://en.wikipedia.org/wiki/Python")
# bs4 = BeautifulSoup(html, 'html.parser')
# content = bs4.find("div", {"id": "mw-content-text"}).get_text()
# ngram = ngrams(content, 2)
# print(ngram)
# print("2-ngrams count is: " + str(len(ngram)))
#Submit a form
params = {'firstname': 'Ryan', 'lastname': 'Mitchell'}
r = requests.post("http://pythonscraping.com/files/processing.php", data=params)
print(r.text)
from django.urls import path
from accounts.views import signup_view
urlpatterns = [
path('signup/', signup_view, name="signup_view"),
]
|
import swigstarterkit
si = swigstarterkit.Script_Interface();
print(si.get_a_value())
|
"""
TODO: Docstring
"""
import numpy as np
class Replay(object):
"""
For RL a memory of experiences must be written to train the batches
with old experiences and returns in the form of (s, a, r, s').
"""
def __init__(self, max_memory=100, discount=.9):
"""TODO: Docstring for __init__.
:max_memory: The size of the memory container to avoid overflows.
:discount: The penalty factor for future experiences.
"""
self.max_memory = max_memory
self.memory = list()
self.discount = discount
def remember(self, states, game_over):
"""Method to store the experiences in the class list.
        :states: The transition tuple (s, a, r, s').
        :game_over: Whether the game has ended.
"""
self.memory.append([states, game_over])
# Remove oldest memory if list is full
if len(self.memory) > self.max_memory:
del self.memory[0]
def get_batch(self, model, batch_size=32):
"""Interact to get the training data.
:model: The NN to be trained.
:batch_size: Size of each training sample.
:returns: Training sample.
"""
len_memory = len(self.memory)
# Number of possible actions in the game.
        num_actions = model.output_shape[-1]
# Existent states (game field dimension).
env_dim = self.memory[0][0][0].shape[1]
# We want to return an input and target vector with inputs from an
# observed state...
inputs = np.zeros((min(len_memory, batch_size), env_dim))
# ...and the target r + gamma * max Q(s',a')
# Note that our target is a matrix, with possible fields not only for
# the action taken but also
        # for the other possible actions. The actions not taken keep the same value
        # as the prediction so that they are not affected by training.
targets = np.zeros((inputs.shape[0], num_actions))
# We draw states to learn from randomly
for i, idx in enumerate(np.random.randint(0, len_memory,
size=inputs.shape[0])):
"""
Here we load one transition <s, a, r, s'> from memory
:state_t: initial state s
:action_t: action taken a
:reward_t: reward earned r
:state_tp1: the state that followed s'
"""
state_t, action_t, reward_t, state_tp1 = self.memory[idx][0]
# We also need to know whether the game ended at this state
game_over = self.memory[idx][1]
# Add the state s to the input
inputs[i:i+1] = state_t
# First we fill the target values with the prediction of the model.
# They will not be affected by training (since the training loss
# for them is 0)
targets[i] = model.predict(state_t)[0]
"""
If the game ended, the expected reward Q(s,a) should be the final
reward r.
Otherwise the target value is r + gamma * max Q(s',a')
"""
# Here Q_sa is max_a'Q(s', a')
Q_sa = np.max(model.predict(state_tp1)[0])
# If the game ended, the reward is the final reward
if game_over: # if game_over is True
targets[i, action_t] = reward_t
else:
# r + gamma * max Q(s',a')
targets[i, action_t] = reward_t + self.discount * Q_sa
return inputs, targets
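# A minimal usage sketch of the replay memory above, assuming a stand-in model
# object that exposes the `output_shape` attribute and `predict` method that
# get_batch relies on; the shapes, action count and reward below are illustrative.
if __name__ == "__main__":
    class _DummyModel(object):
        output_shape = (None, 2)              # two possible actions

        def predict(self, state):
            return np.zeros((1, 2))           # constant Q-values, just for the sketch

    replay = Replay(max_memory=10, discount=0.9)
    state = np.zeros((1, 4))                  # a 4-dimensional game state
    # one transition <s, a, r, s'> plus the game_over flag
    replay.remember((state, 0, 1.0, state), game_over=False)
    inputs, targets = replay.get_batch(_DummyModel(), batch_size=1)
    print(inputs.shape, targets.shape)        # (1, 4) (1, 2)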
|
# stdlib
import copy
# 3p
from mock import Mock
from nose.plugins.attrib import attr
# project
from tests.checks.common import AgentCheckTest
INSTANCE = {
'class': 'Win32_PerfFormattedData_PerfProc_Process',
'metrics': [
['ThreadCount', 'proc.threads.count', 'gauge'],
['IOReadBytesPerSec', 'proc.io.bytes_read', 'gauge'],
['VirtualBytes', 'proc.mem.virtual', 'gauge'],
['PercentProcessorTime', 'proc.cpu_pct', 'gauge'],
],
'tag_by': 'Name',
}
INSTANCE_METRICS = [
'proc.threads.count',
'proc.io.bytes_read',
'proc.mem.virtual',
'proc.cpu_pct',
]
@attr('windows')
@attr(requires='windows')
class WMICheckTest(AgentCheckTest):
CHECK_NAME = 'wmi_check'
def test_basic_check(self):
instance = copy.deepcopy(INSTANCE)
instance['filters'] = [{'Name': 'svchost'}]
self.run_check({'instances': [instance]})
for metric in INSTANCE_METRICS:
self.assertMetric(metric, tags=['name:svchost'], count=1)
self.coverage_report()
def test_check_with_wildcard(self):
instance = copy.deepcopy(INSTANCE)
instance['filters'] = [{'Name': 'svchost%'}]
self.run_check({'instances': [instance]})
for metric in INSTANCE_METRICS:
# We can assume that at least 2 svchost processes are running
self.assertMetric(metric, tags=['name:svchost'], count=1)
self.assertMetric(metric, tags=['name:svchost#1'], count=1)
def test_check_with_tag_queries(self):
instance = copy.deepcopy(INSTANCE)
instance['filters'] = [{'Name': 'svchost%'}]
# `CreationDate` is a good property to test the tag queries but would obviously not be useful as a tag in DD
instance['tag_queries'] = [['IDProcess', 'Win32_Process', 'Handle', 'CreationDate']]
self.run_check({'instances': [instance]})
for metric in INSTANCE_METRICS:
# No instance "number" (`#`) when tag_queries is specified
self.assertMetricTag(metric, tag='name:svchost#1', count=0)
self.assertMetricTag(metric, tag='name:svchost')
self.assertMetricTagPrefix(metric, tag_prefix='creationdate:')
def test_invalid_class(self):
instance = copy.deepcopy(INSTANCE)
instance['class'] = 'Unix'
logger = Mock()
self.run_check({'instances': [instance]}, mocks={'log': logger})
# A warning is logged
self.assertEquals(logger.warning.call_count, 1)
# No metrics/service check
self.coverage_report()
def test_invalid_metrics(self):
instance = copy.deepcopy(INSTANCE)
instance['metrics'].append(['InvalidProperty', 'proc.will.not.be.reported', 'gauge'])
logger = Mock()
self.run_check({'instances': [instance]}, mocks={'log': logger})
# A warning is logged
self.assertEquals(logger.warning.call_count, 1)
# No metrics/service check
self.coverage_report()
|
# Autogenerated by onnx-model-maker. Don't modify it manually.
import onnx
import onnx.helper
import onnx.numpy_helper
from onnx_model_maker import omm
from onnx_model_maker import onnx_mm_export
from onnx_model_maker.ops.op_helper import _add_input
@onnx_mm_export("v12.LessOrEqual")
def LessOrEqual(A, B, **kwargs):
_inputs = []
for i in (A, B):
_add_input(i, _inputs)
idx = omm.op_counter["LessOrEqual"]
omm.op_counter["LessOrEqual"] += 1
node = onnx.helper.make_node("LessOrEqual",
_inputs, [f'_t_LessOrEqual_{idx}_C'],
name=f"LessOrEqual_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.Celu")
def Celu(X, **kwargs):
_inputs = []
for i in (X, ):
_add_input(i, _inputs)
idx = omm.op_counter["Celu"]
omm.op_counter["Celu"] += 1
node = onnx.helper.make_node("Celu",
_inputs, [f'_t_Celu_{idx}_Y'],
name=f"Celu_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.GatherND")
def GatherND(data, indices, **kwargs):
_inputs = []
for i in (data, indices):
_add_input(i, _inputs)
idx = omm.op_counter["GatherND"]
omm.op_counter["GatherND"] += 1
node = onnx.helper.make_node("GatherND",
_inputs, [f'_t_GatherND_{idx}_output'],
name=f"GatherND_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.Einsum")
def Einsum(Inputs, **kwargs):
_inputs = []
for i in (Inputs, ):
_add_input(i, _inputs)
idx = omm.op_counter["Einsum"]
omm.op_counter["Einsum"] += 1
node = onnx.helper.make_node("Einsum",
_inputs, [f'_t_Einsum_{idx}_Output'],
name=f"Einsum_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.GreaterOrEqual")
def GreaterOrEqual(A, B, **kwargs):
_inputs = []
for i in (A, B):
_add_input(i, _inputs)
idx = omm.op_counter["GreaterOrEqual"]
omm.op_counter["GreaterOrEqual"] += 1
node = onnx.helper.make_node("GreaterOrEqual",
_inputs, [f'_t_GreaterOrEqual_{idx}_C'],
name=f"GreaterOrEqual_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.Max")
def Max(data_0, **kwargs):
_inputs = []
for i in (data_0, ):
_add_input(i, _inputs)
idx = omm.op_counter["Max"]
omm.op_counter["Max"] += 1
node = onnx.helper.make_node("Max",
_inputs, [f'_t_Max_{idx}_max'],
name=f"Max_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.NegativeLogLikelihoodLoss")
def NegativeLogLikelihoodLoss(input, target, weight=None, **kwargs):
_inputs = []
for i in (input, target, weight):
_add_input(i, _inputs)
idx = omm.op_counter["NegativeLogLikelihoodLoss"]
omm.op_counter["NegativeLogLikelihoodLoss"] += 1
node = onnx.helper.make_node("NegativeLogLikelihoodLoss",
_inputs, [f'_t_NegativeLogLikelihoodLoss_{idx}_loss'],
name=f"NegativeLogLikelihoodLoss_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.ReduceMin")
def ReduceMin(data, **kwargs):
_inputs = []
for i in (data, ):
_add_input(i, _inputs)
idx = omm.op_counter["ReduceMin"]
omm.op_counter["ReduceMin"] += 1
node = onnx.helper.make_node("ReduceMin",
_inputs, [f'_t_ReduceMin_{idx}_reduced'],
name=f"ReduceMin_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.ReduceMax")
def ReduceMax(data, **kwargs):
_inputs = []
for i in (data, ):
_add_input(i, _inputs)
idx = omm.op_counter["ReduceMax"]
omm.op_counter["ReduceMax"] += 1
node = onnx.helper.make_node("ReduceMax",
_inputs, [f'_t_ReduceMax_{idx}_reduced'],
name=f"ReduceMax_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.ArgMax")
def ArgMax(data, **kwargs):
_inputs = []
for i in (data, ):
_add_input(i, _inputs)
idx = omm.op_counter["ArgMax"]
omm.op_counter["ArgMax"] += 1
node = onnx.helper.make_node("ArgMax",
_inputs, [f'_t_ArgMax_{idx}_reduced'],
name=f"ArgMax_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.SoftmaxCrossEntropyLoss")
def SoftmaxCrossEntropyLoss(scores, labels, weights=None, **kwargs):
_inputs = []
for i in (scores, labels, weights):
_add_input(i, _inputs)
idx = omm.op_counter["SoftmaxCrossEntropyLoss"]
omm.op_counter["SoftmaxCrossEntropyLoss"] += 1
node = onnx.helper.make_node("SoftmaxCrossEntropyLoss",
_inputs, [f'_t_SoftmaxCrossEntropyLoss_{idx}_output', f'_t_SoftmaxCrossEntropyLoss_{idx}_log_prob'],
name=f"SoftmaxCrossEntropyLoss_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.Clip")
def Clip(input, min=None, max=None, **kwargs):
_inputs = []
for i in (input, min, max):
_add_input(i, _inputs)
idx = omm.op_counter["Clip"]
omm.op_counter["Clip"] += 1
node = onnx.helper.make_node("Clip",
_inputs, [f'_t_Clip_{idx}_output'],
name=f"Clip_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.ArgMin")
def ArgMin(data, **kwargs):
_inputs = []
for i in (data, ):
_add_input(i, _inputs)
idx = omm.op_counter["ArgMin"]
omm.op_counter["ArgMin"] += 1
node = onnx.helper.make_node("ArgMin",
_inputs, [f'_t_ArgMin_{idx}_reduced'],
name=f"ArgMin_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.Constant")
def Constant(**kwargs):
_inputs = []
for i in ():
_add_input(i, _inputs)
idx = omm.op_counter["Constant"]
omm.op_counter["Constant"] += 1
node = onnx.helper.make_node("Constant",
_inputs, [f'_t_Constant_{idx}_output'],
name=f"Constant_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.Pow")
def Pow(X, Y, **kwargs):
_inputs = []
for i in (X, Y):
_add_input(i, _inputs)
idx = omm.op_counter["Pow"]
omm.op_counter["Pow"] += 1
node = onnx.helper.make_node("Pow",
_inputs, [f'_t_Pow_{idx}_Z'],
name=f"Pow_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.MaxPool")
def MaxPool(X, **kwargs):
_inputs = []
for i in (X, ):
_add_input(i, _inputs)
idx = omm.op_counter["MaxPool"]
omm.op_counter["MaxPool"] += 1
node = onnx.helper.make_node("MaxPool",
_inputs, [f'_t_MaxPool_{idx}_Y', f'_t_MaxPool_{idx}_Indices'],
name=f"MaxPool_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.Min")
def Min(data_0, **kwargs):
_inputs = []
for i in (data_0, ):
_add_input(i, _inputs)
idx = omm.op_counter["Min"]
omm.op_counter["Min"] += 1
node = onnx.helper.make_node("Min",
_inputs, [f'_t_Min_{idx}_min'],
name=f"Min_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
@onnx_mm_export("v12.Dropout")
def Dropout(data, ratio=None, training_mode=None, **kwargs):
_inputs = []
for i in (data, ratio, training_mode):
_add_input(i, _inputs)
idx = omm.op_counter["Dropout"]
omm.op_counter["Dropout"] += 1
node = onnx.helper.make_node("Dropout",
_inputs, [f'_t_Dropout_{idx}_output', f'_t_Dropout_{idx}_mask'],
name=f"Dropout_{idx}",
**kwargs)
onnx.checker.check_node(node, omm.ctx)
omm.model.graph.node.append(node)
return node
|
# This source file is part of the Aument language
# Copyright (c) 2021 the aument contributors
#
# Licensed under Apache License v2.0 with Runtime Library Exception
# See LICENSE.txt for license information
import re
import os
def cleanup_params(array):
def each_line(x):
return CMT_CONT_REGEX.sub("", x)
return list(map(lambda inner: (inner[0], " ".join(map(each_line, inner[1]))), array))
AT_REGEX = re.compile(r'/// @([^ ]*) (.*)')
CMT_CONT_REGEX = re.compile(r'^///\s+')
BEGIN_DESC_REGEX = re.compile(r'^/// \[([^\]]+)\]\s*')
TWO_ARG_REGEX = re.compile(r'([^\s]+)\s+(.*)', re.S)
FUNC_NAME_REGEX = re.compile(r'([a-zA-Z_$][a-zA-Z_$0-9]*)\(')
STRUCT_NAME_REGEX = re.compile(r'struct ([a-zA-Z_$][a-zA-Z_$0-9]*)')
AU_FUNC_NAME_REGEX = re.compile(r'\(([a-zA-Z_$][a-zA-Z_$0-9]*)\)')
STATE_NONE = 0
STATE_FUNC_GROUP = 1
STATE_STRUCT_GROUP = 2
def parse(src, path):
groups = []
cur_group = []
state = STATE_NONE
for i in src.split("\n"):
if i.startswith("/// [func]") or i.startswith("/// [func-au]"):
if cur_group:
groups.append(cur_group)
cur_group = []
state = STATE_FUNC_GROUP
elif i.startswith("/// [struct]"):
if cur_group:
groups.append(cur_group)
cur_group = []
state = STATE_STRUCT_GROUP
if state != STATE_NONE:
cur_group.append(i)
if state == STATE_FUNC_GROUP:
if not i.startswith("///") and i.endswith(";"):
groups.append(cur_group)
cur_group = []
state = STATE_NONE
elif state == STATE_STRUCT_GROUP:
if i == "// end-struct":
groups.append(cur_group)
cur_group = []
state = STATE_NONE
if cur_group:
groups.append(cur_group)
functions = []
structs = []
for group in groups:
line_idx = 0
line_len = len(group)
while line_idx < line_len:
if group[line_idx].startswith("/// @") or not group[line_idx].startswith("///"):
break
line_idx += 1
desc = group[0:line_idx]
doc_type = BEGIN_DESC_REGEX.match(desc[0]).group(1)
desc[0] = BEGIN_DESC_REGEX.sub("", desc[0])
desc = ' '.join(map(lambda x: CMT_CONT_REGEX.sub("", x), desc))
desc = desc.replace('\\n', '\n')
params = []
while line_idx < line_len:
line = group[line_idx]
if line.startswith("/// @"):
matches = AT_REGEX.match(line)
params.append((matches.group(1), [matches.group(2)]))
elif not line.startswith("///"):
break
else:
params[-1][1].append(line)
line_idx += 1
params = cleanup_params(params)
signature = group[line_idx:]
if doc_type == "func" or doc_type == "func-au":
signature = "\n".join(signature)
func_params = []
func_returns = None
func_name = None
for (key, value) in params:
if key == 'param':
x = TWO_ARG_REGEX.match(value)
if x == None:
func_params.append((value, []))
else:
func_params.append((x.group(1), x.group(2)))
elif key == 'return':
func_returns = value
elif key=='name':
func_name = value
func = {
"path": path,
"desc": desc,
"params": func_params,
"returns": func_returns,
}
func["type"] = doc_type
if doc_type == "func":
func["signature"] = signature
func["name"] = FUNC_NAME_REGEX.search(signature).group(1)
else:
func["name"] = func_name
functions.append(func)
elif doc_type == "struct":
signature.pop()
signature = "\n".join(signature)
structs.append({
"path": path,
"desc": desc,
"name": STRUCT_NAME_REGEX.search(signature).group(1),
"signature": signature,
})
return {
"functions": functions,
"structs": structs,
}
functions = []
structs = []
for root, _, files in os.walk("src/"):
for file in files:
if file.endswith(".h"):
path = os.path.join(root, file)
path = path.replace("\\", "/")
with open(path, "r") as f:
result = parse(f.read(), path)
functions += result["functions"]
structs += result["structs"]
functions.sort(key=lambda x: x['name'])
structs.sort(key=lambda x: x['name'])
md_src = """\
# C API
The section below was generated automatically (devs: *gen_api.py*).
Please don't modify it by hand!
## Functions
"""
au_std_md_src = """\
# aument standard library reference
The section below was generated automatically (devs: *gen_api.py*).
Please don't modify it by hand!
## Functions
"""
NL = "\n"
# * Functions *
for f in functions:
if f["type"] == "func":
md_src += f"""
### {f['name']}
```c
{f['signature']}
```
Defined in *{f['path']}*.
{f['desc']}
#### Arguments
{NL.join(map(lambda x: f" * **{x[0]}:** {x[1]}", f['params'])) if f['params'] else "*none*"}
#### Return value
{f['returns'] if f['returns'] else "*none*"}
"""
elif f["type"] == "func-au":
au_std_md_src += f"""
### {f['name']}
Defined in *{f['path']}*.
{f['desc']}
#### Arguments
{NL.join(map(lambda x: f" * **{x[0]}:** {x[1]}", f['params'])) if f['params'] else "*none*"}
#### Return value
{f['returns'] if f['returns'] else "*none*"}
"""
# * Structs *
md_src += "\n## Structures\n"
for struct in structs:
md_src += f"""
### {struct["name"]}
{struct["desc"]}
```c
{struct["signature"]}
```
Defined in *{struct['path']}*.
"""
with open("docs/c-api.md", "w") as f:
f.write(md_src)
with open("docs/au-stdlib.md", "w") as f:
f.write(au_std_md_src)
|
import streamlit as st
import pandas as pd
import numpy as np
import torch
from PIL import Image, ImageChops
import os
from torch.nn.functional import cross_entropy
from streamlit_image_comparison import image_comparison
st.set_page_config(layout="wide")
@st.cache(allow_output_mutation=True)
def load_model():
efficientnet = torch.hub.load('NVIDIA/DeepLearningExamples:torchhub', 'nvidia_efficientnet_b0', pretrained=True)
return efficientnet.eval()
@st.cache(allow_output_mutation=True)
def load_classnames():
with open("classes.txt") as file:
return eval(file.read())
@st.cache(allow_output_mutation=True)
def load_images():
files = os.listdir("./images")
img_suffixes = ("jpg", "jpeg", "png")
img_files = (f for f in files if f.endswith(img_suffixes))
return [Image.open("./images/"+file) for file in img_files]
@st.cache(allow_output_mutation=True)
def load_styles():
with open("style.css") as f:
return '<style>{}</style>'.format(f.read())
st.markdown(load_styles(), unsafe_allow_html=True)
def img2tensor(img: Image) -> torch.Tensor:
arr = np.array(img).transpose(2, 0, 1)[np.newaxis, ...]
return torch.tensor(arr).float() / 255
def tensor2img(tensor: torch.Tensor) -> Image:
tensor = tensor.squeeze(0) * 255
arr = np.uint8(tensor.numpy()).transpose(1, 2, 0)
return Image.fromarray(arr)
classnames = load_classnames()
images = load_images()
model = load_model()
if "selected_img" not in st.session_state:
st.session_state["selected_img"] = images[0]
uploaded_file = st.sidebar.file_uploader("", type=['png', 'jpg', "jpeg"])
if uploaded_file is not None:
uploaded_img = Image.open(uploaded_file)
clicked = st.sidebar.button("analyze uploaded", key=100)
if clicked:
st.session_state.selected_img = uploaded_img
st.sidebar.markdown("<hr />", unsafe_allow_html=True)
st.sidebar.markdown("or select from a few examples")
for i, img in enumerate(images):
st.sidebar.markdown("<hr />", unsafe_allow_html=True)
st.sidebar.image(img)
clicked = st.sidebar.button("analyze", key=i)
if clicked:
st.session_state.selected_img = img
st.sidebar.markdown("<hr />", unsafe_allow_html=True)
st.sidebar.markdown("Photos source: "
"<a href='https://unsplash.com/photos/pk_1RdcAfbE'>street sign</a>, "
"<a href='https://unsplash.com/photos/X63FTIZFbZo'>clock on nightstand</a>, "
"<a href='https://unsplash.com/photos/fAz5Cf1ajPM'>wine</a>, "
"<a href='https://unsplash.com/photos/eWqOgJ-lfiI'>red cabin</a>, ",
unsafe_allow_html=True)
top_k = 3
st.slider(min_value=0,
max_value=40,
label="sensitivity:",
value=20,
step=4,
key="slider")
@st.cache(allow_output_mutation=True)
def process(img):
img_small = img.resize((300, 300), resample=Image.BILINEAR)
input_tensor = img2tensor(img_small).repeat(top_k, 1, 1, 1)
input_tensor.requires_grad = True
prediction = model(input_tensor)
confidences = torch.softmax(prediction.detach()[0], dim=-1)
tops = torch.topk(confidences.flatten(), top_k)
indeces = tops.indices.tolist()
values = tops.values.tolist()
target = torch.tensor(indeces)
cross_entropy(prediction, target).backward()
expl_tensors = [torch.mean(input_tensor.grad[option], axis=0, keepdim=True) for option in range(top_k)]
return indeces, values, expl_tensors
img = st.session_state.selected_img
indeces, values, expl_tensors = process(img)
def label_formatter(i):
index = indeces[i]
confidence = values[i]
return f"{classnames[index]} ({confidence*100:>.0f}%)"
option = st.radio("most likely objects in image:", options=range(top_k), format_func=label_formatter)
st.checkbox("blend explanation with image", key="blend")
expl_tensor = torch.abs(expl_tensors[option] * st.session_state.slider).clamp(0, 1).repeat(3, 1, 1)
expl_img = tensor2img(expl_tensor).resize(img.size)
if st.session_state.blend:
expl_img = ImageChops.multiply(img, expl_img)
image_comparison(img, expl_img, in_memory=True)
|
class Logger():
'''
Logs information about model progress into a file
'''
def __init__(self, filename=None):
if filename is None:
self.f = None
else:
self.f = open(filename,'a')
def log(self, message):
''' Adds message file '''
print(message)
if self.f is not None:
self.f.write(f'{message}\n')
def close(self):
''' Closes the file, instance is invalid after running this '''
if self.f is not None:
self.f.close()
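# A minimal usage sketch of the Logger above; the filename and message are
# purely illustrative.
if __name__ == "__main__":
    logger = Logger("training.log")
    logger.log("epoch 1: loss=0.42")
    logger.close()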
|
"""
Provides a dataclass for shared variables across server processes
"""
from dataclasses import dataclass
from multiprocessing import Barrier, Value
@dataclass
class SharedVariables:
"""Shared variables used across the server processes"""
compute_barrier: Barrier
write_barrier: Barrier
price_shared: Value
weather_shared: Value
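# A minimal instantiation sketch for the dataclass above; the party count of 2
# and the 'd' (double) typecode are assumptions, not values from the original code.
if __name__ == "__main__":
    shared = SharedVariables(
        compute_barrier=Barrier(2),           # two cooperating processes
        write_barrier=Barrier(2),
        price_shared=Value("d", 0.0),         # shared double for the price
        weather_shared=Value("d", 0.0),       # shared double for the weather reading
    )
    print(shared.price_shared.value)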
|
import pyqbdi
import struct
import rpyc
import sys
conn = None
SEG_PROT_R = 4
SEG_PROT_W = 2
SEG_PROT_X = 1
# implements the methods defined in the abstract class angrdbg.Debugger
class AngrQBDI(object):
def __init__(self, vm, mod):
self.name = "AngrQBDI"
self.vm = vm
self.mod = mod
self.maps = pyqbdi.getCurrentProcessMaps()
#-------------------------------------
def before_stateshot(self):
pass
def after_stateshot(self, state):
pass
#-------------------------------------
def is_active(self):
return True
#-------------------------------------
def input_file(self):
return sys.argv[0]
def image_base(self):
return self.maps[0].range[0]
#-------------------------------------
def get_byte(self, addr):
try:
return ord(pyqbdi.readMemory(addr, 1))
except BaseException:
return None
def get_word(self, addr):
try:
return struct.unpack("<H", pyqbdi.readMemory(addr, 2))[0]
except BaseException:
return None
def get_dword(self, addr):
try:
return struct.unpack("<I", pyqbdi.readMemory(addr, 4))[0]
except BaseException:
return None
def get_qword(self, addr):
try:
return struct.unpack("<Q", pyqbdi.readMemory(addr, 8))[0]
except BaseException:
return None
def get_bytes(self, addr, size):
try:
return str(pyqbdi.readMemory(addr, size))
except BaseException:
return None
def put_byte(self, addr, value):
pyqbdi.writeMemory(addr, chr(value))
def put_word(self, addr, value):
pyqbdi.writeMemory(addr, struct.pack("<H", value))
def put_dword(self, addr, value):
pyqbdi.writeMemory(addr, struct.pack("<I", value))
def put_qword(self, addr, value):
        pyqbdi.writeMemory(addr, struct.pack("<Q", value))
def put_bytes(self, addr, value):
pyqbdi.writeMemory(addr, value)
#-------------------------------------
def get_reg(self, name):
gpr = self.vm.getGPRState()
if name == "efl": name = "eflags"
return getattr(gpr, name)
def set_reg(self, name, value):
gpr = self.vm.getGPRState()
if name == "efl": name = "eflags"
setattr(gpr, name, value)
self.vm.setGPRState(gpr)
#-------------------------------------
def wait_ready(self):
return
def refresh_memory(self):
return
#-------------------------------------
def seg_by_name(self, name):
s = filter(lambda x: x.name == name, self.maps)
if len(s) == 0: return None
s = s[0]
perms = 0
perms |= SEG_PROT_R if s.permission & pyqbdi.PF_READ else 0
perms |= SEG_PROT_W if s.permission & pyqbdi.PF_WRITE else 0
perms |= SEG_PROT_X if s.permission & pyqbdi.PF_EXEC else 0
        return self.mod.Segment(name, s.range[0], s.range[1], perms)
def seg_by_addr(self, addr):
s = filter(lambda x: addr >= x.range[0] and addr < x.range[1], self.maps)
if len(s) == 0: return None
s = s[0]
perms = 0
perms |= SEG_PROT_R if s.permission & pyqbdi.PF_READ else 0
perms |= SEG_PROT_W if s.permission & pyqbdi.PF_WRITE else 0
perms |= SEG_PROT_X if s.permission & pyqbdi.PF_EXEC else 0
        return self.mod.Segment(s.name, s.range[0], s.range[1], perms)
def get_got(self): #return tuple(start_addr, end_addr)
s = filter(lambda x: x.name == ".got.plt", self.mod.load_project().loader.main_object.sections)[0]
return (s.vaddr, s.vaddr + s.memsize)
def get_plt(self): #return tuple(start_addr, end_addr)
s = filter(lambda x: x.name == ".plt", self.mod.load_project().loader.main_object.sections)[0]
return (s.vaddr, s.vaddr + s.memsize)
#-------------------------------------
def resolve_name(self, name): #return None on fail
return None
def register_vm(vm):
conn.modules.angrdbg.register_debugger(AngrQBDI(vm, conn.modules.angrdbg))
# transfer the current vm state into an angr state
def VMShot(vm, **kwargs):
conn.modules.angrdbg.register_debugger(AngrQBDI(vm, conn.modules.angrdbg))
return conn.modules.angrdbg.StateShot(sync_brk=False, **kwargs)
def init(host, port=18812):
global conn
conn = rpyc.classic.connect(host, port)
conn.execute("import angr, cle, claripy, angrdbg")
conn.execute("import logging; logging.getLogger().setLevel(logging.ERROR)")
sys.modules["angrdbg"] = conn.modules.angrdbg
sys.modules["angr"] = conn.modules.angr
sys.modules["cle"] = conn.modules.cle
sys.modules["claripy"] = conn.modules.claripy
|
from flask import Flask
from noteapp.views.index import bp as index_bp
app = Flask(__name__)
app.register_blueprint(index_bp) |
from ..util import orm
async def get_voice_roles_by_guild(guild_id):
result = await orm.select(
'SELECT guild_id, voice_channel_id, role_id FROM voice_role WHERE guild_id=%s;',
[guild_id]
)
return result
async def create_voice_role(guild_id, voice_channel_id, role_id):
await orm.execute(
'INSERT INTO voice_role (guild_id, voice_channel_id, role_id) values (%s, %s, %s);',
[guild_id, voice_channel_id, role_id]
)
async def delete_voice_role(guild_id, role_id):
await orm.execute(
'DELETE FROM voice_role WHERE guild_id=%s AND id=%s;',
[guild_id, role_id]
)
|
import sklearn
import os
from joblib import dump, load
skip = [36, 52, 13, 39, 68, 69, 78, 79, 32, 33, 34, 72, 73, 57, 74, 75, 64]
class_dict = {
0: "W",
1: "N1",
2: "N2",
3: "N3",
4: "REM",
5: "UNKNOWN"
}
def evaluateAcc(true, predictions):
total = true.shape[0]
totalCorrect = 0.0
for i in range(total):
if(predictions[i] == true[i]):
totalCorrect += 1
accuracy = totalCorrect/total
return accuracy
def testModel(model, xTest, yTest):
pred = model.predict(xTest)
totalPreds = len(pred)
totalCorrect = 0.0
for i in range(totalPreds):
if(pred[i] == yTest[i]):
totalCorrect += 1
accuracy = totalCorrect/totalPreds
return accuracy
def validSubject(num):
for s in skip:
if num == s:
return False
return True
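# A minimal sketch of evaluateAcc on toy arrays; numpy is imported locally and
# the label values are made up for illustration.
if __name__ == "__main__":
    import numpy as np
    true_labels = np.array([0, 1, 2, 2])
    predicted = np.array([0, 1, 1, 2])
    print(evaluateAcc(true_labels, predicted))  # 0.75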
|