11219613
|
<reponame>IINamelessII/YesOrNo
from django.db import models
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.contrib.postgres.fields import JSONField
from polls.models import Poll
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
voted = JSONField(default=dict)
rated = JSONField(default=dict)
def __str__(self):
return self.user.username
def voteYes(self, poll_id):
poll = Poll.objects.get(pk=poll_id)
if poll_id in self.voted['+']:
self.voted['+'].remove(poll_id)
poll.unvoteYes()
else:
if poll_id in self.voted['-']:
self.voted['-'].remove(poll_id)
poll.unvoteNo()
self.voted['+'].append(poll_id)
poll.voteYes()
self.save()
def voteNo(self, poll_id):
poll = Poll.objects.get(pk=poll_id)
if poll_id in self.voted['-']:
self.voted['-'].remove(poll_id)
poll.unvoteNo()
else:
if poll_id in self.voted['+']:
self.voted['+'].remove(poll_id)
poll.unvoteYes()
self.voted['-'].append(poll_id)
poll.voteNo()
self.save()
def rateLike(self, poll_id):
poll = Poll.objects.get(pk=poll_id)
if poll_id in self.rated['+']:
self.rated['+'].remove(poll_id)
poll.unrateLike()
else:
if poll_id in self.rated['-']:
self.rated['-'].remove(poll_id)
poll.unrateDislike()
self.rated['+'].append(poll_id)
poll.rateLike()
self.save()
def rateDislike(self, poll_id):
poll = Poll.objects.get(pk=poll_id)
if poll_id in self.rated['-']:
self.rated['-'].remove(poll_id)
poll.unrateDislike()
else:
if poll_id in self.rated['+']:
self.rated['+'].remove(poll_id)
poll.unrateLike()
self.rated['-'].append(poll_id)
poll.rateDislike()
self.save()
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance, voted={'+' : [], '-': []}, rated={'+' : [], '-': []})
post_save.connect(create_user_profile, sender=User)
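# Illustrative usage sketch (assumes an existing User and Poll; names are hypothetical):
# profile = some_user.profile
# profile.voteYes(poll.id)  # cast a "yes" vote
# profile.voteYes(poll.id)  # calling again retracts it (toggle)
# profile.voteNo(poll.id)   # switching sides removes the opposite vote first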
|
StarcoderdataPython
|
77051
|
#!/usr/bin/env python
# coding=utf-8
import numpy as np
def information_entropy(x, p):
    ret = [0] * len(x)
    for i in range(len(x)):
        ret[i] = -p[i] * np.log2(p[i])
    # Total entropy: sum of -p * log2(p) over all outcomes.
    result = np.sum(ret)
    return result
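# Illustrative check: a fair coin carries one bit of entropy.
# information_entropy([0, 1], [0.5, 0.5])  # -> 1.0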
|
StarcoderdataPython
|
5052629
|
from __future__ import absolute_import, division, print_function
from libtbx.test_utils.pytest import discover
tst_list = discover()
# To write tests for xia2:
# 1. Test file should be named test_*.py
# 2. Test methods should be named test_*()
# 3. Nothing else needed. Rest happens by magic.
# To run xia2 tests:
# run 'pytest' inside xia2 directory
# For more information see:
# https://github.com/dials/dials/wiki/pytest
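# A minimal test file following the conventions above might look like this
# (hypothetical tests/test_example.py):
#
# def test_example():
#     assert 1 + 1 == 2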
|
StarcoderdataPython
|
6613294
|
<reponame>sixP-NaraKa/pyvod-chat
"""
pyvod-chat - a simple tool to download a past Twitch.tv broadcast's (VOD) chat comments!
Available on GitHub (+ documentation): https://github.com/sixP-NaraKa/pyvod-chat
"""
import os
from collections import namedtuple
import requests
import dotenv
from .vodchat import VODChat
from .exceptions import TwitchApiException
# check for a .env file and get the "twitch-client-id" which we need to identify the application for use with the API
# this is NOT the same as the Client-Secret, which we do not need here
# if there is no such Client-ID or it is empty, we use a default Client-ID
dotenv.load_dotenv()
_client_id = os.getenv("twitch-client-id")
_client_id = _client_id if _client_id else "r52h1i1phlvyxs0sdi3ooam1b3w62g"
# needed request headers
_headers = {"client-id": _client_id, "accept": "application/vnd.twitchtv.v5+json"}
# additional API url
vod_url = "https://api.twitch.tv/v5/videos/{vod_id}"
class VOD:
""" Represents a Twitch.tv VOD (video-on-demand).
    The main entry point, responsible for getting the VODChat via `get_vodchat()`
as well as some basic information about the VOD itself and the channel the VOD belongs to (see below).
Additional Class Attributes
-----
The following are class attributes which contain basic information about the VOD and its associated channel.
- `vod_title`:
the title of the VOD
- `vod_length`:
the length of the VOD in hours
    - `vod_date`:
        the date the broadcast was streamed
    - `vod_game`:
        the game that was streamed
    - `vod_views`:
        the total number of VOD views
- `channel`:
the name of the channel associated with the VOD
- `channel_id`:
the channel ID
- `channel_views`:
total channel views
- `channel_followers`:
total channel followers
- `channel_broadcaster_type`:
        whether the channel is a partner or an affiliate
:param vod_id: the VOD ID to fetch the information for
"""
def __init__(self, vod_id):
self.vod_id = str(vod_id)
self._basic_data = self._get_basic_data()
self.vod_title = self._basic_data.title
self.vod_length = self._basic_data.vod_length
self.vod_date = self._basic_data.created_at
self.vod_game = self._basic_data.game
self.vod_views = self._basic_data.views
self.channel = self._basic_data.channel_name
self.channel_id = self._basic_data.channel_id
self.channel_views = self._basic_data.channel_views
self.channel_followers = self._basic_data.channel_followers
self.channel_broadcaster_type = self._basic_data.channel_type
def __repr__(self):
return "<VOD vod_title={0.vod_title!r} vod_length={0.vod_length!r} vod_date={0.vod_date!r} " \
"vod_game={0.vod_game!r} vod_views={0.vod_views!r} " \
"channel={0.channel!r} channel_id={0.channel_id!r} channel_views={0.channel_views!r} " \
"channel_followers={0.channel_followers!r} channel_broadcaster_type={0.channel_broadcaster_type!r}>"\
.format(self)
def _get_basic_data(self) -> namedtuple:
""" Gets some basic information in regards to the VOD and the channel associated with the VOD.
:return: the basic data as a `namedtuple`
"""
response = requests.get(url=vod_url.format(vod_id=self.vod_id), headers=_headers)
response_body = response.json()
if response.status_code != 200:
msg_from_twitch = response_body["message"]
raise TwitchApiException(
"Twitch API responded with '{1}' (status code {0}). Expected 200 (OK)."
.format(response.status_code, msg_from_twitch)
)
BasicData = namedtuple("BasicData", "title views created_at game vod_length "
"channel_name channel_id channel_date "
"channel_views channel_followers channel_type"
)
data = BasicData(
response_body["title"], # VOD title
response_body["views"], # VOD views
response_body["created_at"], # VOD stream date
response_body["game"], # what game has been streamed
response_body["length"], # VOD length in seconds (seconds / 3600 = hours)
response_body["channel"]["display_name"], # channel name (streamer name)
response_body["channel"]["_id"], # channel ID
response_body["channel"]["created_at"], # channel creation date
response_body["channel"]["views"], # total channel views
response_body["channel"]["followers"], # total channel followers
response_body["channel"]["broadcaster_type"] # broadcaster type (i.e. partner or affiliate, etc.)
)
data = data._replace(vod_length=round(float(data.vod_length) / 3600, 2))
return data
def get_vodchat(self) -> VODChat:
""" Gets the VODChat associated with the `vod_id`.
:return: the VODChat
"""
vod_chat = VODChat(vod_id=self.vod_id, _basic_vod_data=self._basic_data, _headers=_headers)
return vod_chat
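# Illustrative usage sketch (the VOD ID is made up):
# vod = VOD(123456789)
# print(vod.vod_title, vod.vod_length, vod.channel)
# chat = vod.get_vodchat()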
|
StarcoderdataPython
|
1927457
|
from torch import nn
from ..functions import CameraBackProjection
import torch
class Camera_back_projection_layer(nn.Module):
def __init__(self, res=128):
super(Camera_back_projection_layer, self).__init__()
assert res == 128
self.res = 128
def forward(self, depth_t, fl=418.3, cam_dist=2.2, shift=True):
n = depth_t.size(0)
if type(fl) == float:
fl_v = fl
fl = torch.FloatTensor(n, 1).cuda()
fl.fill_(fl_v)
if type(cam_dist) == float:
cmd_v = cam_dist
cam_dist = torch.FloatTensor(n, 1).cuda()
cam_dist.fill_(cmd_v)
df = CameraBackProjection.apply(depth_t, fl, cam_dist, self.res)
return self.shift_tdf(df) if shift else df
@staticmethod
def shift_tdf(input_tdf, res=128):
out_tdf = 1 - res * (input_tdf)
return out_tdf
class camera_backprojection(nn.Module):
def __init__(self, vox_res=128):
super(camera_backprojection, self).__init__()
self.vox_res = vox_res
self.backprojection_layer = CameraBackProjection()
def forward(self, depth, fl, camdist):
        return self.backprojection_layer(depth, fl, camdist, self.vox_res)
|
StarcoderdataPython
|
5050339
|
<reponame>vghost2008/wml<filename>object_detection2/modeling/backbone/darknet.py
#coding=utf-8
import tensorflow as tf
from .backbone import Backbone
from .build import BACKBONE_REGISTRY
from wnets.darknets import CSPDarkNet
import collections
import wmodule
import object_detection2.od_toolkit as odt
slim = tf.contrib.slim
class DarkNet(Backbone):
def __init__(self,cfg,*args,**kwargs):
if cfg.MODEL.PREPROCESS != "ton1p1":
print("--------------------WARNING--------------------")
print(f"Preprocess for mobilenet should be ton1p1 not {cfg.MODEL.PREPROCESS}.")
print("------------------END WARNING------------------")
super().__init__(cfg,*args,**kwargs)
self.normalizer_fn, self.norm_params = odt.get_norm(self.cfg.MODEL.DARKNETS.NORM, self.is_training)
self.activation_fn = odt.get_activation_fn(self.cfg.MODEL.DARKNETS.ACTIVATION_FN)
self.out_channels = cfg.MODEL.DARKNETS.OUT_CHANNELS
self.scope_name = "50"
def forward(self, x):
res = collections.OrderedDict()
if self.cfg.MODEL.DARKNETS.DEPTH == 53:
print("DarkNet-53")
darknet = CSPDarkNet(normalizer_fn=self.normalizer_fn,
normalizer_params=self.norm_params,
activation_fn=self.activation_fn)
_,end_points = darknet.forward(x['image'],scope=f"CSPDarkNet-53")
else:
print(f"Error Depth {self.cfg.MODEL.DARKNETS.DEPTH}")
return None
self.end_points = end_points
res.update(end_points)
level = int(list(end_points.keys())[-1][1:]) + 1
x = list(self.end_points.values())[-1]
for i in range(self.cfg.MODEL.DARKNETS.ADD_CONV):
res[f"C{level+i}"] = slim.conv2d(x, self.out_channels, [3, 3], stride=2,
activation_fn=self.activation_fn,
normalizer_fn=self.normalizer_fn,
normalizer_params=self.norm_params,
scope=f"conv{i}")
return res
@BACKBONE_REGISTRY.register()
def build_darknet_backbone(cfg, *args,**kwargs):
"""
    Create a DarkNet instance from config.
    Returns:
        DarkNet: a :class:`DarkNet` instance.
"""
# need registration of new blocks/stems?
return DarkNet(cfg,*args,**kwargs)
|
StarcoderdataPython
|
1870786
|
<filename>greentest/test__subprocess.py
# mostly tests from test_subprocess.py that used to have problems
import sys
import os
import errno
import greentest
import gevent
from gevent import subprocess
import time
if subprocess.mswindows:
SETBINARY = 'import msvcrt; msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY);'
else:
SETBINARY = ''
class Test(greentest.TestCase):
def test_exit(self):
popen = subprocess.Popen([sys.executable, '-c', 'import sys; sys.exit(10)'])
self.assertEqual(popen.wait(), 10)
def test_wait(self):
popen = subprocess.Popen([sys.executable, '-c', 'import sys; sys.exit(11)'])
gevent.wait([popen])
self.assertEqual(popen.poll(), 11)
def test_child_exception(self):
try:
subprocess.Popen(['*']).wait()
except OSError as ex:
assert ex.errno == 2, ex
else:
raise AssertionError('Expected OSError: [Errno 2] No such file or directory')
def test_leak(self):
num_before = greentest.get_number_open_files()
p = subprocess.Popen([sys.executable, "-c", "print()"],
stdout=subprocess.PIPE)
p.wait()
del p
num_after = greentest.get_number_open_files()
self.assertEqual(num_before, num_after)
def test_communicate(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stderr.write("pineapple");'
'sys.stdout.write(sys.stdin.read())'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate("banana")
self.assertEqual(stdout, "banana")
if sys.executable.endswith('-dbg'):
assert stderr.startswith('pineapple')
else:
self.assertEqual(stderr, "pineapple")
def test_universal1(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' + SETBINARY +
'sys.stdout.write("line1\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line2\\r");'
'sys.stdout.flush();'
'sys.stdout.write("line3\\r\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line4\\r");'
'sys.stdout.flush();'
'sys.stdout.write("\\nline5");'
'sys.stdout.flush();'
'sys.stdout.write("\\nline6");'],
stdout=subprocess.PIPE,
universal_newlines=1)
try:
stdout = p.stdout.read()
if hasattr(file, 'newlines'):
# Interpreter with universal newline support
self.assertEqual(stdout,
"line1\nline2\nline3\nline4\nline5\nline6")
else:
# Interpreter without universal newline support
self.assertEqual(stdout,
"line1\nline2\rline3\r\nline4\r\nline5\nline6")
finally:
p.stdout.close()
def test_universal2(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' + SETBINARY +
'sys.stdout.write("line1\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line2\\r");'
'sys.stdout.flush();'
'sys.stdout.write("line3\\r\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line4\\r\\nline5");'
'sys.stdout.flush();'
'sys.stdout.write("\\nline6");'],
stdout=subprocess.PIPE,
universal_newlines=1)
try:
stdout = p.stdout.read()
if hasattr(file, 'newlines'):
# Interpreter with universal newline support
self.assertEqual(stdout,
"line1\nline2\nline3\nline4\nline5\nline6")
else:
# Interpreter without universal newline support
self.assertEqual(stdout,
"line1\nline2\rline3\r\nline4\r\nline5\nline6")
finally:
p.stdout.close()
if sys.platform != 'win32':
def test_nonblock_removed(self):
# see issue #134
r, w = os.pipe()
p = subprocess.Popen(['grep', 'text'], stdin=subprocess.FileObject(r))
try:
os.close(w)
time.sleep(0.1)
self.assertEqual(p.poll(), None)
finally:
if p.poll() is None:
p.kill()
def test_issue148(self):
for i in range(7):
try:
subprocess.Popen('this_name_must_not_exist')
except OSError as ex:
if ex.errno != errno.ENOENT:
raise
else:
raise AssertionError('must fail with ENOENT')
def test_check_output_keyword_error(self):
try:
subprocess.check_output([sys.executable, '-c', 'import sys; sys.exit(44)'])
except subprocess.CalledProcessError as e:
self.assertEqual(e.returncode, 44)
else:
raise AssertionError('must fail with CalledProcessError')
if __name__ == '__main__':
greentest.main()
|
StarcoderdataPython
|
297320
|
<reponame>woakes070048/IT_Services
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Oneiric Group Pty Ltd and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
import json
class ITContract(Document):
pass
@frappe.whitelist()
def check_general_contract(customer):
if len(frappe.db.sql("""Select name from `tabIT Contract` where contract_type='General' and customer=%s""",(customer))):
return 1
def hide_completed_or_canceled_contracts(user):
if user == "Administrator":
return ""
else:
return """(`tabIT Contract`.workflow_state not in ('Completed'))"""
@frappe.whitelist()
def check_duplicate_name(amended_from):
    count = 0
    # Walk the amendment chain, counting each amended document.
    while amended_from:
        rows = frappe.db.sql("""Select name from `tabIT Contract` where amended_from=%s""", (amended_from))
        if rows and rows[0][0] is not None:
            amended_from = rows[0][0]
            count += 1
        else:
            amended_from = 0
    return count
@frappe.whitelist()
def get_item_price(name):
if len(frappe.db.sql("""Select price_list_rate from `tabItem Price` where item_code=%s""",(name)))>0:
return frappe.db.sql("""Select price_list_rate from `tabItem Price` where item_code=%s""",(name))[0][0]
@frappe.whitelist()
def make_billable_to_non_billable_items(non_billable_items,contract):
data = json.loads(non_billable_items.decode("utf-8")) if (non_billable_items.decode('utf-8')) else {}
print(data)
for ii in frappe.db.sql("""Select name from `tabIT Ticket` where contract=%s""",(contract)):
for i in data:
print(i)
frappe.db.sql("""Update `tabIT Ticket Detail` set billable_check=0 where item_code=%s and parent=%s""",(i,ii[0]))
@frappe.whitelist()
def get_item_description(name):
if len(frappe.db.sql("""Select description from `tabItem` where name=%s""",(name)))>0:
return frappe.db.sql("""Select description from `tabItem` where name=%s""",(name))[0][0]
|
StarcoderdataPython
|
12844025
|
# Copyright 2020-2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""operator dsl function: pooling"""
import akg
import akg.utils as utils
from akg.utils.format_transform import get_shape
from akg.ops.nn.ascend import MaxPool, Avgpool
def _pooling_compute(x, window, stride,
mode=0, pad_mode=5, pad=(0, 0, 0, 0)):
"""compute for pooling"""
# convert mode&pad_mode to str
if mode == 0:
mode = "MAX"
elif mode == 1:
mode = "AVG"
else:
raise RuntimeError("Invalid mode parameters, mode must set 0 or 1.")
if pad_mode == 5:
pad_mode = "VALID"
elif pad_mode == 6:
pad_mode = "SAME"
else:
raise RuntimeError("Invalid pad_mode parameters, pad_mode must set 5 or 6.")
# check pad
if pad not in ((0, 0, 0, 0), [0, 0, 0, 0]):
raise RuntimeError("Not support pad now!")
in_size_h = x.shape[2].value
in_size_w = x.shape[3].value
window = list(window)
if window[0] >= in_size_h and window[1] >= in_size_w:
window[0] = in_size_h
window[1] = in_size_w
pad_mode = "VALID"
stride = [1, 1]
if mode == "MAX":
res = MaxPool(x, window, stride, pad_mode)
else:
# AVG
res = Avgpool(x, window, stride, pad_mode)
return res
@utils.check_input_type(akg.tvm.tensor.Tensor,
(list, tuple), (list, tuple), (int, type(None)),
(int, type(None)), (list, tuple, type(None)),
(bool, type(None)), (int, type(None)))
def pooling(x, window, stride,
mode=0, pad_mode=5, pad=(0, 0, 0, 0),
global_pooling=False, ceil_mode=0):
"""
Pooling operation, including MaxPool and AvgPool.
Args:
x (tvm.tensor.Tensor): Input tensor, only support float16
dtype, and NC1HWC0 format.
window (Union[list, tuple]): Pooling window, only support pooling
in H or W.
stride (Union[list, tuple]): Pooling stride, only support pooling
in H or W.
mode (int): Mode of pooling, support MaxPool and AvgPool. 0 for MaxPool,
1 for AvgPool.
pad_mode (int): Mode of padding, 5 for VALID, 6 for SAME.
pad (Union[list, tuple]): Implicit padding size to up/down/left/right.
global_pooling (bool): Global pooling flag, invalid now, should be False.
ceil_mode (int): Round_mode params, invalid now, should be 0.
Returns:
A tvm.tensor.Tensor with same dtype as input.
"""
utils.check_shape(get_shape(x))
utils.ops_dtype_check(x.dtype, utils.DtypeForDavinci.FLOAT16)
if len(window) != 2:
raise RuntimeError("Invalid shape params, window shape must be 2 dims, "
"including window_h and window_w.")
if len(stride) != 2:
raise RuntimeError("Invalid shape params, stride shape must be 2 dims, "
"including stride_h and stride_w.")
if global_pooling or ceil_mode != 0:
raise RuntimeError("Not support global_pooling and ceil_mode for now.")
return _pooling_compute(x, window, stride, mode, pad_mode, pad)
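# Illustrative call, assuming `x` is a float16 NC1HWC0 akg/tvm tensor as required
# by the docstring (tensor construction omitted):
# out = pooling(x, window=(2, 2), stride=(2, 2), mode=0, pad_mode=5)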
|
StarcoderdataPython
|
6670346
|
import numpy as np
def calculate_crop_number(image, crop_height, crop_width, oc):
'''
    Calculate how many sub-images a nuclei image can be cropped into
    Args:
        image: original image
        crop_height: the expected height of output sub images
        crop_width: the expected width of output sub images
        oc: overlap coefficient; neighboring crops are spaced crop size / oc apart
Output:
output: number of cropped images
height_number: How many segments can be divided in the height dimension
width_number: How many segments can be divided in the width dimension
'''
height = image.shape[0]
width = image.shape[1]
height_number = height // crop_height + 1
height_number = oc * (height_number - 1) + 1
width_number = width // crop_width + 1
width_number = oc * (width_number - 1) + 1
output = height_number * width_number
return output, height_number, width_number
def test_and_complement(image, crop_height, crop_width):
'''
    Check the dimensions of the cropped image. If they differ from the expected dimensions, the image is zero-padded
Args:
image: input sub image
crop_height: the expected height of output sub images
crop_width: the expected width of output sub images
Output:
image: the image with expected height and width
'''
if image.shape[0] != crop_height or image.shape[1] != crop_width:
complement = np.zeros([crop_height, crop_width, image.shape[2]]).astype(np.float32)
complement[0:image.shape[0], 0:image.shape[1], :] = image
return complement
else:
return image
def crop_image(image, crop_height = 256, crop_width = 256, oc = 2):
'''
Cropping function of original image
Args:
image: original nuclei image
crop_height: the expected height of output sub images
        crop_width: the expected width of output sub images
        oc: overlap coefficient; neighboring crops are spaced crop size / oc apart
Output:
output: a numpy array which stacks the cropped images.
'''
total_output_number, height_number, width_number = calculate_crop_number(image, crop_height, crop_width, oc)
output = np.zeros([total_output_number, crop_height, crop_width, image.shape[2]]).astype(np.float32)
count = 0
for i in range(height_number):
for j in range(width_number):
unit_crop_image = image[int(crop_height/oc*i):int(crop_height/oc*i)+crop_height,
int(crop_width/oc*j):int(crop_width/oc*j)+crop_width,:]
unit_crop_image = test_and_complement(unit_crop_image, crop_height, crop_width)
output[count] = unit_crop_image
count += 1
return output
def recover_image(cropped_image, height, width, crop_height = 256, crop_width = 256, oc = 2):
'''
Recovering the sub images to original size
Args:
cropped_image: the cropped image
height: the height of original image
width: the width of original image
Output:
output: the recovered image with original size
'''
in_height_number = height // crop_height + 1
height_number = oc * (in_height_number - 1) + 1
in_width_number = width // crop_width + 1
width_number = oc * (in_width_number - 1) + 1
output_image = np.zeros([in_height_number*crop_height, in_width_number*crop_width, cropped_image.shape[3]]).astype(np.float32)
assert crop_height * (oc - 1) % (2 * oc) == 0 and crop_width * (oc - 1) % (2 * oc) == 0,\
'The input crop image size and overlap coefficient cannot meet the exact division'
h_sec_pos = int(crop_height * (oc - 1) / (2 * oc))
w_sec_pos = int(crop_width * (oc - 1) / (2 * oc))
h_thi_pos = int(crop_height * (oc + 1) / (2 * oc))
w_thi_pos = int(crop_width * (oc + 1) / (2 * oc))
h_half_pos = int(crop_height/oc)
w_half_pos = int(crop_width/oc)
for i in range(height_number):
if i == 0:
for j in range(width_number):
if height_number == 1:
if j == 0:
if width_number == 1:
output_image[0:crop_height,0:crop_width,:]=\
cropped_image[i*width_number+j][0:crop_height,0:crop_width,:]
else:
output_image[0:crop_height,0:w_thi_pos,:]=\
cropped_image[i*width_number+j][0:crop_height,0:w_thi_pos,:]
elif j == (width_number -1):
output_image[0:crop_height,j*w_half_pos+w_sec_pos:,:] =\
cropped_image[i*width_number+j][0:crop_height,w_sec_pos:crop_width,:]
else:
output_image[0:crop_height,w_thi_pos+(j-1)*w_half_pos:w_thi_pos+j*w_half_pos,:] =\
cropped_image[i*width_number+j][0:crop_height,w_sec_pos:w_thi_pos,:]
else:
if j == 0:
if width_number == 1:
output_image[0:h_thi_pos,0:crop_width,:]=\
cropped_image[i*width_number+j][0:h_thi_pos,0:crop_width,:]
else:
output_image[0:h_thi_pos,0:w_thi_pos,:]=\
cropped_image[i*width_number+j][0:h_thi_pos,0:w_thi_pos,:]
elif j == (width_number -1):
output_image[0:h_thi_pos,j*w_half_pos+w_sec_pos:,:] =\
cropped_image[i*width_number+j][0:h_thi_pos,w_sec_pos:crop_width,:]
else:
output_image[0:h_thi_pos,w_thi_pos+(j-1)*w_half_pos:w_thi_pos+j*w_half_pos,:] =\
cropped_image[i*width_number+j][0:h_thi_pos,w_sec_pos:w_thi_pos,:]
elif i == (height_number - 1):
for j in range(width_number):
if j == 0:
if width_number == 1:
output_image[i*h_half_pos+h_sec_pos:,0:crop_width,:]=\
cropped_image[i*width_number+j][h_sec_pos:crop_height,0:crop_width,:]
else:
output_image[i*h_half_pos+h_sec_pos:,0:w_thi_pos,:]=\
cropped_image[i*width_number+j][h_sec_pos:crop_height,0:w_thi_pos,:]
elif j == (width_number - 1):
output_image[i*h_half_pos+h_sec_pos:,j*w_half_pos+w_sec_pos:,:] =\
cropped_image[i*width_number+j][h_sec_pos:crop_height,w_sec_pos:crop_width,:]
else:
output_image[i*h_half_pos+h_sec_pos:,w_thi_pos+(j-1)*w_half_pos:w_thi_pos+j*w_half_pos,:] =\
cropped_image[i*width_number+j][h_sec_pos:crop_height,w_sec_pos:w_thi_pos,:]
else:
for j in range(width_number):
if j == 0:
if width_number == 1:
output_image[h_thi_pos+(i-1)*h_half_pos:h_thi_pos+i*h_half_pos,
0:crop_width,:]=cropped_image[i*width_number+j][h_sec_pos:h_thi_pos,0:crop_width,:]
else:
output_image[h_thi_pos+(i-1)*h_half_pos:h_thi_pos+i*h_half_pos,
0:w_thi_pos,:]=cropped_image[i*width_number+j][h_sec_pos:h_thi_pos,0:w_thi_pos,:]
elif j == (width_number - 1):
output_image[h_thi_pos+(i-1)*h_half_pos:h_thi_pos+i*h_half_pos,j*w_half_pos+w_sec_pos:,:] =\
cropped_image[i*width_number+j][h_sec_pos:h_thi_pos,w_sec_pos:crop_width,:]
else:
output_image[h_thi_pos+(i-1)*h_half_pos:h_thi_pos+i*h_half_pos,
w_thi_pos+(j-1)*w_half_pos:w_thi_pos+j*w_half_pos,:] = \
cropped_image[i*width_number+j][h_sec_pos:h_thi_pos,w_sec_pos:w_thi_pos,:]
output_image = output_image[0:height,0:width,:]
return output_image
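# Illustrative round trip with made-up sizes: crop into overlapping 256x256 tiles
# (stride 128 when oc=2), then stitch back to the original extent.
# img = np.random.rand(300, 400, 3).astype(np.float32)
# crops = crop_image(img, 256, 256, oc=2)
# restored = recover_image(crops, 300, 400, 256, 256, oc=2)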
|
StarcoderdataPython
|
192296
|
<reponame>assuzzanne/my-sqreen
# -*- coding: utf-8 -*-
# Copyright (c) 2016, 2017, 2018, 2019 Sqreen. All rights reserved.
# Please refer to our terms for more information:
#
# https://www.sqreen.io/terms.html
#
""" Blank request for callbacks needing a request when no one is present
"""
from .base import BaseRequest, BaseResponse
class BlankRequest(BaseRequest):
@property
def raw_headers(self):
return {}
@property
def raw_client_ip(self):
return None
@property
def client_user_agent(self):
return None
@property
def cookies_params(self):
return {}
@property
def form_params(self):
return {}
@property
def hostname(self):
return None
@property
def method(self):
return None
@property
def path(self):
return None
@property
def query_params(self):
return {}
@property
def query_params_values(self):
return []
@property
def referer(self):
return None
@property
def remote_port(self):
return None
@property
def remote_addr(self):
return None
@property
def scheme(self):
return None
@property
def server_port(self):
return None
@property
def view_params(self):
return {}
@property
def json_params(self):
return {}
class BlankResponse(BaseResponse):
@property
def status_code(self):
return None
@property
def content_type(self):
return None
@property
def content_length(self):
return None
|
StarcoderdataPython
|
6546563
|
<filename>python/src/scraping/get_pixiv.py
import os, time, json, sys, re, datetime
import numpy as np
import i2v
from pixivpy3 import PixivAPI, AppPixivAPI
from PIL import Image
from dateutil.relativedelta import relativedelta
illust_re = re.compile("([0-9]+[^/]+$)")
illust2vec = i2v.make_i2v_with_chainer(
"illust2vec_tag_ver200.caffemodel", "tag_list.json")
root_dir = "../pixiv"
# Log in
api = PixivAPI()
aapi = AppPixivAPI()
api.login(os.environ["PIXIV_USER"], os.environ["PIXIV_PASSWORD"])
checked_user_ids = []
# Cache JSON responses that have already been fetched
def get_json(json_name, init_func):
if os.path.isfile(json_name) == False:
res = init_func()
if "status" in res and res["status"] == "failure":
sys.exit(res)
time.sleep(1)
with open(json_name, mode="w") as f:
json.dump(res, f)
with open(json_name) as f:
return json.load(f)
# Check illustration tags
def contains_tags(tags):
for tag in tags:
if tag in ["艦これ","艦隊これくしょん"]:
return True
return False
def download_from_user(user_id, score=10000, ymdr="", per_page=1000):
if user_id in checked_user_ids:
return
checked_user_ids.append(user_id)
    # Fetch user info and the list of works
user_dir = "%s/illusts/%d"%(root_dir,user_id)
os.makedirs(user_dir, exist_ok=True)
works = get_json("%s/%d.json"%(user_dir,user_id),
lambda: api.users_works(user_id, per_page=per_page))
checked_path = "%s/%d-checked.json"%(user_dir,user_id)
checked = get_json(checked_path, lambda: [])
    # Fetch each illustration from the works list
if "pagination" in works:
total = works["pagination"]["total"]
total = np.min([per_page, total])
for j in range(0, total):
print("\r%s %d/%d %d "%(ymdr,j+1,total,user_id), end="")
sys.stdout.flush()
            # Skip illustrations that have already been checked
illust = works["response"][j]
large_url = illust["image_urls"]["large"]
small_url = illust["image_urls"]["px_480mw"]
large_name = illust_re.search(large_url)
small_name = illust_re.search(small_url)
if large_name == None or small_name == None:
continue
large_name = large_name.group(1)
small_name = small_name.group(1)
if large_name in checked:
continue
            # Check format and score
#if illust["is_manga"]:
#continue
if illust["type"] != "illustration":
continue
if illust["stats"]["score"] < score:
continue
if illust["age_limit"] != "all-age":
continue
#if illust["sanity_level"] != "white":
#continue
if illust["width"]*0.8 > illust["height"]:
continue
if illust["width"] < illust["height"]*0.6:
continue
#if contains_tags(illust["tags"]) == False:
#continue
            # Download the reduced-size image
aapi.download(small_url, "%s/"%user_dir)
start = time.time()
            # Run tag prediction on the reduced-size image
small_path = "%s/%s"%(user_dir,small_name)
img = Image.open(small_path)
tags = illust2vec.estimate_specific_tags([img], ["1girl","2girls","monochrome","comic"])[0]
os.remove(small_path)
if (tags["1girl"] >= 0.8 or tags["2girls"] >= 0.8) and tags["monochrome"] < 0.8:
                # Wait, then download the full-size image
time.sleep(np.max([0, 0.5 - float(time.time() - start)]))
aapi.download(large_url, "%s/"%user_dir)
start = time.time()
            # Mark as checked
checked.append(large_name)
with open(checked_path, mode="w") as f:
json.dump(checked, f)
            # Wait
time.sleep(np.max([0, 0.5 - float(time.time() - start)]))
os.system("find %s -type f | grep -e jpg -e png | wc -l"%root_dir)
def download_from_ranking():
mode = "monthly"
per_page = 10
start = datetime.date(year=2019, month=5, day=1)
end = datetime.date(year=2017, month=1, day=1)
delta = relativedelta(days=1)
while start >= end:
        # Fetch the ranking
ymd = str(start)
ranking_dir = "%s/ranking"%root_dir
os.makedirs(ranking_dir, exist_ok=True)
ranking = get_json("%s/%s-%s-%d.json"%(ranking_dir,mode,ymd,per_page),
lambda: api.ranking_all(mode=mode, date=ymd, per_page=per_page))
if "response" in ranking:
ranking = ranking["response"][0]["works"]
            # Fetch illustrations from each ranked user
for i, ranker in enumerate(ranking):
download_from_user(ranker["work"]["user"]["id"], ymdr="%s %d/%d"%(ymd,i+1,len(ranking)))
        # Step back one day
start = start - delta
def download_from_follow():
with open("%s/follow/cute"%root_dir) as f:
follows = re.findall("id=([0-9]+)", f.read())
follows = [int(x) for x in follows]
follows = list(set(follows))
for i, user_id in enumerate(follows):
download_from_user(user_id, ymdr="%d/%d"%(i+1,len(follows)))
# Run
#download_from_ranking()
#download_from_follow()
# Leftover user IDs from the original file:
# 36924420, 23098486, 28992125, 4094653
|
StarcoderdataPython
|
11326288
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# FSRobo-R Package BSDL
# ---------
# Copyright (C) 2019 FUJISOFT. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ---------
import copy
import math
class Palletizing:
class Pattern:
ZIGZAG = 1
SNAKE = 2
"""
    Class that holds coordinate information
"""
def __init__(self, pos1, pos2, pos3,
size_x, size_y, z=0, size_z=1, pattern=Pattern.ZIGZAG):
        # Define instance variables
self._max_size_x = size_x
self._max_size_y = size_y
self._max_size_z = size_z
self._next_x = 0
self._next_y = 0
self._next_z = 0
self._top_next_x = 0
self._top_next_y = 0
self._top_next_z = self._max_size_z - 1
relative_x = self._get_relative_position(pos1, pos2)
relative_y = self._get_relative_position(pos1, pos3)
if size_x > 1:
            # Per-axis step between adjacent grid positions, as an indexable list.
            interval_x = [x / (size_x - 1) for x in relative_x]
else:
interval_x = 0.0
if size_y > 1:
            interval_y = [x / (size_y - 1) for x in relative_y]
else:
interval_y = 0.0
print("interval x:{} y:{}".format(interval_x, interval_y))
if pattern == self.Pattern.ZIGZAG:
self._palletizing_pos = self._create_zigzag_list(pos1, interval_x, interval_y, size_x, size_y)
else:
self._palletizing_pos = self._create_snake_list(pos1, interval_x, interval_y, size_x, size_y)
if size_z > 1:
pos4 = self._get_z_position(pos1, pos2, pos3, relative_x, relative_y, z)
relative_z = self._get_relative_position(pos1, pos4)
            interval_z = [x / (size_z - 1) for x in relative_z]
            self._palletizing_pos_3d = self._create_3d_palletizing_list(interval_z, size_z)
elif size_z == 1:
self._palletizing_pos_3d = self._create_3d_palletizing_list(0, size_z)
else:
raise Exception("引数:size_zに渡された値が不正です。\n1以上の値を指定してください")
def get_pos(self, index_x, index_y):
return self._palletizing_pos[index_y][index_x]
def get_pos_3d(self, index_x, index_y, index_z):
return self._palletizing_pos_3d[index_z][index_y][index_x]
def get_next_pos(self):
position = self._palletizing_pos_3d[self._next_z][self._next_y][self._next_x]
        # Advance to the next index
self._next_x += 1
if self._max_size_x <= self._next_x:
self._next_x = 0
self._next_y += 1
if self._max_size_y <= self._next_y:
self._next_y = 0
self._next_z += 1
if self._max_size_z <= self._next_z:
                    # Wrap back to the first index
self._next_z = 0
return position
def get_next_pos_top(self):
position = self._palletizing_pos_3d[self._top_next_z][self._top_next_y][self._top_next_x]
        # Advance to the next index
self._top_next_x += 1
if self._max_size_x <= self._top_next_x:
self._top_next_x = 0
self._top_next_y += 1
if self._max_size_y <= self._top_next_y:
self._top_next_y = 0
self._top_next_z -= 1
if -1 >= self._top_next_z:
                    # Wrap back to the first index
self._top_next_z = self._max_size_z - 1
return position
def _get_z_position(self, pos1, pos2, pos3, relative1, relative2, height):
position_y = copy.deepcopy(pos1)
position_y.x = position_y.x + relative1[0]
position_y.y = position_y.y + relative1[1]
position_y.z = position_y.z + relative1[2]
pos4 = copy.deepcopy(position_y)
pos4.x = pos4.x + relative2[0]
pos4.y = pos4.y + relative2[1]
pos4.z = pos4.z + relative2[2]
pos_list = [pos2, pos3, pos4]
bottom_pos = pos1
high_pos = pos1
for pos in pos_list:
if bottom_pos.z > pos.z:
bottom_pos = pos
elif high_pos.z < pos.z:
high_pos = pos
relative3 = self._get_relative_position(bottom_pos, high_pos)
base_z = (relative3[0] ** 2 + relative3[1] ** 2) ** 0.5
if base_z != 0:
lean_z = math.atan(relative3[2] / base_z)
else:
lean_z = math.atan(0)
base_x = (relative1[0] ** 2 + relative1[1] ** 2) ** 0.5
if base_x != 0:
lean_x = math.atan(relative1[2] / base_x)
else:
lean_x = math.atan(0)
base_y = (relative2[0] ** 2 + relative2[1] ** 2) ** 0.5
if base_y != 0:
lean_y = math.atan(relative2[2] / base_y)
else:
lean_y = math.atan(0)
offset_z = height * math.cos(lean_z)
offset_x = offset_z * math.tan(lean_x)
offset_y = offset_z * math.tan(lean_y)
print("lean x:{} y:{} z:{}".format(math.degrees(lean_x), math.degrees(lean_y), math.degrees(lean_z)))
print("offset x:{} y:{} z:{}".format(offset_x, offset_y, offset_z))
result = copy.deepcopy(pos1)
result.x = result.x + offset_x
result.y = result.y + offset_y
result.z = result.z + offset_z
print("postion5 x:{} y:{} z:{} rz:{} ry:{} rx:{}".format(result.x, result.y, result.z, result.rz, result.ry, result.rx))
return result
def _get_relative_position(self, pos1, pos2):
relative_x = pos2.x - pos1.x
relative_y = pos2.y - pos1.y
relative_z = pos2.z - pos1.z
result = [relative_x, relative_y, relative_z]
print("relative:{}".format(result))
return result
def _create_zigzag_list(self, pos1, interval_x, interval_y, size_x, size_y):
result = []
for index_y in range(size_y):
position_y = copy.deepcopy(pos1)
print("position x:{}, y:{}".format(position_y.x, position_y.y))
if not index_y == 0:
                offset_y = [x * index_y for x in interval_y]
print("offset_y:{}".format(offset_y))
position_y.x = position_y.x + offset_y[0]
position_y.y = position_y.y + offset_y[1]
position_y.z = position_y.z + offset_y[2]
result_x = []
for index_x in range(size_x):
position_x = copy.deepcopy(position_y)
if not index_x == 0:
                    offset_x = [x * index_x for x in interval_x]
print("offset_x:{}".format(offset_x))
position_x.x = position_x.x + offset_x[0]
position_x.y = position_x.y + offset_x[1]
position_x.z = position_x.z + offset_x[2]
result_x.append(position_x)
result.append(result_x)
return result
def _create_snake_list(self, pos1, interval_x, interval_y, size_x, size_y):
result = []
for index_y in range(size_y):
position_y = copy.deepcopy(pos1)
print("position x:{}, y:{}".format(position_y.x, position_y.y))
if not index_y == 0:
                offset_y = [x * index_y for x in interval_y]
print("offset_y:{}".format(offset_y))
position_y.x = position_y.x + offset_y[0]
position_y.y = position_y.y + offset_y[1]
position_y.z = position_y.z + offset_y[2]
result_x = []
range_x = range(size_x)
if index_y % 2 == 1:
range_x = list(reversed(range_x))
for index_x in range_x:
position_x = copy.deepcopy(position_y)
if not index_x == 0:
                    offset_x = [x * index_x for x in interval_x]
print("offset_x:{}".format(offset_x))
position_x.x = position_x.x + offset_x[0]
position_x.y = position_x.y + offset_x[1]
position_x.z = position_x.z + offset_x[2]
result_x.append(position_x)
result.append(result_x)
return result
def _create_3d_palletizing_list(self, interval_z, size_z):
result = []
for index_z in range(size_z):
palletizing_list = copy.deepcopy(self._palletizing_pos)
if not index_z == 0:
                offset = [x * index_z for x in interval_z]
for palletizing_list_x in palletizing_list:
for position in palletizing_list_x:
print("offset_x:{}".format(offset))
position.x = position.x + offset[0]
position.y = position.y + offset[1]
position.z = position.z + offset[2]
result.append(palletizing_list)
return result
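# Illustrative usage sketch: pos1..pos3 are hypothetical pose objects exposing
# x/y/z (and rx/ry/rz) attributes that define the pallet corners.
# grid = Palletizing(pos1, pos2, pos3, size_x=3, size_y=2, z=50, size_z=2)
# place = grid.get_next_pos()      # walks the grid bottom layer first
# pick = grid.get_next_pos_top()   # walks the grid top layer first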
|
StarcoderdataPython
|
69809
|
num = int(input('Say a number: '))
som = 0
# Sum the proper divisors of num, printing each one.
for i in range(1, num):
    if num % i == 0:
        print(i)
        som += i
if som == num:
    print('It is a perfect number!')
else:
    print('It is not a perfect number')
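# For reference: 6 (= 1 + 2 + 3) and 28 (= 1 + 2 + 4 + 7 + 14) are perfect numbers.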
|
StarcoderdataPython
|
5063456
|
import tkinter as tk
from tkinter import filedialog
import base64
import os
import sys
import pyperclip
root = tk.Tk()
root.withdraw()
root.clipboard_clear()
file_path = filedialog.askopenfilename()
extension = ""
if file_path.endswith('.jpg'):
extension = "jpg"
elif file_path.endswith('.png'):
extension = "png"
else:
print("Incompatible image type, exiting!")
sys.exit()
with open(file_path, "rb") as image_file:
encoded_string = base64.b64encode(image_file.read())
pyperclip.copy(f"<img src='data:image/{extension};base64,{encoded_string.decode('utf-8')}'>")
print("Base64 encoded image tag has been copied to the clipboard.")
answer = input("Do you want to delete the image? Y/N: ").rstrip()
if answer.lower() == 'y':
    # Delete only on an explicit yes.
    os.remove(file_path)
|
StarcoderdataPython
|
9639701
|
<reponame>bitshares/nbs-pricefeed
from . import FeedSource
# pylint: disable=no-member
class Manual(FeedSource):
def _fetch(self):
return self.feed
|
StarcoderdataPython
|
1999041
|
from brownie import web3
from decimal import Decimal
from enum import Enum
from hexbytes import HexBytes
from typing import Any
class RiskParameter(Enum):
K = 0
LMBDA = 1
DELTA = 2
CAP_PAYOFF = 3
CAP_NOTIONAL = 4
CAP_LEVERAGE = 5
CIRCUIT_BREAKER_WINDOW = 6
CIRCUIT_BREAKER_MINT_TARGET = 7
MAINTENANCE_MARGIN_FRACTION = 8
MAINTENANCE_MARGIN_BURN_RATE = 9
LIQUIDATION_FEE_RATE = 10
TRADING_FEE_RATE = 11
MIN_COLLATERAL = 12
PRICE_DRIFT_UPPER_LIMIT = 13
AVERAGE_BLOCK_TIME = 14
def calculate_position_info(notional: Decimal,
leverage: Decimal,
trading_fee_rate: Decimal) -> (Decimal, Decimal,
Decimal, Decimal):
"""
Returns position attributes in decimal format (int / 1e18)
"""
collateral = notional / leverage
trade_fee = notional * trading_fee_rate
debt = notional - collateral
return collateral, notional, debt, trade_fee
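# Illustrative numbers (made up): 100 notional at 5x leverage with a 0.1% fee rate
# gives collateral 20, debt 80 and a trade fee of 0.1:
# calculate_position_info(Decimal(100), Decimal(5), Decimal("0.001"))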
def get_position_key(owner: str, id: int) -> HexBytes:
"""
Returns the position key to retrieve an individual position
from positions mapping
"""
return web3.solidityKeccak(['address', 'uint256'], [owner, id])
def mid_from_feed(data: Any) -> float:
"""
Returns mid price from oracle feed data
"""
(_, _, _, price_micro, price_macro, _, _, _) = data
ask = max(price_micro, price_macro)
bid = min(price_micro, price_macro)
mid = (ask + bid) / 2
return mid
def entry_from_mid_ratio(mid_ratio: int, mid: int) -> int:
"""
Returns entry price from mid ratio and mid price
NOTE: mid_ratio is uint48 format and mid price is int FixedPoint format
"""
# NOTE: mid_ratio "ONE" is 1e14 given uint48
entry_price = int((Decimal(mid_ratio) / Decimal(1e14)) * mid)
return entry_price
def calculate_mid_ratio(entry_price: int, mid_price: int) -> int:
"""
Returns mid ratio from entry price and mid price
NOTE: mid_ratio is uint48 format and mid, entry prices
are int FixedPoint format
"""
# NOTE: mid_ratio "ONE" is 1e14 given uint48
mid_ratio = int(Decimal(entry_price) * Decimal(1e14) / Decimal(mid_price))
return mid_ratio
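# Illustrative round trip with small integers (real callers use 1e18 fixed point):
# ratio = calculate_mid_ratio(entry_price=101, mid_price=100)  # 101 * 1e14 / 100
# entry_from_mid_ratio(ratio, 100)  # -> 101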
|
StarcoderdataPython
|
6547389
|
<reponame>akoul1/mvlearn
import pytest
from mvlearn.cluster.base_cluster import BaseCluster
def test_base_cluster():
base_cluster = BaseCluster()
base_cluster.fit(Xs=None)
base_cluster.predict(Xs=None)
base_cluster.fit_predict(Xs=None)
|
StarcoderdataPython
|
3462524
|
from typing import List
class Solution:
def partition(self, s: str) -> List[List[str]]:
self.ans = []
ds = []
self.solve(0, s, ds)
return self.ans
def solve(self, idx, s, ds):
if idx == len(s):
self.ans.append(ds[:])
return
for i in range(idx, len(s)):
if s[idx:i + 1] == s[idx:i + 1][::-1]:
ds.append(s[idx:i + 1])
self.solve(i + 1, s, ds)
ds.pop()
return
solution = Solution()
print(solution.partition("aab"))
print(solution.partition("abcdeff"))
|
StarcoderdataPython
|
3420314
|
<filename>Labs/Scheduling/Miniversion.py
import csv
import numpy as np
from cvxopt import matrix, glpk,solvers
reader = csv.reader(open("NursesPreferences.csv",'r'), delimiter = "\t")
total = []
level = []
for row in reader:
## array = []
for i in range(71):
if i == 0:
level.append(float(row[i]))
else:
total.append(float(row[i]))
## total.append(array)
preferences = np.array(total)
level = np.array(level)
reader1 = csv.reader(open("SchedulingData.csv","r"), delimiter=",")
data = []
for row in reader1:
array = []
for row1 in row:
a = float(row1)
array.append(a)
data.append(array)
reader = csv.reader(open("NursesPreferences.csv","r"), delimiter="\t")
shifts = []
count = 0
for row in reader:
if count < 4:
count += 1
for i in range(1,9):
shifts.append(float(row[i]))
preferences = np.array([shifts])
constraint1 = []
for i in range(4):
one = []
for j in range(32):
if j < 8 * (i+1) and j >=8 * i:
one.append(1.0)
else:
one.append(0.0)
constraint1.append(one)
constraint1 = np.array(constraint1)
## print row
b = np.array([[1.,1.,1.,1.]])
print "c", preferences.shape
print "A", constraint1.shape
print "b", b.shape
constraint2 = []
for i in range(32):
array = []
for j in range(32):
if i == j:
array.append(1.)
else:
array.append(0.)
constraint2.append(array)
for i in range(32):
array = []
for j in range(32):
if i == j:
array.append(-1.)
else:
array.append(0.)
constraint2.append(array)
G = np.array(constraint2)
h = []
for i in range(32):
h.append(1.)
for i in range(32):
h.append(0.)
h = np.array(h)
print "h", h.shape
print "G", G.shape
h = matrix(h)
G = matrix(G)
c = matrix(preferences.T)
A = matrix(constraint1)
b = matrix(b.T)
sol = glpk.ilp(c,G,h,A,b,I=set([0,1]))
solution = []
for i in range(4):
array = []
for j in range(8):
array.append( sol[1][(i)*8+j])
solution.append(array)
for row in solution:
    print(row)
print(sol[1])
|
StarcoderdataPython
|
1910306
|
from typing import Tuple, TYPE_CHECKING
from sqlalchemy import delete, insert, join, select, update
from sqlalchemy.engine import RowProxy
from smorest_sfs.plugins.queries.query import SAQuery
from smorest_sfs.plugins.queries.statement import SAStatement
from tests.plugins.queries.models import Item, User
if TYPE_CHECKING:
from sqlalchemy.orm import Query
TUPLEQ = Tuple[User, Item]
class UserQuery(SAQuery["Query[User]", User, User]):
def __init__(self) -> None:
self._query = self._session.query(User)
class UserItemQuery(SAQuery["Query[TUPLEQ]", TUPLEQ, User]):
def __init__(self) -> None:
self._query = self._session.query(User, Item).join(Item, User.id == Item.uid)
class UserStatement(SAStatement[select, str]):
def __init__(self) -> None:
self._sa_sql = select([User.name, User.nickname])
class UserJoinStatement(SAStatement[select, RowProxy]):
def __init__(self) -> None:
self._sa_sql = select(
[User.name, User.nickname, Item.name.label("Itemname")]
).select_from(join(User.__table__, Item.__table__, Item.uid == User.id))
class UserDeleteStatement(SAStatement[delete, None]):
def __init__(self) -> None:
self._sa_sql = delete(User.__table__)
class UserInsertStatement(SAStatement[insert, None]):
def __init__(self) -> None:
self._sa_sql = insert(User.__table__).values((100, "test1", "test2"))
class UserUpdateStatement(SAStatement[update, None]):
def __init__(self) -> None:
self._sa_sql = update(User.__table__).where(User.id == 1).values(name="updated")
|
StarcoderdataPython
|
3237590
|
<filename>job_title_processing/tools/occupation_nomenclature.py
# -*- coding: utf-8 -*-
"""
Process external data on nomenclature to reuse it in the classifier.
"""
from job_title_processing.tools import load_root_path
import pandas as pd
import os
def get_nomenclature(langage='FR'):
if langage=='FR':
return get_nomenclature_ROME_FR()
else:
return None
def get_nomenclature_ROME_FR():
"""Get occupation ROME code description, store it in csv file."""
ROOT_DIR = load_root_path()
fr_path = os.path.join(ROOT_DIR, "ressources_txt","FR", "nomenclature")
label_file = os.path.join(fr_path,"ROME_nomenclature.csv")
if os.path.exists(label_file):
df = pd.read_csv(label_file, encoding="utf-8-sig", sep=";")
return df
file = os.path.join(fr_path,"ROME_ArboPrincipale.xlsx")
if os.path.exists(file):
xl = pd.ExcelFile(file)
df = pd.read_excel(file, sheet_name=xl.sheet_names[1])
cols = df.columns
# Get ROME code and labels only
mask_2 = df[cols[2]] != ' '
mask_ogr = df['Code OGR'] == ' '
df = df.loc[mask_2 & mask_ogr].copy()
# Get relevant columns
df["ROME_code"] = df[cols[0]] + df[cols[1]] + df[cols[2]]
df.rename(columns={cols[3]: 'ROME_text'}, inplace=True)
# To csv
df.to_csv(
label_file, encoding="utf-8-sig", sep=";", index=False,
columns=['ROME_code', 'ROME_text']
)
return df[['ROME_code', 'ROME_text']]
else:
print(
'''*** \n'''
'''Please download the 'Arborescence principale' file available on'''
''' https://www.pole-emploi.org/opendata/repertoire-operationnel-'''
'''des-meti.html?type=article \n'''
'''Put the file 'ROME_ArboPrincipale.xlsx' in \n'''
'''job_title_processing\\job_title_processing\\ressources_txt'''
'''\\FR\\nomenclature \n'''
'''***'''
)
return None
def get_labels_ROME_FR():
"""
Read job titles and matching occupation code from Pole Emploi data.
Store results in a csv file.
"""
ROOT_DIR = load_root_path()
fr_path = os.path.join(ROOT_DIR, "ressources_txt","FR", "nomenclature")
label_file = os.path.join(fr_path,"ROME_label.csv")
if os.path.exists(label_file):
df = pd.read_csv(label_file, encoding="utf-8-sig", sep=";")
return df
file = os.path.join(fr_path,"ROME_ArboPrincipale.xlsx")
if os.path.exists(file):
xl = pd.ExcelFile(file)
df = pd.read_excel(file, sheet_name=xl.sheet_names[1])
cols = df.columns
# Get ROME code and labels only
mask = df['Code OGR'] != ' '
df = df.loc[mask].copy()
# Get relevant columns
df["ROME"] = df[cols[0]] + df[cols[1]] + df[cols[2]]
df.rename(columns={cols[3]: 'titre'}, inplace=True)
# To csv
df.to_csv(
label_file, encoding="utf-8-sig", sep=";", index=False,
columns=['ROME', 'titre']
)
return df[['ROME', 'titre']]
else:
print(
'''*** \n'''
'''Please download the 'Arborescence principale' file available on'''
''' https://www.pole-emploi.org/opendata/repertoire-operationnel-'''
'''des-meti.html?type=article \n'''
'''Put the file 'ROME_ArboPrincipale.xlsx' in \n'''
'''job_title_processing\\job_title_processing\\ressources_txt'''
'''\\FR\\nomenclature \n'''
'''***'''
)
return None
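# Illustrative usage sketch (requires the ROME_ArboPrincipale.xlsx file noted above):
# df = get_nomenclature('FR')    # DataFrame with ROME_code / ROME_text columns
# labels = get_labels_ROME_FR()  # DataFrame with ROME / titre columns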
|
StarcoderdataPython
|
12841335
|
<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017~2999 - cologler <<EMAIL>>
# ----------
#
# ----------
from abc import abstractmethod
import inspect
from .common import LifeTime, IServiceProvider, IDescriptor, ICallSiteMaker
from .param_type_resolver import ParameterTypeResolver
from .errors import ParameterTypeResolveError
from .callsites import (
InstanceCallSite,
ServiceProviderCallSite,
CallableCallSite,
ListedCallSite
)
class Descriptor(IDescriptor):
def __init__(self, service_type: type, lifetime: LifeTime):
if not isinstance(service_type, type):
raise TypeError('service_type must be a type')
if not isinstance(lifetime, LifeTime):
raise TypeError('lifetime must be a LifeTime')
self._service_type = service_type
self._lifetime = lifetime
@property
def service_type(self):
return self._service_type
@property
def lifetime(self):
return self._lifetime
class CallableDescriptor(Descriptor):
def __init__(self, service_type: type, func: callable, lifetime: LifeTime, **options):
super().__init__(service_type, lifetime)
if service_type is ParameterTypeResolver:
raise RuntimeError(f'service_type cannot be {ParameterTypeResolver}.')
if not callable(func):
raise TypeError
self._func = func
self._options = options
def make_callsite(self, service_provider, depend_chain):
param_callsites = {}
signature = inspect.signature(self._func)
params = signature.parameters.values()
params = [p for p in params if p.kind is p.POSITIONAL_OR_KEYWORD]
if params:
type_resolver: ParameterTypeResolver = service_provider.get(ParameterTypeResolver)
for param in params:
callsite = None
if param.default is param.empty:
try:
param_type = type_resolver.resolve(param, False)
except ParameterTypeResolveError as err:
if isinstance(self._func, type):
msg = f'error on creating type {self._func}: {err}'
else:
                            msg = f'error on invoking factory {self._func}: {err}'
raise ParameterTypeResolveError(msg)
callsite = service_provider.get_callsite(param_type, depend_chain)
else:
param_type = type_resolver.resolve(param, True)
if param_type is not None:
callsite = service_provider.get_callsite(param_type, depend_chain, required=False)
if callsite is None:
callsite = InstanceCallSite(None, param.default)
param_callsites[param.name] = callsite
return CallableCallSite(self, self._func, param_callsites, self._options)
@staticmethod
def try_create(service_type: type, func: callable, lifetime: LifeTime, **options):
try:
inspect.signature(func)
except ValueError:
return None
else:
return CallableDescriptor(service_type, func, lifetime, **options)
class InstanceDescriptor(Descriptor):
def __init__(self, service_type: type, instance):
super().__init__(service_type, LifeTime.singleton)
if not isinstance(instance, service_type):
raise TypeError('obj is not a {}'.format(service_type))
self._instance = instance
def make_callsite(self, service_provider, depend_chain):
return InstanceCallSite(self, self._instance)
class ServiceProviderDescriptor(Descriptor):
def __init__(self):
super().__init__(IServiceProvider, LifeTime.scoped)
def make_callsite(self, service_provider, depend_chain):
return ServiceProviderCallSite(self)
class MapDescriptor(Descriptor):
def __init__(self, service_type: type, target_service_type: type):
super().__init__(service_type, LifeTime.transient)
if not isinstance(target_service_type, type):
raise TypeError('target_service_type must be a type')
self._target = target_service_type
def make_callsite(self, service_provider, depend_chain):
return service_provider.get_callsite(self._target, depend_chain)
class ListedDescriptor(ICallSiteMaker):
def __init__(self, descriptors):
self._descriptors = tuple(descriptors)
def __hash__(self):
return hash(self._descriptors)
def __eq__(self, other):
return self._descriptors == other
def make_callsite(self, service_provider, depend_chain):
callsites = []
for descriptor in self._descriptors:
callsites.append(service_provider.get_callsite(descriptor, depend_chain))
return ListedCallSite(callsites)
|
StarcoderdataPython
|
8057421
|
# Copyright (c) 2020.
# Thingiverse plugin is released under the terms of the LGPLv3 or higher.
from unittest.mock import patch
import pytest
from surrogate import surrogate
from ....ThingiBrowser.api.JsonObject import UserData
API_CALL_TIMEOUT = 10000
class TestMyMiniFactoryApiClient:
@pytest.fixture
@surrogate("cura.CuraApplication.CuraApplication")
@surrogate("UM.Signal.Signal")
def api_client(self, application):
with patch("cura.CuraApplication.CuraApplication", application):
from ....ThingiBrowser.drivers.myminifactory.MyMiniFactoryApiClient import MyMiniFactoryApiClient
return MyMiniFactoryApiClient()
def test_getThingsFromCollectionQuery(self, api_client):
query = api_client.getThingsFromCollectionQuery("my-collection")
assert query == "collections/my-collection"
def test_getThingsBySearchQuery(self, api_client):
query = api_client.getThingsBySearchQuery("cube")
assert query == "search?q=cube"
def test_getThingsLikedByUserQuery(self, api_client):
api_client._onGetUserData(UserData({"username": "herpaderp"}))
query = api_client.getThingsLikedByUserQuery()
assert query == "users/herpaderp/objects_liked"
def test_getThingsByUserQuery(self, api_client):
api_client._onGetUserData(UserData({"username": "herpaderp"}))
query = api_client.getThingsByUserQuery()
assert query == "users/herpaderp/objects"
def test_getThingsMadeByUserQuery(self, api_client):
api_client._onGetUserData(UserData({"username": "herpaderp"}))
query = api_client.getThingsMadeByUserQuery()
assert query == "users/herpaderp/objects"
def test_getThingsMadeByUserQuery_not_authenticated(self, api_client):
query = api_client.getThingsMadeByUserQuery()
assert query == "users/None/objects"
def test_getPopularThingsQuery(self, api_client):
query = api_client.getPopularThingsQuery()
assert query == "search?sort=popularity"
def test_getFeaturedThingsQuery(self, api_client):
query = api_client.getFeaturedThingsQuery()
assert query == "search?featured=1"
def test_getNewestThingsQuery(self, api_client):
query = api_client.getNewestThingsQuery()
assert query == "search?sort=date"
|
StarcoderdataPython
|
9615116
|
<reponame>MartinThoma/stellar-model<gh_stars>1-10
import unittest
from stellar_model.model.horizon.account_data import AccountData
from tests.model.horizon import load_horizon_file
class TestAccountData(unittest.TestCase):
def test_valid(self):
raw_data = load_horizon_file("account_data.json")
parsed_data = AccountData.parse_obj(raw_data)
self.assertEqual(parsed_data.value, "MQ==")
self.assertEqual(parsed_data.sponsor, None)
if __name__ == "__main__":
unittest.main()
|
StarcoderdataPython
|
9752041
|
<gh_stars>0
def split(items_in_list, chunk_size):
"""
Partition an input list into smaller lists.
:param items_in_list: The list of items to be split.
:param chunk_size: The number of items in each returned list.
:return: List
"""
for i in range(0, len(items_in_list), chunk_size):
yield items_in_list[i:i + chunk_size]
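# Example: list(split([1, 2, 3, 4, 5], chunk_size=2)) -> [[1, 2], [3, 4], [5]]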
|
StarcoderdataPython
|
3239045
|
<gh_stars>0
# Copyright (c) 2020, <NAME>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
"""Metaclass for entities using mixins."""
from queue import Queue
from pony.orm import Optional, Required, Set
from pony.orm.core import EntityMeta
class HasMixins(EntityMeta):
"""Metaclass to override entity metaclass."""
def __init__(entity, name, bases, cls_dict):
classes = Queue()
for cls in entity.__bases__:
classes.put(cls)
# Flat explore of all base classes
while not classes.empty():
cls = classes.get()
# Add the current child class to the mixin
method = getattr(cls, "extend_entity", None)
if method:
method(entity)
children = getattr(cls, "children", None)
if children is not None:
children.append(entity)
# Explore the parent class
for parent in cls.__bases__:
classes.put(parent)
entity.__class__ = EntityMeta
EntityMeta.__init__(entity, name, bases, cls_dict)
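
# Illustrative sketch (not from the original source): a mixin consumed by
# HasMixins. `extend_entity` is invoked once for every entity class that
# inherits the mixin, and `children` collects those classes.
#
#     from pony.orm import Database, Optional
#
#     db = Database()
#
#     class TimestampMixin(object):
#         children = []
#
#         @classmethod
#         def extend_entity(cls, entity):
#             entity.updated_at = Optional(str)
#
#     class Page(TimestampMixin, db.Entity, metaclass=HasMixins):
#         title = Optional(str)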
# === StarcoderdataPython sample 8190630: ready-robotics/robotiq (0 stars) ===
# Copyright 2018 by READY Robotics Corporation.
# All rights reserved. No person may copy, distribute, publicly display, create derivative works from or otherwise
# use or modify this software without first obtaining a license from the READY Robotics Corporation.
import ready_logging
import rospy
from copy import copy
from plc_interface.base_action_server import BaseActionServer
from robotiq_c_model_control.base_c_model import BaseCModel
from robotiq_c_model_control.msg import (
CModel_robot_input,
CModel_robot_output,
GripperAction,
GripperGoal,
GripperResult
)
from robotiq_c_model_control.srv import (
GetRobotiqGripState,
GetRobotiqGripStateResponse
)
from threading import (
Condition,
Lock,
Thread
)
fault_text = {'/0x05': 'Action delayed, activation (reactivation) must be completed prior to renewed action.',
'/0x07': 'The activation bit must be set prior to action.',
'/0x08': 'Maximum operating temperature exceeded, wait for cool-down.',
'/0x0A': 'Under minimum operating voltage.',
'/0x0B': 'Automatic release in progress.',
'/0x0C': 'Internal processor fault.',
'/0x0D': 'Activation fault, verify that no interference or other error occurred.',
'/0x0E': 'Overcurrent triggered.',
'/0x0F': 'Automatic release completed.',
}
class RobotiqCommandTimeout(object):
"""Track command timeouts."""
def __init__(self, seconds):
"""Create a command timer.
Args:
:param seconds: timeout duration in seconds
:type seconds: float
"""
super(RobotiqCommandTimeout, self).__init__()
self.start_time = rospy.get_rostime()
self.duration = rospy.Duration(seconds)
def start(self):
"""Start the timer."""
self.start_time = rospy.get_rostime()
def expired(self):
"""Check if the timer has expired.
Returns:
True if timeout is expired
"""
return rospy.get_rostime() - self.start_time > self.duration
@ready_logging.logged
class RobotiqGripperROSInterface:
""" ROS Interface for interacting with a Robotiq Gripper """
has_goal_ = False
interrupted_ = False
resetting_ = False
has_goal_lock = Lock()
interrupt_lock = Lock()
resetting_lock = Lock()
@staticmethod
def grip_status_to_str(status):
"""Convert Gripper response status into a string.
Args:
:param status: the status registers from the gripper.
:type status: CModel_robot_input
Return: str
"""
return 'gACT:{}, gGTO:{}, gSTA:{}, gOBJ:{}, ' \
'gFLT:{}, gPR:{}, gPO:{}, gCU:{}'.format(
status.gACT,
status.gGTO,
status.gSTA,
status.gOBJ,
status.gFLT,
status.gPR,
status.gPO,
status.gCU
)
@property
def has_goal(self):
with self.has_goal_lock:
return self.has_goal_
@has_goal.setter
def has_goal(self, on_goal):
with self.has_goal_lock:
self.has_goal_ = on_goal
@property
def interrupted(self):
with self.interrupt_lock:
return self.interrupted_
@interrupted.setter
def interrupted(self, interrupt):
with self.interrupt_lock:
self.interrupted_ = interrupt
@property
def resetting(self):
with self.resetting_lock:
return self.resetting_
@resetting.setter
def resetting(self, in_reset):
with self.resetting_lock:
self.resetting_ = in_reset
def __init__(self, name, comms):
self.cmodel = BaseCModel(comms)
self.name = name
self.max_closed = 255
self.max_open = 3
self.goal_thread = None
self.last_goal_wait = False
self.reset_thread = None
self.gripper_as_ = BaseActionServer('/robotiq_gripper_action', GripperAction, self.goal_cb, auto_start=False)
self.reg_status = self.cmodel.get_status()
self.reg_status_lock = Lock()
self.reg_status_cv = Condition(self.reg_status_lock)
self.grip_state = GetRobotiqGripStateResponse.UNKNOWN
self.grip_state_lock_ = Lock()
self.grip_state_srv_ = rospy.Service('/robotiq/get_grip_state', GetRobotiqGripState, self.handle_grip_state_service)
self.command_pub = rospy.Publisher('/robotiq_input_command', CModel_robot_output, queue_size=1)
self.reg_status_pub = rospy.Publisher('/robotiq_state', CModel_robot_input, queue_size=1)
def start(self):
self.gripper_as_.start()
def shutdown(self):
if self.has_goal:
self.interrupted = True
# Wake anyone waiting on updated status
with self.reg_status_cv:
self.reg_status_cv.notify_all()
if self.goal_thread:
try:
self.goal_thread.join(3.0)
except RuntimeError:
pass
self.gripper_as_.cleanup()
with self.grip_state_lock_:
self.grip_state_srv_.shutdown()
self.command_pub.unregister()
self.reg_status_pub.unregister()
def refresh_status(self):
""" Query the gripper status and update topics/state accordingly. """
status = self.cmodel.get_status()
if status is not None:
self.reg_status_pub.publish(status)
with self.reg_status_cv:
self.reg_status = status
self.reg_status_cv.notify_all()
if status.gFLT != 0 and not self.resetting:
self.__log.info('RESETTING Robotiq {}'.format(self.name))
self.reset_gripper()
return status
def wait_for_next_status(self, seconds=0.25):
with self.reg_status_cv:
self.reg_status_cv.wait(seconds)
return copy(self.reg_status)
def goal_cb(self, goal_handle):
"""
This cb handles all gripper requests from chiron, whether it be from a node or the robot_interface of the
robot control panel.
We first complete checks to verify if we can execute the request. We adjust the position depending on the
calibration and then execute. If wait is true, we wait until the actual gripper position is within 3 values
of the requested position before returning success. We return failure only if the communication fails, the
command is interrupted, or rospy is shutdown.
Args:
:param goal_handle: A Robotiq Gripper Action goal handle. The goal is of type GripperGoal and has the
following fields:
direction: This field has three options: CUSTOM - use value from the position field, OPEN, or CLOSE
position: The position of the gripper (0: open, 255: closed)
force: The stall torque of the gripper while moving (0: no re-grasping , 255: max force)
auto_release: Activate auto release on robot emergency stop
auto_release_direction: The direction of auto release, either OPEN or CLOSE
wait: Wait for current request to finish before executing a new request
:type goal_handle: ServerGoalHandle
"""
if self.has_goal and self.last_goal_wait:
goal_handle.set_rejected(text='Not Accepting Multiple Goals at Once')
return
elif self.resetting:
goal_handle.set_rejected(text='Gripper is Currently Resetting')
return
elif self.has_goal and not self.last_goal_wait:
# Preempting previous goal
self.interrupted = True
if self.goal_thread:
self.goal_thread.join()
self.has_goal = True
goal_handle.set_accepted()
position = self.map_position_to_calibration(goal_handle.get_goal())
status = self.wait_for_next_status()
if abs(status.gPO - position) < 3:
result = self.generate_grip_result(status=status, result_code=GripperResult.SUCCESS)
self.finish_goal(goal_handle, result, 'Already at goal')
else:
thread_name = '{} goal executor'.format(self.name)
self.goal_thread = Thread(target=self.on_goal, name=thread_name, args=(goal_handle,))
self.goal_thread.start()
def on_goal(self, gh):
goal = gh.get_goal()
goal.position = self.map_position_to_calibration(goal)
result = self.send_gripper_command(goal)
if not result:
self.__log.err('Could not send position command.')
self.finish_failed_goal(gh, 'Communication Timed Out With Gripper')
return
self.interrupted = False
if goal.wait:
self.last_goal_wait = True
# Check goal direction
status = self.wait_for_next_status()
if goal.position < status.gPO:
# Gripper moving toward the open position
is_in_motion = lambda status: status.gOBJ == 0
else:
# Gripper moving toward the closed position
is_in_motion = lambda status: status.gOBJ == 0 or status.gOBJ == 2
# Wait for motion
timer = RobotiqCommandTimeout(seconds=3.0)
timer.start()
while not rospy.is_shutdown() and not self.interrupted:
status = self.wait_for_next_status()
if is_in_motion(status) or abs(status.gPO - goal.position) < 3:
break
if timer.expired():
self.__log.err('Timeout on motion start w/ status: "{}"'.format(self.grip_status_to_str(status)))
self.finish_failed_goal(gh, 'Timeout waiting for motion to start')
return
if rospy.is_shutdown():
self.finish_failed_goal(gh, 'ROS shutdown')
return
# Wait until we reach the desired position
timer = RobotiqCommandTimeout(seconds=3.0)
timer.start()
while not rospy.is_shutdown() and not self.interrupted:
status = self.wait_for_next_status()
if status.gOBJ != 0 or abs(status.gPO - goal.position) < 3:
break
if timer.expired():
self.__log.err('Timeout on position goal w/ status: "{}"'.format(self.grip_status_to_str(status)))
self.finish_failed_goal(gh, 'Timeout waiting to reach desired position')
return
status = self.wait_for_next_status()
if self.interrupted or rospy.is_shutdown():
self.abort_goal(gh, self.get_fault_text(int(status.gFLT)))
else:
result = self.generate_grip_result(status=status, result_code=GripperResult.SUCCESS)
self.finish_goal(gh, result, 'Succeeded')
else:
result = GripperResult()
result.result_code = result.SUCCESS
self.finish_goal(gh, result)
def map_position_to_calibration(self, goal):
"""
This function maps the gripper request to an actionable position. If direction is set to OPEN or CLOSE, the
position is set to the calibrated max_open or max_closed positions. If direction is set to CUSTOM, the
requested positional value is checked to ensure it is within the calibration bounds.
Args:
:param goal: The requested position and direction to move the gripper.
:type goal: GripperGoal
Returns:
:rparam pos: The calibrated position to move the gripper. Values can range from 0 to 255
:rtype pos: int
"""
if goal.direction == goal.OPEN:
pos = self.max_open
elif goal.direction == goal.CLOSE:
pos = self.max_closed
elif goal.position > self.max_closed:
pos = self.max_closed
elif goal.position < self.max_open:
pos = self.max_open
else:
pos = int(goal.position)
return pos
def generate_grip_result(self, result_code=GripperResult.SUCCESS, status=None):
"""Create a GripperResult with the next status update.
Args:
:param result_code: the Gripper goal result code.
:type result_code: GripperResult code
Return: GripperResult based on the next received status.
"""
if status is None:
status = self.wait_for_next_status()
result = GripperResult()
result.position = status.gPO
result.object_detection = status.gOBJ
if status.gGTO != 0:
grabbed_obj = (status.gOBJ == 1 or status.gOBJ == 2)
result.has_object = result.HAS_OBJ_YES if grabbed_obj else result.HAS_OBJ_NO
else:
result.has_object = result.HAS_OBJ_UKNOWN
result.result_code = result_code
return result
def set_goal_done(self):
"""Set flags indicating that there is no pending goal."""
self.has_goal = False
self.last_goal_wait = False
def finish_failed_goal(self, goal_handle, text=""):
"""Finish a goal that failed to complete.
Args:
:param goal_handle: handle to notify the ActionServer
:type goal_handle: ServerGoalHandle
:param text: optional text status to be passed back to the
ActionServer
:type text: str
"""
result = self.generate_grip_result(GripperResult.FAILURE)
self.finish_goal(goal_handle, result, text)
def finish_goal(self, goal_handle, result, text=""):
"""Finish a goal being handled with a given result.
Args:
:param goal_handle: handle to notify the ActionServer
:type goal_handle: ServerGoalHandle
:param result: the result of the gripper action
:type result: GripperResult
:param text: optional text status to be passed back to the
ActionServer.
:type text: str
"""
goal_handle.set_succeeded(result, text)
self.set_goal_done()
def abort_goal(self, goal_handle, text=""):
"""Abort a goal being handled with a given result.
Args:
:param goal_handle: handle to notify the ActionServer
:type goal_handle: ServerGoalHandle
:param text: optional text status to be passed back to the
ActionServer.
:type text: str
"""
result = self.generate_grip_result(GripperResult.FAILURE)
goal_handle.set_aborted(result, text)
self.set_goal_done()
def reset_gripper(self):
# If the gripper is currently running a goal we must stop it
self.resetting = True
self.reset_thread = Thread(target=self.reset, name='Robotiq Gripper Reset Thread')
self.reset_thread.start()
def do_calibration_move(self, direction=GripperGoal.OPEN, wait_seconds=1.25):
"""Do a calibration command.
Send a calibration command to the gripper and wait for the next status after completion.
Args:
:param direction: gripper open/close
:type direction GripperGoal direction
:param wait_seconds: time in seconds before getting a status update.
:type wait_seconds: float
Returns:
True if command sent correctly.
"""
goal = GripperGoal()
goal.force = 255
goal.direction = direction
goal.auto_release = goal.DISABLED
sent = self.send_gripper_command(goal)
if sent:
rospy.sleep(wait_seconds)
status = self.wait_for_next_status()
if direction == GripperGoal.OPEN:
self.max_open = status.gPO
elif direction == GripperGoal.CLOSE:
self.max_closed = status.gPO
return sent
def reset(self):
# Reset the gripper during startup or after a fault. A specific order of states must be sent to the gripper.
# We recalibrate after reset to be thorough.
# If we are currently executing a goal, terminate it
if self.has_goal:
self.interrupted = True
if self.goal_thread:
self.goal_thread.join()
# Lock out the action server
self.has_goal = True
# First check if we were previously initialized
status = self.wait_for_next_status()
if status.gACT == 1:
# Deactive Gripper
goal = CModel_robot_output()
goal.rACT = 0
self.send_gripper_command(goal, parse=False)
# Wait for the gripper to deactivate
timer = RobotiqCommandTimeout(seconds=3.0)
timer.start()
while not rospy.is_shutdown() and not self.interrupted:
status = self.wait_for_next_status()
if status.gSTA == 0:
break
if timer.expired():
self.__log.err('Timeout on deactivate w/ status: "{}"'.format(self.grip_status_to_str(status)))
rospy.signal_shutdown('Failed to Deactivate Gripper -- timeout')
break
if rospy.is_shutdown():
self.resetting = False
self.has_goal = False
return
# Active Gripper
goal = CModel_robot_output()
goal.rACT = 1
self.send_gripper_command(goal, parse=False)
# Wait for gripper to activate
timer = RobotiqCommandTimeout(seconds=3.0)
timer.start()
while not rospy.is_shutdown() and not self.interrupted:
status = self.wait_for_next_status()
if status.gSTA == 3:
break
if timer.expired():
self.__log.err('Timeout on activate w/ status: "{}"'.format(self.grip_status_to_str(status)))
rospy.signal_shutdown('Failed to Activate Gripper -- timeout')
break
if rospy.is_shutdown():
self.resetting = False
self.has_goal = False
return
rospy.sleep(1.0)
if not self.do_calibration_move(GripperGoal.CLOSE, wait_seconds=1.25):
self.has_goal = False
self.resetting = False
self.__log.err('Could not send calibration close command.')
rospy.signal_shutdown('Failed Close Calibration')
return
if not self.do_calibration_move(GripperGoal.OPEN, wait_seconds=1.25):
self.has_goal = False
self.resetting = False
self.__log.err('Could not send calibration open command.')
rospy.signal_shutdown('Failed Open Calibration')
return
self.has_goal = False
self.resetting = False
def parse_cmd(self, goal):
cmd = CModel_robot_output()
# Auto Release
if goal.auto_release == goal.ENABLED:
cmd.rATR = 1
cmd.rADR = 1 if goal.auto_release_direction == goal.OPEN else 0
else:
cmd.rATR = 0
# Set active and go to
cmd.rACT = 1
cmd.rGTO = 1
# Position
cmd.rPR = self.map_position_to_calibration(goal)
# Speed - Always set to max for now
cmd.rSP = 255
# Force
if goal.force > 255:
cmd.rFR = 255
elif goal.force < 0:
cmd.rFR = 0
else:
cmd.rFR = int(goal.force)
return cmd
def send_gripper_command(self, goal, parse=True):
try:
if parse:
goal = self.parse_cmd(goal)
self.command_pub.publish(goal)
return self.cmodel.send_command(goal)
except Exception as exc:
self.__log.err('Error While Sending Command [{}] - Shutting Down'.format(exc))
rospy.signal_shutdown('Error While Sending Command - Shutting Down')
return False
def handle_grip_state_service(self, request):
with self.grip_state_lock_:
resp = GetRobotiqGripStateResponse()
if self.resetting:
resp.state = resp.UNKNOWN
else:
registers = self.wait_for_next_status()
moved = registers.gGTO != 0
if moved and registers.gOBJ == 1:
# Fingers stopped due to contact while opening
resp.state = GetRobotiqGripStateResponse.OPEN
elif moved and registers.gOBJ == 2:
# Fingers stopped due to contact while closing
resp.state = GetRobotiqGripStateResponse.CLOSED
elif moved and registers.gOBJ == 3:
# Fingers moved to the requested position without detecting an
# object. Use the gripper position to determine the current
# grip state.
position = int(registers.gPO)
position_median = abs(self.max_closed - self.max_open) / 2
if position < position_median:
resp.state = GetRobotiqGripStateResponse.OPEN
else:
resp.state = GetRobotiqGripStateResponse.CLOSED
else:
# Gripper is either moving or performing a reset. Use the last
# known state.
resp.state = self.grip_state
self.grip_state = resp.state
return resp
    @staticmethod
    def get_fault_text(fault_code):
        # fault_text is keyed by strings such as '/0x05', while callers pass
        # the raw integer gFLT register, so normalize into that key format
        key = '/0x{:02X}'.format(fault_code)
        return fault_text.get(key, 'Unknown Error Code.')
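
# Illustrative wiring sketch (assumes a ROS runtime and a `comms` object
# compatible with BaseCModel; the names below are hypothetical):
#
#     rospy.init_node('robotiq_gripper')
#     interface = RobotiqGripperROSInterface('left_gripper', comms)
#     interface.start()
#     rate = rospy.Rate(100)
#     while not rospy.is_shutdown():
#         interface.refresh_status()  # publishes /robotiq_state and wakes waiters
#         rate.sleep()
#     interface.shutdown()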
# === StarcoderdataPython sample 11345567 ===
"""与url字符串相关的工具代码."""
import json
from typing import Optional, Callable
from urllib.parse import urlparse
import requests as rq
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
from requests_oauthlib import OAuth1
def is_url(url: str) -> bool:
"""判断url是否是url.
Args:
url (str): 待判断的url字符串
Returns:
bool: 是否是url
"""
try:
result = urlparse(url)
return all([result.scheme])
except ValueError:
return False
def is_http_url(url: str) -> bool:
"""判断url是否是http请求的url.
Args:
url (str): 待判断的url字符串
Returns:
bool: 是否是url
"""
try:
result = urlparse(url)
return all([result.scheme, result.netloc]) and result.scheme in ("http", "https")
except ValueError:
return False
def is_file_url(url: str) -> bool:
"""判断url是否是文件协议相关的url.
Args:
url (str): 待判断的url字符串
Returns:
bool: 是否是url
"""
try:
result = urlparse(url)
return all([result.scheme]) and result.scheme == "file"
except ValueError:
return False
def query_http(url: str, method: str, *,
auth: Optional[str] = None,
auth_type: Optional[str] = None,
payload: Optional[str] = None,
payload_type: Optional[str] = None,
stream: bool = False,
verify: bool = False,
cert: Optional[str] = None,
cb: Optional[Callable[[str], None]] = None) -> None:
"""http请求并打印结果.
Args:
url (str): 要访问的http资源的地址
method (str): 访问资源的方法
auth (Optional[str], optional): 用户身份验证字符串. Defaults to None.
auth_type (Optional[str], optional): 用户身份的验证类型. Defaults to None.
payload (Optional[str], optional): 请求负载. Defaults to None.
payload_type (Optional[str], optional): 请求的负载类型. Defaults to None.
stream (bool, optional): 返回是否为流数据. Defaults to False.
verify (bool, optional): https请求是否验证. Defaults to False.
cert (Optional[str], optional): https请求的客户端认证文件. Defaults to None.
cb (Optional[Callable[[str],None]], optional): 获取到数据后的处理回调. Defaults to None.
"""
with rq.Session() as s:
if verify:
s.verify = verify
        if auth_type and auth:
            if auth_type == "basic":
                user, pwd = auth.split(",")
                s.auth = HTTPBasicAuth(user, pwd)
            elif auth_type == "digest":
                user, pwd = auth.split(",")
                s.auth = HTTPDigestAuth(user, pwd)
            elif auth_type == "jwt":
                s.headers = rq.structures.CaseInsensitiveDict({"Authorization": "Bearer " + auth})
            elif auth_type == "oauth1":
                app_key, app_secret, oauth_token, oauth_token_secret = auth.split(",")
                s.auth = OAuth1(app_key, app_secret, oauth_token, oauth_token_secret)
            else:
                raise AttributeError(f"auth_type {auth_type} is not supported yet")
if cert:
cert_list = cert.split(",")
cert_list_len = len(cert_list)
if cert_list_len == 1:
s.cert = cert_list[0]
elif cert_list_len == 2:
s.cert = (cert_list[0], cert_list[1])
else:
raise AttributeError(f"cert 参数 {cert} 不合法")
if payload is None:
if stream is True:
with s.request(method.upper(), url, stream=True) as res:
for line in res.iter_lines(decode_unicode=True):
if line:
if cb:
cb(line)
else:
res = s.request(method.upper(), url)
if cb:
cb(res.text)
else:
if payload_type == "stream":
if stream is True:
with open(payload, "rb") as f:
with s.request(method.upper(), url, data=f, stream=True) as res:
for line in res.iter_lines(decode_unicode=True):
if line:
if cb:
cb(line)
else:
with open(payload, "rb") as f:
res = s.request(method.upper(), url, data=f)
if cb:
cb(res.text)
else:
with open(payload, "r", encoding='utf-8') as fu:
payload_dict = json.load(fu)
if stream is True:
if payload_type == "json":
with s.request(method.upper(), url, json=payload_dict, stream=True) as res:
for line in res.iter_lines(decode_unicode=True):
if line:
if cb:
cb(line)
elif payload_type == "form":
with s.request(method.upper(), url, data=payload_dict, stream=True) as res:
for line in res.iter_lines(decode_unicode=True):
if line:
if cb:
cb(line)
elif payload_type == "url":
with s.request(method.upper(), url, params=payload_dict, stream=True) as res:
for line in res.iter_lines(decode_unicode=True):
if line:
if cb:
cb(line)
else:
raise AttributeError(f"不支持的负载类型{payload_type}")
else:
if payload_type == "json":
res = s.request(method.upper(), url, json=payload_dict)
if cb:
cb(res.text)
elif payload_type == "form":
res = s.request(method.upper(), url, data=payload_dict)
if cb:
cb(res.text)
elif payload_type == "url":
res = s.request(method.upper(), url, params=payload_dict)
if cb:
cb(res.text)
else:
raise AttributeError(f"不支持的负载类型{payload_type}")
def parse_file_url(url: str) -> str:
"""从file url中提取文件系统中的路径.
Args:
url (str): file url
Returns:
str: file url中提取出的路径
"""
path_str = urlparse(url).path
if ":" in path_str:
path = path_str[1:]
else:
path = path_str
return path
def parse_http_url(url: str) -> str:
"""从file url中提取文件系统中的路径.
Args:
url (str): file url
Returns:
str: file url中提取出的路径
"""
path = urlparse(url).path
return path
def get_source_from_url(url: str) -> str:
"""从指定url中回去源数据.
注意只能获取静态http资源.
Args:
url (str): url地址
Raises:
AttributeError: url未成功返回
AttributeError: 未支持的类型
Returns:
str: 内容文本
"""
if is_http_url(url):
rs = rq.get(url)
if rs.status_code != 200:
raise AttributeError(f"url {url} 未成功返回")
else:
return rs.text
elif is_file_url(url):
path = parse_file_url(url)
with open(path, "r", encoding='utf-8') as f:
content = f.read()
return content
else:
raise AttributeError(f"url {url} 未支持的类型")
# === StarcoderdataPython sample 6599086: teddix-common/teddix/TeddixInventory.py ===
#!/usr/bin/env python
#
import os
import re
import sys
import glob
import time
import psutil
import locale
import platform
import dmidecode
import subprocess
import xml.dom.minidom as minidom
import xml.etree.ElementTree as xml
# Syslog handler
import TeddixLogger
# Config parser
import TeddixConfigFile
import TeddixParser
class TeddixBaseline:
def __init__(self,syslog,cfg):
self.syslog = syslog
self.cfg = cfg
system = platform.system()
self.syslog.info("Generating Baseline")
if system == 'Linux':
import TeddixLinux
self.osbase = TeddixLinux.TeddixLinux(syslog)
elif system == 'SunOS':
import TeddixSunOS
self.osbase = TeddixSunOS.TeddixSunOS(syslog)
        elif system == 'HP-UX':
            # TODO: not implemented yet; TeddixHPUX is not imported above
            print "TODO: HP-UX support not implemented"
            self.osbase = TeddixHPUX.TeddixHPUX(syslog)
        elif system == 'AIX':
            # TODO: not implemented yet; TeddixAix is not imported above
            print "TODO: AIX support not implemented"
            self.osbase = TeddixAix.TeddixAix(syslog)
        elif system == 'NT':
            # TODO: not implemented yet; TeddixWindows is not imported above
            print "TODO: Windows support not implemented"
            self.osbase = TeddixWindows.TeddixWindows(syslog)
else:
raise RuntimeError
def _getdmi(self):
from pprint import pprint
DMI = { }
# get BIOS Data
#tmp = dmidecode.bios()
#pprint(tmp)
for v in dmidecode.bios().values():
if type(v) == dict and v['dmi_type'] == 0:
DMI['bios',0,'BIOS Revision'] = str((v['data']['BIOS Revision']))
DMI['bios',0,'ROM Size'] = str((v['data']['ROM Size']))
                try:
                    # some dmidecode builds misspell this key as 'Relase Date'
                    DMI['bios',0,'Release Date'] = str((v['data']['Relase Date']))
                except (KeyError):
                    DMI['bios',0,'Release Date'] = str((v['data']['Release Date']))
DMI['bios',0,'Runtime Size'] = str((v['data']['Runtime Size']))
DMI['bios',0,'Vendor'] = str((v['data']['Vendor']))
DMI['bios',0,'Version'] = str((v['data']['Version']))
# get System Data
#tmp = dmidecode.system()
#pprint(tmp)
for v in dmidecode.system().values():
if type(v) == dict and v['dmi_type'] == 1:
DMI['system',0,'Family'] = str((v['data']['Family']))
DMI['system',0,'Manufacturer'] = str((v['data']['Manufacturer']))
DMI['system',0,'Product Name'] = str((v['data']['Product Name']))
DMI['system',0,'SKU Number'] = str((v['data']['SKU Number']))
DMI['system',0,'Serial Number'] = str((v['data']['Serial Number']))
DMI['system',0,'UUID'] = str((v['data']['UUID']))
DMI['system',0,'Version'] = str((v['data']['Version']))
DMI['system',0,'Wake-Up Type'] = str((v['data']['Wake-Up Type']))
# get BaseBoard Data
#tmp = dmidecode.baseboard()
#pprint(tmp)
for v in dmidecode.baseboard().values():
if type(v) == dict and v['dmi_type'] == 2:
DMI['baseboard',0,'Manufacturer'] = str((v['data']['Manufacturer']))
DMI['baseboard',0,'Product Name'] = str((v['data']['Product Name']))
DMI['baseboard',0,'Serial Number'] = str((v['data']['Serial Number']))
DMI['baseboard',0,'Version'] = str((v['data']['Version']))
# get chassis Data
#tmp = dmidecode.chassis()
#pprint(tmp)
for v in dmidecode.chassis().values():
if type(v) == dict and v['dmi_type'] == 3:
DMI['chassis',0,'Asset Tag'] = str((v['data']['Asset Tag']))
DMI['chassis',0,'Boot-Up State'] = str((v['data']['Boot-Up State']))
DMI['chassis',0,'Lock'] = str((v['data']['Lock']))
DMI['chassis',0,'Manufacturer'] = str((v['data']['Manufacturer']))
DMI['chassis',0,'Power Supply State'] = str((v['data']['Power Supply State']))
DMI['chassis',0,'Security Status'] = str((v['data']['Security Status']))
DMI['chassis',0,'Serial Number'] = str((v['data']['Serial Number']))
DMI['chassis',0,'Thermal State'] = str((v['data']['Thermal State']))
DMI['chassis',0,'Type'] = str((v['data']['Type']))
DMI['chassis',0,'Version'] = str((v['data']['Version']))
# get Processor Data
#tmp = dmidecode.processor()
#pprint(tmp)
i = 0
for v in dmidecode.processor().values():
if type(v) == dict and v['dmi_type'] == 4:
DMI['processor',i,'Asset Tag'] = str((v['data']['Asset Tag']))
DMI['processor',i,'Characteristics'] = str((v['data']['Characteristics']))
DMI['processor',i,'Core Count'] = str((v['data']['Core Count']))
DMI['processor',i,'Core Enabled'] = str((v['data']['Core Enabled']))
DMI['processor',i,'Current Speed'] =str((v['data']['Current Speed']))
DMI['processor',i,'External Clock'] = str((v['data']['External Clock']))
DMI['processor',i,'Family'] = str((v['data']['Family']))
DMI['processor',i,'L1 Cache Handle'] = str((v['data']['L1 Cache Handle']))
DMI['processor',i,'L2 Cache Handle'] = str((v['data']['L2 Cache Handle']))
DMI['processor',i,'L3 Cache Handle'] = str((v['data']['L3 Cache Handle']))
DMI['processor',i,'Manufacturer'] = str((v['data']['Manufacturer']['Vendor']))
DMI['processor',i,'Max Speed'] = str((v['data']['Max Speed']))
DMI['processor',i,'Part Number'] = str((v['data']['Part Number']))
DMI['processor',i,'Serial Number'] = str((v['data']['Serial Number']))
DMI['processor',i,'Socket Designation'] = str((v['data']['Socket Designation']))
DMI['processor',i,'Status'] = str((v['data']['Status']))
DMI['processor',i,'Thread Count'] = str((v['data']['Thread Count']))
DMI['processor',i,'Type'] = str((v['data']['Type']))
DMI['processor',i,'Upgrade'] = str((v['data']['Upgrade']))
DMI['processor',i,'Version'] = str((v['data']['Version']))
DMI['processor',i,'Voltage'] = str((v['data']['Voltage']))
i += 1
# get Memory Data
#tmp = dmidecode.memory()
#pprint(tmp)
i = 0
for v in dmidecode.memory().values():
if type(v) == dict and v['dmi_type'] == 17 :
if str((v['data']['Size'])) != 'None':
DMI['memory',i,'Data Width'] = str((v['data']['Data Width']))
DMI['memory',i,'Error Information Handle'] = str((v['data']['Error Information Handle']))
DMI['memory',i,'Form Factor'] = str((v['data']['Form Factor']))
DMI['memory',i,'Bank Locator'] = str((v['data']['Bank Locator']))
DMI['memory',i,'Locator'] = str((v['data']['Locator']))
DMI['memory',i,'Manufacturer'] = str((v['data']['Manufacturer']))
DMI['memory',i,'Part Number'] = str((v['data']['Part Number']))
DMI['memory',i,'Serial Number'] = str((v['data']['Serial Number']))
DMI['memory',i,'Size'] = str((v['data']['Size']))
DMI['memory',i,'Speed'] = str((v['data']['Speed']))
DMI['memory',i,'Type'] = str((v['data']['Type']))
i += 1
# get cache Data
#tmp = dmidecode.cache()
#pprint(tmp)
# get connector Data
#tmp = dmidecode.connector()
#pprint(tmp)
# get slot Data
#tmp = dmidecode.slot()
#pprint(tmp)
return DMI
# TODO: THIS SUCKS!
def __getdmi_count(self,dmi,a,b):
try:
count = 0
while dmi[a,count,b]:
count += 1
except (KeyError):
pass
return count
def create_xml (self):
dmi = self._getdmi()
server = xml.Element('server')
generated = xml.Element('generated')
generated.attrib['program'] = sys.argv[0]
generated.attrib['version'] = '2.0'
generated.attrib['scantime'] = time.asctime()
server.append(generated)
host = xml.Element('host')
host.attrib['name'] = self.cfg.global_hostname
server.append(host)
hardware = xml.Element('hardware')
server.append(hardware)
bios = xml.Element('bios')
        if self.__getdmi_count(dmi,'bios','Vendor'):
            bios.attrib['revision'] = dmi['bios',0,'BIOS Revision']
            bios.attrib['vendor'] = dmi['bios',0,'Vendor']
            bios.attrib['version'] = dmi['bios',0,'Version']
            bios.attrib['releasedate'] = dmi['bios',0,'Release Date']
hardware.append(bios)
sysboard = xml.Element('baseboard')
        if self.__getdmi_count(dmi,'baseboard','Manufacturer'):
            sysboard.attrib['manufacturer'] = dmi['baseboard',0,'Manufacturer']
            sysboard.attrib['productname'] = dmi['baseboard',0,'Product Name']
            sysboard.attrib['serialnumber'] = dmi['baseboard',0,'Serial Number']
            sysboard.attrib['version'] = dmi['baseboard',0,'Version']
hardware.append(sysboard)
system = xml.Element('system')
        if self.__getdmi_count(dmi,'system','Manufacturer'):
            system.attrib['manufacturer'] = dmi['system',0,'Manufacturer']
            system.attrib['productname'] = dmi['system',0,'Product Name']
            system.attrib['family'] = dmi['system',0,'Family']
            system.attrib['serialnumber'] = dmi['system',0,'Serial Number']
            system.attrib['version'] = dmi['system',0,'Version']
hardware.append(system)
chassis = xml.Element('chassis')
        if self.__getdmi_count(dmi,'chassis','Manufacturer'):
            chassis.attrib['manufacturer'] = dmi['chassis',0,'Manufacturer']
            chassis.attrib['serialnumber'] = dmi['chassis',0,'Serial Number']
            chassis.attrib['thermalstate'] = dmi['chassis',0,'Thermal State']
            chassis.attrib['type'] = dmi['chassis',0,'Type']
            chassis.attrib['version'] = dmi['chassis',0,'Version']
hardware.append(chassis)
processors = xml.Element('processors')
processors.attrib['count'] = str(self.__getdmi_count(dmi,'processor','Current Speed'))
hardware.append(processors)
# for every CPU do:
count = self.__getdmi_count(dmi,'processor','Current Speed')
i = 0
while i < count:
processor = xml.Element('processor')
processor.attrib['procid'] = str(i)
processor.attrib['family'] = dmi['processor',i,'Family']
processor.attrib['proctype'] = dmi['processor',i,'Type']
processor.attrib['socket'] = dmi['processor',i,'Socket Designation']
processor.attrib['speed'] = dmi['processor',i,'Max Speed']
processor.attrib['version'] = dmi['processor',i,'Version']
processor.attrib['cores'] = dmi['processor',i,'Core Count']
processor.attrib['threads'] = dmi['processor',i,'Thread Count']
processor.attrib['extclock'] = dmi['processor',i,'External Clock']
processor.attrib['partnumber'] = dmi['processor',i,'Part Number']
processor.attrib['serialnumber'] = dmi['processor',i,'Serial Number']
if dmi['processor',i,'Thread Count'] > dmi['processor',i,'Core Count']:
processor.attrib['htsystem'] = 'Yes'
else:
processor.attrib['htsystem'] = 'No'
processors.append(processor)
i += 1
memory = xml.Element('memory')
memory.attrib['count'] = str(self.__getdmi_count(dmi,'memory','Size'))
hardware.append(memory)
# for every memorybank do:
count = self.__getdmi_count(dmi,'memory','Size')
i = 0
while i < count:
memorymodule = xml.Element('memorymodule')
memorymodule.attrib['location'] = dmi['memory',i,'Locator']
memorymodule.attrib['bank'] = dmi['memory',i,'Bank Locator']
memorymodule.attrib['memorysize'] = dmi['memory',i,'Size']
memorymodule.attrib['formfactor'] = dmi['memory',i,'Form Factor']
memorymodule.attrib['manufacturer'] = dmi['memory',i,'Manufacturer']
memorymodule.attrib['memorytype'] = dmi['memory',i,'Type']
memorymodule.attrib['partnumber'] = dmi['memory',i,'Part Number']
memorymodule.attrib['serialnumber'] = dmi['memory',i,'Serial Number']
memorymodule.attrib['width'] = dmi['memory',i,'Data Width']
memorymodule.attrib['speed'] = dmi['memory',i,'Speed']
memory.append(memorymodule)
i += 1
# get blockdevices
blockdevs = self.osbase.getblock()
blockdevices = xml.Element('blockdevices')
blockdevices.attrib['count'] = str(len(blockdevs))
hardware.append(blockdevices)
#[name,devtype,vendor,model,nr_sectors,sect_size,rotational,readonly,removable,major,minor]
i = 0
for i in range(len(blockdevs)):
block = xml.Element('blockdevice')
block.attrib['name'] = blockdevs[i][0]
block.attrib['type'] = blockdevs[i][1]
block.attrib['vendor'] = blockdevs[i][2]
block.attrib['model'] = blockdevs[i][3]
block.attrib['sectors'] = blockdevs[i][4]
block.attrib['sectorsize'] = blockdevs[i][5]
block.attrib['rotational'] = blockdevs[i][6]
block.attrib['readonly'] = blockdevs[i][7]
block.attrib['removable'] = blockdevs[i][8]
block.attrib['major'] = blockdevs[i][9]
block.attrib['minor'] = blockdevs[i][10]
blockdevices.append(block)
# get PCIdevices
pcidevs = self.osbase.getpci()
pcidevices = xml.Element('pcidevices')
pcidevices.attrib['count'] = str(len(pcidevs))
hardware.append(pcidevices)
# [path,devtype,vendor,model,revision]
i = 0
for i in range(len(pcidevs)):
pci = xml.Element('pcidevice')
pci.attrib['path'] = pcidevs[i][0]
pci.attrib['type'] = pcidevs[i][1]
pci.attrib['vendor'] = pcidevs[i][2]
pci.attrib['model'] = pcidevs[i][3]
pci.attrib['revision'] = pcidevs[i][4]
pcidevices.append(pci)
# TODO: get info from HP tools
operatingsystem = xml.Element('system')
operatingsystem.attrib['name'] = self.osbase.system
operatingsystem.attrib['arch'] = self.osbase.machine
operatingsystem.attrib['serialnumber'] = self.osbase.serial
operatingsystem.attrib['manufacturer'] = self.osbase.manufacturer
operatingsystem.attrib['detail'] = self.osbase.detail
operatingsystem.attrib['kernel'] = self.osbase.kernel
server.append(operatingsystem)
software = xml.Element('software')
operatingsystem.append(software)
pkgs = self.osbase.getpkgs()
# for every pkg do:
# [name][ver][pkgsize][instsize][section][status][info][homepage][signed][files][arch]
for i in range(len(pkgs)):
package = xml.Element('package')
package.attrib['name'] = pkgs[i][0]
package.attrib['version'] = pkgs[i][1]
package.attrib['pkgsize'] = pkgs[i][2]
package.attrib['installedsize'] = pkgs[i][3]
package.attrib['section'] = pkgs[i][4]
package.attrib['status'] = pkgs[i][5]
package.attrib['description'] = pkgs[i][6]
package.attrib['homepage'] = pkgs[i][7]
package.attrib['signed'] = pkgs[i][8]
package.attrib['files'] = pkgs[i][9]
package.attrib['arch'] = pkgs[i][10]
software.append(package)
filesystems = xml.Element('filesystems')
operatingsystem.append(filesystems)
updates = self.osbase.getupdates()
up2date = xml.Element('updates')
operatingsystem.append(up2date)
# for every update do:
secupdate = 0
bugfixupdate = 0
totalupdate = 0
for i in range(len(updates)):
package = xml.Element('package')
package.attrib['type'] = updates[i][0]
package.attrib['name'] = updates[i][1]
package.attrib['available'] = updates[i][2]
package.attrib['info'] = 'N/A'
if updates[i][0] == "security":
secupdate += 1
if updates[i][0] == "bugfix":
bugfixupdate += 1
totalupdate += 1
up2date.append(package)
up2date.attrib['total'] = str(totalupdate)
up2date.attrib['security'] = str(secupdate)
up2date.attrib['bugfix'] = str(bugfixupdate)
partitions = self.osbase.getpartitions()
# for every partition do:
# disks[i] = [fsdev,fsmount,fstype,fsopts,fstotal,fsused,fsfree,fspercent]
for i in range(len(partitions)):
filesystem = xml.Element('filesystem')
filesystem.attrib['fsdevice'] = partitions[i][0]
filesystem.attrib['fsname'] = partitions[i][1]
filesystem.attrib['fstype'] = partitions[i][2]
filesystem.attrib['fsopts'] = partitions[i][3]
filesystem.attrib['fstotal'] = partitions[i][4]
filesystem.attrib['fsused'] = partitions[i][5]
filesystem.attrib['fsfree'] = partitions[i][6]
filesystem.attrib['fspercent'] = partitions[i][7]
filesystems.append(filesystem)
swap = xml.Element('swap')
operatingsystem.append(swap)
swaps = self.osbase.getswap()
# for every swap do:
# swaps[i] = [dev,type,total,used,free]
for i in range(len(swaps)):
swaparea = xml.Element('swaparea')
swaparea.attrib['device'] = swaps[i][0]
swaparea.attrib['swaptype'] = swaps[i][1]
swaparea.attrib['swapsize'] = swaps[i][2]
swaparea.attrib['swapused'] = swaps[i][3]
swaparea.attrib['swapfree'] = swaps[i][4]
swap.append(swaparea)
network = xml.Element('network')
operatingsystem.append(network)
nics = self.osbase.getnics()
# for every NIC do:
#(a) = nics.keys()
#print a[0][0]
adapters = xml.Element('nics')
network.append(adapters)
#[name,description,nictype,status,rx_packets,tx_packets,rx_bytes,tx_bytes,driver,drvver,firmware,kernmodule,macaddr]
for i in range(len(nics)):
adapter = xml.Element('nic')
adapter.attrib['name'] = nics[i][0]
adapter.attrib['description'] = nics[i][1]
adapter.attrib['nictype'] = nics[i][2]
adapter.attrib['status'] = nics[i][3]
adapter.attrib['RXpackets'] = nics[i][4]
adapter.attrib['TXpackets'] = nics[i][5]
adapter.attrib['RXbytes'] = nics[i][6]
adapter.attrib['TXbytes'] = nics[i][7]
adapter.attrib['driver'] = nics[i][8]
adapter.attrib['drvver'] = nics[i][9]
adapter.attrib['firmware'] = nics[i][10]
adapter.attrib['kernmodule'] = nics[i][11]
adapter.attrib['macaddress'] = nics[i][12]
adapters.append(adapter)
ips = self.osbase.getip(nics[i][0])
for j in range(len(ips)):
ip = xml.Element('ipv4')
ip.attrib['address'] = ips[j][0]
ip.attrib['mask'] = ips[j][1]
ip.attrib['broadcast'] = ips[j][2]
adapter.append(ip)
ips6 = self.osbase.getip6(nics[i][0])
for k in range(len(ips6)):
ip6 = xml.Element('ipv6')
ip6.attrib['address'] = ips6[k][0]
ip6.attrib['mask'] = ips6[k][1]
ip6.attrib['broadcast'] = ips6[k][2]
adapter.append(ip6)
dnsservers = xml.Element('dnsservers')
network.append(dnsservers)
# for every dnsserver do:
dns = self.osbase.getdns()
for i in range(len(dns)):
dnsentry = xml.Element(dns[i][0])
dnsentry.attrib['address'] = dns[i][1]
dnsservers.append(dnsentry)
routing = xml.Element('routing')
network.append(routing)
ip4routes = xml.Element('ipv4')
routing.append(ip4routes)
routes4 = self.osbase.getroutes()
# for every iproute do:
for i in range(len(routes4)):
route = xml.Element('route')
route.attrib['destination'] = routes4[i][0]
route.attrib['gateway'] = routes4[i][1]
route.attrib['mask'] = routes4[i][2]
route.attrib['flags'] = routes4[i][3]
route.attrib['metric'] = routes4[i][4]
route.attrib['interface'] = routes4[i][5]
ip4routes.append(route)
ip6routes = xml.Element('ipv6')
routing.append(ip6routes)
routes6 = self.osbase.getroutes6()
# for every iproute do:
for i in range(len(routes6)):
route6 = xml.Element('route')
route6.attrib['destination'] = routes6[i][0]
route6.attrib['mask'] = routes6[i][1]
route6.attrib['gateway'] = routes6[i][2]
route6.attrib['flags'] = routes6[i][3]
route6.attrib['metric'] = routes6[i][4]
route6.attrib['interface'] = routes6[i][5]
ip6routes.append(route6)
groups = xml.Element('groups')
operatingsystem.append(groups)
pwgroups = self.osbase.getgroups()
# for every group do:
for i in range(len(pwgroups)):
group = xml.Element('group')
group.attrib['name'] = pwgroups[i][0]
group.attrib['gid'] = pwgroups[i][1]
groups.append(group)
# for every group member do:
for usr in pwgroups[i][2].split(','):
if usr:
member = xml.Element('member')
member.attrib['name'] = usr
group.append(member)
xmlusers = xml.Element('users')
operatingsystem.append(xmlusers)
# for every user do:
users = self.osbase.getusers()
# [login,uid,gid,comment,home,shell,locked,hashtype,groups]
for i in range(len(users)):
user = xml.Element('user')
user.attrib['login'] = users[i][0]
user.attrib['uid'] = users[i][1]
user.attrib['gid'] = users[i][2]
user.attrib['comment'] = users[i][3]
user.attrib['home'] = users[i][4]
user.attrib['shell'] = users[i][5]
user.attrib['locked'] = users[i][6]
user.attrib['hashtype'] = users[i][7]
user.attrib['groups'] = users[i][8]
xmlusers.append(user)
regional = xml.Element('regional')
loc = locale.getdefaultlocale()
regional.attrib['timezone'] = time.tzname[0]
regional.attrib['charset'] = loc[0]+'.'+loc[1]
operatingsystem.append(regional)
processes = xml.Element('processes')
operatingsystem.append(processes)
# for every process do:
procs = self.osbase.getprocs()
# [ppid,powner,psystime,pusertime,pcpu,pmem,ppriority,pstatus,pname,pcmd]
for i in range(len(procs)):
process = xml.Element('process')
process.attrib['pid'] = procs[i][0]
process.attrib['owner'] = procs[i][1]
process.attrib['cpusystime'] = procs[i][2]
process.attrib['cpuusertime'] = procs[i][3]
process.attrib['pcpu'] = procs[i][4]
process.attrib['pmemory'] = procs[i][5]
process.attrib['priority'] = procs[i][6]
process.attrib['status'] = procs[i][7]
process.attrib['name'] = procs[i][8]
process.attrib['command'] = procs[i][9]
processes.append(process)
services = xml.Element('services')
operatingsystem.append(services)
svcs = self.osbase.getsvcs()
# for every service do:
# [name,boot,status]
for i in range(len(svcs)):
service = xml.Element('service')
service.attrib['name'] = svcs[i][0]
service.attrib['autostart'] = svcs[i][1]
service.attrib['running'] = svcs[i][2]
services.append(service)
# make xml pretty ;)
raw_xml = xml.tostring(server, 'utf-8')
#reparsed_xml = minidom.parseString(raw_xml)
#pretty_xml = reparsed_xml.toprettyxml(indent=" ")
return raw_xml
#f = open("/tmp/test.xml", 'w')
#f.write(pretty_xml)
#f.close()
class TeddixCfg2Html:
def __init__(self,syslog,cfg):
self.syslog = syslog
self.cfg = cfg
self.agent_cfg2html_file = self.cfg.global_workdir + '/agent' + "/" + self.cfg.global_hostname + ".txt"
def run(self):
parser = TeddixParser.TeddixStringParser()
if parser.checkexec(self.cfg.agent_cfg2html):
if parser.getretval(self.cfg.agent_cfg2html + ' -o ' + self.cfg.global_workdir + '/agent' ) == 0:
self.syslog.info("%s succeeded " % self.cfg.agent_cfg2html )
else:
self.syslog.warn("%s failed " % self.cfg.agent_cfg2html)
else:
self.syslog.warn("%s failed " % self.cfg.agent_cfg2html)
def create_html(self):
f = open(self.agent_cfg2html_file, 'r')
html = f.read()
f.close()
return html
class TeddixDmesg:
def __init__(self,syslog,cfg):
self.syslog = syslog
self.cfg = cfg
def run(self):
        dmesg = ''
        parser = TeddixParser.TeddixStringParser()
        if parser.checkexec('dmesg'):
            for line in parser.readstdout('dmesg'):
                dmesg += line + "\n"
        return dmesg
class TeddixBootlog:
def __init__(self,syslog,cfg):
self.syslog = syslog
self.cfg = cfg
def get(self):
bootlog = ''
t_bootdmesg1 = "test -f /var/log/dmesg"
t_bootdmesg2 = "test -f /var/log/dmesg.boot"
t_bootdmesg3 = "test -f /var/log/boot.dmesg"
t_bootdmesg4 = "test -f /var/log/boot.log"
t_bootdmesg5 = "test -d /etc/svc/volatile"
if subprocess.call(t_bootdmesg1,shell=True) == 0:
self.syslog.debug("Found /var/log/dmesg" )
f = open('/var/log/dmesg', 'r')
bootlog = f.read()
f.close()
elif subprocess.call(t_bootdmesg2,shell=True) == 0:
self.syslog.debug("Found /var/log/dmesg.boot" )
f = open('/var/log/dmesg.boot', 'r')
bootlog = f.read()
f.close()
elif subprocess.call(t_bootdmesg3,shell=True) == 0:
self.syslog.debug("Found /var/log/boot.dmesg" )
f = open('/var/log/boot.dmesg', 'r')
bootlog = f.read()
f.close()
elif subprocess.call(t_bootdmesg4,shell=True) == 0:
self.syslog.debug("Found /var/log/boot.log" )
f = open('/var/log/boot.log', 'r')
bootlog = f.read()
f.close()
elif subprocess.call(t_bootdmesg5,shell=True) == 0:
self.syslog.debug("Found /etc/svc/volatile" )
for log in glob.glob('/etc/svc/volatile/*.log'):
f = open(log, 'r')
bootlog += "%s:" % log
bootlog += f.read()
f.close()
return bootlog
if __name__ == "__main__":
cfg = TeddixConfigFile.TeddixConfigFile()
# Open syslog
syslog = TeddixLogger.TeddixLogger ("TeddixInventory")
syslog.open_console()
# Switch to working directory
try:
os.chdir(cfg.global_workdir + '/agent')
except Exception, e:
syslog.error("Unable to change workdir to %s " % cfg.global_workdir + '/agent')
syslog.exception('__init__(): %s' % e )
exit(20)
if not os.access(cfg.global_workdir + '/agent', os.R_OK):
syslog.error("workdir %s needs to be readable" % cfg.global_workdir + '/agent')
if not os.access(cfg.global_workdir + '/agent', os.W_OK):
syslog.error("workdir %s needs to be writable" % cfg.global_workdir + '/agent')
if not os.access(cfg.global_workdir + '/agent', os.X_OK):
syslog.error("workdir %s needs to be executable" % cfg.global_workdir + '/agent')
baseline = TeddixBaseline(syslog,cfg)
raw_xml = baseline.create_xml()
# make xml pretty ;)
reparsed_xml = minidom.parseString(raw_xml)
pretty_xml = reparsed_xml.toprettyxml(indent=" ")
print pretty_xml
#cfg2html = TeddixCfg2Html(syslog,cfg)
#cfg2html.run()
#dmesg = TeddixDmesg(syslog,cfg)
#print dmesg.run()
# === StarcoderdataPython sample 6553511 ===
import re
class OBJ_Loader( object ):
def parse_statement( self, statement ):
"""Processes the passed in statement.
This will call the appropriate member function
for the statement dynamically.
Member functions must be in the form:
_parse_%s( self, statement )
where %s is the statement type.
Eg.
_parse_vt( self, statement )
will parse texture coordinates.
If an appropriate member function is not found
nothing will be done and a statement will be
printed.
This method does not permit statements beginning with
invalid function characters to be passed through.
This includes comments (#).
Comments and empty lines should not be passed to this
function.
'call' statements should also be handled outside
this function.
Can throw NotImplementedError for unimplemented features,
AssertError for programmatical errors and other exceptions
for malformed or unexpected data.
"""
        # get the statement type (the first token); the remaining tokens are
        # re-parsed by the handler itself, which receives the full statement
        statement_type = statement.split()[ 0 ]

        # check if we have a function that handles this type of value
        # all parse functions are named _parse_$ where $ is
        # the statement type
        func = getattr( self, '_parse_%s' % statement_type, None )
        if func != None:
            func( statement )
        else:
            raise NotImplementedError(
                'Command "%s" is unknown' % statement_type
                )
def process_filename_list( self, values ):
# filenames are normally split by whitespace
# but the specification also allows for files with spaces in them
# so we need to see if a value doesn't end with an extension
# if not, concatenate it with the next value
return re.split( r'\W+\.\W+', values )
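
# Illustrative subclass (not from the original source): statements dispatch to
# `_parse_<type>` hooks, so handling `v` lines only needs one method.
if __name__ == "__main__":
    class SimpleLoader( OBJ_Loader ):
        def _parse_v( self, statement ):
            x, y, z = map( float, statement.split()[ 1: ] )
            print( (x, y, z) )

    SimpleLoader().parse_statement( 'v 1.0 2.0 3.0' )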
# === StarcoderdataPython sample 3347770 (1-10 stars) ===
import numpy as np
from chainer import Chain, report, links as L, functions as F, as_variable, cuda
from chainer.iterators import SerialIterator
from chainer.optimizers import Adam
from chainer.training import Trainer, StandardUpdater, extensions
from chainerltr import Ranker
from chainerltr.dataset import zeropad_concat
from chainerltr.evaluation import ndcg
from test.test_dataset import get_dataset
class Loss(Chain):
def __init__(self, ranker, loss_fn):
super().__init__(ranker=ranker)
self.loss_fn = loss_fn
def __call__(self, xs, ys, nr_docs):
prediction = self.ranker(xs)
loss = self.loss_fn(prediction, ys, nr_docs)
report({"loss": loss})
xp = cuda.get_array_module(prediction)
ranking = as_variable(xp.fliplr(xp.argsort(prediction.data, axis=1)))
ndcg_score = ndcg(ranking, as_variable(ys), as_variable(nr_docs))
report({"ndcg": F.mean(ndcg_score)})
return loss
def run_linear_network(loss_fn, alpha=0.3, batch_size=2):
# Get data
np.random.seed(42)
dataset = get_dataset()
iterator = SerialIterator(dataset, batch_size, repeat=True, shuffle=True)
# Set up network and loss
predictor = L.Linear(None, 1)
ranker = Ranker(predictor)
loss = Loss(ranker, loss_fn)
# Optimizer
optimizer = Adam(alpha=alpha)
optimizer.setup(loss)
updater = StandardUpdater(iterator, optimizer, converter=zeropad_concat)
trainer = Trainer(updater, (100, 'epoch'))
log_report = extensions.LogReport(log_name=None)
trainer.extend(log_report)
np.random.seed(42)
trainer.run()
last_ndcg = log_report.log[-1]['ndcg']
return last_ndcg
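
# Illustrative invocation (the toy loss below is a stand-in and not one of
# chainerltr's shipped loss functions):
#
#     def toy_loss(prediction, ys, nr_docs):
#         return F.mean(prediction)
#
#     print(run_linear_network(toy_loss))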
# === StarcoderdataPython sample 1614093: blockchain/all_challenges/2021/realworld/rwctf3rd-Re-Montagy/deploy/ethbot/index.py (0 stars) ===
import sys
import os
from conf.base import alarmsecs, workdir
import signal
import codecs
from src.main import main
import hashlib
import random
import string
def getflag(seed, teamtoken):
token=teamtoken
real_flag=hashlib.md5((seed+'&'+hashlib.sha1(token[::-1].encode()).hexdigest()[:10]).encode()).hexdigest()
return 'flag{' + real_flag + '}'
def generatepow(difficulty):
prefix = ''.join(random.choice(string.ascii_letters + string.digits) for i in range(8))
msg="sha256("+prefix+" + ?).binary.endswith('"+"0"*difficulty+"')"
return prefix,msg
def pow(prefix,difficulty,answer):
    bits=bin(int(hashlib.sha256((prefix+answer).encode()).hexdigest(),16))[2:]
    return bits.endswith("0"*difficulty)
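
def solve_pow(prefix, difficulty):
    # Brute-force helper (illustrative, not part of the original service):
    # find an answer whose sha256 digest bits end with `difficulty` zeros.
    counter = 0
    while True:
        candidate = str(counter)
        if pow(prefix, difficulty, candidate):
            return candidate
        counter += 1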
print("[$] Welcome to Re:Montagy.")
prefix,msg=generatepow(5)
print("[+]",msg)
answer=input("[-] ?=")
if not pow(prefix,5,answer):
print("[+]wrong proof")
sys.exit(0)
print("[+] passed")
class Unbuffered(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.path.append(workdir)
sys.stdout = codecs.getwriter("utf-8")(sys.stdout.detach())
sys.stdout = Unbuffered(sys.stdout)
signal.alarm(alarmsecs)
os.chdir(workdir)
main()
# === StarcoderdataPython sample 5054711 ===
import logging
from django.conf import settings
logger = logging.getLogger(__name__)
DEFAULT_SETTINGS = {
"CANONICAL_HOSTNAME": "",
"INDEX_TEXT_RESOURCES": True
}
class AppSettings(object):
    def __init__(self, settings_key=None, default_settings=None):
        self.settings_key = settings_key
        # avoid a shared mutable default argument
        self.default_settings = default_settings if default_settings is not None else {}
@property
def user_settings(self):
if not hasattr(self, "_user_settings"):
self._user_settings = getattr(settings, self.settings_key, {})
return self._user_settings
def __getattr__(self, attr):
if attr not in self.default_settings:
raise AttributeError(f"Invalid setting {attr} for {self.settings_key}")
try:
# Check if present in user settings
val = self.user_settings[attr]
except KeyError:
# Fall back to defaults
val = self.default_settings[attr]
# Cache the result
setattr(self, attr, val)
return val
text_store_settings = AppSettings("TEXT_STORE", DEFAULT_SETTINGS)
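
# Illustrative usage (assumes Django settings are configured; the TEXT_STORE
# override below is hypothetical):
#
#     # settings.py
#     TEXT_STORE = {"CANONICAL_HOSTNAME": "texts.example.com"}
#
#     # elsewhere
#     text_store_settings.CANONICAL_HOSTNAME    # -> "texts.example.com"
#     text_store_settings.INDEX_TEXT_RESOURCES  # -> True (default fallback)
#     text_store_settings.BOGUS                 # -> AttributeError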
# === StarcoderdataPython sample 1987333: mmatl/jointseg ===
"""
File for picking unique, easily-distinguishable colors.
Author: <NAME>
"""
def indexed_color(i):
"""Return a color based on an index. Identical indices will always return
the same color, and the colors should contrast cleanly.
Parameters
----------
i : int
An index into the color array.
Returns
-------
:obj:`tuple` of float
An rgb color tuple.
"""
r = [0.95, 0.13, 0.95, 0.53, 0.95, 0.63, 0.75, 0.76, 0.52, 0.0,
0.9, 0.0, 0.98, 0.38, 0.96, 0.7, 0.86, 0.53, 0.55, 0.4, 0.89, 0.17]
g = [0.95, 0.13, 0.76, 0.34, 0.52, 0.79, 0.0, 0.7, 0.52, 0.53,
0.56, 0.4, 0.58, 0.31, 0.65, 0.27, 0.83, 0.18, 0.71, 0.27, 0.35, 0.24]
b = [0.96, 0.13, 0.0, 0.57, 0.0, 0.95, 0.2, 0.5, 0.51, 0.34, 0.67,
0.65, 0.47, 0.59, 0.0, 0.42, 0.0, 0.09, 0.0, 0.13, 0.13, 0.15]
ci = i % len(r)
return (r[ci], g[ci], b[ci])
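
if __name__ == "__main__":
    # Quick demonstration (illustrative): the palette has 22 entries and wraps,
    # so indices 0 and 22 map to the same color.
    assert indexed_color(0) == indexed_color(22)
    print(indexed_color(1))  # (0.13, 0.13, 0.13)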
# === StarcoderdataPython sample 6456611 ===
"""
Copyright 2020 <NAME> <EMAIL>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.modules.normalization import LayerNorm
from third_party import transformer
import utils
import modules
class TransformerDecoder(nn.Module):
r"""
Top class of TransformerDecoder
"""
def __init__(self, config):
super(TransformerDecoder, self).__init__()
self.config = config
self.d_model = config["d_model"]
self.nhead = config["nhead"]
self.num_layers = config["num_layers"]
self.encoder_dim = config["encoder_dim"]
self.dim_feedforward = config["dim_feedforward"]
self.vocab_size = config["vocab_size"]
self.dropout_rate = config["dropout_rate"]
self.activation = config["activation"]
self.emb = nn.Embedding(self.vocab_size, self.d_model)
self.emb_scale = self.d_model**0.5
self.pe = modules.PositionalEncoding(self.d_model)
self.dropout = nn.Dropout(self.dropout_rate)
transformer_decoder_layer = transformer.TransformerDecoderLayer(
d_model=self.d_model,
nhead=self.nhead,
dim_feedforward=self.dim_feedforward,
dropout=self.dropout_rate,
activation=self.activation)
self.transformer_block = transformer.TransformerDecoder(
transformer_decoder_layer, self.num_layers)
self.output_affine = nn.Linear(self.d_model, self.vocab_size)
nn.init.xavier_normal_(self.output_affine.weight)
self.emb.weight = self.output_affine.weight # tying weight
def forward(self,
encoder_outputs,
encoder_output_lengths,
decoder_inputs,
decoder_input_lengths,
return_atten=False):
r"""
Forward function of transformer decoder layer.
encoder_outputs is from last encoder layer and will be used as K,V as in multihead attention
decoder_inputs now is seems as target id or A.K.A labels which are texts, and will be used as Q.
"""
B, T_e, D_e = encoder_outputs.shape
encoder_outputs = encoder_outputs.permute(1, 0, 2) # [S, B, D_e]
_, T_d = decoder_inputs.shape
memory_key_padding_mask = utils.get_transformer_padding_byte_masks(
B, T_e, encoder_output_lengths).to(encoder_outputs.device)
tgt_key_padding_mask = utils.get_transformer_padding_byte_masks(
B, T_d, decoder_input_lengths).to(encoder_outputs.device)
casual_masks = utils.get_transformer_casual_masks(T_d).to(
encoder_outputs.device)
outputs = self.emb(decoder_inputs) * self.emb_scale
outputs = self.pe(outputs)
outputs = self.dropout(outputs)
outputs = outputs.permute(1, 0, 2)
if return_atten:
outputs, decoder_atten_tuple_list = self.transformer_block(
outputs,
encoder_outputs,
memory_mask=None,
memory_key_padding_mask=memory_key_padding_mask,
tgt_key_padding_mask=tgt_key_padding_mask,
tgt_mask=casual_masks,
return_atten=True)
else:
outputs = self.transformer_block(
outputs,
encoder_outputs,
memory_mask=None,
memory_key_padding_mask=memory_key_padding_mask,
tgt_key_padding_mask=tgt_key_padding_mask,
tgt_mask=casual_masks,
return_atten=False)
outputs = outputs.permute(1, 0, 2)
outputs = self.output_affine(outputs)
if return_atten:
return outputs, decoder_atten_tuple_list
return outputs
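
# Illustrative construction sketch (config keys mirror __init__ above; the
# concrete values are hypothetical and depend on the surrounding project):
#
#     config = {
#         "d_model": 256, "nhead": 4, "num_layers": 6,
#         "encoder_dim": 256, "dim_feedforward": 1024,
#         "vocab_size": 4000, "dropout_rate": 0.1, "activation": "relu",
#     }
#     decoder = TransformerDecoder(config)
#     logits = decoder(encoder_outputs, encoder_output_lengths,
#                      decoder_inputs, decoder_input_lengths)
#     # logits: [batch, target_len, vocab_size]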
# === StarcoderdataPython sample 5005140 ===
class User():
"""docstring for User"""
def __init__(self, **params):
'''Init user
Load user data and return, using id or email/password
'''
self.id = 0
self.is_authenticated = True
self.is_active = True
self.is_anonymous = False
def get_id(self):
return self.id
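
if __name__ == "__main__":
    # Minimal check (illustrative): the attributes mirror what Flask-Login
    # expects from a user object.
    u = User()
    print(u.get_id(), u.is_authenticated, u.is_anonymous)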
# === StarcoderdataPython sample 1889605 (0 stars) ===
from . import device
class US8P150(device.Device):
def __init__(self, site, data):
super(US8P150, self).__init__(site, data)
self.port = {}
self.parse_stat(data['stat'])
self.parse_uplink(data.get('uplink'))
self.parse_port_table(data['port_table'])
self.general_temperature = data.get('general_temperature')
self.parse_sysstat(data.get('sys_stats'))
def parse_port_table(self, data):
for port in data:
self.port[port['port_idx']] = port
# {
# "_id": "58556a4fb410cf6b940e570a",
# "_uptime": 3641077,
# "adopted": true,
# "bytes": 0,
# "cfgversion": "3280a08c720ca056",
# "config_network": {
# "ip": "192.168.1.226",
# "type": "dhcp"
# },
# "connect_request_ip": "192.168.1.226",
# "connect_request_port": "40880",
# "considered_lost_at": 1485632663,
# "device_id": "58556a4fb410cf6b940e570a",
# "dhcp_server_table": [],
# "dot1x_dyvlan_enabled": false,
# "dot1x_portctrl_enabled": false,
# "downlink_table": [
# {
# "full_duplex": true,
# "mac": "44:d9:e7:f6:9f:99",
# "port_idx": 5,
# "speed": 1000
# }
# ],
# "ethernet_table": [
# {
# "mac": "f0:9f:c2:0a:4a:ca",
# "name": "eth0",
# "num_port": 10
# },
# {
# "mac": "f0:9f:c2:0a:4a:cb",
# "name": "srv0"
# }
# ],
# "flowctrl_enabled": false,
# "fw_caps": 0,
# "general_temperature": 63,
# "guest-num_sta": 0,
# "has_fan": false,
# "inform_authkey": "...",
# "inform_ip": "192.168.1.3",
# "inform_url": "http://192.168.1.3:8080/inform",
# "ip": "192.168.1.226",
# "jumboframe_enabled": false,
# "known_cfgversion": "3280a08c720ca056",
# "last_seen": 1485632591,
# "locating": false,
# "mac": "f0:9f:c2:0a:4a:ca",
# "model": "US8P150",
# "name": "switch-trappen",
# "next_heartbeat_at": 1485632615,
# "num_sta": 22,
# "overheating": false,
# "port_table": [
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 0,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": false,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "GE",
# "name": "Port 1",
# "op_mode": "switch",
# "poe_class": "Unknown",
# "poe_current": "0.00",
# "poe_enable": false,
# "poe_good": false,
# "poe_mode": "auto",
# "poe_power": "0.00",
# "poe_voltage": "0.00",
# "port_idx": 1,
# "port_poe": true,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 0,
# "rx_bytes": 0,
# "rx_bytes-r": 0,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 0,
# "speed": 0,
# "stp_pathcost": 0,
# "stp_state": "disabled",
# "tx_broadcast": 0,
# "tx_bytes": 0,
# "tx_bytes-r": 0,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 0,
# "tx_packets": 0,
# "up": false
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 0,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": false,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "GE",
# "name": "Port 2",
# "op_mode": "switch",
# "poe_class": "Unknown",
# "poe_current": "0.00",
# "poe_enable": false,
# "poe_good": false,
# "poe_mode": "auto",
# "poe_power": "0.00",
# "poe_voltage": "0.00",
# "port_idx": 2,
# "port_poe": true,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 0,
# "rx_bytes": 0,
# "rx_bytes-r": 0,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 0,
# "speed": 0,
# "stp_pathcost": 0,
# "stp_state": "disabled",
# "tx_broadcast": 0,
# "tx_bytes": 0,
# "tx_bytes-r": 0,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 0,
# "tx_packets": 0,
# "up": false
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 0,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": false,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "GE",
# "name": "Port 3",
# "op_mode": "switch",
# "poe_class": "Unknown",
# "poe_current": "0.00",
# "poe_enable": false,
# "poe_good": false,
# "poe_mode": "auto",
# "poe_power": "0.00",
# "poe_voltage": "0.00",
# "port_idx": 3,
# "port_poe": true,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 0,
# "rx_bytes": 0,
# "rx_bytes-r": 0,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 0,
# "speed": 0,
# "stp_pathcost": 0,
# "stp_state": "disabled",
# "tx_broadcast": 0,
# "tx_bytes": 0,
# "tx_bytes-r": 0,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 0,
# "tx_packets": 0,
# "up": false
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 0,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": false,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "GE",
# "name": "Port 4",
# "op_mode": "switch",
# "poe_class": "Unknown",
# "poe_current": "0.00",
# "poe_enable": false,
# "poe_good": false,
# "poe_mode": "auto",
# "poe_power": "0.00",
# "poe_voltage": "0.00",
# "port_idx": 4,
# "port_poe": true,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 0,
# "rx_bytes": 0,
# "rx_bytes-r": 0,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 0,
# "speed": 0,
# "stp_pathcost": 0,
# "stp_state": "disabled",
# "tx_broadcast": 0,
# "tx_bytes": 0,
# "tx_bytes-r": 0,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 0,
# "tx_packets": 0,
# "up": false
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 11826,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": true,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "GE",
# "name": "Port 5",
# "op_mode": "switch",
# "poe_class": "Class 0",
# "poe_current": "77.75",
# "poe_enable": true,
# "poe_good": true,
# "poe_mode": "auto",
# "poe_power": "4.16",
# "poe_voltage": "53.46",
# "port_idx": 5,
# "port_poe": true,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 1965275,
# "rx_bytes": 176744755020,
# "rx_bytes-r": 4496,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 1843103,
# "rx_packets": 311222635,
# "speed": 1000,
# "stp_pathcost": 20000,
# "stp_state": "forwarding",
# "tx_broadcast": 1843267,
# "tx_bytes": 539674069439,
# "tx_bytes-r": 7329,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 2621212,
# "tx_packets": 441936297,
# "up": true
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 13448,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": true,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "GE",
# "name": "Port 6",
# "op_mode": "switch",
# "poe_class": "Unknown",
# "poe_current": "0.00",
# "poe_enable": false,
# "poe_good": false,
# "poe_mode": "auto",
# "poe_power": "0.00",
# "poe_voltage": "0.00",
# "port_idx": 6,
# "port_poe": true,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 1580314,
# "rx_bytes": 178970189461,
# "rx_bytes-r": 6930,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 681287,
# "rx_packets": 229507948,
# "speed": 1000,
# "stp_pathcost": 20000,
# "stp_state": "forwarding",
# "tx_broadcast": 2228229,
# "tx_bytes": 230673614216,
# "tx_bytes-r": 6518,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 3783026,
# "tx_packets": 260146954,
# "up": true
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 385,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": true,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "GE",
# "name": "Port 7",
# "op_mode": "switch",
# "poe_class": "Unknown",
# "poe_current": "0.00",
# "poe_enable": false,
# "poe_good": false,
# "poe_mode": "auto",
# "poe_power": "0.00",
# "poe_voltage": "0.00",
# "port_idx": 7,
# "port_poe": true,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 12,
# "rx_bytes": 6803927,
# "rx_bytes-r": 4,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 82298,
# "speed": 100,
# "stp_pathcost": 200000,
# "stp_state": "forwarding",
# "tx_broadcast": 3808531,
# "tx_bytes": 1054811281,
# "tx_bytes-r": 380,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 4464313,
# "tx_packets": 8378604,
# "up": true
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 4256,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": true,
# "is_uplink": true,
# "jumbo": false,
# "masked": false,
# "media": "GE",
# "name": "Port 8",
# "op_mode": "switch",
# "poe_class": "Unknown",
# "poe_current": "0.00",
# "poe_enable": false,
# "poe_good": false,
# "poe_mode": "auto",
# "poe_power": "0.00",
# "poe_voltage": "0.00",
# "port_idx": 8,
# "port_poe": true,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 146507,
# "rx_bytes": 478603665596,
# "rx_bytes-r": 3057,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 380556410,
# "speed": 1000,
# "stp_pathcost": 20000,
# "stp_state": "forwarding",
# "tx_broadcast": 3662034,
# "tx_bytes": 65698353872,
# "tx_bytes-r": 1199,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 4464311,
# "tx_packets": 231549589,
# "up": true
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 0,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": false,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "SFP",
# "name": "SFP 1",
# "op_mode": "switch",
# "port_idx": 9,
# "port_poe": false,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 0,
# "rx_bytes": 0,
# "rx_bytes-r": 0,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 0,
# "sfp_found": false,
# "speed": 0,
# "stp_pathcost": 0,
# "stp_state": "disabled",
# "tx_broadcast": 0,
# "tx_bytes": 0,
# "tx_bytes-r": 0,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 0,
# "tx_packets": 0,
# "up": false
# },
# {
# "aggregated_by": false,
# "autoneg": true,
# "bytes-r": 0,
# "enable": true,
# "flowctrl_rx": false,
# "flowctrl_tx": false,
# "full_duplex": false,
# "is_uplink": false,
# "jumbo": false,
# "masked": false,
# "media": "SFP",
# "name": "SFP 2",
# "op_mode": "switch",
# "port_idx": 10,
# "port_poe": false,
# "portconf_id": "56c87bd0b41038d25762ce8f",
# "rx_broadcast": 0,
# "rx_bytes": 0,
# "rx_bytes-r": 0,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 0,
# "sfp_found": false,
# "speed": 0,
# "stp_pathcost": 0,
# "stp_state": "disabled",
# "tx_broadcast": 0,
# "tx_bytes": 0,
# "tx_bytes-r": 0,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_multicast": 0,
# "tx_packets": 0,
# "up": false
# }
# ],
# "rx_bytes": 65698353872,
# "serial": "F09FC20A4ACA",
# "site_id": "56c87bc1b41038d25762ce86",
# "stat": {
# "mac": "f0:9f:c2:0a:4a:ca",
# "port_5-rx_bytes": 174998957475,
# "port_5-rx_packets": 301949064,
# "port_5-tx_bytes": 520122521522,
# "port_5-tx_packets": 427159453,
# "port_6-rx_bytes": 166842488635,
# "port_6-rx_packets": 219854631,
# "port_6-tx_bytes": 229212375520,
# "port_6-tx_packets": 254220128,
# "port_7-rx_bytes": 6621976,
# "port_7-rx_packets": 80125,
# "port_7-tx_bytes": 1025174082,
# "port_7-tx_packets": 8145657,
# "port_8-rx_bytes": 469223202436,
# "port_8-rx_packets": 372402600,
# "port_8-tx_bytes": 63409333653,
# "port_8-tx_packets": 224828560,
# "rx_bytes": 811071270522,
# "rx_packets": 894286420,
# "tx_bytes": 813769404777,
# "tx_packets": 914353798,
# "uplink-rx_bytes": 488790756975,
# "uplink-rx_packets": 389433418,
# "uplink-tx_bytes": 69186801149,
# "uplink-tx_packets": 238979784
# },
# "state": 1,
# "stp_priority": "32768",
# "stp_version": "rstp",
# "tx_bytes": 478603665596,
# "type": "usw",
# "upgradable": true,
# "uplink": {
# "full_duplex": true,
# "ip": "192.168.1.226",
# "mac": "f0:9f:c2:0a:4a:ca",
# "max_speed": 1000,
# "media": "GE",
# "name": "eth0",
# "netmask": "255.255.255.0",
# "num_port": 10,
# "port_idx": 8,
# "rx_bytes": 478603665596,
# "rx_bytes-r": 3057,
# "rx_dropped": 0,
# "rx_errors": 0,
# "rx_multicast": 0,
# "rx_packets": 380556410,
# "speed": 1000,
# "tx_bytes": 65698353872,
# "tx_bytes-r": 1199,
# "tx_dropped": 0,
# "tx_errors": 0,
# "tx_packets": 231549589,
# "type": "wire",
# "up": true
# },
# "uplink_depth": 1,
# "uptime": 3641077,
# "user-num_sta": 22,
# "version": "3.3.12.3861",
# "x_authkey": "...",
# "x_fingerprint": "70:b5:2e:d2:ec:59:6d:18:fb:11:12:d9:cb:8d:8f:74"
# }
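# A small illustration (not from the original file) of consuming the parsed
# payload above -- e.g. the PoE draw on port 5 of the sample device:
#
#     switch = US8P150(site, data)    # site/data come from the controller API
#     port5 = switch.port[5]
#     print(port5['name'], port5['poe_power'], 'W')   # -> Port 5 4.16 W
#     print(switch.general_temperature)               # -> 63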
|
StarcoderdataPython
|
18204
|
#!/usr/bin/env python3
"""Recipe for training a wav2vec-based ctc ASR system with librispeech.
The system employs wav2vec as its encoder. Decoding is performed with
ctc greedy decoder.
To run this recipe, do the following:
> python train_with_wav2vec.py hparams/train_with_wav2vec.yaml
The neural network is trained on CTC likelihood target and character units
are used as basic recognition tokens. Training is performed on the full
LibriSpeech dataset (960 h).
Authors
* <NAME> 2021
* <NAME> 2021
* <NAME> 2020
* <NAME> 2020
* <NAME> 2020
* <NAME> 2020
* <NAME> 2020
"""
import os
import sys
import torch
import logging
import speechbrain as sb
from speechbrain.utils.distributed import run_on_main
from hyperpyyaml import load_hyperpyyaml
from pathlib import Path
logger = logging.getLogger(__name__)
# Define training procedure
class ASR(sb.Brain):
def compute_forward(self, batch, stage):
"""Forward computations from the waveform batches to the output probabilities."""
batch = batch.to(self.device)
wavs, wav_lens = batch.sig
tokens_bos, _ = batch.tokens_bos
wavs, wav_lens = wavs.to(self.device), wav_lens.to(self.device)
# Add augmentation if specified
if stage == sb.Stage.TRAIN:
if hasattr(self.modules, "env_corrupt"):
wavs_noise = self.modules.env_corrupt(wavs, wav_lens)
wavs = torch.cat([wavs, wavs_noise], dim=0)
wav_lens = torch.cat([wav_lens, wav_lens])
tokens_bos = torch.cat([tokens_bos, tokens_bos], dim=0)
if hasattr(self.hparams, "augmentation"):
wavs = self.hparams.augmentation(wavs, wav_lens)
# Forward pass
feats = self.modules.wav2vec2(wavs)
x = self.modules.enc(feats)
# Compute outputs
p_tokens = None
logits = self.modules.ctc_lin(x)
p_ctc = self.hparams.log_softmax(logits)
if stage != sb.Stage.TRAIN:
p_tokens = sb.decoders.ctc_greedy_decode(
p_ctc, wav_lens, blank_id=self.hparams.blank_index
)
return p_ctc, wav_lens, p_tokens
def compute_objectives(self, predictions, batch, stage):
"""Computes the loss (CTC+NLL) given predictions and targets."""
p_ctc, wav_lens, predicted_tokens = predictions
ids = batch.id
tokens_eos, tokens_eos_lens = batch.tokens_eos
tokens, tokens_lens = batch.tokens
if hasattr(self.modules, "env_corrupt") and stage == sb.Stage.TRAIN:
tokens_eos = torch.cat([tokens_eos, tokens_eos], dim=0)
tokens_eos_lens = torch.cat(
[tokens_eos_lens, tokens_eos_lens], dim=0
)
tokens = torch.cat([tokens, tokens], dim=0)
tokens_lens = torch.cat([tokens_lens, tokens_lens], dim=0)
loss_ctc = self.hparams.ctc_cost(p_ctc, tokens, wav_lens, tokens_lens)
loss = loss_ctc
if stage != sb.Stage.TRAIN:
# Decode token terms to words
predicted_words = [
"".join(self.tokenizer.decode_ndim(utt_seq)).split(" ")
for utt_seq in predicted_tokens
]
target_words = [wrd.split(" ") for wrd in batch.wrd]
self.wer_metric.append(ids, predicted_words, target_words)
self.cer_metric.append(ids, predicted_words, target_words)
return loss
def fit_batch(self, batch):
"""Train the parameters given a single batch in input"""
predictions = self.compute_forward(batch, sb.Stage.TRAIN)
loss = self.compute_objectives(predictions, batch, sb.Stage.TRAIN)
loss.backward()
if self.check_gradients(loss):
self.wav2vec_optimizer.step()
self.model_optimizer.step()
self.wav2vec_optimizer.zero_grad()
self.model_optimizer.zero_grad()
return loss.detach()
def evaluate_batch(self, batch, stage):
"""Computations needed for validation/test batches"""
predictions = self.compute_forward(batch, stage=stage)
with torch.no_grad():
loss = self.compute_objectives(predictions, batch, stage=stage)
return loss.detach()
def on_stage_start(self, stage, epoch):
"""Gets called at the beginning of each epoch"""
if stage != sb.Stage.TRAIN:
self.cer_metric = self.hparams.cer_computer()
self.wer_metric = self.hparams.error_rate_computer()
def on_stage_end(self, stage, stage_loss, epoch):
"""Gets called at the end of an epoch."""
# Compute/store important stats
stage_stats = {"loss": stage_loss}
if stage == sb.Stage.TRAIN:
self.train_stats = stage_stats
else:
stage_stats["CER"] = self.cer_metric.summarize("error_rate")
stage_stats["WER"] = self.wer_metric.summarize("error_rate")
# Perform end-of-iteration things, like annealing, logging, etc.
if stage == sb.Stage.VALID:
old_lr_model, new_lr_model = self.hparams.lr_annealing_model(
stage_stats["loss"]
)
old_lr_wav2vec, new_lr_wav2vec = self.hparams.lr_annealing_wav2vec(
stage_stats["loss"]
)
sb.nnet.schedulers.update_learning_rate(
self.model_optimizer, new_lr_model
)
sb.nnet.schedulers.update_learning_rate(
self.wav2vec_optimizer, new_lr_wav2vec
)
self.hparams.train_logger.log_stats(
stats_meta={
"epoch": epoch,
"lr_model": old_lr_model,
"lr_wav2vec": old_lr_wav2vec,
},
train_stats=self.train_stats,
valid_stats=stage_stats,
)
self.checkpointer.save_and_keep_only(
meta={"WER": stage_stats["WER"]}, min_keys=["WER"],
)
elif stage == sb.Stage.TEST:
self.hparams.train_logger.log_stats(
stats_meta={"Epoch loaded": self.hparams.epoch_counter.current},
test_stats=stage_stats,
)
with open(self.hparams.wer_file, "w") as w:
self.wer_metric.write_stats(w)
def init_optimizers(self):
"Initializes the wav2vec2 optimizer and model optimizer"
self.wav2vec_optimizer = self.hparams.wav2vec_opt_class(
self.modules.wav2vec2.parameters()
)
self.model_optimizer = self.hparams.model_opt_class(
self.hparams.model.parameters()
)
if self.checkpointer is not None:
self.checkpointer.add_recoverable(
"wav2vec_opt", self.wav2vec_optimizer
)
self.checkpointer.add_recoverable("modelopt", self.model_optimizer)
def dataio_prepare(hparams):
"""This function prepares the datasets to be used in the brain class.
It also defines the data processing pipeline through user-defined functions."""
data_folder = hparams["data_folder"]
train_data = sb.dataio.dataset.DynamicItemDataset.from_csv(
csv_path=hparams["train_csv"], replacements={"data_root": data_folder},
)
if hparams["sorting"] == "ascending":
# we sort training data to speed up training and get better results.
train_data = train_data.filtered_sorted(sort_key="duration")
        # when sorting, do not shuffle in the dataloader! otherwise it is pointless
hparams["train_dataloader_opts"]["shuffle"] = False
elif hparams["sorting"] == "descending":
train_data = train_data.filtered_sorted(
sort_key="duration", reverse=True
)
        # when sorting, do not shuffle in the dataloader! otherwise it is pointless
hparams["train_dataloader_opts"]["shuffle"] = False
elif hparams["sorting"] == "random":
pass
else:
raise NotImplementedError(
"sorting must be random, ascending or descending"
)
valid_data = sb.dataio.dataset.DynamicItemDataset.from_csv(
csv_path=hparams["valid_csv"], replacements={"data_root": data_folder},
)
valid_data = valid_data.filtered_sorted(sort_key="duration")
# test is separate
test_datasets = {}
for csv_file in hparams["test_csv"]:
name = Path(csv_file).stem
test_datasets[name] = sb.dataio.dataset.DynamicItemDataset.from_csv(
csv_path=csv_file, replacements={"data_root": data_folder}
)
test_datasets[name] = test_datasets[name].filtered_sorted(
sort_key="duration"
)
datasets = [train_data, valid_data] + [i for k, i in test_datasets.items()]
# 2. Define audio pipeline:
@sb.utils.data_pipeline.takes("wav")
@sb.utils.data_pipeline.provides("sig")
def audio_pipeline(wav):
sig = sb.dataio.dataio.read_audio(wav)
return sig
sb.dataio.dataset.add_dynamic_item(datasets, audio_pipeline)
label_encoder = sb.dataio.encoder.CTCTextEncoder()
# 3. Define text pipeline:
@sb.utils.data_pipeline.takes("wrd")
@sb.utils.data_pipeline.provides(
"wrd", "char_list", "tokens_list", "tokens_bos", "tokens_eos", "tokens"
)
def text_pipeline(wrd):
yield wrd
char_list = list(wrd)
yield char_list
tokens_list = label_encoder.encode_sequence(char_list)
yield tokens_list
tokens_bos = torch.LongTensor([hparams["bos_index"]] + (tokens_list))
yield tokens_bos
tokens_eos = torch.LongTensor(tokens_list + [hparams["eos_index"]])
yield tokens_eos
tokens = torch.LongTensor(tokens_list)
yield tokens
sb.dataio.dataset.add_dynamic_item(datasets, text_pipeline)
lab_enc_file = os.path.join(hparams["save_folder"], "label_encoder.txt")
special_labels = {
"bos_label": hparams["bos_index"],
"eos_label": hparams["eos_index"],
"blank_label": hparams["blank_index"],
}
label_encoder.load_or_create(
path=lab_enc_file,
from_didatasets=[train_data],
output_key="char_list",
special_labels=special_labels,
sequence_input=True,
)
# 4. Set output:
sb.dataio.dataset.set_output_keys(
datasets,
["id", "sig", "wrd", "char_list", "tokens_bos", "tokens_eos", "tokens"],
)
return train_data, valid_data, test_datasets, label_encoder
if __name__ == "__main__":
# CLI:
hparams_file, run_opts, overrides = sb.parse_arguments(sys.argv[1:])
# If distributed_launch=True then
# create ddp_group with the right communication protocol
sb.utils.distributed.ddp_init_group(run_opts)
with open(hparams_file) as fin:
hparams = load_hyperpyyaml(fin, overrides)
# Create experiment directory
sb.create_experiment_directory(
experiment_directory=hparams["output_folder"],
hyperparams_to_save=hparams_file,
overrides=overrides,
)
# Dataset prep (parsing Librispeech)
from librispeech_prepare import prepare_librispeech # noqa
# multi-gpu (ddp) save data preparation
run_on_main(
prepare_librispeech,
kwargs={
"data_folder": hparams["data_folder"],
"tr_splits": hparams["train_splits"],
"dev_splits": hparams["dev_splits"],
"te_splits": hparams["test_splits"],
"save_folder": hparams["output_folder"],
"merge_lst": hparams["train_splits"],
"merge_name": "train.csv",
"skip_prep": hparams["skip_prep"],
},
)
# here we create the datasets objects as well as tokenization and encoding
train_data, valid_data, test_datasets, label_encoder = dataio_prepare(
hparams
)
# Trainer initialization
asr_brain = ASR(
modules=hparams["modules"],
hparams=hparams,
run_opts=run_opts,
checkpointer=hparams["checkpointer"],
)
    # We dynamically add the tokenizer to our brain class.
# NB: This tokenizer corresponds to the one used for the LM!!
asr_brain.tokenizer = label_encoder
# Training
asr_brain.fit(
asr_brain.hparams.epoch_counter,
train_data,
valid_data,
train_loader_kwargs=hparams["train_dataloader_opts"],
valid_loader_kwargs=hparams["valid_dataloader_opts"],
)
# Testing
for k in test_datasets.keys(): # keys are test_clean, test_other etc
asr_brain.hparams.wer_file = os.path.join(
hparams["output_folder"], "wer_{}.txt".format(k)
)
asr_brain.evaluate(
test_datasets[k], test_loader_kwargs=hparams["test_dataloader_opts"]
)
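# A hedged usage note (not part of the recipe): since parse_arguments() returns
# `overrides` and load_hyperpyyaml(fin, overrides) applies them above, any
# hyperparameter in the YAML can be overridden from the CLI. The paths below
# are placeholders:
#
#     python train_with_wav2vec.py hparams/train_with_wav2vec.yaml \
#         --data_folder /path/to/LibriSpeech --sorting ascending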
|
StarcoderdataPython
|
4858560
|
<filename>for.py
##n=int(input("enter the number:"))
##i=1
##for i in range(1,n+1):
## if i%2==0:
## print (i)
##sum=0
##for i in range(1,101):
## if i%2==0:
## sum=sum+i
##
##print(sum)
##
##sum=0
##for i in range(1,101):
## if i%2==1:
## sum=sum+i
##print(sum)
##
##
##n=int(input("enter the number:"))
##for i in range(1,n+1):
## if i%2==1:
## print(i)
##
##n=int(input("enter any number:"))
##count=0
##for i in range(2,n):
## if(n%i==0):
## count+=1
##
##if(count==0):
## print(" prime")
##
##else:
## print(" n prime")
n=int(input("enter range:"))
a=0
b=1
c=0
for i in range(0,n+1):
c=a+b
print(c, end=" ")
b=a
a=c
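# An idiomatic alternative (a sketch, not part of the original exercise):
def fib(count):
    """Yield the first `count` Fibonacci numbers, starting from 1."""
    a, b = 0, 1
    for _ in range(count):
        a, b = b, a + b
        yield a

#print(*fib(10))  # -> 1 1 2 3 5 8 13 21 34 55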
|
StarcoderdataPython
|
3339326
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask import Blueprint
label_writer_450 = Blueprint(
'label_writer_450', __name__, template_folder='templates', static_folder='static'
)
from . import views
from . import zpl
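# A hedged registration sketch; the application object is an assumption and
# not part of this module:
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.register_blueprint(label_writer_450, url_prefix='/label-writer-450')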
|
StarcoderdataPython
|
9672837
|
from .six import (  # noqa: F401
urlparse,
urlunparse,
Generator,
)
|
StarcoderdataPython
|
4902078
|
"""
This module implements the Qt interface and is where every other module is
put together.
Here's a flow diagram with how the API and Player initialization is done
inside this module.
+-------------------------+
| Prompt with SetupWidget |
|-------------------------|
| Ask the user for what |
| API & Player to use |
+------------+------------+
|
+----------------+---------------+
| |
v v
+-------------+------------+ +-------------+------------+
| player.initialize_player | | api.initialize_api |
|--------------------------| |--------------------------|
| Initialize the Player | | Initialize the API |
| using the PlayerData | | using the APIData entry |
| entry information | | information |
+-------------+------------+ +-------------+------------+
|
v
              +---------------- Does it need GUI interaction?
              | Yes (APIData.gui_init_fn)
              |
              |                                 |
              |                                 | No
              v                                 v
+------------------------+ +--------------------------+
| Call custom function | | gui.wait_for_connection |
| from APIData which | |--------------------------|
| handles initialization +---->| Wait for the API connect |
| inside the GUI window | +--------------------------+
| (APIData.gui_init_fn) | |
+------------------------+ |
|
v
START
The API and player modules are mixed using Qt events:
* Position changes -> MainWindow.change_video_position(ms)
* Status changes -> MainWindow.change_video_status(status)
* Song changes -> MainWindow.play_video(artist, title, start)
These events are emitted inside the APIs.
"""
import time
import types
import logging
import importlib
from typing import Callable, Optional
from contextlib import suppress
from qtpy.QtWidgets import QWidget, QHBoxLayout
from qtpy.QtGui import QFontDatabase
from qtpy.QtCore import Qt, QTimer, QCoreApplication, Slot, QThread
from vidify import format_name, find_module
from vidify.api import APIS, APIData
from vidify.player import initialize_player, PlayerData
from vidify.config import Config
from vidify.youtube import YouTubeDLWorker, get_direct_url, get_youtube_url
from vidify.lyrics import get_lyrics
from vidify.gui import Res, Colors
from vidify.gui.components import SetupWidget, APIConnecter
class MainWindow(QWidget):
def __init__(self, config: Config) -> None:
"""
Main window with the GUI and whatever player is being used.
"""
super().__init__()
self.setWindowTitle('vidify')
# Setting the window to stay on top
if config.stay_on_top:
self.setWindowFlags(Qt.WindowStaysOnTopHint)
# Setting the fullscreen and window size
if config.fullscreen:
self.showFullScreen()
else:
self.setMinimumSize(800, 800)
self.resize(config.width or 800, config.height or 800)
# Loading the used fonts (Inter)
font_db = QFontDatabase()
for font in Res.fonts:
font_db.addApplicationFont(font)
# Initializing the player and saving the config object in the window.
self.layout = QHBoxLayout(self)
self.layout.setContentsMargins(0, 0, 0, 0)
self.layout.setSpacing(0)
self.config = config
# Otherwise, the user is prompted for an API. After choosing one,
# it will be initialized from outside this function.
logging.info("Loading setup screen")
self.setup_widget = SetupWidget(config.api, config.player)
self.layout.addWidget(self.setup_widget)
self.setup_widget.done.connect(self.on_setup_done)
# Setting focus on the continue button after the first time.
if None not in (config.api, config.player):
self.setup_widget.continue_btn.setFocus()
def closeEvent(self, event) -> None:
"""
When the user closes the app, this makes sure that the most important
attributes are correctly deleted.
"""
logging.info("Closing event detected")
try:
            # Stopping the audiosync thread (initialized whenever audiosync is enabled)
if self.config.audiosync:
self.audiosync.abort()
self.audiosync.wait()
# Stopping the youtube downloader thread (can be uninitialized)
with suppress(AttributeError):
if self.yt_thread.isRunning():
self.yt_thread.exit()
self.yt_thread.wait()
# Safely deleting the player and the API objects
with suppress(AttributeError):
del self.player
with suppress(AttributeError):
del self.api
except Exception as e:
logging.info("Error when closing: %s", str(e))
super().closeEvent(event)
@Slot(object, object)
def on_setup_done(self, api: APIData, player: PlayerData) -> None:
"""
Method called when the API and Player are selected with APISelection.
"""
# Completely removing the widget used to obtain the API string
self.layout.removeWidget(self.setup_widget)
self.setup_widget.setParent(None)
self.setup_widget.hide()
del self.setup_widget
# Saving the API and Player in the config
self.config.api = api.id
self.config.player = player.id
# Starting the asynchronous API initialization
self.initialize_api(api)
logging.info("Using %s as the API", api.id)
# Initializing the player
self.player = initialize_player(player, self.config)
logging.info("Using %s as the player", player.id)
def initialize_api(self, api_data: APIData) -> None:
"""
Initializes an API with the information from APIData.
"""
# The API may need interaction with the user to obtain credentials
# or similar data. This function will already take care of the
# rest of the initialization.
if api_data.gui_init_fn is not None:
fn = getattr(self, api_data.gui_init_fn)
fn()
return
# Initializing the API with dependency injection.
mod = importlib.import_module(api_data.module)
cls = getattr(mod, api_data.class_name)
self.api = cls()
self.wait_for_connection(
self.api.connect_api, message=api_data.connect_msg,
event_loop_interval=api_data.event_loop_interval)
def wait_for_connection(self, conn_fn: Callable[[], None],
message: Optional[str] = None,
event_loop_interval: int = 1000) -> None:
"""
Creates an APIConnecter instance and waits for the API to be
available, or times out otherwise.
"""
self.event_loop_interval = event_loop_interval
self.api_connecter = APIConnecter(
conn_fn, message or "Waiting for connection")
self.api_connecter.success.connect(self.on_conn_success)
self.api_connecter.fail.connect(self.on_conn_fail)
self.layout.addWidget(self.api_connecter)
self.api_connecter.start()
@Slot()
def on_conn_fail(self) -> None:
"""
If the API failed to connect, the app will be closed.
"""
print("Timed out waiting for the connection")
QCoreApplication.exit(1)
@Slot(float)
def on_conn_success(self, start_time: float) -> None:
"""
Once the connection has been established correctly, the API can
be started properly.
"""
logging.info("Succesfully connected to the API")
self.layout.removeWidget(self.api_connecter)
del self.api_connecter
# Initializing the optional audio synchronization extension, now
# that there's access to the API's data. Note that this feature
# is only available on Linux.
if self.config.audiosync:
from vidify.audiosync import AudiosyncWorker
self.audiosync = AudiosyncWorker(self.api.player_name)
if self.config.debug:
self.audiosync.debug = True
self.audiosync.success.connect(self.on_audiosync_success)
self.audiosync.failed.connect(self.on_audiosync_fail)
# Initializing the player and starting the first video
self.setStyleSheet(f"background-color:{Colors.black};")
self.layout.addWidget(self.player)
self.play_video(self.api.artist, self.api.title, start_time)
# Connecting to the signals generated by the API
self.api.new_song_signal.connect(self.play_video)
self.api.position_signal.connect(self.change_video_position)
self.api.status_signal.connect(self.change_video_status)
# Starting the event loop if it was initially passed as
# a parameter.
if self.event_loop_interval is not None:
self.start_event_loop(self.api.event_loop,
self.event_loop_interval)
def start_event_loop(self, event_loop: Callable[[], None],
ms: int) -> None:
"""
Starts a "manual" event loop with a timer every `ms` milliseconds.
This is used with the SwSpotify API and the Web API to check every
`ms` seconds if a change has happened, like if the song was paused.
"""
logging.info("Starting event loop")
timer = QTimer(self)
# Qt doesn't accept a method as the parameter so it's converted
# to a function.
if isinstance(event_loop, types.MethodType):
timer.timeout.connect(lambda: event_loop())
else:
timer.timeout.connect(event_loop)
timer.start(ms)
@Slot(bool)
def change_video_status(self, is_playing: bool) -> None:
"""
Slot used for API updates of the video status.
"""
# If there is an audiosync thread running, this will pause the sound
# recording and youtube downloading.
if self.config.audiosync and self.audiosync.status != 'idle':
self.audiosync.is_running = is_playing
self.player.pause = not is_playing
@Slot(int)
def change_video_position(self, ms: int) -> None:
"""
Slot used for API updates of the video position.
"""
# Audiosync is aborted if the position of the video changed, since
# the audio being recorded won't make sense.
if self.config.audiosync and self.audiosync.status != 'idle':
self.audiosync.abort()
if not self.config.audiosync:
self.player.seek(ms)
@Slot(str, str, float)
def play_video(self, artist: str, title: str, start_time: float) -> None:
"""
Slot used to play a video. This is called when the API is first
initialized from this GUI, and afterwards from the event loop handler
whenever a new song is detected.
If an error was detected when downloading the video, the default one
is shown instead.
Both audiosync and youtubedl work in separate threads to avoid
blocking the GUI. This method will start both of them.
"""
# Checking that the artist and title are valid first of all
if self.api.artist in (None, '') and self.api.title in (None, ''):
logging.info("The provided artist and title are empty.")
self.on_youtubedl_fail()
if self.config.audiosync:
self.on_audiosync_fail()
return
# This delay is used to know the elapsed time until the video
# actually starts playing, used in the audiosync feature.
self.timestamp = start_time
query = f"ytsearch:{format_name(artist, title)} Official Video"
if self.config.audiosync:
self.launch_audiosync(query)
self.launch_youtubedl(query)
def launch_audiosync(self, query: str) -> None:
"""
Starts the audiosync thread, that will call either
self.on_audiosync_success, or self.on_audiosync_fail once it's
finished.
First trying to stop the previous audiosync thread, as only
one audiosync thread can be running at once.
Note: QThread.start() is guaranteed to work once QThread.run()
has returned. Thus, this will wait until it's done and launch
the new one.
"""
self.audiosync.abort()
self.audiosync.wait()
self.audiosync.youtube_title = query
self.audiosync.start()
logging.info("Started a new audiosync job")
def launch_youtubedl(self, query: str) -> None:
"""
Starts a YoutubeDL thread that will call either
self.on_youtubedl_success or self.on_youtubedl_fail once it's done.
"""
logging.info("Starting the youtube-dl thread")
self.youtubedl = YouTubeDLWorker(
query, self.config.debug, self.config.width, self.config.height)
self.yt_thread = QThread()
self.youtubedl.moveToThread(self.yt_thread)
self.yt_thread.started.connect(self.youtubedl.get_url)
self.youtubedl.success.connect(self.on_youtubedl_success)
self.youtubedl.fail.connect(self.on_youtubedl_fail)
self.youtubedl.finish.connect(self.yt_thread.exit)
self.yt_thread.start()
@Slot()
def on_youtubedl_fail(self) -> None:
"""
If Youtube-dl for whatever reason failed to load the video, a fallback
error video is shown, along with a message to let the user know what
happened.
"""
print("The video wasn't found, either because of an issue with your"
" internet connection or because the provided data was invalid."
" For more information, enable the debug mode.")
# Or playing the default video in the GUI
self.player.start_video(Res.default_video, self.api.is_playing)
@Slot(dict)
def on_youtubedl_success(self, data: dict) -> None:
# Obtaining the rest of the data from the API
is_playing = self.api.is_playing
try:
position = self.api.position
except NotImplementedError:
position = 0
# Otherwise, playing the video inside the GUI. If audiosync is
# enabled, the position is ignored. That way, it can stay
# synchronized.
url = get_direct_url(data) if self.player.DIRECT_URL \
else get_youtube_url(data)
self.player.start_video(url, is_playing)
if not self.config.audiosync:
self.player.seek(position)
# Finally, the lyrics are displayed.
if self.config.lyrics:
print(get_lyrics(self.api.artist, self.api.title))
@Slot()
def on_audiosync_fail(self) -> None:
"""
Currently, when audiosync fails, nothing happens.
"""
logging.info("Audiosync module failed to return the lag")
@Slot(int)
def on_audiosync_success(self, lag: int) -> None:
"""
Slot used after the audiosync function has finished. It sets the
returned lag in milliseconds on the player.
This assumes that the song wasn't paused until this issue is fixed:
https://github.com/vidify/audiosync/issues/12
"""
logging.info("Audiosync module returned %d ms", lag)
# The current API position according to what's being recorded.
playback_delay = round((time.time() - self.timestamp) * 1000) \
- self.player.position
lag += playback_delay
# The user's custom audiosync delay. This is basically the time taken
# until the module started recording (which may depend on the user
# hardware and other things). Thus, it will almost always be a
# negative value.
lag += self.config.audiosync_calibration
logging.info("Total delay is %d ms", lag)
if lag > 0:
self.player.seek(lag, relative=True)
elif lag < 0:
# If a negative delay is larger than the current player position,
# the player position is set to zero after the lag has passed
# with a timer.
if self.player.position < -lag:
self.sync_timer = QTimer(self)
self.sync_timer.singleShot(-self.player.position - lag,
lambda: self.player.seek(0))
else:
self.player.seek(lag, relative=True)
def init_spotify_web_api(self) -> None:
"""
SPOTIFY WEB API CUSTOM FUNCTION
Note: the Tekore imports are done inside the functions so that
Tekore isn't needed for whoever doesn't plan to use the Spotify
Web API.
"""
from vidify.api.spotify.web import get_token
from vidify.gui.api.spotify_web import SpotifyWebPrompt
token = get_token(self.config.refresh_token, self.config.client_id,
self.config.client_secret)
if token is not None:
# If the previous token was valid, the API can already start.
logging.info("Reusing a previously generated token")
self.start_spotify_web_api(token, save_config=False)
else:
# Otherwise, the credentials are obtained with the GUI. When
# a valid auth token is ready, the GUI will initialize the API
# automatically exactly like above. The GUI won't ask for a
# redirect URI for now.
logging.info("Asking the user for credentials")
# The SpotifyWebPrompt handles the interaction with the user and
# emits a `done` signal when it's done.
self._spotify_web_prompt = SpotifyWebPrompt(
self.config.client_id, self.config.client_secret,
self.config.redirect_uri)
self._spotify_web_prompt.done.connect(self.start_spotify_web_api)
self.layout.addWidget(self._spotify_web_prompt)
def start_spotify_web_api(self, token: 'RefreshingToken',
save_config: bool = True) -> None:
"""
SPOTIFY WEB API CUSTOM FUNCTION
Initializes the Web API, also saving them in the config for future
usage (if `save_config` is true).
"""
from vidify.api.spotify.web import SpotifyWebAPI
logging.info("Initializing the Spotify Web API")
# Initializing the web API
self.api = SpotifyWebAPI(token)
api_data = find_module(APIS, 'SPOTIFY_WEB')
self.wait_for_connection(
self.api.connect_api, message=api_data.connect_msg,
event_loop_interval=api_data.event_loop_interval)
# The obtained credentials are saved for the future
if save_config:
logging.info("Saving the Spotify Web API credentials")
self.config.client_secret = self._spotify_web_prompt.client_secret
self.config.client_id = self._spotify_web_prompt.client_id
self.config.refresh_token = token.refresh_token
# The credentials prompt widget is removed after saving the data. It
# may not exist because start_spotify_web_api was called directly,
# so errors are taken into account.
with suppress(AttributeError):
self.layout.removeWidget(self._spotify_web_prompt)
self._spotify_web_prompt.hide()
del self._spotify_web_prompt
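# A minimal launch sketch (an assumption -- vidify's real entry point lives
# elsewhere and builds Config from CLI arguments and the config file):
#
#     import sys
#     from qtpy.QtWidgets import QApplication
#     app = QApplication(sys.argv)
#     window = MainWindow(Config())
#     window.show()
#     sys.exit(app.exec_())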
|
StarcoderdataPython
|
5071492
|
# -*- coding: utf-8 -*-
"""
Auto Self Report v3.1
Created on Fri Feb 26 16:34:04 2021
@author: <NAME>.
Copyright (c) 2020-2021 <NAME>.. All rights reserved.
"""
"""
3.1 update:
Adds support for "wake the machine early - report exactly at the scheduled time"
"""
#**********
# Usage notes
#1. This script requires the Chrome browser. Download "chromedriver.exe" and add it to the system PATH; see 'https://blog.csdn.net/qq_30583611/article/details/108932842' for details
#2. Although Steve tried to maximize compatibility when writing this program, keeping its Python dependencies small and using try-except extensively to avoid runtime errors, you still need Python 3 and must install the selenium module with "pip install selenium" in python.exe
#3. For how to schedule this script as a timed task, see 'https://www.baidu.com/link?url=8oBylDtTMNyin0UJff5LVeMdYhi1wyZ5SpWyDmB5VOXkd0c73cpMuzX_SwETvsASEVLUi8beTCzQepLyumjyMa&wd=&eqid=b2c95c6a0004c09d0000000460019850'
#4. Fill in the four variables below before running
#**********
# Put your student ID between the quotes
ID = ''
# Put your password between the quotes
Password = ''
# Name of a wifi that is known to be unable to reach the health-report site
Banned_Wifi_Name = ''
# Name of a backup wifi (must have been connected before and require no extra login step)
Spare_Wifi_Name = ''
print("Copyright (c) 2020-2021 <NAME>.. All rights reserved.\n")
print("正在执行Auto Self Report v3.1...\n")
import time
#import random
import os
# Block the script from running
#os._exit(0)
try:
from selenium import webdriver
except:
    i = ''
    while i not in ('Y', 'N'):
        print("Your Python is missing the selenium module; install it now?")
        i = input("Y: please install selenium for me\nN: I will install it myself\n")
        if i == 'Y':
            print("Installing the selenium module...\n")
            try:
                os.system('pip install selenium')
                time.sleep(10)
                from selenium import webdriver
            except:
                print("Unknown error: selenium could not be installed. Please run 'pip install selenium' in python.exe manually, then rerun this program\n")
                time.sleep(20)
                exit()
        elif i == 'N':
            print("Please run 'pip install selenium' in python.exe manually, then rerun this program\n")
            time.sleep(20)
            exit()
# Scan the report history for unreported items and file them
def Search_Unreported():
    print("Scanning for unreported items...\n")
    Unreported_Flag = 0
    for i in range(1,63):
        try:
            temp = driver.find_element_by_xpath('//*[@id="Panel1_DataList1"]/ul/li[' + str(i) + ']')
            #print(temp.text)
            temp_str = temp.text
            str_len = len(temp_str)
            for j in range(0, str_len):
                if temp_str[j] == '(':
                    left_index = j
                elif temp_str[j] == ')':
                    right_index = j
            state_str = temp_str[left_index+1 : right_index]
            #print(state_str)
            # The page marks unreported entries with this exact Chinese label,
            # so the literal must stay untranslated:
            if state_str == '未填报,请点击此处补报':
                Unreported_Flag = 1
                print("Found an unreported item, filing it now...\n")
                try:
                    driver.find_element_by_xpath('//*[@id="Panel1_DataList1"]/ul/li[' + str(i) + ']').click()
                except:
                    pass
                # Tick "I promise..." //*[@id="p1_ChengNuo-inputEl-icon"]
                driver.find_element_by_xpath('//*[@id="p1_ChengNuo-inputEl-icon"]').click()
                """# The new "daily report" system no longer asks for a specific temperature
                # Generate a temperature and fill it in
                tempreture = '%.1f'%(random.uniform(36.0,36.9))
                driver.find_element_by_xpath('//*[@id="p1_TiWen-inputEl"]').send_keys(str(tempreture))
                """
                # Tick "good (temperature not above 37.3)" //*[@id="fineui_0-inputEl-icon"]
                driver.find_element_by_xpath('//*[@id="fineui_0-inputEl-icon"]').click()
                # Tick "in China" //*[@id="fineui_5-inputEl-icon"]
                driver.find_element_by_xpath('//*[@id="fineui_5-inputEl-icon"]').click()
                # Tick "in Shanghai" //*[@id="fineui_7-inputEl-icon"]
                driver.find_element_by_xpath('//*[@id="fineui_7-inputEl-icon"]').click()
                # Tick "living on campus" //*[@id="fineui_9-inputEl-icon"]
                time.sleep(0.5)
                driver.find_element_by_xpath('//*[@id="fineui_9-inputEl-icon"]').click()
                time.sleep(0.5)
                # Tick "not the home address" //*[@id="fineui_12-inputEl-icon"]
                driver.find_element_by_xpath('//*[@id="fineui_12-inputEl-icon"]').click()
                """
                # Tick "not on campus"
                driver.find_element_by_xpath('//*[@id="fineui_5-inputEl-icon"]').click()
                # Tick "not coming to campus"
                driver.find_element_by_xpath('//*[@id="fineui_10-inputEl-icon"]').click()
                # Tick "in China"
                driver.find_element_by_xpath('//*[@id="fineui_17-inputEl-icon"]').click()
                time.sleep(0.1)
                # Tick "not in Shanghai"
                driver.find_element_by_xpath('//*[@id="fineui_8-inputEl-icon"]').click()
                """
                """# The page pre-ticks answers based on previous records
                # Tick "no"
                driver.find_element_by_xpath('//*[@id="fineui_21-inputEl-icon"]').click()
                driver.find_element_by_xpath('//*[@id="fineui_27-inputEl-icon"]').click()
                driver.find_element_by_xpath('//*[@id="fineui_17-inputEl-icon"]').click()
                driver.find_element_by_xpath('//*[@id="fineui_19-inputEl-icon"]').click()
                """
                # Click the submit button //*[@id="p1_ctl01_btnSubmit"]
                driver.find_element_by_xpath('//*[@id="p1_ctl01_btnSubmit"]').click()
                time.sleep(0.8)
                # Confirmation dialog - click "OK" (the button text is the Chinese '确定')
                #driver.find_element_by_xpath('//*[@id="fineui_32"]').click() #//*[@id="fineui_42"]
                for psb_num in range(0, 1024):
                    temp_xpath = '//*[@id="fineui_' + str(psb_num) + '"]'
                    try:
                        YorN = driver.find_element_by_xpath(temp_xpath)
                        temp_str = YorN.text
                        #print(temp_str)
                        if temp_str == '确定':
                            driver.find_element_by_xpath(temp_xpath).click()
                            #print("Clicked OK dialog #1!")
                            break
                    except:
                        pass
                time.sleep(0.2)
                # "Submitted" - OK, or "the evening report is not open yet" - OK
                for psb_num in range(0, 1024):
                    temp_xpath = '//*[@id="fineui_' + str(psb_num) + '"]'
                    try:
                        YorN = driver.find_element_by_xpath(temp_xpath)
                        temp_str = YorN.text
                        #print(temp_str)
                        if temp_str == '确定':
                            driver.find_element_by_xpath(temp_xpath).click()
                            break
                    except:
                        pass
                #print("Clicked OK dialog #2!")
                time.sleep(0.2)
                try:
                    # After "submitted - OK" the site returns to the home page;
                    # re-enter "report history" to confirm
                    driver.find_element_by_xpath('//*[@id="lnkReportHistory"]').click()
                    i = 1
                    #print("Reported for you: " + temp_str[0 : left_index] + " using temperature " + str(tempreture) + "\n")
                    print("Reported for you: " + temp_str[0 : left_index] + "\n")
                except:
                    """
                    # "The evening report is not open yet" - after OK, go back one page
                    driver.back()
                    print(temp_str[0 : left_index] + ' could not be reported because "the evening report is not open yet"\n')
                    """
                    pass
        except:
            pass
    if Unreported_Flag == 0:
        print("You have no unreported records!\n")
# Get the SSID of the currently connected WIFI - method from: https://cloud.tencent.com/developer/news/311861
def Get_CurrentSSID():
    from subprocess import check_output
    print("Calling 'showSSID.cmd' to read the current network info...")
    scanoutput = check_output([r"C:/showSSID.cmd"]) # "showSSID.cmd" must be placed at this exact path
    currentSSID = scanoutput.decode()
    currentSSID = currentSSID[0:3] # only the first three characters are kept for comparison
    print(currentSSID)
    return currentSSID
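# Note: showSSID.cmd is not included alongside this script. A plausible
# implementation (an assumption, not the original helper) wraps the Windows
# command `netsh wlan show interfaces` and prints the SSID line, which is
# why only a short prefix of its output is compared above.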
# Timestamp of the current moment
def Get_LocalTime():
    return int(time.mktime(time.strptime(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), "%Y-%m-%d %H:%M:%S")))
# Timestamp of the next 00:00
def Find_00():
    stamp_localTime = int(time.mktime(time.strptime(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), "%Y-%m-%d %H:%M:%S")))
    stamp_next_zero = stamp_localTime + 86400
    temp_array = time.localtime(stamp_next_zero)
    str_next_zero = time.strftime('%Y-%m-%d', temp_array) + ' 00:00:00'
    temp_array = time.strptime(str_next_zero, '%Y-%m-%d %H:%M:%S')
    stamp_next_zero = int(time.mktime(temp_array))
    return stamp_next_zero
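# A quick illustration of Find_00 (hypothetical clock time): at
# 2021-02-26 23:50:00 local time, the function jumps 86400 s ahead, truncates
# the result to its date, and returns the timestamp of 2021-02-27 00:00:00,
# i.e. the midnight trigger the polling loop below waits for.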
# Timestamp of next day's 00:00 (the reference time)
stamp_standard = Find_00()
# Check the network connection
Wifi_Change_Flag = 0 # 0: this program has not touched the WiFi settings; 1: it has
Change_Success_Flag = 0
try:
    if Get_CurrentSSID() == Banned_Wifi_Name:
        print("You are currently connected to " + Banned_Wifi_Name + ", which cannot reach the daily report site; switching to " + Spare_Wifi_Name + " automatically...\n")
        # Automatically switch to another network
        os.system('netsh wlan connect name=' + '"' + Spare_Wifi_Name + '"')
        Wifi_Change_Flag = 1
        time.sleep(10)
        while Get_CurrentSSID() == Banned_Wifi_Name:
            os.system('netsh wlan connect name=' + '"' + Spare_Wifi_Name + '"')
            Wifi_Change_Flag = 1
            time.sleep(10)
except:
    print("Could not connect to the backup network for an unknown reason; the health-report page will likely not open!\nPlease check the settings and retry\n")
# Check the clock every second and start the main flow exactly on time
print("The script is ready and will file your report at 00:00 sharp...\n")
while True:
    stamp_localtime = Get_LocalTime()
    if stamp_standard <= stamp_localtime:
        break
    elif stamp_standard - stamp_localtime > 60:
        time.sleep(60)
    else:
        time.sleep(1)
# Open the Chrome browser
print("Opening the Chrome browser...\n")
try:
    driver = webdriver.Chrome()
    print("Chrome opened successfully\n")
except:
    print("Could not open Chrome; please re-check item 1 of the usage notes at the top of this file, then rerun this program\n")
    time.sleep(20)
    exit()
# Open the health-report website
time.sleep(1)
print("Navigating to the daily report page...\n")
driver.get('https://selfreport.shu.edu.cn')
# Fill in the account and password
print("Filling in the account and password...\n")
driver.find_element_by_xpath('//*[@id="username"]').send_keys(ID)
driver.find_element_by_xpath('//*[@id="password"]').send_keys(Password)
time.sleep(0.5)
# Click "log in"
driver.find_element_by_xpath('//*[@id="submit"]').click()
print("Logged in!\n")
time.sleep(0.5)
# Enter "report history"
driver.find_element_by_xpath('//*[@id="lnkReportHistory"]').click()
time.sleep(0.5)
# Scan for unreported items and file them
Search_Unreported()
# Check and restore the WiFi settings
if Wifi_Change_Flag == 1:
    try:
        os.system('netsh wlan connect name=' + '"' + Banned_Wifi_Name + '"')
    except:
        print("Could not restore the WiFi settings for an unknown reason; please restore them manually!\n")
print("Your daily report is complete!\n")
# Decide whether to hibernate from the gap between now and the wake-up
# timestamp (not feasible: the wake-up timestamp cannot be obtained)
"""
# Get the OS boot time - method from: https://www.cnblogs.com/hushaojun/p/8202850.html
def Get_OSStartTime():
    import psutil
    return int(time.mktime(time.strptime(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(psutil.boot_time())), "%Y-%m-%d %H:%M:%S")))
psutil.memoize_when_activated
# Get the current time
def Get_LocalTime():
    return int(time.mktime(time.strptime(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), "%Y-%m-%d %H:%M:%S")))
# Decide whether the machine was auto-woken for the twice-daily report, and
# hence whether to hibernate automatically when the program finishes
try:
    local_time = Get_LocalTime()
    OS_StartTime = Get_OSStartTime()
    if local_time - OS_StartTime <= 300: # if now is within 5 min of boot time, count down 30 s and run "shutdown -h"
        i = 0
        while i <= 30:
            print(str(30 - i) + ' seconds until the hibernate command "shutdown -h" runs; close this program now to cancel...\n')
            time.sleep(1)
        driver.close()
        # Run the hibernate command
        os.system('shutdown -h')
        exit()
    else:
        driver.close()
        exit()
except:
    driver.close()
    exit()
"""
# After reporting, wait a while; hibernate unless the user intervenes
time_remained = 600 # leave 10 minutes
while time_remained >= 0:
    print("Close this window, or the computer will hibernate automatically in " + str(time_remained) + " seconds")
    time.sleep(60)
    time_remained = time_remained - 60
# Run the hibernate command
os.system('shutdown -h')
|
StarcoderdataPython
|
5018889
|
<filename>nova/api/openstack/compute/server_external_events.py
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import webob
from nova.api.openstack.compute.schemas import server_external_events
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova import compute
from nova import exception
from nova.i18n import _
from nova.i18n import _LI
from nova import objects
LOG = logging.getLogger(__name__)
ALIAS = 'os-server-external-events'
authorize = extensions.os_compute_authorizer(ALIAS)
class ServerExternalEventsController(wsgi.Controller):
def __init__(self):
self.compute_api = compute.API()
super(ServerExternalEventsController, self).__init__()
@extensions.expected_errors((400, 403, 404))
@wsgi.response(200)
@validation.schema(server_external_events.create)
def create(self, req, body):
"""Creates a new instance event."""
context = req.environ['nova.context']
authorize(context, action='create')
response_events = []
accepted_events = []
accepted_instances = set()
instances = {}
result = 200
body_events = body['events']
for _event in body_events:
client_event = dict(_event)
event = objects.InstanceExternalEvent(context)
event.instance_uuid = client_event.pop('server_uuid')
event.name = client_event.pop('name')
event.status = client_event.pop('status', 'completed')
event.tag = client_event.pop('tag', None)
instance = instances.get(event.instance_uuid)
if not instance:
try:
instance = objects.Instance.get_by_uuid(
context, event.instance_uuid)
instances[event.instance_uuid] = instance
except exception.InstanceNotFound:
LOG.debug('Dropping event %(name)s:%(tag)s for unknown '
'instance %(instance_uuid)s',
{'name': event.name, 'tag': event.tag,
'instance_uuid': event.instance_uuid})
_event['status'] = 'failed'
_event['code'] = 404
result = 207
# NOTE: before accepting the event, make sure the instance
# for which the event is sent is assigned to a host; otherwise
# it will not be possible to dispatch the event
if instance:
if instance.host:
accepted_events.append(event)
accepted_instances.add(instance)
LOG.info(_LI('Creating event %(name)s:%(tag)s for '
'instance %(instance_uuid)s'),
{'name': event.name, 'tag': event.tag,
'instance_uuid': event.instance_uuid})
# NOTE: as the event is processed asynchronously verify
# whether 202 is a more suitable response code than 200
_event['status'] = 'completed'
_event['code'] = 200
else:
LOG.debug("Unable to find a host for instance "
"%(instance)s. Dropping event %(event)s",
{'instance': event.instance_uuid,
'event': event.name})
_event['status'] = 'failed'
_event['code'] = 422
result = 207
response_events.append(_event)
if accepted_events:
self.compute_api.external_instance_event(
context, accepted_instances, accepted_events)
else:
msg = _('No instances found for any event')
raise webob.exc.HTTPNotFound(explanation=msg)
# FIXME(cyeoh): This needs some infrastructure support so that
# we have a general way to do this
robj = wsgi.ResponseObject({'events': response_events})
robj._code = result
return robj
class ServerExternalEvents(extensions.V21APIExtensionBase):
"""Server External Event Triggers."""
name = "ServerExternalEvents"
alias = ALIAS
version = 1
def get_resources(self):
resource = extensions.ResourceExtension(ALIAS,
ServerExternalEventsController())
return [resource]
def get_controller_extensions(self):
return []
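# A hedged request sketch (field names taken from the handler above; the UUID
# and event name are placeholders): POST to /v2.1/os-server-external-events
# with a body such as
#
#     {"events": [{"server_uuid": "3df20a...", "name": "network-vif-plugged",
#                  "status": "completed", "tag": "tap0"}]}
#
# Each event is echoed back with a per-event "code" (200, 404 for an unknown
# instance, 422 for an instance without a host), and the overall result is
# 200, or 207 when any event failed.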
|
StarcoderdataPython
|
3230714
|
<reponame>moibenko/enstore
#!/usr/bin/env python
###############################################################################
#
# $Id$
# Plot Small Files Aggregation Statistics
#
###############################################################################
# system imports
import pg
import os
import sys
import time
import types
# enstore imports
import histogram
import enstore_plotter_module
import enstore_constants
WEB_SUB_DIRECTORY = enstore_constants.SFA_STATS_PLOTS_SUBDIR
class SFAStatsPlotterModule(enstore_plotter_module.EnstorePlotterModule):
def __init__(self,name,isActive=True):
enstore_plotter_module.EnstorePlotterModule.__init__(self,name,isActive)
def create_histogram(self, name, title, label):
h = histogram.Ntuple(name, title)
h.set_time_axis(True)
h.set_marker_type("impulses")
h.set_line_width(10)
h.set_time_axis_format("%m-%d")
h.set_ylabel(label)
h.set_xlabel("month-day")
return h
def book(self, frame):
cron_dict = frame.get_configuration_client().get("crons", {})
self.html_dir = cron_dict.get("html_dir", "")
self.plot_dir = os.path.join(self.html_dir,
enstore_constants.PLOTS_SUBDIR)
if not os.path.exists(self.plot_dir):
os.makedirs(self.plot_dir)
self.web_dir = os.path.join(self.html_dir, WEB_SUB_DIRECTORY)
if not os.path.exists(self.web_dir):
os.makedirs(self.web_dir)
dbInfo = frame.get_configuration_client().get('database')
        if dbInfo is None:
print "No database info"
sys.exit(1)
        #Open connection to the Enstore DB.
try:
# proper default values are supplied by edb.FileDB constructor
self.db = pg.DB(host = dbInfo.get('dbhost', 'localhost'),
dbname= dbInfo.get('dbname', 'enstoredb'),
port = dbInfo.get('dbport', 5432),
user = dbInfo.get('dbuser_reader', 'enstore_reader'))
except:
exc_type, exc_value = sys.exc_info()[:2]
message = str(exc_type)+' '+str(exc_value)+' IS POSTMASTER RUNNING?'
print message
print "CAN NOT ESTABLISH DATABASE CONNECTION ... QUIT!"
sys.exit(1)
self.files_cached_histogram = self.create_histogram(self.name+"_files_cached",
"Files Cached",
"Number of Files Cached")
self.files_purged_histogram = self.create_histogram(self.name+"_files_purged",
"Files Purged",
"Number of Files Purged")
self.files_archived_histogram = self.create_histogram(self.name+"_files_archived",
"Files Archived",
"Number of Files Archived")
def fill(self, frame):
# Files cached and purged histograms
#################################################
hc = self.files_cached_histogram
hp = self.files_purged_histogram
files_cached_purged_query = "select cache_status, count(*), sum(size), cache_mod_time::date from file where cache_status in ('PURGED','CACHED') and bfid!=package_id and cache_mod_time>CURRENT_TIMESTAMP - interval '1 mons' group by cache_mod_time::date, cache_status order by cache_mod_time::date;"
res = self.db.query(files_cached_purged_query).getresult()
cached_data_file = hc.get_data_file()
purged_data_file = hp.get_data_file()
for row in res:
data_file = None
if row[0] == 'CACHED':
h = hc
data_file = cached_data_file
elif row[0] == 'PURGED':
h = hp
data_file = purged_data_file
if data_file:
data_file.write("%d %f %s\n"%(row[1], row[2]/enstore_constants.MB, row[3]))
h.entries += 1 # temporary work around for Ntuple
cached_data_file.close()
purged_data_file.close()
##################################################
# Files archived histogram
#################################################
ha = self.files_archived_histogram
files_archived_query = "select count(bfid) , sum(size), archive_mod_time::date from file where archive_status='ARCHIVED' and bfid!=package_id and archive_mod_time between CURRENT_TIMESTAMP - interval '1 mons' and CURRENT_TIMESTAMP group by archive_mod_time::date order by archive_mod_time::date;"
total_files_archived_query = "select count(*), sum(size) from file where archive_status='ARCHIVED' and bfid!=package_id;"
res = self.db.query(files_archived_query).getresult()
data_file = ha.get_data_file()
for row in res:
data_file.write("%d %f %s\n"%(row[0], row[1]/enstore_constants.MB, row[2]))
ha.entries += 1 # temporary work around for Ntuple
data_file.close()
res= self.db.query(total_files_archived_query).getresult()
self.total_files_archived, self.total_bytes_archived = res[0]
self.db.close()
def plot(self):
h = self.files_cached_histogram
if h.n_entries() > 0:
# Files cached plot
h.plot("3:1", directory = self.web_dir)
# Bytes cached plot
h.set_title("Bytes Cached")
h.set_name(self.name+ "_bytes_cached")
h.set_ylabel("Bytes Cached (MB)")
h.plot("3:2", directory = self.web_dir)
h = self.files_archived_histogram
if h.n_entries() > 0:
# Files archived plot
h.set_title("%s (Total Files=%s)"%(h.get_title(),self.total_files_archived))
h.plot("3:1", directory = self.web_dir)
# Bytes archived plot
h.set_title("Bytes Archived (Total Bytes=%.2f GB)"%(self.total_bytes_archived/enstore_constants.GB,))
h.set_name(self.name+ "_bytes_archived")
h.set_ylabel("Bytes Archived (MB)")
h.plot("3:2", directory = self.web_dir)
h = self.files_purged_histogram
if h.n_entries() > 0:
# Files purged plot
h.plot("3:1", directory = self.web_dir)
# Bytes archived plot
h.set_title("Bytes Purged")
h.set_name(self.name+ "_bytes_purged")
h.set_ylabel("Bytes Purged (MB)")
h.plot("3:2", directory = self.web_dir)
class SFATarRatesPlotterModule(enstore_plotter_module.EnstorePlotterModule):
def __init__(self, name, isActive=True, date=None, data_file=None, grep_pattern=None, tmp_file=None):
"""
@param name - plot name
@param date - get data for this date if specified
@param data_file - file where data is kept/saved
@param grep_pattern - pattern to grep in enstore log files
@param tmp_file - file to temporary save grep results
"""
enstore_plotter_module.EnstorePlotterModule.__init__(self,name,isActive)
self.name = name
self.rate_histograms = {}
self.rate_ntuples = {}
self.date = date
self.data_file = data_file
self.data_file_name = os.path.basename(self.data_file)
self.pattern = grep_pattern
self.tmp_file = tmp_file
def book(self, frame):
cron_dict = frame.get_configuration_client().get("crons", {})
self.html_dir = cron_dict.get("html_dir", "")
self.plot_dir = os.path.join(self.html_dir,
enstore_constants.PLOTS_SUBDIR)
if not os.path.exists(self.plot_dir):
os.makedirs(self.plot_dir)
self.web_dir = os.path.join(self.html_dir, WEB_SUB_DIRECTORY)
if not os.path.exists(self.web_dir):
os.makedirs(self.web_dir)
self.csc = frame.get_configuration_client()
log_server_info = self.csc.get('log_server')
        if log_server_info is None:
            sys.exit("No log server info")
self.log_file_path = log_server_info.get('log_file_path')
def _get_migrators(self):
migrators = self.csc.get_migrators()
for migrator in migrators:
# get log names
migrator_configuration = self.csc.get(migrator, None)
if migrator_configuration:
self.log_names.append(migrator_configuration['logname'])
def fill(self, frame):
self.log_names = []
self._get_migrators()
# enstore log file name
if not self.date:
tm = time.localtime() # get the local time
lf_name = 'LOG-%04d-%02d-%02d' % (tm.tm_year, tm.tm_mon, tm.tm_mday)
else:
lf_name = 'LOG-%s' % (self.date)
path = os.path.join(self.log_file_path, lf_name)
if self.data_file:
# try to append data for the previous day
yesterday = time.time() - 24*60*60
date = time.strftime("%Y-%m-%d", time.localtime(yesterday))
log_file_name = "-".join(("LOG", date))
log_file_path = os.path.join(self.log_file_path, log_file_name)
            # check if there already are entries for this date in self.data_file
c = "grep %s %s > /dev/null"%(log_file_name, self.data_file)
rc = os.system(c)
if rc != 0:
# not found
cmd = "grep -H '%s' %s >> %s"%(self.pattern, log_file_path, self.data_file)
os.system(cmd)
cmd = "grep -H '%s' %s | sed -e 's/.*LOG-//' | sed -e 's/:/ /' | awk '{print $1,$2,$7,$13,$16}' > %s"% \
(self.pattern, self.data_file, self.tmp_file)
else:
cmd = None
else:
# create a temporary file from log file
cmd = "grep -H '%s' %s | sed -e 's/.*LOG-//' | sed -e 's/:/ /' | awk '{print $1,$2,$7,$13,$16}' > %s"% \
(self.pattern, path, self.tmp_file)
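        # Either pipeline leaves one record per line in the tmp file:
        #   <date> <time> <field7> <field13> <rate>
        # (awk fields $1,$2,$7,$13,$16 of the grep output); the parsing loop
        # below reads a[0], a[1] and a[4] from these records.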
if cmd:
os.system(cmd)
for log_name in self.log_names:
#cmd = "fgrep %s /tmp/tar_rates > /tmp/%s_tar_rates"%(log_name, log_name)
cmd = "fgrep %s %s > /tmp/%s_%s"%(log_name,
self.tmp_file,
log_name, os.path.basename(self.tmp_file))
os.system(cmd)
# find min / max
cmd = "sort -g -k 5 /tmp/%s_%s | sed -n '1p;$p' | awk '{print $5}'> /tmp/%s_min_max_%s"% \
(log_name, os.path.basename(self.tmp_file), log_name, self.data_file_name,)
os.system(cmd)
try:
mm = open("/tmp/%s_min_max_%s"%(log_name, self.data_file_name), 'r')
            except (OSError, IOError):
continue
            try:
                rate_min = float(mm.readline())
                rate_max = float(mm.readline())
            except ValueError:
                mm.close()
                continue
            mm.close()
            nbins = int(rate_max - rate_min)
            if nbins == 0:
                rate_max = rate_min + 1
                nbins = 1
            self.rate_histograms[log_name] = histogram.Histogram1D(log_name+"_%s_rates"%(self.data_file_name,),
                                                                   "%s %s"%(log_name, self.name), nbins, rate_min, rate_max)
self.rate_histograms[log_name].set_marker_type("impulses")
self.rate_histograms[log_name].set_line_width(10)
self.rate_histograms[log_name].set_xlabel("Rates [MB/s]")
self.rate_histograms[log_name].set_ylabel("Entries")
self.rate_ntuples[log_name] = histogram.Ntuple(log_name+"_%s_rate_vs_date"%(self.data_file_name,),
"%s %s"%(log_name,self.name))
self.rate_ntuples[log_name].set_time_axis()
self.rate_ntuples[log_name].set_time_axis_format("%y-%m-%d")
df= self.rate_ntuples[log_name].get_data_file()
self.rate_ntuples[log_name].set_opt_stat()
self.rate_ntuples[log_name].set_ylabel("Rates [MB/s]")
self.rate_ntuples[log_name].set_xlabel("date")
data_file = self.rate_histograms[log_name].get_data_file()
in_f = open("/tmp/"+log_name+"_%s_rates"%(self.data_file_name,), 'r')
ln = in_f.readline()
while ln:
a = ln.split(' ')
self.rate_histograms[log_name].fill(float(a[4]))
df.write("%s %s %f \n"%(a[0], a[1], float(a[4])))
self.rate_ntuples[log_name].entries += 1
ln = in_f.readline()
            in_f.close()
            data_file.close()
def plot(self):
for log_name in self.log_names:
try:
self.rate_histograms[log_name].plot(directory = self.web_dir)
self.rate_ntuples[log_name].get_data_file().flush()
self.rate_ntuples[log_name].get_data_file().close()
self.rate_ntuples[log_name].plot("1:3",directory = self.web_dir)
except:
pass
# usage3.py
# imports
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
from polire import CustomInterpolator
import xgboost
from sklearn.ensemble import RandomForestRegressor
from sklearn.linear_model import LinearRegression
from sklearn.neighbors import KNeighborsRegressor
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern
# sample data
X = [[0, 0], [0, 3], [3, 0], [3, 3]]
y = [0, 1.5, 1.5, 3]
X = np.array(X)
y = np.array(y)
for r in [
CustomInterpolator(xgboost.XGBRegressor()),
CustomInterpolator(RandomForestRegressor()),
    # note: LinearRegression's `normalize` kwarg was deprecated in
    # scikit-learn 1.0 and removed in 1.2; use a scaling pipeline there
    CustomInterpolator(LinearRegression(normalize=True)),
CustomInterpolator(KNeighborsRegressor(n_neighbors=3, weights="distance")),
CustomInterpolator(GaussianProcessRegressor(
normalize_y=True, kernel=Matern()))
]:
r.fit(X, y)
Z = r.predict_grid((0, 3), (0, 3)).reshape(100, 100)
sns.heatmap(Z)
plt.title(r)
plt.show()
plt.close()
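# Point queries (sketch): assuming polire's interpolators follow the
# scikit-learn predict() convention used by fit() above, a fitted
# interpolator can also be evaluated at arbitrary coordinates, e.g.:
#   r.fit(X, y)
#   print(r.predict(np.array([[1.5, 1.5]])))  # value at the square's centre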
# classifier.py
def classifier(diseases, symptoms):
    """Pick the most likely disease by counting symptom matches, breaking
    ties with the auxiliary scores (pd, loc, cli) carried by each disease."""
    if len(symptoms) == 0:
        print("Empty symptoms list. Try again.")
        return []
    if len(diseases) == 0:
        print("No matching disease found")
        return []
    max_match_count = 0
    min_match_count = 500  # effectively infinity for symptom match counts
for disease in diseases:
symp = disease.get_symp()
match_count = 0
unmatch_count = 0
for item in symp:
for symptom in symptoms:
                if symptom == item:
                    match_count = match_count + 1
                else:
                    unmatch_count = unmatch_count + 1
if match_count > max_match_count:
max_match_count = match_count
if match_count < min_match_count:
min_match_count = match_count
    # one bucket per possible match count, indexed by (max_match_count - match_count)
    disease_match_bucket = [[] for _ in range(max_match_count - min_match_count + 1)]
for disease in diseases:
symp = disease.get_symp()
match_count = 0
unmatch_count = 0
for item in symp:
for symptom in symptoms:
                if symptom == item:
                    match_count = match_count + 1
                else:
                    unmatch_count = unmatch_count + 1
disease_match_bucket[max_match_count-match_count].append(disease)
#print(len(disease_match_bucket))
most_pot_diseases = disease_match_bucket[0]
maybe_pot_diseases = []
if(len(disease_match_bucket) > 1):
maybe_pot_diseases = disease_match_bucket[1]
#print(disease_match_bucket)
#print(len(disease_match_bucket))
score = []
i = 0
#print(most_pot_diseases)
#print(maybe_pot_diseases)
for most_pot_disease in most_pot_diseases:
pd = most_pot_disease.get_pd()
loc = most_pot_disease.get_loc()
cli = most_pot_disease.get_cli()
score.append(max_match_count*1200 + pd * 0.3 + loc * 0.35 + cli * 0.35)
print("score " + str(max_match_count*1200 + pd * 0.3 + loc * 0.35 + cli * 0.35) + " for " + most_pot_disease.get_name())
i = i+1
for maybe_pot_disease in maybe_pot_diseases:
pd = maybe_pot_disease.get_pd()
loc = maybe_pot_disease.get_loc()
cli = maybe_pot_disease.get_cli()
score.append((max_match_count - 1) * 1000 + pd * 0.3 + loc * 0.35 + cli * 0.35)
print("score " + str(
(max_match_count - 1) * 1000 + pd * 0.3 + loc * 0.35 + cli * 0.35) + " for " + maybe_pot_disease.get_name())
i = i + 1
    max_score = 0
    max_score_i = 0
    for i, x in enumerate(score):
        if x > max_score:
            max_score = x
            max_score_i = i
    if max_score_i < len(most_pot_diseases):
#print("The most likely disease is " + most_pot_diseases[max_score_i].get_name())
return most_pot_diseases[max_score_i].get_name()
else:
#print("The most likely disease is " + maybe_pot_diseases[max_score_i-len(most_pot_diseases)].get_name())
return maybe_pot_diseases[max_score_i-len(most_pot_diseases)].get_name()
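# Usage sketch (hypothetical stand-in for the Disease objects assumed above,
# which must expose get_symp/get_pd/get_loc/get_cli/get_name):
#
#   class Disease:
#       def __init__(self, name, symp, pd, loc, cli):
#           self._name, self._symp = name, symp
#           self._pd, self._loc, self._cli = pd, loc, cli
#       def get_name(self): return self._name
#       def get_symp(self): return self._symp
#       def get_pd(self): return self._pd
#       def get_loc(self): return self._loc
#       def get_cli(self): return self._cli
#
#   flu = Disease("flu", ["fever", "cough"], 80, 50, 50)
#   cold = Disease("cold", ["cough"], 90, 50, 50)
#   print(classifier([flu, cold], ["fever", "cough"]))  # -> "flu"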
n = int(input())
# I can go from a to every number in to[a]
to = [list() for i in range(n+1)]
distance = [0 for i in range(n+1)]
for i in range(1, n+1):
a = list(map(int, input().split()))
for j in range(1, a[0]+1):
to[i].append(a[j])
# BFS from node 1; distance[] doubles as a visited marker (0 = unvisited)
queue = list()
queue.append(1)
distance[1] = 1
shortest = 999999999  # effectively infinity
while queue:
    current = queue.pop(0)
    if not to[current]:
        # terminal node (no outgoing edges): candidate shortest path length
        shortest = min(shortest, distance[current])
for nx in to[current]:
if not distance[nx]:
queue.append(nx)
distance[nx] = distance[current]+1
ok = True
for i in range(1, n+1):
if not distance[i]:
ok = False
print('Y' if ok else 'N')
print(shortest)
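# Example (sketch): for n = 3 with input lines "1 2", "1 3", "0" the graph is
# 1 -> 2 -> 3 with node 3 terminal; every node gets a distance, so the program
# prints "Y" and then 3 (distances are counted starting from 1 at the root).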
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 11 11:32:27 2018
@author: gregz
"""
import glob
import os.path as op
import numpy as np
import sys
from astropy.io import fits
from distutils.dir_util import mkpath
from input_utils import setup_parser, set_daterange, setup_logging
from utils import biweight_location
from scipy.interpolate import interp1d
def write_fits(hdu, name):
    # newer astropy versions use overwrite=; older ones use clobber=
    try:
        hdu.writeto(name, overwrite=True)
    except TypeError:
        hdu.writeto(name, clobber=True)
def build_filenames(date, args):
'''
Build directory structure and search for unique observations, and return
a single file for each observation to examine the header.
'''
basedir = op.join(args.rootdir, date, args.instrument,
args.instrument + '0000*', 'exp*', args.instrument)
filenames = sorted(glob.glob(op.join(basedir, 'm*_%s_LL.fits' %
args.triplet)))
dirnames = [op.dirname(fn) for fn in filenames]
unique_dirnames, ind = np.unique(dirnames, return_index=True)
return list(unique_dirnames)
def make_avg_spec(wave, spec, binsize=35, knots=None):
''' Make Average spectrum with biweight binning '''
sel = spec > 0.0
wave = wave[sel] * 1.
spec = spec[sel] * 1.
ind = np.argsort(wave.ravel())
if wave.ndim == 1:
N = len(wave)
else:
N = wave.shape[0] * wave.shape[1]
    wchunks = np.array_split(wave.ravel()[ind],
                             N // binsize)
    schunks = np.array_split(spec.ravel()[ind],
                             N // binsize)
nwave = np.array([np.mean(chunk) for chunk in wchunks])
nspec = np.array([biweight_location(chunk) for chunk in schunks])
return nwave, nspec
def safe_division(num, denom, eps=1e-8, fillval=0.0):
good = np.isfinite(denom) * (np.abs(denom) > eps)
div = num * 0.
if num.ndim == denom.ndim:
div[good] = num[good] / denom[good]
div[~good] = fillval
else:
div[:, good] = num[:, good] / denom[good]
div[:, ~good] = fillval
return div
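# Quick sanity check for safe_division (illustrative values only):
#   safe_division(np.array([1., 2., 3.]), np.array([2., 0., np.nan]))
#   -> array([0.5, 0. , 0. ])   # zero / non-finite denominators become fillval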
def sky_subtract(wave, spec, ftf):
newspec = safe_division(spec, ftf)
nwave, nspec = make_avg_spec(np.array(wave, dtype='float64'),
np.array(newspec, dtype='float64'))
I = interp1d(nwave, nspec, fill_value='extrapolate', bounds_error=False,
kind='quadratic')
skysub = spec * 0.
for i in np.arange(wave.shape[0]):
skysub[i] = spec[i] - I(wave[i]) * ftf[i]
return skysub
def get_image(fn):
F = fits.open(fn)
imagetype = F[0].header['IMAGETYP'].replace(' ', '')
if imagetype == 'sci':
S = F['spectrum'].data * 1.
W = F['wavelength'].data * 1.
# xarray = np.arange(S.shape[1])
chunks = np.array_split(S, 20, axis=1)
# xchunks = np.array([np.mean(x) for x in np.array_split(xarray, 20)])
avg = np.array([biweight_location(chunk, axis=(1,))
for chunk in chunks])
# I = interp1d(xchunks, avg.swapaxes(0, 1), kind='quadratic',
# bounds_error=False, fill_value='extrapolate')
# norm = I(xarray)
normavg = biweight_location(avg, axis=(1,))
divnorm = avg / normavg[:, np.newaxis]
netnorm = biweight_location(normavg)
norm = biweight_location(divnorm, axis=(0,)) * netnorm
return S / norm[:, np.newaxis], W, norm, S
else:
return None, None, None, None
def build_residual_frame(dir_list, amp, args, dateb, datee):
    # Collect the normalized science frames, their norms, and the raw frames
    sci_list = []
    org_list = []
    norm_list = []
for directory in dir_list:
fn = op.join(directory, 'multi_%s_%s.fits' % (args.triplet, amp))
S, W, N, O = get_image(fn)
if S is not None:
sci_list.append(S)
norm_list.append(N)
org_list.append(O)
if not len(sci_list):
args.log.warning('No reduced frames found for date range given')
return None
args.log.info('Number of sci frames from %s-%s for %s: %i' %
(dateb, datee, amp, len(sci_list)))
small_array = np.array(norm_list)
orig_array = np.array(org_list)
del org_list
big_array = np.array(sci_list)
del sci_list
func = biweight_location
mastersci = func(big_array, axis=(0,))
# Make sky model from average sky
nwave, nspec = make_avg_spec(np.array(W, dtype='float64'),
np.array(mastersci, dtype='float64'))
I = interp1d(nwave, nspec, fill_value='extrapolate', bounds_error=False,
kind='quadratic')
ftf = W * 0.
for fib in np.arange(W.shape[0]):
ftf[fib] = (mastersci[fib] - I(W[fib])) / I(W[fib])
# Get average norm
X = biweight_location(small_array, axis=(1,))[:, np.newaxis]
norm_of_norms = biweight_location(small_array / X, axis=(0,))
X = biweight_location(small_array / norm_of_norms[np.newaxis, :],
axis=(0,))
norm_of_norms = biweight_location(small_array / X, axis=(0,))
master_fiber_to_fiber = ftf + norm_of_norms[:, np.newaxis]
master_fiber_to_fiber[master_fiber_to_fiber < 0.2] = 0.0
skysub_list = []
for i, orig in enumerate(orig_array):
args.log.info('Making Sky Subtracted frame %i' % (i + 1))
skysub_list.append(sky_subtract(W, orig, master_fiber_to_fiber))
sky_array = np.array(skysub_list)
a, b = master_fiber_to_fiber.shape
hdu = fits.PrimaryHDU(np.array(master_fiber_to_fiber, dtype='float32'))
hdu1 = fits.ImageHDU(np.array(W, dtype='float32'))
hdu2 = fits.ImageHDU(np.array(small_array, dtype='float32'))
hdu3 = fits.ImageHDU(np.array(sky_array, dtype='float32'))
mkpath(op.join(args.folder, dateb))
args.log.info('Writing master_residual_%s_%s.fits' % (args.triplet, amp))
hdu.header['OBJECT'] = '%s-%s' % (dateb, datee)
hdu.header['EXTNAME'] = 'fiber_to_fiber'
hdu1.header['EXTNAME'] = 'wavelength'
hdu2.header['EXTNAME'] = 'normalization'
hdu3.header['EXTNAME'] = 'skysub'
hdulist = fits.HDUList([hdu, hdu1, hdu2, hdu3])
write_fits(hdulist, op.join(args.folder, dateb,
'master_residual_%s_%s.fits' % (args.triplet, amp)))
parser = setup_parser()
parser.add_argument("-f", "--folder",
help='''Output folder''',
type=str, default='residuals')
parser.add_argument("-m", "--maxnum",
help='''Maximum number of bias frames in masterbias''',
type=int, default=100)
parser.add_argument("-tr", "--triplet",
help='''Triplet of the specid, ifuslot, ifuid''',
type=str, default=None)
amps = ['LL', 'LU', 'RL', 'RU']
args = parser.parse_args(args=None)
args.log = setup_logging(logname='build_master_bias')
args = set_daterange(args)
if args.triplet is None:
args.log.error('Please set the "--triplet" argument.')
sys.exit(1)
filenames = []
for date in args.daterange:
date = '%04d%02d%02d' % (date.year, date.month, date.day)
filenames = filenames + build_filenames(date, args)
date_begin = args.daterange[0]
date_end = args.daterange[-1]
date_begin = '%04d%02d%02d' % (date_begin.year, date_begin.month,
date_begin.day)
date_end = '%04d%02d%02d' % (date_end.year, date_end.month, date_end.day)
args.log.info('Length of filenames found for %s-%s: %i' % (date_begin,
date_end,
len(filenames)))
if (len(filenames) % args.maxnum) == 0:
    nbins = len(filenames) // args.maxnum
else:
    nbins = len(filenames) // args.maxnum + 1
if nbins == 0:
args.log.warning('No files found for %s on %s-%s' % (args.triplet,
date_begin,
date_end))
sys.exit(1)
chunks = np.array_split(filenames, nbins)
for chunk in chunks:
bases = [chunk[0], chunk[-1]]
dates = []
for base in bases:
base0 = str(base)
for i in np.arange(4):
base0 = op.dirname(base0)
dates.append(op.basename(base0))
for amp in amps:
build_residual_frame(chunk, amp, args, dates[0], dates[1])
## Automatically adapted for numpy.oldnumeric Jul 23, 2007 by
#########################################################################
#
# Date: Dec 2004 Authors: <NAME>
#
# <EMAIL>
#
# The Scripps Research Institute (TSRI)
# Molecular Graphics Lab
# La Jolla, CA 92037, USA
#
# Copyright: <NAME>, and TSRI
#
#########################################################################
# latest working version of matplotlib is 0.87.7 using numpy 1.0.3
# latest working version of matplotlib is 0.87.6 using Numeric 23.8
# (matplotlib 0.87.7 has a small bug using Numeric 23.8)
# there are multiple bugs in matplotlib when using Numeric 24.2
#TODO:
# - add and verify controls such as size, labels, legends etc
# - maybe encapsulate all these common options into a single node
# - alpha values per axis
# - use axes rather than subplot
#
try:
import matplotlib
except:
import warnings
import sys
if sys.platform == 'linux2':
warnings.warn("""to use matplotlib, you need first to install openssl
the mgltools 32 bit linux binaries need openssl version 0.9.7
the mgltools 64 bit linux binaries need openssl version 0.9.8
you can have both openssl versions (0.9.7 and 0.9.8) installed on your computer.
""", stacklevel=2)
import Tkinter
import types
import weakref
import Pmw,math,os, sys
from numpy.oldnumeric import array
from matplotlib.colors import cnames
from matplotlib.lines import Line2D,lineStyles,TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN
#from matplotlib.transforms import Value
from matplotlib import rcParams
#mplversion = int(matplotlib.__version__.split(".")[2])
mplversion = map(int, matplotlib.__version__.split("."))
#print "mplversion:", mplversion
"""
This module implements Vision nodes exposing matplotlib functionality.
MPLBaseNE:
---------
The class provides a base class for all nodes exposing matplotlib functionality
its purpose is to to create the attributes described below and implement
methods shared by all nodes.
Attributes:
self.figure = None: # node's figure object
# This attribute always points to the matplotlib Figure object which has
# a FigureCanvasTkAgg object in its .canvas attribute
# self.canvas FigureCanvasTkAgg
self.axes = None
# This attribute points to the matplotlib Axes instance used by this node
self.axes.figure # figure in which the axes is currently drawn
Methods:
def createFigure(self, master=None, width=None, height=None, dpi=None,
facecolor=None, edgecolor=None, frameon=None,
packOpts=None, toolbar=True):
# This method is used by all nodes if they need to create a Figure object
# from the matplotlib library and a FigureCanvasTkAgg object for this
# figure.
def setFigure(self, figure):
    # This method places the node's axes object into the right Figure
def beforeRemovingFromNetwork(self):
# this method is called when a node is deleted from a network. Its job is
# to delete FigureCanvasTkAgg and Axes when appropriate.
MPLFigure:
-----------
The MPLFigure node allows the creation of a plotting area to which one
or more Axes can be added, where an Axes is a 2D graphical representation
of a data set (i.e. a 2D plot). A 'master' can be specified to embed the figure
in other panels. This node provides control over parameters that apply to the
MPLFigure such as width, height, dpi, etc.
Plotting Node:
-------------
Plotting nodes such as Histogram, Plot, Scatter, Pie, etc. take datasets and
render them as 2D plots. They always own the axes.
If the data to be rendered is the only input to these nodes, they will create
a default Figure, add a default Plot2D to this figure, and draw the data in
this default 2D plot.
"""
from mglutil.util.callback import CallBackFunction
from NetworkEditor.widgets import TkPortWidget, PortWidget
from mglutil.gui.BasicWidgets.Tk.thumbwheel import ThumbWheel
from Vision import UserLibBuild
from NetworkEditor.items import NetworkNode
# make sure Tk is used as a backend
if not 'matplotlib.backends' in sys.modules:
matplotlib.use('TkAgg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.backends.backend_tkagg import NavigationToolbar2TkAgg
from matplotlib.figure import Figure as OriginalFigure
from matplotlib.axes import Axes, Subplot #, PolarSubplot, PolarAxes
from matplotlib.pylab import *
from Vision.colours import get_colours
from Vision.posnegFill import posNegFill
import numpy
from matplotlib.artist import setp
##
## QUESTIONS
## Does this have to be Tk specific or could I use FigureCanvasAgg
## !FigureCanvasAgg is OK
## Should I use FigureManagerBase rather than FigureCanvasAgg ?
## ! FigureCanvas Agg is OK
## how to destroy a figure ?
## ! seems to work
## figure should have a set_dpi method
## ! use figure.dpi.set() for now but might become API later
## how to remove the tool bar ?
## ! John added a note about this
## What options in a Figure can be set without destroying the Figure
## and which ones are constructor only options ?
## ! nothing should require rebuilding a Figure
## Why does add_axes return the axis that already exists if the values for
## building the Axes object are the same ? either this has to change or
## adding an instance of an Axes should be fixed (prefered)
## !John made a note to add an argument to matplotlib
## Pie seems to have a problem when shadow is on !
## !Find out the problem, because shadows are on even when check button is off
## !force aspect ratio to square
## Plot lineStyle, color, linewidth etc should be set by a set node that uses
## introspection on the patch? to find out what can be set ??
## !Use ObjectInspector
## why is figimage not an axes method ?
## !use imshow
from matplotlib.cbook import iterable
try:
from matplotlib.dates import DayLocator, HourLocator, \
drange, date2num
from pytz import timezone
except:
pass
try:
from pytz import common_timezones
except:
common_timezones=[]
#global variables
locations={'best' : 0,
'upper right' : 1,
'upper left' : 2,
'lower left' : 3,
'lower right' : 4,
'right' : 5,
'center left' : 6,
'center right' : 7,
'lower center' : 8,
'upper center' : 9,
'center' : 10,}
colors={
'blue' : 'b',
'green' : 'g',
'red' : 'r',
'cyan' : 'c',
'magenta' :'m',
'yellow' :'y',
'black': 'k',
'white' : 'w',
}
markers= {
'square' : 's',
'circle' : 'o',
'triangle up' : '^',
'triangle right' : '>',
'triangle down' : 'v',
'triangle left' : '<',
'diamond' : 'd',
'pentagram' : 'p',
'hexagon' : 'h',
'octagon' : '8',
}
cmaps=['autumn','bone', 'cool','copper','flag','gray','hot','hsv','jet','pink', 'prism', 'spring', 'summer', 'winter']
def get_styles():
styles={}
for ls in Line2D._lineStyles.keys():
styles[Line2D._lineStyles[ls][6:]]=ls
for ls in Line2D._markers.keys():
styles[Line2D._markers[ls][6:]]=ls
#these styles are not recognized
if styles.has_key('steps'):
del styles['steps']
for s in styles.keys():
if s =="nothing":
del styles['nothing']
if s[:4]=='tick':
del styles[s]
return styles
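# For reference (sketch): get_styles() maps the human-readable names from
# Line2D back to their format characters, e.g. {'solid': '-', 'dashed': '--',
# 'circle': 'o', ...}, with 'steps', 'nothing' and tick markers filtered out.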
class Figure(OriginalFigure):
# sub class Figure to override add_axes
def add_axes(self, *args, **kwargs):
"""hack to circumvent the issue of not adding an axes if the constructor
params have already been seen"""
if kwargs.has_key('force'):
force = kwargs['force']
del kwargs['force']
else:
force = False
        if not len(args):
            return
        if iterable(args[0]):
            key = tuple(args[0]), tuple(kwargs.items())
        else:
            key = args[0], tuple(kwargs.items())
        if not force and self._seen.has_key(key):
            ax = self._seen[key]
            self.sca(ax)
            return ax
if isinstance(args[0], Axes):
a = args[0]
# this is too early, if done here bbox is 0->1
#a.set_figure(self)
a.figure = self
else:
rect = args[0]
            # pop 'polar' so it is not forwarded to the Axes constructor
            ispolar = kwargs.pop('polar', False)
if ispolar:
a = PolarAxes(self, rect, **kwargs)
else:
a = Axes(self, rect, **kwargs)
self.axes.append(a)
self._axstack.push(a)
self.sca(a)
self._seen[key] = a
return a
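    # Example (sketch): force=True bypasses the _seen cache so the same rect
    # yields a fresh Axes instead of the cached one:
    #   fig = Figure()
    #   a1 = fig.add_axes([0.1, 0.1, 0.8, 0.8])
    #   a2 = fig.add_axes([0.1, 0.1, 0.8, 0.8], force=True)  # a2 is not a1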
class MPLBaseNE(NetworkNode):
"""Base class for node wrapping the maptlotlib objects
"""
def __init__(self, name='MPLBase', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
self.figure = None # matplotlib Figure instance belonging to this node
self.axes = None # matplotlib Axes instance
# this is true for Figures who create the Tk Toplevel
# or plotting nodes that have no figure parent node
# it is used to decide when the Toplevel should be destroyed
self.ownsMaster = False
def setDrawArea(self, kw):
ax = self.axes
#ax.clear()
if kw.has_key('left'):
rect = [kw['left'], kw['bottom'], kw['width'], kw['height']]
ax.set_position(rect)
if kw.has_key('frameon'):
ax.set_frame_on(kw['frameon'])
if kw.has_key("title"):
if type(kw['title'])==types.StringType:
ax.set_title(kw['title'])
else:
print 'Set title as Object'
if kw.has_key("xlabel"):
ax.set_xlabel(kw['xlabel'])
if kw.has_key("ylabel"):
ax.set_ylabel(kw['ylabel'])
if kw.has_key("xlimit"):
if kw['xlimit']!='':
ax.set_xlim(eval(kw['xlimit']))
if kw.has_key("ylimit"):
if kw['ylimit']!='':
ax.set_ylim(eval(kw['ylimit']))
if kw.has_key("xticklabels"):
if not kw['xticklabels']:
ax.set_xticklabels([])
if kw.has_key("yticklabels"):
if not kw['yticklabels']:
ax.set_yticklabels([])
if kw.has_key("axison"):
if kw['axison']:
ax.set_axis_on()
else:
ax.set_axis_off()
if kw.has_key("autoscaleon"):
if kw['autoscaleon']:
ax.set_autoscale_on(True)
else:
ax.set_autoscale_on(False)
if kw.has_key("adjustable"):
ax.set_adjustable(kw['adjustable'])
if kw.has_key("aspect"):
ax.set_aspect(kw['aspect'])
if kw.has_key("anchor"):
ax.set_anchor(kw['anchor'])
if kw.has_key("axisbelow"):
if kw['axisbelow']==1:
val=True
else:
val=False
rcParams['axes.axisbelow']=val
ax.set_axisbelow(val)
#grid properties
if kw.has_key("gridOn"):
if kw['gridOn']==1:
ax._gridOn=True
val=True
if kw.has_key('gridcolor'):
gcolor=kw['gridcolor']
else:
gcolor=rcParams['grid.color']
if kw.has_key('gridlinestyle'):
glinestyle=kw['gridlinestyle']
else:
glinestyle=rcParams['grid.linestyle']
if kw.has_key('gridlinewidth'):
glinewidth=kw['gridlinewidth']
else:
glinewidth=rcParams['grid.linewidth']
if kw.has_key('whichgrid'):
whichgrid=kw['whichgrid']
else:
whichgrid='major'
ax.grid(val,color=gcolor, linestyle=glinestyle, linewidth=glinewidth,which=whichgrid)
else:
val=False
ax.grid(val)
if kw.has_key("facecolor"):
ax.set_axis_bgcolor(kw['facecolor'])
if kw.has_key("edgecolor"):
if hasattr(ax, "axesFrame"):
ax.axesFrame.set_edgecolor(kw['edgecolor'])
elif hasattr(ax, "axesPatch"):
ax.axesPatch.set_edgecolor(kw['edgecolor'])
# if kw.has_key('zoomx'):
# #Zoom in on the x xaxis numsteps (plus for zoom in, minus for zoom out)
# ax.zoomx(kw['zoomx'])
#
# if kw.has_key('zoomy'):
# #Zoom in on the x xaxis numsteps (plus for zoom in, minus for zoom out)
# ax.zoomy(kw['zoomy'])
if kw.has_key("xtick.color"):
for i in ax.xaxis.get_ticklabels():
i.set_color(kw['xtick.color'])
if kw.has_key("ytick.color"):
for i in ax.yaxis.get_ticklabels():
i.set_color(kw['ytick.color'])
if kw.has_key('xtick.labelrotation'):
for i in ax.xaxis.get_ticklabels():
i.set_rotation(float(kw['xtick.labelrotation']))
if kw.has_key('ytick.labelrotation'):
for i in ax.yaxis.get_ticklabels():
i.set_rotation(float(kw['ytick.labelrotation']))
if kw.has_key("xtick.labelsize"):
for i in ax.xaxis.get_ticklabels():
i.set_size(float(kw['xtick.labelsize']))
if kw.has_key("ytick.labelsize"):
for i in ax.yaxis.get_ticklabels():
i.set_size(float(kw['ytick.labelsize']))
if kw.has_key("linewidth"):
if hasattr(ax, "axesFrame"):
ax.axesFrame.set_linewidth(float(kw['linewidth']))
elif hasattr(ax, "axesPatch"):
ax.axesPatch.set_linewidth(float(kw['linewidth']))
#marker
if kw.has_key("markeredgewidth"):
for i in ax.get_xticklines():
i.set_markeredgewidth(kw['markeredgewidth'])
for i in ax.get_yticklines():
i.set_markeredgewidth(kw['markeredgewidth'])
if kw.has_key("markeredgecolor"):
for i in ax.get_xticklines():
i.set_markeredgecolor(kw['markeredgecolor'])
for i in ax.get_yticklines():
i.set_markeredgecolor(kw['markeredgecolor'])
if kw.has_key("markerfacecolor"):
for i in ax.get_xticklines():
i.set_markerfacecolor(kw['markerfacecolor'])
for i in ax.get_yticklines():
i.set_markerfacecolor(kw['markerfacecolor'])
#figure_patch properties
if kw.has_key("figpatch_linewidth"):
ax.figure.figurePatch.set_linewidth(kw['figpatch_linewidth'])
if kw.has_key("figpatch_facecolor"):
ax.figure.figurePatch.set_facecolor(kw['figpatch_facecolor'])
if kw.has_key("figpatch_edgecolor"):
ax.figure.figurePatch.set_edgecolor(kw['figpatch_edgecolor'])
if kw.has_key("figpatch_antialiased"):
ax.figure.figurePatch.set_antialiased(kw['figpatch_antialiased'])
#Text properties
if kw.has_key('text'):
for i in kw['text']:
if type(i)==types.DictType:
tlab=i['textlabel']
posx=i['posx']
posy=i['posy']
horizontalalignment=i['horizontalalignment']
verticalalignment=i['verticalalignment']
rotation=i['rotation']
ax.text(x=posx,y=posy,s=tlab,horizontalalignment=horizontalalignment,verticalalignment=verticalalignment,rotation=rotation,transform = ax.transAxes)
if kw.has_key("text.color"):
for t in ax.texts:
t.set_color(kw['text.color'])
if kw.has_key("text.usetex"):
rcParams['text.usetex']=kw['text.usetex']
if kw.has_key("text.dvipnghack"):
rcParams['text.dvipnghack']=kw['text.dvipnghack']
if kw.has_key("text.fontstyle"):
for t in ax.texts:
t.set_fontstyle(kw['text.fontstyle'])
if kw.has_key("text.fontangle"):
for t in ax.texts:
t.set_fontangle(kw['text.fontangle'])
if kw.has_key("text.fontvariant"):
for t in ax.texts:
t.set_fontvariant(kw['text.fontvariant'])
if kw.has_key("text.fontweight"):
for t in ax.texts:
t.set_fontweight(kw['text.fontweight'])
if kw.has_key("text.fontsize"):
for t in ax.texts:
t.set_fontsize(kw['text.fontsize'])
#Font
if kw.has_key("Font.fontfamily"):
for t in ax.texts:
t.set_family(kw['Font.fontfamily'])
if kw.has_key("Font.fontstyle"):
for t in ax.texts:
t.set_fontstyle(kw['Font.fontstyle'])
if kw.has_key("Font.fontangle"):
for t in ax.texts:
t.set_fontangle(kw['Font.fontangle'])
if kw.has_key("Font.fontvariant"):
for t in ax.texts:
t.set_fontvariant(kw['Font.fontvariant'])
if kw.has_key("Font.fontweight"):
for t in ax.texts:
t.set_fontweight(kw['Font.fontweight'])
if kw.has_key("Font.fontsize"):
for t in ax.texts:
t.set_fontsize(kw['Font.fontsize'])
#Legend Properties
        if mplversion[0] == 0 and mplversion[2] <= 3:
if kw.has_key('legendlabel'):
if ',' in kw['legendlabel']:
x=kw['legendlabel'].split(",")
else:
x=(kw['legendlabel'],)
if kw.has_key('legend.isaxes'):
isaxes=kw['legend.isaxes']
else:
isaxes=rcParams['legend.isaxes']
if kw.has_key('legend.numpoints'):
numpoints=kw['legend.numpoints']
else:
numpoints=rcParams['legend.numpoints']
if kw.has_key('legend.pad'):
borderpad=kw['legend.pad']
else:
borderpad=rcParams['legend.pad']
if kw.has_key('legend.markerscale'):
markerscale=kw['legend.markerscale']
else:
markerscale=rcParams['legend.markerscale']
if kw.has_key('legend.labelsep'):
labelspacing=kw['legend.labelsep']
else:
labelspacing=rcParams['legend.labelsep']
if kw.has_key('legend.handlelen'):
handlelength=kw['legend.handlelen']
else:
handlelength=rcParams['legend.handlelen']
if kw.has_key('legend.handletextsep'):
handletextpad=kw['legend.handletextsep']
else:
handletextpad=rcParams['legend.handletextsep']
if kw.has_key('legend.axespad'):
borderaxespad=kw['legend.axespad']
else:
borderaxespad=rcParams['legend.axespad']
if kw.has_key('legend.shadow'):
shadow=kw['legend.shadow']
else:
shadow=rcParams['legend.shadow']
#import pdb;pdb.set_trace()
leg=self.axes.legend(tuple(x),loc=kw['legendlocation'],
#isaxes=isaxes,
numpoints=numpoints,
pad=borderpad,
labelsep=labelspacing,
handlelen=handlelength,
handletextsep=handletextpad,
axespad=borderaxespad,
shadow=shadow,
markerscale=markerscale)
if kw.has_key('legend.fontsize'):
setp(ax.get_legend().get_texts(),fontsize=kw['legend.fontsize'])
elif mplversion[0] > 0:
if kw.has_key('legendlabel'):
if ',' in kw['legendlabel']:
x=kw['legendlabel'].split(",")
else:
x=(kw['legendlabel'],)
if kw.has_key('legend.isaxes'):
isaxes=kw['legend.isaxes']
else:
isaxes=rcParams['legend.isaxes']
if kw.has_key('legend.numpoints'):
numpoints=kw['legend.numpoints']
else:
numpoints=rcParams['legend.numpoints']
if kw.has_key('legend.borderpad'):
borderpad=kw['legend.borderpad']
else:
borderpad=rcParams['legend.borderpad']
if kw.has_key('legend.markerscale'):
markerscale=kw['legend.markerscale']
else:
markerscale=rcParams['legend.markerscale']
if kw.has_key('legend.labelspacing'):
labelspacing=kw['legend.labelspacing']
else:
labelspacing=rcParams['legend.labelspacing']
if kw.has_key('legend.handlelength'):
handlelength=kw['legend.handlelength']
else:
handlelength=rcParams['legend.handlelength']
if kw.has_key('legend.handletextpad'):
handletextpad=kw['legend.handletextpad']
else:
handletextpad=rcParams['legend.handletextpad']
if kw.has_key('legend.borderaxespad'):
borderaxespad=kw['legend.borderaxespad']
else:
borderaxespad=rcParams['legend.borderaxespad']
if kw.has_key('legend.shadow'):
shadow=kw['legend.shadow']
else:
shadow=rcParams['legend.shadow']
#import pdb;pdb.set_trace()
leg=self.axes.legend(tuple(x),loc=kw['legendlocation'],
#isaxes=isaxes,
numpoints=numpoints,
borderpad=borderpad,
labelspacing=labelspacing,
handlelength=handlelength,
handletextpad=handletextpad,
borderaxespad=borderaxespad,
shadow=shadow,
markerscale=markerscale)
if kw.has_key('legend.fontsize'):
setp(ax.get_legend().get_texts(),fontsize=kw['legend.fontsize'])
#Tick Options
if kw.has_key('xtick.major.pad'):
for i in ax.xaxis.majorTicks:
i.set_pad(kw['xtick.major.pad'])
if kw.has_key('xtick.minor.pad'):
for i in ax.xaxis.minorTicks:
i.set_pad(kw['xtick.minor.pad'])
if kw.has_key('ytick.major.pad'):
for i in ax.yaxis.majorTicks:
i.set_pad(kw['ytick.major.pad'])
if kw.has_key('ytick.minor.pad'):
for i in ax.yaxis.minorTicks:
i.set_pad(kw['ytick.minor.pad'])
if kw.has_key('xtick.major.size'):
rcParams['xtick.major.size']=kw['xtick.major.size']
if kw.has_key('xtick.minor.size'):
rcParams['xtick.minor.size']=kw['xtick.minor.size']
if kw.has_key('xtick.direction'):
rcParams['xtick.direction']=kw['xtick.direction']
if kw.has_key('ytick.major.size'):
rcParams['ytick.major.size']=kw['ytick.major.size']
if kw.has_key('ytick.minor.size'):
rcParams['ytick.minor.size']=kw['ytick.minor.size']
if kw.has_key('ytick.direction'):
rcParams['ytick.direction']=kw['ytick.direction']
def beforeRemovingFromNetwork(self):
#print 'remove'
NetworkNode.beforeRemovingFromNetwork(self)
# this happens for drawing nodes with no axes specified
if self.axes:
self.axes.figure.delaxes(self.axes) # feel a little strange !
self.canvas._tkcanvas.master.destroy()
elif self.canvas:
self.canvas._tkcanvas.master.destroy()
def onlyDataChanged(self, data):
"""returns true if only he first port (i.e. data) has new data.
"""
# This can be used to accelerate redraw by only updating the data
# rather than redrawing the whole figure
# see examples/animation_blit_tk.py
        ports = self.inputPorts
        if not ports[0].hasNewValidData():
            return False
        # skip the data port itself; any other port with new data disqualifies
        for p in ports[1:]:
            if p.hasNewValidData():
                return False
        return True
class MPLFigureNE(MPLBaseNE):
"""This node instanciates a Figure object and its FigureCanvasTkAgg object
in its .canvas attribute.
It also provide control over parameters such as width, height, dpi, etc.
Input:
plots - Matplotlib Axes objects
figwidth - width in inches
figheigh - height in inches
dpi - resolution; defaults to rc figure.dpi
facecolor - the background color; defaults to rc figure.facecolor
edgecolor - the border color; defaults to rc figure.edgecolor
master - Defaults to None, creating a topLevel
nbRows - number of rows for subgraph2D
nbColumns - number of columns for subgraph2D
frameon - boolean
hold - boolean
toolbar - boolean (init option only)
packOpts - string representation of packing options
Output:
canvas: MPLFigure Object
Todo:
legend
text
image ?
"""
def afterAddingToNetwork(self):
self.figure = Figure()
master = Tkinter.Toplevel()
master.title(self.name)
self.canvas = FigureCanvasTkAgg(self.figure, master)
self.figure.set_canvas(self.canvas)
packOptsDict = {'side':'top', 'fill':'both', 'expand':1}
self.canvas.get_tk_widget().pack( *(), **packOptsDict )
toolbar = NavigationToolbar2TkAgg(self.canvas, master)
def __init__(self, name='Figure2', **kw):
kw['name'] = name
apply( MPLBaseNE.__init__, (self,), kw )
codeBeforeDisconnect = """def beforeDisconnect(self, c):
node1 = c.port1.node
node2 = c.port2.node
if node2.figure.axes:
node2.figure.delaxes(node1.axes)
if node1.figure.axes:
node1.figure.delaxes(node1.axes)
node1.figure.add_axes(node1.axes)
"""
ip = self.inputPortsDescr
ip.append(datatype='MPLAxes', required=False, name='plots',
singleConnection=False,
beforeDisconnect=codeBeforeDisconnect)
ip.append(datatype='float', required=False, name='width')
ip.append(datatype='float', required=False, name='height')
ip.append(datatype='float', required=False, name='linewidth', defaultValue=1)
ip.append(datatype='int', required=False, name='dpi')
ip.append(datatype='colorRGB', required=False, name='facecolor')
ip.append(datatype='colorRGB', required=False, name='edgecolor')
ip.append(datatype='None', required=False, name='master')
ip.append(datatype='int', required=False, name='nbRows')
ip.append(datatype='int', required=False, name='nbColumns')
ip.append(datatype='boolean', required=False, name='frameon', defaultValue=True)
ip.append(datatype='boolean', required=False, name='hold', defaultValue=False)
ip.append(datatype='boolean', required=False, name='toolbar')
ip.append(datatype='None', required=False, name='packOpts')
op = self.outputPortsDescr
op.append(datatype='MPLFigure', name='figure')
self.widgetDescr['width'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':8.125,
'labelCfg':{'text':'width in inches'} }
self.widgetDescr['height'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':6.125,
'labelCfg':{'text':'height in inches'} }
self.widgetDescr['linewidth'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':2, 'type':'int',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'linewidth'} }
self.widgetDescr['dpi'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':10, 'type':'int',
'wheelPad':2, 'initialValue':80,
'labelCfg':{'text':'DPI'} }
self.widgetDescr['nbRows'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':10, 'type':'int',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'nb. rows'} }
self.widgetDescr['nbColumns'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':10, 'type':'int',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'nb. col'} }
self.widgetDescr['frameon'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'initialValue':1, 'labelCfg':{'text':'frame'} }
self.widgetDescr['hold'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'initialValue':0, 'labelCfg':{'text':'hold'} }
self.widgetDescr['toolbar'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'initialValue':1, 'labelCfg':{'text':'toolbar'} }
self.widgetDescr['packOpts'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'packing Opts.:'},
'initialValue':'{"side":"top", "fill":"both", "expand":1}'}
code = """def doit(self, plots, width, height, linewidth, dpi, facecolor,
edgecolor, master, nbRows, nbColumns, frameon, hold, toolbar, packOpts):
self.figure.clear()
if plots is not None:
for p in plots:
self.figure.add_axes(p)
figure = self.figure
# configure size
if width is not None or height is not None:
defaults = matplotlib.rcParams
if width is None:
width = defaults['figure.figsize'][0]
elif height is None:
height = defaults['figure.figsize'][1]
figure.set_size_inches(width,height)
# configure dpi
if dpi is not None:
figure.set_dpi(dpi)
# configure facecolor
if facecolor is not None:
figure.set_facecolor(facecolor)
    # configure edgecolor
    if edgecolor is not None:
        figure.set_edgecolor(edgecolor)
    # configure frameon
    if frameon is not None:
        figure.set_frameon(frameon)
# not sure linewidth is doing anything here
figure.figurePatch.set_linewidth(linewidth)
figure.hold(hold)
# FIXME for now we store this here but we might want to add this as
# regular attributes to Figure which would be used with subplot
#figure.nbRows = nbRows
#figure.nbColumns = nbColumns
self.canvas.draw()
self.outputData(figure=self.figure)
"""
self.setFunction(code)
class MPLImageNE(MPLBaseNE):
"""This node creates a PIL image
Input:
plots - Matplotlib Axes objects
figwidth - width in inches
    figheight - height in inches
dpi - resolution; defaults to rc figure.dpi
facecolor - the background color; defaults to rc figure.facecolor
edgecolor - the border color; defaults to rc figure.edgecolor
faceAlpha - alpha value of background
edgeAlpha - alpha value of edge
frameon - boolean
hold - boolean
toolbar - boolean (init option only)
packOpts - string representation of packing options
Output:
canvas: MPLFigure Object
Todo:
legend
text
image ?
"""
def __init__(self, name='imageFigure', **kw):
kw['name'] = name
apply( MPLBaseNE.__init__, (self,), kw )
codeBeforeDisconnect = """def beforeDisconnect(self, c):
node1 = c.port1.node
node2 = c.port2.node
if node1.figure.axes:
node1.figure.delaxes(node1.axes)
node1.figure.add_axes(node1.axes)
"""
ip = self.inputPortsDescr
ip.append(datatype='MPLAxes', required=False, name='plots',
singleConnection=False,
beforeDisconnect=codeBeforeDisconnect)
ip.append(datatype='float', required=False, name='width')
ip.append(datatype='float', required=False, name='height')
ip.append(datatype='int', required=False, name='dpi')
ip.append(datatype='colorsRGB', required=False, name='facecolor')
ip.append(datatype='colorsRGB', required=False, name='edgecolor')
ip.append(datatype='float', required=False, name='alphaFace', defaultValue=0.5)
ip.append(datatype='float', required=False, name='alphaEdge', defaultValue=0.5)
ip.append(datatype='boolean', required=False, name='frameon', defaultValue=True)
ip.append(datatype='boolean', required=False, name='hold', defaultValue=False)
op = self.outputPortsDescr
op.append(datatype='image', name='image')
self.widgetDescr['width'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':6.4,
'labelCfg':{'text':'width in inches'} }
self.widgetDescr['height'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':4.8,
'labelCfg':{'text':'height in inches'} }
self.widgetDescr['dpi'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':10, 'type':'int',
'wheelPad':2, 'initialValue':80,
'labelCfg':{'text':'DPI'} }
self.widgetDescr['alphaFace'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':1., 'type':'float',
'wheelPad':2, 'initialValue':0.5, 'min':0.0, 'max':1.0,
'labelCfg':{'text':'alpha Face'} }
self.widgetDescr['alphaEdge'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':1., 'type':'float',
'wheelPad':2, 'initialValue':0.5, 'min':0.0, 'max':1.0,
'labelCfg':{'text':'alphaEdge'} }
self.widgetDescr['frameon'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'initialValue':1, 'labelCfg':{'text':'frame'} }
self.widgetDescr['hold'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'initialValue':0, 'labelCfg':{'text':'hold'} }
code = """def doit(self, plots, width, height, dpi, facecolor, edgecolor,
alphaFace, alphaEdge, frameon, hold):
figure = self.figure
try:
self.canvas.renderer.clear()
except AttributeError:
pass
figure.clear()
    # Use a power-of-two image size (512 pixels on the long side)
if width>height:
htc = float(height)/width
w = 512
h = int(round(512*htc))
else:
wtc = float(width)/height
w = int(round(512*wtc))
h = 512
figure.set_size_inches(float(w)/dpi, float(h)/dpi)
for p in plots:
if hasattr(p,"figure"):
p.figure.set_figwidth(float(w) / dpi)
p.figure.set_figheight(float(h) / dpi)
figure.add_axes(p)
p.set_figure(figure)
p.axesPatch.set_alpha(alphaFace)
# configure dpi
if dpi is not None:
figure.set_dpi(dpi)
# configure facecolor
if facecolor is not None:
figure.set_facecolor(tuple(facecolor[0]))
# configure edgecolor
if edgecolor is not None:
figure.set_edgecolor(tuple(edgecolor[0]))
# configure frameon
if frameon is not None:
figure.set_frameon(frameon)
figure.hold(hold)
figure.figurePatch.set_alpha(alphaEdge)
self.canvas.draw() # force a draw
import Image
im = self.canvas.buffer_rgba(0,0)
ima = Image.frombuffer("RGBA", (w, h), im)
ima = ima.transpose(Image.FLIP_TOP_BOTTOM)
self.outputData(image=ima)
"""
self.setFunction(code)
def beforeRemovingFromNetwork(self):
#print 'remove'
NetworkNode.beforeRemovingFromNetwork(self)
# this happens for drawing nodes with no axes specified
if self.axes:
self.axes.figure.delaxes(self.axes) # feel a little strange !
def afterAddingToNetwork(self):
self.figure = Figure()
from matplotlib.backends.backend_agg import FigureCanvasAgg
self.canvas = FigureCanvasAgg(self.figure)
class MPLDrawAreaNE(NetworkNode):
"""Class for configuring the axes.
The following options can be set.
left,bottom,width,height ----allows to set the position of the axes.
frame on/off --- allows to on or off frame
hold on/off --- allows to on or off hold.When hold is True, subsequent plot commands will be added to
the current axes. When hold is False, the current axes and figure will be cleared on
the next plot command
title --- allows to set title of the figure
xlabel ---allows to set xlabel of the figure
ylabel ---allows to set ylabel of the figure
xlimit --- set autoscale off before setting xlimit.
y limit --- set autoscale off before setting ylimit.
xticklabels on/off --- allows to on or off xticklabels
yticklabels on/off --- allows to on or off yticklabels
axis on/off --- allows to on or off axis
autoscale on/off --- when on sets default axes limits ,when off sets limit from xlimit and ylimit entries.
"""
def __init__(self, name='Draw Area', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='float', required=False, name='left', defaultValue=.1)
ip.append(datatype='float', required=False, name='bottom', defaultValue=.1)
ip.append(datatype='float', required=False, name='width', defaultValue=.8)
ip.append(datatype='float', required=False, name='height', defaultValue=.8)
ip.append(datatype='boolean', required=False, name='frameon', defaultValue=True)
ip.append(datatype='boolean', required=False, name='hold', defaultValue=False)
ip.append(datatype='string', required=False, name='title', defaultValue='Figure')
ip.append(datatype='string', required=False, name='xlabel', defaultValue='X')
ip.append(datatype='string', required=False, name='ylabel', defaultValue='Y')
ip.append(datatype='string', required=False, name='xlimit', defaultValue='')
ip.append(datatype='string', required=False, name='ylimit', defaultValue='')
ip.append(datatype='boolean', required=False, name='xticklabels', defaultValue=True)
ip.append(datatype='boolean', required=False, name='yticklabels', defaultValue=True)
ip.append(datatype='boolean', required=False, name='axison', defaultValue=True)
ip.append(datatype='boolean', required=False, name='autoscaleon', defaultValue=True)
self.widgetDescr['left'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':1., 'type':'float',
'labelGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':0.1,
'labelCfg':{'text':'left (0. to 1.)'} }
self.widgetDescr['bottom'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':1., 'type':'float',
'labelGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':0.1,
'labelCfg':{'text':'bottom (0. to 1.)'} }
self.widgetDescr['width'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':1., 'type':'float',
'labelGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':0.8,
'labelCfg':{'text':'width (0. to 1.)'} }
self.widgetDescr['height'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':1., 'type':'float',
'labelGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':0.8,
'labelCfg':{'text':'height (0. to 1.0)'} }
self.widgetDescr['frameon'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelGridCfg':{'sticky':'w'},
'initialValue':1, 'labelCfg':{'text':'frame'} }
self.widgetDescr['hold'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelGridCfg':{'sticky':'w'},
'initialValue':0, 'labelCfg':{'text':'hold'} }
self.widgetDescr['title'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'title'},'labelGridCfg':{'sticky':'w'},
'initialValue':'Figure:'}
self.widgetDescr['xlabel'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'X label'},'labelGridCfg':{'sticky':'w'},
'initialValue':'X'}
self.widgetDescr['ylabel'] = {
'class':'NEEntry', 'master':'ParamPanel','labelGridCfg':{'sticky':'w'},
'labelCfg':{'text':'Y label'},
'initialValue':'Y'}
self.widgetDescr['xlimit'] = {
'class':'NEEntry', 'master':'ParamPanel','labelGridCfg':{'sticky':'w'},
'labelCfg':{'text':'X limit'},
'initialValue':''}
self.widgetDescr['ylimit'] = {
'class':'NEEntry', 'master':'ParamPanel','labelGridCfg':{'sticky':'w'},
'labelCfg':{'text':'Y limit'},
'initialValue':''}
self.widgetDescr['xticklabels'] = {
'class':'NECheckButton', 'master':'ParamPanel','labelGridCfg':{'sticky':'w'},
'initialValue':1, 'labelCfg':{'text':'xticklabels'} }
self.widgetDescr['yticklabels'] = {
'class':'NECheckButton', 'master':'ParamPanel','labelGridCfg':{'sticky':'w'},'labelGridCfg':{'sticky':'w'},
'initialValue':1, 'labelCfg':{'text':'yticklabels'} }
self.widgetDescr['axison'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'initialValue':1, 'labelCfg':{'text':'axis on'} }
self.widgetDescr['autoscaleon'] = {
'class':'NECheckButton', 'master':'ParamPanel','labelGridCfg':{'sticky':'w'},
'initialValue':1, 'labelCfg':{'text':'autoscale on'} }
op = self.outputPortsDescr
op.append(datatype='MPLDrawArea', name='drawAreaDef')
code = """def doit(self, left, bottom, width, height, frameon, hold, title,
xlabel, ylabel, xlimit, ylimit, xticklabels, yticklabels, axison, autoscaleon):
kw = {'left':left, 'bottom':bottom, 'width':width, 'height':height,
'frameon':frameon, 'hold':hold, 'title':title, 'xlabel':xlabel,
'ylabel':ylabel, 'axison':axison, 'xticklabels': xticklabels,'yticklabels': yticklabels,'xlimit':xlimit,'ylimit':ylimit,'autoscaleon':autoscaleon}
self.outputData(drawAreaDef=kw)
"""
self.setFunction(code)
class MPLMergeTextNE(NetworkNode):
"""Class for writting multiple labels in the axes.Takes input from Text
nodes.
"""
def __init__(self, name='MergeText', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='MPLDrawArea', required=False,name='textlist',singleConnection=False)
op = self.outputPortsDescr
op.append(datatype='MPLDrawArea', name='drawAreaDef')
code = """def doit(self,textlist):
kw={'text':textlist}
self.outputData(drawAreaDef=kw)
"""
self.setFunction(code)
class MPLPlottingNode(MPLBaseNE):
"""Base class for plotting nodes"""
def afterAddingToNetwork(self):
self.figure = Figure()
self.axes = self.figure.add_subplot( 111 )
self.axes.node = weakref.ref(self)
master = Tkinter.Toplevel()
master.title(self.name)
self.canvas = FigureCanvasTkAgg(self.figure, master)
self.figure.set_canvas(self.canvas)
packOptsDict = {'side':'top', 'fill':'both', 'expand':1}
self.canvas.get_tk_widget().pack( *(), **packOptsDict )
self.canvas._master.protocol('WM_DELETE_WINDOW',self.canvas._master.iconify)
toolbar = NavigationToolbar2TkAgg(self.canvas, master)
def setDrawAreaDef(self, drawAreaDef):
newdrawAreaDef={}
if drawAreaDef:
if len(drawAreaDef)==1 and drawAreaDef[0] is not None:
#for d in drawAreaDef[0].keys():
# newdrawAreaDef[d]=drawAreaDef[0][d]
newdrawAreaDef = drawAreaDef[0]
elif len(drawAreaDef)>1:
for dAD in drawAreaDef:
if type(dAD)== types.DictType:
for j in dAD.keys():
newdrawAreaDef[j]=dAD[j]
self.setDrawArea(newdrawAreaDef)
codeBeforeDisconnect ="""def beforeDisconnect(self,c):
node=c.port2.node
node.axes.clear()
node.canvas.draw() """
########################################################################
####
#### PLOTTING NODES
####
########################################################################
class FillNE(MPLPlottingNode):
"""plots filled polygons.
x - list of x vertices
y - list of y vertices
fillcolor - color
"""
def __init__(self, name='Fill', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list',name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string', required=False, name='fillcolor', defaultValue='w')
ip.append(datatype='MPLDrawArea', required=False,name='drawAreaDef',singleConnection=False)
self.widgetDescr['fillcolor'] = {
'class':'NEComboBox', 'master':'node',
'choices':cnames.keys(),
'fixedChoices':True,
'initialValue':'white',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'we'},
'labelCfg':{'text':'fillcolor:'}}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype='None', name='fig')
code = """def doit(self, x, y, fillcolor, drawAreaDef):
self.axes.clear()
ax=self.axes
p=ax.fill(x,y,fillcolor)
self.setDrawAreaDef(drawAreaDef)
self.canvas.draw()
self.outputData(axes=self.axes,fig=p)
"""
self.setFunction(code)
class PolarAxesNE(MPLPlottingNode):
""" This node plots on PolarAxes
Input:
y - sequence of values
x - None; sequence of values
Adjustable parameters:
    grid -- grid on or off (default is on)
    gridcolor -- color of the grid
    gridlinewidth -- linewidth of the grid
    gridlinestyle -- gridlinestyle
    xtickcolor -- color of xtick
    ytickcolor -- color of ytick
    xticksize -- size of xtick
    yticksize -- size of ytick
"""
def __init__(self, name='PolarAxes', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
self.styles={}
for ls in Line2D._lineStyles.keys():
self.styles[Line2D._lineStyles[ls][6:]]=ls
for ls in Line2D._markers.keys():
self.styles[Line2D._markers[ls][6:]]=ls
#these styles are not recognized
#del self.styles['steps']
for s in self.styles.keys():
if s =="nothing":
del self.styles['nothing']
if s[:4]=='tick':
del self.styles[s]
self.colors=colors
ip = self.inputPortsDescr
#ip.append(datatype='MPLAxes', required=False, name='p',
#singleConnection=True)#,beforeDisconnect=codeBeforeDisconnect)
ip.append(datatype='list', name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string', required=False, name='lineStyle', defaultValue='solid')
ip.append(datatype='None', required=False, name='color', defaultValue='black')
ip.append(datatype='boolean', required=False, name='grid', defaultValue=1)
ip.append(datatype='str', required=False, name='gridlineStyle', defaultValue='--')
ip.append(datatype='str', required=False, name='gridcolor', defaultValue='gray')
ip.append(datatype='float', required=False, name='gridlinewidth', defaultValue=1)
ip.append(datatype='str', required=False, name='axisbg', defaultValue='white')
ip.append(datatype='str', required=False, name='xtickcolor', defaultValue='black')
ip.append(datatype='str', required=False, name='ytickcolor', defaultValue='black')
ip.append(datatype='float', required=False, name='xticksize', defaultValue=12)
ip.append(datatype='float', required=False, name='yticksize', defaultValue=12)
ip.append(datatype='MPLDrawArea', required=False,name='drawAreaDef',singleConnection=False)
self.widgetDescr['grid'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'initialValue':1, 'labelCfg':{'text':'grid'} ,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},}
self.widgetDescr['lineStyle'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.styles.keys(),
'fixedChoices':True,
'initialValue':'solid',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'line style:'}}
self.widgetDescr['color'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.colors.keys(),
'fixedChoices':True,
'initialValue':'black',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'we'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['gridlineStyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':lineStyles.keys(),
'fixedChoices':True,
'initialValue':'--',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'gridlinestyle:'}}
self.widgetDescr['gridcolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cnames.keys(),
'fixedChoices':True,
'initialValue':'gray',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'gridcolor:'}}
self.widgetDescr['gridlinewidth'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':60, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'gridlinewidth'},
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'}}
self.widgetDescr['axisbg'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cnames.keys(),
'fixedChoices':True,
'initialValue':'white',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'axisbg:'}}
self.widgetDescr['xtickcolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cnames.keys(),
'fixedChoices':True,
'initialValue':'black',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'xtickcolor:'}}
self.widgetDescr['ytickcolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cnames.keys(),
'fixedChoices':True,
'initialValue':'black',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'ytickcolor:'}}
self.widgetDescr['xticksize'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':60, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':12,
'labelCfg':{'text':'xticksize'},'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'} }
self.widgetDescr['yticksize'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':60, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':12,
'labelCfg':{'text':'yticksize'},'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'} }
op = self.outputPortsDescr
op.append(datatype='MPLFigure', name='figure')
code = """def doit(self, y, x, lineStyle, color, grid, gridlineStyle,
gridcolor, gridlinewidth, axisbg, xtickcolor, ytickcolor, xticksize, yticksize, drawAreaDef):
self.figure.clear()
self.setDrawAreaDef(drawAreaDef)
if grid==1:
matplotlib.rc('grid',color=gridcolor,linewidth=gridlinewidth,linestyle=gridlineStyle)
matplotlib.rc('xtick',color=xtickcolor,labelsize=xticksize)
matplotlib.rc('ytick',color=ytickcolor,labelsize=yticksize)
colorChar = self.colors[color]
lineStyleChar = self.styles[lineStyle]
new_axes=self.figure.add_axes(self.axes.get_position(),polar=True,axisbg=axisbg)
self.axes=new_axes
self.axes.plot(x, y, colorChar+lineStyleChar)
if grid!=1:
new_axes.grid(grid)
self.canvas.draw()
self.outputData(figure=self.figure)
"""
self.setFunction(code)
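
# A minimal sketch (hypothetical helper) of the polar-plot recipe
# PolarAxesNE uses: grid and tick styling is pushed through matplotlib.rc
# before the polar axes is created. Assumes this module's top-level
# matplotlib import.
def _example_polar(figure, theta, r):
    matplotlib.rc('grid', color='gray', linewidth=1, linestyle='--')
    matplotlib.rc('xtick', color='black', labelsize=12)
    matplotlib.rc('ytick', color='black', labelsize=12)
    ax = figure.add_axes([0.1, 0.1, 0.8, 0.8], polar=True)
    ax.plot(theta, r, 'k-')              # color char + line-style char
    return ax
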
class StemNE(MPLPlottingNode):
"""A stem plot plots vertical lines (using linefmt) at each x location
from the baseline to y, and places a marker there using markerfmt. A
horizontal line at 0 is is plotted using basefmt
input: list of x values
Return value is (markerline, stemlines, baseline) .
"""
def __init__(self, name='Stem', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,),kw )
ip = self.inputPortsDescr
ip.append(datatype='list',required=True, name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list',required=True, name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string',required=False,name='stemlinestyle', defaultValue='--')
ip.append(datatype='string',required=False,name='stemlinecolor', defaultValue='b')
ip.append(datatype='string',required=False,name='markerstyle', defaultValue='o')
ip.append(datatype='string',required=False,name='markerfacecolor', defaultValue='b')
ip.append(datatype='string',required=False,name='baselinecolor', defaultValue='b')
ip.append(datatype='string',required=False,name='baselinestyle', defaultValue='-')
ip.append(datatype='MPLDrawArea', required=False,name='drawAreaDef',singleConnection=False)
self.widgetDescr['stemlinestyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['-.','--','-',':'],
'fixedChoices':True,
'initialValue':'--',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'stemlinestyle:'}}
self.widgetDescr['stemlinecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'stemlinecolor:'}}
self.widgetDescr['markerstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':Line2D._markers.keys(),
'fixedChoices':True,
'initialValue':'o',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'markerstyle:'}}
self.widgetDescr['markerfacecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'k',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'markerfacecolor:'}}
self.widgetDescr['baselinestyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['-.','--','-',':'],
'fixedChoices':True,
'initialValue':'-',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'baselinestyle:'}}
self.widgetDescr['baselinecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'k',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'baselinecolor:'}}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='stem')
code = """def doit(self, x, y, stemlinestyle, stemlinecolor, markerstyle,
markerfacecolor, baselinecolor, baselinestyle, drawAreaDef):
self.axes.clear()
linefmt=stemlinecolor+stemlinestyle
markerfmt=markerfacecolor+markerstyle
basefmt= baselinecolor+baselinestyle
markerline, stemlines, baseline = self.axes.stem(x, y, linefmt=linefmt, markerfmt=markerfmt, basefmt=basefmt )
self.setDrawAreaDef(drawAreaDef)
self.canvas.draw()
self.outputData(stem=self.axes)
"""
self.setFunction(code)
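
# A minimal sketch (hypothetical helper) of the fmt-string composition
# StemNE performs before calling Axes.stem: each fmt is one color
# character plus one style character.
def _example_stem(ax, x, y):
    linefmt = 'b' + '--'                 # stemlinecolor + stemlinestyle
    markerfmt = 'b' + 'o'                # markerfacecolor + markerstyle
    basefmt = 'b' + '-'                  # baselinecolor + baselinestyle
    return ax.stem(x, y, linefmt=linefmt, markerfmt=markerfmt,
                   basefmt=basefmt)
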
class MultiPlotNE(MPLPlottingNode):
"""This node allows to plot multiple plots on same axes
input: axes instances
"""
def __init__(self, name='MultiPlot', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,),kw )
ip = self.inputPortsDescr
ip.append(datatype='MPLAxes', required=True, name='multiplot', singleConnection=False,beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef', singleConnection=False)
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='multiplot')
code = """def doit(self, plots, drawAreaDef):
self.axes.clear()
ax=self.axes
if len(plots)>0:
ax.set_xlim(plots[0].get_xlim())
ax.set_ylim(plots[0].get_ylim())
for p in plots:
if p.patches!=[]:
for pt in p.patches:
ax.add_patch(pt)
elif p.lines!=[]:
if p.lines!=[]:
for pt in p.lines:
ax.add_line(pt)
elif p.collections!=[]:
if p.collections!=[]:
for pt in p.collections:
ax.add_collection(pt)
else:
ax.add_artist(p)
ax.autoscale_view()
self.setDrawAreaDef(drawAreaDef)
self.canvas.draw()
self.outputData(multiplot=self.axes)
"""
self.setFunction(code)
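
# A minimal sketch (hypothetical helper) of the overlay strategy
# MultiPlotNE uses: artists are lifted off each source axes and re-added
# to one target axes, which is then rescaled. This simplified variant
# re-adds every artist type rather than picking one per source.
def _example_overlay(target_axes, source_axes_list):
    for src in source_axes_list:
        for patch in src.patches:
            target_axes.add_patch(patch)
        for line in src.lines:
            target_axes.add_line(line)
        for coll in src.collections:
            target_axes.add_collection(coll)
    target_axes.autoscale_view()
    return target_axes
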
class TablePlotNE(MPLPlottingNode):
"""Adds a table to the current axes and plots bars.
input:
cellText - list of values
rowLabels - list of labels
rowColours - list of colors
colLabels - list of labels
colColours - list of colors
location - location where the table to be placed.
"""
def __init__(self, name='TablePlot', **kw):
"""
TABLE(cellText=None, cellColours=None,
cellLoc='right', colWidths=None,
rowLabels=None, rowColours=None, rowLoc='left',
colLabels=None, colColours=None, colLoc='center',
loc='bottom', bbox=None):
Adds a table to the current axes and plots bars.
"""
kw['name'] = name
locs=locations.keys()
locs.append("bottom")
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list',required=True, name='values',singleConnection="auto",beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list',required=True, name='rowLabels',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list',required=True, name='colLabels',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list',required=False, name='rowColors')
ip.append(datatype='list',required=False, name='colColors')
ip.append(datatype='string',required=False, name='location', defaultValue='bottom')
ip.append(datatype='MPLDrawArea',required=False,name='drawAreaDef',singleConnection=False)
self.widgetDescr['location'] = {
'class':'NEComboBox', 'master':'node',
'choices':locs,
'fixedChoices':True,
'initialValue':'bottom',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'Location:'}}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
code = """def doit(self, values, rowLabels, colLabels, rowColors,
colColors, location, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
#self.axes.set_position([0.2, 0.2, 0.7, 0.6])
data=[]
nd=[]
for val in values :
for v in val:
nd.append(float(v))
data.append(nd)
nd=[]
#rcolours = get_colours(len(colLabels))
rows = len(data)
ind = arange(len(colLabels)) + 0.3 # the x locations for the groups
cellText = []
width = 0.4 # the width of the bars
yoff = array([0.0] * len(colLabels)) # the bottom values for stacked bar chart
for row in xrange(rows):
self.axes.bar(ind, data[row], width, bottom=yoff, color=rowColors[row])
yoff = yoff + data[row]
cellText.append(['%1.1f' % x for x in yoff])
the_table = self.axes.table(cellText=cellText,
rowLabels=rowLabels,
rowColours=rowColors,
colColours=colColors,
colLabels=colLabels,
loc=location)
if location=="bottom":
self.axes.set_xticks([])
self.axes.set_xticklabels([])
self.canvas.draw()
self.outputData(plot=self.axes)
"""
self.setFunction(code)
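
# A minimal sketch (hypothetical helper) of the stacked-bar-plus-table
# recipe TablePlotNE implements. Assumes this module's top-level array
# and arange imports; `data` is a list of equal-length numeric rows and
# the label/color lists match its shape.
def _example_table(ax, data, rowLabels, colLabels, rowColors):
    ind = arange(len(colLabels)) + 0.3      # x locations of the bar groups
    yoff = array([0.0] * len(colLabels))    # running bottoms for stacking
    cellText = []
    for row in range(len(data)):
        ax.bar(ind, data[row], 0.4, bottom=yoff, color=rowColors[row])
        yoff = yoff + data[row]
        cellText.append(['%1.1f' % v for v in yoff])
    return ax.table(cellText=cellText, rowLabels=rowLabels,
                    rowColours=rowColors, colLabels=colLabels, loc='bottom')
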
class HistogramNE(MPLPlottingNode):
"""This nodes takes a list of values and builds a histogram using matplotlib
http://matplotlib.sourceforge.net/matplotlib.pylab.html#-hist
Compute the histogram of x. bins is either an integer number of
bins or a sequence giving the bins. x are the data to be binned.
The return values is (n, bins, patches)
If normed is true, the first element of the return tuple will be the
counts normalized to form a probability distribtion, ie,
n/(len(x)*dbin)
Addition kwargs: hold = [True|False] overrides default hold state
Input:
values: sequence of values
bins=10: number of dequence giving the gins
normed=0 normalize
Output:
plot Matplotlib Axes object
"""
def __init__(self, name='Histogram', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
self.colors=cnames
ip = self.inputPortsDescr
ip.append(datatype='None', name='values',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', required=False, name='bins', defaultValue=10)
ip.append(datatype='boolean', required=False, name='normed', defaultValue=False)
ip.append(datatype='float', required=False, name='patch_antialiased', defaultValue=1)
ip.append(datatype='float', required=False, name='patch_linewidth', defaultValue=1)
ip.append(datatype='string', required=False, name='patch_edgecolor', defaultValue='black')
ip.append(datatype='string', required=False, name='patch_facecolor', defaultValue='blue')
ip.append(datatype='MPLDrawArea',required=False,name='drawAreaDef',singleConnection=False)
self.widgetDescr['bins'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':10, 'type':'int', 'wheelPad':2,
'initialValue':10,
'labelCfg':{'text':'# of bins'} }
self.widgetDescr['normed'] = {
'class':'NECheckButton', 'master':'node',
'initialValue':1, 'labelCfg':{'text':'normalize'},
}
self.widgetDescr['patch_linewidth'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':60, 'height':21, 'oneTurn':2, 'type':'int',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'linewidth'} }
self.widgetDescr['patch_antialiased'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'antialiased:'},
'initialValue':1,}
self.widgetDescr['patch_edgecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.colors.keys(),
'fixedChoices':True,
'initialValue':'black',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'edgecolor:'}}
self.widgetDescr['patch_facecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.colors.keys(),
'fixedChoices':True,
'initialValue':'blue',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'facecolor:'}}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
code = """def doit(self, values, bins, normed, patch_antialiased,
patch_linewidth, patch_edgecolor, patch_facecolor, drawAreaDef):
self.axes.clear()
n, bins, patches = self.axes.hist(values, bins=bins, normed=normed)
self.setDrawAreaDef(drawAreaDef)
if self.axes.patches:
for p in self.axes.patches:
p.set_linewidth(patch_linewidth)
p.set_edgecolor(patch_edgecolor)
p.set_facecolor(patch_facecolor)
p.set_antialiased(patch_antialiased)
self.canvas.draw()
self.outputData(plot=self.axes)
"""
self.setFunction(code)
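
# A minimal sketch (hypothetical helper) of the HistogramNE call followed
# by its per-patch styling loop; `normed` is the pre-1.x matplotlib
# keyword this module targets.
def _example_hist(ax, values, bins=10, normed=0):
    n, bins, patches = ax.hist(values, bins=bins, normed=normed)
    for p in patches:                       # style every histogram bar
        p.set_edgecolor('black')
        p.set_facecolor('blue')
    return n, bins, patches
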
#Plot Nodes
class PlotNE(MPLPlottingNode):
"""This nodes takes two lists of values and plots the the second against the first.
Input:
y - sequence of values
x - None; sequence of values
    figure - None; MPLFigure object into which to place the drawing
Output:
plot Matplotlib Axes object
line: - line
"""
def __init__(self, name='Plot', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
self.styles=get_styles()
self.colors=colors
self.joinstyles = Line2D.validJoin
self.capstyles = Line2D.validCap
ip = self.inputPortsDescr
ip.append(datatype='list', name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', required=False, name='x')
ip.append(datatype='string', required=False, name='lineStyle', defaultValue='solid')
ip.append(datatype='None', required=False, name='color', defaultValue='black')
ip.append(datatype='boolean', required=False, name='line_antialiased', defaultValue=1)
ip.append(datatype='float', required=False, name='line_linewidth', defaultValue=1)
ip.append(datatype='string', required=False, name='solid_joinstyle', defaultValue='miter')
ip.append(datatype='string', required=False, name='solid_capstyle', defaultValue='projecting')
ip.append(datatype='string', required=False, name='dash_capstyle', defaultValue='butt')
ip.append(datatype='string', required=False, name='dash_joinstyle', defaultValue='miter')
ip.append(datatype='MPLDrawArea', required=False,name='drawAreaDef',singleConnection=False)
self.widgetDescr['lineStyle'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.styles.keys(),
'fixedChoices':True,
'initialValue':'solid',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'line style:'}}
self.widgetDescr['color'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.colors.keys(),
'fixedChoices':True,
'initialValue':'black',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'we'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['dash_capstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.capstyles,
'fixedChoices':True,
'initialValue':'butt',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'dash_capstyle:'}}
self.widgetDescr['dash_joinstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.joinstyles,
'fixedChoices':True,
'initialValue':'miter',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
            'labelCfg':{'text':'dash_joinstyle:'}}
self.widgetDescr['solid_capstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.capstyles,
'fixedChoices':True,
'initialValue':'projecting',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'solid_capstyle:'}}
self.widgetDescr['solid_joinstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.joinstyles,
'fixedChoices':True,
'initialValue':'miter',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'solid_joinstyle:'}}
self.widgetDescr['line_linewidth'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':60, 'height':21, 'oneTurn':2, 'type':'int',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'linewidth'} }
self.widgetDescr['line_antialiased'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'antialiased:'},
'initialValue':1,}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
code = """def doit(self, y, x, lineStyle, color, line_antialiased,
line_linewidth, solid_joinstyle, solid_capstyle, dash_capstyle, dash_joinstyle,
drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
colorChar = self.colors[color]
lineStyleChar = self.styles[lineStyle]
if x is None:
l = self.axes.plot(y, colorChar+lineStyleChar)
else:
l = self.axes.plot(x, y, colorChar+lineStyleChar)
#line properties
if self.axes.lines:
for l in self.axes.lines:
l.set_linewidth(line_linewidth)
l.set_antialiased(line_antialiased)
l.set_solid_joinstyle(solid_joinstyle)
l.set_solid_capstyle(solid_capstyle)
l.set_dash_capstyle(dash_capstyle)
l.set_dash_joinstyle(dash_joinstyle)
self.canvas.draw()
self.outputData(plot=self.axes)
"""
self.setFunction(code)
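
# A minimal sketch (hypothetical helper) of how PlotNE turns its widget
# choices into a matplotlib fmt string: one color character plus one
# line-style character, with x optional.
def _example_plot(ax, y, x=None):
    fmt = 'k' + '-'                         # colors['black'] + styles['solid']
    if x is None:
        return ax.plot(y, fmt)              # x defaults to range(len(y))
    return ax.plot(x, y, fmt)
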
class PlotDateNE(MPLPlottingNode):
"""This nodes takes two lists of values and plots the the second against the first.
Input:
y - sequence of dates
x - sequence of dates
    optional arguments:
    lineStyle - line style
    color - color of the line
    (lineStyle + color char -- fmt)
    tz - timezone
    xdate - if True, the x-axis will be labeled with dates
    ydate - if True, the y-axis will be labeled with dates
    Output:
    plot Matplotlib Axes object
    line: - line
    pytz is required; the node checks for the pytz module and returns
    without plotting if it is missing.
    """
def __init__(self, name='PlotDate', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
self.styles=get_styles()
self.colors=colors
self.joinstyles = Line2D.validJoin
timezones=common_timezones
self.capstyles = Line2D.validCap
ip = self.inputPortsDescr
ip.append(datatype='list', name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string', required=False, name='lineStyle', defaultValue='solid')
ip.append(datatype='None', required=False, name='color', defaultValue='black')
        ip.append(datatype='string', required=False, name='tz', defaultValue='US/Pacific')
ip.append(datatype='boolean', required=False, name='xdate', defaultValue=True)
ip.append(datatype='boolean', required=False, name='ydate', defaultValue=False)
ip.append(datatype='boolean', required=False, name='line_antialiased', defaultValue=1)
ip.append(datatype='float', required=False, name='line_linewidth', defaultValue=1)
ip.append(datatype='string', required=False, name='solid_joinstyle', defaultValue='miter')
ip.append(datatype='string', required=False, name='solid_capstyle', defaultValue='projecting')
ip.append(datatype='string', required=False, name='dash_capstyle', defaultValue='butt')
ip.append(datatype='string', required=False, name='dash_joinstyle', defaultValue='miter')
ip.append(datatype='MPLDrawArea', required=False,name='drawAreaDef',singleConnection=False)
self.widgetDescr['lineStyle'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.styles.keys(),
'fixedChoices':True,
'initialValue':'circle',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'line style:'}}
self.widgetDescr['color'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.colors.keys(),
'fixedChoices':True,
'initialValue':'blue',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'we'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['tz'] = {
'class':'NEComboBox', 'master':'node',
'choices':timezones,
'fixedChoices':True,
            'initialValue':'US/Pacific',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'timezone:'}}
self.widgetDescr['xdate'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'xdate:'},
'initialValue':1,}
self.widgetDescr['ydate'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'ydate:'},
'initialValue':0,}
self.widgetDescr['dash_capstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.capstyles,
'fixedChoices':True,
'initialValue':'butt',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'dash_capstyle:'}}
self.widgetDescr['dash_joinstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.joinstyles,
'fixedChoices':True,
'initialValue':'miter',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
            'labelCfg':{'text':'dash_joinstyle:'}}
self.widgetDescr['solid_capstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.capstyles,
'fixedChoices':True,
'initialValue':'projecting',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'solid_capstyle:'}}
self.widgetDescr['solid_joinstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.joinstyles,
'fixedChoices':True,
'initialValue':'miter',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'solid_joinstyle:'}}
self.widgetDescr['line_linewidth'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':60, 'height':21, 'oneTurn':2, 'type':'int',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'linewidth'} }
self.widgetDescr['line_antialiased'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'antialiased:'},
'initialValue':1,}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
code = """def doit(self, y, x, lineStyle, color, tz, xdate, ydate,
line_antialiased, line_linewidth, solid_joinstyle, solid_capstyle, dash_capstyle,
dash_joinstyle, drawAreaDef):
try:
from pytz import common_timezones
except:
print "Could not import pytz "
return
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
colorChar = self.colors[color]
lineStyleChar = self.styles[lineStyle]
rcParams['timezone'] = tz
tz=timezone(tz)
    fmt = colorChar + lineStyleChar
    l = self.axes.plot_date(x, y, fmt=fmt, tz=tz, xdate=xdate, ydate=ydate)
#line properties
if self.axes.lines:
for l in self.axes.lines:
l.set_linewidth(line_linewidth)
l.set_antialiased(line_antialiased)
l.set_solid_joinstyle(solid_joinstyle)
l.set_solid_capstyle(solid_capstyle)
l.set_dash_capstyle(dash_capstyle)
l.set_dash_joinstyle(dash_joinstyle)
self.canvas.draw()
self.outputData(plot=self.axes)
"""
self.setFunction(code)
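
# A minimal sketch (hypothetical helper) of the PlotDateNE call; assumes
# pytz is installed and x holds matplotlib date numbers. Note that pytz
# zone names are case-sensitive ('US/Pacific', not 'US/PACIFIC').
def _example_plot_date(ax, x, y):
    from pytz import timezone
    tz = timezone('US/Pacific')
    return ax.plot_date(x, y, fmt='bo', tz=tz, xdate=True, ydate=False)
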
class PieNE(MPLPlottingNode):
"""plots a pie diagram for a list of numbers. The size of each wedge
will be the fraction x/sumnumbers).
Input:
fractions - sequence of values
labels - None; sequence of labels (has to match length of factions
explode - None; float or sequence of values which specifies the
fraction of the radius to offset that wedge.
if a single float is given the list is generated automatically
shadow - True; if True, will draw a shadow beneath the pie.
format - None; fromat string used to label the wedges with their
numeric value
Output:
plot - Matplotlib Axes object
patches - sequence of matplotlib.patches.Wedge
texts - list of the label Text instances
autotextsline - list of text instances for the numeric labels (only if
format is not None
"""
def __init__(self, name='Pie', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list', name='fractions',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', required=False, name='labels')
ip.append(datatype='None', required=False, name='explode')
ip.append(datatype='boolean', required=False, name='shadow')
ip.append(datatype='string', required=False, name='format')
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['explode'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':10, 'type':'float',
'initialValue':0.05, 'wheelPad':2,
'labelCfg':{'text':'explode'} }
self.widgetDescr['shadow'] = {
'class':'NECheckButton', 'master':'node',
'initialValue':1, 'labelCfg':{'text':'shadow'}}
self.widgetDescr['format'] = {
'class':'NEEntry', 'master':'node',
'labelCfg':{'text':'format:'},
'initialValue':'%1.1f%%'}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
op.append(datatype='None', name='patches')
op.append(datatype='None', name='texts')
op.append(datatype='None', name='autotextsline')
code = """def doit(self, fractions, labels, shadow, explode, format,
drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
if isinstance(explode, float) or isinstance(explode, int):
explode = [explode]*len(fractions)
res = self.axes.pie(fractions, explode=explode, labels=labels,
autopct=format, shadow=shadow)
if format is None:
patches, texts = res
autotextsline = None
else:
patches, texts, autotextsline = res
self.canvas.draw()
self.outputData(plot=self.axes, patches=patches, texts=texts, autotextsline=autotextsline)
"""
self.setFunction(code)
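
# A minimal sketch (hypothetical helper) of the scalar-explode broadcast
# PieNE performs: a single float becomes one radial offset per wedge.
def _example_pie(ax, fractions, labels, explode=0.05):
    if isinstance(explode, (int, float)):
        explode = [explode] * len(fractions)
    return ax.pie(fractions, explode=explode, labels=labels,
                  autopct='%1.1f%%', shadow=True)
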
#Spy Nodes
class SpyNE(MPLPlottingNode):
"""Plots the sparsity pattern of the matrix Z using plot markers.
input:
Z - matrix
    optional arguments:
marker - marker
markersize -markersize
The line handles are returned
"""
def __init__(self, name='Spy', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None',name = 'Z',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string', required=False, name='marker', defaultValue='s')
ip.append(datatype='None', required=False, name='markersize', defaultValue=10)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['marker'] = {
'class':'NEComboBox', 'master':'node',
'choices':markers.values(),
'fixedChoices':True,
'initialValue':'s',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'markers:'}}
self.widgetDescr['markersize'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':10, 'type':'float',
'initialValue':10.0, 'wheelPad':2,
'labelCfg':{'text':'size'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
code = """def doit(self, Z, marker, markersize, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
l=self.axes.spy(Z,marker=marker,markersize=markersize)
self.canvas.draw()
self.outputData(plot=self.axes)
"""
self.setFunction(code)
class Spy2NE(MPLPlottingNode):
"""SPY2 plots the sparsity pattern of the matrix Z as an image
input:
Z - matrix
The image instance is returned
"""
def __init__(self, name='Spy2', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None',name = 'Z',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
op.append(datatype='None', name='image')
code = """def doit(self, Z, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
im=self.axes.spy2(Z)
self.canvas.draw()
self.outputData(plot=self.axes,image=im)
"""
self.setFunction(code)
class VlineNE(MPLPlottingNode):
"""Plots vertical lines at each x from ymin to ymax. ymin or ymax can be
scalars or len(x) numpy arrays. If they are scalars, then the
respective values are constant, else the heights of the lines are
determined by ymin and ymax
x - array
ymin or ymax can be scalars or len(x) numpy arrays
    color, linestyle - color and line style used to draw the lines
Returns a list of lines that were added
"""
def __init__(self, name='Vline', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list',name = 'x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list',name = 'ymin',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list',name = 'ymax',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string', required=False, name='color', defaultValue='k')
        ip.append(datatype='string', required=False, name='linestyle', defaultValue='solid')
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['color'] = {
'class':'NEComboBox', 'master':'node',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'k',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'we'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['linestyle'] = {
'class':'NEComboBox', 'master':'node',
'choices':['solid','dashed','dashdot','dotted'],
'fixedChoices':True,
'initialValue':'solid',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'linestyle:'}}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
op.append(datatype='None', name='lines')
code = """def doit(self, x, ymin, ymax, color, linestyle, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
lines=self.axes.vlines(x, ymin, ymax, color=color, linestyle=linestyle )
self.canvas.draw()
self.outputData(plot=self.axes,lines=lines)
"""
self.setFunction(code)
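
# A minimal sketch (hypothetical helper) of the VlineNE call; ymin/ymax
# may be scalars or sequences of the same length as x.
def _example_vlines(ax, x, ymin, ymax):
    return ax.vlines(x, ymin, ymax, color='k', linestyle='solid')
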
#Scatter Nodes
from math import fabs
class ScatterNE(MPLPlottingNode):
"""plots a scatter diagram for two lists of numbers.
Input:
x - sequence of values
y - sequence of values
s - None; sequence of values for size in area
c - None, string or sequence of colors
marker - 'circle', marker
Output:
plot Matplotlib Axes object
patches - matplotlib.collections.RegularPolyCollection instance
"""
def getPointInBin(self, data, sortind, x, eps):
#dichotomous search for indices of values in data within x+-eps
if len(sortind)>2:
if data[sortind[0]]==x: return data, [sortind[0]]
elif data[sortind[-1]]==x: return data, [sortind[-1]]
elif len(sortind)==2: return data, sortind
else:
mid = len(sortind)/2
if fabs(data[sortind[mid]]-x)<eps:
if fabs(data[sortind[0]]-x)<eps:
return data, sortind[:mid]
elif fabs(data[sortind[-1]]-x)<eps:
return data, sortind[mid:]
if data[sortind[mid]]>x:
data, sortind = self.getPointInBin(data, sortind[:mid],
x, eps)
elif data[sortind[mid]]<x:
data, sortind = self.getPointInBin(data, sortind[mid:],
x, eps)
return data, sortind
def on_click(self, event):
# get the x and y pixel coords
if event.inaxes:
d1 = self.inputPorts[0].getData()
d2 = self.inputPorts[1].getData()
mini = min(d1)
maxi = max(d1)
epsx = (maxi-mini)/200.
import numpy
d1s = numpy.argsort(d1)
x, y = event.xdata, event.ydata
dum, v1 = self.getPointInBin(d1, d1s, x, epsx)
mini = min(d2)
maxi = max(d2)
epsy = (maxi-mini)/200.
result = []
for v in v1:
if fabs(x - d1[v])<epsx and fabs(y - d2[v])<epsy:
result.append(v)
#print v, x - d1[v], epsx, y - d2[v], epsy
if len(result):
print 'point:', result, x, d1[result[0]], y, d2[result[0]]
self.outputData(pick=result)
self.scheduleChildren([self.outputPorts[2]])
return result
else:
print "NO POINT"
return None
def afterAddingToNetwork(self):
MPLPlottingNode.afterAddingToNetwork(self)
self.figure.canvas.mpl_connect('button_press_event', self.on_click)
def __init__(self, name='Scatter', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
self.cutoff = 10.0
self.joinstyles = Line2D.validJoin
self.capstyles = Line2D.validCap
self.colors = colors
self.markers = markers
self.widgetDescr['s'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':10, 'type':'float',
'initialValue':1.0, 'wheelPad':2,
'labelCfg':{'text':'size'} }
self.widgetDescr['c'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.colors.values(),
'fixedChoices':True,
'initialValue':'k',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'we'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['marker'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.markers.keys(),
'fixedChoices':True,
'initialValue':'circle',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'markers:'}}
self.widgetDescr['dash_capstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.capstyles,
'fixedChoices':True,
'initialValue':'butt',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'dash_capstyle:'}}
self.widgetDescr['dash_joinstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.joinstyles,
'fixedChoices':True,
'initialValue':'miter',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
            'labelCfg':{'text':'dash_joinstyle:'}}
self.widgetDescr['solid_capstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.capstyles,
'fixedChoices':True,
'initialValue':'projecting',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'solid_capstyle:'}}
self.widgetDescr['solid_joinstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.joinstyles,
'fixedChoices':True,
'initialValue':'miter',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'solid_joinstyle:'}}
self.widgetDescr['linewidth'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':60, 'height':21, 'oneTurn':2, 'type':'int',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'linewidth'} }
self.widgetDescr['line_antialiased'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'antialiased:'},
'initialValue':1,}
ip = self.inputPortsDescr
ip.append(datatype='list', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', required=False, name='s')
ip.append(datatype='None', required=False, name='c', defaultValue='k')
ip.append(datatype='string', required=False, name='marker', defaultValue='circle')
ip.append(datatype='string', required=False, name='solid_joinstyle', defaultValue='miter')
ip.append(datatype='string', required=False, name='solid_capstyle', defaultValue='projecting')
ip.append(datatype='string', required=False, name='dash_capstyle', defaultValue='butt')
ip.append(datatype='string', required=False, name='dash_joinstyle', defaultValue='miter')
ip.append(datatype='MPLDrawArea', required=False,name='drawAreaDef',singleConnection=False)
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
op.append(datatype='None', name='patches')
op.append(datatype='int', name='pick')
code = """def doit(self, x, y, s, c, marker, solid_joinstyle,
solid_capstyle, dash_capstyle, dash_joinstyle, drawAreaDef):
kw={'solid_joinstyle':solid_joinstyle,'solid_capstyle':solid_capstyle,'dash_capstyle':dash_capstyle,'dash_joinstyle':dash_joinstyle}
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
if self.markers.has_key(marker):
marker = self.markers[marker]
res = self.axes.scatter( x, y, s, c, marker)
#collections properties
    if self.axes.collections:
for c in self.axes.collections:
c.set_solid_joinstyle(solid_joinstyle)
c.set_solid_capstyle(solid_capstyle)
c.set_dash_capstyle(dash_capstyle)
c.set_dash_joinstyle(dash_joinstyle)
self.canvas.draw()
self.outputData(plot=self.axes, patches=res)
"""
self.setFunction(code)
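
# A minimal sketch (hypothetical helper) of the marker lookup ScatterNE
# performs: the human-readable widget choice is mapped back to the
# single-character matplotlib marker before calling Axes.scatter.
def _example_scatter(ax, x, y, markers_table, marker='circle', s=1.0, c='k'):
    if marker in markers_table:             # e.g. markers_table['circle'] == 'o'
        marker = markers_table[marker]
    return ax.scatter(x, y, s, c, marker)
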
class ScatterClassicNE(MPLPlottingNode):
"""plots a scatter diagram for two lists of numbers.
Input:
x - sequence of values
y - sequence of values
s - None; sequence of values for size in area
c - None, string or sequence of colors
Output:
plot Matplotlib Axes object
patches - matplotlib.collections.RegularPolyCollection instance
"""
def __init__(self, name='ScatterClassic', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
self.joinstyles = Line2D.validJoin
self.capstyles = Line2D.validCap
self.colors=colors
self.markers =markers
self.widgetDescr['s'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':10, 'type':'float',
'initialValue':1.0, 'wheelPad':2,
'labelCfg':{'text':'size'} }
self.widgetDescr['c'] = {
'class':'NEComboBox', 'master':'node',
'choices':self.colors.values(),
'fixedChoices':True,
'initialValue':'k',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'we'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['dash_capstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.capstyles,
'fixedChoices':True,
'initialValue':'butt',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'dash_capstyle:'}}
self.widgetDescr['dash_joinstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.joinstyles,
'fixedChoices':True,
'initialValue':'miter',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
            'labelCfg':{'text':'dash_joinstyle:'}}
self.widgetDescr['solid_capstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.capstyles,
'fixedChoices':True,
'initialValue':'projecting',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'solid_capstyle:'}}
self.widgetDescr['solid_joinstyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':self.joinstyles,
'fixedChoices':True,
'initialValue':'miter',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'solid_joinstyle:'}}
self.widgetDescr['linewidth'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':60, 'height':21, 'oneTurn':2, 'type':'int',
'wheelPad':2, 'initialValue':1,
'labelCfg':{'text':'linewidth'} }
self.widgetDescr['line_antialiased'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'antialiased:'},
'initialValue':1,}
ip = self.inputPortsDescr
ip.append(datatype='list', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', required=False, name='s')
ip.append(datatype='None', required=False, name='c')
ip.append(datatype='string', required=False, name='solid_joinstyle', defaultValue='miter')
ip.append(datatype='string', required=False, name='solid_capstyle', defaultValue='projecting')
ip.append(datatype='string', required=False, name='dash_capstyle', defaultValue='butt')
ip.append(datatype='string', required=False, name='dash_joinstyle', defaultValue='miter')
ip.append(datatype='MPLDrawArea', required=False,name='drawAreaDef',singleConnection=False)
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
op.append(datatype='None', name='patches')
code = """def doit(self, x, y, s, c, solid_joinstyle, solid_capstyle,
dash_capstyle, dash_joinstyle, drawAreaDef):
kw={'solid_joinstyle':solid_joinstyle,'solid_capstyle':solid_capstyle,'dash_capstyle':dash_capstyle,'dash_joinstyle':dash_joinstyle}
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
res = self.axes.scatter_classic( x, y, s, c)
#collections properties
    if self.axes.collections:
for c in self.axes.collections:
c.set_solid_joinstyle(solid_joinstyle)
c.set_solid_capstyle(solid_capstyle)
c.set_dash_capstyle(dash_capstyle)
c.set_dash_joinstyle(dash_joinstyle)
self.canvas.draw()
self.outputData(plot=self.axes, patches=res)
"""
self.setFunction(code)
class FigImageNE(MPLPlottingNode):
"""plots an image from a 2d array fo data
Input:
data - 2D array of data
Output:
plot Matplotlib Axes object
    image - image.AxesImage instance (created with Axes.imshow)
"""
def __init__(self, name='Figimage', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', name='data',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string',required=False, name='cmap', defaultValue='jet')
ip.append(datatype='string',required=False, name='imaspect', defaultValue='equal')
ip.append(datatype='string',required=False, name='interpolation', defaultValue='bilinear')
ip.append(datatype='string',required=False, name='origin', defaultValue='upper')
ip.append(datatype='None', required=False, name='alpha', defaultValue=1.)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
imaspects=['auto', 'equal']
interpolations =['nearest', 'bilinear', 'bicubic', 'spline16', 'spline36', 'hanning', 'hamming', 'hermite', 'kaiser', 'quadric','catrom', 'gaussian', 'bessel', 'mitchell', 'sinc','lanczos', 'blackman']
self.widgetDescr['cmap'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cmaps,
'fixedChoices':True,
'initialValue':'jet',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'cmap:'}}
self.widgetDescr['imaspect'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':imaspects,
'fixedChoices':True,
'initialValue':'equal',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'aspect:'}}
self.widgetDescr['interpolation'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':interpolations,
'fixedChoices':True,
'initialValue':'bilinear',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'interpolation:'}}
self.widgetDescr['origin'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['upper','lower',],
'fixedChoices':True,
'initialValue':'upper',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'origin:'}}
self.widgetDescr['alpha'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':1, 'type':'float',
'initialValue':1.0, 'wheelPad':2,
'labelCfg':{'text':'alpha'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='plot')
op.append(datatype='None', name='image')
code = """def doit(self, data, cmap, imaspect, interpolation, origin,
alpha, drawAreaDef):
kw={'cmap':cmap,'imaspect':imaspect,'interpolation':interpolation,'origin':origin,'alpha':alpha}
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
self.setDrawArea(kw)
im = self.axes.imshow(data)
#image properties
cmp=cm.get_cmap(cmap)
im.set_cmap(cmp)
im.set_interpolation(interpolation)
im.set_alpha(alpha)
im.origin=origin
self.axes.set_aspect(imaspect)
self.canvas.draw()
self.outputData(plot=self.axes, image=im)
"""
self.setFunction(code)
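
# A minimal sketch (hypothetical helper) of the image styling FigImageNE
# applies after imshow. Assumes this module's top-level cm import.
def _example_imshow(ax, data):
    im = ax.imshow(data)
    im.set_cmap(cm.get_cmap('jet'))
    im.set_interpolation('bilinear')
    im.set_alpha(1.0)
    ax.set_aspect('equal')
    return im
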
#PSEUDO COLOR PLOTS
class PcolorMeshNE(MPLPlottingNode):
"""This class is for making a pseudocolor plot.
input:
arraylistx - array
arraylisty - array
arraylistz - may be a masked array
    optional arguments:
cmap - cm.jet : a cm Colormap instance from matplotlib.cm.
defaults to cm.jet
shading - 'flat' : or 'faceted'. If 'faceted', a black grid is
drawn around each rectangle; if 'flat', edge colors are same as
face colors
alpha - blending value
PCOLORMESH(Z) - make a pseudocolor plot of matrix Z
PCOLORMESH(X, Y, Z) - a pseudo color plot of Z on the matrices X and Y
Return value is a matplotlib.collections.PatchCollection
object
"""
def __init__(self, name='PcolorMesh', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', required=False,name='arraylistx')
ip.append(datatype='None', required=False,name='arraylisty')
ip.append(datatype='None', name='arraylistz',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string',required=False, name='cmap', defaultValue='jet')
ip.append(datatype='string',required=False, name='shading', defaultValue='faceted')
ip.append(datatype='float',required=False, name='alpha', defaultValue=1.)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['cmap'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cmaps,
'fixedChoices':True,
'initialValue':'jet',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'cmap:'}}
self.widgetDescr['shading'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['flat','faceted'],
'fixedChoices':True,
'initialValue':'faceted',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'shading:'}}
self.widgetDescr['alpha'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'initialValue':1.0, 'wheelPad':2,
'labelCfg':{'text':'alpha'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype='None', name='patches')
code = """def doit(self, x, y, z, cmap, shading, alpha, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
#pseudo color plot of Z
Qz=z
cmap = cm.get_cmap(cmap)
    if x is None or y is None:
C=self.axes.pcolormesh(Qz,cmap=cmap,shading=shading,alpha=alpha)
else:
#a pseudo color plot of Z on the matrices X and Y
Qx,Qy=array(x),array(y)
C=self.axes.pcolormesh(Qx,Qy,Qz,cmap=cmap,shading=shading,alpha=alpha)
self.canvas.draw()
self.outputData(axes=self.axes,patches=C)
"""
self.setFunction(code)
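
# A minimal sketch (hypothetical helper) of the two PcolorMeshNE call
# forms: Z alone, or Z on coordinate matrices X and Y. Assumes this
# module's top-level cm and array imports.
def _example_pcolormesh(ax, Z, X=None, Y=None):
    cmap = cm.get_cmap('jet')
    if X is None or Y is None:
        return ax.pcolormesh(Z, cmap=cmap, alpha=1.0)
    return ax.pcolormesh(array(X), array(Y), Z, cmap=cmap, alpha=1.0)
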
class PcolorNE(MPLPlottingNode):
"""This class is for making a pseudocolor plot.
input:
arraylistx - may be a array
arraylisty - may be a array
arraylistz - may be a masked array
    optional arguments:
cmap - cm.jet : a cm Colormap instance from matplotlib.cm.
defaults to cm.jet
shading - 'flat' : or 'faceted'. If 'faceted', a black grid is
drawn around each rectangle; if 'flat', edge colors are same as
face colors
alpha - blending value
PCOLOR(Z) - make a pseudocolor plot of matrix Z
PCOLOR(X, Y, Z) - a pseudo color plot of Z on the matrices X and Y
Return value is a matplotlib.collections.PatchCollection
object
"""
def __init__(self, name='Pcolor', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', required=False,name='arraylistx')
ip.append(datatype='None', required=False,name='arraylisty')
ip.append(datatype='None', name='arraylistz',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string',required=False, name='cmap', defaultValue='jet')
ip.append(datatype='string',required=False, name='shading', defaultValue='faceted')
ip.append(datatype='float',required=False, name='alpha', defaultValue=1.)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['cmap'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cmaps,
'fixedChoices':True,
'initialValue':'jet',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'cmap:'}}
self.widgetDescr['shading'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['flat','faceted'],
'fixedChoices':True,
'initialValue':'faceted',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'shading:'}}
self.widgetDescr['alpha'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'initialValue':1.0, 'wheelPad':2,
'labelCfg':{'text':'alpha'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype='None', name='patches')
code = """def doit(self, x, y, z, cmap, shading, alpha, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
#pseudo color plot of Z
Qz=z
cmap = cm.get_cmap(cmap)
    if x is None or y is None:
C=self.axes.pcolor(Qz,cmap=cmap,shading=shading,alpha=alpha)
else:
#a pseudo color plot of Z on the matrices X and Y
Qx,Qy=array(x),array(y)
C=self.axes.pcolor(Qx,Qy,Qz,cmap=cmap,shading=shading,alpha=alpha)
self.canvas.draw()
self.outputData(axes=self.axes,patches=C)
"""
self.setFunction(code)
class PcolorClassicNE(MPLPlottingNode):
"""This class is for making a pseudocolor plot.
input:
arraylistx - array
arraylisty - array
arraylistz - may be a masked array
    optional arguments:
cmap - cm.jet : a cm Colormap instance from matplotlib.cm.
defaults to cm.jet
shading - 'flat' : or 'faceted'. If 'faceted', a black grid is
drawn around each rectangle; if 'flat', edge colors are same as
face colors
alpha - blending value
PCOLOR_CLASSIC(Z) - make a pseudocolor plot of matrix Z
PCOLOR_CLASSIC(X, Y, Z) - a pseudo color plot of Z on the matrices X and Y
Return value is a matplotlib.collections.PatchCollection
object
"""
def __init__(self, name='PcolorClassic', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', required=False,name='arraylistx')
ip.append(datatype='None', required=False,name='arraylisty')
ip.append(datatype='None', name='arraylistz',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string',required=False, name='cmap', defaultValue='jet')
ip.append(datatype='string',required=False, name='shading', defaultValue='faceted')
ip.append(datatype='float',required=False, name='alpha', defaultValue=.75)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['cmap'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cmaps,
'fixedChoices':True,
'initialValue':'jet',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'cmap:'}}
self.widgetDescr['shading'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['flat','faceted'],
'fixedChoices':True,
'initialValue':'faceted',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'shading:'}}
self.widgetDescr['alpha'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'initialValue':0.75, 'wheelPad':2,
'labelCfg':{'text':'alpha'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype='None', name='patches')
code = """def doit(self, x, y, z, cmap, shading, alpha, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
#pseudo color plot of Z
Qz=z
cmap = cm.get_cmap(cmap)
    if x is None or y is None:
C=self.axes.pcolor_classic(Qz,cmap=cmap,shading=shading,alpha=alpha)
else:
#a pseudo color plot of Z on the matrices X and Y
Qx,Qy=array(x),array(y)
C=self.axes.pcolor_classic(Qx,Qy,Qz,cmap=cmap,shading=shading,alpha=alpha)
self.canvas.draw()
self.outputData(axes=self.axes,patches=C)
"""
self.setFunction(code)
class ContourNE(MPLPlottingNode):
"""contour and contourf draw contour lines and filled contours.
input:
arraylistx :array
arraylisty :array
arraylistz :array
    optional arguments:
length_colors : no of colors required to color contour
cmap :a cm Colormap instance from matplotlib.cm.
origin :'upper'|'lower'|'image'|None.
linewidth :linewidth
hold:
    contour(Z) makes a contour plot of an array Z
    contour(X,Y,Z) X,Y specify the (x,y) coordinates of the surface
"""
def __init__(self, name='Contour', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', required=False, name='arraylistx')
ip.append(datatype='None', required=False,name='arraylisty')
ip.append(datatype='None', name='arraylistz',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='string', required=False, name='contour', defaultValue='default')
ip.append(datatype='int', required=False, name='length_colors')
ip.append(datatype='string', required=False, name='cmap', defaultValue='jet')
ip.append(datatype='string', required=False, name='colors', defaultValue='black')
ip.append(datatype='string', required=False, name='origin', defaultValue='upper')
ip.append(datatype='int', required=False, name='linewidth', defaultValue=1)
ip.append(datatype='boolean', required=False, name='hold', defaultValue=0)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['contour'] = {
'class':'NEComboBox', 'master':'node',
'choices':['default','filledcontour'],
'fixedChoices':True,
'initialValue':'default',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'contour:'}}
self.widgetDescr['length_colors'] = {
'class':'NEEntry', 'master':'node',
'labelCfg':{'text':'no. of colors:'},
'initialValue':10}
self.widgetDescr['cmap'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cmaps,
'fixedChoices':True,
'initialValue':'jet',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'cmap:'}}
self.widgetDescr['colors'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cnames.keys(),
'fixedChoices':True,
'initialValue':'black',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'colors:'}}
self.widgetDescr['linewidth'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':4, 'type':'int',
'initialValue':1, 'wheelPad':2,
'labelCfg':{'text':'linewidth'} }
self.widgetDescr['origin'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['upper','lower','image',None],
'fixedChoices':True,
'initialValue':'upper',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'origin:'}}
self.widgetDescr['hold'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'hold'},
'initialValue':0,}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype='None', name='contour')
code = """def doit(self, arraylistx, arraylisty, arraylistz, contour,
length_colors, cmapval, colors, origin, linewidth, hold, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
axes_list=self.axes.figure.axes
if len(axes_list):
for i in axes_list:
if not isinstance(i,Subplot):
self.axes.figure.delaxes(i)
import numpy
#Z=10.0*(numpy.array(z2) - numpy.array(z1))
Z=arraylistz
if length_colors:
cmap = cm.get_cmap(cmapval,int(length_colors))
else:
cmap=eval("cm.%s" %cmapval)
    if arraylistx is None or arraylisty is None:
#contour plot of an array Z
if contour=='default':
CS=self.axes.contour(Z,cmap=cmap,linewidths=linewidth,origin=origin,hold=hold)
self.axes.clabel(CS,inline=1)
else:
CS=self.axes.contourf(numpy.array(Z),cmap=cmap,origin=origin)
else:
#(x,y) coordinates of the surface
X=numpy.array(arraylistx)
Y=numpy.array(arraylisty)
if contour=='default':
CS=self.axes.contour(X,Y,Z,cmap=cmap,linewidths=linewidth,origin=origin,hold=hold)
self.axes.clabel(CS,inline=1)
else:
CS=self.axes.contourf(X,Y,numpy.array(Z),cmap=cmap,origin=origin)
self.canvas.draw()
self.outputData(axes=self.axes,contour=CS)
"""
self.setFunction(code)
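
# A minimal sketch (hypothetical helper) of the discrete-colormap trick
# ContourNE uses: on the matplotlib versions this module targets,
# cm.get_cmap(name, N) quantizes the map to N colors, and clabel
# annotates unfilled contour lines in place.
def _example_contour(ax, Z, n_colors=10, filled=False):
    cmap = cm.get_cmap('jet', int(n_colors))
    if filled:
        return ax.contourf(Z, cmap=cmap, origin='upper')
    CS = ax.contour(Z, cmap=cmap, linewidths=1, origin='upper')
    ax.clabel(CS, inline=1)
    return CS
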
class SpecgramNE(MPLPlottingNode):
"""plots a spectrogram of data in arraylistx.
    NFFT - Data are split into NFFT length segments
Fs - samplingFrequency
cmap - colormap
nOverlap- the amount of overlap of each segment.
Returns im
im is a matplotlib.image.AxesImage
"""
def __init__(self, name='Specgram', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list', name='arraylistx',singleConnection='auto',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='int', name='NFFT', defaultValue=256)
ip.append(datatype='int', name='Fs', defaultValue=2)
ip.append(datatype='string', required=False, name='cmap')
ip.append(datatype='int', name='nOverlap', defaultValue=128)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['NFFT'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'NFFT (powOf 2):'},'width':10,
'type':'int',
'initialValue':256}
self.widgetDescr['Fs'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':2,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'Fs'} }
self.widgetDescr['cmap'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cmaps,
'fixedChoices':True,
'initialValue':'jet',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'cmap:'}}
self.widgetDescr['nOverlap'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'nOverlap:'},'width':10,
'type':'int','labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'initialValue':0}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype=None,name='image')
code="""def doit(self, x, NFFT, Fs, cmapval, nOverlap, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
cmap=eval("cm.%s" %cmapval)
Pxx, freqs, bins, im=self.axes.specgram(x, NFFT=int(NFFT), Fs=Fs,cmap=cmap,noverlap=int(nOverlap))
self.canvas.draw()
self.outputData(axes=self.axes,image=im)
"""
self.setFunction(code)
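
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the pyplot.specgram call wrapped by SpecgramNE; the
# sample signal and function name are assumptions for illustration only.
def _demo_specgram():
    import numpy
    from matplotlib import pyplot
    t = numpy.arange(0.0, 2.0, 1.0 / 400)        # 2 s sampled at 400 Hz
    x = numpy.sin(2 * numpy.pi * 100.0 * t)      # a 100 Hz tone
    Pxx, freqs, bins, im = pyplot.specgram(x, NFFT=256, Fs=400, noverlap=128)
    pyplot.show()
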
class CSDNE(MPLPlottingNode):
"""plots a cross spectral density of data in arraylistx,arraylisty.
NFFT - Data are split into NFFT length segements
Fs - samplingFrequency
nOverlap- the amount of overlap of each segment.
"""
def __init__(self, name='CSD', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype=None, name='arraylistx',singleConnection='auto',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype=None, name='arraylisty',singleConnection='auto',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='int', name='NFFT', defaultValue=256)
ip.append(datatype='int', name='Fs', defaultValue=2)
ip.append(datatype='int', name='nOverlap', defaultValue=128)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['NFFT'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'NFFT (powOf 2):'},'width':10,
'type':'int',
'initialValue':256}
self.widgetDescr['Fs'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':2,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'Fs'} }
self.widgetDescr['nOverlap'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'nOverlap:'},'width':10,
'type':'int','labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'initialValue':0}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
code="""def doit(self, arraylistx, arraylisty, NFFT, Fs, nOverlap,
drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
Pxx, freqs=self.axes.csd(arraylistx,arraylisty, NFFT=int(NFFT), Fs=Fs,noverlap=int(nOverlap))
self.canvas.draw()
self.outputData(axes=self.axes)
"""
self.setFunction(code)
class PSDNE(MPLPlottingNode):
"""plots a cross spectral density of data in arraylistx.
NFFT - Data are split into NFFT length segements
Fs - samplingFrequency
nOverlap- the amount of overlap of each segment.
"""
def __init__(self, name='PSD', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype=None, name='arraylistx',singleConnection='auto',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='int', required=False,name='NFFT', defaultValue=256)
ip.append(datatype='int', required=False,name='Fs', defaultValue=2)
ip.append(datatype='int', required=False,name='nOverlap', defaultValue=0)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['NFFT'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'NFFT (powOf 2):'},'width':10,
'type':'int',
'initialValue':256}
self.widgetDescr['Fs'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':2,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'Fs'}}
self.widgetDescr['nOverlap'] = {
'class':'NEEntry', 'master':'ParamPanel',
'labelCfg':{'text':'nOverlap:'},'width':10,
'type':'int','labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'initialValue':0}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
code="""def doit(self, x, NFFT, Fs, nOverlap, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
self.axes.psd(x,NFFT=int(NFFT), Fs=Fs,noverlap=int(nOverlap))
self.canvas.draw()
self.outputData(axes=self.axes)
"""
self.setFunction(code)
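
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the psd/csd calls wrapped by PSDNE and CSDNE; the
# noisy test signals and function name are assumptions for illustration only.
def _demo_psd_csd():
    import numpy
    from matplotlib import pyplot
    t = numpy.arange(0.0, 4.0, 1.0 / 200)
    x = numpy.sin(2 * numpy.pi * 10.0 * t) + numpy.random.randn(len(t))
    y = numpy.sin(2 * numpy.pi * 10.0 * t + 0.5) + numpy.random.randn(len(t))
    pyplot.subplot(2, 1, 1)
    pyplot.psd(x, NFFT=256, Fs=200, noverlap=128)       # power spectrum of x
    pyplot.subplot(2, 1, 2)
    pyplot.csd(x, y, NFFT=256, Fs=200, noverlap=128)    # cross spectrum x/y
    pyplot.show()
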
class LogCurveNE(MPLPlottingNode):
"""This node is to make a loglog plot with log scaling on the x and y axis.
input:
x - list
y - list
basex - base of the x logarithm
basey - base of the y logarithm
"""
def __init__(self, name='LogCurve', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', name='y',beforeDisconnect=self.codeBeforeDisconnect)
        ip.append(datatype='string',required=False, name='logCurve', defaultValue='logbasex')
ip.append(datatype='float',required=False, name='basex', defaultValue=10)
ip.append(datatype='float',required=False, name='basey', defaultValue=10)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['logCurve'] = {
'class':'NEComboBox', 'master':'node',
'choices':['logbasex','logbasey'],
'fixedChoices':True,
'initialValue':'logbasex',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'logCurve:'}}
self.widgetDescr['basex'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':10,
'labelCfg':{'text':'basex'} }
self.widgetDescr['basey'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':10,
'labelCfg':{'text':'basey'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
code="""def doit(self, x, y, logCurve, basex, basey, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
if logCurve=="logbasex":
log_curve=self.axes.loglog(x,y,basex=basex)
else:
log_curve=self.axes.loglog( x,y,basey=basey)
self.canvas.draw()
self.outputData(axes=self.axes)
"""
self.setFunction(code)
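
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the loglog call wrapped by LogCurveNE.  The basex/
# basey keywords match the matplotlib versions this module targets (newer
# releases renamed them to `base`); the sample data is an assumption.
def _demo_loglog():
    import numpy
    from matplotlib import pyplot
    x = numpy.logspace(0.0, 3.0, 50)             # 1 .. 1000
    pyplot.loglog(x, x ** 2, basex=10, basey=10)
    pyplot.show()
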
class SemilogxNE(MPLPlottingNode):
"""This node is to make a semilog plot with log scaling on the xaxis.
input:
x - list
basex - base of the x logarithm
"""
def __init__(self, name='Semilogx', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='float',required=False, name='basex', defaultValue=10)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['basex'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':10,
'labelCfg':{'text':'basex'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
code="""def doit(self, x, y, basex, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
self.axes.semilogx(x,y,basex=basex)
self.canvas.draw()
self.outputData(axes=self.axes)
"""
self.setFunction(code)
class SemilogyNE(MPLPlottingNode):
"""This node is to make a semilog plot with log scaling on the y axis.
input:
y - list
basey - base of the y logarithm
"""
def __init__(self, name='Semilogy', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='float',required=False, name='basey', defaultValue=10)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['basey'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':10,
'labelCfg':{'text':'basey'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
code="""def doit(self, x, y, basey, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
self.axes.semilogy(x,y,basey=basey)
self.canvas.draw()
self.outputData(axes=self.axes)
"""
self.setFunction(code)
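
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the semilogx/semilogy calls wrapped by SemilogxNE and
# SemilogyNE above; keyword names and sample data are assumptions as before.
def _demo_semilog():
    import numpy
    from matplotlib import pyplot
    x = numpy.arange(1.0, 100.0)
    pyplot.subplot(2, 1, 1)
    pyplot.semilogx(x, numpy.log(x), basex=10)           # log-scaled x axis
    pyplot.subplot(2, 1, 2)
    pyplot.semilogy(x, numpy.exp(x / 20.0), basey=10)    # log-scaled y axis
    pyplot.show()
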
class BoxPlotNE(MPLPlottingNode):
""" To plot a box and whisker plot for each column of x.
The box extends from the lower to upper quartile values
of the data, with a line at the median. The whiskers
extend from the box to show the range of the data. Flier
points are those past the end of the whiskers.
input:
x - Numeric array
    optional arguments:
        notch - notch = 0 (default) produces a rectangular box plot.
                notch = 1 will produce a notched box plot
        sym - (default 'b+') is the default symbol for flier points.
              Enter an empty string ('') if you don't want to show fliers.
        vert - vert = 1 (default) makes the boxes vertical.
               vert = 0 makes horizontal boxes. This seems goofy, but
               that's how Matlab did it.
whis - (default 1.5) defines the length of the whiskers as
a function of the inner quartile range. They extend to the
most extreme data point within ( whis*(75%-25%) ) data range.
positions- (default 1,2,...,n) sets the horizontal positions of
the boxes. The ticks and limits are automatically set to match
the positions.
widths - either a scalar or a vector and sets the width of
each box. The default is 0.5, or 0.15*(distance between extreme
positions) if that is smaller.
Returns a list of the lines added
"""
def __init__(self, name='BoxPlot', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list', name='x',singleConnection='auto',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='list', required=False, name='positions')
ip.append(datatype=None, required=False, name='widths', defaultValue=.15)
ip.append(datatype='boolean', required=False, name='notch', defaultValue=0)
ip.append(datatype='boolean', required=False, name='vert', defaultValue=0)
ip.append(datatype='string', required=False, name='color', defaultValue='b')
ip.append(datatype='string', required=False, name='linestyle', defaultValue='-')
ip.append(datatype='float', required=False, name='whis', defaultValue=1.5)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['notch'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'notch:'},'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'initialValue':0,}
self.widgetDescr['vert'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'vert:'},'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'initialValue':0,}
self.widgetDescr['color'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['linestyle'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':get_styles().values(),
'fixedChoices':True,
'initialValue':'-',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'linestyle:'}}
self.widgetDescr['whis'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':1.5,'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'whis'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype=None, name='lines')
code="""def doit(self, x, positions, widths, notch, vert, color, linestyle,
whis, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
sym=color+linestyle
ll=self.axes.boxplot(x,notch=notch, sym=sym, vert=vert, whis=whis,positions=positions,widths=widths)
self.canvas.draw()
self.outputData(axes=self.axes,lines=ll) """
self.setFunction(code)
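
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the boxplot call wrapped by BoxPlotNE; the four
# random data columns and function name are assumptions for illustration.
def _demo_boxplot():
    import numpy
    from matplotlib import pyplot
    data = [numpy.random.randn(100) + i for i in range(4)]   # four columns
    pyplot.boxplot(data, notch=0, sym='b+', vert=1, whis=1.5)
    pyplot.show()
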
class BarNE(MPLPlottingNode):
"""Plots a horizontal bar plot with rectangles bounded by
left, left+width, bottom, bottom+height (left, right, bottom and top edges)
bottom, width, height, and left can be either scalars or sequences
input:
height - the heights (thicknesses) of the bars
left - the x coordinates of the left edges of the bars
Optional arguments:
bottom - can be either scalars or sequences
width - can be either scalars or sequences
color - specifies the colors of the bars
edgecolor - specifies the colors of the bar edges
xerr - if not None, will be used to generate errorbars
on the bar chart
yerr - if not None, will be used to generate errorbars
on the bar chart
ecolor - specifies the color of any errorbar
capsize - determines the length in points of the error bar caps
align - 'edge' | 'center'
        'edge' aligns the bars by their left edges in left, while
        'center' interprets these values as the x coordinates of the bar centers.
Return value is a list of Rectangle patch instances
"""
def __init__(self, name='Bar', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', name='left',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', name='height',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', required=False,name='bottom', defaultValue=0)
ip.append(datatype='None', required=False,name='width', defaultValue=.8)
ip.append(datatype='None', required=False, name='xerr')
ip.append(datatype='None', required=False, name='yerr')
ip.append(datatype='string', required=False, name='color', defaultValue='b')
ip.append(datatype='string', required=False, name='edgecolor', defaultValue='b')
ip.append(datatype='string', required=False, name='ecolor', defaultValue='b')
ip.append(datatype='int', required=False, name='capsize', defaultValue=3)
        ip.append(datatype='string', required=False, name='align', defaultValue='edge')
        ip.append(datatype='string', required=False, name='orientation', defaultValue='vertical')
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['align'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['edge','center'],
'fixedChoices':True,
'initialValue':'edge',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'align:'}}
self.widgetDescr['orientation'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['vertical','horizontal'],
'fixedChoices':True,
'initialValue':'vertical',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'orientation:'}}
self.widgetDescr['edgecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'edgecolor:'}}
self.widgetDescr['ecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'ecolor:'}}
self.widgetDescr['color'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['capsize'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':3,'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'capsize'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype=None, name='patches')
code="""def doit(self, left, height, bottom, width, xerr, yerr, color,
edgecolor, ecolor, capsize, align, orientation, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
patches=self.axes.bar(left,height,bottom=bottom, width=width,
color=color, edgecolor=edgecolor, xerr=xerr, yerr=yerr, ecolor=ecolor, capsize=capsize,
align=align,orientation=orientation)
self.canvas.draw()
self.outputData(axes=self.axes,patches=patches) """
self.setFunction(code)
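
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the bar call wrapped by BarNE, including error bars;
# sample data and function name are assumptions for illustration only.
def _demo_bar():
    import numpy
    from matplotlib import pyplot
    left = numpy.arange(5)                        # x positions of the bars
    height = [3, 5, 2, 6, 4]
    pyplot.bar(left, height, width=0.8, color='b', edgecolor='b',
               yerr=[0.5] * 5, ecolor='r', capsize=3, align='edge')
    pyplot.show()
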
class BarHNE(MPLPlottingNode):
"""Plots a horizontal bar plot with rectangles bounded by
left, left+width, bottom, bottom+height (left, right, bottom and top edges)
bottom, width, height, and left can be either scalars or sequences
input:
bottom - can be either scalars or sequences
width - can be either scalars or sequences
Optional arguments:
height - the heights (thicknesses) of the bars
left - the x coordinates of the left edges of the bars
color - specifies the colors of the bars
edgecolor - specifies the colors of the bar edges
xerr - if not None, will be used to generate errorbars
on the bar chart
yerr - if not None, will be used to generate errorbars
on the bar chart
ecolor - specifies the color of any errorbar
capsize - determines the length in points of the error bar caps
align - 'edge' | 'center'
'edge' aligns the horizontal bars by their bottom edges in bottom, while
'center' interprets these values as the y coordinates of the bar centers.
Return value is a list of Rectangle patch instances
"""
def __init__(self, name='BarH', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', name='bottom',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', name='width',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', required=False, name='height', defaultValue=.8)
ip.append(datatype='None', required=False, name='left', defaultValue=0)
ip.append(datatype='None', required=False, name='xerr')
ip.append(datatype='None', required=False, name='yerr')
ip.append(datatype='string', required=False, name='color', defaultValue='b')
ip.append(datatype='string', required=False, name='edgecolor', defaultValue='b')
ip.append(datatype='string', required=False, name='ecolor', defaultValue='b')
ip.append(datatype='int', required=False, name='capsize', defaultValue=3)
        ip.append(datatype='string', required=False, name='align', defaultValue='edge')
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['align'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['edge','center'],
'fixedChoices':True,
'initialValue':'edge',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'align:'}}
self.widgetDescr['edgecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'edgecolor:'}}
self.widgetDescr['ecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'ecolor:'}}
self.widgetDescr['color'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['capsize'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':3,'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'capsize'} }
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
op.append(datatype=None, name='patches')
code="""def doit(self, bottom, width, height, left, xerr, yerr, color,
edgecolor, ecolor, capsize, align, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
patches=self.axes.barh(bottom, width, height=height, left=left,
color=color, edgecolor=edgecolor, xerr=xerr, yerr=yerr, ecolor=ecolor, capsize=capsize,
align=align)
self.canvas.draw()
self.outputData(axes=self.axes,patches=patches) """
self.setFunction(code)
class QuiverNE(MPLPlottingNode):
"""Makes a vector plot (U, V) with arrows on a grid (X, Y)
If X and Y are not specified, U and V must be 2D arrays. Equally spaced
X and Y grids are then generated using the meshgrid command.
    color - arrow color
    S - used to scale the vectors. Use S=0 to disable automatic scaling.
        If S!=0, vectors are scaled to fit within the grid and then are
        multiplied by S.
    pivot - 'mid', 'tip' etc.
    units - 'inches', 'width', 'x', 'y'
    width - a scalar that controls the width of the arrows
"""
def __init__(self, name='Quiver', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', name='u',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', name='v',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', required=False,name='x')
ip.append(datatype='None', required=False,name='y')
ip.append(datatype='None', required=False, name='color')
ip.append(datatype='float', required=False, name='S', defaultValue=.2)
ip.append(datatype='float', required=False, name='width', defaultValue=1.)
ip.append(datatype='string', required=False, name='pivot', defaultValue='tip')
ip.append(datatype='string', required=False, name='units', defaultValue='inches')
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['color'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'color:'}}
self.widgetDescr['S'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1,
'type':'float','precision':3,
'wheelPad':2, 'initialValue':0.20,'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'S'} }
self.widgetDescr['width'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1,
'type':'float','precision':3,
'wheelPad':2, 'initialValue':0.002,'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'width'} }
self.widgetDescr['pivot'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['tip','mid'],
'fixedChoices':True,
'initialValue':'tip',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'pivot:'}}
self.widgetDescr['units'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['inches','width','x','y'],
'fixedChoices':True,
            'initialValue':'inches',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'units:'}}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
code="""def doit(self, u, v, x, y, color, S, width, pivot, units, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
    if x is not None:
self.axes.quiver(x,y,u,v,S,pivot=pivot,color=color,width=width,units=units)
else:
self.axes.quiver(u,v,S,color=color,width=width,units=units)
self.canvas.draw()
self.outputData(axes=self.axes) """
self.setFunction(code)
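
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the quiver call wrapped by QuiverNE (omitting the S
# scaling argument, whose meaning follows the older quiver API this node
# targets); the rotational test field is an assumption for illustration.
def _demo_quiver():
    import numpy
    from matplotlib import pyplot
    x = numpy.linspace(-2.0, 2.0, 15)
    y = numpy.linspace(-2.0, 2.0, 15)
    X, Y = numpy.meshgrid(x, y)
    U, V = -Y, X                                  # a simple rotational field
    pyplot.quiver(X, Y, U, V, pivot='tip', color='b', units='inches')
    pyplot.show()
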
class ErrorBarNE(MPLPlottingNode):
"""Plot x versus y with error deltas in yerr and xerr.
Vertical errorbars are plotted if yerr is not None
Horizontal errorbars are plotted if xerr is not None.
input:
x - scalar or sequence of vectors
y - scalar or sequence of vectors
xerr - scalar or sequence of vectors, plots a single error bar at x, y.,default is None
yerr - scalar or sequence of vectors, plots a single error bar at x, y.,default is None
    optional arguments:
        controlmarkers - controls errorbar markers (with props: markerfacecolor,
                         markeredgecolor, markersize and markeredgewidth)
fmt - plot format symbol for y. if fmt is None, just
plot the errorbars with no line symbols. This can be useful
for creating a bar plot with errorbars
ecolor - a matplotlib color arg which gives the color the
errorbar lines; if None, use the marker color.
capsize - the size of the error bar caps in points
barsabove- if True, will plot the errorbars above the plot symbols
default is below
"""
def __init__(self, name='ErrorBar', **kw):
kw['name'] = name
apply( MPLPlottingNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', name='x',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', name='y',beforeDisconnect=self.codeBeforeDisconnect)
ip.append(datatype='None', required=False, name='xerr')
ip.append(datatype='None', required=False, name='yerr')
        ip.append(datatype='string', required=False, name='format', defaultValue='-')
ip.append(datatype='string', required=False, name='ecolor', defaultValue='b')
ip.append(datatype='int', required=False, name='capsize', defaultValue=3)
ip.append(datatype='boolean', required=False, name='barsabove', defaultValue=0)
ip.append(datatype='boolean', required=False, name='controlmarkers', defaultValue=0)
ip.append(datatype='MPLDrawArea', required=False, name='drawAreaDef',singleConnection=False)
self.widgetDescr['format'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':get_styles().values(),
'fixedChoices':True,
'initialValue':'-',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'format:'}}
self.widgetDescr['ecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':colors.values(),
'fixedChoices':True,
'initialValue':'b',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'ecolor:'}}
self.widgetDescr['capsize'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':1, 'type':'float',
'wheelPad':2, 'initialValue':3,'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'capsize'} }
self.widgetDescr['barsabove'] = {
'class':'NECheckButton', 'master':'ParamPanel',
'labelCfg':{'text':'barsabove:'},'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'initialValue':0,}
self.widgetDescr['controlmarkers'] = {
'class':'NECheckButton', 'master':'node',
            'labelCfg':{'text':'controlmarkers:'},'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'initialValue':0,}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
code="""def doit(self, x, y, xerr, yerr, format, ecolor, capsize,
barsabove, controlmarkers, drawAreaDef):
self.axes.clear()
self.setDrawAreaDef(drawAreaDef)
fmt=ecolor+format
if controlmarkers == 0:
self.axes.errorbar(x,y,xerr=xerr,yerr=yerr,fmt=fmt,ecolor=ecolor,capsize=capsize,barsabove=barsabove)
else:
def set_markerparams(dAD):
if dAD.has_key('marker'):
marker = dAD['marker']
else:
marker = 'solid'
if dAD.has_key('markerfacecolor'):
markerfacecolor = dAD['markerfacecolor']
else:
markerfacecolor = 'blue'
if dAD.has_key('markeredgecolor'):
markeredgecolor = dAD['markeredgecolor']
else:
markeredgecolor = 'blue'
if dAD.has_key('markersize'):
markersize = dAD['markersize']
else:
markersize = 6
        if dAD.has_key('markeredgewidth'):
            markeredgewidth = dAD['markeredgewidth']
        else:
            markeredgewidth = 0.5
        return marker,markerfacecolor,markeredgecolor,markersize,markeredgewidth
if drawAreaDef:
markerdict={}
if len(drawAreaDef) == 1 and type(drawAreaDef[0])==types.DictType:
for d in drawAreaDef[0].keys():
if d[:6]=="marker":
markerdict[d]=drawAreaDef[0][d]
if len(drawAreaDef)>1:
for dAD in drawAreaDef:
if type(dAD) == types.DictType:
for d in dAD.keys():
if d[:6]=="marker":
markerdict[d]=dAD[d]
if markerdict!={}:
            marker,markerfacecolor,markeredgecolor,markersize,markeredgewidth=set_markerparams(markerdict)
            self.axes.errorbar(x, y, xerr=xerr,yerr=yerr, marker=marker,
                mfc=markerfacecolor, mec=markeredgecolor, ms=markersize, mew=markeredgewidth)
self.canvas.draw()
self.outputData(axes=self.axes) """
self.setFunction(code)
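
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the two errorbar call styles used by ErrorBarNE's
# doit(): a fmt string, or explicit marker properties as in the
# controlmarkers branch.  Sample data and function name are assumptions.
def _demo_errorbar():
    import numpy
    from matplotlib import pyplot
    x = numpy.arange(0.0, 2.0, 0.2)
    y = numpy.exp(-x)
    pyplot.errorbar(x, y, xerr=0.05, yerr=0.1, fmt='b-', ecolor='b', capsize=3)
    pyplot.errorbar(x, y + 0.5, yerr=0.1, marker='s', mfc='blue',
                    mec='black', ms=6, mew=0.5)   # explicit marker control
    pyplot.show()
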
###########################################################################
#
# Nodes generating data for demos
#
###########################################################################
class RandNormDist(NetworkNode):
"""
Outputs values describing a randomized normal distribution
Input:
        mu - mean of the distribution
        sigma - standard deviation
        npts - number of value points
    Output:
        data: list of values
        mu - the input mean, passed through
        sigma - the input standard deviation, passed through
"""
def __init__(self, name='RandNormDist', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='float', name='mu')
ip.append(datatype='float', name='sigma')
ip.append(datatype='int', name='npts')
self.widgetDescr['mu'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':100,
'labelCfg':{'text':'mu'} }
self.widgetDescr['sigma'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':15,
'labelCfg':{'text':'sigma'} }
self.widgetDescr['npts'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':1000, 'type':'int',
'wheelPad':2, 'initialValue':10000,
'labelCfg':{'text':'nb. points'} }
op = self.outputPortsDescr
op.append(datatype='list', name='data')
op.append(datatype='float', name='mu')
op.append(datatype='float', name='sigma')
code = """def doit(self, mu, sigma, npts):
from numpy.oldnumeric.mlab import randn
self.outputData( data=mu+sigma*randn(npts), mu=mu, sigma=sigma )
"""
self.setFunction(code)
class SinFunc(NetworkNode):
"""
Outputs values describing a sinusoidal function.
Input:
        x0 - first x value
        x1 - last x value
        step - step size
    Output:
        x: list of x values
        y: list of y values
"""
def __init__(self, name='SinFunc', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='float', name='x0')
ip.append(datatype='float', name='x1')
ip.append(datatype='float', name='step')
self.widgetDescr['x0'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':10., 'type':'float',
'wheelPad':2, 'initialValue':0.,
'labelCfg':{'text':'x0'} }
self.widgetDescr['x1'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':10., 'type':'float',
'wheelPad':2, 'initialValue':3.,
'labelCfg':{'text':'x1'} }
self.widgetDescr['step'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':10., 'type':'float',
'wheelPad':2, 'initialValue':0.01,
            'labelCfg':{'text':'step'} }
op = self.outputPortsDescr
op.append(datatype='list', name='x')
op.append(datatype='list', name='y')
code = """def doit(self, x0, x1, step):
import numpy
x = numpy.arange(x0, x1, step)
y = numpy.sin(2*numpy.pi*x)
self.outputData( x=x, y=y)
"""
self.setFunction(code)
class SinFuncSerie(NetworkNode):
"""
    Outputs an array of y values sampled from phase-shifted sinusoidal functions
    Input:
        none
    Output:
        X: array of y values
"""
def __init__(self, name='SinFuncSerie', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
## ip = self.inputPortsDescr
## ip.append(datatype='float', name='x0')
## ip.append(datatype='float', name='x1')
## ip.append(datatype='float', name='step')
## self.widgetDescr['x0'] = {
## 'class':'NEThumbWheel','master':'node',
## 'width':75, 'height':21, 'oneTurn':10., 'type':'float',
## 'wheelPad':2, 'initialValue':0.,
## 'labelCfg':{'text':'x0'} }
## self.widgetDescr['x1'] = {
## 'class':'NEThumbWheel','master':'node',
## 'width':75, 'height':21, 'oneTurn':10., 'type':'float',
## 'wheelPad':2, 'initialValue':3.,
## 'labelCfg':{'text':'x1'} }
##
## self.widgetDescr['step'] = {
## 'class':'NEThumbWheel','master':'ParamPanel',
## 'width':75, 'height':21, 'oneTurn':10., 'type':'float',
## 'wheelPad':2, 'initialValue':0.01,
## 'labelCfg':{'text':'nb. points'} }
op = self.outputPortsDescr
op.append(datatype='list', name='X')
code = """def doit(self):
import numpy
ind = numpy.arange(60)
x_tmp=[]
for i in range(100):
x_tmp.append(numpy.sin((ind+i)*numpy.pi/15.0))
X=numpy.array(x_tmp)
self.outputData(X=X)
"""
self.setFunction(code)
class MatPlotLibOptions(NetworkNode):
"""This node allows to set various rendering Options.
Choose a category from,
Axes,Font,Figure,Text,Tick,Grid,Legend.
if "Grid" choosen,allows you to set following properties
gridOn/Off,gridlinewidth,gridlinestyle,gridcolor,whichgrid major/minor.
To ignore any property rightclick on property(sets to default value when
ignored)
"""
def __init__(self, name='Set Matplotlib Options', canvas=None, **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(name='matplotlibOptions', datatype='dict')
self.widgetDescr['matplotlibOptions'] = {
'class':'NEMatPlotLibOptions', 'lockedOnPort':True}
op = self.outputPortsDescr
op.append(datatype='dict', name='matplotlibOptions')
code = """def doit(self, matplotlibOptions):
self.outputData(matplotlibOptions=matplotlibOptions)
"""
self.setFunction(code)
class NEMatPlotLibOptions(PortWidget):
configOpts = PortWidget.configOpts.copy()
ownConfigOpts = {}
ownConfigOpts['initialValue'] = {
'defaultValue':{}, 'type':'dict',
}
configOpts.update(ownConfigOpts)
def __init__(self, port, **kw):
# call base class constructor
apply( PortWidget.__init__, (self, port), kw)
colors=cnames
self.styles=lineStyles
self.markers = markers
from DejaVu import viewerConst
self.main_props=['Axes','Font','Text','Figure','Tick','Grid','Legend']#,'MathText',
self.booleanProps =['axisbelow','gridOn','figpatch_antialiased','text.usetex','text.dvipnghack',
'hold','legend.isaxes','legend.shadow','mathtext.mathtext2']#visible
if mplversion[0]==0 and mplversion[2]<=3:
self.twProps=['linewidth','xtick.labelsize','xtick.labelrotation','ytick.labelsize',
'ytick.labelrotation','gridlinewidth','figpatch_linewidth','markeredgewidth',
#'zoomx','zoomy',
'legend.numpoints','legend.pad','legend.markerscale','legend.handlelen',
'legend.axespad','legend.labelsep','legend.handletextsep','xtick.major.size',
'ytick.major.size','xtick.minor.size','ytick.minor.size','xtick.major.pad',
'ytick.major.pad','xtick.minor.pad','ytick.minor.pad','font.size']
elif mplversion[0] > 0:
self.twProps=['linewidth','xtick.labelsize','xtick.labelrotation','ytick.labelsize',
'ytick.labelrotation','gridlinewidth','figpatch_linewidth','markeredgewidth',
#'zoomx','zoomy',
'legend.numpoints','legend.borderpad','legend.markerscale','legend.handlelength',
'legend.borderaxespad','legend.labelspacing','legend.handletextpad','xtick.major.size',
'ytick.major.size','xtick.minor.size','ytick.minor.size','xtick.major.pad',
'ytick.major.pad','xtick.minor.pad','ytick.minor.pad','font.size']
self.choiceProps ={'facecolor':tuple(colors.keys()),
'edgecolor':tuple(colors.keys()),
'gridcolor':tuple(colors.keys()),
'xtick.color':tuple(colors.keys()),
'ytick.color':tuple(colors.keys()),
'figpatch_facecolor':tuple(colors.keys()),
'figpatch_edgecolor':tuple(colors.keys()),
'marker':tuple(self.markers.values()),
'markeredgecolor':tuple(colors.keys()),
'markerfacecolor':tuple(colors.keys()),
'gridlinestyle':tuple(self.styles.keys()),
'adjustable':('box','datalim'),
'anchor':('C', 'SW', 'S', 'SE', 'E', 'NE', 'N', 'NW', 'W'),
'aspect':('auto', 'equal' ,'normal',),
'text.color':tuple(colors.keys()),
'text.fontstyle':('normal',),
'text.fontvariant':('normal',),
'text.fontweight':('normal',),
'text.fontsize':('medium','small','large'),
'xtick.direction':('in','out'),
'ytick.direction':('in','out'),
'text.fontangle':('normal',),
'font.family':('serif',),
'font.style':('normal',),
'font.variant':('normal',),
'font.weight':('normal',),
'mathtext.rm':('cmr10.ttf',),
'mathtext.it':('cmmi10.ttf',),
'mathtext.tt':('cmtt10.ttf',),
'mathtext.mit':('cmmi10.ttf',),
'mathtext.cal':('cmsy10.ttf',),
'mathtext.nonascii' : ('cmex10.ttf',),
'legend.fontsize':('small','medium','large'),
'titlesize':('large','medium','small'),
'whichgrid':('minor','major')
}
self.frame = Tkinter.Frame(self.widgetFrame, borderwidth=3,
relief = 'ridge')
self.propWidgets = {} # will hold handle to widgets created
self.optionsDict = {} # widget's value
self.labels={}
self.delvar=0
self.new_list_to_add=[]
self.delete_proplist=[]
self.initialvalues={'font.family':'serif','font.style':'normal','font.variant':'normal',
'font.weight':'normal','font.size':12.0,'text.color':'black','text.usetex':False,
'text.dvipnghack':False,'text.fontstyle':'normal','text.fontangle':'normal',
'text.fontvariant':'normal','text.fontweight':'normal','text.fontsize':'medium',
'axisbelow':False,'hold':True,'facecolor':'white','edgecolor':'black','linewidth':1,
'titlesize':'large','gridOn':False,'legend.isaxes':True,'legend.numpoints':4,
'legend.fontsize':"small",'legend.markerscale':0.6,
#'legend.pad':0.2,'legend.labelsep':0.005, 'legend.handlelen':0.05,
#'legend.handletextsep':0.02, 'legend.axespad':0.02,
'legend.shadow':True,'xtick.major.size':5,'xtick.minor.size':2,
'xtick.major.pad':3,'xtick.minor.pad':3,'xtick.labelsize':10,'xtick.labelrotation':0,
'ytick.labelsize':10,'ytick.labelrotation':0, 'xtick.color':'black','xtick.direction':'in',
'ytick.major.size':5,'ytick.minor.size':2,'ytick.major.pad':3,'ytick.minor.pad':3,
'ytick.color':'black','ytick.direction':'in','gridcolor':'black','gridlinestyle':':',
'gridlinewidth':0.5,'whichgrid':'major','mathtext.mathtext2':False,'mathtext.rm': 'cmr10.ttf',
'mathtext.it':'cmmi10.ttf','mathtext.tt':'cmtt10.ttf','mathtext.mit':'cmmi10.ttf',
'mathtext.cal':'cmsy10.ttf','mathtext.nonascii' : 'cmex10.ttf','figpatch_linewidth':1.0,
'figpatch_facecolor':'darkgray','figpatch_edgecolor':'white','marker':'s','markeredgewidth':0.5,
'markeredgecolor':'black','markerfacecolor':'blue',
#'zoomx':0,'zoomy':0,
'adjustable':'box','aspect':'auto','anchor':'C','figpatch_antialiased':False}
if mplversion[0]==0 and mplversion[2]<=3:
self.initialvalues.update({'legend.pad':0.2, 'legend.labelsep':0.005, 'legend.handlelen':0.05, 'legend.handletextsep':0.02, 'legend.axespad':0.02})
elif mplversion[0]>0:
self.initialvalues.update({'legend.borderpad':0.2, 'legend.labelspacing':0.005, 'legend.handlelength':0.05, 'legend.handletextpad':0.02, 'legend.borderaxespad':0.02} )
import Pmw
#items = self.booleanProps +self.choiceProps.keys()+self.twProps
items=self.main_props
w = Pmw.Group(self.frame, tag_text='Choose a Category')
val=items[0]
cb=CallBackFunction(self.properties_list, (val,))
self.chooser = Pmw.ComboBox(
w.interior(), label_text='', labelpos='w',
entryfield_entry_width=20, scrolledlist_items=items,selectioncommand=cb)
self.chooser.pack(padx=2, pady=2, expand='yes', fill='both')
w.pack(fill = 'x', expand = 1, side='top')
        # configure without rebuilding to avoid endless loop
apply( self.configure, (False,), kw)
self.frame.pack(expand='yes', fill='both')
w1 = Pmw.Group(self.frame, tag_text='choose a Property')
self.propWidgetMaster = w1.interior()
cb = CallBackFunction( self.mainChoices, (val,))
self.chooser1 = Pmw.ComboBox(w1.interior(), label_text='', labelpos='w',entryfield_entry_width=20, scrolledlist_items=self.new_list_to_add,selectioncommand=cb)
self.chooser1.pack(padx=2, pady=2, expand='yes', fill='both')
w1.pack(fill = 'x', expand = 1, side='top')
self.chooser1.grid(row=len(self.propWidgets), column=1, sticky='w')
if self.initialValue is not None:
self.set(self.initialValue, run=0)
self._setModified(False) # will be set to True by configure method
def properties_list(self,prop1,prop2):
prop=prop2
if prop=='Text':
list_to_add=['text.color','text.usetex','text.dvipnghack','text.fontstyle','text.fontangle','text.fontvariant','text.fontweight','text.fontsize']
elif prop=='Axes':
list_to_add=['axisbelow','hold','facecolor','edgecolor','linewidth','titlesize','marker','markeredgewidth','markeredgecolor','markerfacecolor',
#'zoomx','zoomy',
'adjustable','anchor','aspect']
elif prop=='Grid':
list_to_add=['gridOn','gridlinewidth','gridcolor','gridlinestyle','whichgrid']
elif prop=='Legend':
if mplversion[0]==0 and mplversion[2]<=3:
list_to_add=['legend.isaxes','legend.numpoints','legend.fontsize','legend.pad','legend.markerscale','legend.labelsep','legend.handlelen','legend.handletextsep','legend.axespad','legend.shadow']
elif mplversion[0] > 0:
list_to_add=['legend.isaxes','legend.numpoints','legend.fontsize','legend.borderpad','legend.markerscale','legend.labelspacing','legend.handlelength','legend.handletextpad','legend.borderaxespad','legend.shadow']
elif prop=="Tick":
list_to_add=['xtick.major.pad','ytick.major.pad','xtick.minor.pad','ytick.minor.pad','xtick.color','ytick.color','xtick.labelsize','ytick.labelsize','xtick.labelrotation','ytick.labelrotation']
#['xtick.major.size','ytick.major.size','xtick.minor.size','ytick.minor.size','xtick.major.pad','ytick.major.pad','xtick.minor.pad','ytick.minor.pad','xtick.color','ytick.color','xtick.size','ytick.size','xtick.direction','ytick.direction']
elif prop=="Font":
list_to_add=['font.family','font.style','font.variant','font.weight','font.size']
elif prop=="MathText":
list_to_add=['mathtext.mathtext2','mathtext.rm','mathtext.it','mathtext.tt','mathtext.mit', 'mathtext.cal','mathtext.nonascii' ,]
elif prop=="Figure":
list_to_add=['figpatch_antialiased','figpatch_linewidth','figpatch_edgecolor','figpatch_facecolor']
self.new_list_to_add=list_to_add
self.chooser1.setlist(self.new_list_to_add)
def mainChoices(self,prop,val):
self.addProp(val)
def deleteProp(self):
prop=self.property
widget= self.propWidgets[prop][0]
if prop in self.choiceProps:
widget.selectitem(self.initialvalues[prop])
widget.update_idletasks()
self.setChoice((prop,), self.initialvalues[prop])
if prop in self.booleanProps:
widget.deselect()
if prop in self.twProps:
widget.setValue(self.initialvalues[prop])
self.setTwValue(prop, self.initialvalues[prop])
self.scheduleNode()
widget.pack_forget()
widget.place_forget()
widget.grid_forget()
self.labels[prop].pack_forget()
self.labels[prop].place_forget()
self.labels[prop].grid_forget()
del self.propWidgets[prop]
del self.labels[prop]
def addProp(self, prop):
if self.propWidgets.has_key(prop):
return
labwidget = Tkinter.Label(self.propWidgetMaster, text=prop)
rown=self.propWidgetMaster.size()[1]
labwidget.grid(padx=2, pady=2, row=rown, column=0,
sticky='w')
self.labels[prop]=labwidget
#Right Click Menu
popup = Tkinter.Menu(labwidget, tearoff=0)
#popup.add_command(label="ToolTip")
#popup.add_separator()
popup.add_command(label="Ignore",command=self.deleteProp)
def do_popup(event):
# display the popup menu
self.property=labwidget['text']
try:
popup.tk_popup(event.x_root, event.y_root, 0)
finally:
popup.grab_release()
labwidget.bind("<Button-3>", do_popup)
if prop in self.booleanProps:
var = Tkinter.IntVar()
var.set(self.initialvalues[prop])
cb = CallBackFunction( self.setBoolean, (prop, var))
widget = Tkinter.Checkbutton(self.propWidgetMaster,
variable=var, command=cb)
if prop not in self.propWidgets:
self.propWidgets[prop] = (widget, var.get())
self.setBoolean( (prop, var) )
elif prop in self.choiceProps:
items = self.choiceProps[prop]
var = None
cb = CallBackFunction( self.setChoice, (prop,))
widget = Pmw.ComboBox(
self.propWidgetMaster,
entryfield_entry_width=10,
scrolledlist_items=items, selectioncommand=cb)
if prop not in self.propWidgets:
self.propWidgets[prop] = (widget, var)
self.setChoice( (prop,), self.initialvalues[prop] )
elif prop in self.twProps:
cb = CallBackFunction( self.setTwValue,prop)
val=self.initialvalues[prop]
self.twwidget=widget =ThumbWheel(width=75, height=21,wheelPad=2,master=self.propWidgetMaster,labcfg={'fg':'black', 'side':'left', 'text':prop},min = 0.0,type='float',showlabel =1,continuous =0,oneTurn =10,value=val,callback=cb)
if prop not in self.propWidgets:
self.propWidgets[prop] = (widget,val)
widget.grid(row=rown, column=1, sticky='w')
def setTwValue(self,prop,val):
self.optionsDict[prop] = val
if self.port.node.paramPanel.immediateTk.get():
self.scheduleNode()
def setBoolean(self,args):
prop, val = args
#if type(val)==types.InstanceType:
from mglutil.util.misc import isInstance
if isInstance(val) is True:
self.optionsDict[prop] = val.get()
else:
self.optionsDict[prop] = val
if self.optionsDict[prop]==1:
self.propWidgets[prop][0].select()
else:
self.propWidgets[prop][0].deselect()
if self.port.node.paramPanel.immediateTk.get():
self.scheduleNode()
def setChoice(self, prop, value):
self.optionsDict[prop[0]] = value
self.propWidgets[prop[0]][0].selectitem(value)
if self.port.node.paramPanel.immediateTk.get():
self.scheduleNode()
def set(self, valueDict, run=1):
self._setModified(True)
for k,v in valueDict.items():
self.addProp(k)
if k in self.booleanProps:
self.setBoolean( (k, v) )
elif k in self.choiceProps:
self.setChoice((k,), v)
else:
self.setTwValue(k, v)
self._newdata = True
if run:
self.scheduleNode()
def get(self):
return self.optionsDict
def configure(self, rebuild=True, **kw):
action, rebuildDescr = apply( PortWidget.configure, (self, 0), kw)
# this methods just creates a resize action if width changes
if self.widget is not None:
if 'width' in kw:
action = 'resize'
if action=='rebuild' and rebuild:
action, rebuildDescr = self.rebuild(rebuildDescr)
if action=='resize' and rebuild:
self.port.node.autoResize()
return action, rebuildDescr
class LegendNE(NetworkNode):
"""This nodes takes two lists of values and plots the the second against the first.
Input:
labels - sequence of strings
loc - 'best' : 0,
'upper right' : 1, (default)
'upper left' : 2,
'lower left' : 3,
'lower right' : 4,
'right' : 5,
'center left' : 6,
'center right' : 7,
'lower center' : 8,
'upper center' : 9,
'center' : 10,
        If none of these are suitable, loc can be a 2-tuple giving x,y in axes coords, i.e.,
loc = 0, 1 is left top
loc = 0.5, 0.5 is center, center
    Output:
        drawAreaDef - dict carrying the legend label and location settings
"""
def __init__(self, name='Legend', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw)
codeBeforeDisconnect = """def beforeDisconnect(self, c):
node1 = c.port1.node
node2 = c.port2.node
node1.figure.delaxes(node1.axes)
node1.figure.add_axes(node1.axes)
"""
ip = self.inputPortsDescr
ip.append(datatype='list',required=True, name='label')
ip.append(datatype='string',required=False, name='location', defaultValue='upper right')
self.widgetDescr['label'] = {
'class':'NEEntry', 'master':'node',
'labelCfg':{'text':'Label:'},
'initialValue':''}
self.widgetDescr['location'] = {
'class':'NEComboBox', 'master':'node',
'choices':locations.keys(),
'fixedChoices':True,
'initialValue':'upper right',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'Location:'}}
op = self.outputPortsDescr
op.append(datatype='MPLDrawArea', name='drawAreaDef')
code = """def doit(self, label, location):
kw={ 'legendlabel':label,'legendlocation':location}
self.outputData(drawAreaDef=kw) """
self.setFunction(code)
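
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the legend call that consumes the label/location
# settings produced by LegendNE; the sample curve and name are assumptions.
def _demo_legend():
    import numpy
    from matplotlib import pyplot
    x = numpy.arange(0.0, 1.0, 0.01)
    pyplot.plot(x, numpy.sin(2 * numpy.pi * x), label='sin')
    pyplot.legend(loc='upper right')              # one of the named locations
    pyplot.show()
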
class ColorBarNE(NetworkNode):
"""Class for drawing color bar
input :
plot - axes instance
current_image -image instance
extend - both,neither,min or max.If not 'neither', make pointed end(s) for
out-of-range values. These are set for a given colormap using the
colormap set_under and set_over methods.
orientation - horizontal or vertical
spacing -uniform or proportional.Uniform spacing gives each discrete color the same space;proportional makes the space proportional to the data interval.
shrink -fraction by which to shrink the colorbar
"""
def __init__(self, name='ColorBar', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='MPLAxes', required=True, name='plot')
ip.append(datatype='None', required=True, name='current_image')
ip.append(datatype='string', required=False, name='extend', defaultValue='neither')
        ip.append(datatype='string', required=False, name='orientation', defaultValue='vertical')
ip.append(datatype='string', required=False, name='spacing', defaultValue='uniform')
ip.append(datatype='float', required=False, name='shrink', defaultValue=1.)
self.widgetDescr['shrink'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':1.0,'min':0.0,'max':1.0,
'labelCfg':{'text':'shrink'} }
self.widgetDescr['extend'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['both','neither','min','max'],
'fixedChoices':True,
'initialValue':'neither',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'extend:'}}
self.widgetDescr['orientation'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['vertical','horizontal'],
'fixedChoices':True,
'initialValue':'vertical',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'orientation:'}}
self.widgetDescr['spacing'] = {
'class':'NEComboBox', 'master':'ParamPanel',
            'choices':['uniform','proportional'],
'fixedChoices':True,
'initialValue':'uniform',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'spacing:'}}
op = self.outputPortsDescr
op.append(datatype='MPLAxes', name='axes')
code = """def doit(self, plot, current_image, extend, orientation, spacing, shrink):
axes_list=plot.figure.axes
if len(axes_list):
pos=plot.figure.axes[0].get_position()
for i in axes_list:
if not isinstance(i,Subplot):
plot.figure.delaxes(i)
    if current_image is not None:
pl=plot.figure.colorbar(current_image,cmap=current_image.cmap,shrink=shrink,extend=extend,orientation=orientation,spacing=spacing,filled=True)
if orientation=="vertical":
plot.figure.axes[0].set_position([0.125, 0.1, 0.62, 0.8])
if shrink>=1.0:
pl.ax.set_position([0.785, 0.1, 0.03, 0.8])
else:
pl.ax.set_position([0.785,pl.ax.get_position()[1],0.03,pl.ax.get_position()[-1]])
else:
plot.figure.axes[0].set_position([0.125, 0.34, 0.62, 0.56])
if shrink>=1.0:
pl.ax.set_position([0.125, 0.18, 0.62, 0.04])
else:
pl.ax.set_position([pl.ax.get_position()[0],0.18,pl.ax.get_position()[2],0.04])
        plot.figure.canvas.draw()
        self.outputData(axes=pl)
"""
self.setFunction(code)
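
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the colorbar call wrapped by ColorBarNE (without the
# manual axes repositioning done above); the random image is an assumption.
def _demo_colorbar():
    import numpy
    from matplotlib import pyplot
    data = numpy.random.rand(20, 20)
    im = pyplot.imshow(data)                      # an AxesImage instance
    pyplot.colorbar(im, orientation='vertical', spacing='uniform',
                    shrink=1.0, extend='neither')
    pyplot.show()
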
class Text(NetworkNode):
"""Class for writting text in the axes
posx: x coordinate
posy: y coordinate
textlabel: label name
rotation: angle to be rotated
horizontal alignment: ['center', 'right', 'left']
vertical alignment: ['center', 'right', 'left']
"""
def __init__(self, name='Text', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='float', required=False, name='posx', defaultValue=.1)
ip.append(datatype='float', required=False, name='posy', defaultValue=.1)
ip.append(datatype='string', required=False, name='textlabel', defaultValue='')
        ip.append(datatype='float', required=False, name='rotation', defaultValue=0)
ip.append(datatype='string', required=False, name='horizontalalignment', defaultValue='center')
ip.append(datatype='string', required=False, name='verticalalignment', defaultValue='center')
self.widgetDescr['posx'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':1., 'type':'float',
'wheelPad':2, 'initialValue':0.1,
'labelCfg':{'text':'posx'} }
self.widgetDescr['posy'] = {
'class':'NEThumbWheel','master':'node',
'width':75, 'height':21, 'oneTurn':1., 'type':'float',
'wheelPad':2, 'initialValue':0.1,
'labelCfg':{'text':'posy'} }
self.widgetDescr['textlabel'] = {
'class':'NEEntry', 'master':'node',
'labelCfg':{'text':'text'},
'initialValue':''}
self.widgetDescr['rotation'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':75, 'height':21, 'oneTurn':10, 'type':'float',
'wheelPad':2, 'initialValue':0,
'labelCfg':{'text':'rotation'} }
self.widgetDescr['horizontalalignment'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['center', 'right', 'left'],
'fixedChoices':True,
'initialValue':'center',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'horizontalalignment:'}}
self.widgetDescr['verticalalignment'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['top', 'bottom', 'center'],
'fixedChoices':True,
'initialValue':'center',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'verticalalignment:'}}
op = self.outputPortsDescr
op.append(datatype='MPLDrawArea', name='drawAreaDef')
code = """def doit(self, posx, posy, textlabel, rotation, horizontalalignment, verticalalignment):
kw = {'posx':posx, 'posy':posy, 'textlabel':textlabel, 'horizontalalignment':horizontalalignment, 'verticalalignment':verticalalignment,'rotation':rotation}
self.outputData(drawAreaDef=kw)
"""
self.setFunction(code)
class SaveFig(NetworkNode):
"""Save the current figure.
fname - the filename to save the current figure to. The
output formats supported depend on the backend being
used. and are deduced by the extension to fname.
Possibilities are eps, jpeg, pdf, png, ps, svg. fname
can also be a file or file-like object - cairo backend
only. dpi - is the resolution in dots per inch. If
None it will default to the value savefig.dpi in the
matplotlibrc file
facecolor and edgecolor are the colors of the figure rectangle
orientation - either 'landscape' or 'portrait'
papertype - is one of 'letter', 'legal', 'executive', 'ledger', 'a0' through
'a10', or 'b0' through 'b10' - only supported for postscript output
format - one of 'pdf', 'png', 'ps', 'svg'. It is used to specify the
output when fname is a file or file-like object - cairo
backend only.
"""
def __init__(self, name='saveFig', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='MPLFigure', required=False, name='figure')
ip.append(datatype=None, required=False, name='fname')
ip.append(datatype=None, required=False, name='dpi', defaultValue=80)
ip.append(datatype=None, required=False, name='facecolor', defaultValue='w')
ip.append(datatype=None, required=False, name='edgecolor', defaultValue='w')
ip.append(datatype=None, required=False, name='orientation', defaultValue='portrait')
ip.append(datatype=None, required=False, name='papertype')
ip.append(datatype=None, required=False, name='format')
self.widgetDescr['fname'] = {
'class':'NEEntry', 'master':'node',
'labelCfg':{'text': 'filename:'},
'initialValue':''}
self.widgetDescr['dpi'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':2, 'type':'int',
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':80,
'labelCfg':{'text':'dpi'} }
self.widgetDescr['facecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cnames.keys(),
'fixedChoices':True,
'initialValue':'w',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'facecolor:'}}
self.widgetDescr['edgecolor'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':cnames.keys(),
'fixedChoices':True,
'initialValue':'w',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'edgecolor:'}}
self.widgetDescr['orientation'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['landscape','portrait'],
'fixedChoices':True,
'initialValue':'portrait',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'orientation:'}}
self.widgetDescr['papertype'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['letter', 'legal', 'executive', 'ledger','a0','a1','a2','a3','a4','a5','a6','a7','a8','a9','a10','b0','b1','b2','b3','b4','b5','b6','b7','b8','b9','b10',None],
'fixedChoices':True,
'initialValue':'None',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'papertype:'}}
self.widgetDescr['format'] = {
'class':'NEComboBox', 'master':'ParamPanel',
'choices':['pdf', 'png', 'ps', 'svg',None],
'fixedChoices':True,
'initialValue':'None',
'entryfield_entry_width':7,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'format:'}}
code = """def doit(self, figure, fname, dpi, facecolor, edgecolor,
orientation, papertype, format):
if figure:
figure.savefig(fname,dpi=dpi,facecolor=facecolor,edgecolor=edgecolor,orientation=orientation,papertype=papertype,format=format)
"""
self.setFunction(code)
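
# --- Illustrative sketch (not part of the node library) ---------------------
# Standalone example of the savefig call wrapped by SaveFig; the output file
# name is an assumption, and papertype is honoured for postscript output only
# in the matplotlib versions this module targets.
def _demo_savefig():
    from matplotlib import pyplot
    pyplot.plot([1, 2, 3], [1, 4, 9])
    pyplot.savefig('demo.png', dpi=80, facecolor='w', edgecolor='w',
                   orientation='portrait')
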
class MeshGrid(NetworkNode):
"""This class converts vectors x, y with lengths Nx=len(x) and Ny=len(y) to X, Y and returns them.
where X and Y are (Ny, Nx) shaped arrays with the elements of x
and y repeated to fill the matrix
"""
def __init__(self, name='MeshGrid', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='list',required=False, name='x')
ip.append(datatype='list',required=False, name='y')
op = self.outputPortsDescr
op.append(datatype='None', name='X')
op.append(datatype='None', name='Y')
code = """def doit(self,x,y):
X,Y=meshgrid(x, y)
self.outputData(X=X,Y=Y)
"""
self.setFunction(code)
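# An illustrative sketch (not part of the node) of what numpy.meshgrid does
# with two 1-D vectors; the output shapes are the point of the example:
#
#   >>> from numpy import meshgrid
#   >>> X, Y = meshgrid([1, 2, 3], [10, 20])
#   >>> X.shape, Y.shape
#   ((2, 3), (2, 3))
#   >>> X
#   array([[1, 2, 3],
#          [1, 2, 3]])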
class BivariateNormal(NetworkNode):
"""Bivariate gaussan distribution for equal shape X, Y"""
def __init__(self, name='BivariateNormal', **kw):
kw['name'] = name
apply( NetworkNode.__init__, (self,), kw )
ip = self.inputPortsDescr
ip.append(datatype='None', name='arraylist1')
ip.append(datatype='None', name='arraylist2')
ip.append(datatype='float', required=False, name='sigmax', defaultValue=1.)
ip.append(datatype='float', required=False, name='sigmay', defaultValue=1.)
ip.append(datatype='float', required=False, name='mux', defaultValue=0.)
ip.append(datatype='float', required=False, name='muy', defaultValue=0.)
ip.append(datatype='float', required=False, name='sigmaxy', defaultValue=0.)
self.widgetDescr['sigmax'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':2, 'type':'float',
'wheelPad':2, 'initialValue':1.0,
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'labelCfg':{'text':'sigmax'} }
self.widgetDescr['sigmay'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':2, 'type':'float',
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':1.0,
'labelCfg':{'text':'sigmay'} }
self.widgetDescr['mux'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':2, 'type':'float',
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':0.0,
'labelCfg':{'text':'mux'} }
self.widgetDescr['muy'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':2, 'type':'float',
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':0.0,
'labelCfg':{'text':'muy'} }
self.widgetDescr['sigmaxy'] = {
'class':'NEThumbWheel','master':'ParamPanel',
'width':61, 'height':21, 'oneTurn':2, 'type':'float',
'labelGridCfg':{'sticky':'w'},
'widgetGridCfg':{'sticky':'w'},
'wheelPad':2, 'initialValue':0.0,
'labelCfg':{'text':'sigmaxy'} }
op = self.outputPortsDescr
op.append(datatype=None, name='z')
code = """def doit(self, x, y, sigmax, sigmay, mux, muy, sigmaxy):
import numpy
X=numpy.array(x)
Y=numpy.array(y)
z=bivariate_normal( X, Y, sigmax=sigmax, sigmay=sigmay, mux=mux, muy=muy, sigmaxy=sigmaxy)
self.outputData(z=z)
"""
self.setFunction(code)
###########################################################################
#
# Create and populate library
#
###########################################################################
from Vision.VPE import NodeLibrary
matplotliblib = NodeLibrary('MatPlotLib', '#99AFD8')
matplotliblib.addNode(MPLFigureNE, 'Figure', 'Input')
matplotliblib.addNode(MPLImageNE, 'ImageFigure', 'Input')
matplotliblib.addNode(MPLDrawAreaNE, 'Draw Area', 'Input')
matplotliblib.addNode(MultiPlotNE, 'MultiPlot', 'Misc')
matplotliblib.addNode(MPLMergeTextNE, 'MergeText', 'Input')
matplotliblib.addNode(ColorBarNE, 'ColorBar', 'Misc')
matplotliblib.addNode(Text, 'Text', 'Input')
matplotliblib.addNode(PolarAxesNE, 'PolarAxes', 'Plotting')
matplotliblib.addNode(HistogramNE, 'Histogram', 'Plotting')
matplotliblib.addNode(PlotNE, 'Plot', 'Plotting')
matplotliblib.addNode(PlotDateNE, 'PlotDate', 'Plotting')
matplotliblib.addNode(PieNE, 'Pie', 'Plotting')
matplotliblib.addNode(SpyNE, 'Spy', 'Plotting')
#matplotliblib.addNode(Spy2NE, 'Spy2', 'Plotting')
matplotliblib.addNode(VlineNE, 'Vline', 'Plotting')
matplotliblib.addNode(ScatterNE, 'Scatter', 'Plotting')
#matplotliblib.addNode(ScatterClassicNE, 'ScatterClassic', 'Plotting')
matplotliblib.addNode(FigImageNE, 'Figimage', 'Plotting')
matplotliblib.addNode(FillNE, 'Fill', 'Plotting')
matplotliblib.addNode(ContourNE, 'Contour', 'Plotting')
matplotliblib.addNode(PcolorMeshNE,'PcolorMesh', 'Plotting')
matplotliblib.addNode(PcolorNE,'Pcolor', 'Plotting')
#matplotliblib.addNode(PcolorClassicNE,'PcolorClassic', 'Plotting')
matplotliblib.addNode(RandNormDist, 'RandNormDist', 'Demo')
matplotliblib.addNode(SinFunc, 'SinFunc', 'Demo')
matplotliblib.addNode(SinFuncSerie, 'SinFuncSerie', 'Demo')
matplotliblib.addNode(MatPlotLibOptions, 'Set Matplotlib options', 'Input')
matplotliblib.addNode(LegendNE, 'Legend', 'Input')
matplotliblib.addNode(TablePlotNE, 'TablePlot', 'Plotting')
matplotliblib.addNode(SpecgramNE, 'Specgram', 'Plotting')
matplotliblib.addNode(CSDNE, 'CSD', 'Plotting')
matplotliblib.addNode(PSDNE, 'PSD', 'Plotting')
matplotliblib.addNode(LogCurveNE, 'LogCurve', 'Plotting')
matplotliblib.addNode(SemilogxNE, 'Semilogx', 'Plotting')
matplotliblib.addNode(SemilogyNE, 'Semilogy', 'Plotting')
matplotliblib.addNode(BoxPlotNE, 'BoxPlot', 'Plotting')
matplotliblib.addNode(ErrorBarNE, 'ErrorBar', 'Plotting')
matplotliblib.addNode(BarHNE, 'BarH', 'Plotting')
matplotliblib.addNode(BarNE, 'Bar', 'Plotting')
matplotliblib.addNode(QuiverNE, 'Quiver', 'Plotting')
matplotliblib.addNode(StemNE,'Stem','Plotting')
matplotliblib.addNode(BivariateNormal,'BivariateNormal','Demo')
matplotliblib.addNode(MeshGrid,'MeshGrid','Demo')
matplotliblib.addNode(SaveFig,'SaveFig','Misc')
matplotliblib.addWidget(NEMatPlotLibOptions)
###########################################################################
#
# Library specific data types
#
###########################################################################
UserLibBuild.addTypes(matplotliblib, 'Vision.matplotlibTypes')
try:
UserLibBuild.addTypes(matplotliblib, 'Vision.PILTypes')
except:
pass
|
StarcoderdataPython
|
1960017
|
from load import ROOT as R
import numpy as N
import gna.constructors as C
from gna.bundle import *
from gna.expression import NIndex
class dummyvar(TransformationBundleLegacy):
def __init__(self, *args, **kwargs):
super(dummyvar, self).__init__( *args, **kwargs )
def define_variables(self):
idx = self.cfg.indices
if not isinstance(idx, NIndex):
idx = NIndex(fromlist=self.cfg.indices)
for name, var in self.cfg.variables.items():
for i, nidx in enumerate(idx.iterate()):
self.context.set_variable( name, nidx, var )
|
StarcoderdataPython
|
362173
|
import zipfile
from tqdm import tqdm
from discopy_data.data.doc import Document
from discopy_data.data.relation import Relation
def extract_arguments(annos, text):
args = {}
for a in annos:
if a[0][0] == 'T':
args[a[0]] = {
'type': a[1].split(" ")[0],
'offset': text.find(a[2]),
'length': len(a[2]),
}
for a in annos:
if a[0][0] == 'A':
arg_type, arg_id, arg_stance = a[1].split(' ')
args[arg_id]['stance'] = arg_stance
arguments = []
for r in annos:
if r[0][0] == 'R':
rtype, arg1, arg2 = r[1].split(' ')
arg1 = args[arg1.split(':')[1]]
arg2 = args[arg2.split(':')[1]]
arguments.append({
'Sense': [rtype],
'ID': len(arguments),
'Arg1': arg1,
'Type': 'Argumentation',
'Arg2': arg2
})
return arguments
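# A sketch of the tab-separated brat annotation lines this parser expects
# (the IDs, offsets, and texts below are made up for illustration):
#
#   T1<TAB>MajorClaim 503 575<TAB>we should attach more importance to cooperation
#   A1<TAB>Stance T1 For
#   R1<TAB>supports Arg1:T2 Arg2:T1
#
# 'T' lines define argument components, 'A' lines attach a stance to a
# component, and 'R' lines relate two components, mirroring the three
# branches in extract_arguments() above.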
def extract(source_path: str):
with zipfile.ZipFile(source_path) as zh_all:
brat_file = [f for f in zh_all.filelist if f.filename.endswith('brat-project-final.zip')][0]
with zipfile.ZipFile(zh_all.open(brat_file)) as zh_brat:
annotation_files = sorted(filter(lambda f: not f.startswith('_') and f.endswith('.txt'),
(f.filename for f in zh_brat.filelist)))
for doc_i, path in tqdm(enumerate(annotation_files)):
content = zh_brat.open(path).read().decode().splitlines(keepends=True)
yield {
'meta': {
'title': content[0].strip(),
'corpus': 'argumentative_essays',
'path': path,
},
'text': '\n'.join(p.strip() for p in content[2:]),
}
def update_annotations(source_path: str, options: dict):
zh_all = zipfile.ZipFile(source_path)
brat_file = [f for f in zh_all.filelist if f.filename.endswith('brat-project-final.zip')][0]
zh_brat = zipfile.ZipFile(zh_all.open(brat_file))
def helper(doc: Document):
content = zh_brat.open(doc.meta['path'][:-3] + 'ann').read().decode().splitlines(keepends=True)
annos = [tuple(a.strip().split("\t")) for a in content]
arguments = extract_arguments(annos, doc.text)
words = doc.get_tokens()
relations = [
Relation([t for t in words if
arg['Arg1']['offset'] <= t.offset_begin <= (arg['Arg1']['offset'] + arg['Arg1']['length'])],
[t for t in words if
arg['Arg2']['offset'] <= t.offset_begin <= (arg['Arg2']['offset'] + arg['Arg2']['length'])],
[],
arg['Sense'], 'Argumentation') for arg in arguments
]
return doc.with_relations(relations)
return helper
|
StarcoderdataPython
|
8076974
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-04-13 03:57
from __future__ import unicode_literals
from django.db import migrations
import django_countries.fields
class Migration(migrations.Migration):
dependencies = [
('case_studies', '0008_casestudy_lead_content'),
]
operations = [
migrations.AddField(
model_name='casestudy',
name='countries',
field=django_countries.fields.CountryField(default='', max_length=746, multiple=True),
preserve_default=False,
),
]
|
StarcoderdataPython
|
3279198
|
from cbuild.core import logger, paths, template
from cbuild.apk import create as apk_c, sign as apk_s
import glob
import time
import pathlib
import subprocess
def genpkg(pkg, repo, arch, binpkg):
if not pkg.destdir.is_dir():
pkg.log_warn(f"cannot find pkg destdir, skipping...")
return
binpath = repo / binpkg
lockpath = binpath.with_suffix(binpath.suffix + ".lock")
repo.mkdir(parents = True, exist_ok = True)
while lockpath.is_file():
pkg.log_warn(f"binary package being created, waiting...")
time.sleep(1)
try:
lockpath.touch()
metadata = {}
args = []
pkgdesc = pkg.pkgdesc
metadata["pkgdesc"] = pkgdesc
metadata["url"] = pkg.rparent.url
metadata["maintainer"] = pkg.rparent.maintainer
#metadata["packager"] = pkg.rparent.maintainer
metadata["origin"] = pkg.rparent.pkgname
metadata["license"] = pkg.license
if pkg.rparent.git_revision:
metadata["commit"] = pkg.rparent.git_revision + (
"-dirty" if pkg.rparent.git_dirty else ""
)
if len(pkg.provides) > 0:
pkg.provides.sort()
metadata["provides"] = pkg.provides
if pkg.provider_priority > 0:
metadata["provider_priority"] = pkg.provider_priority
mdeps = []
for c in pkg.depends:
mdeps.append(c.removeprefix("virtual:"))
mdeps.sort()
metadata["depends"] = mdeps
metadata["install_if"] = list(pkg.install_if)
if hasattr(pkg, "aso_provides"):
pkg.aso_provides.sort(key = lambda x: x[0])
metadata["shlib_provides"] = pkg.aso_provides
if hasattr(pkg, "so_requires"):
pkg.so_requires.sort()
metadata["shlib_requires"] = pkg.so_requires
if hasattr(pkg, "pc_provides"):
pkg.pc_provides.sort()
metadata["pc_provides"] = pkg.pc_provides
if hasattr(pkg, "cmd_provides"):
pkg.cmd_provides.sort()
metadata["cmd_provides"] = pkg.cmd_provides
if hasattr(pkg, "pc_requires"):
pkg.pc_requires.sort()
metadata["pc_requires"] = pkg.pc_requires
if len(pkg.triggers) > 0:
# check validity first
for t in pkg.triggers:
p = pathlib.Path(t)
if not p or not p.is_absolute():
pkg.error(f"invalid trigger path: {t}")
# finally pass metadata
metadata["triggers"] = list(pkg.triggers)
metadata["file_modes"] = pkg.file_modes
logger.get().out(f"Creating {binpkg} in repository {repo}...")
apk_c.create(
pkg.pkgname, f"{pkg.pkgver}-r{pkg.pkgrel}", arch,
pkg.rparent.source_date_epoch, pkg.destdir, pkg.statedir, binpath,
pkg.rparent.signing_key, metadata
)
finally:
lockpath.unlink()
pkg.rparent._stage[repo] = True
def invoke(pkg):
arch = pkg.rparent.profile().arch
binpkg = f"{pkg.pkgname}-{pkg.pkgver}-r{pkg.pkgrel}.apk"
repobase = paths.repository() / pkg.rparent.repository
if pkg.pkgname.endswith("-dbg"):
repo = repobase / "debug"
else:
repo = repobase
repo = repo / ".stage" / arch
genpkg(pkg, repo, arch, binpkg)
for apkg, adesc, iif, takef in template.autopkgs:
binpkg = f"{pkg.pkgname}-{apkg}-{pkg.pkgver}-r{pkg.pkgrel}.apk"
# is an explicit package, do not autosplit that
if pkg.pkgname.endswith(f"-{apkg}"):
continue
# explicitly defined, so do not try autosplit
foundpkg = False
for sp in pkg.rparent.subpkg_list:
if sp.pkgname == f"{pkg.pkgname}-{apkg}":
foundpkg = True
break
if foundpkg:
continue
ddest = pkg.rparent.destdir_base / f"{pkg.pkgname}-{apkg}-{pkg.pkgver}"
# destdir does not exist, so skip
if not ddest.is_dir():
continue
# subpkg repository
srepo = repo
if apkg == "dbg":
srepo = repobase / "debug/.stage" / arch
# create a temporary subpkg instance
# it's only complete enough to satisfy the generator
spkg = template.Subpackage(
f"{pkg.pkgname}-{apkg}", pkg.rparent, pkg.pkgdesc
)
genpkg(spkg, srepo, arch, binpkg)
|
StarcoderdataPython
|
1743990
|
'''8. Write a Python program to solve (x + y) * (x + y).
Test Data : x = 4, y = 3
Expected Output : ((4 + 3) ^ 2) = 49'''
x, y = 4, 3
ans = (x + y) * (x + y)
print(f"(({x} + {y}) ^ 2) = {ans}")
|
StarcoderdataPython
|
1808579
|
<reponame>jerryuhoo/PaddleSpeech
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import sys
from abc import ABC
from abc import abstractmethod
from collections import OrderedDict
from typing import Any
from typing import Dict
from typing import List
from typing import Union
import paddle
from .log import logger
from .utils import download_and_decompress
from .utils import MODEL_HOME
class BaseExecutor(ABC):
"""
An abstract executor of paddlespeech tasks.
"""
def __init__(self):
self._inputs = OrderedDict()
self._outputs = OrderedDict()
self.pretrained_models = OrderedDict()
self.model_alias = OrderedDict()
@abstractmethod
def _init_from_path(self, *args, **kwargs):
"""
Init model and other resources from arguments. This method should be called by `__call__()`.
"""
pass
@abstractmethod
def preprocess(self, input: Any, *args, **kwargs):
"""
Input preprocess and return paddle.Tensor stored in self._inputs.
        Input content can be a text (tts), a file (asr, cls), a stream (not supported yet), or anything else needed.
Args:
input (Any): Input text/file/stream or other content.
"""
pass
@paddle.no_grad()
@abstractmethod
def infer(self, *args, **kwargs):
"""
Model inference and put results into self._outputs.
This method get input tensors from self._inputs, and write output tensors into self._outputs.
"""
pass
@abstractmethod
def postprocess(self, *args, **kwargs) -> Union[str, os.PathLike]:
"""
Output postprocess and return results.
This method get model output from self._outputs and convert it into human-readable results.
Returns:
Union[str, os.PathLike]: Human-readable results such as texts and audio files.
"""
pass
@abstractmethod
def execute(self, argv: List[str]) -> bool:
"""
Command line entry. This method can only be accessed by a command line such as `paddlespeech asr`.
Args:
argv (List[str]): Arguments from command line.
Returns:
int: Result of the command execution. `True` for a success and `False` for a failure.
"""
pass
@abstractmethod
def __call__(self, *arg, **kwargs):
"""
Python API to call an executor.
"""
pass
def get_task_source(self, input_: Union[str, os.PathLike, None]
) -> Dict[str, Union[str, os.PathLike]]:
"""
Get task input source from command line input.
Args:
input_ (Union[str, os.PathLike, None]): Input from command line.
Returns:
Dict[str, Union[str, os.PathLike]]: A dict with ids and inputs.
"""
if self._is_job_input(input_):
ret = self._get_job_contents(input_)
else:
ret = OrderedDict()
if input_ is None: # Take input from stdin
for i, line in enumerate(sys.stdin):
line = line.strip()
if len(line.split(' ')) == 1:
ret[str(i + 1)] = line
elif len(line.split(' ')) == 2:
id_, info = line.split(' ')
ret[id_] = info
else: # No valid input info from one line.
continue
else:
ret[1] = input_
return ret
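    # A sketch of the stdin job format parsed above (ids and paths are made
    # up for illustration): each line is either a bare value or an
    # "<id> <value>" pair, e.g.
    #
    #   utt_1 /data/audio/first.wav
    #   utt_2 /data/audio/second.wav
    #
    # A single non-job input is returned as {1: input_}.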
def process_task_results(self,
input_: Union[str, os.PathLike, None],
results: Dict[str, os.PathLike],
job_dump_result: bool=False):
"""
Handling task results and redirect stdout if needed.
Args:
input_ (Union[str, os.PathLike, None]): Input from command line.
results (Dict[str, os.PathLike]): Task outputs.
job_dump_result (bool, optional): if True, dumps job results into file. Defaults to False.
"""
if not self._is_job_input(input_) and len(
results) == 1: # Only one input sample
raw_text = list(results.values())[0]
else:
raw_text = self._format_task_results(results)
print(raw_text, end='') # Stdout
if self._is_job_input(
input_) and job_dump_result: # Dump to *.job.done
try:
job_output_file = os.path.abspath(input_) + '.done'
sys.stdout = open(job_output_file, 'w')
print(raw_text, end='')
                logger.info(f'Results have been saved to: {job_output_file}')
finally:
sys.stdout.close()
def _is_job_input(self, input_: Union[str, os.PathLike]) -> bool:
"""
Check if current input file is a job input or not.
Args:
input_ (Union[str, os.PathLike]): Input file of current task.
Returns:
bool: return `True` for job input, `False` otherwise.
"""
return input_ and os.path.isfile(input_) and (input_.endswith('.job') or
input_.endswith('.txt'))
def _get_job_contents(
self, job_input: os.PathLike) -> Dict[str, Union[str, os.PathLike]]:
"""
Read a job input file and return its contents in a dictionary.
Args:
job_input (os.PathLike): The job input file.
Returns:
Dict[str, str]: Contents of job input.
"""
job_contents = OrderedDict()
with open(job_input) as f:
for line in f:
line = line.strip()
if not line:
continue
                k, v = line.split(' ', 1)
job_contents[k] = v
return job_contents
def _format_task_results(
self, results: Dict[str, Union[str, os.PathLike]]) -> str:
"""
Convert task results to raw text.
Args:
results (Dict[str, str]): A dictionary of task results.
Returns:
str: A string object contains task results.
"""
ret = ''
for k, v in results.items():
ret += f'{k} {v}\n'
return ret
def disable_task_loggers(self):
"""
Disable all loggers in current task.
"""
loggers = [
logging.getLogger(name) for name in logging.root.manager.loggerDict
]
for l in loggers:
l.disabled = True
def _get_pretrained_path(self, tag: str) -> os.PathLike:
"""
Download and returns pretrained resources path of current task.
"""
support_models = list(self.pretrained_models.keys())
        assert tag in self.pretrained_models, 'The model "{}" you want to use is not supported. Please choose another model.\nThe supported models include:\n\t\t{}\n'.format(
tag, '\n\t\t'.join(support_models))
res_path = os.path.join(MODEL_HOME, tag)
decompressed_path = download_and_decompress(self.pretrained_models[tag],
res_path)
decompressed_path = os.path.abspath(decompressed_path)
logger.info(
'Use pretrained model stored in: {}'.format(decompressed_path))
return decompressed_path
def show_rtf(self, info: Dict[str, List[float]]):
"""
        Calculate the RTF (real-time factor) of the current task and show the results.
"""
num_samples = 0
task_duration = 0.0
wav_duration = 0.0
for start, end, dur in zip(info['start'], info['end'], info['extra']):
num_samples += 1
task_duration += end - start
wav_duration += dur
logger.info('Sample Count: {}'.format(num_samples))
logger.info('Avg RTF: {}'.format(task_duration / wav_duration))
|
StarcoderdataPython
|
33236
|
<reponame>daaawx/bearblog<filename>blogs/migrations/0012_auto_20200601_1247.py
# Generated by Django 3.0.6 on 2020-06-01 12:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blogs', '0011_auto_20200531_0915'),
]
operations = [
migrations.RemoveField(
model_name='post',
name='tags',
),
migrations.AddField(
model_name='blog',
name='hashtags',
field=models.TextField(blank=True),
),
]
|
StarcoderdataPython
|
6454770
|
"""
Unit tests for the Deis CLI auth commands.
Run these tests with "python -m unittest client.tests.test_auth"
or with "./manage.py test client.AuthTest".
"""
from __future__ import unicode_literals
from unittest import TestCase
import pexpect
from .utils import DEIS
from .utils import DEIS_SERVER
from .utils import purge
from .utils import register
class AuthTest(TestCase):
@classmethod
def setUpClass(cls):
cls.username, cls.password = register()
@classmethod
def tearDownClass(cls):
purge(cls.username, cls.password)
def test_login(self):
# log in the interactive way
child = pexpect.spawn("{} login {}".format(DEIS, DEIS_SERVER))
child.expect('username: ')
child.sendline(self.username)
child.expect('password: ')
child.sendline(self.password)
child.expect("Logged in as {}".format(self.username))
child.expect(pexpect.EOF)
def test_logout(self):
child = pexpect.spawn("{} logout".format(DEIS))
child.expect('Logged out')
# log in the one-liner way
child = pexpect.spawn("{} login {} --username={} --password={}".format(
DEIS, DEIS_SERVER, self.username, self.password))
child.expect("Logged in as {}".format(self.username))
child.expect(pexpect.EOF)
|
StarcoderdataPython
|
6501963
|
<reponame>eigenfoo/ml-adventure
# Copyright 2018 <NAME>, <NAME>, <NAME>, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from adventurelib import (when, start, Room, Item, Bag,
say, set_context, get_context)
def dungeon01():
room1 = Room("You enter your home.")
room1.desc = """
Going abroad for college, you don’t really get
to go home a lot.
Most of your friends take a two or three hour train ride back home,
but it’s a 16 hour flight for you.
You’ve spent 18 years of your life in between these four walls,
and one day you packed your life into a suitcase and left.
That’s how you left home,
and that’s how you come back to it:
with a suitcase.
"""
room2 = room1.west = Room("""
The dining table is set for dinner.
Your mom is sitting watching the evening news on TV.
""")
room2.desc = """
You mentally tune out the news.
Something about more demonstrations and protests.
You get so tired of the politicking that goes on here sometimes.
"""
room2.dinner = False
room3 = room2.west = Room("""
The hallway is dark.
You turn on the lights, which throws amber light
on what would otherwise be bare white walls.
""")
room3.desc = """
You’ve always thought that the walls should be decorated.
Perhaps photos of dad, you think.
He’s always the one taking photos, he always insists on it,
but that means that no one takes photos of him.
It’d be nice to see a photo of him.
"""
room6 = room3.north = Room("""
You enter your dad’s study.
""")
room6.desc = """
The study is dark, but the light from the corridor pierces through
the hinges of the door and collides into the bookshelf.
It reflects off something, startling you.
"""
photo = Item('framed photograph')
room6.items = Bag({photo})
room7 = room6.north = Room("""
You enter the master bedroom.
""")
room7.desc = """
It’s dark and cold.
    The lights are off, and nobody ever turns off the air conditioning here.
    The imperturbable blue light in the corner of the ceiling stares back at you,
as it continues to blow cold air into the room.
"""
room4 = room3.south = Room("""
You enter your helper’s room.
""")
room4.desc = """
It’s by far the smallest of the rooms, and the most full of stuff.
Growing up, you never really understood that domestic helpers
weren’t common in other parts of the world...
In fact, thinking about it now, it’s a bit strange
to let someone else into your home
and treat them basically as part of the family,
but that they’re under your employ.
"""
room5 = room3.west = Room("""
You enter your room.
""")
room5.desc = """
It’s your room.
You can navigate it even though its pitch black.
You still remember how your dad thought that
the bookshelf was too deep,
leaving you no space to play.
So he sheared one and a half inches off the entire face of the bookshelf.
"""
music_box = Item('music box')
room5.items = Bag({music_box})
room8 = room1.north = Room("""
You move into the kitchen.
Your dad is cooking.
It smells wonderful, as always.
""")
room8.desc = """
A beef stew with salad: one of your dad’s specials.
The smell of lovingly caramelized beef and delightfully cooked vegetables
envelops the kitchen as your dad plates up the food.
"""
dad = Item('Dad')
dinner = Item('dinner')
room8.items = Bag({dad, dinner})
wait = room8.east = Room("""
You are in a waiting room.
""")
wait.desc = "You must wait!"
exit_dir = 'east'
return [room1, room2, room3, room4, room5, room6, room7, room8,
wait, exit_dir]
def dungeon02():
room1 = Room("""
The boat is shaking quite vigorously.
""")
room1.desc = """
You hope the dramamine that you took last night
will help fight off the motion sickness.
Your friend and his mom are also on the boat.
The captain is mumbling something incomprehensible in the corner.
"""
room2 = room1.east = Room("""
This is the right side of the boat.
""")
room2.desc = """
Clouds are forming in the distance, you hope it doesn’t rain.
"""
rope = Item('rope')
backpack = Item('backpack')
room2.items = Bag({rope, backpack})
room3 = room2.south = Room("""
The captain is demonstrating how to catch flounder.
""")
room3.desc = """
The fishing line takes 5 minutes to reach the bottom of these open waters.
"""
room4 = room3.south = Room("""
You see the giant anchor holding the boat’s position.
""")
room4.desc = """
Best not to touch the anchor!
"""
anchor = Item('anchor')
room4.items = Bag({anchor})
room5 = room1.west = Room("""
This is the left side of the boat.
""")
room5.desc = """
You can barely see the shores of Anchorage.
Seagulls fly over the boat.
Your friend is staring off into the distance with his fishing rod.
Maybe he’s thinking of home?
"""
room6 = room5.west = Room("""
You start to feel the motion sickness creep up.
""")
room6.desc = """
You should probably go back.
"""
room7 = room5.south = Room("""
The boat’s name is Big Thunder.
""")
room7.desc = """
You wonder who came up
with such a stupid sounding name.
"""
fishing_rod = Item('fishing rod')
room7.items = Bag({fishing_rod})
room8 = room7.south = Room("""
The captain has set up station for you and your friend to fish.
""")
room8.desc = """
He seems to be getting impatient.
"""
fishing_station = Item('fishing station')
room8.items = Bag({fishing_station})
wait = room8.west = Room("""
You are in a waiting room.
""")
wait.desc = "You must wait!"
exit_dir = 'west'
return [room1, room2, room3, room4, room5, room6, room7, room8,
wait, exit_dir]
def dungeon03():
room1 = Room("""
You wake up to the faint smell of morning air.
""")
room1.desc = """
The ambient snoring of your friend fills the room.
It suddenly dawns upon you that you are in an AirBnb in Kyoto.
You should probably get ready and prepare for a full day of adventuring.
"""
phone = Item('phone')
friend = Item('friend')
room1.items = Bag({phone, friend})
room2 = room1.north = Room("""
The sliding door to the balcony is slightly ajar.
""")
room2.desc = """
The dawn light streams through the curtains in a surreal manner.
The buildings of Kyoto stand proud against the horizon.
A chilly breeze continuously enters the room from beyond the balcony.
"""
room3 = room2.north = Room("""
You open the sliding door and step onto the balcony.
""")
room3.desc = """
It isn’t such a windy day today, thankfully.
You were lucky with this AirBnb, the view is incredible.
You can see all the way to the river.
Perhaps today would be a good day to go to the Kyoto Animation Store.
"""
room4 = room3.west = Room("""
You are faced with a giant building.
""")
room4.desc = """
Well, not every direction can have amazing views.
"""
room5 = room1.west = Room("""
The bathroom is pretty small.
""")
room5.desc = """
In fact, the entire room is the shower room.
The plumbing is amazing though.
The shower is hot and has high water pressure.
"""
shower = Item('shower')
towel = Item('towel')
soap = Item('soap')
room5.items = Bag({shower, towel, soap})
room6 = room5.south = Room("""
The kitchen area is pretty small.
""")
room6.desc = """
There’s barely enough space for 1 stove burner and a tiny sink.
This is where the two of you store your toothbrushes.
"""
toothbrush = Item('toothbrush')
toothpaste = Item('toothpaste')
sink = Item('sink')
    room6.items = Bag({toothbrush, toothpaste, sink})
room7 = room6.south = Room("""
A dark hallway.
""")
room7.desc = """
Strangely, this part of the apartment doesn’t have lights.
How peculiar.
"""
room8 = room7.east = Room("""
The entrance to the apartment.
""")
room8.desc = """
Your shoes are neatly arranged in the corner.
"""
shoes = Item('pair of shoes')
room8.items = Bag({shoes})
wait = room8.south = Room("""
You are in a waiting room.
""")
wait.desc = "You must wait!"
exit_dir = 'south'
return [room1, room2, room3, room4, room5, room6, room7, room8,
wait, exit_dir]
def dungeon04():
room1 = Room("""
You stand in front of <NAME>'s mahogany wall unit.
""")
room1.desc = """
Books are crammed into the unit's shelves. The Iliad,
Atlas Shrugged, and The Brothers Karamazov jump out at you.
The middle shelf houses photos of five generations of Muller women.
A black and white photo in a pewter frame shows
a young woman wearing a large cartwheel hat covered
in feathers.
"""
room2 = room1.east = Room("""
You sit next to Grandma on her couch in the living room.
She has been working on Wednesday's crossword
for a few hours. She asks if you'd like to help her.
""")
room2.desc = """
On the center table sits a ceramic vignette
of a mother and her two children. The son, an infant,
is cradled in her arms. The daughter fawns over the son
from her mother's side.
"""
pen = Item('pen')
room2.items = Bag({pen})
room3 = room1.north = Room("""
You step into the foyer
""")
room3.desc = """
Next to the front door hangs a wicker knot tied in Savannah.
    Beneath the knot is a basket filled with balls made of china.
Each is speckled with a distinct pattern, deep blue and cracked white.
"""
room4 = room3.west = Room("""
You walk to the window in Grandma's kitchen
and look out at the Henry Hudson Parkway.
The smell of stale cigarettes hangs in the air.
""")
room4.desc = """
A tea bag, a spoon, a bowl of sugar, and a ceramic cup
are set up on the kitchen counter for tomorrow morning.
The cup is stained brown on the inside from years of service.
Its sides are dotted with soft pink roses
flowering off a green vine.
This week’s tea is Twinings Ceylon Orange Pekoe.
"""
room5 = room3.north = Room("""
You step into a short hallway. A mirror hangs at the far end.
""")
room5.desc = """
You've looked better.
"""
room6 = room5.east = Room("""
You proceed down a long hallway
""")
room6.desc = """
To the right is Grandma's bedroom.
The door is closed.
You're not allowed to go there.
"""
room7 = room6.north = Room("""
You go into the bathroom. It's painted
dark grey with white accents.
""")
room7.desc = """
The bathroom's wooden door is cracked and
no longer fits in the door frame.
The shower curtain is made of clear plastic. Through it
you can see a shower cap, a loofah, and Dove body wash.
"""
room8 = room6.east = Room("""
You creep into Grandma's bedroom. The blinds are drawn.
A small painting hangs on the wall next to the
door of the bedroom.
""")
room8.desc = """
The paint near the windows has chipped
due to the rain that seeped in during the hurricane.
There is a small shelf hung on each side of Grandma's bed,
each supporting a small statuette of a child
riding a merry-go-round horse.
"""
small_painting = Item('small painting')
room8.items = Bag({small_painting})
wait = room8.east = Room("""
You are in a waiting room.
""")
wait.desc = "You must wait!"
exit_dir = 'east'
return [room1, room2, room3, room4, room5, room6, room7, room8,
wait, exit_dir]
|
StarcoderdataPython
|
6636697
|
from . import BatsException
class InputArgsException(BatsException):
pass
|
StarcoderdataPython
|
1667854
|
#!/usr/bin/python3
# Simple MQTT publishing of Modbus TCP sources
#
# Written and (C) 2018 by <NAME> <<EMAIL>>
# Provided under the terms of the MIT license
#
# Requires:
# - pyModbusTCP - https://github.com/sourceperl/pyModbusTCP
# - Eclipse Paho for Python - http://www.eclipse.org/paho/clients/python/
# polling frequency was 30 s before; now changed to 5*60 = 300 s
import json
import _thread
import datetime
import threading
import argparse
import logging
import logging.handlers
import time
import paho.mqtt.client as mqtt
import paho.mqtt.subscribe as subscribe
import sys
import configparser
import traceback
from pyModbusTCP.client import ModbusClient
parser = argparse.ArgumentParser(description='Bridge between Modbus TCP and MQTT')
parser.add_argument('--mqtt-host', default='localhost', help='MQTT server address. \
Defaults to "localhost"')
parser.add_argument('--mqtt-port', default='8883', type=int, help='MQTT server port. \
Defaults to 8883')
parser.add_argument('--mqtt-topic', default='', help='Topic prefix to be used for \
    subscribing/publishing. Defaults to an empty string')
parser.add_argument('--modbus-host', help='Modbus server address')
parser.add_argument('--modbus-port', default='502', type=int, help='Modbus server port. \
Defaults to 502')
parser.add_argument('--registers', help='Register definition file. Required!')
parser.add_argument('--frequency', default='50', help='How often the source is \
    checked for changes, in seconds. Integers only. Defaults to 50')
parser.add_argument('--only-changes', default='False', help='When set to True then \
only changed values are published')
args = parser.parse_args()
# logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)
handler = logging.FileHandler("log.txt")
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
# attach this logger wherever else it is needed
#logger.info("Start print log")
topic = args.mqtt_topic
# if not topic.endswith("/"):
# topic += "/"
frequency = int(args.frequency)
print("ModBusTcp2Mqtt App Started...Time:%s" % (datetime.datetime.now()))
mbClient = None
lastValue = {}
config = configparser.ConfigParser()
config.read(args.registers)
config01 = config['0x01']
config02 = config['0x02']
config03 = config['0x03']
config04 = config['0x04']
config05 = config['0x05']
config06 = config['0x06']
config0F = config['0x0F']
config10 = config['0x10']
config2B = config['0x2B']
# Any received value in the upper range (32768-65535)
# is interpreted as negative value (in the range -32768 to -1).
def reMap(value, maxInput=65535, minInput=64535, maxOutput=-1, minOutput=-1001):
# if value >= minInput:
# value = maxInput if value > maxInput else value
# value = minInput if value < minInput else value
#
# inputSpan = maxInput - minInput
# outputSpan = maxOutput - minOutput
#
# scaledThrust = float(value - minInput) / float(inputSpan)
#
# return minOutput + (scaledThrust * outputSpan)
# else:
return value
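# Note: reMap() above currently returns its input unchanged; the remapping
# described in its comment is disabled. A minimal sketch of the intended
# two's-complement interpretation (the 16-bit constants are illustrative):
def to_signed16(value):
    # interpret 32768..65535 as the negative range -32768..-1
    return value - 65536 if value >= 32768 else value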
class Element:
def __init__(self, row):
self.topic = row[0]
self.value = row[1]
def publish(self):
try:
if self.value != lastValue.get(self.topic, 0) or args.only_changes == 'False':
lastValue[self.topic] = self.value
fulltopic = topic + self.topic
## mqClient.subscribe(fulltopic)
mqClient.publish(fulltopic, reMap(self.value), qos=1, retain=False)
except Exception as exc:
logging.info("Error reading " + self.topic + ": %s", exc)
def readMb():
#while True:
# open or reconnect TCP to server
logger.info("readMb Run...Time:%s" % (datetime.datetime.now()))
if not mbClient.is_open():
if not mbClient.open():
logging.error("unable to connect to " + SERVER_HOST + ":" + str(SERVER_PORT))
data = []
for key, value in config01.items():
        # skip command keys when reading
if mbClient.is_open() and not str(key).__contains__('command'):
row = mbClient.read_coils(int(value))
if not row is None:
row.insert(0, key)
data.append(row)
for key, value in config02.items():
if mbClient.is_open() and not str(key).__contains__('command'):
row = mbClient.read_discrete_inputs(int(value))
if not row is None:
row.insert(0, key)
data.append(row)
for key, value in config03.items():
if mbClient.is_open() and not str(key).__contains__('command'):
row = mbClient.read_holding_registers(int(value))
if not row is None:
row.insert(0, key)
data.append(row)
for key, value in config04.items():
if mbClient.is_open() and not str(key).__contains__('command'):
row = mbClient.read_input_registers(int(value))
if not row is None:
row.insert(0, key)
data.append(row)
for row in data:
e = Element(row)
e.publish()
#time.sleep(int(frequency))
global timer
timer = threading.Timer(120, readMb)
timer.start()
logger.info("readMb Started...Time:%s" % (datetime.datetime.now()))
# publish command acknowledgements back over MQTT
def down_back(result):
callback_topic = "command/huailaiwaterworks/one/downback"
mqClient.publish(callback_topic, result, qos=1, retain=False)
print("publishCallBack-Msg:%s" % result)
logger.info("publishCallBack-Msg:%s" % result)
# call back msg
def msgCallback():
def on_message_print(client, userdata, message):
msg = str(message.payload)
newstr = msg.strip('b')
print("callback msg:%s" % (newstr))
logger.info("callback msg:%s" % (newstr))
if not mbClient.is_open():
if not mbClient.open():
logging.error("unable to connect to " + SERVER_HOST + ":" + str(SERVER_PORT))
mbClient.open()
print("reconnected to modbus finished...")
        # waterworks pump on/off switches (coils)
for key, value in config01.items():
if mbClient.is_open():
if newstr == key:
row = mbClient.read_coils(int(value))
print("coils-read-back:%s" % (row))
logger.info("coils-read-back:%s" % (row))
result1 = mbClient.write_single_coil(int(value), True)
row = mbClient.read_coils(int(value))
print("coils-write-back1:%s ,NOW Status:%s" % (result1, row))
logger.info("coils-write-back1:%s ,NOW Status:%s" % (result1, row))
                    time.sleep(2)  # allow the PLC time to react
result2 = mbClient.write_single_coil(int(value), False)
row = mbClient.read_coils(int(value))
                    # publish the execution receipt as well
if result1 is not None:
if result1:
down_back(newstr + '/0000')
if result1 is None or row is None:
down_back(newstr + '/9999')
print("coils-write-back2:%s,NOW Status:%s" % (result2, row))
print(key + ":coils-operation-over...")
logger.info("coils-write-back2:%s,NOW Status:%s" % (result2, row))
logger.info(key + ":coils-operation-over...")
        # holding registers: booster station and wells
for key, value in config03.items():
if mbClient.is_open():
if newstr == key:
                    # build the write value from the topic
                    # booster station: check whether the first bit is the zero bit
                    # the address order is wrong (pump 1 opens pump 4, pump 3 opens
                    # pump 1, pump 4 opens pump 3), so the mapping below adjusts for it
if 'station_pump4#start' in newstr:
write_value = 2
if 'station_pump4#stop' in newstr:
write_value = 4
if 'station_pump2#start' in newstr:
write_value = 8
if 'station_pump2#stop' in newstr:
write_value = 16
if 'station_pump1#start' in newstr:
write_value = 32
if 'station_pump1#stop' in newstr:
write_value = 64
if 'station_pump3#start' in newstr:
write_value = 128
if 'station_pump3#stop' in newstr:
write_value = 256
                    # wells: keep the linked action; refine the conditions for selecting a well
if 'command/well' in newstr and 'pump#start' in newstr:
write_value = 1
if 'command/well' in newstr and 'pump#stop' in newstr:
write_value = 2
if 'command/well' in newstr and 'pump#linkact' in newstr:
write_value = 4
row = mbClient.read_holding_registers(int(value))
print("holding-Register-read-back:%s" % (row))
logger.info("holding-Register-read-back:%s" % (row))
result1 = mbClient.write_single_register(int(value), write_value)
row = mbClient.read_holding_registers(int(value))
print("holding-Register-write-back1:%s ,addr:%s ,writeValue:%s,NOW value:%s" % (
result1, value, write_value, row))
logger.info("holding-Register-write-back1:%s ,addr:%s ,writeValue:%s,NOW value:%s" % (
result1, value, write_value, row))
time.sleep(2)
result2 = mbClient.write_single_register(int(value), 0)
row = mbClient.read_holding_registers(int(value))
if result1 is not None:
if result1:
down_back(newstr + '/0000')
if result1 is None or row is None:
down_back(newstr + '/9999')
print("holding-Register-write-back2:%s,NOW Status:%s" % (result2, row))
print(key + ":holding-Register-operation-over...")
logger.info("holding-Register-write-back2:%s,NOW Status:%s" % (result2, row))
logger.info(key + ":holding-Register-operation-over...")
subscribe.callback(on_message_print, command_topic, hostname="192.168.127.12")
try:
mqClient = mqtt.Client()
# mqClient.connect("MacBook-Air.local", 1883) 上线时可以还原为这个地址
mqClient.connect("192.168.127.12", 1883)
mqClient.tls_set("cacert.pem", "client-cert.pem", "client-key.pem")
mqClient.loop_start()
    # subscribe to the command topic; the extra topic depth helps prevent accidental triggering
command_topic = "huailaiwater/ESLink/prod/command/"
mqClient.subscribe(command_topic)
print("SUBCRIBE " + command_topic + " Successfully")
mbClient = ModbusClient()
# define modbus server host, port
SERVER_HOST = args.modbus_host
SERVER_PORT = args.modbus_port
mbClient.host(SERVER_HOST)
mbClient.port(SERVER_PORT)
    # start the Modbus reader and the command-subscription thread
# _thread.start_new_thread(readMb, ())
readMb()
_thread.start_new_thread(msgCallback, ())
except Exception as e:
# traceback.print_exc()+
logging.error("Unhandled error [" + str(e) + traceback.print_exc() + "]")
sys.exit(1)
while 1:
    time.sleep(1)  # idle the main thread instead of busy-waiting
|
StarcoderdataPython
|
8020344
|
<filename>retro/const.py<gh_stars>0
# -*- coding: utf-8 -*-
# pylint: disable=wrong-import-position, range-builtin-not-iterating
"""
Physical constants and constant-for-us values
"""
from __future__ import absolute_import, division, print_function
__all__ = [
'omkeys_to_sd_indices', 'get_sd_idx', 'get_string_dom_pair',
# Constants
'PI', 'TWO_PI', 'PI_BY_TWO', 'SPEED_OF_LIGHT_M_PER_NS',
# Pre-calculated values
'COS_CKV', 'THETA_CKV', 'SIN_CKV',
'TRACK_M_PER_GEV', 'TRACK_PHOTONS_PER_M', 'CASCADE_PHOTONS_PER_GEV',
'IC_DOM_JITTER_NS', 'DC_DOM_JITTER_NS', 'POL_TABLE_DCOSTHETA',
'POL_TABLE_DRPWR', 'POL_TABLE_DT', 'POL_TABLE_RPWR', 'POL_TABLE_RMAX',
'POL_TABLE_NTBINS', 'POL_TABLE_NRBINS', 'POL_TABLE_NTHETABINS',
'IC_DOM_QUANT_EFF', 'DC_DOM_QUANT_EFF',
# Particle naming conventions
'ABS_FLAV_STR', 'ABS_FLAV_TEX', 'BAR_NOBAR_STR', 'BAR_NOBAR_TEX',
'INT_TYPE_STR', 'INT_TYPE_TEX', 'PDG_STR', 'PDG_TEX', 'PDG_INTER_STR',
'PDG_INTER_TEX', 'STR_TO_PDG_INTER',
# "Enum"-like things
'STR_ALL', 'STR_IC', 'STR_DC', 'AGG_STR_NONE', 'AGG_STR_ALL',
'AGG_STR_SUBDET', 'DOM_ALL',
'NUM_STRINGS', 'NUM_DOMS_PER_STRING', 'NUM_DOMS_TOT',
'IC_STRS', 'DC_STRS', 'DC_IC_STRS', 'DC_ALL_STRS', 'DC_SUBDUST_DOMS',
'IC_SUBDUST_DOMS', 'DC_SUBDUST_STRS_DOMS', 'DC_IC_SUBDUST_STRS_DOMS',
'DC_ALL_SUBDUST_STRS_DOMS', 'ALL_STRS', 'ALL_DOMS', 'ALL_STRS_DOMS',
'ALL_STRS_DOMS_SET', 'DC_ALL_STRS_DOMS',
'EMPTY_HITS', 'EMPTY_SOURCES',
'SRC_OMNI', 'SRC_CKV_BETA1',
'PARAM_NAMES', 'PEGLEG_PARAM_NAMES', 'SCALING_PARAM_NAMES',
]
__author__ = '<NAME>, <NAME>'
__license__ = '''Copyright 2017 <NAME> and <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.'''
from itertools import product
from os.path import abspath, dirname
import sys
import numpy as np
if __name__ == '__main__' and __package__ is None:
RETRO_DIR = dirname(dirname(dirname(abspath(__file__))))
if RETRO_DIR not in sys.path:
sys.path.append(RETRO_DIR)
from retro import FTYPE
from retro import retro_types
def omkeys_to_sd_indices(omkeys):
"""Get a single integer index from OMKeys.
Parameters
----------
omkeys : array of dtype OMKEY_T
The dtype `OMKEY_T` must contain "string" and "dom" and can optionally
include "pmt".
Returns
-------
sd_idx : array of np.uint32
"""
if 'pmt' in omkeys.dtype.names:
raise NotImplementedError("OMKey field 'pmt' not implemented")
return get_sd_idx(string=omkeys['string'], dom=omkeys['dom'])
def get_sd_idx(string, dom, pmt=0):
"""Get a single integer index from an IceCube string number (from 1 to 86)
and DOM number (from 1 to 60).
Parameters
----------
    string : int in [1, 86]
String number
dom : int in [1, 60]
DOM number
pmt : int
PMT number in the DOM; if == 0, then this is ignored.
Returns
-------
sd_idx : int
"""
if pmt > 0:
raise NotImplementedError('PMT != 0 is not implemented')
return (dom - 1) * NUM_STRINGS + (string - 1)
def get_string_dom_pair(sd_idx):
"""Get an IceCube string number (1 to 86) and a DOM number (1 to 60) from
the single-integer index (sd_idx).
Parameters
----------
sd_idx : int in [0, 5159]
Returns
-------
string : int in [1, 86]
dom : int in [1, 60]
"""
dom_idx, string_idx = divmod(sd_idx, NUM_STRINGS)
string = string_idx + 1
dom = dom_idx + 1
return string, dom
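# A quick sanity check of the index round-trip (illustrative only):
#
#   >>> get_sd_idx(string=1, dom=1)
#   0
#   >>> get_string_dom_pair(get_sd_idx(string=86, dom=60))
#   (86, 60)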
# -- Physical / mathematical constants -- #
PI = FTYPE(np.pi)
"""pi"""
TWO_PI = FTYPE(2*np.pi)
"""2 * pi"""
PI_BY_TWO = FTYPE(np.pi / 2)
"""pi / 2"""
SPEED_OF_LIGHT_M_PER_NS = FTYPE(299792458 / 1e9)
"""Speed of light in units of m/ns"""
# -- Pre-calculated values -- #
COS_CKV = 0.764540803152
"""Cosine of the Cherenkov angle for beta ~1 and IceCube phase index as used"""
THETA_CKV = np.arccos(0.764540803152)
"""Cherenkov angle for beta ~1 and IceCube phase index as used"""
SIN_CKV = np.sin(THETA_CKV)
"""Sine of the Cherenkov angle for beta ~1 and IceCube phase index as used"""
TRACK_M_PER_GEV = FTYPE(15 / 3.3)
"""Track length per energy, in units of m/GeV"""
TRACK_PHOTONS_PER_M = FTYPE(2451.4544553)
"""Track photons per length, in units of 1/m (see ``nphotons.py``)"""
CASCADE_PHOTONS_PER_GEV = FTYPE(12805.3383311)
"""Cascade photons per energy, in units of 1/GeV (see ``nphotons.py``)"""
# TODO: Is jitter same (or close enough to the same) for all DOMs? Is it
# different for DeepCore vs. non-DeepCore DOMs? Didn't see as much in
# section 3.3. of arXiv:1612.05093v2 so assuming same for now.
# See arXiv:1612.05093v2, section 3.3
IC_DOM_JITTER_NS = 1.7
"""Timing jitter (stddev) for string 0-79 DOMs, in units of ns"""
# See arXiv:1612.05093v2, section 3.3
DC_DOM_JITTER_NS = 1.7
"""Timing jitter (stddev) for DeepCore (strings 80-86) DOMs, in units of ns"""
# TODO: figure these out from the tables rather than defining as constants
POL_TABLE_RMAX = 400 # m
POL_TABLE_DT = 10 # ns
POL_TABLE_RPWR = 2
POL_TABLE_DRPWR = 0.1
POL_TABLE_DCOSTHETA = -0.05
POL_TABLE_NTBINS = 300
POL_TABLE_NRBINS = 200
POL_TABLE_NTHETABINS = 40
#IC_DOM_QUANT_EFF = 0.25
IC_DOM_QUANT_EFF = 1.
"""scalar in [0, 1] : (Very rough approximation!) IceCube (i.e. non-DeepCore)
DOM quantum efficiency. Multiplies the tabulated detection probabilities to
yield the actual probability that a photon is detected."""
#DC_DOM_QUANT_EFF = 0.35
DC_DOM_QUANT_EFF = 1.
"""scalar in [0, 1] : (Very rough approximation!) DeepCore DOM quantum
efficiency. Multiplies the tabulated detection probabilities to yield the
actual probability that a photon is detected."""
# -- Particle / interaction type naming conventions -- #
ABS_FLAV_STR = {12: 'nue', 13: 'numu', 14: 'nutau'}
ABS_FLAV_TEX = {12: r'\nu_e', 13: r'\nu_\mu', 14: r'\nu_\tau'}
BAR_NOBAR_STR = {-1: 'bar', 1: ''}
BAR_NOBAR_TEX = {-1: r'\bar', 1: ''}
INT_TYPE_STR = {1: 'cc', 2: 'nc'}
INT_TYPE_TEX = {1: r'\, {\rm CC}', 2: r'\, {\rm NC}'}
PDG_STR = {}
PDG_TEX = {}
for _bnb, _abs_code in product(BAR_NOBAR_STR.keys(), ABS_FLAV_STR.keys()):
PDG_STR[_abs_code*_bnb] = ABS_FLAV_STR[_abs_code] + BAR_NOBAR_STR[_bnb]
PDG_TEX[_abs_code*_bnb] = BAR_NOBAR_TEX[_bnb] + ABS_FLAV_TEX[_abs_code]
PDG_INTER_STR = {}
PDG_INTER_TEX = {}
for _pdg, _it in product(PDG_STR.keys(), INT_TYPE_STR.keys()):
PDG_INTER_STR[(_pdg, _it)] = '%s_%s' % (PDG_STR[_pdg], INT_TYPE_STR[_it])
PDG_INTER_TEX[(_pdg, _it)] = '%s %s' % (PDG_TEX[_pdg], INT_TYPE_TEX[_it])
STR_TO_PDG_INTER = {v: k for k, v in PDG_INTER_STR.items()}
# -- "enums" -- #
STR_ALL, STR_IC, STR_DC = -1, -2, -3
AGG_STR_NONE, AGG_STR_ALL, AGG_STR_SUBDET = 0, 1, 2
DOM_ALL = -1
# -- geom constants --- #
NUM_STRINGS = 86
NUM_DOMS_PER_STRING = 60
NUM_DOMS_TOT = NUM_STRINGS * NUM_DOMS_PER_STRING
IC_STRS = np.array(range(1, 78+1), dtype=np.uint8)
DC_STRS = np.array(range(79, 86+1), dtype=np.uint8)
DC_IC_STRS = np.array([26, 27, 35, 36, 37, 45, 46], dtype=np.uint8)
DC_ALL_STRS = np.concatenate([DC_STRS, DC_IC_STRS], axis=0)
DC_SUBDUST_DOMS = np.array(range(11, 60+1), dtype=np.uint8)
IC_SUBDUST_DOMS = np.array(range(25, 60+1), dtype=np.uint8)
DC_SUBDUST_STRS_DOMS = np.array(
[get_sd_idx(s, d) for s, d in product(DC_STRS, DC_SUBDUST_DOMS)]
)
DC_IC_SUBDUST_STRS_DOMS = np.array(
[get_sd_idx(s, d) for s, d in product(DC_IC_STRS, IC_SUBDUST_DOMS)]
)
DC_ALL_SUBDUST_STRS_DOMS = np.concatenate(
(DC_SUBDUST_STRS_DOMS, DC_IC_SUBDUST_STRS_DOMS)
)
ALL_STRS = list(range(1, 86+1))
ALL_DOMS = list(range(1, 60+1))
ALL_STRS_DOMS = np.array([get_sd_idx(s, d) for s, d in product(ALL_STRS, ALL_DOMS)])
ALL_STRS_DOMS_SET = set(ALL_STRS_DOMS)
DC_ALL_STRS_DOMS = np.array([get_sd_idx(s, d) for s, d in product(DC_STRS, ALL_DOMS)])
EMPTY_HITS = np.empty(shape=0, dtype=retro_types.HIT_T)
EMPTY_SOURCES = np.empty(shape=0, dtype=retro_types.SRC_T)
SRC_OMNI = np.uint32(0)
"""Source kind designator for a point emitting omnidirectional light"""
SRC_CKV_BETA1 = np.uint32(1)
"""Source kind designator for a point emitting Cherenkov light with beta ~ 1"""
PARAM_NAMES = [
'time', 'x', 'y', 'z', 'track_azimuth', 'track_zenith', 'cascade_azimuth',
'cascade_zenith', 'track_energy', 'cascade_energy', 'cascade_d_zenith',
'cascade_d_azimuth'
]
"""All possible hypothesis param names"""
PEGLEG_PARAM_NAMES = ['track_energy']
"""Hypothesis param names handled by pegleg, if it's used"""
SCALING_PARAM_NAMES = ['cascade_energy']
"""Hypothesis param names handled by scaling, if it's used"""
|
StarcoderdataPython
|
11315213
|
<gh_stars>1-10
#!/usr/bin/env /Users/kcoufal/miniconda3/bin/python3.7
# -*- coding: utf8 -*-
from flask import Flask
import datetime as dt
import platform
server = Flask(__name__)
@server.route("/")
def message():
return "<html><body><h1>Hi, welcome to the website</h1></body></html>"
@server.route("/date")
def date():
    return str(int(dt.datetime.now().timestamp()))  # epoch seconds; strftime('%s') is not portable
if __name__ == "__main__":
server.run(host='0.0.0.0')
|
StarcoderdataPython
|
1665968
|
def fibonacci_generator(n):
result = []
index = 0
while index <= n:
if(index == 0 or index == 1):
result.append(index)
yield index
else:
result.append(result[index-1] + result[index-2])
yield result[index]
index = index+1
fib_gen = fibonacci_generator(8)
print(next(fib_gen))
print(next(fib_gen))
print(next(fib_gen))
print(next(fib_gen))
print(next(fib_gen))
print()
for seq in fibonacci_generator(8):
print(seq)
print()
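# A sketch of an equivalent generator that keeps only the last two values,
# giving O(1) memory instead of accumulating the whole sequence in a list:
def fibonacci_pairwise(n):
    a, b = 0, 1
    index = 0
    while index <= n:
        yield a
        a, b = b, a + b
        index += 1

for seq in fibonacci_pairwise(8):
    print(seq)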
|
StarcoderdataPython
|
3463279
|
#!/usr/bin/env python3
"""
Module with function to adds two arrays element-wise
"""
def add_arrays(arr1, arr2):
"""
Function to adds two arrays element-wise
Returns the a new array with the result
"""
if len(arr1) == len(arr2):
return [arr1[i] + arr2[i] for i in range(len(arr1))]
return None
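# Illustrative usage (the values are made up):
#
#   >>> add_arrays([1, 2, 3], [4, 5, 6])
#   [5, 7, 9]
#   >>> add_arrays([1, 2], [1, 2, 3]) is None
#   True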
|
StarcoderdataPython
|
3442863
|
<filename>UI/youtubePlayer.py
from PyQt5 import QtCore, QtWebEngineWidgets, QtWebChannel, QtNetwork
from Assets import firebaseConfig
HTML = '''
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="initial-scale=1.0, user-scalable=no"/>
<style type="text/css">
html {
height: 100%;
            width: 100%;
}
body {
height: 100%;
margin: 0;
            padding: 0;
width: 100%;
}
#video {
height: 100%;
margin: 0;
            padding: 0;
width: 100%;
}
</style>
<script type="text/javascript" src="qrc:///qtwebchannel/qwebchannel.js"></script>
<script type="text/javascript">
function reload(link) {
document.getElementById('video').src = 'https://www.youtube.com/embed/' + link;
}
</script>
</head>
<body>
<iframe frameborder="0" width="560" height="315" id="video" style="width:100%; height:100%;"
src="https://www.youtube.com/embed/LINK">
</iframe>
</body>
</html>
'''
class YoutubePlayer(QtWebEngineWidgets.QWebEngineView):
def __init__(self, parent=None, link=''):
super(YoutubePlayer, self).__init__(parent)
channel = QtWebChannel.QWebChannel(self)
self.page().setWebChannel(channel)
channel.registerObject("YoutubePlayer", self)
html = HTML.replace("LINK", link)
self.setHtml(html)
self._manager = QtNetwork.QNetworkAccessManager(self)
def runScript(self, script, callback=None):
if callback is None:
self.page().runJavaScript(script)
else:
self.page().runJavaScript(script, callback)
def reloadVideo(self, link):
html = HTML.replace("LINK", link)
self.setHtml(html)
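# A minimal sketch of standalone usage (the video id below is just an
# illustrative placeholder):
if __name__ == "__main__":
    import sys
    from PyQt5.QtWidgets import QApplication

    app = QApplication(sys.argv)
    player = YoutubePlayer(link="VIDEO_ID_HERE")
    player.resize(560, 315)
    player.show()
    sys.exit(app.exec_())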
|
StarcoderdataPython
|
3515176
|
<filename>opt_example.py
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 19 20:13:29 2022
@author: mahom
"""
import torch
from load_obj import load_obj
def opt_example(model,likelihood):
#DATA = load_obj("C:/Users/mahom/Desktop/GPt24_Full__std_y_allLocations.pkl");
DATA = load_obj("C:/Users/mahom/Desktop/GPK_NMF_std_y_allLocations.pkl");
M = DATA['MODELS_ALL']
ct = M[3]
AL = DATA['Alphas_ALL']
n_tasks = len(ct)
for task in range(0,n_tasks):
print(ct[task].kernel_)
for task in range(0,n_tasks):
with torch.no_grad():
model['task{}'.format(task+1)].covar_module.kernels[0].outputscale = ct[task].kernel_.k1.k1.k1.constant_value
model['task{}'.format(task+1)].covar_module.kernels[0].base_kernel.lengthscale = ct[task].kernel_.k1.k1.k2.length_scale
model['task{}'.format(task+1)].covar_module.kernels[1].bias = ct[task].kernel_.k1.k2.constant_value
model['task{}'.format(task+1)].likelihood.noise_covar.noise = ct[task].kernel_.k2.noise_level + torch.tensor([1e-6])
likelihood['task{}'.format(task+1)].noise_covar.noise = ct[task].kernel_.k2.noise_level+ torch.tensor([1e-6])
model['task{}'.format(task+1)].mean_module.constant = torch.nn.Parameter(torch.tensor([0.0], requires_grad=True))
# n_tasks = 24
# outputScales = torch.zeros(n_tasks,requires_grad=True)
# LScales = torch.zeros(n_tasks,1,1,requires_grad=True)
# bias = torch.zeros(n_tasks,requires_grad=True)
# noises = torch.zeros(n_tasks,requires_grad=True)
# for task in range(0,24):
# with torch.no_grad():
# noises[task] = ct[task].kernel_.k2.noise_level
# bias[task] = ct[task].kernel_.k1.k2.constant_value
# LScales[task] = ct[task].kernel_.k1.k1.k2.length_scale
# outputScales[task] = ct[task].kernel_.k1.k1.k1.constant_value
# model.likelihood.task_noises = noises # 0.5
# model.covar_module.kernels[0].base_kernel.lengthscale = LScales # probar 15 #3
# model.covar_module.kernels[1].bias = bias
# model.covar_module.kernels[0].outputscale = outputScales
|
StarcoderdataPython
|
8110416
|
<filename>src/uvm/reg/sequences/uvm_mem_walk_seq.py
#//
#// -------------------------------------------------------------
#// Copyright 2004-2008 Synopsys, Inc.
#// Copyright 2010 Mentor Graphics Corporation
#// Copyright 2019-2020 <NAME> (tpoikela)
#// All Rights Reserved Worldwide
#//
#// Licensed under the Apache License, Version 2.0 (the
#// "License"); you may not use this file except in
#// compliance with the License. You may obtain a copy of
#// the License at
#//
#// http://www.apache.org/licenses/LICENSE-2.0
#//
#// Unless required by applicable law or agreed to in
#// writing, software distributed under the License is
#// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
#// CONDITIONS OF ANY KIND, either express or implied. See
#// the License for the specific language governing
#// permissions and limitations under the License.
#// -------------------------------------------------------------
#//
import cocotb
from cocotb.triggers import Timer
from ..uvm_reg_sequence import UVMRegSequence
from ..uvm_reg_model import (UVM_FRONTDOOR, UVM_IS_OK, UVM_NO_HIER,
UVM_STATUS_NAMES)
from ...macros import (uvm_info, uvm_error, uvm_object_utils)
from ...base.uvm_resource_db import UVMResourceDb
from ...base.sv import sv
from ...base.uvm_object_globals import UVM_LOW
from ...base.uvm_globals import uvm_empty_delay
#//------------------------------------------------------------------------------
#// Title: Memory Walking-Ones Test Sequences
#//
#// This section defines sequences for applying a "walking-ones"
#// algorithm on one or more memories.
#//------------------------------------------------------------------------------
#//------------------------------------------------------------------------------
#// Class: UVMMemSingleWalkSeq
#//
#// Runs the walking-ones algorithm on the memory given by the <mem> property,
#// which must be assigned prior to starting this sequence.
#//
#// If bit-type resource named
#// "NO_REG_TESTS", "NO_MEM_TESTS", or "NO_MEM_WALK_TEST"
#// in the "REG::" namespace
#// matches the full name of the memory,
#// the memory is not tested.
#//
#//| uvm_resource_db#(bit)::set({"REG::",regmodel.blk.mem0.get_full_name()},
#//| "NO_MEM_TESTS", 1, this);
#//
#// The walking ones algorithm is performed for each map in which the memory
#// is defined.
#//
#//| for (k = 0 thru memsize-1)
#//| write addr=k data=~k
#//| if (k > 0) {
#//| read addr=k-1, expect data=~(k-1)
#//| write addr=k-1 data=k-1
#//| if (k == last addr)
#//| read addr=k, expect data=~k
#//
#//------------------------------------------------------------------------------
class UVMMemSingleWalkSeq(UVMRegSequence): # (uvm_sequence #(uvm_reg_item))
# // Function: new
# //
# // Creates a new instance of the class with the given name.
#
def __init__(self, name="UVMMemWalkSeq"):
super().__init__(name)
# // Variable: mem
# //
# // The memory to test; must be assigned prior to starting sequence.
self.mem = None
# endfunction
# // Task: body
# //
# // Performs the walking-ones algorithm on each map of the memory
# // specified in <mem>.
async def body(self):
maps = [] # uvm_reg_map [$]
n_bits = 0
mem = self.mem
if mem is None:
uvm_error("UVMMemWalkSeq", "No memory specified to run sequence on")
return
# Memories with some attributes are not to be tested
if (UVMResourceDb.get_by_name("REG::" + mem.get_full_name(),
"NO_REG_TESTS", 0) is not None or
UVMResourceDb.get_by_name("REG::" + mem.get_full_name(),
"NO_MEM_TESTS", 0) is not None or
UVMResourceDb.get_by_name("REG::" + mem.get_full_name(),
"NO_MEM_WALK_TEST", 0) is not None):
return
n_bits = mem.get_n_bits()
# Memories may be accessible from multiple physical interfaces (maps)
mem.get_maps(maps)
# Walk the memory via each map
for j in range(len(maps)):
status = 0
val = 0
exp = 0
# v = 0
# Only deal with RW memories
if mem.get_access(maps[j]) != "RW":
continue
uvm_info("UVMMemWalkSeq", sv.sformatf("Walking memory %s (n_bits: %d) in map \"%s\"...",
mem.get_full_name(), n_bits, maps[j].get_full_name()), UVM_LOW)
# The walking process is, for address k:
# - Write ~k
# - Read k-1 and expect ~(k-1) if k > 0
# - Write k-1 at k-1
# - Read k and expect ~k if k == last address
for k in range(mem.get_size()):
status = []
await mem.write(status, k, ~k, UVM_FRONTDOOR, maps[j], self)
status = status[0]
if status != UVM_IS_OK:
uvm_error("UVMMemWalkSeq", sv.sformatf(
"Status was %s when writing \"%s[%0d]\" through map \"%s\".",
status.name(), mem.get_full_name(), k, maps[j].get_full_name()))
if k > 0:
status = []
val = []
await mem.read(status, k-1, val, UVM_FRONTDOOR, maps[j], self)
status = status[0]
if status != UVM_IS_OK:
uvm_error("UVMMemWalkSeq", sv.sformatf(
"Status was %s when reading \"%s[%0d]\" through map \"%s\".",
UVM_STATUS_NAMES[status], mem.get_full_name(), k, maps[j].get_full_name()))
else:
exp = ~(k-1) & ((1 << n_bits)-1)
val = val[0]
if val != exp:
uvm_error("UVMMemWalkSeq",
sv.sformatf("\"%s[%0d-1]\" read back as 'h%h instead of 'h%h.",
mem.get_full_name(), k, val, exp))
status = []
await mem.write(status, k-1, k-1, UVM_FRONTDOOR, maps[j], self)
status = status[0]
if status != UVM_IS_OK:
uvm_error("UVMMemWalkSeq", sv.sformatf(
"Status was %s when writing \"%s[%0d-1]\" through map \"%s\".",
UVM_STATUS_NAMES[status], mem.get_full_name(), k, maps[j].get_full_name()))
if k == mem.get_size() - 1:
status = []
val = []
await mem.read(status, k, val, UVM_FRONTDOOR, maps[j], self)
status = status[0]
if status != UVM_IS_OK:
uvm_error("UVMMemWalkSeq", sv.sformatf(
"Status was %s when reading \"%s[%0d]\" through map \"%s\".",
UVM_STATUS_NAMES[status], mem.get_full_name(), k, maps[j].get_full_name()))
else:
exp = ~(k) & ((1<<n_bits)-1)
val = val[0]
if val != exp:
uvm_error("UVMMemWalkSeq", sv.sformatf("\"%s[%0d]\" read back as 'h%h instead of 'h%h.",
mem.get_full_name(), k, val, exp))
uvm_object_utils(UVMMemSingleWalkSeq)
#//------------------------------------------------------------------------------
#// Class: UVMMemWalkSeq
#//
#// Verifies all the memories in a block
#// by executing the <UVMMemSingleWalkSeq> sequence on
#// every memory within it.
#//
#// If bit-type resource named
#// "NO_REG_TESTS", "NO_MEM_TESTS", or "NO_MEM_WALK_TEST"
#// in the "REG::" namespace
#// matches the full name of the block,
#// the block is not tested.
#//
#//| uvm_resource_db#(bit)::set({"REG::",regmodel.blk.get_full_name(),".*"},
#//| "NO_MEM_TESTS", 1, this);
#//
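#// A minimal usage sketch (illustrative; ~regmodel~ and ~seqr~ are assumed):
#//
#//| seq = UVMMemWalkSeq("mem_walk")
#//| seq.model = regmodel
#//| await seq.start(seqr)
#//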
#//------------------------------------------------------------------------------
class UVMMemWalkSeq(UVMRegSequence): # (uvm_sequence #(uvm_reg_item))
def __init__(self, name="UVMMemWalkSeq"):
super().__init__(name)
# // Variable: model
# //
# // The block to be tested. Declared in the base class.
# //
# //| uvm_reg_block model;
self.model = None
# // Variable: mem_seq
# //
# // The sequence used to test one memory
# //
self.mem_seq = None
#
# // Task: body
# //
# // Executes the mem walk sequence, one block at a time.
# // Do not call directly. Use seq.start() instead.
# //
async def body(self):
if self.model is None:
uvm_error("UVMMemWalkSeq", "No register model specified to run sequence on")
return
uvm_info("STARTING_SEQ","\n\nStarting " + self.get_name() + " sequence...\n",UVM_LOW)
self.mem_seq = UVMMemSingleWalkSeq.type_id.create("single_mem_walk_seq")
await self.reset_blk(self.model)
self.model.reset()
await self.do_block(self.model)
# // Task: do_block
# //
# // Test all of the memories in a given ~block~
# //
async def do_block(self, blk):
mems = [] # uvm_mem[$]
if (UVMResourceDb.get_by_name("REG::" + blk.get_full_name(),
"NO_REG_TESTS", 0) is not None or
UVMResourceDb.get_by_name("REG::" + blk.get_full_name(),
"NO_MEM_TESTS", 0) is not None or
UVMResourceDb.get_by_name("REG::" + blk.get_full_name(),
"NO_MEM_ACCESS_TEST", 0) is not None):
return
# Iterate over all memories, checking accesses
blk.get_memories(mems, UVM_NO_HIER)
for i in range(len(mems)):
# Memories with some attributes are not to be tested
if (UVMResourceDb.get_by_name("REG::" + mems[i].get_full_name(),
"NO_REG_TESTS", 0) is not None or
UVMResourceDb.get_by_name("REG::" + mems[i].get_full_name(),
"NO_MEM_TESTS", 0) is not None or
UVMResourceDb.get_by_name("REG::" + mems[i].get_full_name(),
"NO_MEM_WALK_TEST", 0) is not None):
continue
self.mem_seq.mem = mems[i]
await self.mem_seq.start(None, self)
blks = [] # uvm_reg_block [$]
blk.get_blocks(blks)
for i in range(len(blks)):
await self.do_block(blks[i])
# // Task: reset_blk
# //
# // Reset the DUT that corresponds to the specified block abstraction class.
# //
# // Currently empty.
# // Will rollback the environment's phase to the ~reset~
# // phase once the new phasing is available.
# //
# // In the meantime, the DUT should be reset before executing this
# // test sequence or this method should be implemented
# // in an extension to reset the DUT.
# //
async def reset_blk(self, blk):
await uvm_empty_delay()
uvm_object_utils(UVMMemWalkSeq)
|
StarcoderdataPython
|
6636047
|
#!/usr/bin/env python
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from metrilyx.dataserver import cli
from metrilyx.dataserver.server import ServerManager
def parseCliOptions():
parser = cli.DataserverOptionParser()
parser.add_option("-l", "--log-level", dest="logLevel", default="INFO",
help="Log level. (default: INFO)")
parser.add_option("--log-format", dest="logFormat", default=cli.DEFAULT_LOG_FORMAT,
help="Log output format. (default: '"+cli.DEFAULT_LOG_FORMAT+"')")
parser.add_option("--log-dir", dest="logDir", default=None,
help="Log directory.")
parser.add_option("--hostname", dest="hostname", default="localhost",
help="Resolvable hostname of the server. (default: localhost)")
parser.add_option("-p", "--port", dest="port", type="int", default=9000,
help="Port to listen on. (default: 9000)")
parser.add_option("-e", "--external-port", dest="extPort", type="int", default=None,
help="External port if running behind a proxy such as nginx. This would be the port of the proxy, usually port 80.")
parser.add_option("--check-interval", dest="checkInterval", default=15.0, type="float",
help="Interval to check for process stats. (default: 15.0 secs)")
parser.add_option("--max-memory", dest="maxAllowedMemory", type="float", default=1500.0,
help="Maximum allowed memory (MB) before server is gracefully respawned. (default: 1500.0 MB)")
return parser.parse_args()
if __name__ == "__main__":
(opts, args) = parseCliOptions()
smgr = ServerManager(opts)
smgr.start()
|
StarcoderdataPython
|
1883448
|
from time import sleep
def contagem(a, b, c):
    # Normalize the step: force it positive and never zero.
    if c < 0:
        c *= -1
    if c == 0:
        c = 1
    print(f'Counting from {a} to {b} in steps of {c}')
    if a < b:
        cont = a
        while cont <= b:
            print(f'{cont} ', end='')
            cont += c
            sleep(0.3)
        print('Done')
    else:
        cont = a
        while cont >= b:
            print(f'{cont} ', end='')
            cont -= c
            sleep(0.3)
        print('Done')
contagem(1, 10, 1)
contagem(10, 0, 2)
a = int(input('Start: '))
b = int(input('End: '))
c = int(input('Step: '))
contagem(a, b, c)
|
StarcoderdataPython
|
152666
|
<filename>tests/test_pykblib_exceptions.py
from unittest import TestCase
from pykblib.exceptions import KBLibException
class KeybaseExceptionTest(TestCase):
def test_exception(self):
with self.assertRaises(KBLibException) as raised:
raise KBLibException("test message")
self.assertEqual(raised.exception.message, "test message")
|
StarcoderdataPython
|
4975150
|
# Generated by Django 2.2.13 on 2020-07-20 06:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('pipeline', '0033_road'),
]
operations = [
migrations.RemoveField(
model_name='community',
name='municipality_id',
),
migrations.AddField(
model_name='community',
name='municipality',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='pipeline.Municipality'),
),
]
|
StarcoderdataPython
|
3493723
|
<reponame>scarcella/torchchem<gh_stars>10-100
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 22 06:50:13 2020
@author: zqwu
"""
import torch
import torch.nn as nn
import torchchem
import numpy as np
import os
from sklearn.metrics import roc_auc_score
# Settings
tox21_path = './data/tox21/tox21.csv'
tasks = ['NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', 'NR-ER-LBD',
'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', 'SR-HSE', 'SR-MMP', 'SR-p53']
lr = 0.001
batch_size = 128
weight_decay = 0.001
n_epochs = 1000
gpu = False
# Load dataset
dataset = torchchem.data.load_csv_dataset(tox21_path, tasks)
# Split dataset
inds = np.arange(len(dataset))
np.random.seed(123)
np.random.shuffle(inds)
train_inds = inds[:int(0.8*len(dataset))]
valid_inds = inds[int(0.8*len(dataset)):]
train_dataset = dataset.index_select(list(train_inds))
valid_dataset = dataset.index_select(list(valid_inds))
# Initialize model
net = torchchem.models.GraphConvolutionNet(
n_node_features=dataset.num_node_features,
n_tasks=len(tasks),
post_op=nn.Sigmoid())
model = torchchem.models.GraphConvolutionModel(
net,
criterion=torchchem.models.WeightedBCEWithLogits(),
lr=lr,
weight_decay=weight_decay,
gpu=gpu)
def evaluate(dataset, model):
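    """Compute the per-task ROC AUC on the dataset, ignoring unlabeled samples (w == 0)."""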
outputs = model.predict(dataset)
ys = []
ws = []
for data in dataset:
ys.append(data.y.cpu().data.numpy())
ws.append(data.w.cpu().data.numpy())
ys = np.stack(ys, 0)
ws = np.stack(ws, 0)
scores = []
for i in range(len(tasks)):
y_pred = outputs[:, i]
y = ys[:, i]
w = ws[:, i]
scores.append(roc_auc_score(y[np.where(w > 0)], y_pred[np.where(w > 0)]))
return scores
# Training and evaluation
for i in range(n_epochs):
model.train_epoch(train_dataset, batch_size=batch_size, shuffle=False)
print(np.mean(evaluate(train_dataset, model)))
print(np.mean(evaluate(valid_dataset, model)))
|
StarcoderdataPython
|
11201446
|
<filename>src/tuning/custom_trial.py
import random
from typing import Dict
import numpy as np
import nni
import torch
from ..data_loader.load import DataLoader, CryptoDataset
from ..models.custom.custom import train_model
# set seed
seed = 80085
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
def prepare_custom(params: Dict) -> float:
"""
Helper function for AutoML, helps to tune our hyper-parameters.
Args:
:param params: The hyper-parameters to test.
"""
data = DataLoader()
dataset = data.load_data(CryptoDataset("BITCOIN", "BTC-USD.csv"))
val_min, _ = train_model(
dataset, "2017-01-01", "2021-01-01", params, params["features"]
)
return val_min
val = prepare_custom(nni.get_next_parameter())
nni.report_final_result(val)
|
StarcoderdataPython
|
11363637
|
"""
.. module: historical.common.proxy
:platform: Unix
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. author:: <NAME> <<EMAIL>>
"""
import logging
import json
import math
import os
import sys
import boto3
from retrying import retry
from raven_python_lambda import RavenLambdaWrapper
from historical.common.dynamodb import DESER, remove_global_dynamo_specific_fields
from historical.common.exceptions import MissingProxyConfigurationException
from historical.common.sqs import produce_events
from historical.constants import CURRENT_REGION, EVENT_TOO_BIG_FLAG, PROXY_REGIONS, REGION_ATTR, SIMPLE_DURABLE_PROXY
from historical.mapping import DURABLE_MAPPING, HISTORICAL_TECHNOLOGY
LOG = logging.getLogger('historical')
@retry(stop_max_attempt_number=4, wait_exponential_multiplier=1000, wait_exponential_max=1000)
def _publish_sns_message(client, blob, topic_arn):
client.publish(TopicArn=topic_arn, Message=blob)
def shrink_blob(record, deletion):
"""
Makes a shrunken blob to be sent to SNS/SQS (due to the 256KB size limitations of SNS/SQS messages).
This will essentially remove the "configuration" field such that the size of the SNS/SQS message remains under
256KB.
:param record:
:return:
"""
item = {
"eventName": record["eventName"],
EVENT_TOO_BIG_FLAG: (not deletion)
}
# To handle TTLs (if they happen)
if record.get("userIdentity"):
item["userIdentity"] = record["userIdentity"]
# Remove the 'configuration' and 'requestParameters' fields from new and old images if applicable:
if not deletion:
# Only remove it from non-deletions:
if record['dynamodb'].get('NewImage'):
record['dynamodb']['NewImage'].pop('configuration', None)
record['dynamodb']['NewImage'].pop('requestParameters', None)
if record['dynamodb'].get('OldImage'):
record['dynamodb']['OldImage'].pop('configuration', None)
record['dynamodb']['OldImage'].pop('requestParameters', None)
item['dynamodb'] = record['dynamodb']
return item
@RavenLambdaWrapper()
def handler(event, context): # pylint: disable=W0613
"""Historical S3 DynamoDB Stream Forwarder (the 'Proxy').
Passes events from the Historical DynamoDB stream and passes it to SNS or SQS for additional events to trigger.
You can optionally use SNS or SQS. It is preferable to use SNS -> SQS, but in some cases, such as the Current stream
to the Differ, this will make use of SQS to directly feed into the differ for performance purposes.
"""
queue_url = os.environ.get('PROXY_QUEUE_URL')
topic_arn = os.environ.get('PROXY_TOPIC_ARN')
if not queue_url and not topic_arn:
raise MissingProxyConfigurationException('[X] Must set the `PROXY_QUEUE_URL` or the `PROXY_TOPIC_ARN` vars.')
items_to_ship = []
# Must ALWAYS shrink for SQS because of 256KB limit of sending batched messages
force_shrink = True if queue_url else False
# Is this a "Simple Durable Proxy" -- that is -- are we stripping out all of the DynamoDB data from
# the Differ?
record_maker = make_proper_simple_record if SIMPLE_DURABLE_PROXY else make_proper_dynamodb_record
for record in event['Records']:
# We should NOT be processing this if the item in question does not
# reside in the PROXY_REGIONS
correct_region = True
for img in ['NewImage', 'OldImage']:
if record['dynamodb'].get(img):
if record['dynamodb'][img][REGION_ATTR]['S'] not in PROXY_REGIONS:
LOG.debug(f"[/] Not processing record -- record event took place in:"
f" {record['dynamodb'][img][REGION_ATTR]['S']}")
correct_region = False
break
if not correct_region:
continue
# Global DynamoDB tables will update a record with the global table specific fields. This creates 2 events
# whenever there is an update. The second update, which is a MODIFY event is not relevant and noise. This
# needs to be skipped over to prevent duplicated events. This is a "gotcha" in Global DynamoDB tables.
if detect_global_table_updates(record):
continue
items_to_ship.append(record_maker(record, force_shrink=force_shrink))
if items_to_ship:
# SQS:
if queue_url:
produce_events(items_to_ship, queue_url, batch_size=int(os.environ.get('PROXY_BATCH_SIZE', 10)))
# SNS:
else:
client = boto3.client("sns", region_name=CURRENT_REGION)
for i in items_to_ship:
_publish_sns_message(client, i, topic_arn)
def detect_global_table_updates(record):
"""This will detect DDB Global Table updates that are not relevant to application data updates. These need to be
skipped over as they are pure noise.
:param record:
:return:
"""
# This only affects MODIFY events.
if record['eventName'] == 'MODIFY':
# Need to compare the old and new images to check for GT specific changes only (just pop off the GT fields)
old_image = remove_global_dynamo_specific_fields(record['dynamodb']['OldImage'])
new_image = remove_global_dynamo_specific_fields(record['dynamodb']['NewImage'])
if json.dumps(old_image, sort_keys=True) == json.dumps(new_image, sort_keys=True):
return True
return False
def make_proper_dynamodb_record(record, force_shrink=False):
"""Prepares and ships an individual DynamoDB record over to SNS/SQS for future processing.
:param record:
:param force_shrink:
:return:
"""
# Get the initial blob and determine if it is too big for SNS/SQS:
blob = json.dumps(record)
size = math.ceil(sys.getsizeof(blob) / 1024)
# If it is too big, then we need to send over a smaller blob to inform the recipient that it needs to go out and
# fetch the item from the Historical table!
if size >= 200 or force_shrink:
deletion = False
# ^^ However -- deletions need to be handled differently, because the Differ won't be able to find a
# deleted record. For deletions, we will only shrink the 'OldImage', but preserve the 'NewImage' since that is
# "already" shrunken.
if record['dynamodb'].get('NewImage'):
# Config will be empty if there was a deletion:
if not (record['dynamodb']['NewImage'].get('configuration', {}) or {}).get('M'):
deletion = True
blob = json.dumps(shrink_blob(record, deletion))
return blob
def _get_durable_pynamo_obj(record_data, durable_model):
image = remove_global_dynamo_specific_fields(record_data)
data = {}
for item, value in image.items():
# This could end up as loss of precision
data[item] = DESER.deserialize(value)
return durable_model(**data)
def make_proper_simple_record(record, force_shrink=False):
"""Prepares and ships an individual simplified durable table record over to SNS/SQS for future processing.
:param record:
:param force_shrink:
:return:
"""
# Convert to a simple object
item = {
'arn': record['dynamodb']['Keys']['arn']['S'],
'event_time': record['dynamodb']['NewImage']['eventTime']['S'],
'tech': HISTORICAL_TECHNOLOGY
}
# We need to de-serialize the raw DynamoDB object into the proper PynamoDB obj:
prepped_new_record = _get_durable_pynamo_obj(record['dynamodb']['NewImage'],
DURABLE_MAPPING.get(HISTORICAL_TECHNOLOGY))
item['item'] = dict(prepped_new_record)
# Get the initial blob and determine if it is too big for SNS/SQS:
blob = json.dumps(item)
size = math.ceil(sys.getsizeof(blob) / 1024)
# If it is too big, then we need to send over a smaller blob to inform the recipient that it needs to go out and
# fetch the item from the Historical table!
if size >= 200 or force_shrink:
del item['item']
item[EVENT_TOO_BIG_FLAG] = True
blob = json.dumps(item)
return blob.replace('<empty>', '')
|
StarcoderdataPython
|
4838748
|
<gh_stars>1-10
"""
This module contrains all the flags for the motion representation learning repository
"""
from __future__ import division
import os
from os.path import join as pjoin
import tensorflow as tf
# Modify this function to set your home directory for this repo
def home_out(path):
return pjoin(os.environ['HOME'], 'tmp', 'MoCap', path)
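# e.g. home_out('chkpts') -> $HOME/tmp/MoCap/chkpts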
flags = tf.app.flags
FLAGS = flags.FLAGS
""" Fine-tuning Parameters """
# Flags about the sequence processing
flags.DEFINE_integer('chunk_length', 1, 'Length of the chunks, for the data processing.')
# Flags about training
flags.DEFINE_float('learning_rate', 0.0001,
                   'learning rate for training.')
flags.DEFINE_float('pretraining_learning_rate', 0.001,
                   'learning rate for pretraining.')
flags.DEFINE_float('variance_of_noise', 0.05, 'Coefficient for the gaussian noise '
'added to every point in input during the training')
flags.DEFINE_boolean('pretrain', False,' Whether we pretrain the model in a layerwise way')
flags.DEFINE_boolean('restore', False,' Whether we restore the model from the checkpoint')
flags.DEFINE_boolean('evaluate', False, ' Whether we are evaluating the system')
flags.DEFINE_float('dropout', 0.9, 'Probability to keep the neuron on')
flags.DEFINE_integer('batch_size', 128,
'Size of the mini batch')
flags.DEFINE_integer('training_epochs', 50,
                     "Number of training epochs")
flags.DEFINE_integer('pretraining_epochs', 5,
"Number of training epochs for pretraining layers")
flags.DEFINE_float('weight_decay', 0.5, 'Weight decay coefficient')
flags.DEFINE_boolean('early_stopping', True, ' Whether we do early stopping')
flags.DEFINE_float('delta_for_early_stopping', 0.5, 'How much worse the results may get before'
                                                    ' training is terminated.'
                                                    ' 0.05 means 5% worse than the best we had.')
# Network Architecture Specific Flags
flags.DEFINE_integer('frame_size', 24, 'Dimensionality of the input for a single frame')
flags.DEFINE_integer("num_hidden_layers", 1, "Number of hidden layers")
flags.DEFINE_integer("middle_layer", 1, "Number of hidden layers")
flags.DEFINE_integer('layer1_width', 20, 'Number of units in each hidden layer ')
flags.DEFINE_integer('layer2_width', 248, 'Number of units in each hidden layer ')
flags.DEFINE_integer('layer3_width', 312, 'Number of units in each hidden layer ')
# Constants
flags.DEFINE_integer('seed', 123456, 'Random seed')
flags.DEFINE_string('summary_dir', home_out('summaries_exp'),
'Directory to put the summary data')
flags.DEFINE_string('chkpt_dir', home_out('chkpts_exp'),
'Directory to put the model checkpoints')
flags.DEFINE_string('results_file', home_out('results.txt'),
'File to put the experimental results')
|
StarcoderdataPython
|
202892
|
<reponame>fpoulain/django-tapeforms
from django import forms
from tapeforms.fieldsets import TapeformFieldset, TapeformFieldsetsMixin
from tapeforms.mixins import TapeformMixin
class LargeForm(TapeformMixin, forms.Form):
first_name = forms.CharField(label='First name')
last_name = forms.CharField(label='Last name', help_text='Some hints')
confirm = forms.BooleanField(label='Please confirm')
some_text = forms.CharField(label='First name')
some_other = forms.CharField(label='Last name', required=False)
choose_options = forms.MultipleChoiceField(label='Please choose', choices=(
('foo', 'foo'),
('bar', 'bar'),
        ('baz', 'baz')
), widget=forms.RadioSelect)
special_text = forms.IntegerField(label='A number')
birthdate = forms.DateField(widget=forms.SelectDateWidget())
class ManualFieldsetsForm(LargeForm):
def basic(self):
return TapeformFieldset(
self, fields=('first_name', 'last_name'), primary=True)
def other_stuff(self):
return TapeformFieldset(self, exclude=('first_name', 'last_name'))
class PropertyFieldsetsForm(TapeformFieldsetsMixin, LargeForm):
fieldsets = ({
'extra': {
'title': 'Basic',
},
'fields': ('first_name', 'last_name'),
}, {
'extra': {
'title': 'Other stuff',
'css_class': 'classy',
},
'exclude': ('first_name', 'last_name'),
})
|
StarcoderdataPython
|
6576281
|
<filename>main.py
# List every file in the ./photo subdirectory of the current working directory.
import os
photo_dir = os.path.join(os.getcwd(), "photo")
os.chdir(photo_dir)
for name in os.listdir(os.getcwd()):
    print(name)
|
StarcoderdataPython
|
1814514
|
"""
@author: <NAME>
@date: 2020/02/18 (yyyy/mm/dd)
9. Program a recursive function to calculate the following sum:
S = 1 + 2 + 3 + 4 + (...) + n-1 + n.
Analyze the efficiency and complexity of the provided solution.
"""
# Testing
import unittest
from U1.src.e9 import summation
class TestSummationMethods(unittest.TestCase):
""" Test methods in <EMAIL>/algo/U1/src/e9.py """
# Test summation
def test_summation01(self):
""" Test summation """
provided = summation(10)
expected = sum(range(10+1))
self.assertEqual(provided, expected)
def test_summation02(self):
""" Test summation """
provided = summation(100)
expected = sum(range(100+1))
self.assertEqual(provided, expected)
def test_summation03(self):
""" Test summation """
provided = summation(300)
expected = sum(range(300+1))
self.assertEqual(provided, expected)
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
8164881
|
<reponame>human-analysis/3dfacefill
# 3dmm.py
import torch
import torch.nn as nn
import torch.nn.functional as F
import os
import numpy as np
# __all__ = ['Encoder', 'ShapeDecoder', 'AlbedoDecoder', 'AutoEncoder']
class ConvUnit(nn.Module):
def __init__(self, in_c, out_c, n_groupf=4, kernel_size=3, stride=1, padding=1, bias=True, norm=True):
super(ConvUnit, self).__init__()
self.norm = norm
self.conv2d = nn.utils.spectral_norm(nn.Conv2d(in_c, out_c, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias))
# self.conv2d = nn.Conv2d(in_c, out_c, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias)
if self.norm:
self.norm_layer = nn.GroupNorm(out_c//n_groupf, out_c)
self.elu = nn.ELU()
def forward(self, x):
temp = self.conv2d(x)
if self.norm:
temp = self.norm_layer(temp)
return self.elu(temp)
class ConvNoActUnit(nn.Module):
def __init__(self, in_c, out_c, n_groupf=4, kernel_size=3, stride=1, padding=1, bias=True):
super(ConvNoActUnit, self).__init__()
self.conv2d = nn.utils.spectral_norm(nn.Conv2d(in_c, out_c, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias))
# self.conv2d = nn.Conv2d(in_c, out_c, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias)
self.norm = nn.GroupNorm(out_c//n_groupf, out_c)
def forward(self, x):
return self.norm(self.conv2d(x))
class DeconvUnit(nn.Module):
"""docstring for DeconvUnit"""
def __init__(self, in_c, out_c, n_groupf=4, kernel_size=4, stride=2, padding=1, bias=True, upsample=True):
super(DeconvUnit, self).__init__()
if upsample:
if type(kernel_size) is tuple:
factor = (kernel_size[0]/stride, kernel_size[1]/stride)
else:
factor = kernel_size / stride
deconv2d = [nn.Upsample(scale_factor=factor), nn.utils.spectral_norm(nn.Conv2d(in_c, out_c, kernel_size=3, stride=1, padding=1, bias=bias))]
self.deconv2d = nn.Sequential(*deconv2d)
else:
self.deconv2d = nn.utils.spectral_norm(nn.ConvTranspose2d(in_c, out_c, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias))
# self.deconv2d = nn.ConvTranspose2d(in_c, out_c, kernel_size=kernel_size, stride=stride, padding=padding, bias=bias)
self.norm = nn.GroupNorm(out_c//n_groupf, out_c)
self.elu = nn.ELU()
def forward(self, x):
return self.elu(self.norm(self.deconv2d(x)))
class Encoder(nn.Module):
"""docstring for Encoder"""
def __init__(self, args):
super(Encoder, self).__init__()
self.args = args
self.nc = args.nchannels # 3
self.ngf = args.ngf # 32
self.ngfc = args.ngfc # 512
self.mdim = args.mdim # 6
self.ildim = args.ildim # 27
self.n_group = args.n_groupnorm # nc_out/4
self.use_conf = args.use_conf
self.k0_1 = ConvUnit(self.nc, self.ngf*1, self.n_group, kernel_size=7, stride=2, padding=3, bias=False) # 112
self.k0_2 = ConvUnit(self.ngf*1, self.ngf*2, self.n_group, bias=False)
self.k1_0 = ConvUnit(self.ngf*2, self.ngf*2, self.n_group, stride=2, bias=False) # 56
self.k1_1 = ConvUnit(self.ngf*2, self.ngf*3, self.n_group, bias=False)
self.k1_2 = ConvUnit(self.ngf*3, self.ngf*4, self.n_group, bias=False)
self.k2_0 = ConvUnit(self.ngf*4, self.ngf*4, self.n_group, stride=2, bias=False) # 28
self.k2_1 = ConvUnit(self.ngf*4, self.ngf*6, self.n_group, bias=False)
self.k2_2 = ConvUnit(self.ngf*6, self.ngf*8, self.n_group, bias=False)
self.k3_0 = ConvUnit(self.ngf*8, self.ngf*8, self.n_group, stride=2, bias=False) # 14
self.k3_1 = ConvUnit(self.ngf*8, self.ngf*8, self.n_group, bias=False)
self.k3_2 = ConvUnit(self.ngf*8, self.ngf*8, self.n_group, bias=False)
self.k4_0 = ConvUnit(self.ngf*8, self.ngf*16, self.n_group, stride=2, bias=False) # 7
self.k4_1 = ConvUnit(self.ngf*16, self.ngf*16, self.n_group, bias=False)
# M
self.k5_m = ConvUnit(self.ngf*16, self.ngf*5, self.n_group)
self.k6_m = nn.Linear(self.ngf*5, self.mdim)
self.act_m = nn.Tanh()
# IL
self.k5_il = ConvUnit(self.ngf*16, self.ngf*5, self.n_group)
self.k6_il = nn.Linear(self.ngf*5, self.ildim)
# Shape
self.k5_shape = ConvUnit(self.ngf*16, self.ngfc, self.n_group)
self.k6_shape = ConvUnit(self.ngfc, self.ngfc, self.n_group)
self.k7_shape = nn.Linear(self.ngfc, 199+29)
# Albedo
self.k5_tex = ConvUnit(self.ngf*16, int(self.ngfc), self.n_group)
# Confidence
# if self.use_conf:
self.k5_conf = ConvUnit(self.ngf*16, int(self.ngfc), self.n_group)
def forward(self, input):
temp = self.k0_1(input)
temp = self.k0_2(temp)
temp = self.k1_0(temp)
temp = self.k1_1(temp)
temp = self.k1_2(temp)
temp = self.k2_0(temp)
temp = self.k2_1(temp)
temp = self.k2_2(temp)
temp = self.k3_0(temp)
temp = self.k3_1(temp)
temp = self.k3_2(temp)
temp = self.k4_0(temp)
temp = self.k4_1(temp)
# M
m_temp = self.k5_m(temp)
_shape = m_temp.shape[-1]
m_temp = nn.functional.avg_pool2d(m_temp, kernel_size=_shape, stride=1)
m_temp = m_temp.view(-1, self.ngf*5)
m_temp = self.k6_m(m_temp)
m_out = self.act_m(m_temp[:,1:])
scale_out = 0.5*(self.act_m(m_temp[:,0])+1)*2e-3
# IL
il_temp = self.k5_il(temp)
il_temp = nn.functional.avg_pool2d(il_temp, kernel_size=_shape, stride=1)
il_temp = il_temp.view(-1, self.ngf*5)
il_out = self.k6_il(il_temp)
# Shape
shape_temp = self.k5_shape(temp)
shape_temp = self.k6_shape(shape_temp)
shape_temp = nn.functional.avg_pool2d(shape_temp, kernel_size=_shape, stride=1)
shape_temp = shape_temp.view(-1, int(self.ngfc))
shape_out = self.k7_shape(shape_temp)
# Albedo
tex_temp = self.k5_tex(temp) # change back to self.k5_tex
tex_temp = nn.functional.avg_pool2d(tex_temp, kernel_size=_shape, stride=1)
tex_out = tex_temp.view(-1, int(self.ngfc))
# Confidence
if self.use_conf:
conf_temp = self.k5_conf(temp)
conf_temp = nn.functional.avg_pool2d(conf_temp, kernel_size=_shape, stride=1)
conf_out = conf_temp.view(-1, int(self.ngfc))
return shape_out, tex_out, conf_out, scale_out, m_out, il_out
return shape_out, tex_out, scale_out, m_out, il_out
# Texture Decoder
class AlbedoDecoder(nn.Module):
"""docstring for TextureDecoder"""
def __init__(self, args):
super(AlbedoDecoder, self).__init__()
self.args = args
self.texture_size = args.texture_size
self.s_h = int(self.texture_size[0])
self.s_w = int(self.texture_size[1])
self.s32_h = int(self.s_h/32)
self.s32_w = int(self.s_w/32)
self.ngfc = args.ngfc
self.ngf = args.ngf
self.nc = args.nchannels
self.n_groupf = args.n_groupnorm
self.h6_1 = DeconvUnit(self.ngfc, self.ngfc, self.n_groupf, kernel_size=(3,4), stride=1, padding=0)
self.h6_0 = ConvUnit(self.ngfc, self.ngfc//2, norm=False)
self.h5_2 = DeconvUnit(self.ngfc//2, self.ngfc//2, self.n_groupf, stride=2)
self.h5_1 = ConvUnit(self.ngfc//2, self.ngfc//4)
self.h5_0 = ConvUnit(self.ngfc//4, self.ngfc//4)
self.h4_2 = DeconvUnit(self.ngfc//4, self.ngf*5, self.n_groupf, stride=2)
self.h4_1 = ConvUnit(self.ngf*5, self.ngf*3, self.n_groupf)
self.h4_0 = ConvUnit(self.ngf*3, self.ngf*4, self.n_groupf)
self.h3_2 = DeconvUnit(self.ngf*4, self.ngf*4, self.n_groupf, stride=2)
self.h3_1 = ConvUnit(self.ngf*4, self.ngf*2, self.n_groupf)
self.h3_0 = ConvUnit(self.ngf*2, self.ngf*3, self.n_groupf)
self.h2_2 = DeconvUnit(self.ngf*3, self.ngf*3, self.n_groupf, stride=2)
self.h2_1 = ConvUnit(self.ngf*3, self.ngf*2, self.n_groupf)
self.h2_0 = ConvUnit(self.ngf*2, self.ngf*2, self.n_groupf)
self.h1_2 = DeconvUnit(self.ngf*2, self.ngf*2, self.n_groupf, stride=2)
self.h1_1 = ConvUnit(self.ngf*2, self.ngf, self.n_groupf)
self.h1_0 = ConvUnit(self.ngf, self.ngf, self.n_groupf)
self.h0_2 = DeconvUnit(self.ngf, self.ngf, self.n_groupf, stride=2)
self.h0_1 = ConvUnit(self.ngf, self.ngf//2, self.n_groupf)
self.h0_0 = ConvUnit(self.ngf//2, self.ngf//2, self.n_groupf)
self.final = nn.Conv2d(self.ngf//2, self.nc, kernel_size=1, stride=1, padding=0)
self.final_act = nn.Tanh()
def forward(self, input):
temp = input.view(-1, self.ngfc, 1, 1)
temp = self.h6_1(temp)
temp = self.h6_0(temp)
temp = self.h5_2(temp)
temp = self.h5_1(temp)
temp = self.h5_0(temp)
temp = self.h4_2(temp)
temp = self.h4_1(temp)
temp = self.h4_0(temp)
temp = self.h3_2(temp)
temp = self.h3_1(temp)
temp = self.h3_0(temp)
temp = self.h2_2(temp)
temp = self.h2_1(temp)
temp = self.h2_0(temp)
temp = self.h1_2(temp)
temp = self.h1_1(temp)
temp = self.h1_0(temp)
temp = self.h0_2(temp)
temp = self.h0_1(temp)
temp = self.h0_0(temp)
out = self.final_act(self.final(temp))
return out
class ShapeDecoder(nn.Module):
def __init__(self, args):
super(ShapeDecoder, self).__init__()
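        # No decoder network is defined here; shape coefficients come directly
        # from the Encoder's k7_shape head.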
# Symmetry Decoder
class ConfidenceDecoder(nn.Module):
"""docstring for TextureDecoder"""
def __init__(self, args):
super(ConfidenceDecoder, self).__init__()
self.args = args
self.texture_size = args.texture_size
self.s_h = int(self.texture_size[0])
self.s_w = int(self.texture_size[1])
self.s32_h = int(self.s_h/32)
self.s32_w = int(self.s_w/32)
self.ngfc = args.ngfc
self.ngf = args.ngf
self.nc = args.nchannels
self.n_groupf = args.n_groupnorm
self.h6 = DeconvUnit(self.ngfc, self.ngfc//2, self.n_groupf, kernel_size=(3,4), stride=1, padding=0)
self.h5 = DeconvUnit(self.ngfc//2, self.ngfc//4, self.n_groupf, stride=2)
self.h4 = DeconvUnit(self.ngfc//4, self.ngf*5, self.n_groupf, stride=2)
self.h3 = DeconvUnit(self.ngf*5, self.ngf*4, self.n_groupf, stride=2)
self.h2 = DeconvUnit(self.ngf*4, self.ngf*3, self.n_groupf, stride=2)
self.h1 = DeconvUnit(self.ngf*3, self.ngf*2, self.n_groupf, stride=2)
self.h0 = DeconvUnit(self.ngf*2, self.ngf, self.n_groupf, stride=2)
self.final = nn.Conv2d(self.ngf, 1, kernel_size=1, stride=1, padding=0)
self.final_act = nn.Softplus()
def forward(self, input):
temp = input.view(-1, self.ngfc, 1, 1)
temp = self.h6(temp)
temp = self.h5(temp)
temp = self.h4(temp)
temp = self.h3(temp)
temp = self.h2(temp)
temp = self.h1(temp)
temp = self.h0(temp)
out = self.final(temp)
out = torch.clamp(out, min=-10)
return out
# Autoencoder model
class AutoEncoder(nn.Module):
"""docstring for AutoEncoder"""
def __init__(self, args, in_channels=3, out_channels=1):
super(AutoEncoder, self).__init__()
self.args = args
self.in_c = in_channels # 3
self.out_c = out_channels
self.ngf = args.ngf # 32
self.n_groupf = args.n_groupnorm # nc_out/4
self.z = args.enc_z
self.s_h = int(args.resolution_high/32)
self.s_w = int(args.resolution_wide/32)
self.he0_0 = ConvUnit(self.in_c, self.ngf*1, self.n_groupf, kernel_size=7, stride=2, padding=3, bias=False) # 112
self.he0_1 = ConvUnit(self.ngf*1, self.ngf*1, self.n_groupf, bias=False)
self.he1_0 = ConvUnit(self.ngf*1, self.ngf*2, self.n_groupf, stride=2, bias=False) # 56
self.he1_1 = ConvUnit(self.ngf*2, self.ngf*2, self.n_groupf, bias=False)
self.he2_0 = ConvUnit(self.ngf*2, self.ngf*4, self.n_groupf, stride=2, bias=False) # 28
self.he2_1 = ConvUnit(self.ngf*4, self.ngf*4, self.n_groupf, bias=False)
self.he3_0 = ConvUnit(self.ngf*4, self.ngf*8, self.n_groupf, stride=2, bias=False) # 14
self.he3_1 = ConvUnit(self.ngf*8, self.ngf*8, self.n_groupf, bias=False)
self.he4_0 = ConvUnit(self.ngf*8, self.ngf*8, self.n_groupf, stride=2, bias=False) # 7
self.he4_1 = ConvUnit(self.ngf*8, self.ngf*16, self.n_groupf, bias=False)
self.he5_lin = nn.Linear(self.ngf*16, self.z) # Nxz
self.hd5_lin = nn.Linear(self.z, self.ngf*8*self.s_h*self.s_w) # Nx256x7x7
self.hd5_0 = ConvUnit(self.ngf*8, self.ngf*8, self.n_groupf, norm=False)
self.hd4_1 = DeconvUnit(self.ngf*8, self.ngf*8, self.n_groupf, stride=2) # 14
self.hd4_0 = ConvUnit(self.ngf*8, self.ngf*4, self.n_groupf)
self.hd3_1 = DeconvUnit(self.ngf*4, self.ngf*4, self.n_groupf, stride=2) # 28
self.hd3_0 = ConvUnit(self.ngf*4, self.ngf*4, self.n_groupf)
self.hd2_1 = DeconvUnit(self.ngf*4, self.ngf*2, self.n_groupf, stride=2) # 56
self.hd2_0 = ConvUnit(self.ngf*2, self.ngf*2, self.n_groupf)
self.hd1_1 = DeconvUnit(self.ngf*2, self.ngf*1, self.n_groupf, stride=2) # 112
self.hd1_0 = ConvUnit(self.ngf*1, self.ngf*1, self.n_groupf)
self.hd0_1 = DeconvUnit(self.ngf*1, self.ngf//2, self.n_groupf, stride=2) # 224
self.hd0_0 = ConvUnit(self.ngf//2, self.ngf//2, self.n_groupf)
self.hd_final = nn.Conv2d(self.ngf//2, self.out_c, kernel_size=3, stride=1, padding=1)
self.hd_act = nn.Sigmoid()
def forward(self, x):
x = self.encoder(x)
x = self.decoder(x)
return x
def encoder(self, x):
x = self.he0_1(self.he0_0(x))
x = self.he1_1(self.he1_0(x))
x = self.he2_1(self.he2_0(x))
x = self.he3_1(self.he3_0(x))
x = self.he4_1(self.he4_0(x))
_shape = x.shape[-1]
x = F.avg_pool2d(x, kernel_size=_shape, stride=1)
x = x.view(-1, self.ngf*16)
x = F.elu(self.he5_lin(x))
return x
def decoder(self, x):
batch_size = x.shape[0]
x = F.elu(self.hd5_lin(x))
x = x.reshape(batch_size, -1, self.s_h, self.s_w)
x = self.hd5_0(x)
x = self.hd4_0(self.hd4_1(x))
x = self.hd3_0(self.hd3_1(x))
x = self.hd2_0(self.hd2_1(x))
x = self.hd1_0(self.hd1_1(x))
x = self.hd0_0(self.hd0_1(x))
x = self.hd_act(self.hd_final(x))
return x
class FCN8_VGG(nn.Module):
"""docstring for AutoEncoder"""
def __init__(self, args, in_channels=3, out_channels=1):
super(FCN8_VGG, self).__init__()
self.args = args
self.in_c = in_channels # 3
self.out_c = out_channels
self.ngf = args.ngf # 32
self.n_groupf = args.n_groupnorm # nc_out/4
self.z = args.enc_z
self.s_h = int(args.resolution_high/32)
self.s_w = int(args.resolution_wide/32)
self.e1_1 = ConvUnit(self.in_c, self.ngf*2, self.n_groupf, bias=False)
self.e1_2 = ConvUnit(self.ngf*2, self.ngf*2, self.n_groupf, bias=False) # 224
self.e1_pool = nn.MaxPool2d(2, 2) # 112
self.e2_1 = ConvUnit(self.ngf*2, self.ngf*4, self.n_groupf, bias=False)
self.e2_2 = ConvUnit(self.ngf*4, self.ngf*4, self.n_groupf, bias=False) # 112
self.e2_pool = nn.MaxPool2d(2, 2) # 56
self.e3_1 = ConvUnit(self.ngf*4, self.ngf*8, self.n_groupf, bias=False)
self.e3_2 = ConvUnit(self.ngf*8, self.ngf*8, self.n_groupf, bias=False)
self.e3_3 = ConvUnit(self.ngf*8, self.ngf*8, self.n_groupf, bias=False) # 56
self.e3_pool = nn.MaxPool2d(2, 2) # 28
self.d3 = nn.Conv2d(self.ngf*8, self.out_c, kernel_size=1, stride=1) # 28
self.e4_1 = ConvUnit(self.ngf*8, self.ngf*16, self.n_groupf, bias=False)
self.e4_2 = ConvUnit(self.ngf*16, self.ngf*16, self.n_groupf, bias=False)
self.e4_3 = ConvUnit(self.ngf*16, self.ngf*16, self.n_groupf, bias=False) # 28
self.e4_pool = nn.MaxPool2d(2, 2) # 14 # 7
self.d4_1 = ConvUnit(self.ngf*16, self.ngf*32, self.n_groupf, bias=False)
self.d4_2 = ConvUnit(self.ngf*32, self.ngf*32, self.n_groupf, bias=False)
self.d4 = nn.Conv2d(self.ngf*32, self.out_c, kernel_size=1, stride=1) # 14
self.d4_up = DeconvUnit(self.out_c, self.out_c, n_groupf=2, kernel_size=2, stride=2, padding=0) # 28
self.final_upsample = nn.Upsample(size=224, mode='bilinear')
self.final_act = nn.Softmax2d()
def forward(self, x):
x = self.e1_1(x)
x = self.e1_2(x)
x = self.e1_pool(x)
x = self.e2_1(x)
x = self.e2_2(x)
x = self.e2_pool(x)
x = self.e3_1(x)
x = self.e3_2(x)
x = self.e3_3(x)
x = self.e3_pool(x)
d3 = self.d3(x)
x = self.e4_1(x)
x = self.e4_2(x)
x = self.e4_3(x)
x = self.e4_pool(x)
x = self.d4_1(x)
x = self.d4_2(x)
x = self.d4(x)
d4 = self.d4_up(x)
out = d3 + d4
out = self.final_upsample(out)
out = self.final_act(out)
return out
class FCN4_VGG(nn.Module):
"""docstring for AutoEncoder"""
def __init__(self, args, in_channels=3, out_channels=1):
super(FCN4_VGG, self).__init__()
self.args = args
self.in_c = in_channels # 3
self.out_c = out_channels
self.ngf = args.ngf # 32
self.n_groupf = args.n_groupnorm # nc_out/4
self.z = args.enc_z
self.s_h = int(args.resolution_high/32)
self.s_w = int(args.resolution_wide/32)
self.e1_1 = ConvUnit(self.in_c, self.ngf*2, self.n_groupf, bias=False)
self.e1_2 = ConvUnit(self.ngf*2, self.ngf*2, self.n_groupf, bias=False) # 224
self.e1_pool = nn.MaxPool2d(2, 2) # 112
self.e2_1 = ConvUnit(self.ngf*2, self.ngf*4, self.n_groupf, bias=False)
self.e2_2 = ConvUnit(self.ngf*4, self.ngf*4, self.n_groupf, bias=False) # 112
self.e2_pool = nn.MaxPool2d(2, 2) # 56
self.d2 = nn.Conv2d(self.ngf*4, self.out_c, kernel_size=1, stride=1) # 56
self.e3_1 = ConvUnit(self.ngf*4, self.ngf*8, self.n_groupf, bias=False)
self.e3_2 = ConvUnit(self.ngf*8, self.ngf*8, self.n_groupf, bias=False)
self.e3_3 = ConvUnit(self.ngf*8, self.ngf*8, self.n_groupf, bias=False) # 56
self.e3_pool = nn.MaxPool2d(2, 2) # 28
self.d3 = nn.Conv2d(self.ngf*8, self.out_c, kernel_size=1, stride=1) # 28
self.d3_up = DeconvUnit(self.out_c, self.out_c, n_groupf=2, kernel_size=2, stride=2, padding=0) # 56
self.e4_1 = ConvUnit(self.ngf*8, self.ngf*16, self.n_groupf, bias=False)
self.e4_2 = ConvUnit(self.ngf*16, self.ngf*16, self.n_groupf, bias=False)
self.e4_3 = ConvUnit(self.ngf*16, self.ngf*16, self.n_groupf, bias=False) # 28
self.e4_pool = nn.MaxPool2d(2, 2) # 14 # 7
self.d4_1 = ConvUnit(self.ngf*16, self.ngf*32, self.n_groupf, bias=False)
self.d4_2 = ConvUnit(self.ngf*32, self.ngf*32, self.n_groupf, bias=False)
self.d4 = nn.Conv2d(self.ngf*32, self.out_c, kernel_size=1, stride=1) # 14
self.d4_up = DeconvUnit(self.out_c, self.out_c, n_groupf=2, kernel_size=2, stride=2, padding=0) # 28
self.final_upsample = nn.Upsample(size=224, mode='bilinear')
self.final_act = nn.Softmax2d()
def forward(self, x):
x = self.e1_1(x)
x = self.e1_2(x)
x = self.e1_pool(x)
x = self.e2_1(x)
x = self.e2_2(x)
x = self.e2_pool(x)
d2 = self.d2(x)
x = self.e3_1(x)
x = self.e3_2(x)
x = self.e3_3(x)
x = self.e3_pool(x)
d3 = self.d3(x)
x = self.e4_1(x)
x = self.e4_2(x)
x = self.e4_3(x)
x = self.e4_pool(x)
x = self.d4_1(x)
x = self.d4_2(x)
x = self.d4(x)
d4 = self.d4_up(x)
out1 = d3+d4
out1 = self.d3_up(out1)
out2 = out1 + d2
out = self.final_upsample(out2)
out = self.final_act(out)
return out
|
StarcoderdataPython
|
1652732
|
<reponame>stephenneuendorffer/hls_tuner<filename>HLS_tuner/hlstuner/search/modeltuner.py
# Tuner for Regression Models
#
# Author: <NAME> (<EMAIL>)
#######################################################################################################################
# TODO: Reimplement the ModelTuner class using an SQLite in-memory database
# Number of iterations to spend on searching the optimum
ITERATIONS = 1000
#######################################################################################################################
import copy
import logging
from opentuner.driverbase import DriverBase
from opentuner.resultsdb.models import Configuration, Result
log = logging.getLogger(__name__)
#######################################################################################################################
class QueryResult(object):
"""This class replaces the Query object returned by results_query() in the absence of a database."""
def __init__(self, result):
"""Constructor
Parameters
----------
result : Result
Result to be put in the query object.
"""
self._result = result
self._done = False
def __iter__(self):
"""Returns the iterator"""
return self
def next(self):
"""Returns the next Result.
Returns
-------
Result
Next result
"""
if self._done:
raise StopIteration
self._done = True
        return self._result
    # Python 3 uses __next__ for the iterator protocol.
    __next__ = next
def count(self):
"""Returns the number of results in the object
Returns
-------
Integer
Number of results
"""
return 1
def one(self):
"""Returns the first result in the object
Returns
-------
Result
First result
"""
return self._result
class ModelDriver(DriverBase):
"""Driver with necessary callbacks for ModelTuner class"""
def __init__(self, objective, manipulator):
"""Create a new driver object."""
self._results = {}
self.objective = objective
self.manipulator = manipulator
self.best_result = None
# Some techniques need these attributes.
self.generation = None
self.tuning_run = None
def add_plugin(self, plugin):
"""Callback to install a plugin into the search driver.
Parameters
----------
plugin
Unused
"""
pass
def has_results(self, cfg):
"""Callback to check whether results for the given configuration are ready.
Parameters
----------
cfg : Configuration
Returns
-------
Boolean
True iff results are ready
"""
return cfg.hash in self._results
def get_configuration(self, cfg):
"""Callback for creating Configuration objects
Parameters
----------
cfg : dict
Configuration
Returns
-------
Configuration
Configuration
Notes
-----
It is guaranteed that no other Configuration object with the same configuration values exists inside the model
tuner, but outside the tuner duplicates may exist.
"""
self.manipulator.normalize(cfg)
cfg_hash = self.manipulator.hash_config(cfg)
if cfg_hash in self._results:
return self._results[cfg_hash].configuration
return Configuration(hash = cfg_hash, data=cfg)
def add_result(self, result):
"""Remember a result such that search techniques can look it up.
Parameters
----------
result : Result
Result
"""
self._results[result.configuration.hash] = result
def results_query(self, config):
"""Look up the result obtained for a given configuration.
Parameters
----------
config : Configuration
Configuration
Returns
-------
Result
Result
"""
return QueryResult(self._results[config.hash])
def register_result_callback(self, desired_result, callback):
"""Register a callback function to handle the result of evaluating a configuration.
Parameters
----------
desired_result : DesiredResult
Unused
callback : function taking Result
Callback function
"""
self._result_callback = callback
def invoke_result_callback(self, result):
"""Invoke the callback function to provide the result to the search technique.
Parameters
----------
result : Result
Result
"""
self._result_callback(result)
def set_best_result(self, result):
"""Set the best result. Some search techniques rely on this attribute.
Parameters
----------
result : Result
Result
"""
self.best_result = result
class ModelTuner(object):
"""This class minimizes machine-learning models using OpenTuner search techniques.
Parameters
----------
models : list of Model
Machine-learning models over which the objective function must be minimized
technique : SearchTechnique
Search technique(s) that should be used to locate the minimum
objective : SearchObjective
Objective function
manipulator : ConfigurationManipulator
Configuration manipulator describing design parameters
data_set : list of Result
Evaluated configurations. They are used to avoid suggesting already evaluated configurations.
"""
def __init__(self, models, technique, objective, manipulator, data_set):
"""Create a new tuner object.
"""
# Copy the objective because we need to use our own driver.
self._objective = copy.copy(objective)
self._driver = ModelDriver(self._objective, manipulator)
self._models = models
self._technique = technique
self._technique.set_driver(self._driver)
self._objective.set_driver(self._driver)
self._data_set = data_set
def tune(self):
"""Optimize the objective function.
Returns
-------
dict
Best configuration found that has not been evaluated yet
"""
old_results = {result.configuration.hash: result for result in self._data_set}
best_result = None
best_new_result = None
for _ in range(ITERATIONS):
desired_result = self._technique.desired_result()
            if desired_result is None:
break
cfg = desired_result.configuration
if self._driver.has_results(cfg):
continue
result = Result()
result.configuration = cfg
old_result = old_results.get(cfg.hash)
if old_result:
# Avoid making predictions for evaluated points and getting inaccurate results due to noise.
for model in self._models:
setattr(result, model.metric, getattr(old_result, model.metric))
setattr(result, model.metric + "_std_dev", 0.0)
else:
for model in self._models:
mean, std_dev = model.predict(cfg.data)
setattr(result, model.metric, mean)
setattr(result, model.metric + "_std_dev", std_dev)
self._driver.add_result(result)
self._driver.invoke_result_callback(result)
# Even though we don't return the best result if we have already evaluated it, we still need it because some
# search algorithms rely on it.
            if best_result is None or self._objective.lt(result, best_result):
best_result = result
self._driver.set_best_result(best_result)
            if not old_result and (best_new_result is None or self._objective.lt(result, best_new_result)):
best_new_result = result
# Return the configuration associated with the best result. We do not return a Configuration object because there
# may already be another Configuration object with the same parameters in the database. Returning a new object
# with the same parameters may mess up code that expects the configuration to be unique.
return best_new_result.configuration.data if best_new_result is not None else None
|
StarcoderdataPython
|
3536333
|
"""
Create a zip with all recording folders required for evaluation - instead of having to store all recordings"""
import argparse
import pandas as pd
from config import STATIONS
from helpers import zipit, build_file_path_for_countings
parser = argparse.ArgumentParser(description='Create a zip with all recording folders required for evaluation')
parser.add_argument('--station', type=str, required=True, choices=STATIONS,
help='one of our two stations')
parser.add_argument('--board', type=str, required=True, choices=['nano', 'tx2', 'xavier'],
help='type of board')
args = parser.parse_args()
if __name__ == '__main__':
file = build_file_path_for_countings(args.station, args.board)
df = pd.read_csv(file)
eval_dirs = ["/".join(p.split("/")[:-1]) for p in df["movie_file"].unique()]
zipit(eval_dirs, "eval_dirs.zip")
|
StarcoderdataPython
|
1724339
|
#!/usr/bin/python
import MySQLdb
import re
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
execfile(os.path.join(BASE_DIR, 'settings.py'))
db = MySQLdb.connect(host=SETTINGS_SQL_HOST,
user=SETTINGS_SQL_USER,
passwd=<PASSWORD>,
db=SETTINGS_SQL_DB)
cur = db.cursor()
cur.execute("SELECT post_id, meta_value FROM wp_postmeta where meta_key='_wp_attachment_metadata' ORDER BY post_id DESC")
images = []
##
## First, we collect all media data that we will need to insert the bucket info rows for
##
for row in cur.fetchall():
    rexp = r's:4:"file";s:(?:\d+):"((?:.+?)\.(?:jpg|jpeg|png|gif))";'
    p = re.search(rexp, row[1])
    if p:
        file = str(p.groups()[0])
        images.append([row[0], file])
##
## Then, let's insert the bucket info data to wp_postmeta
##
## !!! CAUTION: THIS IS GOING TO MODIFY YOUR DB !!!
##
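## The meta_value written below is a PHP-serialized array of the form
## a:2:{s:6:"bucket";s:<len>:"<bucket>";s:3:"key";s:<len>:"<key>";} --
## each s:<N> prefix must equal the exact string length, hence the len() calls.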
for i in images:
post_id = i[0]
key = 'wp-content/uploads/' + i[1]
cur.execute("DELETE FROM wp_postmeta WHERE post_id=" + str(post_id) + " AND meta_key='amazonS3_info';")
cur.execute("INSERT INTO wp_postmeta (post_id, meta_key, meta_value) VALUES (" +
str(post_id) + ", 'amazonS3_info', 'a:2:{s:6:\"bucket\";s:" + str(len(SETTINGS_S3_BUCKET)) +
":\"" + SETTINGS_S3_BUCKET + "\";s:3:\"key\";s:" + str(len(key)) + ":\"" + key + "\";}')")
print 'done: ' + str(post_id) + ' -> ' + i[1]
cur.close()
db.close()
|
StarcoderdataPython
|
9654287
|
#
# This file is part of the ErlotinibGefitinib repository
# (https://github.com/DavAug/ErlotinibGefitinib/) which is released under the
# BSD 3-clause license. See accompanying LICENSE.md for copyright notice and
# full license details.
#
import numpy as np
import plotly.colors
import plotly.graph_objects as go
def plot_optimised_parameters(parameters, scores, xlabels=None, ids=None):
"""
Returns a `plotly.graph_objects.Figure` containing a box plot of the
optimised parameter values and objective function scores for each
individual and optimisation run.
Arguments:
parameters -- Array-like object with parameters for each individual and
each optimisation run.
Shape: (n_individuals, n_runs, n_parameters)
scores -- Array-like object with objective function scores for each
individual and each optimisation run.
Shape: (n_individuals, n_runs)
    xlabels -- Labels for the parameters plus the score. If `None`, parameters
        are enumerated and the score is labelled 'score'.
    ids -- IDs of individuals. If `None`, IDs are set to the index.
    """
parameters = np.asarray(parameters)
scores = np.asarray(scores)
if parameters.shape[0] != scores.shape[0]:
raise ValueError(
            'Parameters and scores do not have the same number of individuals. '
            'Shape parameters <' + str(parameters.shape) + '>; shape scores <'
+ str(scores.shape) + '>.')
if parameters.shape[1] != scores.shape[1]:
raise ValueError(
            'Parameters and scores do not have the same number of runs. '
            'Shape parameters <' + str(parameters.shape) + '>; shape scores <'
+ str(scores.shape) + '>.')
if xlabels is None:
# Enumerate parameters and call score 'score'
xlabels = [str(param_id) for param_id in range(parameters.shape[2])]
xlabels += ['score']
n_params = parameters.shape[2] + 1
if len(xlabels) != parameters.shape[2]+1:
raise ValueError(
'Number of x labels does not match number of parameters plus '
'score.')
if ids is None:
# Enumerate individuals
ids = np.arange(parameters.shape[0])
if len(ids) != parameters.shape[0]:
raise ValueError(
            'Number of provided ids does not match the number of individuals in '
'`parameters` and `scores`.')
# Define colorscheme
colors = plotly.colors.qualitative.Plotly[:n_params]
# Create figure
fig = go.Figure()
# Box plot of optimised model parameters
n_ids = len(ids)
for index in range(n_ids):
# Get optimised parameters for individual
params = parameters[index, ...]
# Get scores
score = scores[index, :]
        # Create box plots for the parameters
for param_id in range(n_params-1):
fig.add_trace(
go.Box(
y=params[:, param_id],
name=xlabels[param_id],
boxpoints='all',
jitter=0.2,
pointpos=-1.5,
visible=True if index == 0 else False,
marker=dict(
symbol='circle',
opacity=0.7,
line=dict(color='black', width=1)),
marker_color=colors[param_id],
line_color=colors[param_id]))
        # Create a box plot for the score
fig.add_trace(
go.Box(
y=score,
name=xlabels[-1],
boxpoints='all',
jitter=0.2,
pointpos=-1.5,
visible=True if index == 0 else False,
marker=dict(
symbol='circle',
opacity=0.7,
line=dict(color='black', width=1)),
marker_color=colors[-1],
line_color=colors[-1]))
# Set figure size
fig.update_layout(
autosize=True,
template="plotly_white",
yaxis_title="Estimates")
# Add switch between individuals
fig.update_layout(
updatemenus=[
dict(
type="buttons",
direction="right",
buttons=list([dict(
args=[{
"visible": [False] * (n_params * idx) +
[True] * n_params +
[False] * (n_params * (n_ids - idx - 1))}],
label="ID: %s" % str(ids[idx]),
method="restyle") for idx in range(n_ids)]),
pad={"r": 0, "t": -10},
showactive=True,
x=0.0,
xanchor="left",
y=1.1,
yanchor="top"
)
]
)
# Position legend
fig.update_layout(legend=dict(
yanchor="bottom",
y=0.01,
xanchor="left",
x=1.05))
return fig
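# Example (illustrative):
#   fig = plot_optimised_parameters(
#       np.random.rand(3, 10, 4),          # 3 individuals, 10 runs, 4 parameters
#       np.random.rand(3, 10),             # matching scores
#       xlabels=['p0', 'p1', 'p2', 'p3', 'score'],
#       ids=['A', 'B', 'C'])
#   fig.show()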
|
StarcoderdataPython
|
93978
|
try:
# Python 3
from http.client import HTTPResponse, IncompleteRead
except ImportError:
# Python 2
from httplib import HTTPResponse, IncompleteRead
from ..console_write import console_write
class DebuggableHTTPResponse(HTTPResponse):
"""
A custom HTTPResponse that formats debugging info for Sublime Text
"""
_debug_protocol = 'HTTP'
def __init__(self, sock, debuglevel=0, method=None, **kwargs):
# We have to use a positive debuglevel to get it passed to here,
# however we don't want to use it because by default debugging prints
# to the stdout and we can't capture it, so we use a special -1 value
if debuglevel == 5:
debuglevel = -1
HTTPResponse.__init__(self, sock, debuglevel=debuglevel, method=method)
def begin(self):
return_value = HTTPResponse.begin(self)
if self.debuglevel == -1:
console_write(u'Urllib %s Debug Read' % self._debug_protocol, True)
# Python 2
if hasattr(self.msg, 'headers'):
headers = self.msg.headers
# Python 3
else:
headers = []
for header in self.msg:
headers.append("%s: %s" % (header, self.msg[header]))
versions = {
9: 'HTTP/0.9',
10: 'HTTP/1.0',
11: 'HTTP/1.1'
}
status_line = versions[self.version] + ' ' + str(self.status) + ' ' + self.reason
headers.insert(0, status_line)
for line in headers:
console_write(u" %s" % line.rstrip())
return return_value
def is_keep_alive(self):
# Python 2
if hasattr(self.msg, 'headers'):
connection = self.msg.getheader('connection')
# Python 3
else:
connection = self.msg['connection']
if connection and connection.lower() == 'keep-alive':
return True
return False
def read(self, *args):
try:
return HTTPResponse.read(self, *args)
        except IncompleteRead as e:
return e.partial
|
StarcoderdataPython
|
6615955
|
<reponame>rjmolina13/CanIJailbreak2-Website
# Minimum Version List
MinVersionMap = {
"iPhone 2G": "1.0",
"iPhone 3G": "2.0", "iPhone 3GS": "3.0",
"iPhone 4": "4.0", "iPhone 4S": "5.0",
"iPhone 5": "6.0", "iPhone 5C": "7.0", "iPhone 5S": "7.0",
"iPhone 6": "8.0", "iPhone 6 Plus": "8.0",
"iPhone 6S": "9.0", "iPhone 6S Plus": "9.0",
"iPhone 7": "10.0", "iPhone 7 Plus": "10.0",
"iPhone 8": "11.0", "iPhone 8 Plus": "11.0", "iPhone X": "11.1",
"iPhone XS": "12.0", "iPhone XS Max": "12.0", "iPhone XR": "12.0",
"iPhone 11": "13.0", "iPhone 11 Pro": "13.0", "iPhone 11 Pro Max": "13.0",
"iPhone SE 1": "9.3", "iPhone SE 2": "13.4.1",
"iPhone 12": "14.1", "iPhone 12 Mini": "14.1", "iPhone 12 Pro": "14.1", "iPhone 12 Pro Max": "14.1",
"iPad 1": "3.2", "iPad 2": "4.3", "iPad 3": "5.1", "iPad 4": "6.0", "iPad 5": "10.2.1", "iPad 6": "11.3", "iPad 7": "13.1", "iPad 8": "14",
"iPad Air 1": "7.0.3", "iPad Air 2": "8.1", "iPad Air 3": "12.2", "iPad Air 4": "14.0",
"iPad Mini 1": "6.0.1", "iPad Mini 2": "7.0.3", "iPad Mini 3": "8.1", "iPad Mini 4": "9.0", "iPad Mini 5": "12.2",
"iPad Pro 12.9 1": "9.1", "iPad Pro 9.7": "9.3", "iPad Pro 12.9 2": "10.3.2", "iPad Pro 10.5": "10.3.2", "iPad Pro 11 1": "12.1", "iPad Pro 12.9 3": "12.1", "iPad Pro 11 2": "13.4", "iPad Pro 12.9 4": "13.4", "iPad Pro 11 3": "14.4", "iPad Pro 12.9 5": "14.4",
"iPod Touch 1": "1.1", "iPod Touch 2": "2.1.1", "iPod Touch 3": "3.1.1", "iPod Touch 4": "4.1", "iPod Touch 5": "6.0", "iPod Touch 6": "8.4", "iPod Touch 7": "12.3.1"
}
# Maximum Version List
GMaxiOS = "14.5"
MaxVersionMap = {
"iPhone 2G": "3.1.3",
"iPhone 3G": "4.2.1", "iPhone 3GS": "6.1.6",
"iPhone 4": "7.1.2", "iPhone 4S": "9.3.6",
"iPhone 5": "10.3.4", "iPhone 5C": "10.3.3", "iPhone 5S": "12.5.1",
"iPhone 6": "12.5.1", "iPhone 6 Plus": "12.5.1",
"iPhone 6S": GMaxiOS, "iPhone 6S Plus": GMaxiOS,
"iPhone 7": GMaxiOS, "iPhone 7 Plus": GMaxiOS,
"iPhone 8": GMaxiOS, "iPhone 8 Plus": GMaxiOS, "iPhone X": GMaxiOS,
"iPhone XS": GMaxiOS, "iPhone XS Max": GMaxiOS, "iPhone XR": GMaxiOS,
"iPhone 11": GMaxiOS, "iPhone 11 Pro": GMaxiOS, "iPhone 11 Pro Max": GMaxiOS,
"iPhone SE 1": GMaxiOS, "iPhone SE 2": GMaxiOS,
"iPhone 12": GMaxiOS, "iPhone 12 Mini": GMaxiOS, "iPhone 12 Pro": GMaxiOS, "iPhone 12 Pro Max": GMaxiOS,
"iPad 1": "5.1.1", "iPad 2": "9.3.6", "iPad 3": "9.3.6", "iPad 4": "10.3.4", "iPad 5": GMaxiOS, "iPad 6": GMaxiOS, "iPad 7": GMaxiOS, "iPad 8": GMaxiOS,
"iPad Air 1": "12.5.1", "iPad Air 2": GMaxiOS, "iPad Air 3": GMaxiOS, "iPad Air 4": GMaxiOS,
"iPad Mini 1": "9.3.6", "iPad Mini 2": "12.5.1", "iPad Mini 3": "12.5.1", "iPad Mini 4": GMaxiOS, "iPad Mini 5": GMaxiOS,
"iPad Pro 12.9 1": GMaxiOS, "iPad Pro 9.7": GMaxiOS, "iPad Pro 12.9 2": GMaxiOS, "iPad Pro 10.5": GMaxiOS, "iPad Pro 11 1": GMaxiOS, "iPad Pro 12.9 3": GMaxiOS, "iPad Pro 11 2": GMaxiOS, "iPad Pro 12.9 4": GMaxiOS, "iPad Pro 11 3": GMaxiOS, "iPad Pro 12.9 5": GMaxiOS,
"iPod Touch 1": "3.1.3", "iPod Touch 2": "4.2.1", "iPod Touch 3": "5.1.1", "iPod Touch 4": "6.1.6", "iPod Touch 5": "9.3.5", "iPod Touch 6": "12.5.1", "iPod Touch 7": GMaxiOS
}
# Mappings of Processor Generations (with some exceptions such as iP8 & X)
DeviceMapPGNamed = {
'2G': ['iPhone 2G'],
'3G': ['iPhone 3G', 'iPod Touch 1', 'iPod Touch 2'],
'3GS': ['iPhone 3GS', 'iPod Touch 3'],
'4': ['iPhone 4', 'iPad 1', 'iPod Touch 4'],
'4S': ['iPhone 4S', 'iPad 2', 'iPad 3', 'iPad Mini 1', 'iPod Touch 5'],
'5': ['iPhone 5', 'iPhone 5C', 'iPad 4'],
'5S': ['iPhone 5S', 'iPad Air 1', 'iPad Mini 2', 'iPad Mini 3'],
'6': ['iPhone 6', 'iPhone 6 Plus', 'iPad Air 2', 'iPad Mini 4', 'iPod Touch 6'],
'6S': ['iPhone 6S', 'iPhone 6S Plus', 'iPhone SE 1', 'iPad 5', 'iPad Pro 12.9 1', 'iPad Pro 9.7'],
'7': ['iPhone 7', 'iPhone 7 Plus', 'iPad 6', 'iPad 7', 'iPad Pro 12.9 2', 'iPad Pro 10.5', 'iPod Touch 7'],
'8': ['iPhone 8', 'iPhone 8 Plus'],
'X': ['iPhone X'],
'XS': ['iPhone XR', 'iPhone XS', 'iPhone XS Max', 'iPad 8', 'iPad Air 3', 'iPad Pro 11 1', 'iPad Pro 12.9 3', 'iPad Pro 11 2', 'iPad Pro 12.9 4', 'iPad Mini 5'],
'11': ['iPhone 11', 'iPhone 11 Pro', 'iPhone 11 Pro Max', 'iPhone SE 2'],
'12': ['iPhone 12 Mini', 'iPhone 12', 'iPhone 12 Pro', 'iPhone 12 Pro Max', 'iPad Air 4'],
'M1': ['iPad Pro 11 3', 'iPad Pro 12.9 5']
}
# List DeviceProcessors in chronological order
DeviceMapPG = ["2G", "3G", "3GS", "4", "4S", "5", "5S", "6", "6S", "7", "8", "X", "XS", "11", "12", "M1"]
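# --- Hypothetical helper (not part of the original file) ---
# A minimal sketch showing how the maps above could be queried; the helper
# names and the tuple-based version comparison are illustrative assumptions.
def _version_tuple(version):
    # "13.4.1" -> (13, 4, 1), so versions compare numerically instead of lexically
    return tuple(int(part) for part in version.split("."))

def supports_version(device, version):
    """Return True if `device` can run iOS `version` according to the maps above."""
    return (_version_tuple(MinVersionMap[device])
            <= _version_tuple(version)
            <= _version_tuple(MaxVersionMap[device]))

# Example: supports_version("iPhone X", "13.3") -> True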
|
StarcoderdataPython
|
1907731
|
import numpy as np
import random
import json
import h5py
from patch_library import PatchLibrary
from glob import glob
import matplotlib.pyplot as plt
from skimage import io, color, img_as_float
from skimage.exposure import adjust_gamma
from skimage.segmentation import mark_boundaries
from sklearn.feature_extraction.image import extract_patches_2d
from sklearn.metrics import classification_report
from keras.models import Sequential, Graph, model_from_json
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.layers.core import Dense, Dropout, Activation, Flatten, Merge, Reshape, MaxoutDense
from keras.layers.normalization import BatchNormalization
from keras.regularizers import l1l2
from keras.optimizers import SGD
from keras.constraints import maxnorm
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras.utils import np_utils
class SegmentationModel(object):
def __init__(self, n_epoch=10, n_chan=4, batch_size=128, loaded_model=False, architecture='single', w_reg=0.01, n_filters=[64,128,128,128], k_dims = [7,5,5,3], activation = 'relu'):
'''
A class for compiling/loading, fitting and saving various models, viewing segmented images and analyzing results
        INPUT (1) int 'n_epoch': number of epochs to train on. defaults to 10
(2) int 'n_chan': number of channels being assessed. defaults to 4
(3) int 'batch_size': number of images to train on for each batch. defaults to 128
(4) bool 'loaded_model': True if loading a pre-existing model. defaults to False
(5) str 'architecture': type of model to use, options = single, dual, or two_path. defaults to single (only currently optimized version)
(6) float 'w_reg': value for l1 and l2 regularization. defaults to 0.01
(7) list 'n_filters': number of filters for each convolutional layer (4 total)
(8) list 'k_dims': dimension of kernel at each layer (will be a k_dim[n] x k_dim[n] square). Four total.
(9) string 'activation': activation to use at each convolutional layer. defaults to relu.
'''
self.n_epoch = n_epoch
self.n_chan = n_chan
self.batch_size = batch_size
self.architecture = architecture
self.loaded_model = loaded_model
self.w_reg = w_reg
self.n_filters = n_filters
self.k_dims = k_dims
self.activation = activation
if not self.loaded_model:
if self.architecture == 'two_path':
self.model_comp = self.comp_two_path()
elif self.architecture == 'dual':
self.model_comp = self.comp_double()
else:
self.model_comp = self.compile_model()
else:
model = str(raw_input('Which model should I load? '))
self.model_comp = self.load_model_weights(model)
def compile_model(self):
'''
        compiles standard single model with 4 convolutional/max-pooling layers.
'''
print 'Compiling single model...'
single = Sequential()
single.add(Convolution2D(self.n_filters[0], self.k_dims[0], self.k_dims[0], border_mode='valid', W_regularizer=l1l2(l1=self.w_reg, l2=self.w_reg), input_shape=(self.n_chan,33,33)))
single.add(Activation(self.activation))
single.add(BatchNormalization(mode=0, axis=1))
single.add(MaxPooling2D(pool_size=(2,2), strides=(1,1)))
single.add(Dropout(0.5))
single.add(Convolution2D(self.n_filters[1], self.k_dims[1], self.k_dims[1], activation=self.activation, border_mode='valid', W_regularizer=l1l2(l1=self.w_reg, l2=self.w_reg)))
single.add(BatchNormalization(mode=0, axis=1))
single.add(MaxPooling2D(pool_size=(2,2), strides=(1,1)))
single.add(Dropout(0.5))
single.add(Convolution2D(self.n_filters[2], self.k_dims[2], self.k_dims[2], activation=self.activation, border_mode='valid', W_regularizer=l1l2(l1=self.w_reg, l2=self.w_reg)))
single.add(BatchNormalization(mode=0, axis=1))
single.add(MaxPooling2D(pool_size=(2,2), strides=(1,1)))
single.add(Dropout(0.5))
single.add(Convolution2D(self.n_filters[3], self.k_dims[3], self.k_dims[3], activation=self.activation, border_mode='valid', W_regularizer=l1l2(l1=self.w_reg, l2=self.w_reg)))
single.add(Dropout(0.25))
single.add(Flatten())
single.add(Dense(5))
single.add(Activation('softmax'))
sgd = SGD(lr=0.001, decay=0.01, momentum=0.9)
        single.compile(loss='categorical_crossentropy', optimizer=sgd)  # use the configured SGD instance, not the string default
print 'Done.'
return single
def comp_two_path(self):
'''
compiles two-path model, takes in a 4x33x33 patch and assesses global and local paths, then merges the results.
'''
print 'Compiling two-path model...'
model = Graph()
model.add_input(name='input', input_shape=(self.n_chan, 33, 33))
# local pathway, first convolution/pooling
model.add_node(Convolution2D(64, 7, 7, border_mode='valid', activation='relu', W_regularizer=l1l2(l1=0.01, l2=0.01)), name='local_c1', input= 'input')
model.add_node(MaxPooling2D(pool_size=(4,4), strides=(1,1), border_mode='valid'), name='local_p1', input='local_c1')
# local pathway, second convolution/pooling
model.add_node(Dropout(0.5), name='drop_lp1', input='local_p1')
model.add_node(Convolution2D(64, 3, 3, border_mode='valid', activation='relu', W_regularizer=l1l2(l1=0.01, l2=0.01)), name='local_c2', input='drop_lp1')
model.add_node(MaxPooling2D(pool_size=(2,2), strides=(1,1), border_mode='valid'), name='local_p2', input='local_c2')
# global pathway
model.add_node(Convolution2D(160, 13, 13, border_mode='valid', activation='relu', W_regularizer=l1l2(l1=0.01, l2=0.01)), name='global', input='input')
# merge local and global pathways
model.add_node(Dropout(0.5), name='drop_lp2', input='local_p2')
model.add_node(Dropout(0.5), name='drop_g', input='global')
model.add_node(Convolution2D(5, 21, 21, border_mode='valid', activation='relu', W_regularizer=l1l2(l1=0.01, l2=0.01)), name='merge', inputs=['drop_lp2', 'drop_g'], merge_mode='concat', concat_axis=1)
# Flatten output of 5x1x1 to 1x5, perform softmax
model.add_node(Flatten(), name='flatten', input='merge')
model.add_node(Dense(5, activation='softmax'), name='dense_output', input='flatten')
model.add_output(name='output', input='dense_output')
sgd = SGD(lr=0.005, decay=0.1, momentum=0.9)
        model.compile(sgd, loss={'output': 'categorical_crossentropy'})  # use the configured SGD instance, not the string default
print 'Done.'
return model
def comp_double(self):
'''
        double model. Similar to two-pathway, except takes in a 4x33x33 patch and its center 4x5x5 patch. merges paths at flatten layer.
'''
print 'Compiling double model...'
single = Sequential()
single.add(Convolution2D(64, 7, 7, border_mode='valid', W_regularizer=l1l2(l1=0.01, l2=0.01), input_shape=(4,33,33)))
single.add(Activation('relu'))
single.add(BatchNormalization(mode=0, axis=1))
single.add(MaxPooling2D(pool_size=(2,2), strides=(1,1)))
single.add(Dropout(0.5))
single.add(Convolution2D(nb_filter=128, nb_row=5, nb_col=5, activation='relu', border_mode='valid', W_regularizer=l1l2(l1=0.01, l2=0.01)))
single.add(BatchNormalization(mode=0, axis=1))
single.add(MaxPooling2D(pool_size=(2,2), strides=(1,1)))
single.add(Dropout(0.5))
single.add(Convolution2D(nb_filter=256, nb_row=5, nb_col=5, activation='relu', border_mode='valid', W_regularizer=l1l2(l1=0.01, l2=0.01)))
single.add(BatchNormalization(mode=0, axis=1))
single.add(MaxPooling2D(pool_size=(2,2), strides=(1,1)))
single.add(Dropout(0.5))
single.add(Convolution2D(nb_filter=128, nb_row=3, nb_col=3, activation='relu', border_mode='valid', W_regularizer=l1l2(l1=0.01, l2=0.01)))
single.add(Dropout(0.25))
single.add(Flatten())
# add small patch to train on
five = Sequential()
five.add(Reshape((100,1), input_shape = (4,5,5)))
five.add(Flatten())
five.add(MaxoutDense(128, nb_feature=5))
five.add(Dropout(0.5))
model = Sequential()
# merge both paths
model.add(Merge([five, single], mode='concat', concat_axis=1))
model.add(Dense(5))
model.add(Activation('softmax'))
sgd = SGD(lr=0.001, decay=0.01, momentum=0.9)
        model.compile(loss='categorical_crossentropy', optimizer=sgd)  # use the configured SGD instance, not the string default
print 'Done.'
return model
def load_model_weights(self, model_name):
'''
INPUT (1) string 'model_name': filepath to model and weights, not including extension
OUTPUT: Model with loaded weights. can fit on model using loaded_model=True in fit_model method
'''
print 'Loading model {}'.format(model_name)
model = '{}.json'.format(model_name)
weights = '{}.hdf5'.format(model_name)
with open(model) as f:
m = f.next()
model_comp = model_from_json(json.loads(m))
model_comp.load_weights(weights)
print 'Done.'
return model_comp
def fit_model(self, X_train, y_train, X5_train = None, save=True):
'''
INPUT (1) numpy array 'X_train': list of patches to train on in form (n_sample, n_channel, h, w)
(2) numpy vector 'y_train': list of labels corresponding to X_train patches in form (n_sample,)
(3) numpy array 'X5_train': center 5x5 patch in corresponding X_train patch. if None, uses single-path architecture
OUTPUT (1) Fits specified model
'''
Y_train = np_utils.to_categorical(y_train, 5)
shuffle = zip(X_train, Y_train)
np.random.shuffle(shuffle)
X_train = np.array([shuffle[i][0] for i in xrange(len(shuffle))])
Y_train = np.array([shuffle[i][1] for i in xrange(len(shuffle))])
es = EarlyStopping(monitor='val_loss', patience=2, verbose=1, mode='auto')
# Save model after each epoch to check/bm_epoch#-val_loss
checkpointer = ModelCheckpoint(filepath="./check/bm_{epoch:02d}-{val_loss:.2f}.hdf5", verbose=1)
if self.architecture == 'dual':
self.model_comp.fit([X5_train, X_train], Y_train, batch_size=self.batch_size, nb_epoch=self.n_epoch, validation_split=0.1, show_accuracy=True, verbose=1, callbacks=[checkpointer])
elif self.architecture == 'two_path':
data = {'input': X_train, 'output': Y_train}
self.model_comp.fit(data, batch_size=self.batch_size, nb_epoch=self.n_epoch, validation_split=0.1, show_accuracy=True, verbose=1, callbacks=[checkpointer])
else:
self.model_comp.fit(X_train, Y_train, batch_size=self.batch_size, nb_epoch=self.n_epoch, validation_split=0.1, show_accuracy=True, verbose=1, callbacks=[checkpointer])
def save_model(self, model_name):
'''
        INPUT string 'model_name': name to save model and weights under, including filepath but not extension
        Saves current model as json and weights as hdf5 file
'''
model = '{}.json'.format(model_name)
weights = '{}.hdf5'.format(model_name)
json_string = self.model_comp.to_json()
self.model_comp.save_weights(weights)
with open(model, 'w') as f:
json.dump(json_string, f)
def class_report(self, X_test, y_test):
'''
        returns sklearn's classification report (precision, recall, f1-score)
INPUT (1) list 'X_test': test data of 4x33x33 patches
(2) list 'y_test': labels for X_test
OUTPUT (1) confusion matrix of precision, recall and f1 score
'''
        y_pred = self.model_comp.predict_classes(X_test)  # was undefined `self.model_load.predict_class`
print classification_report(y_pred, y_test)
def predict_image(self, test_img, show=False):
'''
predicts classes of input image
INPUT (1) str 'test_image': filepath to image to predict on
(2) bool 'show': True to show the results of prediction, False to return prediction
OUTPUT (1) if show == False: array of predicted pixel classes for the center 208 x 208 pixels
(2) if show == True: displays segmentation results
'''
imgs = io.imread(test_img).astype('float').reshape(5,240,240)
plist = []
# create patches from an entire slice
for img in imgs[:-1]:
if np.max(img) != 0:
img /= np.max(img)
p = extract_patches_2d(img, (33,33))
plist.append(p)
patches = np.array(zip(np.array(plist[0]), np.array(plist[1]), np.array(plist[2]), np.array(plist[3])))
# predict classes of each pixel based on model
full_pred = self.model_comp.predict_classes(patches)
fp1 = full_pred.reshape(208,208)
if show:
io.imshow(fp1)
            plt.show()
else:
return fp1
def show_segmented_image(self, test_img, modality='t1c', show = False):
'''
Creates an image of original brain with segmentation overlay
INPUT (1) str 'test_img': filepath to test image for segmentation, including file extension
        (2) str 'modality': imaging modality to use as background. defaults to t1c. options: (flair, t1, t1c, t2)
(3) bool 'show': If true, shows output image. defaults to False.
OUTPUT (1) if show is True, shows image of segmentation results
(2) if show is false, returns segmented image.
'''
modes = {'flair':0, 't1':1, 't1c':2, 't2':3}
segmentation = self.predict_image(test_img, show=False)
img_mask = np.pad(segmentation, (16,16), mode='edge')
ones = np.argwhere(img_mask == 1)
twos = np.argwhere(img_mask == 2)
threes = np.argwhere(img_mask == 3)
fours = np.argwhere(img_mask == 4)
test_im = io.imread(test_img)
test_back = test_im.reshape(5,240,240)[-2]
# overlay = mark_boundaries(test_back, img_mask)
gray_img = img_as_float(test_back)
# adjust gamma of image
image = adjust_gamma(color.gray2rgb(gray_img), 0.65)
sliced_image = image.copy()
red_multiplier = [1, 0.2, 0.2]
yellow_multiplier = [1,1,0.25]
green_multiplier = [0.35,0.75,0.25]
blue_multiplier = [0,0.25,0.9]
# change colors of segmented classes
for i in xrange(len(ones)):
sliced_image[ones[i][0]][ones[i][1]] = red_multiplier
for i in xrange(len(twos)):
sliced_image[twos[i][0]][twos[i][1]] = green_multiplier
for i in xrange(len(threes)):
sliced_image[threes[i][0]][threes[i][1]] = blue_multiplier
for i in xrange(len(fours)):
sliced_image[fours[i][0]][fours[i][1]] = yellow_multiplier
if show:
io.imshow(sliced_image)
plt.show()
else:
return sliced_image
def get_dice_coef(self, test_img, label):
'''
Calculate dice coefficient for total slice, tumor-associated slice, advancing tumor and core tumor
INPUT (1) str 'test_img': filepath to slice to predict on
(2) str 'label': filepath to ground truth label for test_img
OUTPUT: Summary of dice scores for the following classes:
- all classes
- all classes excluding background (ground truth and segmentation)
- all classes excluding background (only ground truth-based)
- advancing tumor
- core tumor (1,3 and 4)
'''
segmentation = self.predict_image(test_img)
seg_full = np.pad(segmentation, (16,16), mode='edge')
gt = io.imread(label).astype(int)
# dice coef of total image
total = (len(np.argwhere(seg_full == gt)) * 2.) / (2 * 240 * 240)
def unique_rows(a):
'''
helper function to get unique rows from 2D numpy array
'''
a = np.ascontiguousarray(a)
unique_a = np.unique(a.view([('', a.dtype)]*a.shape[1]))
return unique_a.view(a.dtype).reshape((unique_a.shape[0], a.shape[1]))
# dice coef of entire non-background image
gt_tumor = np.argwhere(gt != 0)
seg_tumor = np.argwhere(seg_full != 0)
        combo = np.append(seg_tumor, gt_tumor, axis=0)  # was undefined `pred_core`/`core`
        core_edema = unique_rows(combo)  # union of locations labeled tumor-associated in gt and segmentation
gt_c, seg_c = [], [] # predicted class of each
for i in core_edema:
gt_c.append(gt[i[0]][i[1]])
seg_c.append(seg_full[i[0]][i[1]])
        tumor_assoc = len(np.argwhere(np.array(gt_c) == np.array(seg_c))) / float(len(core_edema))  # original divided by undefined `core`
tumor_assoc_gt = len(np.argwhere(np.array(gt_c) == np.array(seg_c))) / float(len(gt_tumor))
# dice coef advancing tumor
adv_gt = np.argwhere(gt == 4)
        gt_a, seg_a = [], []  # classes at advancing-tumor locations
for i in adv_gt:
gt_a.append(gt[i[0]][i[1]])
            seg_a.append(seg_full[i[0]][i[1]])  # was undefined `fp`
gta = np.array(gt_a)
sega = np.array(seg_a)
adv = float(len(np.argwhere(gta == sega))) / len(adv_gt)
# dice coef core tumor
noadv_gt = np.argwhere(gt == 3)
necrosis_gt = np.argwhere(gt == 1)
live_tumor_gt = np.append(adv_gt, noadv_gt, axis = 0)
core_gt = np.append(live_tumor_gt, necrosis_gt, axis = 0)
gt_core, seg_core = [],[]
for i in core_gt:
gt_core.append(gt[i[0]][i[1]])
seg_core.append(seg_full[i[0]][i[1]])
gtcore, segcore = np.array(gt_core), np.array(seg_core)
core = len(np.argwhere(gtcore == segcore)) / float(len(core_gt))
print ' '
print 'Region_______________________| Dice Coefficient'
print 'Total Slice__________________| {0:.2f}'.format(total)
print 'No Background gt_____________| {0:.2f}'.format(tumor_assoc_gt)
print 'No Background both___________| {0:.2f}'.format(tumor_assoc)
print 'Advancing Tumor______________| {0:.2f}'.format(adv)
print 'Core Tumor___________________| {0:.2f}'.format(core)
if __name__ == '__main__':
train_data = glob('train_data/**')
patches = PatchLibrary((33,33), train_data, 50000)
X,y = patches.make_training_patches()
model = SegmentationModel()
model.fit_model(X, y)
model.save_model('models/example')
# tests = glob('test_data/2_*')
# test_sort = sorted(tests, key= lambda x: int(x[12:-4]))
# model = BasicModel(loaded_model=True)
# segmented_images = []
# for slice in test_sort[15:145]:
# segmented_images.append(model.show_segmented_image(slice))
|
StarcoderdataPython
|
1841054
|
from checkov.common.models.enums import CheckResult, CheckCategories
from checkov.arm.base_resource_check import BaseResourceCheck
# https://docs.microsoft.com/en-us/azure/templates/microsoft.keyvault/vaults/secrets
class SecretExpirationDate(BaseResourceCheck):
def __init__(self):
name = "Ensure that the expiration date is set on all secrets"
id = "CKV_AZURE_41"
supported_resources = ['Microsoft.KeyVault/vaults/secrets']
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf):
if "properties" in conf:
if "attributes" in conf["properties"]:
if "exp" in conf["properties"]["attributes"]:
if conf["properties"]["attributes"]["exp"]:
return CheckResult.PASSED
return CheckResult.FAILED
check = SecretExpirationDate()
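# --- Illustrative self-test (not part of the original file) ---
# Hypothetical resource configurations showing how the check evaluates an ARM
# Key Vault secret; the property structure mirrors the template schema linked above.
if __name__ == "__main__":
    passing_conf = {"properties": {"attributes": {"exp": 1702728162}}}
    failing_conf = {"properties": {"attributes": {"enabled": True}}}
    print(check.scan_resource_conf(passing_conf))  # CheckResult.PASSED
    print(check.scan_resource_conf(failing_conf))  # CheckResult.FAILED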
|
StarcoderdataPython
|
3500995
|
<reponame>KhadijaMahanga/bluetail<filename>bluetail/migrations/0005_externalperson.py<gh_stars>1-10
# Generated by Django 2.2.13 on 2020-07-20 16:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bluetail', '0004_ocdsrecordjson'),
]
operations = [
migrations.CreateModel(
name='ExternalPerson',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('scheme', models.CharField(max_length=1024)),
('name', models.CharField(max_length=1024)),
('identifier', models.CharField(max_length=1024)),
('date_of_birth', models.DateField(null=True)),
('flag', models.ForeignKey(on_delete=None, to='bluetail.Flag')),
],
),
migrations.AddConstraint(
model_name='externalperson',
constraint=models.UniqueConstraint(fields=('name', 'scheme', 'identifier', 'flag'), name='unique_person_flag'),
),
]
|
StarcoderdataPython
|
385630
|
<filename>src/skmultiflow/demos/_test_sam_knn_prequential.py
import numpy as np
from skmultiflow.classification.lazy.sam_knn import SAMKNN
from skmultiflow.data.file_stream import FileStream
from skmultiflow.evaluation.evaluate_prequential import EvaluatePrequential
from skmultiflow.core.pipeline import Pipeline
def demo(output_file=None, instances=50000):
""" _test_sam_knn_prequential
This demo shows how to produce a prequential evaluation.
The first thing needed is a stream. For this case we use a file stream
which gets its samples from the movingSquares.csv file, inside the datasets
folder.
Then we need to setup a classifier, which in this case is an instance
of scikit-multiflow's SAMKNN. Then, optionally we create a
pipeline structure, initialized on that classifier.
The evaluation is then run.
Parameters
----------
output_file: string
The name of the csv output file
instances: int
The evaluation's max number of instances
"""
# Setup the File Stream
stream = FileStream("../datasets/movingSquares.csv", -1, 1)
# stream = WaveformGenerator()
stream.prepare_for_use()
# Setup the classifier
# classifier = SGDClassifier()
# classifier = KNNAdwin(k=8, max_window_size=2000,leaf_size=40, categorical_list=None)
# classifier = OzaBaggingAdwin(h=KNN(k=8, max_window_size=2000, leaf_size=30, categorical_list=None))
classifier = SAMKNN(n_neighbors=5, knnWeights='distance', maxSize=1000, STMSizeAdaption='maxACCApprox',
useLTM=False)
# classifier = SGDRegressor()
# classifier = PerceptronMask()
# Setup the pipeline
# pipe = Pipeline([('Classifier', classifier)])
# Setup the evaluator
evaluator = EvaluatePrequential(pretrain_size=0, max_samples=instances, batch_size=1, n_wait=100, max_time=1000,
output_file=output_file, show_plot=True, metrics=['performance'])
# Evaluate
evaluator.evaluate(stream=stream, model=classifier)
if __name__ == '__main__':
demo()
|
StarcoderdataPython
|
9705026
|
"""
The contents of this file are subject to the Mozilla Public License
Version 1.1 (the "License"); you may not use this file except in
compliance with the License.
You may obtain a copy of the License at http://www.mozilla.org/MPL/
Software distributed under the License is distributed on an "AS IS"
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
License for the specific language governing rights and limitations under
the License.
The Original Code is available at
http://downloads.xmlschemata.org/python/xvif/
The Initial Developer of the Original Code is <NAME>. Portions
created by <NAME> are Copyright (C) 2002. All Rights Reserved.
Contributor(s):
"""
def transform(self, node):
from Ft.Xml.Xslt import Processor, StylesheetReader, DomWriter
from Ft.Xml import InputSource
processor = Processor.Processor()
xreader = StylesheetReader.StylesheetReader()
style=xreader.fromDocument(self.applyElt.dom, baseUri="dummy")
processor.appendStylesheetInstance(style)
factory = InputSource.DefaultFactory
isrc=factory.fromString("dummy", "dummy")
resWriter = DomWriter.DomWriter()
processor.execute(node, isrc, ignorePis=1, writer=resWriter)
dom = resWriter.getResult()
#print dom.firstChild.nodeValue
return dom.firstChild
|
StarcoderdataPython
|
99633
|
# Write a program that asks for the distance of a trip in km.
# Compute the ticket price, charging R$0.50 per km for trips of up to 200 km
# and R$0.45 per km for longer trips.
# my answer
#dist = float(input('Qual a distancia da sua viagem? '))
#print('Voce está prestes a começar uma viagem de {:.1f}Km'.format(dist))
#if dist <= 200:
# print('E o preço da sua passagem será de R${:.2f}'.format(dist * 0.50))
#else:
# print('E o preço da sua passagem será de R${:.2f}'.format(dist * 0.45))
# Gustavo's answer 1
#distancia = float(input('Qual a distancia da sua viagem? '))
#print('Voce está prestes a começar uma viagem de {:.1f}Km'.format(distancia))
#if distancia <= 200:
# preco = distancia * 0.50
#else:
# preco = distancia * 0.45
#print('E o preço da sua passagem será de R${:.2f}'.format(preco))
# Gustavo's answer 2
distancia = float(input('Qual a distancia da sua viagem? '))
print('Voce está prestes a começar uma viagem de {:.1f}Km'.format(distancia))
preco = distancia * 0.50 if distancia <= 200 else distancia * 0.45  # inline if / ternary operator
print('E o preço da sua passagem será de R${:.2f}'.format(preco))
|
StarcoderdataPython
|
3300154
|
<reponame>JiwanChung/tapm
from torch import nn
import torch.nn.functional as F
from exp import ex
from .temporal_corr import TemporalCorrGlobal
from .no_gt import NoGtSos
# from .ss_loss import calc_l2_loss
class AblationJointSS(TemporalCorrGlobal):
def forward(self, batch, **kwargs):
return self._forward(batch, **kwargs)
class AblationNoSS(TemporalCorrGlobal):
def forward(self, batch, **kwargs):
hypo, logit, target, reg_loss, stats, batch = self._forward(batch, **kwargs)
reg_loss = None
return hypo, logit, target, reg_loss, stats, batch
class AblationSplitGen(TemporalCorrGlobal):
def forward(self, batch, **kwargs):
if self.training:
self.fix_gpt(kwargs.get('epoch', 0))
hypo, logit, target, reg_loss, stats, batch = self._forward(batch, **kwargs)
reg_loss = None
return hypo, logit, target, reg_loss, stats, batch
class AblationNoPred(TemporalCorrGlobal):
def get_reg_loss(self, h, c, group_mask):
rank_loss, rank_stats = self.get_rank_loss(h, c, group_mask)
return rank_loss, rank_stats
class AblationNoMatch(TemporalCorrGlobal):
def get_reg_loss(self, h, c, group_mask):
roll_loss, roll_stats = self.get_roll_losses(h, c, group_mask)
return roll_loss, roll_stats
class AblationS(NoGtSos):
def mean_pool_text(self, o):
# BGLC
        return o.mean(dim=2)  # ablation: mean-pool over tokens instead of using only the [sos] token
class AblationLossL2(NoGtSos):
@ex.capture
def __init__(self, transformer, tokenizer, dropout_before, fix_gpt_epoch):
super().__init__(transformer, tokenizer, dropout_before, fix_gpt_epoch)
def calc_l2_loss(self, x1, x2, group_mask=None, margin=None, pool='mean', skip_idx=0):
loss = F.mse_loss(x1, x2, reduction=pool)
acc = 0
return loss, acc
def run_rank_loss(self, x1, x2, group_mask, skip_idx=0):
x1 = x1.view(-1, x1.shape[-1])
x2 = x2.view(-1, x2.shape[-1])
group_mask = group_mask.view(-1)
loss1, acc1 = self.calc_l2_loss(x1, x2, group_mask,
margin=self.margin, pool='mean',
skip_idx=skip_idx)
loss2, acc2 = self.calc_l2_loss(x2, x1, group_mask,
margin=self.margin, pool='mean',
skip_idx=-skip_idx)
return loss1, acc1, loss2, acc2
class AblationLossCycle(NoGtSos):
@ex.capture
def __init__(self, transformer, tokenizer, dropout_before, fix_gpt_epoch):
super().__init__(transformer, tokenizer, dropout_before, fix_gpt_epoch)
dim = self.gpt_dim
self.cycle_linears = nn.ModuleDict({
'vis_to_txt': nn.Linear(dim, dim),
'txt_to_vis': nn.Linear(dim, dim),
})
def calc_cycle_loss(self, x1, x2, group_mask=None, pool='mean', skip_idx=0):
l1 = F.mse_loss(self.cycle_linears['vis_to_txt'](x2), x1.detach(), reduction=pool)
l2 = F.mse_loss(self.cycle_linears['txt_to_vis'](x1), x2.detach(), reduction=pool)
return l1, l2
def get_rank_loss(self, h, c, group_mask, skip_idx=0):
x1 = F.normalize(h)
x2 = F.normalize(c)
l1, l2 = self.run_rank_loss(x1, x2, group_mask, skip_idx)
loss = l1 + l2
# stats = {'rank_accuracy': acc}
stats = {'loss_ttov': l1.item(), 'loss_vtot': l2.item()}
return loss, stats
def run_rank_loss(self, x1, x2, group_mask, skip_idx=0):
x1 = x1.view(-1, x1.shape[-1])
x2 = x2.view(-1, x2.shape[-1])
group_mask = group_mask.view(-1)
l1, l2 = self.calc_cycle_loss(x1, x2, group_mask, pool='mean', skip_idx=skip_idx)
return l1, l2
def get_reg_loss(self, h, c, group_mask):
loss, stats = self.get_rank_loss(h, c, group_mask)
return loss, stats
class AblationIterSS(TemporalCorrGlobal):
@classmethod
def get_args(cls):
return {
**super().get_args(),
'iter_ss': 1
}
@ex.capture
def __init__(self, transformer, tokenizer, dropout_before, fix_gpt_epoch,
iter_ss):
super().__init__(transformer, tokenizer, dropout_before, fix_gpt_epoch)
self.iter_ss = iter_ss
self.current_epoch = -1
def fix_gpt(self, epoch):
if epoch != self.current_epoch:
if (epoch + 1) % self.iter_ss == 0:
# revert ss
if not self.net.transformer.weight_freezed:
self._fix_gpt()
self.net.transformer.weight_freezed = True
else:
self._fix_gpt(False)
self.net.transformer.weight_freezed = False
self.reset_optimizer = True
self.current_epoch = epoch
|
StarcoderdataPython
|
5182068
|
<gh_stars>1-10
"""
clic.region.metadata: Tag metadata.* regions
********************************************
Add metadata.* tags to regions.
metadata.title / metadata.author regions
----------------------------------------
If there are 2 lines at the start, with an empty line after, we treat this as a
title / author combination::
>>> run_tagger('''
... Fly Fishing
... <NAME>
...
... INTRODUCTION.
...
... Fly Fishing: Memories of Angling Days, also published as Fly Fishing by
... '''.strip(), tagger_metadata)
[('metadata.title', 0, 11, None, 'Fly Fishing'),
('metadata.author', 12, 23, None, '<NAME>')]
Anything that doesn't match this gets ignored::
>>> run_tagger('''
... Fly Fishing
... <NAME>
... INTRODUCTION.
...
... Fly Fishing: Memories of Angling Days, also published as Fly Fishing by
... '''.strip(), tagger_metadata)
[]
"""
import re
TITLE_AUTHOR_REGEX = re.compile(r"^(.+)\n(.+)\n\n")
def tagger_metadata(book):
"""
Add metadata.* tags to regions
"""
m = re.match(TITLE_AUTHOR_REGEX, book["content"])
if not m:
# Can't find title/author, nothing to do
return
if len(book.get("metadata.title", [])) == 0:
# Title (should) be first line
book["metadata.title"] = [m.span(1)]
if len(book.get("metadata.author", [])) == 0:
# Author (should) be second line
book["metadata.author"] = [m.span(2)]
|
StarcoderdataPython
|
105371
|
<reponame>OnionIoT/tau-lidar-camera
## Command format
COMMAND_SIZE_TOTAL = 14 ## Command size total
COMMAND_SIZE_HEADER = 4 ## Command header size
COMMAND_SIZE_CHECKSUM = 4 ## Command checksum size
COMMAND_SIZE_OVERHEAD = 8 ## COMMAND_SIZE_HEADER + COMMAND_SIZE_CHECKSUM
COMMAND_START_MARK = 0xF5 ## Command start marking
COMMAND_INDEX_COMMAND = 1 ## Command index
COMMAND_INDEX_DATA = 2 ## Command payload data
## setup commands
COMMAND_SET_INTEGRATION_TIME_3D = 0x00 ## Command to set the integration time for 3D operation
COMMAND_SET_INTEGRATION_TIME_GRAYSCALE = 0x01 ## Command to set the integration time for grayscale
COMMAND_SET_ROI = 0x02 ## Command to set the region of interest
COMMAND_SET_BINNING = 0x03 ## Command to set the binning
COMMAND_SET_MODE = 0x04 ## Command to set the mode
COMMAND_SET_MODULATION_FREQUENCY = 0x05 ## Command to set the modulation frequency
COMMAND_SET_DLL_STEP = 0x06 ## Command to set the DLL step
COMMAND_SET_FILTER = 0x07 ## Command to set the filter parameters
COMMAND_SET_OFFSET = 0x08 ## Command to set the offset
COMMAND_SET_MINIMAL_AMPLITUDE = 0x09 ## Command to set the minimal amplitude
COMMAND_SET_DCS_FILTER = 0x0A ## Command to set the DCS filter
COMMAND_SET_GAUSSIAN_FILTER = 0x0B ## Command to set the Gaussian filter
COMMAND_SET_FRAME_RATE = 0x0C ## Command to set/limit the frame rate
COMMAND_SET_HDR = 0x0D
COMMAND_SET_MODULATION_CHANNEL = 0x0E ## Command to set the modulation channel
COMMAND_SET_FILTER_SINGLE_SPOT = 0x0F ## Command to set the temporal filter for the single spot
## acquisition commands
COMMAND_GET_DISTANCE = 0x20 ## Command to request distance data
COMMAND_GET_AMPLITUDE = 0x21 ## Command to request amplitude data
COMMAND_GET_DISTANCE_AMPLITUDE = 0x22 ## Command to request distance and amplitude data
COMMAND_GET_DCS_DISTANCE_AMPLITUDE = 0x23 ## Command to request distance, amplitude and DCS data at once
COMMAND_GET_GRAYSCALE = 0x24 ## Command to request grayscale data
COMMAND_GET_DCS = 0x25 ## Command to request DCS data
COMMAND_SET_AUTO_ACQUISITION = 0x26 ## Command to enable/disable the auto acquisition
COMMAND_GET_INTEGRATION_TIME_3D = 0x27 ## Command to read the integration time. Important when using automatic mode
COMMAND_STOP_STREAM = 0x28 ## Command to stop the stream
COMMAND_GET_DISTANCE_GRAYSCALE = 0x29 ## Command to request distance and grayscale
COMMAND_GET_IDENTIFICATION = 0x47 ## Command to identification
COMMAND_GET_CHIP_INFORMATION = 0x48 ## Command to read the chip information
COMMAND_GET_FIRMWARE_RELEASE = 0x49 ## Command to read the firmware release
COMMAND_GET_PRODUCTION_INFO = 0x50 ## Command to get the production info
## MODULATION
COMMAND_SET_MODULATION_FREQUENCY = 0x05
VALUE_10MHZ = 0 ## Value for 10MHz for command "COMMAND_SET_MODULATION_FREQUENCY"
VALUE_20MHZ = 1 ## Value for 20MHz for command "COMMAND_SET_MODULATION_FREQUENCY"
## 635 op mode
MODE_BEAM_A = 0 ## Normal operation with illumination beam A
MODE_BEAM_B_MANUAL = 1 ## Normal operation with illumination beam B (all settings by user, same as)
MODE_BEAM_B_RESULT = 2 ## Beam B with calibrated ROI, only one distance as result
MODE_BEAM_B_RESULT_DATA = 3 ## Beam B with calibrated ROI, one distance and the pixels as result
MODE_BEAM_AB_RESULT = 4 ## Beam A and B operating with calibrated ROI and only one distance as result
MODE_BEAM_AB_AUTO_RESULT = 5 ## Beam A and B with automatic selection
MODE_BEAM_AB_INTERLEAVED_DATA = 6 ## Beam A and B interleaved output
## Stream mode
SINGLE = 0 ## Single frame mode
AUTO_REPEAT = 1 ## Auto repeat frame using same parameters
STREAM = 3 ## Stream mode
## HDR
HDR_OFF = 0 ## HDR off
HDR_SPATIAL = 1 ## Spatial HDR
HDR_TEMPORAL = 2 ## Temporal HDR
## IntegrationTime
INDEX_INDEX_3D = 2 ## Index of the integration time 3d index
INDEX_INTEGRATION_TIME_3D = 3 ## Index of the integration time 3d
## AMPLITUDE
INDEX_INDEX_AMPLITUDE = 2 ## Index of the index
INDEX_AMPLITUDE = 3 ## Index of the minimal amplitude
## ROI
INDEX_ROI_X_MIN = 2 ## Index of ROI X MIN
INDEX_ROI_Y_MIN = 4 ## Index of ROI Y MIN
INDEX_ROI_X_MAX = 6 ## Index of ROI X MAX
INDEX_ROI_Y_MAX = 8 ## Index of ROI Y MAX
## Data format
DATA_START_MARK = 0xFA ## Data start marking
DATA_INDEX_LENGTH = 2 ## Data length
DATA_INDEX_TYPE = 1 ## Data type
## firmware returned data type
DATA_ACK = 0x00 ## Acknowledge from sensor to host
DATA_NACK = 0x01 ## Not acknowledge from sensor to host
DATA_IDENTIFICATION = 0x02 ## Identification to identify the device
DATA_DISTANCE = 0x03 ## Distance information
DATA_AMPLITUDE = 0x04 ## Amplitude information
DATA_DISTANCE_AMPLITUDE = 0x05 ## Distance and amplitude information
DATA_GRAYSCALE = 0x06 ## Grayscale information
DATA_DCS = 0x07 ## DCS data
DATA_DCS_DISTANCE_AMPLITUDE = 0x08 ## DCS, distance and amplitude all together
DATA_INTEGRATION_TIME = 0x09 ## Integration time, answer to COMMAND_GET_INTEGRATION_TIME_3D
DATA_DISTANCE_GRAYSCALE = 0x0A ## Distance and grayscale data
DATA_LENS_CALIBRATION_DATA = 0xF7 ## Lens calibration data
DATA_TRACE = 0xF8 ## Trace data
DATA_PRODUCTION_INFO = 0xF9 ## Production info
DATA_CALIBRATION_DATA = 0xFA ## Calibration data
DATA_REGISTER = 0xFB ## Register data
DATA_TEMPERATURE = 0xFC ## Temperature data
DATA_CHIP_INFORMATION = 0xFD ## Chip information data
DATA_FIRMWARE_RELEASE = 0xFE ## Firmware release
DATA_ERROR = 0xFF ## Error number
## CHIP
MASK_CHIP_TYPE_DEVICE = 0x00FFFF00 ## Chip information mask
SHIFT_CHIP_TYPE_DEVICE = 8 ## Chip information shift
CHIP_INFORMATION_DATA_SIZE = 4 ## Chip information data size
## IDENTITY
DATA_IDENTIFICATION = 0x02
DATA_FIRMWARE_RELEASE = 0xFE
IDENTIFICATION_DATA_SIZE = 4 ## Chip information data size
INDEX_WAFER_ID = 6
INDEX_CHIP_ID = 4
## Firmware release
FIRMWARE_RELEASE_DATA_SIZE = 4 ## Chip information data size
MASK_VERSION = 0x000000FF
SHIFT_VERSION = 0
## TOF 635 image
TOF_635_IMAGE_HEADER_SIZE = 80 ## 635 IMAGE HEADER SIZE
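# --- Hedged framing sketch (not part of the original file) ---
# One plausible way the size/index constants above combine into a command
# frame: start mark, command byte, zero-padded payload, trailing checksum.
# The byte layout and the additive placeholder checksum are assumptions; the
# real firmware checksum algorithm is not documented here.
def build_command(command_id, payload=b""):
    payload_size = COMMAND_SIZE_TOTAL - COMMAND_SIZE_CHECKSUM - COMMAND_INDEX_DATA
    padded = payload.ljust(payload_size, b"\x00")[:payload_size]  # pad/truncate to fit
    frame = bytearray(COMMAND_SIZE_TOTAL)
    frame[0] = COMMAND_START_MARK
    frame[COMMAND_INDEX_COMMAND] = command_id
    frame[COMMAND_INDEX_DATA:COMMAND_INDEX_DATA + payload_size] = padded
    checksum = sum(frame[:-COMMAND_SIZE_CHECKSUM]) & 0xFFFFFFFF  # placeholder only
    frame[-COMMAND_SIZE_CHECKSUM:] = checksum.to_bytes(COMMAND_SIZE_CHECKSUM, "little")
    return bytes(frame)

# Example: build_command(COMMAND_GET_DISTANCE) yields a 14-byte frame.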
|
StarcoderdataPython
|
294623
|
<gh_stars>0
import logging
from captcha.fields import ReCaptchaField
from django import forms
from django.contrib.auth import get_user_model
from .models import Profile
logger = logging.getLogger(__name__)
class UserForm(forms.ModelForm):
captcha = ReCaptchaField(attrs={'_no_label': True, '_no_errors': True})
def clean_email(self):
email = self.cleaned_data.get('email')
if email:
email = email.lower()
return email
class Meta:
model = get_user_model()
fields = ['email']
    def save(self, commit=True, profile_kwargs=None):
        """Sends out email with pw reset link if user is created."""
        if profile_kwargs is None:  # avoid sharing a mutable default argument
            profile_kwargs = {}
user = super().save(commit=commit)
if commit:
Profile.objects.create(user=user, **profile_kwargs)
logger.info(f'Created profile for {user}')
user.send_activation_mail()
return user
class UpdateEmailForm(forms.ModelForm):
password = forms.CharField(label='Bestätigen Sie Ihr Passwort', widget=forms.PasswordInput)
def clean_password(self):
password = self.cleaned_data['password']
if not self.instance.check_password(password):
raise forms.ValidationError('Invalid password')
return password
class Meta:
model = get_user_model()
fields = ['email']
class ProfileForm(forms.ModelForm):
class Meta:
model = Profile
fields = ['title', 'first_name', 'last_name', 'organization', 'street', 'postcode', 'city', 'country']
_field_class = {
'title': 'three wide'
}
# Layout used by django-semanticui
layout = [
("Three Fields",
("Field", "title"),
("Field", "first_name"),
("Field", "last_name")),
("Field", "organization"),
("Field", "street"),
("Two Fields",
("Field", "postcode"),
("Field", "city")),
]
|
StarcoderdataPython
|
5078680
|
<filename>processing/shots_Distribution_Career.py<gh_stars>10-100
import requests
import urllib
import csv
import os
import sys
from time import time
from py_data_getter import data_getter
from py_db import db
db = db('nba_shots')
def initiate():
print "-------------------------"
print "shots_Distribution_Career.py"
start_time = time()
process()
end_time = time()
elapsed_time = float(end_time - start_time)
print "time elapsed (in seconds): " + str(elapsed_time)
print "time elapsed (in minutes): " + str(elapsed_time/60.0)
print "shots_Distribution_Career.py"
print "-------------------------"
def process():
for _type in ('Player', 'Team', 'League'):
print '\t' + _type
if _type == 'Player':
_join = 'JOIN players USING (player_id)\n\t'
_career = 'CONCAT(GREATEST(1996, from_year),to_year)'
else:
_join = ''
_career = "'1'"
query = """SELECT *
FROM(
SELECT
%s_id, %s AS career, season_type, shot_zone_basic, shot_zone_area,
all_games AS games,
SUM(attempts) AS attempts,
SUM(attempts)/all_atts AS zone_pct,
SUM(points)/SUM(attempts)/2 AS efg
FROM shots_%s_Breakdown
%sJOIN(
SELECT %s_id, season_type, SUM(games) AS all_games, SUM(attempts) AS all_atts
FROM shots_%s_Breakdown
WHERE shot_zone_basic = 'all'
AND shot_zone_area = 'all'
GROUP BY %s_id, season_type
) allatts USING (%s_id, season_type)
GROUP BY %s_id, shot_zone_basic, shot_zone_area, season_type
) a
ORDER BY %s_id ASC, shot_zone_basic ASC, shot_zone_area ASC, season_type DESC
"""
q = query % (_type, _career, _type, _join, _type, _type, _type, _type, _type, _type)
# raw_input(q)
res = db.query(q)
entries = []
_id = '%s_id' % (_type.lower())
for row in res:
type_id, career, season_type, shot_zone_basic, shot_zone_area, games, attempts, zone_pct, efg = row
entry = {_id:type_id, "season_id":career, "season_type":season_type, "shot_zone_basic":shot_zone_basic, "shot_zone_area":shot_zone_area, "games":games, "attempts":attempts, "zone_pct":zone_pct, "efg":efg}
entries.append(entry)
table = "shots_%s_Distribution_Career" % (_type)
if entries != []:
for i in range(0, len(entries), 1000):
db.insertRowDict(entries[i: i + 1000], table, insertMany=True, replace=True, rid=0,debug=1)
db.conn.commit()
if __name__ == "__main__":
initiate()
|
StarcoderdataPython
|
11267233
|
<reponame>rcbops/tempest-zigzag<filename>tests/test_tempest_test_list.py
from tempest_zigzag.tempest_test_list import TempestTestList
from tempest_zigzag.tempest_testcase_list import TempestTestcaseList
class TestTempestTestList(object):
def test_find_by_classname(self, file_test_list):
tl = TempestTestList(file_test_list)
for test in tl.find_tests_by_classname(tl[0].classname):
assert type(test) is TempestTestcaseList
assert tl[0].classname == test.classname
|
StarcoderdataPython
|
6493698
|
from utils.enums import ScrollEnum
from utils.context_csv import CSVCustom
from scrapper.base import BaseReviewScrapper
from logs import default_logger
class AppleReviewScrapper(BaseReviewScrapper):
RE_ASSERT_URL = '^https://apps.apple.com'
CARD_XPATH = r'/html/body/div[4]/div/main/div/div/div/section/div[2]/div[{}]/div[2]'
NAME_XPATH = r'/html/body/div[4]/div/main/div/div/div/section/div[2]/div[{}]/div[2]/div/span[1]'
DATE_XPATH = r'/html/body/div[4]/div/main/div/div/div/section/div[2]/div[{}]/div[2]/div/time'
STAR_XPATH = r'/html/body/div[4]/div/main/div/div/div/section/div[2]/div[{}]/div[2]/figure'
TITLE_XPATH = r'/html/body/div[4]/div/main/div/div/div/section/div[2]/div[{}]/div[2]/h3'
DESCRIPTIONS_XPATH = r'/html/body/div[4]/div/main/div/div/div/section/div[2]/div[{}]/div[2]/blockquote/div'
def run(self, n: int, output_file: str):
""" Execute the scrapper and save all data in a csv file
:param n: how many data to be extracted
:type: int
:param output_file: csv file path where data will be saved
:type: str
"""
with self.driver as web:
web.get(self.url)
with CSVCustom(output_file, ['name', 'date', 'title', 'star', 'description']) as csv:
n_length = len(str(n))
not_found = 0
for i in range(1, n + 1):
default_logger.debug(f'getting: {str(i).rjust(n_length, "0")}')
# find card element
if web.wait_find_element_by_xpath(self.CARD_XPATH.format(i)):
# resetting not_found counter
not_found = 0
# getting all web elements
name = web.find_element_by_xpath(self.NAME_XPATH.format(i)).text
date = web.find_element_by_xpath(self.DATE_XPATH.format(i)).text
star = web.find_element_by_xpath(self.STAR_XPATH.format(i)).get_attribute('aria-label')
title = web.find_element_by_xpath(self.TITLE_XPATH.format(i)).text
descriptions_element = web.find_element_by_xpath(self.DESCRIPTIONS_XPATH.format(i))
description = '\n'.join([description.text for description in descriptions_element.find_elements_by_tag_name('p')])
# data to dict
data = self.to_dict(name=name, date=date, star=star, title=title, description=description)
# saving to csv file
csv.write_row(data)
else:
default_logger.warning(f'not found the element: {not_found}')
# scroll the page up to unsure that the page will load more data
web.scroll_page('/html', ScrollEnum.UP)
# increment 1
not_found += 1
                        # if 5 consecutive elements were not found, stop
if not_found == 5:
break
# scroll down the page to load more data
web.scroll_page('/html', ScrollEnum.DOWN)
else:
default_logger.debug(f'ended with {i} elements')
return
                default_logger.error(f'ended: no more elements found')
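# --- Hypothetical usage sketch (not part of the original module); the
# constructor arguments are assumptions about BaseReviewScrapper's interface ---
# scrapper = AppleReviewScrapper(url='https://apps.apple.com/us/app/example/id000000000')
# scrapper.run(n=100, output_file='reviews.csv')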
|
StarcoderdataPython
|