ext (string, 9 classes) | sha (string, length 40) | content (string, 3–1.04M chars) |
---|---|---|
py | 1a50a0e77f04ab7df9104136f597d7edc945f00d | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.test import TestCase
from prereq_map.models.course_title import CourseTitle
from prereq_map.utils.typeahead import get_course_typeahead
import pandas as pd
class TestCourseTitle(TestCase):
def test_titles(self):
dataframe = pd.DataFrame(
{'department_abbrev': ['CSE', 'LAW'],
'course_number': [142, 354],
'course_college': ['School of Arts and Sci', 'Law School'],
'long_course_title': ['Intro To Java', 'Bird Law']
}
)
CourseTitle.update_titles(dataframe)
self.assertEqual(len(CourseTitle.objects.all()), 2)
title = CourseTitle().get_course_title("CSE 142")
self.assertEqual(title, "Intro To Java")
with self.assertRaises(CourseTitle.DoesNotExist):
CourseTitle.get_course_title("CSE 500")
def test_typeahead(self):
CourseTitle(department_abbrev="CSE",
course_number="142",
long_course_title="Intro to Comp Sci").save()
CourseTitle(department_abbrev="MATH",
course_number="123",
long_course_title="Counting by Numbers").save()
ct_typeahead = get_course_typeahead()
self.assertEqual(len(ct_typeahead), 2)
self.assertEqual(ct_typeahead[0], "CSE 142: Intro to Comp Sci")
|
py | 1a50a39b96229aad0fbe4d11b9d4899df2b6fb8e | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class FormList(ListResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. """
def __init__(self, version):
"""
Initialize the FormList
:param Version version: Version that contains the resource
:returns: twilio.rest.verify.v2.form.FormList
:rtype: twilio.rest.verify.v2.form.FormList
"""
super(FormList, self).__init__(version)
# Path Solution
self._solution = {}
def get(self, form_type):
"""
Constructs a FormContext
:param form_type: The Type of this Form
:returns: twilio.rest.verify.v2.form.FormContext
:rtype: twilio.rest.verify.v2.form.FormContext
"""
return FormContext(self._version, form_type=form_type, )
def __call__(self, form_type):
"""
Constructs a FormContext
:param form_type: The Type of this Form
:returns: twilio.rest.verify.v2.form.FormContext
:rtype: twilio.rest.verify.v2.form.FormContext
"""
return FormContext(self._version, form_type=form_type, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Verify.V2.FormList>'
class FormPage(Page):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. """
def __init__(self, version, response, solution):
"""
Initialize the FormPage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:returns: twilio.rest.verify.v2.form.FormPage
:rtype: twilio.rest.verify.v2.form.FormPage
"""
super(FormPage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of FormInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.verify.v2.form.FormInstance
:rtype: twilio.rest.verify.v2.form.FormInstance
"""
return FormInstance(self._version, payload, )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Verify.V2.FormPage>'
class FormContext(InstanceContext):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. """
def __init__(self, version, form_type):
"""
Initialize the FormContext
:param Version version: Version that contains the resource
:param form_type: The Type of this Form
:returns: twilio.rest.verify.v2.form.FormContext
:rtype: twilio.rest.verify.v2.form.FormContext
"""
super(FormContext, self).__init__(version)
# Path Solution
self._solution = {'form_type': form_type, }
self._uri = '/Forms/{form_type}'.format(**self._solution)
def fetch(self):
"""
Fetch the FormInstance
:returns: The fetched FormInstance
:rtype: twilio.rest.verify.v2.form.FormInstance
"""
payload = self._version.fetch(method='GET', uri=self._uri, )
return FormInstance(self._version, payload, form_type=self._solution['form_type'], )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Verify.V2.FormContext {}>'.format(context)
class FormInstance(InstanceResource):
""" PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. """
class FormTypes(object):
FORM_PUSH = "form-push"
def __init__(self, version, payload, form_type=None):
"""
Initialize the FormInstance
:returns: twilio.rest.verify.v2.form.FormInstance
:rtype: twilio.rest.verify.v2.form.FormInstance
"""
super(FormInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'form_type': payload.get('form_type'),
'forms': payload.get('forms'),
'form_meta': payload.get('form_meta'),
'url': payload.get('url'),
}
# Context
self._context = None
self._solution = {'form_type': form_type or self._properties['form_type'], }
@property
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: FormContext for this FormInstance
:rtype: twilio.rest.verify.v2.form.FormContext
"""
if self._context is None:
self._context = FormContext(self._version, form_type=self._solution['form_type'], )
return self._context
@property
def form_type(self):
"""
:returns: The Type of this Form
:rtype: FormInstance.FormTypes
"""
return self._properties['form_type']
@property
def forms(self):
"""
:returns: Object that contains the available forms for this type.
:rtype: dict
"""
return self._properties['forms']
@property
def form_meta(self):
"""
:returns: Additional information for the available forms for this type.
:rtype: dict
"""
return self._properties['form_meta']
@property
def url(self):
"""
:returns: The URL to access the forms for this type.
:rtype: unicode
"""
return self._properties['url']
def fetch(self):
"""
Fetch the FormInstance
:returns: The fetched FormInstance
:rtype: twilio.rest.verify.v2.form.FormInstance
"""
return self._proxy.fetch()
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
return '<Twilio.Verify.V2.FormInstance {}>'.format(context)
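# A minimal usage sketch (not part of the generated file; account credentials
# and Verify preview access are assumed): the list resource is normally
# reached through a configured client, and a FormInstance fetched by type.
#
#     from twilio.rest import Client
#     client = Client(account_sid, auth_token)
#     form = client.verify.v2.forms('form-push').fetch()
#     print(form.form_type, form.url)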
|
py | 1a50a3e9ba008c15bf6b7b0d456acf3bc52accc1 | import random
from perf_load.perf_req_gen import RequestGenerator
class RGSeqReqs(RequestGenerator):
def __init__(self, *args, reqs=list(), next_random: bool=False, **kwargs):
super().__init__(*args, **kwargs)
self._req_idx = -1
self._next_idx = self._rand_idx if next_random else self._seq_idx
if not isinstance(reqs, list):
raise RuntimeError("Bad Requests sequence provided")
self._reqs_collection = []
for reqc, prms in reqs:
if not issubclass(reqc, RequestGenerator):
raise RuntimeError("Bad Request class provided")
cnt = 1
param = {}
if isinstance(prms, int) and prms > 0:
cnt = prms
elif isinstance(prms, dict):
cnt = prms.get('count', 1)
param = prms
else:
raise RuntimeError("Bad Request params provided")
new_req = reqc(*args, **param, **kwargs)
for i in range(0, cnt):
self._reqs_collection.append(new_req)
if len(self._reqs_collection) == 0:
raise RuntimeError("At least one class should be provided")
async def on_pool_create(self, pool_handle, wallet_handle, submitter_did, sign_req_f, send_req_f, *args, **kwargs):
for req_builder in set(self._reqs_collection):
await req_builder.on_pool_create(pool_handle, wallet_handle, submitter_did, sign_req_f, send_req_f, *args, **kwargs)
def _seq_idx(self):
return (self._req_idx + 1) % len(self._reqs_collection)
def _rand_idx(self):
return random.randint(0, len(self._reqs_collection) - 1)
def _gen_req_data(self):
self._req_idx = self._next_idx()
return self._reqs_collection[self._req_idx]._gen_req_data()
def get_label(self):
return self._reqs_collection[self._req_idx].get_label()
async def _gen_req(self, submit_did, req_data):
req_gen = self._reqs_collection[self._req_idx]
return await req_gen._gen_req(submit_did, req_data)
async def on_request_generated(self, req_data, gen_req):
for r in self._reqs_collection:
await r.on_request_generated(req_data, gen_req)
async def on_request_replied(self, req_data, req, resp_or_exp):
for r in self._reqs_collection:
await r.on_request_replied(req_data, req, resp_or_exp)
def req_did(self):
return self._reqs_collection[self._req_idx].req_did()
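# A minimal usage sketch (RGNym and RGAttrib are hypothetical stand-ins for
# concrete RequestGenerator subclasses, not part of this module):
#
#     gen = RGSeqReqs(*gen_args, reqs=[(RGNym, 3), (RGAttrib, {'count': 2})],
#                     next_random=False, **gen_kwargs)
#
# One RGNym instance is appended to the collection three times and one
# RGAttrib instance twice, so the counts act as weights; _gen_req_data() then
# walks the collection in round-robin order (or picks uniformly at random
# when next_random=True).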
|
py | 1a50a64d6906052996681ed6a0f72a280dd4daea | """Tests that the config singleton is working properly
"""
from os.path import expanduser
from os.path import join
from unittest import TestCase
from mock import patch
from testfixtures import TempDirectory
from nose.tools import eq_
from nose.tools import raises
from ..config import get_config_files
from ..config import load_yaml
class TestConfig(TestCase):
"""Tests for config singleton
"""
def setUp(self):
self.test_yaml_file = '\n'.join([
'test:',
' test_sub:',
' - test_sub1: foo',
' test_sub1_other: bar',
' - test_sub2: foobar',
])
self.test_config_dict = {
'test': {
'test_sub': [
{
'test_sub1': 'foo',
'test_sub1_other': 'bar',
},
{
'test_sub2': 'foobar',
}
]
}
}
@staticmethod
@patch.dict('os.environ', {}, clear=True)
    def test_get_config_files_no_environment_variable():
        """Tests that correct config file paths are returned when there's no
        environment variable
        """
expected = [
'/etc/dataduct.cfg',
expanduser('~/.dataduct/dataduct.cfg'),
]
result = get_config_files()
eq_(result, expected)
@staticmethod
@patch.dict('os.environ', {'DATADUCT_CONFIG_PATH': '/test/test.cfg'})
    def test_get_config_files_with_environment_variable():
        """Tests that correct config file paths are returned when there is
        an environment variable
        """
expected = [
'/etc/dataduct.cfg',
expanduser('~/.dataduct/dataduct.cfg'),
'/test/test.cfg',
]
result = get_config_files()
eq_(result, expected)
def test_load_yaml_works_correctly(self):
"""Tests that the yaml file can be loaded correctly
"""
with TempDirectory() as d:
d.write('test.yaml', self.test_yaml_file.encode('utf8'))
result = load_yaml([join(d.path, 'test.yaml')])
eq_(result, self.test_config_dict)
@staticmethod
@raises(IOError)
def test_no_config_file_raises():
"""Tests that an exception is raised if no yaml file path is passed in
"""
load_yaml([])
@staticmethod
@raises(IOError)
def test_cannot_find_config_file_raises():
"""Tests that an exception is raised if it cannot find any yaml files
"""
with TempDirectory() as d:
with TempDirectory() as d2:
load_yaml([join(d.path, 'test.cfg'),
join(d2.path, 'test.cfg')])
|
py | 1a50a677014a14a7520e0ca28fc53d4d526c6487 | """cfmfile - Interface to code fragments on file"""
import struct
import sys
import Res
import macfs
Error = 'cfmfile.Error'
READ = 1
WRITE = 2
smAllScripts = -3
BUFSIZE = 0x100000
class FragmentInfo:
"""Information on a single fragment"""
def __init__(self):
self.arch = 'pwpc'
self.current_version = 0
self.oldest_version = 0
self.stacksize = 0
self.libdir = 0
self.fragtype = 1
self.location = 1
self.offset = 0
self.length = 0
self.res_0 = 0
self.res_1 = 0
self.name = ''
self.ifp = None
def load(self, data):
if len(data) < 43:
raise Error, 'Not enough data in cfrg resource'
self.arch = data[:4]
self.update, self.current_version, self.oldest_version, \
self.stacksize, self.libdir, self.fragtype, self.location, \
self.offset, self.length, self.res_0, self.res_1, length = \
struct.unpack("llllhbbllllh", data[4:42])
namelen = ord(data[42])
self.name = data[43:43+namelen]
if len(self.name) != namelen:
raise Error, 'Not enough data in cfrg resource'
return length
def save(self):
length = (43+len(self.name)+3) & ~3
data = self.arch + struct.pack("llllhbbllllh", self.update, \
self.current_version, self.oldest_version, self.stacksize, \
self.libdir, self.fragtype, self.location, self.offset, \
self.length, self.res_0, self.res_1, length)
data = data + chr(len(self.name)) + self.name
data = data + ('\0'*(length-len(data)))
return data
def copydata(self, ofp):
"""Copy fragment data to a new file, updating self.offset"""
if self.location != 1:
raise Error, 'Can only copy kOnDiskFlat (data fork) fragments'
if not self.ifp:
raise Error, 'No source file for fragment'
# Find out real length (if zero)
if self.length == 0:
self.ifp.seek(0, 2)
self.length = self.ifp.tell()
# Position input file and record new offset from output file
self.ifp.seek(self.offset)
self.offset = ofp.tell()
l = self.length
while l:
if l > BUFSIZE:
ofp.write(self.ifp.read(BUFSIZE))
l = l - BUFSIZE
else:
ofp.write(self.ifp.read(l))
l = 0
self.ifp = ofp
def setfile(self, ifp):
self.ifp = ifp
class FragmentResource:
def __init__(self, data):
self.load(data)
def load(self, data):
r0, r1, version, r3, r4, r5, r6, nfrag = struct.unpack("llllllll", data[:32])
if version != 1:
raise Error, 'Unsupported cfrg version number %d'%version
data = data[32:]
self.fragments = []
for i in range(nfrag):
f = FragmentInfo()
len = f.load(data)
data = data[len:]
self.fragments.append(f)
if data:
raise Error, 'Spurious data after fragment descriptions'
def save(self):
data = struct.pack("llllllll", 0, 0, 1, 0, 0, 0, 0, len(self.fragments))
for f in self.fragments:
data = data+f.save()
return data
def setfile(self, ifp):
for f in self.fragments:
f.setfile(ifp)
def copydata(self, ofp):
for f in self.fragments:
f.copydata(ofp)
def getfragments(self):
return self.fragments
def addfragments(self, fragments):
self.fragments = self.fragments + fragments
class ResourceCollection:
def __init__(self, fhandle):
self.reslist = []
self.fhandle = fhandle
oldresfile = Res.CurResFile()
Res.UseResFile(fhandle)
Res.SetResLoad(0)
ntypes = Res.Count1Types()
for itype in range(1, 1+ntypes):
type = Res.Get1IndType(itype)
nresources = Res.Count1Resources(type)
for ires in range(1, 1+nresources):
res = Res.Get1IndResource(type, ires)
id, type, name = res.GetResInfo()
self.reslist.append((type, id))
Res.SetResLoad(1)
Res.UseResFile(oldresfile)
def contains(self, type, id):
return (type, id) in self.reslist
def getresource(self, type, id):
oldresfile = Res.CurResFile()
Res.UseResFile(self.fhandle)
Res.SetResLoad(1)
resource = Res.Get1Resource(type, id)
Res.UseResFile(oldresfile)
return resource
def saveresto(self, type, id, fhandle):
oldresfile = Res.CurResFile()
resource = self.getresource(type, id)
id, type, name = resource.GetResInfo()
resource.DetachResource()
Res.UseResFile(fhandle)
resource.AddResource(type, id, name)
Res.UseResFile(oldresfile)
def getreslist(self):
return self.reslist
class CfmFile(ResourceCollection, FragmentResource):
def __init__(self, fsspec):
rfork = Res.FSpOpenResFile(fsspec, READ)
dfork = open(fsspec.as_pathname(), 'rb')
ResourceCollection.__init__(self, rfork)
cfrg_resource = self.getresource('cfrg', 0)
FragmentResource.__init__(self, cfrg_resource.data)
self.setfile(dfork)
def mergecfmfiles(inputs, output):
# Convert inputs/outputs to fsspecs
inputs = map(None, inputs)
for i in range(len(inputs)):
if type(inputs[i]) == type(''):
inputs[i] = macfs.FSSpec(inputs[i])
if type(output) == type(''):
output = macfs.FSSpec(output)
input_list = []
for i in inputs:
input_list.append(CfmFile(i))
# Create output file, if needed
creator, tp = inputs[0].GetCreatorType()
try:
Res.FSpCreateResFile(output, creator, tp, smAllScripts)
except Res.Error:
pass
# Copy fragments
dfork = open(output.as_pathname(), 'wb')
for i in input_list:
i.copydata(dfork)
dfork.close()
# Merge cfrg's
for i in input_list[1:]:
input_list[0].addfragments(i.getfragments())
old_res_file = Res.CurResFile()
rfork = Res.FSpOpenResFile(output, WRITE)
Res.UseResFile(rfork)
# Write cfrg
data = input_list[0].save()
cfrg_resource = Res.Resource(data)
cfrg_resource.AddResource('cfrg', 0, '')
resources_done = [('cfrg', 0)]
# Write other resources
for i in input_list:
todo = i.getreslist()
for tp, id in todo:
if (tp, id) in resources_done:
continue
i.saveresto(tp, id, rfork)
            resources_done.append((tp, id))
def main():
list = []
while 1:
fss, ok = macfs.PromptGetFile("Next input file:", "shlb", "APPL")
if not ok: break
list.append(fss)
if not list:
sys.exit(0)
output, ok = macfs.StandardPutFile("Output file:")
if not ok:
sys.exit(0)
mergecfmfiles(list, output)
if __name__ == '__main__':
main()
|
py | 1a50a7e0ffe6fa514dd678916d10433536c6aef3 | from azureml.core.webservice import AciWebservice
from azureml.core.webservice import Webservice
from azureml.core.image import Image
from azureml.core import Workspace
import sys
import json
# Get workspace
ws = Workspace.from_config()
# Get the Image to deploy details
try:
with open("aml_config/image.json") as f:
config = json.load(f)
except:
print('No new model, thus no deployment on ACI')
sys.exit(0)
image_name = config['image_name']
image_version = config['image_version']
images = Image.list(workspace=ws)
image, = (m for m in images if m.version==image_version and m.name == image_name)
print('From image.json, Image used to deploy webservice on ACI: {}\nImage Version: {}\nImage Location = {}'.format(image.name, image.version, image.image_location))
aciconfig = AciWebservice.deploy_configuration(cpu_cores=1,
auth_enabled=True, # this flag generates API keys to secure access
memory_gb=1,
tags={'name':'prednet', 'framework': 'Keras'},
description='Prednet')
aci_service_name = image_name
print(aci_service_name)
service = Webservice.deploy_from_image(deployment_config = aciconfig,
image = image,
name = aci_service_name,
workspace = ws)
service.wait_for_deployment(True)
print('Deployed ACI Webservice: {} \nWebservice Uri: {}'.format(service.name, service.scoring_uri))
#service=Webservice(name ='aciws0622', workspace =ws)
# Writing the ACI details to /aml_config/aci_webservice.json
aci_webservice = {}
aci_webservice['aci_name'] = service.name
aci_webservice['aci_url'] = service.scoring_uri
with open('aml_config/aci_webservice.json', 'w') as outfile:
json.dump(aci_webservice,outfile)
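# A minimal smoke-test sketch (the request payload is an assumption; adapt it
# to the scoring script's expected schema). Since auth_enabled=True, requests
# must carry one of the generated keys:
#
#     import requests
#     key, _ = service.get_keys()
#     headers = {'Authorization': 'Bearer ' + key,
#                'Content-Type': 'application/json'}
#     resp = requests.post(service.scoring_uri,
#                          data=json.dumps({'data': []}), headers=headers)
#     print(resp.status_code, resp.text)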
|
py | 1a50a88fd668b64f80b90c0ed898d1d9140f3531 | #! /usr/bin/env python
"""
Module with contrast curve generation function.
"""
__author__ = 'C. Gomez, O. Absil @ ULg'
__all__ = ['contrast_curve',
'noise_per_annulus',
'throughput',
'aperture_flux']
import numpy as np
import pandas as pd
import photutils
import inspect
from scipy.interpolate import InterpolatedUnivariateSpline
from scipy import stats
from scipy.signal import savgol_filter
from skimage.draw import disk
from matplotlib import pyplot as plt
from .fakecomp import (cube_inject_companions, frame_inject_companion,
normalize_psf)
from ..conf import time_ini, timing
from ..conf.utils_conf import sep
from ..var import frame_center, dist
def contrast_curve(cube, angle_list, psf_template, fwhm, pxscale, starphot,
algo, sigma=5, nbranch=1, theta=0, inner_rad=1,
wedge=(0,360), fc_snr=100, student=True, transmission=None,
smooth=True, interp_order=2, plot=True, dpi=100, debug=False,
verbose=True, full_output=False, save_plot=None,
object_name=None, frame_size=None, fix_y_lim=(),
figsize=(8, 4), **algo_dict):
""" Computes the contrast curve at a given SIGMA (``sigma``) level for an
ADI cube or ADI+IFS cube. The contrast is calculated as
sigma*noise/throughput. This implementation takes into account the small
sample statistics correction proposed in Mawet et al. 2014.
Parameters
----------
cube : numpy ndarray
The input cube, 3d (ADI data) or 4d array (IFS data), without fake
companions.
angle_list : numpy ndarray
Vector with the parallactic angles.
psf_template : numpy ndarray
Frame with the psf template for the fake companion(s).
PSF must be centered in array. Normalization is done internally.
fwhm: int or float or 1d array, optional
        The Full Width Half Maximum in pixels. It can handle a different
FWHM value for different wavelengths (IFS data).
pxscale : float
Plate scale or pixel scale of the instrument.
starphot : int or float or 1d array
If int or float it corresponds to the aperture photometry of the
non-coronagraphic PSF which we use to scale the contrast. If a vector
is given it must contain the photometry correction for each frame.
algo : callable or function
The post-processing algorithm, e.g. vip_hci.pca.pca.
sigma : int
Sigma level for contrast calculation. Note this is a "Gaussian sigma"
regardless of whether Student t correction is performed (set by the
'student' parameter). E.g. setting sigma to 5 will yield the contrast
curve corresponding to a false alarm probability of 3e-7.
nbranch : int, optional
        Number of branches on which to inject fake companions. Each branch
is tested individually.
theta : float, optional
Angle in degrees for rotating the position of the first branch that by
default is located at zero degrees. Theta counts counterclockwise from
the positive x axis. When working on a wedge, make sure that theta is
located inside of it.
inner_rad : int, optional
Innermost radial distance to be considered in terms of FWHM.
wedge : tuple of floats, optional
Initial and Final angles for using a wedge. For example (-90,90) only
considers the right side of an image.
fc_snr: float optional
Signal to noise ratio of injected fake companions (w.r.t a Gaussian
distribution).
student : bool, optional
        If True, the small sample statistics correction (Student's t
        distribution instead of Gaussian) is applied to the computed contrast.
transmission : tuple of 2 1d arrays, optional
If not None, then the tuple contains a vector with the factors to be
applied to the sensitivity and a vector of the radial distances [px]
where it is sampled (in this order).
smooth : bool, optional
If True the radial noise curve is smoothed with a Savitzky-Golay filter
of order 2.
interp_order : int or None, optional
        If not None, the throughput vector is interpolated with a spline of
        order ``interp_order``. Takes values from 1 to 5. If None, then the
        throughput is not interpolated.
plot : bool, optional
Whether to plot the final contrast curve or not. True by default.
dpi : int optional
Dots per inch for the plots. 100 by default. 300 for printing quality.
imlib : {'opencv', 'ndimage-fourier', 'ndimage-interp', 'vip-fft'}, str opt
Library or method used for image operations (rotations). Opencv is the
default for being the fastest. See description of
`vip_hci.preproc.frame_rotate`.
interpolation: str, opt
See description of ``vip_hci.preproc.frame_rotate`` function
debug : bool, optional
Whether to print and plot additional info such as the noise, throughput,
the contrast curve with different X axis and the delta magnitude instead
of contrast.
verbose : {True, False, 0, 1, 2}, optional
If True or 1 the function prints to stdout intermediate info and timing,
if set to 2 more output will be shown.
full_output : bool, optional
If True returns intermediate arrays.
save_plot: string
If provided, the contrast curve will be saved to this path.
object_name: string
Target name, used in the plot title.
frame_size: int
Frame size used for generating the contrast curve, used in the plot
title.
fix_y_lim: tuple
If provided, the y axis limits will be fixed, for easier comparison
between plots.
**algo_dict
Any other valid parameter of the post-processing algorithms can be
passed here, including e.g. imlib and interpolation.
Returns
-------
datafr : pandas dataframe
Dataframe containing the sensitivity (Gaussian and Student corrected if
Student parameter is True), the interpolated throughput, the distance in
pixels, the noise and the sigma corrected (if Student is True).
If full_output is True then the function returns:
datafr, cube_fc_all, frame_fc_all, frame_nofc and fc_map_all.
frame_fc_all : numpy ndarray
3d array with the 3 frames of the 3 (patterns) processed cubes with
companions.
frame_nofc : numpy ndarray
2d array, PCA processed frame without companions.
fc_map_all : numpy ndarray
3d array with 3 frames containing the position of the companions in the
3 patterns.
"""
if cube.ndim != 3 and cube.ndim != 4:
raise TypeError('The input array is not a 3d or 4d cube')
if cube.ndim == 3 and (cube.shape[0] != angle_list.shape[0]):
raise TypeError('Input parallactic angles vector has wrong length')
if cube.ndim == 4 and (cube.shape[1] != angle_list.shape[0]):
raise TypeError('Input parallactic angles vector has wrong length')
if cube.ndim == 3 and psf_template.ndim != 2:
raise TypeError('Template PSF is not a frame (for ADI case)')
if cube.ndim == 4 and psf_template.ndim != 3:
raise TypeError('Template PSF is not a cube (for ADI+IFS case)')
if transmission is not None:
if not isinstance(transmission, tuple) or not len(transmission) == 2:
raise TypeError('transmission must be a tuple with 2 1d vectors')
if isinstance(fwhm, (np.ndarray,list)):
fwhm_med = np.median(fwhm)
else:
fwhm_med = fwhm
if verbose:
start_time = time_ini()
if isinstance(starphot, float) or isinstance(starphot, int):
msg0 = 'ALGO : {}, FWHM = {}, # BRANCHES = {}, SIGMA = {},'
msg0 += ' STARPHOT = {}'
print(msg0.format(algo.__name__, fwhm_med, nbranch, sigma, starphot))
else:
msg0 = 'ALGO : {}, FWHM = {}, # BRANCHES = {}, SIGMA = {}'
print(msg0.format(algo.__name__, fwhm_med, nbranch, sigma))
print(sep)
# throughput
verbose_thru = False
if verbose == 2:
verbose_thru = True
res_throug = throughput(cube, angle_list, psf_template, fwhm, pxscale,
nbranch=nbranch, theta=theta, inner_rad=inner_rad,
wedge=wedge, fc_snr=fc_snr, full_output=True,
algo=algo, verbose=verbose_thru, **algo_dict)
vector_radd = res_throug[3]
if res_throug[0].shape[0] > 1:
thruput_mean = np.nanmean(res_throug[0], axis=0)
else:
thruput_mean = res_throug[0][0]
frame_fc_all = res_throug[4]
frame_nofc = res_throug[5]
fc_map_all = res_throug[6]
if verbose:
print('Finished the throughput calculation')
timing(start_time)
if interp_order is not None:
# noise measured in the empty frame with better sampling, every px
# starting from 1*FWHM
noise_samp, res_lev_samp, rad_samp = noise_per_annulus(frame_nofc,
separation=1,
fwhm=fwhm_med,
init_rad=fwhm_med,
wedge=wedge)
radmin = vector_radd.astype(int).min()
cutin1 = np.where(rad_samp.astype(int) == radmin)[0][0]
noise_samp = noise_samp[cutin1:]
res_lev_samp = res_lev_samp[cutin1:]
rad_samp = rad_samp[cutin1:]
radmax = vector_radd.astype(int).max()
cutin2 = np.where(rad_samp.astype(int) == radmax)[0][0]
noise_samp = noise_samp[:cutin2 + 1]
res_lev_samp = res_lev_samp[:cutin2 + 1]
rad_samp = rad_samp[:cutin2 + 1]
# interpolating the throughput vector, spline order 2
f = InterpolatedUnivariateSpline(vector_radd, thruput_mean,
k=interp_order)
thruput_interp = f(rad_samp)
# interpolating the transmission vector, spline order 1
if transmission is not None:
trans = transmission[0]
radvec_trans = transmission[1]
f2 = InterpolatedUnivariateSpline(radvec_trans, trans, k=1)
trans_interp = f2(rad_samp)
thruput_interp *= trans_interp
else:
rad_samp = vector_radd
noise_samp = res_throug[1]
res_lev_samp = res_throug[2]
thruput_interp = thruput_mean
if transmission is not None:
            if transmission[0].shape[0] != thruput_interp.shape[0]:
msg = 'Transmiss. and throughput vectors have different length'
raise ValueError(msg)
thruput_interp *= transmission[0]
rad_samp_arcsec = rad_samp * pxscale
# take abs value of the mean residual fluxes otherwise the more
# oversubtraction (negative res_lev_samp), the better the contrast!!
res_lev_samp = np.abs(res_lev_samp)
if smooth:
# smoothing the noise vector using a Savitzky-Golay filter
win = min(noise_samp.shape[0]-2, int(2*fwhm_med))
if win % 2 == 0:
win += 1
noise_samp_sm = savgol_filter(noise_samp, polyorder=2, mode='nearest',
window_length=win)
res_lev_samp_sm = savgol_filter(res_lev_samp, polyorder=2,
mode='nearest', window_length=win)
else:
noise_samp_sm = noise_samp
res_lev_samp_sm = res_lev_samp
# calculating the contrast
if isinstance(starphot, float) or isinstance(starphot, int):
cont_curve_samp = ((sigma * noise_samp_sm + res_lev_samp_sm
)/ thruput_interp) / starphot
else:
cont_curve_samp = ((sigma * noise_samp_sm + res_lev_samp_sm
) / thruput_interp) / np.median(starphot)
cont_curve_samp[np.where(cont_curve_samp < 0)] = 1
cont_curve_samp[np.where(cont_curve_samp > 1)] = 1
# calculating the Student corrected contrast
if student:
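        # Small-sample statistics correction (Mawet et al. 2014): the Gaussian
        # sigma is mapped to the equivalent Student-t quantile for the number
        # of resolution elements in the annulus (n_res_els - 1 degrees of
        # freedom), inflated by sqrt(1 + 1/n_res_els)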
n_res_els = np.floor(rad_samp/fwhm_med*2*np.pi)
ss_corr = np.sqrt(1 + 1/n_res_els)
sigma_corr = stats.t.ppf(stats.norm.cdf(sigma), n_res_els-1)*ss_corr
if isinstance(starphot, float) or isinstance(starphot, int):
cont_curve_samp_corr = ((sigma_corr*noise_samp_sm + res_lev_samp_sm
)/thruput_interp)/starphot
else:
cont_curve_samp_corr = ((sigma_corr*noise_samp_sm + res_lev_samp_sm
)/thruput_interp) / np.median(starphot)
cont_curve_samp_corr[np.where(cont_curve_samp_corr < 0)] = 1
cont_curve_samp_corr[np.where(cont_curve_samp_corr > 1)] = 1
if debug:
plt.rc("savefig", dpi=dpi)
plt.figure(figsize=figsize, dpi=dpi)
# throughput
plt.plot(vector_radd * pxscale, thruput_mean, '.', label='computed',
alpha=0.6)
plt.plot(rad_samp_arcsec, thruput_interp, ',-', label='interpolated',
lw=2, alpha=0.5)
plt.grid('on', which='both', alpha=0.2, linestyle='solid')
plt.xlabel('Angular separation [arcsec]')
plt.ylabel('Throughput')
plt.legend(loc='best')
plt.xlim(0, np.max(rad_samp*pxscale))
# noise
plt.figure(figsize=figsize, dpi=dpi)
plt.plot(rad_samp_arcsec, noise_samp, '.', label='computed', alpha=0.6)
if smooth:
plt.plot(rad_samp_arcsec, noise_samp_sm, ',-',
label='noise smoothed', lw=2, alpha=0.5)
plt.grid('on', alpha=0.2, linestyle='solid')
plt.xlabel('Angular separation [arcsec]')
plt.ylabel('Noise')
plt.legend(loc='best')
plt.xlim(0, np.max(rad_samp_arcsec))
# mean residual level
plt.figure(figsize=figsize, dpi=dpi)
plt.plot(rad_samp_arcsec, res_lev_samp, '.',
label='computed residual level', alpha=0.6)
if smooth:
plt.plot(rad_samp_arcsec, res_lev_samp_sm, ',-',
label='smoothed residual level', lw=2, alpha=0.5)
plt.grid('on', alpha=0.2, linestyle='solid')
plt.xlabel('Angular separation [arcsec]')
plt.ylabel('Mean residual level')
plt.legend(loc='best')
plt.xlim(0, np.max(rad_samp_arcsec))
# plotting
if plot or debug:
if student:
label = ['Sensitivity (Gaussian)',
'Sensitivity (Student-t correction)']
else:
label = ['Sensitivity (Gaussian)']
plt.rc("savefig", dpi=dpi)
fig = plt.figure(figsize=figsize, dpi=dpi)
ax1 = fig.add_subplot(111)
con1, = ax1.plot(rad_samp_arcsec, cont_curve_samp, '-',
alpha=0.2, lw=2, color='green')
con2, = ax1.plot(rad_samp_arcsec, cont_curve_samp, '.',
alpha=0.2, color='green')
if student:
con3, = ax1.plot(rad_samp_arcsec, cont_curve_samp_corr, '-',
alpha=0.4, lw=2, color='blue')
con4, = ax1.plot(rad_samp_arcsec, cont_curve_samp_corr, '.',
alpha=0.4, color='blue')
lege = [(con1, con2), (con3, con4)]
else:
lege = [(con1, con2)]
plt.legend(lege, label, fancybox=True, fontsize='medium')
plt.xlabel('Angular separation [arcsec]')
plt.ylabel(str(sigma)+' sigma contrast')
plt.grid('on', which='both', alpha=0.2, linestyle='solid')
ax1.set_yscale('log')
ax1.set_xlim(0, np.max(rad_samp_arcsec))
# Give a title to the contrast curve plot
if object_name is not None and frame_size is not None:
# Retrieve ncomp and pca_type info to use in title
ncomp = algo_dict['ncomp']
if algo_dict['cube_ref'] is None:
pca_type = 'ADI'
else:
pca_type = 'RDI'
title = "{} {} {}pc {} + {}".format(pca_type, object_name, ncomp,
frame_size, inner_rad)
plt.title(title, fontsize=14)
# Option to fix the y-limit
if len(fix_y_lim) == 2:
min_y_lim = min(fix_y_lim[0], fix_y_lim[1])
max_y_lim = max(fix_y_lim[0], fix_y_lim[1])
ax1.set_ylim(min_y_lim, max_y_lim)
# Optionally, save the figure to a path
if save_plot is not None:
fig.savefig(save_plot, dpi=dpi)
if debug:
fig2 = plt.figure(figsize=figsize, dpi=dpi)
ax3 = fig2.add_subplot(111)
cc_mags = -2.5*np.log10(cont_curve_samp)
con4, = ax3.plot(rad_samp_arcsec, cc_mags, '-',
alpha=0.2, lw=2, color='green')
con5, = ax3.plot(rad_samp_arcsec, cc_mags, '.', alpha=0.2,
color='green')
if student:
cc_mags_corr = -2.5*np.log10(cont_curve_samp_corr)
con6, = ax3.plot(rad_samp_arcsec, cc_mags_corr, '-',
alpha=0.4, lw=2, color='blue')
con7, = ax3.plot(rad_samp_arcsec, cc_mags_corr, '.',
alpha=0.4, color='blue')
lege = [(con4, con5), (con6, con7)]
else:
lege = [(con4, con5)]
plt.legend(lege, label, fancybox=True, fontsize='medium')
plt.xlabel('Angular separation [arcsec]')
plt.ylabel('Delta magnitude')
plt.gca().invert_yaxis()
plt.grid('on', which='both', alpha=0.2, linestyle='solid')
ax3.set_xlim(0, np.max(rad_samp*pxscale))
ax4 = ax3.twiny()
ax4.set_xlabel('Distance [pixels]')
ax4.plot(rad_samp, cc_mags, '', alpha=0.)
ax4.set_xlim(0, np.max(rad_samp))
if student:
datafr = pd.DataFrame({'sensitivity_gaussian': cont_curve_samp,
'sensitivity_student': cont_curve_samp_corr,
'throughput': thruput_interp,
'distance': rad_samp,
'distance_arcsec': rad_samp_arcsec,
'noise': noise_samp_sm,
'residual_level': res_lev_samp_sm,
'sigma corr': sigma_corr})
else:
datafr = pd.DataFrame({'sensitivity_gaussian': cont_curve_samp,
'throughput': thruput_interp,
'distance': rad_samp,
'distance_arcsec': rad_samp_arcsec,
'noise': noise_samp_sm,
'residual_level': res_lev_samp_sm})
if full_output:
return datafr, frame_fc_all, frame_nofc, fc_map_all
else:
return datafr
def throughput(cube, angle_list, psf_template, fwhm, pxscale, algo, nbranch=1,
theta=0, inner_rad=1, fc_rad_sep=3, wedge=(0,360), fc_snr=100,
full_output=False, verbose=True, **algo_dict):
""" Measures the throughput for chosen algorithm and input dataset (ADI or
ADI+mSDI). The final throughput is the average of the same procedure
    measured in ``nbranch`` azimuthally equidistant branches.
    Parameters
    ----------
cube : numpy ndarray
The input cube, 3d (ADI data) or 4d array (IFS data), without fake
companions.
angle_list : numpy ndarray
Vector with the parallactic angles.
psf_template : numpy ndarray
Frame with the psf template for the fake companion(s).
PSF must be centered in array. Normalization is done internally.
fwhm: int or float or 1d array, optional
        The Full Width Half Maximum in pixels. It can handle a different
FWHM value for different wavelengths (IFS data).
pxscale : float
Plate scale in arcsec/px.
algo : callable or function
The post-processing algorithm, e.g. vip_hci.pca.pca. Third party Python
algorithms can be plugged here. They must have the parameters: 'cube',
'angle_list' and 'verbose'. Optionally a wrapper function can be used.
nbranch : int optional
        Number of branches on which to inject fake companions. Each branch
is tested individually.
theta : float, optional
Angle in degrees for rotating the position of the first branch that by
default is located at zero degrees. Theta counts counterclockwise from
the positive x axis.
inner_rad : int, optional
Innermost radial distance to be considered in terms of FWHM.
fc_rad_sep : int optional
Radial separation between the injected companions (in each of the
patterns) in FWHM. Must be large enough to avoid overlapping. With the
maximum possible value, a single fake companion will be injected per
cube and algorithm post-processing (which greatly affects computation
time).
wedge : tuple of floats, optional
Initial and Final angles for using a wedge. For example (-90,90) only
considers the right side of an image.
fc_snr: float optional
Signal to noise ratio of injected fake companions (w.r.t a Gaussian
distribution).
full_output : bool, optional
If True returns intermediate arrays.
verbose : bool, optional
If True prints out timing and information.
**algo_dict
Parameters of the post-processing algorithms must be passed here,
including imlib and interpolation.
Returns
-------
thruput_arr : numpy ndarray
2d array whose rows are the annulus-wise throughput values for each
branch.
vector_radd : numpy ndarray
1d array with the distances in FWHM (the positions of the annuli).
If full_output is True then the function returns: thruput_arr, noise,
vector_radd, cube_fc_all, frame_fc_all, frame_nofc and fc_map_all.
noise : numpy ndarray
1d array with the noise per annulus.
frame_fc_all : numpy ndarray
3d array with the 3 frames of the 3 (patterns) processed cubes with
companions.
frame_nofc : numpy ndarray
2d array, PCA processed frame without companions.
fc_map_all : numpy ndarray
3d array with 3 frames containing the position of the companions in the
3 patterns.
"""
array = cube
parangles = angle_list
imlib = algo_dict.get('imlib', 'vip-fft')
interpolation = algo_dict.get('interpolation', 'lanczos4')
if array.ndim != 3 and array.ndim != 4:
raise TypeError('The input array is not a 3d or 4d cube')
else:
if array.ndim == 3:
if array.shape[0] != parangles.shape[0]:
msg = 'Input parallactic angles vector has wrong length'
raise TypeError(msg)
if psf_template.ndim != 2:
raise TypeError('Template PSF is not a frame or 2d array')
maxfcsep = int((array.shape[1]/2.)/fwhm)-1
if fc_rad_sep < 3 or fc_rad_sep > maxfcsep:
msg = 'Too large separation between companions in the radial '
msg += 'patterns. Should lie between 3 and {}'
raise ValueError(msg.format(maxfcsep))
elif array.ndim == 4:
if array.shape[1] != parangles.shape[0]:
                msg = 'Input vector of parallactic angles has wrong length'
raise TypeError(msg)
if psf_template.ndim != 3:
                raise TypeError('Template PSF is not a cube (for ADI+IFS case)')
if 'scale_list' not in algo_dict:
raise ValueError('Vector of wavelength not found')
else:
if algo_dict['scale_list'].shape[0] != array.shape[0]:
raise TypeError('Input wavelength vector has wrong length')
if isinstance(fwhm, float) or isinstance(fwhm, int):
maxfcsep = int((array.shape[2] / 2.) / fwhm) - 1
else:
maxfcsep = int((array.shape[2] / 2.) / np.amin(fwhm)) - 1
if fc_rad_sep < 3 or fc_rad_sep > maxfcsep:
msg = 'Too large separation between companions in the '
msg += 'radial patterns. Should lie between 3 and {}'
raise ValueError(msg.format(maxfcsep))
if psf_template.shape[1] % 2 == 0:
raise ValueError("Only odd-sized PSF is accepted")
if not hasattr(algo, '__call__'):
raise TypeError('Parameter `algo` must be a callable function')
if not isinstance(inner_rad, int):
raise TypeError('inner_rad must be an integer')
angular_range = wedge[1] - wedge[0]
if nbranch > 1 and angular_range < 360:
msg = 'Only a single branch is allowed when working on a wedge'
raise RuntimeError(msg)
if isinstance(fwhm, (np.ndarray,list)):
fwhm_med = np.median(fwhm)
else:
fwhm_med = fwhm
if verbose:
start_time = time_ini()
#***************************************************************************
# Compute noise in concentric annuli on the "empty frame"
argl = inspect.getargspec(algo).args
if 'cube' in argl and 'angle_list' in argl and 'verbose' in argl:
if 'fwhm' in argl:
frame_nofc = algo(cube=array, angle_list=parangles, fwhm=fwhm_med,
verbose=False, **algo_dict)
if algo_dict.pop('scaling',None):
new_algo_dict = algo_dict.copy()
new_algo_dict['scaling'] = None
frame_nofc_noscal = algo(cube=array, angle_list=parangles,
fwhm=fwhm_med, verbose=False,
**new_algo_dict)
else:
frame_nofc_noscal = frame_nofc
else:
frame_nofc = algo(array, angle_list=parangles, verbose=False,
**algo_dict)
if algo_dict.pop('scaling',None):
new_algo_dict = algo_dict.copy()
new_algo_dict['scaling'] = None
frame_nofc_noscal = algo(cube=array, angle_list=parangles,
verbose=False, **new_algo_dict)
else:
frame_nofc_noscal = frame_nofc
if verbose:
msg1 = 'Cube without fake companions processed with {}'
print(msg1.format(algo.__name__))
timing(start_time)
noise, res_level, vector_radd = noise_per_annulus(frame_nofc,
separation=fwhm_med,
fwhm=fwhm_med,
wedge=wedge)
noise_noscal, _, _ = noise_per_annulus(frame_nofc_noscal,
separation=fwhm_med, fwhm=fwhm_med,
wedge=wedge)
vector_radd = vector_radd[inner_rad-1:]
noise = noise[inner_rad-1:]
res_level = res_level[inner_rad-1:]
noise_noscal = noise_noscal[inner_rad-1:]
if verbose:
print('Measured annulus-wise noise in resulting frame')
timing(start_time)
# We crop the PSF and check if PSF has been normalized (so that flux in
# 1*FWHM aperture = 1) and fix if needed
new_psf_size = int(round(3 * fwhm_med))
if new_psf_size % 2 == 0:
new_psf_size += 1
if cube.ndim == 3:
n, y, x = array.shape
psf_template = normalize_psf(psf_template, fwhm=fwhm, verbose=verbose,
size=min(new_psf_size,
psf_template.shape[1]))
# Initialize the fake companions
angle_branch = angular_range / nbranch
thruput_arr = np.zeros((nbranch, noise.shape[0]))
fc_map_all = np.zeros((nbranch * fc_rad_sep, y, x))
frame_fc_all = np.zeros((nbranch * fc_rad_sep, y, x))
cy, cx = frame_center(array[0])
# each branch is computed separately
for br in range(nbranch):
# each pattern is computed separately. For each one the companions
# are separated by "fc_rad_sep * fwhm", interleaving the injections
for irad in range(fc_rad_sep):
radvec = vector_radd[irad::fc_rad_sep]
cube_fc = array.copy()
# filling map with small numbers
fc_map = np.ones_like(array[0]) * 1e-6
fcy = []
fcx = []
for i in range(radvec.shape[0]):
flux = fc_snr * noise_noscal[irad + i * fc_rad_sep]
cube_fc = cube_inject_companions(cube_fc, psf_template,
parangles, flux, pxscale,
rad_dists=[radvec[i]],
theta=br*angle_branch +
theta,
imlib=imlib, verbose=False,
interpolation=
interpolation)
y = cy + radvec[i] * np.sin(np.deg2rad(br * angle_branch +
theta))
x = cx + radvec[i] * np.cos(np.deg2rad(br * angle_branch +
theta))
fc_map = frame_inject_companion(fc_map, psf_template, y, x,
flux, imlib, interpolation)
fcy.append(y)
fcx.append(x)
if verbose:
msg2 = 'Fake companions injected in branch {} '
msg2 += '(pattern {}/{})'
print(msg2.format(br+1, irad+1, fc_rad_sep))
timing(start_time)
#***************************************************************
arg = inspect.getargspec(algo).args
if 'cube' in arg and 'angle_list' in arg and 'verbose' in arg:
if 'fwhm' in arg:
frame_fc = algo(cube=cube_fc, angle_list=parangles,
fwhm=fwhm_med, verbose=False,
**algo_dict)
else:
frame_fc = algo(cube=cube_fc, angle_list=parangles,
verbose=False, **algo_dict)
else:
msg = 'Input algorithm must have at least 3 parameters: '
msg += 'cube, angle_list and verbose'
raise ValueError(msg)
if verbose:
msg3 = 'Cube with fake companions processed with {}'
msg3 += '\nMeasuring its annulus-wise throughput'
print(msg3.format(algo.__name__))
timing(start_time)
#**************************************************************
injected_flux = aperture_flux(fc_map, fcy, fcx, fwhm_med)
recovered_flux = aperture_flux((frame_fc - frame_nofc), fcy,
fcx, fwhm_med)
thruput = recovered_flux / injected_flux
thruput[np.where(thruput < 0)] = 0
thruput_arr[br, irad::fc_rad_sep] = thruput
fc_map_all[br*fc_rad_sep+irad, :, :] = fc_map
frame_fc_all[br*fc_rad_sep+irad, :, :] = frame_fc
elif cube.ndim == 4:
w, n, y, x = array.shape
if isinstance(fwhm, (int, float)):
fwhm = [fwhm] * w
psf_template = normalize_psf(psf_template, fwhm=fwhm, verbose=verbose,
size=min(new_psf_size,
psf_template.shape[1]))
# Initialize the fake companions
angle_branch = angular_range / nbranch
thruput_arr = np.zeros((nbranch, noise.shape[0]))
fc_map_all = np.zeros((nbranch * fc_rad_sep, w, y, x))
frame_fc_all = np.zeros((nbranch * fc_rad_sep, y, x))
cy, cx = frame_center(array[0, 0])
# each branch is computed separately
for br in range(nbranch):
# each pattern is computed separately. For each pattern the
# companions are separated by "fc_rad_sep * fwhm"
# radius = vector_radd[irad::fc_rad_sep]
for irad in range(fc_rad_sep):
radvec = vector_radd[irad::fc_rad_sep]
thetavec = range(int(theta), int(theta) + 360,
360 // len(radvec))
cube_fc = array.copy()
# filling map with small numbers
fc_map = np.ones_like(array[:, 0]) * 1e-6
fcy = []
fcx = []
for i in range(radvec.shape[0]):
flux = fc_snr * noise_noscal[irad + i * fc_rad_sep]
cube_fc = cube_inject_companions(cube_fc, psf_template,
parangles, flux, pxscale,
rad_dists=[radvec[i]],
theta=thetavec[i],
verbose=False,
imlib=imlib,
interpolation=
interpolation)
y = cy + radvec[i] * np.sin(np.deg2rad(br * angle_branch +
thetavec[i]))
x = cx + radvec[i] * np.cos(np.deg2rad(br * angle_branch +
thetavec[i]))
fc_map = frame_inject_companion(fc_map, psf_template, y, x,
flux)
fcy.append(y)
fcx.append(x)
if verbose:
msg2 = 'Fake companions injected in branch {} '
msg2 += '(pattern {}/{})'
print(msg2.format(br + 1, irad + 1, fc_rad_sep))
timing(start_time)
# **************************************************************
arg = inspect.getargspec(algo).args
if 'cube' in arg and 'angle_list' in arg and 'verbose' in arg:
if 'fwhm' in arg:
frame_fc = algo(cube=cube_fc, angle_list=parangles,
fwhm=fwhm_med, verbose=False,
**algo_dict)
else:
frame_fc = algo(cube=cube_fc, angle_list=parangles,
verbose=False, **algo_dict)
if verbose:
msg3 = 'Cube with fake companions processed with {}'
msg3 += '\nMeasuring its annulus-wise throughput'
print(msg3.format(algo.__name__))
timing(start_time)
# *************************************************************
injected_flux = [aperture_flux(fc_map[i], fcy, fcx, fwhm[i])
for i in range(array.shape[0])]
injected_flux = np.mean(injected_flux, axis=0)
recovered_flux = aperture_flux((frame_fc - frame_nofc), fcy,
fcx, fwhm_med)
thruput = recovered_flux / injected_flux
thruput[np.where(thruput < 0)] = 0
thruput_arr[br, irad::fc_rad_sep] = thruput
fc_map_all[br * fc_rad_sep + irad, :, :] = fc_map
frame_fc_all[br * fc_rad_sep + irad, :, :] = frame_fc
if verbose:
msg = 'Finished measuring the throughput in {} branches'
print(msg.format(nbranch))
timing(start_time)
if full_output:
return (thruput_arr, noise, res_level, vector_radd, frame_fc_all,
frame_nofc, fc_map_all)
else:
return thruput_arr, vector_radd
def noise_per_annulus(array, separation, fwhm, init_rad=None, wedge=(0, 360),
verbose=False, debug=False):
""" Measures the noise and mean residual level as the standard deviation
and mean, respectively, of apertures defined in each annulus with a given
separation.
The annuli start at init_rad (== fwhm by default) and stop 2*separation
before the edge of the frame.
Parameters
----------
array : numpy ndarray
Input frame.
separation : float
Separation in pixels of the centers of the annuli measured from the
center of the frame.
fwhm : float
FWHM in pixels.
init_rad : float
Initial radial distance to be used. If None then the init_rad = FWHM.
wedge : tuple of floats, optional
Initial and Final angles for using a wedge. For example (-90,90) only
considers the right side of an image. Be careful when using small
wedges, this leads to computing a standard deviation of very small
samples (<10 values).
verbose : bool, optional
If True prints information.
debug : bool, optional
If True plots the positioning of the apertures.
Returns
-------
noise : numpy ndarray
Vector with the noise value per annulus.
res_level : numpy ndarray
Vector with the mean residual level per annulus.
vector_radd : numpy ndarray
Vector with the radial distances values.
"""
def find_coords(rad, sep, init_angle, fin_angle):
angular_range = fin_angle-init_angle
npoints = (np.deg2rad(angular_range)*rad)/sep #(2*np.pi*rad)/sep
ang_step = angular_range/npoints #360/npoints
x = []
y = []
for i in range(int(npoints)):
newx = rad * np.cos(np.deg2rad(ang_step * i + init_angle))
newy = rad * np.sin(np.deg2rad(ang_step * i + init_angle))
x.append(newx)
y.append(newy)
return np.array(y), np.array(x)
###
if array.ndim != 2:
raise TypeError('Input array is not a frame or 2d array')
if not isinstance(wedge, tuple):
raise TypeError('Wedge must be a tuple with the initial and final '
'angles')
if init_rad is None:
init_rad = fwhm
init_angle, fin_angle = wedge
centery, centerx = frame_center(array)
n_annuli = int(np.floor((centery - init_rad)/separation)) - 1
noise = []
res_level = []
vector_radd = []
if verbose:
print('{} annuli'.format(n_annuli))
if debug:
_, ax = plt.subplots(figsize=(6, 6))
ax.imshow(array, origin='lower', interpolation='nearest',
alpha=0.5, cmap='gray')
for i in range(n_annuli):
y = centery + init_rad + separation * i
rad = dist(centery, centerx, y, centerx)
yy, xx = find_coords(rad, fwhm, init_angle, fin_angle)
yy += centery
xx += centerx
apertures = photutils.CircularAperture(np.array((xx, yy)).T, fwhm/2)
fluxes = photutils.aperture_photometry(array, apertures)
fluxes = np.array(fluxes['aperture_sum'])
noise_ann = np.std(fluxes)
mean_ann = np.mean(fluxes)
noise.append(noise_ann)
res_level.append(mean_ann)
vector_radd.append(rad)
if debug:
for j in range(xx.shape[0]):
# Circle takes coordinates as (X,Y)
aper = plt.Circle((xx[j], yy[j]), radius=fwhm/2, color='r',
fill=False, alpha=0.8)
ax.add_patch(aper)
cent = plt.Circle((xx[j], yy[j]), radius=0.8, color='r',
fill=True, alpha=0.5)
ax.add_patch(cent)
if verbose:
print('Radius(px) = {}, Noise = {:.3f} '.format(rad, noise_ann))
return np.array(noise), np.array(res_level), np.array(vector_radd)
def aperture_flux(array, yc, xc, fwhm, ap_factor=1, mean=False, verbose=False):
""" Returns the sum of pixel values in a circular aperture centered on the
input coordinates. The radius of the aperture is set as (ap_factor*fwhm)/2.
Parameters
----------
array : numpy ndarray
Input frame.
yc, xc : list or 1d arrays
List of y and x coordinates of sources.
fwhm : float
FWHM in pixels.
ap_factor : int, optional
Diameter of aperture in terms of the FWHM.
Returns
-------
flux : list of floats
List of fluxes.
Note
----
From Photutils documentation, the aperture photometry defines the aperture
using one of 3 methods:
'center': A pixel is considered to be entirely in or out of the aperture
depending on whether its center is in or out of the aperture.
'subpixel': A pixel is divided into subpixels and the center of each
subpixel is tested (as above).
'exact': (default) The exact overlap between the aperture and each pixel is
calculated.
"""
n_obj = len(yc)
flux = np.zeros((n_obj))
for i, (y, x) in enumerate(zip(yc, xc)):
if mean:
ind = disk((y, x), (ap_factor*fwhm)/2)
values = array[ind]
obj_flux = np.mean(values)
else:
aper = photutils.CircularAperture((x, y), (ap_factor*fwhm)/2)
obj_flux = photutils.aperture_photometry(array, aper,
method='exact')
obj_flux = np.array(obj_flux['aperture_sum'])
flux[i] = obj_flux
if verbose:
print('Coordinates of object {} : ({},{})'.format(i, y, x))
print('Object Flux = {:.2f}'.format(flux[i]))
return flux
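# A minimal usage sketch (fwhm, pxscale and starphot values are assumptions
# for illustration; any algorithm following the cube/angle_list/verbose
# convention can be passed as ``algo``):
#
#     from vip_hci.pca import pca
#     datafr = contrast_curve(cube, angle_list, psf, fwhm=4.2, pxscale=0.0272,
#                             starphot=starphot, algo=pca, sigma=5, nbranch=1,
#                             ncomp=9)
#
# Extra keywords such as ``ncomp`` are forwarded to ``algo`` via **algo_dict.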
|
py | 1a50a914d7eac8d0d8c91284c9cd3416f90f64f2 | import tensorflow as tf
from keras.utils.np_utils import to_categorical
from models import Models
from utils import plot
from parameters import batch_size, epochs, validation_split, verbose
def main():
# load data
# in the first time, it will be downloaded.
fashion_mnist = tf.keras.datasets.fashion_mnist
(train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data()
    # print dimensions
print("shape of train-set is: ", train_images.shape)
print("shape of test-set is: ", test_images.shape)
print("The number of classes is ", len(set(test_labels)))
    # change labels to categorical
y_train = []
for i in train_labels:
y_train.append(int(i))
y_train = to_categorical(y_train)
y_test = []
for i in test_labels:
y_test.append(int(i))
y_test = to_categorical(y_test)
# reshape images to 28*28*1
# convert to 3-D
X_train = train_images.reshape(
train_images.shape[0],
train_images.shape[1],
train_images.shape[2],
1
)
X_test = test_images.reshape(
test_images.shape[0],
test_images.shape[1],
test_images.shape[2],
1
)
# parameters
input_shape = X_train.shape[1:]
num_class = len(set(test_labels))
# initiate the models
Model = Models()
model = Model.MLP(input_shape, num_class)
# fit model
    history = model.fit(
X_train, y_train,
epochs=epochs,
verbose=verbose,
batch_size=batch_size,
validation_split= validation_split,
)
# plot training phase
plot(histoey, "MLP")
# print accuracy and loss
    out = model.evaluate(X_test, y_test)
    print("evaluation results (loss and metrics):", out)
if __name__ == '__main__':
main() |
py | 1a50a9c78bd06018ef5df9a3d9de7910276f1417 | import pickle
import read_input_file
from sklearn.externals import joblib
import pandas as pd
dataframe = read_input_file.create_data_set(True, 0)
print(dataframe)
loaded_model = pickle.load(open("classifiers_components/Gaussian_NB_model.sav", 'rb'))
y_pred = loaded_model.predict(dataframe)
print(y_pred) |
py | 1a50aa9e2380007339b44832b2cd70a961c66981 | import warnings
from django.conf import settings
from django.conf.urls import patterns, url
from django.core.urlresolvers import LocaleRegexURLResolver
from django.utils import six
from TWLight.i18n.views import set_language
# Direct rip from django.conf.urls.i18n, but imports our local set_language
# https://docs.djangoproject.com/en/1.8/_modules/django/conf/urls/i18n/
def i18n_patterns(prefix, *args):
"""
Adds the language code prefix to every URL pattern within this
function. This may only be used in the root URLconf, not in an included
URLconf.
"""
if isinstance(prefix, six.string_types):
pattern_list = patterns(prefix, *args)
else:
pattern_list = [prefix] + list(args)
if not settings.USE_I18N:
return pattern_list
return [LocaleRegexURLResolver(pattern_list)]
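# Example usage (hypothetical view), mirroring the stock Django pattern this
# function replicates:
#
#     urlpatterns += i18n_patterns('',
#         url(r'^about/$', about_view, name='about'),
#     )
#
# Each matched URL is then served under a language-code prefix such as /en/.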
urlpatterns = [
url(r'^setlang/$', set_language, name='set_language'),
]
|
py | 1a50abc73326d70d49a372e5d42b819a64bd7193 | """Methods based on Newton's method."""
import numpy as np
from optimus.types import DirectionMethod
from optimus.types import Function
class Newton(DirectionMethod):
"""Classic Netwon's method. Direction is the inverse hessian times gradient."""
def __call__(
self, parameters: np.ndarray, objective_function: Function
) -> np.ndarray:
return np.linalg.inv(objective_function.hessian(parameters)).dot(
objective_function.gradient(parameters)
)
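# A minimal usage sketch (the Quadratic class is a hypothetical objective
# implementing the gradient/hessian interface Newton relies on): for
# f(x) = 0.5 x^T A x - b^T x, one Newton step from any point reaches the
# minimizer, i.e. the solution of A x = b.
#
#     class Quadratic:
#         def __init__(self, A, b):
#             self.A, self.b = A, b
#         def gradient(self, x):
#             return self.A @ x - self.b
#         def hessian(self, x):
#             return self.A
#
#     A = np.array([[3.0, 1.0], [1.0, 2.0]])
#     b = np.array([1.0, 1.0])
#     x_new = np.zeros(2) - Newton()(np.zeros(2), Quadratic(A, b))
#     # x_new equals np.linalg.solve(A, b)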
|
py | 1a50ac06cbf925305c38c7dc470aa1b2a01208f1 | import torch
from tvl_backends.nvdec import nv12_to_rgb
def test_nv12_to_rgb():
w = 3840
h = 2160
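    # NV12 is a planar YUV 4:2:0 layout: a full-resolution Y plane (w*h bytes)
    # followed by an interleaved half-resolution UV plane (w*h/2 bytes),
    # i.e. 1.5 bytes per pixel in total.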
nv12 = torch.empty(int(w * h * 1.5), device='cuda:0', dtype=torch.uint8)
for i in range(100):
nv12.random_(0, 256)
rgb = nv12_to_rgb(nv12, h, w)
assert rgb.shape == (3, h, w)
|
py | 1a50ac5b27c818881236a1e3df618666c31feb9a | # Autogenerated from KST: please remove this line if doing any edits by hand!
import unittest
from bits_signed_b32_le import _schema
class TestBitsSignedB32Le(unittest.TestCase):
def test_bits_signed_b32_le(self):
r = _schema.parse_file('src/bits_signed_b32_le.bin')
self.assertEqual(r.a_num, 0)
self.assertEqual(r.a_bit, True)
self.assertEqual(r.b_num, 2147483647)
self.assertEqual(r.b_bit, False)
|
py | 1a50ad59c79ddfef48d6c37485d808e37fcdf555 | # Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import importlib
import mock
import unittest
from cloudbaseinit import exception as cbinit_exception
class WindowsNetworkUtilsTests(unittest.TestCase):
def setUp(self):
self._ctypes_mock = mock.MagicMock()
self._moves_mock = mock.MagicMock()
self._module_patcher = mock.patch.dict(
'sys.modules',
{'ctypes': self._ctypes_mock,
'six.moves': self._moves_mock})
self._module_patcher.start()
self.network = importlib.import_module(
'cloudbaseinit.utils.windows.network')
self.network.iphlpapi = mock.MagicMock()
self.network.kernel32 = mock.MagicMock()
self.network.ws2_32 = mock.MagicMock()
def tearDown(self):
self._module_patcher.stop()
def test_format_mac_address(self):
phys_address = [00, 00, 00, 00]
response = self.network._format_mac_address(phys_address=phys_address,
phys_address_len=4)
self.assertEqual("00:00:00:00", response)
def _test_socket_addr_to_str(self, ret_val):
mock_socket_addr = mock.MagicMock()
mock_create_unicode_buffer = self._ctypes_mock.create_unicode_buffer
mock_byref = self._ctypes_mock.byref
self.network.ws2_32.WSAAddressToStringW.return_value = ret_val
if ret_val:
self.assertRaises(cbinit_exception.CloudbaseInitException,
self.network._socket_addr_to_str,
mock_socket_addr)
self.network.ws2_32.WSAGetLastError.assert_called_once_with()
else:
response = self.network._socket_addr_to_str(mock_socket_addr)
self.assertEqual(mock_create_unicode_buffer.return_value.value,
response)
self._ctypes_mock.wintypes.DWORD.assert_called_once_with(256)
mock_create_unicode_buffer.assert_called_once_with(256)
self.network.ws2_32.WSAAddressToStringW.assert_called_once_with(
mock_socket_addr.lpSockaddr, mock_socket_addr.iSockaddrLength,
None, mock_create_unicode_buffer.return_value,
mock_byref.return_value)
mock_byref.assert_called_once_with(
self._ctypes_mock.wintypes.DWORD.return_value)
def test_socket_addr_to_str(self):
self._test_socket_addr_to_str(ret_val=None)
def test_socket_addr_to_str_fail(self):
self._test_socket_addr_to_str(ret_val=1)
def _test_get_registry_dhcp_server(self, dhcp_server, exception=None):
fake_adapter = mock.sentinel.fake_adapter_name
self._moves_mock.winreg.QueryValueEx.return_value = [dhcp_server]
if exception:
self._moves_mock.winreg.QueryValueEx.side_effect = [exception]
if exception.errno != 2:
self.assertRaises(cbinit_exception.CloudbaseInitException,
self.network._get_registry_dhcp_server,
fake_adapter)
else:
response = self.network._get_registry_dhcp_server(fake_adapter)
if dhcp_server == "255.255.255.255":
self.assertEqual(None, response)
else:
self.assertEqual(dhcp_server, response)
self._moves_mock.winreg.OpenKey.assert_called_once_with(
self._moves_mock.winreg.HKEY_LOCAL_MACHINE,
"SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters\\"
"Interfaces\\%s" % fake_adapter, 0,
self._moves_mock.winreg.KEY_READ)
self._moves_mock.winreg.QueryValueEx.assert_called_once_with(
self._moves_mock.winreg.OpenKey.return_value.__enter__(),
"DhcpServer")
def test_get_registry_dhcp_server(self):
self._test_get_registry_dhcp_server(
dhcp_server=mock.sentinel.dhcp_server)
def test_get_registry_dhcp_server_expected(self):
self._test_get_registry_dhcp_server(dhcp_server="255.255.255.255")
    def test_get_registry_dhcp_server_exception_not_found(self):
ex = cbinit_exception.CloudbaseInitException()
ex.errno = 2
self._test_get_registry_dhcp_server(dhcp_server="", exception=ex)
    def test_get_registry_dhcp_server_exception_other(self):
ex = cbinit_exception.CloudbaseInitException()
ex.errno = 3
self._test_get_registry_dhcp_server(dhcp_server="", exception=ex)
@mock.patch('cloudbaseinit.utils.windows.network._format_mac_address')
@mock.patch('cloudbaseinit.utils.windows.network._socket_addr_to_str')
@mock.patch('cloudbaseinit.utils.windows.network'
'._get_registry_dhcp_server')
def _test_get_adapter_addresses(self, mock_get_registry_dhcp_server,
mock_socket_addr_to_str,
mock_format_mac_address,
ret_val, p, ret_val2, xp_data_length):
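        # Exercises get_adapter_addresses() under three scenarios: heap
        # allocation failure (``p`` falsy), a failing second
        # GetAdaptersAddresses call (``ret_val2``), and the success path,
        # where the adapter dicts built from the mocked address chain are
        # verified.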
self.maxDiff = None
mock_byref = self._ctypes_mock.byref
mock_cast = self._ctypes_mock.cast
mock_POINTER = self._ctypes_mock.POINTER
self.network.iphlpapi.GetAdaptersAddresses.side_effect = [ret_val,
ret_val2]
self.network.kernel32.HeapAlloc.return_value = p
self.network.iphlpapi.IP_ADAPTER_DHCP_ENABLED = True
self.network.iphlpapi.IP_ADAPTER_IPV4_ENABLED = True
self.network.iphlpapi.IP_ADAPTER_ADDRESSES_SIZE_2003 = xp_data_length
p_curr_addr = mock.MagicMock()
compare_cast = []
net_adapters = []
compare_socket_addr_to_str = []
mock_cast.side_effect = [p_curr_addr, None, None]
curr_addr = p_curr_addr.contents
curr_addr.Flags = True
curr_addr.Union1.Struct1.Length = 2
curr_addr.Dhcpv4Server.iSockaddrLength = True
p_unicast_addr = curr_addr.FirstUnicastAddress
unicast_addr = p_unicast_addr.contents
unicast_addresses = [
(mock_socket_addr_to_str.return_value,
unicast_addr.Address.lpSockaddr.contents.sa_family)]
compare_GetAdaptersAddresses = [mock.call(
self.network.ws2_32.AF_UNSPEC,
self.network.iphlpapi.GAA_FLAG_SKIP_ANYCAST,
None, None, mock_byref.return_value)]
if not p:
self.assertRaises(cbinit_exception.CloudbaseInitException,
self.network.get_adapter_addresses)
if ret_val2 and ret_val2 != self.network.kernel32.ERROR_NO_DATA:
self.assertRaises(cbinit_exception.CloudbaseInitException,
self.network.get_adapter_addresses)
compare_cast.append(mock.call(p, mock_POINTER.return_value))
compare_GetAdaptersAddresses.append(mock.call(
self.network.ws2_32.AF_UNSPEC,
self.network.iphlpapi.GAA_FLAG_SKIP_ANYCAST, None,
p_curr_addr, mock_byref.return_value))
else:
response = self.network.get_adapter_addresses()
if ret_val == self.network.kernel32.ERROR_NO_DATA:
self.assertEqual([], response)
elif ret_val == self.network.kernel32.ERROR_BUFFER_OVERFLOW:
self.network.kernel32.GetProcessHeap.assert_called_once_with()
self.network.kernel32.HeapAlloc.assert_called_once_with(
self.network.kernel32.GetProcessHeap.return_value, 0,
self._ctypes_mock.wintypes.ULONG.return_value.value)
self.network.ws2_32.init_wsa.assert_called_once_with()
compare_cast.append(mock.call(p, mock_POINTER.return_value))
compare_GetAdaptersAddresses.append(mock.call(
self.network.ws2_32.AF_UNSPEC,
self.network.iphlpapi.GAA_FLAG_SKIP_ANYCAST, None,
p_curr_addr, mock_byref.return_value))
if ret_val2 == self.network.kernel32.ERROR_NO_DATA:
self.assertEqual([], response)
else:
compare_cast.append(mock.call(p_unicast_addr.contents.Next,
mock_POINTER.return_value))
mock_format_mac_address.assert_called_once_with(
p_curr_addr.contents.PhysicalAddress,
p_curr_addr.contents.PhysicalAddressLength)
if not curr_addr.Union1.Struct1.Length <= xp_data_length:
dhcp_server = mock_socket_addr_to_str.return_value
compare_socket_addr_to_str.append(
mock.call(curr_addr.Dhcpv4Server |
curr_addr.Dhcpv6Server))
else:
dhcp_server = \
mock_get_registry_dhcp_server.return_value
mock_get_registry_dhcp_server.assert_called_once_with(
curr_addr.AdapterName)
compare_cast.append(mock.call(curr_addr.Next,
mock_POINTER.return_value))
self.network.kernel32.HeapFree.assert_called_once_with(
self.network.kernel32.GetProcessHeap.return_value, 0,
p)
self.network.ws2_32.WSACleanup.assert_called_once_with()
compare_socket_addr_to_str.append(mock.call(
unicast_addr.Address))
net_adapters.append(
{"interface_index": curr_addr.Union1.Struct1.IfIndex,
"adapter_name": curr_addr.AdapterName,
"friendly_name": curr_addr.FriendlyName,
"description": curr_addr.Description,
"mtu": curr_addr.Mtu,
"mac_address": mock_format_mac_address.return_value,
"dhcp_enabled": True,
"dhcp_server": dhcp_server,
"interface_type": curr_addr.IfType,
"unicast_addresses": unicast_addresses})
self.assertEqual(net_adapters, response)
self.assertEqual(compare_cast, mock_cast.call_args_list)
self.assertEqual(
compare_GetAdaptersAddresses,
self.network.iphlpapi.GetAdaptersAddresses.call_args_list)
def test_get_adapter_addresses_no_data(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_NO_DATA,
p=True, ret_val2=self.network.kernel32.ERROR_NO_DATA,
xp_data_length=3)
def test_get_adapter_addresses_overflow_and_no_data(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_BUFFER_OVERFLOW,
p=True, ret_val2=self.network.kernel32.ERROR_NO_DATA,
xp_data_length=3)
def test_get_adapter_addresses_overflow_other_ret_val(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_BUFFER_OVERFLOW,
p=True, ret_val2=mock.sentinel.other_return_value,
xp_data_length=3)
def test_get_adapter_addresses_overflow(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_BUFFER_OVERFLOW,
p=True, ret_val2=None,
xp_data_length=3)
def test_get_adapter_addresses_overflow_xp_data(self):
self._test_get_adapter_addresses(
ret_val=self.network.kernel32.ERROR_BUFFER_OVERFLOW,
p=True, ret_val2=None,
xp_data_length=0)
|
py | 1a50adc98e47a3bc9d998d06cdf6f3a35b42c1cf | import repoInfo
from filechange import ischanged
from colors import logcolors
import pyfiglet
import logger
from utils import initCommands
def init():
info = repoInfo.checkinfoInDir()
url, branch = info
    logger.checkdata(url, branch)
    if 'n' in info:
initCommands(info)
else:
print(f'{logcolors.BOLD}Retrieving info from git directory{logcolors.ENDC}')
print(f'{logcolors.CYAN}URL:{logcolors.ENDC} {url} , {logcolors.CYAN}Branch:{logcolors.ENDC} {branch}')
ischanged(url,branch)
if __name__ == '__main__':
f = pyfiglet.figlet_format('G - AUTO', font='5lineoblique')
print(f"{logcolors.BOLD}{f}{logcolors.ENDC}")
init()
|
py | 1a50ae3fda9859e1fd8d25407e665dea747f5b9e | from .fingerprint import Fingerprinter, FormatResults, FindPehash
from .extract_data import extract_auth_data
|
py | 1a50aee1d12bda7851779223f0ad9b981bca0ed3 | # unittest_exception.py
import unittest
def raises_error(*args, **kwargs):
raise ValueError(f"Invalid value: {args} {kwargs}")
class ExceptionTest(unittest.TestCase):
def testTrapLocally(self):
try:
raises_error('a', b='c')
except ValueError:
pass
else:
self.fail("Did not see ValueError")
def testAssertRaises(self):
self.assertRaises(ValueError, raises_error, 'a', b='c')
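# Entry point assumed for running these tests directly (added; not part of
# the original listing):
if __name__ == '__main__':
    unittest.main()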
|
py | 1a50aef3357e4abcdcbcb1d099316fddae9018b6 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: task.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from next_builder_sdk.model.easy_flow import target_info_pb2 as next__builder__sdk_dot_model_dot_easy__flow_dot_target__info__pb2
from next_builder_sdk.model.easy_flow import package_info_pb2 as next__builder__sdk_dot_model_dot_easy__flow_dot_package__info__pb2
from next_builder_sdk.model.easy_flow import deploy_target_pb2 as next__builder__sdk_dot_model_dot_easy__flow_dot_deploy__target__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='task.proto',
package='easy_flow',
syntax='proto3',
serialized_options=_b('ZCgo.easyops.local/contracts/protorepo-models/easyops/model/easy_flow'),
serialized_pb=_b('\n\ntask.proto\x12\teasy_flow\x1a\x32next_builder_sdk/model/easy_flow/target_info.proto\x1a\x33next_builder_sdk/model/easy_flow/package_info.proto\x1a\x34next_builder_sdk/model/easy_flow/deploy_target.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x9f\x08\n\x04Task\x12\r\n\x05\x61ppId\x18\x01 \x01(\t\x12\x0f\n\x07\x61ppName\x18\x02 \x01(\t\x12\x11\n\tclusterId\x18\x03 \x01(\t\x12\x13\n\x0b\x63lusterType\x18\x04 \x01(\t\x12\x10\n\x08operator\x18\x05 \x01(\t\x12\x0b\n\x03org\x18\x06 \x01(\x05\x12)\n\ntargetList\x18\x07 \x03(\x0b\x32\x15.easy_flow.TargetInfo\x12+\n\x0bpackageList\x18\x08 \x03(\x0b\x32\x16.easy_flow.PackageInfo\x12.\n\nconfigList\x18\t \x03(\x0b\x32\x1a.easy_flow.Task.ConfigList\x12\x15\n\rtaskTimeStamp\x18\n \x01(\t\x12\x15\n\rconfigVersion\x18\x0b \x01(\t\x12\x17\n\x0f\x63onfigPackageId\x18\x0c \x01(\t\x12\'\n\x06labels\x18\r \x01(\x0b\x32\x17.google.protobuf.Struct\x12.\n\nconfigDiff\x18\x0e \x03(\x0b\x32\x1a.easy_flow.Task.ConfigDiff\x12\x12\n\nneedNotify\x18\x0f \x01(\x08\x12\x10\n\x08\x62\x61tchNum\x18\x10 \x01(\x05\x12\x15\n\rbatchInterval\x18\x11 \x01(\x05\x12(\n\x07\x62\x61tches\x18\x12 \x03(\x0b\x32\x17.easy_flow.Task.Batches\x12\x12\n\nfailedStop\x18\x13 \x01(\x08\x1a\xe2\x01\n\nConfigList\x12\r\n\x05hosts\x18\x01 \x03(\t\x12\x33\n\x07\x63onfigs\x18\x02 \x03(\x0b\x32\".easy_flow.Task.ConfigList.Configs\x1a\x8f\x01\n\x07\x43onfigs\x12\x11\n\tpackageId\x18\x01 \x01(\t\x12\x37\n\x05items\x18\x02 \x03(\x0b\x32(.easy_flow.Task.ConfigList.Configs.Items\x12\x13\n\x0binstallPath\x18\x03 \x01(\t\x1a#\n\x05Items\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x1a\xf2\x01\n\nConfigDiff\x12\r\n\x05hosts\x18\x01 \x03(\t\x12\x31\n\x06\x64\x65tail\x18\x02 \x03(\x0b\x32!.easy_flow.Task.ConfigDiff.Detail\x1a\xa1\x01\n\x06\x44\x65tail\x12\x36\n\x05items\x18\x01 \x03(\x0b\x32\'.easy_flow.Task.ConfigDiff.Detail.Items\x12\x11\n\tpackageId\x18\x02 \x01(\t\x12\x13\n\x0binstallPath\x18\x03 \x01(\t\x1a\x37\n\x05Items\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0f\n\x07newName\x18\x02 \x01(\t\x12\x0f\n\x07oldName\x18\x03 \x01(\t\x1a\x33\n\x07\x42\x61tches\x12(\n\x07targets\x18\x01 \x03(\x0b\x32\x17.easy_flow.DeployTargetBEZCgo.easyops.local/contracts/protorepo-models/easyops/model/easy_flowb\x06proto3')
,
dependencies=[next__builder__sdk_dot_model_dot_easy__flow_dot_target__info__pb2.DESCRIPTOR,next__builder__sdk_dot_model_dot_easy__flow_dot_package__info__pb2.DESCRIPTOR,next__builder__sdk_dot_model_dot_easy__flow_dot_deploy__target__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,])
_TASK_CONFIGLIST_CONFIGS_ITEMS = _descriptor.Descriptor(
name='Items',
full_name='easy_flow.Task.ConfigList.Configs.Items',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='easy_flow.Task.ConfigList.Configs.Items.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='path', full_name='easy_flow.Task.ConfigList.Configs.Items.path', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=937,
serialized_end=972,
)
_TASK_CONFIGLIST_CONFIGS = _descriptor.Descriptor(
name='Configs',
full_name='easy_flow.Task.ConfigList.Configs',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='packageId', full_name='easy_flow.Task.ConfigList.Configs.packageId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='items', full_name='easy_flow.Task.ConfigList.Configs.items', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='installPath', full_name='easy_flow.Task.ConfigList.Configs.installPath', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TASK_CONFIGLIST_CONFIGS_ITEMS, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=829,
serialized_end=972,
)
_TASK_CONFIGLIST = _descriptor.Descriptor(
name='ConfigList',
full_name='easy_flow.Task.ConfigList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='hosts', full_name='easy_flow.Task.ConfigList.hosts', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='configs', full_name='easy_flow.Task.ConfigList.configs', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TASK_CONFIGLIST_CONFIGS, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=746,
serialized_end=972,
)
_TASK_CONFIGDIFF_DETAIL_ITEMS = _descriptor.Descriptor(
name='Items',
full_name='easy_flow.Task.ConfigDiff.Detail.Items',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='path', full_name='easy_flow.Task.ConfigDiff.Detail.Items.path', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='newName', full_name='easy_flow.Task.ConfigDiff.Detail.Items.newName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='oldName', full_name='easy_flow.Task.ConfigDiff.Detail.Items.oldName', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1162,
serialized_end=1217,
)
_TASK_CONFIGDIFF_DETAIL = _descriptor.Descriptor(
name='Detail',
full_name='easy_flow.Task.ConfigDiff.Detail',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='items', full_name='easy_flow.Task.ConfigDiff.Detail.items', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='packageId', full_name='easy_flow.Task.ConfigDiff.Detail.packageId', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='installPath', full_name='easy_flow.Task.ConfigDiff.Detail.installPath', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TASK_CONFIGDIFF_DETAIL_ITEMS, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1056,
serialized_end=1217,
)
_TASK_CONFIGDIFF = _descriptor.Descriptor(
name='ConfigDiff',
full_name='easy_flow.Task.ConfigDiff',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='hosts', full_name='easy_flow.Task.ConfigDiff.hosts', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='detail', full_name='easy_flow.Task.ConfigDiff.detail', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TASK_CONFIGDIFF_DETAIL, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=975,
serialized_end=1217,
)
_TASK_BATCHES = _descriptor.Descriptor(
name='Batches',
full_name='easy_flow.Task.Batches',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='targets', full_name='easy_flow.Task.Batches.targets', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1219,
serialized_end=1270,
)
_TASK = _descriptor.Descriptor(
name='Task',
full_name='easy_flow.Task',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='appId', full_name='easy_flow.Task.appId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='appName', full_name='easy_flow.Task.appName', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clusterId', full_name='easy_flow.Task.clusterId', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='clusterType', full_name='easy_flow.Task.clusterType', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='operator', full_name='easy_flow.Task.operator', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='org', full_name='easy_flow.Task.org', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='targetList', full_name='easy_flow.Task.targetList', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='packageList', full_name='easy_flow.Task.packageList', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='configList', full_name='easy_flow.Task.configList', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='taskTimeStamp', full_name='easy_flow.Task.taskTimeStamp', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='configVersion', full_name='easy_flow.Task.configVersion', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='configPackageId', full_name='easy_flow.Task.configPackageId', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='easy_flow.Task.labels', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='configDiff', full_name='easy_flow.Task.configDiff', index=13,
number=14, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='needNotify', full_name='easy_flow.Task.needNotify', index=14,
number=15, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='batchNum', full_name='easy_flow.Task.batchNum', index=15,
number=16, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='batchInterval', full_name='easy_flow.Task.batchInterval', index=16,
number=17, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='batches', full_name='easy_flow.Task.batches', index=17,
number=18, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='failedStop', full_name='easy_flow.Task.failedStop', index=18,
number=19, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_TASK_CONFIGLIST, _TASK_CONFIGDIFF, _TASK_BATCHES, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=215,
serialized_end=1270,
)
_TASK_CONFIGLIST_CONFIGS_ITEMS.containing_type = _TASK_CONFIGLIST_CONFIGS
_TASK_CONFIGLIST_CONFIGS.fields_by_name['items'].message_type = _TASK_CONFIGLIST_CONFIGS_ITEMS
_TASK_CONFIGLIST_CONFIGS.containing_type = _TASK_CONFIGLIST
_TASK_CONFIGLIST.fields_by_name['configs'].message_type = _TASK_CONFIGLIST_CONFIGS
_TASK_CONFIGLIST.containing_type = _TASK
_TASK_CONFIGDIFF_DETAIL_ITEMS.containing_type = _TASK_CONFIGDIFF_DETAIL
_TASK_CONFIGDIFF_DETAIL.fields_by_name['items'].message_type = _TASK_CONFIGDIFF_DETAIL_ITEMS
_TASK_CONFIGDIFF_DETAIL.containing_type = _TASK_CONFIGDIFF
_TASK_CONFIGDIFF.fields_by_name['detail'].message_type = _TASK_CONFIGDIFF_DETAIL
_TASK_CONFIGDIFF.containing_type = _TASK
_TASK_BATCHES.fields_by_name['targets'].message_type = next__builder__sdk_dot_model_dot_easy__flow_dot_deploy__target__pb2._DEPLOYTARGET
_TASK_BATCHES.containing_type = _TASK
_TASK.fields_by_name['targetList'].message_type = next__builder__sdk_dot_model_dot_easy__flow_dot_target__info__pb2._TARGETINFO
_TASK.fields_by_name['packageList'].message_type = next__builder__sdk_dot_model_dot_easy__flow_dot_package__info__pb2._PACKAGEINFO
_TASK.fields_by_name['configList'].message_type = _TASK_CONFIGLIST
_TASK.fields_by_name['labels'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT
_TASK.fields_by_name['configDiff'].message_type = _TASK_CONFIGDIFF
_TASK.fields_by_name['batches'].message_type = _TASK_BATCHES
DESCRIPTOR.message_types_by_name['Task'] = _TASK
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Task = _reflection.GeneratedProtocolMessageType('Task', (_message.Message,), {
'ConfigList' : _reflection.GeneratedProtocolMessageType('ConfigList', (_message.Message,), {
'Configs' : _reflection.GeneratedProtocolMessageType('Configs', (_message.Message,), {
'Items' : _reflection.GeneratedProtocolMessageType('Items', (_message.Message,), {
'DESCRIPTOR' : _TASK_CONFIGLIST_CONFIGS_ITEMS,
'__module__' : 'task_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.Task.ConfigList.Configs.Items)
})
,
'DESCRIPTOR' : _TASK_CONFIGLIST_CONFIGS,
'__module__' : 'task_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.Task.ConfigList.Configs)
})
,
'DESCRIPTOR' : _TASK_CONFIGLIST,
'__module__' : 'task_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.Task.ConfigList)
})
,
'ConfigDiff' : _reflection.GeneratedProtocolMessageType('ConfigDiff', (_message.Message,), {
'Detail' : _reflection.GeneratedProtocolMessageType('Detail', (_message.Message,), {
'Items' : _reflection.GeneratedProtocolMessageType('Items', (_message.Message,), {
'DESCRIPTOR' : _TASK_CONFIGDIFF_DETAIL_ITEMS,
'__module__' : 'task_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.Task.ConfigDiff.Detail.Items)
})
,
'DESCRIPTOR' : _TASK_CONFIGDIFF_DETAIL,
'__module__' : 'task_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.Task.ConfigDiff.Detail)
})
,
'DESCRIPTOR' : _TASK_CONFIGDIFF,
'__module__' : 'task_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.Task.ConfigDiff)
})
,
'Batches' : _reflection.GeneratedProtocolMessageType('Batches', (_message.Message,), {
'DESCRIPTOR' : _TASK_BATCHES,
'__module__' : 'task_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.Task.Batches)
})
,
'DESCRIPTOR' : _TASK,
'__module__' : 'task_pb2'
# @@protoc_insertion_point(class_scope:easy_flow.Task)
})
_sym_db.RegisterMessage(Task)
_sym_db.RegisterMessage(Task.ConfigList)
_sym_db.RegisterMessage(Task.ConfigList.Configs)
_sym_db.RegisterMessage(Task.ConfigList.Configs.Items)
_sym_db.RegisterMessage(Task.ConfigDiff)
_sym_db.RegisterMessage(Task.ConfigDiff.Detail)
_sym_db.RegisterMessage(Task.ConfigDiff.Detail.Items)
_sym_db.RegisterMessage(Task.Batches)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
py | 1a50af50d88c9c38474d0cb0c8f0c8db66a7c28c | # -*- coding: utf-8 -*-
# Upside Travel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
import boto3
import botocore.session
from botocore.stub import Stubber
from common import AV_SIGNATURE_METADATA
from common import AV_SIGNATURE_OK
from common import AV_STATUS_METADATA
from common import AV_TIMESTAMP_METADATA
from common import get_timestamp
import scan
from moto import mock_sqs
from moto import mock_s3
from publish import send_to_queue
class TestScan(unittest.TestCase):
def setUp(self):
# Common data
self.s3_bucket_name = "test_bucket"
self.s3_key_name = "test_key"
# Clients and Resources
self.s3 = boto3.resource("s3")
self.s3_client = botocore.session.get_session().create_client("s3")
self.s3_obj = self.s3.Object(self.s3_bucket_name, self.s3_key_name)
@mock_sqs
def test_get_objects_from_sqs(self):
sqs = boto3.client("sqs")
queue = sqs.create_queue(QueueName="test-queue")
queue_url = queue["QueueUrl"]
# Stage SQS queue with a message
message = self.s3_key_name
send_to_queue(message, queue_url)
all_objects = scan.get_objects_from_sqs(queue_url, self.s3_bucket_name)
        self.assertEqual(len(all_objects), 1)
        self.assertEqual(all_objects[0], self.s3_obj)
def test_set_av_tags(self):
scan_result = "not_malicious"
scan_signature = AV_SIGNATURE_OK
timestamp = get_timestamp()
tag_set = {
"TagSet": [
{"Key": "Arbitrary", "Value": "arbitrary"},
{"Key": AV_SIGNATURE_METADATA, "Value": scan_signature},
{"Key": AV_STATUS_METADATA, "Value": scan_result},
{"Key": AV_TIMESTAMP_METADATA, "Value": timestamp},
]
}
s3_stubber = Stubber(self.s3_client)
get_object_tagging_response = tag_set
get_object_tagging_expected_params = {
"Bucket": self.s3_bucket_name,
"Key": self.s3_key_name,
}
s3_stubber.add_response(
"get_object_tagging",
get_object_tagging_response,
get_object_tagging_expected_params,
)
put_object_tagging_response = {}
put_object_tagging_expected_params = {
"Bucket": self.s3_bucket_name,
"Key": self.s3_key_name,
"Tagging": tag_set,
}
s3_stubber.add_response(
"put_object_tagging",
put_object_tagging_response,
put_object_tagging_expected_params,
)
with s3_stubber:
response = scan.set_av_tags(
self.s3_client, self.s3_obj, scan_result, scan_signature, timestamp
)
assert response == tag_set["TagSet"]
def test_str_to_bool(self):
string = "True"
result = scan.str_to_bool(string)
assert result is True
@mock_s3
def test_download_file(self):
s3 = boto3.resource("s3")
s3_client = botocore.session.get_session().create_client("s3")
s3_client.create_bucket(Bucket=self.s3_bucket_name)
s3_client.put_object(Bucket=self.s3_bucket_name, Key=self.s3_key_name, Body="")
s3_obj = s3.Object(self.s3_bucket_name, self.s3_key_name)
scan.download_file(s3_obj)
assert os.path.isfile(f"/tmp/scandir/{s3_obj.key}")
|
py | 1a50b035db868ebacc19f9be067f14d947a1ebcf | """Config flow for OpenWeatherMap."""
import logging
from pyowm import OWM
from pyowm.exceptions.api_call_error import APICallError
from pyowm.exceptions.api_response_error import UnauthorizedError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import (
CONF_API_KEY,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_MODE,
CONF_NAME,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_LANGUAGE,
DEFAULT_FORECAST_MODE,
DEFAULT_LANGUAGE,
DEFAULT_NAME,
FORECAST_MODES,
LANGUAGES,
)
from .const import DOMAIN # pylint:disable=unused-import
SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): str,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
vol.Optional(CONF_LATITUDE): cv.latitude,
vol.Optional(CONF_LONGITUDE): cv.longitude,
vol.Optional(CONF_MODE, default=DEFAULT_FORECAST_MODE): vol.In(FORECAST_MODES),
vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In(LANGUAGES),
}
)
_LOGGER = logging.getLogger(__name__)
class OpenWeatherMapConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Config flow for OpenWeatherMap."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
@staticmethod
@callback
def async_get_options_flow(config_entry):
"""Get the options flow for this handler."""
return OpenWeatherMapOptionsFlow(config_entry)
async def async_step_user(self, user_input=None):
"""Handle a flow initialized by the user."""
errors = {}
if user_input is not None:
latitude = user_input[CONF_LATITUDE]
longitude = user_input[CONF_LONGITUDE]
await self.async_set_unique_id(f"{latitude}-{longitude}")
self._abort_if_unique_id_configured()
try:
api_online = await _is_owm_api_online(
self.hass, user_input[CONF_API_KEY]
)
if not api_online:
errors["base"] = "invalid_api_key"
except UnauthorizedError:
errors["base"] = "invalid_api_key"
except APICallError:
errors["base"] = "cannot_connect"
if not errors:
return self.async_create_entry(
title=user_input[CONF_NAME], data=user_input
)
return self.async_show_form(step_id="user", data_schema=SCHEMA, errors=errors)
async def async_step_import(self, import_input=None):
"""Set the config entry up from yaml."""
config = import_input.copy()
if CONF_NAME not in config:
config[CONF_NAME] = DEFAULT_NAME
if CONF_LATITUDE not in config:
config[CONF_LATITUDE] = self.hass.config.latitude
if CONF_LONGITUDE not in config:
config[CONF_LONGITUDE] = self.hass.config.longitude
if CONF_MODE not in config:
config[CONF_MODE] = DEFAULT_FORECAST_MODE
if CONF_LANGUAGE not in config:
config[CONF_LANGUAGE] = DEFAULT_LANGUAGE
return await self.async_step_user(config)
class OpenWeatherMapOptionsFlow(config_entries.OptionsFlow):
"""Handle options."""
def __init__(self, config_entry):
"""Initialize options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(title="", data=user_input)
return self.async_show_form(
step_id="init",
data_schema=self._get_options_schema(),
)
def _get_options_schema(self):
return vol.Schema(
{
vol.Optional(
CONF_MODE,
default=self.config_entry.options.get(
CONF_MODE, DEFAULT_FORECAST_MODE
),
): vol.In(FORECAST_MODES),
vol.Optional(
CONF_LANGUAGE,
default=self.config_entry.options.get(
CONF_LANGUAGE, DEFAULT_LANGUAGE
),
): vol.In(LANGUAGES),
}
)
async def _is_owm_api_online(hass, api_key):
owm = OWM(api_key)
return await hass.async_add_executor_job(owm.is_API_online)
|
py | 1a50b0661a9257832641de08eca91cb1b6084c24 | from operator import add, neg
import linalg
__all__ = ["Matrix"]
class MatrixError(Exception):
pass
class Matrix:
"""Implements Matrices.
"""
def __init__(self, mat, valid=False):
if not valid:
try:
self._is_valid(mat)
except Exception as e:
raise e
self.shape = (len(mat), len(mat[0]))
self.matrix = mat
def _is_valid(self, mat):
"""checks if given matrix is valid
:param mat: Matrix to test
:type mat: Matrix
:raises MatrixError: raises MatrixError
"""
matlen = len(mat[0])
for row in mat:
if len(row) != matlen:
raise MatrixError("Malformed matrix")
for i in row:
assert isinstance(i, (int, float, complex))
def _is_square(self) -> bool:
return self.shape[0] == self.shape[1]
def inverse(self) -> "Matrix":
"""
        returns the inverse of this matrix
        implemented as an alias of linalg.unary.inverse
        :return: the inverse matrix
        :rtype: Matrix
"""
return linalg.unary.inverse(self)
def transpose(self) -> "Matrix":
"""
computes the transpose of self
implemented as an alias of linalg.unary.transpose
:return: transposed matrix
:rtype: Matrix
"""
return linalg.unary.transpose(self)
inv = inverse # Alias for inverse
T = transpose # Alias for transpose
def det(self) -> float:
"""
computes the determinant for self.
implemented as an alias of linalg.unary.det
:return: the determinant for mat
:rtype: float
"""
return linalg.unary.det(self)
def __str__(self):
r = ""
for i in range(self.shape[0]):
r += "|"
for j in range(self.shape[1]):
r += "{:^6.5}".format(str(self[i][j]))
r += "|"
r += "\n"
return r
def __add__(self, x):
return Matrix(
list([list(map(add, self[i], x[i])) for i in range(self.shape[0])]),
valid=True,
)
def __neg__(self):
return Matrix(
list([list(map(neg, self[i])) for i in range(self.shape[0])]), valid=True
)
def __sub__(self, x):
return self.__add__(-x)
def __matmul__(self, x: "Matrix"):
result = Matrix(
[
[sum(a * b for a, b in zip(self_row, x_col)) for x_col in zip(*x)]
for self_row in self
]
)
return result
def __mul__(self, x: float):
return Matrix([[a * x for a in row] for row in self])
def __rmul__(self, x: float):
return self.__mul__(x)
def __getitem__(self, i):
return self.matrix[i]
def __setitem__(self, key, item):
self.matrix[key] = item
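if __name__ == "__main__":
    # Minimal demo (added; not part of the original module). Only the
    # arithmetic defined above is exercised; transpose/inverse/det delegate
    # to the external ``linalg`` package, which must still be importable for
    # this module to load.
    a = Matrix([[1, 2], [3, 4]])
    b = Matrix([[5, 6], [7, 8]])
    print(a + b)   # elementwise sum
    print(a - b)   # elementwise difference, via __neg__ and __add__
    print(2 * a)   # scalar multiple, via __rmul__
    print(a @ b)   # matrix product, via __matmul__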
|
py | 1a50b09f2cf450fdf962c103fd1fe8cba55056bb | # Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import sys
if sys.version_info < (3,0):
raise RuntimeError('ParlAI requires Python 3.')
|
py | 1a50b0ac22ecb2d9fecef391f9fb96725035a799 | # Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
import unittest
from iptest.type_util import *
from iptest import run_test
class ComplexTest(unittest.TestCase):
def test_from_string(self):
# complex from string: negative
# - space related
l = ['1.2', '.3', '4e3', '.3e-4', "0.031"]
for x in l:
for y in l:
self.assertRaises(ValueError, complex, "%s +%sj" % (x, y))
self.assertRaises(ValueError, complex, "%s+ %sj" % (x, y))
self.assertRaises(ValueError, complex, "%s - %sj" % (x, y))
self.assertRaises(ValueError, complex, "%s- %sj" % (x, y))
self.assertRaises(ValueError, complex, "%s-\t%sj" % (x, y))
self.assertRaises(ValueError, complex, "%sj+%sj" % (x, y))
self.assertEqual(complex(" %s+%sj" % (x, y)), complex(" %s+%sj " % (x, y)))
def test_misc(self):
self.assertEqual(mycomplex(), complex())
a = mycomplex(1)
b = mycomplex(1,0)
c = complex(1)
d = complex(1,0)
for x in [a,b,c,d]:
for y in [a,b,c,d]:
self.assertEqual(x,y)
self.assertEqual(a ** 2, a)
self.assertEqual(a-complex(), a)
self.assertEqual(a+complex(), a)
self.assertEqual(complex()/a, complex())
self.assertEqual(complex()*a, complex())
self.assertEqual(complex()%a, complex())
self.assertEqual(complex() // a, complex())
self.assertEqual(complex(2), complex(2, 0))
def test_inherit(self):
class mycomplex(complex): pass
a = mycomplex(2+1j)
self.assertEqual(a.real, 2)
self.assertEqual(a.imag, 1)
def test_repr(self):
self.assertEqual(repr(1-6j), '(1-6j)')
def test_infinite(self):
self.assertEqual(repr(1.0e340j), 'infj')
self.assertEqual(repr(-1.0e340j),'-infj')
run_test(__name__)
|
py | 1a50b16a1987d73ce4d9a8dd3b964cbfde5f691a | # Copyright (c) 2009-2022 The Regents of the University of Michigan.
# Part of HOOMD-blue, released under the BSD 3-Clause License.
""" Metal potentials.
.. rubric:: Stability
:py:mod:`hoomd.metal` is **unstable**. When upgrading from version 2.x to 2.y (y > x), existing job scripts may need to be updated.
"""
from hoomd.metal import pair
|
py | 1a50b183c10c1b20840b5a29d6b5b00ab284659c | # Copyright 2016 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
from abc import ABC, abstractmethod
import numpy as np
import pandas as pd
from zipline.data._resample import (
_minute_to_session_open,
_minute_to_session_high,
_minute_to_session_low,
_minute_to_session_close,
_minute_to_session_volume,
)
from zipline.data.bar_reader import NoDataOnDate
from zipline.data.minute_bars import MinuteBarReader
from zipline.data.session_bars import SessionBarReader
from zipline.utils.memoize import lazyval
_MINUTE_TO_SESSION_OHCLV_HOW = OrderedDict((
('open', 'first'),
('high', 'max'),
('low', 'min'),
('close', 'last'),
('volume', 'sum'),
))
def minute_frame_to_session_frame(minute_frame, calendar):
"""
Resample a DataFrame with minute data into the frame expected by a
BcolzDailyBarWriter.
Parameters
----------
minute_frame : pd.DataFrame
A DataFrame with the columns `open`, `high`, `low`, `close`, `volume`,
and `dt` (minute dts)
calendar : trading_calendars.trading_calendar.TradingCalendar
A TradingCalendar on which session labels to resample from minute
to session.
    Returns
    -------
session_frame : pd.DataFrame
A DataFrame with the columns `open`, `high`, `low`, `close`, `volume`,
and `day` (datetime-like).
"""
how = OrderedDict((c, _MINUTE_TO_SESSION_OHCLV_HOW[c])
for c in minute_frame.columns)
labels = calendar.minute_index_to_session_labels(minute_frame.index)
return minute_frame.groupby(labels).agg(how)
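# Example usage (added; illustrative only). ``minute_df`` is a hypothetical
# minute-indexed OHLCV DataFrame, and the calendar comes from the
# trading_calendars package that this module already depends on:
#
#     from trading_calendars import get_calendar
#     daily = minute_frame_to_session_frame(minute_df, get_calendar("NYSE"))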
def minute_to_session(column, close_locs, data, out):
"""
Resample an array with minute data into an array with session data.
This function assumes that the minute data is the exact length of all
minutes in the sessions in the output.
Parameters
----------
column : str
The `open`, `high`, `low`, `close`, or `volume` column.
close_locs : array[intp]
The locations in `data` which are the market close minutes.
data : array[float64|uint32]
The minute data to be sampled into session data.
The first value should align with the market open of the first session,
containing values for all minutes for all sessions. With the last value
being the market close of the last session.
out : array[float64|uint32]
The output array into which to write the sampled sessions.
"""
if column == 'open':
_minute_to_session_open(close_locs, data, out)
elif column == 'high':
_minute_to_session_high(close_locs, data, out)
elif column == 'low':
_minute_to_session_low(close_locs, data, out)
elif column == 'close':
_minute_to_session_close(close_locs, data, out)
elif column == 'volume':
_minute_to_session_volume(close_locs, data, out)
return out
class DailyHistoryAggregator:
"""
Converts minute pricing data into a daily summary, to be used for the
last slot in a call to history with a frequency of `1d`.
This summary is the same as a daily bar rollup of minute data, with the
distinction that the summary is truncated to the `dt` requested.
    i.e. the aggregation slides forward during the course of the simulation day.
    Provides aggregation for `open`, `high`, `low`, `close`, and `volume`.
    The aggregation rules for each price type are documented in their
    respective methods.
    """
def __init__(self, market_opens, minute_reader, trading_calendar):
self._market_opens = market_opens
self._minute_reader = minute_reader
self._trading_calendar = trading_calendar
# The caches are structured as (date, market_open, entries), where
# entries is a dict of asset -> (last_visited_dt, value)
#
# Whenever an aggregation method determines the current value,
# the entry for the respective asset should be overwritten with a new
# entry for the current dt.value (int) and aggregation value.
#
# When the requested dt's date is different from date the cache is
# flushed, so that the cache entries do not grow unbounded.
#
# Example cache:
# cache = (date(2016, 3, 17),
# pd.Timestamp('2016-03-17 13:31', tz='UTC'),
# {
# 1: (1458221460000000000, np.nan),
# 2: (1458221460000000000, 42.0),
# })
self._caches = {
'open': None,
'high': None,
'low': None,
'close': None,
'volume': None
}
# The int value is used for deltas to avoid extra computation from
# creating new Timestamps.
self._one_min = pd.Timedelta('1 min').value
def _prelude(self, dt, field):
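        # Map ``dt`` to its trading session, rebuild the per-field cache when
        # the session rolls over, and derive the previous minute (None when
        # ``dt`` is exactly the market open) for the incremental aggregators.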
session = self._trading_calendar.minute_to_session_label(dt)
dt_value = dt.value
cache = self._caches[field]
if cache is None or cache[0] != session:
market_open = self._market_opens.loc[session]
cache = self._caches[field] = (session, market_open, {})
_, market_open, entries = cache
market_open = market_open.tz_localize('UTC')
if dt != market_open:
prev_dt = dt_value - self._one_min
else:
prev_dt = None
return market_open, prev_dt, dt_value, entries
def opens(self, assets, dt):
"""
        The open field's aggregation returns the first value that occurs
        for the day. If there has been no data on or before the `dt`, the open
        is `nan`.
Once the first non-nan open is seen, that value remains constant per
asset for the remainder of the day.
Returns
-------
np.array with dtype=float64, in order of assets parameter.
"""
market_open, prev_dt, dt_value, entries = self._prelude(dt, 'open')
opens = []
session_label = self._trading_calendar.minute_to_session_label(dt)
for asset in assets:
if not asset.is_alive_for_session(session_label):
opens.append(np.NaN)
continue
if prev_dt is None:
val = self._minute_reader.get_value(asset, dt, 'open')
entries[asset] = (dt_value, val)
opens.append(val)
continue
else:
try:
last_visited_dt, first_open = entries[asset]
if last_visited_dt == dt_value:
opens.append(first_open)
continue
elif not pd.isnull(first_open):
opens.append(first_open)
entries[asset] = (dt_value, first_open)
continue
else:
after_last = pd.Timestamp(
last_visited_dt + self._one_min, tz='UTC')
window = self._minute_reader.load_raw_arrays(
['open'],
after_last,
dt,
[asset],
)[0]
nonnan = window[~pd.isnull(window)]
if len(nonnan):
val = nonnan[0]
else:
val = np.nan
entries[asset] = (dt_value, val)
opens.append(val)
continue
except KeyError:
window = self._minute_reader.load_raw_arrays(
['open'],
market_open,
dt,
[asset],
)[0]
nonnan = window[~pd.isnull(window)]
if len(nonnan):
val = nonnan[0]
else:
val = np.nan
entries[asset] = (dt_value, val)
opens.append(val)
continue
return np.array(opens)
def highs(self, assets, dt):
"""
The high field's aggregation returns the largest high seen between
the market open and the current dt.
If there has been no data on or before the `dt` the high is `nan`.
Returns
-------
np.array with dtype=float64, in order of assets parameter.
"""
market_open, prev_dt, dt_value, entries = self._prelude(dt, 'high')
highs = []
session_label = self._trading_calendar.minute_to_session_label(dt)
for asset in assets:
if not asset.is_alive_for_session(session_label):
highs.append(np.NaN)
continue
if prev_dt is None:
val = self._minute_reader.get_value(asset, dt, 'high')
entries[asset] = (dt_value, val)
highs.append(val)
continue
else:
try:
last_visited_dt, last_max = entries[asset]
if last_visited_dt == dt_value:
highs.append(last_max)
continue
elif last_visited_dt == prev_dt:
curr_val = self._minute_reader.get_value(
asset, dt, 'high')
if pd.isnull(curr_val):
val = last_max
elif pd.isnull(last_max):
val = curr_val
else:
val = max(last_max, curr_val)
entries[asset] = (dt_value, val)
highs.append(val)
continue
else:
after_last = pd.Timestamp(
last_visited_dt + self._one_min, tz='UTC')
window = self._minute_reader.load_raw_arrays(
['high'],
after_last,
dt,
[asset],
)[0].T
val = np.nanmax(np.append(window, last_max))
entries[asset] = (dt_value, val)
highs.append(val)
continue
except KeyError:
window = self._minute_reader.load_raw_arrays(
['high'],
market_open,
dt,
[asset],
)[0].T
val = np.nanmax(window)
entries[asset] = (dt_value, val)
highs.append(val)
continue
return np.array(highs)
def lows(self, assets, dt):
"""
The low field's aggregation returns the smallest low seen between
the market open and the current dt.
If there has been no data on or before the `dt` the low is `nan`.
Returns
-------
np.array with dtype=float64, in order of assets parameter.
"""
market_open, prev_dt, dt_value, entries = self._prelude(dt, 'low')
lows = []
session_label = self._trading_calendar.minute_to_session_label(dt)
for asset in assets:
if not asset.is_alive_for_session(session_label):
lows.append(np.NaN)
continue
if prev_dt is None:
val = self._minute_reader.get_value(asset, dt, 'low')
entries[asset] = (dt_value, val)
lows.append(val)
continue
else:
try:
last_visited_dt, last_min = entries[asset]
if last_visited_dt == dt_value:
lows.append(last_min)
continue
elif last_visited_dt == prev_dt:
curr_val = self._minute_reader.get_value(
asset, dt, 'low')
val = np.nanmin([last_min, curr_val])
entries[asset] = (dt_value, val)
lows.append(val)
continue
else:
after_last = pd.Timestamp(
last_visited_dt + self._one_min, tz='UTC')
window = self._minute_reader.load_raw_arrays(
['low'],
after_last,
dt,
[asset],
)[0].T
val = np.nanmin(np.append(window, last_min))
entries[asset] = (dt_value, val)
lows.append(val)
continue
except KeyError:
window = self._minute_reader.load_raw_arrays(
['low'],
market_open,
dt,
[asset],
)[0].T
val = np.nanmin(window)
entries[asset] = (dt_value, val)
lows.append(val)
continue
return np.array(lows)
def closes(self, assets, dt):
"""
The close field's aggregation returns the latest close at the given
dt.
If the close for the given dt is `nan`, the most recent non-nan
`close` is used.
If there has been no data on or before the `dt` the close is `nan`.
Returns
-------
np.array with dtype=float64, in order of assets parameter.
"""
market_open, prev_dt, dt_value, entries = self._prelude(dt, 'close')
closes = []
session_label = self._trading_calendar.minute_to_session_label(dt)
def _get_filled_close(asset):
"""
Returns the most recent non-nan close for the asset in this
session. If there has been no data in this session on or before the
`dt`, returns `nan`
"""
window = self._minute_reader.load_raw_arrays(
['close'],
market_open,
dt,
[asset],
)[0]
try:
return window[~np.isnan(window)][-1]
except IndexError:
return np.NaN
for asset in assets:
if not asset.is_alive_for_session(session_label):
closes.append(np.NaN)
continue
if prev_dt is None:
val = self._minute_reader.get_value(asset, dt, 'close')
entries[asset] = (dt_value, val)
closes.append(val)
continue
else:
try:
last_visited_dt, last_close = entries[asset]
if last_visited_dt == dt_value:
closes.append(last_close)
continue
elif last_visited_dt == prev_dt:
val = self._minute_reader.get_value(
asset, dt, 'close')
if pd.isnull(val):
val = last_close
entries[asset] = (dt_value, val)
closes.append(val)
continue
else:
val = self._minute_reader.get_value(
asset, dt, 'close')
if pd.isnull(val):
val = _get_filled_close(asset)
entries[asset] = (dt_value, val)
closes.append(val)
continue
except KeyError:
val = self._minute_reader.get_value(
asset, dt, 'close')
if pd.isnull(val):
val = _get_filled_close(asset)
entries[asset] = (dt_value, val)
closes.append(val)
continue
return np.array(closes)
def volumes(self, assets, dt):
"""
The volume field's aggregation returns the sum of all volumes
between the market open and the `dt`
If there has been no data on or before the `dt` the volume is 0.
Returns
-------
np.array with dtype=int64, in order of assets parameter.
"""
market_open, prev_dt, dt_value, entries = self._prelude(dt, 'volume')
volumes = []
session_label = self._trading_calendar.minute_to_session_label(dt)
for asset in assets:
if not asset.is_alive_for_session(session_label):
volumes.append(0)
continue
if prev_dt is None:
val = self._minute_reader.get_value(asset, dt, 'volume')
entries[asset] = (dt_value, val)
volumes.append(val)
continue
else:
try:
last_visited_dt, last_total = entries[asset]
if last_visited_dt == dt_value:
volumes.append(last_total)
continue
elif last_visited_dt == prev_dt:
val = self._minute_reader.get_value(
asset, dt, 'volume')
val += last_total
entries[asset] = (dt_value, val)
volumes.append(val)
continue
else:
after_last = pd.Timestamp(
last_visited_dt + self._one_min, tz='UTC')
window = self._minute_reader.load_raw_arrays(
['volume'],
after_last,
dt,
[asset],
)[0]
val = np.nansum(window) + last_total
entries[asset] = (dt_value, val)
volumes.append(val)
continue
except KeyError:
window = self._minute_reader.load_raw_arrays(
['volume'],
market_open,
dt,
[asset],
)[0]
val = np.nansum(window)
entries[asset] = (dt_value, val)
volumes.append(val)
continue
return np.array(volumes)
class MinuteResampleSessionBarReader(SessionBarReader):
def __init__(self, calendar, minute_bar_reader):
self._calendar = calendar
self._minute_bar_reader = minute_bar_reader
def _get_resampled(self, columns, start_session, end_session, assets):
range_open = self._calendar.session_open(start_session)
range_close = self._calendar.session_close(end_session)
minute_data = self._minute_bar_reader.load_raw_arrays(
columns,
range_open,
range_close,
assets,
)
# Get the index of the close minute for each session in the range.
# If the range contains only one session, the only close in the range
# is the last minute in the data. Otherwise, we need to get all the
# session closes and find their indices in the range of minutes.
if start_session == end_session:
close_ilocs = np.array([len(minute_data[0]) - 1], dtype=np.int64)
else:
minutes = self._calendar.minutes_in_range(
range_open,
range_close,
)
session_closes = self._calendar.session_closes_in_range(
start_session,
end_session,
)
close_ilocs = minutes.searchsorted(session_closes.values)
results = []
shape = (len(close_ilocs), len(assets))
for col in columns:
if col != 'volume':
out = np.full(shape, np.nan)
else:
out = np.zeros(shape, dtype=np.uint32)
results.append(out)
for i in range(len(assets)):
for j, column in enumerate(columns):
data = minute_data[j][:, i]
minute_to_session(column, close_ilocs, data, results[j][:, i])
return results
@property
def trading_calendar(self):
return self._calendar
def load_raw_arrays(self, columns, start_dt, end_dt, sids):
return self._get_resampled(columns, start_dt, end_dt, sids)
def get_value(self, sid, session, colname):
# WARNING: This will need caching or other optimization if used in a
# tight loop.
# This was developed to complete interface, but has not been tuned
# for real world use.
return self._get_resampled([colname], session, session, [sid])[0][0][0]
@lazyval
def sessions(self):
cal = self._calendar
first = self._minute_bar_reader.first_trading_day
last = cal.minute_to_session_label(
self._minute_bar_reader.last_available_dt)
return cal.sessions_in_range(first, last)
@lazyval
def last_available_dt(self):
return self.trading_calendar.minute_to_session_label(
self._minute_bar_reader.last_available_dt
)
@property
def first_trading_day(self):
return self._minute_bar_reader.first_trading_day
def get_last_traded_dt(self, asset, dt):
return self.trading_calendar.minute_to_session_label(
self._minute_bar_reader.get_last_traded_dt(asset, dt))
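# A minimal usage sketch (names illustrative): wrap a minute reader to serve
# daily bars without a separate daily store.
#   session_reader = MinuteResampleSessionBarReader(calendar, minute_reader)
#   closes = session_reader.load_raw_arrays(
#       ['close'], start_session, end_session, sids)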
class ReindexBarReader(ABC):
"""
A base class for readers which reindexes results, filling in the additional
indices with empty data.
Used to align the reading assets which trade on different calendars.
Currently only supports a ``trading_calendar`` which is a superset of the
``reader``'s calendar.
Parameters
----------
- trading_calendar : zipline.utils.trading_calendar.TradingCalendar
The calendar to use when indexing results from the reader.
- reader : MinuteBarReader|SessionBarReader
The reader which has a calendar that is a subset of the desired
``trading_calendar``.
- first_trading_session : pd.Timestamp
The first trading session the reader should provide. Must be specified,
since the ``reader``'s first session may not exactly align with the
desired calendar. Specifically, in the case where the first session
on the target calendar is a holiday on the ``reader``'s calendar.
- last_trading_session : pd.Timestamp
The last trading session the reader should provide. Must be specified,
since the ``reader``'s last session may not exactly align with the
desired calendar. Specifically, in the case where the last session
on the target calendar is a holiday on the ``reader``'s calendar.
"""
def __init__(self,
trading_calendar,
reader,
first_trading_session,
last_trading_session):
self._trading_calendar = trading_calendar
self._reader = reader
self._first_trading_session = first_trading_session
self._last_trading_session = last_trading_session
@property
def last_available_dt(self):
return self._reader.last_available_dt
def get_last_traded_dt(self, sid, dt):
return self._reader.get_last_traded_dt(sid, dt)
@property
def first_trading_day(self):
return self._reader.first_trading_day
def get_value(self, sid, dt, field):
# Give an empty result if no data is present.
try:
return self._reader.get_value(sid, dt, field)
except NoDataOnDate:
if field == 'volume':
return 0
else:
return np.nan
@abstractmethod
def _outer_dts(self, start_dt, end_dt):
raise NotImplementedError
@abstractmethod
def _inner_dts(self, start_dt, end_dt):
raise NotImplementedError
@property
def trading_calendar(self):
return self._trading_calendar
@lazyval
def sessions(self):
return self.trading_calendar.sessions_in_range(
self._first_trading_session,
self._last_trading_session
)
def load_raw_arrays(self, fields, start_dt, end_dt, sids):
outer_dts = self._outer_dts(start_dt, end_dt)
inner_dts = self._inner_dts(start_dt, end_dt)
indices = outer_dts.searchsorted(inner_dts)
shape = len(outer_dts), len(sids)
outer_results = []
if len(inner_dts) > 0:
inner_results = self._reader.load_raw_arrays(
fields, inner_dts[0], inner_dts[-1], sids)
else:
inner_results = None
for i, field in enumerate(fields):
if field != 'volume':
out = np.full(shape, np.nan)
else:
out = np.zeros(shape, dtype=np.uint32)
if inner_results is not None:
out[indices] = inner_results[i]
outer_results.append(out)
return outer_results
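# Sessions present on the outer calendar but absent from the reader's calendar
# keep their fill values: NaN for price fields, 0 for volume.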
class ReindexMinuteBarReader(ReindexBarReader, MinuteBarReader):
"""
See: ``ReindexBarReader``
"""
def _outer_dts(self, start_dt, end_dt):
return self._trading_calendar.minutes_in_range(start_dt, end_dt)
def _inner_dts(self, start_dt, end_dt):
return self._reader.calendar.minutes_in_range(start_dt, end_dt)
class ReindexSessionBarReader(ReindexBarReader, SessionBarReader):
"""
See: ``ReindexBarReader``
"""
def _outer_dts(self, start_dt, end_dt):
return self.trading_calendar.sessions_in_range(start_dt, end_dt)
def _inner_dts(self, start_dt, end_dt):
return self._reader.trading_calendar.sessions_in_range(
start_dt, end_dt)
|
py | 1a50b216381c48a40ba0139a1b5908de137512ea | # coding: utf-8
import copy
import random
from models.judge import Judge
from logger.log import logger
from protocol.serialize import send
from common.roomConfig import roomCfg
from common.constDefine import *
class Room:
    room_id = -1  # room ID
    master_id = -1  # room owner's user ID
    room_type = -1  # room type
    users = None  # players in the room
    judge = None  # the judge
    max_num = 0  # maximum number of players in the room
    user_role = None  # player roles
    user_role_num = None  # count of players per role
    interrupt_flag = False  # whether other players may interject while someone is speaking
    speak_time = 0  # how long each player may speak
    status = None  # room status
def __init__(self, room_id, room_type, master_id):
self.room_id = room_id
self.room_type = room_type
self.master_id = master_id
self.users = {}
self.status = ROOM_STATUS_READY
if roomCfg[self.room_type] is not None:
self.max_num = roomCfg[self.room_type].max_num
self.user_role = roomCfg[self.room_type].user_role
self.interrupt_flag = roomCfg[self.room_type].interrupt_flag
self.speak_time = roomCfg[self.room_type].speak_time
from collections import Counter
self.user_role_num = Counter(self.user_role)
else:
logger.error("room config is not exists ! {0}".format(self.room_type))
def dump(self):
        return {k: v for k, v in self.__dict__.items()}
def get_number_by_identity(self, identity):
"""获取指定类型玩家的数量"""
if identity in self.user_role_num.keys():
return self.user_role_num[identity]
else:
return None
    def is_full(self):
        """Whether the room is full."""
        return len(self.users) >= self.max_num
    # Send a message to all players.
    # cmd: command code
    # proto: message payload
    # flag: True (default) sends to all players; False sends to everyone except the sender
def send_msg_to_all_users(self, cmd, proto, session, flag=True):
for user in self.users.values():
if not flag and user.uuid == session.uuid:
continue
else:
send(cmd, proto, user)
def send_msg_to_identity_users(self, identity, cmd, proto):
"""发送消息给指定身份的玩家"""
for user in self.users.values():
if user.role is not None and user.role.identity == identity:
send(cmd, proto, user)
break
def add_user(self, user):
"""添加玩家"""
ret = False
if user.uuid in self.users.keys():
logger.error("addUser error ! uuid {0} is exists!".format(user.uuid))
else:
self.users[user.uuid] = user
ret = True
return ret
def del_user(self, uuid):
"""删除玩家"""
ret = False
if uuid in self.users.keys():
del self.users[uuid]
ret = True
else:
logger.error("delUser error ! uuid {0} is not exists!".format(uuid))
return ret
def alloc_role_by_index(self, index):
if index in USER_ROLE_CLASS_DICT.keys():
cls = USER_ROLE_CLASS_DICT[index]
return cls()
else:
logger.error("alloc role error ! {0}".format(index))
return None
def allot_role(self):
"""分配身份"""
tmp_role = copy.deepcopy(self.user_role)
for user in self.users.values():
if user.role is None:
                index = random.randint(0, len(tmp_role) - 1)
user.role = self.alloc_role_by_index(tmp_role[index])
if user.role is not None:
del tmp_role[index]
def dismiss(self):
"""解散房间"""
for user in self.users.values():
user.room_id = 0
self.users.clear()
def speak(self, user_id, type, msg):
"""玩家发言"""
logger.info("user {0} speak {1} type:{2}".format(user_id, msg, type))
pass
def vote(self, user_id, other_id):
"""玩家投票"""
logger.info("user {0} vote id {1}".format(user_id, other_id))
pass
def do_skill(self, user_id, sid, target_id):
"""玩家使用技能"""
logger.info("user {0} use skill {1} target is {2}".format(user_id, sid, target_id))
pass
def ready(self):
"""玩家准备"""
flag = True
for user in self.users.values():
if user.status == 0:
flag = False
break
if flag:
            # All players are ready, so start the game.
self.start_game()
def start_game(self):
"""开始游戏"""
self.judge = Judge(self)
self.judge.start()
def end_game(self):
"""结束游戏"""
pass
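# A minimal usage sketch (assumes a populated roomCfg entry for the room type
# and user objects exposing `uuid`; every name below is illustrative):
#   room = Room(room_id=1, room_type=0, master_id=1001)
#   room.add_user(some_user)
#   if room.is_full():
#       room.allot_role()
#       room.start_game()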
|
py | 1a50b28944c049c8bd3c07e6c83cc60af3fb2c9f | from abc import abstractmethod
from typing import AsyncContextManager, Collection, Container, ContextManager
from eth_typing import BLSPubkey, BLSSignature
from eth2.beacon.types.attestations import Attestation
from eth2.beacon.types.blocks import BeaconBlock
from eth2.beacon.typing import CommitteeIndex, Epoch, Operation, SignedOperation, Slot
from eth2.clock import Tick
from eth2.validator_client.duty import Duty
from eth2.validator_client.typing import BLSPrivateKey
class BeaconNodeAPI(AsyncContextManager["BeaconNodeAPI"]):
"""
``BeaconNodeAPI`` represents a remote beacon node the validator client
can query for information about the beacon state and supply
signed messages to.
"""
@abstractmethod
async def fetch_duties(
self,
current_tick: Tick,
public_keys: Collection[BLSPubkey],
target_epoch: Epoch,
) -> Collection[Duty]:
...
@abstractmethod
async def fetch_attestation(
self, public_key: BLSPubkey, slot: Slot, committee_index: CommitteeIndex
) -> Attestation:
...
@abstractmethod
async def fetch_block_proposal(
self, slot: Slot, randao_reveal: BLSSignature
) -> BeaconBlock:
...
@abstractmethod
async def publish(self, duty: Duty, signed_operation: SignedOperation) -> None:
...
class SignatoryDatabaseAPI(Container[bytes]):
"""
Provides persistence for actions of the client to prevent
the publishing of slashable signatures.
"""
@abstractmethod
async def record_signature_for(self, duty: Duty, operation: Operation) -> None:
...
@abstractmethod
async def is_slashable(self, duty: Duty, operation: Operation) -> bool:
...
@abstractmethod
def insert(self, key: bytes, value: bytes) -> None:
...
class KeyStoreAPI(ContextManager["KeyStoreAPI"]):
@property
@abstractmethod
def public_keys(self) -> Collection[BLSPubkey]:
...
@abstractmethod
def import_private_key(self, encoded_private_key: str) -> None:
...
@abstractmethod
def private_key_for(self, public_key: BLSPubkey) -> BLSPrivateKey:
...
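# A minimal sketch of how these interfaces compose (every concrete name below
# is hypothetical; the ABCs above only fix the signatures):
#   async with SomeBeaconNode(config) as node:      # a BeaconNodeAPI impl
#       duties = await node.fetch_duties(tick, key_store.public_keys, epoch)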
|
py | 1a50b2bcb485c0bc564b5697dbce7b594619787d | project_info = ["name = Concept Of The Day",
"version = 1.0",
"description = Programming concepts implemented in Python",
"author = Anirudh Sharma",
"author_email = [email protected]",
"repo = https://github.com/ani03sha/ConceptOfTheDay",
]
print(project_info)
|
py | 1a50b2d07efab610c09e33ebd7b03f5e835b8802 | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import torch
import tests.helpers.pipelines as tpipes
import tests.helpers.utils as tutils
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import Callback, EarlyStopping, ModelCheckpoint
from tests.helpers import BoringModel
from tests.helpers.datamodules import ClassifDataModule
from tests.helpers.runif import RunIf
from tests.helpers.simple_models import ClassificationModel
def test_cpu_slurm_save_load(tmpdir):
"""Verify model save/load/checkpoint on CPU."""
model = BoringModel()
# logger file to get meta
logger = tutils.get_default_logger(tmpdir)
version = logger.version
# fit model
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=1,
logger=logger,
limit_train_batches=0.2,
limit_val_batches=0.2,
callbacks=[ModelCheckpoint(dirpath=tmpdir)],
)
trainer.fit(model)
real_global_step = trainer.global_step
# traning complete
assert trainer.state.finished, "cpu model failed to complete"
# predict with trained model before saving
# make a prediction
dataloaders = model.test_dataloader()
if not isinstance(dataloaders, list):
dataloaders = [dataloaders]
for dataloader in dataloaders:
for batch in dataloader:
break
model.eval()
pred_before_saving = model(batch)
# test HPC saving
# simulate snapshot on slurm
# save logger to make sure we get all the metrics
if logger:
logger.finalize("finished")
hpc_save_path = trainer._checkpoint_connector.hpc_save_path(trainer.weights_save_path)
trainer.save_checkpoint(hpc_save_path)
assert os.path.exists(hpc_save_path)
# new logger file to get meta
logger = tutils.get_default_logger(tmpdir, version=version)
model = BoringModel()
class _StartCallback(Callback):
# set the epoch start hook so we can predict before the model does the full training
def on_train_epoch_start(self, trainer, model):
assert trainer.global_step == real_global_step and trainer.global_step > 0
# predict with loaded model to make sure answers are the same
mode = model.training
model.eval()
new_pred = model(batch)
assert torch.eq(pred_before_saving, new_pred).all()
model.train(mode)
trainer = Trainer(
default_root_dir=tmpdir,
max_epochs=1,
logger=logger,
callbacks=[_StartCallback(), ModelCheckpoint(dirpath=tmpdir)],
)
# by calling fit again, we trigger training, loading weights from the cluster
# and our hook to predict using current model before any more weight updates
trainer.fit(model)
def test_early_stopping_cpu_model(tmpdir):
class ModelTrainVal(BoringModel):
def validation_step(self, *args, **kwargs):
output = super().validation_step(*args, **kwargs)
self.log("val_loss", output["x"])
return output
tutils.reset_seed()
stopping = EarlyStopping(monitor="val_loss", min_delta=0.1)
trainer_options = dict(
callbacks=[stopping],
default_root_dir=tmpdir,
gradient_clip_val=1.0,
track_grad_norm=2,
enable_progress_bar=False,
accumulate_grad_batches=2,
limit_train_batches=0.1,
limit_val_batches=0.1,
)
model = ModelTrainVal()
tpipes.run_model_test(trainer_options, model, on_gpu=False)
# test freeze on cpu
model.freeze()
model.unfreeze()
@RunIf(skip_windows=True, skip_49370=True)
def test_multi_cpu_model_ddp(tmpdir):
"""Make sure DDP works."""
tutils.set_random_main_port()
trainer_options = dict(
default_root_dir=tmpdir,
enable_progress_bar=False,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
gpus=None,
num_processes=2,
strategy="ddp_spawn",
)
dm = ClassifDataModule()
model = ClassificationModel()
tpipes.run_model_test(trainer_options, model, data=dm, on_gpu=False)
def test_lbfgs_cpu_model(tmpdir):
"""Test each of the trainer options.
Testing LBFGS optimizer
"""
class ModelSpecifiedOptimizer(BoringModel):
def __init__(self, optimizer_name, learning_rate):
super().__init__()
self.optimizer_name = optimizer_name
self.learning_rate = learning_rate
self.save_hyperparameters()
trainer_options = dict(
default_root_dir=tmpdir,
max_epochs=1,
enable_progress_bar=False,
limit_train_batches=0.2,
limit_val_batches=0.2,
)
model = ModelSpecifiedOptimizer(optimizer_name="LBFGS", learning_rate=0.004)
tpipes.run_model_test_without_loggers(trainer_options, model, min_acc=0.01)
def test_default_logger_callbacks_cpu_model(tmpdir):
"""Test each of the trainer options."""
trainer_options = dict(
default_root_dir=tmpdir,
max_epochs=1,
gradient_clip_val=1.0,
overfit_batches=0.20,
enable_progress_bar=False,
limit_train_batches=0.01,
limit_val_batches=0.01,
)
model = BoringModel()
tpipes.run_model_test_without_loggers(trainer_options, model, min_acc=0.01)
# test freeze on cpu
model.freeze()
model.unfreeze()
def test_running_test_after_fitting(tmpdir):
"""Verify test() on fitted model."""
class ModelTrainValTest(BoringModel):
def validation_step(self, *args, **kwargs):
output = super().validation_step(*args, **kwargs)
self.log("val_loss", output["x"])
return output
def test_step(self, *args, **kwargs):
output = super().test_step(*args, **kwargs)
self.log("test_loss", output["y"])
return output
model = ModelTrainValTest()
# logger file to get meta
logger = tutils.get_default_logger(tmpdir)
# logger file to get weights
checkpoint = tutils.init_checkpoint_callback(logger)
# fit model
trainer = Trainer(
default_root_dir=tmpdir,
enable_progress_bar=False,
max_epochs=2,
limit_train_batches=0.4,
limit_val_batches=0.2,
limit_test_batches=0.2,
callbacks=[checkpoint],
logger=logger,
)
trainer.fit(model)
assert trainer.state.finished, f"Training failed with {trainer.state}"
trainer.test()
# test we have good test accuracy
tutils.assert_ok_model_acc(trainer, key="test_loss", thr=0.5)
def test_running_test_no_val(tmpdir):
"""Verify `test()` works on a model with no `val_dataloader`.
It performs train and test only
"""
class ModelTrainTest(BoringModel):
def val_dataloader(self):
pass
def test_step(self, *args, **kwargs):
output = super().test_step(*args, **kwargs)
self.log("test_loss", output["y"])
return output
model = ModelTrainTest()
# logger file to get meta
logger = tutils.get_default_logger(tmpdir)
# logger file to get weights
checkpoint = tutils.init_checkpoint_callback(logger)
# fit model
trainer = Trainer(
default_root_dir=tmpdir,
enable_progress_bar=False,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.2,
limit_test_batches=0.2,
callbacks=[checkpoint],
logger=logger,
)
trainer.fit(model)
assert trainer.state.finished, f"Training failed with {trainer.state}"
trainer.test()
# test we have good test accuracy
tutils.assert_ok_model_acc(trainer, key="test_loss")
def test_simple_cpu(tmpdir):
"""Verify continue training session on CPU."""
model = BoringModel()
# fit model
trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, limit_val_batches=0.1, limit_train_batches=20)
trainer.fit(model)
    # training complete
    assert trainer.state.finished, "cpu model failed to complete"
def test_cpu_model(tmpdir):
"""Make sure model trains on CPU."""
trainer_options = dict(
default_root_dir=tmpdir, enable_progress_bar=False, max_epochs=1, limit_train_batches=4, limit_val_batches=4
)
model = BoringModel()
tpipes.run_model_test(trainer_options, model, on_gpu=False)
def test_all_features_cpu_model(tmpdir):
"""Test each of the trainer options."""
trainer_options = dict(
default_root_dir=tmpdir,
gradient_clip_val=1.0,
overfit_batches=0.20,
track_grad_norm=2,
enable_progress_bar=False,
accumulate_grad_batches=2,
max_epochs=1,
limit_train_batches=0.4,
limit_val_batches=0.4,
)
model = BoringModel()
tpipes.run_model_test(trainer_options, model, on_gpu=False, min_acc=0.01)
|
py | 1a50b307a32c25f92c0f399a70a861b0bf20ad68 | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This code is based on https://github.com/nwojke/deep_sort/tree/master/deep_sort
"""
import numpy as np
from scipy.optimize import linear_sum_assignment
from ..motion import kalman_filter
INFTY_COST = 1e+5
__all__ = [
'iou_1toN',
'iou_cost',
'_nn_euclidean_distance',
'_nn_cosine_distance',
'NearestNeighborDistanceMetric',
'min_cost_matching',
'matching_cascade',
'gate_cost_matrix',
]
def iou_1toN(bbox, candidates):
"""
    Compute intersection over union (IoU) between one box and N candidates.
Args:
bbox (ndarray): A bounding box in format `(top left x, top left y, width, height)`.
candidates (ndarray): A matrix of candidate bounding boxes (one per row) in the
same format as `bbox`.
Returns:
ious (ndarray): The intersection over union in [0, 1] between the `bbox`
and each candidate. A higher score means a larger fraction of the
`bbox` is occluded by the candidate.
"""
bbox_tl = bbox[:2]
bbox_br = bbox[:2] + bbox[2:]
candidates_tl = candidates[:, :2]
candidates_br = candidates[:, :2] + candidates[:, 2:]
tl = np.c_[np.maximum(bbox_tl[0], candidates_tl[:, 0])[:, np.newaxis],
np.maximum(bbox_tl[1], candidates_tl[:, 1])[:, np.newaxis]]
br = np.c_[np.minimum(bbox_br[0], candidates_br[:, 0])[:, np.newaxis],
np.minimum(bbox_br[1], candidates_br[:, 1])[:, np.newaxis]]
wh = np.maximum(0., br - tl)
area_intersection = wh.prod(axis=1)
area_bbox = bbox[2:].prod()
area_candidates = candidates[:, 2:].prod(axis=1)
ious = area_intersection / (area_bbox + area_candidates - area_intersection)
return ious
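# Sketch: a box matched exactly by one candidate and disjoint from another.
#   iou_1toN(np.array([0., 0., 2., 2.]),
#            np.array([[0., 0., 2., 2.], [4., 4., 1., 1.]]))
#   -> array([1., 0.])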
def iou_cost(tracks, detections, track_indices=None, detection_indices=None):
"""
IoU distance metric.
Args:
tracks (list[Track]): A list of tracks.
detections (list[Detection]): A list of detections.
track_indices (Optional[list[int]]): A list of indices to tracks that
should be matched. Defaults to all `tracks`.
detection_indices (Optional[list[int]]): A list of indices to detections
that should be matched. Defaults to all `detections`.
Returns:
cost_matrix (ndarray): A cost matrix of shape len(track_indices),
len(detection_indices) where entry (i, j) is
`1 - iou(tracks[track_indices[i]], detections[detection_indices[j]])`.
"""
if track_indices is None:
track_indices = np.arange(len(tracks))
if detection_indices is None:
detection_indices = np.arange(len(detections))
cost_matrix = np.zeros((len(track_indices), len(detection_indices)))
for row, track_idx in enumerate(track_indices):
if tracks[track_idx].time_since_update > 1:
cost_matrix[row, :] = 1e+5
continue
bbox = tracks[track_idx].to_tlwh()
candidates = np.asarray([detections[i].tlwh for i in detection_indices])
cost_matrix[row, :] = 1. - iou_1toN(bbox, candidates)
return cost_matrix
def _nn_euclidean_distance(s, q):
"""
Compute pair-wise squared (Euclidean) distance between points in `s` and `q`.
Args:
s (ndarray): Sample points: an NxM matrix of N samples of dimensionality M.
q (ndarray): Query points: an LxM matrix of L samples of dimensionality M.
Returns:
        distances (ndarray): A vector of length L that contains for each entry in `q` the
            smallest squared Euclidean distance to a sample in `s`.
"""
s, q = np.asarray(s), np.asarray(q)
if len(s) == 0 or len(q) == 0:
return np.zeros((len(s), len(q)))
s2, q2 = np.square(s).sum(axis=1), np.square(q).sum(axis=1)
distances = -2. * np.dot(s, q.T) + s2[:, None] + q2[None, :]
distances = np.clip(distances, 0., float(np.inf))
return np.maximum(0.0, distances.min(axis=0))
def _nn_cosine_distance(s, q):
"""
Compute pair-wise cosine distance between points in `s` and `q`.
Args:
s (ndarray): Sample points: an NxM matrix of N samples of dimensionality M.
q (ndarray): Query points: an LxM matrix of L samples of dimensionality M.
Returns:
        distances (ndarray): A vector of length L that contains for each entry in `q` the
            smallest cosine distance to a sample in `s`.
"""
s = np.asarray(s) / np.linalg.norm(s, axis=1, keepdims=True)
q = np.asarray(q) / np.linalg.norm(q, axis=1, keepdims=True)
distances = 1. - np.dot(s, q.T)
return distances.min(axis=0)
class NearestNeighborDistanceMetric(object):
"""
A nearest neighbor distance metric that, for each target, returns
the closest distance to any sample that has been observed so far.
Args:
metric (str): Either "euclidean" or "cosine".
matching_threshold (float): The matching threshold. Samples with larger
distance are considered an invalid match.
budget (Optional[int]): If not None, fix samples per class to at most
this number. Removes the oldest samples when the budget is reached.
Attributes:
samples (Dict[int -> List[ndarray]]): A dictionary that maps from target
identities to the list of samples that have been observed so far.
"""
def __init__(self, metric, matching_threshold, budget=None):
if metric == "euclidean":
self._metric = _nn_euclidean_distance
elif metric == "cosine":
self._metric = _nn_cosine_distance
else:
raise ValueError(
"Invalid metric; must be either 'euclidean' or 'cosine'")
self.matching_threshold = matching_threshold
self.budget = budget
self.samples = {}
def partial_fit(self, features, targets, active_targets):
"""
Update the distance metric with new data.
Args:
features (ndarray): An NxM matrix of N features of dimensionality M.
targets (ndarray): An integer array of associated target identities.
active_targets (List[int]): A list of targets that are currently
present in the scene.
"""
for feature, target in zip(features, targets):
self.samples.setdefault(target, []).append(feature)
if self.budget is not None:
self.samples[target] = self.samples[target][-self.budget:]
self.samples = {k: self.samples[k] for k in active_targets}
def distance(self, features, targets):
"""
Compute distance between features and targets.
Args:
features (ndarray): An NxM matrix of N features of dimensionality M.
targets (list[int]): A list of targets to match the given `features` against.
Returns:
cost_matrix (ndarray): a cost matrix of shape len(targets), len(features),
where element (i, j) contains the closest squared distance between
`targets[i]` and `features[j]`.
"""
cost_matrix = np.zeros((len(targets), len(features)))
for i, target in enumerate(targets):
cost_matrix[i, :] = self._metric(self.samples[target], features)
return cost_matrix
def min_cost_matching(distance_metric,
max_distance,
tracks,
detections,
track_indices=None,
detection_indices=None):
"""
Solve linear assignment problem.
Args:
distance_metric :
Callable[List[Track], List[Detection], List[int], List[int]) -> ndarray
The distance metric is given a list of tracks and detections as
well as a list of N track indices and M detection indices. The
metric should return the NxM dimensional cost matrix, where element
(i, j) is the association cost between the i-th track in the given
track indices and the j-th detection in the given detection_indices.
max_distance (float): Gating threshold. Associations with cost larger
than this value are disregarded.
tracks (list[Track]): A list of predicted tracks at the current time
step.
detections (list[Detection]): A list of detections at the current time
step.
track_indices (list[int]): List of track indices that maps rows in
`cost_matrix` to tracks in `tracks`.
detection_indices (List[int]): List of detection indices that maps
columns in `cost_matrix` to detections in `detections`.
Returns:
A tuple (List[(int, int)], List[int], List[int]) with the following
three entries:
* A list of matched track and detection indices.
* A list of unmatched track indices.
* A list of unmatched detection indices.
"""
if track_indices is None:
track_indices = np.arange(len(tracks))
if detection_indices is None:
detection_indices = np.arange(len(detections))
if len(detection_indices) == 0 or len(track_indices) == 0:
return [], track_indices, detection_indices # Nothing to match.
cost_matrix = distance_metric(tracks, detections, track_indices,
detection_indices)
cost_matrix[cost_matrix > max_distance] = max_distance + 1e-5
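    # Entries above the gate are pushed just past max_distance so that any
    # assignment the solver still places there is rejected in the loop below.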
indices = linear_sum_assignment(cost_matrix)
matches, unmatched_tracks, unmatched_detections = [], [], []
for col, detection_idx in enumerate(detection_indices):
if col not in indices[1]:
unmatched_detections.append(detection_idx)
for row, track_idx in enumerate(track_indices):
if row not in indices[0]:
unmatched_tracks.append(track_idx)
for row, col in zip(indices[0], indices[1]):
track_idx = track_indices[row]
detection_idx = detection_indices[col]
if cost_matrix[row, col] > max_distance:
unmatched_tracks.append(track_idx)
unmatched_detections.append(detection_idx)
else:
matches.append((track_idx, detection_idx))
return matches, unmatched_tracks, unmatched_detections
def matching_cascade(distance_metric,
max_distance,
cascade_depth,
tracks,
detections,
track_indices=None,
detection_indices=None):
"""
Run matching cascade.
Args:
distance_metric :
Callable[List[Track], List[Detection], List[int], List[int]) -> ndarray
The distance metric is given a list of tracks and detections as
well as a list of N track indices and M detection indices. The
metric should return the NxM dimensional cost matrix, where element
(i, j) is the association cost between the i-th track in the given
track indices and the j-th detection in the given detection_indices.
max_distance (float): Gating threshold. Associations with cost larger
than this value are disregarded.
        cascade_depth (int): The cascade depth; should be set to the maximum
            track age.
tracks (list[Track]): A list of predicted tracks at the current time
step.
detections (list[Detection]): A list of detections at the current time
step.
track_indices (list[int]): List of track indices that maps rows in
`cost_matrix` to tracks in `tracks`.
detection_indices (List[int]): List of detection indices that maps
columns in `cost_matrix` to detections in `detections`.
Returns:
A tuple (List[(int, int)], List[int], List[int]) with the following
three entries:
* A list of matched track and detection indices.
* A list of unmatched track indices.
* A list of unmatched detection indices.
"""
if track_indices is None:
track_indices = list(range(len(tracks)))
if detection_indices is None:
detection_indices = list(range(len(detections)))
unmatched_detections = detection_indices
matches = []
for level in range(cascade_depth):
if len(unmatched_detections) == 0: # No detections left
break
track_indices_l = [
k for k in track_indices if tracks[k].time_since_update == 1 + level
]
if len(track_indices_l) == 0: # Nothing to match at this level
continue
matches_l, _, unmatched_detections = \
min_cost_matching(
distance_metric, max_distance, tracks, detections,
track_indices_l, unmatched_detections)
matches += matches_l
unmatched_tracks = list(set(track_indices) - set(k for k, _ in matches))
return matches, unmatched_tracks, unmatched_detections
def gate_cost_matrix(kf,
cost_matrix,
tracks,
detections,
track_indices,
detection_indices,
gated_cost=INFTY_COST,
only_position=False):
"""
Invalidate infeasible entries in cost matrix based on the state
distributions obtained by Kalman filtering.
Args:
kf (object): The Kalman filter.
cost_matrix (ndarray): The NxM dimensional cost matrix, where N is the
number of track indices and M is the number of detection indices,
such that entry (i, j) is the association cost between
`tracks[track_indices[i]]` and `detections[detection_indices[j]]`.
tracks (list[Track]): A list of predicted tracks at the current time
step.
detections (list[Detection]): A list of detections at the current time
step.
track_indices (List[int]): List of track indices that maps rows in
`cost_matrix` to tracks in `tracks`.
detection_indices (List[int]): List of detection indices that maps
columns in `cost_matrix` to detections in `detections`.
gated_cost (Optional[float]): Entries in the cost matrix corresponding
to infeasible associations are set this value. Defaults to a very
large value.
only_position (Optional[bool]): If True, only the x, y position of the
state distribution is considered during gating. Default False.
"""
gating_dim = 2 if only_position else 4
gating_threshold = kalman_filter.chi2inv95[gating_dim]
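    # chi2inv95 holds the 0.95 quantile of the chi-square distribution for the
    # given degrees of freedom (about 9.4877 for 4 dof), so the gate admits
    # roughly 95% of true associations.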
measurements = np.asarray(
[detections[i].to_xyah() for i in detection_indices])
for row, track_idx in enumerate(track_indices):
track = tracks[track_idx]
gating_distance = kf.gating_distance(track.mean, track.covariance,
measurements, only_position)
cost_matrix[row, gating_distance > gating_threshold] = gated_cost
return cost_matrix
|
py | 1a50b3a7351297968a096beb7a71d73f7a3d02a0 | """Trains a ResNet on the CIFAR10 dataset.
ResNet v1
[a] Deep Residual Learning for Image Recognition
https://arxiv.org/pdf/1512.03385.pdf
ResNet v2
[b] Identity Mappings in Deep Residual Networks
https://arxiv.org/pdf/1603.05027.pdf
"""
from __future__ import print_function
import keras
from keras.layers import Dense, Conv2D, BatchNormalization, Activation
from keras.layers import AveragePooling2D, Input, Flatten
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.callbacks import ReduceLROnPlateau
from keras.preprocessing.image import ImageDataGenerator
from keras.regularizers import l2
from keras.models import Model
from keras.datasets import cifar10
from keras.utils import plot_model
import numpy as np
import os
# Training parameters
batch_size = 32 # orig paper trained all networks with batch_size=128
epochs = 200
data_augmentation = True
num_classes = 10
# Subtracting pixel mean improves accuracy
subtract_pixel_mean = True
# Model parameter
# ----------------------------------------------------------------------------
# | | 200-epoch | Orig Paper| 200-epoch | Orig Paper| sec/epoch
# Model | n | ResNet v1 | ResNet v1 | ResNet v2 | ResNet v2 | GTX1080Ti
# |v1(v2)| %Accuracy | %Accuracy | %Accuracy | %Accuracy | v1 (v2)
# ----------------------------------------------------------------------------
# ResNet20 | 3 (2)| 92.16 | 91.25 | ----- | ----- | 35 (---)
# ResNet32 | 5(NA)| 92.46 | 92.49 | NA | NA | 50 ( NA)
# ResNet44 | 7(NA)| 92.50 | 92.83 | NA | NA | 70 ( NA)
# ResNet56 | 9 (6)| 92.71 | 93.03 | 93.01 | NA | 90 (100)
# ResNet110 |18(12)| 92.65 | 93.39+-.16| 93.15 | 93.63 | 165(180)
# ResNet164 |27(18)| ----- | 94.07 | ----- | 94.54 | ---(---)
# ResNet1001| (111)| ----- | 92.39 | ----- | 95.08+-.14| ---(---)
# ---------------------------------------------------------------------------
n = 3
# Model version
# Orig paper: version = 1 (ResNet v1), Improved ResNet: version = 2 (ResNet v2)
version = 1
# Computed depth from supplied model parameter n
if version == 1:
depth = n * 6 + 2
elif version == 2:
depth = n * 9 + 2
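# e.g. n = 3 gives depth 20 for v1 (ResNet20) and depth 29 for v2.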
# Model name, depth and version
model_type = 'ResNet%dv%d' % (depth, version)
# Load the CIFAR10 data.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Input image dimensions.
input_shape = x_train.shape[1:]
# Normalize data.
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
# If subtract pixel mean is enabled
if subtract_pixel_mean:
x_train_mean = np.mean(x_train, axis=0)
x_train -= x_train_mean
x_test -= x_train_mean
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
print('y_train shape:', y_train.shape)
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
def lr_schedule(epoch):
"""Learning Rate Schedule
Learning rate is scheduled to be reduced after 80, 120, 160, 180 epochs.
Called automatically every epoch as part of callbacks during training.
# Arguments
epoch (int): The number of epochs
# Returns
lr (float32): learning rate
"""
lr = 1e-3
if epoch > 180:
lr *= 0.5e-3
elif epoch > 160:
lr *= 1e-3
elif epoch > 120:
lr *= 1e-2
elif epoch > 80:
lr *= 1e-1
print('Learning rate: ', lr)
return lr
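# Sketch of the schedule: lr_schedule(0) -> 1e-3, lr_schedule(100) -> 1e-4,
# lr_schedule(190) -> 5e-7.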
def resnet_layer(inputs,
num_filters=16,
kernel_size=3,
strides=1,
activation='relu',
batch_normalization=True,
conv_first=True):
"""2D Convolution-Batch Normalization-Activation stack builder
# Arguments
inputs (tensor): input tensor from input image or previous layer
num_filters (int): Conv2D number of filters
kernel_size (int): Conv2D square kernel dimensions
strides (int): Conv2D square stride dimensions
activation (string): activation name
batch_normalization (bool): whether to include batch normalization
conv_first (bool): conv-bn-activation (True) or
activation-bn-conv (False)
# Returns
x (tensor): tensor as input to the next layer
"""
conv = Conv2D(num_filters,
kernel_size=kernel_size,
strides=strides,
padding='same',
kernel_initializer='he_normal',
kernel_regularizer=l2(1e-4))
x = inputs
if conv_first:
x = conv(x)
if batch_normalization:
x = BatchNormalization()(x)
if activation is not None:
x = Activation(activation)(x)
else:
if batch_normalization:
x = BatchNormalization()(x)
if activation is not None:
x = Activation(activation)(x)
x = conv(x)
return x
def resnet_v1(input_shape, depth, num_classes=10):
"""ResNet Version 1 Model builder [a]
Stacks of 2 x (3 x 3) Conv2D-BN-ReLU
Last ReLU is after the shortcut connection.
At the beginning of each stage, the feature map size is halved (downsampled)
    by a convolutional layer with strides=2, while the number of filters is
    doubled. Within each stage, the layers have the same number of filters and
    the same feature map sizes.
Features maps sizes:
stage 0: 32x32, 16
stage 1: 16x16, 32
stage 2: 8x8, 64
    The number of parameters is approximately the same as in Table 6 of [a]:
ResNet20 0.27M
ResNet32 0.46M
ResNet44 0.66M
ResNet56 0.85M
ResNet110 1.7M
# Arguments
input_shape (tensor): shape of input image tensor
depth (int): number of core convolutional layers
num_classes (int): number of classes (CIFAR10 has 10)
# Returns
model (Model): Keras model instance
"""
if (depth - 2) % 6 != 0:
raise ValueError('depth should be 6n+2 (eg 20, 32, 44 in [a])')
# Start model definition.
num_filters = 16
num_res_blocks = int((depth - 2) / 6)
inputs = Input(shape=input_shape)
x = resnet_layer(inputs=inputs)
# Instantiate the stack of residual units
for stack in range(3):
for res_block in range(num_res_blocks):
strides = 1
if stack > 0 and res_block == 0: # first layer but not first stack
strides = 2 # downsample
y = resnet_layer(inputs=x,
num_filters=num_filters,
strides=strides)
y = resnet_layer(inputs=y,
num_filters=num_filters,
activation=None)
if stack > 0 and res_block == 0: # first layer but not first stack
# linear projection residual shortcut connection to match
# changed dims
x = resnet_layer(inputs=x,
num_filters=num_filters,
kernel_size=1,
strides=strides,
activation=None,
batch_normalization=False)
x = keras.layers.add([x, y])
x = Activation('relu')(x)
num_filters *= 2
# Add classifier on top.
# v1 does not use BN after last shortcut connection-ReLU
x = AveragePooling2D(pool_size=8)(x)
y = Flatten()(x)
outputs = Dense(num_classes,
activation='softmax',
kernel_initializer='he_normal')(y)
# Instantiate model.
model = Model(inputs=inputs, outputs=outputs)
return model
def resnet_v2(input_shape, depth, num_classes=10):
"""ResNet Version 2 Model builder [b]
Stacks of (1 x 1)-(3 x 3)-(1 x 1) BN-ReLU-Conv2D or also known as
bottleneck layer
First shortcut connection per layer is 1 x 1 Conv2D.
Second and onwards shortcut connection is identity.
At the beginning of each stage, the feature map size is halved (downsampled)
by a convolutional layer with strides=2, while the number of filter maps is
    doubled. Within each stage, the layers have the same number of filters and
    the same filter map sizes.
Features maps sizes:
conv1 : 32x32, 16
stage 0: 32x32, 64
stage 1: 16x16, 128
stage 2: 8x8, 256
# Arguments
input_shape (tensor): shape of input image tensor
depth (int): number of core convolutional layers
num_classes (int): number of classes (CIFAR10 has 10)
# Returns
model (Model): Keras model instance
"""
if (depth - 2) % 9 != 0:
raise ValueError('depth should be 9n+2 (eg 56 or 110 in [b])')
# Start model definition.
num_filters_in = 16
num_res_blocks = int((depth - 2) / 9)
inputs = Input(shape=input_shape)
# v2 performs Conv2D with BN-ReLU on input before splitting into 2 paths
x = resnet_layer(inputs=inputs,
num_filters=num_filters_in,
conv_first=True)
# Instantiate the stack of residual units
for stage in range(3):
for res_block in range(num_res_blocks):
activation = 'relu'
batch_normalization = True
strides = 1
if stage == 0:
num_filters_out = num_filters_in * 4
if res_block == 0: # first layer and first stage
activation = None
batch_normalization = False
else:
num_filters_out = num_filters_in * 2
if res_block == 0: # first layer but not first stage
strides = 2 # downsample
# bottleneck residual unit
y = resnet_layer(inputs=x,
num_filters=num_filters_in,
kernel_size=1,
strides=strides,
activation=activation,
batch_normalization=batch_normalization,
conv_first=False)
y = resnet_layer(inputs=y,
num_filters=num_filters_in,
conv_first=False)
y = resnet_layer(inputs=y,
num_filters=num_filters_out,
kernel_size=1,
conv_first=False)
if res_block == 0:
# linear projection residual shortcut connection to match
# changed dims
x = resnet_layer(inputs=x,
num_filters=num_filters_out,
kernel_size=1,
strides=strides,
activation=None,
batch_normalization=False)
x = keras.layers.add([x, y])
num_filters_in = num_filters_out
# Add classifier on top.
# v2 has BN-ReLU before Pooling
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = AveragePooling2D(pool_size=8)(x)
y = Flatten()(x)
outputs = Dense(num_classes,
activation='softmax',
kernel_initializer='he_normal')(y)
# Instantiate model.
model = Model(inputs=inputs, outputs=outputs)
return model
if version == 2:
model = resnet_v2(input_shape=input_shape, depth=depth)
else:
model = resnet_v1(input_shape=input_shape, depth=depth)
model.compile(loss='categorical_crossentropy',
optimizer=Adam(lr=lr_schedule(0)),
metrics=['accuracy'])
model.summary()
plot_model(model, to_file="%s.png" % model_type, show_shapes=True)
print(model_type)
# Prepare model model saving directory.
save_dir = os.path.join(os.getcwd(), 'saved_models')
model_name = 'cifar10_%s_model.{epoch:03d}.h5' % model_type
if not os.path.isdir(save_dir):
os.makedirs(save_dir)
filepath = os.path.join(save_dir, model_name)
# Prepare callbacks for model saving and for learning rate adjustment.
checkpoint = ModelCheckpoint(filepath=filepath,
monitor='val_acc',
verbose=1,
save_best_only=True)
lr_scheduler = LearningRateScheduler(lr_schedule)
lr_reducer = ReduceLROnPlateau(factor=np.sqrt(0.1),
cooldown=0,
patience=5,
min_lr=0.5e-6)
callbacks = [checkpoint, lr_reducer, lr_scheduler]
# Run training, with or without data augmentation.
if not data_augmentation:
print('Not using data augmentation.')
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=epochs,
validation_data=(x_test, y_test),
shuffle=True,
callbacks=callbacks)
else:
print('Using real-time data augmentation.')
# This will do preprocessing and realtime data augmentation:
datagen = ImageDataGenerator(
# set input mean to 0 over the dataset
featurewise_center=False,
# set each sample mean to 0
samplewise_center=False,
# divide inputs by std of dataset
featurewise_std_normalization=False,
# divide each input by its std
samplewise_std_normalization=False,
# apply ZCA whitening
zca_whitening=False,
# randomly rotate images in the range (deg 0 to 180)
rotation_range=0,
# randomly shift images horizontally
width_shift_range=0.1,
# randomly shift images vertically
height_shift_range=0.1,
# randomly flip images
horizontal_flip=True,
# randomly flip images
vertical_flip=False)
# Compute quantities required for featurewise normalization
# (std, mean, and principal components if ZCA whitening is applied).
datagen.fit(x_train)
# Fit the model on the batches generated by datagen.flow().
model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
validation_data=(x_test, y_test),
epochs=epochs, verbose=1, workers=4,
callbacks=callbacks)
# Score trained model.
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
|
py | 1a50b3a7bb9091e3374171cb326cac009443d914 | from app import create_app, db
from app.models import User, User_dataset, Data_subset, Analysis_result, Dataset_columns, Task
app = create_app()
@app.shell_context_processor
def make_shell_context():
return {'db': db, 'User': User, 'Task': Task}
|
py | 1a50b3cb1f4c1f2eda5d37f3a6828ba8fe3ed9d1 | #!/usr/bin/env python
# -*- coding: utf8 -*-
import math
# pi - the number pi, rad - radius of the sphere (the Earth), in meters
rad = 6372795
# coordinates of the two points
llat1 = 77.1539
llong1 = -120.398
llat2 = 77.1804
llong2 = 129.55
# in radians
lat1 = llat1*math.pi/180.
lat2 = llat2*math.pi/180.
long1 = llong1*math.pi/180.
long2 = llong2*math.pi/180.
# cosines and sines of the latitudes and of the longitude difference
cl1 = math.cos(lat1)
cl2 = math.cos(lat2)
sl1 = math.sin(lat1)
sl2 = math.sin(lat2)
delta = long2 - long1
cdelta = math.cos(delta)
sdelta = math.sin(delta)
# compute the great-circle distance
y = math.sqrt(math.pow(cl2*sdelta,2)+math.pow(cl1*sl2-sl1*cl2*cdelta,2))
x = sl1*sl2+cl1*cl2*cdelta
ad = math.atan2(y,x)
dist = ad*rad
# compute the initial bearing
x = (cl1*sl2) - (sl1*cl2*cdelta)
y = sdelta*cl2
z = math.degrees(math.atan(-y/x))
if (x < 0):
z = z+180.
z2 = (z+180.) % 360. - 180.
z2 = - math.radians(z2)
anglerad2 = z2 - ((2*math.pi)*math.floor((z2/(2*math.pi))) )
angledeg = (anglerad2*180.)/math.pi
print 'Distance >> %.0f' % dist, ' [meters]'
print 'Initial bearing >> ', angledeg, '[degrees]'
|
py | 1a50b3dd06b84beafe80dfd326cc00fb49c156f0 | #!/usr/bin/env python
import sys
sys.setdlopenflags(0x100|0x2)
import fvm
import fvm.fvmbaseExt as fvmbaseExt
import fvm.importers as importers
import numpy
from mpi4py import MPI
fvm.set_atype('double')
import math
if fvm.atype == 'double':
import fvm.models_atyped_double as models
import fvm.exporters_atyped_double as exporters
elif fvm.atype == 'tangent':
import fvm.models_atyped_tangent_double as models
import fvm.exporters_atyped_tangent_double as exporters
from FluentCase import FluentCase
from optparse import OptionParser
#fvmbaseExt.enableDebug("cdtor")
fileBase0 = None
fileBase1 = None
numIterations = 100
numEIterations = 100
sPot = 160.
fileBase0 = "/scratch/prism/shankha/prism/prism1/memosa/src/fvm/test/shankha/structureMeshDeformation/trdeform16/dbeam1"
fileBase1 = "/scratch/prism/shankha/prism/prism1/memosa/src/fvm/test/shankha/structureMeshDeformation/trdeform16/dbeam2"
def eadvance(fmodel,niter):
for i in range(0,niter):
try:
stopFlag=fmodel.advance(1)
if stopFlag == 1:
break
except KeyboardInterrupt:
break
def setDirichletCommonDisplacement(dmodel,geomFields,meshes,structureFields):
mesh0 = meshes[0]
nodes0 = mesh0.getNodes()
cells0 = mesh0.getCells()
common0 = dmodel.getCommon(nodes0).asNumPyArray()
mesh1 = meshes[1]
nodes1 = mesh1.getNodes()
common1 = dmodel.getCommon(nodes1).asNumPyArray()
length = len(common0)
def1 = geomFields.dirichletNodeDisplacement[nodes1].asNumPyArray()
coord0 = geomFields.coordinate[nodes0].asNumPyArray()
coord0_K1 = geomFields.coordinateK1[nodes0].asNumPyArray()
for i in range(0,length):
id0 = common0[i]
id1 = common1[i]
# print '\n coord0 is %e %e %i' %(coord0N1[id0,0],coord0N1[id0,1],length)
def1[id1] = coord0[id0] - coord0_K1[id0]
def advance(smodel,dmodel,movingMeshModel,emodel,geomFields,
structureFields,electricFields,meshes0,meshes,niter):
for i in range(0,niter):
try:
bcMap = smodel.getBCMap()
bcID = 3
if bcID in bcMap:
bc = smodel.getBCMap()[bcID]
bc.bcType = 'SpecifiedDistForce'
bcEID = 5
felec=createBVFields(geomFields,meshes,bcID,bcEID,structureFields,electricFields)
bc['specifiedXDistForce']=0
bc['specifiedYDistForce']=felec
bc['specifiedZDistForce']=0
sk=smodel.advance(1)
dmodel.calculateNodeDisplacement()
dmodel.deformStructure()
# setDirichletCommonDisplacement(dmodel,geomFields,meshes,structureFields)
# movingMeshModel.advance()
metricsCalculator.recalculate_deform()
# eadvance(emodel,numEIterations)
if(sk==1):
break
except KeyboardInterrupt:
break
def advanceUnsteady(smodel,dmodel,movingMeshModel,emodel,geomFields,
structureFields,electricFields,meshes0,meshes,nTimeSteps,globalTime):
fileName = fileBase0 + "middef.txt"
file = open(fileName,"w")
mesh0 = meshes0[0]
deformation = structureFields.deformation[mesh0.getCells()].asNumPyArray()
file.write(" %e " % globalTime)
file.write(" %e " % deformation[500][0])
file.write(" %e " % deformation[500][1])
file.write("\n")
for i in range(0,nTimeSteps):
try:
advance(smodel,dmodel,movingMeshModel,emodel,geomFields,
structureFields,electricFields,meshes0,meshes,numIterations)
globalTime += timeStep
print 'advancing to time %e at iteration %i' % (globalTime,i)
file.write(" %e " % globalTime)
file.write(" %e " % deformation[500][0])
file.write(" %e " % deformation[500][1])
file.write("\n")
smodel.updateTime()
except KeyboardInterrupt:
break
def createBVFields(geomFields,meshes,id,eid,structureFields,electricFields):
fy = fvmbaseExt.Field('bvy')
mesh0 = meshes[0]
mesh1 = meshes[1]
vol = geomFields.volume[mesh0.getCells()]
deflection = structureFields.deformation[mesh0.getCells()].asNumPyArray()
fgs0 = mesh0.getBoundaryGroups()
fgs1 = mesh1.getBoundaryGroups()
for fg1 in fgs1:
if fg1.id == eid:
ewall = fg1.site
bArea = geomFields.area[ewall].asNumPyArray().copy()
bpflux = electricFields.potential_flux[ewall].asNumPyArray()
for fg in fgs0:
if fg.id==id:
faceCells = mesh.getFaceCells(fg.site)
nFaces = fg.site.getCount()
forceY = vol.newSizedClone(nFaces)
forceYa = forceY.asNumPyArray()
xf = geomFields.coordinate[fg.site].asNumPyArray()
pot_top=sPot
pot_bot=0.0
bSurface = -3.75e-6
perm=8.8542e-12
for i in range(0,nFaces):
c0 = faceCells(i,0)
gap = deflection[c0,1]-bSurface
dpot = (pot_top-pot_bot)/gap
# magBArea = math.sqrt(bArea[i][0]*bArea[i][0]+bArea[i][1]*bArea[i][1]+bArea[i][2]*bArea[i][2])
# dpot = bpflux[i]/magBArea
sigmat=-perm*dpot
felec=-(sigmat*sigmat)/(2.*perm)
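                    # Electrostatic pressure on a conductor: P = sigma^2/(2*eps0),
                    # the standard parallel-plate result; the sign makes the
                    # force attractive (pulling the beam toward the electrode).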
forceYa[i]=felec
# print 'force %f %f %f %e %e %e' % (xf[i,0],xf[i,1],forceYa[i],bpflux[i],dpot,magBArea)
fy[fg.site] = forceY
return fy
# change as needed
# map between fvm, tecplot, and xdmf types
etype = {
'tri' : 1,
'quad' : 2,
'tetra' : 3,
'hexa' : 4
}
tectype = {
'tri' : 'FETRIANGLE',
'quad' : 'FEQUADRILATERAL',
'tetra' : 'FETETRAHEDRON',
'hexa' : 'FEBRICK'
}
def dumpTecplotFile(nmesh, meshes, geomFields, mtype):
#cell sites
cellSites = []
for n in range(0,nmesh):
cellSites.append( meshes[n].getCells() )
# print "cellSites[", n, "].getCount = ", cellSites[n].getCount()
#face sites
faceSites = []
for n in range(0,nmesh):
faceSites.append( meshes[n].getFaces() )
#node sites
nodeSites = []
for n in range(0,nmesh):
nodeSites.append( meshes[n].getNodes() )
#get connectivity (faceCells)
faceCells = []
for n in range(0,nmesh):
faceCells.append( meshes[n].getConnectivity( faceSites[n], cellSites[n] ) )
#get connectivity ( cellNodes )
cellNodes = []
for n in range(0,nmesh):
cellNodes.append( meshes[n].getCellNodes() )
#get Volume as array
volumes = []
for n in range(0,nmesh):
volumes.append( geomFields.volume[cellSites[n]].asNumPyArray() )
cellCentroids =[]
for n in range(0,nmesh):
cellCentroids.append( geomFields.coordinate[cellSites[n]].asNumPyArray() )
# defFields = []
# for n in range(0,nmesh):
# defFields.append( electricFields.potential[cellSites[n]].asNumPyArray() )
# tractionXFields = []
# for n in range(0,nmesh):
# tractionXFields.append( structureFields.tractionX[cellSites[n]].asNumPyArray() )
coords = []
for n in range(0,nmesh):
coords.append( geomFields.coordinate[nodeSites[n]].asNumPyArray() )
# print "shape( coords[", n, "] ) = ", shape( coords[n] )
f = open("tecplot_dbeam.dat","w")
f.write("Title = \" tecplot file for 2D Cavity problem \" \n")
f.write("variables = \"x\", \"y\", \"z\", \"cellCentroidY\" \n")
for n in range(0,nmesh):
title_name = "nmesh%s" % n
ncell = cellSites[n].getSelfCount()
nnode = nodeSites[n].getCount()
f.write("Zone T = \"%s\" N = %s E = %s DATAPACKING = BLOCK, VARLOCATION = ([4]=CELLCENTERED), ZONETYPE=%s\n" %
(title_name, nodeSites[n].getCount(), ncell, tectype[mtype]))
#write x
for i in range(0,nnode):
f.write(str(coords[n][i][0])+" ")
if ( i % 5 == 4 ):
f.write("\n")
f.write("\n")
#write y
for i in range(0,nnode):
f.write(str(coords[n][i][1])+" ")
if ( i % 5 == 4 ):
f.write("\n")
f.write("\n")
#write z
for i in range(0,nnode):
f.write(str(coords[n][i][2])+" ")
if ( i % 5 == 4 ):
f.write("\n")
f.write("\n")
# #write defX
# for i in range(0,ncell):
# f.write(str(defFields[n][i]) + " ")
# if ( i % 5 == 4 ):
# f.write("\n")
# f.write("\n")
# #write defY
# for i in range(0,ncell):
# f.write(str(defFields[n][i][1]) + " ")
# if ( i % 5 == 4 ):
# f.write("\n")
# f.write("\n")
# #write sigmaXX
# for i in range(0,ncell):
# f.write(str(tractionXFields[n][i][0]) + " ")
# if ( i % 5 == 4 ):
# f.write("\n")
# f.write("\n")
# #write sigmaXY
# for i in range(0,ncell):
# f.write(str(tractionXFields[n][i][1]) + " ")
# if ( i % 5 == 4 ):
# f.write("\n")
# f.write("\n")
# #write sigmaYY
# for i in range(0,ncell):
# f.write(str(tractionXFields[n][i][2]) + " ")
# if ( i % 5 == 4 ):
# f.write("\n")
# f.write("\n")
#write velX
for i in range(0,ncell):
f.write( str(cellCentroids[n][i][1]) + " ")
if ( i % 5 == 4 ):
f.write("\n")
f.write("\n")
#connectivity
for i in range(0,ncell):
nnodes_per_cell = cellNodes[n].getCount(i)
for node in range(0,nnodes_per_cell):
f.write( str(cellNodes[n](i,node)+1) + " ")
f.write("\n")
f.write("\n")
f.close()
parser = OptionParser()
parser.set_defaults(type='quad')
parser.add_option("--type", help="'quad'[default], 'tri', 'hexa', or 'tetra'")
parser.add_option("--xdmf", action='store_true', help="Dump data in xdmf")
parser.add_option("--time","-t",action='store_true',help="Print timing information.")
(options, args) = parser.parse_args()
reader0 = FluentCase(fileBase0+".cas")
reader1 = FluentCase(fileBase1+".cas")
#import debug
reader0.read();
reader1.read();
meshes0 = reader0.getMeshList()
meshes1 = reader1.getMeshList()
#for mesh in meshes:
# mesh.getCells().clearGatherScatterMaps()
mesh0 = meshes0[0]
mesh1 = meshes1[0]
nmesh = 2
mesh0.findCommonNodes(mesh1)
meshes = []
meshes.append(mesh0)
meshes.append(mesh1)
import time
t0 = time.time()
geomFields = models.GeomFields('geom')
metricsCalculator = models.MeshMetricsCalculatorA(geomFields,meshes)
metricsCalculator.init()
nodes0 = mesh0.getNodes()
nodes1 = mesh1.getNodes()
rho = 7854.0
E = 2.0*math.pow(10,11)
nu = 0.31
if fvm.atype == 'tangent':
metricsCalculator.setTangentCoords(0,7,1)
flowFields = models.FlowFields('flow')
structureFields = models.StructureFields('structure')
electricFields = models.ElectricFields('elec')
smodel = models.StructureModelA(geomFields,structureFields,meshes0)
dmodel = models.StructureDeformationModelA(geomFields,structureFields,meshes0)
movingMeshModel = models.MovingMeshModelA(meshes1,geomFields,flowFields)
emodel = models.ElectricModelA(geomFields,electricFields,meshes1)
movingMeshModel.init()
bcMap = smodel.getBCMap()
#left (mesh0)
bcID = 6
if bcID in bcMap:
bc = smodel.getBCMap()[bcID]
bc.bcType = 'SpecifiedDeformation'
bc['specifiedXDeformation']=0
bc['specifiedYDeformation']=0
bc['specifiedZDeformation']=0
#top (mesh0)
bcID = 5
if bcID in bcMap:
bc = smodel.getBCMap()[bcID]
bc.bcType = 'SpecifiedTraction'
bc['specifiedXXTraction']=0
bc['specifiedXYTraction']=0
bc['specifiedXZTraction']=0
bc['specifiedYXTraction']=0
bc['specifiedYYTraction']=0
bc['specifiedYZTraction']=0
bc['specifiedZXTraction']=0
bc['specifiedZYTraction']=0
bc['specifiedZZTraction']=0
#right (mesh0)
bcID = 4
if bcID in bcMap:
bc = smodel.getBCMap()[bcID]
bc.bcType = 'SpecifiedDeformation'
bc['specifiedXDeformation']=0
bc['specifiedYDeformation']=0
bc['specifiedZDeformation']=0
#bottom (mesh0)
#bcID = 3
#if bcID in bcMap:
# bc = smodel.getBCMap()[bcID]
# felec=createBVFields(geomFields,meshes0,bcID,structureFields,electricFields)
# bc.bcType = 'SpecifiedDistForce'
# bc['specifiedXDistForce']=0
# bc['specifiedYDistForce']=felec
# bc['specifiedZDistForce']=0
f2 = open("displacementOptions.dat","w")
for mesh in meshes1:
nodes = mesh.getNodes()
displacementOptions = geomFields.displacementOptions[nodes].asNumPyArray()
nodeCoordinate = geomFields.coordinate[nodes].asNumPyArray()
nodemark = numpy.zeros(nodes.getCount())
fgs = mesh.getAllFaceGroups()
for fg in fgs:
if fg.id!=0:
if fg.id == 5:
fgsite = fg.site
fgn = mesh.getFaceNodes(fgsite)
nfaces =fgsite.getCount()
for nf in range(0,nfaces):
nnodes = fgn.getCount(nf)
for nnode in range(0,nnodes):
nid = fgn(nf,nnode)
if nodemark[nid] == 0:
nodemark[nid] = 1
displacementOptions[nid] = 1
f2.write('%i\t' % fg.id)
f2.write('%i\t' % displacementOptions[nid])
f2.write('%f\t' % nodeCoordinate[nid,0])
f2.write('%f\t' % nodeCoordinate[nid,1])
f2.write('%f\n' % nodeCoordinate[nid,2])
for fg in fgs:
if fg.id!=0:
if fg.id == 3:
fgsite = fg.site
fgn = mesh.getFaceNodes(fgsite)
nfaces =fgsite.getCount()
for nf in range(0,nfaces):
nnodes = fgn.getCount(nf)
for nnode in range(0,nnodes):
nid = fgn(nf,nnode)
if nodemark[nid] == 0:
nodemark[nid] = 1
displacementOptions[nid] = 0
f2.write('%i\t' % fg.id)
f2.write('%i\t' % displacementOptions[nid])
f2.write('%f\t' % nodeCoordinate[nid,0])
f2.write('%f\t' % nodeCoordinate[nid,1])
f2.write('%f\n' % nodeCoordinate[nid,2])
for fg in fgs:
if fg.id!=0:
if fg.id in (6,4):
fgsite = fg.site
fgn = mesh.getFaceNodes(fgsite)
nfaces =fgsite.getCount()
for nf in range(0,nfaces):
nnodes = fgn.getCount(nf)
for nnode in range(0,nnodes):
nid = fgn(nf,nnode)
if nodemark[nid] == 0:
nodemark[nid] = 1
displacementOptions[nid] = 2
f2.write('%i\t' % fg.id)
f2.write('%i\t' % displacementOptions[nid])
f2.write('%f\t' % nodeCoordinate[nid,0])
f2.write('%f\t' % nodeCoordinate[nid,1])
f2.write('%f\n' % nodeCoordinate[nid,2])
f2.close()
bcElecMap = emodel.getBCMap()
#top
bcID = 5
if bcID in bcElecMap:
bc = emodel.getBCMap()[bcID]
bc.bcType = 'SpecifiedPotential'
bc.setVar('specifiedPotential',sPot)
#bc.bcType = 'SpecifiedCharge'
#bc.setVar('specifiedCharge',400)
#bot
bcID = 3
if bcID in bcElecMap:
bc = emodel.getBCMap()[bcID]
bc.bcType = 'SpecifiedPotential'
bc.setVar('specifiedPotential',0)
#bc.bcType = 'SpecifiedCharge'
#bc.setVar('specifiedCharge',300)
#left
bcID = 6
if bcID in bcElecMap:
bc = emodel.getBCMap()[bcID]
# bc.bcType = 'SpecifiedPotential'
# bc.setVar('specifiedPotential',350)
bc.bcType = 'SpecifiedPotentialFlux'
bc.setVar('specifiedPotentialFlux',0)
#bc.bcType = 'SpecifiedCharge'
#bc.setVar('specifiedCharge',350)
#right
bcID = 4
if bcID in bcElecMap:
bc = emodel.getBCMap()[bcID]
# bc.bcType = 'SpecifiedPotential'
# bc.setVar('specifiedPotential',350)
bc.bcType = 'SpecifiedPotentialFlux'
bc.setVar('specifiedPotentialFlux',0)
#bc.bcType = 'SpecifiedCharge'
#bc.setVar('specifiedCharge',350)
vcMap = smodel.getVCMap()
for i,vc in vcMap.iteritems():
vc['density'] = rho
vc['eta'] = E/(2.*(1+nu))
vc['eta1'] = nu*E/((1+nu)*(1-1.0*nu))
pc = fvmbaseExt.AMG()
pc.verbosity=0
defSolver = fvmbaseExt.BCGStab()
defSolver.preconditioner = pc
defSolver.relativeTolerance = 1e-9
defSolver.absoluteTolerance = 1.e-30
defSolver.nMaxIterations = 6000
defSolver.verbosity=1
elecSolver = fvmbaseExt.AMG()
elecSolver.relativeTolerance = 1e-3
elecSolver.nMaxIterations = 1000
elecSolver.maxCoarseLevels=20
elecSolver.verbosity=1
soptions = smodel.getOptions()
soptions.deformationLinearSolver = defSolver
soptions.deformationTolerance=1.0e-3
soptions.setVar("deformationURF",1.0)
soptions.printNormalizedResiduals=True
soptions.transient=True
mmmoptions = movingMeshModel.getOptions()
mmmoptions.nNodeDisplacementSweeps = 500000
mmmoptions.absTolerance = 1e-14
mmmoptions.relativeTolerance = 1e-9
mmmoptions.setVar('underrelaxation',0.4)
eoptions = emodel.getOptions()
eoptions.electrostaticsLinearSolver = elecSolver
eoptions.electrostaticsTolerance = 0.5e-5
eoptions.electrostatics = 1
eoptions.chargetransport = 0
eoptions.tunneling = 0
eoptions.ibm = 0
eoptions.transient_enable = False
eoptions.printNormalizedResiduals = True
metricsCalculator.calculateBoundaryNodeNormal()
numTimeSteps = 2500
period = 8.8043e-6
timeStep = period/1000
globalTime=0.
# set the timesteps
soptions.setVar('timeStep',timeStep)
"""
if fvm.atype=='tangent':
vcMap = fmodel.getVCMap()
for i,vc in vcMap.iteritems():
print vc.getVar('viscosity')
vc.setVar('viscosity',(1.7894e-5,1))
"""
for mesh in meshes0:
fgs = mesh.getBoundaryGroups()
for fg in fgs:
bc = smodel.getBCMap()[fg.id]
print '%i %s' %(fg.id,bc.bcType)
emodel.printBCs()
#import ddd
print '\n no of cells in mesh0 = %i' % (mesh0.getCells().getSelfCount())
print '\n no of cells in mesh1 = %i' % (mesh1.getCells().getSelfCount())
smodel.init()
dmodel.init()
emodel.init()
#set up permittivity
cells1 = mesh1.getCells()
perm = electricFields.dielectric_constant[cells1].asNumPyArray()
perm[:] = 1.0
#smodel.advance(numIterations)
#dmodel.calculateNodeDisplacement()
#dmodel.deformStructure()
#setDirichletCommonDisplacement(dmodel,geomFields,meshes,structureFields)
#movingMeshModel.advance()
#metricsCalculator.recalculate()
#emodel.advance(numIterations)
eadvance(emodel,numEIterations)
#advance(smodel,dmodel,movingMeshModel,emodel,geomFields,
# structureFields,electricFields,meshes0,meshes,numIterations)
advanceUnsteady(smodel,dmodel,movingMeshModel,emodel,geomFields,
structureFields,electricFields,meshes0,meshes,numTimeSteps,globalTime)
#cells0 = mesh0.getCells()
#nCells0 = cells0.getCount()
#xc = geomFields.coordinate[cells0].asNumPyArray()
#print '\n The mid point is %e %e' %(xc[500,0],xc[500,1])
#smodel.getTractionX(mesh0)
#smodel.getTractionX(mesh1)
#mnumber = 0
#for mesh in meshes:
# mnumber = mnumber + 1
# nodes = mesh.getNodes()
# common = dmodel.getCommon(nodes).asNumPyArray()
# length = len(common)
# xf = geomFields.coordinate[nodes].asNumPyArray()
# for i in range(0,length):
# id = common[i]
# print '\n Mesh %i Node %i, x = %f, y = %f\n' %(mnumber,id,xf[id][0],xf[id][1])
fileName = fileBase1 + "dirichletNodeDisplacement.txt"
file = open(fileName,"w")
xf = geomFields.coordinate[nodes1].asNumPyArray()
nnodes1 = nodes1.getCount()
doptions = geomFields.displacementOptions[nodes1].asNumPyArray()
dvar = geomFields.dirichletNodeDisplacement[nodes1].asNumPyArray()
for i in range(0,nnodes1):
x = xf[i][0]
y = xf[i][1]
file.write(" %e " % x)
file.write(" %e " % y)
file.write(" %i " % doptions[i])
file.write(" %e " % dvar[i][0])
file.write(" %e " % dvar[i][1])
file.write("\n")
file.close()
t1 = time.time()
print '\nsolution time = %f' % (t1-t0)
deformation = structureFields.deformation[mesh0.getCells()].asNumPyArray()
fileName = fileBase0 + "deformation.txt"
file = open(fileName,"w")
file.write("deformation\t\n")
fgs = mesh0.getAllFaceGroups()
for fg in fgs:
nFaces = fg.site.getCount()
xf = geomFields.coordinate[fg.site].asNumPyArray()
if fg.id==3:
faceCells = mesh0.getFaceCells(fg.site)
for i in range(0,nFaces):
x = xf[i][0]
y = xf[i][1]
def0 = deformation[faceCells(i,0)][0]
def1 = deformation[faceCells(i,0)][1]
def2 = deformation[faceCells(i,0)][2]
file.write(" %e " % x)
file.write(" %e " % y)
file.write(" %e " % def0)
file.write(" %e " % def1)
file.write(" %e " % def2)
file.write("\n")
file.close()
dumpTecplotFile( nmesh, meshes, geomFields, options.type)
#fileName = fileBase0 + "volume0.txt"
#file = open(fileName,"w")
#cells0 = mesh0.getCells()
#xc = geomFields.coordinate[cells0].asNumPyArray()
#nCells0 = cells0.getCount()
#vol0 = structureFields.volume0[cells0].asNumPyArray()
#for i in range(0,nCells0):
# x = xc[i][0]
# y = xc[i][1]
# file.write(" %e " % x)
# file.write(" %e " % y)
# file.write(" %e " % vol0[i])
# file.write("\n")
#file.close()
print '\n run complete '
|
py | 1a50b3f588058de6daf384decb2638c09ca9b846 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TFGAN utilities for loss functions that accept GANModel namedtuples."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.contrib.gan.python.losses.python import tuple_losses_impl
from tensorflow.contrib.gan.python.losses.python.tuple_losses_impl import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
__all__ = tuple_losses_impl.__all__
remove_undocumented(__name__, __all__)
|
py | 1a50b4911ae6254ebe9efa031ddfa5060365dc14 | import sqlite3
import csv
import os
os.chdir(r'C:\OLGA\Python CS50\import_csv_db')  # raw string so backslashes are not read as escapes
#currentDir = os.getcwd()
#currentFileCSV = currentDir +"\\" + csvFilename
#print(currentFileCSV)
conn = sqlite3.connect('db.sqlite3')
c = conn.cursor()
c.execute("delete from auth_user_customuser")
c.execute("delete from api_title")
c.execute("delete from api_review")
c.execute("delete from api_title_genre")
c.execute("delete from api_genre")
c.execute("delete from api_comment")
c.execute("delete from api_category")
csvFilename = 'users.csv'
with open(csvFilename, "r", encoding='utf-8') as file:
reader = csv.DictReader(file)
for row in reader:
username = row['username']
email = row['email']
role = row['role']
desc = row['description']
first_name = row['first_name']
last_name = row['last_name']
c.execute("INSERT INTO auth_user_customuser(username, email, role, bio, first_name, last_name, password, is_superuser, is_staff, is_active, date_joined) \
VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", (username, email, role, desc, first_name, last_name, '111', 0, 0, 1, 0))
csvFilename = 'titles.csv'
with open(csvFilename, "r", encoding='utf-8') as file:
reader = csv.DictReader(file)
for row in reader:
name = row['name']
year = row['year']
cat = row['category']
c.execute("INSERT INTO api_title(name, year, category_id) \
VALUES(?, ?, ?)", (name, year, cat))
csvFilename = 'review.csv'
with open(csvFilename, "r", encoding='utf-8') as file:
reader = csv.DictReader(file)
for row in reader:
title = row['title_id']
text = row['text']
author = row['author']
score = row['score']
pub_date = row['pub_date']
c.execute("INSERT INTO api_review(title_id, text, author_id, score, pub_date) \
VALUES(?, ?, ?, ?, ?)", (title, text, author, score, pub_date))
csvFilename = 'genre_title.csv'
with open(csvFilename, "r") as file:
reader = csv.DictReader(file)
for row in reader:
title = row['title_id']
text = row['genre_id']
c.execute("INSERT INTO api_title_genre(title_id, genre_id) \
VALUES(?, ?)", (title, text))
csvFilename = 'genre.csv'
with open(csvFilename, "r", encoding='utf-8') as file:
reader = csv.DictReader(file)
for row in reader:
name = row['name']
slug = row['slug']
c.execute("INSERT INTO api_genre(name, slug) \
VALUES(?, ?)", (name, slug))
csvFilename = 'comments.csv'
with open(csvFilename, "r", encoding='utf-8') as file:
reader = csv.DictReader(file)
for row in reader:
review_id = row['review_id']
text = row['text']
author = row['author']
pub_date = row['pub_date']
c.execute("INSERT INTO api_comment(review_id, text, author_id, pub_date) \
VALUES(?, ?, ?, ?)", (review_id, text, author, pub_date))
csvFilename = 'category.csv'
with open(csvFilename, "r", encoding='utf-8') as file:
reader = csv.DictReader(file)
for row in reader:
name = row['name']
slug = row['slug']
c.execute("INSERT INTO api_category(name, slug) \
VALUES(?, ?)", (name, slug))
conn.commit()
conn.close() |
py | 1a50b4e9902625d00e9e72b5426e0fbc1c1f36b0 | #Finds the shortest lenght between a given point and a Bézier curve
def project (point, control_points):
#Binary search
return distance
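# Example (hypothetical control points of a quadratic curve):
#   project((0.0, 0.0), [(0.0, 1.0), (1.0, 1.0), (1.0, 0.0)])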
|
py | 1a50b56b76763b8a530065f0294120b9d427d1f9 | #!/usr/bin/env python
import sys, logging, os
from optparse import OptionParser
import tftpy
def main():
usage=""
parser = OptionParser(usage=usage)
parser.add_option('-H',
'--host',
help='remote host or ip address')
parser.add_option('-p',
'--port',
help='remote port to use (default: 69)',
default=69)
parser.add_option('-f',
'--filename',
help='filename to fetch (deprecated, use download)')
parser.add_option('-D',
'--download',
help='filename to download')
parser.add_option('-u',
'--upload',
help='filename to upload')
parser.add_option('-b',
'--blksize',
help='udp packet size to use (default: 512)')
parser.add_option('-o',
'--output',
help='output file, - for stdout (default: same as download)')
parser.add_option('-i',
'--input',
help='input file, - for stdin (default: same as upload)')
parser.add_option('-d',
'--debug',
action='store_true',
default=False,
help='upgrade logging from info to debug')
parser.add_option('-q',
'--quiet',
action='store_true',
default=False,
help="downgrade logging from info to warning")
parser.add_option('-t',
'--tsize',
action='store_true',
default=False,
help="ask client to send tsize option in download")
options, args = parser.parse_args()
# Handle legacy --filename argument.
if options.filename:
options.download = options.filename
if not options.host or (not options.download and not options.upload):
sys.stderr.write("Both the --host and --filename options "
"are required.\n")
parser.print_help()
sys.exit(1)
if options.debug and options.quiet:
sys.stderr.write("The --debug and --quiet options are "
"mutually exclusive.\n")
parser.print_help()
sys.exit(1)
class Progress(object):
def __init__(self, out):
self.progress = 0
self.out = out
def progresshook(self, pkt):
if isinstance(pkt, tftpy.TftpPacketDAT):
self.progress += len(pkt.data)
self.out("Transferred %d bytes" % self.progress)
elif isinstance(pkt, tftpy.TftpPacketOACK):
self.out("Received OACK, options are: %s" % pkt.options)
if options.debug:
tftpy.setLogLevel(logging.DEBUG)
elif options.quiet:
tftpy.setLogLevel(logging.WARNING)
else:
tftpy.setLogLevel(logging.INFO)
progresshook = Progress(tftpy.log.info).progresshook
tftp_options = {}
if options.blksize:
tftp_options['blksize'] = int(options.blksize)
if options.tsize:
tftp_options['tsize'] = 0
tclient = tftpy.TftpClient(options.host,
int(options.port),
tftp_options)
try:
if options.download:
if not options.output:
options.output = os.path.basename(options.download)
tclient.download(options.download,
options.output,
progresshook)
elif options.upload:
if not options.input:
options.input = os.path.basename(options.upload)
tclient.upload(options.upload,
options.input,
progresshook)
except tftpy.TftpException, err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
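# Example invocation (host, file and script names are hypothetical):
#   python tftp_client.py --host 192.168.0.10 --download remote.bin --output local.bin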
|
py | 1a50b695c255c7fc35ce27b278d711dea20741f7 | # For example if user wants to input two equations like
# x1 + 2x2 = 3
# 2x1 + x2 = 3
# it will return a list like [[1,2,3],[2,1,3]]
def get_coefficients_as_list(no_of_unknowns):
all_coefficients = []
for i in range(1,no_of_unknowns+1):
coefficient = []
print("Enter the coefficients for equation ",i)
for j in range(1,no_of_unknowns+1):
num = int(input("Enter the coefficient of x"+str(j)+":- "))
coefficient.append(num)
coefficient.append(int(input("Enter the RHS constant :- ")))
all_coefficients.append(coefficient)
return all_coefficients |
py | 1a50b79d97ce3fac41c9ac2e2faa13438b5ac8d6 | from dcmrtstruct2nii.adapters.convert.rtstructcontour2mask import DcmPatientCoords2Mask
from dcmrtstruct2nii.adapters.convert.filenameconverter import FilenameConverter
from dcmrtstruct2nii.adapters.input.contours.rtstructinputadapter import RtStructInputAdapter
from dcmrtstruct2nii.adapters.input.image.dcminputadapter import DcmInputAdapter
import os.path
from dcmrtstruct2nii.adapters.output.niioutputadapter import NiiOutputAdapter
from dcmrtstruct2nii.exceptions import PathDoesNotExistException, ContourOutOfBoundsException
import logging
def list_rt_structs(rtstruct_file):
"""
    Lists the structures in a DICOM RT Struct file by name.
:param rtstruct_file: Path to the rtstruct file
:return: A list of names, if any structures are found
"""
if not os.path.exists(rtstruct_file):
raise PathDoesNotExistException(f'rtstruct path does not exist: {rtstruct_file}')
rtreader = RtStructInputAdapter()
rtstructs = rtreader.ingest(rtstruct_file, True)
return [struct['name'] for struct in rtstructs]
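# Minimal usage sketch (the path is hypothetical):
#   for name in list_rt_structs('/data/patient1/rtstruct.dcm'):
#       print(name)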
def dcmrtstruct2nii(rtstruct_file, dicom_file, output_path, structures=None, gzip=True, mask_background_value=0, mask_foreground_value=255, convert_original_dicom=True, series_id=None): # noqa: C901 E501
"""
    Converts a DICOM and DICOM RT Struct file to nii
:param rtstruct_file: Path to the rtstruct file
:param dicom_file: Path to the dicom file
:param output_path: Output path where the masks are written to
:param structures: Optional, list of structures to convert
:param gzip: Optional, output .nii.gz if set to True, default: True
:param series_id: Optional, the Series Instance UID. Use to specify the ID corresponding to the image if there are
dicoms from more than one series in `dicom_file` folder
:raise InvalidFileFormatException: Raised when an invalid file format is given.
:raise PathDoesNotExistException: Raised when the given path does not exist.
:raise UnsupportedTypeException: Raised when conversion is not supported.
:raise ValueError: Raised when mask_background_value or mask_foreground_value is invalid.
"""
output_path = os.path.join(output_path, '') # make sure trailing slash is there
if not os.path.exists(rtstruct_file):
raise PathDoesNotExistException(f'rtstruct path does not exist: {rtstruct_file}')
if not os.path.exists(dicom_file):
        raise PathDoesNotExistException(f'DICOM path does not exist: {dicom_file}')
if mask_background_value < 0 or mask_background_value > 255:
raise ValueError(f'Invalid value for mask_background_value: {mask_background_value}, must be between 0 and 255')
if mask_foreground_value < 0 or mask_foreground_value > 255:
raise ValueError(f'Invalid value for mask_foreground_value: {mask_foreground_value}, must be between 0 and 255')
if structures is None:
structures = []
os.makedirs(output_path, exist_ok=True)
filename_converter = FilenameConverter()
rtreader = RtStructInputAdapter()
rtstructs = rtreader.ingest(rtstruct_file)
dicom_image = DcmInputAdapter().ingest(dicom_file, series_id=series_id)
dcm_patient_coords_to_mask = DcmPatientCoords2Mask()
nii_output_adapter = NiiOutputAdapter()
for rtstruct in rtstructs:
if len(structures) == 0 or rtstruct['name'] in structures:
if 'sequence' not in rtstruct:
logging.info('Skipping mask {} no shape/polygon found'.format(rtstruct['name']))
continue
logging.info('Working on mask {}'.format(rtstruct['name']))
try:
mask = dcm_patient_coords_to_mask.convert(rtstruct['sequence'], dicom_image, mask_background_value, mask_foreground_value)
except ContourOutOfBoundsException:
logging.warning(f'Structure {rtstruct["name"]} is out of bounds, ignoring contour!')
continue
mask.CopyInformation(dicom_image)
mask_filename = filename_converter.convert(f'mask_{rtstruct["name"]}')
nii_output_adapter.write(mask, f'{output_path}{mask_filename}', gzip)
if convert_original_dicom:
logging.info('Converting original DICOM to nii')
nii_output_adapter.write(dicom_image, f'{output_path}image', gzip)
logging.info('Success!')
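# Minimal usage sketch (paths and structure names are hypothetical):
#   dcmrtstruct2nii('/data/rtstruct.dcm', '/data/dicom_series', '/data/output',
#                   structures=['GTV'], gzip=True)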
|
py | 1a50b7e5cd9c7cb40eaba224edf85140685fe4fb | # -*- coding:utf-8 -*-
from django.views.generic import View
from django.shortcuts import render
from django.http import JsonResponse
class CamView(View):
    def get(self, request):
        # render() takes the request object as its first argument
        return render(request, 'cam.html')
    def post(self, request):
        # JsonResponse requires a data payload; an empty dict keeps this valid
        return JsonResponse({}) |
py | 1a50ba9536a34af049c8482ec1e34223a7e61f25 | from ConSSL.transforms.self_supervised import Patchify, RandomTranslateWithReflect
from ConSSL.utils import _TORCHVISION_AVAILABLE
from ConSSL.utils.warnings import warn_missing_pkg
if _TORCHVISION_AVAILABLE:
from torchvision import transforms
else: # pragma: no cover
warn_missing_pkg('torchvision')
class CPCTrainTransformsCIFAR10:
"""
Transforms used for CPC:
Transforms::
random_flip
img_jitter
col_jitter
rnd_gray
transforms.ToTensor()
normalize
Patchify(patch_size=patch_size, overlap_size=patch_size // 2)
Example::
# in a regular dataset
CIFAR10(..., transforms=CPCTrainTransformsCIFAR10())
# in a DataModule
module = CIFAR10DataModule(PATH)
train_loader = module.train_dataloader(batch_size=32, transforms=CPCTrainTransformsCIFAR10())
"""
def __init__(self, patch_size=8, overlap=4):
"""
Args:
patch_size: size of patches when cutting up the image into overlapping patches
overlap: how much to overlap patches
"""
if not _TORCHVISION_AVAILABLE: # pragma: no cover
raise ModuleNotFoundError('You want to use `transforms` from `torchvision` which is not installed yet.')
self.patch_size = patch_size
self.overlap = overlap
self.flip_lr = transforms.RandomHorizontalFlip(p=0.5)
normalize = transforms.Normalize(
mean=[x / 255.0 for x in [125.3, 123.0, 113.9]],
std=[x / 255.0 for x in [63.0, 62.1, 66.7]],
)
col_jitter = transforms.RandomApply([transforms.ColorJitter(0.4, 0.4, 0.4, 0.2)], p=0.8)
img_jitter = transforms.RandomApply([RandomTranslateWithReflect(4)], p=0.8)
rnd_gray = transforms.RandomGrayscale(p=0.25)
self.transforms = transforms.Compose([
img_jitter,
col_jitter,
rnd_gray,
transforms.ToTensor(),
normalize,
Patchify(patch_size=patch_size, overlap_size=overlap),
])
def __call__(self, inp):
inp = self.flip_lr(inp)
out1 = self.transforms(inp)
return out1
class CPCEvalTransformsCIFAR10:
"""
Transforms used for CPC:
Transforms::
random_flip
transforms.ToTensor()
normalize
Patchify(patch_size=patch_size, overlap_size=overlap)
Example::
# in a regular dataset
CIFAR10(..., transforms=CPCEvalTransformsCIFAR10())
# in a DataModule
module = CIFAR10DataModule(PATH)
train_loader = module.train_dataloader(batch_size=32, transforms=CPCEvalTransformsCIFAR10())
"""
def __init__(self, patch_size: int = 8, overlap: int = 4):
"""
Args:
patch_size: size of patches when cutting up the image into overlapping patches
overlap: how much to overlap patches
"""
if not _TORCHVISION_AVAILABLE: # pragma: no cover
raise ModuleNotFoundError('You want to use `transforms` from `torchvision` which is not installed yet.')
# flipping image along vertical axis
self.patch_size = patch_size
self.overlap = overlap
self.flip_lr = transforms.RandomHorizontalFlip(p=0.5)
normalize = transforms.Normalize(
mean=[x / 255.0 for x in [125.3, 123.0, 113.9]],
std=[x / 255.0 for x in [63.0, 62.1, 66.7]],
)
self.transforms = transforms.Compose([
transforms.ToTensor(),
normalize,
Patchify(patch_size=patch_size, overlap_size=overlap),
])
def __call__(self, inp):
out1 = self.transforms(inp)
return out1
class CPCTrainTransformsSTL10:
"""
Transforms used for CPC:
Transforms::
random_flip
img_jitter
col_jitter
rnd_gray
transforms.ToTensor()
normalize
Patchify(patch_size=patch_size, overlap_size=patch_size // 2)
Example::
# in a regular dataset
STL10(..., transforms=CPCTrainTransformsSTL10())
# in a DataModule
module = STL10DataModule(PATH)
train_loader = module.train_dataloader(batch_size=32, transforms=CPCTrainTransformsSTL10())
"""
def __init__(self, patch_size: int = 16, overlap: int = 8):
"""
Args:
patch_size: size of patches when cutting up the image into overlapping patches
overlap: how much to overlap patches
"""
if not _TORCHVISION_AVAILABLE: # pragma: no cover
raise ModuleNotFoundError('You want to use `transforms` from `torchvision` which is not installed yet.')
# flipping image along vertical axis
self.patch_size = patch_size
self.overlap = overlap
self.flip_lr = transforms.RandomHorizontalFlip(p=0.5)
normalize = transforms.Normalize(mean=(0.43, 0.42, 0.39), std=(0.27, 0.26, 0.27))
# image augmentation functions
col_jitter = transforms.RandomApply([transforms.ColorJitter(0.4, 0.4, 0.4, 0.2)], p=0.8)
rnd_gray = transforms.RandomGrayscale(p=0.25)
rand_crop = transforms.RandomResizedCrop(64, scale=(0.3, 1.0), ratio=(0.7, 1.4), interpolation=3)
self.transforms = transforms.Compose([
rand_crop, col_jitter, rnd_gray,
transforms.ToTensor(), normalize,
Patchify(patch_size=patch_size, overlap_size=overlap)
])
def __call__(self, inp):
inp = self.flip_lr(inp)
out1 = self.transforms(inp)
return out1
class CPCEvalTransformsSTL10:
"""
Transforms used for CPC:
Transforms::
random_flip
transforms.ToTensor()
normalize
Patchify(patch_size=patch_size, overlap_size=patch_size // 2)
Example::
# in a regular dataset
STL10(..., transforms=CPCEvalTransformsSTL10())
# in a DataModule
module = STL10DataModule(PATH)
train_loader = module.train_dataloader(batch_size=32, transforms=CPCEvalTransformsSTL10())
"""
def __init__(self, patch_size: int = 16, overlap: int = 8):
"""
Args:
patch_size: size of patches when cutting up the image into overlapping patches
overlap: how much to overlap patches
"""
if not _TORCHVISION_AVAILABLE: # pragma: no cover
raise ModuleNotFoundError('You want to use `transforms` from `torchvision` which is not installed yet.')
# flipping image along vertical axis
self.patch_size = patch_size
self.overlap = overlap
self.flip_lr = transforms.RandomHorizontalFlip(p=0.5)
normalize = transforms.Normalize(mean=(0.43, 0.42, 0.39), std=(0.27, 0.26, 0.27))
self.transforms = transforms.Compose([
transforms.Resize(70, interpolation=3),
transforms.CenterCrop(64),
transforms.ToTensor(), normalize,
Patchify(patch_size=patch_size, overlap_size=overlap)
])
def __call__(self, inp):
out1 = self.transforms(inp)
return out1
class CPCTrainTransformsImageNet128:
"""
Transforms used for CPC:
Transforms::
random_flip
transforms.ToTensor()
normalize
Patchify(patch_size=patch_size, overlap_size=patch_size // 2)
Example::
# in a regular dataset
Imagenet(..., transforms=CPCTrainTransformsImageNet128())
# in a DataModule
module = ImagenetDataModule(PATH)
train_loader = module.train_dataloader(batch_size=32, transforms=CPCTrainTransformsImageNet128())
"""
def __init__(self, patch_size: int = 32, overlap: int = 16):
"""
Args:
patch_size: size of patches when cutting up the image into overlapping patches
overlap: how much to overlap patches
"""
if not _TORCHVISION_AVAILABLE: # pragma: no cover
raise ModuleNotFoundError('You want to use `transforms` from `torchvision` which is not installed yet.')
# image augmentation functions
self.patch_size = patch_size
self.overlap = overlap
self.flip_lr = transforms.RandomHorizontalFlip(p=0.5)
rand_crop = transforms.RandomResizedCrop(128, scale=(0.3, 1.0), ratio=(0.7, 1.4), interpolation=3)
col_jitter = transforms.RandomApply([transforms.ColorJitter(0.4, 0.4, 0.4, 0.1)], p=0.8)
rnd_gray = transforms.RandomGrayscale(p=0.25)
post_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
Patchify(patch_size=patch_size, overlap_size=overlap),
])
self.transforms = transforms.Compose([rand_crop, col_jitter, rnd_gray, post_transform])
def __call__(self, inp):
inp = self.flip_lr(inp)
out1 = self.transforms(inp)
return out1
class CPCEvalTransformsImageNet128:
"""
Transforms used for CPC:
Transforms::
random_flip
transforms.ToTensor()
normalize
Patchify(patch_size=patch_size, overlap_size=patch_size // 2)
Example::
# in a regular dataset
Imagenet(..., transforms=CPCEvalTransformsImageNet128())
# in a DataModule
module = ImagenetDataModule(PATH)
train_loader = module.train_dataloader(batch_size=32, transforms=CPCEvalTransformsImageNet128())
"""
def __init__(self, patch_size: int = 32, overlap: int = 16):
"""
Args:
patch_size: size of patches when cutting up the image into overlapping patches
overlap: how much to overlap patches
"""
if not _TORCHVISION_AVAILABLE: # pragma: no cover
raise ModuleNotFoundError('You want to use `transforms` from `torchvision` which is not installed yet.')
# image augmentation functions
self.patch_size = patch_size
self.overlap = overlap
self.flip_lr = transforms.RandomHorizontalFlip(p=0.5)
post_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
Patchify(patch_size=patch_size, overlap_size=overlap),
])
self.transforms = transforms.Compose([
transforms.Resize(146, interpolation=3),
transforms.CenterCrop(128), post_transform
])
def __call__(self, inp):
inp = self.flip_lr(inp)
out1 = self.transforms(inp)
return out1
|
py | 1a50bb5acf8d58c3a73e67232f05331d893b5dd6 | # Copyright 2022 Aprendizaje Profundo, All rights reserved.
#
# Licensed under the MIT License;
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/MIT
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Developed by Alvaro Mauricio Montenegro Reyes and Daniel Mauricio Montenegro Reyes
# ==================================================================================
from collections import Counter
import re
def coincidence_parser(datos,column_name='Coincidencias_iniciales', verbose = True,kind = 'ruleId'):
coincidencias = datos[column_name].values
size = coincidencias.shape[0]
#list for the dictionaries
clist = []
# counter for rules
rules = Counter()
for i, index in enumerate(datos.index):
if i % 100 == 0 and verbose: print(i,end=' ')
# extract coincidences from row i
# split the rules, according to the data structure
c = coincidencias[i]
        c = re.sub(r'Match\({', 'MatchXXXX({', c)
l = c.split('MatchXXXX')
# create a list with each dictionary obtained
clist.clear()
for k in range(1, len(l)):
dictionary = l[k]
try:
val = dict(eval(dictionary[1:-3]))
except:
val = dict(eval(dictionary[1:-2]))
clist.append(val)
# count the rules found by type of rule
rules.clear()
for j in range(len(clist)):
            # possible values: ruleId, ruleIssueType, category
rules[clist[j][kind]] += 1
# to the dataframe
for key, value in rules.items():
datos.at[index, key] = value
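# Usage sketch: assumes `datos` is a pandas DataFrame whose target column holds
# "Match({...})"-style strings (e.g. LanguageTool output), as parsed above:
#   coincidence_parser(datos, column_name='Coincidencias_iniciales', kind='ruleId')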
def spacy_column_parser(column_name, verbose=True):
    # start; note: operates on a module-level pandas DataFrame named `datos`
size = datos.shape[0]
    print('Starting! Will process', size, 'records from column', column_name)
# counter for rules
rules = Counter()
# working bucle
for i, index in enumerate(datos.index):
if i % 100 == 0 and verbose: print(i, end=' ')
# read column value in this register (index)
values = eval(datos.at[index, column_name])
# extract the rules
rules.clear()
for value in values:
rules[value] += 1
# to the dataframe
for key, value in rules.items():
datos.at[index, key] = value
    print('\nDone! Processed', i+1, 'records from column', column_name)
def spacy_parser(column_names=['Upos', 'Dep', 'Ner_type'], verbose=True):
    for column_name in column_names:
        spacy_column_parser(column_name, verbose=verbose) |
py | 1a50bbfd0dbada9c46ceb2247b1f2e851faf3f59 | #!/usr/bin/env python2
# test case courtesy of William Schaub ([email protected])
import os, sys
from socket import *
UNIXSOCKET = sys.argv[1]
server = socket(AF_UNIX,SOCK_STREAM)
server.connect(UNIXSOCKET)
while 1:
data = sys.stdin.readline()
if not data: break
server.sendall(data)
server.close()
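# Example (hypothetical script and socket names):
#   echo "hello" | python2 unix_client.py /tmp/echo.sock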
|
py | 1a50bce59613d1ff6e10b7e98a543b40d2495390 | # -*- coding: utf-8 -*-
import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import unittest
import subprocess
from pattern import de
try:
PATH = os.path.dirname(os.path.realpath(__file__))
except:
PATH = ""
#---------------------------------------------------------------------------------------------------
class TestInflection(unittest.TestCase):
def setUp(self):
pass
def test_gender(self):
# Assert der Hund => MASCULINE
# Assert die Studentin => FEMININE
# Assert das Auto => NEUTRAL
self.assertEqual(de.gender("Hund"), de.MASCULINE)
self.assertEqual(de.gender("Studentin"), de.FEMININE)
self.assertEqual(de.gender("Auto"), de.NEUTRAL)
def test_pluralize(self):
# Assert the accuracy of the pluralization algorithm.
from pattern.db import Datasheet
i, n = 0, 0
for tag, sg, pl in Datasheet.load(os.path.join(PATH, "corpora", "wordforms-de-celex.csv")):
if tag == "n":
if de.pluralize(sg) == pl:
i +=1
n += 1
self.assertTrue(float(i) / n > 0.69)
print "pattern.de.pluralize()"
def test_singularize(self):
# Assert the accuracy of the singularization algorithm.
from pattern.db import Datasheet
i, n = 0, 0
for tag, sg, pl in Datasheet.load(os.path.join(PATH, "corpora", "wordforms-de-celex.csv")):
if tag == "n":
if de.singularize(pl) == sg:
i +=1
n += 1
self.assertTrue(float(i) / n > 0.82)
print "pattern.de.singularize()"
def test_attributive(self):
# Assert "groß" => "großer" (masculine, nominative), and others.
for lemma, inflected, gender, role, article in (
(u"groß", u"großer", de.MALE, de.SUBJECT, None),
(u"groß", u"großen", de.MALE, de.OBJECT, None),
(u"groß", u"großem", de.MALE, de.INDIRECT, None),
(u"groß", u"großen", de.MALE, de.PROPERTY, None),
(u"groß", u"große", de.FEMALE, de.SUBJECT, None),
(u"groß", u"große", de.FEMALE, de.OBJECT, None),
(u"groß", u"großer", de.FEMALE, de.INDIRECT, None),
(u"groß", u"großes", de.NEUTRAL, de.SUBJECT, None),
(u"groß", u"großes", de.NEUTRAL, de.OBJECT, None),
(u"groß", u"großen", de.MALE, de.PROPERTY, "mein"),
(u"groß", u"großen", de.FEMALE, de.PROPERTY, "jeder"),
(u"groß", u"großen", de.FEMALE, de.PROPERTY, "mein"),
(u"groß", u"großen", de.PLURAL, de.INDIRECT, "jede"),
(u"groß", u"großen", de.PLURAL, de.PROPERTY, "jeder")):
v = de.attributive(lemma, gender, role, article)
self.assertEqual(v, inflected)
print "pattern.de.attributive()"
def test_predicative(self):
# Assert the accuracy of the predicative algorithm ("großer" => "groß").
from pattern.db import Datasheet
i, n = 0, 0
for tag, pred, attr in Datasheet.load(os.path.join(PATH, "corpora", "wordforms-de-celex.csv")):
if tag == "a":
if de.predicative(attr) == pred:
i +=1
n += 1
self.assertTrue(float(i) / n > 0.98)
print "pattern.de.predicative()"
def test_find_lemma(self):
# Assert the accuracy of the verb lemmatization algorithm.
# Note: the accuracy is higher (88%) when measured on CELEX word forms
# (presumably because de.inflect.verbs has high percentage irregular verbs).
i, n = 0, 0
for v1, v2 in de.inflect.verbs.inflections.items():
if de.inflect.verbs.find_lemma(v1) == v2:
i += 1
n += 1
self.assertTrue(float(i) / n > 0.86)
print "pattern.de.inflect.verbs.find_lemma()"
def test_find_lexeme(self):
# Assert the accuracy of the verb conjugation algorithm.
i, n = 0, 0
for v, lexeme1 in de.inflect.verbs.infinitives.items():
lexeme2 = de.inflect.verbs.find_lexeme(v)
for j in range(len(lexeme2)):
if lexeme1[j] == "":
continue
if lexeme1[j] == lexeme2[j]:
i += 1
n += 1
self.assertTrue(float(i) / n > 0.86)
print "pattern.de.inflect.verbs.find_lexeme()"
def test_conjugate(self):
# Assert different tenses with different conjugations.
for (v1, v2, tense) in (
("sein", "sein", de.INFINITIVE),
("sein", "bin", (de.PRESENT, 1, de.SINGULAR)),
("sein", "bist", (de.PRESENT, 2, de.SINGULAR)),
("sein", "ist", (de.PRESENT, 3, de.SINGULAR)),
("sein", "sind", (de.PRESENT, 1, de.PLURAL)),
("sein", "seid", (de.PRESENT, 2, de.PLURAL)),
("sein", "sind", (de.PRESENT, 3, de.PLURAL)),
("sein", "seiend", (de.PRESENT + de.PARTICIPLE)),
("sein", "war", (de.PAST, 1, de.SINGULAR)),
("sein", "warst", (de.PAST, 2, de.SINGULAR)),
("sein", "war", (de.PAST, 3, de.SINGULAR)),
("sein", "waren", (de.PAST, 1, de.PLURAL)),
("sein", "wart", (de.PAST, 2, de.PLURAL)),
("sein", "waren", (de.PAST, 3, de.PLURAL)),
("sein", "gewesen", (de.PAST + de.PARTICIPLE)),
("sein", "sei", (de.PRESENT, 2, de.SINGULAR, de.IMPERATIVE)),
("sein", "seien", (de.PRESENT, 1, de.PLURAL, de.IMPERATIVE)),
("sein", "seid", (de.PRESENT, 2, de.PLURAL, de.IMPERATIVE)),
("sein", u"sei", (de.PRESENT, 1, de.SINGULAR, de.SUBJUNCTIVE)),
("sein", u"seiest", (de.PRESENT, 2, de.SINGULAR, de.SUBJUNCTIVE)),
("sein", u"sei", (de.PRESENT, 3, de.SINGULAR, de.SUBJUNCTIVE)),
("sein", u"seien", (de.PRESENT, 1, de.PLURAL, de.SUBJUNCTIVE)),
("sein", u"seiet", (de.PRESENT, 2, de.PLURAL, de.SUBJUNCTIVE)),
("sein", u"seien", (de.PRESENT, 3, de.PLURAL, de.SUBJUNCTIVE)),
("sein", u"wäre", (de.PAST, 1, de.SINGULAR, de.SUBJUNCTIVE)),
("sein", u"wärest", (de.PAST, 2, de.SINGULAR, de.SUBJUNCTIVE)),
("sein", u"wäre", (de.PAST, 3, de.SINGULAR, de.SUBJUNCTIVE)),
("sein", u"wären", (de.PAST, 1, de.PLURAL, de.SUBJUNCTIVE)),
("sein", u"wäret", (de.PAST, 2, de.PLURAL, de.SUBJUNCTIVE)),
("sein", u"wären", (de.PAST, 3, de.PLURAL, de.SUBJUNCTIVE))):
self.assertEqual(de.conjugate(v1, tense), v2)
print "pattern.de.conjugate()"
def test_lexeme(self):
# Assert all inflections of "sein".
v = de.lexeme("sein")
self.assertEqual(v, [
"sein", "bin", "bist", "ist", "sind", "seid", "seiend",
"war", "warst", "waren", "wart", "gewesen",
"sei", "seien", "seiest", "seiet",
u"wäre", u"wärest", u"wären", u"wäret"
])
print "pattern.de.inflect.lexeme()"
def test_tenses(self):
# Assert tense recognition.
self.assertTrue((de.PRESENT, 3, de.SG) in de.tenses("ist"))
self.assertTrue("2sg" in de.tenses("bist"))
print "pattern.de.tenses()"
#---------------------------------------------------------------------------------------------------
class TestParser(unittest.TestCase):
def setUp(self):
pass
def test_find_lemmata(self):
# Assert lemmata for nouns, adjectives and verbs.
v = de.parser.find_lemmata([["Ich", "PRP"], ["sage", "VB"], [u"schöne", "JJ"], [u"Dinge", "NNS"]])
self.assertEqual(v, [
["Ich", "PRP", "ich"],
["sage", "VB", "sagen"],
[u"schöne", "JJ", u"schön"],
["Dinge", "NNS", "ding"]])
print "pattern.de.parser.find_lemmata()"
def test_parse(self):
# Assert parsed output with Penn Treebank II tags (slash-formatted).
# 1) "der große Hund" is a noun phrase, "auf der Matte" is a prepositional noun phrase.
v = de.parser.parse(u"Der große Hund sitzt auf der Matte.")
self.assertEqual(v,
u"Der/DT/B-NP/O große/JJ/I-NP/O Hund/NN/I-NP/O " + \
u"sitzt/VB/B-VP/O " + \
u"auf/IN/B-PP/B-PNP der/DT/B-NP/I-PNP Matte/NN/I-NP/I-PNP ././O/O"
)
# 2) "große" and "sitzt" lemmata are "groß" and "sitzen".
# Note how articles are problematic ("der" can be male subject but also plural possessive).
v = de.parser.parse(u"Der große Hund sitzt auf der Matte.", lemmata=True)
self.assertEqual(v,
u"Der/DT/B-NP/O/der große/JJ/I-NP/O/groß Hund/NN/I-NP/O/hund " + \
u"sitzt/VB/B-VP/O/sitzen " + \
u"auf/IN/B-PP/B-PNP/auf der/DT/B-NP/I-PNP/der Matte/NN/I-NP/I-PNP/matte ././O/O/."
)
# 3) Assert the accuracy of the German tagger.
i, n = 0, 0
for sentence in open(os.path.join(PATH, "corpora", "tagged-de-tiger.txt")).readlines():
sentence = sentence.decode("utf-8").strip()
s1 = [w.split("/") for w in sentence.split(" ")]
s1 = [de.stts2penntreebank(w, pos) for w, pos in s1]
s2 = [[w for w, pos in s1]]
s2 = de.parse(s2, tokenize=False)
s2 = [w.split("/") for w in s2.split(" ")]
for j in range(len(s1)):
if s1[j][1] == s2[j][1]:
i += 1
n += 1
self.assertTrue(float(i) / n > 0.844)
print "pattern.de.parse()"
def test_tag(self):
# Assert [("der", "DT"), ("grosse", "JJ"), ("Hund", "NN")].
v = de.tag("der grosse Hund")
self.assertEqual(v, [("der", "DT"), ("grosse", "JJ"), ("Hund", "NN")])
print "pattern.de.tag()"
def test_command_line(self):
# Assert parsed output from the command-line (example from the documentation).
p = ["python", "-m", "pattern.de", "-s", "Der grosse Hund.", "-OTCRL"]
p = subprocess.Popen(p, stdout=subprocess.PIPE)
p.wait()
v = p.stdout.read()
v = v.strip()
self.assertEqual(v, "Der/DT/B-NP/O/O/der grosse/JJ/I-NP/O/O/gross Hund/NN/I-NP/O/O/hund ././O/O/O/.")
print "python -m pattern.de"
#---------------------------------------------------------------------------------------------------
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestInflection))
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestParser))
return suite
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=1).run(suite())
|
py | 1a50bd386677f0a028fd71156ed0426dac98c637 | from distutils.core import setup
from setuptools import find_packages
import nhlscrapi
setup(
name="nhlscrapi",
version=nhlscrapi.__version__,
description='NHL Scrapr API for Python',
long_description=open('README.rst').read(),
author='Rob Howley',
author_email='[email protected]',
url='https://github.com/robhowley/nhlscrapi',
packages=find_packages(),
include_package_data=True,
scripts=['bin/gamedata.py'],
license="Apache Software License version 2.0",
platforms='any',
zip_safe=False,
keywords='nhlscrapi',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
],
test_suite='tests',
# Dependent packages (distributions)
install_requires=['lxml', 'requests']
)
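# Typical usage (standard setuptools workflow, not specific to this project):
#   pip install .   # installs nhlscrapi and the bin/gamedata.py script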
|
py | 1a50bdf63d5b3a9d4ad0df4d1fa835d50376c8f8 | """
dxagent.py
This file contains the core of dxagent
@author: K.Edeline
"""
import sched
import time
import signal
import importlib
from .constants import AGENT_INPUT_PERIOD
from .core.ios import IOManager
from .core.daemon import Daemon
from .input.sysinfo import SysInfo
from .input.bm_input import BMWatcher
from .input.vm_input import VMWatcher
from .input.vpp_input import VPPWatcher
from .assurance.health import HealthEngine
from .gnmi.exporter import DXAgentExporter
class DXAgent(Daemon, IOManager):
"""
DXAgent
"""
def __init__(self, parse_args=True):
Daemon.__init__(self, pidfile='/var/run/dxagent.pid',
stdout='/var/log/dxagent.log',
stderr='/var/log/dxagent.log',
name='dxagent',
input_rate=AGENT_INPUT_PERIOD)
IOManager.__init__(self, child=self, parse_args=parse_args)
self.load_ios()
if not parse_args:
return
def _init(self):
self.sysinfo = SysInfo()
self.scheduler = sched.scheduler()
# ringbuffers are stored here
self._data = {}
# SharedMemory with dxtop.
# Drop privileges to avoid dxtop root requirements
if not self.args.disable_shm:
mod = importlib.import_module("agent.core.shareablebuffer")
with self.drop():
self.sbuffer = getattr(mod, "ShareableBuffer")(create=True)
# watchers.
self.bm_watcher = BMWatcher(self._data, self.info, self)
self.vm_watcher = VMWatcher(self._data, self.info, self)
self.vpp_watcher = VPPWatcher(self._data, self.info, self)
# health engine
self.engine = HealthEngine(self._data, self.info, self)
# exporter
if self.gnmi_target:
self.exporter = DXAgentExporter(self._data, self.info, self,
target_url=self.gnmi_target)
self.exporter.run()
# catch signal for cleanup
signal.signal(signal.SIGTERM, self.exit)
def _input(self):
self.bm_watcher.input()
self.vm_watcher.input()
self.vpp_watcher.input()
def process(self):
"""
read input data, process and write it to shmem.
re-schedule itself.
"""
# fetch input
self._input()
# compute metrics&symptoms from input
self.engine.update_health()
# write to shmem
if not self.args.disable_shm:
skip=["stats"] if not self.args.verbose else []
self.sbuffer.write(self._data, skip=skip, info=self.info)
#self.info(list(self.exporter._iterate_data()))
self.scheduler.enter(AGENT_INPUT_PERIOD,0,self.process)
def exit(self, signum=None, stackframe=None):
"""
cleanup before exiting
"""
self.running = False
time.sleep(AGENT_INPUT_PERIOD)
self.bm_watcher.exit()
self.vm_watcher.exit()
self.vpp_watcher.exit()
if not self.args.disable_shm:
self.sbuffer.unlink()
del self.sbuffer
def run(self):
"""
main function
"""
self._init()
self.running = True
self.info(self.sysinfo)
self.process()
while self.running:
self.scheduler.run(blocking=False)
time.sleep(AGENT_INPUT_PERIOD)
|
py | 1a50bfa72e90f8791b5343d4ee0a35f0eb2c7a61 | from pytools import testutil
import sys
import basecase
class E1photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E1flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E1fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E1vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E1abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E1stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E1obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E1counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E2photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E2flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E2fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E2vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E2abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E2stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E2obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E2counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E3photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E3flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E3fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E3vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E3abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E3stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E3obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E3counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E4photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E4flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E4fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E4vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E4abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E4stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E4obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E4counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E5photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E5flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E5fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E5vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E5abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E5stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E5obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E5counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E6photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E6flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E6fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E6vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E6abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E6stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E6obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E6counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E7photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E7flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E7fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E7vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E7abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E7stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E7obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E7counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E8photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E8flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E8fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E8vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E8abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E8stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E8obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E8counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E9photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E9flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E9fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E9vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E9abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E9stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E9obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E9counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E10photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E10flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E10fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E10vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E10abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E10stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E10obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E10counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E11photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E11flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E11fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E11vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E11abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E11stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E11obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E11counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E12photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E12flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E12fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E12vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E12abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E12stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E12obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E12counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E13photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E13flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E13fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E13vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E13abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E13stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E13obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E13counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E14photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E14flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E14fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E14vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E14abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E14stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E14obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E14counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E15photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E15flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E15fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E15vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E15abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E15stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E15obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E15counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
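# E16-E19: effstim tests switch from the Vega reference spectrum to a 2000 K
# blackbody, bb(2000), through the acs,wfc1 red filters (f606w, f775w, f814w,
# f850lp); each filter is exercised in all eight output forms (photlam, flam,
# fnu, vegamag, abmag, stmag, obmag, counts).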
class E16photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E16flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E16fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E16vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E16abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E16stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E16obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E16counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E17photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E17flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E17fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E17vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E17abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E17stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E17obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E17counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E18photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E18flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E18fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E18vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E18abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E18stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E18obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E18counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E19photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E19flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E19fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E19vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E19abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E19stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E19obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E19counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
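# E20-E23: same four acs,wfc1 red filters and eight output forms, now with a
# 3000 K blackbody source, bb(3000).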
class E20photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E20flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E20fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E20vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E20abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E20stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E20obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E20counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E21photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E21flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E21fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E21vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E21abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E21stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E21obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E21counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E22photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E22flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E22fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E22vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E22abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E22stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E22obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E22counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E23photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E23flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E23fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E23vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E23abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E23stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E23obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E23counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
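# E24-E30: back to the Vega spectrum (crcalspec$alpha_lyr_stis_003.fits),
# covering the full acs,wfc1 broadband set f435w through f850lp in all eight
# output forms.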
class E24photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E24flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E24fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E24vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E24abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E24stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E24obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E24counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E25photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E25flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E25fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E25vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E25abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E25stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E25obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E25counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E26photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E26flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E26fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E26vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E26abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E26stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E26obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E26counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E27photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E27flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E27fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E27vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E27abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E27stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E27obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E27counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E28photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E28flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E28fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E28vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E28abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E28stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E28obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E28counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E29photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E29flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E29fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E29vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E29abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E29stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E29obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E29counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E30photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E30flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E30fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E30vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E30abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E30stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E30obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E30counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
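# E31-E34: bb(2000) source again over the red filters f606w-f850lp.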
class E31photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E31flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E31fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E31vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E31abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E31stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E31obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E31counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E32photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E32flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E32fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E32vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E32abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E32stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E32obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E32counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E33photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E33flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E33fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E33vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E33abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E33stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E33obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E33counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E34photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E34flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E34fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E34vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E34abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E34stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E34obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E34counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
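# E35-E38: bb(3000) source over the same red filters f606w-f850lp.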
class E35photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E35flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E35fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E35vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E35abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E35stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E35obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E35counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E36photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E36flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E36fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E36vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E36abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E36stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E36obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E36counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E37photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E37flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E37fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E37vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E37abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E37stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E37obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E37counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E38photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E38flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E38fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E38vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E38abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E38stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E38obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E38counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
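# E39-E45: Vega spectrum over the full broadband filter set f435w-f850lp,
# all eight output forms.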
class E39photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E39flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E39fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E39vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E39abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E39stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E39obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E39counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E40photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E40flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E40fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E40vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E40abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E40stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E40obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E40counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E41photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E41flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E41fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E41vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E41abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E41stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E41obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E41counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E42photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E42flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E42fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E42vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E42abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E42stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E42obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E42counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E43photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E43flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E43fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E43vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E43abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E43stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E43obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E43counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E44photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E44flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E44fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E44vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E44abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E44stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E44obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E44counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E45photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E45flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E45fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E45vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E45abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E45stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E45obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E45counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
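# E46-E49: bb(2000) source over the red filters f606w-f850lp.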
class E46photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E46flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E46fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E46vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E46abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E46stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E46obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E46counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E47photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E47flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E47fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E47vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E47abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E47stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E47obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E47counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E48photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E48flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E48fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E48vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E48abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E48stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E48obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E48counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E49photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E49flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E49fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E49vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E49abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E49stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E49obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E49counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
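# E50-E53: bb(3000) source over the red filters f606w-f850lp.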
class E50photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E50flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E50fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E50vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E50abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E50stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E50obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E50counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E51photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E51flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E51fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E51vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E51abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E51stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E51obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E51counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E52photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E52flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E52fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E52vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E52abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E52stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E52obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E52counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E53photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E53flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E53fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E53vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E53abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E53stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E53obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E53counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
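# E54-E60: Vega reference spectrum (crcalspec$alpha_lyr_stis_003.fits)
# through the ACS/WFC1 filters (f435w through f850lp).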
class E54photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E54flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E54fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E54vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E54abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E54stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E54obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E54counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f435w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E55photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E55flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E55fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E55vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E55abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E55stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E55obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E55counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f475w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E56photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E56flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E56fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E56vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E56abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E56stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E56obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E56counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f555w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E57photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E57flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E57fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E57vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E57abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E57stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E57obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E57counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E58photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E58flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E58fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E58vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E58abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E58stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E58obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E58counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E59photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E59flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E59fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E59vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E59abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E59stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E59obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E59counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E60photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E60flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E60fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E60vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E60abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E60stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E60obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E60counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,wfc1,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
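# E61-E64: bb(2000) blackbody through the ACS/HRC filters
# (f606w, f775w, f814w, f850lp).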
class E61photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E61flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E61fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E61vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E61abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E61stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E61obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E61counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E62photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E62flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E62fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E62vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E62abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E62stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E62obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E62counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E63photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E63flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E63fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E63vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E63abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E63stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E63obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E63counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E64photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E64flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E64fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E64vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E64abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E64stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E64obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E64counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
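# E65-E68: bb(3000) blackbody through the same ACS/HRC filters.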
class E65photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E65flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E65fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E65vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E65abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E65stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E65obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E65counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E66photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E66flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E66fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E66vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E66abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E66stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E66obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E66counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E67photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E67flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E67fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E67vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E67abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E67stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E67obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E67counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E68photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E68flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E68fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E68vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E68abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E68stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E68obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E68counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
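# E69-E75: Vega reference spectrum through the ACS/HRC filters
# (f435w through f850lp).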
class E69photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E69flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E69fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E69vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E69abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E69stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E69obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E69counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E70photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E70flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E70fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E70vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E70abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E70stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E70obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E70counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E71photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E71flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E71fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E71vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E71abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E71stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E71obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E71counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E72photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E72flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E72fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E72vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E72abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E72stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E72obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E72counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E73photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E73flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E73fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E73vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E73abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E73stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E73obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E73counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E74photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E74flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E74fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E74vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E74abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E74stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E74obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E74counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E75photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E75flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E75fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E75vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E75abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E75stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E75obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E75counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
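# E76-E79: repeat the bb(2000)/ACS-HRC combinations of E61-E64 under new
# case names.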
class E76photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E76flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E76fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E76vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E76abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E76stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E76obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E76counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E77photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E77flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E77fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E77vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E77abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E77stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E77obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E77counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E78photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E78flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E78fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E78vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E78abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E78stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E78obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E78counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E79photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E79flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E79fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E79vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E79abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E79stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E79obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E79counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
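# E80-E83: repeat the bb(3000)/ACS-HRC combinations of E65-E68.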
class E80photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E80flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E80fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E80vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E80abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E80stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E80obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E80counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E81photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E81flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E81fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E81vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E81abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E81stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E81obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E81counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E82photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E82flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E82fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E82vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E82abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E82stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E82obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E82counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E83photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E83flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E83fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E83vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E83abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E83stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E83obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E83counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
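# E84-E90: repeat the Vega/ACS-HRC combinations of E69-E75.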
class E84photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E84flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E84fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E84vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E84abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E84stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E84obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E84counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E85photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E85flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E85fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E85vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E85abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E85stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E85obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E85counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E86photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E86flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E86fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E86vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E86abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E86stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E86obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E86counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E87photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E87flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E87fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E87vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E87abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E87stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E87obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E87counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E88photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E88flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E88fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E88vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E88abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E88stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E88obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E88counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E89photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E89flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E89fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E89vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E89abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E89stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E89obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E89counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E90photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E90flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E90fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E90vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E90abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E90stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E90obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E90counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
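# E91-E93: a third bb(2000)/ACS-HRC pass (f606w, f775w, f814w).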
class E91photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E91flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E91fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E91vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E91abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E91stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E91obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E91counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E92photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E92flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E92fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E92vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E92abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E92stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E92obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E92counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E93photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E93flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E93fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E93vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E93abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E93stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E93obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E93counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E94photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E94flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E94fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E94vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E94abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E94stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E94obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E94counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E95photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E95flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E95fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E95vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E95abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E95stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E95obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E95counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E96photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E96flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E96fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E96vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E96abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E96stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E96obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E96counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E97photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E97flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E97fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E97vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E97abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E97stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E97obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E97counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E98photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E98flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E98fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E98vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E98abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E98stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E98obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E98counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E99photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E99flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E99fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E99vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E99abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E99stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E99obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E99counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E100photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E100flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E100fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E100vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E100abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E100stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E100obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E100counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E101photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E101flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E101fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E101vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E101abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E101stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E101obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E101counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E102photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E102flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E102fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E102vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E102abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E102stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E102obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E102counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E103photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E103flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E103fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E103vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E103abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E103stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E103obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E103counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E104photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E104flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E104fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E104vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E104abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E104stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E104obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E104counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E105photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E105flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E105fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E105vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E105abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E105stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E105obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E105counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E106photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E106flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E106fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E106vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E106abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E106stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E106obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E106counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E107photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E107flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E107fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E107vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E107abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E107stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E107obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E107counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E108photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E108flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E108fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E108vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E108abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E108stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E108obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E108counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E109photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E109flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E109fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E109vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E109abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E109stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E109obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E109counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(2000) "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E110photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E110flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E110fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E110vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E110abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E110stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E110obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E110counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E111photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E111flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E111fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E111vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E111abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E111stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E111obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E111counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E112photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E112flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E112fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E112vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E112abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E112stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E112obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E112counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E113photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E113flam(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E113fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E113vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E113abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E113stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E113obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E113counts(basecase.effstimCase):
def setUp(self):
self.spectrum="bb(3000) "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E114photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E114flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E114fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E114vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E114abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E114stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E114obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E114counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f435w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E115photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E115flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E115fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E115vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E115abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E115stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E115obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E115counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f475w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E116photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E116flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E116fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E116vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E116abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E116stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E116obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E116counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f555w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E117photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E117flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E117fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E117vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E117abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E117stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E117obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E117counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f606w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E118photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E118flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E118fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E118vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E118abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E118stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E118obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E118counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f775w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E119photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E119flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E119fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E119vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E119abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E119stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E119obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E119counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f814w"
self.form="counts"
self.setglobal(__file__)
self.runpy()
class E120photlam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="photlam"
self.setglobal(__file__)
self.runpy()
class E120flam(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="flam"
self.setglobal(__file__)
self.runpy()
class E120fnu(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="fnu"
self.setglobal(__file__)
self.runpy()
class E120vegamag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="vegamag"
self.setglobal(__file__)
self.runpy()
class E120abmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="abmag"
self.setglobal(__file__)
self.runpy()
class E120stmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="stmag"
self.setglobal(__file__)
self.runpy()
class E120obmag(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="obmag"
self.setglobal(__file__)
self.runpy()
class E120counts(basecase.effstimCase):
def setUp(self):
self.spectrum="crcalspec$alpha_lyr_stis_003.fits "
self.obsmode="acs,hrc,f850lp"
self.form="counts"
self.setglobal(__file__)
self.runpy()
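# Hedged aside: the classes above all instantiate one repeated template. Below
# is a sketch of an equivalent class factory (assuming basecase.effstimCase
# keeps this setUp contract; make_effstim_case is an illustrative name, not
# part of the generated suite):
def make_effstim_case(name, spectrum, obsmode, form):
    def setUp(self):
        self.spectrum = spectrum
        self.obsmode = obsmode
        self.form = form
        self.setglobal(__file__)
        self.runpy()
    return type(name, (basecase.effstimCase,), {'setUp': setUp})
# Example:
# E120counts = make_effstim_case(
#     'E120counts', "crcalspec$alpha_lyr_stis_003.fits ", "acs,hrc,f850lp", "counts")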
|
py | 1a50c05e8dde2624455d34cbe62b293fc3afb2a6 | from googlesearch import search
from pyppeteer import launch
from wplay.utils.helpers import chatbot_image_folder_path
async def Bot(last_Message):
"""
    Perform the instruction contained in the last message sent to the bot.
"""
print('\n Bot activated')
first_last_Message = "".join(last_Message.split())
simple_menu = {
"hi": say_hi,
"help": _help_commands,
"goodmorning": say_goodmorning,
"goodnight": say_goodnight,
"howareyou?": say_fine,
}
simple_menu_keys = simple_menu.keys()
result = []
try:
command_args = first_last_Message[1:].split(" ", 1)
command_arg = last_Message[1:].split(" ", 1)
if len(command_args) == 1 and command_args[0] in simple_menu_keys:
return simple_menu[command_args[0]]()
elif command_arg[0] == 'google':
query = "".join(command_arg[1])
for j in search(query, tld="co.in", num=10, stop=10, pause=2):
result.append(j)
print("Sending links for query")
return result
elif command_arg[0] == "image":
query = "".join(command_arg[1])
await takeScreenshot(query)
print("Taking screenshot of google image for query")
return "Sending you screenshot"
elif command_arg[0] == "maps":
query = "".join(command_arg[1])
map_parameters_list = query.replace(" ", "")
map_parameters = map_parameters_list.split(',')
base_url = "https://www.google.com/maps/dir/?api=1&"
custom_url = base_url + "origin={ori}&destination={dest}&travelmode={t_mode}".format(ori=map_parameters[0], dest=map_parameters[1], t_mode=map_parameters[2])
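            # Illustrative example (hypothetical values): "/maps Paris, Lyon, driving" builds
            # https://www.google.com/maps/dir/?api=1&origin=Paris&destination=Lyon&travelmode=driving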
print("Sending link for google maps")
return custom_url
else:
return "Wrong command. Send me /help to see a list of valid commands"
except KeyError as e:
print("Key Error Exception: {err}".format(err=str(e)))
def say_hi():
print("Saying hi")
return "Wplay chatbot says hi! Hope you are having a nice day..."
def say_goodmorning():
print("Saying good morning")
return "Bot says Good Morning! Have a Good Day..."
def say_goodnight():
print("Saying good night")
return "Bot says Good Night! Sweet Dreams..."
def say_fine():
print("Saying I am Fine!")
return "Bot says I am Fine Thank You! How are you?"
def _help_commands():
print("Asking for help")
return "How may I assist you with help\n"\
"List of commands:\n" \
"/hi (bot says hi), " \
"/all_commands (ist of all commands), " \
"/good morning, " \
"/good night, " \
"/how are you? " \
"/google {query} " \
"/image {query} " \
"/maps {origin}, {destination}, {mode:driving/bicycling/transit/two-wheeler/walking}"
async def takeScreenshot(qry):
browser = await launch()
page = await browser.newPage()
await page.goto('https://www.google.com/search?q={}&source=lnms&tbm=isch'.format(qry))
image_path = str(chatbot_image_folder_path / '{}.png'.format(qry))
await page.screenshot({'path': image_path})
await browser.close()
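# Minimal usage sketch (hedged: the wplay message loop that normally calls Bot
# is omitted here, and the sample command is illustrative):
# import asyncio
# links = asyncio.get_event_loop().run_until_complete(Bot("/google pyppeteer docs"))
# print(links)  # list of result URLs for the query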
|
py | 1a50c0a6defbab214ca903c2cfc9b3facdbc18c2 | import copy
import pytest
from ckan_api_client.exceptions import HTTPError
from ckan_api_client.objects import CkanDataset
from ckan_api_client.tests.utils.diff import diff_mappings
from ckan_api_client.tests.utils.generate import generate_dataset
from ckan_api_client.tests.utils.validation import MutableCheckpoint
def test_dataset_create(ckan_client_hl):
client = ckan_client_hl
dataset_dict = generate_dataset()
dataset = CkanDataset(dataset_dict)
created = client.create_dataset(dataset)
assert created.is_equivalent(dataset)
def test_dataset_get_by_name(ckan_client_hl):
client = ckan_client_hl
dataset_dict = generate_dataset()
dataset_dict['name'] = 'example-dataset-name'
dataset = CkanDataset(dataset_dict)
created = client.create_dataset(dataset)
assert created.is_equivalent(dataset)
dataset_id = created.id
# Try getting by id
dataset_1 = client.get_dataset(dataset_id)
assert created == dataset_1
# Try getting by name
dataset_2 = client.get_dataset_by_name('example-dataset-name')
assert created == dataset_2
# Try getting by id, but passing name instead
with pytest.raises(HTTPError) as excinfo:
client.get_dataset('example-dataset-name')
assert excinfo.value.status_code == 404
# Try getting by name, but passing id instead
with pytest.raises(HTTPError) as excinfo:
client.get_dataset_by_name(dataset_id)
assert excinfo.value.status_code == 404
def test_dataset_update_base_fields(ckan_client_hl):
client = ckan_client_hl # shortcut
ckp = MutableCheckpoint() # to check objects mutation
# Create our dataset
dataset_dict = generate_dataset()
ckp.add(dataset_dict)
dataset = CkanDataset(generate_dataset())
dataset.author = 'original author'
dataset.author_email = '[email protected]'
dataset.license_id = 'cc-zero'
created = client.create_dataset(dataset)
# Store a copy of the original dataset
original_dataset = client.get_dataset(created.id)
assert created.is_equivalent(original_dataset)
ckp.add(original_dataset)
# Update some base fields, send back & check
to_be_updated = copy.deepcopy(original_dataset)
to_be_updated.author = 'NEW_AUTHOR'
to_be_updated.author_email = 'NEW_AUTHOR_EMAIL'
to_be_updated.license_id = 'cc-by-sa'
assert to_be_updated.is_modified()
# Update, get back, check
updated = client.update_dataset(to_be_updated)
updated_2 = client.get_dataset(created.id)
assert updated.is_equivalent(to_be_updated)
assert updated.is_equivalent(updated_2)
diffs = diff_mappings(
original_dataset.serialize(),
updated.serialize())
assert diffs['differing'] == set([
'author', 'author_email', 'license_id',
])
assert diffs['left'] == set()
assert diffs['right'] == set()
# Make sure dicts did not mutate
ckp.check()
def test_dataset_update_extras(ckan_client_hl):
client = ckan_client_hl # shortcut
ds_dict = generate_dataset()
ds_dict['extras'] = {
'key-0': 'value-0',
'key-1': 'value-1',
'key-2': 'value-2',
'key-3': 'value-3',
'key-4': 'value-4',
'key-5': 'value-5',
'key-6': 'value-6',
'key-7': 'value-7',
'key-8': 'value-8',
'key-9': 'value-9',
}
stage_1pre = CkanDataset(ds_dict)
stage_1 = client.create_dataset(stage_1pre)
# --------------------------------------------------
# Try adding a new record
stage_1b = client.get_dataset(stage_1.id)
stage_2pre = copy.deepcopy(stage_1b)
stage_2pre.extras['NEW_FIELD_NAME'] = 'NEW_FIELD_VALUE'
stage_2 = client.update_dataset(stage_2pre)
assert stage_2.is_equivalent(client.get_dataset(stage_1.id))
diffs = diff_mappings(stage_1b.serialize(), stage_2.serialize())
assert diffs['left'] == diffs['right'] == set()
assert diffs['differing'] == set(['extras'])
del stage_1b, stage_2pre, stage_2, diffs
# --------------------------------------------------
# Try removing the custom field
stage_2pre = client.get_dataset(stage_1.id)
del stage_2pre.extras['NEW_FIELD_NAME']
stage_2 = client.update_dataset(stage_2pre)
assert stage_2.is_equivalent(client.get_dataset(stage_1.id))
assert 'NEW_FIELD_NAME' not in stage_2.extras
stage_2b = client.get_dataset(stage_1.id)
assert stage_2 == stage_2b
# Make sure we brought it back to its original state
assert stage_1.is_equivalent(stage_2)
del stage_2pre, stage_2
def test_dataset_update_resources(ckan_client_hl):
client = ckan_client_hl # shortcut
ds_dict = generate_dataset()
ds_dict['resources'] = [
{'name': 'example-csv-1',
'url': 'http://example.com/dataset-1.csv',
'format': 'CSV'},
{'name': 'example-json-1',
'url': 'http://example.com/dataset-1.json',
'format': 'JSON'},
]
stage_1pre = CkanDataset(ds_dict)
stage_1 = client.create_dataset(stage_1pre)
# --------------------------------------------------
# Try adding a new resource
stage_2pre = client.get_dataset(stage_1.id)
stage_2pre.resources.append({
'name': 'example-csv-2',
'url': 'http://example.com/dataset-2.csv',
'format': 'CSV'})
assert len(stage_2pre.resources) == 3
assert len(stage_2pre.serialize()['resources']) == 3
stage_2 = client.update_dataset(stage_2pre)
assert len(stage_2.resources) == 3
assert len(stage_2.serialize()['resources']) == 3
# --------------------------------------------------
    # Try prepending a new resource
stage_3pre = client.get_dataset(stage_1.id)
stage_3pre.resources.insert(0, {
'url': 'http://example.com/dataset-2.json',
'format': 'JSON'})
assert len(stage_3pre.resources) == 4
assert len(stage_3pre.serialize()['resources']) == 4
stage_3 = client.update_dataset(stage_3pre)
assert len(stage_3.resources) == 4
assert len(stage_3.serialize()['resources']) == 4
def test_dataset_delete(ckan_client_hl):
client = ckan_client_hl
dataset_dict = generate_dataset()
dataset = CkanDataset(dataset_dict)
created = client.create_dataset(dataset)
assert created.is_equivalent(dataset)
# Make sure it is in lists
assert created.id in client.list_datasets()
# Delete it
client.delete_dataset(created.id)
assert created.id not in client.list_datasets()
# Test that our workarounds work as expected..
with pytest.raises(HTTPError) as excinfo:
client.get_dataset(created.id)
assert excinfo.value.status_code == 404
retrieved = client.get_dataset(created.id, allow_deleted=True)
assert retrieved.state == 'deleted'
def test_dataset_wipe(ckan_client_hl):
client = ckan_client_hl
# ------------------------------------------------------------
# Now delete normally and try inserting another
# one with the same name. Should fail with 409
dataset = CkanDataset(generate_dataset())
dataset.name = 'dataset-to-delete'
created = client.create_dataset(dataset)
assert created.is_equivalent(dataset)
client.delete_dataset(created.id)
new_dataset = CkanDataset(generate_dataset())
new_dataset.name = 'dataset-to-delete'
with pytest.raises(HTTPError) as excinfo:
client.create_dataset(new_dataset)
assert excinfo.value.status_code == 409
del dataset, created, new_dataset, excinfo
# ------------------------------------------------------------
# Now let's try updating + deleting
dataset = CkanDataset(generate_dataset())
dataset.name = 'dataset-to-delete-2'
created = client.create_dataset(dataset)
assert created.is_equivalent(dataset)
client.wipe_dataset(created.id)
new_dataset = CkanDataset(generate_dataset())
new_dataset.name = 'dataset-to-delete-2'
# Should not fail anymore
created = client.create_dataset(new_dataset)
assert created.name == 'dataset-to-delete-2'
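# Hedged usage note: these tests rely on a ``ckan_client_hl`` pytest fixture
# (a high-level client bound to a disposable CKAN instance) provided elsewhere
# in the package; the file path below is illustrative:
# pytest ckan_api_client/tests/functional/test_dataset_hl.py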
|
py | 1a50c0d64e0bbfe139c71453462156304b09962c | def serialize_attr(name, value):
if name in ('restaurant', 'type'):
return value.name
if name == 'date':
return str(value)
return value
def serialize_item(item, attrs):
return {attr: serialize_attr(attr, getattr(item, attr)) for attr in attrs}
def serialize_menu(menu):
attrs = ['restaurant', 'date', 'type', 'name', 'price', 'currency']
return [serialize_item(item, attrs) for item in menu]
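# Minimal usage sketch (the namedtuple stand-ins are illustrative assumptions;
# any objects exposing the same attributes work):
# from collections import namedtuple
# Named = namedtuple('Named', 'name')
# Item = namedtuple('Item', 'restaurant date type name price currency')
# item = Item(Named('Cafe'), '2020-01-01', Named('lunch'), 'Soup', 4.5, 'EUR')
# serialize_menu([item])
# # -> [{'restaurant': 'Cafe', 'date': '2020-01-01', 'type': 'lunch',
# #      'name': 'Soup', 'price': 4.5, 'currency': 'EUR'}]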
|
py | 1a50c0eb1f30996766279c0f738d390df07e325e | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class LearnFrames(Base):
"""The learning frames that IxNetwork sends during the test.
The LearnFrames class encapsulates a required learnFrames resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'learnFrames'
_SDM_ATT_MAP = {
'FastPathEnable': 'fastPathEnable',
'FastPathLearnFrameSize': 'fastPathLearnFrameSize',
'FastPathNumFrames': 'fastPathNumFrames',
'FastPathRate': 'fastPathRate',
'LearnFrameSize': 'learnFrameSize',
'LearnFrequency': 'learnFrequency',
'LearnNumFrames': 'learnNumFrames',
'LearnRate': 'learnRate',
'LearnSendMacOnly': 'learnSendMacOnly',
'LearnSendRouterSolicitation': 'learnSendRouterSolicitation',
'LearnWaitTime': 'learnWaitTime',
'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit',
}
def __init__(self, parent):
super(LearnFrames, self).__init__(parent)
@property
def FastPathEnable(self):
"""
Returns
-------
- bool: If true, the fast path is enabled.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable'])
@FastPathEnable.setter
def FastPathEnable(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value)
@property
def FastPathLearnFrameSize(self):
"""
Returns
-------
- number: Specifies the size of the learning frames in the fast path.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'])
@FastPathLearnFrameSize.setter
def FastPathLearnFrameSize(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value)
@property
def FastPathNumFrames(self):
"""
Returns
-------
- number: Specifies the number of learn frames that IxNetwork sends through fast path.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames'])
@FastPathNumFrames.setter
def FastPathNumFrames(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value)
@property
def FastPathRate(self):
"""
Returns
-------
        - number: The rate at which IxNetwork sends learning frames through the fast path.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastPathRate'])
@FastPathRate.setter
def FastPathRate(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value)
@property
def LearnFrameSize(self):
"""
Returns
-------
- number: Specifies the size of the learning frames.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize'])
@LearnFrameSize.setter
def LearnFrameSize(self, value):
self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value)
@property
def LearnFrequency(self):
"""
Returns
-------
        - str(never | onBinaryIteration | oncePerService | oncePerTest | onTrial): Specifies how frequently IxNetwork sends learning frames during the test.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency'])
@LearnFrequency.setter
def LearnFrequency(self, value):
self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value)
@property
def LearnNumFrames(self):
"""
Returns
-------
- number: Specifies the number of learning frames that IxNetwork sends for each address.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames'])
@LearnNumFrames.setter
def LearnNumFrames(self, value):
self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value)
@property
def LearnRate(self):
"""
Returns
-------
- number: Specifies the rate at which IxNetwork sends learn frames to the DUT.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnRate'])
@LearnRate.setter
def LearnRate(self, value):
self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value)
@property
def LearnSendMacOnly(self):
"""
Returns
-------
- bool: Sends learning frames to MAC address only.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'])
@LearnSendMacOnly.setter
def LearnSendMacOnly(self, value):
self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value)
@property
def LearnSendRouterSolicitation(self):
"""
Returns
-------
        - bool: If true, IxNetwork sends router solicitation messages as part of learning.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'])
@LearnSendRouterSolicitation.setter
def LearnSendRouterSolicitation(self, value):
self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value)
@property
def LearnWaitTime(self):
"""
Returns
-------
- number: Specifies the length of time in ms that IxNetwork pauses before sending all the learning frames from all the ports.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime'])
@LearnWaitTime.setter
def LearnWaitTime(self, value):
self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value)
@property
def LearnWaitTimeBeforeTransmit(self):
"""
Returns
-------
- number: The time in ms that IxNetwork waits before sending all the learning frames from all the ports.
"""
return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'])
@LearnWaitTimeBeforeTransmit.setter
def LearnWaitTimeBeforeTransmit(self, value):
self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value)
def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None):
"""Updates learnFrames resource on the server.
Args
----
- FastPathEnable (bool): If true, the fast path is enabled.
- FastPathLearnFrameSize (number): Specifies the size of the learning frames in the fast path.
        - FastPathNumFrames (number): Specifies the number of learning frames that IxNetwork sends through the fast path.
        - FastPathRate (number): The rate at which IxNetwork sends learning frames through the fast path.
- LearnFrameSize (number): Specifies the size of the learning frames.
        - LearnFrequency (str(never | onBinaryIteration | oncePerService | oncePerTest | onTrial)): Specifies how frequently IxNetwork sends learning frames during the test.
- LearnNumFrames (number): Specifies the number of learning frames that IxNetwork sends for each address.
- LearnRate (number): Specifies the rate at which IxNetwork sends learn frames to the DUT.
- LearnSendMacOnly (bool): Sends learning frames to MAC address only.
        - LearnSendRouterSolicitation (bool): If true, IxNetwork sends router solicitation messages as part of learning.
- LearnWaitTime (number): Specifies the length of time in ms that IxNetwork pauses before sending all the learning frames from all the ports.
- LearnWaitTimeBeforeTransmit (number): The time in ms that IxNetwork waits before sending all the learning frames from all the ports.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def Apply(self):
"""Executes the apply operation on the server.
Applies the specified Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('apply', payload=payload, response_object=None)
def ApplyAsync(self):
"""Executes the applyAsync operation on the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyAsync', payload=payload, response_object=None)
def ApplyAsyncResult(self):
"""Executes the applyAsyncResult operation on the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyAsyncResult', payload=payload, response_object=None)
def ApplyITWizardConfiguration(self):
"""Executes the applyITWizardConfiguration operation on the server.
Applies the specified Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyITWizardConfiguration', payload=payload, response_object=None)
def GenerateReport(self):
"""Executes the generateReport operation on the server.
        Generate a PDF report for the last successful test run.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('generateReport', payload=payload, response_object=None)
def Run(self, *args, **kwargs):
"""Executes the run operation on the server.
Starts the specified Quick Test and waits for its execution to finish.
        The IxNetwork model allows for multiple method signatures with the same name while Python does not.
run(InputParameters=string)list
-------------------------------
- InputParameters (str): The input arguments of the test.
- Returns list(str): This method is synchronous and returns the result of the test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('run', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
"""Executes the start operation on the server.
Starts the specified Quick Test.
        The IxNetwork model allows for multiple method signatures with the same name while Python does not.
start(InputParameters=string)
-----------------------------
- InputParameters (str): The input arguments of the test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None)
def Stop(self):
"""Executes the stop operation on the server.
Stops the currently running Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('stop', payload=payload, response_object=None)
def WaitForTest(self):
"""Executes the waitForTest operation on the server.
Waits for the execution of the specified Quick Test to be completed.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('waitForTest', payload=payload, response_object=None)
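# A minimal usage sketch (hypothetical, commented out so the module stays
# unchanged): configuring the learning frames of a Quick Test through
# ixnetwork_restpy. The session address and the Rfc2544throughput test type
# are assumptions that depend on the installed IxNetwork version; adapt them
# to your setup.
#
# from ixnetwork_restpy import SessionAssistant
#
# session = SessionAssistant(IpAddress='127.0.0.1')
# ixnetwork = session.Ixnetwork
# quick_test = ixnetwork.QuickTest.Rfc2544throughput.add()
# learn_frames = quick_test.LearnFrames
# learn_frames.update(LearnFrequency='oncePerTest',
#                     LearnNumFrames=10,
#                     LearnRate=100)
# quick_test.Apply()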
|
py | 1a50c18b7220bfd0a2cd2b1c4acdf97526b0c745 | """The command for hard removing a metric."""
import logging
from typing import Final
from jupiter.domain.inbox_tasks.infra.inbox_task_notion_manager import InboxTaskNotionManager
from jupiter.domain.metrics.infra.metric_notion_manager import MetricNotionManager
from jupiter.domain.metrics.metric_key import MetricKey
from jupiter.domain.metrics.service.remove_service import MetricRemoveService
from jupiter.domain.storage_engine import StorageEngine
from jupiter.framework.use_case import UseCase
from jupiter.utils.time_provider import TimeProvider
LOGGER = logging.getLogger(__name__)
class MetricRemoveUseCase(UseCase[MetricKey, None]):
"""The command for removing a metric."""
_time_provider: Final[TimeProvider]
_storage_engine: Final[StorageEngine]
_metric_notion_manager: Final[MetricNotionManager]
_inbox_task_notion_manager: Final[InboxTaskNotionManager]
def __init__(
self, time_provider: TimeProvider, storage_engine: StorageEngine,
inbox_task_notion_manager: InboxTaskNotionManager, metric_notion_manager: MetricNotionManager) -> None:
"""Constructor."""
self._time_provider = time_provider
self._storage_engine = storage_engine
self._metric_notion_manager = metric_notion_manager
self._inbox_task_notion_manager = inbox_task_notion_manager
def execute(self, args: MetricKey) -> None:
"""Execute the command's action."""
with self._storage_engine.get_unit_of_work() as uow:
metric = uow.metric_repository.load_by_key(args)
MetricRemoveService(self._storage_engine, self._inbox_task_notion_manager, self._metric_notion_manager)\
.execute(metric)
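# A minimal wiring sketch (hypothetical, commented out): the concrete
# time provider, storage engine and Notion manager instances are composed
# elsewhere in the application; `MetricKey.from_raw` is assumed to parse a
# raw key string.
#
# use_case = MetricRemoveUseCase(
#     time_provider=time_provider,
#     storage_engine=storage_engine,
#     inbox_task_notion_manager=inbox_task_notion_manager,
#     metric_notion_manager=metric_notion_manager)
# use_case.execute(MetricKey.from_raw("weight"))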
|
py | 1a50c2eccc3580875f86026097b6b7c44b07ea3a | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2015 Cristian van Ee <cristian at cvee.org>
# Copyright 2015 Igor Gnatenko <[email protected]>
# Copyright 2018 Adam Miller <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: dnf
version_added: 1.9
short_description: Manages packages with the I(dnf) package manager
description:
    - Installs, upgrades, removes, and lists packages and groups with the I(dnf) package manager.
options:
name:
description:
- "A list of package names, or package specifier with version, like C(name-1.0)
When using state=latest, this can be '*' which means run: dnf -y update.
You can also pass a url or a local path to a rpm file."
required: true
aliases:
- pkg
list:
description:
- Various (non-idempotent) commands for usage with C(/usr/bin/ansible) and I(not) playbooks. See examples.
state:
description:
- Whether to install (C(present), C(latest)), or remove (C(absent)) a package.
choices: ['absent', 'present', 'installed', 'removed', 'latest']
default: "present"
enablerepo:
description:
- I(Repoid) of repositories to enable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
disablerepo:
description:
- I(Repoid) of repositories to disable for the install/update operation.
These repos will not persist beyond the transaction.
When specifying multiple repos, separate them with a ",".
conf_file:
description:
- The remote dnf configuration file to use for the transaction.
disable_gpg_check:
description:
- Whether to disable the GPG checking of signatures of packages being
installed. Has an effect only if state is I(present) or I(latest).
type: bool
default: 'no'
installroot:
description:
- Specifies an alternative installroot, relative to which all packages
will be installed.
version_added: "2.3"
default: "/"
releasever:
description:
- Specifies an alternative release from which all packages will be
installed.
required: false
version_added: "2.6"
default: null
autoremove:
description:
- If C(yes), removes all "leaf" packages from the system that were originally
installed as dependencies of user-installed packages but which are no longer
required by any such package. Should be used alone or when state is I(absent)
type: bool
default: false
version_added: "2.4"
exclude:
description:
- Package name(s) to exclude when state=present, or latest. This can be a
list or a comma separated string.
version_added: "2.7"
skip_broken:
description:
      - Skip packages with broken dependencies (depsolve) that are causing problems.
type: bool
default: "no"
version_added: "2.7"
update_cache:
description:
      - Force dnf to check if cache is out of date and redownload if needed.
Has an effect only if state is I(present) or I(latest).
type: bool
default: "no"
aliases: [ expire-cache ]
version_added: "2.7"
update_only:
description:
- When using latest, only update installed packages. Do not install packages.
- Has an effect only if state is I(latest)
required: false
default: "no"
type: bool
version_added: "2.7"
security:
description:
- If set to C(yes), and C(state=latest) then only installs updates that have been marked security related.
type: bool
default: "no"
version_added: "2.7"
bugfix:
description:
- If set to C(yes), and C(state=latest) then only installs updates that have been marked bugfix related.
required: false
default: "no"
type: bool
version_added: "2.7"
enable_plugin:
description:
- I(Plugin) name to enable for the install/update operation.
The enabled plugin will not persist beyond the transaction.
required: false
version_added: "2.7"
disable_plugin:
description:
- I(Plugin) name to disable for the install/update operation.
The disabled plugins will not persist beyond the transaction.
required: false
version_added: "2.7"
disable_excludes:
description:
- Disable the excludes defined in DNF config files.
- If set to C(all), disables all excludes.
- If set to C(main), disable excludes defined in [main] in yum.conf.
- If set to C(repoid), disable excludes defined for given repo id.
required: false
choices: [ all, main, repoid ]
version_added: "2.7"
validate_certs:
description:
- This only applies if using a https url as the source of the rpm. e.g. for localinstall. If set to C(no), the SSL certificates will not be validated.
      - This should only be set to C(no) on personally controlled sites using self-signed certificates, as it avoids verifying the source site.
type: bool
default: "yes"
version_added: "2.7"
allow_downgrade:
description:
- Specify if the named package and version is allowed to downgrade
a maybe already installed higher version of that package.
Note that setting allow_downgrade=True can make this module
behave in a non-idempotent way. The task could end up with a set
of packages that does not match the complete list of specified
packages to install (because dependencies between the downgraded
package and others can cause changes to the packages which were
in the earlier transaction).
type: bool
default: False
version_added: "2.7"
install_repoquery:
description:
- This is effectively a no-op in DNF as it is not needed with DNF, but is an accepted parameter for feature
parity/compatibility with the I(yum) module.
type: bool
default: True
version_added: "2.7"
download_only:
description:
- Only download the packages, do not install them.
required: false
default: "no"
type: bool
version_added: "2.7"
notes:
- When used with a `loop:` each package will be processed individually, it is much more efficient to pass the list directly to the `name` option.
- Group removal doesn't work if the group was installed with Ansible because
upstream dnf's API doesn't properly mark groups as installed, therefore upon
removal the module is unable to detect that the group is installed
(https://bugzilla.redhat.com/show_bug.cgi?id=1620324)
requirements:
- "python >= 2.6"
- python-dnf
- for the autoremove option you need dnf >= 2.0.1
author:
- Igor Gnatenko (@ignatenkobrain) <[email protected]>
- Cristian van Ee (@DJMuggs) <cristian at cvee.org>
- Berend De Schouwer (@berenddeschouwer)
- Adam Miller (@maxamillion) <[email protected]>
'''
EXAMPLES = '''
- name: install the latest version of Apache
dnf:
name: httpd
state: latest
- name: remove the Apache package
dnf:
name: httpd
state: absent
- name: install the latest version of Apache from the testing repo
dnf:
name: httpd
enablerepo: testing
state: present
- name: upgrade all packages
dnf:
name: "*"
state: latest
- name: install the nginx rpm from a remote repo
dnf:
name: 'http://nginx.org/packages/centos/6/noarch/RPMS/nginx-release-centos-6-0.el6.ngx.noarch.rpm'
state: present
- name: install nginx rpm from a local file
dnf:
name: /usr/local/src/nginx-release-centos-6-0.el6.ngx.noarch.rpm
state: present
- name: install the 'Development tools' package group
dnf:
name: '@Development tools'
state: present
- name: Autoremove unneeded packages installed as dependencies
dnf:
autoremove: yes
- name: Uninstall httpd but keep its dependencies
dnf:
name: httpd
state: absent
autoremove: no
'''
import os
import re
import tempfile
try:
import dnf
import dnf.cli
import dnf.const
import dnf.exceptions
import dnf.subject
import dnf.util
HAS_DNF = True
except ImportError:
HAS_DNF = False
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.six import PY2, text_type
from distutils.version import LooseVersion
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.yumdnf import YumDnf, yumdnf_argument_spec
# 64k. Number of bytes to read at a time when manually downloading pkgs via a url
BUFSIZE = 65536
class DnfModule(YumDnf):
"""
DNF Ansible module back-end implementation
"""
def __init__(self, module):
# This populates instance vars for all argument spec params
super(DnfModule, self).__init__(module)
self._ensure_dnf()
def _sanitize_dnf_error_msg(self, spec, error):
"""
For unhandled dnf.exceptions.Error scenarios, there are certain error
messages we want to filter. Do that here.
"""
if to_text("no package matched") in to_text(error):
return "No package {0} available.".format(spec)
return error
def _package_dict(self, package):
"""Return a dictionary of information for the package."""
# NOTE: This no longer contains the 'dnfstate' field because it is
# already known based on the query type.
result = {
'name': package.name,
'arch': package.arch,
'epoch': str(package.epoch),
'release': package.release,
'version': package.version,
'repo': package.repoid}
result['nevra'] = '{epoch}:{name}-{version}-{release}.{arch}'.format(
**result)
# Added for YUM3/YUM4 compat
if package.repoid == 'installed':
result['yumstate'] = 'installed'
else:
result['yumstate'] = 'available'
return result
def _packagename_dict(self, packagename):
"""
Return a dictionary of information for a package name string or None
if the package name doesn't contain at least all NVR elements
"""
if packagename[-4:] == '.rpm':
packagename = packagename[:-4]
# This list was auto generated on a Fedora 28 system with the following one-liner
# printf '[ '; for arch in $(ls /usr/lib/rpm/platform); do printf '"%s", ' ${arch%-linux}; done; printf ']\n'
redhat_rpm_arches = [
"aarch64", "alphaev56", "alphaev5", "alphaev67", "alphaev6", "alpha",
"alphapca56", "amd64", "armv3l", "armv4b", "armv4l", "armv5tejl", "armv5tel",
"armv5tl", "armv6hl", "armv6l", "armv7hl", "armv7hnl", "armv7l", "athlon",
"geode", "i386", "i486", "i586", "i686", "ia32e", "ia64", "m68k", "mips64el",
"mips64", "mips64r6el", "mips64r6", "mipsel", "mips", "mipsr6el", "mipsr6",
"noarch", "pentium3", "pentium4", "ppc32dy4", "ppc64iseries", "ppc64le", "ppc64",
"ppc64p7", "ppc64pseries", "ppc8260", "ppc8560", "ppciseries", "ppc", "ppcpseries",
"riscv64", "s390", "s390x", "sh3", "sh4a", "sh4", "sh", "sparc64", "sparc64v",
"sparc", "sparcv8", "sparcv9", "sparcv9v", "x86_64"
]
rpm_arch_re = re.compile(r'(.*)\.(.*)')
rpm_nevr_re = re.compile(r'(\S+)-(?:(\d*):)?(.*)-(~?\w+[\w.]*)')
try:
arch = None
rpm_arch_match = rpm_arch_re.match(packagename)
if rpm_arch_match:
nevr, arch = rpm_arch_match.groups()
if arch in redhat_rpm_arches:
packagename = nevr
            rpm_nevr_match = rpm_nevr_re.match(packagename)
            if rpm_nevr_match:
                # Reuse the match object instead of matching twice.
                name, epoch, version, release = rpm_nevr_match.groups()
                if not version or not version.split('.')[0].isdigit():
                    return None
            else:
                return None
except AttributeError as e:
self.module.fail_json(
msg='Error attempting to parse package: %s, %s' % (packagename, to_native(e)),
rc=1,
results=[]
)
if not epoch:
epoch = "0"
if ':' in name:
epoch_name = name.split(":")
epoch = epoch_name[0]
name = ''.join(epoch_name[1:])
result = {
'name': name,
'epoch': epoch,
'release': release,
'version': version,
}
return result
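    # Illustrative input/output pairs for _packagename_dict (not executed):
    #   'httpd-2.4.6-93.el7.centos.x86_64' ->
    #       {'name': 'httpd', 'epoch': '0',
    #        'version': '2.4.6', 'release': '93.el7.centos'}
    #   'httpd' -> None (no NVR elements present)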
# Original implementation from yum.rpmUtils.miscutils (GPLv2+)
# http://yum.baseurl.org/gitweb?p=yum.git;a=blob;f=rpmUtils/miscutils.py
def _compare_evr(self, e1, v1, r1, e2, v2, r2):
# return 1: a is newer than b
# 0: a and b are the same version
# -1: b is newer than a
if e1 is None:
e1 = '0'
else:
e1 = str(e1)
v1 = str(v1)
r1 = str(r1)
if e2 is None:
e2 = '0'
else:
e2 = str(e2)
v2 = str(v2)
r2 = str(r2)
# print '%s, %s, %s vs %s, %s, %s' % (e1, v1, r1, e2, v2, r2)
rc = dnf.rpm.rpm.labelCompare((e1, v1, r1), (e2, v2, r2))
# print '%s, %s, %s vs %s, %s, %s = %s' % (e1, v1, r1, e2, v2, r2, rc)
return rc
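    # Illustrative comparisons (mirroring rpm label comparison semantics):
    #   _compare_evr('0', '1.0', '1', '0', '1.0', '2') -> -1  (b is newer)
    #   _compare_evr('1', '1.0', '1', '0', '9.9', '9') ->  1  (epoch dominates)
    #   _compare_evr('0', '1.0', '1', '0', '1.0', '1') ->  0  (same version)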
def fetch_rpm_from_url(self, spec):
# FIXME: Remove this once this PR is merged:
# https://github.com/ansible/ansible/pull/19172
# download package so that we can query it
package_name, dummy = os.path.splitext(str(spec.rsplit('/', 1)[1]))
package_file = tempfile.NamedTemporaryFile(dir=self.module.tmpdir, prefix=package_name, suffix='.rpm', delete=False)
self.module.add_cleanup_file(package_file.name)
try:
rsp, info = fetch_url(self.module, spec)
if not rsp:
self.module.fail_json(
msg="Failure downloading %s, %s" % (spec, info['msg']),
results=[],
)
data = rsp.read(BUFSIZE)
while data:
package_file.write(data)
data = rsp.read(BUFSIZE)
package_file.close()
except Exception as e:
self.module.fail_json(
msg="Failure downloading %s, %s" % (spec, to_native(e)),
results=[],
)
return package_file.name
def _ensure_dnf(self):
if not HAS_DNF:
if PY2:
package = 'python2-dnf'
else:
package = 'python3-dnf'
if self.module.check_mode:
self.module.fail_json(
msg="`{0}` is not installed, but it is required"
"for the Ansible dnf module.".format(package),
results=[],
)
self.module.run_command(['dnf', 'install', '-y', package], check_rc=True)
global dnf
try:
import dnf
import dnf.cli
import dnf.const
import dnf.exceptions
import dnf.subject
import dnf.util
except ImportError:
self.module.fail_json(
msg="Could not import the dnf python module. "
"Please install `{0}` package.".format(package),
results=[],
)
def _configure_base(self, base, conf_file, disable_gpg_check, installroot='/'):
"""Configure the dnf Base object."""
if self.enable_plugin and self.disable_plugin:
base.init_plugins(self.disable_plugin, self.enable_plugin)
elif self.enable_plugin:
base.init_plugins(enable_plugins=self.enable_plugin)
elif self.disable_plugin:
base.init_plugins(self.disable_plugin)
conf = base.conf
# Turn off debug messages in the output
conf.debuglevel = 0
# Set whether to check gpg signatures
conf.gpgcheck = not disable_gpg_check
# Don't prompt for user confirmations
conf.assumeyes = True
# Set installroot
conf.installroot = installroot
# Set excludes
if self.exclude:
conf.exclude(self.exclude)
# Set disable_excludes
if self.disable_excludes:
conf.disable_excludes.append(self.disable_excludes)
# Set releasever
if self.releasever is not None:
conf.substitutions['releasever'] = self.releasever
# Set skip_broken (in dnf this is strict=0)
if self.skip_broken:
conf.strict = 0
if self.download_only:
conf.downloadonly = True
# Change the configuration file path if provided
if conf_file:
# Fail if we can't read the configuration file.
if not os.access(conf_file, os.R_OK):
self.module.fail_json(
msg="cannot read configuration file", conf_file=conf_file,
results=[],
)
else:
conf.config_file_path = conf_file
# Default in dnf upstream is true
conf.clean_requirements_on_remove = self.autoremove
# Read the configuration file
conf.read()
def _specify_repositories(self, base, disablerepo, enablerepo):
"""Enable and disable repositories matching the provided patterns."""
base.read_all_repos()
repos = base.repos
# Disable repositories
for repo_pattern in disablerepo:
for repo in repos.get_matching(repo_pattern):
repo.disable()
# Enable repositories
for repo_pattern in enablerepo:
for repo in repos.get_matching(repo_pattern):
repo.enable()
def _base(self, conf_file, disable_gpg_check, disablerepo, enablerepo, installroot):
"""Return a fully configured dnf Base object."""
base = dnf.Base()
self._configure_base(base, conf_file, disable_gpg_check, installroot)
self._specify_repositories(base, disablerepo, enablerepo)
base.fill_sack(load_system_repo='auto')
if self.bugfix:
key = {'advisory_type__eq': 'bugfix'}
base._update_security_filters = [base.sack.query().filter(**key)]
if self.security:
key = {'advisory_type__eq': 'security'}
base._update_security_filters = [base.sack.query().filter(**key)]
if self.update_cache:
base.update_cache()
return base
def list_items(self, command):
"""List package info based on the command."""
# Rename updates to upgrades
if command == 'updates':
command = 'upgrades'
# Return the corresponding packages
if command in ['installed', 'upgrades', 'available']:
results = [
self._package_dict(package)
for package in getattr(self.base.sack.query(), command)()]
# Return the enabled repository ids
elif command in ['repos', 'repositories']:
results = [
{'repoid': repo.id, 'state': 'enabled'}
for repo in self.base.repos.iter_enabled()]
# Return any matching packages
else:
packages = dnf.subject.Subject(command).get_best_query(self.base.sack)
results = [self._package_dict(package) for package in packages]
self.module.exit_json(msg="", results=results)
def _is_installed(self, pkg):
installed = self.base.sack.query().installed()
if installed.filter(name=pkg):
return True
else:
return False
def _is_newer_version_installed(self, pkg_name):
candidate_pkg = self._packagename_dict(pkg_name)
if not candidate_pkg:
# The user didn't provide a versioned rpm, so version checking is
# not required
return False
installed = self.base.sack.query().installed()
installed_pkg = installed.filter(name=candidate_pkg['name']).run()
if installed_pkg:
installed_pkg = installed_pkg[0]
# this looks weird but one is a dict and the other is a dnf.Package
evr_cmp = self._compare_evr(
installed_pkg.epoch, installed_pkg.version, installed_pkg.release,
candidate_pkg['epoch'], candidate_pkg['version'], candidate_pkg['release'],
)
if evr_cmp == 1:
return True
else:
return False
else:
return False
def _mark_package_install(self, pkg_spec, upgrade=False):
"""Mark the package for install."""
is_newer_version_installed = self._is_newer_version_installed(pkg_spec)
is_installed = self._is_installed(pkg_spec)
try:
if self.allow_downgrade:
# dnf only does allow_downgrade, we have to handle this ourselves
# because it allows a possibility for non-idempotent transactions
# on a system's package set (pending the yum repo has many old
# NVRs indexed)
if upgrade:
if is_installed:
self.base.upgrade(pkg_spec)
else:
self.base.install(pkg_spec)
else:
self.base.install(pkg_spec)
elif not self.allow_downgrade and is_newer_version_installed:
return {'failed': False, 'msg': '', 'failure': '', 'rc': 0}
elif not is_newer_version_installed:
if upgrade:
if is_installed:
self.base.upgrade(pkg_spec)
else:
self.base.install(pkg_spec)
else:
self.base.install(pkg_spec)
else:
if upgrade:
if is_installed:
self.base.upgrade(pkg_spec)
else:
self.base.install(pkg_spec)
else:
self.base.install(pkg_spec)
return {'failed': False, 'msg': 'Installed: {0}'.format(pkg_spec), 'failure': '', 'rc': 0}
except dnf.exceptions.MarkingError as e:
return {
'failed': True,
'msg': "No package {0} available.".format(pkg_spec),
'failure': " ".join((pkg_spec, to_native(e))),
'rc': 1,
"results": []
}
except dnf.exceptions.DepsolveError as e:
return {
'failed': True,
'msg': "Depsolve Error occured for package {0}.".format(pkg_spec),
'failure': " ".join((pkg_spec, to_native(e))),
'rc': 1,
"results": []
}
except dnf.exceptions.Error as e:
if to_text("already installed") in to_text(e):
return {'failed': False, 'msg': '', 'failure': ''}
else:
return {
'failed': True,
'msg': "Unknown Error occured for package {0}.".format(pkg_spec),
'failure': " ".join((pkg_spec, to_native(e))),
'rc': 1,
"results": []
}
def _parse_spec_group_file(self):
pkg_specs, grp_specs, filenames = [], [], []
for name in self.names:
if name.endswith(".rpm"):
if '://' in name:
name = self.fetch_rpm_from_url(name)
filenames.append(name)
elif name.startswith("@"):
grp_specs.append(name[1:])
else:
pkg_specs.append(name)
return pkg_specs, grp_specs, filenames
def _update_only(self, pkgs):
not_installed = []
for pkg in pkgs:
if self._is_installed(pkg):
try:
if isinstance(to_text(pkg), text_type):
self.base.upgrade(pkg)
else:
self.base.package_upgrade(pkg)
except Exception as e:
self.module.fail_json(
msg="Error occured attempting update_only operation: {0}".format(to_native(e)),
results=[],
rc=1,
)
else:
not_installed.append(pkg)
return not_installed
def _install_remote_rpms(self, filenames):
if int(dnf.__version__.split(".")[0]) >= 2:
pkgs = list(sorted(self.base.add_remote_rpms(list(filenames)), reverse=True))
else:
pkgs = []
try:
for filename in filenames:
pkgs.append(self.base.add_remote_rpm(filename))
except IOError as e:
if to_text("Can not load RPM file") in to_text(e):
self.module.fail_json(
msg="Error occured attempting remote rpm install of package: {0}. {1}".format(filename, to_native(e)),
results=[],
rc=1,
)
if self.update_only:
self._update_only(pkgs)
else:
for pkg in pkgs:
try:
                    if self._is_newer_version_installed(self._package_dict(pkg)['nevra']):
                        # A newer version is already installed; only proceed
                        # (i.e., downgrade) when the user explicitly allows it.
                        if self.allow_downgrade:
                            self.base.package_install(pkg)
                    else:
                        self.base.package_install(pkg)
except Exception as e:
self.module.fail_json(
msg="Error occured attempting remote rpm operation: {0}".format(to_native(e)),
results=[],
rc=1,
)
def ensure(self):
allow_erasing = False
response = {
'msg': "",
'changed': False,
'results': [],
'rc': 0
}
# Accumulate failures. Package management modules install what they can
# and fail with a message about what they can't.
failure_response = {
'msg': "",
'failures': [],
'results': [],
'rc': 1
}
# Autoremove is called alone
# Jump to remove path where base.autoremove() is run
if not self.names and self.autoremove:
self.names = []
self.state = 'absent'
if self.names == ['*'] and self.state == 'latest':
try:
self.base.upgrade_all()
except dnf.exceptions.DepsolveError as e:
failure_response['msg'] = "Depsolve Error occured attempting to upgrade all packages"
self.module.fail_json(**failure_response)
else:
pkg_specs, group_specs, filenames = self._parse_spec_group_file()
if group_specs:
self.base.read_comps()
pkg_specs = [p.strip() for p in pkg_specs]
filenames = [f.strip() for f in filenames]
groups = []
environments = []
for group_spec in (g.strip() for g in group_specs):
group = self.base.comps.group_by_pattern(group_spec)
if group:
groups.append(group.id)
else:
environment = self.base.comps.environment_by_pattern(group_spec)
if environment:
environments.append(environment.id)
else:
self.module.fail_json(
msg="No group {0} available.".format(group_spec),
results=[],
)
if self.state in ['installed', 'present']:
# Install files.
self._install_remote_rpms(filenames)
for filename in filenames:
response['results'].append("Installed {0}".format(filename))
# Install groups.
for group in groups:
try:
group_pkg_count_installed = self.base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
if group_pkg_count_installed == 0:
response['results'].append("Group {0} already installed.".format(group))
else:
response['results'].append("Group {0} installed.".format(group))
except dnf.exceptions.DepsolveError as e:
failure_response['msg'] = "Depsolve Error occured attempting to install group: {0}".format(group)
self.module.fail_json(**failure_response)
except dnf.exceptions.Error as e:
# In dnf 2.0 if all the mandatory packages in a group do
# not install, an error is raised. We want to capture
# this but still install as much as possible.
failure_response['failures'].append(" ".join((group, to_native(e))))
for environment in environments:
try:
self.base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.DepsolveError as e:
failure_response['msg'] = "Depsolve Error occured attempting to install environment: {0}".format(environment)
self.module.fail_json(**failure_response)
except dnf.exceptions.Error as e:
failure_response['failures'].append(" ".join((environment, to_native(e))))
# Install packages.
if self.update_only:
not_installed = self._update_only(pkg_specs)
for spec in not_installed:
response['results'].append("Packages providing %s not installed due to update_only specified" % spec)
else:
for pkg_spec in pkg_specs:
install_result = self._mark_package_install(pkg_spec)
if install_result['failed']:
failure_response['msg'] += install_result['msg']
failure_response['failures'].append(self._sanitize_dnf_error_msg(pkg_spec, install_result['failure']))
else:
response['results'].append(install_result['msg'])
elif self.state == 'latest':
# "latest" is same as "installed" for filenames.
self._install_remote_rpms(filenames)
for filename in filenames:
response['results'].append("Installed {0}".format(filename))
for group in groups:
try:
try:
self.base.group_upgrade(group)
response['results'].append("Group {0} upgraded.".format(group))
except dnf.exceptions.CompsError:
if not self.update_only:
# If not already installed, try to install.
group_pkg_count_installed = self.base.group_install(group, dnf.const.GROUP_PACKAGE_TYPES)
if group_pkg_count_installed == 0:
response['results'].append("Group {0} already installed.".format(group))
else:
response['results'].append("Group {0} installed.".format(group))
except dnf.exceptions.Error as e:
failure_response['failures'].append(" ".join((group, to_native(e))))
for environment in environments:
try:
try:
self.base.environment_upgrade(environment)
except dnf.exceptions.CompsError:
# If not already installed, try to install.
self.base.environment_install(environment, dnf.const.GROUP_PACKAGE_TYPES)
except dnf.exceptions.DepsolveError as e:
failure_response['msg'] = "Depsolve Error occured attempting to install environment: {0}".format(environment)
except dnf.exceptions.Error as e:
failure_response['failures'].append(" ".join((environment, to_native(e))))
if self.update_only:
not_installed = self._update_only(pkg_specs)
for spec in not_installed:
response['results'].append("Packages providing %s not installed due to update_only specified" % spec)
else:
for pkg_spec in pkg_specs:
# best effort causes to install the latest package
# even if not previously installed
self.base.conf.best = True
install_result = self._mark_package_install(pkg_spec, upgrade=True)
if install_result['failed']:
failure_response['msg'] += install_result['msg']
failure_response['failures'].append(self._sanitize_dnf_error_msg(pkg_spec, install_result['failure']))
else:
response['results'].append(install_result['msg'])
else:
# state == absent
if filenames:
self.module.fail_json(
msg="Cannot remove paths -- please specify package name.",
results=[],
)
for group in groups:
try:
self.base.group_remove(group)
except dnf.exceptions.CompsError:
# Group is already uninstalled.
pass
except AttributeError:
# Group either isn't installed or wasn't marked installed at install time
# because of DNF bug
#
# This is necessary until the upstream dnf API bug is fixed where installing
# a group via the dnf API doesn't actually mark the group as installed
# https://bugzilla.redhat.com/show_bug.cgi?id=1620324
pass
for environment in environments:
try:
self.base.environment_remove(environment)
except dnf.exceptions.CompsError:
# Environment is already uninstalled.
pass
installed = self.base.sack.query().installed()
for pkg_spec in pkg_specs:
if installed.filter(name=pkg_spec):
self.base.remove(pkg_spec)
# Like the dnf CLI we want to allow recursive removal of dependent
# packages
allow_erasing = True
if self.autoremove:
self.base.autoremove()
try:
if not self.base.resolve(allow_erasing=allow_erasing):
if failure_response['failures']:
failure_response['msg'] = 'Failed to install some of the specified packages'
self.module.fail_json(**failure_response)
response['msg'] = "Nothing to do"
self.module.exit_json(**response)
else:
response['changed'] = True
if self.module.check_mode:
if failure_response['failures']:
                        failure_response['msg'] = 'Failed to install some of the specified packages'
self.module.fail_json(**failure_response)
response['msg'] = "Check mode: No changes made, but would have if not in check mode"
self.module.exit_json(**response)
try:
self.base.download_packages(self.base.transaction.install_set)
except dnf.exceptions.DownloadError as e:
self.module.fail_json(
msg="Failed to download packages: {0}".format(to_text(e)),
results=[],
)
if self.download_only:
for package in self.base.transaction.install_set:
response['results'].append("Downloaded: {0}".format(package))
self.module.exit_json(**response)
else:
self.base.do_transaction()
for package in self.base.transaction.install_set:
response['results'].append("Installed: {0}".format(package))
for package in self.base.transaction.remove_set:
response['results'].append("Removed: {0}".format(package))
if failure_response['failures']:
                        failure_response['msg'] = 'Failed to install some of the specified packages'
                        self.module.fail_json(**failure_response)
self.module.exit_json(**response)
except dnf.exceptions.DepsolveError as e:
failure_response['msg'] = "Depsolve Error occured: {0}".format(to_native(e))
self.module.fail_json(**failure_response)
except dnf.exceptions.Error as e:
if to_text("already installed") in to_text(e):
response['changed'] = False
response['results'].append("Package already installed: {0}".format(to_native(e)))
self.module.exit_json(**response)
else:
failure_response['msg'] = "Unknown Error occured: {0}".format(to_native(e))
self.module.fail_json(**failure_response)
@staticmethod
def has_dnf():
return HAS_DNF
def run(self):
"""The main function."""
# Check if autoremove is called correctly
if self.autoremove:
if LooseVersion(dnf.__version__) < LooseVersion('2.0.1'):
self.module.fail_json(
msg="Autoremove requires dnf>=2.0.1. Current dnf version is %s" % dnf.__version__,
results=[],
)
if self.state not in ["absent", None]:
self.module.fail_json(
msg="Autoremove should be used alone or with state=absent",
results=[],
)
# Set state as installed by default
        # This is not set in AnsibleModule() because the following shouldn't happen:
# - dnf: autoremove=yes state=installed
if self.state is None:
self.state = 'installed'
if self.list:
self.base = self._base(
self.conf_file, self.disable_gpg_check, self.disablerepo,
self.enablerepo, self.installroot
)
self.list_items(self.list)
else:
# Note: base takes a long time to run so we want to check for failure
# before running it.
if not dnf.util.am_i_root():
self.module.fail_json(
msg="This command has to be run under the root user.",
results=[],
)
self.base = self._base(
self.conf_file, self.disable_gpg_check, self.disablerepo,
self.enablerepo, self.installroot
)
self.ensure()
def main():
# state=installed name=pkgspec
# state=removed name=pkgspec
# state=latest name=pkgspec
#
# informational commands:
# list=installed
# list=updates
# list=available
# list=repos
# list=pkgspec
module = AnsibleModule(
**yumdnf_argument_spec
)
module_implementation = DnfModule(module)
try:
module_implementation.run()
except dnf.exceptions.RepoError as de:
module.fail_json(
msg="Failed to synchronize repodata: {0}".format(to_native(de)),
rc=1,
results=[],
changed=False
)
if __name__ == '__main__':
main()
|
py | 1a50c35acb43d4b76b6bba2c750bf6f722d6f781 | # -*- coding: utf-8 -*-
"""Defines several tools for monitoring net activity."""
# pylint: disable=F0401, E1101, too-many-lines, wrong-import-order
import logging as _logging
import os as _os
import subprocess as _subprocess
import collections as _collections
import numpy as _np
# pylint: disable=no-name-in-module
from scipy.stats import bernoulli as _bernoulli
from scipy.ndimage.interpolation import rotate as _rotate
from sklearn.decomposition import PCA as _PCA
from .tools import pad as _pad
# CAREFUL! This must be imported before any caffe-related import!
from .initialization import init as _init
import caffe as _caffe
try: # pragma: no cover
import cv2 as _cv2
_cv2INTER_CUBIC = _cv2.INTER_CUBIC # pylint: disable=invalid-name
_cv2INTER_LINEAR = _cv2.INTER_LINEAR # pylint: disable=invalid-name
_cv2INTER_NEAREST = _cv2.INTER_NEAREST # pylint: disable=invalid-name
_cv2resize = _cv2.resize # pylint: disable=invalid-name
except ImportError: # pragma: no cover
_cv2 = None
_cv2INTER_CUBIC = None # pylint: disable=invalid-name
_cv2INTER_LINEAR = None # pylint: disable=invalid-name
_cv2INTER_NEAREST = None # pylint: disable=invalid-name
_cv2resize = None # pylint: disable=invalid-name
try: # pragma: no cover
import matplotlib.pyplot as _plt
import matplotlib.ticker as _tkr
import matplotlib.colorbar as _colorbar
from mpl_toolkits.axes_grid1 import make_axes_locatable as _make_axes_locatable
_PLT_AVAILABLE = True
except ImportError: # pragma: no cover
_PLT_AVAILABLE = False
_init()
_LOGGER = _logging.getLogger(__name__)
class Monitor(object): # pylint: disable=R0903
"""
The monitor interface.
Should be implemented by any monitor class. The method
:py:func:`barrista.monitoring.Monitor.__call__` must be specified,
the function :py:func:`barrista.monitoring.Monitor.finalize` may
optionally be specified.
"""
def __call__(self, kwargs):
"""
The call implementation.
For available keyword arguments, see the documentation of
:py:class:`barrista.solver.SolverInterface.Fit`.
The callback signals are used as follows:
* initialize_train: called once before training starts,
* initialize_test: called once before training starts (if training with
a validation set is used) or once before testing,
* pre_fit: called before fitting mode is used (e.g., before going
back to fitting during training after a validation run),
* pre_test: called before testing mode is used (e.g., during training
before validation starts),
* post_test: called when testing finished,
* pre_train_batch: before a training batch is fed to the network,
* post_train_batch: after forwarding a training batch,
* pre_test_batch: before a test batch is fed to the network,
* post_test_batch: after a test batch was forwarded through the
network.
"""
if kwargs['callback_signal'] == 'initialize_train':
self._initialize_train(kwargs)
elif kwargs['callback_signal'] == 'initialize_test':
self._initialize_test(kwargs)
elif kwargs['callback_signal'] == 'pre_fit':
self._pre_fit(kwargs)
elif kwargs['callback_signal'] == 'pre_test':
self._pre_test(kwargs)
elif kwargs['callback_signal'] == 'post_test':
self._post_test(kwargs)
elif kwargs['callback_signal'] == 'pre_test_batch':
self._pre_test_batch(kwargs)
elif kwargs['callback_signal'] == 'post_test_batch':
self._post_test_batch(kwargs)
elif kwargs['callback_signal'] == 'pre_train_batch':
self._pre_train_batch(kwargs)
elif kwargs['callback_signal'] == 'post_train_batch':
self._post_train_batch(kwargs)
def _initialize_train(self, kwargs): # pylint: disable=C0111
pass
def _initialize_test(self, kwargs): # pylint: disable=C0111
pass
def _pre_fit(self, kwargs): # pylint: disable=C0111
pass
def _pre_test(self, kwargs): # pylint: disable=C0111
pass
def _post_test(self, kwargs): # pylint: disable=C0111
pass
def _pre_test_batch(self, kwargs): # pylint: disable=C0111
pass
def _post_test_batch(self, kwargs): # pylint: disable=C0111
pass
def _pre_train_batch(self, kwargs): # pylint: disable=C0111
pass
def _post_train_batch(self, kwargs): # pylint: disable=C0111
pass
def finalize(self, kwargs):
"""Will be called at the end of a training/fitting process."""
pass
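# A minimal custom monitor sketch (illustrative, commented out so the
# module's public surface stays unchanged): it shows how the signal
# dispatch above maps to the private hooks of a subclass.
#
# class BatchCounter(Monitor):
#     """Counts processed training batches."""
#
#     def __init__(self):
#         self._n_batches = 0
#
#     def _post_train_batch(self, kwargs):
#         self._n_batches += 1
#
#     def finalize(self, kwargs):
#         _LOGGER.info("Processed %d training batches.", self._n_batches)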
class DataMonitor(Monitor): # pylint: disable=R0903
r"""
Monitor interface for filling the blobs of a network.
This is a specific monitor which will fill the blobs of the network
for the forward pass or solver step.
Ideally, there should only be one such monitor per callback,
but multiple ones are possible.
"""
pass
class ParallelMonitor(Monitor):
r"""
Monitor interface for monitors executed parallel to processing a batch.
The order of all monitors implementing this interface is respected. They
will work on a dummy network object with dummy blobs and prepare their
data. The dummy blob content is then copied to the real network prior
to the next batch execution.
"""
def get_parallel_blob_names(self): # pragma: no cover
"""Get the names of all blobs that must be provided for the dummy."""
raise NotImplementedError()
# pylint: disable=too-few-public-methods
class StaticDataMonitor(DataMonitor, ParallelMonitor):
r"""
Always provides the same data for a specific net input blob.
Parameters
==========
:param X: dict(string, np.ndarray)
The static input blobs to use.
"""
def __init__(self, X):
self._X = X # pylint: disable=C0103
def _initialize_train(self, kwargs):
self._initialize(kwargs)
def _initialize_test(self, kwargs):
self._initialize(kwargs)
def _initialize(self, kwargs):
if 'test' in kwargs['callback_signal']:
net = kwargs['testnet']
else:
net = kwargs['net']
for key, value in list(self._X.items()):
assert key in list(net.blobs.keys()), (
'data key has no corresponding network blob {} {}'.format(
key, str(list(net.blobs.keys()))))
assert isinstance(value, _np.ndarray), (
'data must be a numpy nd array ({})'.format(type(value))
)
def _pre_train_batch(self, kwargs):
self._pre_batch(kwargs['net'], kwargs)
def _pre_test_batch(self, kwargs):
self._pre_batch(kwargs['testnet'], kwargs)
def _pre_batch(self, net, kwargs): # pylint: disable=unused-argument
for key in list(self._X.keys()):
net.blobs[key].data[...] = self._X[key]
def get_parallel_blob_names(self):
"""Get the names of all blobs that must be provided for the dummy."""
return list(self._X.keys())
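# Usage sketch (illustrative, commented out): feed a constant blob on every
# batch. The blob name 'mean' and the shape are assumptions and must match
# the network definition; the monitor is then passed to the solver's fit or
# predict call together with the other monitors.
#
# static_monitor = StaticDataMonitor(
#     X={'mean': _np.zeros((10, 3, 224, 224), dtype='float32')})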
# pylint: disable=too-few-public-methods
class OversamplingDataMonitor(DataMonitor, ParallelMonitor):
r"""
Provides oversampled data.
Parameters
==========
:param blobinfos: dict(string, string|None).
      Associates each blob name to oversample with an optional interpolation
      method to use for resizing. This may be 'n' (nearest neighbour),
'c' (cubic), 'l' (linear) or None (no interpolation). If an
interpolation method is selected, `before_oversample_resize_to` must
be not None and provide a size.
:param before_oversample_resize_to: dict(string, 2-tuple).
Specifies a size to which the image inputs will be resized before the
oversampling is invoked.
"""
def __init__(self,
blobinfos,
before_oversample_resize_to=None):
for val in blobinfos.values():
assert val in ['n', 'c', 'l', None]
self._blobinfos = blobinfos
for key, val in blobinfos.items():
if val is not None:
assert key in list(before_oversample_resize_to.keys())
self._before_oversample_resize_to = before_oversample_resize_to
self._batch_size = None
def get_parallel_blob_names(self):
"""Get the names of all blobs that must be provided for the dummy."""
return list(self._blobinfos.keys())
def _initialize_train(self, kwargs):
raise Exception("The OversamplingDataMonitor can only be used during "
"testing!")
def _initialize_test(self, kwargs):
if 'test' in kwargs['callback_signal']:
net = kwargs['testnet']
else:
net = kwargs['net']
for key in list(self._blobinfos.keys()):
assert key in list(net.blobs.keys()), (
'data key has no corresponding network blob {} {}'.format(
key, str(list(net.blobs.keys()))))
def _pre_test(self, kwargs): # pragma: no cover
net = kwargs['testnet']
self._batch_size = net.blobs[
list(self._blobinfos.keys())[0]].data.shape[0]
def _pre_test_batch(self, kwargs): # pragma: no cover
for blob_name in list(self._blobinfos):
assert blob_name in kwargs['data_orig'], (
"The unchanged data must be provided by another DataProvider, "
"e.g., CyclingDataMonitor with `only_preload`!")
assert (len(kwargs['data_orig'][blob_name]) * 10 ==
self._batch_size), (
"The number of provided images * 10 must be the batch "
"size!")
# pylint: disable=invalid-name
for im_idx, im in enumerate(kwargs['data_orig'][blob_name]):
if self._blobinfos[blob_name] is not None:
if self._blobinfos[blob_name] == 'n':
interpolation = _cv2INTER_NEAREST
elif self._blobinfos[blob_name] == 'c':
interpolation = _cv2INTER_CUBIC
elif self._blobinfos[blob_name] == 'l':
interpolation = _cv2INTER_LINEAR
oversampling_prep = _cv2resize(
_np.transpose(im, (1, 2, 0)),
(self._before_oversample_resize_to[blob_name][1],
self._before_oversample_resize_to[blob_name][0]),
interpolation=interpolation)
else:
oversampling_prep = _np.transpose(im, (1, 2, 0))
imshape = kwargs['testnet'].blobs[blob_name].data.shape[2:4]
kwargs['testnet'].blobs[blob_name].data[
im_idx * 10:(im_idx+1) * 10] =\
_np.transpose(
_caffe.io.oversample(
[oversampling_prep],
imshape),
(0, 3, 1, 2))
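# Usage sketch (illustrative, commented out): oversample the blob 'data'
# tenfold after resizing the originals to 256x256. Blob name and sizes are
# assumptions; the network batch size must be ten times the number of
# images preloaded by a preceding CyclingDataMonitor with ``only_preload``.
#
# oversampler = OversamplingDataMonitor(
#     blobinfos={'data': 'l'},
#     before_oversample_resize_to={'data': (256, 256)})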
# pylint: disable=too-many-instance-attributes, R0903
class CyclingDataMonitor(DataMonitor, ParallelMonitor):
r"""
Uses the data sequentially.
    This monitor maps data to the network and cycles through the data
sequentially. It is the default monitor used if a user provides X
or X_val to the barrista.solver.fit method.
    If further processing of the original data is intended, use
    ``only_preload``: the following monitors then find a dictionary of lists
    of the original datapoints under the name 'data_orig' in their
    ``kwargs``. The data is in this case NOT written to the network input
    layers! This can make sense, e.g., for the ``ResizingMonitor``.
:param X: dict of numpy.ndarray or list, or None.
      If specified, is used as input data. It is used sequentially, so
      shuffle it beforehand, if required. The keys of the dict must have
a corresponding layer name in the net. The values must be provided
already in network dimension order, i.e., usually channels, height,
width.
:param only_preload: list(string).
List of blobs for which the data will be loaded and stored in a dict
of (name: list) for further processing with other monitors.
:param input_processing_flags: dict(string, string).
Dictionary associating input blob names with intended preprocessing
methods. Valid values are:
* n: none,
* rn: resize, nearest neighbour,
* rc: resize, cubic,
* rl: resize, linear,
* pX: padding, with value X.
:param virtual_batch_size: int or None.
      Override the network batch size. May only be used together with
      ``only_preload``. Only makes sense with another DataMonitor in
      succession.
:param color_data_augmentation_sigmas: dict(string, float) or None.
Enhance the color of the samples as described in (Krizhevsky et al.,
2012). The parameter gives the sigma for the normal distribution that is
sampled to obtain the weights for scaled pixel principal components per
blob.
:param shuffle: Bool.
If set to True, shuffle the data every epoch. Default: False.
"""
# pylint: disable=too-many-arguments
def __init__(self,
X,
only_preload=None,
input_processing_flags=None,
virtual_batch_size=None,
color_data_augmentation_sigmas=None,
shuffle=False):
"""See class documentation."""
if only_preload is None:
only_preload = []
self.only_preload = only_preload
self._X = X # pylint: disable=C0103
assert X is not None
if input_processing_flags is None:
input_processing_flags = dict()
self._input_processing_flags = input_processing_flags
for key in input_processing_flags.keys():
assert key in self._X.keys()
self._padvals = dict()
for key, val in input_processing_flags.items():
assert (val in ['n', 'rn', 'rc', 'rl'] or
val.startswith('p')), (
"The input processing flags for the CyclingDataMonitor "
"must be in ['n', 'rn', 'rc', 'rl', 'p']: {}!".format(
val))
if val.startswith('p'):
self._padvals[key] = int(val[1:])
for key in self.only_preload:
assert key in self._X.keys()
self._sample_pointer = 0
self._len_data = None
self._initialized = False
self._batch_size = None
assert virtual_batch_size is None or self.only_preload, (
"If the virtual_batch_size is set, `only_preload` must be used!")
if virtual_batch_size is not None:
assert virtual_batch_size > 0
self._virtual_batch_size = virtual_batch_size
if color_data_augmentation_sigmas is None:
color_data_augmentation_sigmas = dict()
self._color_data_augmentation_sigmas = color_data_augmentation_sigmas
for key in list(self._color_data_augmentation_sigmas.keys()):
assert key in list(self._X.keys())
for key in list(self._X.keys()):
if key not in list(self._color_data_augmentation_sigmas.keys()):
self._color_data_augmentation_sigmas[key] = 0.
# pylint: disable=invalid-name
self._color_data_augmentation_weights = dict()
# pylint: disable=invalid-name
self._color_data_augmentation_components = dict()
self._shuffle = shuffle
self._sample_order = None
def get_parallel_blob_names(self):
return list(self._X.keys())
def _initialize_train(self, kwargs):
self._initialize(kwargs)
# Calculate the color channel PCA per blob if required.
for bname, sigma in self._color_data_augmentation_sigmas.items():
if sigma > 0.:
_LOGGER.info("Performing PCA for color data augmentation for "
"blob '%s'...", bname)
for im in self._X[bname]: # pylint: disable=invalid-name
assert im.ndim == 3 and im.shape[0] == 3, (
"To perform the color data augmentation, images must "
"be provided in shape (3, height, width).")
flldta = _np.vstack(
[im.reshape((3, im.shape[1] * im.shape[2])).T
for im in self._X[bname]])
# No need to copy the data another time, since `vstack` already
# copied it.
pca = _PCA(copy=False, whiten=False)
pca.fit(flldta)
self._color_data_augmentation_weights[bname] = _np.sqrt(
pca.explained_variance_.astype('float32'))
self._color_data_augmentation_components[bname] = \
pca.components_.T.astype('float32')
def _initialize_test(self, kwargs):
self._initialize(kwargs)
def _initialize(self, kwargs):
# we make sure, now that the network is available, that
# all names in the provided data dict has a corresponding match
# in the network
if self._initialized:
raise Exception("This DataProvider has already been intialized! "
"Did you maybe try to use it for train and test? "
"This is not possible!")
if 'test' in kwargs['callback_signal']:
net = kwargs['testnet']
else:
net = kwargs['net']
self._len_data = len(list(self._X.values())[0])
for key, value in list(self._X.items()):
if key not in self._input_processing_flags:
self._input_processing_flags[key] = 'n'
assert key in list(net.blobs.keys()), (
'data key has no corresponding network blob {} {}'.format(
key, str(list(net.blobs.keys()))))
assert len(value) == self._len_data, (
'all items need to have the same length {} vs {}'.format(
len(value), self._len_data))
assert isinstance(value, _np.ndarray) or isinstance(value, list), (
'data must be a numpy nd array or list ({})'.format(type(value))
)
self._sample_order = list(range(self._len_data))
if self._shuffle:
_np.random.seed(1)
self._sample_order = _np.random.permutation(self._sample_order)
self._initialized = True
def _pre_fit(self, kwargs):
if 'test' in kwargs['callback_signal']:
net = kwargs['testnet']
else:
net = kwargs['net']
if self._virtual_batch_size is not None:
self._batch_size = self._virtual_batch_size
else:
self._batch_size = net.blobs[list(self._X.keys())[0]].data.shape[0]
assert self._batch_size > 0
def _pre_test(self, kwargs):
self._pre_fit(kwargs)
self._sample_pointer = 0
def _pre_train_batch(self, kwargs):
self._pre_batch(kwargs['net'], kwargs)
def _pre_test_batch(self, kwargs):
self._pre_batch(kwargs['testnet'], kwargs)
def _color_augment(self, bname, sample):
sigma = self._color_data_augmentation_sigmas[bname]
if sigma == 0.:
if isinstance(sample, (int, float)):
return float(sample)
else:
return sample.astype('float32')
else:
comp_weights = _np.random.normal(0., sigma, 3).astype('float32') *\
self._color_data_augmentation_weights[bname]
noise = _np.dot(self._color_data_augmentation_components[bname],
comp_weights.T)
return (sample.astype('float32').transpose((1, 2, 0)) + noise)\
.transpose((2, 0, 1))
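    # The augmentation above follows Krizhevsky et al. (2012): with the PCA
    # eigenvector matrix P (columns) and the eigenvalue square roots s of
    # the color covariance, every pixel of the sample is shifted by
    #     noise = P @ (alpha * s),  alpha ~ N(0, sigma^2) drawn per channel,
    # i.e., a single color offset is added identically to all pixels.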
def _pre_batch(self, net, kwargs): # pylint: disable=C0111, W0613, R0912
# this will simply cycle through the data.
samples_ids = [self._sample_order[idx % self._len_data]
for idx in
range(self._sample_pointer,
self._sample_pointer + self._batch_size)]
# updating the sample pointer for the next time
old_sample_pointer = self._sample_pointer
self._sample_pointer = (
(self._sample_pointer + len(samples_ids)) % self._len_data)
if self._shuffle and old_sample_pointer > self._sample_pointer:
# Epoch ended. Reshuffle.
self._sample_order = _np.random.permutation(self._sample_order)
if len(self.only_preload) > 0:
sample_dict = dict()
for key in list(self._X.keys()): # pylint: disable=too-many-nested-blocks
if key in self.only_preload:
sample_dict[key] = []
# this will actually fill the data for the network
for sample_idx in range(self._batch_size):
augmented_sample = self._color_augment(
key,
self._X[key][samples_ids[sample_idx]])
if key in self.only_preload:
sample_dict[key].append(augmented_sample)
else:
if (net.blobs[key].data[sample_idx].size == 1 and (
isinstance(self._X[key][samples_ids[sample_idx]],
(int, float)) or
self._X[key][samples_ids[sample_idx]].size == 1) or
self._X[key][samples_ids[sample_idx]].size ==
net.blobs[key].data[sample_idx].size):
if net.blobs[key].data[sample_idx].size == 1:
net.blobs[key].data[sample_idx] =\
augmented_sample
else:
net.blobs[key].data[sample_idx] = (
augmented_sample.reshape(
net.blobs[key].data.shape[1:]))
else:
if self._input_processing_flags[key] == 'n': # pragma: no cover
raise Exception(("Sample size {} does not match " +
"network input size {} and no " +
"preprocessing is allowed!")
.format(
augmented_sample.size,
net.blobs[key].data[sample_idx].size))
elif self._input_processing_flags[key] in ['rn',
'rc',
'rl']:
assert (
augmented_sample.shape[0]
== net.blobs[key].data.shape[1])
                            if self._input_processing_flags[key] == 'rn':
                                interp_method = _cv2INTER_NEAREST
                            elif self._input_processing_flags[key] == 'rc':
                                interp_method = _cv2INTER_CUBIC
                            else:
                                interp_method = _cv2INTER_LINEAR
for channel_idx in range(
net.blobs[key].data.shape[1]):
net.blobs[key].data[sample_idx, channel_idx] =\
_cv2resize(
augmented_sample[channel_idx],
(net.blobs[key].data.shape[3],
net.blobs[key].data.shape[2]),
interpolation=interp_method)
else:
# Padding.
net.blobs[key].data[sample_idx] = _pad(
augmented_sample,
net.blobs[key].data.shape[2:4],
val=self._padvals[key])
if len(self.only_preload) > 0:
kwargs['data_orig'] = sample_dict
class ResizingMonitor(ParallelMonitor, Monitor): # pylint: disable=R0903
r"""
Optionally resizes input data and adjusts the network input shape.
This monitor optionally resizes the input data randomly and adjusts
the network input size accordingly (this works only for batch size 1
and fully convolutional networks).
For this to work, it must be used with the ``CyclingDataMonitor`` with
``only_preload`` set.
:param blobinfos: dict(string, int).
Describes which blobs to apply the resizing operation to, and which
padding value to use for the remaining space.
    :param base_scale: float.
      If set to a value different from 1., apply the given base scale to
      the images first. In that case, the parameter ``interp_methods``
      must be set.
:param random_change_up_to: float.
If set to a value different than 0., the scale change is altered
randomly with a uniformly drawn value from -``random_change_up_to`` to
``random_change_up_to``, that is being added to the base value.
:param net_input_size_adjustment_multiple_of: int.
If set to a value greater than 0, the blobs shape is adjusted from its
initial value (which is used as minimal one) in multiples of the given
one.
:param interp_methods: dict(string, string).
Dictionary which stores for every blob the interpolation method. The
string must be for each blob in ['n', 'c', 'l'] (nearest neighbour,
cubic, linear).
"""
def __init__(self, # pylint: disable=R0913
blobinfos,
base_scale=1.,
random_change_up_to=0.,
net_input_size_adjustment_multiple_of=0,
interp_methods=None):
"""See class documentation."""
self._blobinfos = blobinfos
self._base_scale = base_scale
self._random_change_up_to = random_change_up_to
if self._base_scale != 1. or self._random_change_up_to != 0.:
assert interp_methods is not None
for key in self._blobinfos.keys():
assert key in interp_methods.keys()
assert interp_methods[key] in ['n', 'c', 'l']
self._interp_methods = interp_methods
self._adjustment_multiple_of = net_input_size_adjustment_multiple_of
self._min_input_size = None
self._batch_size = None
def _initialize_train(self, kwargs):
self._initialize(kwargs)
def _initialize_test(self, kwargs):
self._initialize(kwargs)
def _initialize(self, kwargs):
# we make sure, now that the network is available, that
# all names in the provided data dict have a corresponding match
# in the network
if 'test' in kwargs['callback_signal']:
net = kwargs['testnet']
else:
net = kwargs['net']
for key in list(self._blobinfos.keys()):
assert key in list(net.blobs.keys()), (
'data key has no corresponding network blob {} {}'.format(
key, str(list(net.blobs.keys()))))
assert net.blobs[key].data.ndim == 4
if self._adjustment_multiple_of > 0:
if self._min_input_size is None:
self._min_input_size = net.blobs[key].data.shape[2:4]
else:
assert (net.blobs[key].data.shape[2:4] ==
self._min_input_size), (
'if automatic input size adjustment is '
'activated, all inputs must be of same size '
'(first: {}, {}: {})'.format(
self._min_input_size, key,
net.blobs[key].data.shape[2:4]))
def _pre_fit(self, kwargs):
if 'test' in kwargs['callback_signal']:
net = kwargs['testnet']
else:
net = kwargs['net']
self._batch_size = net.blobs[
list(self._blobinfos.keys())[0]].data.shape[0]
if self._adjustment_multiple_of > 0:
assert self._batch_size == 1, (
"If size adjustment is activated, the batch size must be one!")
def _pre_test(self, kwargs):
self._pre_fit(kwargs)
def _pre_train_batch(self, kwargs):
self._pre_batch(kwargs['net'], kwargs)
def _pre_test_batch(self, kwargs):
self._pre_batch(kwargs['testnet'], kwargs)
# pylint: disable=C0111, W0613, R0912, too-many-locals
def _pre_batch(self, net, kwargs):
scales = None
sizes = None
        if 'data_orig' not in kwargs:
raise Exception(
"This data monitor needs a data providing monitor "
"to run in advance (e.g., a CyclingDataMonitor with "
"`only_preload`)!")
for key, value in kwargs['data_orig'].items():
assert len(value) == self._batch_size
if sizes is None:
sizes = []
for img in value:
sizes.append(img.shape[1:3])
else:
for img_idx, img in enumerate(value):
# pylint: disable=unsubscriptable-object
assert img.shape[1:3] == sizes[img_idx]
for key, padval in self._blobinfos.items():
if scales is None:
scales = []
for sample_idx in range(self._batch_size):
if self._random_change_up_to > 0:
scales.append(
self._base_scale +
_np.random.uniform(low=-self._random_change_up_to,
high=self._random_change_up_to))
else:
scales.append(self._base_scale)
for sample_idx in range(self._batch_size):
# Get the scaled data.
scaled_sample = kwargs['data_orig'][key][sample_idx]
if scales[sample_idx] != 1.:
scaled_sample = _np.empty((scaled_sample.shape[0],
int(scaled_sample.shape[1] *
scales[sample_idx]),
int(scaled_sample.shape[2] *
scales[sample_idx])),
dtype='float32')
if self._interp_methods[key] == 'n':
interpolation_method = _cv2INTER_NEAREST
elif self._interp_methods[key] == 'l':
interpolation_method = _cv2INTER_LINEAR
else:
interpolation_method = _cv2INTER_CUBIC
for layer_idx in range(scaled_sample.shape[0]):
scaled_sample[layer_idx] = _cv2resize(
kwargs['data_orig'][key][sample_idx][layer_idx],
(scaled_sample.shape[2],
scaled_sample.shape[1]),
interpolation=interpolation_method)
# If necessary, adjust the network input size.
if self._adjustment_multiple_of > 0:
image_height, image_width = scaled_sample.shape[1:3]
netinput_height = int(max(
self._min_input_size[0] +
_np.ceil(
float(image_height - self._min_input_size[0]) /
self._adjustment_multiple_of) *
self._adjustment_multiple_of,
self._min_input_size[0]))
netinput_width = int(max(
self._min_input_size[1] +
_np.ceil(
float(image_width - self._min_input_size[1]) /
self._adjustment_multiple_of) *
self._adjustment_multiple_of,
self._min_input_size[1]))
net.blobs[key].reshape(1,
scaled_sample.shape[0],
netinput_height,
netinput_width)
# Put the data in place.
net.blobs[key].data[sample_idx] = _pad(
scaled_sample,
net.blobs[key].data.shape[2:4],
val=padval)
def get_parallel_blob_names(self):
"""Get the names of all blobs that must be provided for the dummy."""
return list(self._blobinfos.keys())
# pylint: disable=too-few-public-methods
class RotatingMirroringMonitor(ParallelMonitor, Monitor):
r"""
Rotate and/or horizontally mirror samples within blobs.
For every sample, the rotation and mirroring will be consistent
across the blobs.
:param blobinfos: dict(string, int).
A dictionary containing the blob names and the padding values that
will be applied.
    :param max_rotation_degrees: float.
      The rotation will be sampled uniformly from the interval
      [-max_rotation_degrees, max_rotation_degrees) for each sample.
    :param mirror_prob: float.
      The probability that horizontal mirroring occurs. It is sampled
      individually for every sample.
:param mirror_value_swaps: dict(string, dict(int, list(2-tuples))).
Specifies for every blob for every layer whether any values must be
swapped if mirroring is applied. This is important when, e.g.,
mirroring annotation maps with left-right information. Every 2-tuple
contains (original value, new value). The locations of the swaps are
determined before any change is applied, so the order of tuples does not
play a role.
:param mirror_layer_swaps: dict(string, list(2-tuples)).
Specifies for every blob whether any layers must be swapped if
mirroring is applied. Can be used together with mirror_value_swaps: in
this case, the `mirror_value_swaps` are applied first, then the layers
are swapped.
"""
# pylint: disable=too-many-arguments
def __init__(self,
blobinfos,
max_rotation_degrees,
mirror_prob=0.,
mirror_value_swaps=None,
mirror_layer_swaps=None):
"""See class documentation."""
self._blobinfos = blobinfos
self._rotation_degrees = max_rotation_degrees
self._mirror_prob = mirror_prob
self._batch_size = None
if mirror_value_swaps is None:
mirror_value_swaps = dict()
for key in list(mirror_value_swaps.keys()):
assert key in self._blobinfos, ("Blob not in handled: {}!"\
.format(key))
for layer_idx in list(mirror_value_swaps[key].keys()):
m_tochange = []
for swappair in mirror_value_swaps[key][layer_idx]:
assert len(swappair) == 2, (
"Swaps must be specified as (from_value, to_value): {}"\
.format(mirror_value_swaps[key][layer_idx]))
assert swappair[0] not in m_tochange, (
"Every value may change only to one new: {}."\
.format(mirror_value_swaps[key][layer_idx]))
m_tochange.append(swappair[0])
                    assert blobinfos[key] not in swappair, (
                        "A specified swap value is the fill value for this "
                        "blob: {}, {}, {}.".format(key,
                                                   blobinfos[key],
                                                   swappair))
if mirror_layer_swaps is None:
mirror_layer_swaps = dict()
for key in list(mirror_layer_swaps.keys()):
assert key in self._blobinfos, ("Blob not handled: {}!"\
.format(key))
idx_tochange = []
for swappair in mirror_layer_swaps[key]:
assert len(swappair) == 2, (
"Swaps must be specified as (from_value, to_value): {}"\
.format(swappair))
assert (swappair[0] not in idx_tochange and
swappair[1] not in idx_tochange), (
"Every value may only be swapped to or from one "
"position!")
idx_tochange.extend(swappair)
for key in list(self._blobinfos):
if key not in list(mirror_value_swaps.keys()):
mirror_value_swaps[key] = dict()
if key not in list(mirror_layer_swaps.keys()):
mirror_layer_swaps[key] = []
self._mirror_value_swaps = mirror_value_swaps
self._mirror_layer_swaps = mirror_layer_swaps
def get_parallel_blob_names(self):
"""Get the names of all blobs that must be provided for the dummy."""
return list(self._blobinfos.keys())
def _initialize_train(self, kwargs):
self._initialize(kwargs)
def _initialize_test(self, kwargs):
self._initialize(kwargs)
def _initialize(self, kwargs):
# we make sure, now that the network is available, that
# all names in the provided data dict have a corresponding match
# in the network
if 'test' in kwargs['callback_signal']:
net = kwargs['testnet']
else:
net = kwargs['net']
for key in list(self._blobinfos.keys()):
assert key in list(net.blobs.keys()), (
'data key has no corresponding network blob {} {}'.format(
key, str(list(net.blobs.keys()))))
assert net.blobs[key].data.ndim == 4
for layer_idx in self._mirror_value_swaps[key].keys():
assert layer_idx < net.blobs[key].data.shape[1], ((
"The data for blob {} has not enough layers for swapping "
"{}!").format(key, layer_idx))
for swappair in self._mirror_layer_swaps[key]:
assert (swappair[0] < net.blobs[key].data.shape[1] and
swappair[1] < net.blobs[key].data.shape[1]), (
"Not enough layers in blob {} to swap {}!".format(
key, swappair))
def _pre_fit(self, kwargs):
if 'test' in kwargs['callback_signal']:
net = kwargs['testnet']
else:
net = kwargs['net']
self._batch_size = net.blobs[
list(self._blobinfos.keys())[0]].data.shape[0]
def _pre_test(self, kwargs):
self._pre_fit(kwargs)
def _pre_train_batch(self, kwargs):
self._pre_batch(kwargs['net'], kwargs)
def _pre_test_batch(self, kwargs):
self._pre_batch(kwargs['testnet'], kwargs)
# pylint: disable=C0111, W0613, R0912, too-many-locals
def _pre_batch(self, net, kwargs):
rotations = None
mirrorings = None
spline_interpolation_order = 0
prefilter = False
for key, padval in self._blobinfos.items():
if rotations is None:
rotations = []
if self._rotation_degrees > 0.:
rotations = _np.random.uniform(low=-self._rotation_degrees,
high=self._rotation_degrees,
size=self._batch_size)
else:
rotations = [0.] * self._batch_size
if mirrorings is None:
mirrorings = []
if self._mirror_prob > 0.:
mirrorings = _bernoulli.rvs(self._mirror_prob,
size=self._batch_size)
else:
mirrorings = [0] * self._batch_size
for sample_idx in range(self._batch_size):
if rotations[sample_idx] != 0.:
net.blobs[key].data[sample_idx] = _rotate(
net.blobs[key].data[sample_idx],
rotations[sample_idx],
(1, 2),
reshape=False,
order=spline_interpolation_order,
mode='constant',
cval=padval,
prefilter=prefilter)
if mirrorings[sample_idx] == 1.:
net.blobs[key].data[sample_idx] = \
net.blobs[key].data[sample_idx, :, :, ::-1]
for layer_idx in range(net.blobs[key].data.shape[1]):
if (layer_idx not in
self._mirror_value_swaps[key].keys()):
continue
swap_indices = dict()
swap_tuples = self._mirror_value_swaps[key][layer_idx]
# Swaps.
for swappair in swap_tuples:
swap_indices[swappair[0]] = (
net.blobs[key].data[sample_idx, layer_idx] ==\
swappair[0])
for swappair in swap_tuples:
net.blobs[key].data[sample_idx, layer_idx][
swap_indices[swappair[0]]] = swappair[1]
if len(self._mirror_layer_swaps[key]) > 0:
new_layer_order = list(
range(net.blobs[key].data.shape[1]))
for swappair in self._mirror_layer_swaps[key]:
new_layer_order[swappair[0]],\
new_layer_order[swappair[1]] = \
new_layer_order[swappair[1]],\
new_layer_order[swappair[0]]
net.blobs[key].data[...] = net.blobs[key].data[
:, tuple(new_layer_order)]
class ResultExtractor(Monitor): # pylint: disable=R0903
r"""
This monitor is designed for monitoring scalar layer results.
    The main use case is scalar outputs such as loss and accuracy.
    IMPORTANT: this monitor will change cbparams and add new values to it;
    most likely other monitors will depend on this. Thus, ResultExtractors
    should be among the first monitors in the callback list, e.g., by
    always inserting them at the beginning.
It will extract the value of a layer and add the value to the cbparam.
:param cbparam_key: string.
The key we will overwrite/set in the cbparams dict.
:param layer_name: string.
The layer to extract the value from.
"""
def __init__(self, cbparam_key, layer_name):
"""See class documentation."""
self._layer_name = layer_name
self._cbparam_key = cbparam_key
self._init = False
self._not_layer_available = True
self._test_data = None
def __call__(self, kwargs):
"""Callback implementation."""
if self._not_layer_available and self._init:
return
Monitor.__call__(self, kwargs)
def _initialize_train(self, kwargs):
self._initialize(kwargs)
def _initialize_test(self, kwargs):
self._initialize(kwargs)
def _initialize(self, kwargs):
if self._init:
raise Exception("This ResultExtractor is already initialized! "
"Did you try to use it for train and test?")
if 'test' in kwargs['callback_signal']:
tmp_net = kwargs['testnet']
else:
tmp_net = kwargs['net']
if self._layer_name in list(tmp_net.blobs.keys()):
self._not_layer_available = False
self._init = True
        assert self._cbparam_key not in kwargs, (
            'it is only allowed to add keys to the cbparam, '
            'not overwrite them {} {}'.format(self._cbparam_key,
                                              list(kwargs.keys())))
def _pre_train_batch(self, kwargs):
kwargs[self._cbparam_key] = 0.0
def _post_train_batch(self, kwargs):
kwargs[self._cbparam_key] = float(
kwargs['net'].blobs[self._layer_name].data[...].ravel()[0])
def _pre_test(self, kwargs):
self._test_data = []
def _post_test(self, kwargs):
kwargs[self._cbparam_key] = _np.mean(self._test_data)
def _post_test_batch(self, kwargs):
# need to multiply by batch_size since it is normalized
# internally
self._test_data.append(float(
kwargs['testnet'].blobs[self._layer_name].data[...].ravel()[0]))
kwargs[self._cbparam_key] = self._test_data[-1]
# Again, tested in a subprocess and not discovered.
# pylint: disable=R0903
class ProgressIndicator(Monitor): # pragma: no cover
r"""
Generates a progress bar with current information about the process.
The progress bar always displays completion percentage and ETA. If
available, it also displays loss, accuracy, test loss and test accuracy.
It makes use of the following keyword arguments (\* indicates required):
* ``iter``\*,
* ``max_iter``\*,
* ``train_loss``,
* ``test_loss``,
* ``train_accuracy``,
* ``test_accuracy``.
"""
def __init__(self):
"""See class documentation."""
self.loss = None
self.test_loss = None
self.accuracy = None
self.test_accuracy = None
import tqdm
self.pbarclass = tqdm.tqdm
self.pbar = None
self.last_iter = 0
def _perf_string(self):
pstr = ''
if self.loss is not None:
pstr += 'ls: {0:.4f}|'.format(self.loss)
if self.accuracy is not None:
pstr += 'ac: {0:.4f}|'.format(self.accuracy)
if self.test_loss is not None:
pstr += 'tls: {0:.4f}|'.format(self.test_loss)
if self.test_accuracy is not None:
pstr += 'tac: {0:.4f}|'.format(self.test_accuracy)
return pstr
def _post_train_batch(self, kwargs):
if self.pbar is None:
self.pbar = self.pbarclass(total=kwargs['max_iter'])
if 'train_loss' in list(kwargs.keys()):
self.loss = kwargs['train_loss']
if 'train_accuracy' in list(kwargs.keys()):
self.accuracy = kwargs['train_accuracy']
self.pbar.set_description(self._perf_string())
self.pbar.update(kwargs['iter'] + kwargs['batch_size'] - self.last_iter)
self.last_iter = kwargs['iter'] + kwargs['batch_size']
def _post_test_batch(self, kwargs):
if self.pbar is None:
self.pbar = self.pbarclass(total=kwargs['max_iter'])
if 'test_loss' in list(kwargs.keys()):
self.test_loss = kwargs['test_loss']
if 'test_accuracy' in list(kwargs.keys()):
self.test_accuracy = kwargs['test_accuracy']
self.pbar.set_description(self._perf_string())
self.pbar.update(kwargs['iter'] - self.last_iter)
self.last_iter = kwargs['iter']
def _post_test(self, kwargs):
# Write the mean if possible.
if self.pbar is not None:
if 'test_loss' in list(kwargs.keys()):
self.test_loss = kwargs['test_loss']
if 'test_accuracy' in list(kwargs.keys()):
self.test_accuracy = kwargs['test_accuracy']
self.pbar.set_description(self._perf_string())
self.pbar.update(kwargs['iter'] - self.last_iter)
self.last_iter = kwargs['iter']
def finalize(self, kwargs): # pylint: disable=W0613
"""Call ``progressbar.finish()``."""
if self.pbar is not None:
self.pbar.close()
def _sorted_ar_from_dict(inf, key): # pragma: no cover
iters = []
vals = []
for values in inf:
        if key in values:
iters.append(int(values['NumIters']))
vals.append(float(values[key]))
sortperm = _np.argsort(iters)
arr = _np.array([iters, vals]).T
return arr[sortperm, :]
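# Example: _sorted_ar_from_dict([{'NumIters': 10, 'train_loss': 0.5},
#                                {'NumIters': 0, 'train_loss': 0.9}],
#                               'train_loss')
# returns array([[ 0. ,  0.9],
#                [10. ,  0.5]]), i.e. rows sorted by iteration number.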
def _draw_perfplot(phases, categories, ars, outfile): # pragma: no cover
"""Draw the performance plots."""
fig, axes = _plt.subplots(nrows=len(categories), sharex=True)
for category_idx, category in enumerate(categories):
ax = axes[category_idx] # pylint: disable=invalid-name
ax.set_title(category.title())
for phase in phases:
if phase + '_' + category not in ars.keys():
continue
ar = ars[phase + '_' + category] # pylint: disable=invalid-name
alpha = 0.7
color = 'b'
if phase == 'test':
alpha = 1.0
color = 'g'
ax.plot(ar[:, 0], ar[:, 1],
label=phase.title(), c=color, alpha=alpha)
if phase == 'test':
ax.scatter(ar[:, 0], ar[:, 1],
c=color, s=50)
ax.set_ylabel(category.title())
ax.grid()
ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
_plt.savefig(outfile, bbox_inches='tight')
_plt.close(fig)
class JSONLogger(Monitor): # pylint: disable=R0903
r"""
Logs available information to a JSON file.
The information is stored in a dictionary of lists. The lists contain
score information and the iteration at which it was obtained. The
currently logged scores are loss, accuracy, test loss and test accuracy.
The logger makes use of the following keyword arguments
(\* indicates required):
* ``iter``\*,
:param path: string.
The path to store the file in.
:param name: string.
The filename. Will be prefixed with 'barrista_' and '.json' will be
appended.
:param logging: dict of lists.
The two keys in the dict which are used are test, train.
For each of those a list of keys can be provided, those keys
have to be available in the kwargs/cbparams structure.
Usually the required data is provided by the ResultExtractor.
:param base_iter: int or None.
If provided, add this value to the number of iterations. This overrides
the number of iterations retrieved from a loaded JSON log to append to.
:param write_every: int or None.
Write the JSON log every `write_every` iterations. The log is always
written upon completion of the training. If it is None, the log is only
written on completion.
:param create_plot: bool.
If set to True, create a plot at `path` when the JSON log is written with
the name of the JSON file + `_plot.png`. Default: False.
"""
# pylint: disable=too-many-arguments
def __init__(self,
path,
name,
logging,
base_iter=None,
write_every=None,
create_plot=False):
"""See class documentation."""
import json
self.json_package = json
self.json_filename = str(_os.path.join(
path,
'barrista_' + name + '.json'))
if base_iter is None:
self.base_iter = 0
else:
self.base_iter = base_iter
if _os.path.exists(self.json_filename):
with open(self.json_filename, 'r') as infile:
self.dict = self.json_package.load(infile)
if base_iter is None:
for key in ['train', 'test']:
for infdict in self.dict[key]:
                        if 'NumIters' in infdict:
self.base_iter = max(self.base_iter,
infdict['NumIters'])
_LOGGER.info("Appending to JSON log at %s from iteration %d.",
self.json_filename,
self.base_iter)
else:
self.dict = {'train': [], 'test': [], 'barrista_produced': True}
assert write_every is None or write_every > 0
self._write_every = write_every
self._logging = logging
self._create_plot = create_plot
if self._create_plot:
assert _PLT_AVAILABLE, (
"Matplotlib must be available to use plotting!")
def _initialize_train(self, kwargs):
self._initialize(kwargs)
def _initialize_test(self, kwargs):
self._initialize(kwargs)
def _initialize(self, kwargs): # pylint: disable=unused-argument
for key in list(self._logging.keys()):
assert key in ['train', 'test'], (
'only train and test is supported by this logger')
def _post_test(self, kwargs):
self._post('test', kwargs)
def _post_train_batch(self, kwargs):
self._post('train', kwargs)
def _post(self, phase_name, kwargs): # pylint: disable=C0111
if phase_name not in self._logging: # pragma: no cover
return
if phase_name == 'train':
kwargs['iter'] += kwargs['batch_size']
if (self._write_every is not None and
kwargs['iter'] % self._write_every == 0):
with open(self.json_filename, 'w') as outf:
self.json_package.dump(self.dict, outf)
if self._create_plot: # pragma: no cover
categories = set()
arrs = dict()
for plot_phase_name in ['train', 'test']:
for key in self._logging[plot_phase_name]:
categories.add(key[len(plot_phase_name) + 1:])
arrs[key] = _sorted_ar_from_dict(self.dict[plot_phase_name],
key)
_draw_perfplot(['train', 'test'],
categories,
arrs,
self.json_filename + '_plot.png')
for key in self._logging[phase_name]:
if key in kwargs:
self.dict[phase_name].append({'NumIters':
kwargs['iter'] + self.base_iter,
key: kwargs[key]})
if phase_name == 'train':
kwargs['iter'] -= kwargs['batch_size']
def finalize(self, kwargs): # pylint: disable=W0613
"""Write the json file."""
with open(self.json_filename, 'w') as outf:
self.json_package.dump(self.dict, outf)
if self._create_plot: # pragma: no cover
categories = set()
arrs = dict()
for phase_name in ['train', 'test']:
for key in self._logging[phase_name]:
categories.add(key[len(phase_name) + 1:])
arrs[key] = _sorted_ar_from_dict(self.dict[phase_name], key)
_draw_perfplot(['train', 'test'],
categories,
arrs,
self.json_filename + '_plot.png')
class Checkpointer(Monitor): # pylint: disable=R0903
r"""
Writes the network blobs to disk at certain iteration intervals.
The logger makes use of the following keyword arguments
(\* indicates required):
* ``iter``\*,
* ``net``\*,
* ``batch_size``\*.
:param name_prefix: string or None.
    The first part of the output filenames to generate. The infix '_iter_',
    the current iteration, as well as '.caffemodel' are added.
If you are using a caffe version from later than Dec. 2015, caffe's
internal snapshot method is exposed to Python and also snapshots the
solver. If it's available, then this method will be used. However,
in that case, it's not possible to influence the storage location
from Python. Please use the solver parameter ``snapshot_prefix``
when constructing the solver instead (this parameter may be None
and is unused then).
:param iterations: int > 0.
      Whenever the current number of iterations is divisible by ``iterations``,
the network blobs are written to disk. Hence, this value must be a
multiple of the batch size!
"""
def __init__(self,
name_prefix,
iterations,
base_iterations=0):
"""See class documentation."""
assert iterations > 0
_LOGGER.info('Setting up checkpointing with name prefix %s every ' +
'%d iterations.', name_prefix, iterations)
self.name_prefix = name_prefix
self.iterations = iterations
self.created_checkpoints = []
self._base_iterations = base_iterations
# pylint: disable=arguments-differ
def _post_train_batch(self, kwargs, finalize=False):
assert self.iterations % kwargs['batch_size'] == 0, (
'iterations not multiple of batch_size, {} vs {}'.format(
self.iterations, kwargs['batch_size']))
# Prevent double-saving.
if kwargs['iter'] in self.created_checkpoints:
return
if ((kwargs['iter'] + self._base_iterations +
kwargs['batch_size']) % self.iterations == 0 or
finalize):
self.created_checkpoints.append(kwargs['iter'])
# pylint: disable=protected-access
if not hasattr(kwargs['solver']._solver, 'snapshot'): # pragma: no cover
checkpoint_filename = (
self.name_prefix + '_iter_' +
str(int((kwargs['iter'] + self._base_iterations) /
kwargs['batch_size']) + 1) +
'.caffemodel')
_LOGGER.debug("Writing checkpoint to file '%s'.",
checkpoint_filename)
kwargs['net'].save(checkpoint_filename)
else:
# pylint: disable=protected-access
kwargs['solver']._solver.snapshot()
                caffe_checkpoint_filename = (
                    self.name_prefix + '_iter_' +
                    str(int((kwargs['iter'] + self._base_iterations) /
                            kwargs['batch_size']) + 1) +
                    '.caffemodel')
                caffe_sstate_filename = (
                    self.name_prefix + '_iter_' +
                    str(int((kwargs['iter'] + self._base_iterations) /
                            kwargs['batch_size']) + 1) +
                    '.solverstate')
_LOGGER.debug('Writing checkpoint to file "[solverprefix]%s" ' +
'and "[solverprefix]%s".',
caffe_checkpoint_filename,
caffe_sstate_filename)
assert _os.path.exists(caffe_checkpoint_filename), (
"An error occured checkpointing to {}. File not found. "
"Make sure the `base_iterations` and the `name_prefix` "
"are correct.").format(caffe_checkpoint_filename)
assert _os.path.exists(caffe_sstate_filename), (
"An error occured checkpointing to {}. File not found. "
"Make sure the `base_iterations` and the `name_prefix` "
"are correct.").format(caffe_sstate_filename)
def finalize(self, kwargs):
"""Write a final checkpoint."""
# Account for the counting on iteration increase for the last batch.
kwargs['iter'] -= kwargs['batch_size']
self._post_train_batch(kwargs, finalize=True)
kwargs['iter'] += kwargs['batch_size']
class GradientMonitor(Monitor):
"""
Tools to keep an eye on the gradient.
Create plots of the gradient. Creates histograms of the gradient for all
``selected_parameters`` and creates an overview plot with the maximum
absolute gradient per layer. If ``create_videos`` is set and ffmpeg is
available, automatically creates videos.
:param write_every: int.
Write every x iterations. Since matplotlib takes some time to run, choose
with care.
:param output_folder: string.
Where to store the outputs.
:param selected_parameters: dict(string, list(int)) or None.
Which parameters to include in the plots. The string is the name of the
layer, the list of integers contains the parts to include, e.g., for a
convolution layer, specify the name of the layer as key and 0 for
the parameters of the convolution weights, 1 for the biases per channel.
The order and meaning of parameter blobs is determined by caffe. If
None, then all parameters are plotted. Default: None.
:param relative: Bool.
If set to True, will give the weights relative to the max absolute weight
in the target parameter blob. Default: False.
:param iteroffset: int.
An iteration offset if training is resumed to not overwrite existing
output. Default: 0.
:param create_videos: Bool.
If set to True, try to create a video using ffmpeg. Default: True.
:param video_frame_rate: int.
The video frame rate.
"""
def __init__(self, # pylint: disable=too-many-arguments
write_every,
output_folder,
selected_parameters=None,
relative=False,
iteroffset=0,
create_videos=True,
video_frame_rate=1):
assert write_every > 0
self._write_every = write_every
self._output_folder = output_folder
self._selected_parameters = selected_parameters
self._relative = relative
self._n_parameters = None
self._iteroffset = iteroffset
self._create_videos = create_videos
self._video_frame_rate = video_frame_rate
def _initialize_train(self, kwargs): # pragma: no cover
assert _PLT_AVAILABLE, (
"Matplotlib must be available to use the GradientMonitor!")
assert self._write_every % kwargs['batch_size'] == 0, (
"`write_every` must be a multiple of the batch size!")
self._n_parameters = 0
if self._selected_parameters is not None:
for name in self._selected_parameters.keys():
assert name in kwargs['net'].params.keys()
for p_idx in self._selected_parameters[name]:
assert p_idx >= 0
assert len(kwargs['net'].params[name]) > p_idx
self._n_parameters += 1
else:
self._selected_parameters = _collections.OrderedDict()
for name in kwargs['net'].params.keys():
self._selected_parameters[name] = range(len(
kwargs['net'].params[name]))
self._n_parameters += len(kwargs['net'].params[name])
# pylint: disable=too-many-locals
def _post_train_batch(self, kwargs): # pragma: no cover
if kwargs['iter'] % self._write_every == 0:
net = kwargs['net']
maxabsupdates = {}
maxabsupdates_flat = []
# Create histograms.
fig, axes = _plt.subplots(nrows=1,
ncols=self._n_parameters,
figsize=(self._n_parameters * 3, 3))
ax_idx = 0
xfmt = _tkr.FormatStrFormatter('%.1e')
for lname in self._selected_parameters.keys():
maxabsupdates[lname] = []
for p_idx in self._selected_parameters[lname]:
if self._relative:
lgradient = (net.params[lname][p_idx].diff /
net.params[lname][p_idx].data.max())
else:
lgradient = net.params[lname][p_idx].diff
maxabsupdates[lname].append(_np.max(_np.abs(lgradient)))
maxabsupdates_flat.append(_np.max(_np.abs(lgradient)))
axes[ax_idx].set_title(lname + ', p%d' % (p_idx))
axes[ax_idx].hist(list(lgradient.flat),
25,
normed=1,
alpha=0.5)
axes[ax_idx].set_xticks(_np.linspace(-maxabsupdates_flat[-1],
maxabsupdates_flat[-1],
num=3))
axes[ax_idx].yaxis.set_visible(False)
axes[ax_idx].xaxis.set_major_formatter(xfmt)
ax_idx += 1
_plt.tight_layout(rect=[0, 0.03, 1, 0.95])
_plt.suptitle("Gradient histograms for iteration %d" % (
kwargs['iter'] + self._iteroffset))
if self._relative:
ghname = self._output_folder + 'gradient_hists_rel_%d.png' % (
(self._iteroffset + kwargs['iter']) /
self._write_every)
else:
ghname = self._output_folder + 'gradient_hists_%d.png' % (
(self._iteroffset + kwargs['iter']) /
self._write_every)
_plt.savefig(ghname)
_plt.close(fig)
# Create the magnitude overview plot.
fig = _plt.figure(figsize=(self._n_parameters * 1, 1.5))
_plt.title("Maximum absolute gradient per layer (iteration %d)" % (
kwargs['iter'] + self._iteroffset))
ax = _plt.gca() # pylint: disable=invalid-name
# pylint: disable=invalid-name
im = ax.imshow(_np.atleast_2d(_np.array(maxabsupdates_flat)),
interpolation='none')
ax.yaxis.set_visible(False)
divider = _make_axes_locatable(ax)
cax = divider.append_axes("right", size="10%", pad=0.05)
_plt.colorbar(im, cax=cax, ticks=_np.linspace(_np.min(maxabsupdates_flat),
_np.max(maxabsupdates_flat),
5))
_plt.tight_layout(rect=[0, 0.03, 1, 0.95])
if self._relative:
gmname = self._output_folder + 'gradient_magnitude_rel_%d.png' % (
(self._iteroffset + kwargs['iter']) /
self._write_every)
else:
gmname = self._output_folder + 'gradient_magnitude_%d.png' % (
(self._iteroffset + kwargs['iter']) /
self._write_every)
_plt.savefig(gmname)
_plt.close(fig)
def finalize(self, kwargs): # pragma: no cover
if self._create_videos:
_LOGGER.debug("Creating gradient videos...")
try:
if not _os.path.exists(_os.path.join(self._output_folder,
'videos')):
_os.mkdir(_os.path.join(self._output_folder, 'videos'))
if self._relative:
rel_add = '_rel'
else:
rel_add = ''
with open(_os.devnull, 'w') as quiet:
_subprocess.check_call([
'ffmpeg',
'-y',
'-start_number', str(0),
'-r', str(self._video_frame_rate),
'-i', _os.path.join(self._output_folder,
'gradient_hists' + rel_add + '_%d.png'),
_os.path.join(self._output_folder,
'videos',
'gradient_hists' + rel_add + '.mp4')
], stdout=quiet, stderr=quiet)
_subprocess.check_call([
'ffmpeg',
'-y',
'-start_number', str(0),
'-r', str(self._video_frame_rate),
'-i', _os.path.join(self._output_folder,
'gradient_magnitude' + rel_add + '_%d.png'),
_os.path.join(self._output_folder,
'videos',
'gradient_magnitude' + rel_add + '.mp4')
], stdout=quiet, stderr=quiet)
except Exception as ex: # pylint: disable=broad-except
_LOGGER.error(
"Could not create videos! Error: %s. Is " +
"ffmpeg available on the command line?",
str(ex))
_LOGGER.debug("Done.")
class ActivationMonitor(Monitor):
"""
Tools to keep an eye on the net activations.
Create plots of the net activations. If ``create_videos`` is set and
ffmpeg is available, automatically creates videos.
:param write_every: int.
Write every x iterations. Since matplotlib takes some time to run, choose
with care.
:param output_folder: string.
Where to store the outputs.
:param selected_blobs: list(string) or None.
Which blobs to include in the plots. If
None, then all parameters are plotted. Default: None.
:param iteroffset: int.
An iteration offset if training is resumed to not overwrite existing
output. Default: 0.
:param sample: dict(string, NDarray(3D)).
A sample to use that will be forward propagated to obtain the activations.
Must contain one for every input layer of the network. Each sample is not
preprocessed and must fit the input. If None, use the existing values
from the blobs.
:param create_videos: Bool.
If set to True, try to create a video using ffmpeg. Default: True.
:param video_frame_rate: int.
The video frame rate.
"""
# pylint: disable=too-many-arguments
def __init__(self, # pragma: no cover
write_every,
output_folder,
selected_blobs=None,
iteroffset=0,
sample=None,
create_videos=True,
video_frame_rate=1):
assert write_every > 0
self._write_every = write_every
self._output_folder = output_folder
self._selected_blobs = selected_blobs
self._n_parameters = None
self._iteroffset = iteroffset
self._create_videos = create_videos
self._video_frame_rate = video_frame_rate
self._sample = sample
def _initialize_train(self, kwargs): # pragma: no cover
assert _PLT_AVAILABLE, (
"Matplotlib must be available to use the ActivationMonitor!")
assert self._write_every % kwargs['batch_size'] == 0, (
"`write_every` must be a multiple of the batch size!")
self._n_parameters = 0
if self._selected_blobs is not None:
for name in self._selected_blobs:
assert name in kwargs['net'].blobs.keys(), (
"The activation monitor should monitor {}, which is not "
"part of the net!").format(name)
self._n_parameters += 1
else:
self._selected_blobs = []
for name in kwargs['net'].blobs.keys():
bshape = kwargs['net'].blobs[name].data.shape
if len(bshape) == 4:
self._selected_blobs.append(name)
self._n_parameters += 1
if self._sample is not None:
for inp_name in self._sample.keys():
assert (kwargs['net'].blobs[inp_name].data.shape[1:] ==
self._sample[inp_name].shape), (
"All provided inputs as `sample` must have the shape "
"of an input blob, starting from its sample "
"dimension. Does not match for %s: %s vs. %s." % (
inp_name,
str(kwargs['net'].blobs[inp_name].data.shape[1:]),
str(self._sample[inp_name].shape)))
# pylint: disable=too-many-locals
def _post_train_batch(self, kwargs): # pragma: no cover
if kwargs['iter'] % self._write_every == 0:
net = kwargs['net']
if self._sample is not None:
for bname in self._sample.keys():
net.blobs[bname].data[-1, ...] = self._sample[bname]
net.forward()
for bname in self._selected_blobs:
blob = net.blobs[bname].data
nchannels = blob.shape[1]
gridlen = int(_np.ceil(_np.sqrt(nchannels)))
fig, axes = _plt.subplots(nrows=gridlen,
ncols=gridlen,
squeeze=False)
bmin = blob[-1].min()
bmax = blob[-1].max()
for c_idx in range(nchannels):
ax = axes.flat[c_idx] # pylint: disable=invalid-name
im = ax.imshow(blob[-1, c_idx], # pylint: disable=invalid-name
vmin=bmin,
vmax=bmax,
cmap='Greys_r',
interpolation='none')
ax.set_title('C%d' % (c_idx))
ax.yaxis.set_visible(False)
ax.xaxis.set_visible(False)
# pylint: disable=undefined-loop-variable
for blank_idx in range(c_idx + 1, gridlen * gridlen):
ax = axes.flat[blank_idx] # pylint: disable=invalid-name
ax.axis('off')
_plt.tight_layout(rect=[0, 0.03, 1, 0.95])
_plt.suptitle("Activations in blob %s (iteration %d)" % (
bname, self._iteroffset + kwargs['iter']))
cbax, cbkw = _colorbar.make_axes([ax for ax in axes.flat])
fig.colorbar(im, cax=cbax, **cbkw)
_plt.savefig(self._output_folder +
'activations_%s_%d.png' % (
bname,
(self._iteroffset + kwargs['iter']) /
self._write_every))
_plt.close(fig)
def finalize(self, kwargs): # pragma: no cover
if self._create_videos:
_LOGGER.debug("Creating activation videos...")
try:
if not _os.path.exists(_os.path.join(self._output_folder,
'videos')):
_os.mkdir(_os.path.join(self._output_folder, 'videos'))
for bname in self._selected_blobs:
with open(_os.devnull, 'w') as quiet:
_subprocess.check_call([
'ffmpeg',
'-y',
'-start_number', str(0),
'-r', str(self._video_frame_rate),
'-i', _os.path.join(self._output_folder,
'activations_' + bname + '_%d.png'),
_os.path.join(self._output_folder,
'videos',
'activations_' + bname + '.mp4')
], stdout=quiet, stderr=quiet)
except Exception as ex: # pylint: disable=broad-except
_LOGGER.error(
"Could not create videos! Error: %s. Is " +
"ffmpeg available on the command line?",
str(ex))
_LOGGER.debug("Done.")
class FilterMonitor(Monitor):
"""
Tools to keep an eye on the filters.
Create plots of the network filters. Creates filter plots for all
``selected_parameters``. If ``create_videos`` is set and ffmpeg is
available, automatically creates videos.
:param write_every: int.
Write every x iterations. Since matplotlib takes some time to run, choose
with care.
:param output_folder: string.
Where to store the outputs.
:param selected_parameters: dict(string, list(int)) or None.
Which parameters to include in the plots. The string is the name of the
layer, the list of integers contains the parts to include, e.g., for a
convolution layer, specify the name of the layer as key and 0 for
the parameters of the convolution weights, 1 for the biases per channel.
The order and meaning of parameter blobs is determined by caffe. If
None, then all parameters are plotted. **Only 4D blobs can be plotted!**
Default: None.
:param iteroffset: int.
An iteration offset if training is resumed to not overwrite existing
output. Default: 0.
:param create_videos: Bool.
If set to True, try to create a video using ffmpeg. Default: True.
:param video_frame_rate: int.
The video frame rate.
"""
# pylint: disable=too-many-arguments
def __init__(self, # pragma: no cover
write_every,
output_folder,
selected_parameters=None,
iteroffset=0,
create_videos=True,
video_frame_rate=1):
assert write_every > 0
self._write_every = write_every
self._output_folder = output_folder
self._selected_parameters = selected_parameters
self._n_parameters = None
self._iteroffset = iteroffset
self._create_videos = create_videos
self._video_frame_rate = video_frame_rate
def _initialize_train(self, kwargs): # pragma: no cover
assert _PLT_AVAILABLE, (
"Matplotlib must be available to use the FilterMonitor!")
assert self._write_every % kwargs['batch_size'] == 0, (
"`write_every` must be a multiple of the batch size!")
self._n_parameters = 0
if self._selected_parameters is not None:
for name in self._selected_parameters.keys():
assert name in kwargs['net'].params.keys()
for p_idx in self._selected_parameters[name]:
assert p_idx >= 0
assert len(kwargs['net'].params[name][p_idx].data.shape) == 4
self._n_parameters += 1
else:
self._selected_parameters = _collections.OrderedDict()
for name in kwargs['net'].params.keys():
self._selected_parameters[name] = []
for pindex in range(len(kwargs['net'].params[name])):
if len(kwargs['net'].params[name][pindex].data.shape) == 4:
self._selected_parameters[name].append(pindex)
self._n_parameters += 1
def _post_train_batch(self, kwargs): # pragma: no cover
if kwargs['iter'] % self._write_every == 0:
net = kwargs['net']
for pname in self._selected_parameters.keys():
for pindex in self._selected_parameters[pname]:
fig = _plt.figure()
param = net.params[pname][pindex].data
border = 2
collected_weights = _np.zeros((param.shape[0] *
(param.shape[2] + border) +
border,
param.shape[1] *
(param.shape[3] + border) +
border), dtype='float32')
pmin = param.min()
pmax = param.max()
# Build up the plot manually because matplotlib is too slow.
for filter_idx in range(param.shape[0]):
for layer_idx in range(param.shape[1]):
collected_weights[border + filter_idx * (param.shape[2] + border):
border + filter_idx * (param.shape[2] + border) +
param.shape[2],
border + layer_idx * (param.shape[3] + border):
border + layer_idx * (param.shape[3] + border) +
param.shape[3]] = (
(param[filter_idx, layer_idx] - pmin)
/ (pmax - pmin))
_plt.imshow(collected_weights,
cmap='Greys_r',
interpolation='none')
ax = _plt.gca() # pylint: disable=invalid-name
ax.yaxis.set_visible(False)
ax.xaxis.set_visible(False)
ax.set_title((
"Values of layer %s, param %d\n" +
"(iteration %d, min %.1e, max %.1e)") % (
pname, pindex, self._iteroffset + kwargs['iter'], pmin, pmax))
_plt.savefig(self._output_folder +
'parameters_%s_%d_%d.png' % (
pname,
pindex,
(self._iteroffset + kwargs['iter']) /
self._write_every))
_plt.close(fig)
def finalize(self, kwargs): # pragma: no cover
if self._create_videos:
_LOGGER.debug("Creating filter videos...")
try:
if not _os.path.exists(_os.path.join(self._output_folder,
'videos')):
_os.mkdir(_os.path.join(self._output_folder, 'videos'))
for pname in self._selected_parameters.keys():
for pindex in self._selected_parameters[pname]:
with open(_os.devnull, 'w') as quiet:
_subprocess.check_call([
'ffmpeg',
'-y',
'-start_number', str(0),
'-r', str(self._video_frame_rate),
'-i', _os.path.join(self._output_folder,
'parameters_' +
pname + '_' +
str(pindex) + '_' +
'%d.png'),
_os.path.join(self._output_folder,
'videos',
'parameters_' +
pname + '_' +
str(pindex) + '.mp4')
], stdout=quiet, stderr=quiet)
except Exception as ex: # pylint: disable=broad-except
_LOGGER.error(
"Could not create videos! Error: %s. Is " +
"ffmpeg available on the command line?",
str(ex))
_LOGGER.debug("Done.")
|
py | 1a50c4bbd571d21546450b468c016c2cc6b8b495 | from nets.segnet import convnet_segnet
from PIL import Image
import numpy as np
import random
import copy
import os
class_colors = [[0,0,0],[0,255,0]]
NCLASSES = 2
HEIGHT = 416
WIDTH = 416
model = convnet_segnet(n_classes=NCLASSES,input_height=HEIGHT, input_width=WIDTH)
model.load_weights("logs/ep021-loss0.083-val_loss0.143.h5")
imgs = os.listdir("./img/")
for jpg in imgs:
img = Image.open("./img/"+jpg)
old_img = copy.deepcopy(img)
    original_h = np.array(img).shape[0]
    original_w = np.array(img).shape[1]
img = img.resize((WIDTH,HEIGHT))
img = np.array(img)
img = img/255
img = img.reshape(-1,HEIGHT,WIDTH,3)
pr = model.predict(img)[0]
pr = pr.reshape((int(HEIGHT/2), int(WIDTH/2),NCLASSES)).argmax(axis=-1)
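    # Note: the model predicts at half the input resolution (hence the
    # reshape to (HEIGHT/2, WIDTH/2) above); the mask is colorized below
    # and resized back to the original image size before blending.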
seg_img = np.zeros((int(HEIGHT/2), int(WIDTH/2),3))
colors = class_colors
for c in range(NCLASSES):
seg_img[:,:,0] += ( (pr[:,: ] == c )*( colors[c][0] )).astype('uint8')
seg_img[:,:,1] += ((pr[:,: ] == c )*( colors[c][1] )).astype('uint8')
seg_img[:,:,2] += ((pr[:,: ] == c )*( colors[c][2] )).astype('uint8')
    seg_img = Image.fromarray(np.uint8(seg_img)).resize((original_w,original_h))
image = Image.blend(old_img,seg_img,0.3)
image.save("./img_out/"+jpg)
|
py | 1a50c613001a17713e5b1894a303827e0398b8d0 | rhacm_versions = [
('1.0', '7'),
('2.0', '7'),
('2.1', '7'),
('2.2', '7'),
('2.3', '7'),
('1.0', '8'),
('2.0', '8'),
('2.1', '8'),
('2.2', '8'),
('2.3', '8'),
]
def test_rhacm_product_version_count(rhacm_product):
assert len(rhacm_product.product_versions()) == 10
def test_rhacm_product_version_names(rhacm_product):
for index, product_version in enumerate(rhacm_product.product_versions()):
version, release = rhacm_versions[index]
assert product_version.name == 'RHEL-%s-RHACM-%s' % (release, version)
def test_rhacm_product_version_descriptions(rhacm_product):
for index, product_version in enumerate(rhacm_product.product_versions()):
version, release = rhacm_versions[index]
assert product_version.description == 'Red Hat Advanced Cluster ' + \
'Management for Kubernetes %s for RHEL %s' % (version, release)
def test_rhacm_product_version_default_brew_tags(rhacm_product):
for index, product_version in enumerate(rhacm_product.product_versions()):
version, release = rhacm_versions[index]
assert product_version.default_brew_tag == \
'rhacm-%s-rhel-%s-container-candidate' % (version, release)
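# The tests above assume a `rhacm_product` pytest fixture. A minimal
# stand-in for running them locally (hypothetical; the real fixture comes
# from the project's conftest.py and queries the actual product data):
import collections

import pytest

_ProductVersion = collections.namedtuple(
    '_ProductVersion', ['name', 'description', 'default_brew_tag'])


@pytest.fixture
def rhacm_product():
    class _FakeProduct(object):
        def product_versions(self):
            return [
                _ProductVersion(
                    'RHEL-%s-RHACM-%s' % (release, version),
                    'Red Hat Advanced Cluster Management for Kubernetes '
                    '%s for RHEL %s' % (version, release),
                    'rhacm-%s-rhel-%s-container-candidate'
                    % (version, release))
                for version, release in rhacm_versions
            ]
    return _FakeProduct()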
|
py | 1a50c7d46eb76c410ff334db0a97e16bb1161360 | # BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
import awkward as ak
np = ak.nplike.NumpyMetadata.instance()
def flatten(array, axis=1, highlevel=True, behavior=None):
"""
Args:
array: Data containing nested lists to flatten.
axis (None or int): If None, the operation flattens all levels of
nesting, returning a 1-dimensional array. Otherwise, it flattens
at a specified depth. The outermost dimension is `0`, followed
by `1`, etc., and negative values count backward from the
innermost: `-1` is the innermost dimension, `-2` is the next
level up, etc.
highlevel (bool): If True, return an #ak.Array; otherwise, return
a low-level #ak.layout.Content subclass.
behavior (None or dict): Custom #ak.behavior for the output array, if
high-level.
Returns an array with one level of nesting removed by erasing the
boundaries between consecutive lists. Since this operates on a level of
nesting, `axis=0` is a special case that only removes values at the
top level that are equal to None.
Consider the following doubly nested `array`.
ak.Array([[
[1.1, 2.2, 3.3],
[],
[4.4, 5.5],
[6.6]],
[],
[
[7.7],
[8.8, 9.9]
]])
At `axis=1`, the outer lists (length 4, length 0, length 2) become a single
list (of length 6).
>>> print(ak.flatten(array, axis=1))
[[1.1, 2.2, 3.3], [], [4.4, 5.5], [6.6], [7.7], [8.8, 9.9]]
At `axis=2`, the inner lists (lengths 3, 0, 2, 1, 1, and 2) become three
lists (of lengths 6, 0, and 3).
>>> print(ak.flatten(array, axis=2))
[[1.1, 2.2, 3.3, 4.4, 5.5, 6.6], [], [7.7, 8.8, 9.9]]
There's also an option to completely flatten the array with `axis=None`.
This is useful for passing the data to a function that doesn't care about
nested structure, such as a plotting routine.
>>> print(ak.flatten(array, axis=None))
[1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9]
Missing values are eliminated by flattening: there is no distinction
between an empty list and a value of None at the level of flattening.
>>> array = ak.Array([[1.1, 2.2, 3.3], None, [4.4], [], [5.5]])
>>> ak.flatten(array, axis=1)
<Array [1.1, 2.2, 3.3, 4.4, 5.5] type='5 * float64'>
As a consequence, flattening at `axis=0` does only one thing: it removes
None values from the top level.
>>> ak.flatten(array, axis=0)
<Array [[1.1, 2.2, 3.3], [4.4], [], [5.5]] type='4 * var * float64'>
As a technical detail, the flattening operation can be trivial in a common
case, #ak.layout.ListOffsetArray in which the first `offset` is `0`.
In that case, the flattened data is simply the array node's `content`.
>>> array.layout
<ListOffsetArray64>
<offsets><Index64 i="[0 4 4 6]" offset="0" length="4"/></offsets>
<content><ListOffsetArray64>
<offsets><Index64 i="[0 3 3 5 6 7 9]" offset="0" length="7"/></offsets>
<content>
<NumpyArray format="d" shape="9" data="1.1 2.2 3.3 4.4 5.5 6.6 7.7 8.8 9.9"/>
</content>
</ListOffsetArray64></content>
</ListOffsetArray64>
>>> np.asarray(array.layout.content.content)
array([1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9])
However, it is important to keep in mind that this is a special case:
#ak.flatten and `content` are not interchangeable!
"""
with ak._v2._util.OperationErrorContext(
"ak._v2.flatten",
dict(array=array, axis=axis, highlevel=highlevel, behavior=behavior),
):
return _impl(array, axis, highlevel, behavior)
def _impl(array, axis, highlevel, behavior):
layout = ak._v2.operations.convert.to_layout(
array, allow_record=False, allow_other=False
)
nplike = ak.nplike.of(layout)
if axis is None:
out = layout.completely_flatten(function_name="ak.flatten")
assert isinstance(out, tuple) and all(
isinstance(x, nplike.ndarray) for x in out
)
out = ak._v2.contents.NumpyArray(nplike.concatenate(out))
elif axis == 0 or layout.axis_wrap_if_negative(axis) == 0:
def apply(layout):
if layout.is_UnknownType:
return apply(ak._v2.contents.NumpyArray(nplike.array([])))
elif layout.is_IndexedType:
return apply(layout.project())
elif layout.is_UnionType:
if not any(
x.is_OptionType and not isinstance(x, ak._v2.contents.UnmaskedArray)
for x in layout.contents
):
return layout
tags = nplike.asarray(layout.tags)
index = nplike.array(nplike.asarray(layout.index), copy=True)
bigmask = nplike.empty(len(index), dtype=np.bool_)
for tag, content in enumerate(layout.contents):
if content.is_OptionType and not isinstance(
content, ak._v2.contents.UnmaskedArray
):
bigmask[:] = False
bigmask[tags == tag] = nplike.asarray(
content.mask_as_bool(valid_when=False)
).view(np.bool_)
index[bigmask] = -1
good = index >= 0
return ak._v2.contents.UnionArray(
ak._v2.index.Index8(tags[good]),
ak._v2.index.Index64(index[good]),
layout.contents,
)
elif layout.is_OptionType:
return layout.project()
else:
return layout
out = apply(layout)
return ak._v2._util.wrap(out, behavior, highlevel)
else:
out = layout.flatten(axis)
return ak._v2._util.wrap(out, behavior, highlevel)
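# Note: negative axis values count from the innermost dimension, so for the
# doubly nested example in the docstring, axis=-1 is equivalent to axis=2.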
|
py | 1a50c8548b567628170b547f22b173d339bd56e4 | '''
@Author: Kai Song, ks838 _at_ cam.ac.uk
@Notes : This part gives the constants and parameters.
'''
import numpy as np
# parameters for the system
#the inverse temperature
beta = 0.05 # a.u.
mass = 1.0
# ------ params for propagation ------
dt = 2 * pow(10,-3) # time step
# F is displacement of harmonic oscillator : Fx
F = 6
# frequency of nuclear motion
omega = 1
# coupling between electronic state.
Delta = 1
# displacement (1/2 M omega^2 (R + R0 * sigma_z )^2 ) in Hamiltonian H0 . initial state is sampled from e^{-\beta H0}
# R0 = 0 # displacement for initial Hamiltonian
R0 = F / ( mass * np.power(omega,2) )
equil_dt = pow(10,-2) # time step for thermalization of initial state
equil_time = 10
# steps for the equilibrating part
nsteps_equil = int(equil_time / equil_dt )
# steps for the dynamics
dynamics_time = 10
nsteps_dynamics = int(dynamics_time / dt )
print_time = 0.1
nsteps_print = int(print_time / dt )
# --------- for electronic state --------
n_electronic_state = 2
# -------- for the n beads ----------
# for simple potential forms (e.g., a double-well form), n_beads < 10 is
# enough. For a harmonic form, n_beads = 1 is enough.
n_beads = 16 # should be an even number in our settings
omega_N = n_beads/beta # we have used hbar = 1 . omega_N = 1/(beta_N * hbar)
beta_N = beta/n_beads
# Normal mode frequency for free ring polymer. See eq.(36) in Ceriotti et al. J. Chem. Phys. 133, 124104 2010.
omegak = np.zeros(n_beads)
for i_bead in range(n_beads):
omegak[i_bead] = 2* omega_N * np.sin(i_bead*np.pi/n_beads)
#------ parameter for Ceriotti thermostatting-----
tau0 = 0.7 # an input parameter for tuning the efficiency
# The number of samplings (from the thermostatting).
# Typically, we need ~10^4 to get converged results.
# We start with a small number for testing.
n_samplings = 100
# sampling initial momentum and coordinate.
mu_p = 0.0
sigma_p = np.sqrt(mass*n_beads/beta)
mu_q = 0.0
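# A short sampling sketch: initial bead momenta follow the Maxwell-Boltzmann
# distribution implied by beta_N (coordinates would normally come from the
# thermostatted equilibration described above):
#   p_init = np.random.normal(mu_p, sigma_p, size=(n_samplings, n_beads))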
|
py | 1a50c895b24c325703e60c70d3ab3c728781b560 | import os,time,math,sys,json,re,string,json
import importlib
import get_dataflow
import pandas as pd
import joblib
import json
import requests
import bs4
import lxml
from sklearn.ensemble import RandomForestClassifier
from nltk.tokenize import word_tokenize
stdlib=['string','re','difflib','textwrap','unicodedata','stringprep','readline','rlcompleter',
'struct','codecs','datetime','calendar','collections','collections.abc','heapq','bisect',
'array','weakref','types','copy','pprint','reprlib','enum','numbers','math','cmath',
'decimal','fractions','random','statistics','itertools','functools','operator','pathlib',
'os.path','fileinput','stat','filecmp','tempfile','glob','fnmatch','linecache','shutil',
'pickle','copyreg','shelve','marshal','dbm','sqlite3','zlib','gzip','bz2','lzma','zipfile',
'tarfile','csv','configparser','netrc','xdrlib','plistlib','hashlib','hmac','secrets',
'os','io','time','argparse','getopt','logging','logging.config','logging.handlers',
'getpass','curses','curses.textpad','curses.ascii','curses.panel','platform','errno',
'ctypes','threading','multiprocessing','multiprocessing.shared_memory','concurrent',
'concurrent.futures','subprocess','sched','queue','_thread','_dummy_thread','dummy_threading',
'contextvars','asyncio','socket','ssl','select','selectors','asyncore','asynchat','signal',
'mmap','email','json','mailcap','mailbox','mimetypes','base64','binhex','binascii',
'quopri','uu','html','html.parser','html.entities','xml','webbrowser','xml.etree.ElementTree',
'xml.dom','xml.dom.minidom','xml.dom.pulldom','xml.sax','xml.sax.handler','xml.sax.saxutils',
'xml.sax.xmlreader','xml.parsers.expat','cgi','cgitb','wsgiref','urllib','urllib.request',
'urllib.response','urllib.parse','urllib.error','urllib.robotparser','http','http.client',
'ftplib','poplib','imaplib','nntplib','smtplib','smtpd','telnetlib','uuid','socketserver',
'http.server','http.cookies','http.cookiejar','xmlrpc','xmlrpc.client','xmlrpc.server',
'ipaddress','audioop','aifc','sunau','wave','chunk','colorsys','imghdr','sndhdr','ossaudiodev',
'gettext','locale','turtle','cmd','shlex','tkinter','tkinter.ttk','tkinter.tix','tkinter.scrolledtext',
'typing','pydoc','doctest','unittest','unittest.mock','unittest.mock','test','test.support',
'test.support.script_helper','bdb','faulthandler','pdb','timeit','trace','tracemalloc','distutils',
'ensurepip','venv','zipapp','sys','sysconfig','builtins','__main__','warnings','dataclasses',
'contextlib','abc','atexit','traceback','__future__','gc','inspect','site','code','codeop','zipimport',
'pkgutil','modulefinder','runpy','importlib','ast','symtable','symbol','token','keyword',
'tokenize','tabnanny','pyclbr','py_compile','compileall','dis','pickletools','formatter','msilib',
'msvcrt','winreg','winsound','posix','pwd','spwd','grp','crypt','termios','tty','pty','fcntl','pipes',
'resource','nis','optparse','imp']
topk_array = [0,0,0,0,0,0,0]
num_of_apis = 0
class ShowProcess():
i = 0
max_steps = 0
max_arrow = 50
infoDone = 'done'
def __init__(self, max_steps, infoDone = 'Done'):
self.max_steps = max_steps
self.i = 0
self.infoDone = infoDone
def show_process(self, i=None):
if i is not None:
self.i = i
else:
self.i += 1
num_arrow = int(self.i * self.max_arrow / self.max_steps)
num_line = self.max_arrow - num_arrow
percent = self.i * 100.0 / self.max_steps
process_bar = '[' + '>' * num_arrow + '-' * num_line + ']'\
+ '%.2f' % percent + '%' + '\r'
sys.stdout.write(process_bar)
sys.stdout.flush()
if self.i >= self.max_steps:
self.close()
def close(self):
print('')
print(self.infoDone)
self.i = 0
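# Usage sketch:
#   bar = ShowProcess(len(work_items))  # `work_items` is a hypothetical list
#   for item in work_items:
#       process(item)                   # hypothetical per-item work
#       bar.show_process()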
def get_file_path(root_path,file_list,dir_list):
global ret_list
dir_or_files = os.listdir(root_path)
for dir_file in dir_or_files:
dir_file_path = os.path.join(root_path,dir_file)
if os.path.isdir(dir_file_path):
dir_list.append(dir_file_path)
get_file_path(dir_file_path,file_list,dir_list)
elif dir_file_path.endswith('.py') and not dir_file_path.endswith('tmp.py'):
#print(dir_file_path)
ret_list.append(dir_file_path)
file_list.append(dir_file_path)
def GetMiddleStr(content,startStr,endStr):
startIndex = content.index(startStr)
if startIndex>=0:
startIndex += len(startStr)
endIndex = content.index(endStr)
return content[startIndex:endIndex]
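# Resolve the public member names of a module: names already recorded in
# cur_apis are preferred; otherwise the module is imported and dir() is used
# (note: a failed import triggers a side-effecting 'pip3 install' attempt).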
def get_module_funcs(modulename):
modulename=modulename.strip()
flag=0
ms=[]
for curapi in cur_apis:
items=curapi.split('.')
if modulename+'.' in curapi or curapi.startswith(modulename+'.'):
#print('yes!',curapi)
api=items[-1]
ms.append(api)
flag=1
if flag==1:
ms=list(set(ms))
return {modulename:ms}
#print(modulename)
rootmodule=''
try:
module=importlib.import_module(modulename)
except Exception:
if '.' in modulename:
index=modulename.find('.')
rootmodule=modulename[:index]
os.system('pip3 install '+rootmodule)
else:
os.system('pip3 install '+modulename)
try:
module=importlib.import_module(modulename)
except Exception as err:
print(err)
return {}
ms=dir(module)
return {modulename:ms}
def get_alias_funcs(modulename,alias):
modulename=modulename.strip()
flag=0
ms=[]
for curapi in cur_apis:
items=curapi.split('.')
if modulename+'.' in curapi or curapi.startswith(modulename+'.'):
#print('yes!',curapi)
api=items[-1]
ms.append(api)
flag=1
if flag==1:
ms=list(set(ms))
return {alias:ms}
#print(modulename)
rootmodule=''
try:
module=importlib.import_module(modulename)
except Exception:
if '.' in modulename:
index=modulename.find('.')
rootmodule=modulename[:index]
os.system('pip3 install '+rootmodule)
else:
os.system('pip3 install '+modulename)
try:
module=importlib.import_module(modulename)
except Exception as err:
print(err)
return {}
ms=dir(module)
return {alias:ms}
def get_alias_item(modulename,itname,aliasname):
modulename=modulename.strip()
flag=0
ms=[]
for curapi in cur_apis:
items=curapi.split('.')
if modulename+'.'+itname in curapi or curapi.startswith(modulename+'.'+itname):
#print('yes!',curapi)
api=items[-1]
ms.append(api)
flag=1
if flag==1:
ms=list(set(ms))
return {aliasname:ms}
#print(modulename,itname)
rootmodule=''
submodule=''
try:
module=importlib.import_module(modulename)
except Exception:
try:
if '.' in modulename:
index=modulename.find('.')
rootmodule=modulename[:index]
os.system('pip3 install '+rootmodule)
else:
os.system('pip3 install '+modulename)
module=importlib.import_module(modulename)
except Exception:
try:
submodule=importlib.import_module(modulename+'.'+itname)
return {aliasname:dir(submodule)}
except Exception as err:
print(err)
return {}
try:
item=getattr(module,itname)
return {aliasname:dir(item)}
except Exception:
try:
submodule=importlib.import_module(modulename+'.'+itname)
return {aliasname:dir(submodule)}
except Exception as err:
print(err)
return {}
def get_item_methods(modulename,itname):
modulename=modulename.strip()
flag=0
ms=[]
for curapi in cur_apis:
items=curapi.split('.')
if modulename+'.'+itname in curapi or curapi.startswith(modulename+'.'+itname):
#print('yes!',curapi)
api=items[-1]
ms.append(api)
flag=1
if flag==1:
ms=list(set(ms))
return {modulename:ms}
#print(modulename,itname)
rootmodule=''
submodule=''
try:
module=importlib.import_module(modulename)
except Exception:
try:
if '.' in modulename:
index=modulename.find('.')
rootmodule=modulename[:index]
os.system('pip3 install '+rootmodule)
else:
os.system('pip3 install '+modulename)
module=importlib.import_module(modulename)
except Exception:
try:
submodule=importlib.import_module(modulename+'.'+itname)
return {itname:dir(submodule)}
except Exception as err:
print(err)
return {}
try:
item=getattr(module,itname)
return {itname:dir(item)}
except Exception:
try:
submodule=importlib.import_module(modulename+'.'+itname)
return {itname:dir(submodule)}
except Exception as err:
print(err)
return {}
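# Translate a relative import ('.', '..pkg', ...) into an absolute module path
# rooted at root_path, based on the importing file's directory, then gather
# candidate methods for each imported name.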
def deal_with_current_module(modulename,file,names):
modulename=modulename.strip()
#current_file='/home/user/PRIAN/targetProj/abu/abupy/TLineBu/ABuTLExecute.py'
current_file=file
layer=0
for c in modulename:
if c=='.':
layer+=1
else:
break
#print(layer)
ls7=current_file.split('/')
newdirs=ls7[:(0-layer)]
newdir=''
for d in newdirs:
newdir+=d+'/'
realdir=newdir
#print(realdir)
newdir=newdir+'end'
rootdir=GetMiddleStr(newdir,root_path,'/end')
if modulename=='.':
rootmodule=re.sub('/','.',rootdir)
else:
rootmodule=re.sub('/','.',rootdir)+'.'+modulename[layer:]
#print("Note!",rootmodule)
ret={}
for n in names:
x=get_item_methods(rootmodule,n)
ret.update(x)
return ret
def get_item_funcs(rootmodule,module,item):
try:
module1=importlib.import_module(module)
except Exception:
try:
os.system('pip3 install '+rootmodule)
module1=importlib.import_module(module)
except Exception:
try:
submodule=importlib.import_module(module+'.'+item)
return {item:dir(submodule)}
except Exception as err:
print(err)
return {}
try:
it=getattr(module1,item)
return {item:dir(it)}
except Exception:
try:
submodule=importlib.import_module(module+'.'+item)
return {item:dir(submodule)}
except Exception as err:
print(err)
return {}
def get_real_module(modulename,file):
current_file=file
layer=0
for c in modulename:
if c=='.':
layer+=1
else:
break
#print(layer)
ls7=current_file.split('/')
newdirs=ls7[:(0-layer)]
newdir=''
for d in newdirs:
newdir+=d+'/'
realdir=newdir
#print(realdir)
newdir=newdir+'end'
rootdir=GetMiddleStr(newdir,root_path,'/end')
if modulename=='.':
rootmodule=re.sub('/','.',rootdir)
else:
rootmodule=re.sub('/','.',rootdir)+'.'+modulename[layer:]
#print("Note!",rootmodule)
return rootmodule
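# Scan a source file's import statements with regexes ('import x',
# 'import x as y', 'from x import y [as z]', and comma-separated variants)
# and build a dict mapping each visible name to its candidate API list.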
def get_module_methods(file):
modulemethods=[]
all_candidates={}
with open(file) as f:
lines=f.readlines()
for line in lines:
line=line.strip()
            #in most cases we collect all functions of a directly imported module (via importlib + dir)
            #we may also need all classes, and all methods of those classes, from the module
if re.match('import [a-zA-Z0-9\.\_\,\s]+$',line) and ' as ' not in line:
#print(1,line)
modulename=line.split('import')[-1].strip()
if ',' not in modulename:
x1=get_module_funcs(modulename)
all_candidates.update(x1)
else:
ls3=modulename.split(',')
#global all_candidates
for j in ls3:
itemname=j.strip()
x2=get_module_funcs(itemname)
all_candidates.update(x2)
#should choose another example
elif re.match('import [a-zA-Z0-9\.\_\,]+ as [a-zA-Z0-9\.\_\,\s]+$',line):
#print(2,line)
if ',' not in line:
modulename=GetMiddleStr(line,'import',' as ').strip()
alias=line.split(' as ')[-1].strip()
#print(modulename,alias)
x3=get_alias_funcs(modulename,alias)
#global all_candidates
all_candidates.update(x3)
            #multiple combined imports on one line, detected by ','
else:
body=line.split('import')[-1].strip()
#print("multias:",body)
mas=body.split(',')
#print(mas)
for ma in mas:
if ' as ' in ma:
ls4=ma.split(' as ')
maname=ls4[0].strip()
aliasname=ls4[1].strip()
#print(maname,aliasname)
x4=get_alias_funcs(maname,aliasname)
#global all_candidates
all_candidates.update(x4)
else:
maname=ma.strip()
#print(maname)
x5=get_module_funcs(maname)
#global all_candidates
all_candidates.update(x5)
elif re.match('from [a-zA-Z0-9\.\_]+ import [a-zA-Z0-9\_\.\*\,\s]+$',line) and 'as' not in line:
#print(3,line)
modulename=GetMiddleStr(line,'from','import').strip()
itemname=line.split('import')[-1].strip()
names=[]
if ',' in itemname:
ns=itemname.split(',')
for n in ns:
names.append(n.strip())
else:
names.append(itemname)
#print(modulename,names)
if modulename.startswith('.'):
#print(modulename)
#print(file)
x6=deal_with_current_module(modulename,file,names)
#global all_candidates
all_candidates.update(x6)
continue
'''
firmname=modulename.split('.')[0]
if firmname==curmodule:
print("current module:",modulename)
deal_with_current_module(modulename,names)
continue
#need other ops get all methods defined in modules
#try1:copy the current proj to root path
'''
for n in names:
x7=get_item_methods(modulename,n)
#global all_candidates
all_candidates.update(x7)
elif re.match('from [a-zA-Z0-9\.\_]+ import [a-zA-Z0-9\_\.\*\,]+ as [a-zA-Z0-9\_\.\*\,\s]+$',line):
#print(4,line)
modulename=GetMiddleStr(line,'from','import').strip()
if modulename.startswith('.'):
#print(modulename)
#print(4,file)
modulename=get_real_module(modulename,file)
#continue
#print(modulename)
#need other ops to change the modulename as absmodule
itemname=line.split('import')[-1]
#print(modulename,itemname)
if ',' not in itemname:
lsx=itemname.split(' as ')
if len(lsx)<2:
continue
itname=lsx[0].strip()
aliasname=lsx[1].strip()
x8=get_alias_item(modulename,itname,aliasname)
#global all_candidates
all_candidates.update(x8)
else:
ls5=itemname.split(',')
for it in ls5:
if ' as ' not in it:
itname=it.strip()
x9=get_item_methods(modulename,itname)
#global all_candidates
all_candidates.update(x9)
else:
itname=it.split(' as ')[0].strip()
aliasname=it.split(' as ')[1].strip()
x10=get_alias_item(modulename,itname,aliasname)
#global all_candidates
all_candidates.update(x10)
#pass
#else:
#print('SyntaxError: invalid syntax')
#print(all_candidates)
return all_candidates
def get_caller(rec):
nrec=re.sub('\(.*\)','',rec)
pindex=nrec.rfind('.')
return nrec[:pindex]
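# Complete an unfinished snippet by appending the bracket closers still open
# after the last 'def', ignoring brackets inside string literals.
# Roughly: check('x = foo(bar[1,') -> 'x = foo(bar[1,])'.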
def check(newcontext):
ls=newcontext.split('\n')
i=0
for i in range(len(ls)-1,-1,-1):
if ls[i].strip().startswith('def'):
break
nc=''
for j in range(i,len(ls)):
nc+=ls[j]+'\n'
#nc=newcontext
#print(nc)
nc=re.sub('\'[\\\[\]\(\)\{\}A-Za-z0-9_\,\:]+\'','',nc)
nc=re.sub('\"[\\\[\]\(\)\{\}A-Za-z0-9_\,\:]+\"','',nc)
lk=nc.count('(')
rk=nc.count(')')
ll=nc.count('[')
rl=nc.count(']')
ld=nc.count('{')
rd=nc.count('}')
kc=lk-rk
lc=ll-rl
dc=ld-rd
addc=''
#print(kc,lc,dc)
if kc==lc==dc==0:
return newcontext
else:
ks=''
#print(nc)
for i in range(0,len(nc)):
c=nc[i]
if re.match('[\(\)\[\]\{\}]',c):
ks+=c
#print(ks)
while('{}' in ks or '[]' in ks or '()' in ks):
while '()' in ks:
ks=re.sub('\[\]','',ks)
ks=re.sub('\{\}','',ks)
ks=re.sub('\(\)','',ks)
while '[]' in ks:
ks=re.sub('\{\}','',ks)
ks=re.sub('\(\)','',ks)
ks=re.sub('\[\]','',ks)
while '{}' in ks:
ks=re.sub('\[\]','',ks)
ks=re.sub('\(\)','',ks)
ks=re.sub('\{\}','',ks)
#print(ks)
for i in range(len(ks)-1,-1,-1):
if ks[i]=='(':
addc+=')'
elif ks[i]=='[':
addc+=']'
else:
addc+='}'
#print(newcontext)
#sys.exit(0)
#x=re.sub('return ','',newcontext+addc)
return newcontext+addc
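# Infer the receiver type by writing the completed snippet (ending in a
# reveal_type() call) to tmp.py and shelling out to pytype, then parsing the
# type from log.txt lines tagged '[reveal-type]' -- assuming pytype's usual
# 'tmp.py:LINE:COL: ... : TYPE [reveal-type]' output format.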
def get_type(finalc,file):
lindex=file.rfind('/')
tmp=file[:lindex]+'/tmp.py'
with open(tmp,'w+') as f:
f.write(finalc)
#with open(tmp2,'w+') as f2:
#f2.write(finalc)
try:
#os.system('pytype '+tmp)
os.system('pytype '+tmp+' > log.txt')
#os.system('rm '+tmp)
except Exception:
sys.exit()
with open('log.txt') as f:
lines=f.readlines()
vtype='None'
for line in lines:
if '[reveal-type]' in line:
tp=line.split(':')[1]
vtype=re.sub('\[reveal\-type\]','',tp)
#print(vtype)
break
#if '[python-compiler-error]' in line:
#sys.exit()
global Nonenum,Anynum,OKnum
if vtype=='None':
#print(tmp)
#sys.exit()
Nonenum+=1
elif vtype=='Any' or vtype=='nothing':
Anynum+=1
else:
OKnum+=1
return vtype
def get_bank(line):
ip=0
for ip in range(0,len(line)):
if line[ip]!=' ':
break
return (line[:ip],ip)
def check_try(code,trycache):
#print(trycache)
ret=code
#l=sorted(trycache)
#print(l)
for i in range(len(trycache)-1,-1,-1):
ret+='\n'+trycache[i][0]+'except Exception:\n'+trycache[i][0]+' '+'pass'
return ret
def get_curr_apis(ft,file):
#print('Note! ',ft,file)
tmp_file=re.sub(root_path,'',file)
rmodule=re.sub('\/','.',tmp_file)
rmodule=rmodule[:-3]
#print("Note!",rmodule)
ret=get_item_methods(rmodule,ft)
#print('Note! ',ret)
return ret
def get_typeshed_apis(ft):
ret=[]
ft=ft.strip()
ft=re.sub('\[.*\]','',ft)
with open('typeshed.txt') as f:
lines=f.readlines()
s1='.'+ft+'.'
s2=ft+'.'
for line in lines:
if s1 in line or line.startswith(s2):
#print('Find typeshed: '+line.strip())
s3=line.strip()
index=s3.rfind('.')
s4=s3[index+1:]
if not s4 in ret:
ret.append(s4)
return ret
#inferred type, caller
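# Dispatch on the inferred type string (module, str, List[...], Dict[...],
# Union[...], dotted names, project-local classes, ...) and return
# {caller: [candidate API names]}, filtering dunder members at the end.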
def get_candidates(ft,caller,file):
if ft.startswith('Type['):
ft=ft[5:-1]
print('type:',ft)
candidates={}
global if_from_current_proj
if_from_current_proj=1
if ft=='module':
for k,v in module_apis.items():
if k==caller:
candidates={caller:v}
#print(candidates)
return candidates
candidates=get_module_funcs(caller)
elif ft=='str':
candidates={caller:dir(str)}
elif re.match('List\[.*\]',ft):
candidates={caller:dir(list)}
elif re.match('Dict\[.*\]',ft):
apsx=dir(dict)
apsx.append('iteritems')
candidates={caller:apsx}
elif ft=='set' or re.match('Set\[.*\]',ft):
candidates={caller:dir(set)}
elif ft.endswith('[str]'):
candidates=get_candidates(ft[:-5],caller,file)
elif ft=='bool':
candidates={caller:dir(bool)}
elif re.match('Union\[.*\]',ft):
ft=ft+'end'
contents=GetMiddleStr(ft,'Union[',']end')
contents=re.sub('\[.*\]','',contents)
lss=contents.split(',')
tmp=[]
for k in lss:
#print('Note!!')
k=k.strip()
#print(k)
if k=='Any' or k=='nothing':
continue
tpdic=get_candidates(k,caller,file)
for k,v in tpdic.items():
tmp.extend(v)
if_from_current_proj=0
candidates={caller:tmp}
elif re.match('Optional\[.*\]',ft):
#ft=ft+'end'
#contents=GetMiddleStr(ft,'Optional[',']end')
#contents=re.sub('\[.*\]','',contents)
#candidates=get_candidates(ft,caller,file)
candidates={}
if_from_current_proj=0
    #tuple/int/float receivers are temporarily ignored since we have not yet seen such callers
#elif re.match('Pattern\[.*\]',ft):
#candidates={caller:dir(re.Pattern)}
#elif re.match('Match\[.*\]',ft):
#candidates={caller:dir(re.Match)}
elif '.' in ft:
index=ft.rfind('.')
module=ft[:index]
item=ft[index+1:]
rindex=ft.find('.')
rootmodule=ft[:rindex]
candidates=get_item_funcs(rootmodule,module,item)
elif ft=='Any' or ft=='None' or ft=='nothing':
candidates=get_all_apis()
if_from_current_proj=0
#print('Note!All types:')
#print(candidates)
return candidates
elif re.match('[a-zA-Z0-9_]+',ft):
        #in many cases the caller invokes functions defined after the call site,
        #so we copy the original file onto the Python lib path to collect candidates
candidates=get_curr_apis(ft,file)
#print('Other types: '+ft)
if len(candidates)==0:
typeshed_apis=get_typeshed_apis(ft)
candidates.update({caller:typeshed_apis})
#else:
#if_from_current_proj=1
for k,v in candidates.items():
dag=[]
#print('yes')
#print(v,len(v))
for j in range(0,len(v)):
#print(j)
if not v[j].startswith('__'):
dag.append(v[j])
#print("yes")
#print(dag)
candidates[k]=dag
#print(candidates)
return candidates
def get_callee(rec):
nrec=re.sub('\(.*\)','',rec)
pindex=nrec.rfind('.')
return nrec[pindex+1:],rec[pindex+1:]
def get_total(w,naming_context,files):
ret=0.0
#print(w)
for fi in files:
key=w+'##'+fi
if key in proj_token_count:
ret+=proj_token_count[key]
ret+=naming_context.count(w)
#print(ret)
#sys.exit(0)
return ret
def get_conum(w,n,naming_context,files):
ret=0.0
for fi in files:
k1=w+'##'+fi
k2=n+'##'+fi
if k1 in proj_token_no and k2 in proj_token_no:
x1=proj_token_no[k1]
y1=proj_token_no[k2]
ctis=[x for x in x1 if x in y1]
ret+=float(len(ctis))
return ret
def get_conum_of_line(api,naming_line,naming_context,files):
del_estr = string.punctuation + string.digits
replace = " "*len(del_estr)
tran_tab = str.maketrans(del_estr, replace)
tmp=naming_line.translate(tran_tab)
nl=word_tokenize(tmp)
cs=api.translate(tran_tab)
wcs=word_tokenize(cs)
#print(api,wcs,naming_line,nl)
#sys.exit(0)
total=0.0
conum=0.0
score=0.0
#print(wcs,nl)
    #TODO: this double loop is redundant/overcomplicated (original note: 'gao fan le')
for w in wcs:
total=total+get_total(w,naming_context,files)
#print(1)
for n in nl:
conum+=get_conum(w,n,naming_context,files)
if total!=0:
total=float(total)
conum=float(conum)
score=float( conum / total )
return score
#proj_tokens
#proj_depends
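# Score each candidate API against the tokens of the current call-site line,
# using token co-occurrence counts taken from project files that do not
# import the current module.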
def get_line_scores(aps,naming_line,naming_context,file):
line_scores={}
tokens=[]
fi=re.sub('\.py','',file)
index=fi.rfind('/')
curname=fi[index+1:]
#print(curname)
files=[]
for k,v in proj_depends.items():
if k==file:
continue
#print(k)
flag=0
for imports in v:
#print
if curname in imports:
#print(imports)
flag=1
break
if flag==0:
#print(proj_tokens[k])
#sys.exit(0)
files.append(k)
#print(tokens)
for api in aps:
if api.startswith('__') or re.match('[A-Z0-9_]+$',api) or api.strip()=='_':
#process_bar.show_process()
continue
line_ret=get_conum_of_line(api,naming_line,naming_context,files)
line_scores[api]=line_ret
return line_scores
def get_total_infile(w,files):
ret=0.0
for fi in files:
key=w+'##'+fi
if key in proj_token_count:
ret+=1.0
return ret
def get_conum_infile(w,item,files):
ret=0.0
for fi in files:
k1=w+'##'+fi
k2=item+'##'+fi
if k1 in proj_token_no and k2 in proj_token_no:
ret+=1.0
return ret
def get_conum_of_con(api,naming_context,files):
code=naming_context.strip()
lines=code.split('\n')
del_estr = string.punctuation + string.digits
replace = " "*len(del_estr)
tran_tab = str.maketrans(del_estr, replace)
rets=0.0
for i in range(0,len(lines)):
tmp=lines[i].translate(tran_tab)
nl=word_tokenize(tmp)
cs=api.translate(tran_tab)
wcs=word_tokenize(cs)
total=0.0
#print(wcs,nl)
for w in wcs:
total=total+get_total_infile(w,files)
conum=0.0
for w in wcs:
for item in nl:
conum=conum+get_conum_infile(w,item,files)
if total!=0:
total=float(total)
conum=float(conum)
score=float( conum / total )
rets+=float(i+1)*score
context_ret=float(float(rets) / float(len(lines)+1.0))
return context_ret
def get_conum_scores(aps,naming_context,file):
conum_scores={}
fi=re.sub('\.py','',file)
index=fi.rfind('/')
curname=fi[index+1:]
#print(curname)
files=[]
for k,v in proj_depends.items():
if k==file:
continue
#print(k)
flag=0
for imports in v:
#print
if curname in imports:
#print(imports)
flag=1
break
if flag==0:
files.append(k)
for api in aps:
if api.startswith('__') or re.match('[A-Z0-9_]+$',api) or api.strip()=='_':
continue
con_ret=get_conum_of_con(api,naming_context,files)
conum_scores[api]=con_ret
return conum_scores
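# Summarise recommendation ranks as top-k hit rates plus MRR.
# e.g. ranks [1, 2, 10] -> top-1 = 1/3, top-2 = 2/3, top-10 = 3/3,
# MRR = (1 + 1/2 + 1/10) / 3 ~= 0.53.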
def get_results(arr):
print('Ranks :'+str(arr))
mrr=0.0
top1=0
top2=0
top3=0
top4=0
top5=0
top10=0
top20=0
for i in range(0,len(arr)):
mrr+=float(1.0/float(arr[i]))
if arr[i]==1:
top1+=1
top2+=1
top3+=1
top4+=1
top5+=1
top10+=1
top20+=1
elif arr[i]==2:
top2+=1
top3+=1
top4+=1
top5+=1
top10+=1
top20+=1
elif arr[i]==3:
top3+=1
top4+=1
top5+=1
top10+=1
top20+=1
elif arr[i]==4:
top4+=1
top5+=1
top10+=1
top20+=1
elif arr[i]==5:
top5+=1
top10+=1
top20+=1
elif arr[i]<=10:
top10+=1
top20+=1
elif arr[i]<=20:
top20+=1
tp1=float(top1/len(arr))
tp2=float(top2/len(arr))
tp3=float(top3/len(arr))
tp4=float(top4/len(arr))
tp5=float(top5/len(arr))
tp10=float(top10/len(arr))
tp20=float(top20/len(arr))
mrr=float(mrr/float(len(arr)))
print("Top-k:",top1,top2,top3,top4,top5,top10,top20,len(arr))
print("Top-k+mrr:",tp1,tp2,tp3,tp4,tp5,tp10,tp20,mrr)
    s=str(tp1)+','+str(tp2)+','+str(tp3)+','+str(tp4)+','+str(tp5)+','+str(tp10)+','+str(tp20)+','+str(mrr)+'\n'
    # write the summary before returning (this block previously sat after the
    # return statement and was unreachable)
    with open('testdata/'+CURRENT_PROJ+'_result.txt','w+') as ft:
        ft.write(s)
    return [tp1,tp2,tp3,tp4,tp5,tp10,mrr]
def get_time(ts):
totalt=0.0
for t in ts:
totalt+=t
ret=float(totalt/float(len(ts)))
print('Average time: ',ret)
with open('testdata/'+CURRENT_PROJ+'_result.txt','a+') as ft:
ft.write(str(ret)+'\n')
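# Core per-file loop: locate each 'receiver.api(...)' call site, infer the
# receiver type with pytype, gather candidate APIs, compute dataflow /
# name-similarity / co-occurrence features, rank the candidates with the
# trained classifier, and record the rank of the true callee.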
def get_rec_point(file):
print('DEAL-WITH:'+file)
#with open('types/types.txt','a+') as ff:
#ff.write('FILE:'+file)
with open(file) as f:
lines=f.readlines()
#print(lines)
precode=''
trynum=0
trycache=[]
kflag=0
lno=0
#s=''
comment_flag=0
calls=[]
for line in lines:
#print(line)
lno+=1
if line.strip().startswith('#'):
continue
if re.match('[bru]*\'\'\'$',line.strip()) or re.match('[bru]*\"\"\"$',line.strip()):
if comment_flag==0:
comment_flag=1
else:
comment_flag=0
continue
elif (re.match('[bru]*\'\'\'',line.strip()) or re.match('[bru]*\"\"\"',line.strip())) and (re.match('.*[bru]*\'\'\'$',line.strip()) or re.match('.*[bru]*\"\"\"$',line.strip())):
continue
elif re.match('[bru]*\'\'\'',line.strip()) or re.match('[bru]*\"\"\"',line.strip()) or re.match('.*[bru]*\'\'\'$',line.strip()) or re.match('.*[bru]*\"\"\"$',line.strip()):
if comment_flag==0:
comment_flag=1
else:
comment_flag=0
continue
if comment_flag==1:
continue
if 'try:' in line:
trynum+=1
trycache.append(get_bank(line))
elif trynum>0 and ('except' in line or 'finally:' in line):
(bank,lenth)=get_bank(line)
for i in range(len(trycache)-1,-1,-1):
if trycache[i][1]==lenth:
trynum-=1
del trycache[i]
recobj=re.findall('[a-zA-Z0-9_\.\[\]]+\.[a-zA-Z0-9\_]+\(.*\)',line)
#print(recobj)
if len(recobj)==0:
precode+=line
continue
#print(file)
#print(recobj)
rec=recobj[0]
caller=get_caller(rec)
if caller.startswith('['):
caller=caller[1:]
callee,rcallee=get_callee(rec)
if callee.startswith('_') or re.match('[A-Z0-9_]+$',callee) or callee.strip()=='_':
precode+=line
continue
cp=caller+'.'+callee
if cp in calls:
precode+=line
continue
else:
calls.append(cp)
i=0
latest_line=line.replace(rcallee,'unknown_api()')
#print('NOTE!',latest_line)
tpp=precode.strip()
if tpp.endswith(','):
newcontext=tpp[:-1]
finalc=check(newcontext)
#print(finalc)
current_context=finalc+'\n'+latest_line
prelast=precode.strip().split('\n')[-1]
for i in range(0,len(prelast)):
if prelast[i]!=' ':
break
finalc+='\n'+line[:i-4]+'reveal_type('+caller+')'
elif tpp.endswith('(') or tpp.endswith('{') or tpp.endswith('['):
newcontext=tpp
finalc=check(newcontext)
current_context=finalc+'\n'+latest_line
#print(finalc)
prelast=precode.strip().split('\n')[-1]
for i in range(0,len(prelast)):
if prelast[i]!=' ':
break
finalc+='\n'+line[:i]+'reveal_type('+caller+')'
else:
for i in range(0,len(line)):
if line[i]!=' ':
break
#print(i)
#print(line)
newcontext=tpp
finalc=check(newcontext)
finalc+='\n'+line[:i]+'reveal_type('+caller+')'
current_context=precode+latest_line
if len(trycache)>0:
finalc=check_try(finalc,trycache)
#print(finalc)
#print('[Process[1] : Preprocessing # Getting reommendation point, simple type inference, possible API candidates and current incomplete code context.]')
#print(file+'#'+str(lno)+'#'+caller+'#'+callee)
#if '.' in caller:
#ft='Any'
#else:
ft=get_type(finalc,file)
ft=ft.strip()
print(line.strip())
print(file+'#'+str(lno)+'#'+caller+':'+ft+'#'+callee)
#print(Nonenum,Anynum,OKnum)
aps=[]
if ft=='None' or ft=='Any':
if caller=='self':
for d in all_defs:
dname=d.strip().split(' ')[1]
aps.append(dname)
elif caller=='str' or caller=='s' or caller=='string':
ft='str'
elif caller=='sys.stderr' or caller=='sys.stdout' or caller=='sys.stdin':
ft='module'
elif caller=='log':
ft='logging.Logger'
caller=ft
elif re.match('for .* in .*\..*\(.*\).*\:',line.strip()):
aps=dir(dict)
aps.append('iteritems')
else:
#tp=caller.split('.')
#fc=tp[0]
if '.' in caller:
xindex=caller.find('.')
fc=caller[:xindex]
xattr=caller[xindex+1:]
else:
xattr=caller
fc=caller
#print('check module:',fc)
#print('check attr:',xattr)
if fc in stdlib:
ft='module'
print('stdlib!',fc)
#print('module!',caller)
try:
module1=importlib.import_module(caller)
aps=dir(module1)
except Exception:
try:
module2=importlib.import_module(fc)
attr=getattr(module2,xattr)
aps=dir(attr)
except Exception:
aps=[]
else:
for curapi in cur_apis:
if '.'+caller+'.' in curapi:
idx=curapi.find('.'+caller+'.')
canapi=curapi[idx+1:]
if not '.' in canapi:
aps.append(canapi)
                            print('get api from json!')
print(canapi)
if len(aps)==0:
apis = get_candidates(ft,caller,file)
for k,v in apis.items():
aps.extend(v)
if len(aps)==0:
precode+=line
continue
global pranks,ptimes,pinranks
if re.match('[A-Z]+[A-Za-z]+',callee) or callee.startswith('_'):
print('CONSTRUCTOR,IGNORE')
precode+=line
continue
if callee in aps:
print('API IV')
else:
print('API OOV')
pranks.append(100)
global all_apis_add,all_apis
all_apis_add.append(callee)
tmpx=all_apis['all_apis']
tmpx.extend(all_apis_add)
tmpx=list(set(tmpx))
all_apis['all_apis']=tmpx
ptimes.append(0.0)
precode+=line
continue
#ss=''
#for ap in aps:
#ss=ss+ap+','
#ss=ss[:-1]+'\n'
#s=caller+':'+ft+'#'+callee+'\n'
s1=time.time()
#print('[Process[2] : Constructing dataflow hints.]')
current_dataflow=get_dataflow.get_current_dataflow2(current_context,caller)
#print(maxflow)
if len(current_dataflow)==0:
precode+=line
continue
maxflow=max(current_dataflow,key=len)
#print(maxflow)
dataflow_scores=get_dataflow.get_dataflow_scores(aps,maxflow,current_dataflow,ft,callee)
tosim_scores=get_dataflow.get_tosim_scores(aps,maxflow,current_dataflow,ft,callee)
try:
naming_line=re.sub(callee,'',line)
except Exception as err:
print(err)
print(line)
sys.exit()
precode+=line
continue
naming_context=precode
line_scores=get_line_scores(aps,naming_line,naming_context,file)
e1=time.time()
print(e1-s1)
label=0
apis=[]
with open('test.csv','w+') as f:
f.write('f1,f2,f3,f4\n')
start=time.time()
if ft=='None' or ft=='Any' or ft=='nothing':
for api in aps:
if api.startswith('__') or re.match('[A-Z0-9_]+$',api) or api.strip()=='_':
continue
if api==callee:
label=1
else:
label=0
apis.append(api)
try:
s=str(dataflow_scores[api])+','+str(tosim_scores[api])+','+str(line_scores[api])+',0.0\n'
with open('test.csv','a+') as f:
f.write(s)
except Exception as err:
print(err)
sys.exit(0)
else:
flag=0
conum_scores=get_conum_scores(aps,naming_context,file)
for api in aps:
if api.startswith('__') or re.match('[A-Z0-9_]+$',api) or api.strip()=='_':
continue
if api==callee:
label=1
else:
label=0
apis.append(api)
try:
s=str(dataflow_scores[api])+','+str(tosim_scores[api])+','+str(line_scores[api])+','+str(conum_scores[api])+'\n'
with open('test.csv','a+') as f:
f.write(s)
except Exception as err:
print(err)
sys.exit(0)
test_data=pd.read_csv('test.csv')
#print(apis)
#print(len(apis))
#print(test_data)
clf=joblib.load('traincsv/'+CURRENT_PROJ+'_svm.pkl')
result=clf.predict_proba(test_data)
candidates={}
for i in range(0,len(apis)):
candidates[apis[i]]=result[i][1]
cans=sorted(candidates.items(), key=lambda x: x[1], reverse=True)
#print(cans)
end = time.time()
ts=end - start
print(ts)
print('--------------------------------------------------------------------------------------------------')
print('Recommended Functions for Caller: ' + caller)
print('--------------------------------------------------------------------------------------------------')
lenthk=len(cans)
exists_rec = []
if lenthk > 10:
lenthk = 10
for i in range(0,lenthk):
print(str(i+1)+' : ' + caller + '.' + cans[i][0] + '()')
exists_rec.append(cans[i][0])
rev_cans = sorted(candidates.items(), key=lambda x: x[1])
print('--------------------------------------------------------------------------------------------------')
        print('Functions not Recommended for Caller: ' + caller)
print('--------------------------------------------------------------------------------------------------')
        lenthk = len(rev_cans)
        if lenthk > 5:
            lenthk = 5
for i in range(0,lenthk):
if rev_cans[i][0] not in exists_rec:
print(str(i+1)+' : ' + caller + '.' + rev_cans[i][0] + '()')
# Temporarily commented out Google search feature for faster testing of accuracy. Uncomment to reimplement.
"""
print('--------------------------------------------------------------------------------------------------')
        print('Press c to continue or type a number from the recommended function list to search google for more information about the function: ')
google_input = input()
while google_input != 'c':
if google_input == '1':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[0][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '2':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[1][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '3':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[2][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '4':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[3][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '5':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[4][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '6':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[5][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '7':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[6][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '8':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[7][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '9':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[8][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
elif google_input == '10':
req_result = requests.get('https://www.google.com/search?q=python+' + cans[9][0]).text
soup = bs4.BeautifulSoup(req_result, 'html.parser')
headings = soup.find_all('h3')
links = soup.find_all('a')
print('Top google search result for function: ')
print('--------------------------------------------------------------------------------------------------')
for heading in headings:
if 'python' in heading.getText().lower():
print(heading.getText())
break
for link in links:
link_string = link.get('href').lower()
if 'python' in link_string and 'www' in link_string and 'google' not in link_string:
print(link.get('href').replace('/url?q=', '').split('&', 1)[0])
break
print('--------------------------------------------------------------------------------------------------')
print('Type another number to search google or press c to continue')
google_input = input()
"""
rank=21
for k in range(0,len(cans)):
if cans[k][0]==callee:
rank=k+1
#print('Ranked '+str(rank))
if rank > 20:
pranks.append(rank)
#if atag==1:
#aranks.append(rank)
            # Record: PRIAN cannot recommend; jump to the next recommendation.
else:
# PRIAN successfully recommends.
pranks.append(rank)
#if atag==1:
#aranks.append(rank)
ptimes.append(ts)
#alltimes+=ts+'\n'
pinranks.append(rank)
precode+=line
temp_arr = get_results(pinranks)
topk_array[0] += temp_arr[0]
topk_array[1] += temp_arr[1]
topk_array[2] += temp_arr[2]
topk_array[3] += temp_arr[3]
topk_array[4] += temp_arr[4]
topk_array[5] += temp_arr[5]
topk_array[6] += temp_arr[6]
global num_of_apis
if topk_array[5] != 0:
num_of_apis += 1
get_results(pranks)
#get_time(ptimes)
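# Union of every API name we know about: module members, the project's
# testJson API dump, and the builtins listed in builtin.txt.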
def count_all_apis():
#TODO:count all apis,including module_apis,builtin_apis,proj_apis
ret=[]
for k,v in module_apis.items():
for f in v:
if (not f.startswith('__')) and (not re.match('[A-Z0-9]+',f)) and (not f in ret):
ret.append(f)
#print(ret)
with open('testJson/'+CURRENT_PROJ+'.json') as f:
lines=f.readlines()
for line in lines:
line=line.strip()
index=line.rfind('.')
item=line[index+1:]
if (not item.startswith('__')) and (not item in ret):
ret.append(item)
with open('builtin.txt') as f2:
l2=f2.readlines()
for line2 in l2:
it=line2.strip()
if not it in ret:
ret.append(it)
return {'all_apis':ret}
def dealwith(curfile):
global module_apis,all_apis
module_apis={}
all_apis={}
module_apis=get_module_methods(curfile)
all_apis=count_all_apis()
tmpx=all_apis['all_apis']
tmpx.extend(all_apis_add)
tmpx=list(set(tmpx))
all_apis['all_apis']=tmpx
get_rec_point(curfile)
def get_all_apis():
return all_apis
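# Pre-compute, per file: token occurrence counts (proj_token_count), the line
# numbers each token appears on (proj_token_no), and each file's import lines
# (proj_depends). These back the co-occurrence scoring above.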
def get_proj_tokens(iret_list):
global proj_token_count,proj_token_no,proj_depends
del_estr = string.punctuation + string.digits
replace = " "*len(del_estr)
tran_tab = str.maketrans(del_estr, replace)
#tmp=lines[i].strip().translate(tran_tab)
#file_label=0
for file in iret_list:
#file_label+=1
with open(file,encoding='ISO-8859-1') as f:
lines=f.readlines()
line_label=0
for i in range(0,len(lines)):
line_label+=1
if lines[i].strip()=='':
continue
elif re.sub(' ','',lines[i].strip())=='':
continue
elif 'import ' in lines[i]:
if file in proj_depends:
imports=proj_depends[file]
else:
imports=[]
imports.append(lines[i])
proj_depends[file]=imports
tmp=lines[i].strip().translate(tran_tab)
tokens=word_tokenize(tmp)
for tk in tokens:
token=tk+'##'+file
if token in proj_token_count:
tcount=proj_token_count[token]
else:
tcount=0
tcount+=lines[i].count(tk)
proj_token_count[token]=tcount
if token in proj_token_no:
no=proj_token_no[token]
else:
no=[]
no.append(line_label)
proj_token_no[token]=no
###main entry###
# if __name__=="__main__":
#     __main__(CURRENT_PROJ,filePath)
ret_list=[]
proj_token_count={}
proj_token_no={}
proj_depends={}
cur_apis=[]
module_apis={}
all_apis={}
pranks=[]
ptimes=[]
pinranks=[]
all_apis_add=[]
root_path=''
Nonenum=Anynum=OKnum=0
all_defs=[]
all_recs=''
#alltimes=''
CURRENT_PROJ='pyspider'
filePath='testdata/'
with open('test.csv','w+') as f:
f.write('')
root_path = filePath+CURRENT_PROJ
print('LOAD-PROJ:',root_path)
file_list = []
dir_list = []  # note: 'file_list = dir_list = []' aliased both names to one list
ret_list=[]
get_file_path(root_path,file_list,dir_list)
#ret_list=list(set(ret_list))
print(len(ret_list))
trainlen=int(len(ret_list)/10*9)
#print(trainlen)
train_list=ret_list[:trainlen]
test_list=ret_list[trainlen:]
print(train_list)
print(test_list)
#sys.exit()
#proj_tokens={}
proj_token_count={}
proj_token_no={}
proj_depends={}
get_proj_tokens(ret_list)
module_apis={}
id=0
special_flag=0
if_from_current_proj=0
callps=[]
all_apis={}
#======MAIN FUNC ENTRY======
for ifile in test_list:
dealwith(ifile)
#with open('/home/user/PyART/testdatak/'+CURRENT_PROJ+'_time.txt','w+') as f:
#f.write(str(ptimes))
for x, y in enumerate(topk_array):
topk_array[x] = y/num_of_apis
print("Top K Averages for SVM: Top 1: " + str(topk_array[0]) + " Top 2: " + str(topk_array[1]) + " Top 3: " + str(topk_array[2]) + " Top 4: " + str(topk_array[3]) + " Top 5: " + str(topk_array[4]) + " Top 10: " + str(topk_array[5]) + " MRR: " + str(topk_array[6])) |
py | 1a50c9a92b92cebe490769beaa129e4317827b42 | """
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type : 系统内部使用工具
Case Name : pssh工具远程连接备机并输出备节点主机名
Description :
1.pssh -H 添加所有参数,远程连接备节点并输出备节点主机名
2.查看执行结果文件
3.查看错误结果文件
4.pssh -H 添加所有参数,远程连接备节点并输出备节点主机名,命令错误
5.查看执行结果文件
6.查看错误结果文件
7.清理环境
Expect :
1.执行成功;屏幕显示备1节点主机名
2.结果文件显示备节点主机名
3.错误结果文件为空
4.合理报错
5.结果文件为空
6.错误结果文件显示报错提示信息
7.清理环境完成
History :
"""
import os
import unittest
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Logger import Logger
from yat.test import Node
from yat.test import macro
COMMONSH = CommonSH("PrimaryDbUser")
@unittest.skipIf(1 == COMMONSH.get_node_num(), "skip on single-node deployments")
class SystemInternalTools(unittest.TestCase):
def setUp(self):
self.log = Logger()
        self.log.info('-Opengauss_Function_Tools_Pssh_Case0002 start-')
self.PrimaryNode = Node('PrimaryDbUser')
self.Standby_User_Node = Node('Standby1DbUser')
self.IP1 = self.Standby_User_Node.db_host
self.parent_path = os.path.dirname(macro.DB_INSTANCE_PATH)
self.pssh_path = os.path.join(self.parent_path, 'tool', 'script',
'gspylib', 'pssh', 'bin')
self.output_file = os.path.join(self.parent_path, 'output.log')
self.generate_file = os.path.join(self.output_file, self.IP1)
self.err_output_file = os.path.join(self.parent_path, 'err_output.log')
self.generate_err_file = os.path.join(self.err_output_file, self.IP1)
self.expect_result = "bash: hostname123: command not found"
def test_pssh(self):
        text = '--step1: run pssh -H with all options to connect to the ' \
               'standby node and print its hostname; expect: success, ' \
               'standby 1 hostname shown on screen--'
self.log.info(text)
cmd = 'hostname'
check_hostname = self.Standby_User_Node.sh(cmd).result()
self.log.info(check_hostname)
pssh_cmd = f" cd {self.pssh_path};" \
f"source {macro.DB_ENV_PATH};" \
f"python3 pssh " \
f"-H {self.Standby_User_Node.db_host} " \
f"-t 5 " \
f"-p 2 " \
f"-o {self.output_file} " \
f"-e {self.err_output_file} " \
f"-P " \
f"-s " \
f"-i 'echo $HOSTNAME';"
self.log.info(pssh_cmd)
msg = self.PrimaryNode.sh(pssh_cmd).result()
self.log.info(msg)
self.assertEqual(check_hostname, msg.splitlines()[-1].strip(),
                         'step failed: ' + text)
        text = '--step2: check the output file; expect: it contains the standby hostname--'
self.log.info(text)
cat_cmd = f"cat {self.generate_file}"
self.log.info(cat_cmd)
msg = self.PrimaryNode.sh(cat_cmd).result()
self.log.info(msg)
        self.assertEqual(check_hostname, msg, 'step failed: ' + text)
        text = '--step3: check the error output file; expect: it is empty--'
self.log.info(text)
cat_cmd = f"cat {self.generate_err_file}"
self.log.info(cat_cmd)
msg = self.PrimaryNode.sh(cat_cmd).result()
self.log.info(msg)
        self.assertEqual('', msg, 'step failed: ' + text)
        text = '--step4: run pssh -H with all options but an invalid ' \
               'remote command; expect: a reasonable error--'
self.log.info(text)
pssh_cmd = f" cd {self.pssh_path};" \
f"source {macro.DB_ENV_PATH};" \
f"python3 pssh " \
f"-H {self.Standby_User_Node.db_host} " \
f"-t 5 " \
f"-p 2 " \
f"-o {self.output_file} " \
f"-e {self.err_output_file} " \
f"-P " \
f"-s " \
f"-i hostname123;"
self.log.info(pssh_cmd)
msg = self.PrimaryNode.sh(pssh_cmd).result()
self.log.info(msg)
        self.assertTrue(self.expect_result in msg, 'step failed: ' + text)
        text = '--step5: check the output file; expect: it is empty--'
self.log.info(text)
cat_cmd = f"cat {self.generate_file}"
self.log.info(cat_cmd)
msg = self.PrimaryNode.sh(cat_cmd).result()
self.log.info(msg)
        self.assertEqual('', msg, 'step failed: ' + text)
        text = '--step6: check the error output file; expect: it contains the error message--'
self.log.info(text)
cat_cmd = f"cat {self.generate_err_file}"
self.log.info(cat_cmd)
msg = self.PrimaryNode.sh(cat_cmd).result()
self.log.info(msg)
        self.assertTrue(self.expect_result in msg, 'step failed: ' + text)
def tearDown(self):
        text = '--step7: clean up the environment; expect: cleanup completed--'
self.log.info(text)
rm_cmd = f"rm -rf {self.output_file};" \
f"rm -rf {self.err_output_file}"
self.log.info(rm_cmd)
msg = self.PrimaryNode.sh(rm_cmd).result()
self.log.info(msg)
        self.log.info('asserting teardown succeeded')
        self.assertEqual('', msg, 'step failed: ' + text)
        self.log.info('-Opengauss_Function_Tools_Pssh_Case0002 finished-')
|
py | 1a50c9dfc69fa2b7c3fb48c333d8b878110c2956 | # coding=utf8
"""
Test that the expression parser returns proper Unicode strings.
"""
from __future__ import print_function
import os
import time
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
# this test case fails because of rdar://12991846
# the expression parser does not deal correctly with Unicode expressions
# e.g.
#(lldb) expr L"Hello"
#(const wchar_t [6]) $0 = {
# [0] = \0\0\0\0
# [1] = \0\0\0\0
# [2] = \0\0\0\0
# [3] = \0\0\0\0
# [4] = H\0\0\0
# [5] = e\0\0\0
#}
class UnicodeLiteralsTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_expr1(self):
"""Test that the expression parser returns proper Unicode strings."""
self.build()
self.rdar12991846(expr=1)
def test_expr2(self):
"""Test that the expression parser returns proper Unicode strings."""
self.build()
self.rdar12991846(expr=2)
def test_expr3(self):
"""Test that the expression parser returns proper Unicode strings."""
self.build()
self.rdar12991846(expr=3)
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break for main.cpp.
self.source = 'main.cpp'
self.line = line_number(
self.source, '// Set break point at this line.')
def rdar12991846(self, expr=None):
"""Test that the expression parser returns proper Unicode strings."""
if self.getArchitecture() in ['i386']:
self.skipTest(
"Skipping because this test is known to crash on i386")
exe = self.getBuildArtifact("a.out")
# Create a target by the debugger.
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
        # Break on the struct declaration statement in main.cpp.
lldbutil.run_break_set_by_file_and_line(self, "main.cpp", self.line)
# Now launch the process, and do not stop at entry point.
process = target.LaunchSimple(
None, None, self.get_process_working_directory())
if not process:
self.fail("SBTarget.Launch() failed")
if expr == 1:
self.expect('expression L"hello"', substrs=['hello'])
if expr == 2:
self.expect('expression u"hello"', substrs=['hello'])
if expr == 3:
self.expect('expression U"hello"', substrs=['hello'])
|
py | 1a50c9f7e890401157c5fe0b3c531e5ca413bf7d | from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework.test import APIClient
from core.models import Ingredient
from recipe.serializers import IngredientSerializer
INGREDIENTS_URL = reverse('recipe:ingredient-list')
class PublicIngredientsApiTest(TestCase):
def setUp(self):
self.client = APIClient()
def test_login_required(self):
res = self.client.get(INGREDIENTS_URL)
self.assertEqual(res.status_code, 401)
class PrivateIngredientsApiTest(TestCase):
def setUp(self):
self.client = APIClient()
self.user = get_user_model().objects.create_user(
'[email protected]',
'password'
)
self.client.force_authenticate(self.user)
def test_retrieve_ingredients_list(self):
Ingredient.objects.create(user=self.user, name='Kale')
Ingredient.objects.create(user=self.user, name='Salt')
res = self.client.get(INGREDIENTS_URL)
ingredients = Ingredient.objects.all().order_by('-name')
serializer = IngredientSerializer(ingredients, many=True)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, serializer.data)
def test_ingredients_limited_to_user(self):
user2 = get_user_model().objects.create_user(
'[email protected]',
'password'
)
Ingredient.objects.create(user=user2, name='Vinegar')
ingredient = Ingredient.objects.create(user=self.user, name='Turmeric')
res = self.client.get(INGREDIENTS_URL)
self.assertEqual(res.status_code, 200)
self.assertEqual(len(res.data), 1)
self.assertEqual(res.data[0]['name'], ingredient.name)
def test_create_ingredient_successful(self):
payload = {'name': 'Cabbage'}
self.client.post(INGREDIENTS_URL, payload)
exists = Ingredient.objects.filter(
user=self.user,
name=payload['name']
).exists()
self.assertTrue(exists)
def test_create_ingredient_invalid(self):
payload = {'name': ''}
res = self.client.post(INGREDIENTS_URL, payload)
self.assertEqual(res.status_code, 400) |
py | 1a50cb2f24f5375f952dd2c5d0b8f4b52b79978d | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any
from azure.core import AsyncPipelineClient
from msrest import Deserializer, Serializer
from ._configuration import AzureCommunicationSMSServiceConfiguration
from .operations import SmsOperations
from .. import models
class AzureCommunicationSMSService(object):
"""Azure Communication SMS Service.
:ivar sms: SmsOperations operations
:vartype sms: azure.communication.sms.aio.operations.SmsOperations
:param endpoint: The communication resource, for example https://my-resource.communication.azure.com.
:type endpoint: str
"""
def __init__(
self,
endpoint: str,
**kwargs: Any
) -> None:
base_url = '{endpoint}'
self._config = AzureCommunicationSMSServiceConfiguration(endpoint, **kwargs)
self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.sms = SmsOperations(
self._client, self._config, self._serialize, self._deserialize)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "AzureCommunicationSMSService":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
|
py | 1a50cb89bc38e5d60818a294af6bfbbd82bbcda5 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
import random
from PyQt5.QtCore import (pyqtSignal, QByteArray, QDataStream, QIODevice,
QMimeData, QPoint, QRect, QSize, Qt)
from PyQt5.QtGui import QDrag, QColor, QCursor, QIcon, QPainter, QPixmap
from PyQt5.QtWidgets import (QApplication, QFileDialog, QFrame, QHBoxLayout,
QListView, QListWidget, QListWidgetItem, QMainWindow, QMessageBox,
QSizePolicy, QWidget)
import puzzle_rc
class PuzzleWidget(QWidget):
puzzleCompleted = pyqtSignal()
def __init__(self, parent=None):
super(PuzzleWidget, self).__init__(parent)
self.piecePixmaps = []
self.pieceRects = []
self.pieceLocations = []
self.highlightedRect = QRect()
self.inPlace = 0
self.setAcceptDrops(True)
self.setMinimumSize(400, 400)
self.setMaximumSize(400, 400)
def clear(self):
self.pieceLocations = []
self.piecePixmaps = []
self.pieceRects = []
self.highlightedRect = QRect()
self.inPlace = 0
self.update()
def dragEnterEvent(self, event):
if event.mimeData().hasFormat('image/x-puzzle-piece'):
event.accept()
else:
event.ignore()
def dragLeaveEvent(self, event):
updateRect = self.highlightedRect
self.highlightedRect = QRect()
self.update(updateRect)
event.accept()
def dragMoveEvent(self, event):
updateRect = self.highlightedRect.united(self.targetSquare(event.pos()))
if event.mimeData().hasFormat('image/x-puzzle-piece') and self.findPiece(self.targetSquare(event.pos())) == -1:
self.highlightedRect = self.targetSquare(event.pos())
event.setDropAction(Qt.MoveAction)
event.accept()
else:
self.highlightedRect = QRect()
event.ignore()
self.update(updateRect)
def dropEvent(self, event):
if event.mimeData().hasFormat('image/x-puzzle-piece') and self.findPiece(self.targetSquare(event.pos())) == -1:
pieceData = event.mimeData().data('image/x-puzzle-piece')
dataStream = QDataStream(pieceData, QIODevice.ReadOnly)
square = self.targetSquare(event.pos())
pixmap = QPixmap()
location = QPoint()
dataStream >> pixmap >> location
self.pieceLocations.append(location)
self.piecePixmaps.append(pixmap)
self.pieceRects.append(square)
            self.highlightedRect = QRect()
self.update(square)
event.setDropAction(Qt.MoveAction)
event.accept()
            if location == QPoint(square.x() // 80, square.y() // 80):
self.inPlace += 1
if self.inPlace == 25:
self.puzzleCompleted.emit()
else:
self.highlightedRect = QRect()
event.ignore()
def findPiece(self, pieceRect):
try:
return self.pieceRects.index(pieceRect)
except ValueError:
return -1
def mousePressEvent(self, event):
square = self.targetSquare(event.pos())
found = self.findPiece(square)
if found == -1:
return
location = self.pieceLocations[found]
pixmap = self.piecePixmaps[found]
del self.pieceLocations[found]
del self.piecePixmaps[found]
del self.pieceRects[found]
        if location == QPoint(square.x() // 80, square.y() // 80):
self.inPlace -= 1
self.update(square)
itemData = QByteArray()
dataStream = QDataStream(itemData, QIODevice.WriteOnly)
dataStream << pixmap << location
mimeData = QMimeData()
mimeData.setData('image/x-puzzle-piece', itemData)
drag = QDrag(self)
drag.setMimeData(mimeData)
drag.setHotSpot(event.pos() - square.topLeft())
drag.setPixmap(pixmap)
if drag.exec_(Qt.MoveAction) != Qt.MoveAction:
self.pieceLocations.insert(found, location)
self.piecePixmaps.insert(found, pixmap)
self.pieceRects.insert(found, square)
self.update(self.targetSquare(event.pos()))
            if location == QPoint(square.x() // 80, square.y() // 80):
self.inPlace += 1
def paintEvent(self, event):
painter = QPainter()
painter.begin(self)
painter.fillRect(event.rect(), Qt.white)
if self.highlightedRect.isValid():
painter.setBrush(QColor("#ffcccc"))
painter.setPen(Qt.NoPen)
painter.drawRect(self.highlightedRect.adjusted(0, 0, -1, -1))
for rect, pixmap in zip(self.pieceRects, self.piecePixmaps):
painter.drawPixmap(rect, pixmap)
painter.end()
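    # Snap a widget position to the 80x80 puzzle grid,
    # e.g. QPoint(130, 47) -> QRect(80, 0, 80, 80).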
def targetSquare(self, position):
return QRect(position.x() // 80 * 80, position.y() // 80 * 80, 80, 80)
class PiecesList(QListWidget):
def __init__(self, parent=None):
super(PiecesList, self).__init__(parent)
self.setDragEnabled(True)
self.setViewMode(QListView.IconMode)
self.setIconSize(QSize(60, 60))
self.setSpacing(10)
self.setAcceptDrops(True)
self.setDropIndicatorShown(True)
def dragEnterEvent(self, event):
if event.mimeData().hasFormat('image/x-puzzle-piece'):
event.accept()
else:
event.ignore()
def dragMoveEvent(self, event):
if event.mimeData().hasFormat('image/x-puzzle-piece'):
event.setDropAction(Qt.MoveAction)
event.accept()
else:
event.ignore()
def dropEvent(self, event):
if event.mimeData().hasFormat('image/x-puzzle-piece'):
pieceData = event.mimeData().data('image/x-puzzle-piece')
dataStream = QDataStream(pieceData, QIODevice.ReadOnly)
pixmap = QPixmap()
location = QPoint()
dataStream >> pixmap >> location
self.addPiece(pixmap, location)
event.setDropAction(Qt.MoveAction)
event.accept()
else:
event.ignore()
def addPiece(self, pixmap, location):
pieceItem = QListWidgetItem(self)
pieceItem.setIcon(QIcon(pixmap))
pieceItem.setData(Qt.UserRole, pixmap)
pieceItem.setData(Qt.UserRole+1, location)
pieceItem.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsDragEnabled)
def startDrag(self, supportedActions):
item = self.currentItem()
itemData = QByteArray()
dataStream = QDataStream(itemData, QIODevice.WriteOnly)
pixmap = QPixmap(item.data(Qt.UserRole))
location = item.data(Qt.UserRole+1)
dataStream << pixmap << location
mimeData = QMimeData()
mimeData.setData('image/x-puzzle-piece', itemData)
drag = QDrag(self)
drag.setMimeData(mimeData)
        drag.setHotSpot(QPoint(pixmap.width() // 2, pixmap.height() // 2))
drag.setPixmap(pixmap)
if drag.exec_(Qt.MoveAction) == Qt.MoveAction:
if self.currentItem() is not None:
self.takeItem(self.row(item))
class MainWindow(QMainWindow):
def __init__(self, parent=None):
super(MainWindow, self).__init__(parent)
self.puzzleImage = QPixmap()
self.setupMenus()
self.setupWidgets()
self.setSizePolicy(QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed))
self.setWindowTitle("Puzzle")
def openImage(self, path=None):
if not path:
path, _ = QFileDialog.getOpenFileName(self, "Open Image", '',
"Image Files (*.png *.jpg *.bmp)")
if path:
newImage = QPixmap()
if not newImage.load(path):
QMessageBox.warning(self, "Open Image",
"The image file could not be loaded.",
QMessageBox.Cancel)
return
self.puzzleImage = newImage
self.setupPuzzle()
def setCompleted(self):
QMessageBox.information(self, "Puzzle Completed",
"Congratulations! You have completed the puzzle!\nClick OK "
"to start again.",
QMessageBox.Ok)
self.setupPuzzle()
def setupPuzzle(self):
size = min(self.puzzleImage.width(), self.puzzleImage.height())
        self.puzzleImage = self.puzzleImage.copy(
            (self.puzzleImage.width() - size) // 2,
            (self.puzzleImage.height() - size) // 2, size, size).scaled(
                400, 400, Qt.IgnoreAspectRatio, Qt.SmoothTransformation)
self.piecesList.clear()
for y in range(5):
for x in range(5):
pieceImage = self.puzzleImage.copy(x*80, y*80, 80, 80)
self.piecesList.addPiece(pieceImage, QPoint(x,y))
random.seed(QCursor.pos().x() ^ QCursor.pos().y())
for i in range(self.piecesList.count()):
if random.random() < 0.5:
item = self.piecesList.takeItem(i)
self.piecesList.insertItem(0, item)
self.puzzleWidget.clear()
def setupMenus(self):
fileMenu = self.menuBar().addMenu("&File")
openAction = fileMenu.addAction("&Open...")
openAction.setShortcut("Ctrl+O")
exitAction = fileMenu.addAction("E&xit")
exitAction.setShortcut("Ctrl+Q")
gameMenu = self.menuBar().addMenu("&Game")
restartAction = gameMenu.addAction("&Restart")
openAction.triggered.connect(self.openImage)
exitAction.triggered.connect(QApplication.instance().quit)
restartAction.triggered.connect(self.setupPuzzle)
def setupWidgets(self):
frame = QFrame()
frameLayout = QHBoxLayout(frame)
self.piecesList = PiecesList()
self.puzzleWidget = PuzzleWidget()
self.puzzleWidget.puzzleCompleted.connect(self.setCompleted,
Qt.QueuedConnection)
frameLayout.addWidget(self.piecesList)
frameLayout.addWidget(self.puzzleWidget)
self.setCentralWidget(frame)
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
window = MainWindow()
window.openImage(':/images/example.jpg')
window.show()
sys.exit(app.exec_())
|
py | 1a50cdfbdb469584d70aa18e6832a197c42df676 | from dota3 import Hero, Game
battlefield=Game()
battlefield.start()
|
py | 1a50ce37e8540ce5dd523c8fd0661bde8ce44c7d | """Compensation constants."""
# Domain name
DOMAIN = "battery_consumption"
# Sensor Name
SENSOR = "battery_consumption"
# YAML
CONF_BATTERY_CONSUMPTION = "battery_consumption"
CONF_PRECISION = "precision"
CONF_BATTERY_CAPACITY = "battery_capacity"
# Initialisation process
DATA_BATTERY_CONSUMPTION = "battery_consumption"
# Default
DEFAULT_NAME = "battery_consumption"
DEFAULT_PRECISION = 2
|
py | 1a50cee4d5f759a4c4b560fb21be95cf7771b135 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
settings.DYNAMIC_FORMS_EMAIL_RECIPIENTS = getattr(
settings,
'DYNAMIC_FORMS_EMAIL_RECIPIENTS',
[mail[1] for mail in settings.ADMINS]
)
settings.DYNAMIC_FORMS_FORM_TEMPLATES = getattr(
settings,
'DYNAMIC_FORMS_FORM_TEMPLATES',
[
('dynamic_forms/form.html', _('Default form template')),
]
)
settings.DYNAMIC_FORMS_SUCCESS_TEMPLATES = getattr(
settings,
'DYNAMIC_FORMS_SUCCESS_TEMPLATES',
[
('dynamic_forms/form_success.html', _('Default success template')),
]
)
|
py | 1a50cf90cb371bd29a8e40892c30121c85c65f30 | #!/usr/bin/env python3
"""Combine logs from multiple compchain nodes as well as the test_framework log.
This streams the combined log output to stdout. Use combine_logs.py > outputfile
to write to an outputfile."""
import argparse
from collections import defaultdict, namedtuple
import heapq
import itertools
import os
import re
import sys
# Matches on the date format at the start of the log event
TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z")
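# e.g. matches the leading "2021-03-01T12:34:56.789012Z" of a debug.log line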
LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event'])
def main():
"""Main function. Parses args, reads the log files and renders them as text or html."""
parser = argparse.ArgumentParser(usage='%(prog)s [options] <test temporary directory>', description=__doc__)
parser.add_argument('-c', '--color', dest='color', action='store_true', help='outputs the combined log with events colored by source (requires posix terminal colors. Use less -r for viewing)')
parser.add_argument('--html', dest='html', action='store_true', help='outputs the combined log as html. Requires jinja2. pip install jinja2')
args, unknown_args = parser.parse_known_args()
if args.color and os.name != 'posix':
print("Color output requires posix terminal colors.")
sys.exit(1)
if args.html and args.color:
print("Only one out of --color or --html should be specified")
sys.exit(1)
# There should only be one unknown argument - the path of the temporary test directory
if len(unknown_args) != 1:
print("Unexpected arguments" + str(unknown_args))
sys.exit(1)
log_events = read_logs(unknown_args[0])
print_logs(log_events, color=args.color, html=args.html)
def read_logs(tmp_dir):
"""Reads log files.
Delegates to generator function get_log_events() to provide individual log events
for each of the input log files."""
files = [("test", "%s/test_framework.log" % tmp_dir)]
for i in itertools.count():
logfile = "{}/node{}/regtest/debug.log".format(tmp_dir, i)
if not os.path.isfile(logfile):
break
files.append(("node%d" % i, logfile))
return heapq.merge(*[get_log_events(source, f) for source, f in files])
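# Note: heapq.merge yields a globally sorted stream only because each per-file
# log is already chronological and LogEvent namedtuples compare timestamp-first
# (ISO-8601 timestamp strings sort lexicographically, which matches time order).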
def get_log_events(source, logfile):
"""Generator function that returns individual log events.
Log events may be split over multiple lines. We use the timestamp
regex match as the marker for a new log event."""
try:
with open(logfile, 'r', encoding='utf-8') as infile:
event = ''
timestamp = ''
for line in infile:
# skip blank lines
if line == '\n':
continue
# if this line has a timestamp, it's the start of a new log event.
time_match = TIMESTAMP_PATTERN.match(line)
if time_match:
if event:
yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip())
event = line
timestamp = time_match.group()
# if it doesn't have a timestamp, it's a continuation line of the previous log.
else:
event += "\n" + line
# Flush the final event
yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip())
except FileNotFoundError:
print("File %s could not be opened. Continuing without it." % logfile, file=sys.stderr)
def print_logs(log_events, color=False, html=False):
"""Renders the iterator of log events into text or html."""
if not html:
colors = defaultdict(lambda: '')
if color:
colors["test"] = "\033[0;36m" # CYAN
colors["node0"] = "\033[0;34m" # BLUE
colors["node1"] = "\033[0;32m" # GREEN
colors["node2"] = "\033[0;31m" # RED
colors["node3"] = "\033[0;33m" # YELLOW
colors["reset"] = "\033[0m" # Reset font color
for event in log_events:
print("{0} {1: <5} {2} {3}".format(colors[event.source.rstrip()], event.source, event.event, colors["reset"]))
else:
try:
import jinja2
except ImportError:
print("jinja2 not found. Try `pip install jinja2`")
sys.exit(1)
print(jinja2.Environment(loader=jinja2.FileSystemLoader('./'))
.get_template('combined_log_template.html')
.render(title="Combined Logs from testcase", log_events=[event._asdict() for event in log_events]))
if __name__ == '__main__':
main()
|
py | 1a50d0363ed1f5a0145c066d718363c992739841 | # -*- coding: utf-8 -*-
"""
Created on Fri Aug 14 11:58:01 2020
@author: Jhon Corro
@author: Cristhyan De Marchena
"""
import vtk
tube = vtk.vtkTubeFilter()
def get_program_parameters():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('data_file', nargs='?', default=None, help='data file')
parser.add_argument('texture_file', nargs='?',
default=None, help='texture file')
args = parser.parse_args()
return args.data_file, args.texture_file
def read_file(file_name):
import os
if(file_name):
path, extension = os.path.splitext(file_name)
extension = extension.lower()
if extension == ".vti":
reader = vtk.vtkXMLImageDataReader()
reader.SetFileName(file_name)
elif extension == ".vtp":
reader = vtk.vtkXMLPolyDataReader()
reader.SetFileName(file_name)
elif extension == ".jpg":
readerFactory = vtk.vtkImageReader2Factory()
img_file = readerFactory.CreateImageReader2(file_name)
img_file.SetFileName(file_name)
img_file.Update()
reader = img_file
else:
            # the file provided doesn't match the accepted extensions
reader = None
else:
reader = None
return reader
def generate_texture(texture_file):
texture_file = read_file(texture_file)
if(texture_file):
texture = vtk.vtkTexture()
texture.SetInputConnection(texture_file.GetOutputPort())
texture.InterpolateOn()
else:
texture = None
return texture
def generate_actors(data, texture):
# contour
iso = vtk.vtkContourFilter()
iso.SetInputConnection(data.GetOutputPort())
iso.GenerateValues(19, -10000, 8000)
ctf = vtk.vtkColorTransferFunction()
ctf.AddRGBPoint(-10000, 31/255, 162/255, 255/255)
ctf.AddRGBPoint(-1, 1, 1, 1)
ctf.AddRGBPoint(0, 255/255, 47/255, 61/255)
ctf.AddRGBPoint(1, 1, 1, 1)
ctf.AddRGBPoint(8000, 255/255, 251/255, 19/255)
# tubes
global tube
tube.SetInputConnection(iso.GetOutputPort())
tube.SetRadius(1000)
tube.SetNumberOfSides(5)
# Add iso surface mapper.
isoMapper = vtk.vtkDataSetMapper()
isoMapper.SetLookupTable(ctf)
isoMapper.SetInputConnection(tube.GetOutputPort())
isoMapper.SetScalarRange(0, 255)
# mapper.ScalarVisibilityOff()
# Generate iso surface actor from iso surface mapper.
isoActor = vtk.vtkActor()
isoActor.SetMapper(isoMapper)
# Add mapper.
mapper = vtk.vtkDataSetMapper()
mapper.SetInputConnection(data.GetOutputPort())
mapper.SetScalarRange(0, 255)
mapper.ScalarVisibilityOff()
# generate actor
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.SetTexture(texture)
# Included CTF so it can be set to the scalar bar SetLookUpTable
return [actor, isoActor, ctf]
def generate_slide_bar():
# Slidebar colors
red_r = 224/255
red_g = 69/255
red_b = 85/255
green_r = 70/255
green_g = 224/255
green_b = 105/255
white = 242/255
# Create Slidebar
slide_bar = vtk.vtkSliderRepresentation2D()
# Set range and title.
slide_bar.SetMinimumValue(100)
slide_bar.SetMaximumValue(10000.0)
slide_bar.SetValue(1000)
slide_bar.SetTitleText("Tube radius")
# Set colors.
slide_bar.GetSliderProperty().SetColor(red_r, red_g, red_b)
slide_bar.GetTitleProperty().SetColor(white, white, white)
slide_bar.GetLabelProperty().SetColor(red_r, red_g, red_b)
slide_bar.GetSelectedProperty().SetColor(green_r, green_g, green_b)
slide_bar.GetTubeProperty().SetColor(white, white, white)
slide_bar.GetCapProperty().SetColor(red_r, red_g, red_b)
# Set coordinates.
slide_bar.GetPoint1Coordinate().SetCoordinateSystemToNormalizedDisplay()
slide_bar.GetPoint1Coordinate().SetValue(0.78, 0.1)
slide_bar.GetPoint2Coordinate().SetCoordinateSystemToNormalizedDisplay()
slide_bar.GetPoint2Coordinate().SetValue(0.98, 0.1)
return slide_bar
def custom_callback(obj, event):
# print("interaction called")
value = int(obj.GetRepresentation().GetValue())
global tube
tube.SetRadius(value)
tube.Update()
def generate_gui(actors):
# Create renderer stuff
renderer = vtk.vtkRenderer()
renderer_window = vtk.vtkRenderWindow()
renderer_window.AddRenderer(renderer)
renderer_window_interactor = vtk.vtkRenderWindowInteractor()
renderer_window_interactor.SetRenderWindow(renderer_window)
# Add slide bar
slide_bar = generate_slide_bar()
slide_bar.SetLabelFormat("%-#6.2f")
slider_widget = vtk.vtkSliderWidget()
slider_widget.SetInteractor(renderer_window_interactor)
slider_widget.SetRepresentation(slide_bar)
slider_widget.AddObserver("InteractionEvent", custom_callback)
slider_widget.EnabledOn()
# Attempt to create scalar bar
# Create the scalar_bar
scalar_bar = vtk.vtkScalarBarActor()
scalar_bar.SetOrientationToHorizontal()
scalar_bar.SetTextPositionToPrecedeScalarBar()
scalar_bar.UnconstrainedFontSizeOff()
# Pops CTF from the actors' list
scalar_bar.SetLookupTable(actors.pop())
scalar_bar.SetNumberOfLabels(3)
scalar_bar.SetLabelFormat("%-6.0f")
    # These settings don't seem to be applied; not entirely sure why
scalar_bar.SetPosition(0.24, 0.08)
scalar_bar.SetHeight(0.1)
scalar_bar.SetWidth(0.5)
# Add the actor and camera to the renderer, set background and size
for index, actor in enumerate(actors):
renderer.AddActor(actor)
renderer.AddActor2D(scalar_bar)
renderer.ResetCamera()
renderer.GetActiveCamera().Azimuth(180)
renderer.GetActiveCamera().Roll(180)
renderer.GetActiveCamera().Yaw(0)
renderer.GetActiveCamera().Elevation(0)
renderer.SetBackground(0.1, 0.1, 0.1)
renderer.ResetCameraClippingRange()
renderer_window.SetSize(renderer_window.GetScreenSize())
cam1 = renderer.GetActiveCamera()
cam1.Zoom(1.5)
# Smoother camera controls
renderer_window_interactor.GetInteractorStyle().SetCurrentStyleToTrackballCamera()
renderer_window_interactor.Initialize()
renderer_window.Render()
renderer_window.SetWindowName('Heightfield Visualizer')
renderer_window.Render()
renderer_window_interactor.Start()
def main():
# Get file paths from cli params.
data_file, texture_file = get_program_parameters()
# Read data file.
data = read_file(data_file)
if(data):
# Generate texture.
texture = generate_texture(texture_file)
if(texture):
# Generate actor.
actors = generate_actors(data, texture)
# Generate GUI
generate_gui(actors)
else:
print(
'The texture file was not found or the file provided does not match the .jpg extension.')
else:
print('The data file was not found or the file provided does not match neither the .vti and .vtp extension.')
if __name__ == '__main__':
main()
|
py | 1a50d08858deb99460f216a654c4a197c8547ca5 | import logging
import os
import shutil
import re
from collections import defaultdict
import sys
from bs4 import BeautifulSoup
def _getSubstring(block, delimiters):
# No error checking...don't do anything dumb
return block[delimiters[0]:delimiters[1]]
def _textify(block):
"""
Smash down any html formatting in the provided string
"""
# Format html lists as python/Sphinx lists.
block = block.replace("<li>","<li>* ")
return re.sub('[^\x00-\x7f]', ' ', BeautifulSoup(block, 'lxml').get_text()) # explicitly scrub non-ascii chars
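# e.g. _textify("<ul><li>item</li></ul>") comes out roughly as "* item" after
# the list-marker substitution and tag stripping (exact whitespace can vary by
# BeautifulSoup version, so treat this as illustrative).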
def _padString(strIn, padding=None):
"""
Replace every endline with endline + (#padding) spaces, for indent formatting
"""
if padding is None:
return strIn
if not (isinstance(padding, int) and padding >= 0):
raise ValueError("Invalid padding argument {} ".format(padding))
pad = ' '*padding
return pad + strIn.replace('\n', '\n'+pad)
def _docstringify(strIn, padding=None, collapseEmpty=True):
if strIn is None:
return None
stripped = strIn.strip()
if len(stripped) < 1 and collapseEmpty:
return None
if padding is None:
return stripped
return _padString('\n' + stripped + '\n', padding)
def _htmlUnescape(htmlIn, parts=None, additionalParts=None):
if parts is None:
parts = {
" ": " ", " ": " ", " ": " ", # non-breaking space
"​": ""
}
if additionalParts is not None:
parts.update(additionalParts)
out = htmlIn
for key in parts:
out = out.replace(key, parts[key])
return out
def _findBlock(strIn, startString, endString, startLimit=None, endLimit=None, inclusive=False):
if startLimit is None:
startLimit = 0
if endLimit is None:
endLimit = len(strIn)
if endLimit <= startLimit:
return None
try:
start = strIn.index(startString, startLimit, endLimit)
except ValueError as e:
return None
try:
end = strIn.index(endString, start+len(startString), endLimit)
except ValueError as e:
if inclusive:
return start, None
else:
return start+len(startString), None
if inclusive:
return start, end+len(endString)
else:
return start+len(startString), end
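# e.g. _findBlock("a<b>c</b>", "<b>", "</b>") -> (4, 5), so strIn[4:5] == "c";
# the same call with inclusive=True returns (1, 9), spanning both delimiters.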
def _splitTerms(part, delim=' ', secDelim=None):
def parseAnchor(block):
termDelimiters = _findBlock(block, '<a', '</a>', inclusive=True)
if termDelimiters is None:
return block
titleDelimiters = _findBlock(block, 'title="', '"', startLimit=termDelimiters[0], inclusive=False)
path = _getSubstring(block, titleDelimiters).split()[-1]
elementPart = _findBlock(block, '>', '</a>', startLimit=titleDelimiters[1], inclusive=False)
element = _getSubstring(block, elementPart)
end = ""
if len(block) > termDelimiters[1]:
end = block[termDelimiters[1]:]
return path + "." + element + end
def parseBrackets(block, dlim, subBrackets=True):
# find and process <> blocks
outblocks = []
cont = True
previousEnd = 0
while cont:
termDelimiters = _findBlock(block, '<', '>', startLimit=previousEnd, inclusive=True)
if termDelimiters is None:
cont = False
else:
# Is this <> nested?
starts = [termDelimiters[0], ]
ends = [termDelimiters[1], ]
cont2 = True
while cont2:
tempDelimiters = _findBlock(block, '<', '>', startLimit=starts[-1] + 4, inclusive=True)
if tempDelimiters is not None and tempDelimiters[0] < ends[0]:
# we found another block.
try:
blockEnd = block.index('>', ends[-1]) # we have to advance to the proper end
except Exception:
logging.error('We failed to find the new end {}, {},\n\t{}'.format(starts, ends, block))
raise
starts.append(tempDelimiters[0])
ends.append(blockEnd)
else:
cont2 = False
start = starts[0]
end = ends[-1]
# backtrack start to previous delimiter
try:
moveTo = block[start::-1].index(dlim)
start -= moveTo
except ValueError:
start = 0 # there is no previous delimiter
# advance end to next delimiter
try:
moveTo = block.index(dlim, end)
end = moveTo
except ValueError:
end = len(block) # there is no next delimiter
if start > previousEnd:
temp = block[previousEnd:start].strip().split(dlim)
outblocks.extend([el.strip() for el in temp if len(el.strip()) > 0])
if subBrackets:
                    outblocks.append(_htmlUnescape(block[start:end].strip(),
                                                   additionalParts={'&lt;': '<', '&gt;': '>'}))
else:
outblocks.append(block[start:end].strip())
previousEnd = end
else:
if previousEnd < len(block):
temp = block[previousEnd:].strip().split(dlim)
outblocks.extend([el.strip() for el in temp if len(el.strip()) > 0])
return outblocks
# find and replace all anchor segments
part1 = ""
cont = True
previousEnd = 0
while cont:
termDelimiters = _findBlock(part, '<a', '</a>', startLimit=previousEnd, inclusive=True)
if termDelimiters is not None:
start = termDelimiters[0]
end = termDelimiters[1]
part1 += part[previousEnd:start] + parseAnchor(part[start:end])
previousEnd = end
else:
cont = False
else:
part1 += part[previousEnd:]
# find and process <> blocks
if secDelim is None:
return parseBrackets(part1, delim, subBrackets=True)
else:
blocks = []
for theBlock in parseBrackets(part1, delim, subBrackets=False):
blocks.append(parseBrackets(theBlock, secDelim, subBrackets=True))
return blocks
def _parseSignature(sigString, methodName):
# get rid of the junk elements
sigString = _htmlUnescape(sigString, additionalParts={'\n': ' '})
segments = sigString.split(methodName+'(')
# segemnts[0] = modifiers (w. generics info) and return type
# segments[1] = params info, then any thrown exception details
# parse the return type and modifiers
modifierParts = _splitTerms(segments[0].strip())
returnType = modifierParts[-1]
modifiers = []
genericsInfo = None
allowedModifiers = {'public', 'private', 'protected', 'static', 'abstract', 'default', 'final', 'strictfp',
'java.lang.@Deprecated', 'io.deephaven.util.annotations.@ScriptApi'}
if len(modifierParts) > 1:
for el in modifierParts[:-1]:
if el in allowedModifiers:
modifiers.append(el)
elif not el.startswith('@'):
genericsInfo = el
other = segments[1].strip().split(" throws ")
params = []
paramString = other[0].strip()[:-1] # eliminate trailing parenthesis from params
if len(paramString) > 0:
params = _splitTerms(paramString, delim=',', secDelim=' ')
# Not especially interested in parsing anything the method throws?
return modifiers, genericsInfo, returnType, params
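# e.g. _parseSignature("public static int foo(int a, java.lang.String b)", "foo")
# -> (['public', 'static'], None, 'int', [['int', 'a'], ['java.lang.String', 'b']])
# (illustrative; real inputs arrive HTML-escaped and are unescaped above)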
class ClassDocParser(object):
"""This parses the desired components from the provided java doc (page?)"""
def __init__(self, docString):
self._docString = docString
self._methods = defaultdict(self._newMethodItem)
self._package = None
self._symbol = None
self._type = None
self._text = None
# parse the symbol information
self._parseSymbol()
# parse the method details
self._parseMethods()
# todo: parse any other details?
@property
def docString(self):
"""The provided doc string"""
return self._docString
@property
def methods(self):
"""Dictionary of the form {'<symbol>#method' : MethodDetails object}"""
return self._methods
@property
def className(self):
"""The class name for this class"""
if self._package is None or self._symbol is None:
raise ValueError("Package or Symbol not parsed successfully")
return self._package + '.' + self._symbol
@property
def pathName(self):
"""The fully qualified path name for this class"""
if self._package is None or self._symbol is None:
raise ValueError("Package or Symbol not parsed successfully")
return self.className.replace('$', '.')
@property
def isNested(self):
"""Is this nested in another class?"""
if self._symbol is None:
raise ValueError("Symbol not parsed successfully")
return '$' in self._symbol
@property
def parentPath(self):
"""The parent path if nested class/interface, or None"""
if not self.isNested:
return None
ind = self._symbol[-1::-1].index('$')
return self._package + '.' + self._symbol[:-ind]
@property
def type(self):
"""interface, enum, or class?"""
return self._type
@property
def text(self):
"""Document string for the class itself"""
return self._text
def __str__(self):
return 'ClassDocParser<< pathName={}, type={} >>'.format(self.pathName, self.type)
def __repr__(self):
out = []
for key in sorted(self.methods.keys()):
out2 = ',\n'.join([str(meth) for meth in self.methods[key]])
out.append('{}=[\n{}\n]'.format(key, out2))
if self.isNested:
return 'ClassDocParser(\n' + \
'pathName={}\n,'.format(self.pathName) + \
'className={}\n,'.format(self.className) + \
'methods={\n' + ',\n'.join(out) + '})'
else:
return 'ClassDocParser(\n' + \
'pathName={}\n,'.format(self.pathName) + \
'methods={\n' + ',\n'.join(out) + '})'
@staticmethod
def _newMethodItem():
"""Helper method"""
return []
def _parseSymbol(self):
# find the symbol information
classStartBlock = '<!-- ======== START OF CLASS DATA ======== -->'
packageStartBlock = '<div class="subTitle">'
packageEndBlock = '</div'
symbolStartBlock = '<h2'
symbolEndBlock = '</h2>'
symbolInfoDelimiters = _findBlock(self.docString, classStartBlock, symbolEndBlock, inclusive=True)
if symbolInfoDelimiters is None:
raise ValueError('Failed to find the symbol information block')
symbolInfoBlock = _getSubstring(self.docString, symbolInfoDelimiters)
packageInfoDelimiters = _findBlock(symbolInfoBlock, packageStartBlock, packageEndBlock, inclusive=True)
if packageInfoDelimiters is None:
raise ValueError('Failed to find the package block inside the symbol '
'information block = {}'.format(symbolInfoBlock))
pack = _textify(_getSubstring(symbolInfoBlock, packageInfoDelimiters)).strip().split()[-1]
self._package = pack
symbolBlockDelimiters = _findBlock(symbolInfoBlock, symbolStartBlock, symbolEndBlock, inclusive=True)
if symbolBlockDelimiters is None:
raise ValueError('Failed to find the symbol block inside the symbol '
'information block = {}'.format(symbolInfoBlock))
symb = _textify(_getSubstring(symbolInfoBlock, symbolBlockDelimiters)).strip()
# is this a class or an interface?
temp = symb.lower().split()
if 'interface' in temp:
self._type = 'interface'
elif 'enum' in temp:
self._type = 'enum'
else:
self._type = 'class'
# get rid of bracket crapola
try:
ind = symb.index('<')
symb = symb[:ind]
except ValueError:
pass
# get rid of any initial cruft
symb = symb.split()[-1]
symb = symb.replace('.', '$')
self._symbol = symb
# Try to parse the text for this class/enum/interface
classDetailsStartBlock = '<div class="description">' # after symbolEndBlock
classDetailsEndBlock = '<div class="summary">'
classSpecificStart = '<pre>'
classSpecificEnd = '</pre>'
textStart = '<div class="block">' # directly after class specific stuff
textEnd = "</div>"
classDetailsDelimiters = _findBlock(self.docString, classDetailsStartBlock, classDetailsEndBlock,
startLimit=symbolInfoDelimiters[1], inclusive=False)
if classDetailsDelimiters is not None:
classBlock = _getSubstring(self.docString, classDetailsDelimiters)
# find the class specific stuff
classSpecificDelimiters = _findBlock(classBlock, classSpecificStart, classSpecificEnd, inclusive=True)
if classDetailsDelimiters is not None:
textDelimiters = _findBlock(classBlock, textStart, textEnd,
startLimit=classSpecificDelimiters[1], inclusive=True)
if textDelimiters is not None:
self._text = _textify(_getSubstring(classBlock, textDelimiters))
def _parseMethods(self):
# look for a methods section
methodStartString = '<h3>Method Detail</h3>'
methodEndString = '</section>'
limits = _findBlock(self.docString, methodStartString, methodEndString, inclusive=False)
if limits is not None:
methodBlockString = self.docString[limits[0]:limits[1]]
thisStart = 0
theEnd = len(methodBlockString)
# iterate over each method and populate
while (thisStart is not None) and thisStart < theEnd:
methodLimits = _findBlock(methodBlockString, '<li class="blockList">\n<h4>',
'</li>\n</ul>', thisStart, theEnd, inclusive=True)
if methodLimits is not None:
if self.type == 'interface':
defMods = {'public', } # everything for an interface is implicitly public
else:
defMods = set()
methodDetail = MethodDetail(methodBlockString, methodLimits[0], methodLimits[1], defaultModifiers=defMods)
self.methods[methodDetail.name].append(methodDetail)
thisStart = methodLimits[1]
else:
thisStart = None
class MethodDetail(object):
ignoreInSignature = {','}
def __init__(self, strIn, start, end, defaultModifiers=set()):
self.name = None
self.modifiers = None
self.genericsInfo = None
self.returnType = None
self.returnText = None
self.parameters = []
self.parameterTypes = []
self.parametersText = {}
self.text = None
self.documentBlock = strIn[start:end]
thisEnd = len(self.documentBlock)
step = self._getName(0, thisEnd)
if step is not None:
step = self._getSignature(step, thisEnd)
# add in any default modifiers
if self.modifiers is None:
self.modifiers = defaultModifiers
else:
self.modifiers = defaultModifiers.union(self.modifiers)
# make parameters & parameters a tuple - must be hashable
self.parameters = tuple(self.parameters)
self.parameterTypes = tuple(self.parameterTypes)
if step is not None:
step = self._getText(step, thisEnd)
if step is not None:
step = self._getParameterDetails(step, thisEnd)
def __str__(self):
out = []
for arg in ['name', 'modifiers', 'genericsInfo', 'text', 'parameters', 'parameterTypes',
'parametersText', 'returnType', 'returnText']:
val = getattr(self, arg)
if val is not None:
out.append('{}={}'.format(arg, val))
return 'MethodDetail(\n\t' + ',\n\t'.join(out) + ')'
def _getName(self, start, end):
"""Parses name and returns the end of the name block"""
nameStartString = '<h4>'
nameEndString = '</h4>'
nameDelimiters = _findBlock(self.documentBlock, nameStartString, nameEndString, start, end, inclusive=False)
if nameDelimiters is not None:
if nameDelimiters[1] is not None:
self.name = self.documentBlock[nameDelimiters[0]:nameDelimiters[1]]
return nameDelimiters[1] + len(nameEndString)
else:
self.name = self.documentBlock[nameDelimiters[0]:end]
return None
def _getSignature(self, start, end):
"""Parses signature and returns the end of the signature block"""
sigStartString = ['<pre class="methodSignature">', '<pre>']
sigEndString = '</pre>'
sigDelimiters = None
for sigStartStr in sigStartString:
if sigDelimiters is None:
sigDelimiters = _findBlock(self.documentBlock, sigStartStr, sigEndString,
start, end, inclusive=False)
if sigDelimiters is None or sigDelimiters[1] is None:
return None
modifiers, genericsInfo, returnType, params = _parseSignature(self.documentBlock[sigDelimiters[0]: sigDelimiters[1]], self.name)
self.modifiers = modifiers
self.genericsInfo = genericsInfo
self.returnType = returnType
badParsing = False
for seg in params:
el = [entry for entry in seg if entry not in self.ignoreInSignature]
if len(el) == 2:
self.parameterTypes.append(el[0])
self.parameters.append(el[1])
elif len(el) == 3:
self.parameterTypes.append(el[1])
self.parameters.append(el[2])
else:
logging.error("Misparsed argument {}".format(el))
badParsing = True
if badParsing:
logging.error('Evidently bad parsing for the parameters in {}'.format(
_htmlUnescape(self.documentBlock[sigDelimiters[0]: sigDelimiters[1]])))
raise ValueError
return sigDelimiters[1] + len(sigEndString)
def _getText(self, start, end):
"""Parses method text - if it's there - and returns the next starting point"""
textStartString = '<div class="block">'
textEndString = '</div>'
block = None
while block is None:
textDelimiters = _findBlock(self.documentBlock, textStartString, textEndString, start, end, inclusive=False)
if textDelimiters is None or textDelimiters[1] is None:
return start
block = self.documentBlock[textDelimiters[0]:textDelimiters[1]]
# we should squish the html formatting out of the text
if "Description copied" in block:
block = None
start = textDelimiters[1]
self.text = _textify(block)
return textDelimiters[1] + len(textEndString)
def _getParameterDetails(self, start, end):
"""Parses parameter details text - if it's there - and returns the next starting point"""
paramStartString = '<dl>\n<dt><span class="paramLabel">Parameters:</span></dt>\n'
returnStartString = '<dt><span class="returnLabel">Returns:</span></dt>\n'
blockEnd = '</dl>\n</li>'
paramsDelimiters = _findBlock(self.documentBlock, paramStartString, blockEnd, start, end, inclusive=False)
returnsDelimiters = _findBlock(self.documentBlock, returnStartString, blockEnd, start, end, inclusive=False)
paramsBlock = None
returnsBlock = None
endPoint = start
if paramsDelimiters is None and returnsDelimiters is None:
return start
elif returnsDelimiters is None:
# just params block
paramsBlock = self.documentBlock[paramsDelimiters[0]:paramsDelimiters[1]]
endPoint = paramsDelimiters[1] + len(blockEnd)
elif paramsDelimiters is None:
# just returns block
returnsBlock = self.documentBlock[returnsDelimiters[0]:returnsDelimiters[1]]
endPoint = returnsDelimiters[1] + len(blockEnd)
else:
# both are present
paramsBlock = self.documentBlock[paramsDelimiters[0]: returnsDelimiters[0]-len(returnStartString)]
returnsBlock = self.documentBlock[returnsDelimiters[0]:returnsDelimiters[1]]
endPoint = returnsDelimiters[1] + len(blockEnd)
entryStartString = '<dd>'
entryEndString = '</dd>'
pNameStartString = '<code>'
pNameEndString = '</code>'
if returnsBlock is not None:
returnTextDelimiters = _findBlock(returnsBlock, entryStartString, entryEndString, inclusive=False)
if returnTextDelimiters is not None:
self.returnText = _textify(returnsBlock[returnTextDelimiters[0]:returnTextDelimiters[1]])
if paramsBlock is not None:
paramsStep = 0
while (paramsStep is not None) and (paramsStep < len(paramsBlock)):
thisParamDelimiters = _findBlock(paramsBlock, entryStartString, entryEndString, paramsStep, inclusive=False)
paramsStep = None
if thisParamDelimiters is not None:
paramsStep = thisParamDelimiters[0]
paramNameDelimiters = _findBlock(paramsBlock, pNameStartString, pNameEndString, paramsStep, inclusive=False)
paramsStep = None
if paramNameDelimiters is not None:
self.parametersText[paramsBlock[paramNameDelimiters[0]:paramNameDelimiters[1]]] = \
_textify(paramsBlock[paramNameDelimiters[1] + 7:thisParamDelimiters[1]])
paramsStep = thisParamDelimiters[1] + len(entryEndString)
return endPoint
def createDocString(self, padding=None, excludeText=False, collapseEmpty=True):
out = ""
if (self.text is not None) and (len(self.text) > 0) and (not excludeText):
out += '{}\n\n'.format(self.text)
if self.genericsInfo is not None:
out += 'Note: Java generics information - {}\n\n'.format(self.genericsInfo)
for pname, ptype in zip(self.parameters, self.parameterTypes):
pText = self.parametersText.get(pname, None)
if pText is None:
out += ':param {}: {}\n'.format(pname, ptype)
else:
out += ':param {}: ({}) {}\n'.format(pname, ptype, pText)
if self.returnType is not None and self.returnType != 'void':
if self.returnText is None:
out += ':return: {}\n'.format(self.returnType)
else:
out += ':return: ({}) {}\n'.format(self.returnType, self.returnText)
return _docstringify(out, padding, collapseEmpty=collapseEmpty)
def methodDigest(methodDetailList, details, requiredModifiers={'public'}, maxCount=5, padding=None, verbose=False):
maxMaxCount = 50
try:
maxCount = int(maxCount)
except ValueError:
maxCount = 5
finally:
if maxCount < 1:
logging.warning("maxCount was set to {} (< 1), and will be redefined as 1".format(maxCount))
maxCount = 1
if maxCount > maxMaxCount:
logging.warning("maxCount was set to {} (> {}), and will be redefined as {}".format(maxCount, maxMaxCount, maxMaxCount))
maxCount = maxMaxCount
useList = []
for el in methodDetailList:
mods = requiredModifiers.intersection(el.modifiers)
if mods == requiredModifiers:
useList.append(el)
if len(useList) < 1:
return _docstringify(None, padding)
# Is there just one MethodDetail? If so, just return a decent doc string
if len(useList) == 1:
return useList[0].createDocString(padding)
# digest all the things
text = set()
for el in useList:
if el.text is None:
text.add("")
else:
text.add(el.text.strip())
# Is there just one text?
if len(text) == 1:
textPart = text.pop()
else:
texts = {el.text for el in useList if el.text is not None}
texts = list(texts)
texts.sort()
if len(texts) == 0:
textPart = None
elif len(texts) == 1:
textPart = texts[0]
else:
textPart = "**Incompatible overloads text - text from the first overload:**\n\n{}".format(texts[0])
if verbose:
className = details["className"]
print(f"vvvvv INCOMPATIBLE JAVADOC FOR PYTHON vvvvv")
print(f"\tclassName: {className}\n")
print(f"\t{useList[0]}\n")
for i in range(len(texts)):
txt = texts[i].replace("\n"," ")
print(f"\tdocstring {i}: {txt}")
print(f"^^^^^ INCOMPATIBLE JAVADOC FOR PYTHON ^^^^^")
if textPart is None:
out = ""
else:
out = '{}\n\n'.format(textPart.strip())
if len(useList) > 2*maxCount-1:
out += "There are {} overloads, restricting signature summary to first {}:\n".format(len(useList), maxCount)
for i, md in enumerate(useList[:maxCount]):
out += "*Overload {}*{}\n".format(i+1, md.createDocString(padding=2, excludeText=True, collapseEmpty=False))
else:
for i, md in enumerate(useList):
out += "*Overload {}*{}\n".format(i+1, md.createDocString(padding=2, excludeText=True, collapseEmpty=False))
return _docstringify(out, padding)
if __name__ == '__main__':
# NOTE: this will fail (currently) unless the working directory is this location
from docGenUtil import populateCurrentDocs, classDocGeneration, finalize
maxSignatures = 50
verbose = False
# NOTE: weak arg parsing here, do we need more?
if len(sys.argv) < 2:
raise ValueError("The script requires at least one argument: devroot")
if sys.argv[1].lower() in ['-h', '--help']:
print("Called as:\n"
" python javadocExtraction.py <devroot> <assertNoChange>[False]\n"
"\n"
" - <devroot> specifies the development root, below which we expect directories\n"
" `build/docs/javadoc` and `Integrations/python/deephaven/doc`\n"
" - <assertNoChange> [default `False`] optional argument.\n"
" * False indicates to extract the javadocs to .json format below\n"
" `Integrations/python/deephaven/doc`\n"
" * True indicates to check that the .json files in the file system below\n"
" `Integrations/python/deephaven/doc` match what WOULD be generated.\n"
" **NO ACTUAL GENERATION HERE**")
# Parse the arguments
devRoot = sys.argv[1]
assertNoChange = False
if len(sys.argv) > 2:
assert_t = sys.argv[2].lower()
if assert_t in ['true', 't', '1']:
assertNoChange = True
docRoot = os.path.join(devRoot, 'build', 'docs', 'javadoc')
outDir = os.path.join(devRoot, 'Integrations', 'python', 'deephaven', 'doc')
# junk any contents of outDir, if it exists - it's easier than trying to sync somehow
if (not assertNoChange) and os.path.exists(outDir):
shutil.rmtree(outDir)
# walk the contents of outDir, and figure the current list of javadoc extracts
currentDocs = populateCurrentDocs(outDir)
# walk down the com directory of docRoot, and find all the html files
for root, dirs, files in os.walk(os.path.join(docRoot, 'com')):
for fil in files:
fstem, fext = os.path.splitext(fil)
if (fstem[0] == '.') or (fext != '.html') or (fstem.startswith('package-')):
continue
# parse the file
with open(os.path.join(root, fil), 'r', encoding="utf8") as fi:
classDetails = ClassDocParser(fi.read())
logging.info('Converting docs for {}'.format(classDetails))
# get classname, pathname and text for class/interface/enum itself
className = classDetails.className
pathName = classDetails.pathName
symbDocString = _docstringify(classDetails.text, padding=None)
# prepare the docstring dictionary
details = {"className": className, "path": pathName, "typeName": classDetails.type}
if symbDocString is None:
logging.info("className = {} has empty doc string".format(className))
else:
details["text"] = symbDocString
# parse details for explicit methods
methodDetails = {}
for methodName in classDetails.methods:
methodList = classDetails.methods[methodName]
entryDocString = methodDigest(methodList, details, requiredModifiers={'public'}, maxCount=maxSignatures, padding=None, verbose=verbose)
if entryDocString is None:
logging.info("className = {}, methodName = {} has empty docstring".format(className, methodName))
else:
methodDetails[methodName] = entryDocString
details["methods"] = methodDetails
# finalize the generation task for this class
classDocGeneration(currentDocs, assertNoChange, details, outDir)
finalize(currentDocs, assertNoChange, '\nTo resolve failure, run the task "./gradlew :Generators:generatePyDoc -PwithPy=true" '
'to regenerate, and then commit the generated changes.\n'
'To diagnose trouble, run the generation task followed by \"git diff\" to see the changes.\n'
'To diagnose possible indeterminism in the generation process, regenerate the code and check '
'the diff **multiple times**.')
|
py | 1a50d0a84c632b63b10c27dda03902aa6e59b728 | import simuvex
######################################
# Returns a valid char
######################################
class ReturnChar(simuvex.SimProcedure):
def run(self):
s_var = self.state.se.Unconstrained("char_ret", self.state.arch.bits)
self.state.add_constraints(self.state.se.And(self.state.se.ULE(s_var, 126), self.state.se.UGE(s_var, 9)))
return s_var
|
py | 1a50d11d5e94b217818bde4f5bb0e23d15385349 | from polyaxon.config_manager import config
POLYAXON_K8S_APP_NAME = config.get_string('POLYAXON_K8S_APP_NAME')
POLYAXON_K8S_API_HOST = config.get_string('POLYAXON_K8S_API_HOST')
POLYAXON_K8S_API_HTTP_PORT = config.get_int('POLYAXON_K8S_API_HTTP_PORT')
POLYAXON_K8S_API_WS_PORT = config.get_int('POLYAXON_K8S_API_WS_PORT')
POLYAXON_K8S_APP_CONFIG_NAME = config.get_string('POLYAXON_K8S_APP_CONFIG_NAME')
POLYAXON_K8S_APP_SECRET_NAME = config.get_string('POLYAXON_K8S_APP_SECRET_NAME')
POLYAXON_K8S_RABBITMQ_SECRET_NAME = config.get_string('POLYAXON_K8S_RABBITMQ_SECRET_NAME')
POLYAXON_K8S_DB_SECRET_NAME = config.get_string('POLYAXON_K8S_DB_SECRET_NAME')
K8S_AUTHORISATION = config.get_string('POLYAXON_K8S_AUTHORISATION',
is_optional=True,
is_secret=True)
K8S_HOST = config.get_string('POLYAXON_K8S_HOST', is_optional=True)
SSL_CA_CERT = config.get_string('POLYAXON_K8S_SSL_CA_CERT', is_optional=True)
K8S_CONFIG = None
if K8S_AUTHORISATION and K8S_HOST:
import urllib3
from kubernetes import client
K8S_CONFIG = client.Configuration()
K8S_CONFIG.api_key['authorization'] = K8S_AUTHORISATION
K8S_CONFIG.api_key_prefix['authorization'] = 'Bearer'
K8S_CONFIG.host = K8S_HOST
if SSL_CA_CERT:
K8S_CONFIG.verify_ssl = True
K8S_CONFIG.ssl_ca_cert = SSL_CA_CERT
else:
K8S_CONFIG.verify_ssl = False
urllib3.disable_warnings()
|
py | 1a50d1ec21f84551d0932b35103cb13019eedeec | import os
import unittest
from programytest.client import TestClient
class StarPrecedenceTestClient(TestClient):
def __init__(self):
TestClient.__init__(self)
def load_storage(self):
super(StarPrecedenceTestClient, self).load_storage()
self.add_default_stores()
self.add_single_categories_store(os.path.dirname(__file__) + os.sep + "precedence.aiml")
class StarPrecedenceAIMLTests(unittest.TestCase):
def setUp(self):
        client = StarPrecedenceTestClient()
self._client_context = client.create_client_context("testid")
def test_star_precedence(self):
response = self._client_context.bot.ask_question(self._client_context, "FIRSTWORD")
self.assertIsNotNone(response)
self.assertEqual(response, 'FOUND1.')
response = self._client_context.bot.ask_question(self._client_context, "SECONDWORD")
self.assertIsNotNone(response)
self.assertEqual(response, 'NOTHING FOUND.')
|
py | 1a50d29ccca57998babdcc6d2a6e2bff0ec7ec52 | from chef import DataBag, DataBagItem, Search
from chef.exceptions import ChefError
from chef.tests import ChefTestCase
class DataBagTestCase(ChefTestCase):
def test_list(self):
bags = DataBag.list()
self.assertIn('test_1', bags)
self.assertIsInstance(bags['test_1'], DataBag)
def test_keys(self):
bag = DataBag('test_1')
self.assertItemsEqual(bag.keys(), ['item_1', 'item_2'])
self.assertItemsEqual(iter(bag), ['item_1', 'item_2'])
def test_item(self):
bag = DataBag('test_1')
item = bag['item_1']
self.assertEqual(item['test_attr'], 1)
self.assertEqual(item['other'], 'foo')
def test_search_item(self):
self.assertIn('test_1', Search.list())
q = Search('test_1')
self.assertIn('item_1', q)
self.assertIn('item_2', q)
self.assertEqual(q['item_1']['raw_data']['test_attr'], 1)
item = q['item_1'].object
self.assertIsInstance(item, DataBagItem)
self.assertEqual(item['test_attr'], 1)
def test_direct_item(self):
item = DataBagItem('test_1', 'item_1')
self.assertEqual(item['test_attr'], 1)
self.assertEqual(item['other'], 'foo')
def test_direct_item_bag(self):
bag = DataBag('test_1')
item = DataBagItem(bag, 'item_1')
self.assertEqual(item['test_attr'], 1)
self.assertEqual(item['other'], 'foo')
def test_create_bag(self):
name = self.random()
bag = DataBag.create(name)
self.register(bag)
self.assertIn(name, DataBag.list())
def test_create_item(self):
value = self.random()
bag_name = self.random()
bag = DataBag.create(bag_name)
self.register(bag)
item_name = self.random()
item = DataBagItem.create(bag, item_name, foo=value)
self.assertIn('foo', item)
self.assertEqual(item['foo'], value)
self.assertIn(item_name, bag)
bag2 = DataBag(bag_name)
self.assertIn(item_name, bag2)
item2 = bag2[item_name]
self.assertIn('foo', item)
self.assertEqual(item['foo'], value)
def test_set_item(self):
value = self.random()
value2 = self.random()
bag_name = self.random()
bag = DataBag.create(bag_name)
self.register(bag)
item_name = self.random()
item = DataBagItem.create(bag, item_name, foo=value)
item['foo'] = value2
item.save()
self.assertEqual(item['foo'], value2)
item2 = DataBagItem(bag, item_name)
self.assertEqual(item2['foo'], value2)
|
py | 1a50d2e01d205b2cb4ebca658a5b54aab9feadda | names = ["libquadmath0", "libssl1.0.0"]
status = {"libquadmath0":
{"Name": "libquadmath0",
"Dependencies": ["gcc-5-base", "libc6"],
"Description": "GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>",
"Need me": ["libssl1.0.0"]},
"libssl1.0.0":
{"Name": "libssl1.0.0",
"Dependencies": ["libc6", "zlib1g", "libquadmath0"],
"Description": "SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>",
"Alternatives": ["debconf"]}
}
unsure = {"libssl1.0.0": [" debconf (>= 0.5) ", " libquadmath0\n"]}
before_alt = {"libquadmath0":
{"Name": "libquadmath0",
"Dependencies": ["gcc-5-base", "libc6"],
"Description": "GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>"},
"libssl1.0.0":
{"Name": "libssl1.0.0",
"Dependencies": ["libc6", "zlib1g"],
"Description": "SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>"}
}
before_need = {"libquadmath0":
{"Name": "libquadmath0",
"Dependencies": ["gcc-5-base", "libc6"],
"Description": "GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>"},
"libssl1.0.0":
{"Name": "libssl1.0.0",
"Dependencies": ["libc6", "zlib1g", "libquadmath0"],
"Description": "SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>"}
}
lines = ["Package: libquadmath0\n",
"Status: install ok installed\n",
"Priority: optional\n", "Section: libs\n",
"Installed-Size: 265\n",
"Maintainer: Ubuntu Core developers <[email protected]>\n",
"Architecture: amd64\n", "Multi-Arch: same\n",
"Source: gcc-5\n", "Version: 5.4.0-6ubuntu1~16.04.12\n",
"Depends: gcc-5-base (= 5.4.0-6ubuntu1~16.04.12), libc6 (>= 2.23)\n",
"Description: GCC Quad-Precision Math Library\n",
" A library, which provides quad-precision mathematical functions on targets\n",
" supporting the __float128 datatype. The library is used to provide on such\n",
" targets the REAL(16) type in the GNU Fortran compiler.\n",
"Homepage: http://gcc.gnu.org/\n",
"Original-Maintainer: Debian GCC Maintainers <[email protected]>\n",
"\n",
"Package: libssl1.0.0\n",
"Status: install ok installed\n",
"Multi-Arch: same\n",
"Priority: important\n",
"Section: libs\n",
"Installed-Size: 2836\n",
"Maintainer: Ubuntu Developers <[email protected]>\n",
"Architecture: amd64\n",
"Source: openssl\n",
"Version: 1.0.1-4ubuntu5.5\n",
"Depends: libc6 (>= 2.14), zlib1g (>= 1:1.1.4), debconf (>= 0.5) | libquadmath0\n",
"Pre-Depends: multiarch-support\n",
"Breaks: openssh-client (<< 1:5.9p1-4), openssh-server (<< 1:5.9p1-4)\n",
"Description: SSL shared libraries\n",
" libssl and libcrypto shared libraries needed by programs like\n",
" apache-ssl, telnet-ssl and openssh.\n",
" .\n",
" It is part of the OpenSSL implementation of SSL.\n",
"Original-Maintainer: Debian OpenSSL Team <[email protected]>\n",
"\n"]
|
py | 1a50d2fef4678bf09c58845ae424e117ad3b7df1 | from .position import Position
class Portfolio(object):
def __init__(self, price_handler, cash):
"""
On creation, the Portfolio object contains no
positions and all values are "reset" to the initial
cash, with no PnL - realised or unrealised.
Note that realised_pnl is the running tally pnl from closed
positions (closed_pnl), as well as realised_pnl
from currently open positions.
"""
self.price_handler = price_handler
self.init_cash = cash
self.equity = cash
self.cur_cash = cash
self.positions = {}
self.closed_positions = []
self.realised_pnl = 0
def _update_portfolio(self):
"""
Updates the value of all positions that are currently open.
Value of closed positions is tallied as self.realised_pnl.
"""
self.unrealised_pnl = 0
self.equity = self.realised_pnl
self.equity += self.init_cash
for ticker in self.positions:
pt = self.positions[ticker]
if self.price_handler.istick():
bid, ask = self.price_handler.get_best_bid_ask(ticker)
else:
close_price = self.price_handler.get_last_close(ticker)
bid = close_price
ask = close_price
pt.update_market_value(bid, ask)
self.unrealised_pnl += pt.unrealised_pnl
pnl_diff = pt.realised_pnl - pt.unrealised_pnl
self.equity += (
pt.market_value - pt.cost_basis + pnl_diff
)
def _add_position(
self, action, ticker,
quantity, price, commission
):
"""
Adds a new Position object to the Portfolio. This
requires getting the best bid/ask price from the
price handler in order to calculate a reasonable
"market value".
Once the Position is added, the Portfolio values
are updated.
"""
if ticker not in self.positions:
if self.price_handler.istick():
bid, ask = self.price_handler.get_best_bid_ask(ticker)
else:
close_price = self.price_handler.get_last_close(ticker)
bid = close_price
ask = close_price
position = Position(
action, ticker, quantity,
price, commission, bid, ask
)
self.positions[ticker] = position
self._update_portfolio()
else:
print(
"Ticker %s is already in the positions list. "
"Could not add a new position." % ticker
)
def _modify_position(
self, action, ticker,
quantity, price, commission
):
"""
Modifies a current Position object to the Portfolio.
This requires getting the best bid/ask price from the
price handler in order to calculate a reasonable
"market value".
Once the Position is modified, the Portfolio values
are updated.
"""
if ticker in self.positions:
self.positions[ticker].transact_shares(
action, quantity, price, commission
)
if self.price_handler.istick():
bid, ask = self.price_handler.get_best_bid_ask(ticker)
else:
close_price = self.price_handler.get_last_close(ticker)
bid = close_price
ask = close_price
self.positions[ticker].update_market_value(bid, ask)
if self.positions[ticker].quantity == 0:
closed = self.positions.pop(ticker)
self.realised_pnl += closed.realised_pnl
self.closed_positions.append(closed)
self._update_portfolio()
else:
print(
"Ticker %s not in the current position list. "
"Could not modify a current position." % ticker
)
def transact_position(
self, action, ticker,
quantity, price, commission
):
"""
Handles any new position or modification to
a current position, by calling the respective
_add_position and _modify_position methods.
Hence, this single method will be called by the
PortfolioHandler to update the Portfolio itself.
"""
if action == "BOT":
self.cur_cash -= ((quantity * price) + commission)
elif action == "SLD":
self.cur_cash += ((quantity * price) - commission)
if ticker not in self.positions:
self._add_position(
action, ticker, quantity,
price, commission
)
else:
self._modify_position(
action, ticker, quantity,
price, commission
)
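# --- Hedged usage sketch (not part of the original module) ---
# Assumes a minimal price handler exposing the two methods Portfolio uses:
# istick() and get_last_close(ticker). Names below are illustrative only, and
# the relative import of Position above means this file isn't directly runnable.
if __name__ == "__main__":
    class _StubPriceHandler(object):
        def istick(self):
            return False  # pretend we only have bar (close) data
        def get_last_close(self, ticker):
            return 100.0  # fixed close price, enough for the sketch
    pf = Portfolio(_StubPriceHandler(), cash=100000)
    pf.transact_position("BOT", "ABC", 50, 100.0, 1.0)   # buy 50 @ 100
    pf.transact_position("SLD", "ABC", 50, 100.0, 1.0)   # flat again
    print(pf.cur_cash, pf.realised_pnl, len(pf.closed_positions))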
|
py | 1a50d394b373afd3869c0602916bfe1d237c1ea3 | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
#
# Based on:
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
"""blob helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from six.moves import cPickle as pickle
import numpy as np
import cv2
from core.config import cfg
def get_image_blob(image, target_scale, target_max_size):
"""Convert an image into a network input.
Arguments:
im (ndarray): a color image in BGR order
Returns:
blob (ndarray): a data blob holding an image pyramid
im_scale (float): image scale (target size) / (original size)
im_info (ndarray)
"""
#TODO: choose suitable pixel_means for DEEPLESION data, see also roi_data/minibatch.py:_get_image_blob
if cfg.LESION.LESION_ENABLED:
if cfg.LESION.USE_3DCE or cfg.LESION.MULTI_MODALITY:
pixel_means = np.tile(np.array([100]), cfg.LESION.NUM_IMAGES_3DCE * 3)
else:
pixel_means = np.tile(np.array([100]), cfg.LESION.SLICE_NUM)
else:
pixel_means = cfg.PIXEL_MEANS
if isinstance(image, list):
im = image[0]
other_im = image[1]
processed_im = []
im, im_scale = prep_im_for_blob(
im, pixel_means, [target_scale], target_max_size, None)
other_im, other_im_scale = prep_im_for_blob(
other_im, pixel_means, [target_scale], target_max_size, None)
processed_im.append(im[0])
processed_im.append(other_im[0])
else:
processed_im, im_scale = prep_im_for_blob(
image, pixel_means, [target_scale], target_max_size, None
)
# Note: processed_im might have different shape with blob. blob might be larger than
# processed_im, or max_size
blob = im_list_to_blob(processed_im)
# NOTE: this height and width may be larger than actual scaled input image
# due to the FPN.COARSEST_STRIDE related padding in im_list_to_blob. We are
# maintaining this behavior for now to make existing results exactly
# reproducible (in practice using the true input image height and width
# yields nearly the same results, but they are sometimes slightly different
# because predictions near the edge of the image will be pruned more
# aggressively).
# N,C,H,W for 2D input; N,C,D,H,W for 3D input.
if cfg.LESION.USE_3D_INPUT:
height, width = blob.shape[3], blob.shape[4]
else:
height, width = blob.shape[2], blob.shape[3]
im_info = np.hstack((height, width, im_scale))[np.newaxis, :]
return blob, im_scale, im_info.astype(np.float32)
def im_list_to_blob(ims):
"""Convert a list of images into a network input. Assumes images were
prepared using prep_im_for_blob or equivalent: i.e.
- BGR channel order
- pixel means subtracted
- resized to the desired input size
- float32 numpy ndarray format
    - H,W,C for 2D input, H,W,D for 3D input
Output: 4D N,C,H,W for 2D input (5D N,C,D,H,W for 3D input).
"""
if not isinstance(ims, list):
ims = [ims]
num_images = len(ims)
if cfg.LESION.USE_3D_INPUT:
# transform 3D Lesion data(H,W,D) to (N,C,D,H,W).
max_shape = get_3d_max_shape([im.shape for im in ims])
# depth axis is not padded.
blob = np.zeros(
(num_images, 1,max_shape[0], max_shape[1], ims[0].shape[2]), dtype=np.float32)
for i in range(num_images):
im = ims[i]
blob[i, 0, 0:im.shape[0], 0:im.shape[1], :im.shape[2]] = im
channel_swap = (0, 1, 4, 2, 3)
# Axis order will become: (n, c, d, h, w), eg. (1,1,9,800,800) for 9 slices
blob = blob.transpose(channel_swap)
else:
max_shape = get_max_shape([im.shape[:2] for im in ims])
if cfg.LESION.LESION_ENABLED:
if cfg.LESION.USE_3DCE or cfg.LESION.MULTI_MODALITY:
blob = np.zeros((num_images, max_shape[0], max_shape[1], cfg.LESION.NUM_IMAGES_3DCE * 3), dtype=np.float32)
else:
blob = np.zeros((num_images, max_shape[0], max_shape[1], cfg.LESION.SLICE_NUM), dtype=np.float32)
else:
blob = np.zeros(
(num_images, max_shape[0], max_shape[1], 3), dtype=np.float32)
for i in range(num_images):
im = ims[i]
blob[i, 0:im.shape[0], 0:im.shape[1], :] = im
# Move channels (axis 3) to axis 1
# Axis order will become: (batch elem, channel, height, width)
channel_swap = (0, 3, 1, 2)
blob = blob.transpose(channel_swap)
return blob
def get_3d_max_shape(im_shapes):
"""
Calculate max spatial size for batching given a list of image shapes.
    Note that this function is called twice while handling one batch:
    first in blob.get_minibatch() (H,W,D order), then in
    loader.collate_minibatch() (D,H,W order). Depth padding should be ignored.
"""
max_shape = np.array(im_shapes).max(axis=0)
assert max_shape.size == 3
# Pad the image so they can be divisible by a stride
if cfg.FPN.FPN_ON:
stride = float(cfg.FPN.COARSEST_STRIDE)
max_shape[0] = int(np.ceil(max_shape[0] / stride) * stride)
max_shape[1] = int(np.ceil(max_shape[1] / stride) * stride)
max_shape[2] = int(np.ceil(max_shape[2] / stride) * stride)
return max_shape
def get_max_shape(im_shapes):
"""Calculate max spatial size (h, w) for batching given a list of image shapes
"""
max_shape = np.array(im_shapes).max(axis=0)
assert max_shape.size == 2
# Pad the image so they can be divisible by a stride
if cfg.FPN.FPN_ON:
stride = float(cfg.FPN.COARSEST_STRIDE)
max_shape[0] = int(np.ceil(max_shape[0] / stride) * stride)
max_shape[1] = int(np.ceil(max_shape[1] / stride) * stride)
return max_shape
def prep_im_for_blob(im, pixel_means, target_sizes, max_size, transform_cv=None):
"""Prepare an image for use as a network input blob. Specially:
- Subtract per-channel pixel mean
- Convert to float32
- Rescale to each of the specified target size (capped at max_size)
Returns a list of transformed images, one for each target size. Also returns
the scale factors that were used to compute each returned image.
"""
if transform_cv != None:
im = transform_cv(im)
im = im.astype(np.float32, copy=False)
im -= pixel_means
im_shape = im.shape
im_size_min = np.min(im_shape[0:2])
im_size_max = np.max(im_shape[0:2])
ims = []
im_scales = []
for target_size in target_sizes:
im_scale = get_target_scale(im_size_min, im_size_max, target_size, max_size)
im_resized = cv2.resize(im, None, None, fx=im_scale, fy=im_scale,
interpolation=cv2.INTER_LINEAR)
ims.append(im_resized)
im_scales.append(im_scale)
return ims, im_scales
def get_im_blob_sizes(im_shape, target_sizes, max_size):
"""Calculate im blob size for multiple target_sizes given original im shape
"""
im_size_min = np.min(im_shape)
im_size_max = np.max(im_shape)
im_sizes = []
for target_size in target_sizes:
im_scale = get_target_scale(im_size_min, im_size_max, target_size, max_size)
im_sizes.append(np.round(im_shape * im_scale))
return np.array(im_sizes)
def get_target_scale(im_size_min, im_size_max, target_size, max_size):
"""Calculate target resize scale
"""
im_scale = float(target_size) / float(im_size_min)
# Prevent the biggest axis from being more than max_size
if np.round(im_scale * im_size_max) > max_size:
im_scale = float(max_size) / float(im_size_max)
return im_scale
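# Worked example (illustrative) of the scale logic in get_target_scale():
#   a 600x1000 image with target_size=800, max_size=1333:
#     im_scale = 800 / 600 = 1.333..., round(1.333... * 1000) = 1333 <= 1333,
#     so the target-driven scale is kept.
#   a 600x1200 image with the same settings:
#     round(1.333... * 1200) = 1600 > 1333, so the scale is capped at
#     im_scale = 1333 / 1200 = 1.1108...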
def zeros(shape, int32=False):
"""Return a blob of all zeros of the given shape with the correct float or
int data type.
"""
return np.zeros(shape, dtype=np.int32 if int32 else np.float32)
def ones(shape, int32=False):
"""Return a blob of all ones of the given shape with the correct float or
int data type.
"""
return np.ones(shape, dtype=np.int32 if int32 else np.float32)
def serialize(obj):
"""Serialize a Python object using pickle and encode it as an array of
    float32 values so that it can be fed into the workspace. See deserialize().
"""
    return np.frombuffer(pickle.dumps(obj), dtype=np.uint8).astype(np.float32)
def deserialize(arr):
"""Unserialize a Python object from an array of float32 values fetched from
a workspace. See serialize().
"""
return pickle.loads(arr.astype(np.uint8).tobytes())
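# Round-trip sketch (illustrative): serialize() yields a float32 view of the
# pickled bytes and deserialize() recovers the original object, e.g.
#   arr = serialize({"k": 1})
#   assert deserialize(arr) == {"k": 1}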
|
py | 1a50d4fd50c5209a4d5d14038b8a8ddd004eaede | # -*- coding: utf-8 -*-
"""Interface for running Python functions as subprocess-mode commands.
Code for several helper methods in the `ProcProxy` class have been reproduced
without modification from `subprocess.py` in the Python 3.4.2 standard library.
The contents of `subprocess.py` (and, thus, the reproduced methods) are
Copyright (c) 2003-2005 by Peter Astrand <[email protected]> and were
licensed to the Python Software foundation under a Contributor Agreement.
"""
import io
import os
import re
import sys
import time
import queue
import array
import ctypes
import signal
import inspect
import builtins
import functools
import threading
import subprocess
import collections.abc as cabc
from xonsh.platform import (
ON_WINDOWS,
ON_POSIX,
ON_MSYS,
ON_CYGWIN,
CAN_RESIZE_WINDOW,
LFLAG,
CC,
)
from xonsh.tools import (
redirect_stdout,
redirect_stderr,
print_exception,
XonshCalledProcessError,
findfirst,
on_main_thread,
XonshError,
format_std_prepost,
)
from xonsh.lazyasd import lazyobject, LazyObject
from xonsh.jobs import wait_for_active_job, give_terminal_to, _continue
from xonsh.lazyimps import fcntl, termios, _winapi, msvcrt, winutils
# these decorators are imported for users back-compatible
from xonsh.tools import unthreadable, uncapturable # NOQA
# foreground has been deprecated
foreground = unthreadable
@lazyobject
def STDOUT_CAPTURE_KINDS():
return frozenset(["stdout", "object"])
# The following escape codes are xterm codes.
# See http://rtfm.etla.org/xterm/ctlseq.html for more.
MODE_NUMS = ("1049", "47", "1047")
START_ALTERNATE_MODE = LazyObject(
lambda: frozenset("\x1b[?{0}h".format(i).encode() for i in MODE_NUMS),
globals(),
"START_ALTERNATE_MODE",
)
END_ALTERNATE_MODE = LazyObject(
lambda: frozenset("\x1b[?{0}l".format(i).encode() for i in MODE_NUMS),
globals(),
"END_ALTERNATE_MODE",
)
ALTERNATE_MODE_FLAGS = LazyObject(
lambda: tuple(START_ALTERNATE_MODE) + tuple(END_ALTERNATE_MODE),
globals(),
"ALTERNATE_MODE_FLAGS",
)
RE_HIDDEN_BYTES = LazyObject(
    lambda: re.compile(b"(\001.*?\002)"), globals(), "RE_HIDDEN_BYTES"
)
@lazyobject
def RE_VT100_ESCAPE():
return re.compile(b"(\x9B|\x1B\\[)[0-?]*[ -\\/]*[@-~]")
@lazyobject
def RE_HIDE_ESCAPE():
return re.compile(
b"(" + RE_HIDDEN_BYTES.pattern + b"|" + RE_VT100_ESCAPE.pattern + b")"
)
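# Hedged example of what RE_HIDE_ESCAPE removes from captured output:
#   RE_HIDE_ESCAPE.sub(b"", b"\x1b[31mred\x1b[0m")   -> b"red"   (VT100 escape)
#   RE_HIDE_ESCAPE.sub(b"", b"\x01hidden\x02text")   -> b"text"  (\001...\002)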
class QueueReader:
"""Provides a file-like interface to reading from a queue."""
def __init__(self, fd, timeout=None):
"""
Parameters
----------
fd : int
A file descriptor
timeout : float or None, optional
The queue reading timeout.
"""
self.fd = fd
self.timeout = timeout
self.closed = False
self.queue = queue.Queue()
self.thread = None
def close(self):
"""close the reader"""
self.closed = True
def is_fully_read(self):
"""Returns whether or not the queue is fully read and the reader is
closed.
"""
return (
self.closed
and (self.thread is None or not self.thread.is_alive())
and self.queue.empty()
)
def read_queue(self):
"""Reads a single chunk from the queue. This is blocking if
the timeout is None and non-blocking otherwise.
"""
try:
return self.queue.get(block=True, timeout=self.timeout)
except queue.Empty:
return b""
def read(self, size=-1):
"""Reads bytes from the file."""
i = 0
buf = b""
while size < 0 or i != size:
line = self.read_queue()
if line:
buf += line
else:
break
i += len(line)
return buf
def readline(self, size=-1):
"""Reads a line, or a partial line from the file descriptor."""
i = 0
nl = b"\n"
buf = b""
while size < 0 or i != size:
line = self.read_queue()
if line:
buf += line
if line.endswith(nl):
break
else:
break
i += len(line)
return buf
def _read_all_lines(self):
"""This reads all remaining lines in a blocking fashion."""
lines = []
while not self.is_fully_read():
chunk = self.read_queue()
lines.extend(chunk.splitlines(keepends=True))
return lines
def readlines(self, hint=-1):
"""Reads lines from the file descriptor. This is blocking for negative
hints (i.e. read all the remaining lines) and non-blocking otherwise.
"""
if hint == -1:
return self._read_all_lines()
lines = []
while len(lines) != hint:
chunk = self.read_queue()
if not chunk:
break
lines.extend(chunk.splitlines(keepends=True))
return lines
def fileno(self):
"""Returns the file descriptor number."""
return self.fd
@staticmethod
def readable():
"""Returns true, because this object is always readable."""
return True
def iterqueue(self):
"""Iterates through all remaining chunks in a blocking fashion."""
while not self.is_fully_read():
chunk = self.read_queue()
if not chunk:
continue
yield chunk
def populate_fd_queue(reader, fd, queue):
"""Reads 1 kb of data from a file descriptor into a queue.
If this ends or fails, it flags the calling reader object as closed.
"""
while True:
try:
c = os.read(fd, 1024)
except OSError:
reader.closed = True
break
if c:
queue.put(c)
else:
reader.closed = True
break
class NonBlockingFDReader(QueueReader):
"""A class for reading characters from a file descriptor on a background
thread. This has the advantages that the calling thread can close the
file and that the reading does not block the calling thread.
"""
def __init__(self, fd, timeout=None):
"""
Parameters
----------
fd : int
A file descriptor
timeout : float or None, optional
The queue reading timeout.
"""
super().__init__(fd, timeout=timeout)
# start reading from stream
self.thread = threading.Thread(
target=populate_fd_queue, args=(self, self.fd, self.queue)
)
self.thread.daemon = True
self.thread.start()
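# Usage sketch for NonBlockingFDReader (illustrative, POSIX-only assumptions):
#   r, w = os.pipe()
#   reader = NonBlockingFDReader(r, timeout=0.01)
#   os.write(w, b"hello\n")
#   os.close(w)            # EOF makes the background thread flag it closed
#   reader.readline()      # -> b"hello\n" once the thread has enqueued it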
def populate_buffer(reader, fd, buffer, chunksize):
"""Reads bytes from the file descriptor and copies them into a buffer.
    The reads happen in parallel using the pread() syscall, which is only
    available on POSIX systems. If the read fails for any reason, the reader is
flagged as closed.
"""
offset = 0
while True:
try:
buf = os.pread(fd, chunksize, offset)
except OSError:
reader.closed = True
break
if buf:
buffer.write(buf)
offset += len(buf)
else:
reader.closed = True
break
class BufferedFDParallelReader:
"""Buffered, parallel background thread reader."""
def __init__(self, fd, buffer=None, chunksize=1024):
"""
Parameters
----------
fd : int
File descriptor from which to read.
buffer : binary file-like or None, optional
A buffer to write bytes into. If None, a new BytesIO object
is created.
chunksize : int, optional
The max size of the parallel reads, default 1 kb.
"""
self.fd = fd
self.buffer = io.BytesIO() if buffer is None else buffer
self.chunksize = chunksize
self.closed = False
# start reading from stream
self.thread = threading.Thread(
target=populate_buffer, args=(self, fd, self.buffer, chunksize)
)
self.thread.daemon = True
self.thread.start()
def _expand_console_buffer(cols, max_offset, expandsize, orig_posize, fd):
# if we are getting close to the end of the console buffer,
# expand it so that we can read from it successfully.
if cols == 0:
return orig_posize[-1], max_offset, orig_posize
rows = ((max_offset + expandsize) // cols) + 1
winutils.set_console_screen_buffer_size(cols, rows, fd=fd)
orig_posize = orig_posize[:3] + (rows,)
max_offset = (rows - 1) * cols
return rows, max_offset, orig_posize
def populate_console(reader, fd, buffer, chunksize, queue, expandsize=None):
"""Reads bytes from the file descriptor and puts lines into the queue.
    The reads happen in parallel,
    using xonsh.winutils.read_console_output_character(),
    and are thus only available on Windows. If the read fails for any reason,
the reader is flagged as closed.
"""
# OK, so this function is super annoying because Windows stores its
# buffers as a 2D regular, dense array -- without trailing newlines.
# Meanwhile, we want to add *lines* to the queue. Also, as is typical
# with parallel reads, the entire buffer that you ask for may not be
# filled. Thus we have to deal with the full generality.
# 1. reads may end in the middle of a line
# 2. excess whitespace at the end of a line may not be real, unless
# 3. you haven't read to the end of the line yet!
# So there are alignment issues everywhere. Also, Windows will automatically
# read past the current cursor position, even though there is presumably
# nothing to see there.
#
# These chunked reads basically need to happen like this because,
# a. The default buffer size is HUGE for the console (90k lines x 120 cols)
    #    and so we can't just read in everything at the end and see what we
# care about without a noticeable performance hit.
# b. Even with this huge size, it is still possible to write more lines than
# this, so we should scroll along with the console.
# Unfortunately, because we do not have control over the terminal emulator,
    # it is not possible to compute how far back we should set the beginning
# read position because we don't know how many characters have been popped
# off the top of the buffer. If we did somehow know this number we could do
# something like the following:
#
# new_offset = (y*cols) + x
# if new_offset == max_offset:
# new_offset -= scrolled_offset
# x = new_offset%cols
# y = new_offset//cols
# continue
#
# So this method is imperfect and only works as long as the screen has
# room to expand to. Thus the trick here is to expand the screen size
# when we get close enough to the end of the screen. There remain some
# async issues related to not being able to set the cursor position.
# but they just affect the alignment / capture of the output of the
# first command run after a screen resize.
if expandsize is None:
expandsize = 100 * chunksize
x, y, cols, rows = posize = winutils.get_position_size(fd)
pre_x = pre_y = -1
orig_posize = posize
offset = (cols * y) + x
max_offset = (rows - 1) * cols
# I believe that there is a bug in PTK that if we reset the
# cursor position, the cursor on the next prompt is accidentally on
# the next line. If this is fixed, uncomment the following line.
# if max_offset < offset + expandsize:
# rows, max_offset, orig_posize = _expand_console_buffer(
# cols, max_offset, expandsize,
# orig_posize, fd)
# winutils.set_console_cursor_position(x, y, fd=fd)
while True:
posize = winutils.get_position_size(fd)
offset = (cols * y) + x
if ((posize[1], posize[0]) <= (y, x) and posize[2:] == (cols, rows)) or (
pre_x == x and pre_y == y
):
# already at or ahead of the current cursor position.
if reader.closed:
break
else:
time.sleep(reader.timeout)
continue
elif max_offset <= offset + expandsize:
ecb = _expand_console_buffer(cols, max_offset, expandsize, orig_posize, fd)
rows, max_offset, orig_posize = ecb
continue
elif posize[2:] == (cols, rows):
# cursor updated but screen size is the same.
pass
else:
# screen size changed, which is offset preserving
orig_posize = posize
cols, rows = posize[2:]
x = offset % cols
y = offset // cols
pre_x = pre_y = -1
max_offset = (rows - 1) * cols
continue
try:
buf = winutils.read_console_output_character(
x=x, y=y, fd=fd, buf=buffer, bufsize=chunksize, raw=True
)
except (OSError, IOError):
reader.closed = True
break
# cursor position and offset
if not reader.closed:
buf = buf.rstrip()
nread = len(buf)
if nread == 0:
time.sleep(reader.timeout)
continue
cur_x, cur_y = posize[0], posize[1]
cur_offset = (cols * cur_y) + cur_x
beg_offset = (cols * y) + x
end_offset = beg_offset + nread
if end_offset > cur_offset and cur_offset != max_offset:
buf = buf[: cur_offset - end_offset]
# convert to lines
xshift = cols - x
yshift = (nread // cols) + (1 if nread % cols > 0 else 0)
lines = [buf[:xshift]]
lines += [
buf[l * cols + xshift : (l + 1) * cols + xshift] for l in range(yshift)
]
lines = [line for line in lines if line]
if not lines:
time.sleep(reader.timeout)
continue
# put lines in the queue
nl = b"\n"
for line in lines[:-1]:
queue.put(line.rstrip() + nl)
if len(lines[-1]) == xshift:
queue.put(lines[-1].rstrip() + nl)
else:
queue.put(lines[-1])
# update x and y locations
if (beg_offset + len(buf)) % cols == 0:
new_offset = beg_offset + len(buf)
else:
new_offset = beg_offset + len(buf.rstrip())
pre_x = x
pre_y = y
x = new_offset % cols
y = new_offset // cols
time.sleep(reader.timeout)
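# Worked example (illustrative) of the chunk-to-lines arithmetic above:
# with cols=80, x=5 and nread=200 freshly read characters,
#   xshift = 80 - 5 = 75            (rest of the current row)
#   yshift = 200 // 80 + 1 = 3      (number of additional row slices)
# so buf is split into slices of length 75, 80, and 45, matching how the
# 200 characters wrap across the console rows.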
class ConsoleParallelReader(QueueReader):
"""Parallel reader for consoles that runs in a background thread.
This is only needed, available, and useful on Windows.
"""
def __init__(self, fd, buffer=None, chunksize=1024, timeout=None):
"""
Parameters
----------
fd : int
Standard buffer file descriptor, 0 for stdin, 1 for stdout (default),
and 2 for stderr.
buffer : ctypes.c_wchar_p, optional
An existing buffer to (re-)use.
chunksize : int, optional
The max size of the parallel reads, default 1 kb.
timeout : float, optional
The queue reading timeout.
"""
timeout = timeout or builtins.__xonsh__.env.get("XONSH_PROC_FREQUENCY")
super().__init__(fd, timeout=timeout)
self._buffer = buffer # this cannot be public
if buffer is None:
self._buffer = ctypes.c_char_p(b" " * chunksize)
self.chunksize = chunksize
# start reading from stream
self.thread = threading.Thread(
target=populate_console,
args=(self, fd, self._buffer, chunksize, self.queue),
)
self.thread.daemon = True
self.thread.start()
def safe_fdclose(handle, cache=None):
"""Closes a file handle in the safest way possible, and potentially
storing the result.
"""
if cache is not None and cache.get(handle, False):
return
status = True
if handle is None:
pass
elif isinstance(handle, int):
if handle >= 3:
# don't close stdin, stdout, stderr, -1
try:
os.close(handle)
except OSError:
status = False
elif handle is sys.stdin or handle is sys.stdout or handle is sys.stderr:
# don't close stdin, stdout, or stderr
pass
else:
try:
handle.close()
except OSError:
status = False
if cache is not None:
cache[handle] = status
def safe_flush(handle):
"""Attempts to safely flush a file handle, returns success bool."""
status = True
try:
handle.flush()
except OSError:
status = False
return status
def still_writable(fd):
"""Determines whether a file descriptor is still writable by trying to
write an empty string and seeing if it fails.
"""
try:
os.write(fd, b"")
status = True
except OSError:
status = False
return status
class PopenThread(threading.Thread):
"""A thread for running and managing subprocess. This allows reading
from the stdin, stdout, and stderr streams in a non-blocking fashion.
This takes the same arguments and keyword arguments as regular Popen.
    This requires the captured_stdout and captured_stderr attributes
to be set following instantiation.
"""
def __init__(self, *args, stdin=None, stdout=None, stderr=None, **kwargs):
super().__init__()
self.lock = threading.RLock()
env = builtins.__xonsh__.env
# stdin setup
self.orig_stdin = stdin
if stdin is None:
self.stdin_fd = 0
elif isinstance(stdin, int):
self.stdin_fd = stdin
else:
self.stdin_fd = stdin.fileno()
self.store_stdin = env.get("XONSH_STORE_STDIN")
self.timeout = env.get("XONSH_PROC_FREQUENCY")
self.in_alt_mode = False
self.stdin_mode = None
# stdout setup
self.orig_stdout = stdout
self.stdout_fd = 1 if stdout is None else stdout.fileno()
self._set_pty_size()
# stderr setup
self.orig_stderr = stderr
        # Set some signal handlers, if we can. Must come before process
# is started to prevent deadlock on windows
self.proc = None # has to be here for closure for handles
self.old_int_handler = self.old_winch_handler = None
self.old_tstp_handler = self.old_quit_handler = None
if on_main_thread():
self.old_int_handler = signal.signal(signal.SIGINT, self._signal_int)
if ON_POSIX:
self.old_tstp_handler = signal.signal(signal.SIGTSTP, self._signal_tstp)
self.old_quit_handler = signal.signal(signal.SIGQUIT, self._signal_quit)
if CAN_RESIZE_WINDOW:
self.old_winch_handler = signal.signal(
signal.SIGWINCH, self._signal_winch
)
# start up process
if ON_WINDOWS and stdout is not None:
os.set_inheritable(stdout.fileno(), False)
try:
self.proc = proc = subprocess.Popen(
*args, stdin=stdin, stdout=stdout, stderr=stderr, **kwargs
)
except Exception:
self._clean_up()
raise
self.pid = proc.pid
self.universal_newlines = uninew = proc.universal_newlines
if uninew:
self.encoding = enc = env.get("XONSH_ENCODING")
self.encoding_errors = err = env.get("XONSH_ENCODING_ERRORS")
self.stdin = io.BytesIO() # stdin is always bytes!
self.stdout = io.TextIOWrapper(io.BytesIO(), encoding=enc, errors=err)
self.stderr = io.TextIOWrapper(io.BytesIO(), encoding=enc, errors=err)
else:
self.encoding = self.encoding_errors = None
self.stdin = io.BytesIO()
self.stdout = io.BytesIO()
self.stderr = io.BytesIO()
self.suspended = False
self.prevs_are_closed = False
self.start()
def run(self):
"""Runs the subprocess by performing a parallel read on stdin if allowed,
and copying bytes from captured_stdout to stdout and bytes from
captured_stderr to stderr.
"""
proc = self.proc
spec = self._wait_and_getattr("spec")
# get stdin and apply parallel reader if needed.
stdin = self.stdin
if self.orig_stdin is None:
origin = None
elif ON_POSIX and self.store_stdin:
origin = self.orig_stdin
origfd = origin if isinstance(origin, int) else origin.fileno()
origin = BufferedFDParallelReader(origfd, buffer=stdin)
else:
origin = None
# get non-blocking stdout
stdout = self.stdout.buffer if self.universal_newlines else self.stdout
capout = spec.captured_stdout
if capout is None:
procout = None
else:
procout = NonBlockingFDReader(capout.fileno(), timeout=self.timeout)
# get non-blocking stderr
stderr = self.stderr.buffer if self.universal_newlines else self.stderr
caperr = spec.captured_stderr
if caperr is None:
procerr = None
else:
procerr = NonBlockingFDReader(caperr.fileno(), timeout=self.timeout)
# initial read from buffer
self._read_write(procout, stdout, sys.__stdout__)
self._read_write(procerr, stderr, sys.__stderr__)
# loop over reads while process is running.
i = j = cnt = 1
while proc.poll() is None:
# this is here for CPU performance reasons.
if i + j == 0:
cnt = min(cnt + 1, 1000)
tout = self.timeout * cnt
if procout is not None:
procout.timeout = tout
if procerr is not None:
procerr.timeout = tout
elif cnt == 1:
pass
else:
cnt = 1
if procout is not None:
procout.timeout = self.timeout
if procerr is not None:
procerr.timeout = self.timeout
# redirect some output!
i = self._read_write(procout, stdout, sys.__stdout__)
j = self._read_write(procerr, stderr, sys.__stderr__)
if self.suspended:
break
if self.suspended:
return
# close files to send EOF to non-blocking reader.
# capout & caperr seem to be needed only by Windows, while
        # orig_stdout & orig_stderr are needed by POSIX and Windows.
# Also, order seems to matter here,
# with orig_* needed to be closed before cap*
safe_fdclose(self.orig_stdout)
safe_fdclose(self.orig_stderr)
if ON_WINDOWS:
safe_fdclose(capout)
safe_fdclose(caperr)
# read in the remaining data in a blocking fashion.
while (procout is not None and not procout.is_fully_read()) or (
procerr is not None and not procerr.is_fully_read()
):
self._read_write(procout, stdout, sys.__stdout__)
self._read_write(procerr, stderr, sys.__stderr__)
# kill the process if it is still alive. Happens when piping.
if proc.poll() is None:
proc.terminate()
def _wait_and_getattr(self, name):
"""make sure the instance has a certain attr, and return it."""
while not hasattr(self, name):
time.sleep(1e-7)
return getattr(self, name)
def _read_write(self, reader, writer, stdbuf):
"""Reads a chunk of bytes from a buffer and write into memory or back
down to the standard buffer, as appropriate. Returns the number of
successful reads.
"""
if reader is None:
return 0
i = -1
for i, chunk in enumerate(iter(reader.read_queue, b"")):
self._alt_mode_switch(chunk, writer, stdbuf)
if i >= 0:
writer.flush()
stdbuf.flush()
return i + 1
def _alt_mode_switch(self, chunk, membuf, stdbuf):
"""Enables recursively switching between normal capturing mode
and 'alt' mode, which passes through values to the standard
buffer. Pagers, text editors, curses applications, etc. use
alternate mode.
"""
i, flag = findfirst(chunk, ALTERNATE_MODE_FLAGS)
if flag is None:
self._alt_mode_writer(chunk, membuf, stdbuf)
else:
# This code is executed when the child process switches the
# terminal into or out of alternate mode. The line below assumes
            # that the user has opened vim, less, or similar, and writes to stdin.
j = i + len(flag)
# write the first part of the chunk in the current mode.
self._alt_mode_writer(chunk[:i], membuf, stdbuf)
# switch modes
            # write the flag itself in the current mode while alt mode is on
# so that it is streamed to the terminal ASAP.
# this is needed for terminal emulators to find the correct
# positions before and after alt mode.
alt_mode = flag in START_ALTERNATE_MODE
if alt_mode:
self.in_alt_mode = alt_mode
self._alt_mode_writer(flag, membuf, stdbuf)
self._enable_cbreak_stdin()
else:
self._alt_mode_writer(flag, membuf, stdbuf)
self.in_alt_mode = alt_mode
self._disable_cbreak_stdin()
# recurse this function, but without the current flag.
self._alt_mode_switch(chunk[j:], membuf, stdbuf)
def _alt_mode_writer(self, chunk, membuf, stdbuf):
"""Write bytes to the standard buffer if in alt mode or otherwise
to the in-memory buffer.
"""
if not chunk:
pass # don't write empty values
elif self.in_alt_mode:
stdbuf.buffer.write(chunk)
else:
with self.lock:
p = membuf.tell()
membuf.seek(0, io.SEEK_END)
membuf.write(chunk)
membuf.seek(p)
#
# Window resize handlers
#
def _signal_winch(self, signum, frame):
"""Signal handler for SIGWINCH - window size has changed."""
self.send_signal(signal.SIGWINCH)
self._set_pty_size()
def _set_pty_size(self):
"""Sets the window size of the child pty based on the window size of
our own controlling terminal.
"""
if ON_WINDOWS or not os.isatty(self.stdout_fd):
return
# Get the terminal size of the real terminal, set it on the
# pseudoterminal.
buf = array.array("h", [0, 0, 0, 0])
# 1 = stdout here
try:
fcntl.ioctl(1, termios.TIOCGWINSZ, buf, True)
fcntl.ioctl(self.stdout_fd, termios.TIOCSWINSZ, buf)
except OSError:
pass
#
# SIGINT handler
#
def _signal_int(self, signum, frame):
"""Signal handler for SIGINT - Ctrl+C may have been pressed."""
self.send_signal(signum)
if self.proc is not None and self.proc.poll() is not None:
self._restore_sigint(frame=frame)
if on_main_thread():
signal.pthread_kill(threading.get_ident(), signal.SIGINT)
def _restore_sigint(self, frame=None):
old = self.old_int_handler
if old is not None:
if on_main_thread():
signal.signal(signal.SIGINT, old)
self.old_int_handler = None
if frame is not None:
self._disable_cbreak_stdin()
if old is not None and old is not self._signal_int:
old(signal.SIGINT, frame)
#
# SIGTSTP handler
#
def _signal_tstp(self, signum, frame):
"""Signal handler for suspending SIGTSTP - Ctrl+Z may have been pressed.
"""
self.suspended = True
self.send_signal(signum)
self._restore_sigtstp(frame=frame)
def _restore_sigtstp(self, frame=None):
old = self.old_tstp_handler
if old is not None:
if on_main_thread():
signal.signal(signal.SIGTSTP, old)
self.old_tstp_handler = None
if frame is not None:
self._disable_cbreak_stdin()
#
# SIGQUIT handler
#
def _signal_quit(self, signum, frame):
r"""Signal handler for quiting SIGQUIT - Ctrl+\ may have been pressed.
"""
self.send_signal(signum)
self._restore_sigquit(frame=frame)
def _restore_sigquit(self, frame=None):
old = self.old_quit_handler
if old is not None:
if on_main_thread():
signal.signal(signal.SIGQUIT, old)
self.old_quit_handler = None
if frame is not None:
self._disable_cbreak_stdin()
#
# cbreak mode handlers
#
def _enable_cbreak_stdin(self):
if not ON_POSIX:
return
try:
self.stdin_mode = termios.tcgetattr(self.stdin_fd)[:]
except termios.error:
# this can happen for cases where another process is controlling
# xonsh's tty device, such as in testing.
self.stdin_mode = None
return
new = self.stdin_mode[:]
new[LFLAG] &= ~(termios.ECHO | termios.ICANON)
new[CC][termios.VMIN] = 1
new[CC][termios.VTIME] = 0
try:
# termios.TCSAFLUSH may be less reliable than termios.TCSANOW
termios.tcsetattr(self.stdin_fd, termios.TCSANOW, new)
except termios.error:
self._disable_cbreak_stdin()
def _disable_cbreak_stdin(self):
if not ON_POSIX or self.stdin_mode is None:
return
new = self.stdin_mode[:]
new[LFLAG] |= termios.ECHO | termios.ICANON
new[CC][termios.VMIN] = 1
new[CC][termios.VTIME] = 0
try:
termios.tcsetattr(self.stdin_fd, termios.TCSANOW, new)
except termios.error:
pass
#
# Dispatch methods
#
def poll(self):
"""Dispatches to Popen.returncode."""
return self.proc.returncode
def wait(self, timeout=None):
"""Dispatches to Popen.wait(), but also does process cleanup such as
joining this thread and replacing the original window size signal
handler.
"""
self._disable_cbreak_stdin()
rtn = self.proc.wait(timeout=timeout)
self.join()
# need to replace the old signal handlers somewhere...
if self.old_winch_handler is not None and on_main_thread():
signal.signal(signal.SIGWINCH, self.old_winch_handler)
self.old_winch_handler = None
self._clean_up()
return rtn
def _clean_up(self):
self._restore_sigint()
self._restore_sigtstp()
self._restore_sigquit()
@property
def returncode(self):
"""Process return code."""
return self.proc.returncode
@returncode.setter
def returncode(self, value):
"""Process return code."""
self.proc.returncode = value
@property
def signal(self):
"""Process signal, or None."""
s = getattr(self.proc, "signal", None)
if s is None:
rtn = self.returncode
if rtn is not None and rtn != 0:
s = (-1 * rtn, rtn < 0 if ON_WINDOWS else os.WCOREDUMP(rtn))
return s
@signal.setter
def signal(self, value):
"""Process signal, or None."""
self.proc.signal = value
def send_signal(self, signal):
"""Dispatches to Popen.send_signal()."""
dt = 0.0
while self.proc is None and dt < self.timeout:
time.sleep(1e-7)
dt += 1e-7
if self.proc is None:
return
try:
rtn = self.proc.send_signal(signal)
except ProcessLookupError:
# This can happen in the case of !(cmd) when the command has ended
rtn = None
return rtn
def terminate(self):
"""Dispatches to Popen.terminate()."""
return self.proc.terminate()
def kill(self):
"""Dispatches to Popen.kill()."""
return self.proc.kill()
class Handle(int):
closed = False
def Close(self, CloseHandle=None):
CloseHandle = CloseHandle or _winapi.CloseHandle
if not self.closed:
self.closed = True
CloseHandle(self)
def Detach(self):
if not self.closed:
self.closed = True
return int(self)
raise ValueError("already closed")
def __repr__(self):
return "Handle(%d)" % int(self)
__del__ = Close
__str__ = __repr__
class FileThreadDispatcher:
"""Dispatches to different file handles depending on the
    current thread. Useful if you want file operations to go to different
places for different threads.
"""
def __init__(self, default=None):
"""
Parameters
----------
default : file-like or None, optional
The file handle to write to if a thread cannot be found in
            the registry. If None, a new in-memory instance is used.
Attributes
----------
registry : dict
Maps thread idents to file handles.
"""
if default is None:
default = io.TextIOWrapper(io.BytesIO())
self.default = default
self.registry = {}
def register(self, handle):
"""Registers a file handle for the current thread. Returns self so
that this method can be used in a with-statement.
"""
if handle is self:
            # prevent weird recursion errors
return self
self.registry[threading.get_ident()] = handle
return self
def deregister(self):
"""Removes the current thread from the registry."""
ident = threading.get_ident()
if ident in self.registry:
# don't remove if we have already been deregistered
del self.registry[threading.get_ident()]
@property
def available(self):
"""True if the thread is available in the registry."""
return threading.get_ident() in self.registry
@property
def handle(self):
"""Gets the current handle for the thread."""
return self.registry.get(threading.get_ident(), self.default)
def __enter__(self):
pass
def __exit__(self, ex_type, ex_value, ex_traceback):
self.deregister()
#
# io.TextIOBase interface
#
@property
def encoding(self):
"""Gets the encoding for this thread's handle."""
return self.handle.encoding
@property
def errors(self):
"""Gets the errors for this thread's handle."""
return self.handle.errors
@property
def newlines(self):
"""Gets the newlines for this thread's handle."""
return self.handle.newlines
@property
def buffer(self):
"""Gets the buffer for this thread's handle."""
return self.handle.buffer
def detach(self):
"""Detaches the buffer for the current thread."""
return self.handle.detach()
def read(self, size=None):
"""Reads from the handle for the current thread."""
return self.handle.read(size)
def readline(self, size=-1):
"""Reads a line from the handle for the current thread."""
return self.handle.readline(size)
def readlines(self, hint=-1):
"""Reads lines from the handle for the current thread."""
return self.handle.readlines(hint)
def seek(self, offset, whence=io.SEEK_SET):
"""Seeks the current file."""
return self.handle.seek(offset, whence)
def tell(self):
"""Reports the current position in the handle for the current thread."""
return self.handle.tell()
def write(self, s):
"""Writes to this thread's handle. This also flushes, just to be
extra sure the string was written.
"""
h = self.handle
try:
r = h.write(s)
h.flush()
except OSError:
r = None
return r
@property
def line_buffering(self):
"""Gets if line buffering for this thread's handle enabled."""
return self.handle.line_buffering
#
# io.IOBase interface
#
def close(self):
"""Closes the current thread's handle."""
return self.handle.close()
@property
def closed(self):
"""Is the thread's handle closed."""
return self.handle.closed
def fileno(self):
"""Returns the file descriptor for the current thread."""
return self.handle.fileno()
def flush(self):
"""Flushes the file descriptor for the current thread."""
return safe_flush(self.handle)
def isatty(self):
"""Returns if the file descriptor for the current thread is a tty."""
return self.handle.isatty()
def readable(self):
"""Returns if file descriptor for the current thread is readable."""
return self.handle.readable()
def seekable(self):
"""Returns if file descriptor for the current thread is seekable."""
return self.handle.seekable()
    def truncate(self, size=None):
        """Truncates the file for the current thread."""
        return self.handle.truncate(size)
    def writable(self):
        """Returns if file descriptor for the current thread is writable."""
        return self.handle.writable()
    def writelines(self, lines):
        """Writes lines to the file descriptor for the current thread."""
        return self.handle.writelines(lines)
# These should NOT be lazy since they *need* to get the true stdout from the
# main thread. Also their creation time should be negligible.
STDOUT_DISPATCHER = FileThreadDispatcher(default=sys.stdout)
STDERR_DISPATCHER = FileThreadDispatcher(default=sys.stderr)
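# Usage sketch (illustrative): a thread can temporarily point the dispatcher
# at its own handle, so writes from that thread are captured separately:
#   buf = io.StringIO()
#   STDOUT_DISPATCHER.register(buf)
#   STDOUT_DISPATCHER.write("captured")   # lands in buf, not sys.stdout
#   STDOUT_DISPATCHER.deregister()        # subsequent writes use the default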
def parse_proxy_return(r, stdout, stderr):
"""Proxies may return a variety of outputs. This handles them generally.
Parameters
----------
r : tuple, str, int, or None
Return from proxy function
stdout : file-like
Current stdout stream
    stderr : file-like
Current stderr stream
Returns
-------
cmd_result : int
The return code of the proxy
"""
cmd_result = 0
if isinstance(r, str):
stdout.write(r)
stdout.flush()
elif isinstance(r, int):
cmd_result = r
elif isinstance(r, cabc.Sequence):
rlen = len(r)
if rlen > 0 and r[0] is not None:
stdout.write(r[0])
stdout.flush()
if rlen > 1 and r[1] is not None:
stderr.write(r[1])
stderr.flush()
if rlen > 2 and r[2] is not None:
cmd_result = r[2]
elif r is not None:
# for the random object...
stdout.write(str(r))
stdout.flush()
return cmd_result
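# Hedged examples of the return conventions handled above:
#   parse_proxy_return(None, out, err)            -> 0, nothing written
#   parse_proxy_return("hi", out, err)            -> 0, "hi" written to out
#   parse_proxy_return(2, out, err)               -> 2
#   parse_proxy_return(("o", "e", 3), out, err)   -> 3, "o" to out, "e" to err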
def proxy_zero(f, args, stdin, stdout, stderr, spec, stack):
"""Calls a proxy function which takes no parameters."""
return f()
def proxy_one(f, args, stdin, stdout, stderr, spec, stack):
"""Calls a proxy function which takes one parameter: args"""
return f(args)
def proxy_two(f, args, stdin, stdout, stderr, spec, stack):
"""Calls a proxy function which takes two parameter: args and stdin."""
return f(args, stdin)
def proxy_three(f, args, stdin, stdout, stderr, spec, stack):
"""Calls a proxy function which takes three parameter: args, stdin, stdout.
"""
return f(args, stdin, stdout)
def proxy_four(f, args, stdin, stdout, stderr, spec, stack):
"""Calls a proxy function which takes four parameter: args, stdin, stdout,
and stderr.
"""
return f(args, stdin, stdout, stderr)
def proxy_five(f, args, stdin, stdout, stderr, spec, stack):
"""Calls a proxy function which takes four parameter: args, stdin, stdout,
stderr, and spec.
"""
return f(args, stdin, stdout, stderr, spec)
PROXIES = (proxy_zero, proxy_one, proxy_two, proxy_three, proxy_four, proxy_five)
PROXY_KWARG_NAMES = frozenset(["args", "stdin", "stdout", "stderr", "spec", "stack"])
def partial_proxy(f):
"""Dispatches the appropriate proxy function based on the number of args."""
numargs = 0
for name, param in inspect.signature(f).parameters.items():
if (
param.kind == param.POSITIONAL_ONLY
or param.kind == param.POSITIONAL_OR_KEYWORD
):
numargs += 1
elif name in PROXY_KWARG_NAMES and param.kind == param.KEYWORD_ONLY:
numargs += 1
if numargs < 6:
return functools.partial(PROXIES[numargs], f)
elif numargs == 6:
# don't need to partial.
return f
else:
e = "Expected proxy with 6 or fewer arguments for {}, not {}"
raise XonshError(e.format(", ".join(PROXY_KWARG_NAMES), numargs))
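# Dispatch sketch (illustrative): a hypothetical alias taking (args, stdin)
# has two positional parameters, so partial_proxy() wraps it with proxy_two:
#   def tac(args, stdin):
#       return "".join(reversed(stdin.readlines()))
#   p = partial_proxy(tac)
#   # p(args, stdin, stdout, stderr, spec, stack) now calls tac(args, stdin)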
class ProcProxyThread(threading.Thread):
"""
Class representing a function to be run as a subprocess-mode command.
"""
def __init__(
self,
f,
args,
stdin=None,
stdout=None,
stderr=None,
universal_newlines=False,
env=None,
):
"""Parameters
----------
f : function
The function to be executed.
args : list
A (possibly empty) list containing the arguments that were given on
the command line
stdin : file-like, optional
A file-like object representing stdin (input can be read from
here). If `stdin` is not provided or if it is explicitly set to
`None`, then an instance of `io.StringIO` representing an empty
file is used.
stdout : file-like, optional
A file-like object representing stdout (normal output can be
written here). If `stdout` is not provided or if it is explicitly
set to `None`, then `sys.stdout` is used.
stderr : file-like, optional
A file-like object representing stderr (error output can be
written here). If `stderr` is not provided or if it is explicitly
set to `None`, then `sys.stderr` is used.
universal_newlines : bool, optional
Whether or not to use universal newlines.
env : Mapping, optional
Environment mapping.
"""
self.orig_f = f
self.f = partial_proxy(f)
self.args = args
self.pid = None
self.returncode = None
self._closed_handle_cache = {}
handles = self._get_handles(stdin, stdout, stderr)
(
self.p2cread,
self.p2cwrite,
self.c2pread,
self.c2pwrite,
self.errread,
self.errwrite,
) = handles
# default values
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
self.env = env or builtins.__xonsh__.env
self._interrupted = False
if ON_WINDOWS:
if self.p2cwrite != -1:
self.p2cwrite = msvcrt.open_osfhandle(self.p2cwrite.Detach(), 0)
if self.c2pread != -1:
self.c2pread = msvcrt.open_osfhandle(self.c2pread.Detach(), 0)
if self.errread != -1:
self.errread = msvcrt.open_osfhandle(self.errread.Detach(), 0)
if self.p2cwrite != -1:
self.stdin = io.open(self.p2cwrite, "wb", -1)
if universal_newlines:
self.stdin = io.TextIOWrapper(
self.stdin, write_through=True, line_buffering=False
)
elif isinstance(stdin, int) and stdin != 0:
self.stdin = io.open(stdin, "wb", -1)
if self.c2pread != -1:
self.stdout = io.open(self.c2pread, "rb", -1)
if universal_newlines:
self.stdout = io.TextIOWrapper(self.stdout)
if self.errread != -1:
self.stderr = io.open(self.errread, "rb", -1)
if universal_newlines:
self.stderr = io.TextIOWrapper(self.stderr)
        # Set some signal handlers, if we can. Must come before process
# is started to prevent deadlock on windows
self.old_int_handler = None
if on_main_thread():
self.old_int_handler = signal.signal(signal.SIGINT, self._signal_int)
# start up the proc
super().__init__()
self.start()
def __del__(self):
self._restore_sigint()
def run(self):
"""Set up input/output streams and execute the child function in a new
thread. This is part of the `threading.Thread` interface and should
not be called directly.
"""
if self.f is None:
return
spec = self._wait_and_getattr("spec")
last_in_pipeline = spec.last_in_pipeline
if last_in_pipeline:
capout = spec.captured_stdout # NOQA
caperr = spec.captured_stderr # NOQA
env = builtins.__xonsh__.env
enc = env.get("XONSH_ENCODING")
err = env.get("XONSH_ENCODING_ERRORS")
if ON_WINDOWS:
if self.p2cread != -1:
self.p2cread = msvcrt.open_osfhandle(self.p2cread.Detach(), 0)
if self.c2pwrite != -1:
self.c2pwrite = msvcrt.open_osfhandle(self.c2pwrite.Detach(), 0)
if self.errwrite != -1:
self.errwrite = msvcrt.open_osfhandle(self.errwrite.Detach(), 0)
# get stdin
if self.stdin is None:
sp_stdin = None
elif self.p2cread != -1:
sp_stdin = io.TextIOWrapper(
io.open(self.p2cread, "rb", -1), encoding=enc, errors=err
)
else:
sp_stdin = sys.stdin
# stdout
if self.c2pwrite != -1:
sp_stdout = io.TextIOWrapper(
io.open(self.c2pwrite, "wb", -1), encoding=enc, errors=err
)
else:
sp_stdout = sys.stdout
# stderr
if self.errwrite == self.c2pwrite:
sp_stderr = sp_stdout
elif self.errwrite != -1:
sp_stderr = io.TextIOWrapper(
io.open(self.errwrite, "wb", -1), encoding=enc, errors=err
)
else:
sp_stderr = sys.stderr
# run the function itself
try:
with STDOUT_DISPATCHER.register(sp_stdout), STDERR_DISPATCHER.register(
sp_stderr
), redirect_stdout(STDOUT_DISPATCHER), redirect_stderr(STDERR_DISPATCHER):
r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec, spec.stack)
except SystemExit as e:
r = e.code if isinstance(e.code, int) else int(bool(e.code))
except OSError:
status = still_writable(self.c2pwrite) and still_writable(self.errwrite)
if status:
# stdout and stderr are still writable, so error must
# come from function itself.
print_exception()
r = 1
else:
# stdout and stderr are no longer writable, so error must
# come from the fact that the next process in the pipeline
# has closed the other side of the pipe. The function then
# attempted to write to this side of the pipe anyway. This
# is not truly an error and we should exit gracefully.
r = 0
except Exception:
print_exception()
r = 1
safe_flush(sp_stdout)
safe_flush(sp_stderr)
self.returncode = parse_proxy_return(r, sp_stdout, sp_stderr)
if not last_in_pipeline and not ON_WINDOWS:
# mac requires us *not to* close the handles here while
# windows requires us *to* close the handles here
return
# clean up
# scopz: not sure why this is needed, but stdin cannot go here
# and stdout & stderr must.
handles = [self.stdout, self.stderr]
for handle in handles:
safe_fdclose(handle, cache=self._closed_handle_cache)
def _wait_and_getattr(self, name):
"""make sure the instance has a certain attr, and return it."""
while not hasattr(self, name):
time.sleep(1e-7)
return getattr(self, name)
def poll(self):
"""Check if the function has completed.
Returns
-------
None if the function is still executing, and the returncode otherwise
"""
return self.returncode
def wait(self, timeout=None):
"""Waits for the process to finish and returns the return code."""
self.join()
self._restore_sigint()
return self.returncode
#
# SIGINT handler
#
def _signal_int(self, signum, frame):
"""Signal handler for SIGINT - Ctrl+C may have been pressed."""
# Check if we have already been interrupted. This should prevent
# the possibility of infinite recursion.
if self._interrupted:
return
self._interrupted = True
        # close file handles here to stop any processes piped to us.
handles = (
self.p2cread,
self.p2cwrite,
self.c2pread,
self.c2pwrite,
self.errread,
self.errwrite,
)
for handle in handles:
safe_fdclose(handle)
if self.poll() is not None:
self._restore_sigint(frame=frame)
if on_main_thread():
signal.pthread_kill(threading.get_ident(), signal.SIGINT)
def _restore_sigint(self, frame=None):
old = self.old_int_handler
if old is not None:
if on_main_thread():
signal.signal(signal.SIGINT, old)
self.old_int_handler = None
if frame is not None:
if old is not None and old is not self._signal_int:
old(signal.SIGINT, frame)
if self._interrupted:
self.returncode = 1
# The code below (_get_devnull, _get_handles, and _make_inheritable) comes
# from subprocess.py in the Python 3.4.2 Standard Library
def _get_devnull(self):
if not hasattr(self, "_devnull"):
self._devnull = os.open(os.devnull, os.O_RDWR)
return self._devnull
if ON_WINDOWS:
def _make_inheritable(self, handle):
"""Return a duplicate of handle, which is inheritable"""
h = _winapi.DuplicateHandle(
_winapi.GetCurrentProcess(),
handle,
_winapi.GetCurrentProcess(),
0,
1,
_winapi.DUPLICATE_SAME_ACCESS,
)
return Handle(h)
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tuple with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
if stdin is None and stdout is None and stderr is None:
return (-1, -1, -1, -1, -1, -1)
p2cread, p2cwrite = -1, -1
c2pread, c2pwrite = -1, -1
errread, errwrite = -1, -1
if stdin is None:
p2cread = _winapi.GetStdHandle(_winapi.STD_INPUT_HANDLE)
if p2cread is None:
p2cread, _ = _winapi.CreatePipe(None, 0)
p2cread = Handle(p2cread)
_winapi.CloseHandle(_)
            elif stdin == subprocess.PIPE:
                p2cread, p2cwrite = _winapi.CreatePipe(None, 0)
                p2cread, p2cwrite = Handle(p2cread), Handle(p2cwrite)
elif stdin == subprocess.DEVNULL:
p2cread = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stdin, int):
p2cread = msvcrt.get_osfhandle(stdin)
else:
# Assuming file-like object
p2cread = msvcrt.get_osfhandle(stdin.fileno())
p2cread = self._make_inheritable(p2cread)
if stdout is None:
c2pwrite = _winapi.GetStdHandle(_winapi.STD_OUTPUT_HANDLE)
if c2pwrite is None:
_, c2pwrite = _winapi.CreatePipe(None, 0)
c2pwrite = Handle(c2pwrite)
_winapi.CloseHandle(_)
elif stdout == subprocess.PIPE:
c2pread, c2pwrite = _winapi.CreatePipe(None, 0)
c2pread, c2pwrite = Handle(c2pread), Handle(c2pwrite)
elif stdout == subprocess.DEVNULL:
c2pwrite = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stdout, int):
c2pwrite = msvcrt.get_osfhandle(stdout)
else:
# Assuming file-like object
c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
c2pwrite = self._make_inheritable(c2pwrite)
if stderr is None:
errwrite = _winapi.GetStdHandle(_winapi.STD_ERROR_HANDLE)
if errwrite is None:
_, errwrite = _winapi.CreatePipe(None, 0)
errwrite = Handle(errwrite)
_winapi.CloseHandle(_)
elif stderr == subprocess.PIPE:
errread, errwrite = _winapi.CreatePipe(None, 0)
errread, errwrite = Handle(errread), Handle(errwrite)
elif stderr == subprocess.STDOUT:
errwrite = c2pwrite
elif stderr == subprocess.DEVNULL:
errwrite = msvcrt.get_osfhandle(self._get_devnull())
elif isinstance(stderr, int):
errwrite = msvcrt.get_osfhandle(stderr)
else:
# Assuming file-like object
errwrite = msvcrt.get_osfhandle(stderr.fileno())
errwrite = self._make_inheritable(errwrite)
return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
else:
# POSIX versions
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tuple with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
p2cread, p2cwrite = -1, -1
c2pread, c2pwrite = -1, -1
errread, errwrite = -1, -1
if stdin is None:
pass
elif stdin == subprocess.PIPE:
p2cread, p2cwrite = os.pipe()
elif stdin == subprocess.DEVNULL:
p2cread = self._get_devnull()
elif isinstance(stdin, int):
p2cread = stdin
else:
# Assuming file-like object
p2cread = stdin.fileno()
if stdout is None:
pass
elif stdout == subprocess.PIPE:
c2pread, c2pwrite = os.pipe()
elif stdout == subprocess.DEVNULL:
c2pwrite = self._get_devnull()
elif isinstance(stdout, int):
c2pwrite = stdout
else:
# Assuming file-like object
c2pwrite = stdout.fileno()
if stderr is None:
pass
elif stderr == subprocess.PIPE:
errread, errwrite = os.pipe()
elif stderr == subprocess.STDOUT:
errwrite = c2pwrite
elif stderr == subprocess.DEVNULL:
errwrite = self._get_devnull()
elif isinstance(stderr, int):
errwrite = stderr
else:
# Assuming file-like object
errwrite = stderr.fileno()
return (p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
#
# Foreground Thread Process Proxies
#
class ProcProxy(object):
"""This is process proxy class that runs its alias functions on the
same thread that it was called from, which is typically the main thread.
This prevents the process from running on a background thread, but enables
    debugger and profiler tools (functions) to be run on the same thread that they
are attempting to debug.
"""
def __init__(
self,
f,
args,
stdin=None,
stdout=None,
stderr=None,
universal_newlines=False,
env=None,
):
self.orig_f = f
self.f = partial_proxy(f)
self.args = args
self.pid = os.getpid()
self.returncode = None
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
self.universal_newlines = universal_newlines
self.env = env
def poll(self):
"""Check if the function has completed via the returncode or None.
"""
return self.returncode
def wait(self, timeout=None):
"""Runs the function and returns the result. Timeout argument only
present for API compatibility.
"""
if self.f is None:
return 0
env = builtins.__xonsh__.env
enc = env.get("XONSH_ENCODING")
err = env.get("XONSH_ENCODING_ERRORS")
spec = self._wait_and_getattr("spec")
# set file handles
if self.stdin is None:
stdin = None
else:
if isinstance(self.stdin, int):
inbuf = io.open(self.stdin, "rb", -1)
else:
inbuf = self.stdin
stdin = io.TextIOWrapper(inbuf, encoding=enc, errors=err)
stdout = self._pick_buf(self.stdout, sys.stdout, enc, err)
stderr = self._pick_buf(self.stderr, sys.stderr, enc, err)
# run the actual function
try:
r = self.f(self.args, stdin, stdout, stderr, spec, spec.stack)
except Exception:
print_exception()
r = 1
self.returncode = parse_proxy_return(r, stdout, stderr)
safe_flush(stdout)
safe_flush(stderr)
return self.returncode
@staticmethod
def _pick_buf(handle, sysbuf, enc, err):
if handle is None or handle is sysbuf:
buf = sysbuf
elif isinstance(handle, int):
if handle < 3:
buf = sysbuf
else:
buf = io.TextIOWrapper(
io.open(handle, "wb", -1), encoding=enc, errors=err
)
elif hasattr(handle, "encoding"):
# must be a text stream, no need to wrap.
buf = handle
else:
# must be a binary stream, should wrap it.
buf = io.TextIOWrapper(handle, encoding=enc, errors=err)
return buf
def _wait_and_getattr(self, name):
"""make sure the instance has a certain attr, and return it."""
while not hasattr(self, name):
time.sleep(1e-7)
return getattr(self, name)
@lazyobject
def SIGNAL_MESSAGES():
sm = {
signal.SIGABRT: "Aborted",
signal.SIGFPE: "Floating point exception",
signal.SIGILL: "Illegal instructions",
signal.SIGTERM: "Terminated",
signal.SIGSEGV: "Segmentation fault",
}
if ON_POSIX:
sm.update(
{signal.SIGQUIT: "Quit", signal.SIGHUP: "Hangup", signal.SIGKILL: "Killed"}
)
return sm
def safe_readlines(handle, hint=-1):
"""Attempts to read lines without throwing an error."""
try:
lines = handle.readlines(hint)
except OSError:
lines = []
return lines
def safe_readable(handle):
"""Attempts to find if the handle is readable without throwing an error."""
try:
status = handle.readable()
except (OSError, ValueError):
status = False
return status
def update_fg_process_group(pipeline_group, background):
if background:
return False
if not ON_POSIX:
return False
env = builtins.__xonsh__.env
if not env.get("XONSH_INTERACTIVE"):
return False
return give_terminal_to(pipeline_group)
class CommandPipeline:
"""Represents a subprocess-mode command pipeline."""
attrnames = (
"stdin",
"stdout",
"stderr",
"pid",
"returncode",
"args",
"alias",
"stdin_redirect",
"stdout_redirect",
"stderr_redirect",
"timestamps",
"executed_cmd",
"input",
"output",
"errors",
)
nonblocking = (io.BytesIO, NonBlockingFDReader, ConsoleParallelReader)
def __init__(self, specs):
"""
Parameters
----------
specs : list of SubprocSpec
Process specifications
Attributes
----------
spec : SubprocSpec
The last specification in specs
proc : Popen-like
The process in procs
ended : bool
Boolean for if the command has stopped executing.
input : str
A string of the standard input.
output : str
A string of the standard output.
errors : str
A string of the standard error.
lines : list of str
The output lines
        starttime : float or None
Pipeline start timestamp.
"""
self.starttime = None
self.ended = False
self.procs = []
self.specs = specs
self.spec = specs[-1]
self.captured = specs[-1].captured
self.input = self._output = self.errors = self.endtime = None
self._closed_handle_cache = {}
self.lines = []
self._stderr_prefix = self._stderr_postfix = None
self.term_pgid = None
background = self.spec.background
pipeline_group = None
for spec in specs:
if self.starttime is None:
self.starttime = time.time()
try:
proc = spec.run(pipeline_group=pipeline_group)
except Exception:
print_exception()
self._return_terminal()
self.proc = None
return
if (
proc.pid
and pipeline_group is None
and not spec.is_proxy
and self.captured != "object"
):
pipeline_group = proc.pid
if update_fg_process_group(pipeline_group, background):
self.term_pgid = pipeline_group
self.procs.append(proc)
self.proc = self.procs[-1]
def __repr__(self):
s = self.__class__.__name__ + "("
s += ", ".join(a + "=" + str(getattr(self, a)) for a in self.attrnames)
s += ")"
return s
def __bool__(self):
return self.returncode == 0
def __len__(self):
return len(self.procs)
def __iter__(self):
"""Iterates through stdout and returns the lines, converting to
strings and universal newlines if needed.
"""
if self.ended:
yield from iter(self.lines)
else:
yield from self.tee_stdout()
def iterraw(self):
"""Iterates through the last stdout, and returns the lines
exactly as found.
"""
# get appropriate handles
spec = self.spec
proc = self.proc
if proc is None:
return
timeout = builtins.__xonsh__.env.get("XONSH_PROC_FREQUENCY")
# get the correct stdout
stdout = proc.stdout
if (
stdout is None or spec.stdout is None or not safe_readable(stdout)
) and spec.captured_stdout is not None:
stdout = spec.captured_stdout
if hasattr(stdout, "buffer"):
stdout = stdout.buffer
if stdout is not None and not isinstance(stdout, self.nonblocking):
stdout = NonBlockingFDReader(stdout.fileno(), timeout=timeout)
if (
not stdout
or self.captured == "stdout"
or not safe_readable(stdout)
or not spec.threadable
):
# we get here if the process is not threadable or the
# class is the real Popen
PrevProcCloser(pipeline=self)
task = wait_for_active_job()
if task is None or task["status"] != "stopped":
proc.wait()
self._endtime()
if self.captured == "object":
self.end(tee_output=False)
elif self.captured == "hiddenobject" and stdout:
b = stdout.read()
lines = b.splitlines(keepends=True)
yield from lines
self.end(tee_output=False)
elif self.captured == "stdout":
b = stdout.read()
s = self._decode_uninew(b, universal_newlines=True)
self.lines = s.splitlines(keepends=True)
return
# get the correct stderr
stderr = proc.stderr
if (
stderr is None or spec.stderr is None or not safe_readable(stderr)
) and spec.captured_stderr is not None:
stderr = spec.captured_stderr
if hasattr(stderr, "buffer"):
stderr = stderr.buffer
if stderr is not None and not isinstance(stderr, self.nonblocking):
stderr = NonBlockingFDReader(stderr.fileno(), timeout=timeout)
# read from process while it is running
check_prev_done = len(self.procs) == 1
prev_end_time = None
i = j = cnt = 1
while proc.poll() is None:
if getattr(proc, "suspended", False):
return
elif getattr(proc, "in_alt_mode", False):
time.sleep(0.1) # probably not leaving any time soon
continue
elif not check_prev_done:
# In the case of pipelines with more than one command
# we should give the commands a little time
# to start up fully. This is particularly true for
# GNU Parallel, which has a long startup time.
pass
elif self._prev_procs_done():
self._close_prev_procs()
proc.prevs_are_closed = True
break
stdout_lines = safe_readlines(stdout, 1024)
i = len(stdout_lines)
if i != 0:
yield from stdout_lines
stderr_lines = safe_readlines(stderr, 1024)
j = len(stderr_lines)
if j != 0:
self.stream_stderr(stderr_lines)
if not check_prev_done:
# if we are piping...
if stdout_lines or stderr_lines:
# see if we have some output.
check_prev_done = True
elif prev_end_time is None:
# or see if we already know that the next-to-last
# proc in the pipeline has ended.
if self._prev_procs_done():
# if it has, record the time
prev_end_time = time.time()
elif time.time() - prev_end_time >= 0.1:
# if we still don't have any output, even though the
# next-to-last proc has finished, wait a bit to make
# sure we have fully started up, etc.
check_prev_done = True
# this is for CPU usage
if i + j == 0:
cnt = min(cnt + 1, 1000)
else:
cnt = 1
time.sleep(timeout * cnt)
# read from process now that it is over
yield from safe_readlines(stdout)
self.stream_stderr(safe_readlines(stderr))
proc.wait()
self._endtime()
yield from safe_readlines(stdout)
self.stream_stderr(safe_readlines(stderr))
if self.captured == "object":
self.end(tee_output=False)
def itercheck(self):
"""Iterates through the command lines and throws an error if the
returncode is non-zero.
"""
yield from self
if self.returncode:
            # self is included to provide access to stderr and other details,
            # useful when the instance isn't assigned to a variable in the shell.
raise XonshCalledProcessError(
self.returncode, self.executed_cmd, self.stdout, self.stderr, self
)
def tee_stdout(self):
"""Writes the process stdout to the output variable, line-by-line, and
        yields each line.
"""
env = builtins.__xonsh__.env
enc = env.get("XONSH_ENCODING")
err = env.get("XONSH_ENCODING_ERRORS")
lines = self.lines
stream = self.captured not in STDOUT_CAPTURE_KINDS
if stream and not self.spec.stdout:
stream = False
stdout_has_buffer = hasattr(sys.stdout, "buffer")
nl = b"\n"
cr = b"\r"
crnl = b"\r\n"
for line in self.iterraw():
# write to stdout line ASAP, if needed
if stream:
if stdout_has_buffer:
sys.stdout.buffer.write(line)
else:
sys.stdout.write(line.decode(encoding=enc, errors=err))
sys.stdout.flush()
# do some munging of the line before we return it
if line.endswith(crnl):
line = line[:-2] + nl
elif line.endswith(cr):
line = line[:-1] + nl
line = RE_HIDE_ESCAPE.sub(b"", line)
line = line.decode(encoding=enc, errors=err)
# tee it up!
lines.append(line)
yield line
def stream_stderr(self, lines):
"""Streams lines to sys.stderr and the errors attribute."""
if not lines:
return
env = builtins.__xonsh__.env
enc = env.get("XONSH_ENCODING")
err = env.get("XONSH_ENCODING_ERRORS")
b = b"".join(lines)
if self.stderr_prefix:
b = self.stderr_prefix + b
if self.stderr_postfix:
b += self.stderr_postfix
stderr_has_buffer = hasattr(sys.stderr, "buffer")
# write bytes to std stream
if stderr_has_buffer:
sys.stderr.buffer.write(b)
else:
sys.stderr.write(b.decode(encoding=enc, errors=err))
sys.stderr.flush()
# do some munging of the line before we save it to the attr
b = b.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
b = RE_HIDE_ESCAPE.sub(b"", b)
env = builtins.__xonsh__.env
s = b.decode(
encoding=env.get("XONSH_ENCODING"), errors=env.get("XONSH_ENCODING_ERRORS")
)
# set the errors
if self.errors is None:
self.errors = s
else:
self.errors += s
def _decode_uninew(self, b, universal_newlines=None):
"""Decode bytes into a str and apply universal newlines as needed."""
if not b:
return ""
if isinstance(b, bytes):
env = builtins.__xonsh__.env
s = b.decode(
encoding=env.get("XONSH_ENCODING"),
errors=env.get("XONSH_ENCODING_ERRORS"),
)
else:
s = b
if universal_newlines or self.spec.universal_newlines:
s = s.replace("\r\n", "\n").replace("\r", "\n")
return s
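    # Hedged example: with universal newlines enabled, carriage returns fold
    # into "\n":
    #   self._decode_uninew(b"a\r\nb\r", universal_newlines=True) -> "a\nb\n"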
#
# Ending methods
#
def end(self, tee_output=True):
"""
        End the pipeline and return the controlling terminal if needed.
        The main work is done in self._end().
"""
if self.ended:
return
self._end(tee_output=tee_output)
self._return_terminal()
def _end(self, tee_output):
"""Waits for the command to complete and then runs any closing and
cleanup procedures that need to be run.
"""
if tee_output:
for _ in self.tee_stdout():
pass
self._endtime()
# since we are driven by getting output, input may not be available
# until the command has completed.
self._set_input()
self._close_prev_procs()
self._close_proc()
self._check_signal()
self._apply_to_history()
self.ended = True
self._raise_subproc_error()
def _return_terminal(self):
if ON_WINDOWS or not ON_POSIX:
return
pgid = os.getpgid(0)
if self.term_pgid is None or pgid == self.term_pgid:
return
        if give_terminal_to(pgid):  # if giving the terminal back succeeded
self.term_pgid = pgid
if builtins.__xonsh__.shell is not None:
# restoring sanity could probably be called whenever we return
# control to the shell. But it only seems to matter after a
# ^Z event. This *has* to be called after we give the terminal
# back to the shell.
builtins.__xonsh__.shell.shell.restore_tty_sanity()
def resume(self, job, tee_output=True):
self.ended = False
if give_terminal_to(job["pgrp"]):
self.term_pgid = job["pgrp"]
_continue(job)
self.end(tee_output=tee_output)
def _endtime(self):
"""Sets the closing timestamp if it hasn't been already."""
if self.endtime is None:
self.endtime = time.time()
def _safe_close(self, handle):
safe_fdclose(handle, cache=self._closed_handle_cache)
def _prev_procs_done(self):
"""Boolean for if all previous processes have completed. If there
is only a single process in the pipeline, this returns False.
"""
any_running = False
for s, p in zip(self.specs[:-1], self.procs[:-1]):
if p.poll() is None:
any_running = True
continue
self._safe_close(s.stdin)
self._safe_close(s.stdout)
self._safe_close(s.stderr)
if p is None:
continue
self._safe_close(p.stdin)
self._safe_close(p.stdout)
self._safe_close(p.stderr)
return False if any_running else (len(self) > 1)
def _close_prev_procs(self):
"""Closes all but the last proc's stdout."""
for s, p in zip(self.specs[:-1], self.procs[:-1]):
self._safe_close(s.stdin)
self._safe_close(s.stdout)
self._safe_close(s.stderr)
if p is None:
continue
self._safe_close(p.stdin)
self._safe_close(p.stdout)
self._safe_close(p.stderr)
def _close_proc(self):
"""Closes last proc's stdout."""
s = self.spec
p = self.proc
self._safe_close(s.stdin)
self._safe_close(s.stdout)
self._safe_close(s.stderr)
self._safe_close(s.captured_stdout)
self._safe_close(s.captured_stderr)
if p is None:
return
self._safe_close(p.stdin)
self._safe_close(p.stdout)
self._safe_close(p.stderr)
def _set_input(self):
"""Sets the input variable."""
if self.proc is None:
return
stdin = self.proc.stdin
if (
stdin is None
or isinstance(stdin, int)
or stdin.closed
or not stdin.seekable()
or not safe_readable(stdin)
):
input = b""
else:
stdin.seek(0)
input = stdin.read()
self.input = self._decode_uninew(input)
def _check_signal(self):
"""Checks if a signal was received and issues a message."""
proc_signal = getattr(self.proc, "signal", None)
if proc_signal is None:
return
sig, core = proc_signal
sig_str = SIGNAL_MESSAGES.get(sig)
if sig_str:
if core:
sig_str += " (core dumped)"
print(sig_str, file=sys.stderr)
if self.errors is not None:
self.errors += sig_str + "\n"
def _apply_to_history(self):
"""Applies the results to the current history object."""
hist = builtins.__xonsh__.history
if hist is not None:
hist.last_cmd_rtn = 1 if self.proc is None else self.proc.returncode
def _raise_subproc_error(self):
"""Raises a subprocess error, if we are supposed to."""
spec = self.spec
rtn = self.returncode
if (
not spec.is_proxy
and rtn is not None
and rtn > 0
and builtins.__xonsh__.env.get("RAISE_SUBPROC_ERROR")
):
try:
raise subprocess.CalledProcessError(rtn, spec.cmd, output=self.output)
finally:
                # this is needed to get a working terminal in interactive mode
self._return_terminal()
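    # Illustrative sketch (an assumption, not from the original file): in an
    # interactive xonsh session the behavior above is driven by the env var,
    # e.g.
    #
    #     $RAISE_SUBPROC_ERROR = True
    #     false  # now raises subprocess.CalledProcessError instead of
    #            # silently setting a non-zero return code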
#
# Properties
#
@property
def stdin(self):
"""Process stdin."""
return self.proc.stdin
@property
def stdout(self):
"""Process stdout."""
return self.proc.stdout
@property
def stderr(self):
"""Process stderr."""
return self.proc.stderr
@property
def inp(self):
"""Creates normalized input string from args."""
return " ".join(self.args)
@property
def output(self):
"""Non-blocking, lazy access to output"""
if self.ended:
if self._output is None:
self._output = "".join(self.lines)
return self._output
else:
return "".join(self.lines)
@property
def out(self):
"""Output value as a str."""
self.end()
return self.output
@property
def err(self):
"""Error messages as a string."""
self.end()
return self.errors
@property
def pid(self):
"""Process identifier."""
return self.proc.pid
@property
def returncode(self):
"""Process return code, waits until command is completed."""
self.end()
if self.proc is None:
return 1
return self.proc.returncode
@property
def args(self):
"""Arguments to the process."""
return self.spec.args
@property
def rtn(self):
"""Alias to return code."""
return self.returncode
@property
def alias(self):
"""Alias the process used."""
return self.spec.alias
@property
def stdin_redirect(self):
"""Redirection used for stdin."""
stdin = self.spec.stdin
name = getattr(stdin, "name", "<stdin>")
mode = getattr(stdin, "mode", "r")
return [name, mode]
@property
def stdout_redirect(self):
"""Redirection used for stdout."""
stdout = self.spec.stdout
name = getattr(stdout, "name", "<stdout>")
mode = getattr(stdout, "mode", "a")
return [name, mode]
@property
def stderr_redirect(self):
"""Redirection used for stderr."""
stderr = self.spec.stderr
name = getattr(stderr, "name", "<stderr>")
mode = getattr(stderr, "mode", "r")
return [name, mode]
@property
def timestamps(self):
"""The start and end time stamps."""
return [self.starttime, self.endtime]
@property
def executed_cmd(self):
"""The resolve and executed command."""
return self.spec.cmd
@property
def stderr_prefix(self):
"""Prefix to print in front of stderr, as bytes."""
p = self._stderr_prefix
if p is None:
env = builtins.__xonsh__.env
t = env.get("XONSH_STDERR_PREFIX")
s = format_std_prepost(t, env=env)
p = s.encode(
encoding=env.get("XONSH_ENCODING"),
errors=env.get("XONSH_ENCODING_ERRORS"),
)
self._stderr_prefix = p
return p
@property
def stderr_postfix(self):
"""Postfix to print after stderr, as bytes."""
p = self._stderr_postfix
if p is None:
env = builtins.__xonsh__.env
t = env.get("XONSH_STDERR_POSTFIX")
s = format_std_prepost(t, env=env)
p = s.encode(
encoding=env.get("XONSH_ENCODING"),
errors=env.get("XONSH_ENCODING_ERRORS"),
)
self._stderr_postfix = p
return p
class HiddenCommandPipeline(CommandPipeline):
def __repr__(self):
return ""
def pause_call_resume(p, f, *args, **kwargs):
"""For a process p, this will call a function f with the remaining args and
and kwargs. If the process cannot accept signals, the function will be called.
Parameters
----------
p : Popen object or similar
f : callable
args : remaining arguments
kwargs : keyword arguments
"""
can_send_signal = (
hasattr(p, "send_signal") and ON_POSIX and not ON_MSYS and not ON_CYGWIN
)
if can_send_signal:
try:
p.send_signal(signal.SIGSTOP)
except PermissionError:
pass
try:
f(*args, **kwargs)
except Exception:
pass
if can_send_signal:
p.send_signal(signal.SIGCONT)
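# Hypothetical usage sketch for pause_call_resume (names are illustrative,
# not part of this module): stop a foreground process while flushing the
# history file, then let it continue.
#
#     pause_call_resume(proc, flush_history_file, "~/.xonsh_history")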
class PrevProcCloser(threading.Thread):
"""Previous process closer thread for pipelines whose last command
is itself unthreadable. This makes sure that the pipeline is
driven forward and does not deadlock.
"""
def __init__(self, pipeline):
"""
Parameters
----------
pipeline : CommandPipeline
The pipeline whose prev procs we should close.
"""
self.pipeline = pipeline
super().__init__()
self.daemon = True
self.start()
def run(self):
"""Runs the closing algorithm."""
pipeline = self.pipeline
check_prev_done = len(pipeline.procs) == 1
if check_prev_done:
return
proc = pipeline.proc
prev_end_time = None
timeout = builtins.__xonsh__.env.get("XONSH_PROC_FREQUENCY")
sleeptime = min(timeout * 1000, 0.1)
while proc.poll() is None:
if not check_prev_done:
# In the case of pipelines with more than one command
# we should give the commands a little time
# to start up fully. This is particularly true for
# GNU Parallel, which has a long startup time.
pass
elif pipeline._prev_procs_done():
pipeline._close_prev_procs()
proc.prevs_are_closed = True
break
if not check_prev_done:
# if we are piping...
                if prev_end_time is None:
                    # check whether the next-to-last proc in the
                    # pipeline has already ended.
if pipeline._prev_procs_done():
# if it has, record the time
prev_end_time = time.time()
elif time.time() - prev_end_time >= 0.1:
# if we still don't have any output, even though the
# next-to-last proc has finished, wait a bit to make
# sure we have fully started up, etc.
check_prev_done = True
# this is for CPU usage
time.sleep(sleeptime)
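# Hypothetical usage sketch (not in the original file): for a pipeline whose
# final command is unthreadable, constructing the closer is enough -- the
# daemon thread starts itself from __init__ and drains/closes earlier procs.
#
#     PrevProcCloser(pipeline)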
|
py | 1a50d580ccf5b93243dcef831bc60ba2c67ec7b8 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v5/proto/errors/campaign_experiment_error.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v5/proto/errors/campaign_experiment_error.proto',
package='google.ads.googleads.v5.errors',
syntax='proto3',
serialized_options=b'\n\"com.google.ads.googleads.v5.errorsB\034CampaignExperimentErrorProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v5/errors;errors\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V5.Errors\312\002\036Google\\Ads\\GoogleAds\\V5\\Errors\352\002\"Google::Ads::GoogleAds::V5::Errors',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nDgoogle/ads/googleads_v5/proto/errors/campaign_experiment_error.proto\x12\x1egoogle.ads.googleads.v5.errors\x1a\x1cgoogle/api/annotations.proto\"\x80\x04\n\x1b\x43\x61mpaignExperimentErrorEnum\"\xe0\x03\n\x17\x43\x61mpaignExperimentError\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07UNKNOWN\x10\x01\x12\x12\n\x0e\x44UPLICATE_NAME\x10\x02\x12\x16\n\x12INVALID_TRANSITION\x10\x03\x12/\n+CANNOT_CREATE_EXPERIMENT_WITH_SHARED_BUDGET\x10\x04\x12\x36\n2CANNOT_CREATE_EXPERIMENT_FOR_REMOVED_BASE_CAMPAIGN\x10\x05\x12\x33\n/CANNOT_CREATE_EXPERIMENT_FOR_NON_PROPOSED_DRAFT\x10\x06\x12%\n!CUSTOMER_CANNOT_CREATE_EXPERIMENT\x10\x07\x12%\n!CAMPAIGN_CANNOT_CREATE_EXPERIMENT\x10\x08\x12)\n%EXPERIMENT_DURATIONS_MUST_NOT_OVERLAP\x10\t\x12\x38\n4EXPERIMENT_DURATION_MUST_BE_WITHIN_CAMPAIGN_DURATION\x10\n\x12*\n&CANNOT_MUTATE_EXPERIMENT_DUE_TO_STATUS\x10\x0b\x42\xf7\x01\n\"com.google.ads.googleads.v5.errorsB\x1c\x43\x61mpaignExperimentErrorProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v5/errors;errors\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V5.Errors\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V5\\Errors\xea\x02\"Google::Ads::GoogleAds::V5::Errorsb\x06proto3'
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_CAMPAIGNEXPERIMENTERRORENUM_CAMPAIGNEXPERIMENTERROR = _descriptor.EnumDescriptor(
name='CampaignExperimentError',
full_name='google.ads.googleads.v5.errors.CampaignExperimentErrorEnum.CampaignExperimentError',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DUPLICATE_NAME', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INVALID_TRANSITION', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CANNOT_CREATE_EXPERIMENT_WITH_SHARED_BUDGET', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CANNOT_CREATE_EXPERIMENT_FOR_REMOVED_BASE_CAMPAIGN', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CANNOT_CREATE_EXPERIMENT_FOR_NON_PROPOSED_DRAFT', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CUSTOMER_CANNOT_CREATE_EXPERIMENT', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CAMPAIGN_CANNOT_CREATE_EXPERIMENT', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXPERIMENT_DURATIONS_MUST_NOT_OVERLAP', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXPERIMENT_DURATION_MUST_BE_WITHIN_CAMPAIGN_DURATION', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CANNOT_MUTATE_EXPERIMENT_DUE_TO_STATUS', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=167,
serialized_end=647,
)
_sym_db.RegisterEnumDescriptor(_CAMPAIGNEXPERIMENTERRORENUM_CAMPAIGNEXPERIMENTERROR)
_CAMPAIGNEXPERIMENTERRORENUM = _descriptor.Descriptor(
name='CampaignExperimentErrorEnum',
full_name='google.ads.googleads.v5.errors.CampaignExperimentErrorEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_CAMPAIGNEXPERIMENTERRORENUM_CAMPAIGNEXPERIMENTERROR,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=135,
serialized_end=647,
)
_CAMPAIGNEXPERIMENTERRORENUM_CAMPAIGNEXPERIMENTERROR.containing_type = _CAMPAIGNEXPERIMENTERRORENUM
DESCRIPTOR.message_types_by_name['CampaignExperimentErrorEnum'] = _CAMPAIGNEXPERIMENTERRORENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
CampaignExperimentErrorEnum = _reflection.GeneratedProtocolMessageType('CampaignExperimentErrorEnum', (_message.Message,), {
'DESCRIPTOR' : _CAMPAIGNEXPERIMENTERRORENUM,
'__module__' : 'google.ads.googleads_v5.proto.errors.campaign_experiment_error_pb2'
,
'__doc__': """Container for enum describing possible campaign experiment errors.""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.errors.CampaignExperimentErrorEnum)
})
_sym_db.RegisterMessage(CampaignExperimentErrorEnum)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
py | 1a50d5fa4416cd5572d557ac383a1c830888e92f | #!/usr/bin/python27
#coding:utf-8
#pylab inline
from __future__ import division
import matplotlib
matplotlib.use('TkAgg')  # the 'Agg' backend does not display plots; 'TkAgg' does.
import os
import numpy as np
import PIL.Image as pil
import tensorflow as tf
from SfMLearner import SfMLearner
from utils import normalize_depth_for_display
import matplotlib.pyplot as plt
import operator
img_height=128
img_width=416
# ckpt_file = 'models/model-190532' #depth
ckpt_file='checkpoints/model-117270'
fh = open('misc/sample.png', 'rb')  # open in binary mode; PNGs are not text
#
# fh = open('raw_data_KITTI/2011_09_28/2011_09_28_drive_0001_sync/image_02/data/0000000012.png', 'rb')  # used for my own testing
I = pil.open(fh)  # read the image
I = I.resize((img_width, img_height), pil.ANTIALIAS)  # resize with the ANTIALIAS filter
I = np.array(I)
print(I.shape) #(128, 416, 3)
print(I[None,:,:,:].shape) #(1,128, 416, 3)
sfm = SfMLearner() #initialize
sfm.setup_inference(img_height,
img_width,
mode='depth')
# this runs build_depth_test_graph()
saver = tf.train.Saver([var for var in tf.model_variables()])  # saves and restores variables to/from checkpoints
with tf.Session() as sess:
saver.restore(sess, ckpt_file)
    pred = sfm.inference(I[None, :, :, :], sess, mode='depth')  # None adds a leading batch axis
    print(pred)  # pred is a dictionary
print(pred['depth'][0,:,:,0])
print(pred['depth'][0,:,:,0].shape)
plt.figure(figsize=(15,15))
plt.subplot(1,2,1)
plt.imshow(I)
plt.subplot(1,2,2); plt.imshow(normalize_depth_for_display(pred['depth'][0,:,:,0]))
plt.show()
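# Hypothetical follow-up, not in the original script: persist the raw depth
# map for later comparison (the filename is illustrative).
#
# np.save('sample_depth.npy', pred['depth'][0, :, :, 0])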
|
py | 1a50d695d186d86a0b254e16e0547a056a8ab9cd | import numpy as np
import pytest
import unyt as u
from unyt.testing import assert_allclose_units
from gmso import Topology
from gmso.formats.mol2 import from_mol2
from gmso.tests.base_test import BaseTest
from gmso.utils.io import get_fn
class TestMol2(BaseTest):
def test_read_mol2(self):
top = Topology.load(get_fn("parmed.mol2"))
assert top.name == "parmed"
assert top.n_sites == 8
assert_allclose_units(
top.box.lengths,
([8.2693, 7.9100, 6.6460] * u.Å).to("nm"),
rtol=1e-5,
atol=1e-8,
)
assert list(top.sites)[0].element.name == "carbon"
assert_allclose_units(
list(top.sites)[0].element.mass,
np.array(1.9944733e-26) * u.kg,
rtol=1e-5,
atol=1e-8,
)
top = Topology.load(get_fn("tip3p.mol2"))
assert top.name == "tip3p"
assert top.n_sites == 3
assert_allclose_units(
top.box.lengths, 3.0130 * np.ones(3) * u.Å, rtol=1e-5, atol=1e-8
)
positions_check = [
[0.061, 0.1, 0.1],
[0.017, 0.09, 0.177],
[0.011, 0.154, 0.04],
]
for check, site in zip(positions_check, top.sites):
assert_allclose_units(
site.position,
check * u.nm,
rtol=1e-5,
atol=1e-8,
)
top = Topology.load(get_fn("vmd.mol2"))
assert top.name == "vmd"
assert top.n_sites == 6
assert len(top.bonds) == 5
assert top.bonds[0].connection_members[0] == top.sites[0]
        assert top.box is None
with pytest.warns(
UserWarning,
match=r"No charges were detected for site C with index 1",
):
top = Topology.load(get_fn("ethane.mol2"))
assert list(top.sites)[0].charge is None
with pytest.warns(
UserWarning,
match=r"No element detected for site C with index1\, consider manually adding the element to the topology",
):
Topology.load(get_fn("benzene.mol2"))
def test_residue(self):
top = Topology.load(get_fn("ethanol_aa.mol2"))
assert np.all([site.residue_name == "ETO" for site in top.sites])
assert np.all([site.residue_number == 1 for site in top.sites])
top = Topology.load(get_fn("benzene_ua.mol2"), site_type="lj")
assert np.all(
[
site.residue_name == "BEN1"
for site in top.iter_sites("residue_name", "BEN1")
]
)
assert np.all(
[
site.residue_number == 1
for site in top.iter_sites("residue_name", "BEN1")
]
)
assert np.all(
[
site.residue_name == "BEN2"
for site in top.iter_sites("residue_name", "BEN2")
]
)
assert np.all(
[
site.residue_number == 2
for site in top.iter_sites("residue_name", "BEN2")
]
)
def test_lj_system(self):
top = Topology.load(get_fn("methane.mol2"), site_type="lj")
        assert np.all([site.element is None for site in top.sites])
def test_wrong_path(self):
with pytest.raises(
OSError, match=r"Provided path to file that does not exist"
):
Topology.load("not_a_file.mol2")
top = Topology.load(get_fn("ethanegro.mol2"))
assert len(top.sites) == 0
assert len(top.bonds) == 0
def test_broken_files(self):
with pytest.warns(
UserWarning,
match=r"The record type indicator @<TRIPOS>MOLECULE_extra_text\n is not supported. Skipping current section and moving to the next RTI header.",
):
Topology.load(get_fn("broken.mol2"))
with pytest.warns(
UserWarning,
match=r"This mol2 file has two boxes to be read in, only reading in one with dimensions Box\(a=0.72",
):
Topology.load(get_fn("broken.mol2"))
|
py | 1a50d7002b8c5708af74cc4a5088b2f90b398dbd | from . import common
import pandas as pd
import os
FILENAME_ATTR = 'Filename'
VOLUME_ATTR = 'Volume'
URL_ATTR = 'Mirror'
class NoiseDownloader:
    """Downloads noise files listed in a CSV index into a local directory
    and records the file paths and their volumes in a shared context dict.
    """

    def __init__(
            self,
            output_files_key,
            output_volumes_key,
            data,
            download_directory):
        self.output_files_key = output_files_key  # context key for downloaded file paths
        self.output_volumes_key = output_volumes_key  # context key for per-file volumes
        self.data = data  # path to the CSV index file
        self.download_directory = download_directory  # where files are saved
def execute(self, context):
output_files = context[self.output_files_key] = []
output_volumes = context[self.output_volumes_key] = []
common.create_directory(self.download_directory)
data = pd.read_csv(self.data)
for index, row in data.iterrows():
output_volumes.append(float(row[VOLUME_ATTR]))
output_file = os.path.join(
self.download_directory, row[FILENAME_ATTR])
output_files.append(output_file)
common.fetch(row[URL_ATTR], output_file)
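# Hypothetical usage sketch (CSV columns per the constants above: Filename,
# Volume, Mirror; the keys and paths below are illustrative):
#
#     downloader = NoiseDownloader(
#         output_files_key='noise_files',
#         output_volumes_key='noise_volumes',
#         data='noise_index.csv',
#         download_directory='downloads/noise',
#     )
#     context = {}
#     downloader.execute(context)
#     # context['noise_files'] and context['noise_volumes'] are parallel lists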
|
py | 1a50d75850db565e8954ff2823c35457d4d21ab5 | """
cwpair2.py
Takes a list of called peaks on both strands and produces a list of matched pairs and a list
of unmatched orphans using a specified method for finding matched pairs. Methods for finding
matched pairs are mode, closest, largest or all, where the analysis is run for each method
Input: list of one or more gff format files
Output: files produced for each input/mode combination:
MP (matched_pair), D (details), O (orphans), P (frequency preview plot), F (frequency final plot),
C (statistics graph), statistics.tabular
"""
import argparse
import csv
import cwpair2_util
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--input', dest='inputs', action='append', nargs=2, help="Input datasets")
parser.add_argument('--method', dest='method', default='mode', help='Method of finding match.')
parser.add_argument('--up_distance', dest='up_distance', type=int, default=50, help='Distance upstream from a pair.')
parser.add_argument('--down_distance', dest='down_distance', type=int, default=100, help='Distance downstream of a pair.')
parser.add_argument('--binsize', dest='binsize', type=int, default=1, help='Width of bins for plots and mode.')
    parser.add_argument('--threshold_format', dest='threshold_format', help='Format of the threshold: relative or absolute.')
parser.add_argument('--relative_threshold', dest='relative_threshold', type=float, default=0.0, help='Percentage to filter the 95th percentile.')
parser.add_argument('--absolute_threshold', dest='absolute_threshold', type=float, default=0.0, help='Absolute value to filter.')
parser.add_argument('--output_files', dest='output_files', default='matched_pair', help='Restrict output dataset collections.')
parser.add_argument('--statistics_output', dest='statistics_output', help='Statistics output file.')
args = parser.parse_args()
cwpair2_util.create_directories()
statistics = []
if args.absolute_threshold > 0:
threshold = args.absolute_threshold
elif args.relative_threshold > 0:
threshold = args.relative_threshold / 100.0
else:
threshold = 0
for (dataset_path, hid) in args.inputs:
stats = cwpair2_util.process_file(dataset_path,
hid,
args.method,
threshold,
args.up_distance,
args.down_distance,
args.binsize,
args.output_files)
statistics.extend(stats)
# Accumulate statistics.
by_file = {}
for stats in statistics:
# Skip "None" statistics from failed files
if not stats:
continue
path = stats['stats_path']
if path not in by_file:
by_file[path] = []
by_file[path].append(stats)
# Write tabular statistics file.
keys = ['fname', 'final_mode', 'preview_mode', 'perc95', 'paired', 'orphans']
    with open(args.statistics_output, 'wt') as stats_file:
        statistics_out = csv.writer(stats_file, delimiter='\t', lineterminator="\n")
        statistics_out.writerow(keys)
        for file_path, statistics in by_file.items():
            for stats in statistics:
                statistics_out.writerow([stats[key] for key in keys])
|
py | 1a50d9aba4dfec0fdfb30fe9714160b17508e82b | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import http.client
from oslo_serialization import jsonutils
from keystone.common.policies import base as bp
from keystone.common import provider_api
import keystone.conf
from keystone.tests.common import auth as common_auth
from keystone.tests import unit
from keystone.tests.unit import base_classes
from keystone.tests.unit import ksfixtures
from keystone.tests.unit.ksfixtures import temporaryfile
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
class _SystemUserProjectEndpointTests(object):
"""Common default functionality for all system users."""
def test_user_can_list_projects_for_endpoint(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
r = c.get('/v3/OS-EP-FILTER/endpoints/%s/projects'
% endpoint['id'],
headers=self.headers)
for project_itr in r.json['projects']:
self.assertIn(project['id'], project_itr['id'])
def test_user_can_check_endpoint_in_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.get('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT)
def test_user_can_list_endpoints_for_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
r = c.get('/v3/OS-EP-FILTER/projects/%s/endpoints' % project['id'],
headers=self.headers)
for endpoint_itr in r.json['endpoints']:
self.assertIn(endpoint['id'], endpoint_itr['id'])
class _SystemReaderAndMemberProjectEndpointTests(object):
def test_user_cannot_add_endpoint_to_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
with self.test_client() as c:
c.put('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
def test_user_cannot_remove_endpoint_from_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
with self.test_client() as c:
c.delete('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
class _DomainAndProjectUserProjectEndpointTests(object):
def test_user_cannot_list_projects_for_endpoint(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.get('/v3/OS-EP-FILTER/endpoints/%s/projects' % endpoint['id'],
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
def test_user_cannot_check_endpoint_in_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.get('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
def test_user_cannot_list_endpoints_for_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.get('/v3/OS-EP-FILTER/projects/%s/endpoints' % project['id'],
headers=self.headers,
expected_status_code=http.client.FORBIDDEN)
class SystemReaderTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def setUp(self):
super(SystemReaderTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_reader = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.user_id = PROVIDERS.identity_api.create_user(
system_reader
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.user_id, self.bootstrapper.reader_role_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=system_reader['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemMemberTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def setUp(self):
super(SystemMemberTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
system_member = unit.new_user_ref(
domain_id=CONF.identity.default_domain_id
)
self.user_id = PROVIDERS.identity_api.create_user(
system_member
)['id']
PROVIDERS.assignment_api.create_system_grant_for_user(
self.user_id, self.bootstrapper.member_role_id
)
auth = self.build_authentication_request(
user_id=self.user_id, password=system_member['password'],
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class SystemAdminTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_SystemUserProjectEndpointTests):
def setUp(self):
super(SystemAdminTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
# Reuse the system administrator account created during
# ``keystone-manage bootstrap``
self.user_id = self.bootstrapper.admin_user_id
auth = self.build_authentication_request(
user_id=self.user_id,
password=self.bootstrapper.admin_password,
system=True
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
def test_user_can_add_endpoint_to_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
with self.test_client() as c:
c.put('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT)
def test_user_can_remove_endpoint_from_project(self):
project = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex,
unit.new_project_ref(domain_id=CONF.identity.default_domain_id)
)
service = PROVIDERS.catalog_api.create_service(
uuid.uuid4().hex, unit.new_service_ref()
)
endpoint = unit.new_endpoint_ref(service['id'], region_id=None)
endpoint = PROVIDERS.catalog_api.create_endpoint(
endpoint['id'], endpoint
)
PROVIDERS.catalog_api.add_endpoint_to_project(
endpoint['id'], project['id'])
with self.test_client() as c:
c.delete('/v3/OS-EP-FILTER/projects/%s/endpoints/%s'
% (project['id'], endpoint['id']),
headers=self.headers,
expected_status_code=http.client.NO_CONTENT)
class DomainUserTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def setUp(self):
super(DomainUserTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
self.domain_id = domain['id']
domain_admin = unit.new_user_ref(domain_id=self.domain_id)
self.user_id = PROVIDERS.identity_api.create_user(domain_admin)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.admin_role_id, user_id=self.user_id,
domain_id=self.domain_id
)
auth = self.build_authentication_request(
user_id=self.user_id,
password=domain_admin['password'],
domain_id=self.domain_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class ProjectUserTests(base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def setUp(self):
super(ProjectUserTests, self).setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
self.user_id = self.bootstrapper.admin_user_id
auth = self.build_authentication_request(
user_id=self.user_id,
password=self.bootstrapper.admin_password,
project_id=self.bootstrapper.project_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
class ProjectUserTestsWithoutEnforceScope(
base_classes.TestCaseWithBootstrap,
common_auth.AuthTestMixin,
_DomainAndProjectUserProjectEndpointTests,
_SystemReaderAndMemberProjectEndpointTests):
def _override_policy(self):
# TODO(cmurphy): Remove this once the deprecated policies in
# keystone.common.policies.project_endpoint have been removed. This is
# only here to make sure we test the new policies instead of the
# deprecated ones. Oslo.policy will OR deprecated policies with new
# policies to maintain compatibility and give operators a chance to
# update permissions or update policies without breaking users. This
# will cause these specific tests to fail since we're trying to correct
# this broken behavior with better scope checking.
with open(self.policy_file_name, 'w') as f:
overridden_policies = {
'identity:list_projects_for_endpoint': bp.SYSTEM_READER,
'identity:add_endpoint_to_project': bp.SYSTEM_ADMIN,
'identity:check_endpoint_in_project': bp.SYSTEM_READER,
'identity:list_endpoints_for_project': bp.SYSTEM_READER,
'identity:remove_endpoint_from_project': bp.SYSTEM_ADMIN
}
f.write(jsonutils.dumps(overridden_policies))
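    # Illustrative effect of the override above (rule names are assumptions,
    # for exposition only): during a deprecation period oslo.policy enforces
    # roughly "new_check or deprecated_check", so project users could still
    # pass; with the override in place, only the system-scoped rule applies.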
def setUp(self):
super(ProjectUserTestsWithoutEnforceScope, self).setUp()
self.loadapp()
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
self.policy_file_name = self.policy_file.file_name
self.useFixture(
ksfixtures.Policy(
self.config_fixture, policy_file=self.policy_file_name
)
)
self._override_policy()
        # Explicitly set enforce_scope to False to make sure we maintain
# backwards compatibility with project users.
self.config_fixture.config(group='oslo_policy', enforce_scope=False)
domain = PROVIDERS.resource_api.create_domain(
uuid.uuid4().hex, unit.new_domain_ref()
)
user = unit.new_user_ref(domain_id=domain['id'])
self.user_id = PROVIDERS.identity_api.create_user(user)['id']
self.project_id = PROVIDERS.resource_api.create_project(
uuid.uuid4().hex, unit.new_project_ref(domain_id=domain['id'])
)['id']
PROVIDERS.assignment_api.create_grant(
self.bootstrapper.admin_role_id, user_id=self.user_id,
project_id=self.project_id
)
auth = self.build_authentication_request(
user_id=self.user_id,
password=user['password'],
project_id=self.project_id
)
# Grab a token using the persona we're testing and prepare headers
# for requests we'll be making in the tests.
with self.test_client() as c:
r = c.post('/v3/auth/tokens', json=auth)
self.token_id = r.headers['X-Subject-Token']
self.headers = {'X-Auth-Token': self.token_id}
|
py | 1a50d9c303636f8f362033d519464242b4bb1485 | """The tests for the hassio component."""
from datetime import timedelta
import os
from unittest.mock import patch
import pytest
from homeassistant.auth.const import GROUP_ID_ADMIN
from homeassistant.components import frontend
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.hassio import ADDONS_COORDINATOR, DOMAIN, STORAGE_KEY
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.helpers.device_registry import async_get
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.common import MockConfigEntry, async_fire_time_changed
MOCK_ENVIRON = {"HASSIO": "127.0.0.1", "HASSIO_TOKEN": "abcdefgh"}
@pytest.fixture(autouse=True)
def mock_all(aioclient_mock, request):
"""Mock all setup requests."""
aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
aioclient_mock.get("http://127.0.0.1/supervisor/ping", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"})
aioclient_mock.get(
"http://127.0.0.1/info",
json={
"result": "ok",
"data": {"supervisor": "222", "homeassistant": "0.110.0", "hassos": None},
},
)
aioclient_mock.get(
"http://127.0.0.1/store",
json={
"result": "ok",
"data": {"addons": [], "repositories": []},
},
)
aioclient_mock.get(
"http://127.0.0.1/host/info",
json={
"result": "ok",
"data": {
"result": "ok",
"data": {
"chassis": "vm",
"operating_system": "Debian GNU/Linux 10 (buster)",
"kernel": "4.19.0-6-amd64",
},
},
},
)
aioclient_mock.get(
"http://127.0.0.1/core/info",
json={"result": "ok", "data": {"version_latest": "1.0.0", "version": "1.0.0"}},
)
aioclient_mock.get(
"http://127.0.0.1/os/info",
json={"result": "ok", "data": {"version_latest": "1.0.0", "version": "1.0.0"}},
)
    aioclient_mock.get(
        "http://127.0.0.1/supervisor/info",
        json={
            "result": "ok",
            "data": {
                "version_latest": "1.0.0",
                "version": "1.0.0",
                "addons": [
                    {
                        "name": "test",
                        "slug": "test",
                        "installed": True,
                        "update_available": False,
                        "version": "1.0.0",
                        "version_latest": "1.0.0",
                        "repository": "core",
                        "url": "https://github.com/home-assistant/addons/test",
                    },
                    {
                        "name": "test2",
                        "slug": "test2",
                        "installed": True,
                        "update_available": False,
                        "version": "1.0.0",
                        "version_latest": "1.0.0",
                        "repository": "core",
                        "url": "https://github.com",
                    },
                ],
            },
        },
    )
aioclient_mock.get(
"http://127.0.0.1/addons/test/stats",
json={
"result": "ok",
"data": {
"cpu_percent": 0.99,
"memory_usage": 182611968,
"memory_limit": 3977146368,
"memory_percent": 4.59,
"network_rx": 362570232,
"network_tx": 82374138,
"blk_read": 46010945536,
"blk_write": 15051526144,
},
},
)
aioclient_mock.get(
"http://127.0.0.1/addons/test2/stats",
json={
"result": "ok",
"data": {
"cpu_percent": 0.8,
"memory_usage": 51941376,
"memory_limit": 3977146368,
"memory_percent": 1.31,
"network_rx": 31338284,
"network_tx": 15692900,
"blk_read": 740077568,
"blk_write": 6004736,
},
},
)
aioclient_mock.get(
"http://127.0.0.1/addons/test3/stats",
json={
"result": "ok",
"data": {
"cpu_percent": 0.8,
"memory_usage": 51941376,
"memory_limit": 3977146368,
"memory_percent": 1.31,
"network_rx": 31338284,
"network_tx": 15692900,
"blk_read": 740077568,
"blk_write": 6004736,
},
},
)
aioclient_mock.get("http://127.0.0.1/addons/test/changelog", text="")
aioclient_mock.get(
"http://127.0.0.1/addons/test/info",
json={"result": "ok", "data": {"auto_update": True}},
)
aioclient_mock.get("http://127.0.0.1/addons/test2/changelog", text="")
aioclient_mock.get(
"http://127.0.0.1/addons/test2/info",
json={"result": "ok", "data": {"auto_update": False}},
)
aioclient_mock.get(
"http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}}
)
async def test_setup_api_ping(hass, aioclient_mock):
"""Test setup with API ping."""
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {})
assert result
assert aioclient_mock.call_count == 10
assert hass.components.hassio.get_core_info()["version_latest"] == "1.0.0"
assert hass.components.hassio.is_hassio()
async def test_setup_api_panel(hass, aioclient_mock):
"""Test setup with API ping."""
assert await async_setup_component(hass, "frontend", {})
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {})
assert result
panels = hass.data[frontend.DATA_PANELS]
assert panels.get("hassio").to_response() == {
"component_name": "custom",
"icon": None,
"title": None,
"url_path": "hassio",
"require_admin": True,
"config": {
"_panel_custom": {
"embed_iframe": True,
"js_url": "/api/hassio/app/entrypoint.js",
"name": "hassio-main",
"trust_external": False,
}
},
}
async def test_setup_api_push_api_data(hass, aioclient_mock):
"""Test setup with API push."""
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(
hass, "hassio", {"http": {"server_port": 9999}, "hassio": {}}
)
assert result
assert aioclient_mock.call_count == 10
assert not aioclient_mock.mock_calls[1][2]["ssl"]
assert aioclient_mock.mock_calls[1][2]["port"] == 9999
assert aioclient_mock.mock_calls[1][2]["watchdog"]
async def test_setup_api_push_api_data_server_host(hass, aioclient_mock):
"""Test setup with API push with active server host."""
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(
hass,
"hassio",
{"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
)
assert result
assert aioclient_mock.call_count == 10
assert not aioclient_mock.mock_calls[1][2]["ssl"]
assert aioclient_mock.mock_calls[1][2]["port"] == 9999
assert not aioclient_mock.mock_calls[1][2]["watchdog"]
async def test_setup_api_push_api_data_default(hass, aioclient_mock, hass_storage):
"""Test setup with API push default data."""
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
assert result
assert aioclient_mock.call_count == 10
assert not aioclient_mock.mock_calls[1][2]["ssl"]
assert aioclient_mock.mock_calls[1][2]["port"] == 8123
refresh_token = aioclient_mock.mock_calls[1][2]["refresh_token"]
hassio_user = await hass.auth.async_get_user(
hass_storage[STORAGE_KEY]["data"]["hassio_user"]
)
assert hassio_user is not None
assert hassio_user.system_generated
assert len(hassio_user.groups) == 1
assert hassio_user.groups[0].id == GROUP_ID_ADMIN
assert hassio_user.name == "Supervisor"
for token in hassio_user.refresh_tokens.values():
if token.token == refresh_token:
break
else:
assert False, "refresh token not found"
async def test_setup_adds_admin_group_to_user(hass, aioclient_mock, hass_storage):
"""Test setup with API push default data."""
# Create user without admin
user = await hass.auth.async_create_system_user("Hass.io")
assert not user.is_admin
await hass.auth.async_create_refresh_token(user)
hass_storage[STORAGE_KEY] = {
"data": {"hassio_user": user.id},
"key": STORAGE_KEY,
"version": 1,
}
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
assert result
assert user.is_admin
async def test_setup_migrate_user_name(hass, aioclient_mock, hass_storage):
"""Test setup with migrating the user name."""
# Create user with old name
user = await hass.auth.async_create_system_user("Hass.io")
await hass.auth.async_create_refresh_token(user)
hass_storage[STORAGE_KEY] = {
"data": {"hassio_user": user.id},
"key": STORAGE_KEY,
"version": 1,
}
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
assert result
assert user.name == "Supervisor"
async def test_setup_api_existing_hassio_user(hass, aioclient_mock, hass_storage):
"""Test setup with API push default data."""
user = await hass.auth.async_create_system_user("Hass.io test")
token = await hass.auth.async_create_refresh_token(user)
hass_storage[STORAGE_KEY] = {"version": 1, "data": {"hassio_user": user.id}}
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"http": {}, "hassio": {}})
assert result
assert aioclient_mock.call_count == 10
assert not aioclient_mock.mock_calls[1][2]["ssl"]
assert aioclient_mock.mock_calls[1][2]["port"] == 8123
assert aioclient_mock.mock_calls[1][2]["refresh_token"] == token.token
async def test_setup_core_push_timezone(hass, aioclient_mock):
"""Test setup with API push default data."""
hass.config.time_zone = "testzone"
with patch.dict(os.environ, MOCK_ENVIRON):
result = await async_setup_component(hass, "hassio", {"hassio": {}})
assert result
assert aioclient_mock.call_count == 10
assert aioclient_mock.mock_calls[2][2]["timezone"] == "testzone"
with patch("homeassistant.util.dt.set_default_time_zone"):
await hass.config.async_update(time_zone="America/New_York")
await hass.async_block_till_done()
assert aioclient_mock.mock_calls[-1][2]["timezone"] == "America/New_York"
async def test_setup_hassio_no_additional_data(hass, aioclient_mock):
"""Test setup with API push default data."""
with patch.dict(os.environ, MOCK_ENVIRON), patch.dict(
os.environ, {"HASSIO_TOKEN": "123456"}
):
result = await async_setup_component(hass, "hassio", {"hassio": {}})
assert result
assert aioclient_mock.call_count == 10
assert aioclient_mock.mock_calls[-1][3]["X-Hassio-Key"] == "123456"
async def test_fail_setup_without_environ_var(hass):
"""Fail setup if no environ variable set."""
with patch.dict(os.environ, {}, clear=True):
result = await async_setup_component(hass, "hassio", {})
assert not result
async def test_warn_when_cannot_connect(hass, caplog):
"""Fail warn when we cannot connect."""
with patch.dict(os.environ, MOCK_ENVIRON), patch(
"homeassistant.components.hassio.HassIO.is_connected",
return_value=None,
):
result = await async_setup_component(hass, "hassio", {})
assert result
assert hass.components.hassio.is_hassio()
assert "Not connected with the supervisor / system too busy!" in caplog.text
async def test_service_register(hassio_env, hass):
"""Check if service will be setup."""
assert await async_setup_component(hass, "hassio", {})
assert hass.services.has_service("hassio", "addon_start")
assert hass.services.has_service("hassio", "addon_stop")
assert hass.services.has_service("hassio", "addon_restart")
assert hass.services.has_service("hassio", "addon_update")
assert hass.services.has_service("hassio", "addon_stdin")
assert hass.services.has_service("hassio", "host_shutdown")
assert hass.services.has_service("hassio", "host_reboot")
assert hass.services.has_service("hassio", "host_reboot")
assert hass.services.has_service("hassio", "backup_full")
assert hass.services.has_service("hassio", "backup_partial")
assert hass.services.has_service("hassio", "restore_full")
assert hass.services.has_service("hassio", "restore_partial")
async def test_service_calls(hassio_env, hass, aioclient_mock, caplog):
"""Call service and check the API calls behind that."""
assert await async_setup_component(hass, "hassio", {})
aioclient_mock.post("http://127.0.0.1/addons/test/start", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/addons/test/restart", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/addons/test/update", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/addons/test/stdin", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/host/shutdown", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/host/reboot", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/backups/new/full", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/backups/new/partial", json={"result": "ok"})
aioclient_mock.post(
"http://127.0.0.1/backups/test/restore/full", json={"result": "ok"}
)
aioclient_mock.post(
"http://127.0.0.1/backups/test/restore/partial", json={"result": "ok"}
)
await hass.services.async_call("hassio", "addon_start", {"addon": "test"})
await hass.services.async_call("hassio", "addon_stop", {"addon": "test"})
await hass.services.async_call("hassio", "addon_restart", {"addon": "test"})
await hass.services.async_call("hassio", "addon_update", {"addon": "test"})
await hass.services.async_call(
"hassio", "addon_stdin", {"addon": "test", "input": "test"}
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 8
assert aioclient_mock.mock_calls[-1][2] == "test"
await hass.services.async_call("hassio", "host_shutdown", {})
await hass.services.async_call("hassio", "host_reboot", {})
await hass.async_block_till_done()
assert aioclient_mock.call_count == 10
await hass.services.async_call("hassio", "backup_full", {})
await hass.services.async_call(
"hassio",
"backup_partial",
{
"homeassistant": True,
"addons": ["test"],
"folders": ["ssl"],
"password": "123456",
},
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 12
assert aioclient_mock.mock_calls[-1][2] == {
"homeassistant": True,
"addons": ["test"],
"folders": ["ssl"],
"password": "123456",
}
await hass.services.async_call("hassio", "restore_full", {"slug": "test"})
await hass.async_block_till_done()
await hass.services.async_call(
"hassio",
"restore_partial",
{
"slug": "test",
"homeassistant": False,
"addons": ["test"],
"folders": ["ssl"],
"password": "123456",
},
)
await hass.async_block_till_done()
assert aioclient_mock.call_count == 14
assert aioclient_mock.mock_calls[-1][2] == {
"addons": ["test"],
"folders": ["ssl"],
"homeassistant": False,
"password": "123456",
}
async def test_service_calls_core(hassio_env, hass, aioclient_mock):
"""Call core service and check the API calls behind that."""
assert await async_setup_component(hass, "hassio", {})
aioclient_mock.post("http://127.0.0.1/homeassistant/restart", json={"result": "ok"})
aioclient_mock.post("http://127.0.0.1/homeassistant/stop", json={"result": "ok"})
await hass.services.async_call("homeassistant", "stop")
await hass.async_block_till_done()
assert aioclient_mock.call_count == 4
await hass.services.async_call("homeassistant", "check_config")
await hass.async_block_till_done()
assert aioclient_mock.call_count == 4
with patch(
"homeassistant.config.async_check_ha_config_file", return_value=None
) as mock_check_config:
await hass.services.async_call("homeassistant", "restart")
await hass.async_block_till_done()
assert mock_check_config.called
assert aioclient_mock.call_count == 5
async def test_entry_load_and_unload(hass):
"""Test loading and unloading config entry."""
with patch.dict(os.environ, MOCK_ENVIRON):
config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert SENSOR_DOMAIN in hass.config.components
assert BINARY_SENSOR_DOMAIN in hass.config.components
assert ADDONS_COORDINATOR in hass.data
assert await hass.config_entries.async_unload(config_entry.entry_id)
await hass.async_block_till_done()
assert ADDONS_COORDINATOR not in hass.data
async def test_migration_off_hassio(hass):
"""Test that when a user moves instance off Hass.io, config entry gets cleaned up."""
config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
config_entry.add_to_hass(hass)
assert not await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert hass.config_entries.async_entries(DOMAIN) == []
async def test_device_registry_calls(hass):
"""Test device registry entries for hassio."""
dev_reg = async_get(hass)
supervisor_mock_data = {
"version": "1.0.0",
"version_latest": "1.0.0",
"addons": [
{
"name": "test",
"state": "started",
"slug": "test",
"installed": True,
"icon": False,
"update_available": False,
"version": "1.0.0",
"version_latest": "1.0.0",
"repository": "test",
"url": "https://github.com/home-assistant/addons/test",
},
{
"name": "test2",
"state": "started",
"slug": "test2",
"installed": True,
"icon": False,
"update_available": False,
"version": "1.0.0",
"version_latest": "1.0.0",
"url": "https://github.com",
},
],
}
os_mock_data = {
"board": "odroid-n2",
"boot": "A",
"update_available": False,
"version": "5.12",
"version_latest": "5.12",
}
with patch.dict(os.environ, MOCK_ENVIRON), patch(
"homeassistant.components.hassio.HassIO.get_supervisor_info",
return_value=supervisor_mock_data,
), patch(
"homeassistant.components.hassio.HassIO.get_os_info",
return_value=os_mock_data,
):
config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
assert len(dev_reg.devices) == 5
supervisor_mock_data = {
"version": "1.0.0",
"version_latest": "1.0.0",
"addons": [
{
"name": "test2",
"state": "started",
"slug": "test2",
"installed": True,
"icon": False,
"update_available": False,
"version": "1.0.0",
"version_latest": "1.0.0",
"url": "https://github.com",
},
],
}
    # Test that when an addon is removed, the next update removes its device
    # and subsequent updates won't try to remove it again
with patch(
"homeassistant.components.hassio.HassIO.get_supervisor_info",
return_value=supervisor_mock_data,
), patch(
"homeassistant.components.hassio.HassIO.get_os_info",
return_value=os_mock_data,
):
async_fire_time_changed(hass, dt_util.now() + timedelta(hours=1))
await hass.async_block_till_done()
assert len(dev_reg.devices) == 4
async_fire_time_changed(hass, dt_util.now() + timedelta(hours=2))
await hass.async_block_till_done()
assert len(dev_reg.devices) == 4
supervisor_mock_data = {
"version": "1.0.0",
"version_latest": "1.0.0",
"addons": [
{
"name": "test2",
"slug": "test2",
"state": "started",
"installed": True,
"icon": False,
"update_available": False,
"version": "1.0.0",
"version_latest": "1.0.0",
"url": "https://github.com",
},
{
"name": "test3",
"slug": "test3",
"state": "stopped",
"installed": True,
"icon": False,
"update_available": False,
"version": "1.0.0",
"version_latest": "1.0.0",
"url": "https://github.com",
},
],
}
    # Test that when an addon is added, the next update reloads the entry so
    # we register a new device
with patch(
"homeassistant.components.hassio.HassIO.get_supervisor_info",
return_value=supervisor_mock_data,
), patch(
"homeassistant.components.hassio.HassIO.get_os_info",
return_value=os_mock_data,
):
async_fire_time_changed(hass, dt_util.now() + timedelta(hours=3))
await hass.async_block_till_done()
assert len(dev_reg.devices) == 5
|
py | 1a50d9e633dd3f49b41ae8cf94f40d31e5c419f2 | from django.db import models
from status.models import User
class Score(models.Model):
score = models.FloatField(default=0.0)
username = models.CharField(max_length=10)
def __str__(self):
return "{} scored {}".format(self.username, self.score)
class LevelScore(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
level = models.IntegerField(default=1)
score = models.FloatField(default=0.0)
username = models.CharField(max_length=10)
def __str__(self):
return "{} scored {} on level {}".format(self.user, self.score, self.level)
class Meta:
unique_together = (("user","level"),)
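# Minimal usage sketch (not part of the original app): assuming the custom
# User model from status.models exposes a username attribute, per-level best
# scores could be upserted like this. The helper name record_level_score is
# hypothetical.
#
# def record_level_score(user, level, score):
#     entry, created = LevelScore.objects.get_or_create(
#         user=user, level=level,
#         defaults={"score": score, "username": user.username},
#     )
#     if not created and score > entry.score:
#         entry.score = score  # unique_together guarantees one row per (user, level)
#         entry.save()
#     return entry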
|
py | 1a50d9faf54cf3091af3ca98ee62c54b74b4c1a9 | ################################################################################
# Copyright (C) 2016 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell cop-
# ies of the Software, and to permit persons to whom the Software is furnished
# to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IM-
# PLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNE-
# CTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
################################################################################
# This script only gets called by CMake
from Common import globalParameters, HR, print1, print2, printExit, ensurePath, CHeader, CMakeHeader, assignGlobalParameters, ProgressBar
from SolutionStructs import Solution
import YAMLIO
from SolutionWriter import SolutionWriter
from KernelWriterSource import KernelWriterSource
from KernelWriterAssembly import KernelWriterAssembly
import os
import os.path
import argparse
import sys
from shutil import copy as shutil_copy
################################################################################
# Write Solutions and Kernels for BenchmarkClient or LibraryClient
################################################################################
def writeSolutionsAndKernels(outputPath, solutions, kernels, kernelsBetaOnly, \
solutionWriter, kernelWriterSource, kernelWriterAssembly):
print1("# Writing Solutions and Kernels")
if not globalParameters["MergeFiles"]:
ensurePath(os.path.join(outputPath, "Solutions"))
ensurePath(os.path.join(outputPath, "Kernels"))
progressBar = ProgressBar(len(solutions)+len(kernels))
##############################################################################
# Write Solutions
##############################################################################
if globalParameters["MergeFiles"]:
solutionSourceFile = open(os.path.join(outputPath, \
"Solutions.cpp"), "w")
solutionHeaderFile = open(os.path.join(outputPath, \
"Solutions.h"), "w")
if globalParameters["MergeFiles"]:
solutionSourceFile.write(CHeader)
solutionHeaderFile.write(CHeader)
solutionSourceFile.write("#include \"Solutions.h\"\n")
solutionHeaderFile.write("#include \"TensileTypes.h\"\n")
solutionHeaderFile.write("#include \"Kernels.h\"\n")
solutionHeaderFile.write("#include \"SolutionHelper.h\"\n")
solutionHeaderFile.write("#include \"Tools.h\"\n")
for solution in solutions:
# get solution name
if not globalParameters["MergeFiles"]:
solutionFileName = solutionWriter.getSolutionName(solution)
# write solution.cpp
if not globalParameters["MergeFiles"]:
solutionSourceFile = open(os.path.join(outputPath, \
"Solutions", solutionFileName+".cpp"), "w")
solutionSourceFile.write(CHeader)
solutionSourceFile.write( \
solutionWriter.getSourceFileString(solution))
if not globalParameters["MergeFiles"]:
solutionSourceFile.close()
# write solution.h
if not globalParameters["MergeFiles"]:
solutionHeaderFile = open(os.path.join(outputPath, \
"Solutions", solutionFileName+".h"), "w")
solutionHeaderFile.write(CHeader)
solutionHeaderFile.write( \
solutionWriter.getHeaderFileString(solution))
if not globalParameters["MergeFiles"]:
solutionHeaderFile.close()
progressBar.increment()
  # close merged
  if globalParameters["MergeFiles"]:
    solutionSourceFile.close()
    solutionHeaderFile.close()
##############################################################################
# Write Kernels
##############################################################################
if globalParameters["MergeFiles"]:
kernelSourceFile = open(os.path.join(outputPath, \
"Kernels.cpp"), "w")
kernelHeaderFile = open(os.path.join(outputPath, \
"Kernels.h"), "w")
kernelSourceFile.write(CHeader)
kernelHeaderFile.write(CHeader)
kernelSourceFile.write("#include \"Kernels.h\"\n")
kernelHeaderFile.write("#pragma once\n")
if globalParameters["RuntimeLanguage"] == "HIP":
kernelHeaderFile.write("#define HCC_ENABLE_ACCELERATOR_PRINTF\n\n")
kernelHeaderFile.write("#include <hip/hip_runtime.h>\n")
kernelHeaderFile.write("#include \"TensileTypes.h\"\n")
kernelHeaderFile.write("#include \"KernelHeader.h\"\n")
else:
kernelHeaderFile.write("#include <string>\n")
# tensor contraction kernels
for kernel in kernels:
kernelWriter = kernelWriterSource if kernel["KernelLanguage"] == "Source" else kernelWriterAssembly
# get kernel name
if not globalParameters["MergeFiles"]:
kernelName = kernelWriter.getKernelName(kernel)
# write kernel.cpp
if not globalParameters["MergeFiles"]:
kernelSourceFile = open(os.path.join(outputPath, \
"Kernels", kernelName+".cpp"), "w")
kernelSourceFile.write(CHeader)
kernelSourceFile.write( kernelWriter.getSourceFileString(kernel))
if not globalParameters["MergeFiles"]:
kernelSourceFile.close()
# write kernel.h
if not globalParameters["MergeFiles"]:
kernelHeaderFile = open(os.path.join(outputPath, \
"Kernels", kernelName+".h"), "w")
kernelHeaderFile.write(CHeader)
kernelHeaderFile.write( kernelWriter.getHeaderFileString(kernel))
if not globalParameters["MergeFiles"]:
kernelHeaderFile.close()
progressBar.increment()
# beta-only kernels
for kernel in kernelsBetaOnly:
kernelWriter = kernelWriterSource
kernelName = kernelWriter.getKernelNameBetaOnly(kernel)
# write kernel.cpp
if not globalParameters["MergeFiles"]:
kernelSourceFile = open(os.path.join(outputPath, \
"Kernels", kernelName+".cpp"), "w")
kernelSourceFile.write(CHeader)
kernelSourceFile.write( kernelWriter.getSourceFileStringBetaOnly(kernel))
if not globalParameters["MergeFiles"]:
kernelSourceFile.close()
# write kernel.h
if not globalParameters["MergeFiles"]:
kernelHeaderFile = open(os.path.join(outputPath, \
"Kernels", kernelName + ".h"), "w")
kernelHeaderFile.write(CHeader)
kernelHeaderFile.write( kernelWriter.getHeaderFileStringBetaOnly(kernel))
if not globalParameters["MergeFiles"]:
kernelHeaderFile.close()
# close merged
if globalParameters["MergeFiles"]:
kernelHeaderFile.close()
################################################################################
# Write Logic
################################################################################
def writeLogic(outputPath, logicData, solutionWriter ):
print1("# Writing Library Logic")
if not globalParameters["MergeFiles"]:
ensurePath(os.path.join(outputPath, "Logic"))
# Tensile.h
h = ""
h += "#pragma once\n"
h += "#include \"TensileTypes.h\"\n"
# TensileInternal.h
ih = ""
ih += "#include \"Tensile.h\"\n"
ih += "#include \"SolutionHelper.h\"\n"
if globalParameters["SolutionMapHash"]:
ih += "#include <unordered_map>\n"
else:
ih += "#include <map>\n"
ih += "#include <tuple>\n"
# problem type Key
problemSizeTemplate = "unsigned int, unsigned int, unsigned int"
if globalParameters["RuntimeLanguage"] == "OCL":
problemSizeTemplate += ", cl_command_queue"
ih += "typedef std::tuple<%s> ProblemSizeKey;\n" \
% (problemSizeTemplate)
# hash function
ih += "\n"
ih += "size_t tensileProblemSizeHasher( const ProblemSizeKey & problemSize ) {\n"
ih += " size_t hash = 0;\n"
ih += " // ignore lowest 4 bits; keep next 21 bits\n"
ih += " size_t hash0 = (std::get<0>(problemSize) >> 4) & ((1<<22)-1); // 21 bits of size0\n"
ih += " size_t hash1 = (std::get<1>(problemSize) >> 4) & ((1<<22)-1); // 21 bits of size1\n"
ih += " size_t hashU = (std::get<2>(problemSize) >> 4) & ((1<<22)-1); // 21 bits of sizeU\n"
ih += " // 21+21+21 = 63 bit hash\n"
ih += " hash |= hash0;\n"
ih += " hash |= hash1<<21;\n"
ih += " hash |= hashU<<42;\n"
ih += " return hash;\n"
ih += "}\n"
ih += "\n"
# Tensile.cpp
s = ""
s += "#include \"Tensile.h\"\n"
s += "#include \"TensileInternal.h\"\n"
s += "#include \"Solutions.h\"\n"
########################################
# problemType
for problemType in logicData:
# function argument list
argListSizes = solutionWriter.getArgList(problemType, False, False, False)
argListStream = solutionWriter.getArgList(problemType, False, False, True)
argListData = solutionWriter.getArgList(problemType, True, True, True)
# declare tensile_ProblemType
h += "\n// enqueue solution\n"
h += "TensileStatus tensile_%s(\n" % problemType
for i in range(0, len(argListData)):
h += " %s %s%s" \
% (argListData[i][0], argListData[i][1], \
",\n" if i < len(argListData)-1 else ");\n\n")
# declare TensileSolutionPointer_ProblemType
h += "\n// solution pointer\n"
h += "typedef TensileStatus (*TensileSolutionPointer_%s)(\n" \
% problemType
for i in range(0, len(argListData)):
h += " %s %s%s" % (argListData[i][0], argListData[i][1], ",\n" \
if i < len(argListData)-1 else ");\n\n")
# declare tensileGetSolutionPointer_ProblemType
h += "\n// get solution pointer\n"
h += "TensileSolutionPointer_%s tensileGetSolutionPointer_%s(\n" \
% (problemType, problemType)
for i in range(0, len(argListStream)):
h += " %s %s%s" \
% (argListStream[i][0], argListStream[i][1], \
",\n" if i < len(argListStream)-1 else ");\n\n")
# declare tensileName_
h += "// get solution name\n"
h += "const char * tensileGetSolutionName_%s(\n" \
% (problemType)
for i in range(0, len(argListStream)):
h += " %s %s%s" \
% (argListStream[i][0], argListStream[i][1], \
",\n" if i < len(argListStream)-1 else ");\n\n")
# get solution naming for problem type
solutionsForProblemType = []
for scheduleTuple in logicData[problemType]:
solutionsForSchedule = scheduleTuple[2]
for solution in solutionsForSchedule:
if solution not in solutionsForProblemType:
solutionsForProblemType.append(solution)
# solution names for problem type
solutionNamesForProblemType = []
for solution in solutionsForProblemType:
solutionName = solutionWriter.getSolutionName(solution)
solutionNamesForProblemType.append(solutionName)
# reset problemType source
if not globalParameters["MergeFiles"]:
filePrefix = "Tensile_%s" % (problemType)
s = "#include \"Tensile.h\"\n"
s += "#include \"TensileInternal.h\"\n"
for solutionName in solutionNamesForProblemType:
s += "#include \"%s.h\"\n" % solutionName
########################################
# implement per-Schedule functions in source
s += "/*******************************************************************************\n * Per-Schedule Functions\n *******************************************************************************/"
for scheduleTuple in logicData[problemType]:
# get logic parameters for problem type
scheduleName = scheduleTuple[0]
deviceNames = scheduleTuple[1]
solutionsForSchedule = scheduleTuple[2]
indexOrder = scheduleTuple[3]
exactLogic = scheduleTuple[4]
rangeLogic = scheduleTuple[5]
# solution names for schedule
solutionNamesForSchedule = []
for solution in solutionsForSchedule:
solutionName = solutionWriter.getSolutionName(solution)
solutionNamesForSchedule.append(solutionName)
# function tensileGetSolutionPointerUncached_Schedule_ProblemType
s += "\n// problem size -> solution logic\n"
s += "TensileSolutionPointer_%s tensileGetSolutionPointerUncached_%s_%s(\n" \
% (problemType, scheduleName, problemType)
for i in range(0, len(argListSizes)):
s += " %s %s%s" \
% (argListSizes[i][0], argListSizes[i][1], \
",\n" if i < len(argListSizes)-1 else ") {\n\n")
exactLogicStr = writeExactLogic(exactLogic, \
solutionNamesForSchedule, True)
if rangeLogic != None:
rangeLogicStr = writeRangeLogicRec(0, indexOrder, rangeLogic, \
solutionNamesForSchedule, problemType, True)
else:
rangeLogicStr = " return NULL; // none\n"
s += " /* exact mappings */\n"
s += exactLogicStr
s += "\n /* range mappings */\n"
s += rangeLogicStr
s += "\n}\n"
# function tensileGetSolutionName_Schedule_ProblemType
s += "\n// get solution name for problem size\n"
s += "const char * tensileGetSolutionName_%s_%s(\n" \
% (scheduleName, problemType)
for i in range(0, len(argListSizes)):
s += " %s %s%s" \
% (argListSizes[i][0], argListSizes[i][1], \
",\n" if i < len(argListSizes)-1 else ") {\n\n")
exactLogicStr = writeExactLogic(exactLogic, \
solutionNamesForSchedule, False)
if rangeLogic != None:
rangeLogicStr = writeRangeLogicRec(0, indexOrder, rangeLogic, \
solutionNamesForSchedule, problemType, False)
else:
rangeLogicStr = " return NULL; // none\n"
s += " /* exact mappings */\n"
s += exactLogicStr
s += "\n /* range mappings */\n"
s += rangeLogicStr
s += "\n}\n"
########################################
# implement problem-type functions in source
s += "/*******************************************************************************\n * Per-ProblemType Functions\n *******************************************************************************/"
if globalParameters["SolutionMapHash"]:
ih += "typedef std::unordered_map<ProblemSizeKey, TensileSolutionPointer_%s, std::function<size_t (ProblemSizeKey)>> Map_%s;\n" \
% (problemType, problemType )
else:
ih += "typedef std::map<ProblemSizeKey, TensileSolutionPointer_%s> Map_%s;\n" \
% (problemType, problemType)
ih += "extern Map_%s solutionMap_%s;\n" % (problemType, problemType)
# implement tensileGetSolutionPointerUncached_ProblemType
for ptr in [True, False]:
returnType = "PointerUncached" if ptr else "Name"
s += "\n// return solution %s\n" % returnType
s += ("TensileSolutionPointer_%s "%problemType) if ptr else "const char *"
s += "tensileGetSolution%s_%s(\n" \
% (returnType, problemType)
for i in range(0, len(argListStream)):
s += " %s %s%s" \
% (argListStream[i][0], argListStream[i][1], \
",\n" if i < len(argListStream)-1 else ") {\n")
# choose from schedules based on device name
# print logicData
schedules = logicData[problemType]
numSchedules = len(schedules)
if numSchedules > 1:
reordered_schedules = []
for scheduleIdx in range(0, numSchedules):
schedule = schedules[scheduleIdx]
deviceNames = schedule[1]
if deviceNames != ["fallback"]:
reordered_schedules.append(schedule)
for scheduleIdx in range(0, numSchedules):
schedule = schedules[scheduleIdx]
deviceNames = schedule[1]
if deviceNames == ["fallback"]:
reordered_schedules.append(schedule)
# get device name
if globalParameters["RuntimeLanguage"] == "OCL":
s += "get device name opencl;\n"
else:
s += "\n// get device name hip;\n"
s += " int deviceId;\n"
s += " hipCtxGetDevice(&deviceId);\n"
s += " hipDeviceProp_t deviceProperties;\n"
s += " hipGetDeviceProperties(&deviceProperties, deviceId);\n"
s += " std::string name = deviceProperties.name;\n"
s += "\n "
for scheduleIdx in range(0, numSchedules):
schedule = reordered_schedules[scheduleIdx]
scheduleName = schedule[0]
deviceNames = schedule[1]
if scheduleIdx > 0:
s += " else "
          if scheduleIdx < numSchedules-1:
            s += "if ("
            for deviceNameIdx in range(0, len(deviceNames)):
              deviceName = deviceNames[deviceNameIdx]
              if deviceNameIdx > 0:
                s += " || "
              s += "name == \"%s\"" % deviceName
            s += ")"
s += "{\n"
s += " return tensileGetSolution%s_%s_%s(" \
% ( returnType, scheduleName, problemType)
for i in range(0, len(argListSizes)):
s += "%s%s" \
% (argListSizes[i][1],
", " if i < len(argListSizes)-1 else ");\n")
s += " }\n"
else: # == 1
schedule = schedules[0]
scheduleName = schedule[0]
s += " return tensileGetSolution%s_%s_%s(" \
% ( returnType, scheduleName, problemType)
for i in range(0, len(argListSizes)):
s += "%s%s" \
% (argListSizes[i][1],
", " if i < len(argListSizes)-1 else ");\n")
s += "\n}\n"
# implement tensileGetSolutionPointer_ProblemType
s += "\n// return solution pointer; user calls it\n"
s += "Map_%s solutionMap_%s%s;\n" % (problemType, problemType, "(1024, tensileProblemSizeHasher)" if globalParameters["SolutionMapHash"] else "")
s += "TensileSolutionPointer_%s tensileGetSolutionPointer_%s(\n" \
% (problemType, problemType)
for i in range(0, len(argListStream)):
s += " %s %s%s" \
% (argListStream[i][0], argListStream[i][1], \
",\n" if i < len(argListStream)-1 else ") {\n")
# create key
s += " ProblemSizeKey key = std::make_tuple( size%s, size%s, size%s%s );\n" \
% ( \
globalParameters["IndexChars"][problemType["Index0"]], \
globalParameters["IndexChars"][problemType["Index1"]], \
globalParameters["IndexChars"][problemType["IndexUnroll"]], \
", stream" if globalParameters["RuntimeLanguage"] == "OCL" else "")
# check for key in map
s += " static std::mutex findKernelMutex;\n"
s += " std::lock_guard<std::mutex> findKernelLock(findKernelMutex);\n"
s += " Map_%s::iterator iter = solutionMap_%s.find(key);\n" \
% (problemType, problemType)
s += " if (iter != solutionMap_%s.end()) {\n" % problemType
s += " return iter->second;\n"
s += " } else {\n"
s += " TensileSolutionPointer_%s ptr = tensileGetSolutionPointerUncached_%s(\n" \
% (problemType, problemType)
for i in range(0, len(argListStream)):
s += " %s%s" \
% (argListStream[i][1], "," if i < len(argListStream)-1 else ");")
s += "\n"
s += " solutionMap_%s[key] = ptr;\n" % problemType
s += " return ptr;\n"
s += " }\n"
s += "}\n"
# declare tensile_ProblemType
s += "\n// main call to solution; enqueues a kernel\n"
s += "TensileStatus tensile_%s(\n" % problemType
for i in range(0, len(argListData)):
s += " %s %s%s" \
% (argListData[i][0], argListData[i][1], \
",\n" if i < len(argListData)-1 else ") {\n")
s += " TensileSolutionPointer_%s ptr = tensileGetSolutionPointer_%s(\n" \
% (problemType, problemType)
for i in range(0, len(argListStream)):
s += " %s%s" \
% (argListStream[i][1], ", " if i < len(argListStream)-1 else ");")
s += "\n"
s += " if ( ptr ) {\n"
s += " return ptr("
for i in range(0, len(argListData)):
s += "%s%s" \
% (argListData[i][1], ", " if i < len(argListData)-1 else ");\n")
s += " } else {\n"
s += " return tensileStatusFailure; // no solution found\n"
s += " }\n"
s += "}\n"
# open and close problemType files
if not globalParameters["MergeFiles"]:
logicSourceFile = open(os.path.join(outputPath, "Logic", \
"%s.cpp" % filePrefix), "w")
logicSourceFile.write(s)
logicSourceFile.close()
# close merged files
if globalParameters["MergeFiles"]:
logicSourceFile = open(os.path.join(outputPath, \
"Tensile.cpp"), "w")
logicSourceFile.write(s)
logicSourceFile.close()
logicHeaderFile = open(os.path.join(outputPath, \
"Tensile.h"), "w")
logicHeaderFile.write(h)
logicHeaderFile.close()
internalHeaderFile = open(os.path.join(outputPath, \
"TensileInternal.h"), "w")
internalHeaderFile.write(ih)
internalHeaderFile.close()
################################################################################
# Write Range Logic Recursive
################################################################################
def writeExactLogic(exactLogic, solutionNames, ptr):
s = ""
indent = " "
for ruleIdx in range(0, len(exactLogic)):
rule = exactLogic[ruleIdx]
problemSize = rule[0]
solutionIdx = rule[1][0]
solutionGFlops = rule[1][1]
s += indent
if ruleIdx > 0:
s += "else "
s += "if ("
s += " size%s == %u " % (globalParameters["IndexChars"][0], problemSize[0])
for i in range(1, len(problemSize)):
s += "&& size%s == %u " % (globalParameters["IndexChars"][i], \
problemSize[i])
solutionName = solutionNames[solutionIdx]
if ptr:
returnValue = solutionName
else:
returnValue = "\"%s~\"" % solutionName
s += ") return %s; // %.0f GFlop/s\n" % (returnValue, solutionGFlops)
return s
################################################################################
# Write Range Logic Recursive
################################################################################
def writeRangeLogicRec(depth, indexOrder, rangeLogic, solutionNames, \
problemType, ptr):
indexChars = globalParameters["IndexChars"]
indent = " "
indent += " "*depth
s = ""
lowestLevel = depth == len(indexOrder)-1
numRules = len(rangeLogic)
for ruleIdx in range(0, numRules):
rule = rangeLogic[ruleIdx]
threshold = rule[0]
if lowestLevel:
solutionIdx = rule[1]
solutionName = solutionNames[solutionIdx]
if ptr:
returnValue = solutionName
else:
returnValue = "\"%s\"" % solutionName
if threshold > 0:
s += "%sif (size%s <= %u) return %s;\n" \
% (indent, indexChars[indexOrder[depth]], threshold, returnValue)
else:
s += "%sreturn %s;\n" % (indent, returnValue)
else:
if threshold > 0:
s += "%sif (size%s <= %u) {\n" \
% (indent, indexChars[indexOrder[depth]], threshold)
else:
s += "%s{\n" % (indent)
s += writeRangeLogicRec(depth+1, indexOrder, rule[1], solutionNames, \
problemType, ptr)
s += "%s}\n" % (indent)
return s
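# Illustrative example (the names Solution0/Solution1 and the rangeLogic literal
# are made up): called with indexOrder=[0, 1] and
# rangeLogic=[[1024, [[512, 0], [0, 1]]], [0, [[0, 1]]]], the recursion above
# emits C++ of roughly this shape:
#
#   if (sizeI <= 1024) {
#    if (sizeJ <= 512) return Solution0;
#    return Solution1;
#   }
#   {
#    return Solution1;
#   }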
################################################################################
# Write Solution Call
################################################################################
def writeSolutionCall(solutionName, problemType):
indexChars = globalParameters["IndexChars"]
s = ""
s += "%s(" % solutionName
# solution parameters
s += " dataC, dataA, dataB, alpha"
if problemType["UseBeta"]:
s += ", beta"
s += ", offsetC, offsetA, offsetB"
firstStride = 1
if problemType["UseInitialStrides"]:
firstStride = 0
lastStrideC = problemType["NumIndicesC"]
lastStrideA = len(problemType["IndexAssignmentsA"])
lastStrideB = len(problemType["IndexAssignmentsB"])
for i in range(firstStride,lastStrideC):
s += ", strideC%u%s" % (i, indexChars[i])
for i in range(firstStride,lastStrideA):
s += ", strideA%u%s" % (i, \
indexChars[problemType["IndexAssignmentsA"][i]])
for i in range(firstStride,lastStrideB):
s += ", strideB%u%s" % (i, \
indexChars[problemType["IndexAssignmentsB"][i]])
for i in range(0, problemType["TotalIndices"]):
s += ", size%s" % indexChars[i]
s += ", stream, numInputEvents, inputEvents, outputEvent )"
return s
################################################################################
# Write CMake
################################################################################
def writeCMake(outputPath, solutions, kernels, libraryStaticFiles, clientName ):
print1("# Writing Custom CMake")
##############################################################################
# Min Naming
##############################################################################
if globalParameters["ShortNames"] and not globalParameters["MergeFiles"] :
solutionSerialNaming = Solution.getSerialNaming(solutions)
kernelSerialNaming = Solution.getSerialNaming(kernels)
else:
solutionSerialNaming = None
kernelSerialNaming = None
solutionMinNaming = Solution.getMinNaming(solutions)
kernelMinNaming = Solution.getMinNaming(kernels)
solutionWriter = SolutionWriter( \
solutionMinNaming, solutionSerialNaming, \
kernelMinNaming, kernelSerialNaming)
kernelWriterSource = KernelWriterSource( \
kernelMinNaming, kernelSerialNaming)
kernelWriterAssembly = KernelWriterAssembly( \
kernelMinNaming, kernelSerialNaming)
generatedFile = open(os.path.join(outputPath, "Generated.cmake"), "w")
generatedFile.write(CMakeHeader)
generatedFile.write("set( TensileClient_SOLUTIONS\n")
# write solution names
if globalParameters["MergeFiles"]:
generatedFile.write(" ${CMAKE_SOURCE_DIR}/Solutions.h\n")
generatedFile.write(" ${CMAKE_SOURCE_DIR}/Solutions.cpp\n")
else:
for solution in solutions:
solutionName = solutionWriter.getSolutionName(solution)
generatedFile.write(" ${CMAKE_SOURCE_DIR}/Solutions/%s.h\n" \
% (solutionName) )
generatedFile.write(" ${CMAKE_SOURCE_DIR}/Solutions/%s.cpp\n" \
% (solutionName) )
generatedFile.write(" )\n")
# write kernel names
generatedFile.write("set( TensileClient_KERNELS\n")
if globalParameters["MergeFiles"]:
generatedFile.write(" ${CMAKE_SOURCE_DIR}/Kernels.h\n")
generatedFile.write(" ${CMAKE_SOURCE_DIR}/Kernels.cpp\n")
else:
for kernel in kernels:
kernelName = kernelWriterSource.getKernelName(kernel) if kernel["KernelLanguage"] == "Source" else kernelWriterAssembly.getKernelName(kernel)
generatedFile.write(" ${CMAKE_SOURCE_DIR}/Kernels/%s.h\n" % (kernelName))
generatedFile.write(" ${CMAKE_SOURCE_DIR}/Kernels/%s.cpp\n" % kernelName)
generatedFile.write(" )\n")
generatedFile.write("set( TensileClient_SOURCE\n")
for fileName in libraryStaticFiles:
# copy file
shutil_copy( os.path.join(globalParameters["SourcePath"], fileName), \
outputPath )
# add file to cmake
generatedFile.write(" ${CMAKE_SOURCE_DIR}/%s\n" % fileName)
generatedFile.write(" )\n\n")
# close generated cmake
generatedFile.close()
################################################################################
# Tensile Create Library
################################################################################
def TensileCreateLibrary():
print1("")
print1(HR)
print1("# Tensile Create Library")
print2(HR)
print2("")
##############################################################################
# Parse Command Line Arguments
##############################################################################
print2("Arguments: %s" % sys.argv)
argParser = argparse.ArgumentParser()
argParser.add_argument("LogicPath", help="Path to LibraryLogic.yaml files.")
argParser.add_argument("OutputPath", help="Where to write library files?")
argParser.add_argument("RuntimeLanguage", help="Which runtime language?", \
choices=["OCL", "HIP", "HSA"])
argParser.add_argument("--merge-files", dest="MergeFiles", \
action="store_true")
argParser.add_argument("--no-merge-files", dest="MergeFiles", \
action="store_false")
argParser.add_argument("--short-file-names", dest="ShortNames", \
action="store_true")
argParser.add_argument("--no-short-file-names", dest="ShortNames", \
action="store_false")
argParser.add_argument("--library-print-debug", dest="LibraryPrintDebug", \
action="store_true")
argParser.add_argument("--no-library-print-debug", dest="LibraryPrintDebug", \
action="store_false")
args = argParser.parse_args()
logicPath = args.LogicPath
outputPath = args.OutputPath
print2("OutputPath: %s" % outputPath)
ensurePath(outputPath)
arguments = {}
arguments["RuntimeLanguage"] = args.RuntimeLanguage
arguments["MergeFiles"] = args.MergeFiles
arguments["ShortNames"] = args.ShortNames
arguments["LibraryPrintDebug"] = args.LibraryPrintDebug
assignGlobalParameters(arguments)
if not os.path.exists(logicPath):
printExit("LogicPath %s doesn't exist" % logicPath)
logicFiles = [os.path.join(logicPath, f) for f in os.listdir(logicPath) \
if (os.path.isfile(os.path.join(logicPath, f)) \
and os.path.splitext(f)[1]==".yaml")]
print1("# LibraryLogicFiles:" % logicFiles)
for logicFile in logicFiles:
print1("# %s" % logicFile)
##############################################################################
# Parse config files
##############################################################################
solutions = []
logicData = {} # keys are problemTypes, values are schedules
for logicFileName in logicFiles:
(scheduleName, deviceNames, problemType, solutionsForSchedule, \
indexOrder, exactLogic, rangeLogic) \
= YAMLIO.readLibraryLogicForSchedule(logicFileName)
if problemType not in logicData:
logicData[problemType] = []
logicData[problemType].append((scheduleName, deviceNames, \
solutionsForSchedule, indexOrder, exactLogic, rangeLogic ))
for solution in solutionsForSchedule:
if solution not in solutions:
solutions.append(solution)
# create solution writer and kernel writer
kernels = []
kernelsBetaOnly = []
for solution in solutions:
solutionKernels = solution.getKernels()
for kernel in solutionKernels:
if kernel not in kernels:
kernels.append(kernel)
solutionKernelsBetaOnly = solution.getKernelsBetaOnly()
for kernel in solutionKernelsBetaOnly:
if kernel not in kernelsBetaOnly:
kernelsBetaOnly.append(kernel)
# if any kernels are assembly, append every ISA supported
if globalParameters["ShortNames"] and not globalParameters["MergeFiles"]:
solutionSerialNaming = Solution.getSerialNaming(solutions)
kernelSerialNaming = Solution.getSerialNaming(kernels)
else:
solutionSerialNaming = None
kernelSerialNaming = None
solutionMinNaming = Solution.getMinNaming(solutions)
kernelMinNaming = Solution.getMinNaming(kernels)
solutionWriter = SolutionWriter( \
solutionMinNaming, solutionSerialNaming, \
kernelMinNaming, kernelSerialNaming)
kernelWriterSource = KernelWriterSource( \
kernelMinNaming, kernelSerialNaming)
kernelWriterAssembly = KernelWriterAssembly( \
kernelMinNaming, kernelSerialNaming)
# write solutions and kernels
writeSolutionsAndKernels(outputPath, solutions, kernels, kernelsBetaOnly, \
solutionWriter, kernelWriterSource, kernelWriterAssembly)
libraryStaticFiles = [
"TensileTypes.h",
"KernelHeader.h",
"SolutionHelper.cpp",
"SolutionHelper.h",
"Tools.cpp",
"Tools.h" ]
# write cmake
clientName = "LibraryClient"
writeCMake(outputPath, solutions, kernels, libraryStaticFiles, clientName )
# write logic
writeLogic(outputPath, logicData, solutionWriter)
print1("# Tensile Library Writer DONE")
print1(HR)
print1("")
################################################################################
# Main
################################################################################
if __name__ == "__main__":
TensileCreateLibrary()
|
py | 1a50da4fb5e4b5a7a798fe1a1e9fed1b31ca99c2 | import os
import argparse
import time
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
parser = argparse.ArgumentParser('ODE demo')
parser.add_argument('--method', type=str, choices=['dopri5', 'adams'], default='dopri5')
parser.add_argument('--data_size', type=int, default=1000)
parser.add_argument('--batch_time', type=int, default=2)
parser.add_argument('--batch_size', type=int, default=20)
parser.add_argument('--niters', type=int, default=2000)
parser.add_argument('--test_freq', type=int, default=20)
parser.add_argument('--ntest', type=int, default=10)
parser.add_argument('--shrink_std', type=float, default=0.1)
parser.add_argument('--shrink_proportion', type=float, default=0.5)
parser.add_argument('--viz', action='store_true')
parser.add_argument('--gpu', type=int, default=0)
parser.add_argument('--adjoint', action='store_true')
args = parser.parse_args()
if args.adjoint:
from torchdiffeq import odeint_adjoint as odeint
from torchdiffeq import odeint_adjoint_stochastic_end_v2 as odeint_stochastic_end_v2
else:
from torchdiffeq import odeint_stochastic_end_v2
from torchdiffeq import odeint
device = torch.device('cuda:' + str(args.gpu) if torch.cuda.is_available() else 'cpu')
true_y0 = torch.tensor([0.])
t = torch.linspace(0., 25., args.data_size)
true_A = torch.tensor([[-0.1, 2.0], [-2.0, -0.1]])
class Lambda(nn.Module):
def forward(self, t, y):
t = t.unsqueeze(0)
#equation = -1000*y + 3000 - 2000 * torch.exp(-t) + 1000 * torch.sin(t)
equation = -1000*y + 3000 - 2000 * torch.exp(-t)
#equation = -1000*y + 3000 - 2000 * torch.exp(-1000*t)
#equation = 10 * torch.sin(t)
return equation
#return torch.mm(y**3, true_A)
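# For reference (derived by hand; the demo integrates with y(0) = 0): the active
# equation y' = -1000*y + 3000 - 2000*exp(-t) is a classic stiff test problem
# with closed-form solution
#   y(t) = 3 - (2000/999)*exp(-t) - (997/999)*exp(-1000*t),
# i.e. a fast transient decaying at rate 1000 followed by a slow approach to 3.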
with torch.no_grad():
true_y = odeint(Lambda(), true_y0, t, method='dopri5')
#true_y = odeint(Lambda(), true_y0, t, method='adams')
def get_batch():
s = torch.from_numpy(np.random.choice(np.arange(args.data_size - args.batch_time, dtype=np.int64), args.batch_size, replace=False))
batch_y0 = true_y[s] # (M, D)
batch_t = t[:args.batch_time] # (T)
batch_y = torch.stack([true_y[s + i] for i in range(args.batch_time)], dim=0) # (T, M, D)
return batch_y0, batch_t, batch_y
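# Shape note (for reference, given the argparse defaults above): batch_y0 is
# (batch_size, 1), batch_t is (batch_time,), and batch_y stacks the next
# batch_time states of each sampled trajectory, i.e. (batch_time, batch_size, 1).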
def makedirs(dirname):
if not os.path.exists(dirname):
os.makedirs(dirname)
if args.viz:
makedirs('png_alternate_stochastic_end_v2')
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(12, 4), facecolor='white')
ax_traj = fig.add_subplot(131, frameon=False)
ax_phase = fig.add_subplot(132, frameon=False)
#ax_multiple = fig.add_subplot(133, frameon=False)
plt.show(block=False)
def visualize(true_y, pred_y, odefunc, itr):
if args.viz:
ax_traj.cla()
ax_traj.set_title('True vs Predicted')
ax_traj.set_xlabel('t')
ax_traj.set_ylabel('y')
        ax_traj.plot(t.numpy(), true_y.numpy()[:, 0], 'g-', label='true')
        ax_traj.plot(t.numpy(), pred_y.numpy()[:, 0], 'b--', label='predicted')
ax_traj.set_xlim(t.min(), t.max())
ax_traj.set_ylim(-100, 100)
ax_traj.legend()
ax_phase.cla()
ax_phase.set_title('Predicted')
ax_phase.set_xlabel('t')
ax_phase.set_ylabel('y')
        ax_phase.plot(t.numpy(), pred_y.numpy()[:, 0], 'b--', label='predicted')
ax_phase.set_xlim(t.min(), t.max())
ax_phase.set_ylim(-100, 100)
ax_phase.legend()
#ax_multiple.cla()
#ax_multiple.set_title('Variations')
#ax_multiple.set_xlabel('t')
#ax_multiple.set_ylabel('y')
#for component in pred_ys:
# ax_multiple.plot(t.numpy(), component.numpy()[:, 0], '--')
#ax_multiple.set_xlim(t.min(), t.max())
#ax_multiple.set_ylim(-100, 100)
#ax_multiple.legend()
fig.tight_layout()
plt.savefig('png_alternate_stochastic_end_v2/{:04d}'.format(itr))
plt.draw()
plt.pause(0.001)
class ODEFunc(nn.Module):
def __init__(self):
super(ODEFunc, self).__init__()
self.net = nn.Sequential(
nn.Linear(2, 500),
nn.Tanh(),
nn.Linear(500, 500),
nn.Tanh(),
nn.Linear(500, 1),
)
for m in self.net.modules():
if isinstance(m, nn.Linear):
nn.init.normal_(m.weight, mean=0, std=0.1)
nn.init.constant_(m.bias, val=0)
def forward(self, t, y):
t=t.unsqueeze(0)
t = t.view(1,1)
y = y.view(y.size(0),1)
t = t.expand_as(y)
equation = torch.cat([t,y],1)
result = self.net(equation)
if y.size(0)==1:
result = result.squeeze()
return result
class RunningAverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self, momentum=0.99):
self.momentum = momentum
self.reset()
def reset(self):
self.val = None
self.avg = 0
def update(self, val):
if self.val is None:
self.avg = val
else:
self.avg = self.avg * self.momentum + val * (1 - self.momentum)
self.val = val
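# Minimal usage sketch: the meter keeps an exponential moving average, so with
# momentum 0.99 roughly the last ~100 updates dominate the reported average.
#
#   meter = RunningAverageMeter(momentum=0.99)
#   for v in (1.0, 2.0, 3.0):
#       meter.update(v)
#   # meter.val is the last raw value (3.0); meter.avg is the smoothed value.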
if __name__ == '__main__':
ii = 0
func = ODEFunc()
optimizer = optim.RMSprop(func.parameters(), lr=1e-3)
end = time.time()
time_meter = RunningAverageMeter(0.97)
loss_meter = RunningAverageMeter(0.97)
for itr in range(1, args.niters + 1):
optimizer.zero_grad()
batch_y0, batch_t, batch_y = get_batch()
pred_y = odeint_stochastic_end_v2(func, batch_y0, batch_t,shrink_proportion=args.shrink_proportion,shrink_std=args.shrink_std,mode='train')
#pred_y = odeint_stochastic_end_v2(func, batch_y0, batch_t)
loss = torch.mean(torch.abs(pred_y - batch_y))
loss.backward()
optimizer.step()
time_meter.update(time.time() - end)
loss_meter.update(loss.item())
if itr % args.test_freq == 0:
with torch.no_grad():
pred_y = odeint(func, true_y0, t)
loss = torch.mean(torch.abs(pred_y - true_y))
print('Iter {:04d} | Total Loss {:.6f}'.format(itr, loss.item()))
#pred_ys = []
#for i in range(args.ntest):
# pred_ys.append( odeint(func, batch_y0, batch_t))
#visualize(true_y, pred_y,pred_ys, func, ii)
visualize(true_y, pred_y, func, ii)
ii += 1
end = time.time()
|
py | 1a50daa0feea476e0526972f7933c14a6bbeabb5 | import os, re, datetime, copy, collections
import json, bson
import requests
from .models import *
from accounts.models import AnnotatorProfile
from django.forms.models import model_to_dict
from django.conf import settings as global_settings
import logging
stdlogger = logging.getLogger('b2note')
def addarobase_totypefieldname(o_in):
o_out=None
if isinstance(o_in, list):
o_out = []
for item in o_in:
o_out.append(addarobase_totypefieldname(item))
elif isinstance(o_in, dict):
o_out={}
for k in o_in.keys():
if k=="type":
o_out["@type"] = addarobase_totypefieldname( o_in[k] )
else:
o_out[k] = addarobase_totypefieldname( o_in[k] )
else:
o_out = o_in
return o_out
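# Example (illustrative): the function walks nested lists/dicts and renames
# every "type" key to the JSON-LD "@type" keyword, leaving values untouched.
#
#   addarobase_totypefieldname({"type": ["Person"], "body": [{"type": "TextualBody"}]})
#   # -> {"@type": ["Person"], "body": [{"@type": "TextualBody"}]}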
def solr_fetchtermonexactlabel(label=None):
out = None
try:
if label:
if isinstance(label, (str, unicode)):
#r = requests.get('https://b2note.bsc.es/solr/b2note_index/select?q=labels:"' + label + '"&wt=json&indent=true&start=0&rows=100')
r = requests.get('https://b2note.bsc.es/solr/cleanup_test/select?q=labels:"' + label + '"&wt=json&indent=true&start=0&rows=100')
out = []
for rr in r.json()["response"]["docs"]:
if rr["labels"].lower() == label.lower():
out.append( rr )
return out
else:
print "solr_fetchtermonexactlabel fuction, parameter neither string nor unicode."
stdlogger.error("solr_fetchtermonexactlabel fuction, parameter neither string nor unicode.")
else:
print "solr_fetchtermonexactlabel function, empty parameter."
stdlogger.error("solr_fetchtermonexactlabel function, empty parameter.")
except:
print "solr_fetchtermonexactlabel function, could not complete."
stdlogger.error("solr_fetchtermonexactlabel function, could not complete.")
return False
return False
def solr_fetchorigintermonid(ids=None):
out = None
try:
if ids:
if isinstance(ids, list):
q_str = ""
for id in ids:
if isinstance(id, (str, unicode)):
q_str += 'OR "' + id + '" '
if q_str:
q_str = q_str.replace("#","%23")
q_str = "(" + q_str[3:] + ")"
r = None
#r = requests.get(
# 'https://b2note.bsc.es/solr/b2note_index/select?q=uris:' + q_str +'&fl=ontology_acronym,ontology_name,description,uris,labels,short_form&wt=json&indent=true&start=0&rows=' + str(10*len(ids)))
r = requests.get(
'https://b2note.bsc.es/solr/cleanup_test/select?q=uris:' + q_str +'&fl=ontology_acronym,ontology_name,description,uris,labels,short_form,synonyms,acrs_of_ontologies_reusing_uri&wt=json&indent=true&start=0&rows=' + str(10*len(ids)))
if r and r.json():
if isinstance(r.json(), dict):
if "response" in r.json().keys():
if isinstance(r.json()["response"], dict):
if "docs" in r.json()["response"].keys():
if isinstance(r.json()["response"]["docs"], list):
out = {}
for rr in r.json()["response"]["docs"]:
if isinstance(rr, dict):
if "ontology_acronym" in rr.keys() and "uris" in rr.keys():
if rr["uris"] not in out.keys():
out[ rr["uris"] ] = rr
elif rr["ontology_acronym"] in rr["uris"]:
out[ rr["uris"] ] = rr
return out
else:
print "solr_fetchorigintermonid fuction, requests object json>response>docs not a list."
stdlogger.error("solr_fetchorigintermonid fuction, requests object json>response>docs not a list.")
else:
print "solr_fetchorigintermonid fuction, 'docs' not a key of requests object json>response dict."
stdlogger.error("solr_fetchorigintermonid fuction, 'docs' not a key of requests object json>response dict.")
else:
print "solr_fetchorigintermonid fuction, requests object json>response not a dict"
stdlogger.error("solr_fetchorigintermonid fuction, requests object json>response not a dict")
else:
print "solr_fetchorigintermonid fuction, 'response' not a key of requests object json dict."
stdlogger.error("solr_fetchorigintermonid fuction, 'response' not a key of requests object json dict.")
else:
print "solr_fetchorigintermonid fuction, requests object json not a dict."
stdlogger.error("solr_fetchorigintermonid fuction, requests object json not a dict.")
else:
print "solr_fetchorigintermonid fuction, sorl fetch with no response or response not json."
stdlogger.error("solr_fetchorigintermonid fuction, sorl fetch with no response or response not json.")
else:
print "solr_fetchorigintermonid fuction, list item neither string nor unicode."
stdlogger.error("solr_fetchorigintermonid fuction, list item neither string nor unicode.")
else:
print "solr_fetchorigintermonid fuction, paramter not a list."
stdlogger.error("solr_fetchorigintermonid fuction, paramter not a list.")
else:
print "solr_fetchorigintermonid function, empty parameter."
stdlogger.error("solr_fetchorigintermonid function, empty parameter.")
except:
print "solr_fetchorigintermonid function, could not complete."
stdlogger.error("solr_fetchorigintermonid function, could not complete.")
return False
return False
def solr_fetchtermonid(id=None):
out = None
try:
if id:
if isinstance(id, (str, unicode)):
#r = requests.get('https://b2note.bsc.es/solr/b2note_index/select?q=uris:"' + id + '"&wt=json&indent=true&start=0&rows=100')
r = requests.get('https://b2note.bsc.es/solr/cleanup_test/select?q=uris:"' + id + '"&wt=json&indent=true&start=0&rows=100')
return r
else:
print "solr_fetchtermonid fuction, parameter neither string nor unicode."
stdlogger.error("solr_fetchtermonid fuction, parameter neither string nor unicode.")
else:
print "solr_fetchtermonid function, empty parameter."
stdlogger.error("solr_fetchtermonid function, empty parameter.")
except:
print "solr_fetchtermonid function, could not complete."
stdlogger.error("solr_fetchtermonid function, could not complete.")
return False
return False
def SearchAnnotation( kw ):
"""
Function: SearchAnnotation
----------------------------
    Searches for an annotation matching the provided body value.
params:
kw (str): Body value.
returns:
A (object): Object of the matching annotation, False otherwise.
"""
try:
if kw:
if isinstance( kw, (str, unicode)):
A = Annotation.objects.raw_query({'body.value': kw})
print "SearchAnnotation function, returning annotations with body value: ", kw
stdlogger.info("SearchAnnotation function, returning annotations with body value: " + str(kw))
return A
else:
print "SearchAnnotation function, provided keyword argument neither str nor unicode."
stdlogger.error("SearchAnnotation function, provided keyword argument neither str nor unicode.")
return False
else:
print "SearchAnnotation function, missing 'kw' string argument."
stdlogger.error("SearchAnnotation function, missing 'kw' string argument.")
return False
except:
print "SearchAnnotation function did not complete."
stdlogger.error("SearchAnnotation function did not complete.")
return False
print "SearchAnnotation function did not complete successfully."
stdlogger.error("SearchAnnotation function did not complete successfully.")
return False
def RetrieveUserAnnotations( nickname=None ):
"""
    Function: RetrieveUserAnnotations
    ----------------------------
    Retrieves all annotations created by a given user (matched on creator.nickname).
    params:
        nickname (str): user nickname as from user profile DB record.
returns:
dic: Dictionary with the values of the annotations.
"""
try:
if nickname and isinstance(nickname, (str, unicode)):
annotations = None
annotations = Annotation.objects.raw_query({'creator.nickname': nickname})
#annotations = sorted(annotations, key=lambda Annotation: Annotation.created, reverse=True)
if annotations:
print "RetrieveUserFileAnnotations function, returning annotations."
stdlogger.info("RetrieveUserFileAnnotations function, returning annotations.")
return annotations
else:
print "RetrieveUserFileAnnotations function, no annotations retrieved."
stdlogger.info("RetrieveUserFileAnnotations function, no annotations retrieved.")
return None
else:
print "RetrieveUserFileAnnotations function, provided nickname not valid:", nickname
stdlogger.info("RetrieveUserFileAnnotations function, provided nickname not valid:" + str(nickname))
return False
except Annotation.DoesNotExist:
print "RetrieveUserFileAnnotations function did not complete."
stdlogger.error("RetrieveUserFileAnnotations function did not complete.")
return False
print "RetrieveUserFileAnnotations function did not complete successfully."
stdlogger.error("RetrieveUserFileAnnotations function did not complete successfully.")
return False
def RetrieveFileAnnotations( subject_url ):
"""
    Function: RetrieveFileAnnotations
----------------------------
Retrieves all annotations for a given file.
params:
subject_url (str): ID of the file.
returns:
dic: Dictionary with the values of the annotations.
"""
try:
annotations = Annotation.objects.raw_query({'target.source': subject_url})
except Annotation.DoesNotExist:
annotations = []
#annotations = sorted(annotations, key=lambda Annotation: Annotation.created, reverse=True)
return annotations
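# Usage sketch (the target URL below is a made-up example, not a real record):
#
#   anns = RetrieveFileAnnotations("https://b2share.example.org/record/1/file.csv")
#   for a in anns:
#       print a.id, a.motivation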
def DeleteFromPOSTinfo( db_id ):
"""
Function: DeleteFromPOSTinfo
----------------------------
Removes an annotation from MongoDB.
params:
db_id (str): ID of the document to remove.
returns:
bool: True if successful, False otherwise.
"""
del_flag = False
try:
if db_id and isinstance(db_id, (str, unicode)) and len(db_id)>0:
Annotation.objects.get(id=db_id).delete()
del_flag = True
else:
print "Argument provided is not a valid collection document id"
stdlogger.error("Argument provided is not a valid collection document id")
except ValueError:
pass
if del_flag:
print "Removed an Annotation from DB"
stdlogger.info("Removed an Annotation from DB")
return True
print "Could not remove from DB"
stdlogger.error("Could not remove from DB")
return False
def SetDateTimeModified( db_id=None ):
"""
Function: SetDateTimeModified
----------------------------
Sets date time of modified fields in annotation on change.
params:
db_id (str): database id of the document to modify.
returns:
id (str): database id of the modified document if successful, False otherwise.
"""
try:
if db_id:
if isinstance(db_id, (str, unicode)):
A = None
A = Annotation.objects.get(id=db_id)
if A:
nowdt = datetime.datetime.now()
A.modified = nowdt
#if A.body:
# if isinstance(A.body, list):
# if len(A.body)>0:
# A.body[0].modified = nowdt
A.save()
print 'SetDateTimeModified function, "' + str(nowdt) + '" set as modified date time of annotation: ', str(db_id)
stdlogger.info('SetDateTimeModified function, "' + str(nowdt) + '" set as modified date time of annotation: ' + str(db_id))
return A.id
else:
print "SetDateTimeModified function, no annotation wit id: ", str(db_id)
stdlogger.error("SetDateTimeModified function, no annotation wit id: " + str(db_id))
return False
else:
print "SetDateTimeModified function, 'db_id' parameter neither str nor unicode."
stdlogger.error("SetDateTimeModified function, 'db_id' parameter neither str nor unicode.")
return False
else:
print "SetDateTimeModified function, missing parameter called 'db_id'."
stdlogger.error("SetDateTimeModified function, missing parameter called 'db_id'.")
return False
except ValueError:
print "SetDateTimeModified function did not complete."
stdlogger.error("SetDateTimeModified function did not complete.")
return False
print "SetDateTimeModified function did not complete successfully."
stdlogger.error("SetDateTimeModified function did not complete successfully.")
return False
def SetAnnotationMotivation( db_id=None, motiv=None ):
"""
Function: SetAnnotationMotivation
----------------------------
Sets annotation motivation from existing Web Annotation set.
params:
db_id (str): database id of the document to modify.
motiv (str): motivation to be attributed to annotation document and as body purpose.
returns:
id (str): database id of the modified document if successful, False otherwise.
"""
try:
if db_id:
if isinstance(db_id, (str, unicode)):
A = None
A = Annotation.objects.get(id=db_id)
if A:
if motiv:
if isinstance(motiv, (str, unicode)):
if (motiv, motiv) in Annotation.MOTIVATION_CHOICES:
A.motivation = [ motiv ]
if A.body:
if isinstance(A.body, list):
if len(A.body)>0:
A.body[0].purpose = motiv
A.save()
print 'SetAnnotationMotivation function, "' + motiv + '" set as motivation of annotation: ', str(db_id)
stdlogger.info('SetAnnotationMotivation function, "' + motiv + '" set as motivation of annotation: ' + str(db_id))
return A.id
else:
print "SetAnnotationMotivation function, provided string parameter not part of predefined set of motivations."
stdlogger.error("SetAnnotationMotivation function, provided string parameter not part of predefined set of motivations.")
return False
else:
print "SetAnnotationMotivation function, parameter provided for motivation neither string nor unicode."
stdlogger.error("SetAnnotationMotivation function, parameter provided for motivation neither string nor unicode.")
return False
else:
print "SetAnnotationMotivation function, missing motivation parameter."
stdlogger.error("SetAnnotationMotivation function, missing motivation parameter.")
return False
else:
print "SetAnnotationMotivation function, no annotation wit id: ", str(db_id)
stdlogger.error("SetAnnotationMotivation function, no annotation wit id: " + str(db_id))
return False
else:
print "SetAnnotationMotivation function, 'db_id' parameter neither str nor unicode."
stdlogger.error("SetAnnotationMotivation function, 'db_id' parameter neither str nor unicode.")
return False
else:
print "SetAnnotationMotivation function, missing parameter called 'db_id'."
stdlogger.error("SetAnnotationMotivation function, missing parameter called 'db_id'.")
return False
except ValueError:
print "SetAnnotationMotivation function did not complete."
stdlogger.error("SetAnnotationMotivation function did not complete.")
return False
print "SetAnnotationMotivation function did not complete successfully."
stdlogger.error("SetAnnotationMotivation function did not complete successfully.")
return False
def SetUserAsAnnotationCreator( user_id=None, db_id=None ):
"""
Function: SetUserAsAnnotationCreator
----------------------------
Sets annotator profile corresponding to user_id input parameter
as creator agent of annotation document with id matching db_id
input parameter.
params:
user_id (int): sqlite3 primary key of annotator profile model.
db_id (unicode): mongodb document id.
returns:
id (str): database id of the create document if successful, False otherwise.
"""
try:
if user_id and isinstance(user_id, int) and user_id>=0:
ap = None
ap = AnnotatorProfile.objects.using('users').get(annotator_id=user_id)
if ap and ap.nickname and isinstance(ap.nickname, (str, unicode)):
if db_id and isinstance(db_id, (str, unicode)):
annotation = None
annotation = Annotation.objects.get(id=db_id)
if annotation:
annotation.creator = [Agent(
type = ['Person'],
nickname = str(ap.nickname)
)]
annotation.save()
print "User with nickname ", str(ap.nickname) ,", set as annotation ", annotation.id ," creator"
stdlogger.info("User with nickname " + str(ap.nickname) + ", set as annotation " + annotation.id + " creator")
return annotation.id
else:
print "SetUserAsAnnotationCreator function, no annotation were found matching this id: ", str(db_id)
stdlogger.error("SetUserAsAnnotationCreator function, no annotation were found matching this id: " + str(db_id))
else:
print "SetUserAsAnnotationCreator function, provided parameter for annotation id invalid."
stdlogger.error("SetUserAsAnnotationCreator function, provided parameter for annotation id invalid.")
else:
print "SetUserAsAnnotationCreator function, no registered annotator profile with id: ", user_id
stdlogger.error("SetUserAsAnnotationCreator function, no registered annotator profile with id: " + user_id)
else:
print "SetCurrentUserAsAnnotationCreator function, provided parameter for annotator profile id invalid."
stdlogger.error("SetCurrentUserAsAnnotationCreator function, provided parameter for annotator profile id invalid.")
except Exception:
print "SetUserAsAnnotationCreator function did not complete."
stdlogger.error("SetUserAsAnnotationCreator function did not complete.")
return False
print "SetUserAsAnnotationCreator function did not complete successfully."
stdlogger.error("SetUserAsAnnotationCreator function did not complete successfully.")
return False
# def DuplicateAnnotation( db_id=None ):
# """
# Function: DulicateAnnotation
# ----------------------------
# Duplicates an annotation in MongoDB.
#
# params:
# db_id (str): database id of the annotation document to duplicate.
#
# returns:
# id (str): database id of the created annotation document.
# """
# try:
#
# if db_id:
#
# if isinstance(db_id, (str, unicode)):
#
# A = None
# A = Annotation.objects.get(id=db_id)
#
# if A:
#
# if A.target:
#
# if isinstance(A.target, list):
#
# if len(A.target)>0:
#
# if A.target[0]:
#
# if A.target[0].jsonld_id:
#
# if isinstance(A.target[0].jsonld_id, (str, unicode)):
#
# B = CreateAnnotation( A.target[0].jsonld_id )
# B = Annotation.objects.get( id = B )
#
# ann = copy.deepcopy( A )
#
# B_dict = {k: v for k, v in B.__dict__.iteritems() if v is not None}
# ann.__dict__.update(B_dict)
#
# ann.save()
#
# print "DuplicateAnnotation function, created annotation document with id: " + str(ann.id)
# stdlogger.info("DuplicateAnnotation function, created annotation document with id: " + str(ann.id))
# return ann.id
#
# else:
# print "DuplicateAnnotation function, annotation document target 'jsonld_id' neither str nor unicode."
# stdlogger.error("DuplicateAnnotation function, annotation document target 'jsonld_id' neither str nor unicode.")
# return False
# else:
# print "DuplicateAnnotation function, missing annotation document target 'jsonld_id'."
# stdlogger.error("DuplicateAnnotation function, missing annotation document target 'jsonld_id'.")
# return False
# else:
# print "DuplicateAnnotation function, no element in annotation document target list."
# stdlogger.error("DuplicateAnnotation function, no element in annotation document target list.")
# return False
# else:
# print "DuplicateAnnotation function, annotation doument target list empty."
# stdlogger.error("DuplicateAnnotation function, annotation doument target list empty.")
# return False
# else:
# print "DuplicateAnnotation function, annotation doument target is not of type list."
# stdlogger.error("DuplicateAnnotation function, annotation doument target is not of type list.")
# return False
# else:
# print "DuplicateAnnotation function, annotation document missing target field."
# stdlogger.error("DuplicateAnnotation function, annotation document missing target field.")
# return False
# else:
# print "DuplicateAnnotation function, provided 'db_id' argument neither str nor unicode."
# stdlogger.error("DuplicateAnnotation function, provided 'db_id' argument neither str nor unicode.")
# return False
#
# else:
# print "DuplicateAnnotation function, missing 'db_id' argument."
# stdlogger.error("DuplicateAnnotation function, missing 'db_id' argument.")
# return False
#
# except ValueError:
# print "DuplicateAnnotation function, did not complete."
# stdlogger.error("DuplicateAnnotation function, did not complete.")
# return False
#
# print "DuplicateAnnotation function did not complete successfully."
# stdlogger.error("DuplicateAnnotation function did not complete successfully.")
# return False
def CreateSemanticTag( subject_url=None, subject_pid=None, object_json=None ):
"""
Function: CreateSemanticTag
----------------------------
Creates an annotation in MongoDB.
params:
subject_url (str): URL of the target file of the annotation to create.
subject_pid (str): PID of the target file of the annotation to create. (either URL or PID required, both recommended)
Providing only PID creates an annotation the target of which is an external resource with only identifier
the PID under jsonld_id field.
Provinding only URL creates an annotation the target of which is an external specific resource of type
oa:SpecificResource with only identifier the URL under source field.
Providing both creates an annotation the target of which is an external specific resource of type
oa:SpecificResource with identifiers the URL under source field and the PID under jsonld_id field.
object_json (str): JSON of the annotation provided by SOLR (list of dicts with ontology class information)
returns:
db_id (str): database id of the modified annotation if successful, False otherwise.
"""
try:
if (subject_url and isinstance(subject_url, (str, unicode))) or (subject_pid and isinstance(subject_pid, (str, unicode))):
my_id = None
my_id = CreateAnnotation(subject_url, subject_pid)
if my_id:
if object_json and isinstance(object_json, (str, unicode)):
db_id = MakeAnnotationSemanticTag( my_id, object_json )
db_id = SetAnnotationMotivation( db_id, "tagging" )
print "CreateSemanticTag function, made annotation semantic tag: ", str(db_id)
stdlogger.info("CreateSemanticTag function, made annotation semantic tag: " + str(db_id))
return db_id
else:
print "CreateSemanticTag function, provided json object is neither string nor unicode."
stdlogger.error("CreateSemanticTag function, provided json object is neither string nor unicode.")
return False
else:
print "CreateSemanticTag function, internal call to CreateAnnotation function did not return an exploitable id reference."
stdlogger.error("CreateSemanticTag function, internal call to CreateAnnotation function did not return an exploitable id reference.")
return False
else:
print "CreateSemanticTag function, provided parameter is not a valid string for subject_url."
stdlogger.error("CreateSemanticTag function, provided parameter is not a valid string for subject_url.")
return False
except ValueError:
print "CreateSemanticTag function did not complete."
stdlogger.error("CreateSemanticTag function did not complete.")
return False
print "CreateSemanticTag function did not complete successfully."
stdlogger.error("CreateSemanticTag function did not complete successfully.")
return False
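# Usage sketch (illustrative; both identifiers and the term are made-up examples,
# and object_json normally comes from a SOLR lookup as in the functions above):
#
#   CreateSemanticTag(
#       subject_url="https://b2share.example.org/record/1/file.csv",
#       subject_pid="hdl:11304/example-pid",
#       object_json='[{"uris": "http://purl.obolibrary.org/obo/ENVO_00000015", "labels": "ocean"}]',
#   )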
def CreateFreeTextKeyword( subject_url=None, subject_pid=None, text=None ):
"""
Function: CreateFreeTextKeyword
----------------------------
Creates an annotation in MongoDB.
params:
subject_url (str): URL of the target file of the annotation to create.
subject_pid (str): PID of the target file of the annotation to create. (either URL or PID required, both recommended)
Providing only PID creates an annotation the target of which is an external resource with only identifier
the PID under jsonld_id field.
            Providing only URL creates an annotation the target of which is an external specific resource of type
oa:SpecificResource with only identifier the URL under source field.
Providing both creates an annotation the target of which is an external specific resource of type
oa:SpecificResource with identifiers the URL under source field and the PID under jsonld_id field.
text (str): Free text introduced by the user
returns:
db_id (str): database id of the modified annotation if successful, False otherwise.
"""
try:
my_id = CreateAnnotation(subject_url, subject_pid)
if my_id:
if isinstance( my_id, (str, unicode) ):
if isinstance(text, (str, unicode)) and len(text) > 0:
db_id = None
db_id = MakeAnnotationFreeText(my_id, text)
db_id = SetAnnotationMotivation( db_id, "tagging" )
if db_id:
print "CreateFreeTextKeyword function, created free-text keyword annotation:", str(db_id)
stdlogger.info("CreateFreeTextKeyword function, created free-text keyword annotation:" + str(db_id))
return db_id
else:
print "CreateFreeTextKeyword function, free-text keyword annotation make unreturned."
stdlogger.info("CreateFreeTextKeyword function, free-text keyword annotation make unreturned.")
return False
else:
print "CreateFreeTextKeyword function, wrong text codification or empty text."
stdlogger.error("CreateFreeTextKeyword function, wrong text codification or empty text.")
return False
else:
print "CreateFreeTextKeyword function, 'my_id' parameter neither str nor unicode."
stdlogger.error("CreateFreeTextKeyword function, 'my_id' parameter neither str nor unicode.")
return False
else:
print "CreateFreeTextKeyword function, annotation not created or id not returned."
stdlogger.error("CreateFreeText function, annotation not created or id not returned.")
return False
except ValueError:
print "CreateFreeTextKeyword function did not complete."
stdlogger.error("CreateFreeTextKeyword function did not complete.")
return False
print "CreateFreeTextKeyword function did not complete successfully."
stdlogger.error("CreateFreeTextKeyword function did not complete successfully.")
return False
def CreateFreeTextComment(subject_url=None, subject_pid=None, text=None):
"""
Function: CreateFreeTextComment
----------------------------
    Creates a free-text comment annotation in MongoDB.
params:
subject_url (str): URL of the target file of the annotation to create.
subject_pid (str): PID of the target file of the annotation to create. (either URL or PID required, both recommended)
Providing only PID creates an annotation the target of which is an external resource with only identifier
the PID under jsonld_id field.
            Providing only URL creates an annotation the target of which is an external specific resource of type
oa:SpecificResource with only identifier the URL under source field.
Providing both creates an annotation the target of which is an external specific resource of type
oa:SpecificResource with identifiers the URL under source field and the PID under jsonld_id field.
text (str): Free text introduced by the user
returns:
db_id (str): database id of the modified annotation if successful, False otherwise.
"""
try:
my_id = CreateAnnotation(subject_url, subject_pid)
if my_id:
if isinstance(my_id, (str, unicode)):
if isinstance(text, (str, unicode)) and len(text) > 0:
db_id = None
db_id = MakeAnnotationFreeText(my_id, text)
db_id = SetAnnotationMotivation(db_id, "commenting")
if db_id:
print "CreateFreeTextComment function, created free-text comment annotation:", str(db_id)
stdlogger.info("CreateFreeTextComment function, created free-text comment annotation:", str(db_id))
return db_id
else:
print "CreateFreeTextComment function, free-text comment annotation created not returned."
stdlogger.info("CreateFreeTextComment function, free-text comment annotation created not returned.")
return False
else:
print "CreateFreeTextComment function, wrong text codification or empty text."
stdlogger.info("CreateFreeTextComment function, wrong text codification or empty text.")
return False
else:
print "CreateFreeTextComment function, 'my_id' parameter neither str nor unicode."
stdlogger.info("CreateFreeTextComment function, 'my_id' parameter neither str nor unicode.")
return False
else:
print "CreateFreeTextComment function, annotation not created or id not returned."
stdlogger.info("CreateFreeTextComment function, annotation not created or id not returned.")
return False
except ValueError:
print "CreateFreeTextComment function did not complete."
stdlogger.info("CreateFreeTextComment function did not complete.")
return False
print "CreateFreeTextComment function did not complete successfully."
stdlogger.info("CreateFreeTextComment function did not complete successfully.")
return False
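# Hedged usage sketch: choosing between a free-text tag and a free-text comment
# based on body length, using CheckLengthFreeText (defined further below). The
# target URL is a hypothetical placeholder.
#
#     text = "Measured at the northern site during the 2015 campaign."
#     if CheckLengthFreeText(text):
#         db_id = CreateFreeTextKeyword(subject_url="https://example.org/f.csv", text=text)
#     else:
#         db_id = CreateFreeTextComment(subject_url="https://example.org/f.csv", text=text)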
def MakeAnnotationSemanticTag( db_id=None, object_json=None ):
"""
Function: MakeAnnotationSemanticTag
-----------------------------------
Adds semantic tag body to an annotation.
params:
db_id (str): database id of the annotation to which the semantic tag body can be attached.
object_json (str): JSON of the annotation provided by SOLR.
returns:
db_id (str): database id of the modified annotation if successful, False otherwise.
"""
try:
if db_id:
if isinstance(db_id, (str, unicode)):
A = None
A = Annotation.objects.get(id=db_id)
if A:
if object_json and isinstance(object_json, (str, unicode)):
ojl = None
ojl = json.loads(object_json)
if ojl and isinstance(ojl, list) and len(ojl)>0:
object_label = ""
object_uri = set()
for o in ojl:
if o and isinstance(o, dict):
if "uris" in o.keys():
if o["uris"] and isinstance(o["uris"], (str, unicode)):
object_uri.add( o["uris"] )
else:
print "MakeAnnotationSemanticTag function, dictionary field at key 'uris' does not resolve in a valid string."
stdlogger.error("MakeAnnotationSemanticTag function, dictionary field at key 'uris' does not resolve in a valid string.")
else:
print "MakeAnnotationSemanticTag function, dictionary does not contain a field with key 'uris'."
stdlogger.error("MakeAnnotationSemanticTag function, dictionary does not contain a field with key 'uris'.")
if not object_label:
if "labels" in o.keys():
if o["labels"] and isinstance(o["labels"], (str, unicode)):
object_label = o["labels"]
else:
print "MakeAnnotationSemanticTag function, provided json list item is not dictionary."
                                    stdlogger.error("MakeAnnotationSemanticTag function, provided json list item is not dictionary.")
itemz = []
for o_uri in object_uri:
stsr = SemanticTagSpecificResource(
type = "SpecificResource",
source = o_uri
)
itemz.append(stsr)
sttb = SemanticTagTextualBody(
type = "TextualBody",
value = object_label
)
itemz.append(sttb)
A.body = [
SemanticTagBodySet(
type = "Composite",
items = itemz,
purpose = "tagging"
)
]
A.save()
db_id = SetAnnotationMotivation( A.id, "tagging" )
print "MakeAnnotationSemanticTag function, made annotation semantic tag: ", str(db_id)
stdlogger.info("MakeAnnotationSemanticTag function, made annotation semantic tag: " + str(db_id))
return db_id
else:
print "MakeAnnotationSemanticTag function, provided json does not load as a python list."
stdlogger.error("MakeAnnotationSemanticTag function, provided json does not load as a python list.")
return False
else:
print "MakeAnnotationSemanticTag function, provided json object is neither string nor unicode."
stdlogger.error("MakeAnnotationSemanticTag function, provided json object is neither string nor unicode.")
return False
else:
print "MakeAnnotationSemanticTag function, no annotation wit id: ", str(db_id)
stdlogger.error("MakeAnnotationSemanticTag function, no annotation wit id: " + str(db_id))
return False
else:
print "MakeAnnotationSemanticTag function, 'db_id' parameter neither str nor unicode."
stdlogger.error("MakeAnnotationSemanticTag function, 'db_id' parameter neither str nor unicode.")
return False
else:
print "MakeAnnotationSemanticTag function, missing parameter called 'db_id'."
stdlogger.error("MakeAnnotationSemanticTag function, missing parameter called 'db_id'.")
return False
except ValueError:
print "MakeAnnotationSemanticTag function did not complete."
stdlogger.error("MakeAnnotationSemanticTag function did not complete.")
return False
print "MakeAnnotationSemanticTag function did not complete successfully."
stdlogger.error("MakeAnnotationSemanticTag function did not complete successfully.")
return False
def MakeAnnotationFreeText( db_id=None, text=None ):
"""
Function: MakeAnnotationFreeText
--------------------------------
    Attaches a free-text body to an existing annotation document.
    params:
        db_id (str): database id of the annotation to modify.
text (str): Free text introduced by the user.
returns:
db_id (str): database id of the modified annotation if successful, False otherwise.
"""
try:
if db_id:
if isinstance(db_id, (str, unicode)):
A = None
A = Annotation.objects.get(id=db_id)
if A:
if isinstance(text, (str, unicode)) and len(text) > 0:
A.body = [TextualBody(type=["TextualBody"], value=text)]
A.save()
print "MakeAnnotationFreeText function, made free-text annotation: ", str(db_id)
stdlogger.info("MakeAnnotationFreeText function, made free-text annotation: " + str(db_id))
return db_id
else:
print "MakeAnnotationFreeText function, wrong text codification or empty text"
stdlogger.error("MakeAnnotationFreeText function, wrong text codification or empty text")
return False
else:
print "MakeAnnotationFreeText function, no annotation wit id: ", str(db_id)
stdlogger.error("MakeAnnotationFreeText function, no annotation wit id: " + str(db_id))
return False
else:
print "MakeAnnotationFreeText function, 'db_id' parameter neither str nor unicode."
stdlogger.error("MakeAnnotationFreeText function, 'db_id' parameter neither str nor unicode.")
return False
else:
print "MakeAnnotationFreeText function, missing parameter called 'db_id'."
stdlogger.error("MakeAnnotationFreeText function, missing parameter called 'db_id'.")
return False
except ValueError:
print "MakeAnnotationFreeText function did not complete."
stdlogger.error("MakeAnnotationFreeText function did not complete.")
return False
print "MakeAnnotationFreeText function did not complete successfully."
stdlogger.error("MakeAnnotationFreeText function did not complete successfully.")
return False
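# Hedged usage sketch: attach a free-text body to an existing annotation; the
# target URL and the resulting id are hypothetical placeholders.
#
#     ann_id = CreateAnnotation(target_url="https://example.org/f.csv")
#     db_id = MakeAnnotationFreeText(ann_id, "water sample, station 4")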
def CreateAnnotation(target_url=None, target_pid=None):
"""
Function: CreateAnnotation
----------------------------
Creates an annotation in MongoDB.
params:
        target_url (str): URL of the target file of the annotation to create.
        target_pid (str): PID of the target file of the annotation to create. (either URL or PID required, both recommended)
Providing only PID creates an annotation the target of which is an external resource with only identifier
the PID under jsonld_id field.
            Providing only URL creates an annotation the target of which is an external specific resource of type
oa:SpecificResource with only identifier the URL under source field.
Providing both creates an annotation the target of which is an external specific resource of type
oa:SpecificResource with identifiers the URL under source field and the PID under jsonld_id field.
returns:
id (str): database id of the created annotation document.
"""
try:
if target_url or target_pid:
if (isinstance(target_pid, (str, unicode)) and len(target_pid) > 0) or \
(isinstance(target_url, (str, unicode)) and len(target_url) > 0):
gen_agt = Agent(
type = ["Software"],
name = ["B2Note v1.0"],
#name = ["B2Note semantic annotator"],
#nickname = "B2Note v1.0",
#email = ["[email protected]"],
homepage = ["https://b2note.bsc.es"]
)
ext_res = None
if isinstance(target_pid, (str, unicode)) and len(target_pid) > 0:
if isinstance(target_url, (str, unicode)) and len(target_url) > 0:
ext_res = ExternalSpecificResource(
type = "SpecificResource",
source = target_url,
jsonld_id = target_pid
)
else:
ext_res = ExternalResource(
jsonld_id = target_pid
)
elif isinstance(target_url, (str, unicode)) and len(target_url) > 0:
ext_res = ExternalSpecificResource(
type = "SpecificResource",
source = target_url
)
if ext_res:
ann = Annotation(
jsonld_context = [global_settings.JSONLD_CONTEXT_URL],
type = ["Annotation"],
target = [ ext_res ],
#target = [ExternalResource( jsonld_id = target )],
generator = [ gen_agt ]
)
ann.save()
ann = Annotation.objects.get(id=ann.id)
ann.jsonld_id = global_settings.ROOT_ANNOTATION_ID + ann.id
ann.save()
print "CreateAnnotation function, created annotation document with id: " + str(ann.id)
stdlogger.info("CreateAnnotation function, created annotation document with id: " + str(ann.id))
return ann.id
else:
print "CreateAnnotation function, external resource was not constructed."
stdlogger.error("CreateAnnotation function, external resource was not constructed.")
return False
else:
print "CreateAnnotation function, provided 'target' argument not a valid str or unicode."
stdlogger.error("CreateAnnotation function, provided 'target_url' argument not a valid str or unicode.")
return False
else:
print "CreateAnnotation function, missing file identifier argument."
stdlogger.error("CreateAnnotation function, missing file identifier argument.")
return False
except ValueError:
print "CreateAnnotation function, did not complete."
stdlogger.error("CreateAnnotation function, did not complete.")
return False
print "CreateAnnotation function did not complete successfully."
stdlogger.error("CreateAnnotation function did not complete successfully.")
return False
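# Hedged sketch of the three target modes documented in the docstring above;
# the URL and PID values are hypothetical placeholders.
#
#     CreateAnnotation(target_url="https://example.org/f.csv")   # SpecificResource, source only
#     CreateAnnotation(target_pid="11100/0a1b2c3d")              # ExternalResource, @id only
#     CreateAnnotation(target_url="https://example.org/f.csv",
#                      target_pid="11100/0a1b2c3d")              # SpecificResource, source and @id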
def orderedJSONLDfields(o_in):
out = None
try:
if o_in:
out = o_in
if isinstance(o_in, list):
out = []
for item in o_in:
out.append(orderedJSONLDfields(item))
if isinstance(o_in, dict):
out = collections.OrderedDict()
for k in ["@context", "id", "type", "target", "body", "value", "motivation", "purpose", "creator", "generator"]:
if k in o_in.keys():
out[k] = orderedJSONLDfields(o_in[k])
for k in o_in.keys():
if k not in out.keys():
out[k] = orderedJSONLDfields(o_in[k])
except:
out = None
print "orderedJSONLDfields function, Exception."
pass
return out
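# Illustration of the key ordering applied above:
#
#     orderedJSONLDfields({"body": ["b"], "type": "Annotation", "@context": "ctx"})
#     # -> OrderedDict([("@context", "ctx"), ("type", "Annotation"), ("body", ["b"])])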
def ridOflistsOfOneItem(o_in):
out = None
try:
if o_in:
out = o_in
if isinstance(o_in, list) or isinstance(o_in, tuple):
if len(o_in) == 1:
out = ridOflistsOfOneItem( o_in[0] )
else:
out = []
for item in o_in:
out.append( ridOflistsOfOneItem( item ) )
if isinstance(o_in, dict):
out = {}
for k in o_in.keys():
out[k] = ridOflistsOfOneItem( o_in[k] )
except:
out = None
print "ridOflistsOfOneItem function, Exception."
pass
return out
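# Illustration: one-item lists and tuples collapse to their single element,
# recursively.
#
#     ridOflistsOfOneItem({"type": ["Annotation"], "body": [1, 2]})
#     # -> {"type": "Annotation", "body": [1, 2]}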
def readyQuerySetValuesForDumpAsJSONLD( o_in ):
"""
Function: readyQuerySetValuesForDumpAsJSONLD
--------------------------------------------
    Recursively drops embedded custom model class objects, maps model
    class field names beginning with "jsonld_" (e.g. "jsonld_context")
    to "@"-prefixed keys (e.g. "@context"), skips fields with no
    content, and converts datetimes to xsd:dateTime strings.
input:
o_in (object): In nesting order, Django queryset values
list then tuple or list or set or dict or datetime or
out-of-scope object.
output:
o_out: None (execution failed) or list of native python
objects, where each out-of-scope object was replaced
by its "string-ified" avatar, designed for subsequent
JSON-ification.
"""
o_out = None
try:
if type(o_in) is tuple:
o_out = ()
if len(o_in) == 1 and readyQuerySetValuesForDumpAsJSONLD( o_in[0] ):
o_out = readyQuerySetValuesForDumpAsJSONLD( o_in[0] )
else:
for item in o_in:
if item and readyQuerySetValuesForDumpAsJSONLD( item ):
o_out += ( readyQuerySetValuesForDumpAsJSONLD( item ), )
elif type(o_in) is list or type(o_in) is set:
o_out = []
if len(o_in) == 1 and readyQuerySetValuesForDumpAsJSONLD( o_in[0] ):
o_out = readyQuerySetValuesForDumpAsJSONLD( o_in[0] )
else:
for item in o_in:
if item and readyQuerySetValuesForDumpAsJSONLD( item ):
o_out.append( readyQuerySetValuesForDumpAsJSONLD( item ) )
elif type(o_in) is dict:
o_out = {}
for k in o_in.keys():
if o_in[k] and readyQuerySetValuesForDumpAsJSONLD( o_in[k] ) and k != "id":
newkey = k
m = re.match(r'^jsonld_(.*)', k)
if m:
newkey = "@{0}".format(m.group(1))
if newkey=="@id": newkey="id"
#if newkey!="@context":
o_out[newkey] = readyQuerySetValuesForDumpAsJSONLD( o_in[k] )
        elif isinstance(o_in, (datetime.datetime, datetime.date)):
o_out = o_in.isoformat()
elif o_in and o_in != "None" and not re.match(r'^<class (.*)>', o_in):
o_out = str(o_in)
except:
o_out = None
pass
return o_out
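# Illustration of the key mapping and datetime handling (empty fields are
# dropped; "jsonld_" prefixes become "@" prefixes, except "@id" -> "id"):
#
#     readyQuerySetValuesForDumpAsJSONLD(
#         {"jsonld_id": "tag:ann1", "jsonld_context": "ctx",
#          "created": datetime.datetime(2016, 1, 1), "empty": None})
#     # -> {"id": "tag:ann1", "@context": "ctx", "created": "2016-01-01T00:00:00"}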
def CheckDuplicateAnnotation( target_url=None, target_pid=None, annotation_body=None ):
"""
Function: CheckDuplicateAnnotation
--------------------------------------------
Will be used to send feedback message to user in case they attempt to
create an annotation with a body that is a duplicate of a previously
existing annotation for the same target file.
input:
        target_url (str): URL of the target file of the annotation to check.
        target_pid (str): PID of the target file of the annotation to check.
annotation_body (dict): intended (new) annotation body.
output:
boolean: True/False
"""
try:
if target_url or target_pid:
if isinstance(target_url, (str, unicode)) or isinstance(target_pid, (str, unicode)):
                if annotation_body and isinstance(annotation_body, dict) and 'body' in annotation_body:
A = None
if 'jsonld_id' in annotation_body['body'].keys() and \
isinstance(annotation_body['body']['jsonld_id'], list) and \
len(annotation_body['body']['jsonld_id']) > 0:
if target_pid and target_url:
A = Annotation.objects.raw_query(
{'$and':[
{'target.source': target_url},
{'target.jsonld_id': target_pid}
],
'body.items.source':{'$in':annotation_body['body']['jsonld_id']}
}
)
elif target_url:
A = Annotation.objects.raw_query(
{'target.source': target_url,
'body.items.source': {'$in':annotation_body['body']['jsonld_id']}
}
)
elif target_pid:
A = Annotation.objects.raw_query(
{'target.jsonld_id': target_pid,
'body.items.source': {'$in':annotation_body['body']['jsonld_id']}
}
)
else:
if 'value' in annotation_body['body'].keys():
if target_pid and target_url:
A = Annotation.objects.raw_query(
{'$and':[
{'target.source': target_url},
{'target.jsonld_id': target_pid}
],
'$or':[
{'body.value': annotation_body['body']['value']},
{'body.items.value': annotation_body['body']['value']}
]})
elif target_url:
A = Annotation.objects.raw_query(
{'target.source': target_url,
'$or': [
{'body.value': annotation_body['body']['value']},
{'body.items.value': annotation_body['body']['value']}
]})
elif target_pid:
A = Annotation.objects.raw_query(
{'target.jsonld_id': target_pid,
'$or': [
{'body.value': annotation_body['body']['value']},
{'body.items.value': annotation_body['body']['value']}
]})
else:
print "CheckDuplicateAnnotation function, provided 'annotation_body' argument not a valid dictionary."
stdlogger.error("CheckDuplicateAnnotation function, provided 'annotation_body' argument not a valid dictionary.")
return False
if len(A) > 0:
return A
else:
return False
else:
print "CheckDuplicateAnnotation function, provided 'annotation_body' argument not a valid dictionary."
stdlogger.error("CheckDuplicateAnnotation function, provided 'annotation_body' argument not a valid dictionary.")
return False
else:
print "CheckDuplicateAnnotation function, provided 'target_url' argument not a valid str or unicode."
stdlogger.error("CheckDuplicateAnnotation function, provided 'target_url' argument not a valid str or unicode.")
return False
else:
print "CheckDuplicateAnnotation function, missing 'target_url' argument."
stdlogger.error("CheckDuplicateAnnotation function, missing 'target_url' argument.")
return False
except ValueError:
print "CheckDuplicateAnnotation function, did not complete."
stdlogger.error("CheckDuplicateAnnotation function, did not complete.")
return False
def CheckLengthFreeText( body_value=None, length_limit=60 ):
"""
Function: CheckLengthFreeText
--------------------------------------------
Will be used to send feedback message to the user in case they attempt to create
a free-text tag annotation with a long body value, to check whether their intent
is "tagging" or "commenting" (resulting in 2 different types of annotations:
"free-text tag" or "(free-text) comment").
input:
body_value (str): intended (new) annotation body value.
length_limit (int): tag string length check limit
output:
boolean: True/False
"""
try:
if body_value:
if isinstance(body_value, (str, unicode)):
if len(body_value) <= length_limit:
return True
else:
return False
else:
print "CheckLengthFreeText function, provided 'body_value' argument not a valid str or unicode."
stdlogger.error("CheckLengthFreeText function, provided 'body_value' argument not a valid str or unicode.")
return False
else:
print "CheckLengthFreeText function, missing parameter called 'body_value'."
stdlogger.error("CheckLengthFreeText function, missing parameter called 'body_value'.")
return False
except ValueError:
print "CheckLengthFreeText function, did not complete."
stdlogger.error("CheckLengthFreeText function, did not complete.")
return False
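# Usage illustration: the default 60-character limit separates short,
# tag-like strings from longer, comment-like ones.
#
#     CheckLengthFreeText("karst")                      # -> True  (tag-length)
#     CheckLengthFreeText("A longer observation " * 5)  # -> False (comment-length)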
|
py | 1a50dae68f8f8e4d4955d13044e6c51378d8479a | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from ..core.enums import HorizontalLocation, MarkerType, VerticalLocation
from ..core.properties import (
Any,
Auto,
Either,
Enum,
Instance,
Int,
List,
Null,
Nullable,
Seq,
String,
TextLike,
Tuple,
)
from ..models import (
ColumnDataSource,
CoordinateMapping,
GraphRenderer,
Plot,
Range,
Scale,
Tool,
)
from ..models.dom import Template
from ..models.tools import (
Drag,
GestureTool,
InspectTool,
Scroll,
Tap,
)
from ..transform import linear_cmap
from ..util.options import Options
from ._graph import get_graph_kwargs
from ._plot import get_range, get_scale, process_axis_and_grid
from ._stack import double_stack, single_stack
from ._tools import process_active_tools, process_tools_arg
from .glyph_api import _MARKER_SHORTCUTS, GlyphAPI
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
#: A default set of tools configured if no configuration is provided
DEFAULT_TOOLS = "pan,wheel_zoom,box_zoom,save,reset,help"
__all__ = (
'figure',
'markers',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class figure(Plot, GlyphAPI):
''' Create a new figure for plotting.
A subclass of |Plot| that simplifies plot creation with default axes, grids,
tools, etc.
Figure objects have many glyph methods that can be used to draw
vectorized graphical glyphs:
.. hlist::
:columns: 3
* :func:`~bokeh.plotting.figure.annular_wedge`
* :func:`~bokeh.plotting.figure.annulus`
* :func:`~bokeh.plotting.figure.arc`
* :func:`~bokeh.plotting.figure.asterisk`
* :func:`~bokeh.plotting.figure.bezier`
* :func:`~bokeh.plotting.figure.circle`
* :func:`~bokeh.plotting.figure.circle_cross`
* :func:`~bokeh.plotting.figure.circle_dot`
* :func:`~bokeh.plotting.figure.circle_x`
* :func:`~bokeh.plotting.figure.circle_y`
* :func:`~bokeh.plotting.figure.cross`
* :func:`~bokeh.plotting.figure.dash`
* :func:`~bokeh.plotting.figure.diamond`
* :func:`~bokeh.plotting.figure.diamond_cross`
* :func:`~bokeh.plotting.figure.diamond_dot`
* :func:`~bokeh.plotting.figure.dot`
* :func:`~bokeh.plotting.figure.ellipse`
* :func:`~bokeh.plotting.figure.harea`
* :func:`~bokeh.plotting.figure.hbar`
* :func:`~bokeh.plotting.figure.hex`
* :func:`~bokeh.plotting.figure.hex_tile`
* :func:`~bokeh.plotting.figure.image`
* :func:`~bokeh.plotting.figure.image_rgba`
* :func:`~bokeh.plotting.figure.image_url`
* :func:`~bokeh.plotting.figure.inverted_triangle`
* :func:`~bokeh.plotting.figure.line`
* :func:`~bokeh.plotting.figure.multi_line`
* :func:`~bokeh.plotting.figure.multi_polygons`
* :func:`~bokeh.plotting.figure.oval`
* :func:`~bokeh.plotting.figure.patch`
* :func:`~bokeh.plotting.figure.patches`
* :func:`~bokeh.plotting.figure.plus`
* :func:`~bokeh.plotting.figure.quad`
* :func:`~bokeh.plotting.figure.quadratic`
* :func:`~bokeh.plotting.figure.ray`
* :func:`~bokeh.plotting.figure.rect`
* :func:`~bokeh.plotting.figure.segment`
* :func:`~bokeh.plotting.figure.square`
* :func:`~bokeh.plotting.figure.square_cross`
* :func:`~bokeh.plotting.figure.square_dot`
* :func:`~bokeh.plotting.figure.square_pin`
* :func:`~bokeh.plotting.figure.square_x`
* :func:`~bokeh.plotting.figure.star`
* :func:`~bokeh.plotting.figure.star_dot`
* :func:`~bokeh.plotting.figure.step`
* :func:`~bokeh.plotting.figure.text`
* :func:`~bokeh.plotting.figure.triangle`
* :func:`~bokeh.plotting.figure.triangle_dot`
* :func:`~bokeh.plotting.figure.triangle_pin`
* :func:`~bokeh.plotting.figure.varea`
* :func:`~bokeh.plotting.figure.vbar`
* :func:`~bokeh.plotting.figure.wedge`
* :func:`~bokeh.plotting.figure.x`
* :func:`~bokeh.plotting.figure.y`
There is a scatter function that can be parameterized by marker type:
* :func:`~bokeh.plotting.figure.scatter`
There are also specialized methods for stacking bars:
* bars: :func:`~bokeh.plotting.figure.hbar_stack`, :func:`~bokeh.plotting.figure.vbar_stack`
* lines: :func:`~bokeh.plotting.figure.hline_stack`, :func:`~bokeh.plotting.figure.vline_stack`
* areas: :func:`~bokeh.plotting.figure.harea_stack`, :func:`~bokeh.plotting.figure.varea_stack`
As well as one specialized method for making simple hexbin plots:
* :func:`~bokeh.plotting.figure.hexbin`
In addition to all the ``figure`` property attributes, the following
options are also accepted:
.. bokeh-options:: FigureOptions
:module: bokeh.plotting._figure
'''
__view_model__ = "Figure"
def __init__(self, *arg, **kw) -> None:
opts = FigureOptions(kw)
super().__init__(*arg, **kw)
self.x_range = get_range(opts.x_range)
self.y_range = get_range(opts.y_range)
self.x_scale = get_scale(self.x_range, opts.x_axis_type)
self.y_scale = get_scale(self.y_range, opts.y_axis_type)
process_axis_and_grid(self, opts.x_axis_type, opts.x_axis_location, opts.x_minor_ticks, opts.x_axis_label, self.x_range, 0)
process_axis_and_grid(self, opts.y_axis_type, opts.y_axis_location, opts.y_minor_ticks, opts.y_axis_label, self.y_range, 1)
tool_objs, tool_map = process_tools_arg(self, opts.tools, opts.tooltips)
self.add_tools(*tool_objs)
process_active_tools(
self.toolbar,
tool_map,
opts.active_drag,
opts.active_inspect,
opts.active_scroll,
opts.active_tap,
opts.active_multi,
)
@property
def plot(self):
return self
@property
def coordinates(self):
return None
def subplot(self,
*,
x_source: Range | None = None, y_source: Range | None = None,
x_scale: Scale | None = None, y_scale: Scale | None = None,
x_target: Range, y_target: Range,
) -> GlyphAPI:
""" Create a new sub-coordinate system and expose a plotting API. """
coordinates = CoordinateMapping(x_source=x_source, y_source=y_source, x_target=x_target, y_target=y_target)
return GlyphAPI(self, coordinates)
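    # Hedged usage sketch for ``subplot`` (illustrative, not part of the
    # documented API surface):
    #
    #     from bokeh.models import Range1d
    #     p = figure()
    #     sub = p.subplot(x_target=Range1d(0, 1), y_target=Range1d(0, 1))
    #     sub.line([0, 1, 2], [3, 4, 5])  # drawn in the sub-coordinate system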
def hexbin(self, x, y, size, orientation="pointytop", palette="Viridis256", line_color=None, fill_color=None, aspect_scale=1, **kwargs):
''' Perform a simple equal-weight hexagonal binning.
A :class:`~bokeh.models.glyphs.HexTile` glyph will be added to display
the binning. The :class:`~bokeh.models.sources.ColumnDataSource` for
the glyph will have columns ``q``, ``r``, and ``count``, where ``q``
and ``r`` are `axial coordinates`_ for a tile, and ``count`` is the
associated bin count.
It is often useful to set ``match_aspect=True`` on the associated plot,
so that hexagonal tiles are all regular (i.e. not "stretched") in
screen space.
For more sophisticated use-cases, e.g. weighted binning or individually
scaling hex tiles, use :func:`hex_tile` directly, or consider a higher
level library such as HoloViews.
Args:
x (array[float]) :
A NumPy array of x-coordinates to bin into hexagonal tiles.
y (array[float]) :
A NumPy array of y-coordinates to bin into hexagonal tiles
size (float) :
The size of the hexagonal tiling to use. The size is defined as
distance from the center of a hexagon to a corner.
In case the aspect scaling is not 1-1, then specifically `size`
is the distance from the center to the "top" corner with the
`"pointytop"` orientation, and the distance from the center to
a "side" corner with the "flattop" orientation.
orientation ("pointytop" or "flattop", optional) :
Whether the hexagonal tiles should be oriented with a pointed
corner on top, or a flat side on top. (default: "pointytop")
palette (str or seq[color], optional) :
A palette (or palette name) to use to colormap the bins according
to count. (default: 'Viridis256')
If ``fill_color`` is supplied, it overrides this value.
line_color (color, optional) :
The outline color for hex tiles, or None (default: None)
fill_color (color, optional) :
An optional fill color for hex tiles, or None. If None, then
the ``palette`` will be used to color map the tiles by
count. (default: None)
aspect_scale (float) :
Match a plot's aspect ratio scaling.
When working with a plot with ``aspect_scale != 1``, this
parameter can be set to match the plot, in order to draw
regular hexagons (instead of "stretched" ones).
This is roughly equivalent to binning in "screen space", and
it may be better to use axis-aligned rectangular bins when
plot aspect scales are not one.
Any additional keyword arguments are passed to :func:`hex_tile`.
        Returns:
            (GlyphRenderer, DataFrame)
A tuple with the ``HexTile`` renderer generated to display the
binning, and a Pandas ``DataFrame`` with columns ``q``, ``r``,
and ``count``, where ``q`` and ``r`` are `axial coordinates`_
for a tile, and ``count`` is the associated bin count.
Example:
.. bokeh-plot::
:source-position: above
import numpy as np
from bokeh.models import HoverTool
from bokeh.plotting import figure, show
x = 2 + 2*np.random.standard_normal(500)
y = 2 + 2*np.random.standard_normal(500)
p = figure(match_aspect=True, tools="wheel_zoom,reset")
p.background_fill_color = '#440154'
p.grid.visible = False
p.hexbin(x, y, size=0.5, hover_color="pink", hover_alpha=0.8)
hover = HoverTool(tooltips=[("count", "@c"), ("(q,r)", "(@q, @r)")])
p.add_tools(hover)
show(p)
.. _axial coordinates: https://www.redblobgames.com/grids/hexagons/#coordinates-axial
'''
from ..util.hex import hexbin
bins = hexbin(x, y, size, orientation, aspect_scale=aspect_scale)
if fill_color is None:
fill_color = linear_cmap('c', palette, 0, max(bins.counts))
source = ColumnDataSource(data=dict(q=bins.q, r=bins.r, c=bins.counts))
r = self.hex_tile(q="q", r="r", size=size, orientation=orientation, aspect_scale=aspect_scale,
source=source, line_color=line_color, fill_color=fill_color, **kwargs)
return (r, bins)
def harea_stack(self, stackers, **kw):
''' Generate multiple ``HArea`` renderers for levels stacked left
to right.
Args:
stackers (seq[str]) : a list of data source field names to stack
successively for ``x1`` and ``x2`` harea coordinates.
Additionally, the ``name`` of the renderer will be set to
the value of each successive stacker (this is useful with the
special hover variable ``$name``)
Any additional keyword arguments are passed to each call to ``harea``.
If a keyword value is a list or tuple, then each call will get one
value from the sequence.
Returns:
list[GlyphRenderer]
Examples:
Assuming a ``ColumnDataSource`` named ``source`` with columns
            *2016* and *2017*, then the following call to ``harea_stack``
            will create two ``HArea`` renderers that stack:
.. code-block:: python
p.harea_stack(['2016', '2017'], y='y', color=['blue', 'red'], source=source)
This is equivalent to the following two separate calls:
.. code-block:: python
p.harea(x1=stack(), x2=stack('2016'), y='y', color='blue', source=source, name='2016')
p.harea(x1=stack('2016'), x2=stack('2016', '2017'), y='y', color='red', source=source, name='2017')
'''
result = []
for kw in double_stack(stackers, "x1", "x2", **kw):
result.append(self.harea(**kw))
return result
def hbar_stack(self, stackers, **kw):
''' Generate multiple ``HBar`` renderers for levels stacked left to right.
Args:
stackers (seq[str]) : a list of data source field names to stack
successively for ``left`` and ``right`` bar coordinates.
Additionally, the ``name`` of the renderer will be set to
the value of each successive stacker (this is useful with the
special hover variable ``$name``)
Any additional keyword arguments are passed to each call to ``hbar``.
If a keyword value is a list or tuple, then each call will get one
value from the sequence.
Returns:
list[GlyphRenderer]
Examples:
Assuming a ``ColumnDataSource`` named ``source`` with columns
            *2016* and *2017*, then the following call to ``hbar_stack``
            will create two ``HBar`` renderers that stack:
.. code-block:: python
                p.hbar_stack(['2016', '2017'], y=10, height=0.9, color=['blue', 'red'], source=source)
This is equivalent to the following two separate calls:
.. code-block:: python
                p.hbar(left=stack(), right=stack('2016'), y=10, height=0.9, color='blue', source=source, name='2016')
                p.hbar(left=stack('2016'), right=stack('2016', '2017'), y=10, height=0.9, color='red', source=source, name='2017')
'''
result = []
for kw in double_stack(stackers, "left", "right", **kw):
result.append(self.hbar(**kw))
return result
def _line_stack(self, x, y, **kw):
''' Generate multiple ``Line`` renderers for lines stacked vertically
or horizontally.
Args:
            x (seq[str] or str) : field name(s) for the x line coordinates;
                either ``x`` or ``y`` may be a sequence of stackers, but not both
            y (seq[str] or str) : field name(s) for the y line coordinates
Additionally, the ``name`` of the renderer will be set to
the value of each successive stacker (this is useful with the
special hover variable ``$name``)
        Any additional keyword arguments are passed to each call to ``line``.
If a keyword value is a list or tuple, then each call will get one
value from the sequence.
Returns:
list[GlyphRenderer]
Examples:
Assuming a ``ColumnDataSource`` named ``source`` with columns
*2016* and *2017*, then the following call to ``line_stack`` with
            stackers for the y-coordinates will create two ``Line``
renderers that stack:
.. code-block:: python
p.line_stack(['2016', '2017'], x='x', color=['blue', 'red'], source=source)
This is equivalent to the following two separate calls:
.. code-block:: python
p.line(y=stack('2016'), x='x', color='blue', source=source, name='2016')
p.line(y=stack('2016', '2017'), x='x', color='red', source=source, name='2017')
'''
if all(isinstance(val, (list, tuple)) for val in (x,y)):
raise ValueError("Only one of x or y may be a list of stackers")
result = []
if isinstance(y, (list, tuple)):
kw['x'] = x
for kw in single_stack(y, "y", **kw):
result.append(self.line(**kw))
return result
if isinstance(x, (list, tuple)):
kw['y'] = y
for kw in single_stack(x, "x", **kw):
result.append(self.line(**kw))
return result
return [self.line(x, y, **kw)]
def hline_stack(self, stackers, **kw):
''' Generate multiple ``Line`` renderers for lines stacked horizontally.
Args:
stackers (seq[str]) : a list of data source field names to stack
successively for ``x`` line coordinates.
Additionally, the ``name`` of the renderer will be set to
the value of each successive stacker (this is useful with the
special hover variable ``$name``)
Any additional keyword arguments are passed to each call to ``line``.
If a keyword value is a list or tuple, then each call will get one
value from the sequence.
Returns:
list[GlyphRenderer]
Examples:
Assuming a ``ColumnDataSource`` named ``source`` with columns
*2016* and *2017*, then the following call to ``hline_stack`` with
            stackers for the x-coordinates will create two ``Line``
renderers that stack:
.. code-block:: python
p.hline_stack(['2016', '2017'], y='y', color=['blue', 'red'], source=source)
This is equivalent to the following two separate calls:
.. code-block:: python
p.line(x=stack('2016'), y='y', color='blue', source=source, name='2016')
p.line(x=stack('2016', '2017'), y='y', color='red', source=source, name='2017')
'''
return self._line_stack(x=stackers, **kw)
def varea_stack(self, stackers, **kw):
''' Generate multiple ``VArea`` renderers for levels stacked bottom
to top.
Args:
stackers (seq[str]) : a list of data source field names to stack
            successively for ``y1`` and ``y2`` varea coordinates.
Additionally, the ``name`` of the renderer will be set to
the value of each successive stacker (this is useful with the
special hover variable ``$name``)
Any additional keyword arguments are passed to each call to ``varea``.
If a keyword value is a list or tuple, then each call will get one
value from the sequence.
Returns:
list[GlyphRenderer]
Examples:
Assuming a ``ColumnDataSource`` named ``source`` with columns
            *2016* and *2017*, then the following call to ``varea_stack``
            will create two ``VArea`` renderers that stack:
.. code-block:: python
p.varea_stack(['2016', '2017'], x='x', color=['blue', 'red'], source=source)
This is equivalent to the following two separate calls:
.. code-block:: python
p.varea(y1=stack(), y2=stack('2016'), x='x', color='blue', source=source, name='2016')
p.varea(y1=stack('2016'), y2=stack('2016', '2017'), x='x', color='red', source=source, name='2017')
'''
result = []
for kw in double_stack(stackers, "y1", "y2", **kw):
result.append(self.varea(**kw))
return result
def vbar_stack(self, stackers, **kw):
''' Generate multiple ``VBar`` renderers for levels stacked bottom
to top.
Args:
stackers (seq[str]) : a list of data source field names to stack
            successively for ``bottom`` and ``top`` bar coordinates.
Additionally, the ``name`` of the renderer will be set to
the value of each successive stacker (this is useful with the
special hover variable ``$name``)
Any additional keyword arguments are passed to each call to ``vbar``.
If a keyword value is a list or tuple, then each call will get one
value from the sequence.
Returns:
list[GlyphRenderer]
Examples:
Assuming a ``ColumnDataSource`` named ``source`` with columns
            *2016* and *2017*, then the following call to ``vbar_stack``
            will create two ``VBar`` renderers that stack:
.. code-block:: python
p.vbar_stack(['2016', '2017'], x=10, width=0.9, color=['blue', 'red'], source=source)
This is equivalent to the following two separate calls:
.. code-block:: python
p.vbar(bottom=stack(), top=stack('2016'), x=10, width=0.9, color='blue', source=source, name='2016')
p.vbar(bottom=stack('2016'), top=stack('2016', '2017'), x=10, width=0.9, color='red', source=source, name='2017')
'''
result = []
for kw in double_stack(stackers, "bottom", "top", **kw):
result.append(self.vbar(**kw))
return result
def vline_stack(self, stackers, **kw):
''' Generate multiple ``Line`` renderers for lines stacked vertically.
Args:
stackers (seq[str]) : a list of data source field names to stack
successively for ``y`` line coordinates.
Additionally, the ``name`` of the renderer will be set to
the value of each successive stacker (this is useful with the
special hover variable ``$name``)
Any additional keyword arguments are passed to each call to ``line``.
If a keyword value is a list or tuple, then each call will get one
value from the sequence.
Returns:
list[GlyphRenderer]
Examples:
Assuming a ``ColumnDataSource`` named ``source`` with columns
*2016* and *2017*, then the following call to ``vline_stack`` with
            stackers for the y-coordinates will create two ``Line``
renderers that stack:
.. code-block:: python
p.vline_stack(['2016', '2017'], x='x', color=['blue', 'red'], source=source)
This is equivalent to the following two separate calls:
.. code-block:: python
p.line(y=stack('2016'), x='x', color='blue', source=source, name='2016')
p.line(y=stack('2016', '2017'), x='x', color='red', source=source, name='2017')
'''
return self._line_stack(y=stackers, **kw)
def graph(self, node_source, edge_source, layout_provider, **kwargs):
''' Creates a network graph using the given node, edge and layout provider.
Args:
node_source (:class:`~bokeh.models.sources.ColumnDataSource`) : a user-supplied data source
for the graph nodes. An attempt will be made to convert the object to
:class:`~bokeh.models.sources.ColumnDataSource` if needed. If none is supplied, one is created
for the user automatically.
edge_source (:class:`~bokeh.models.sources.ColumnDataSource`) : a user-supplied data source
for the graph edges. An attempt will be made to convert the object to
:class:`~bokeh.models.sources.ColumnDataSource` if needed. If none is supplied, one is created
for the user automatically.
layout_provider (:class:`~bokeh.models.graphs.LayoutProvider`) : a ``LayoutProvider`` instance to
provide the graph coordinates in Cartesian space.
**kwargs: |line properties| and |fill properties|
'''
kw = get_graph_kwargs(node_source, edge_source, **kwargs)
graph_renderer = GraphRenderer(layout_provider=layout_provider, **kw)
self.renderers.append(graph_renderer)
return graph_renderer
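# Hedged usage sketch for ``figure.graph``; the layout dict below is
# illustrative:
#
#     from bokeh.models import ColumnDataSource, StaticLayoutProvider
#     nodes = ColumnDataSource(dict(index=[0, 1, 2]))
#     edges = ColumnDataSource(dict(start=[0, 1], end=[1, 2]))
#     layout = StaticLayoutProvider(graph_layout={0: (0, 0), 1: (1, 1), 2: (2, 0)})
#     p = figure()
#     renderer = p.graph(nodes, edges, layout)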
def markers():
''' Prints a list of valid marker types for scatter()
Returns:
None
'''
print("Available markers: \n\n - " + "\n - ".join(list(MarkerType)))
print()
print("Shortcuts: \n\n" + "\n".join(" %r: %s" % item for item in _MARKER_SHORTCUTS.items()))
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
# This class itself is intentionally undocumented (it is used to generate
# documentation elsewhere)
class BaseFigureOptions(Options):
tools = Either(String, Seq(Either(String, Instance(Tool))), default=DEFAULT_TOOLS, help="""
Tools the plot should start with.
""")
x_minor_ticks = Either(Auto, Int, default="auto", help="""
Number of minor ticks between adjacent x-axis major ticks.
""")
y_minor_ticks = Either(Auto, Int, default="auto", help="""
Number of minor ticks between adjacent y-axis major ticks.
""")
x_axis_location = Nullable(Enum(VerticalLocation), default="below", help="""
Where the x-axis should be located.
""")
y_axis_location = Nullable(Enum(HorizontalLocation), default="left", help="""
Where the y-axis should be located.
""")
x_axis_label = Nullable(TextLike, default="", help="""
A label for the x-axis.
""")
y_axis_label = Nullable(TextLike, default="", help="""
A label for the y-axis.
""")
active_drag = Either(Null, Auto, String, Instance(Drag), default="auto", help="""
Which drag tool should initially be active.
""")
active_inspect = Either(Null, Auto, String, Instance(InspectTool), Seq(Instance(InspectTool)), default="auto", help="""
    Which inspect tool should initially be active.
""")
active_scroll = Either(Null, Auto, String, Instance(Scroll), default="auto", help="""
Which scroll tool should initially be active.
""")
active_tap = Either(Null, Auto, String, Instance(Tap), default="auto", help="""
Which tap tool should initially be active.
""")
active_multi = Either(Null, Auto, String, Instance(GestureTool), default="auto", help="""
Specify an active multi-gesture tool, for instance an edit tool or a range tool.
""")
tooltips = Either(Null, Instance(Template), String, List(Tuple(String, String)), help="""
An optional argument to configure tooltips for the Figure. This argument
accepts the same values as the ``HoverTool.tooltips`` property. If a hover
tool is specified in the ``tools`` argument, this value will override that
    hover tool's ``tooltips`` value. If no hover tool is specified in the
``tools`` argument, then passing tooltips here will cause one to be created
and added.
""")
class FigureOptions(BaseFigureOptions):
x_range = Any(help="""
Customize the x-range of the plot.
""")
y_range = Any(help="""
Customize the y-range of the plot.
""")
x_axis_type = Either(Null, Auto, Enum("linear", "log", "datetime", "mercator"), default="auto", help="""
The type of the x-axis.
""")
y_axis_type = Either(Null, Auto, Enum("linear", "log", "datetime", "mercator"), default="auto", help="""
The type of the y-axis.
""")
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
_color_fields = {"color", "fill_color", "line_color"}
_alpha_fields = {"alpha", "fill_alpha", "line_alpha"}
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
py | 1a50db26a6b4dfef1aa3650689429655c84b4b55 | # Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import numpy as np
import pandas as pd
from mars.dataframe import DataFrame, Series, ArrowStringDtype
from mars.tests import setup
from mars.tests.core import require_cudf
setup = setup
def test_sort_values_execution(setup):
distinct_opts = ['0'] if sys.platform.lower().startswith('win') else ['0', '1']
for add_distinct in distinct_opts:
os.environ['PSRS_DISTINCT_COL'] = add_distinct
df = pd.DataFrame(np.random.rand(100, 10), columns=['a' + str(i) for i in range(10)])
# test one chunk
mdf = DataFrame(df)
result = mdf.sort_values('a0').execute().fetch()
expected = df.sort_values('a0')
pd.testing.assert_frame_equal(result, expected)
result = mdf.sort_values(['a6', 'a7'], ascending=False).execute().fetch()
expected = df.sort_values(['a6', 'a7'], ascending=False)
pd.testing.assert_frame_equal(result, expected)
# test psrs
mdf = DataFrame(df, chunk_size=10)
result = mdf.sort_values('a0').execute().fetch()
expected = df.sort_values('a0')
pd.testing.assert_frame_equal(result, expected)
result = mdf.sort_values(['a3', 'a4']).execute().fetch()
expected = df.sort_values(['a3', 'a4'])
pd.testing.assert_frame_equal(result, expected)
# test ascending=False
result = mdf.sort_values(['a0', 'a1'], ascending=False).execute().fetch()
expected = df.sort_values(['a0', 'a1'], ascending=False)
pd.testing.assert_frame_equal(result, expected)
result = mdf.sort_values(['a7'], ascending=False).execute().fetch()
expected = df.sort_values(['a7'], ascending=False)
pd.testing.assert_frame_equal(result, expected)
# test multiindex
df2 = df.copy(deep=True)
df2.columns = pd.MultiIndex.from_product([list('AB'), list('CDEFG')])
mdf = DataFrame(df2, chunk_size=10)
result = mdf.sort_values([('A', 'C')]).execute().fetch()
expected = df2.sort_values([('A', 'C')])
pd.testing.assert_frame_equal(result, expected)
# test rechunk
mdf = DataFrame(df, chunk_size=3)
result = mdf.sort_values('a0').execute().fetch()
expected = df.sort_values('a0')
pd.testing.assert_frame_equal(result, expected)
result = mdf.sort_values(['a3', 'a4']).execute().fetch()
expected = df.sort_values(['a3', 'a4'])
pd.testing.assert_frame_equal(result, expected)
# test other types
raw = pd.DataFrame({'a': np.random.rand(10),
'b': np.random.randint(1000, size=10),
'c': np.random.rand(10),
'd': [np.random.bytes(10) for _ in range(10)],
'e': [pd.Timestamp(f'201{i}') for i in range(10)],
'f': [pd.Timedelta(f'{i} days') for i in range(10)]
},)
mdf = DataFrame(raw, chunk_size=3)
for label in raw.columns:
result = mdf.sort_values(label).execute().fetch()
expected = raw.sort_values(label)
pd.testing.assert_frame_equal(result, expected)
result = mdf.sort_values(['a', 'b', 'e'], ascending=False).execute().fetch()
expected = raw.sort_values(['a', 'b', 'e'], ascending=False)
pd.testing.assert_frame_equal(result, expected)
# test nan
df = pd.DataFrame({
'col1': ['A', 'A', 'B', 'B', 'D', 'C'],
'col2': [2, 1, 9, np.nan, 7, 4],
'col3': [0, 1, 9, 4, 2, 3],
})
mdf = DataFrame(df)
result = mdf.sort_values(['col2']).execute().fetch()
expected = df.sort_values(['col2'])
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(df, chunk_size=3)
result = mdf.sort_values(['col2']).execute().fetch()
expected = df.sort_values(['col2'])
pd.testing.assert_frame_equal(result, expected)
# test None (issue #1885)
df = pd.DataFrame(np.random.rand(1000, 10))
df[0][df[0] < 0.5] = 'A'
df[0][df[0] != 'A'] = None
mdf = DataFrame(df)
result = mdf.sort_values([0, 1]).execute().fetch()
expected = df.sort_values([0, 1])
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(df, chunk_size=100)
result = mdf.sort_values([0, 1]).execute().fetch()
expected = df.sort_values([0, 1])
pd.testing.assert_frame_equal(result, expected)
# test ignore_index
df = pd.DataFrame(np.random.rand(10, 3), columns=['a' + str(i) for i in range(3)])
mdf = DataFrame(df, chunk_size=3)
result = mdf.sort_values(['a0', 'a1'], ignore_index=True).execute().fetch()
try: # for python3.5
expected = df.sort_values(['a0', 'a1'], ignore_index=True)
except TypeError:
expected = df.sort_values(['a0', 'a1'])
expected.index = pd.RangeIndex(len(expected))
pd.testing.assert_frame_equal(result, expected)
# test inplace
mdf = DataFrame(df)
mdf.sort_values('a0', inplace=True)
result = mdf.execute().fetch()
df.sort_values('a0', inplace=True)
pd.testing.assert_frame_equal(result, df)
# test unknown shape
df = pd.DataFrame({'a': list(range(10)),
'b': np.random.random(10)})
mdf = DataFrame(df, chunk_size=4)
filtered = mdf[mdf['a'] > 2]
result = filtered.sort_values(by='b').execute().fetch()
pd.testing.assert_frame_equal(result, df[df['a'] > 2].sort_values(by='b'))
# test empty dataframe
df = pd.DataFrame({'a': list(range(10)),
'b': np.random.random(10)})
mdf = DataFrame(df, chunk_size=4)
filtered = mdf[mdf['b'] > 100]
result = filtered.sort_values(by='b').execute().fetch()
pd.testing.assert_frame_equal(result, df[df['b'] > 100].sort_values(by='b'))
# test chunks with zero length
df = pd.DataFrame({'a': list(range(10)),
'b': np.random.random(10)})
df.iloc[4:8, 1] = 0
mdf = DataFrame(df, chunk_size=4)
filtered = mdf[mdf['b'] != 0]
result = filtered.sort_values(by='b').execute().fetch()
pd.testing.assert_frame_equal(result, df[df['b'] != 0].sort_values(by='b'))
# test Series.sort_values
raw = pd.Series(np.random.rand(10))
series = Series(raw)
result = series.sort_values().execute().fetch()
expected = raw.sort_values()
pd.testing.assert_series_equal(result, expected)
series = Series(raw, chunk_size=3)
result = series.sort_values().execute().fetch()
expected = raw.sort_values()
pd.testing.assert_series_equal(result, expected)
series = Series(raw, chunk_size=2)
result = series.sort_values(ascending=False).execute().fetch()
expected = raw.sort_values(ascending=False)
pd.testing.assert_series_equal(result, expected)
# test empty series
series = pd.Series(list(range(10)), name='a')
mseries = Series(series, chunk_size=4)
filtered = mseries[mseries > 100]
result = filtered.sort_values().execute().fetch()
pd.testing.assert_series_equal(result, series[series > 100].sort_values())
# test series with None
series = pd.Series(np.arange(1000,))
series[series < 500] = 'A'
series[series != 'A'] = None
mseries = Series(series, chunk_size=100)
result = mseries.sort_values().execute().fetch()
expected = series.sort_values()
pd.testing.assert_series_equal(result.reset_index(drop=True), expected.reset_index(drop=True))
def test_sort_index_execution(setup):
raw = pd.DataFrame(np.random.rand(100, 20), index=np.random.rand(100))
mdf = DataFrame(raw)
result = mdf.sort_index().execute().fetch()
expected = raw.sort_index()
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(raw)
mdf.sort_index(inplace=True)
result = mdf.execute().fetch()
expected = raw.sort_index()
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(raw, chunk_size=30)
result = mdf.sort_index().execute().fetch()
expected = raw.sort_index()
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(raw, chunk_size=20)
result = mdf.sort_index(ascending=False).execute().fetch()
expected = raw.sort_index(ascending=False)
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(raw, chunk_size=10)
result = mdf.sort_index(ignore_index=True).execute().fetch()
try: # for python3.5
expected = raw.sort_index(ignore_index=True)
except TypeError:
expected = raw.sort_index()
expected.index = pd.RangeIndex(len(expected))
pd.testing.assert_frame_equal(result, expected)
# test axis=1
raw = pd.DataFrame(np.random.rand(10, 10), columns=np.random.rand(10))
mdf = DataFrame(raw)
result = mdf.sort_index(axis=1).execute().fetch()
expected = raw.sort_index(axis=1)
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(raw, chunk_size=3)
result = mdf.sort_index(axis=1).execute().fetch()
expected = raw.sort_index(axis=1)
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(raw, chunk_size=4)
result = mdf.sort_index(axis=1, ascending=False).execute().fetch()
expected = raw.sort_index(axis=1, ascending=False)
pd.testing.assert_frame_equal(result, expected)
mdf = DataFrame(raw, chunk_size=4)
result = mdf.sort_index(axis=1, ignore_index=True).execute().fetch()
try: # for python3.5
expected = raw.sort_index(axis=1, ignore_index=True)
except TypeError:
expected = raw.sort_index(axis=1)
expected.index = pd.RangeIndex(len(expected))
pd.testing.assert_frame_equal(result, expected)
# test series
raw = pd.Series(np.random.rand(10, ), index=np.random.rand(10))
series = Series(raw)
result = series.sort_index().execute().fetch()
expected = raw.sort_index()
pd.testing.assert_series_equal(result, expected)
series = Series(raw, chunk_size=2)
result = series.sort_index().execute().fetch()
expected = raw.sort_index()
pd.testing.assert_series_equal(result, expected)
series = Series(raw, chunk_size=3)
result = series.sort_index(ascending=False).execute().fetch()
expected = raw.sort_index(ascending=False)
pd.testing.assert_series_equal(result, expected)
def test_arrow_string_sort_values(setup):
rs = np.random.RandomState(0)
raw = pd.DataFrame({'a': rs.rand(10),
'b': [f's{rs.randint(1000)}' for _ in range(10)]
})
raw['b'] = raw['b'].astype(ArrowStringDtype())
mdf = DataFrame(raw, chunk_size=3)
df = mdf.sort_values(by='b')
result = df.execute().fetch()
expected = raw.sort_values(by='b')
pd.testing.assert_frame_equal(result, expected)
@require_cudf
def test_gpu_execution(setup):
# test sort_values
distinct_opts = ['0'] if sys.platform.lower().startswith('win') else ['0', '1']
for add_distinct in distinct_opts:
os.environ['PSRS_DISTINCT_COL'] = add_distinct
# test dataframe
raw = pd.DataFrame(np.random.rand(100, 10), columns=['a' + str(i) for i in range(10)])
mdf = DataFrame(raw, chunk_size=30).to_gpu()
result = mdf.sort_values(by='a0').execute().fetch()
expected = raw.sort_values(by='a0')
pd.testing.assert_frame_equal(result.to_pandas(), expected)
# test series
raw = pd.Series(np.random.rand(10))
series = Series(raw).to_gpu()
result = series.sort_values().execute().fetch()
expected = raw.sort_values()
pd.testing.assert_series_equal(result.to_pandas(), expected)
# test DataFrame.sort_index
raw = pd.DataFrame(np.random.rand(10, 10), columns=np.random.rand(10))
mdf = DataFrame(raw).to_gpu()
result = mdf.sort_index().execute().fetch()
expected = raw.sort_index()
pd.testing.assert_frame_equal(result.to_pandas(), expected)
# test Series.sort_index
raw = pd.Series(np.random.rand(10, ), index=np.random.rand(10))
series = Series(raw).to_gpu()
result = series.sort_index().execute().fetch()
expected = raw.sort_index()
pd.testing.assert_series_equal(result.to_pandas(), expected)
|
py | 1a50db8752190f2be8f9a8e3e1418642615ef1d8 | """Notebook resources unit tests."""
import unittest
import common
from notelist.tools import get_uuid
from notelist.responses import (
METHOD_NOT_ALLOWED, MISSING_TOKEN, INVALID_TOKEN, NOT_FRESH_TOKEN,
USER_UNAUTHORIZED, OK, ERROR_METHOD_NOT_ALLOWED, ERROR_MISSING_TOKEN,
ERROR_INVALID_TOKEN, ERROR_NOT_FRESH_TOKEN, ERROR_UNAUTHORIZED_USER,
ERROR_VALIDATION, ERROR_ITEM_EXISTS)
from notelist.views.notebooks import (
RETRIEVED_1, RETRIEVED, CREATED, UPDATED, DELETED, EXISTS)
class NotebookListTestCase(common.BaseTestCase):
"""Notebook List resource unit tests."""
def test_get(self):
"""Test the Get method of the Notebook List view.
This test logs in as some user, creates some notebooks and then tries
to get the user's notebook list, which should work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Get list
headers = {"Authorization": f"Bearer {access_token}"}
r = self.client.get("/notebooks/notebooks", headers=headers)
res_data = r.json
# Check status code
self.assertEqual(r.status_code, 200)
# Check result
self.assertIn("result", res_data)
notebooks = res_data["result"]
self.assertEqual(type(notebooks), list)
# Check list
self.assertEqual(len(notebooks), 0)
# Create notebook
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
res_data = r.json
notebook_id = res_data["result"]["id"]
# Get list
r = self.client.get("/notebooks/notebooks", headers=headers)
res_data = r.json
# Check status code
self.assertEqual(r.status_code, 200)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], RETRIEVED_1)
self.assertEqual(res_data[keys[1]], OK)
# Check result
self.assertIn("result", res_data)
notebooks = res_data["result"]
self.assertEqual(type(notebooks), list)
# Check list
self.assertEqual(len(notebooks), 1)
notebook = notebooks[0]
for i in ("id", "name", "tag_colors"):
self.assertIn(i, notebook)
self.assertEqual(notebook["id"], notebook_id)
self.assertEqual(notebook["name"], n["name"])
self.assertEqual(notebook["tag_colors"], n["tag_colors"])
def test_get_missing_access_token(self):
"""Test the Get method of the Notebook List view.
This test tries to get the notebook list of the request user without
providing the access token, which shouldn't work.
"""
# Get list without providing the access token
r = self.client.get("/notebooks/notebooks")
res_data = r.json
# Check status code
self.assertEqual(r.status_code, 401)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], MISSING_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_MISSING_TOKEN)
def test_get_invalid_access_token(self):
"""Test the Get method of the Notebook List view.
This test tries to get the user's notebook list providing an invalid
access token, which shouldn't work.
"""
# Get list providing an invalid access token ("1234")
headers = {"Authorization": "Bearer 1234"}
r = self.client.get("/notebooks/notebooks", headers=headers)
res_data = r.json
# Check status code
self.assertEqual(r.status_code, 422)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], INVALID_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_INVALID_TOKEN)
def test_post(self):
"""Test the Post method of the Notebook List view.
This test tries to call the Post method, which shouldn't work.
"""
r = self.client.post("/notebooks/notebooks")
# Check status code
self.assertEqual(r.status_code, 405)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], METHOD_NOT_ALLOWED)
self.assertEqual(res_data[keys[1]], ERROR_METHOD_NOT_ALLOWED)
def test_put(self):
"""Test the Put method of the Notebook List view.
This test tries to call the Put method, which shouldn't work.
"""
r = self.client.put("/notebooks/notebooks")
# Check status code
self.assertEqual(r.status_code, 405)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], METHOD_NOT_ALLOWED)
self.assertEqual(res_data[keys[1]], ERROR_METHOD_NOT_ALLOWED)
def test_delete(self):
"""Test the Delete method of the Notebook List view.
This test tries to call the Delete method, which shouldn't work.
"""
r = self.client.delete("/notebooks/notebooks")
# Check status code
self.assertEqual(r.status_code, 405)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], METHOD_NOT_ALLOWED)
self.assertEqual(res_data[keys[1]], ERROR_METHOD_NOT_ALLOWED)
class NotebookTestCase(common.BaseTestCase):
"""Notebook resource unit tests."""
def test_get(self):
"""Test the Get method of the Notebook view.
This test logs in as some user, creates a notebook and then tries to
get this notebook, which should work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Get the data of the notebook
url = f"/notebooks/notebook/{notebook_id}"
r = self.client.get(url, headers=headers)
# Check status code
self.assertEqual(r.status_code, 200)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], RETRIEVED)
self.assertEqual(res_data[keys[1]], OK)
# Check result
self.assertIn("result", r.json)
notebook = r.json["result"]
self.assertEqual(type(notebook), dict)
# Check notebook
self.assertEqual(len(notebook), 5)
for i in ("id", "name", "tag_colors", "created", "last_modified"):
self.assertIn(i, notebook)
self.assertEqual(notebook["id"], notebook_id)
for i in ("name", "tag_colors"):
self.assertEqual(notebook[i], n[i])
def test_get_missing_access_token(self):
"""Test the Get method of the Notebook view.
This test tries to get a notebook without providing the access token,
which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Get notebook without providing the access token
r = self.client.get(f"/notebooks/notebook/{notebook_id}")
# Check status code
self.assertEqual(r.status_code, 401)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], MISSING_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_MISSING_TOKEN)
def test_get_invalid_access_token(self):
"""Test the Get method of the Notebook view.
This test tries to get a notebook providing an invalid access token,
which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Get notebook providing an invalid access token ("1234")
headers = {"Authorization": "Bearer 1234"}
url = f"/notebooks/notebook/{notebook_id}"
r = self.client.get(url, headers=headers)
# Check status code
self.assertEqual(r.status_code, 422)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], INVALID_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_INVALID_TOKEN)
def test_get_unauthorized_user(self):
"""Test the Get method of the Notebook view.
This test tries to get a notebook of some user as another user, which
shouldn't work.
"""
# Log in
data = {
"username": self.admin["username"],
"password": self.admin["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Log in as another user
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Get notebook
headers = {"Authorization": f"Bearer {access_token}"}
url = f"/notebooks/notebook/{notebook_id}"
r = self.client.get(url, headers=headers)
# Check status code
self.assertEqual(r.status_code, 403)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], USER_UNAUTHORIZED)
self.assertEqual(res_data[keys[1]], ERROR_UNAUTHORIZED_USER)
def test_get_notebook_not_found(self):
"""Test the Get method of the Notebook view.
This test tries to get a notebook that doesn't exist, which shouldn't
work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Get notebook
headers = {"Authorization": f"Bearer {access_token}"}
_id = get_uuid()
r = self.client.get(f"/notebooks/notebook/{_id}", headers=headers)
# Check status code
self.assertEqual(r.status_code, 403)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], USER_UNAUTHORIZED)
self.assertEqual(res_data[keys[1]], ERROR_UNAUTHORIZED_USER)
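# Note: a nonexistent notebook id yields the same 403 "unauthorized" response
# as another user's notebook (compare test_get_unauthorized_user above), so the
# API avoids revealing whether a given id exists. This reading is inferred from
# the two tests, not stated by the API itself.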
def test_post(self):
"""Test the Post method of the Notebook view.
This test tries to create a notebook, which should work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 201)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], CREATED)
self.assertEqual(res_data[keys[1]], OK)
# Check result
self.assertIn("result", r.json)
result = res_data["result"]
self.assertIn("id", result)
notebook_id = result["id"]
self.assertEqual(type(notebook_id), str)
def test_post_missing_access_token(self):
"""Test the Post method of the Notebook view.
This test tries to create a notebook without providing the access
token, which shouldn't work.
"""
# Create notebook
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", json=n)
# Check status code
self.assertEqual(r.status_code, 401)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], MISSING_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_MISSING_TOKEN)
def test_post_invalid_access_token(self):
"""Test the Post method of the Notebook view.
This test tries to create a notebook providing an invalid access token,
which shouldn't work.
"""
# Create notebook providing an invalid access token ("1234")
headers = {"Authorization": "Bearer 1234"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 422)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], INVALID_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_INVALID_TOKEN)
def test_post_missing_fields(self):
"""Test the Post method of the Notebook view.
This test tries to create a notebook with some mandatory field missing,
which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook (without data)
headers = {"Authorization": f"Bearer {access_token}"}
r1 = self.client.post("/notebooks/notebook", headers=headers)
# Create notebook (with empty data)
r2 = self.client.post(
"/notebooks/notebook", headers=headers, json=dict())
# Check status codes and messages
keys = ("message", "message_type")
for r in (r1, r2):
# Status code
self.assertEqual(r.status_code, 400)
# Message
res_data = r.json
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[1]], ERROR_VALIDATION)
def test_post_user(self):
"""Test the Post method of the Notebook view.
This test tries to create a new notebook specifying its user, which
shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create a notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"user_id": self.reg1["id"],
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post(url, headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 400)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[1]], ERROR_VALIDATION)
def test_post_invalid_fields(self):
"""Test the Post method of the Notebook view.
This test tries to create a notebook providing some invalid/unexpected
field, which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {"name": "Test Notebook", "invalid_field": "1234"}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 400)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[1]], ERROR_VALIDATION)
def test_post_notebook_exists(self):
"""Test the Post method of the Notebook view.
This test tries to create a notebook with the same name as an existing
notebook of the request user, which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 201)
# Create same notebook again
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 400)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], EXISTS)
self.assertEqual(res_data[keys[1]], ERROR_ITEM_EXISTS)
def test_put_new(self):
"""Test the Put method of the Notebook view.
This test tries to create a notebook through the Put method, which should
work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create a notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put("/notebooks/notebook", headers=headers, json=n)
res_data = r.json
# Check status code
self.assertEqual(r.status_code, 201)
# Check result
self.assertIn("result", res_data)
result = res_data["result"]
self.assertIn("id", result)
notebook_id = result["id"]
self.assertEqual(type(notebook_id), str)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], CREATED)
self.assertEqual(res_data[keys[1]], OK)
def test_put_edit(self):
"""Test the Put method of the Notebook view.
This test tries to edit one of the request user's notebooks, which
should work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put("/notebooks/notebook", headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Edit notebook
new_notebook = {
"name": "Test Notebook 2",
"tag_colors": {"tag3": "#ff0000"}}
url = f"/notebooks/notebook/{notebook_id}"
r = self.client.put(url, headers=headers, json=new_notebook)
# Check status code
self.assertEqual(r.status_code, 200)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], UPDATED)
self.assertEqual(res_data[keys[1]], OK)
# Get notebook
url = f"/notebooks/notebook/{notebook_id}"
r = self.client.get(url, headers=headers)
notebook = r.json["result"]
# Check data
self.assertEqual(len(notebook), 5)
for i in ("id", "name", "tag_colors", "created", "last_modified"):
self.assertIn(i, notebook)
self.assertEqual(notebook["id"], notebook_id)
for i in ("name", "tag_colors"):
self.assertEqual(notebook[i], new_notebook[i])
# Edit notebook without setting the tag colors
new_notebook = {"name": "Test Notebook 3"}
self.client.put(url, headers=headers, json=new_notebook)
# Get notebook
r = self.client.get(url, headers=headers)
notebook = r.json["result"]
# Check data
self.assertEqual(len(notebook), 4)
for i in ("id", "name", "created", "last_modified"):
self.assertIn(i, notebook)
self.assertNotIn("tag_colors", notebook)
self.assertEqual(notebook["name"], new_notebook["name"])
def test_put_new_missing_access_token(self):
"""Test the Put method of the Notebook view.
This test tries to create a new notebook without providing the access
token, which shouldn't work.
"""
# Create notebook without providing the access token
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put("/notebooks/notebook", json=n)
# Check status code
self.assertEqual(r.status_code, 401)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], MISSING_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_MISSING_TOKEN)
def test_put_edit_missing_access_token(self):
"""Test the Put method of the Notebook view.
This test tries to edit a notebook without providing the access token,
which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Edit notebook without providing the access token
n = {"name": "Test Notebook"}
r = self.client.put(f"/notebooks/notebook/{notebook_id}", json=n)
# Check status code
self.assertEqual(r.status_code, 401)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], MISSING_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_MISSING_TOKEN)
def test_put_new_invalid_access_token(self):
"""Test the Put method of the Notebook view.
This test tries to create a notebook providing an invalid access token,
which shouldn't work.
"""
# Create notebook providing an invalid access token ("1234")
headers = {"Authorization": "Bearer 1234"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put("/notebooks/notebook", headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 422)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], INVALID_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_INVALID_TOKEN)
def test_put_edit_invalid_access_token(self):
"""Test the Put method of the Notebook view.
This test tries to edit a notebook providing an invalid access token,
which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Edit notebook providing an invalid access token ("1234")
url = f"/notebooks/notebook/{notebook_id}"
headers = {"Authorization": "Bearer 1234"}
n = {"name": "Test Notebook"}
r = self.client.put(url, headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 422)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], INVALID_TOKEN)
self.assertEqual(res_data[keys[1]], ERROR_INVALID_TOKEN)
def test_put_edit_unauthorized_user(self):
"""Test the Get method of the Notebook view.
This test creates a notebook of some user, and then tries to edit the
notebook as another user, which shouldn't work.
"""
# Log in
data = {
"username": self.admin["username"],
"password": self.admin["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
n = {"name": "Test Notebook"}
r = self.client.post("/notebooks/notebook", headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Log in as another user
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Edit the administrator's notebook as the regular (non-owner) user
url = f"/notebooks/notebook/{notebook_id}"
headers = {"Authorization": f"Bearer {access_token}"}
new_notebook = {"name": "Test Notebook 2"}
r = self.client.put(url, headers=headers, json=new_notebook)
# Check status code
self.assertEqual(r.status_code, 403)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[0]], USER_UNAUTHORIZED)
self.assertEqual(res_data[keys[1]], ERROR_UNAUTHORIZED_USER)
def test_put_missing_fields(self):
"""Test the Put method of the Notebook view.
This test tries to create a notebook with some mandatory field missing,
which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook (without data)
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
r1 = self.client.put(url, headers=headers)
# Create notebook (with empty data)
r2 = self.client.put(url, headers=headers, json=dict())
# Check status codes and messages
keys = ("message", "message_type")
for r in (r1, r2):
# Status code
self.assertEqual(r.status_code, 400)
# Message
res_data = r.json
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[1]], ERROR_VALIDATION)
def test_put_new_user(self):
"""Test the Put method of the Notebook view.
This test tries to create a notebook specifying its user, which
shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"user_id": self.reg1["id"],
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put(url, headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 400)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[1]], ERROR_VALIDATION)
def test_put_new_invalid_fields(self):
"""Test the Put method of the Notebook view.
This test tries to create a notebook providing some invalid/unexpected
field, which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook providing an invalid field
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {"name": "Test Notebook", "invalid_field": "1234"}
r = self.client.put(url, headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 400)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[1]], ERROR_VALIDATION)
def test_put_edit_user(self):
"""Test the Put method of the Notebook view.
This test tries to change the user of some notebook, which shouldn't
work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put(url, headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Change notebook user
url = f"/notebooks/notebook/{notebook_id}"
new_notebook = {"user_id": self.reg2["id"]}
r = self.client.put(url, headers=headers, json=new_notebook)
# Check status code
self.assertEqual(r.status_code, 400)
# Check message
res_data = r.json
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, res_data)
self.assertEqual(res_data[keys[1]], ERROR_VALIDATION)
def test_put_edit_invalid_fields(self):
"""Test the Put method of the Notebook view.
This test tries to edit a notebook providing some invalid/unexpected
field, which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {"name": "Test Notebook"}
r = self.client.put(url, headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Edit the notebook providing an invalid field
url = f"/notebooks/notebook/{notebook_id}"
n = {"name": "Test Notebook", "invalid_field": "1234"}
r = self.client.put(url, headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 400)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[1]], ERROR_VALIDATION)
def test_put_new_notebook_exists(self):
"""Test the Put method of the Notebook view.
This test tries to create a notebook with the same name as an existing
notebook of the request user, which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
headers = {"Authorization": f"Bearer {access_token}"}
url = "/notebooks/notebook"
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put(url, headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 201)
# Create another notebook with the same name
n = {"name": "Test Notebook"}
r = self.client.put(url, headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 400)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[0]], EXISTS)
self.assertEqual(r.json[keys[1]], ERROR_ITEM_EXISTS)
def test_put_edit_notebook_not_found(self):
"""Test the Put method of the Notebook view.
This test tries to edit a notebook that doesn't exist, which shouldn't
work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Edit notebook that doesn't exist
url = f"/notebooks/notebook/{get_uuid()}"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put(url, headers=headers, json=n)
# Check status code
self.assertEqual(r.status_code, 403)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[0]], USER_UNAUTHORIZED)
self.assertEqual(r.json[keys[1]], ERROR_UNAUTHORIZED_USER)
def test_delete(self):
"""Test the Delete method of the Notebook view.
This test creates a notebook and then tries to delete it, which should
work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.put(url, headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Get user notebook list
r = self.client.get("/notebooks/notebooks", headers=headers)
notebooks = r.json["result"]
# Check list
self.assertEqual(len(notebooks), 1)
self.assertEqual(notebooks[0]["name"], n["name"])
# Delete notebook
url = f"/notebooks/notebook/{notebook_id}"
r = self.client.delete(url, headers=headers)
# Check status code
self.assertEqual(r.status_code, 200)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[0]], DELETED)
self.assertEqual(r.json[keys[1]], OK)
# Get user notebook list
r = self.client.get("/notebooks/notebooks", headers=headers)
notebooks = r.json["result"]
# Check list
self.assertEqual(len(notebooks), 0)
def test_delete_missing_access_token(self):
"""Test the Delete method of the Notebook view.
This test tries to delete an existing notebook without providing the
access token, which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post(url, headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Delete notebook without providing the access token
r = self.client.delete(f"/notebooks/notebook/{notebook_id}")
# Check status code
self.assertEqual(r.status_code, 401)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[0]], MISSING_TOKEN)
self.assertEqual(r.json[keys[1]], ERROR_MISSING_TOKEN)
def test_delete_invalid_access_token(self):
"""Test the Delete method of the Notebook view.
This test tries to delete a notebook providing an invalid access token,
which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post(url, headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Delete notebook providing an invalid access token ("1234")
url = f"/notebooks/notebook/{notebook_id}"
headers = {"Authorization": "Bearer 1234"}
r = self.client.delete(url, headers=headers)
# Check status code
self.assertEqual(r.status_code, 422)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[0]], INVALID_TOKEN)
self.assertEqual(r.json[keys[1]], ERROR_INVALID_TOKEN)
def test_delete_access_token_not_fresh(self):
"""Test the Delete method of the Notebook view.
This test tries to delete a notebook providing a non-fresh access
token, which shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
result = self.client.post("/auth/login", json=data).json["result"]
access_token = result["access_token"]
refresh_token = result["refresh_token"]
# Create notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post(url, headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Get a new, not fresh, access token
url = "/auth/refresh"
headers = {"Authorization": f"Bearer {refresh_token}"}
r = self.client.get(url, headers=headers)
access_token = r.json["result"]["access_token"]
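# Assumption inferred from this test: tokens issued by /auth/refresh are
# marked "not fresh" (the usual JWT freshness convention), and destructive
# operations such as DELETE demand a fresh token obtained directly from
# /auth/login.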
# Delete notebook
url = f"/notebooks/notebook/{notebook_id}"
headers = {"Authorization": f"Bearer {access_token}"}
r = self.client.delete(url, headers=headers)
# Check status code
self.assertEqual(r.status_code, 401)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[0]], NOT_FRESH_TOKEN)
self.assertEqual(r.json[keys[1]], ERROR_NOT_FRESH_TOKEN)
def test_delete_unauthorized_user(self):
"""Test the Delete method of the Notebook view.
This test tries to delete a notebook of a user different from the
request user, which shouldn't work.
"""
# Log in
data = {
"username": self.admin["username"],
"password": self.admin["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Create notebook
url = "/notebooks/notebook"
headers = {"Authorization": f"Bearer {access_token}"}
n = {
"name": "Test Notebook",
"tag_colors": {"tag1": "#00ff00", "tag2": "#0000ff"}}
r = self.client.post(url, headers=headers, json=n)
notebook_id = r.json["result"]["id"]
# Log in as another user
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Delete notebook
url = f"/notebooks/notebook/{notebook_id}"
headers = {"Authorization": f"Bearer {access_token}"}
r = self.client.delete(url, headers=headers)
# Check status code
self.assertEqual(r.status_code, 403)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[0]], USER_UNAUTHORIZED)
self.assertEqual(r.json[keys[1]], ERROR_UNAUTHORIZED_USER)
def test_delete_notebook_not_found(self):
"""Test the Delete method of the Notebook view.
This test tries to delete a notebook that doesn't exist, which
shouldn't work.
"""
# Log in
data = {
"username": self.reg1["username"],
"password": self.reg1["password"]}
r = self.client.post("/auth/login", json=data)
access_token = r.json["result"]["access_token"]
# Delete notebook that doesn't exist
url = f"/notebooks/notebook/{get_uuid()}"
headers = {"Authorization": f"Bearer {access_token}"}
r = self.client.delete(url, headers=headers)
# Check status code
self.assertEqual(r.status_code, 403)
# Check message
keys = ("message", "message_type")
for i in keys:
self.assertIn(i, r.json)
self.assertEqual(r.json[keys[0]], USER_UNAUTHORIZED)
self.assertEqual(r.json[keys[1]], ERROR_UNAUTHORIZED_USER)
if __name__ == "__main__":
unittest.main()
|
py | 1a50de3a6fe1618d96115ace81ceb8f7bcd2f30e | import unittest
from test import script_helper
from test import support
import subprocess
import sys
import signal
import io
import locale
import os
import errno
import tempfile
import time
import re
import sysconfig
import warnings
import select
import shutil
import gc
try:
import resource
except ImportError:
resource = None
mswindows = (sys.platform == "win32")
#
# Depends on the following external programs: Python
#
if mswindows:
SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
'os.O_BINARY);')
else:
SETBINARY = ''
try:
mkstemp = tempfile.mkstemp
except AttributeError:
# tempfile.mkstemp is not available
def mkstemp():
"""Replacement for mkstemp, calling mktemp."""
fname = tempfile.mktemp()
return os.open(fname, os.O_RDWR|os.O_CREAT), fname
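# Note: tempfile.mktemp() only returns a name, so this fallback is racy --
# another process could create the file between mktemp() and os.open()
# (O_CREAT without O_EXCL silently reuses an existing file). Acceptable
# best-effort behavior here, since it only runs when tempfile.mkstemp is
# unavailable.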
class BaseTestCase(unittest.TestCase):
def setUp(self):
# Try to minimize the number of children we have so this test
# doesn't crash on some buildbots (Alphas in particular).
support.reap_children()
def tearDown(self):
for inst in subprocess._active:
inst.wait()
subprocess._cleanup()
self.assertFalse(subprocess._active, "subprocess._active not empty")
def assertStderrEqual(self, stderr, expected, msg=None):
# In a debug build, stuff like "[6580 refs]" is printed to stderr at
# shutdown time. That frustrates tests trying to check stderr produced
# from a spawned Python process.
actual = support.strip_python_stderr(stderr)
self.assertEqual(actual, expected, msg)
class PopenTestException(Exception):
pass
class PopenExecuteChildRaises(subprocess.Popen):
"""Popen subclass for testing cleanup of subprocess.PIPE filehandles when
_execute_child fails.
"""
def _execute_child(self, *args, **kwargs):
raise PopenTestException("Forced Exception for Test")
class ProcessTestCase(BaseTestCase):
def test_io_buffered_by_default(self):
p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
try:
self.assertIsInstance(p.stdin, io.BufferedIOBase)
self.assertIsInstance(p.stdout, io.BufferedIOBase)
self.assertIsInstance(p.stderr, io.BufferedIOBase)
finally:
p.wait()
def test_io_unbuffered_works(self):
p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, bufsize=0)
try:
self.assertIsInstance(p.stdin, io.RawIOBase)
self.assertIsInstance(p.stdout, io.RawIOBase)
self.assertIsInstance(p.stderr, io.RawIOBase)
finally:
p.wait()
def test_call_seq(self):
# call() function with sequence argument
rc = subprocess.call([sys.executable, "-c",
"import sys; sys.exit(47)"])
self.assertEqual(rc, 47)
def test_check_call_zero(self):
# check_call() function with zero return code
rc = subprocess.check_call([sys.executable, "-c",
"import sys; sys.exit(0)"])
self.assertEqual(rc, 0)
def test_check_call_nonzero(self):
# check_call() function with non-zero return code
with self.assertRaises(subprocess.CalledProcessError) as c:
subprocess.check_call([sys.executable, "-c",
"import sys; sys.exit(47)"])
self.assertEqual(c.exception.returncode, 47)
def test_check_output(self):
# check_output() function with zero return code
output = subprocess.check_output(
[sys.executable, "-c", "print('BDFL')"])
self.assertIn(b'BDFL', output)
def test_check_output_nonzero(self):
# check_output() function with non-zero return code
with self.assertRaises(subprocess.CalledProcessError) as c:
subprocess.check_output(
[sys.executable, "-c", "import sys; sys.exit(5)"])
self.assertEqual(c.exception.returncode, 5)
def test_check_output_stderr(self):
# check_output() function stderr redirected to stdout
output = subprocess.check_output(
[sys.executable, "-c", "import sys; sys.stderr.write('BDFL')"],
stderr=subprocess.STDOUT)
self.assertIn(b'BDFL', output)
def test_check_output_stdout_arg(self):
# check_output() should raise ValueError when a stdout argument is supplied
with self.assertRaises(ValueError) as c:
output = subprocess.check_output(
[sys.executable, "-c", "print('will not be run')"],
stdout=sys.stdout)
self.fail("Expected ValueError when stdout arg supplied.")
self.assertIn('stdout', c.exception.args[0])
def test_call_kwargs(self):
# call() function with keyword args
newenv = os.environ.copy()
newenv["FRUIT"] = "banana"
rc = subprocess.call([sys.executable, "-c",
'import sys, os;'
'sys.exit(os.getenv("FRUIT")=="banana")'],
env=newenv)
self.assertEqual(rc, 1)
def test_invalid_args(self):
# Popen() called with invalid arguments should raise TypeError
# but Popen.__del__ should not complain (issue #12085)
with support.captured_stderr() as s:
self.assertRaises(TypeError, subprocess.Popen, invalid_arg_name=1)
argcount = subprocess.Popen.__init__.__code__.co_argcount
too_many_args = [0] * (argcount + 1)
self.assertRaises(TypeError, subprocess.Popen, *too_many_args)
self.assertEqual(s.getvalue(), '')
def test_stdin_none(self):
# .stdin is None when not redirected
p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
p.wait()
self.assertEqual(p.stdin, None)
def test_stdout_none(self):
# .stdout is None when not redirected, and the child's stdout will
# be inherited from the parent. In order to test this we run a
# subprocess in a subprocess:
# this_test
# \-- subprocess created by this test (parent)
# \-- subprocess created by the parent subprocess (child)
# The parent doesn't specify stdout, so the child will use the
# parent's stdout. This test checks that the message printed by the
# child goes to the parent stdout. The parent also checks that the
# child's stdout is None. See #11963.
code = ('import sys; from subprocess import Popen, PIPE;'
'p = Popen([sys.executable, "-c", "print(\'test_stdout_none\')"],'
' stdin=PIPE, stderr=PIPE);'
'p.wait(); assert p.stdout is None;')
p = subprocess.Popen([sys.executable, "-c", code],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
out, err = p.communicate()
self.assertEqual(p.returncode, 0, err)
self.assertEqual(out.rstrip(), b'test_stdout_none')
def test_stderr_none(self):
# .stderr is None when not redirected
p = subprocess.Popen([sys.executable, "-c", 'print("banana")'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stdin.close)
p.wait()
self.assertEqual(p.stderr, None)
# For use in the test_cwd* tests below.
def _normalize_cwd(self, cwd):
# Normalize an expected cwd (for Tru64 support).
# We can't use os.path.realpath since it doesn't expand Tru64 {memb}
# strings. See bug #1063571.
original_cwd = os.getcwd()
os.chdir(cwd)
cwd = os.getcwd()
os.chdir(original_cwd)
return cwd
# For use in the test_cwd* tests below.
def _split_python_path(self):
# Return normalized (python_dir, python_base).
python_path = os.path.realpath(sys.executable)
return os.path.split(python_path)
# For use in the test_cwd* tests below.
def _assert_cwd(self, expected_cwd, python_arg, **kwargs):
# Invoke Python via Popen, and assert that (1) the call succeeds,
# and that (2) the current working directory of the child process
# matches *expected_cwd*.
p = subprocess.Popen([python_arg, "-c",
"import os, sys; "
"sys.stdout.write(os.getcwd()); "
"sys.exit(47)"],
stdout=subprocess.PIPE,
**kwargs)
self.addCleanup(p.stdout.close)
p.wait()
self.assertEqual(47, p.returncode)
normcase = os.path.normcase
self.assertEqual(normcase(expected_cwd),
normcase(p.stdout.read().decode("utf-8")))
def test_cwd(self):
# Check that cwd changes the cwd for the child process.
temp_dir = tempfile.gettempdir()
temp_dir = self._normalize_cwd(temp_dir)
self._assert_cwd(temp_dir, sys.executable, cwd=temp_dir)
@unittest.skipIf(mswindows, "pending resolution of issue #15533")
def test_cwd_with_relative_arg(self):
# Check that Popen looks for args[0] relative to cwd if args[0]
# is relative.
python_dir, python_base = self._split_python_path()
rel_python = os.path.join(os.curdir, python_base)
with support.temp_cwd() as wrong_dir:
# Before calling with the correct cwd, confirm that the call fails
# without cwd and with the wrong cwd.
self.assertRaises(OSError, subprocess.Popen,
[rel_python])
self.assertRaises(OSError, subprocess.Popen,
[rel_python], cwd=wrong_dir)
python_dir = self._normalize_cwd(python_dir)
self._assert_cwd(python_dir, rel_python, cwd=python_dir)
@unittest.skipIf(mswindows, "pending resolution of issue #15533")
def test_cwd_with_relative_executable(self):
# Check that Popen looks for executable relative to cwd if executable
# is relative (and that executable takes precedence over args[0]).
python_dir, python_base = self._split_python_path()
rel_python = os.path.join(os.curdir, python_base)
doesntexist = "somethingyoudonthave"
with support.temp_cwd() as wrong_dir:
# Before calling with the correct cwd, confirm that the call fails
# without cwd and with the wrong cwd.
self.assertRaises(OSError, subprocess.Popen,
[doesntexist], executable=rel_python)
self.assertRaises(OSError, subprocess.Popen,
[doesntexist], executable=rel_python,
cwd=wrong_dir)
python_dir = self._normalize_cwd(python_dir)
self._assert_cwd(python_dir, doesntexist, executable=rel_python,
cwd=python_dir)
def test_cwd_with_absolute_arg(self):
# Check that Popen can find the executable when the cwd is wrong
# if args[0] is an absolute path.
python_dir, python_base = self._split_python_path()
abs_python = os.path.join(python_dir, python_base)
rel_python = os.path.join(os.curdir, python_base)
with script_helper.temp_dir() as wrong_dir:
# Before calling with an absolute path, confirm that using a
# relative path fails.
self.assertRaises(OSError, subprocess.Popen,
[rel_python], cwd=wrong_dir)
wrong_dir = self._normalize_cwd(wrong_dir)
self._assert_cwd(wrong_dir, abs_python, cwd=wrong_dir)
def test_executable_with_cwd(self):
python_dir, python_base = self._split_python_path()
python_dir = self._normalize_cwd(python_dir)
self._assert_cwd(python_dir, "somethingyoudonthave",
executable=sys.executable, cwd=python_dir)
@unittest.skipIf(sysconfig.is_python_build(),
"need an installed Python. See #7774")
def test_executable_without_cwd(self):
# For a normal installation, it should work without 'cwd'
# argument. For test runs in the build directory, see #7774.
self._assert_cwd('', "somethingyoudonthave", executable=sys.executable)
def test_stdin_pipe(self):
# stdin redirection
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.exit(sys.stdin.read() == "pear")'],
stdin=subprocess.PIPE)
p.stdin.write(b"pear")
p.stdin.close()
p.wait()
self.assertEqual(p.returncode, 1)
def test_stdin_filedes(self):
# stdin is set to open file descriptor
tf = tempfile.TemporaryFile()
self.addCleanup(tf.close)
d = tf.fileno()
os.write(d, b"pear")
os.lseek(d, 0, 0)
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.exit(sys.stdin.read() == "pear")'],
stdin=d)
p.wait()
self.assertEqual(p.returncode, 1)
def test_stdin_fileobj(self):
# stdin is set to open file object
tf = tempfile.TemporaryFile()
self.addCleanup(tf.close)
tf.write(b"pear")
tf.seek(0)
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.exit(sys.stdin.read() == "pear")'],
stdin=tf)
p.wait()
self.assertEqual(p.returncode, 1)
def test_stdout_pipe(self):
# stdout redirection
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stdout.write("orange")'],
stdout=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.assertEqual(p.stdout.read(), b"orange")
def test_stdout_filedes(self):
# stdout is set to open file descriptor
tf = tempfile.TemporaryFile()
self.addCleanup(tf.close)
d = tf.fileno()
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stdout.write("orange")'],
stdout=d)
p.wait()
os.lseek(d, 0, 0)
self.assertEqual(os.read(d, 1024), b"orange")
def test_stdout_fileobj(self):
# stdout is set to open file object
tf = tempfile.TemporaryFile()
self.addCleanup(tf.close)
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stdout.write("orange")'],
stdout=tf)
p.wait()
tf.seek(0)
self.assertEqual(tf.read(), b"orange")
def test_stderr_pipe(self):
# stderr redirection
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stderr.write("strawberry")'],
stderr=subprocess.PIPE)
self.addCleanup(p.stderr.close)
self.assertStderrEqual(p.stderr.read(), b"strawberry")
def test_stderr_filedes(self):
# stderr is set to open file descriptor
tf = tempfile.TemporaryFile()
self.addCleanup(tf.close)
d = tf.fileno()
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stderr.write("strawberry")'],
stderr=d)
p.wait()
os.lseek(d, 0, 0)
self.assertStderrEqual(os.read(d, 1024), b"strawberry")
def test_stderr_fileobj(self):
# stderr is set to open file object
tf = tempfile.TemporaryFile()
self.addCleanup(tf.close)
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stderr.write("strawberry")'],
stderr=tf)
p.wait()
tf.seek(0)
self.assertStderrEqual(tf.read(), b"strawberry")
def test_stdout_stderr_pipe(self):
# capture stdout and stderr to the same pipe
p = subprocess.Popen([sys.executable, "-c",
'import sys;'
'sys.stdout.write("apple");'
'sys.stdout.flush();'
'sys.stderr.write("orange")'],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self.addCleanup(p.stdout.close)
self.assertStderrEqual(p.stdout.read(), b"appleorange")
def test_stdout_stderr_file(self):
# capture stdout and stderr to the same open file
tf = tempfile.TemporaryFile()
self.addCleanup(tf.close)
p = subprocess.Popen([sys.executable, "-c",
'import sys;'
'sys.stdout.write("apple");'
'sys.stdout.flush();'
'sys.stderr.write("orange")'],
stdout=tf,
stderr=tf)
p.wait()
tf.seek(0)
self.assertStderrEqual(tf.read(), b"appleorange")
def test_stdout_filedes_of_stdout(self):
# stdout is set to 1 (#1531862).
# To avoid printing the text on stdout, we do something similar to
# test_stdout_none (see above). The parent subprocess calls the child
# subprocess passing stdout=1, and this test uses stdout=PIPE in
# order to capture and check the output of the parent. See #11963.
code = ('import sys, subprocess; '
'rc = subprocess.call([sys.executable, "-c", '
' "import os, sys; sys.exit(os.write(sys.stdout.fileno(), '
'b\'test with stdout=1\'))"], stdout=1); '
'assert rc == 18')
p = subprocess.Popen([sys.executable, "-c", code],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
out, err = p.communicate()
self.assertEqual(p.returncode, 0, err)
self.assertEqual(out.rstrip(), b'test with stdout=1')
def test_env(self):
newenv = os.environ.copy()
newenv["FRUIT"] = "orange"
with subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stdout.write(os.getenv("FRUIT"))'],
stdout=subprocess.PIPE,
env=newenv) as p:
stdout, stderr = p.communicate()
self.assertEqual(stdout, b"orange")
# Windows requires at least the SYSTEMROOT environment variable to start
# Python
@unittest.skipIf(sys.platform == 'win32',
'cannot test an empty env on Windows')
@unittest.skipIf(sysconfig.get_config_var('Py_ENABLE_SHARED') is not None,
'the python library cannot be loaded '
'with an empty environment')
def test_empty_env(self):
with subprocess.Popen([sys.executable, "-c",
'import os; '
'print(list(os.environ.keys()))'],
stdout=subprocess.PIPE,
env={}) as p:
stdout, stderr = p.communicate()
self.assertIn(stdout.strip(),
(b"[]",
# Mac OS X adds __CF_USER_TEXT_ENCODING variable to an empty
# environment
b"['__CF_USER_TEXT_ENCODING']"))
def test_communicate_stdin(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys;'
'sys.exit(sys.stdin.read() == "pear")'],
stdin=subprocess.PIPE)
p.communicate(b"pear")
self.assertEqual(p.returncode, 1)
def test_communicate_stdout(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stdout.write("pineapple")'],
stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
self.assertEqual(stdout, b"pineapple")
self.assertEqual(stderr, None)
def test_communicate_stderr(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys; sys.stderr.write("pineapple")'],
stderr=subprocess.PIPE)
(stdout, stderr) = p.communicate()
self.assertEqual(stdout, None)
self.assertStderrEqual(stderr, b"pineapple")
def test_communicate(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stderr.write("pineapple");'
'sys.stdout.write(sys.stdin.read())'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
self.addCleanup(p.stdin.close)
(stdout, stderr) = p.communicate(b"banana")
self.assertEqual(stdout, b"banana")
self.assertStderrEqual(stderr, b"pineapple")
# Test for the fd leak reported in http://bugs.python.org/issue2791.
def test_communicate_pipe_fd_leak(self):
for stdin_pipe in (False, True):
for stdout_pipe in (False, True):
for stderr_pipe in (False, True):
options = {}
if stdin_pipe:
options['stdin'] = subprocess.PIPE
if stdout_pipe:
options['stdout'] = subprocess.PIPE
if stderr_pipe:
options['stderr'] = subprocess.PIPE
if not options:
continue
p = subprocess.Popen((sys.executable, "-c", "pass"), **options)
p.communicate()
if p.stdin is not None:
self.assertTrue(p.stdin.closed)
if p.stdout is not None:
self.assertTrue(p.stdout.closed)
if p.stderr is not None:
self.assertTrue(p.stderr.closed)
def test_communicate_returns(self):
# communicate() should return None if no redirection is active
p = subprocess.Popen([sys.executable, "-c",
"import sys; sys.exit(47)"])
(stdout, stderr) = p.communicate()
self.assertEqual(stdout, None)
self.assertEqual(stderr, None)
def test_communicate_pipe_buf(self):
# communicate() with writes larger than pipe_buf
# This test will probably deadlock rather than fail, if
# communicate() does not work properly.
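# Why a deadlock is the failure mode: the child fills its stderr pipe
# (3 * pipe_buf bytes) while the parent is still writing to stdin. If
# communicate() wrote all of stdin before reading, both processes would
# block forever on full pipes; it must interleave reads and writes
# (threads on Windows, select/poll on POSIX) to make progress.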
x, y = os.pipe()
if mswindows:
pipe_buf = 512
else:
pipe_buf = os.fpathconf(x, "PC_PIPE_BUF")
os.close(x)
os.close(y)
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stdout.write(sys.stdin.read(47));'
'sys.stderr.write("xyz"*%d);'
'sys.stdout.write(sys.stdin.read())' % pipe_buf],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
self.addCleanup(p.stdin.close)
string_to_write = b"abc"*pipe_buf
(stdout, stderr) = p.communicate(string_to_write)
self.assertEqual(stdout, string_to_write)
def test_writes_before_communicate(self):
# stdin.write before communicate()
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stdout.write(sys.stdin.read())'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
self.addCleanup(p.stdin.close)
p.stdin.write(b"banana")
(stdout, stderr) = p.communicate(b"split")
self.assertEqual(stdout, b"bananasplit")
self.assertStderrEqual(stderr, b"")
def test_universal_newlines(self):
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' + SETBINARY +
'buf = sys.stdout.buffer;'
'buf.write(sys.stdin.readline().encode());'
'buf.flush();'
'buf.write(b"line2\\n");'
'buf.flush();'
'buf.write(sys.stdin.read().encode());'
'buf.flush();'
'buf.write(b"line4\\n");'
'buf.flush();'
'buf.write(b"line5\\r\\n");'
'buf.flush();'
'buf.write(b"line6\\r");'
'buf.flush();'
'buf.write(b"\\nline7");'
'buf.flush();'
'buf.write(b"\\nline8");'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
universal_newlines=1)
p.stdin.write("line1\n")
self.assertEqual(p.stdout.readline(), "line1\n")
p.stdin.write("line3\n")
p.stdin.close()
self.addCleanup(p.stdout.close)
self.assertEqual(p.stdout.readline(),
"line2\n")
self.assertEqual(p.stdout.read(6),
"line3\n")
self.assertEqual(p.stdout.read(),
"line4\nline5\nline6\nline7\nline8")
def test_universal_newlines_communicate(self):
# universal newlines through communicate()
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' + SETBINARY +
'buf = sys.stdout.buffer;'
'buf.write(b"line2\\n");'
'buf.flush();'
'buf.write(b"line4\\n");'
'buf.flush();'
'buf.write(b"line5\\r\\n");'
'buf.flush();'
'buf.write(b"line6\\r");'
'buf.flush();'
'buf.write(b"\\nline7");'
'buf.flush();'
'buf.write(b"\\nline8");'],
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
universal_newlines=1)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
(stdout, stderr) = p.communicate()
self.assertEqual(stdout,
"line2\nline4\nline5\nline6\nline7\nline8")
def test_universal_newlines_communicate_stdin(self):
# universal newlines through communicate(), with only stdin
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' + SETBINARY + '''\nif True:
s = sys.stdin.readline()
assert s == "line1\\n", repr(s)
s = sys.stdin.read()
assert s == "line3\\n", repr(s)
'''],
stdin=subprocess.PIPE,
universal_newlines=1)
(stdout, stderr) = p.communicate("line1\nline3\n")
self.assertEqual(p.returncode, 0)
def test_universal_newlines_communicate_input_none(self):
# Test communicate(input=None) with universal newlines.
#
# We set stdout to PIPE because, as of this writing, a different
# code path is tested when the number of pipes is zero or one.
p = subprocess.Popen([sys.executable, "-c", "pass"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
universal_newlines=True)
p.communicate()
self.assertEqual(p.returncode, 0)
def test_universal_newlines_communicate_stdin_stdout_stderr(self):
# universal newlines through communicate(), with stdin, stdout, stderr
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;' + SETBINARY + '''\nif True:
s = sys.stdin.buffer.readline()
sys.stdout.buffer.write(s)
sys.stdout.buffer.write(b"line2\\r")
sys.stderr.buffer.write(b"eline2\\n")
s = sys.stdin.buffer.read()
sys.stdout.buffer.write(s)
sys.stdout.buffer.write(b"line4\\n")
sys.stdout.buffer.write(b"line5\\r\\n")
sys.stderr.buffer.write(b"eline6\\r")
sys.stderr.buffer.write(b"eline7\\r\\nz")
'''],
stdin=subprocess.PIPE,
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
universal_newlines=True)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
(stdout, stderr) = p.communicate("line1\nline3\n")
self.assertEqual(p.returncode, 0)
self.assertEqual("line1\nline2\nline3\nline4\nline5\n", stdout)
        # A Python debug build pushes something like "[42442 refs]\n"
        # to stderr when the subprocess exits.
# Don't use assertStderrEqual because it strips CR and LF from output.
self.assertTrue(stderr.startswith("eline2\neline6\neline7\n"))
def test_universal_newlines_communicate_encodings(self):
# Check that universal newlines mode works for various encodings,
# in particular for encodings in the UTF-16 and UTF-32 families.
# See issue #15595.
#
        # UTF-16 and UTF-32-BE are sufficient to check encodings both with
        # a BOM (UTF-16) and without one (UTF-32-BE), covering both the
        # UTF-16 and UTF-32 families.
for encoding in ['utf-16', 'utf-32-be']:
old_getpreferredencoding = locale.getpreferredencoding
# Indirectly via io.TextIOWrapper, Popen() defaults to
# locale.getpreferredencoding(False) and earlier in Python 3.2 to
# locale.getpreferredencoding().
def getpreferredencoding(do_setlocale=True):
return encoding
code = ("import sys; "
r"sys.stdout.buffer.write('1\r\n2\r3\n4'.encode('%s'))" %
encoding)
args = [sys.executable, '-c', code]
try:
locale.getpreferredencoding = getpreferredencoding
# We set stdin to be non-None because, as of this writing,
# a different code path is used when the number of pipes is
# zero or one.
popen = subprocess.Popen(args, universal_newlines=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
stdout, stderr = popen.communicate(input='')
finally:
locale.getpreferredencoding = old_getpreferredencoding
self.assertEqual(stdout, '1\n2\n3\n4')
def test_no_leaking(self):
# Make sure we leak no resources
if not mswindows:
max_handles = 1026 # too much for most UNIX systems
else:
max_handles = 2050 # too much for (at least some) Windows setups
handles = []
tmpdir = tempfile.mkdtemp()
try:
for i in range(max_handles):
try:
tmpfile = os.path.join(tmpdir, support.TESTFN)
handles.append(os.open(tmpfile, os.O_WRONLY|os.O_CREAT))
except OSError as e:
if e.errno != errno.EMFILE:
raise
break
else:
self.skipTest("failed to reach the file descriptor limit "
"(tried %d)" % max_handles)
# Close a couple of them (should be enough for a subprocess)
for i in range(10):
os.close(handles.pop())
# Loop creating some subprocesses. If one of them leaks some fds,
# the next loop iteration will fail by reaching the max fd limit.
for i in range(15):
p = subprocess.Popen([sys.executable, "-c",
"import sys;"
"sys.stdout.write(sys.stdin.read())"],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
data = p.communicate(b"lime")[0]
self.assertEqual(data, b"lime")
finally:
for h in handles:
os.close(h)
shutil.rmtree(tmpdir)
def test_list2cmdline(self):
self.assertEqual(subprocess.list2cmdline(['a b c', 'd', 'e']),
'"a b c" d e')
self.assertEqual(subprocess.list2cmdline(['ab"c', '\\', 'd']),
'ab\\"c \\ d')
self.assertEqual(subprocess.list2cmdline(['ab"c', ' \\', 'd']),
'ab\\"c " \\\\" d')
self.assertEqual(subprocess.list2cmdline(['a\\\\\\b', 'de fg', 'h']),
'a\\\\\\b "de fg" h')
self.assertEqual(subprocess.list2cmdline(['a\\"b', 'c', 'd']),
'a\\\\\\"b c d')
self.assertEqual(subprocess.list2cmdline(['a\\\\b c', 'd', 'e']),
'"a\\\\b c" d e')
self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']),
'"a\\\\b\\ c" d e')
self.assertEqual(subprocess.list2cmdline(['ab', '']),
'ab ""')
def test_poll(self):
p = subprocess.Popen([sys.executable,
"-c", "import time; time.sleep(1)"])
count = 0
while p.poll() is None:
time.sleep(0.1)
count += 1
# We expect that the poll loop probably went around about 10 times,
# but, based on system scheduling we can't control, it's possible
# poll() never returned None. It "should be" very rare that it
# didn't go around at least twice.
self.assertGreaterEqual(count, 2)
# Subsequent invocations should just return the returncode
self.assertEqual(p.poll(), 0)
def test_wait(self):
p = subprocess.Popen([sys.executable,
"-c", "import time; time.sleep(2)"])
self.assertEqual(p.wait(), 0)
# Subsequent invocations should just return the returncode
self.assertEqual(p.wait(), 0)
def test_invalid_bufsize(self):
# an invalid type of the bufsize argument should raise
# TypeError.
with self.assertRaises(TypeError):
subprocess.Popen([sys.executable, "-c", "pass"], "orange")
def test_bufsize_is_none(self):
# bufsize=None should be the same as bufsize=0.
p = subprocess.Popen([sys.executable, "-c", "pass"], None)
self.assertEqual(p.wait(), 0)
# Again with keyword arg
p = subprocess.Popen([sys.executable, "-c", "pass"], bufsize=None)
self.assertEqual(p.wait(), 0)
def test_leaking_fds_on_error(self):
# see bug #5179: Popen leaks file descriptors to PIPEs if
# the child fails to execute; this will eventually exhaust
# the maximum number of open fds. 1024 seems a very common
# value for that limit, but Windows has 2048, so we loop
# 1024 times (each call leaked two fds).
for i in range(1024):
# Windows raises IOError. Others raise OSError.
with self.assertRaises(EnvironmentError) as c:
subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# ignore errors that indicate the command was not found
if c.exception.errno not in (errno.ENOENT, errno.EACCES):
raise c.exception
def test_issue8780(self):
# Ensure that stdout is inherited from the parent
# if stdout=PIPE is not used
code = ';'.join((
'import subprocess, sys',
'retcode = subprocess.call('
"[sys.executable, '-c', 'print(\"Hello World!\")'])",
'assert retcode == 0'))
output = subprocess.check_output([sys.executable, '-c', code])
self.assertTrue(output.startswith(b'Hello World!'), ascii(output))
def test_handles_closed_on_exception(self):
# If CreateProcess exits with an error, ensure the
# duplicate output handles are released
ifhandle, ifname = mkstemp()
ofhandle, ofname = mkstemp()
efhandle, efname = mkstemp()
try:
subprocess.Popen (["*"], stdin=ifhandle, stdout=ofhandle,
stderr=efhandle)
except OSError:
os.close(ifhandle)
os.remove(ifname)
os.close(ofhandle)
os.remove(ofname)
os.close(efhandle)
os.remove(efname)
self.assertFalse(os.path.exists(ifname))
self.assertFalse(os.path.exists(ofname))
self.assertFalse(os.path.exists(efname))
def test_communicate_epipe(self):
# Issue 10963: communicate() should hide EPIPE
p = subprocess.Popen([sys.executable, "-c", 'pass'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
self.addCleanup(p.stdin.close)
p.communicate(b"x" * 2**20)
def test_communicate_epipe_only_stdin(self):
# Issue 10963: communicate() should hide EPIPE
p = subprocess.Popen([sys.executable, "-c", 'pass'],
stdin=subprocess.PIPE)
self.addCleanup(p.stdin.close)
time.sleep(2)
p.communicate(b"x" * 2**20)
@unittest.skipUnless(hasattr(signal, 'SIGALRM'),
"Requires signal.SIGALRM")
def test_communicate_eintr(self):
# Issue #12493: communicate() should handle EINTR
def handler(signum, frame):
pass
old_handler = signal.signal(signal.SIGALRM, handler)
self.addCleanup(signal.signal, signal.SIGALRM, old_handler)
# the process is running for 2 seconds
args = [sys.executable, "-c", 'import time; time.sleep(2)']
for stream in ('stdout', 'stderr'):
kw = {stream: subprocess.PIPE}
with subprocess.Popen(args, **kw) as process:
signal.alarm(1)
# communicate() will be interrupted by SIGALRM
process.communicate()
    # This test is Linux-specific for simplicity, to get at least some
    # coverage.  The underlying bug is not platform specific.
@unittest.skipUnless(os.path.isdir('/proc/%d/fd' % os.getpid()),
"Linux specific")
def test_failed_child_execute_fd_leak(self):
"""Test for the fork() failure fd leak reported in issue16327."""
fd_directory = '/proc/%d/fd' % os.getpid()
fds_before_popen = os.listdir(fd_directory)
with self.assertRaises(PopenTestException):
PopenExecuteChildRaises(
[sys.executable, '-c', 'pass'], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# NOTE: This test doesn't verify that the real _execute_child
# does not close the file descriptors itself on the way out
# during an exception. Code inspection has confirmed that.
fds_after_exception = os.listdir(fd_directory)
self.assertEqual(fds_before_popen, fds_after_exception)
# context manager
class _SuppressCoreFiles(object):
"""Try to prevent core files from being created."""
old_limit = None
def __enter__(self):
"""Try to save previous ulimit, then set it to (0, 0)."""
if resource is not None:
try:
self.old_limit = resource.getrlimit(resource.RLIMIT_CORE)
resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
except (ValueError, resource.error):
pass
if sys.platform == 'darwin':
# Check if the 'Crash Reporter' on OSX was configured
# in 'Developer' mode and warn that it will get triggered
# when it is.
#
# This assumes that this context manager is used in tests
# that might trigger the next manager.
value = subprocess.Popen(['/usr/bin/defaults', 'read',
'com.apple.CrashReporter', 'DialogType'],
stdout=subprocess.PIPE).communicate()[0]
if value.strip() == b'developer':
print("this tests triggers the Crash Reporter, "
"that is intentional", end='')
sys.stdout.flush()
def __exit__(self, *args):
"""Return core file behavior to default."""
if self.old_limit is None:
return
if resource is not None:
try:
resource.setrlimit(resource.RLIMIT_CORE, self.old_limit)
except (ValueError, resource.error):
pass
@unittest.skipIf(mswindows, "POSIX specific tests")
class POSIXProcessTestCase(BaseTestCase):
def setUp(self):
super().setUp()
self._nonexistent_dir = "/_this/pa.th/does/not/exist"
def _get_chdir_exception(self):
try:
os.chdir(self._nonexistent_dir)
except OSError as e:
# This avoids hard coding the errno value or the OS perror()
# string and instead capture the exception that we want to see
# below for comparison.
desired_exception = e
desired_exception.strerror += ': ' + repr(self._nonexistent_dir)
else:
self.fail("chdir to nonexistant directory %s succeeded." %
self._nonexistent_dir)
return desired_exception
def test_exception_cwd(self):
"""Test error in the child raised in the parent for a bad cwd."""
desired_exception = self._get_chdir_exception()
try:
p = subprocess.Popen([sys.executable, "-c", ""],
cwd=self._nonexistent_dir)
except OSError as e:
# Test that the child process chdir failure actually makes
# it up to the parent process as the correct exception.
self.assertEqual(desired_exception.errno, e.errno)
self.assertEqual(desired_exception.strerror, e.strerror)
else:
self.fail("Expected OSError: %s" % desired_exception)
def test_exception_bad_executable(self):
"""Test error in the child raised in the parent for a bad executable."""
desired_exception = self._get_chdir_exception()
try:
p = subprocess.Popen([sys.executable, "-c", ""],
executable=self._nonexistent_dir)
except OSError as e:
# Test that the child process exec failure actually makes
# it up to the parent process as the correct exception.
self.assertEqual(desired_exception.errno, e.errno)
self.assertEqual(desired_exception.strerror, e.strerror)
else:
self.fail("Expected OSError: %s" % desired_exception)
def test_exception_bad_args_0(self):
"""Test error in the child raised in the parent for a bad args[0]."""
desired_exception = self._get_chdir_exception()
try:
p = subprocess.Popen([self._nonexistent_dir, "-c", ""])
except OSError as e:
# Test that the child process exec failure actually makes
# it up to the parent process as the correct exception.
self.assertEqual(desired_exception.errno, e.errno)
self.assertEqual(desired_exception.strerror, e.strerror)
else:
self.fail("Expected OSError: %s" % desired_exception)
def test_restore_signals(self):
# Code coverage for both values of restore_signals to make sure it
# at least does not blow up.
# A test for behavior would be complex. Contributions welcome.
subprocess.call([sys.executable, "-c", ""], restore_signals=True)
subprocess.call([sys.executable, "-c", ""], restore_signals=False)
def test_start_new_session(self):
# For code coverage of calling setsid(). We don't care if we get an
# EPERM error from it depending on the test execution environment, that
# still indicates that it was called.
try:
output = subprocess.check_output(
[sys.executable, "-c",
"import os; print(os.getpgid(os.getpid()))"],
start_new_session=True)
except OSError as e:
if e.errno != errno.EPERM:
raise
else:
parent_pgid = os.getpgid(os.getpid())
child_pgid = int(output)
self.assertNotEqual(parent_pgid, child_pgid)
def test_run_abort(self):
# returncode handles signal termination
with _SuppressCoreFiles():
p = subprocess.Popen([sys.executable, "-c",
'import os; os.abort()'])
p.wait()
self.assertEqual(-p.returncode, signal.SIGABRT)
def test_preexec(self):
# DISCLAIMER: Setting environment variables is *not* a good use
# of a preexec_fn. This is merely a test.
p = subprocess.Popen([sys.executable, "-c",
'import sys,os;'
'sys.stdout.write(os.getenv("FRUIT"))'],
stdout=subprocess.PIPE,
preexec_fn=lambda: os.putenv("FRUIT", "apple"))
self.addCleanup(p.stdout.close)
self.assertEqual(p.stdout.read(), b"apple")
def test_preexec_exception(self):
def raise_it():
raise ValueError("What if two swallows carried a coconut?")
try:
p = subprocess.Popen([sys.executable, "-c", ""],
preexec_fn=raise_it)
except RuntimeError as e:
self.assertTrue(
subprocess._posixsubprocess,
"Expected a ValueError from the preexec_fn")
except ValueError as e:
self.assertIn("coconut", e.args[0])
else:
self.fail("Exception raised by preexec_fn did not make it "
"to the parent process.")
class _TestExecuteChildPopen(subprocess.Popen):
"""Used to test behavior at the end of _execute_child."""
def __init__(self, testcase, *args, **kwargs):
self._testcase = testcase
subprocess.Popen.__init__(self, *args, **kwargs)
def _execute_child(self, *args, **kwargs):
try:
subprocess.Popen._execute_child(self, *args, **kwargs)
finally:
# Open a bunch of file descriptors and verify that
# none of them are the same as the ones the Popen
# instance is using for stdin/stdout/stderr.
devzero_fds = [os.open("/dev/zero", os.O_RDONLY)
for _ in range(8)]
try:
for fd in devzero_fds:
self._testcase.assertNotIn(
fd, (self.stdin.fileno(), self.stdout.fileno(),
self.stderr.fileno()),
msg="At least one fd was closed early.")
            finally:
                # map() is lazy in Python 3 and would never close anything;
                # close the fds with an explicit loop instead.
                for fd in devzero_fds:
                    os.close(fd)
@unittest.skipIf(not os.path.exists("/dev/zero"), "/dev/zero required.")
def test_preexec_errpipe_does_not_double_close_pipes(self):
"""Issue16140: Don't double close pipes on preexec error."""
def raise_it():
raise RuntimeError("force the _execute_child() errpipe_data path.")
with self.assertRaises(RuntimeError):
self._TestExecuteChildPopen(
self, [sys.executable, "-c", "pass"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, preexec_fn=raise_it)
def test_preexec_gc_module_failure(self):
# This tests the code that disables garbage collection if the child
# process will execute any Python.
def raise_runtime_error():
raise RuntimeError("this shouldn't escape")
enabled = gc.isenabled()
orig_gc_disable = gc.disable
orig_gc_isenabled = gc.isenabled
try:
gc.disable()
self.assertFalse(gc.isenabled())
subprocess.call([sys.executable, '-c', ''],
preexec_fn=lambda: None)
self.assertFalse(gc.isenabled(),
"Popen enabled gc when it shouldn't.")
gc.enable()
self.assertTrue(gc.isenabled())
subprocess.call([sys.executable, '-c', ''],
preexec_fn=lambda: None)
self.assertTrue(gc.isenabled(), "Popen left gc disabled.")
gc.disable = raise_runtime_error
self.assertRaises(RuntimeError, subprocess.Popen,
[sys.executable, '-c', ''],
preexec_fn=lambda: None)
del gc.isenabled # force an AttributeError
self.assertRaises(AttributeError, subprocess.Popen,
[sys.executable, '-c', ''],
preexec_fn=lambda: None)
finally:
gc.disable = orig_gc_disable
gc.isenabled = orig_gc_isenabled
if not enabled:
gc.disable()
def test_args_string(self):
# args is a string
fd, fname = mkstemp()
# reopen in text mode
with open(fd, "w", errors="surrogateescape") as fobj:
fobj.write("#!/bin/sh\n")
fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" %
sys.executable)
os.chmod(fname, 0o700)
p = subprocess.Popen(fname)
p.wait()
os.remove(fname)
self.assertEqual(p.returncode, 47)
def test_invalid_args(self):
# invalid arguments should raise ValueError
self.assertRaises(ValueError, subprocess.call,
[sys.executable, "-c",
"import sys; sys.exit(47)"],
startupinfo=47)
self.assertRaises(ValueError, subprocess.call,
[sys.executable, "-c",
"import sys; sys.exit(47)"],
creationflags=47)
def test_shell_sequence(self):
# Run command through the shell (sequence)
newenv = os.environ.copy()
newenv["FRUIT"] = "apple"
p = subprocess.Popen(["echo $FRUIT"], shell=1,
stdout=subprocess.PIPE,
env=newenv)
self.addCleanup(p.stdout.close)
self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple")
def test_shell_string(self):
# Run command through the shell (string)
newenv = os.environ.copy()
newenv["FRUIT"] = "apple"
p = subprocess.Popen("echo $FRUIT", shell=1,
stdout=subprocess.PIPE,
env=newenv)
self.addCleanup(p.stdout.close)
self.assertEqual(p.stdout.read().strip(b" \t\r\n\f"), b"apple")
def test_call_string(self):
# call() function with string argument on UNIX
fd, fname = mkstemp()
# reopen in text mode
with open(fd, "w", errors="surrogateescape") as fobj:
fobj.write("#!/bin/sh\n")
fobj.write("exec '%s' -c 'import sys; sys.exit(47)'\n" %
sys.executable)
os.chmod(fname, 0o700)
rc = subprocess.call(fname)
os.remove(fname)
self.assertEqual(rc, 47)
def test_specific_shell(self):
# Issue #9265: Incorrect name passed as arg[0].
shells = []
for prefix in ['/bin', '/usr/bin/', '/usr/local/bin']:
for name in ['bash', 'ksh']:
sh = os.path.join(prefix, name)
if os.path.isfile(sh):
shells.append(sh)
if not shells: # Will probably work for any shell but csh.
self.skipTest("bash or ksh required for this test")
sh = '/bin/sh'
if os.path.isfile(sh) and not os.path.islink(sh):
# Test will fail if /bin/sh is a symlink to csh.
shells.append(sh)
for sh in shells:
p = subprocess.Popen("echo $0", executable=sh, shell=True,
stdout=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.assertEqual(p.stdout.read().strip(), bytes(sh, 'ascii'))
def _kill_process(self, method, *args):
# Do not inherit file handles from the parent.
# It should fix failures on some platforms.
p = subprocess.Popen([sys.executable, "-c", """if 1:
import sys, time
sys.stdout.write('x\\n')
sys.stdout.flush()
time.sleep(30)
"""],
close_fds=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Wait for the interpreter to be completely initialized before
# sending any signal.
p.stdout.read(1)
getattr(p, method)(*args)
return p
@unittest.skipIf(sys.platform.startswith(('netbsd', 'openbsd')),
"Due to known OS bug (issue #16762)")
def _kill_dead_process(self, method, *args):
# Do not inherit file handles from the parent.
# It should fix failures on some platforms.
p = subprocess.Popen([sys.executable, "-c", """if 1:
import sys, time
sys.stdout.write('x\\n')
sys.stdout.flush()
"""],
close_fds=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
# Wait for the interpreter to be completely initialized before
# sending any signal.
p.stdout.read(1)
# The process should end after this
time.sleep(1)
# This shouldn't raise even though the child is now dead
getattr(p, method)(*args)
p.communicate()
def test_send_signal(self):
p = self._kill_process('send_signal', signal.SIGINT)
_, stderr = p.communicate()
self.assertIn(b'KeyboardInterrupt', stderr)
self.assertNotEqual(p.wait(), 0)
def test_kill(self):
p = self._kill_process('kill')
_, stderr = p.communicate()
self.assertStderrEqual(stderr, b'')
self.assertEqual(p.wait(), -signal.SIGKILL)
def test_terminate(self):
p = self._kill_process('terminate')
_, stderr = p.communicate()
self.assertStderrEqual(stderr, b'')
self.assertEqual(p.wait(), -signal.SIGTERM)
def test_send_signal_dead(self):
# Sending a signal to a dead process
self._kill_dead_process('send_signal', signal.SIGINT)
def test_kill_dead(self):
# Killing a dead process
self._kill_dead_process('kill')
def test_terminate_dead(self):
# Terminating a dead process
self._kill_dead_process('terminate')
def check_close_std_fds(self, fds):
# Issue #9905: test that subprocess pipes still work properly with
# some standard fds closed
stdin = 0
newfds = []
for a in fds:
b = os.dup(a)
newfds.append(b)
if a == 0:
stdin = b
try:
for fd in fds:
os.close(fd)
out, err = subprocess.Popen([sys.executable, "-c",
'import sys;'
'sys.stdout.write("apple");'
'sys.stdout.flush();'
'sys.stderr.write("orange")'],
stdin=stdin,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()
err = support.strip_python_stderr(err)
self.assertEqual((out, err), (b'apple', b'orange'))
finally:
for b, a in zip(newfds, fds):
os.dup2(b, a)
for b in newfds:
os.close(b)
def test_close_fd_0(self):
self.check_close_std_fds([0])
def test_close_fd_1(self):
self.check_close_std_fds([1])
def test_close_fd_2(self):
self.check_close_std_fds([2])
def test_close_fds_0_1(self):
self.check_close_std_fds([0, 1])
def test_close_fds_0_2(self):
self.check_close_std_fds([0, 2])
def test_close_fds_1_2(self):
self.check_close_std_fds([1, 2])
def test_close_fds_0_1_2(self):
# Issue #10806: test that subprocess pipes still work properly with
# all standard fds closed.
self.check_close_std_fds([0, 1, 2])
def test_remapping_std_fds(self):
# open up some temporary files
temps = [mkstemp() for i in range(3)]
try:
temp_fds = [fd for fd, fname in temps]
# unlink the files -- we won't need to reopen them
for fd, fname in temps:
os.unlink(fname)
# write some data to what will become stdin, and rewind
os.write(temp_fds[1], b"STDIN")
os.lseek(temp_fds[1], 0, 0)
# move the standard file descriptors out of the way
saved_fds = [os.dup(fd) for fd in range(3)]
try:
# duplicate the file objects over the standard fd's
for fd, temp_fd in enumerate(temp_fds):
os.dup2(temp_fd, fd)
# now use those files in the "wrong" order, so that subprocess
# has to rearrange them in the child
p = subprocess.Popen([sys.executable, "-c",
'import sys; got = sys.stdin.read();'
'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
stdin=temp_fds[1],
stdout=temp_fds[2],
stderr=temp_fds[0])
p.wait()
finally:
# restore the original fd's underneath sys.stdin, etc.
for std, saved in enumerate(saved_fds):
os.dup2(saved, std)
os.close(saved)
for fd in temp_fds:
os.lseek(fd, 0, 0)
out = os.read(temp_fds[2], 1024)
err = support.strip_python_stderr(os.read(temp_fds[0], 1024))
self.assertEqual(out, b"got STDIN")
self.assertEqual(err, b"err")
finally:
for fd in temp_fds:
os.close(fd)
def check_swap_fds(self, stdin_no, stdout_no, stderr_no):
# open up some temporary files
temps = [mkstemp() for i in range(3)]
temp_fds = [fd for fd, fname in temps]
try:
# unlink the files -- we won't need to reopen them
for fd, fname in temps:
os.unlink(fname)
# save a copy of the standard file descriptors
saved_fds = [os.dup(fd) for fd in range(3)]
try:
# duplicate the temp files over the standard fd's 0, 1, 2
for fd, temp_fd in enumerate(temp_fds):
os.dup2(temp_fd, fd)
# write some data to what will become stdin, and rewind
os.write(stdin_no, b"STDIN")
os.lseek(stdin_no, 0, 0)
# now use those files in the given order, so that subprocess
# has to rearrange them in the child
p = subprocess.Popen([sys.executable, "-c",
'import sys; got = sys.stdin.read();'
'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
stdin=stdin_no,
stdout=stdout_no,
stderr=stderr_no)
p.wait()
for fd in temp_fds:
os.lseek(fd, 0, 0)
out = os.read(stdout_no, 1024)
err = support.strip_python_stderr(os.read(stderr_no, 1024))
finally:
for std, saved in enumerate(saved_fds):
os.dup2(saved, std)
os.close(saved)
self.assertEqual(out, b"got STDIN")
self.assertEqual(err, b"err")
finally:
for fd in temp_fds:
os.close(fd)
    # When duping fds, if one of the target fds is 0, 1 or 2, it can be
    # overwritten before it is duplicated (#12607).  The test below
    # exercises every ordering of the three standard fds.
def test_swap_fds(self):
self.check_swap_fds(0, 1, 2)
self.check_swap_fds(0, 2, 1)
self.check_swap_fds(1, 0, 2)
self.check_swap_fds(1, 2, 0)
self.check_swap_fds(2, 0, 1)
self.check_swap_fds(2, 1, 0)
def test_surrogates_error_message(self):
def prepare():
raise ValueError("surrogate:\uDCff")
try:
subprocess.call(
[sys.executable, "-c", "pass"],
preexec_fn=prepare)
except ValueError as err:
            # The pure Python implementation keeps the message
self.assertIsNone(subprocess._posixsubprocess)
self.assertEqual(str(err), "surrogate:\uDCff")
except RuntimeError as err:
# _posixsubprocess uses a default message
self.assertIsNotNone(subprocess._posixsubprocess)
self.assertEqual(str(err), "Exception occurred in preexec_fn.")
else:
self.fail("Expected ValueError or RuntimeError")
def test_undecodable_env(self):
for key, value in (('test', 'abc\uDCFF'), ('test\uDCFF', '42')):
# test str with surrogates
script = "import os; print(ascii(os.getenv(%s)))" % repr(key)
env = os.environ.copy()
env[key] = value
# Use C locale to get ascii for the locale encoding to force
# surrogate-escaping of \xFF in the child process; otherwise it can
# be decoded as-is if the default locale is latin-1.
env['LC_ALL'] = 'C'
stdout = subprocess.check_output(
[sys.executable, "-c", script],
env=env)
stdout = stdout.rstrip(b'\n\r')
self.assertEqual(stdout.decode('ascii'), ascii(value))
# test bytes
key = key.encode("ascii", "surrogateescape")
value = value.encode("ascii", "surrogateescape")
script = "import os; print(ascii(os.getenvb(%s)))" % repr(key)
env = os.environ.copy()
env[key] = value
stdout = subprocess.check_output(
[sys.executable, "-c", script],
env=env)
stdout = stdout.rstrip(b'\n\r')
self.assertEqual(stdout.decode('ascii'), ascii(value))
def test_bytes_program(self):
abs_program = os.fsencode(sys.executable)
path, program = os.path.split(sys.executable)
program = os.fsencode(program)
# absolute bytes path
exitcode = subprocess.call([abs_program, "-c", "pass"])
self.assertEqual(exitcode, 0)
# bytes program, unicode PATH
env = os.environ.copy()
env["PATH"] = path
exitcode = subprocess.call([program, "-c", "pass"], env=env)
self.assertEqual(exitcode, 0)
# bytes program, bytes PATH
envb = os.environb.copy()
envb[b"PATH"] = os.fsencode(path)
exitcode = subprocess.call([program, "-c", "pass"], env=envb)
self.assertEqual(exitcode, 0)
def test_pipe_cloexec(self):
sleeper = support.findfile("input_reader.py", subdir="subprocessdata")
fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
p1 = subprocess.Popen([sys.executable, sleeper],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, close_fds=False)
self.addCleanup(p1.communicate, b'')
p2 = subprocess.Popen([sys.executable, fd_status],
stdout=subprocess.PIPE, close_fds=False)
output, error = p2.communicate()
result_fds = set(map(int, output.split(b',')))
unwanted_fds = set([p1.stdin.fileno(), p1.stdout.fileno(),
p1.stderr.fileno()])
self.assertFalse(result_fds & unwanted_fds,
"Expected no fds from %r to be open in child, "
"found %r" %
(unwanted_fds, result_fds & unwanted_fds))
def test_pipe_cloexec_real_tools(self):
qcat = support.findfile("qcat.py", subdir="subprocessdata")
qgrep = support.findfile("qgrep.py", subdir="subprocessdata")
subdata = b'zxcvbn'
data = subdata * 4 + b'\n'
p1 = subprocess.Popen([sys.executable, qcat],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
close_fds=False)
p2 = subprocess.Popen([sys.executable, qgrep, subdata],
stdin=p1.stdout, stdout=subprocess.PIPE,
close_fds=False)
self.addCleanup(p1.wait)
self.addCleanup(p2.wait)
def kill_p1():
try:
p1.terminate()
except ProcessLookupError:
pass
def kill_p2():
try:
p2.terminate()
except ProcessLookupError:
pass
self.addCleanup(kill_p1)
self.addCleanup(kill_p2)
p1.stdin.write(data)
p1.stdin.close()
readfiles, ignored1, ignored2 = select.select([p2.stdout], [], [], 10)
self.assertTrue(readfiles, "The child hung")
self.assertEqual(p2.stdout.read(), data)
p1.stdout.close()
p2.stdout.close()
def test_close_fds(self):
fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
fds = os.pipe()
self.addCleanup(os.close, fds[0])
self.addCleanup(os.close, fds[1])
open_fds = set(fds)
# add a bunch more fds
for _ in range(9):
fd = os.open("/dev/null", os.O_RDONLY)
self.addCleanup(os.close, fd)
open_fds.add(fd)
p = subprocess.Popen([sys.executable, fd_status],
stdout=subprocess.PIPE, close_fds=False)
output, ignored = p.communicate()
remaining_fds = set(map(int, output.split(b',')))
self.assertEqual(remaining_fds & open_fds, open_fds,
"Some fds were closed")
p = subprocess.Popen([sys.executable, fd_status],
stdout=subprocess.PIPE, close_fds=True)
output, ignored = p.communicate()
remaining_fds = set(map(int, output.split(b',')))
self.assertFalse(remaining_fds & open_fds,
"Some fds were left open")
self.assertIn(1, remaining_fds, "Subprocess failed")
# Keep some of the fd's we opened open in the subprocess.
# This tests _posixsubprocess.c's proper handling of fds_to_keep.
fds_to_keep = set(open_fds.pop() for _ in range(8))
p = subprocess.Popen([sys.executable, fd_status],
stdout=subprocess.PIPE, close_fds=True,
                             pass_fds=fds_to_keep)
output, ignored = p.communicate()
remaining_fds = set(map(int, output.split(b',')))
self.assertFalse(remaining_fds & fds_to_keep & open_fds,
"Some fds not in pass_fds were left open")
self.assertIn(1, remaining_fds, "Subprocess failed")
# Mac OS X Tiger (10.4) has a kernel bug: sometimes, the file
# descriptor of a pipe closed in the parent process is valid in the
# child process according to fstat(), but the mode of the file
# descriptor is invalid, and read or write raise an error.
@support.requires_mac_ver(10, 5)
def test_pass_fds(self):
fd_status = support.findfile("fd_status.py", subdir="subprocessdata")
open_fds = set()
for x in range(5):
fds = os.pipe()
self.addCleanup(os.close, fds[0])
self.addCleanup(os.close, fds[1])
open_fds.update(fds)
for fd in open_fds:
p = subprocess.Popen([sys.executable, fd_status],
stdout=subprocess.PIPE, close_fds=True,
pass_fds=(fd, ))
output, ignored = p.communicate()
remaining_fds = set(map(int, output.split(b',')))
to_be_closed = open_fds - {fd}
self.assertIn(fd, remaining_fds, "fd to be passed not passed")
self.assertFalse(remaining_fds & to_be_closed,
"fd to be closed passed")
# pass_fds overrides close_fds with a warning.
with self.assertWarns(RuntimeWarning) as context:
self.assertFalse(subprocess.call(
[sys.executable, "-c", "import sys; sys.exit(0)"],
close_fds=False, pass_fds=(fd, )))
self.assertIn('overriding close_fds', str(context.warning))
def test_stdout_stdin_are_single_inout_fd(self):
with io.open(os.devnull, "r+") as inout:
p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
stdout=inout, stdin=inout)
p.wait()
def test_stdout_stderr_are_single_inout_fd(self):
with io.open(os.devnull, "r+") as inout:
p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
stdout=inout, stderr=inout)
p.wait()
def test_stderr_stdin_are_single_inout_fd(self):
with io.open(os.devnull, "r+") as inout:
p = subprocess.Popen([sys.executable, "-c", "import sys; sys.exit(0)"],
stderr=inout, stdin=inout)
p.wait()
def test_wait_when_sigchild_ignored(self):
# NOTE: sigchild_ignore.py may not be an effective test on all OSes.
sigchild_ignore = support.findfile("sigchild_ignore.py",
subdir="subprocessdata")
p = subprocess.Popen([sys.executable, sigchild_ignore],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
self.assertEqual(0, p.returncode, "sigchild_ignore.py exited"
" non-zero with this error:\n%s" %
stderr.decode('utf8'))
def test_select_unbuffered(self):
# Issue #11459: bufsize=0 should really set the pipes as
# unbuffered (and therefore let select() work properly).
select = support.import_module("select")
p = subprocess.Popen([sys.executable, "-c",
'import sys;'
'sys.stdout.write("apple")'],
stdout=subprocess.PIPE,
bufsize=0)
f = p.stdout
self.addCleanup(f.close)
try:
self.assertEqual(f.read(4), b"appl")
self.assertIn(f, select.select([f], [], [], 0.0)[0])
finally:
p.wait()
def test_zombie_fast_process_del(self):
# Issue #12650: on Unix, if Popen.__del__() was called before the
# process exited, it wouldn't be added to subprocess._active, and would
# remain a zombie.
# spawn a Popen, and delete its reference before it exits
p = subprocess.Popen([sys.executable, "-c",
'import sys, time;'
'time.sleep(0.2)'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
ident = id(p)
pid = p.pid
del p
# check that p is in the active processes list
self.assertIn(ident, [id(o) for o in subprocess._active])
def test_leak_fast_process_del_killed(self):
# Issue #12650: on Unix, if Popen.__del__() was called before the
# process exited, and the process got killed by a signal, it would never
# be removed from subprocess._active, which triggered a FD and memory
# leak.
# spawn a Popen, delete its reference and kill it
p = subprocess.Popen([sys.executable, "-c",
'import time;'
'time.sleep(3)'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
ident = id(p)
pid = p.pid
del p
os.kill(pid, signal.SIGKILL)
# check that p is in the active processes list
self.assertIn(ident, [id(o) for o in subprocess._active])
# let some time for the process to exit, and create a new Popen: this
# should trigger the wait() of p
time.sleep(0.2)
with self.assertRaises(EnvironmentError) as c:
with subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE) as proc:
pass
# p should have been wait()ed on, and removed from the _active list
self.assertRaises(OSError, os.waitpid, pid, 0)
self.assertNotIn(ident, [id(o) for o in subprocess._active])
@unittest.skipUnless(mswindows, "Windows specific tests")
class Win32ProcessTestCase(BaseTestCase):
def test_startupinfo(self):
# startupinfo argument
        # We use hardcoded constants, because we do not want to
# depend on win32all.
STARTF_USESHOWWINDOW = 1
SW_MAXIMIZE = 3
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags = STARTF_USESHOWWINDOW
startupinfo.wShowWindow = SW_MAXIMIZE
# Since Python is a console process, it won't be affected
# by wShowWindow, but the argument should be silently
# ignored
subprocess.call([sys.executable, "-c", "import sys; sys.exit(0)"],
startupinfo=startupinfo)
def test_creationflags(self):
# creationflags argument
CREATE_NEW_CONSOLE = 16
sys.stderr.write(" a DOS box should flash briefly ...\n")
subprocess.call(sys.executable +
' -c "import time; time.sleep(0.25)"',
creationflags=CREATE_NEW_CONSOLE)
def test_invalid_args(self):
# invalid arguments should raise ValueError
self.assertRaises(ValueError, subprocess.call,
[sys.executable, "-c",
"import sys; sys.exit(47)"],
preexec_fn=lambda: 1)
self.assertRaises(ValueError, subprocess.call,
[sys.executable, "-c",
"import sys; sys.exit(47)"],
stdout=subprocess.PIPE,
close_fds=True)
def test_close_fds(self):
# close file descriptors
rc = subprocess.call([sys.executable, "-c",
"import sys; sys.exit(47)"],
close_fds=True)
self.assertEqual(rc, 47)
def test_shell_sequence(self):
# Run command through the shell (sequence)
newenv = os.environ.copy()
newenv["FRUIT"] = "physalis"
p = subprocess.Popen(["set"], shell=1,
stdout=subprocess.PIPE,
env=newenv)
self.addCleanup(p.stdout.close)
self.assertIn(b"physalis", p.stdout.read())
def test_shell_string(self):
# Run command through the shell (string)
newenv = os.environ.copy()
newenv["FRUIT"] = "physalis"
p = subprocess.Popen("set", shell=1,
stdout=subprocess.PIPE,
env=newenv)
self.addCleanup(p.stdout.close)
self.assertIn(b"physalis", p.stdout.read())
def test_call_string(self):
# call() function with string argument on Windows
rc = subprocess.call(sys.executable +
' -c "import sys; sys.exit(47)"')
self.assertEqual(rc, 47)
def _kill_process(self, method, *args):
# Some win32 buildbot raises EOFError if stdin is inherited
p = subprocess.Popen([sys.executable, "-c", """if 1:
import sys, time
sys.stdout.write('x\\n')
sys.stdout.flush()
time.sleep(30)
"""],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
self.addCleanup(p.stdin.close)
# Wait for the interpreter to be completely initialized before
# sending any signal.
p.stdout.read(1)
getattr(p, method)(*args)
_, stderr = p.communicate()
self.assertStderrEqual(stderr, b'')
returncode = p.wait()
self.assertNotEqual(returncode, 0)
def _kill_dead_process(self, method, *args):
p = subprocess.Popen([sys.executable, "-c", """if 1:
import sys, time
sys.stdout.write('x\\n')
sys.stdout.flush()
sys.exit(42)
"""],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.addCleanup(p.stdout.close)
self.addCleanup(p.stderr.close)
self.addCleanup(p.stdin.close)
# Wait for the interpreter to be completely initialized before
# sending any signal.
p.stdout.read(1)
# The process should end after this
time.sleep(1)
# This shouldn't raise even though the child is now dead
getattr(p, method)(*args)
_, stderr = p.communicate()
self.assertStderrEqual(stderr, b'')
rc = p.wait()
self.assertEqual(rc, 42)
def test_send_signal(self):
self._kill_process('send_signal', signal.SIGTERM)
def test_kill(self):
self._kill_process('kill')
def test_terminate(self):
self._kill_process('terminate')
def test_send_signal_dead(self):
self._kill_dead_process('send_signal', signal.SIGTERM)
def test_kill_dead(self):
self._kill_dead_process('kill')
def test_terminate_dead(self):
self._kill_dead_process('terminate')
# The module says:
# "NB This only works (and is only relevant) for UNIX."
#
# Actually, getoutput should work on any platform with an os.popen, but
# I'll take the comment as given, and skip this suite.
@unittest.skipUnless(os.name == 'posix', "only relevant for UNIX")
class CommandTests(unittest.TestCase):
def test_getoutput(self):
self.assertEqual(subprocess.getoutput('echo xyzzy'), 'xyzzy')
self.assertEqual(subprocess.getstatusoutput('echo xyzzy'),
(0, 'xyzzy'))
# we use mkdtemp in the next line to create an empty directory
# under our exclusive control; from that, we can invent a pathname
# that we _know_ won't exist. This is guaranteed to fail.
dir = None
try:
dir = tempfile.mkdtemp()
name = os.path.join(dir, "foo")
status, output = subprocess.getstatusoutput('cat ' + name)
self.assertNotEqual(status, 0)
finally:
if dir is not None:
os.rmdir(dir)
@unittest.skipUnless(getattr(subprocess, '_has_poll', False),
"poll system call not supported")
class ProcessTestCaseNoPoll(ProcessTestCase):
def setUp(self):
subprocess._has_poll = False
ProcessTestCase.setUp(self)
def tearDown(self):
subprocess._has_poll = True
ProcessTestCase.tearDown(self)
@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False),
"_posixsubprocess extension module not found.")
class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase):
@classmethod
def setUpClass(cls):
global subprocess
assert subprocess._posixsubprocess
# Reimport subprocess while forcing _posixsubprocess to not exist.
with support.check_warnings(('.*_posixsubprocess .* not being used.*',
RuntimeWarning)):
subprocess = support.import_fresh_module(
'subprocess', blocked=['_posixsubprocess'])
assert not subprocess._posixsubprocess
@classmethod
def tearDownClass(cls):
global subprocess
# Reimport subprocess as it should be, restoring order to the universe.
subprocess = support.import_fresh_module('subprocess')
assert subprocess._posixsubprocess
class HelperFunctionTests(unittest.TestCase):
@unittest.skipIf(mswindows, "errno and EINTR make no sense on windows")
def test_eintr_retry_call(self):
record_calls = []
def fake_os_func(*args):
record_calls.append(args)
if len(record_calls) == 2:
raise OSError(errno.EINTR, "fake interrupted system call")
return tuple(reversed(args))
self.assertEqual((999, 256),
subprocess._eintr_retry_call(fake_os_func, 256, 999))
self.assertEqual([(256, 999)], record_calls)
# This time there will be an EINTR so it will loop once.
self.assertEqual((666,),
subprocess._eintr_retry_call(fake_os_func, 666))
self.assertEqual([(256, 999), (666,), (666,)], record_calls)
@unittest.skipUnless(mswindows, "Windows-specific tests")
class CommandsWithSpaces(BaseTestCase):
def setUp(self):
super().setUp()
f, fname = mkstemp(".py", "te st")
        self.fname = fname.lower()
os.write(f, b"import sys;"
b"sys.stdout.write('%d %s' % (len(sys.argv), [a.lower () for a in sys.argv]))"
)
os.close(f)
def tearDown(self):
os.remove(self.fname)
super().tearDown()
def with_spaces(self, *args, **kwargs):
kwargs['stdout'] = subprocess.PIPE
p = subprocess.Popen(*args, **kwargs)
self.addCleanup(p.stdout.close)
self.assertEqual(
            p.stdout.read().decode("mbcs"),
"2 [%r, 'ab cd']" % self.fname
)
def test_shell_string_with_spaces(self):
# call() function with string argument with spaces on Windows
self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname,
"ab cd"), shell=1)
def test_shell_sequence_with_spaces(self):
# call() function with sequence argument with spaces on Windows
self.with_spaces([sys.executable, self.fname, "ab cd"], shell=1)
def test_noshell_string_with_spaces(self):
# call() function with string argument with spaces on Windows
self.with_spaces('"%s" "%s" "%s"' % (sys.executable, self.fname,
"ab cd"))
def test_noshell_sequence_with_spaces(self):
# call() function with sequence argument with spaces on Windows
self.with_spaces([sys.executable, self.fname, "ab cd"])
class ContextManagerTests(BaseTestCase):
def test_pipe(self):
with subprocess.Popen([sys.executable, "-c",
"import sys;"
"sys.stdout.write('stdout');"
"sys.stderr.write('stderr');"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE) as proc:
self.assertEqual(proc.stdout.read(), b"stdout")
self.assertStderrEqual(proc.stderr.read(), b"stderr")
self.assertTrue(proc.stdout.closed)
self.assertTrue(proc.stderr.closed)
def test_returncode(self):
with subprocess.Popen([sys.executable, "-c",
"import sys; sys.exit(100)"]) as proc:
pass
# __exit__ calls wait(), so the returncode should be set
self.assertEqual(proc.returncode, 100)
def test_communicate_stdin(self):
with subprocess.Popen([sys.executable, "-c",
"import sys;"
"sys.exit(sys.stdin.read() == 'context')"],
stdin=subprocess.PIPE) as proc:
proc.communicate(b"context")
self.assertEqual(proc.returncode, 1)
def test_invalid_args(self):
with self.assertRaises(EnvironmentError) as c:
with subprocess.Popen(['nonexisting_i_hope'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE) as proc:
pass
self.assertEqual(c.exception.errno, errno.ENOENT)
def test_main():
unit_tests = (ProcessTestCase,
POSIXProcessTestCase,
Win32ProcessTestCase,
ProcessTestCasePOSIXPurePython,
CommandTests,
ProcessTestCaseNoPoll,
HelperFunctionTests,
CommandsWithSpaces,
ContextManagerTests,
)
support.run_unittest(*unit_tests)
support.reap_children()
if __name__ == "__main__":
unittest.main()
|
py | 1a50df03d2170e77eaa1f9daa160dbaf19c121f2 | """
Basic vector math for Maya.cmds
"""
from collections import namedtuple
import math
class Vector(object):
"""
Generic vector operations.
"""
def __add__(self, other):
return type(self)(*(a + b for a, b in zip(self, other)))
def __sub__(self, other):
return type(self)(*(a - b for a, b in zip(self, other)))
def __mul__(self, other):
if hasattr(other, '__iter__'):
return type(self)(*(a * b for a, b in zip(self, other)))
return type(self)(*map(lambda a: a * other, self))
    def __div__(self, other):
        if hasattr(other, '__iter__'):
            return type(self)(*(a / b for a, b in zip(self, other)))
        return type(self)(*map(lambda a: a / other, self))
    # Python 3 maps the / operator to __truediv__; alias it so methods
    # like normalized() work on both Python 2 and 3.
    __truediv__ = __div__
def length(self):
total = sum(map(lambda a: math.pow(a, 2), self))
return math.sqrt(total)
def normalized(self):
divisor = [self.length()] * len(self)
return type(self)(*(self / divisor))
@classmethod
def add(cls, a, b):
return cls(*a) + cls(*b)
@classmethod
def sub(cls, a, b):
return cls(*a) - cls(*b)
@classmethod
def mul(cls, a, b):
return cls(*a) * cls(*b)
@classmethod
def div(cls, a, b):
return cls(*a) / cls(*b)
@classmethod
def dot(cls, left, right):
return sum(cls.mul(left, right))
@classmethod
def norm_dot(cls, left, right):
left = cls(*left).normalized()
right = cls(*right).normalized()
return sum(cls.mul(left, right))
xy = namedtuple('Vector2', 'x y')
xyz = namedtuple('Vector3', 'x y z')
xyzw = namedtuple('Vector4', 'x y z w')
class Vector2(Vector, xy):
"""
A 2-d xy vector. This is an immutable tuple, so you cannot modify it in place!
usage:
    v = Vector2(1.0, 0.0)
    or
    example = [1.0, 0.0]  # any iterable with 2 items
v = Vector2(*example)
or
example = Vector2(x = 1.0, y=2.0)
Supports all base iterable functions (slicing, for loops, any(), etc)
"""
pass
class Vector3(Vector, xyz):
"""
A 3-d xyz vector. This is an immutable tuple, so you cannot modify it in place!
usage:
    v = Vector3(1.0, 0.0, 0.0)
    or
    example = [1.0, 0.0, 0.0]  # any iterable with 3 items
v = Vector3(*example)
or
example = Vector3(x = 1.0, y=2.0, z = 3.0)
Supports all base iterable functions (slicing, for loops, any(), etc)
"""
pass
class Vector4(Vector, xyzw):
"""
A 4-d xyzw vector. This is an immutable tuple, so you cannot modify it in place!
usage:
    v = Vector4(1.0, 0.0, 0.0, 1.0)
    or using *args:
    example = [1.0, 0.0, 0.0, 1.0]  # any iterable with 4 items
v = Vector4(*example)
or explicitly
example = Vector4(x = 1.0, y=2.0, z = 3.0, w= 0.0)
Supports all base iterable functions (slicing, for loops, any(), etc)
"""
pass
class MVector(Vector, list):
"""
A mutable version of the base Vector. This allows you to modify vector contents in place.
    MVector derives from list, so it supports in-place mutation of its items.
Supports all base iterable functions (slicing, for loops, any(), etc). However it does NOT support append(), to keep the width of the vector to what it was at creation time.
"""
def __init__(self, *args, **kwargs):
if len(args) == 1:
args = args[0]
list.__init__(self, args)
for k in 'xyzw':
if k in kwargs:
self.__setattr__(k, kwargs.pop(k))
def __getattr__(self, key):
try:
return self['xyzw'.index(key)]
except KeyError:
return self.__getattribute__(key)
    def __setattr__(self, key, value):
        if key not in 'xyzw':
            # Fall back to normal attribute assignment for non-component
            # names; __setattribute__ does not exist, so use the superclass.
            super(MVector, self).__setattr__(key, value)
            return
        idx = 'xyzw'.index(key)
        self.__setitem__(idx, value)
def __setitem__(self, key, value):
while len(self) <= key:
super(MVector, self).append(0.0)
list.__setitem__(self, key, value)
def append(self, *args):
raise NotImplementedError(
"MVector does not support appneding: create a new vector with the correct width instead")
|
py | 1a50df1f3feffd195b77ca8787494777540b0cef | import smtplib
import os
import getpass
import sys
import ssl
from email.mime.text import MIMEText
from email.utils import formataddr
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email import encoders
from .color import green, white, blue, start, alert, numbering
def YahooEmail():
os.system("clear")
print(green)
print("""
__^__ __^__
( ___ )------------------------------------------------------( ___ )
| / | | \ |
| / |+------------)PhishMailer BaitMailer V1.5(-------------+| \ |
|___| Yahoo |___|
(_____)------------------------------------------------------(_____) """)
print(alert + "It Might Take A Few Minutes Until The Target Gets The Email" + alert)
print(alert + "You Might Need To Allow Less Secure Apps On You Gmail Account" + alert)
print("")
fromaddr = input(start + " Enter Your Email-Address: ")
password = getpass.getpass(start + " Enter Your Password (will not be shown): ")
toaddr = input(start + " Enter Email-Address To Send To: ")
subject = input(start + " Enter Subject: ")
pathfile = input(start + " Enter Path To Html File: ")
html = open(pathfile)
msg = MIMEText(html.read(), 'html')
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = subject
debug = False
if debug:
print(msg.as_string())
else:
        server = smtplib.SMTP('smtp.mail.yahoo.com', 587)
server.starttls()
server.login(fromaddr, password)
text = msg.as_string()
server.sendmail(fromaddr, toaddr, text)
server.quit()
print(alert + "Email Sent" + alert)
|
py | 1a50df2b868142b74dbe1f4c4d460b8c41c36854 | from stix_shifter_utils.modules.base.stix_transmission.base_ping_connector import BasePingConnector
from stix_shifter_utils.utils import logger
from stix_shifter_utils.utils.error_response import ErrorResponder
class PingConnector(BasePingConnector):
def __init__(self, api_client):
self.api_client = api_client
self.logger = logger.set_logger(__name__)
self.connector = __name__.split('.')[1]
def ping_connection(self):
try:
response_dict = self.api_client.ping_data_source()
response_code = response_dict["code"]
# Construct a response object
return_obj = dict()
if response_code == 200:
return_obj['success'] = True
else:
ErrorResponder.fill_error(return_obj, response_dict, ['message'], connector=self.connector)
return return_obj
except Exception as err:
self.logger.error('error when pinging datasource: %s', err, exc_info=True)
raise
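# --- Contract sketch (assumptions inferred from the code above) ---
# ping_connection() only requires an api_client that exposes
# ping_data_source() returning a dict with a "code" key (and a "message"
# key on failure). A stub like the one below could back a unit test.
# Note: PingConnector derives its connector name from its dotted module
# path, so it is meant to be instantiated from inside the stix-shifter
# package rather than from a standalone script.
class _FakePingApiClient:
    """Stub returning a canned successful ping response."""
    def ping_data_source(self):
        return {"code": 200, "message": "ok"}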
|
py | 1a50df98bf88ee29ff9314073802a18c5343e102 | import os
import numpy as np
from tensorflow.keras.callbacks import TensorBoard
from tensorflow.keras.datasets import mnist
from tensorflow.keras.initializers import Constant
from tensorflow.keras.initializers import TruncatedNormal
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import to_categorical
from tf_utils.callbacks import ConfusionMatrix
MODEL_DIR = os.path.abspath("C:/Users/jan/Dropbox/_Coding/UdemyTF/models")
if not os.path.exists(MODEL_DIR):
os.mkdir(MODEL_DIR)
MODEL_FILE_PATH = os.path.join(MODEL_DIR, "mnist_model.h5")
LOGS_DIR = os.path.abspath("C:/Users/jan/Dropbox/_Coding/UdemyTF/logs/")
if not os.path.exists(LOGS_DIR):
os.mkdir(LOGS_DIR)
MODEL_LOG_DIR = os.path.join(LOGS_DIR, "mnist_cm")
def prepare_dataset(num_features: int, num_classes: int) -> tuple:
(x_train, y_train), (x_test, y_test) = mnist.load_data()
y_train = to_categorical(y_train, num_classes=num_classes, dtype=np.float32)
y_test = to_categorical(y_test, num_classes=num_classes, dtype=np.float32)
x_train = x_train.reshape(-1, num_features).astype(np.float32)
x_test = x_test.reshape(-1, num_features).astype(np.float32)
return (x_train, y_train), (x_test, y_test)
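# Sanity-check sketch (assuming the standard 60k/10k MNIST split), kept as
# comments so importing this module does not trigger a dataset download:
#   (x_train, y_train), (x_test, y_test) = prepare_dataset(784, 10)
#   assert x_train.shape == (60000, 784) and y_train.shape == (60000, 10)
#   assert x_test.shape == (10000, 784) and y_test.shape == (10000, 10)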
def build_model(num_features: int, num_classes: int) -> Sequential:
init_w = TruncatedNormal(mean=0.0, stddev=0.01)
init_b = Constant(value=0.0)
model = Sequential()
model.add(
Dense(
units=500,
kernel_initializer=init_w,
bias_initializer=init_b,
input_shape=(num_features,),
)
)
model.add(Activation("relu"))
model.add(Dense(units=300, kernel_initializer=init_w, bias_initializer=init_b))
model.add(Activation("relu"))
model.add(Dense(units=100, kernel_initializer=init_w, bias_initializer=init_b))
model.add(Activation("relu"))
model.add(Dense(units=50, kernel_initializer=init_w, bias_initializer=init_b))
model.add(Activation("relu"))
model.add(
Dense(
units=num_classes,
kernel_initializer=init_w,
bias_initializer=init_b,
)
)
model.add(Activation("softmax"))
model.summary()
return model
if __name__ == "__main__":
num_features = 784
num_classes = 10
(x_train, y_train), (x_test, y_test) = prepare_dataset(num_features, num_classes)
optimizer = Adam(learning_rate=0.001)
epochs = 2
batch_size = 256
model = build_model(num_features, num_classes)
model.compile(
loss="categorical_crossentropy",
optimizer=optimizer,
metrics=["accuracy"],
)
tb_callback = TensorBoard(log_dir=MODEL_LOG_DIR, histogram_freq=1, write_graph=True)
    classes_list = list(range(num_classes))
cm_callback = ConfusionMatrix(
model, x_test, y_test, classes_list=classes_list, log_dir=MODEL_LOG_DIR
)
model.fit(
x=x_train,
y=y_train,
epochs=epochs,
batch_size=batch_size,
validation_data=(x_test, y_test),
callbacks=[tb_callback, cm_callback],
)
scores = model.evaluate(x=x_test, y=y_test, verbose=0)
print("Scores: ", scores)
|
py | 1a50e07fb22065015f12b58be3eb69537d2afd76 | """
Django settings for neighbourhood project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import django_heroku
import dj_database_url
from decouple import config, Csv
MODE = config("MODE", default="dev")
SECRET_KEY = config('SECRET_KEY')
DEBUG = config('DEBUG', default=False, cast=bool)
# development
if MODE == "dev":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': config('DB_NAME'),
'USER': config('DB_USER'),
'PASSWORD': config('DB_PASSWORD'),
'HOST': config('DB_HOST'),
'PORT': '',
}
}
# production
else:
DATABASES = {
'default': dj_database_url.config(
default=config('DATABASE_URL')
)
}
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
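# Example .env consumed by decouple above (all values are placeholders):
#   MODE=dev
#   SECRET_KEY=replace-me
#   DEBUG=True
#   DB_NAME=kneighbour
#   DB_USER=postgres
#   DB_PASSWORD=...
#   DB_HOST=127.0.0.1
#   DATABASE_URL=postgres://user:pass@host:5432/dbname  # used when MODE != dev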
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECRET_KEY and DEBUG are loaded from the environment via python-decouple
# above, so they are intentionally not hardcoded here.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'neighbour',
'bootstrap3',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'neighbourhood.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media',
],
},
},
]
WSGI_APPLICATION = 'neighbourhood.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# DATABASES is configured above: per-variable settings in development and
# DATABASE_URL in production, so it is not redefined here with hardcoded
# credentials.
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOGIN_REDIRECT_URL = "home"
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# configuring the location for media
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Configure Django App for Heroku.
django_heroku.settings(locals())
|
py | 1a50e0eafe9980b6e3911d39b4defd2a484a009e | """
Problem 53: Combinatoric selections
https://projecteuler.net/problem=53
There are exactly ten ways of selecting three from five, 12345:
123, 124, 125, 134, 135, 145, 234, 235, 245, and 345
In combinatorics, we use the notation, (5 over 3) = 10.
In general, (n over r) = n! / (r! * (n−r)!), where r <= n, n! = n * (n−1) * ... * 3 * 2 * 1,
and 0! = 1.
It is not until n = 23, that a value exceeds one-million: (23 over 10) = 1144066.
How many, not necessarily distinct, values of (n over r) for 1 <= n <= 100,
are greater than one-million?
"""
from typing import Iterable, Tuple
from src.common.calculations import calculate_binomial_coefficient
# pylint: disable=invalid-name
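# calculate_binomial_coefficient comes from src.common.calculations and its
# implementation is not shown here; a minimal stand-in consistent with the
# docstring's formula (n over r) = n! / (r! * (n-r)!) would be:
#
#     from math import factorial
#     def calculate_binomial_coefficient(n: int, r: int) -> int:
#         return factorial(n) // (factorial(r) * factorial(n - r))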
def get_large_binomial_coefficients(max_n: int, threshold: int) -> Iterable[Tuple[int, int, int]]:
"""
Get binomial coefficients (n over r) for `1 <= n <= max_n` that are greater than `threshold`.
Returns tuples `(n, r, (n over r))`.
"""
for n in range(1, max_n + 1):
for r in range(n + 1):
binomial_coefficient = calculate_binomial_coefficient(n, r)
if binomial_coefficient > threshold:
yield n, r, binomial_coefficient
def main() -> None:
"""Main function."""
max_n = 100
threshold = int(1e6)
count = len(list(get_large_binomial_coefficients(max_n, threshold)))
print(f'The number of values of (n over r) for 1 <= n <= {max_n} ' \
f'that are greater than {threshold:,} is {count}.')
if __name__ == '__main__':
main()
|