id (stringlengths 1-8) | text (stringlengths 6-1.05M) | dataset_id (stringclasses: 1 value)
---|---|---
4887741
|
#
# import pdb
# import numpy as np
#
#
# def tempdic(dictionary):
# latch_lowtohigh = np.nonzero(np.diff(dictionary['TriggerLatch']) == 1)
# dt = np.diff(dictionary['elapsed_time'][0:2])
# timestart = dt * latch_lowtohigh[-1] * (-1)
# timeend = (len(dictionary['elapsed_time']) - latch_lowtohigh[-1]) * dt
# zerotime = np.arange(timestart, timeend, dt, dtype=float)
# temp = {'timeaxis': zerotime}
# for i in range(1, 10):
# temp.update({'PT' + str(i): dictionary['PT' + str(i)]})
# return temp
#
#
# def atTime(dictionary, time, instrument):
# index = 0
# while time >= dictionary['timeaxis'][index]:
# index = index + 1
# if time == dictionary['timeaxis'][index - 1]:
# return dictionary[instrument][index - 1]
# else:
# x = (dictionary[instrument][index] + dictionary[instrument][index - 1])/2
# return x
#
#
# def ShowIndex(dictionary):
# for key in dictionary:
# print(key + str(dictionary[key].shape) + str(dictionary[key].dtype))
#
#
# def main(dictionary, timestamp):
# if not dictionary['slowDAQ']['loaded']:
# print "Failed to load slowDAQ dictionary, process terminated."
# empty = np.zeros(shape=(2, len(timestamp)), dtype=float, order='C')
# for i in range(len(timestamp)):
# empty[1][i] = timestamp[i]
# return empty
# temp = tempdic(dictionary['slowDAQ'])
# P = np.zeros(shape=(2, len(timestamp)), dtype=float, order='C')
# for i in range(len(timestamp)):
# x = atTime(temp, timestamp[i], 'PT4')
# P[0][i] = x
# P[1][i] = timestamp[i]
# return P
|
StarcoderdataPython
|
35435
|
<filename>Traditional/split_path.py
#!/usr/bin/env python3
import os
path = "///data/video/project1//"
normalized_path = os.path.normpath(path)
sep_path = normalized_path.split(os.sep)
path_tail = sep_path[-1]  # last element of the path; expected to be the volume name
currentPath = ''
for folder in sep_path:
if folder:
currentPath += "/"+folder
print (currentPath)
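# A comparable sketch using pathlib (assumes Python 3.4+ and a POSIX-style path);
# it prints the same cumulative prefixes as the loop above.
from pathlib import PurePosixPath
parts = PurePosixPath(normalized_path).parts  # e.g. ('/', 'data', 'video', 'project1') on POSIX
prefix = PurePosixPath(parts[0])
for part in parts[1:]:
    prefix = prefix / part
    print(prefix)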
|
StarcoderdataPython
|
6548121
|
import json
from elasticsearch import Elasticsearch
from .definition import ElasticIndex
from .conf import settings
class BaseService:
    # Accept extra keyword arguments so subclasses can pass their kwargs straight through.
    def __init__(self, entity: ElasticIndex, **kwargs):
assert isinstance(entity, ElasticIndex), "Invalid entity provided, should be an object of ElasticIndex"
self.entity = entity
self.client = Elasticsearch(settings.ES_HOST)
@property
def index(self):
return self.entity.index
@property
def doc_type(self):
return self.entity.doc_type
class Index(BaseService):
def __init__(self, *args, **kwargs):
self.validate(**kwargs)
super().__init__(*args, **kwargs)
self.entity_id = self.get_entity_id(**kwargs)
partial = kwargs.get("partial", False)
is_script = kwargs.get("is_script", False)
self.do_update = False
if partial:
self.body = dict(doc=kwargs["data"])
self.do_update = True
elif is_script:
self.body = dict(script=kwargs["data"])
self.do_update = True
elif kwargs.get("data"):
self.body = kwargs.pop("data")
else:
self.body = self.entity.data_fns.single(self.entity_id)
@staticmethod
def validate(**kwargs):
if kwargs.get("partial") and not kwargs.get("data"):
raise AssertionError("Missing/None argument \'data\' on partial index job.")
if kwargs.get("is_script") and not kwargs.get("data"):
raise AssertionError("Missing/None argument \'data\' on script index job.")
if kwargs.get("partial") and kwargs.get("is_script"):
raise AssertionError("Don't provide both is_script and partial as True.")
if not kwargs.get("id") and not kwargs.get("data"):
raise AssertionError("Missing/None argument \'id\' received.")
@staticmethod
def get_entity_id(**kwargs):
if kwargs.get("id"):
return kwargs.pop("id")
elif kwargs.get("data") and kwargs["data"].get("id"):
return kwargs["data"]["id"]
else:
raise AssertionError("Missing/None argument \'id\'.")
def __call__(self):
kwargs = {
"index": self.index,
"doc_type": self.doc_type,
"id": self.entity_id,
"body": self.body
}
if self.do_update:
self.client.update(**kwargs)
else:
self.client.index(**kwargs)
class BulkUpdate(BaseService):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
assert kwargs.get("data"), "Missing/None argument \'data\'."
self.data = kwargs.pop("data")
self.entity_ids = kwargs.pop("entity_ids", [])
assert len(self.entity_ids), "Empty list received from argument \'entity_ids\'."
def __call__(self):
bulk_body = []
for entity_id in self.entity_ids:
bulk_body.append(json.dumps({
"update": {
"_index": self.index,
"_type": self.doc_type,
"_id": str(entity_id)
}
}))
bulk_body.append(json.dumps({"doc": self.data}))
self.client.bulk(body="\n".join(bulk_body))
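# Hypothetical usage sketch (not part of the original module). `ArticleIndex` stands in
# for a concrete ElasticIndex definition from .definition; the calls are illustrative only:
#   article_index = ArticleIndex()
#   Index(article_index, data={"id": 1, "title": "Hello"})()                      # full index
#   Index(article_index, id=1, partial=True, data={"title": "Hello again"})()     # partial update
#   BulkUpdate(article_index, data={"published": True}, entity_ids=[1, 2, 3])()   # bulk update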
|
StarcoderdataPython
|
3415824
|
<reponame>CaseyK9/Scarecrow
import discord
import discord.ext.commands
def setup(bot):
bot.help_command = TreeHelpCommand()
def teardown(bot):
bot.help_command = discord.ext.commands.DefaultHelpCommand()
class TreeHelpCommand(discord.ext.commands.DefaultHelpCommand):
"""Deviation from the default help command to list subcommands with extra indentation and formatting."""
def __init__(self, **kwargs):
super().__init__(indent=min(3, kwargs.pop('indent', 3)), **kwargs)
def get_max_size(self, commands, depth=0):
"""Returns the largest name length of the specified command list, including their subcommands."""
as_lengths = set()
for command in commands:
as_lengths.add(discord.utils._string_width(command.name) + depth)
if isinstance(command, discord.ext.commands.Group):
as_lengths.add(self.get_max_size(command.commands, depth + self.indent))
return max(as_lengths, default=0)
def add_indented_commands(self, commands, *, heading, max_size=None, indent=0, tree_base=''):
"""Indents a list of commands and their subcommands as a tree view."""
if not commands:
return
if indent == 0:
indent = self.indent
if heading is not None:
self.paginator.add_line(heading)
max_size = max_size or self.get_max_size(commands)
commands = sorted(list(commands), key=lambda c: c.name)
get_width = discord.utils._string_width
for command in commands:
last_command = command == commands[-1]
base_indent = self.indent * ' '
if indent > self.indent:
tree_core = '└' if last_command else '├'
tree_indent = f'{tree_base}{tree_core}' + (self.indent - 2) * '─' + ' '
else:
tree_indent = ''
name = command.name
width = max_size - (get_width(name) - len(name))
entry = f'{base_indent}{tree_indent + name:<{width}} {command.short_doc}'
self.paginator.add_line(self.shorten_text(entry))
if isinstance(command, discord.ext.commands.Group):
next_tree_base = tree_base + f'{"│":<{self.indent - 1}}' if not last_command and indent > self.indent else tree_base
self.add_indented_commands(command.commands, heading=None, max_size=max_size, indent=indent + self.indent, tree_base=next_tree_base)
|
StarcoderdataPython
|
11284423
|
#########################################################
#
# Main file for parallel mesh testing.
#
# This is a modification of the run_parallel_advection.py
# file.
#
#
# Authors: <NAME>, <NAME> and <NAME>,
# June 2005
#
#
#
#########################################################
import time
import sys
import numpy
import anuga
#----------------------------
# Parallel interface
#---------------------------
from anuga_parallel import distribute, myid, numprocs, finalize, barrier
#--------------------------------------------------------------------------
# Setup functions for topography etc
#--------------------------------------------------------------------------
scale_me=1.0
def topography(x,y):
return (-x/2.0 +0.05*numpy.sin((x+y)*200.0))*scale_me
def stagefun(x,y):
stge=-0.2*scale_me #+0.01*(x>0.9)
#topo=topography(x,y)
return stge#*(stge>topo) + (topo)*(stge<=topo)
#--------------------------------------------------------------------------
# Create domains
#--------------------------------------------------------------------------
t0 = time.time()
verbose = True
#--------------------------------------------------------------------------
# Setup Domain only on processor 0
#--------------------------------------------------------------------------
if myid == 0:
length = 2.0
width = 2.0
dx = dy = 0.005
#dx = dy = 0.00125
dx = dy = 0.05
domain = anuga.rectangular_cross_domain(int(length/dx), int(width/dy),
len1=length, len2=width, verbose=verbose)
#---------------------------------------
# Add these two commands to use Gareth's
# tsunami algorithm. Play with the
# minimum allowed height to remove possible
# unrealistic large velocities
#---------------------------------------
domain.set_flow_algorithm('tsunami')
domain.set_minimum_allowed_height(0.01)
domain.set_store(True)
domain.set_quantity('elevation',topography) # Use function for elevation
domain.get_quantity('elevation').smooth_vertex_values()
domain.set_quantity('friction',0.03) # Constant friction
domain.set_quantity('stage', stagefun) # Constant negative initial stage
domain.get_quantity('stage').smooth_vertex_values()
domain.set_name('rectangular_tsunami')
domain.print_statistics()
else:
domain = None
t1 = time.time()
if myid == 0:
    print('Create sequential domain ', t1 - t0)
if myid == 0 and verbose:
    print('DISTRIBUTING DOMAIN')
sys.stdout.flush()
barrier()
#-------------------------------------------------------------------------
# Distribute domain
#-------------------------------------------------------------------------
domain = distribute(domain,verbose=verbose)
t2 = time.time()
if myid == 0:
    print('Distribute domain ', t2 - t1)
if myid == 0: print('after parallel domain')
#Boundaries
T = anuga.Transmissive_boundary(domain)
R = anuga.Reflective_boundary(domain)
D = anuga.Dirichlet_boundary([-0.1*scale_me,0.,0.])
domain.set_boundary( {'left': R, 'right': D, 'bottom': R, 'top': R, 'ghost': None} )
if myid == 0: print('after set_boundary')
yieldstep = 0.2
finaltime = 20.0
barrier()
t0 = time.time()
#===========================================================================
# Main Evolve Loop
#===========================================================================
for t in domain.evolve(yieldstep = yieldstep, finaltime = finaltime):
if myid == 0:
domain.write_time()
sys.stdout.flush()
for p in range(numprocs):
barrier()
if myid == p:
        print(50 * '=')
        print('P%g' % (myid))
        print('That took %.2f seconds' % (time.time() - t0))
        print('Communication time %.2f seconds' % domain.communication_time)
        print('Reduction Communication time %.2f seconds' % domain.communication_reduce_time)
        print('Broadcast time %.2f seconds' % domain.communication_broadcast_time)
sys.stdout.flush()
if domain.number_of_global_triangles < 50000:
    if myid == 0:
        print('Create dump of triangulation for %g triangles' % domain.number_of_global_triangles)
domain.dump_triangulation(filename="rectangular_cross_%g.png"% numprocs)
finalize()
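# Usage note (an assumption about the typical workflow, not part of the original script):
# parallel runs of this kind of anuga_parallel script are normally launched through MPI, e.g.
#   mpirun -np 4 python this_script.py   # file name is illustrative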
|
StarcoderdataPython
|
1965874
|
<reponame>s-vigneshwaran/Sustainable-Development-Goals-Classifier
from flask import Flask, jsonify, request
from flask_cors import CORS, cross_origin
from ngram_matcher import NgramMatcher
import uuid
import PyPDF2
import os
import numpy as np
import json
# Loading dataset into memory
fos_ids = np.load('fos_ids.npy', allow_pickle=True)
fos_names = np.load('fos_names.npy', allow_pickle=True)
with open('OSDG-mapping.json', 'r') as file_:
mapping = [(sdg, set(fos_ids)) for sdg, fos_ids in json.load(file_).items()]
with open('OSDG-fosmap.json', 'r') as file_:
fosmap = json.load(file_)
# Create an NgramMatcher object
ngram_matcher = NgramMatcher(fos_names,
lowercase=True,
token_pattern=r'(?u)\b\w+\b',
ngram_size=(1, 4))
def extract_fos(text):
idxs, frequencies = ngram_matcher.match([text])[0]
ngrams = sorted(zip(fos_ids[idxs], fos_names[idxs], frequencies), key=lambda ng: len(ng[1]), reverse=True)
descored_ngrams = list()
for idx, (ngram_id, ngram_name, frequency) in enumerate(ngrams):
for _, fol_ngram_name, fol_frequency in ngrams[:idx]:
if ngram_name in fol_ngram_name:
frequency -= fol_frequency
if frequency > 0:
descored_ngrams.append([ngram_id, ngram_name, frequency])
ngrams = descored_ngrams
submerged_ngrams, drop_ngram_ids = list(), set()
for idx, (ngram_id, ngram_name, frequency) in enumerate(ngrams):
for ngram_id2, ngram_name2, frequency2 in ngrams[idx+1:]:
if ngram_name2 in ngram_name:
frequency += frequency2
drop_ngram_ids.add(ngram_id2)
submerged_ngrams.append([ngram_id, ngram_name, frequency])
submerged_ngrams = list(filter(lambda ng: ng[0] not in drop_ngram_ids, submerged_ngrams))
return {fos_id: frequency for fos_id, _, frequency in submerged_ngrams}
def tag_sdg(fos):
use_frequency = True
n_min_relevant_fos = 1
sdgs = []
fos_ids = fos.keys()
for sdg, sdg_fos_ids in mapping:
relevant_fos_ids = sdg_fos_ids.intersection(fos_ids)
if relevant_fos_ids and len(relevant_fos_ids) >= n_min_relevant_fos:
if use_frequency:
relevance = 0
for fos_id in relevant_fos_ids:
relevance += fos.get(fos_id)
else:
relevance = len(relevant_fos_ids)
sdgs.append({'sdg': sdg,
'relevance': float(relevance),
'fos_names': list(map(lambda fos_id: fosmap[fos_id], relevant_fos_ids))})
return sorted(sdgs, key=lambda x: x['relevance'], reverse=True)
def pdf_to_text(location, start, end):
pdf_file = open(location, 'rb')
pdf_reader = PyPDF2.PdfFileReader(pdf_file)
text = ''
for i in range(start-1, end):
page = pdf_reader.getPage(i)
text += page.extractText()
return text
app = Flask('SDG Classifier')
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
@app.route('/classify', methods=['POST'])
def classifyText():
if request.method=='POST':
posted_data = request.get_json()
fos = extract_fos(posted_data['text'])
sdgs = tag_sdg(fos)
return jsonify(sdgs)
@app.route('/classifyPDF', methods=['POST'])
def classifyPDF():
if request.method=='POST':
form = request.form.to_dict()
file_name = str(uuid.uuid4()) + '.pdf'
file_ = request.files['file']
file_.save(file_name)
text = pdf_to_text(file_name, int(form['start']), int(form['end']))
fos = extract_fos(text)
sdgs = tag_sdg(fos)
os.remove(file_name)
return jsonify(sdgs)
if __name__=='__main__':
app.run(debug=True)
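# Hypothetical client sketch (not part of the original service): POST a text snippet to
# the /classify endpoint of a locally running instance and print the returned SDG tags.
# Assumes the server above is listening on the default Flask port 5000.
#
#   import requests
#   resp = requests.post('http://localhost:5000/classify',
#                        json={'text': 'Access to clean water and sanitation reduces child mortality.'})
#   for tag in resp.json():
#       print(tag['sdg'], tag['relevance'], tag['fos_names'])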
|
StarcoderdataPython
|
4818350
|
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
PIDFILE = '/tmp/vde_network_checker'
IFACES = ['tap11', 'tap12']
def pytest_addoption(parser):
parser.addoption("--vde", action='store_true', default=False,
help="Use vde switch for network verification.")
def pytest_configure(config):
if config.getoption('vde'):
base = 'vde_switch -p {pidfile} -d'.format(pidfile=PIDFILE)
command = [base]
taps = ['-tap {tap}'.format(tap=tap) for tap in IFACES]
full_command = command + taps
os.system(' '.join(full_command))
for tap in IFACES:
os.system('ifconfig {tap} up'.format(tap=tap))
os.environ['NET_CHECK_IFACE_1'] = IFACES[0]
os.environ['NET_CHECK_IFACE_2'] = IFACES[1]
def pytest_unconfigure(config):
if os.path.exists(PIDFILE):
with open(PIDFILE) as f:
pid = f.read().strip()
os.kill(int(pid), 15)
|
StarcoderdataPython
|
5137220
|
<filename>pyeccodes/defs/grib2/tables/5/5_7_table.py
def load(h):
return ({'abbr': 1, 'code': 1, 'title': 'IEEE 32-bit', 'units': 'I=4 in Section 7'},
{'abbr': 2, 'code': 2, 'title': 'IEEE 64-bit', 'units': 'I=8 in Section 7'},
{'abbr': 3, 'code': 3, 'title': 'IEEE 128-bit', 'units': 'I=16 in Section 7'},
{'abbr': None, 'code': 255, 'title': 'Missing'})
|
StarcoderdataPython
|
9612489
|
<gh_stars>0
'Helpers for using Django from threads'
from django.db import close_old_connections
from threading import Thread as BaseThread
from concurrent.futures import ThreadPoolExecutor as BaseThreadPoolExecutor
__VERSION__ = '0.0.1'
class Thread(BaseThread):
def start(self):
close_old_connections()
super().start()
# TODO: would be nice if there was a place to hook in after run exits that
# doesn't require overriding a _ method.
def _bootstrap_inner(self):
super()._bootstrap_inner()
close_old_connections()
class ThreadPoolExecutor(BaseThreadPoolExecutor):
def submit(self, fn, *args, **kwargs):
def wrap(*wargs, **wkwargs):
close_old_connections()
try:
return fn(*wargs, **wkwargs)
finally:
close_old_connections()
return super().submit(wrap, *args, **kwargs)
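# Hypothetical usage sketch (not part of the original module): run an ORM query in a worker
# thread; the executor wrapper above closes stale connections around the call. Assumes a
# configured Django project with an importable `myapp.models.Book` model (illustrative name).
#
#   def count_books():
#       from myapp.models import Book
#       return Book.objects.count()
#
#   with ThreadPoolExecutor(max_workers=4) as executor:
#       print(executor.submit(count_books).result())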
|
StarcoderdataPython
|
3206306
|
<reponame>gshuflin/pants
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
from dataclasses import dataclass
from typing import Mapping, Optional, Tuple
from pants.base.build_environment import get_buildroot
from pants.base.exception_sink import ExceptionSink
from pants.base.exiter import PANTS_FAILED_EXIT_CODE, PANTS_SUCCEEDED_EXIT_CODE, ExitCode
from pants.base.specs import Specs
from pants.base.specs_parser import SpecsParser
from pants.base.workunit import WorkUnit
from pants.build_graph.build_configuration import BuildConfiguration
from pants.engine.internals.native import Native
from pants.engine.internals.scheduler import ExecutionError
from pants.engine.unions import UnionMembership
from pants.goal.run_tracker import RunTracker
from pants.help.help_info_extracter import HelpInfoExtracter
from pants.help.help_printer import HelpPrinter
from pants.init.engine_initializer import EngineInitializer, GraphScheduler, GraphSession
from pants.init.options_initializer import BuildConfigInitializer, OptionsInitializer
from pants.init.specs_calculator import calculate_specs
from pants.option.options import Options
from pants.option.options_bootstrapper import OptionsBootstrapper
from pants.option.subsystem import Subsystem
from pants.reporting.streaming_workunit_handler import StreamingWorkunitHandler
from pants.util.contextutil import maybe_profiled
logger = logging.getLogger(__name__)
@dataclass
class LocalPantsRunner:
"""Handles a single pants invocation running in the process-local context.
build_root: The build root for this run.
options: The parsed options for this run.
options_bootstrapper: The OptionsBootstrapper instance to use.
build_config: The parsed build configuration for this run.
specs: The specs for this run, i.e. either the address or filesystem specs.
graph_session: A LegacyGraphSession instance for graph reuse.
    profile_path: The profile path - if any (from the `PANTS_PROFILE` env var).
"""
build_root: str
options: Options
options_bootstrapper: OptionsBootstrapper
build_config: BuildConfiguration
specs: Specs
graph_session: GraphSession
union_membership: UnionMembership
profile_path: Optional[str]
_run_tracker: RunTracker
@staticmethod
def parse_options(
options_bootstrapper: OptionsBootstrapper,
) -> Tuple[Options, BuildConfiguration]:
build_config = BuildConfigInitializer.get(options_bootstrapper)
options = OptionsInitializer.create(options_bootstrapper, build_config)
return options, build_config
@staticmethod
def _init_graph_session(
options_bootstrapper: OptionsBootstrapper,
build_config: BuildConfiguration,
options: Options,
scheduler: Optional[GraphScheduler] = None,
) -> GraphSession:
native = Native()
native.set_panic_handler()
graph_scheduler_helper = scheduler or EngineInitializer.setup_graph(
options_bootstrapper, build_config
)
global_scope = options.for_global_scope()
dynamic_ui = global_scope.dynamic_ui if global_scope.v2 else False
use_colors = global_scope.get("colors", True)
stream_workunits = len(options.for_global_scope().streaming_workunits_handlers) != 0
return graph_scheduler_helper.new_session(
RunTracker.global_instance().run_id,
dynamic_ui=dynamic_ui,
use_colors=use_colors,
should_report_workunits=stream_workunits,
)
@classmethod
def create(
cls,
env: Mapping[str, str],
options_bootstrapper: OptionsBootstrapper,
scheduler: Optional[GraphScheduler] = None,
) -> "LocalPantsRunner":
"""Creates a new LocalPantsRunner instance by parsing options.
By the time this method runs, logging will already have been initialized in either
PantsRunner or DaemonPantsRunner.
:param env: The environment (e.g. os.environ) for this run.
:param options_bootstrapper: The OptionsBootstrapper instance to reuse.
:param scheduler: If being called from the daemon, a warmed scheduler to use.
"""
build_root = get_buildroot()
global_bootstrap_options = options_bootstrapper.bootstrap_options.for_global_scope()
options, build_config = LocalPantsRunner.parse_options(options_bootstrapper)
# Option values are usually computed lazily on demand,
# but command line options are eagerly computed for validation.
for scope in options.scope_to_flags.keys():
options.for_scope(scope)
# Verify configs.
if global_bootstrap_options.verify_config:
options.verify_configs(options_bootstrapper.config)
union_membership = UnionMembership.from_rules(build_config.union_rules)
# If we're running with the daemon, we'll be handed a warmed Scheduler, which we use
# to initialize a session here.
graph_session = cls._init_graph_session(
options_bootstrapper, build_config, options, scheduler
)
specs = calculate_specs(
options_bootstrapper=options_bootstrapper,
options=options,
build_root=build_root,
session=graph_session.scheduler_session,
)
profile_path = env.get("PANTS_PROFILE")
return cls(
build_root=build_root,
options=options,
options_bootstrapper=options_bootstrapper,
build_config=build_config,
specs=specs,
graph_session=graph_session,
union_membership=union_membership,
profile_path=profile_path,
_run_tracker=RunTracker.global_instance(),
)
def _set_start_time(self, start_time: float) -> None:
# Propagates parent_build_id to pants runs that may be called from this pants run.
os.environ["PANTS_PARENT_BUILD_ID"] = self._run_tracker.run_id
self._run_tracker.start(self.options, run_start_time=start_time)
spec_parser = SpecsParser(get_buildroot())
specs = [str(spec_parser.parse_spec(spec)) for spec in self.options.specs]
# Note: This will not include values from `--changed-*` flags.
self._run_tracker.run_info.add_info("specs_from_command_line", specs, stringify=False)
def _run_v2(self) -> ExitCode:
goals = self.options.goals
self._run_tracker.set_v2_goal_rule_names(tuple(goals))
if not goals:
return PANTS_SUCCEEDED_EXIT_CODE
global_options = self.options.for_global_scope()
if not global_options.get("loop", False):
return self._maybe_run_v2_body(goals, poll=False)
iterations = global_options.loop_max
exit_code = PANTS_SUCCEEDED_EXIT_CODE
while iterations:
# NB: We generate a new "run id" per iteration of the loop in order to allow us to
# observe fresh values for Goals. See notes in `scheduler.rs`.
self.graph_session.scheduler_session.new_run_id()
try:
exit_code = self._maybe_run_v2_body(goals, poll=True)
except ExecutionError as e:
logger.warning(e)
iterations -= 1
return exit_code
def _maybe_run_v2_body(self, goals, poll: bool) -> ExitCode:
return self.graph_session.run_goal_rules(
options_bootstrapper=self.options_bootstrapper,
union_membership=self.union_membership,
goals=goals,
specs=self.specs,
poll=poll,
poll_delay=(0.1 if poll else None),
)
@staticmethod
def _merge_exit_codes(code: ExitCode, *codes: ExitCode) -> ExitCode:
"""Returns the exit code with higher abs value in case of negative values."""
max_code = code
for code in codes:
if abs(max_code) < abs(code):
max_code = code
return max_code
def _finish_run(self, code: ExitCode) -> ExitCode:
"""Checks that the RunTracker is in good shape to exit, and then returns its exit code.
TODO: The RunTracker's exit code will likely not be relevant in v2: the exit codes of
individual `@goal_rule`s are everything in that case.
"""
run_tracker_result = PANTS_SUCCEEDED_EXIT_CODE
scheduler_session = self.graph_session.scheduler_session
try:
metrics = scheduler_session.metrics()
self._run_tracker.pantsd_stats.set_scheduler_metrics(metrics)
outcome = WorkUnit.SUCCESS if code == PANTS_SUCCEEDED_EXIT_CODE else WorkUnit.FAILURE
self._run_tracker.set_root_outcome(outcome)
run_tracker_result = self._run_tracker.end()
except ValueError as e:
# If we have been interrupted by a signal, calling .end() sometimes writes to a closed
# file, so we just log that fact here and keep going.
ExceptionSink.log_exception(exc=e)
return run_tracker_result
def run(self, start_time: float) -> ExitCode:
self._set_start_time(start_time)
with maybe_profiled(self.profile_path):
global_options = self.options.for_global_scope()
streaming_handlers = global_options.streaming_workunits_handlers
report_interval = global_options.streaming_workunits_report_interval
callbacks = Subsystem.get_streaming_workunit_callbacks(streaming_handlers)
streaming_reporter = StreamingWorkunitHandler(
self.graph_session.scheduler_session,
callbacks=callbacks,
report_interval_seconds=report_interval,
)
if self.options.help_request:
all_help_info = HelpInfoExtracter.get_all_help_info(
self.options,
self.union_membership,
self.graph_session.goal_consumed_subsystem_scopes,
)
help_printer = HelpPrinter(
bin_name=global_options.pants_bin_name,
help_request=self.options.help_request,
all_help_info=all_help_info,
use_color=global_options.colors,
)
return help_printer.print_help()
with streaming_reporter.session():
engine_result = PANTS_FAILED_EXIT_CODE
try:
engine_result = self._run_v2()
except Exception as e:
ExceptionSink.log_exception(e)
run_tracker_result = self._finish_run(engine_result)
return self._merge_exit_codes(engine_result, run_tracker_result)
|
StarcoderdataPython
|
71671
|
<gh_stars>0
"""Defines Hello message."""
# System imports
from enum import IntEnum
from pyof.foundation.base import GenericMessage, GenericStruct
from pyof.foundation.basic_types import BinaryData, FixedTypeList, UBInt16, UBInt32, TypeList
from pyof.foundation.exceptions import PackException
from pyof.v0x05.common.header import Header, Type
# Third-party imports
__all__ = ('Hello', 'HelloElemHeader', 'HelloElemType', 'ListOfHelloElements', 'HelloElemVersionBitmap')
# Enums
class HelloElemType(IntEnum):
"""Hello element types."""
#: Bitmap of version supported.
OFPHET_VERSIONBITMAP = 1
# Classes
class HelloElemHeader(GenericStruct):
"""Common header for all Hello Elements."""
# One of OFPHET_*.
type = UBInt16()
#Length in bytes of element, including this header, excluding padding.
length = UBInt16()
# This variable does NOT appear in 1.4 specification
#content = BinaryData()
def __init__(self, element_type=None, length=None, content=b''):
"""Create a HelloElemHeader with the optional parameters below.
Args:
element_type: One of OFPHET_*.
length: Length in bytes of the element, including this header,
excluding padding.
"""
super().__init__()
self.type = element_type
self.length = length
#self.content = content
def pack(self, value=None):
"""Update the length and pack the message into binary data.
Returns:
bytes: A binary data that represents the Message.
Raises:
Exception: If there are validation errors.
"""
if value is None:
self.update_length()
return super().pack()
elif isinstance(value, type(self)):
return value.pack()
else:
msg = "{} is not an instance of {}".format(value,
type(self).__name__)
raise PackException(msg)
def update_length(self):
"""Update length attribute."""
self.length = self.get_size()
def unpack(self, buff=None, offset=0):
"""Unpack *buff* into this object.
This method will convert a binary data into a readable value according
to the attribute format.
Args:
buff (bytes): Binary buffer.
offset (int): Where to begin unpacking.
Raises:
:exc:`~.exceptions.UnpackException`: If unpack fails.
"""
# length = UBInt16()
self.length.unpack(buff, offset=offset+2)
super().unpack(buff[:offset+self.length.value], offset)
class ListOfHelloElements(FixedTypeList):
"""List of Hello elements.
Represented by instances of HelloElemHeader and used on Hello
objects.
"""
def __init__(self, items=None):
"""Create a ListOfHelloElements with the optional parameters below.
Args:
items (HelloElemHeader): Instance or a list of instances.
"""
super().__init__(pyof_class=HelloElemHeader, items=items)
# if (items != None and isinstance(HelloElemHeader,items)):
# super().append(items)
class Hello(GenericMessage):
"""OpenFlow Hello Message OFPT_HELLO.
This message includes zero or more hello elements having variable size.
Unknown element types must be ignored/skipped, to allow for future
extensions.
"""
header = Header(Type.OFPT_HELLO)
#: Hello element list
#: List of elements - 0 or more
elements = ListOfHelloElements()
def __init__(self, xid=None, elements=None):
"""Create a Hello with the optional parameters below.
Args:
xid (int): xid to be used on the message header.
elements: List of elements - 0 or more
"""
super().__init__(xid)
self.elements = elements
class HelloElemVersionBitmap(HelloElemHeader):
""" Version bitmap Hello Element
    There is no need to pass the version bitmap type because it is set automatically as the element type.
Followed by:
- Exactly (length - 4) bytes containing the bitmaps, then
- Exactly (length + 7) / 8 * 8 - (length) (between 0 and 7)
bytes of all-zero bytes.
"""
# List of bitmaps - supported versions
bitmaps = BinaryData()
# Under Review
def __init__(self, length=None, bitmaps=None):
"""This function will initialize the object with a specific length and different versions """
super().__init__(HelloElemType.OFPHET_VERSIONBITMAP, length)
self.bitmaps = bitmaps
|
StarcoderdataPython
|
1728543
|
import hangpy
import redis
server_configuration = hangpy.ServerConfigurationDto(cycle_interval_milliseconds=10000, slots=10)
redis_client = redis.StrictRedis(host='172.17.0.1', port=6379, password=None)
job_repository = hangpy.RedisJobRepository(redis_client)
server_repository = hangpy.RedisServerRepository(redis_client)
log_service = hangpy.PrintLogService()
server_service = hangpy.ServerService(server_configuration, server_repository, job_repository, log_service)
server_service.start()
__exit = ''
while(__exit.lower() != 'exit'):
__exit = input('\nTo exit the server, type \'exit\'.\n')
server_service.stop()
server_service.join()
|
StarcoderdataPython
|
1615262
|
<reponame>outrofelipe/Python-para-zumbis<gh_stars>1-10
import urllib.request
pagina = urllib.request.urlopen(
'http://beans.itcarlow.ie/prices-loyalty.html')
texto = pagina.read().decode('utf-8')
onde = texto.find('>$')
inicio = onde + 2
fim = onde + 4
preco = texto[inicio:fim]
print(preco)
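# Hypothetical variant (not part of the original exercise): extract the price with a
# regular expression instead of fixed character offsets, which is less brittle if the
# page layout shifts slightly.
import re
match = re.search(r'>\$(\d+(?:\.\d+)?)', texto)
if match:
    print(match.group(1))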
|
StarcoderdataPython
|
1928022
|
<reponame>JNKielmann/Master-Thesis
"""
Expert list created from online survey data.
The list contains Microsoft Academic Graph IDs.
"""
survey_expert_list = {
"humanoid robot": [
1047662447,
1305265972,
2570474579,
],
"natural language processing": [
2061074560,
2074083528,
280384801,
2578689274,
2170753228,
2794237920,
],
"internet of things": [
673846798,
102396157,
2130664820,
],
"evolutionary algorithm": [
2056122262,
2132999805,
2036573014,
]
}
|
StarcoderdataPython
|
6682820
|
import torch
from torch import nn, optim
from torchvision import models
import numpy as np
from collections import OrderedDict
def create_model(arch, hidden_units , prob_dropout):
# Create model
model = eval("models." + arch + "(pretrained=True)")
model.epochs = 0
# To prevent backprop through parameters freeze parameters
for param in model.parameters():
param.requires_grad = False
# Determine input units of the selected model
if 'densenet' in arch:
input_units = model.classifier.in_features
elif arch == 'alexnet':
input_units = model.classifier[1].in_features
elif 'vgg' in arch:
input_units = model.classifier[0].in_features
elif 'resnet' in arch:
input_units = model.fc.in_features
else:
raise NameError('The model ' + arch + ' you chose is currently not available in this application. Please check whether you typed your model correctly')
classifier = nn.Sequential(OrderedDict([
('fc1', nn.Linear(input_units, hidden_units)),
('relu1', nn.ReLU()),
('dropout1', nn.Dropout(prob_dropout)),
('fc2', nn.Linear(hidden_units,102)),
('output', nn.LogSoftmax(dim=1))
]))
    # resnet models expose the final layer as `fc`; the other supported models use `classifier`
    if 'resnet' in arch:
        model.fc = classifier
    else:
        model.classifier = classifier
# Save additional model parameters
model.name = arch
model.input_units = input_units
model.hidden_units = hidden_units
model.prob_dropout = prob_dropout
return model
def validation(model, loader, criterion, device):
with torch.no_grad():
accuracy = 0
loss = 0
for inputs, labels in loader:
inputs, labels = inputs.to(device), labels.to(device)
output = model.forward(inputs)
loss += criterion(output, labels).item()
ps = torch.exp(output)
equality = (labels.data == ps.max(dim=1)[1])
accuracy += equality.type(torch.FloatTensor).mean()
loss = loss/len(loader)
accuracy = accuracy/len(loader)
return loss, accuracy
def train_model(model, trainloader, learning_rate, epochs, gpu, print_every = 40, validloader=None):
steps = 0
criterion = nn.NLLLoss()
    # resnet models keep their classification head in `fc`; the rest use `classifier`
    head = model.fc if 'resnet' in model.name else model.classifier
    optimizer = optim.Adam(head.parameters(), lr=learning_rate)
model.train()
# Train model on gpu if available
#device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
if gpu:
device = "cuda:0"
else:
device = "cpu"
model.to(device)
for e in range(epochs):
running_loss = 0
for inputs, labels in trainloader:
steps += 1
inputs, labels = inputs.to(device), labels.to(device)
optimizer.zero_grad()
output = model.forward(inputs)
loss = criterion(output, labels)
loss.backward()
optimizer.step()
running_loss += loss.item()
if steps % print_every == 0:
#print("calculate accuracy")
model.eval()
# Calculate loss and accuracy of validation set
if validloader:
loss_val, accuracy_val = validation(model, validloader, criterion, device)
print("Epoch: {}/{}\t".format(e+1,epochs),
("Step: {}\t".format(steps)),
("Loss (test): {:.4f}\t".format(running_loss/print_every)),
("Loss (val): {:.4f}\t".format(loss_val)),
("Accuracy (val): {:.4f}\n".format(accuracy_val)))
else:
print("Epoch: {}/{}\t".format(e+1,epochs),
("Loss: {:.4f}".format(running_loss/print_every)))
running_loss = 0
model.train()
model.epochs += 1
return model, optimizer
def save_model(model, optimizer):
checkpoint = {'state_dict': model.state_dict(),
'class_to_idx': model.class_to_idx,
'name': model.name,
'input_units': model.input_units,
'hidden_units': model.hidden_units,
'prob_dropout': model.prob_dropout,
'n_epochs': model.epochs,
'optimizer' : optimizer.state_dict}
torch.save(checkpoint, 'checkpoint.pth')
print("model saved to checkpoint.pth")
def load_model(checkpoint, gpu):
if gpu:
device = "cuda:0"
else:
device = "cpu"
if device == "cuda:0":
checkpoint = torch.load(checkpoint, map_location={'cpu': 'cuda:0'})
else:
checkpoint = torch.load(checkpoint, map_location={'cuda:0': 'cpu'})
# Create the identical model
model = eval("models." + checkpoint['name'] + "(pretrained=True)")
#model = models.densenet121(pretrained=True)
for param in model.parameters():
param.requires_grad = False
# Create the classifier
classifier = nn.Sequential(OrderedDict([
('fc1', nn.Linear(checkpoint['input_units'], checkpoint['hidden_units'])),
('relu1', nn.ReLU()),
('dropout1', nn.Dropout(checkpoint['prob_dropout'])),
('fc2', nn.Linear(checkpoint['hidden_units'],102)),
('output', nn.LogSoftmax(dim=1))
]))
if hasattr(model, 'classifier'):
model.classifier = classifier
elif hasattr(model, 'fc'):
model.fc = classifier
model.load_state_dict(checkpoint['state_dict'])
model.epochs = checkpoint['n_epochs']
model.class_to_idx = checkpoint['class_to_idx']
return model
def predict(image, model, top_k, gpu, cat_to_name=None):
''' Predict the class (or classes) of an image using a trained deep learning model.
'''
if gpu:
device = "cuda:0"
else:
device = "cpu"
# Add additional "image index dimension"
image = np.expand_dims(image,0)
image_tensor = torch.FloatTensor(image)
model.eval()
with torch.no_grad():
model.to(device)
image_tensor = image_tensor.to(device)
output = model.forward(image_tensor)
ps = torch.exp(output)
probs, probs_index = ps.topk(top_k)
probs, probs_index = probs.tolist()[0], probs_index.tolist()[0]
class_to_idx = model.class_to_idx
idx_to_class = {y:x for x,y in class_to_idx.items()}
classes = [idx_to_class[x] for x in probs_index]
# Use a mapping of categories to real names
if cat_to_name:
classes = [cat_to_name[str(x)] for x in classes]
model.train()
return probs, classes
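# Hypothetical smoke-test sketch (not part of the original module): build a densenet121-based
# classifier head and run predict() on a random, already-preprocessed image. The random
# 3x224x224 array merely stands in for a real normalized image tensor.
if __name__ == '__main__':
    demo_model = create_model('densenet121', hidden_units=512, prob_dropout=0.2)
    demo_model.class_to_idx = {str(i): i for i in range(102)}  # placeholder mapping
    demo_image = np.random.rand(3, 224, 224).astype(np.float32)
    demo_probs, demo_classes = predict(demo_image, demo_model, top_k=5, gpu=False)
    print(demo_probs, demo_classes)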
|
StarcoderdataPython
|
3454309
|
import pytest
import requests
import hashlib
import tarfile
import json
import os
import pyhf
import numpy as np
@pytest.fixture(scope='module')
def sbottom_likelihoods_download():
"""Download the sbottom likelihoods tarball from HEPData"""
sbottom_HEPData_URL = "https://doi.org/10.17182/hepdata.89408.v1/r2"
targz_filename = "sbottom_workspaces.tar.gz"
response = requests.get(sbottom_HEPData_URL, stream=True)
assert response.status_code == 200
with open(targz_filename, "wb") as file:
file.write(response.content)
assert (
hashlib.sha256(open(targz_filename, "rb").read()).hexdigest()
== "9089b0e5fabba335bea4c94545ccca8ddd21289feeab2f85e5bcc8bada37be70"
)
# Open as a tarfile
yield tarfile.open(targz_filename, "r:gz")
os.remove(targz_filename)
# Factory as fixture pattern
@pytest.fixture
def get_json_from_tarfile():
def _get_json_from_tarfile(tarfile, json_name):
json_file = (
tarfile.extractfile(tarfile.getmember(json_name)).read().decode("utf8")
)
return json.loads(json_file)
return _get_json_from_tarfile
def calculate_CLs(bkgonly_json, signal_patch_json):
"""
Calculate the observed CLs and the expected CLs band from a background only
and signal patch.
Args:
bkgonly_json: The JSON for the background only model
signal_patch_json: The JSON Patch for the signal model
Returns:
CLs_obs: The observed CLs value
CLs_exp: List of the expected CLs value band
"""
workspace = pyhf.workspace.Workspace(bkgonly_json)
model = workspace.model(
measurement_name=None,
patches=[signal_patch_json],
modifier_settings={
'normsys': {'interpcode': 'code4'},
'histosys': {'interpcode': 'code4p'},
},
)
result = pyhf.infer.hypotest(
1.0, workspace.data(model), model, qtilde=True, return_expected_set=True
)
return result[0].tolist()[0], result[-1].ravel().tolist()
def test_sbottom_regionA_1300_205_60(
sbottom_likelihoods_download, get_json_from_tarfile
):
sbottom_regionA_bkgonly_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionA/BkgOnly.json"
)
sbottom_regionA_1300_205_60_patch_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionA/patch.sbottom_1300_205_60.json"
)
CLs_obs, CLs_exp = calculate_CLs(
sbottom_regionA_bkgonly_json, sbottom_regionA_1300_205_60_patch_json
)
assert CLs_obs == pytest.approx(0.24443627759085326, rel=1e-5)
assert np.all(
np.isclose(
np.array(CLs_exp),
np.array(
[
0.09022509053507759,
0.1937839194960632,
0.38432344933992,
0.6557757334303531,
0.8910420971601081,
]
),
rtol=1e-5,
)
)
def test_sbottom_regionA_1400_950_60(
sbottom_likelihoods_download, get_json_from_tarfile
):
sbottom_regionA_bkgonly_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionA/BkgOnly.json"
)
sbottom_regionA_1400_950_60_patch_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionA/patch.sbottom_1400_950_60.json"
)
CLs_obs, CLs_exp = calculate_CLs(
sbottom_regionA_bkgonly_json, sbottom_regionA_1400_950_60_patch_json
)
assert CLs_obs == pytest.approx(0.021373283911064852, rel=1e-5)
assert np.all(
np.isclose(
np.array(CLs_exp),
np.array(
[
0.002644707461012826,
0.013976754489151644,
0.06497313811425813,
0.23644505123524753,
0.5744843501873754,
]
),
rtol=1e-5,
)
)
def test_sbottom_regionA_1500_850_60(
sbottom_likelihoods_download, get_json_from_tarfile
):
sbottom_regionA_bkgonly_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionA/BkgOnly.json"
)
sbottom_regionA_1500_850_60_patch_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionA/patch.sbottom_1500_850_60.json"
)
CLs_obs, CLs_exp = calculate_CLs(
sbottom_regionA_bkgonly_json, sbottom_regionA_1500_850_60_patch_json
)
assert CLs_obs == pytest.approx(0.04536774062150508, rel=1e-5)
assert np.all(
np.isclose(
np.array(CLs_exp),
np.array(
[
0.0059847029077065295,
0.026103516126601122,
0.10093985752614597,
0.3101988586187604,
0.6553686728646031,
]
),
rtol=1e-5,
)
)
def test_sbottom_regionB_1400_550_60(
sbottom_likelihoods_download, get_json_from_tarfile
):
sbottom_regionB_bkgonly_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionB/BkgOnly.json"
)
sbottom_regionB_1400_550_60_patch_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionB/patch.sbottom_1400_550_60.json"
)
CLs_obs, CLs_exp = calculate_CLs(
sbottom_regionB_bkgonly_json, sbottom_regionB_1400_550_60_patch_json
)
assert CLs_obs == pytest.approx(0.9744675266677597, rel=1e-5)
assert np.all(
np.isclose(
np.array(CLs_exp),
np.array(
[
0.9338879894557114,
0.9569045303300702,
0.9771296335437559,
0.9916370124133669,
0.9983701133999316,
]
),
rtol=1e-5,
)
)
def test_sbottom_regionC_1600_850_60(
sbottom_likelihoods_download, get_json_from_tarfile
):
sbottom_regionC_bkgonly_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionC/BkgOnly.json"
)
sbottom_regionC_1600_850_60_patch_json = get_json_from_tarfile(
sbottom_likelihoods_download, "RegionC/patch.sbottom_1600_850_60.json"
)
CLs_obs, CLs_exp = calculate_CLs(
sbottom_regionC_bkgonly_json, sbottom_regionC_1600_850_60_patch_json
)
assert CLs_obs == pytest.approx(0.711023707425625, rel=1e-5)
assert np.all(
np.isclose(
np.array(CLs_exp),
np.array(
[
0.2955492909588046,
0.4446885457298284,
0.6371473864200973,
0.8336149623750603,
0.9585901381554178,
]
),
rtol=1e-5,
)
)
|
StarcoderdataPython
|
11209889
|
from bs4 import BeautifulSoup
from markdown import markdown
class Markdown():
    @staticmethod
    def markdown_to_plaintext(markdown_text):
html = markdown(markdown_text)
text = ''.join(BeautifulSoup(html, 'html.parser').findAll(text=True))
return text
    @staticmethod
    def tree_to_plaintext(marked_tree):
text_list = []
for item in marked_tree:
if 'text' in item:
text_list.append(item['text'])
return '\n'.join(text_list)
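# Hypothetical usage sketch (not part of the original module), exercising both helpers on
# toy inputs; the expected output is noted in the trailing comments.
if __name__ == '__main__':
    sample_md = '# Title\n\nSome **bold** text.'
    print(Markdown.markdown_to_plaintext(sample_md))  # -> Title\nSome bold text.
    print(Markdown.tree_to_plaintext([{'text': 'a'}, {'type': 'hr'}, {'text': 'b'}]))  # -> a\nb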
|
StarcoderdataPython
|
1753353
|
#!/usr/bin/env python3
### MORTGAGE CALCULATOR ###
# The program calculates the monthly payment of a fixed-term mortgage over a given number of months at a given interest rate, and shows how long it will take the user to pay back the loan.
# First, read "months", "interest rate" and "loan" from the user.
months = int(input("Please enter mortgage term as month: "))
interest_rate = float(input("Please enter interest rate: "))
loan = float(input("Please enter loan value: "))
# Now compute the monthly payment of the fixed-term mortgage using the standard annuity formula.
monthly_interest_rate = interest_rate / 100 / 12
payment = (monthly_interest_rate / (1 - (1 + monthly_interest_rate) ** (-months))) * loan
print("Monthly payment for a $%.2f %s month mortgage at %.2f interest rate is: $%.2f" % (loan, months, interest_rate, payment))
|
StarcoderdataPython
|
6630783
|
<reponame>sireliah/polish-python<filename>Lib/lib2to3/fixes/fix_tuple_params.py
"""Fixer dla function definitions przy tuple parameters.
def func(((a, b), c), d):
...
->
def func(x, d):
((a, b), c) = x
...
It will also support lambdas:
lambda (x, y): x + y -> lambda t: t[0] + t[1]
    # The parens are a syntax error in Python 3
lambda (x): x + y -> lambda x: x + y
"""
# Author: <NAME>
# Local imports
z .. zaimportuj pytree
z ..pgen2 zaimportuj token
z .. zaimportuj fixer_base
z ..fixer_util zaimportuj Assign, Name, Newline, Number, Subscript, syms
def is_docstring(stmt):
zwróć isinstance(stmt, pytree.Node) oraz \
stmt.children[0].type == token.STRING
klasa FixTupleParams(fixer_base.BaseFix):
    run_order = 4  # use a lower order since lambda is part of other patterns
BM_compatible = Prawda
PATTERN = """
funcdef< 'def' any parameters< '(' args=any ')' >
['->' any] ':' suite=any+ >
|
lambda=
lambdef< 'lambda' args=vfpdef< '(' inner=any ')' >
':' body=any
>
"""
def transform(self, node, results):
jeżeli "lambda" w results:
zwróć self.transform_lambda(node, results)
new_lines = []
suite = results["suite"]
args = results["args"]
        # This crap is so "def foo(...): x = 5; y = 7" is handled correctly.
# TODO(cwinter): suite-cleanup
jeżeli suite[0].children[1].type == token.INDENT:
start = 2
indent = suite[0].children[1].value
end = Newline()
inaczej:
start = 0
indent = "; "
end = pytree.Leaf(token.INDENT, "")
        # We need access to self for new_name(), and making this a method
        # doesn't feel right. Closing over self and new_lines makes the
        # code below cleaner.
def handle_tuple(tuple_arg, add_prefix=Nieprawda):
n = Name(self.new_name())
arg = tuple_arg.clone()
arg.prefix = ""
stmt = Assign(arg, n.clone())
jeżeli add_prefix:
n.prefix = " "
tuple_arg.replace(n)
new_lines.append(pytree.Node(syms.simple_stmt,
[stmt, end.clone()]))
jeżeli args.type == syms.tfpdef:
handle_tuple(args)
albo_inaczej args.type == syms.typedargslist:
dla i, arg w enumerate(args.children):
jeżeli arg.type == syms.tfpdef:
                    # Without add_prefix, the emitted code is correct,
                    # just ugly.
handle_tuple(arg, add_prefix=(i > 0))
jeżeli nie new_lines:
zwróć
        # This isn't strictly necessary, but it plays nicely with other fixers.
# TODO(cwinter) get rid of this when children becomes a smart list
dla line w new_lines:
line.parent = suite[0]
# TODO(cwinter) suite-cleanup
after = start
jeżeli start == 0:
new_lines[0].prefix = " "
albo_inaczej is_docstring(suite[0].children[start]):
new_lines[0].prefix = indent
after = start + 1
dla line w new_lines:
line.parent = suite[0]
suite[0].children[after:after] = new_lines
dla i w range(after+1, after+len(new_lines)+1):
suite[0].children[i].prefix = indent
suite[0].changed()
def transform_lambda(self, node, results):
args = results["args"]
body = results["body"]
inner = simplify_args(results["inner"])
        # Replace lambda ((((x)))): x with lambda x: x
jeżeli inner.type == token.NAME:
inner = inner.clone()
inner.prefix = " "
args.replace(inner)
zwróć
params = find_params(args)
to_index = map_to_index(params)
tup_name = self.new_name(tuple_name(params))
new_param = Name(tup_name, prefix=" ")
args.replace(new_param.clone())
dla n w body.post_order():
jeżeli n.type == token.NAME oraz n.value w to_index:
subscripts = [c.clone() dla c w to_index[n.value]]
new = pytree.Node(syms.power,
[new_param.clone()] + subscripts)
new.prefix = n.prefix
n.replace(new)
### Helper functions for transform_lambda()
def simplify_args(node):
jeżeli node.type w (syms.vfplist, token.NAME):
zwróć node
albo_inaczej node.type == syms.vfpdef:
        # These look like vfpdef< '(' x ')' > where x is NAME
        # or another vfpdef instance (leading to recursion).
dopóki node.type == syms.vfpdef:
node = node.children[1]
zwróć node
podnieś RuntimeError("Received unexpected node %s" % node)
def find_params(node):
jeżeli node.type == syms.vfpdef:
zwróć find_params(node.children[1])
albo_inaczej node.type == token.NAME:
zwróć node.value
zwróć [find_params(c) dla c w node.children jeżeli c.type != token.COMMA]
def map_to_index(param_list, prefix=[], d=Nic):
jeżeli d jest Nic:
d = {}
dla i, obj w enumerate(param_list):
trailer = [Subscript(Number(str(i)))]
jeżeli isinstance(obj, list):
map_to_index(obj, trailer, d=d)
inaczej:
d[obj] = prefix + trailer
zwróć d
def tuple_name(param_list):
l = []
dla obj w param_list:
jeżeli isinstance(obj, list):
l.append(tuple_name(obj))
inaczej:
l.append(obj)
zwróć "_".join(l)
|
StarcoderdataPython
|
1715344
|
# python3
from abc import ABC
from collections import namedtuple
from sys import setrecursionlimit, stdin
from threading import stack_size, Thread
from typing import AnyStr, IO, List
from unittest import TestCase
setrecursionlimit(10 ** 6)
stack_size(2 ** 27)
border = namedtuple('border', 'left right')
test = namedtuple('test', 'input expected')
class TreeChecker:
def __init__(self):
self.n = 0
self.key = self.left = self.right = None
def read(self, src: IO):
self.n = int(src.readline())
self.key = [0 for _ in range(self.n)]
self.left = [0 for _ in range(self.n)]
self.right = [0 for _ in range(self.n)]
for i in range(self.n):
[self.key[i], self.left[i], self.right[i]] = map(int, src.readline().split())
return self
def check(self, node: int = 0, bound: border = border(-2 ** 31 - 1, 2 ** 31)) -> bool:
if self.n < 2:
return True
root = self.key[node]
left = self.left[node]
if left != -1:
if self.key[left] >= root:
return False
if self.key[left] < bound.left:
return False
if not self.check(left, border(bound.left, root)):
return False
right = self.right[node]
if right != -1:
if self.key[right] < root:
return False
if self.key[right] >= bound.right:
return False
if not self.check(right, border(root, bound.right)):
return False
return True
class Fake(IO, ABC):
def __init__(self, rows: List[str]):
self.__i = -1
self.__rows = [str(len(rows))] + rows
def readline(self, limit: int = -1) -> AnyStr:
self.__i += 1
return self.__rows[self.__i]
class Test(TestCase):
def test_tree_checker(self):
tests = [
# samples
test([
'2 1 2',
'1 -1 -1',
'3 -1 -1',
], True),
test([
'1 1 2',
'2 -1 -1',
'3 -1 -1',
], False),
test([
'2 1 2',
'1 -1 -1',
'2 -1 -1',
], True),
test([
'2 1 2',
'2 -1 -1',
'3 -1 -1',
], False),
test([], True),
test(['2147483647 -1 -1'], True),
test([
'1 -1 1',
'2 -1 2',
'3 -1 3',
'4 -1 4',
'5 -1 -1',
], True),
test([
'4 1 2',
'2 3 4',
'6 5 6',
'1 -1 -1',
'3 -1 -1',
'5 -1 -1',
'7 -1 -1',
], True),
# additional
test([
'4 1 -1',
'2 -1 2',
'4 -1 -1',
], False),
]
for i, t in enumerate(tests):
src = Fake(t.input)
self.assertEqual(t.expected, TreeChecker().read(src).check(), msg='at {} position'.format(i))
def main():
print('CORRECT') if TreeChecker().read(stdin).check() else print('INCORRECT')
if __name__ == '__main__':
Thread(target=main).start()
|
StarcoderdataPython
|
1985595
|
<filename>daily_menu/crawler/management/commands/helpers/init_zomato_restaurants.py<gh_stars>0
from django.core.management import BaseCommand
from datetime import datetime
from crawler.models import RestaurantScraperConfig
from restaurants.models import Restaurant
class Command(BaseCommand):
help = 'Scrape recipes websites and save new recipes'
def handle(self, *args, **kwargs):
data = (('<NAME>', 'https://www.zomato.com/praha/tankovna-karlín-karlín-praha-8/menu', '18057566'),
('Globus restaurace', 'https://www.zomato.com/praha/globus-restaurace-karlín-praha-8/menu', '18257507'),
('U Zábranských', 'https://www.zomato.com/praha/u-zábranských-karlín-praha-8/menu', '16505936'),
('Peter\'s Burger Pub', 'https://www.zomato.com/praha/peters-burger-pub-karlín-praha-8/menu',
'16506740'),
('Motoburger', 'https://www.zomato.com/praha/motoburger-holešovice-praha-7/menu', '16506093'),
('Lokál', 'https://www.zomato.com/praha/lokál-staré-město-praha-1/menu', '16506246'),
('Naše maso', 'https://www.zomato.com/praha/naše-maso-staré-město-praha-1/menu', '16521490'),
('<NAME>', 'https://www.zomato.com/praha/marina-ristorante-staré-město-praha-1/menu',
'16516177'),
('<NAME>', 'https://www.zomato.com/praha/café-louvre-nové-město-praha-1/menu', '16505933'),
('Meat & Greet', 'https://www.zomato.com/praha/meat-greet-nové-město-praha-1/menu', '16522037'),
('Oblaca', 'https://www.zomato.com/oblaca/menu', '16506767'),
('Café Imperial', 'https://www.zomato.com/praha/café-imperial-nové-město-praha-1/menu', '16507072'),
('Mincovna', 'https://www.zomato.com/praha/mincovna-staré-město-praha-1/menu', '16507496'),
('Dish fine burger bistro',
'https://www.zomato.com/praha/dish-fine-burger-bistro-vinohrady-praha-2/menu', '16506649'),
('Pizza Nuova', 'https://www.zomato.com/praha/pizza-nuova-staré-město-praha-1/menu', '16506006'),
(
'Marthy\'s Kitchen', 'https://www.zomato.com/praha/marthys-kitchen-vinohrady-praha-2/menu',
'16512484'),
('Cacao', 'https://www.zomato.com/cacaoprague/menu', '16507326'),
('Tom\'s Burger restaurant',
'https://www.zomato.com/praha/toms-burger-restaurant-vinohrady-praha-2/menu', '16521732'),
('Nejen Bistro', 'https://www.zomato.com/praha/nejen-bistro-karlín-praha-8/menu', '17887543'),
('Coa', 'https://www.zomato.com/praha/coa-staré-město-praha-1/menu', '16506815'),
('Kredenc', 'https://www.zomato.com/praha/kredenc-žižkov-praha-3/menu', '16507522'),
('G<NAME>', 'https://www.zomato.com/praha/gao-den-stodůlky-praha-5/menu', '16507573'),
('Kravín', 'https://www.zomato.com/praha/kravín-vinohrady-praha-2/menu', '16506957'),
('U provaznice', 'https://www.zomato.com/praha/u-provaznice-staré-město-praha-1/menu', '16506342'),
('Šenkovna', 'https://www.zomato.com/praha/šenkovna-nové-město-praha-2/menu', '16505917'),
('Wine food market', 'https://www.zomato.com/praha/wine-food-market-smíchov-praha-5/menu', '16509899'),
('Del<NAME>', 'https://www.zomato.com/praha/deli-viet-nové-město-praha-1/menu', '16521499'),
('Prostor', 'https://www.zomato.com/praha/prostor-smíchov-praha-5/menu', '16507599'),
('Hybernia', 'https://www.zomato.com/praha/hybernia-nové-město-praha-1/menu', '16506886'),
('Odpočívadlo', 'https://www.zomato.com/praha/odpočívadlo-smíchov-praha-5/menu', '16505924'),
('T-Anker', 'https://www.zomato.com/praha/t-anker-staré-město-praha-1/menu', '16506904'),
('Lidová jídelna', 'https://www.zomato.com/praha/lidová-jídelna-nové-město-praha-1/menu', '16506215'),
('Nominanza', 'https://www.zomato.com/praha/nominanza-žižkov-praha-3/menu', '16513818'),
('T.G.I. Friday\'s', 'https://www.zomato.com/praha/t-g-i-fridays-1-smíchov-praha-5/menu', '16506064'),
('Husinec', 'https://www.zomato.com/praha/husinec-nové-město-praha-1/menu', '16521942'),
('<NAME>', 'https://www.zomato.com/praha/jiná-krajina-nové-město-praha-1/menu', '16506026'),
('Le Burger', 'https://www.zomato.com/leburger/menu', '16521794'),
('Lavande', 'https://www.zomato.com/lavande/menu', '16507605'),
('Lavička', 'https://www.zomato.com/praha/lavička-žižkov-praha-3/menu', '16506844'),
('Etnosvět', 'https://www.zomato.com/praha/etnosvět-nové-město-praha-2/menu', '17978808'),
('Pasta Fresca', 'https://www.zomato.com/praha/pasta-fresca-staré-město-praha-1/menu', '16506538'),
('Pivovar Lužiny', 'https://www.zomato.com/praha/pivovar-lužiny-stodůlky-praha-5/menu', '16507520'),
('Gate Restaurant', 'https://www.zomato.com/praha/gate-restaurant-karlín-praha-8/menu', '16506013'),
('Plevel', 'https://www.zomato.com/praha/plevel-vršovice-praha-10/menu', '16506839'),
('Lokál U Bílé Kuželky', 'https://www.zomato.com/praha/lokál-u-bílé-kuželky-malá-strana-praha-1/menu',
'16506245'),
('Lokál Hamburk', 'https://www.zomato.com/praha/lokál-hamburk-karlín-praha-8/menu', '16507478'),
('Pizzerie Václavka', 'https://www.zomato.com/praha/pizzerie-václavka-nové-město-praha-1/menu',
'16506657'),
('Hoffa', 'https://www.zomato.com/praha/hoffa-nové-město-praha-1/menu', '16506782'),
('EMA espresso bar', 'https://www.zomato.com/praha/ema-espresso-bar-nové-město-praha-1/menu',
'16509309'),
('Lehká hlava', 'https://www.zomato.com/praha/lehká-hlava-staré-město-praha-1/menu', '16507457'),
('R<NAME>', 'https://www.zomato.com/praha/radegastovna-perón-smíchov-praha-5/menu',
'16506659'),
('Pastař', 'https://www.zomato.com/pastar/menu', '16521550'),
('Fiesta', 'https://www.zomato.com/praha/fiesta-dejvice-praha-6/menu', '16505937'),
('Guston', 'https://www.zomato.com/praha/guston-žižkov-praha-3/menu', '16522019'),
('Styl & Interier', 'https://www.zomato.com/praha/styl-interier-nové-město-praha-1/menu', '16521792'),
('Café Lounge', 'https://www.zomato.com/praha/café-lounge-malá-strana-praha-5/menu', '16509748'),
('Pivovar Hostivar', 'https://www.zomato.com/praha/pivovar-hostivar-horní-měcholupy-praha-10/menu',
'16506778'),
('Charleston', 'https://www.zomato.com/praha/charleston-karlín-praha-8/menu', '16506909'),
('Česká hospůdka Na Radnici',
'https://www.zomato.com/praha/česká-hospůdka-na-radnici-vysočany-praha-9/menu', '16507416'),
('Novoměstský pivovar', 'https://www.zomato.com/praha/novoměstský-pivovar-nové-město-praha-1/menu',
'16505916'),
('Salaš U Staré cesty', 'https://www.zomato.com/praha/salaš-u-staré-cesty-střešovice-praha-6/menu',
'17837639'),
('Nota Bene', 'https://www.zomato.com/praha/nota-bene-vinohrady-praha-2/menu', '16507130'),
('Modrá Zahrada', 'https://www.zomato.com/praha/modrá-zahrada-staré-město-praha-1/menu', '16505960'),
('500 restaurant', 'https://www.zomato.com/praha/500-restaurant-hradčany-praha-6/menu', '16507125'),
('Na Kačabce - Putyka', 'https://www.zomato.com/praha/na-kačabce-putyka-hostivař-praha-10/menu',
'16520654'),
('Steakhouse Stará Škola', 'https://www.zomato.com/praha/steakhouse-stará-škola-zbraslav-praha-5/menu',
'16507260'),
('Crystal Bar & Restaurant', 'https://www.zomato.com/crystalprague/menu', '16506812'),
('Café Neustadt', 'https://www.zomato.com/praha/café-neustadt-nové-město-praha-2/menu', '16507113'),
('U Balouna', 'https://www.zomato.com/praha/u-balouna-nové-město-praha-1/menu', '16506176'),
('Sweet & Pepper Days', 'https://www.zomato.com/praha/sweet-pepper-days-vinohrady-praha-2/menu',
'16507029'),
('U Sudů', 'https://www.zomato.com/praha/u-sudů-strašnice-praha-10/menu', '16506980'),
('Kozlovna Lidická', 'https://www.zomato.com/praha/kozlovna-lidická-smíchov-praha-5/menu', '16508973'),
('Garage', 'https://www.zomato.com/praha/garage-karlín-praha-8/menu', '17852238'),
('Indian Jewel', 'https://www.zomato.com/praha/indian-jewel-staré-město-praha-1/menu', '16506888'),
('Maitrea', 'https://www.zomato.com/praha/maitrea-1-staré-město-praha-1/menu', '16517165'),
('Mamy', 'https://www.zomato.com/mamy/menu', '16506687'),
('Na Kopci', 'https://www.zomato.com/praha/na-kopci-smíchov-praha-5/menu', '16516755'),
('Restaurant Esprit - Holiday Inn Prague Congress Centre', 'https://www.zomato.com/ESPRIT/menu',
'16506139'),
('Indian by Nature', 'https://www.zomato.com/praha/indian-by-nature-karlín-praha-8/menu', '16506583'),
('Gourmet Pauza', 'https://www.zomato.com/praha/gourmet-pauza-smíchov-praha-5/menu', '17852225'),
('B<NAME>udu', 'https://www.zomato.com/praha/bistro-proti-proudu-karlín-praha-8/menu',
'16521753'),
('<NAME>', 'https://www.zomato.com/praha/krystal-bistro-karlín-praha-8/menu', '16506262'),
('Kofein', 'https://www.zomato.com/praha/kofein-vinohrady-praha-3/menu', '16521516'),
('<NAME>', 'https://www.zomato.com/praha/café-colore-1-nové-město-praha-1/menu', '16506173'),
('<NAME>', 'https://www.zomato.com/praha/café-nod-staré-město-praha-1/menu', '16507445'),
('Engawa sushi bar', 'https://www.zomato.com/praha/engawa-sushi-bar-nové-město-praha-1/menu',
'17978831'),
('Cafe Frida', 'https://www.zomato.com/praha/cafe-frida-karlín-praha-8/menu', '16507051'),
('mamacoffee', 'https://www.zomato.com/praha/mamacoffee-nové-město-praha-1/menu', '16506094'),
('Restaurace Alcron - Radisson Blu Alcron Hotel',
'https://www.zomato.com/praha/restaurace-alcron-radisson-blu-alcron-hotel-nové-město-praha-1/menu',
'16517185'),
('Levitate Restaurant', 'https://www.zomato.com/praha/levitate-restaurant-nové-město-praha-1/menu',
'18549913'),
('Svatá Klára', 'https://www.zomato.com/praha/svatá-klára-troja-praha-7/menu', '16516425'),
('Bellevue', 'https://www.zomato.com/praha/bellevue-staré-město-praha-1/menu', '16511730'),
('Kalina cuisine & vins', 'https://www.zomato.com/praha/kalina-cuisine-vins-staré-město-praha-1/menu',
'16518242'),
('Salabka', 'https://www.zomato.com/praha/salabka-troja-praha-7/menu', '16516450'),
('Artista - Le Palais', 'https://www.zomato.com/praha/artista-le-palais-vinohrady-praha-2/menu',
'16506187'),
('Zvonice', 'https://www.zomato.com/praha/zvonice-nové-město-praha-1/menu', '16507598'),
('Grand Cru', 'https://www.zomato.com/praha/grand-cru-nové-město-praha-1/menu', '16507519'),
('SaSaZu', 'https://www.zomato.com/praha/sasazu-holešovice-praha-7/menu', '16508860'),
('Divinis', 'https://www.zomato.com/praha/divinis-staré-město-praha-1/menu', '16506960'),
('Buddha Bar', 'https://www.zomato.com/praha/buddha-bar-staré-město-praha-1/menu', '16507413'),
('La Finestra in Cucina', 'https://www.zomato.com/praha/la-finestra-in-cucina-staré-město-praha-1/menu',
'16515923'),
(
'Francouzská restaurace Art Nouveau', 'https://www.zomato.com/francouzska-restaurace/menu',
'16508483'),
('Spices Restaurant and Bar - Mandarin Oriental Prague',
'https://www.zomato.com/praha/spices-restaurant-and-bar-mandarin-oriental-prague-malá-strana-praha-1/menu',
'16514160'),
('La Rotonde - Radisson Blu Alcron Hotel',
'https://www.zomato.com/praha/la-rotonde-radisson-blu-alcron-hotel-nové-město-praha-1/menu',
'16517239'),
('Gourmet restaurant - Hotel Hoffmeister',
'https://www.zomato.com/praha/gourmet-restaurant-hotelu-hoffmeister-hradčany-praha-1/menu',
'16506431'),
('Aromi', 'https://www.zomato.com/praha/aromi-1-vinohrady-praha-2/menu', '18156332'),
('Hergetova Cihelna', 'https://www.zomato.com/praha/hergetova-cihelna-malá-strana-praha-1/menu',
'16507441'),
('Augustine Restaurant', 'https://www.zomato.com/praha/augustine-restaurant-malá-strana-praha-1/menu',
'16510720'),
('Be Bop Bar - Radisson Blu Alcron Hotel',
'https://www.zomato.com/praha/be-bop-bar-radisson-blu-alcron-hotel-nové-město-praha-1/menu',
'16517089'),
('El Emir', 'https://www.zomato.com/praha/el-emir-nové-město-praha-1/menu', '16506427'),
('Portfolio restaurant', 'https://www.zomato.com/portfoliorestaurant/menu', '18311827'),
('Restaurace Hradčany - Hotel Savoy',
'https://www.zomato.com/praha/restaurace-hradčany-hotel-savoy-hradčany-praha-1/menu', '16507555'),
('The Sushi Bar', 'https://www.zomato.com/praha/the-sushi-bar-malá-strana-praha-5/menu', '16510998'),
('U Malířů 1543', 'https://www.zomato.com/praha/u-malířů-1543-malá-strana-praha-1/menu', '16511961'),
('Pálffy Palác', 'https://www.zomato.com/praha/pálffy-palác-malá-strana-praha-1/menu', '16508383'),
('Cloud 9 Sky Bar & Lounge - Hilton Prague Hotel',
'https://www.zomato.com/praha/cloud-9-sky-bar-lounge-hilton-prague-hotel-karlín-praha-8/menu',
'16510868'),
('Miyabi', 'https://www.zomato.com/praha/miyabi-nové-město-praha-1/menu', '16507296'),
('Ginger & Fred', 'https://www.zomato.com/praha/ginger-fred-nové-město-praha-2/menu', '16511343'),
('El Toro negro', 'https://www.zomato.com/praha/el-toro-negro-staré-město-praha-1/menu', '16510064'),
('Franz Josef Restaurant - Grand Hotel Bohemia',
'https://www.zomato.com/praha/franz-josef-restaurant-grand-hotel-bohemia-staré-město-praha-1/menu',
'16509388'),
('Asian Temple', 'https://www.zomato.com/praha/asian-temple-staré-město-praha-1/menu', '18163248'),
('Chateau St. Havel', 'https://www.zomato.com/praha/chateau-st-havel-1-krč-praha-4/menu', '16516941'),
('New York Café - Boscolo Prague',
'https://www.zomato.com/praha/new-york-café-boscolo-prague-nové-město-praha-1/menu', '16510273'),
('VINOdiVINO', 'https://www.zomato.com/praha/vinodivino-staré-město-praha-1/menu', '16506304'),
('ART&FOOD Had', 'https://www.zomato.com/ARTFOODHad/menu', '16514146'),
('Pekařství a cukrářství Helena Šmejkalová',
'https://www.zomato.com/praha/pekařství-a-cukrářství-helena-šmejkalová-vršovice-praha-10/menu',
'16516569'),
('C<NAME>', 'https://www.zomato.com/praha/café-savoy-malá-strana-praha-5/menu', '16511100'),
('Palanda', 'https://www.zomato.com/praha/palanda-nové-město-praha-1/menu', '16507043'),
('Delmart', 'https://www.zomato.com/praha/delmart-smíchov-praha-5/menu', '16521798'),
('Vinohradský parlament', 'https://www.zomato.com/praha/vinohradský-parlament-vinohrady-praha-2/menu',
'16510163'),
('Bejzment', 'https://www.zomato.com/praha/bejzment-smíchov-praha-5/menu', '16508716'),
('Monolok', 'https://www.zomato.com/praha/monolok-vinohrady-praha-2/menu', '16521454'),
('The Farm', 'https://www.zomato.com/praha/the-farm-bubeneč-praha-7/menu', '16509642'),
('IF Café', 'https://www.zomato.com/praha/if-café-1-vinohrady-praha-2/menu', '16521743'),
('Můj šálek kávy', 'https://www.zomato.com/praha/můj-šálek-kávy-karlín-praha-8/menu', '16509186'),
('Cafe ~ cafe', 'https://www.zomato.com/praha/cafe-cafe-staré-město-praha-1/menu', '16510131'),
('Bruxx', 'https://www.zomato.com/praha/bruxx-vinohrady-praha-2/menu', '16510160'),
('Angelato', 'https://www.zomato.com/praha/angelato-malá-strana-praha-1/menu', '16511989'),
('Hard Rock Cafe', 'https://www.zomato.com/hardrock/menu', '16510644'),
('Kandelábr', 'https://www.zomato.com/praha/kandelábr-nusle-praha-4/menu', '16506739'),
('Potrefená husa', 'https://www.zomato.com/praha/potrefená-husa-1-nové-město-praha-1/menu', '16511008'),
('Místo', 'https://www.zomato.com/praha/místo-dejvice-praha-6/menu', '17984486'),
('Barabizna', 'https://www.zomato.com/praha/barabizna-zbraslav-praha-5/menu', '16519949'),
('Ordr', 'https://www.zomato.com/praha/ordr-nové-město-praha-1/menu', '18355488'),
('Želva beers & burgers', 'https://www.zomato.com/praha/želva-beers-burgers-žižkov-praha-3/menu',
'18275803'),
('La Bohème Café', 'https://www.zomato.com/praha/la-bohème-café-vinohrady-praha-2/menu', '16510432'),
('Stereo', 'https://www.zomato.com/praha/stereo-bubeneč-praha-7/menu', '16507141'),
('Modrý zub', 'https://www.zomato.com/praha/modrý-zub-smíchov-praha-5/menu', '16521678'),
('<NAME>', 'https://www.zomato.com/praha/puro-gelato-nové-město-praha-2/menu', '16521994'),
('Nebe Cocktail & Music Club',
'https://www.zomato.com/praha/nebe-cocktail-music-club-nové-město-praha-1/menu', '16516184'),
('James Dean Prague Restaurant', 'https://www.zomato.com/praha/james-dean-staré-město-praha-1/menu',
'16517681'),
('Eska', 'https://www.zomato.com/praha/eska-karlín-praha-8/menu', '18163255'),
('Y<NAME>', 'https://www.zomato.com/praha/yam-yam-nusle-praha-4/menu', '16508845'),
('U Fleků', 'https://www.zomato.com/praha/u-fleků-nové-město-praha-1/menu', '16507333'),
('Angelato', 'https://www.zomato.com/praha/angelato-staré-město-praha-1/menu', '16510167'),
(
'Lokál U Zavadilů', 'https://www.zomato.com/praha/lokál-u-zavadilů-kunratice-praha-4/menu',
'16506696'),
('Sic<NAME>', 'https://www.zomato.com/praha/sicily-café-nové-město-praha-1/menu', '16506438'),
('The Tavern', 'https://www.zomato.com/praha/the-tavern-vinohrady-praha-2/menu', '16508596'),
('P<NAME>', 'https://www.zomato.com/praha/pivovar-národní-nové-město-praha-1/menu', '17830788'),
('<NAME>', 'https://www.zomato.com/praha/kavárna-slavia-staré-město-praha-1/menu', '16510882'),
('Bombay Express', 'https://www.zomato.com/praha/bombay-express-žižkov-praha-3/menu', '16521752'))
for restaurant in data:
name = restaurant[0]
menu_url = restaurant[1]
zomato_id = restaurant[2]
restaurant_model = Restaurant(name=name, menu_url=menu_url)
restaurant_model.save()
scrapper_config = RestaurantScraperConfig(restaurant=restaurant_model, menu_parser='daily_menu.Zomato',
parser_parameters=zomato_id, next_visit=datetime.today(),
next_visit_interval=1
)
scrapper_config.save()
|
StarcoderdataPython
|
8172967
|
import numpy as np
counter = 15
for i in range(1, counter):
if i == 10:
break
|
StarcoderdataPython
|
3250631
|
<filename>subscribers/migrations/0004_subscriptionrequest_token.py
# Generated by Django 2.1.4 on 2018-12-17 23:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('subscribers', '0003_auto_20181216_2144'),
]
operations = [
migrations.AddField(
model_name='subscriptionrequest',
name='token',
field=models.TextField(default='', max_length=125),
preserve_default=False,
),
]
|
StarcoderdataPython
|
12861484
|
import djclick as click
from django.contrib.auth.models import User, Group
from django.db import transaction
from django.core.management.base import CommandError
class DryRunFinished(Exception):
pass
def get_or_create_users(email_addresses):
users = []
for email in email_addresses:
if not email:
continue
try:
user = User.objects.get(email=email)
except User.DoesNotExist:
user = User.objects.create_user(
username=email.split('@')[0],
email=email
)
users.append(user)
return users
def add_users_to_group(group, users):
for u in users:
group.user_set.add(u)
group.save()
@click.command()
@click.argument('user_file', type=click.File('r'))
@click.option('--group', 'groupname', type=click.STRING,
help='Name of group to which all users should be added')
@click.option('--dryrun', default=False, is_flag=True,
help='If set, no changes will be made to the database')
def command(user_file, groupname, dryrun):
    '''
    Bulk creates users from email addresses in the specified text file,
    which should contain one email address per line.
    If the optional "--group <GROUPNAME>" argument is specified, then all the
    users (either found or created) are added to the matching group.
    '''
    # Default so the later "if group:" check works when --group is not given.
    group = None
    if dryrun:
        click.echo('Starting dry run (no database records will be modified).')
    if groupname:
try:
group = Group.objects.get(name=groupname)
except Group.DoesNotExist:
raise CommandError(
'"{}" group does not exist. Exiting.'.format(groupname))
email_addresses = [s.strip() for s in user_file.readlines()]
try:
with transaction.atomic():
users = get_or_create_users(email_addresses)
click.echo(
'Created (or found) {} user accounts.'.format(len(users)))
if group:
add_users_to_group(group, users)
click.echo('Added users to "{}" group.'.format(groupname))
if dryrun:
raise DryRunFinished()
except DryRunFinished:
click.echo("Dry run complete.")
|
StarcoderdataPython
|
3438005
|
# (C) Copyright 2020 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
import mimetypes
from .metadata import init_metadata
mimetypes.add_type("application/x-netcdf", ".nc")
mimetypes.add_type("application/x-netcdf", ".nc4")
mimetypes.add_type("application/x-netcdf", ".cdf")
mimetypes.add_type("application/x-netcdf", ".netcdf")
mimetypes.add_type("application/x-grib", ".grib")
mimetypes.add_type("application/x-grib", ".grib1")
mimetypes.add_type("application/x-grib", ".grib2")
def initialise():
init_metadata()
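
# Hedged usage sketch (not part of the original module): once this module has
# been imported, the registrations above make the standard library resolve the
# extensions, e.g. (file names are made up):
#   mimetypes.guess_type("era5_2020.grib2")  -> ('application/x-grib', None)
#   mimetypes.guess_type("analysis.nc")      -> ('application/x-netcdf', None)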
|
StarcoderdataPython
|
8108953
|
"""
Module defines REST API methods and their handles.
Implementation of handles is in corresponding modules, not here.
"""
from __future__ import print_function, division
import cherrypy
from WMCore.Configuration import Configuration
from WMCore.REST.Server import RESTApi
from WMCore.REST.Services import ProcessMatrix
from WMCore.ReqMgr.ReqMgrCouch import ReqMgrCouch
from WMCore.ReqMgr.Service.Auxiliary import (Info, ReqMgrConfigData, PermissionsConfig, CMSSWVersions,
WMAgentConfig, CampaignConfig, UnifiedConfig, TransferInfo)
from WMCore.ReqMgr.Service.RequestAdditionalInfo import (RequestSpec,
WorkloadConfig, WorkloadSplitting)
from WMCore.ReqMgr.Service.Request import Request, RequestStatus, RequestType
from WMCore.ReqMgr.Service.WMStatsInfo import WMStatsInfo
class IndividualCouchManager(object):
"""
Wrapper for the database API object, such that it's *not* shared
among all the Rest APIs.
"""
def __init__(self, config):
self.db_handler = ReqMgrCouch(config)
class RestApiHub(RESTApi):
"""
Server object for REST data access API.
"""
def __init__(self, app, config, mount):
"""
:arg app: reference to application object; passed to all entities.
:arg config: reference to configuration; passed to all entities.
:arg str mount: API URL mount point; passed to all entities."""
RESTApi.__init__(self, app, config, mount)
cherrypy.log("ReqMgr entire configuration:\n%s" % Configuration.getInstance())
cherrypy.log("ReqMgr REST hub configuration subset:\n%s" % config)
# Makes raw format as default
# self.formats.insert(0, ('application/raw', RawFormat()))
self._add({"about": Info(app, IndividualCouchManager(config), config, mount),
"info": Info(app, IndividualCouchManager(config), config, mount),
"app_config": ReqMgrConfigData(app, IndividualCouchManager(config), config, mount),
"request": Request(app, IndividualCouchManager(config), config, mount),
"cmsswversions": CMSSWVersions(app, IndividualCouchManager(config), config, mount),
"wmagentconfig": WMAgentConfig(app, IndividualCouchManager(config), config, mount),
"permissions": PermissionsConfig(app, IndividualCouchManager(config), config, mount),
"campaignconfig": CampaignConfig(app, IndividualCouchManager(config), config, mount),
"unifiedconfig": UnifiedConfig(app, IndividualCouchManager(config), config, mount),
"transferinfo": TransferInfo(app, IndividualCouchManager(config), config, mount),
"status": RequestStatus(app, IndividualCouchManager(config), config, mount),
"type": RequestType(app, IndividualCouchManager(config), config, mount),
"spec_template": RequestSpec(app, IndividualCouchManager(config), config, mount),
"workload_config": WorkloadConfig(app, IndividualCouchManager(config), config, mount),
"splitting": WorkloadSplitting(app, IndividualCouchManager(config), config, mount),
"wmstats_info": WMStatsInfo(app, IndividualCouchManager(config), config, mount),
"proc_status": ProcessMatrix(app, self, config, mount)
})
|
StarcoderdataPython
|
11375916
|
<gh_stars>0
class Converter:
def __init__(self, num):
self.num = num
self.lst = []
def binary(self):
"""Function to return a number decimal in binary"""
aux = self.num
while aux > 0:
self.lst.append(str(aux % 2))
aux //= 2
return ''.join(self.sort_list()).zfill(8)
def sort_list(self):
return self.lst[::-1]
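
# Hedged usage sketch (not part of the original snippet):
if __name__ == '__main__':
    print(Converter(5).binary())   # 00000101
    print(Converter(12).binary())  # 00001100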
|
StarcoderdataPython
|
9746756
|
import os
from xml.etree import ElementTree
import pytest
from . import Dictionary, YaTranslateException
class TestDictionary:
def setup_class(self):
self.api_key = os.environ.get("API_KEY_YA_DICT")
assert self.api_key
self.v_json = Dictionary(self.api_key)
assert self.v_json
assert self.v_json.ok
self.v_xml = Dictionary(self.api_key, xml=True)
assert self.v_xml
assert self.v_xml.ok
def test_get_langs_json(self):
langs = self.v_json.get_langs()
assert langs
assert isinstance(langs, list)
assert self.v_json._cache_langs
self.v_json._cache_langs = None
langs = self.v_json.get_langs()
assert langs
assert self.v_json._cache_langs
def test_get_langs_xml(self):
langs = self.v_xml.get_langs()
assert langs
assert isinstance(langs, list)
assert self.v_xml._cache_langs
self.v_xml._cache_langs = None
langs = self.v_xml.get_langs()
assert langs
assert self.v_xml._cache_langs
def test_get_langs_jsonb(self) -> NotImplemented:
return NotImplemented
def test_ok(self):
assert self.v_json.ok
assert self.v_xml.ok
def test_lookup_json(self):
with pytest.raises(YaTranslateException) as excinfo:
assert excinfo
__ = self.v_json.lookup("hello", "cpp")
definition = self.v_json.lookup("hello", 'en-en')
assert definition
assert isinstance(definition, dict)
assert 'head' not in definition
assert 'def' in definition
def test_lookup_xml(self):
with pytest.raises(YaTranslateException) as excinfo:
assert excinfo
__ = self.v_xml.lookup("hello", 'cpp')
definition = self.v_xml.lookup("hello", 'en-ru')
assert definition
assert isinstance(definition, ElementTree.Element)
def test_lookup_jsonb(self) -> NotImplemented:
return NotImplemented
def test_definitions(self):
with pytest.raises(YaTranslateException) as excinfo:
assert excinfo
__ = self.v_json.definitions("hello", "cpp")
with pytest.raises(ValueError) as excinfo:
assert excinfo
__ = self.v_json.definitions("hello", 'en-en',
callback=lambda: None)
definition = self.v_json.definitions("hello", 'en-en')
assert definition
assert isinstance(definition, list)
definition = self.v_xml.definitions("hello", 'en-en')
assert definition is NotImplemented
|
StarcoderdataPython
|
6651387
|
from django.urls import path
from . import views
app_name = 'grading'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<int:test_id>/question/<int:pk>', views.QuestionView.as_view(), name='question'),
path('<int:test_id>/question/<int:question_id>/submit', views.submit, name='submit'),
path('<int:pk>/result/', views.ResultView.as_view(), name='result'),
]
|
StarcoderdataPython
|
185665
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2017, JK
# Full license can be found in License.md
# -----------------------------------------------------------------------------
import os
import sys
from setuptools import setup
import subprocess
# get home directory
home_dir = os.path.expanduser('~')
# get name of virtual environment
env_name = os.path.split(sys.prefix)[-1]
# generate path for fortran model files
here = os.path.abspath(os.path.dirname(__file__))
fortran_path = os.path.join(here, 'sami2py', 'fortran')
# generate path for test files
test_data_path = os.path.join(here, 'sami2py', 'tests', 'test_data')
# generate path to store test and fortran file paths
file_path = os.path.join(home_dir, '.sami2py', env_name)
# generate path to ExB coefficients
exb_path = os.path.join(fortran_path, 'exb.inp')
# %% build
if not os.path.isfile(os.path.join(fortran_path, 'sami2py.x')):
try: # py27 does not have shutil.which()
cmd = ['gfortran', '-fno-range-check', '-fno-automatic',
'-ffixed-line-length-none', '-o', 'sami2py.x']
src = ['nrlmsise00_modified.f', 'grid-1.00.f', 'sami2py-1.00.f',
'hwm93.f', 'hwm07e_modified.f90', 'apexcord.f90', 'hwm14.f90']
subprocess.call(cmd + src, cwd=fortran_path)
except OSError:
pass
if not os.path.isfile(os.path.join(fortran_path, 'sami2py.x')):
print('\nYou will need to compile the fortran files. Try\n'
'$ make -C {} compile\n'.format(os.path.join('sami2py', 'fortran')),
file=sys.stderr)
if not os.path.isdir(file_path):
os.makedirs(file_path)
print('Created {} directory to store settings.'.format(file_path))
if not os.path.isfile(exb_path):
zero_list = ["0 0"] * 10
with open(exb_path, 'w') as exb:
exb.writelines("%s\n" % line for line in zero_list)
with open(os.path.join(file_path, 'fortran_path.txt'), 'w+') as fout:
fout.write(fortran_path)
with open(os.path.join(file_path, 'test_data_path.txt'), 'w+') as fout:
fout.write(test_data_path)
setup()
|
StarcoderdataPython
|
11272995
|
<filename>trafilatura/filters.py
"""
Functions related to content filtering, mostly duplicate detection and language
detection.
"""
import logging
import re
# language detection
try:
import cld3
LANGID_FLAG = True
except ImportError:
LANGID_FLAG = False
from .lru import LRUCache
from .settings import LRU_SIZE, MAX_REPETITIONS, MIN_DUPLCHECK_SIZE
from .utils import trim #, remove_control_characters
LOGGER = logging.getLogger(__name__)
LRU_TEST = LRUCache(maxsize=LRU_SIZE)
RE_FILTER = re.compile(r'\W*(Drucken|E-?Mail|Facebook|Flipboard|Google|Instagram|Linkedin|Mail|PDF|Pinterest|Pocket|Print|Reddit|Twitter|Whatsapp|Xing)$', flags=re.IGNORECASE)
# COMMENTS_BLACKLIST = ('( Abmelden / Ändern )') # Fill in your details below|Trage deine Daten unten|Kommentar verfassen|Bitte logge dich|Hinterlasse einen Kommentar| to %s| mit %s)
def put_in_cache(teststring):
'''Implement LRU cache'''
cacheval = LRU_TEST.get(teststring)
# if the value is already defined
if cacheval != -1:
# print(cacheval, teststring[:10] + '...')
LRU_TEST.put(teststring, cacheval + 1)
else:
# print(0, teststring[:10] + '...')
LRU_TEST.put(teststring, 1)
def duplicate_test(element):
'''Check for duplicate text with LRU cache'''
teststring = trim(' '.join(element.itertext()))
# teststring = element.text
if len(teststring) > MIN_DUPLCHECK_SIZE:
# retrieve value from cache
cacheval = LRU_TEST.get(teststring)
if cacheval > MAX_REPETITIONS: # non-existent key will return -1
LRU_TEST.put(teststring, cacheval + 1)
return True
put_in_cache(teststring)
return False
def language_filter(temp_text, temp_comments, target_language, docmeta):
'''Run external component (if installed) for language identification'''
# sanity check on language
if target_language is not None:
if LANGID_FLAG is True:
# comments
if len(temp_comments) > len(temp_text):
langtest = temp_comments
# default
else:
langtest = temp_text
result = cld3.get_language(langtest)
if result.language != target_language:
LOGGER.warning('wrong language: %s %s %s', result, docmeta['id'], docmeta['url'])
return True
else:
LOGGER.warning('Detector not installed, no language detection run')
return False
def textfilter(element):
'''Filter out unwanted text'''
# print('#', element.text)
if element.text is None and element.tail is not None:
testtext = element.tail
else:
testtext = element.text
if text_chars_test(testtext) is False:
return True
for line in testtext.splitlines():
#if len(line) <= 5:
# continue
if RE_FILTER.match(line):
return True
return False
def text_chars_test(string):
'''Determine if a string is only composed of spaces and/or control characters'''
if string is None or string.isspace(): # or remove_control_characters(string).isspace():
return False
return True
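
# Hedged usage sketch (not part of the original module); the example strings
# are made up. text_chars_test() rejects empty or whitespace-only strings and
# RE_FILTER catches isolated share/print boilerplate lines:
#   text_chars_test("   ")                         -> False
#   text_chars_test("Ein echter Satz.")            -> True
#   bool(RE_FILTER.match("Drucken"))               -> True
#   bool(RE_FILTER.match("Ein Satz mit Inhalt."))  -> False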
|
StarcoderdataPython
|
6547418
|
<gh_stars>0
from pyserverpilot.models.basemodel import BaseModel
class Server(BaseModel):
id: str
name: str
plan: str
autoupdates: str
firewall: str
deny_unknown_domains: str
available_runtimes: list
lastaddress: str
lastconn: int
datecreated: int
|
StarcoderdataPython
|
9664075
|
# Generated by Django 3.1.12 on 2021-07-12 19:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('survey', '0003_lastpage_description'),
]
operations = [
migrations.AddField(
model_name='lastpage',
name='whatsapp_button',
field=models.CharField(default=1, max_length=250),
preserve_default=False,
),
]
|
StarcoderdataPython
|
11280969
|
<gh_stars>1-10
for i in range(10):
print(i, end="")
print()
print(i)
print(2 + 3 * 5.)
print(1//2*3)
x= 11
y = 4
x = x % y
x = x % y
y = y % x
print(y)
print(1/2+3//3+4**2)
x = 2
y = 4
x = x/y
print(y/x)
x=1
y=2
z=x
x=y
y=z
print(x,y)
#x = 2 // 4
#y = 4 // x
|
StarcoderdataPython
|
4847313
|
<gh_stars>1-10
from gym_flexlab.envs import flexlab_env
import os
from datetime import timedelta
import pandas as pd
import numpy as np
import pytz
import random
import time
from drllib import dqn_functions
from drllib import models, utils
import torch
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
from tensorboardX import SummaryWriter
import time
#LEARNING_RATE = 0.01
#BATCH_SIZE = 8
EPSILON_START = 1.0
EPSILON_STOP = 0.02
EPSILON_STEPS = 5000
REPLAY_BUFFER = 50000
GAMMA = 0.99
BATCH_SIZE = 10 #%64
LEARNING_RATE = 1e-4
REPLAY_SIZE = 200000
REPLAY_INITIAL = 36000
TEST_ITERS = 10# 36000
CUDA = False
RunName = "TestDQN"
class DQN(nn.Module):
def __init__(self, obs_size, act_size):
super(DQN, self).__init__()
self.net = nn.Sequential(
nn.Linear(obs_size, 400),
nn.ReLU(),
nn.Linear(400, 300),
nn.ReLU(),
nn.Linear(300, act_size),
nn.Tanh()
)
def forward(self, x):
return self.net(x)
def calc_target(net, local_reward, next_state):
if next_state is None:
return local_reward
state_v = torch.tensor([next_state], dtype=torch.float32)
next_q_v = net(state_v)
best_q = next_q_v.max(dim=1)[0].item()
return local_reward + GAMMA * best_q
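    # Illustrative arithmetic (not from the source): with GAMMA = 0.99, a local
    # reward of 1.0 and a best next-state Q-value of 2.0, the target above is
    # 1.0 + 0.99 * 2.0 = 2.98.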
def test_net(net, env, writer, frame_idx, count=1, device="cpu"):
# print(frame_idx)
# shift = frame_idx + 50000
dtype=torch.bool
rewards = 0.0
steps = 0
for _ in range(count):
obs = env.reset()
# t_idx = 0
while True:
# t_idx += 1
obs_v = utils.float32_preprocessor([obs]).to(device)
print("before")
#net = DQN(env.observation_space.shape[0], env.action_space.shape[0]).to(device)
mu_v = net(obs_v)
print("after")
action = mu_v.squeeze(dim=0).data.cpu().numpy()
action = np.clip(action, -1, 1)
# # display actions
# action_scaled = env.scale_action(action)
# writer.add_scalar("Actions/SaFr", action_scaled[0], t_idx + shift)
# writer.add_scalar("Actions/SaTemp", action_scaled[1], t_idx + shift)
# writer.add_scalar("Actions/CwTemp", action_scaled[2], t_idx + shift)
# writer.add_scalar("Actions/HwTemp", action_scaled[3], t_idx + shift)
# writer.add_scalar("Actions/Shade", action_scaled[4], t_idx + shift)
# writer.add_scalar("Actions/Lig_input", action_scaled[5], t_idx + shift)
# writer.add_scalar("Actions/P_ctrl", action_scaled[6], t_idx + shift)
# writer.add_scalar("Actions/PV", action_scaled[7], t_idx + shift)
# # display observation
# obs_scaled = env.scale_obs(obs)
# ZoneTempA_t = obs_scaled[3]
# writer.add_scalar("Temp/OutTemp_t", obs_scaled[0], t_idx + shift)
# writer.add_scalar("Temp/OutRH_t", obs_scaled[1], t_idx + shift)
# writer.add_scalar("Temp/OutSI_t", obs_scaled[1], t_idx + shift)
# writer.add_scalar("Temp/ZoneTempA_t", obs_scaled[3], t_idx + shift)
# writer.add_scalar("LigA_t", obs_scaled[4], t_idx + shift)
obs, reward, done, _ = env.step(action)
# display change
# ZoneTempA_t_1 = ZoneTempA_t - obs[3]
# writer.add_scalar("ZoneTempA_t_1", ZoneTempA_t_1, t_idx + shift)
# display reward
# writer.add_scalar("reward_t_test", reward, t_idx + shift)
# writer.add_scalar("rewards_t_test", rewards, t_idx + shift)
rewards += reward
steps += 1
if done:
break
return rewards / count, steps / count
def pred_net(net, env, writer, device="cpu"):
buffer = flexlab_env.ExperienceBuffer(env.obs_names, env.action_names)
rewards = 0.0
steps = 0
obs = env.reset()
while True:
obs_v = utils.float32_preprocessor([obs]).to(device)
mu_v = net(obs_v)
action = mu_v.squeeze(dim=0).data.cpu().numpy()
action = np.clip(action, -1, 1)
obs, reward, done, _ = env.step(action)
action_scaled = env.scale_action(action)
obs_scaled = env.scale_obs(obs)
buffer.append(action_scaled,obs_scaled,reward)
rewards += reward
steps += 1
if done:
break
actions_df = buffer.action_data()
obs_df = buffer.obs_data()
reward_df = buffer.reward_data()
actions_df.to_csv('preds/actions_df.csv',index=False)
obs_df.to_csv('preds/obs_df.csv',index=False)
reward_df.to_csv('preds/reward_df.csv',index=False)
return rewards, steps
if __name__ == "__main__":
device = torch.device("cuda" if CUDA else "cpu")
save_path = os.path.join("saves", "dqn-" + RunName)
if not os.path.exists(save_path):
os.makedirs(save_path)
print("STARTING PROGRAM")
env = flexlab_env.FlexLabEnv(envelope_path = 'fmu_models/FlexlabXR_fmu_2015.fmu',
battery_path = 'fmu_models/battery.fmu',
pv_path = 'fmu_models/PV_2015.fmu',
eprice_path = 'e_tariffs/e_price_2015.csv',
chiller_COP = 3.0,
boiler_COP = 0.95,
sim_year = 2015,
tz_name = 'America/Los_Angeles',
sim_days = 365,
step_size = 900)
test_env = flexlab_env.FlexLabEnv(envelope_path = 'fmu_models/FlexlabXR_fmu_2017.fmu',
battery_path = 'fmu_models/battery.fmu',
pv_path = 'fmu_models/PV_2017.fmu',
eprice_path = 'e_tariffs/e_price_2017.csv',
chiller_COP = 3.0,
boiler_COP = 0.95,
sim_year = 2017,
tz_name = 'America/Los_Angeles',
sim_days = 365,
step_size = 900)
writer = SummaryWriter(comment="-dqn_" + RunName)
start_time=time.time()
print("&&&&&&&&&&&&&&&&&&&&&action_space")
print(env.action_space.shape[0])
net = dqn_functions.DQNActor(env.observation_space.shape[0], env.action_space.shape[0]).to(device)
#target_q = dqn_functions.DQNActor(env.observation_space.shape[0], env.action_space.shape[0]).to(device)
#act_net = models.DDPGActor(env.observation_space.shape[0], env.action_space.shape[0]).to(device)
print(net)
target_q = utils.TargetNet(net)
selector = dqn_functions.EpsilonGreedyActionSelector(epsilon=EPSILON_START)
agent = dqn_functions.AgentDQN(net, selector, preprocessor=dqn_functions.float32_preprocessor)
exp_source = utils.ExperienceSourceFirstLast(env, agent, gamma=GAMMA, steps_count=1)
replay_buffer = utils.ExperienceReplayBuffer(exp_source, buffer_size=REPLAY_BUFFER)
optimizer = optim.Adam(net.parameters(), lr=LEARNING_RATE)
mse_loss = nn.MSELoss()
print(mse_loss)
total_rewards = []
step_idx = 0
done_episodes = 0
best_reward=None
print("Before loop")
while True:
#print("In true loop")
step_idx += 1
selector.epsilon = max(EPSILON_STOP, EPSILON_START - step_idx / EPSILON_STEPS)
replay_buffer.populate(1)
if len(replay_buffer) < BATCH_SIZE:
continue
# sample batch
batch = replay_buffer.sample(BATCH_SIZE)
batch_states = [exp.state for exp in batch]
batch_actions = [exp.action for exp in batch]
batch_targets = [calc_target(net, exp.reward, exp.last_state)
for exp in batch]
# train
optimizer.zero_grad()
states_v = torch.FloatTensor(batch_states)
net_q_v = net(states_v)
target_q = net_q_v.data.numpy().copy()
target_q[range(BATCH_SIZE), batch_actions] = batch_targets
target_q_v = torch.tensor(target_q)
loss_v = mse_loss(net_q_v, target_q_v)
loss_v.backward()
optimizer.step()
#print(batch_actions)# actions are changing all the time
# handle new rewards
new_rewards = exp_source.pop_total_rewards()
if new_rewards:
print("new rewards")
print(new_rewards)
print("in loop")
done_episodes += 1
reward = new_rewards[0]
print("reward %s", reward)
total_rewards.append(reward)
mean_rewards = float(np.mean(total_rewards[-100:]))
print("%d: reward: %6.2f, mean_100: %6.2f, epsilon: %.2f, episodes: %d" % (
step_idx, reward, mean_rewards, selector.epsilon, done_episodes))
writer.add_scalar("reward", reward, step_idx)
writer.add_scalar("reward_100", mean_rewards, step_idx)
writer.add_scalar("epsilon", selector.epsilon, step_idx)
writer.add_scalar("episodes", done_episodes, step_idx)
if mean_rewards > 195:
print("Solved in %d steps and %d episodes!" % (step_idx, done_episodes))
break
#print time for each episode run
print("--- %s seconds ---" % (time.time() - start_time))
newreward=1.2345
#adding the test code native here
################
if step_idx % ((env.n_steps -1) * TEST_ITERS) == 0:
#net.load_state_dict(target_q.state_dict())
rewardsq, stepsq = pred_net(net, test_env, writer, device=device)
####################
#testing the dqn
if step_idx % ((env.n_steps -1) * TEST_ITERS) == 0:
ts = time.time()
print("*********************In saving loop")
#name = "best_%+.3f_%d.dat" % (newreward, step_idx)
#fname = os.path.join(save_path, name)
#torch.save(target_q, fname)
#print("saved")
#print(fname)
rewards, steps = test_net(net, test_env, writer, step_idx, device=device)
print("Test done in %.2f sec, reward %.3f, steps %d" % (
time.time() - ts, rewards, steps))
print("test_reward %s, %s" %(rewards, step_idx))
print("test_steps %s, %s" %(steps, step_idx))
print("best_reward and rewards")
print(best_reward)
print(rewards)
if best_reward is None or best_reward < rewards:
print("saving actual model")
#if best_reward is not None:
#best_reward=float(best_reward)
#rewards=float(rewards)
#print("Best reward updated: %.3f -> %.3f" % (best_reward, rewards))
name = "best_%d.dat" % (step_idx)
fname = os.path.join(save_path, name)
torch.save(net.state_dict(), fname)
best_reward = rewards
print(fname)
writer.close()
|
StarcoderdataPython
|
3501127
|
<filename>brax/envs/wrappers.py<gh_stars>0
# Copyright 2021 The Brax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrappers for Brax and Gym env."""
from typing import ClassVar, Optional
from brax import jumpy as jp
from brax.envs import env as brax_env
import gym
from gym import spaces
from gym.vector import utils
import jax
class VectorWrapper(brax_env.Wrapper):
"""Vectorizes Brax env."""
def __init__(self, env: brax_env.Env, batch_size: int):
super().__init__(env)
self.batch_size = batch_size
def reset(self, rng: jp.ndarray) -> brax_env.State:
rng = jp.random_split(rng, self.batch_size)
return jp.vmap(self.env.reset)(rng)
def step(self, state: brax_env.State, action: jp.ndarray) -> brax_env.State:
return jp.vmap(self.env.step)(state, action)
class EpisodeWrapper(brax_env.Wrapper):
"""Maintains episode step count and sets done at episode end."""
def __init__(self, env: brax_env.Env, episode_length: int,
action_repeat: int):
super().__init__(env)
if hasattr(self.unwrapped, 'sys'):
self.unwrapped.sys.config.dt *= action_repeat
self.unwrapped.sys.config.substeps *= action_repeat
self.episode_length = episode_length
self.action_repeat = action_repeat
def reset(self, rng: jp.ndarray) -> brax_env.State:
state = self.env.reset(rng)
state.info['steps'] = jp.zeros(())
state.info['truncation'] = jp.zeros(())
return state
def step(self, state: brax_env.State, action: jp.ndarray) -> brax_env.State:
state = self.env.step(state, action)
steps = state.info['steps'] + self.action_repeat
one = jp.ones_like(state.done)
zero = jp.zeros_like(state.done)
done = jp.where(steps >= self.episode_length, one, state.done)
state.info['truncation'] = jp.where(steps >= self.episode_length,
1 - state.done, zero)
state.info['steps'] = steps
return state.replace(done=done)
class AutoResetWrapper(brax_env.Wrapper):
"""Automatically resets Brax envs that are done."""
def reset(self, rng: jp.ndarray) -> brax_env.State:
state = self.env.reset(rng)
state.info['first_qp'] = state.qp
state.info['first_obs'] = state.obs
return state
def step(self, state: brax_env.State, action: jp.ndarray) -> brax_env.State:
if 'steps' in state.info:
steps = state.info['steps']
steps = jp.where(state.done, jp.zeros_like(steps), steps)
state.info.update(steps=steps)
state = state.replace(done=jp.zeros_like(state.done))
state = self.env.step(state, action)
def where_done(x, y):
done = state.done
if done.shape:
done = jp.reshape(done, [x.shape[0]] + [1] * (len(x.shape) - 1)) # type: ignore
return jp.where(done, x, y)
qp = jp.tree_map(where_done, state.info['first_qp'], state.qp)
obs = where_done(state.info['first_obs'], state.obs)
return state.replace(qp=qp, obs=obs)
class GymWrapper(gym.Env):
"""A wrapper that converts Brax Env to one that follows Gym API."""
# Flag that prevents `gym.register` from misinterpreting the `_step` and
# `_reset` as signs of a deprecated gym Env API.
_gym_disable_underscore_compat: ClassVar[bool] = True
def __init__(self,
env: brax_env.Env,
seed: int = 0,
backend: Optional[str] = None):
self._env = env
self.seed(seed)
self.backend = backend
self._state = None
obs_high = jp.inf * jp.ones(self._env.observation_size, dtype='float32')
self.observation_space = spaces.Box(-obs_high, obs_high, dtype='float32')
action_high = jp.ones(self._env.action_size, dtype='float32')
self.action_space = spaces.Box(-action_high, action_high, dtype='float32')
def reset(key):
key1, key2 = jp.random_split(key)
state = self._env.reset(key2)
return state, state.obs, key1
self._reset = jax.jit(reset, backend=self.backend)
def step(state, action):
state = self._env.step(state, action)
return state, state.obs, state.reward, state.done
self._step = jax.jit(step, backend=self.backend)
def reset(self):
self._state, obs, self._key = self._reset(self._key)
return obs
def step(self, action):
self._state, obs, reward, done = self._step(self._state, action)
return obs, reward, done, {}
def seed(self, seed: int = 0):
self._key = jax.random.PRNGKey(seed)
def render(self, mode='human'):
# pylint:disable=g-import-not-at-top
from brax.io import image
if mode == 'rgb_array':
sys, qp = self._env.sys, self._state.qp
return image.render_array(sys, qp, 256, 256)
else:
return super().render(mode=mode) # just raise an exception
class VectorGymWrapper(gym.vector.VectorEnv):
"""A wrapper that converts batched Brax Env to one that follows Gym VectorEnv API."""
# Flag that prevents `gym.register` from misinterpreting the `_step` and
# `_reset` as signs of a deprecated gym Env API.
_gym_disable_underscore_compat: ClassVar[bool] = True
def __init__(self,
env: brax_env.Env,
seed: int = 0,
backend: Optional[str] = None):
self._env = env
if not hasattr(self._env, 'batch_size'):
raise ValueError('underlying env must be batched')
self.num_envs = self._env.batch_size
self.seed(seed)
self.backend = backend
self._state = None
obs_high = jp.inf * jp.ones(self._env.observation_size, dtype='float32')
self.single_observation_space = spaces.Box(
-obs_high, obs_high, dtype='float32')
self.observation_space = utils.batch_space(self.single_observation_space,
self.num_envs)
action_high = jp.ones(self._env.action_size, dtype='float32')
self.single_action_space = spaces.Box(
-action_high, action_high, dtype='float32')
self.action_space = utils.batch_space(self.single_action_space,
self.num_envs)
def reset(key):
key1, key2 = jp.random_split(key)
state = self._env.reset(key2)
return state, state.obs, key1
self._reset = jax.jit(reset, backend=self.backend)
def step(state, action):
state = self._env.step(state, action)
return state, state.obs, state.reward, state.done
self._step = jax.jit(step, backend=self.backend)
def reset(self):
self._state, obs, self._key = self._reset(self._key)
return obs
def step(self, action):
self._state, obs, reward, done = self._step(self._state, action)
return obs, reward, done, {}
def seed(self, seed: int = 0):
self._key = jax.random.PRNGKey(seed)
def render(self, mode='human'):
# pylint:disable=g-import-not-at-top
from brax.io import image
if mode == 'rgb_array':
sys = self._env.sys
imgs = []
for i in range(self.num_envs):
qp = jp.take(self._state.qp, i)
imgs.append(image.render_array(sys, qp, 256, 256))
return jp.stack(imgs)
else:
return super().render(mode=mode) # just raise an exception
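
# --- Hedged usage sketch (not part of the original module) -------------------
# Assumes brax is installed and that an environment named 'ant' can be created
# via brax.envs.create; names and shapes below are illustrative.
if __name__ == '__main__':
    from brax import envs
    raw_env = envs.create('ant')            # a brax_env.Env instance
    gym_env = GymWrapper(raw_env, seed=0)   # single-env Gym-style API
    obs = gym_env.reset()
    obs, reward, done, info = gym_env.step(gym_env.action_space.sample())
    print(obs.shape, reward, done)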
|
StarcoderdataPython
|
4964711
|
# -*- coding: utf-8 -*-
"""
The ANN classification method in TDEM
Methods:
MLP: the ANN classification method
"""
__all__ = ['MLP']
from sklearn.neural_network import MLPClassifier # 'tanh',lbfgs,(50,)
from sklearn.metrics import accuracy_score
import numpy as np
def MLP(train_set, test_set, solver='lbfgs', hidden_layer_sizes=(50,), activation='tanh'):
"""the ANN classification algorithm
Parameters
----------
train_set: ndarry
the train set
test_set: ndarry
the test set
solver: str
the optimization of the ANN
hidden_layer_sizes: tuple
the ANN hidden layer sizes
activation: str
the activation of the ANN
    Returns
    -------
    res: dict
        {'accuracy': ..., 'y_pred': ..., 'y_true': ...}: the classification
        accuracy, the predicted labels and the true labels
    """
    clf = MLPClassifier(solver=solver, hidden_layer_sizes=hidden_layer_sizes, activation=activation)
    trainTarget = np.array(train_set)[:, -1]
    testTarget = np.array(test_set)[:, -1]
    # Fit on the feature columns only; the last column holds the class label.
    y_pred = clf.fit(train_set[:, :-1], trainTarget).predict(test_set[:, :-1])
    y_true = testTarget
    tmp_accuracy = accuracy_score(y_true=y_true, y_pred=y_pred)
    res = {'accuracy': tmp_accuracy, 'y_pred': y_pred, 'y_true': y_true}
return res
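
# --- Hedged usage sketch (not part of the original module) -------------------
# Builds a tiny synthetic data set where the last column is the class label,
# as MLP() above expects; sizes and values are made up.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    features = rng.rand(60, 4)
    labels = (features[:, 0] > 0.5).astype(int).reshape(-1, 1)
    data = np.hstack([features, labels])
    result = MLP(train_set=data[:40], test_set=data[40:])
    print(result['accuracy'])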
|
StarcoderdataPython
|
6701161
|
<reponame>apache/geode-native
#!/usr/local/bin/python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import struct
import sys
from dateutil import parser
from client_messages import parse_client_message
from decoder_base import DecoderBase
from message_types import message_types
from numeric_conversion import to_hex_digit
from gnmsg_globals import global_protocol_state
class ClientMessageDecoder(DecoderBase):
def __init__(self, output_queue):
super(ClientMessageDecoder, self).__init__(output_queue)
self.STATE_NEUTRAL_ = 0
self.STATE_FOUND_SECURITY_FOOTER_ = 1
self.send_trace_parts_retriever_ = None
self.send_trace_parser_ = None
self.connection_states_ = {}
self.get_send_trace_parts_functions = [
self.get_send_trace_parts_base,
self.get_send_trace_parts_v911,
]
self.send_trace_parsers = [
self.parse_request_fields_base,
self.parse_request_fields_v911,
]
#
# Native client code believes this is the list of messages that require a security footer.
# We will use this list to verify and report if a message is sent that needs one but doesn't
# have it, since this has been the source of at least one difficult-to-diagnose bug in the
# past. To see the decision-making code that filters on this message list, look at
# ThinClientBaseDM::beforeSendingRequest and TcrMessage::isUserInitiativeOps in geode-native
# C++ code base.
self.message_requires_security_part = [
"ADD_PDX_ENUM",
"ADD_PDX_TYPE",
"CLIENT_READY",
"CLOSE_CONNECTION",
"COMMIT",
"GETCQSTATS_MSG_TYPE",
"GET_CLIENT_PARTITION_ATTRIBUTES",
"GET_CLIENT_PR_METADATA",
"GET_ENTRY",
"GET_FUNCTION_ATTRIBUTES",
"GET_PDX_ENUM_BY_ID",
"GET_PDX_ID_FOR_ENUM",
"GET_PDX_ID_FOR_TYPE",
"GET_PDX_TYPE_BY_ID",
"INVALID",
"MAKE_PRIMARY",
"MONITORCQ_MSG_TYPE",
"PERIODIC_ACK",
"PING",
"REQUEST_EVENT_VALUE",
"ROLLBACK",
"SIZE",
"TX_FAILOVER",
"TX_SYNCHRONIZATION",
"USER_CREDENTIAL_MESSAGE",
]
self.security_trace_expression_ = re.compile(
r"(\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d\.\d+).*([\d|a-f|A-F|x|X]+)\]\s*TcrMessage::addSecurityPart\s*\[(0x[\d|a-f|A-F]*).*length\s*=\s*(\d+)\s*,\s*encrypted\s+ID\s*=\s*([\d|a-f|A-F]+)"
)
self.send_trace_expression_v911_ = re.compile(
r"(\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d\.\d+).*TcrConnection::send:\s*\[([\d|a-f|A-F|x|X]+).*sending request to endpoint.*bytes:\s*([\d| ]+)"
)
self.send_trace_expression_base_ = re.compile(
r"(\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d\.\d+).+:\d+\s+([\d|a-f|A-F|x|X]+)\]\s*TcrConnection::send:\s*\[([\d|a-f|A-F|x|X]+).*sending request to endpoint.*bytes:\s*(.+)"
)
def get_send_trace_parts_v911(self, line, parts):
result = False
match = self.send_trace_expression_v911_.search(line)
if match:
parts.append(parser.parse(match.group(1)))
# TODO: Revisit parsing TID here if we ever see a v9 client log again
parts.append("0")
parts.append(match.group(2))
parts.append(match.group(3))
result = True
return result
def get_send_trace_parts_base(self, line, parts):
result = False
match = self.send_trace_expression_base_.search(line)
if match:
parts.append(parser.parse(match.group(1)))
parts.append(match.group(2))
parts.append(match.group(3))
parts.append(match.group(4))
result = True
return result
def get_send_trace_parts(self, line, parts):
if self.send_trace_parts_retriever_ is not None:
return self.send_trace_parts_retriever_(line, parts)
else:
for retriever in self.get_send_trace_parts_functions:
if retriever(line, parts):
self.send_trace_parts_retriever_ = retriever
self.send_trace_parser_ = self.send_trace_parsers[
self.get_send_trace_parts_functions.index(retriever)
]
return True
else:
return False
def get_add_security_trace_parts(self, line, parts):
result = False
if "addSec" in line:
match = self.security_trace_expression_.search(line)
if match:
parts.append(parser.parse(match.group(1)))
parts.append(match.group(2))
parts.append(match.group(3))
parts.append(match.group(4))
parts.append(match.group(5))
result = True
return result
def decimal_string_to_hex_string(self, byte):
high_nibble = int(int(byte) / 16)
low_nibble = int(byte) % 16
return to_hex_digit[high_nibble] + to_hex_digit[low_nibble]
def format_bytes_as_hex_v911(self, message_bytes):
byte_list = message_bytes.split(" ")
hex_string = ""
for byte in byte_list:
if byte:
hex_string += self.decimal_string_to_hex_string(byte)
return hex_string
def parse_request_fields_v911(self, message_bytes):
hex_message_bytes = self.format_bytes_as_hex_v911(message_bytes)
message_type = message_types[int(hex_message_bytes[0:8], 16)]
message_length = int(hex_message_bytes[8:16], 16)
message_number_of_parts = int(hex_message_bytes[16:24], 16)
message_transaction_id = struct.unpack(
">i", bytes.fromhex(hex_message_bytes[24:32])
)[0]
message_security_flag = (int(hex_message_bytes[32:34], 16) & 0x02) >> 1
return (
message_type,
message_length,
message_number_of_parts,
message_transaction_id,
message_security_flag,
)
def parse_request_fields_base(self, message_bytes):
message_type = message_types[int(message_bytes[0:8], 16)]
message_length = int(message_bytes[8:16], 16)
message_number_of_parts = int(message_bytes[16:24], 16)
message_transaction_id = struct.unpack(
">i", bytes.fromhex(message_bytes[24:32])
)[0]
message_security_flag = (int(message_bytes[32:34], 16) & 0x02) >> 1
return (
message_type,
message_length,
message_number_of_parts,
message_transaction_id,
message_security_flag,
)
def parse_request_fields(self, message_bytes):
if self.send_trace_parser_ is not None:
return self.send_trace_parser_(message_bytes)
def request_requires_security_footer(self, message_type):
return message_type in self.message_requires_security_part
def is_candidate_line(self, line):
return "TcrMess" in line or "TcrConn" in line
def process_line(self, line):
connection = None
is_send_trace = False
is_add_security_trace = False
send_trace = {}
if not self.is_candidate_line(line):
return
parts = []
if self.get_send_trace_parts(line, parts):
(
send_trace["Timestamp"],
send_trace["tid"],
send_trace["Connection"],
message_bytes,
) = parts
is_send_trace = True
elif self.get_add_security_trace_parts(line, parts):
timestamp, tid, connection, security_footer_length, message_bytes = parts
is_add_security_trace = True
else:
return
if connection not in self.connection_states_:
self.connection_states_[connection] = self.STATE_NEUTRAL_
if self.connection_states_[connection] == self.STATE_NEUTRAL_:
if is_add_security_trace:
self.connection_states_[connection] = self.STATE_FOUND_SECURITY_FOOTER_
elif is_send_trace:
send_trace["Direction"] = "--->"
(
send_trace["Type"],
send_trace["Length"],
send_trace["Parts"],
send_trace["TransactionId"],
send_trace["SecurityFlag"],
) = self.parse_request_fields(message_bytes)
if (send_trace["SecurityFlag"] == 1) and (
self.request_requires_security_footer(str(send_trace["Type"]))
):
print(
"ERROR: Security flag is set, but no footer was added for this message!",
file=sys.stderr,
)
parse_client_message(send_trace, message_bytes)
self.output_queue_.put({"message": send_trace})
global_protocol_state.set_last_client_message(
send_trace["tid"], send_trace["Type"]
)
elif self.connection_states_[connection] == self.STATE_FOUND_SECURITY_FOOTER_:
if is_send_trace:
send_trace["Direction"] = "--->"
(
send_trace["Type"],
send_trace["Length"],
send_trace["Parts"],
send_trace["TransactionId"],
send_trace["SecurityFlag"],
) = self.parse_request_fields(message_bytes)
self.output_queue_.put({"message": send_trace})
global_protocol_state.set_last_client_message(
send_trace["tid"], send_trace["Type"]
)
|
StarcoderdataPython
|
386326
|
<gh_stars>1000+
# -*- coding: utf-8 -*-
"""
walle-web
:copyright: © 2015-2017 walle-web.io
:created time: 2017-06-14 15:53:46
:author: <EMAIL>
"""
import logging
from walle.service.extensions import login_manager
from walle.model.user import UserModel
from walle.model.user import RoleModel
from walle.model.user import MenuModel
@login_manager.user_loader
def load_user(user_id):
logging.error(user_id)
user = UserModel.query.get(user_id)
role = RoleModel().item(user.role_id)
access = MenuModel().fetch_access_list_by_role_id(user.role_id)
logging.error(access)
# logging.error(RoleModel.query.get(user.role_id).access_ids)
# logging.error(role['access_ids'].split(','))
# logging.error(UserModel.query.get(user_id))
return UserModel.query.get(user_id)
|
StarcoderdataPython
|
11280762
|
<reponame>AlexNemmo/yt_py_downloader
from pytube import YouTube
link = input("Enter the link: ")
yt = YouTube(link)
print(f"Title: {yt.title}")
print(f"Author: {yt.author}")
print(f"Length: {yt.length}")
ys = yt.streams.get_highest_resolution()
print("Downloading....")
ys.download()
print('\x1b[6;30;42m' + 'Success!' + '\x1b[0m')
|
StarcoderdataPython
|
8104186
|
<reponame>multitudes/Python_am_Freitag
# Capitalizes a copy of a string while checking for errors
import sys
from cs50 import get_string
# Get a string
s = get_string("s: ")
if not s:
sys.exit(1)
# Capitalize first letter in copy
t = s.capitalize()
# Print strings
print(f"s: {s}")
print(f"t: {t}")
sys.exit(0)
|
StarcoderdataPython
|
8050457
|
import numpy as np
import pandas as pd
from tqdm import tqdm
import os
import ast
from IPython import embed
if __name__ == '__main__':
data_path = f'./data/drugs/'
df = pd.read_csv(f'{data_path}train_data_allcontext_sideeffect.csv', index_col=0)
embed()
df.groupby('user')['activity_count'].agg('count')
ax = df['user'].hist()
fig = ax.get_figure()
fig.savefig('/path/to/figure.pdf')
|
StarcoderdataPython
|
6414397
|
import numpy as np
from .instances import construct_canonical, construct_feasible_bounded
def lhs_repair(random_state, lhs):
empty_columns = ((lhs != 0).sum(axis=0) == 0)
if empty_columns.any():
for column in np.where(empty_columns)[1]:
# Hacky, basically just adds a loose upper bound.
lhs[random_state.choice(lhs.shape[0]), column] = 0.0001
return lhs
def canonical_uniform_row_crossover(random_state, parent1, parent2, bias=0.5):
    ''' Uniform row crossover: bias is the probability that child1 inherits a given row from parent1 (child2 takes the complementary row). '''
assert parent1.variables == parent2.variables
assert parent1.constraints == parent2.constraints
choose = random_state.choice([0, 1], size=parent1.constraints, p=(bias, 1-bias))
child1_rows = choose * parent1.constraints + np.arange(parent1.constraints)
child2_rows = (1 - choose) * parent1.constraints + np.arange(parent1.constraints)
lhs_merged = np.concatenate([parent1.lhs(), parent2.lhs()])
rhs_merged = np.concatenate([parent1.rhs(), parent2.rhs()])
return (
construct_canonical(
variable_types=parent1.variable_types,
lhs=lhs_repair(random_state, lhs_merged[child1_rows, :]),
rhs=rhs_merged[child1_rows],
objective=parent1.objective()
),
construct_canonical(
variable_types=parent2.variable_types,
lhs=lhs_repair(random_state, lhs_merged[child2_rows, :]),
rhs=rhs_merged[child2_rows],
objective=parent2.objective()
)
)
def feasible_bounded_uniform_row_crossover(random_state, parent1, parent2, bias=0.5):
    ''' Uniform row crossover: bias is the probability that child1 inherits a given row from parent1 (child2 takes the complementary row). '''
assert parent1.variables == parent2.variables
assert parent1.constraints == parent2.constraints
choose = random_state.choice([0, 1], size=parent1.constraints, p=(bias, 1-bias))
child1_rows = choose * parent1.constraints + np.arange(parent1.constraints)
child2_rows = (1 - choose) * parent1.constraints + np.arange(parent1.constraints)
lhs_merged = np.concatenate([parent1.lhs(), parent2.lhs()])
rhs_merged = np.concatenate([parent1.rhs(), parent2.rhs()])
return (
construct_feasible_bounded(
variable_types=parent1.variable_types,
lhs=lhs_repair(random_state, lhs_merged[child1_rows, :]),
alpha=parent1.alpha(),
beta=parent1.beta()
),
construct_feasible_bounded(
variable_types=parent2.variable_types,
lhs=lhs_repair(random_state, lhs_merged[child2_rows, :]),
alpha=parent2.alpha(),
beta=parent2.beta()
)
)
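
# --- Hedged illustration (not part of the original module) -------------------
# Sketches the row-selection indexing used by both crossover functions above:
# indices 0..constraints-1 address parent1's rows in the merged LHS matrix,
# indices constraints..2*constraints-1 address parent2's rows.
def _crossover_row_selection_demo(constraints=4, bias=0.5, seed=0):
    rng = np.random.RandomState(seed)
    choose = rng.choice([0, 1], size=constraints, p=(bias, 1 - bias))
    child1_rows = choose * constraints + np.arange(constraints)
    child2_rows = (1 - choose) * constraints + np.arange(constraints)
    return choose, child1_rows, child2_rows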
|
StarcoderdataPython
|
22577
|
"""Test Predict API calls"""
import io
from PIL import Image
from dataclasses import dataclass
import tempfile
from pathlib import Path
import pytest
from mock import patch
from mldock.api.predict import send_image_jpeg, send_csv, send_json, handle_prediction
import responses
import requests
@pytest.fixture
def image_bytes():
"""reads image as bytes string"""
img = Image.open("tests/api/fixtures/eight.png", mode="r")
img_byte_arr = io.BytesIO()
img.save(img_byte_arr, format="PNG")
return img_byte_arr.getvalue()
@dataclass
class MockResponse:
status_code: int
json_data: dict = None
text: str = None
_content: bytes = None
def json(self):
return self.json_data
class TestPredictAPI:
"""
    TEST ERROR STATUS_CODE!=200 SCENARIO
"""
@staticmethod
@responses.activate
def test_handle_prediction_send_json_handles_non_200():
responses.add(
responses.POST,
"http://nothing-to-see-here/invocations",
json={"error": "client error"},
status=404,
)
with pytest.raises(requests.exceptions.RequestException):
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/payload.json",
response_file=None,
request_content_type="application/json",
response_content_type="application/json",
)
@staticmethod
@responses.activate
def test_handle_prediction_sending_image_jpeg_handles_non_200():
responses.add(
responses.POST,
"http://nothing-to-see-here/invocations",
json={"error": "client error"},
status=404,
)
with pytest.raises(requests.exceptions.RequestException):
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/eight.png",
response_file=None,
request_content_type="image/jpeg",
response_content_type="application/json",
)
@staticmethod
@responses.activate
def test_handle_prediction_sending_text_csv_handles_non_200():
responses.add(
responses.POST,
"http://nothing-to-see-here/invocations",
json={"error": "client error"},
status=404,
)
with pytest.raises(requests.exceptions.RequestException):
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/payload.csv",
response_file=None,
request_content_type="text/csv",
response_content_type="application/json",
)
"""
    TEST SUCCESS STATUS_CODE=200 SCENARIO
"""
@staticmethod
def test_handle_prediction_send_json_success_200():
with patch("mldock.api.predict.execute_request") as mock_execute_request:
mock_execute_request.return_value = MockResponse(
json_data={"result": "success"}, status_code=200
)
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/payload.json",
response_file=None,
request_content_type="application/json",
response_content_type="application/json",
)
validation_kwargs = {
"url": "http://nothing-to-see-here/invocations",
"headers": {"Content-Type": "application/json"},
}
_, kwargs = list(mock_execute_request.call_args)
data_obj = kwargs.pop("data")
assert (
kwargs == validation_kwargs
), "Failure. URL and Headers are incorrect."
assert isinstance(data_obj, str), "Failure. Expected str json object."
@staticmethod
def test_handle_prediction_sending_image_jpeg_success_200(image_bytes):
with patch("mldock.api.predict.execute_request") as mock_execute_request:
mock_execute_request.return_value = MockResponse(
_content=image_bytes, status_code=200
)
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/eight.png",
response_file=None,
request_content_type="image/jpeg",
response_content_type="image/jpeg",
)
validation_kwargs = {
"url": "http://nothing-to-see-here/invocations",
"headers": {"Content-Type": "image/jpeg"},
}
_, kwargs = list(mock_execute_request.call_args)
data_obj = kwargs.pop("data")
assert (
kwargs == validation_kwargs
), "Failure. URL and Headers are incorrect."
assert isinstance(
data_obj, io.BytesIO
), "Failure. Expected io.BytesIO object."
@staticmethod
def test_handle_prediction_sending_text_csv_success_200():
with patch("mldock.api.predict.execute_request") as mock_execute_request:
mock_execute_request.return_value = MockResponse(
text="greet,name\nhello,sam", status_code=200
)
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/payload.csv",
response_file=None,
request_content_type="text/csv",
response_content_type="text/csv",
)
validation_kwargs = {
"url": "http://nothing-to-see-here/invocations",
"headers": {"Content-Type": "text/csv"},
}
_, kwargs = list(mock_execute_request.call_args)
data_obj = kwargs.pop("data")
assert (
kwargs == validation_kwargs
), "Failure. URL and Headers are incorrect."
            assert isinstance(data_obj, str), "Failure. Expected str csv payload."
"""
    TEST WRITING RESPONSE TO FILE SCENARIO
"""
@staticmethod
def test_handle_prediction_send_json_success_write_response_file():
with tempfile.TemporaryDirectory() as tmp_dir:
response_filepath = Path(tmp_dir, "response.json")
with patch("mldock.api.predict.execute_request") as mock_execute_request:
mock_execute_request.return_value = MockResponse(
json_data={"result": "success"}, status_code=200
)
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/payload.json",
response_file=response_filepath,
request_content_type="application/json",
response_content_type="application/json",
)
assert (
response_filepath.is_file()
), "Failure. outputfile was not created"
@staticmethod
def test_handle_prediction_sending_image_jpeg_success_write_response_file(
image_bytes,
):
with tempfile.TemporaryDirectory() as tmp_dir:
response_filepath = Path(tmp_dir, "response.png")
with patch("mldock.api.predict.execute_request") as mock_execute_request:
mock_execute_request.return_value = MockResponse(
_content=image_bytes, status_code=200
)
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/eight.png",
response_file=response_filepath,
request_content_type="image/jpeg",
response_content_type="image/jpeg",
)
assert (
response_filepath.is_file()
), "Failure. outputfile was not created"
@staticmethod
def test_handle_prediction_sending_text_csv_success_write_response_file():
with tempfile.TemporaryDirectory() as tmp_dir:
response_filepath = Path(tmp_dir, "response.csv")
with patch("mldock.api.predict.execute_request") as mock_execute_request:
mock_execute_request.return_value = MockResponse(
text="greet,name\nhello,sam", status_code=200
)
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/payload.csv",
response_file=response_filepath,
request_content_type="text/csv",
response_content_type="text/csv",
)
assert (
response_filepath.is_file()
), "Failure. outputfile was not created"
"""
    TEST ADDING ADDITIONAL HEADERS
"""
@staticmethod
def test_handle_prediction_send_json_success_add_headers():
with patch("mldock.api.predict.execute_request") as mock_execute_request:
mock_execute_request.return_value = MockResponse(
json_data={"result": "success"}, status_code=200
)
_ = handle_prediction(
host="http://nothing-to-see-here/invocations",
request="tests/api/fixtures/payload.json",
response_file=None,
request_content_type="application/json",
response_content_type="application/json",
headers={"Authentication": "bearer 12345"},
)
validation_kwargs = {
"url": "http://nothing-to-see-here/invocations",
"headers": {
"Content-Type": "application/json",
"Authentication": "bearer 12345",
},
}
_, kwargs = list(mock_execute_request.call_args)
kwargs.pop("data")
assert (
kwargs == validation_kwargs
), "Failure. URL and Headers are incorrect."
|
StarcoderdataPython
|
247269
|
<gh_stars>0
"""Typing helpers to use throughout project.
All totally optional, of course.
"""
from typing import Union
import datetime
DTType = Union[datetime.datetime, datetime.date]
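# Hedged usage sketch (not part of the original module): shows how the DTType
# alias above might annotate a helper that accepts either a date or a datetime.
# The function name `to_date` is hypothetical.
def to_date(value: DTType) -> datetime.date:
    """Normalize a date or datetime to a plain date."""
    if isinstance(value, datetime.datetime):
        return value.date()
    return value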
|
StarcoderdataPython
|
1656713
|
#!/usr/bin/env python3
"""
Refactored version of Part 1a.
It turned out to be slow for Part 2.
"""
from collections import deque
from typing import Deque, List
class Rotator:
def __init__(self, digits: str) -> None:
self.d: Deque[int] = deque([int(x) for x in digits])
self.MIN = min(self.d)
self.MAX = max(self.d)
# self.NUMBER_OF_MOVES = 10 # test
self.NUMBER_OF_MOVES = 100 # the real thing
self.curr: int = self.d[0]
def rotate_to(self, value: int) -> None:
"""
Rotate the deque left until the given value is at the beginning.
"""
index = self.d.index(value)
self.d.rotate(-index) # rotate left `index` times
def find_destination(self, curr: int, three: List[int]) -> int:
value = curr
while True:
value -= 1
if value < self.MIN:
value = self.MAX
#
if value not in three:
return value
#
#
def move(self) -> None:
self.rotate_to(self.curr)
# print(self)
# print("current:", self.curr)
three = [self.d[1], self.d[2], self.d[3]]
# print("pick up:", three)
# start: remove the three elements
self.d.rotate(-1)
        for _ in range(3):
            self.d.popleft()
self.d.rotate()
# end: remove the three elements
dest = self.find_destination(self.curr, three)
# print("destination:", dest)
self.rotate_to(dest)
# start: insert the three elements
self.d.rotate(-1)
for n in reversed(three):
self.d.appendleft(n)
self.d.rotate()
# end: insert the three elements
self.rotate_to(self.curr)
self.curr = self.d[1]
def start(self) -> None:
for i in range(self.NUMBER_OF_MOVES):
step = i + 1
# print("Move", step)
self.move()
# print()
def get_result(self) -> str:
copy = self.d.copy()
while copy[0] != 1:
copy.rotate(-1)
copy.popleft()
return "".join([str(x) for x in copy])
def __str__(self) -> str:
return str(self.d)
def main():
example = "389125467"
input = "394618527"
# rot = Rotator(example)
rot = Rotator(input)
rot.start()
# print(rot.d)
print(rot.get_result())
##############################################################################
if __name__ == "__main__":
main()
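# Hedged illustration (not part of the original script): a single move on the
# example digits reproduces the puzzle's documented first step.
#     r = Rotator("389125467")
#     r.move()
#     list(r.d), r.curr   # -> ([3, 2, 8, 9, 1, 5, 4, 6, 7], 2)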
|
StarcoderdataPython
|
3275539
|
<filename>wavespectra/construct/specconstruct.py
import os
import numpy as np
import xarray as xr
from wavespectra.core.attributes import attrs, set_spec_attributes
from wavespectra.specdataset import SpecDataset
def prepare_reconstruction(spec_info, base_dset=None):
""" Load parameters for spectral reconstruction.
Arguments:
spec_info: dictionary for updating reconstruction defaults. Optionally extra variables to keep or rename.
- coordinates:
- spectral: freq, dir
- frequency spectrum:
- jonswap/TMA: hs, tp, gamma, dpt
- ochihubble: hs, tp, lam
- directional distribution:
- cos2s: dp, dspr
- normal: dp, dspr
base_dset: path or xarray dataset object
Returns:
ds: xarray dataset with parameters for spectral reconstruction. part dimension is used for concatenation
"""
reconstruction_defaults = {
"freq": np.arange(0.04, 1.0, 0.02), # frequencies
"dir": np.arange(0, 360, 10), # directions
"hs": "hs", # significant wave height
"tp": "tp", # peak period
"gamma": None, # jonswap peak enhancement factor
"dpt": None, # water depth
"lam": None, # ochihubble peak enhancement factor
"dp": "dp", # peak direction
"dspr": None, # directional spread
    } # fields used for reconstruction. It can be updated with fields of base_dset, numbers, or dataarrays
reconstruction_info = reconstruction_defaults.copy()
reconstruction_info.update(spec_info)
if base_dset is None:
ds0 = xr.Dataset()
elif isinstance(base_dset, str):
if os.path.isfile(base_dset):
ds0 = xr.open_dataset(base_dset)
else:
ds0 = xr.open_mfdataset(base_dset, combine="by_coords")
else:
ds0 = base_dset
ds = ds0[[]] # to keep metadata
spc_coords = ("freq", "dir")
for k, v in reconstruction_info.items():
if isinstance(v, xr.DataArray) and k in spc_coords:
ds[k] = v.values
elif isinstance(v, (list, tuple)) and k not in spc_coords:
ds[k] = xr.concat(
[ds0[e] if isinstance(e, str) else xr.DataArray(e) for e in v],
dim="part",
coords="minimal",
).assign_coords({"part": range(len(v))})
elif isinstance(v, str):
ds[k] = ds0[v]
elif v is None:
if k in ds0:
ds[k] = ds0[k]
else:
ds[k] = v
return ds
def finite_depth(freqs, dpt):
"""Factors for modifiying JONSWAP spectra in shallow water (TMA spectrum)
Arguments:
freqs: frequencies
dpt: water depth
Returns:
phi: factors between 0 and 1 for each frequency
"""
w = 2 * np.pi * freqs
whg = w * (dpt / 9.81) ** 0.5
phi = w ** 0 # filled with ones
phi[whg < 2] = 1 - 0.5 * (2 - whg[whg < 2]) ** 2
phi[whg < 1] = 0.5 * whg[whg < 1] ** 2
return phi
def calc_Sf_jonswap(freqs, hs, fp, gamma, dpt=None, sigmaA=0.07, sigmaB=0.09, alpha=-1):
""" Reconstruct JONSWAP or TMA frequency spectra
Arguments:
freqs: frequencies
hs: significant wave height
fp: peak frequency
gamma: jonswap peak enhancement factor
dpt: water depth
sigmaA, sigmaB: spectral width parameters
alpha: normalization factor
Returns:
Sf: xarray dataarray with reconstructed frequency spectra
"""
sigma = xr.where(freqs <= fp, sigmaA, sigmaB)
r = np.exp(-((freqs - fp) ** 2.0) / (2 * sigma ** 2 * fp ** 2))
Sf = 0.0617 * freqs ** (-5) * np.exp(-1.25 * (freqs / fp) ** (-4)) * gamma ** r
if dpt is not None:
Sf *= finite_depth(freqs, dpt)
if alpha < 0: # normalizing by integration
alpha = (hs / Sf.spec.hs()) ** 2 # make sure m0=Hm0^2/16=int S(w)dw
elif alpha == 0: # original normalization for default values
alpha = 5.061 * hs ** 2 * fp ** 4 * (1 - 0.287 * np.log(gamma))
return (alpha * Sf).fillna(0) # alpha>0 is applied directly
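# Hedged usage sketch (illustrative only, not part of the original module): the
# JONSWAP reconstruction can be called directly on an xarray frequency grid;
# alpha=0 selects the closed-form normalization instead of integrating the spectrum.
#     freqs = xr.DataArray(np.arange(0.04, 1.0, 0.02), dims="freq", name="freq")
#     Sf = calc_Sf_jonswap(freqs, hs=2.0, fp=0.1, gamma=3.3, alpha=0)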
gamma_fun = (
lambda x: np.sqrt(2.0 * np.pi / x)
* ((x / np.exp(1.0)) * np.sqrt(x * np.sinh(1.0 / x))) ** x
) # alternative to scipy.special.gamma
def calc_Sf_ochihubble(freqs, hs, fp, lam):
""" Reconstruct OCHI-HUBBLE frequency spectra
Arguments:
freqs: frequencies
hs: Significant wave height
fp: peak frequency
lam: ochihubble peak enhancement factor
Returns:
Sf: xarray dataarray with reconstructed frequency spectra
"""
w = 2 * np.pi * freqs
w0 = 2 * np.pi * fp
B = xr.ufuncs.maximum(lam, 0.01) + 0.25
A = 0.5 * np.pi * hs ** 2 * ((B * w0 ** 4) ** lam / gamma_fun(lam))
a = xr.ufuncs.minimum((w0 / w) ** 4, 100.0)
Sf = A * np.exp(-B * a) / (w ** (4.0 * B))
return Sf.fillna(0)
def calc_Dth_cos2s(dirs, dp, dspr):
"""Cosine 2s spreading function.
Arguments:
dirs: direction coordinates
dp: wave directions
dspr: wave directional spreads
Returns:
Dth: normalized spreading
Note:
        Function defined such that \int{Dth d\theta}=1
"""
th1 = 0.5 * np.deg2rad(dirs)
th2 = 0.5 * np.deg2rad(dp)
a = abs(
np.cos(th1) * np.cos(th2) + np.sin(th1) * np.sin(th2)
) # cos(a-b) = cos(a)cos(b)+sin(a)sin(b)
# Converting to cos2s spreading parameter
# see Holthuijsen pag165
s = (2./(dspr*np.pi/180)**2)-1
Dth = a ** (2.0 * s) # cos((dirs-dp)/2) ** (2*s)
Dth /= Dth.sum("dir") * abs(dirs[1] - dirs[0])
return Dth
def calc_Dth_normal(dirs, dp, dspr):
"""Normal distribution spreading
Arguments:
dirs: direction coordinates
dp: wave directions
dspr: wave directional spreads
Returns:
Dth: normalized spreading
"""
ddif0 = abs(dirs % 360 - dp % 360)
ddifmin = np.minimum(ddif0, 360 - ddif0)
Dth = np.exp((-(ddifmin ** 2)) / (2 * dspr ** 2)) / (dspr * (2 * np.pi) ** 0.5)
# TODO: wrapped normal but it's a bit pointless for real world dspr values
Dth /= Dth.sum("dir") * abs(dirs[1] - dirs[0])
return Dth
@xr.register_dataset_accessor("construct")
class SpecConstruct(object):
def __init__(self, xarray_obj):
self._obj = xarray_obj
def Sf(self, stype=""):
""" Wrapper for calc_Sf functions
Arguments:
stype: frequency spectrum type
Returns:
Sf: xarray dataarray with reconstructed frequency spectra
"""
if not stype or stype == "jonswap":
Sf = calc_Sf_jonswap(
self._obj.freq,
self._obj.hs,
1 / self._obj.tp,
self._obj.get("gamma", 3.3),
self._obj.get("dpt", None),
)
elif stype == "ochihubble":
Sf = calc_Sf_ochihubble(
self._obj.freq, self._obj.hs, 1 / self._obj.tp, self._obj.lam
)
else:
raise ValueError
return Sf
def Dth(self, dtype=""):
""" Wrapper for calc_Dth functions
Arguments:
            dtype: directional distribution type
Returns:
Dth: normalized directional spreading
"""
dspr = self._obj.get("dspr", 30)
if not dtype or dtype == "cos2s":
Dth = calc_Dth_cos2s(self._obj.dir, self._obj.dp, dspr)
elif dtype == "normal":
Dth = calc_Dth_normal(self._obj.dir, self._obj.dp, dspr)
else:
raise ValueError
return Dth
def efth(self, stype="", dtype="", sumdim="part"):
""" Reconstruct directional spectra
Arguments:
stype: frequency spectrum type
            dtype: directional distribution type
sumdim: dimension to sum values
Returns:
efth: xarray dataarray with reconstructed frequency-direction spectra
"""
efth = self.Sf(stype) * self.Dth(dtype)
if sumdim in efth.coords:
efth = efth.sum(dim=sumdim)
return efth
def to_dset(self, spec_info={}, **kwargs):
""" Create wavespectra dataset
Arguments:
spec_info: dictionary for updating reconstruction defaults.
Returns:
ds: wavespectra dataset with reconstructed frequency-direction spectra
"""
# TODO: Ensure that all arrays have wavespectra compatible names
if spec_info:
ds = prepare_reconstruction(spec_info, base_dset=self._obj)
else:
ds = self._obj.copy()
ds[attrs.SPECNAME] = ds.construct.efth(**kwargs)
set_spec_attributes(ds)
return ds
if __name__ == "__main__":
# Example1
spec_info = {
"hs": [1, 3],
"tp": [5, 12],
"gamma": 3.3,
"dp": [10, 40],
"dspr": [35, 25],
}
ds = prepare_reconstruction(spec_info).construct.to_dset()
# # Example2
# spec_info = {
# "hs": ["phs0", "phs1", "phs2"],
# "tp": ["ptp0", "ptp1", "ptp2"],
# "gamma": [1.0, 3.3, 3.3],
# "dp": ["pdir0", "pdir1", "pdir2"],
# "dspr": 30,
# }
# ds = xr.open_dataset(grdfile).construct.to_dset(spec_info)
# # Example3
# dstmp = xr.open_dataset(grdfile).isel(time=1, longitude=range(79, 82), latitude=62)
# spec_info = {
# 'hs': ["sea8hs", "sw8hs"],
# 'tp': ["sea8tp", "sw8tp"],
# "lam": [1.54 * np.exp(-0.062 * dstmp.hs), 3.00],
# "dp": ["sea8dp", "sw8dp"],
# "dspr": [35, 25],
# }
# ds = dstmp.construct.to_dset(spec_info, stype="ochihubble", dtype="normal")
|
StarcoderdataPython
|
3540273
|
<filename>code/4-ROC_PR_curve/calculate_roc.py
# Siamese Architecture for face recognition
import random
import numpy as np
import time
import tensorflow as tf
import math
import pdb
import sys
import os
import scipy.io as sio
from sklearn import *
import matplotlib.pyplot as plt
tf.app.flags.DEFINE_string(
    'evaluation_dir', '../../results/ROC',
    'Directory containing the saved score and label vectors (.npy files).')
# Store all elements in the FLAGS structure!
FLAGS = tf.app.flags.FLAGS
score = np.load(os.path.join(FLAGS.evaluation_dir,'score_vector.npy'))
label = np.load(os.path.join(FLAGS.evaluation_dir,'target_label_vector.npy'))
def calculate_eer_auc_ap(label,distance):
fpr, tpr, thresholds = metrics.roc_curve(label, distance, pos_label=1)
AUC = metrics.roc_auc_score(label, distance, average='macro', sample_weight=None)
AP = metrics.average_precision_score(label, distance, average='macro', sample_weight=None)
# Calculating EER
intersect_x = fpr[np.abs(fpr - (1 - tpr)).argmin(0)]
EER = intersect_x
return EER,AUC,AP,fpr, tpr
# K-fold validation for ROC
k=1
step = int(label.shape[0] / float(k))
EER_VECTOR = np.zeros((k,1))
AUC_VECTOR = np.zeros((k,1))
for split_num in range(k):
index_start = split_num * step
index_end = (split_num + 1) * step
EER_temp,AUC_temp,AP,fpr, tpr = calculate_eer_auc_ap(label[index_start:index_end],score[index_start:index_end])
EER_VECTOR[split_num] = EER_temp * 100
AUC_VECTOR[split_num] = AUC_temp * 100
print("EER=",np.mean(EER_VECTOR),np.std(EER_VECTOR))
print("AUC=",np.mean(AUC_VECTOR),np.std(AUC_VECTOR))
|
StarcoderdataPython
|
3230527
|
import os
import numpy as np
try:
import cv2
except ImportError:
cv2 = False
VIDEO_TYPES = ['.avi', '.mp4', ]
IMAGE_TYPES = ['.png', '.bmp', '.tiff', '.jpg', '.jpeg']
class FundusImageWithMetaData(object):
""" Class to hold the fundus image and any related metadata, and enable saving.
Attributes:
image (np.array): Fundus image.
laterality (str): Left or right eye.
patient_id (str): Patient ID.
DOB (str): Patient date of birth.
"""
def __init__(self, image, laterality=None, patient_id=None, patient_dob=None):
self.image = image
self.laterality = laterality
self.patient_id = patient_id
self.DOB = patient_dob
def save(self, filepath):
"""Saves fundus image.
Args:
filepath (str): Location to save volume to. Extension must be in IMAGE_TYPES.
"""
if cv2 is False:
raise RuntimeError("cv2 is missing, please install oct-converter[extras]")
extension = os.path.splitext(filepath)[1]
if extension.lower() in IMAGE_TYPES:
cv2.imwrite(filepath, self.image)
elif extension.lower() == '.npy':
np.save(filepath, self.image)
else:
raise NotImplementedError('Saving with file extension {} not supported'.format(extension))
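# Hedged usage sketch (illustrative only, not part of the original module):
# wrap a dummy image in the container and save it as .npy (note the guard in
# save() still requires cv2 to be importable).
if __name__ == "__main__":
    dummy = np.zeros((256, 256, 3), dtype=np.uint8)
    fundus = FundusImageWithMetaData(dummy, laterality="L", patient_id="anon")
    fundus.save("example_fundus.npy")  # hypothetical output path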
|
StarcoderdataPython
|
3539028
|
from django.http import HttpResponse
from django.shortcuts import render, redirect
from .forms import FormLinks
from .models import Links
from django.template import RequestContext
def home(request):
form = FormLinks()
status = request.GET.get('status')
return render(request, 'home.html', {'form': form, 'status': status})
def valida_link(request):
form = FormLinks(request.POST)
link_fixo = 'https://sbily.herokuapp.com/'
link_encurtado = form.data['link_encurtado']
link = Links.objects.filter(link_encurtado = link_encurtado)
if len(link) > 0:
return redirect("/?status=1")
if form.is_valid():
try:
form.save()
return render(request, 'links.html', {'link_fixo': link_fixo, 'link_encurtado': link_encurtado})
        except:
            return HttpResponse('Erro Interno do Sistema')
    # always return a response: fall back to the home page when the form is invalid
    return redirect('/')
def redirecionar(request, link):
link = Links.objects.filter(link_encurtado = link)
if len(link) == 0:
return redirect('/')
return redirect(link[0].link_redirecionado)
def handler404(request, exception, template_name="404.html"):
response = render(request, template_name)
response.status_code = 404
return response
def handler500(request, *args, **argv):
response = render(request, '500.html')
response.status_code = 500
return response
|
StarcoderdataPython
|
60372
|
import torch
import torch.nn as nn
class CNNLayer(nn.Module):
"""
    This layer is callable for 1d convolution and pooling functions with flattened result
"""
def __init__(self,
input_dim,
kernel_size=(3, 4, 5),
kernel_num=200):
"""
:param input_dim: input dim (type:int)
:param kernel_size: kernel size of convolution, default is (3,4,5) (type:tuple or list)
:param kernel_num: channel of each kernel, default is 200 (type:int)
"""
super(CNNLayer, self).__init__()
self.output_dim = len(kernel_size) * kernel_num
self.convolutions = nn.ModuleList(
[nn.Conv2d(1, kernel_num, (ks, input_dim)) for ks in kernel_size]
)
def forward(self, x):
con_ret = [c(x) for c in self.convolutions]
pooling_x = [nn.functional.max_pool1d(c.squeeze(-1), c.size()[2]) for c in con_ret]
flat_pool = torch.cat(pooling_x, 1)
return flat_pool # (batch, len(kernel_size)*kernel_num)
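# Hedged usage sketch (illustrative only, not part of the original module):
# the layer expects input shaped (batch, 1, sequence_length, input_dim) and
# concatenates one max-pooled feature map per kernel size.
if __name__ == "__main__":
    batch, seq_len, emb_dim = 8, 30, 128
    layer = CNNLayer(input_dim=emb_dim, kernel_size=(3, 4, 5), kernel_num=200)
    dummy = torch.randn(batch, 1, seq_len, emb_dim)
    out = layer(dummy)
    print(out.shape)  # torch.Size([8, 600, 1]); squeeze(-1) for a flat (batch, 600) matrix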
|
StarcoderdataPython
|
5014542
|
# -*- coding: utf-8 -*-
import traceback
from .interface import uuid
class TopicUuid(uuid.CloudioUuid):
"""Topic based CloudioUuid (Universally Unique Identifier)
In the case of topic based MQTT communication the topic is used directly in order to identify objects
"""
def __init__(self, cloud_io_element=None):
super(TopicUuid, self).__init__()
# The topic is the UUID for every object
self._topic = None # type: str or None
if cloud_io_element:
from cloudio.endpoint.attribute import CloudioAttribute
from cloudio.endpoint.interface.node_container import CloudioNodeContainer
from cloudio.endpoint.interface.object_container import CloudioObjectContainer
try:
if isinstance(cloud_io_element, CloudioAttribute):
self._topic = self._get_attribute_topic(cloud_io_element)
elif isinstance(cloud_io_element, CloudioNodeContainer):
self._topic = self._get_node_container_topic(cloud_io_element)
elif isinstance(cloud_io_element, CloudioObjectContainer):
self._topic = self._get_object_container_topic(cloud_io_element)
except Exception:
traceback.print_exc()
raise RuntimeError('Error in TopicUuid')
######################################################################
# interface.CloudioUuid implementation
#
def equals(self, other):
"""Returns true if the TopicUuid is equal to the given one, false otherwise.
:param other: The TopicUuid to check equality with
:type other: TopicUuid
:return:
"""
if not self.is_valid() or not isinstance(other, TopicUuid) or not other.is_valid():
return False
return True if self.topic == other.topic else False
def is_valid(self):
return True if self.topic is not None and self.topic != '' else False
def to_string(self):
"""
:return: Serialized TopicUuid.
:rtype: str
"""
return self.topic
######################################################################
# Public API
#
@property
def topic(self):
return self._topic
# topic.setter should only be used for testing.
@topic.setter
def topic(self, value):
self._topic = value
######################################################################
# Private methods
#
def _get_attribute_topic(self, cloudio_attribute):
return self._get_attribute_container_topic(cloudio_attribute.get_parent()) + '/attributes/' + \
cloudio_attribute.get_name()
def _get_attribute_container_topic(self, attribute_container):
# TODO Remove check below and put an assert for attributeContainer
if attribute_container is None or attribute_container.get_name() is None:
return '<no parent>' + '/objects/' + '<no name>'
return self._get_object_container_topic(attribute_container.get_parent_object_container()) + \
'/objects/' + attribute_container.get_name()
def _get_object_container_topic(self, object_container):
if not object_container:
return '<no parent>' + '/objects/' + '<no name>'
parentObjectContainer = object_container.get_parent_object_container()
if parentObjectContainer:
return self._get_object_container_topic(parentObjectContainer) + '/objects/' + object_container.get_name()
parentNodeContainer = object_container.get_parent_node_container()
if parentNodeContainer:
return self._get_node_container_topic(parentNodeContainer) + '/nodes/' + object_container.get_name()
@staticmethod
def _get_node_container_topic(node_container):
        # As the name of a node container is unique in cloud.io, we just take the name.
return node_container.get_name()
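# Hedged usage sketch (illustrative only, not part of the original module):
# in tests the topic can be assigned directly via the setter, after which
# validity, equality and serialisation follow from the topic string alone.
# The topic value shown here is illustrative.
#     a = TopicUuid()
#     a.topic = "endpoint/nodes/node/objects/obj/attributes/temperature"
#     a.is_valid()      # True
#     a.to_string()     # returns the topic string itself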
|
StarcoderdataPython
|
8087898
|
<gh_stars>0
# Welcome to AiSaturdays Sevilla 2020!
# --------------------------------------
print("Hello world")
|
StarcoderdataPython
|
5042300
|
import discord
from discord.ext import commands
import json
import sys
import os
import time
from modules.log import log
import modules.activity as activity
from modules.misc import get_args
from modules.misc import is_owner
class Debug(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(
name='logmsg',
description='Logs a message in the console',
aliases=['logctx'],
hidden=True
)
async def logmsg_command(self, ctx):
author_ID = str(ctx.message.author.id)
if not is_owner(author_ID):
await ctx.send(content=f"<@!{author_ID}> Owner commands can only be executed by owners.")
await log('Owner command cannot be executed by non-owners.', 'warn', client=self.bot)
return
print(f"\n{ctx}\n\n==============================\n\n{dir(self.bot.user)}")
#print(f"\n\n{ctx.guild == None}\n\n")
return
def setup(bot):
bot.add_cog(Debug(bot))
# Adds the Debug commands to the bot
|
StarcoderdataPython
|
4909349
|
<filename>Tests/test_FramePcapAnalysis.py<gh_stars>0
class TestFramePcapAnalysis:
def test_show_frame(self):
assert False
|
StarcoderdataPython
|
5102768
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen
https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1NodeConfigStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name and the value is attribute
type.
attribute_map (dict): The key is attribute name and the value is json key
in definition.
"""
swagger_types = {
'active': 'V1NodeConfigSource',
'assigned': 'V1NodeConfigSource',
'error': 'str',
'last_known_good': 'V1NodeConfigSource'
}
attribute_map = {
'active': 'active',
'assigned': 'assigned',
'error': 'error',
'last_known_good': 'lastKnownGood'
}
def __init__(self,
active=None,
assigned=None,
error=None,
last_known_good=None):
"""
V1NodeConfigStatus - a model defined in Swagger
"""
self._active = None
self._assigned = None
self._error = None
self._last_known_good = None
self.discriminator = None
if active is not None:
self.active = active
if assigned is not None:
self.assigned = assigned
if error is not None:
self.error = error
if last_known_good is not None:
self.last_known_good = last_known_good
@property
def active(self):
"""
Gets the active of this V1NodeConfigStatus.
Active reports the checkpointed config the node is actively using.
Active will represent either the current version of the Assigned config,
or the current LastKnownGood config, depending on whether attempting to
use the Assigned config results in an error.
:return: The active of this V1NodeConfigStatus.
:rtype: V1NodeConfigSource
"""
return self._active
@active.setter
def active(self, active):
"""
Sets the active of this V1NodeConfigStatus.
Active reports the checkpointed config the node is actively using.
Active will represent either the current version of the Assigned config,
or the current LastKnownGood config, depending on whether attempting to
use the Assigned config results in an error.
:param active: The active of this V1NodeConfigStatus.
:type: V1NodeConfigSource
"""
self._active = active
@property
def assigned(self):
"""
Gets the assigned of this V1NodeConfigStatus.
Assigned reports the checkpointed config the node will try to use. When
Node.Spec.ConfigSource is updated, the node checkpoints the associated
config payload to local disk, along with a record indicating intended
config. The node refers to this record to choose its config checkpoint,
and reports this record in Assigned. Assigned only updates in the status
after the record has been checkpointed to disk. When the Kubelet is
restarted, it tries to make the Assigned config the Active config by
loading and validating the checkpointed payload identified by Assigned.
:return: The assigned of this V1NodeConfigStatus.
:rtype: V1NodeConfigSource
"""
return self._assigned
@assigned.setter
def assigned(self, assigned):
"""
Sets the assigned of this V1NodeConfigStatus.
Assigned reports the checkpointed config the node will try to use. When
Node.Spec.ConfigSource is updated, the node checkpoints the associated
config payload to local disk, along with a record indicating intended
config. The node refers to this record to choose its config checkpoint,
and reports this record in Assigned. Assigned only updates in the status
after the record has been checkpointed to disk. When the Kubelet is
restarted, it tries to make the Assigned config the Active config by
loading and validating the checkpointed payload identified by Assigned.
:param assigned: The assigned of this V1NodeConfigStatus.
:type: V1NodeConfigSource
"""
self._assigned = assigned
@property
def error(self):
"""
Gets the error of this V1NodeConfigStatus.
Error describes any problems reconciling the Spec.ConfigSource to the
Active config. Errors may occur, for example, attempting to checkpoint
Spec.ConfigSource to the local Assigned record, attempting to checkpoint
the payload associated with Spec.ConfigSource, attempting to load or
validate the Assigned config, etc. Errors may occur at different points
while syncing config. Earlier errors (e.g. download or checkpointing
errors) will not result in a rollback to LastKnownGood, and may resolve
across Kubelet retries. Later errors (e.g. loading or validating a
checkpointed config) will result in a rollback to LastKnownGood. In the
latter case, it is usually possible to resolve the error by fixing the
config assigned in Spec.ConfigSource. You can find additional
information for debugging by searching the error message in the Kubelet
log. Error is a human-readable description of the error state; machines
can check whether or not Error is empty, but should not rely on the
stability of the Error text across Kubelet versions.
:return: The error of this V1NodeConfigStatus.
:rtype: str
"""
return self._error
@error.setter
def error(self, error):
"""
Sets the error of this V1NodeConfigStatus.
Error describes any problems reconciling the Spec.ConfigSource to the
Active config. Errors may occur, for example, attempting to checkpoint
Spec.ConfigSource to the local Assigned record, attempting to checkpoint
the payload associated with Spec.ConfigSource, attempting to load or
validate the Assigned config, etc. Errors may occur at different points
while syncing config. Earlier errors (e.g. download or checkpointing
errors) will not result in a rollback to LastKnownGood, and may resolve
across Kubelet retries. Later errors (e.g. loading or validating a
checkpointed config) will result in a rollback to LastKnownGood. In the
latter case, it is usually possible to resolve the error by fixing the
config assigned in Spec.ConfigSource. You can find additional
information for debugging by searching the error message in the Kubelet
log. Error is a human-readable description of the error state; machines
can check whether or not Error is empty, but should not rely on the
stability of the Error text across Kubelet versions.
:param error: The error of this V1NodeConfigStatus.
:type: str
"""
self._error = error
@property
def last_known_good(self):
"""
Gets the last_known_good of this V1NodeConfigStatus.
LastKnownGood reports the checkpointed config the node will fall back to
when it encounters an error attempting to use the Assigned config. The
Assigned config becomes the LastKnownGood config when the node
determines that the Assigned config is stable and correct. This is
currently implemented as a 10-minute soak period starting when the local
record of Assigned config is updated. If the Assigned config is Active
at the end of this period, it becomes the LastKnownGood. Note that if
Spec.ConfigSource is reset to nil (use local defaults), the
LastKnownGood is also immediately reset to nil, because the local
default config is always assumed good. You should not make assumptions
about the node's method of determining config stability and correctness,
as this may change or become configurable in the future.
:return: The last_known_good of this V1NodeConfigStatus.
:rtype: V1NodeConfigSource
"""
return self._last_known_good
@last_known_good.setter
def last_known_good(self, last_known_good):
"""
Sets the last_known_good of this V1NodeConfigStatus.
LastKnownGood reports the checkpointed config the node will fall back to
when it encounters an error attempting to use the Assigned config. The
Assigned config becomes the LastKnownGood config when the node
determines that the Assigned config is stable and correct. This is
currently implemented as a 10-minute soak period starting when the local
record of Assigned config is updated. If the Assigned config is Active
at the end of this period, it becomes the LastKnownGood. Note that if
Spec.ConfigSource is reset to nil (use local defaults), the
LastKnownGood is also immediately reset to nil, because the local
default config is always assumed good. You should not make assumptions
about the node's method of determining config stability and correctness,
as this may change or become configurable in the future.
:param last_known_good: The last_known_good of this V1NodeConfigStatus.
:type: V1NodeConfigSource
"""
self._last_known_good = last_known_good
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, 'to_dict') else x, value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], 'to_dict') else item, value.items()))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1NodeConfigStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
StarcoderdataPython
|
22763
|
""" Here all the blog's urls routes will be mapped """
from django.urls import path
from django.conf.urls import include, url
from . import views
app_name = 'core'
urlpatterns = [
# path('', views.home, name='home-page'),
url(r'^api/', include('apps.core.api.urls', namespace='api')),
]
|
StarcoderdataPython
|
37055
|
<filename>YatzyPy/tests.py
# tests.py
from . main import Yatzy
def runTests():
c = Yatzy([5, 5, 6, 5, 6])
s = c.getScoreTable()
assert s['change'] == 27 and s['fullhouse'] == 27
assert s['double'] == 12 and s['six'] == 12
assert s['five'] == 15 and s['triple'] == 15
assert s['pair'] == 22
c = Yatzy([5, 5, 5, 5, 5])
s = c.getScoreTable()
assert s['change'] == 25 and s['fullhouse'] == 25 and s['five'] == 25
assert s['double'] == 10
assert s['triple'] == 15
assert s['pair'] == 20 and s['quadruple'] == 20
assert s['yatzy'] == 50
c = Yatzy([4,4,4,4,1])
s = c.getScoreTable()
assert s['change'] == 17
assert s['double'] == 8
assert s['triple'] == 12
assert s['one'] == 1
assert s['pair'] == 16 and s['quadruple'] == 16
c = Yatzy([3,3,3,2,1])
s = c.getScoreTable()
assert s['change'] == 12
assert s['double'] == 6
assert s['triple'] == 9 and s['three'] == 9
assert s['one'] == 1
assert s['two'] == 2
c = Yatzy([3,3,4,2,1])
s = c.getScoreTable()
assert s['change'] == 13
assert s['one'] == 1
assert s['two'] == 2
assert s['four'] == 4
assert s['three'] == 6 and s['double'] == 6
c = Yatzy([3,5,4,2,1])
s = c.getScoreTable()
assert s['change'] == 15 and s['smallstraight'] == 15
assert s['one'] == 1
assert s['two'] == 2
assert s['three'] == 3
assert s['four'] == 4
assert s['five'] == 5
c = Yatzy([3,5,4,2,6])
s = c.getScoreTable()
assert s['change'] == 20 and s['largestraight'] == 20
assert s['six'] == 6
assert s['two'] == 2
assert s['three'] == 3
assert s['four'] == 4
assert s['five'] == 5
c = Yatzy([3,5,4,1,6])
s = c.getScoreTable()
assert s['change'] == 19
assert s['six'] == 6
assert s['one'] == 1
assert s['three'] == 3
assert s['four'] == 4
assert s['five'] == 5
c = Yatzy([3,3,4,4,5])
s = c.getScoreTable()
assert s['change'] == 19
assert s['three'] == 6
assert s['four'] == 8 and s['double'] == 8
assert s['five'] == 5
assert s['pair'] == 14
|
StarcoderdataPython
|
5123879
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from swagger_spec_compatibility.spec_utils import load_spec_from_spec_dict
from swagger_spec_compatibility.walkers.response_paths import ResponsePathsWalker
def test_ResponsePathsWalker_returns_no_paths_if_no_endpoints_defined(minimal_spec):
assert ResponsePathsWalker(minimal_spec, minimal_spec).walk() == []
def test_ResponsePathsWalker_returns_paths_of_endpoints_responses(minimal_spec, minimal_spec_dict):
minimal_spec_dict['paths'] = {
'/endpoint': {
'get': {
'responses': {
'200': {'description': ''},
'default': {'description': ''},
},
},
'put': {
'responses': {
'403': {'description': ''},
},
},
},
}
spec = load_spec_from_spec_dict(minimal_spec_dict)
assert set(ResponsePathsWalker(spec, spec).walk()) == {
('paths', '/endpoint', 'get', 'responses', '200'),
('paths', '/endpoint', 'get', 'responses', 'default'),
('paths', '/endpoint', 'put', 'responses', '403'),
}
|
StarcoderdataPython
|
1638564
|
<reponame>filiparente/Predtweet<gh_stars>1-10
# 1) Report 6 metrics:
# plain MSE,
# plain RMSE,
# RMSE normalized by the mean of the observations y,
# RMSE normalized by ymax-ymin,
# RMSE normalized by std(y),
# RMSE normalized by the difference between the 0.75 and 0.25 quantiles of y,
# FFT
#
# 2) store the results in a BERT_runx_prediction_report.mat structure
# TODO: THIS IS ADAPTED FOR NO EXTERNAL FEATURES! To use with external features, adapt the code first
import random
import torch
import numpy as np
import os
import glob
import json
import argparse
import matplotlib.pyplot as plt
import math
from scipy.io import loadmat
from torch.utils.data import Dataset, DataLoader, SequentialSampler
import pandas as pd
import scipy
import torch.nn as nn
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error, mean_absolute_error
from scipy.io import savemat
class LSTM(nn.Module):
    #input size: number of features per time step; for a univariate count series the input size will be 1.
    #hidden layer size: specifies the number of hidden layers along with the number of neurons in each layer; we will have one layer of 100 neurons.
    #output size: the number of items in the output; since we predict one count one step ahead, the output size will be 1.
def __init__(self, device, input_size=1, hidden_layer_size=100, output_size=1, num_layers=1):
super().__init__()
self.device = device
self.hidden_layer_size = hidden_layer_size
self.num_layers = num_layers
self.lstm = nn.LSTM(input_size, hidden_layer_size)
self.linear = nn.Linear(hidden_layer_size, output_size)
def forward(self, input_seq):
lstm_out, self.hidden_cell = self.lstm(input_seq.view(len(input_seq) ,1, -1), self.hidden_cell)
predictions = self.linear(lstm_out.view(len(input_seq), -1))
return predictions[-1]
def init_hidden(self, batch_size):
#return (nn.Parameter(torch.randn(self.num_layers,batch_size, self.hidden_dim).type(torch.FloatTensor).to(self.device), requires_grad=True), nn.Parameter(torch.randn(self.num_layers, batch_size, self.hidden_dim).type(torch.FloatTensor).to(self.device),requires_grad=True))
return (torch.zeros(self.num_layers, batch_size, self.hidden_layer_size).to(self.device), #hidden state
torch.zeros(self.num_layers, batch_size, self.hidden_layer_size).to(self.device)) #cell state
class MyDataset(Dataset):
def __init__(self, y, X, window_size, seq_len):
self.dataset = []
for i in range(len(y)):
self.dataset.append({'X': X[:,i],'y': y[i]})
assert len(self.dataset) == len(y), "error creating dataset"
self.window_size = window_size
self.seq_len = seq_len
self.idx = 0
def __len__(self):
return len(self.dataset)
def __getitem__(self, idx):
if torch.is_tensor(idx):
idx = idx.tolist()
#each feature is a concatenation of the average embeddings of the previous windows
#X = [self.dataset[i]['X'] for i in range(self.idx,self.idx+self.seq_len)]
#sample = {'X': X, 'y': self.dataset[self.idx:self.idx+self.seq_len]['y'], 'window_size': self.window_size}
#self.idx += self.seq_len
#each feature is the average embedding in the current dt
sample = {'X': self.dataset[idx]['X'], 'y': self.dataset[idx]['y'], 'window_size': self.window_size}
return sample
# convert an array of values into a dataset matrix
def create_dataset(X,y, batch_size, seq_len):
#dataX, dataY = [], []
idx = 0
#auxX = np.zeros((batch_size, seq_len, 768))
#auxY = np.zeros((batch_size, seq_len))
#dataset = dict()
#dataset['X'] = np.array([])
#dataset['y'] = np.array([])
dataset = []
if len(X)>=batch_size*seq_len:
while(1):
auxX = np.zeros((batch_size, seq_len, 768))
auxY = np.zeros((batch_size, seq_len))
for i in range(batch_size):
auxX[i,:,:] = X[idx+i*seq_len:seq_len+idx+i*seq_len]
auxY[i,:] = y[idx+i*seq_len:seq_len+idx+i*seq_len]
dataset.append({'X':np.array(auxX), 'y':np.array(auxY)})
#dataX.append(auxX)
#dataY.append(auxY)
if seq_len+idx+i*seq_len == len(X):
break
idx=idx+1
del auxX
del auxY
#dataset['X'] = np.array(dataX)
#dataset['y'] = np.array(dataY)
return dataset
def create_dataset2(sequence, lag=1):
#(seq_len, batch_size, input_size)=(1,3786,1)
X, y = sequence[0:-lag], sequence[lag:]
return np.reshape(np.array(X), (1,len(sequence)-1,1)), np.reshape(np.array(y), (1,len(sequence)-1))
def create_inout_sequences(input_data, tw):
inout_seq = []
L = len(input_data)
for i in range(L-tw):
train_seq = input_data[i:i+tw]
train_label = input_data[i+tw:i+tw+1]
inout_seq.append((train_seq ,train_label))
return inout_seq
def fft_(signal1, signal2, plot_):
time = range(1,len(signal1)) # Time Vector # Signal data in Time-Domain
N = len(signal1) # Number Of Samples
Ts = np.mean(np.diff(time)) # Sampling Interval
Fs = 1/Ts # Sampling Frequency
Fn = Fs/2 # Nyquist Frequency
Fv = np.linspace(0, 1, int(float(N/2))+1)*Fn # Frequency Vector (For ‘plot’ Call)
Iv = range(1,len(Fv)) # Index Vector (Matches ‘Fv’)
FT_Signal1 = scipy.fft(signal1)/N # Normalized Fourier Transform Of Data
FT_Signal2 = scipy.fft(signal2)/N # Normalized Fourier Transform Of Data
if plot_:
plt.figure(figsize=(15,8))
        plt.plot(Fv, abs(FT_Signal1[Iv])*2)
        plt.plot(Fv, abs(FT_Signal2[Iv])*2)
plt.show()
#Mean squared errors
#Phase
tmp = pow((np.transpose(np.angle(FT_Signal1))-np.angle(FT_Signal2)),2)
srmse_phase = np.sqrt(sum(tmp[:])/N)/np.std(FT_Signal1)
    #Amplitude
    tmp = pow((np.transpose(np.real(FT_Signal1))-np.real(FT_Signal2)),2)
    srmse_ampl = np.sqrt(sum(tmp[:])/N)/np.std(FT_Signal1)
    # return both normalized errors so callers can unpack them
    return srmse_phase, srmse_ampl
def main():
#Parser
parser = argparse.ArgumentParser(description='Normalize LSTM errors.')
parser.add_argument('--model_path', default=r"C:/Users/Filipa/Desktop/Predtweet/lstm/arquitetura2/full_data/wt_features/", help="OS path to the folder where the embeddings are located.")
parser.add_argument('--full_dataset_path', default=r"C:/Users/Filipa/Desktop/Predtweet/bitcoin_data/datasets/server/1.0/", help="OS path to the folder where the embeddings are located.")
parser.add_argument('--seq_len', type = int, default=50, help='Input dimension (number of timestamps).')
parser.add_argument('--batch_size', type = int, default=1, help='How many batches of sequence length inputs per iteration.')
parser.add_argument("--use_features", action="store_true", help="If we want to consider the textual features (from BERT/TFIDF) or only the counts.")
parser.add_argument("--output_dir", default=r"C:/Users/Filipa/Desktop/Predtweet/lstm/arquitetura2/full_data/")
args = parser.parse_args()
print(args)
model_path = args.model_path
path = args.full_dataset_path
train_dev_test_split = True
normalize = True
batch_size = args.batch_size
seq_len = args.seq_len
percentages = [0.05, 0.05, 0.05]
# If there's a GPU available...
if torch.cuda.is_available():
# Tell PyTorch to use the GPU.
device = torch.device("cuda")
print('There are %d GPU(s) available.' % torch.cuda.device_count())
print('We will use the GPU:', torch.cuda.get_device_name(0))
n_gpu = 1
else:
print('No GPU available, using the CPU instead.')
device = torch.device("cpu")
run = input("Save prediction report for which run (accepted input: integer number 1,2,...) ?")
dts = [1] #[1,3,4,6] #[1,3,4,6,12,24,48]
dws = [0]#,1,3,5,7]
result = {}
# Load data
os.chdir(path)
files_ = glob.glob("*.mat")
if len(files_)>1:
#TF-IDF features: alphabetically order dev-test-train
dev_data = loadmat(path+"dev_dataset.mat")
test_data = loadmat(path+"test_dataset.mat")
train_data = loadmat(path+"train_dataset.mat")
data = dict()
data['y'] = np.hstack(( train_data['y'][0],dev_data['y'][0],test_data['y'][0] )).ravel()
data['X'] = scipy.sparse.vstack([train_data['X'],dev_data['X'], test_data['X']]).toarray()
disc_unit = train_data['disc_unit'][0][0] #discretization unit in hours
window_size = train_data['window_size'][0][0] #length of the window to average the tweets
start_date = int(train_data['start_date'])
end_date = int(test_data['end_date'])
train_dev_test_split = False #already done!
else:
#BERT features
data = loadmat(path+'dataset.mat')
start_date = data['start_date'][0][0]
end_date = data['end_date'][0][0]
disc_unit = data['disc_unit'][0][0] #discretization unit in hours
window_size = data['window_size'][0][0] #length of the window to average the tweets
train_dev_test_split = True
pd_start_date = pd.to_datetime(start_date)
pd_end_date = pd.to_datetime(end_date)
print("Start date: " + str(pd_start_date) +" and end date: " + str(pd_end_date))
obs_seq = data['y'].ravel()
X = np.transpose(data['X'])
print("Observation sequence stats: min " + str(np.min(obs_seq)) + " max " + str(np.max(obs_seq)) + " mean " + str(np.mean(obs_seq)) + " std " + str(np.std(obs_seq)))
diff_ = list(np.diff(obs_seq).ravel())
diff_sorted = np.sort(diff_)
values_diff_ = diff_sorted[-10:]
idx2 = np.inf
for i in range(10):
idx_ = diff_.index(values_diff_[i])#[idx_,~] = find(values_diff_[i]==diff_)
if idx_<idx2:
idx2 = idx_
#cut observation sequence and features (drop first samples that correspond
#to small tweet counts
obs_seq = obs_seq[idx2+1:]
X = X[:, idx2+1:]
#plt.plot(obs_seq)
#plt.show()
all_obs_seq = obs_seq
all_X = X
if train_dev_test_split:
# Use train/dev/testtest split 80%10%10% to split our data into train and validation sets for training
len_dataset = len(all_obs_seq)
lengths = np.ceil(np.multiply(len_dataset,percentages))
diff_ = int(np.sum(lengths)-len_dataset)
if diff_>0:
#subtract 1 starting from the end
for i in range(len(lengths)-1,-1,-1):
lengths[i] = lengths[i]-1
diff_=diff_-1
if diff_==0:
break
#lengths = np.cumsum(lengths)
#lengths = [int(l) for l in lengths]
#train_obs_seq = all_obs_seq[:lengths[0]]
#train_X = all_X[:, :lengths[0]]
#dev_obs_seq = all_obs_seq[lengths[0]+window_size:lengths[1]]
#dev_X = all_X[:, lengths[0]+window_size:lengths[1]]
#test_obs_seq = all_obs_seq[lengths[1]+window_size:]
#test_X = all_X[:, lengths[1]+window_size:]
lengths = list(np.insert(np.cumsum(lengths)+len_dataset-sum(lengths), 0, len_dataset-sum(lengths)))
lengths = [int(l) for l in lengths]
train_obs_seq = all_obs_seq[lengths[0]:lengths[1]]
train_X = all_X[:, lengths[0]:lengths[1]]
dev_obs_seq = all_obs_seq[lengths[1]+window_size:lengths[2]]
dev_X = all_X[:, lengths[1]+window_size:lengths[2]]
test_obs_seq = all_obs_seq[lengths[2]+window_size:]
test_X = all_X[:, lengths[2]+window_size:]
#lengths = np.cumsum(lengths)
#lengths = [int(l) for l in lengths]
#train_obs_seq = all_obs_seq[:lengths[0]]
#train_X = all_X[:, :lengths[0]]
#dev_obs_seq = all_obs_seq[lengths[0]+window_size:lengths[1]]
#dev_X = all_X[:, lengths[0]+window_size:lengths[1]]
#test_obs_seq = all_obs_seq[lengths[1]+window_size:]
#test_X = all_X[:, lengths[1]+window_size:]
else:
train_list = train_data['y'].ravel()
c = list(train_list).index(obs_seq[0])
train_obs_seq = np.transpose(train_list[c:])
train_X = np.transpose(train_data['X'].todense()[c:,:])
dev_obs_seq = np.transpose(dev_data['y'].ravel())
dev_X = np.transpose(dev_data['X'].todense())
test_obs_seq = np.transpose(test_data['y'].ravel())
test_X = np.transpose(test_data['X'].todense())
print("Number of points in train dataset = " + str(len(train_obs_seq)))
print("Number of points in dev dataset = " + str(len(dev_obs_seq)))
print("Number of points in test dataset = " + str(len(test_obs_seq)))
n_features = train_X.shape[0]
train_mean = np.zeros((n_features,1))
train_std = np.zeros((n_features,1))
if normalize:
# Normalization (z-score)
for feature in range(n_features): #for all features, normalize independently for each feature
# Get the z-score parameters from the training set (mean and std)
train_mean[feature] = np.mean(train_X[feature,:])
train_std[feature] = np.std(train_X[feature,:])
# Z-score the whole dataset with the parameters from training
# z=(x-mean)/std
train_X[feature,:]=(train_X[feature,:]-train_mean[feature])/train_std[feature]
#min max scaling
#maxV = max(train_X(feature,:))
#minV = min(train_X(feature,:))
#train_X(feature,:) = (train_X(feature,:) - minV) / (maxV - minV)
dev_X[feature,:]=(dev_X[feature,:]-train_mean[feature])/train_std[feature]
test_X[feature,:]=(test_X[feature,:]-train_mean[feature])/train_std[feature]
train_dataset = MyDataset(train_obs_seq, train_X, window_size, seq_len)
dev_dataset = MyDataset(dev_obs_seq, dev_X, window_size, seq_len)
test_dataset = MyDataset(test_obs_seq, test_X, window_size, seq_len)
if args.use_features:
train_dataset = create_dataset(train_X, train_obs_seq, batch_size, seq_len)
dev_dataset = create_dataset(dev_X, dev_obs_seq, batch_size, seq_len)
test_dataset = create_dataset(test_X, test_obs_seq, batch_size, seq_len)
assert len(train_dataset)>0, "Batch size or sequence length too big for training set!"
assert len(dev_dataset)>0, "Batch size or sequence length too big for validation set!"
assert len(test_dataset)>0, "Batch size or sequence length too big for test set!"
# Create an iterator of our data with torch DataLoader. This helps save on memory during training because, unlike a for loop,
# with an iterator the entire dataset does not need to be loaded into memory
print("Number of points in converted train dataset = " + str(len(train_dataset))+ " with sliding batches with batch_size "+ str(batch_size) +" and sequence length "+str(seq_len))
print("Number of points in converted dev dataset = " + str(len(dev_dataset))+ " with sliding batches with batch_size "+ str(batch_size) +" and sequence length "+str(seq_len))
print("Number of points in converted test dataset = " + str(len(test_dataset))+ " with sliding batches with batch_size "+ str(batch_size) +" and sequence length "+str(seq_len))
else:
#normalize data: min/max scaling (-1 and 1)
scaler = MinMaxScaler(feature_range=(-1, 1))
train_data_normalized = scaler.fit_transform(train_obs_seq.reshape(-1, 1))
train_data_normalized = torch.FloatTensor(train_data_normalized).view(-1)
#sequence/labeling
#input sequence length for training is 24. (1h data, 24h memory)
train_window = 24
train_inout_seq = create_inout_sequences(train_data_normalized, train_window)
train_dataset = create_dataset2(train_obs_seq)
dev_dataset = create_dataset2(np.concatenate(([train_obs_seq[-1]],dev_obs_seq)))
test_dataset = create_dataset2(np.concatenate(([dev_obs_seq[-1]],test_obs_seq)))
assert len(train_dataset[0])>0, "Batch size or sequence length too big for training set!"
assert len(dev_dataset[0])>0, "Batch size or sequence length too big for validation set!"
assert len(test_dataset[0])>0, "Batch size or sequence length too big for test set!"
# Create an iterator of our data with torch DataLoader. This helps save on memory during training because, unlike a for loop,
# with an iterator the entire dataset does not need to be loaded into memory
print("Number of points in converted train dataset = " + str(len(train_dataset[0]))+ " with sliding batches with batch_size "+ str(batch_size) +" and sequence length "+str(seq_len))
print("Number of points in converted dev dataset = " + str(len(dev_dataset[0]))+ " with sliding batches with batch_size "+ str(batch_size) +" and sequence length "+str(seq_len))
print("Number of points in converted test dataset = " + str(len(test_dataset[0]))+ " with sliding batches with batch_size "+ str(batch_size) +" and sequence length "+str(seq_len))
mse_train = []
mae_train = []
mse_dev = []
mae_dev = []
mse_test = []
mae_test = []
train_window = 24
n_montecarlos = 10
dw=0
#Load results
for i in range(len(dts)):
dt = dts[i]
#for j in range(len(dws)):
# dw = dws[j]
for montecarlo in range(n_montecarlos):
#Results
#Train predictions
#train_preds_seq = np.concatenate(torch.load(model_path + str(dt) + '.' + str(dw) +'/run1_results/best_model/train_preds_seq.pt')).ravel().tolist()
train_preds_seq = np.concatenate(torch.load(model_path + str(dt) + '.' + str(dw) +'/5_5_5/tensors/train_preds_seq_mc' + str(montecarlo) + '.pt')).ravel().tolist()
#Validation predictions
#best_val_preds_seq = np.concatenate(torch.load(model_path + str(dt) + '.' + str(dw) +'/run1_results/best_model/best_val_preds_seq.pt')).ravel().tolist()
best_val_preds_seq = np.concatenate(torch.load(model_path + str(dt) + '.' + str(dw) +'/5_5_5/tensors/best_val_preds_seq_mc' + str(montecarlo) + '.pt')).ravel().tolist()
#Test predictions
#test_preds_seq = np.concatenate(torch.load(model_path + str(dt) + '.' + str(dw) +'/run1_results/best_model/test_preds_seq_mc.pt')).ravel().tolist()
test_preds_seq = np.concatenate(torch.load(model_path + str(dt) + '.' + str(dw) +'/5_5_5/tensors/test_preds_seq_mc' + str(montecarlo) + '.pt')).ravel().tolist()
mae_test.append(np.mean((abs(test_preds_seq-test_obs_seq))/test_obs_seq)*100)
#mse
#mse_train.append(mean_squared_error(train_obs_seq[train_window:], train_preds_seq))
#mse_dev.append(mean_squared_error(dev_obs_seq[dw:], best_val_preds_seq))
#mse_test.append(mean_squared_error(test_obs_seq[dw:], test_preds_seq))
#mae
#mae_train.append(mean_absolute_error(train_obs_seq[train_window:], train_preds_seq))
#mae_dev.append(mean_absolute_error(dev_obs_seq[dw:], best_val_preds_seq))
#mae_test.append(mean_absolute_error(test_obs_seq[dw:], test_preds_seq))
#MSE
print(np.mean(mae_test))
mse_mean_train = np.mean(mse_train)
mse_std_train = np.std(mse_train)
mse_mean_dev = np.mean(mse_dev)
mse_std_dev = np.std(mse_dev)
mse_mean_test = np.mean(mse_test)
mse_std_test = np.std(mse_test)
#rmse
rmse_mean_train = np.sqrt(mse_mean_train)
rmse_std_train = np.sqrt(mse_std_train)
rmse_mean_dev = np.sqrt(mse_mean_dev)
rmse_std_dev = np.sqrt(mse_std_dev)
rmse_mean_test = np.sqrt(mse_mean_test)
rmse_std_test = np.sqrt(mse_std_test)
#normalize by mean y
mnrmse_mean_train = rmse_mean_train/np.mean(train_obs_seq)
mnrmse_std_train = rmse_std_train/np.mean(train_obs_seq)
mnrmse_mean_dev = rmse_mean_dev/np.mean(dev_obs_seq)
mnrmse_std_dev = rmse_std_dev/np.mean(dev_obs_seq)
mnrmse_mean_test = rmse_mean_test/np.mean(test_obs_seq)
mnrmse_std_test = rmse_std_test/np.mean(test_obs_seq)
#normalize by max(y)-min(y)
mmnrmse_mean_train = rmse_mean_train/(max(train_obs_seq)-min(train_obs_seq))
mmnrmse_std_train = rmse_std_train/(max(train_obs_seq)-min(train_obs_seq))
mmnrmse_mean_dev = rmse_mean_dev/(max(dev_obs_seq)-min(dev_obs_seq))
mmnrmse_std_dev = rmse_std_dev/(max(dev_obs_seq)-min(dev_obs_seq))
mmnrmse_mean_test = rmse_mean_test/(max(test_obs_seq)-min(test_obs_seq))
mmnrmse_std_test = rmse_std_test/(max(test_obs_seq)-min(test_obs_seq))
#normalize by std(y)
snrmse_mean_train = rmse_mean_train/np.std(train_obs_seq)
snrmse_std_train = rmse_std_train/np.std(train_obs_seq)
snrmse_mean_dev = rmse_mean_dev/np.std(dev_obs_seq)
snrmse_std_dev = rmse_std_dev/np.std(dev_obs_seq)
snrmse_mean_test = rmse_mean_test/np.std(test_obs_seq)
snrmse_std_test = rmse_std_test/np.std(test_obs_seq)
#normalize by quantile(y,0.75)-quantile(y,0.25)
qnrmse_mean_train = rmse_mean_train/(np.quantile(train_obs_seq,0.75)-np.quantile(train_obs_seq,0.25))
qnrmse_std_train = rmse_std_train/(np.quantile(train_obs_seq,0.75)-np.quantile(train_obs_seq,0.25))
qnrmse_mean_dev = rmse_mean_dev/(np.quantile(dev_obs_seq,0.75)-np.quantile(dev_obs_seq,0.25))
qnrmse_std_dev = rmse_std_dev/(np.quantile(dev_obs_seq,0.75)-np.quantile(dev_obs_seq,0.25))
qnrmse_mean_test = rmse_mean_test/(np.quantile(test_obs_seq,0.75)-np.quantile(test_obs_seq,0.25))
qnrmse_std_test = rmse_std_test/(np.quantile(test_obs_seq,0.75)-np.quantile(test_obs_seq,0.25))
#MAE
mae_mean_train = np.mean(mae_train)
mae_std_train = np.std(mae_train)
mae_mean_dev = np.mean(mae_dev)
mae_std_dev = np.std(mae_dev)
mae_mean_test = np.mean(mae_test)
mae_std_test = np.std(mae_test)
#rmse
rmae_mean_train = np.sqrt(mae_mean_train)
rmae_std_train = np.sqrt(mae_std_train)
rmae_mean_dev = np.sqrt(mae_mean_dev)
rmae_std_dev = np.sqrt(mae_std_dev)
rmae_mean_test = np.sqrt(mae_mean_test)
rmae_std_test = np.sqrt(mae_std_test)
#normalize by mean y
mnrmae_mean_train = rmae_mean_train/np.mean(train_obs_seq)
mnrmae_std_train = rmae_std_train/np.mean(train_obs_seq)
mnrmae_mean_dev = rmae_mean_dev/np.mean(dev_obs_seq)
mnrmae_std_dev = rmae_std_dev/np.mean(dev_obs_seq)
mnrmae_mean_test = rmae_mean_test/np.mean(test_obs_seq)
mnrmae_std_test = rmae_std_test/np.mean(test_obs_seq)
#normalize by max(y)-min(y)
mmnrmae_mean_train = rmae_mean_train/(max(train_obs_seq)-min(train_obs_seq))
mmnrmae_std_train = rmae_std_train/(max(train_obs_seq)-min(train_obs_seq))
mmnrmae_mean_dev = rmae_mean_dev/(max(dev_obs_seq)-min(dev_obs_seq))
mmnrmae_std_dev = rmae_std_dev/(max(dev_obs_seq)-min(dev_obs_seq))
mmnrmae_mean_test = rmae_mean_test/(max(test_obs_seq)-min(test_obs_seq))
mmnrmae_std_test = rmae_std_test/(max(test_obs_seq)-min(test_obs_seq))
#normalize by std(y)
snrmae_mean_train = rmae_mean_train/np.std(train_obs_seq)
snrmae_std_train = rmae_std_train/np.std(train_obs_seq)
snrmae_mean_dev = rmae_mean_dev/np.std(dev_obs_seq)
snrmae_std_dev = rmae_std_dev/np.std(dev_obs_seq)
snrmae_mean_test = rmae_mean_test/np.std(test_obs_seq)
snrmae_std_test = rmae_std_test/np.std(test_obs_seq)
#normalize by quantile(y,0.75)-quantile(y,0.25)
qnrmae_mean_train = rmae_mean_train/(np.quantile(train_obs_seq,0.75)-np.quantile(train_obs_seq,0.25))
qnrmae_std_train = rmae_std_train/(np.quantile(train_obs_seq,0.75)-np.quantile(train_obs_seq,0.25))
qnrmae_mean_dev = rmae_mean_dev/(np.quantile(dev_obs_seq,0.75)-np.quantile(dev_obs_seq,0.25))
qnrmae_std_dev = rmae_std_dev/(np.quantile(dev_obs_seq,0.75)-np.quantile(dev_obs_seq,0.25))
qnrmae_mean_test = rmae_mean_test/(np.quantile(test_obs_seq,0.75)-np.quantile(test_obs_seq,0.25))
qnrmae_std_test = rmae_std_test/(np.quantile(test_obs_seq,0.75)-np.quantile(test_obs_seq,0.25))
#FFT dev
#[fft_srmse_phase_dev, fft_srmse_ampl_dev] = fft_(dev_obs_seq, best_val_preds_seq, False)
#FFT test
#[fft_srmse_phase_test, fft_srmse_ampl_test] = fft_(test_obs_seq, test_preds_seq, False)
#result['dt'] = dt
#result['dw'] = dw
#result['fft_mse_phase_dev'] = fft_srmse_phase_dev
#result['fft_mse_ampl_dev'] = fft_srmse_ampl_dev
# result['fft_mse_phase_test'] = fft_srmse_phase_test
# result['fft_mse_ampl_test'] = fft_srmse_ampl_test
mse = dict()
mse['mean_train'] = mse_mean_train
mse['std_train'] = mse_std_train
mse['mean_dev'] = mse_mean_dev
mse['std_dev'] = mse_std_dev
mse['mean_test'] = mse_mean_test
mse['std_test'] = mse_std_test
result['mse'] = mse
rmse = dict()
rmse['mean_train'] = rmse_mean_train
rmse['std_train'] = rmse_std_train
rmse['mean_dev'] = rmse_mean_dev
rmse['std_dev'] = rmse_std_dev
rmse['mean_test'] = rmse_mean_test
rmse['std_test'] = rmse_std_test
result['rmse'] = rmse
mnrmse = dict()
mnrmse['mean_train'] = mnrmse_mean_train
mnrmse['std_train'] = mnrmse_std_train
mnrmse['mean_dev'] = mnrmse_mean_dev
mnrmse['std_dev'] = mnrmse_std_dev
mnrmse['mean_test'] = mnrmse_mean_test
mnrmse['std_test'] = mnrmse_std_test
result['mnrmse'] = mnrmse
mmnrmse = dict()
mmnrmse['mean_train'] = mmnrmse_mean_train
mmnrmse['std_train'] = mmnrmse_std_train
mmnrmse['mean_dev'] = mmnrmse_mean_dev
mmnrmse['std_dev'] = mmnrmse_std_dev
mmnrmse['mean_test'] = mmnrmse_mean_test
mmnrmse['std_test'] = mmnrmse_std_test
result['mmnrmse'] = mmnrmse
snrmse = dict()
snrmse['mean_train'] = snrmse_mean_train
snrmse['std_train'] = snrmse_std_train
snrmse['mean_dev'] = snrmse_mean_dev
snrmse['std_dev'] = snrmse_std_dev
snrmse['mean_test'] = snrmse_mean_test
snrmse['std_test'] = snrmse_std_test
result['snrmse'] = snrmse
qnrmse = dict()
qnrmse['mean_train'] = qnrmse_mean_train
qnrmse['std_train'] = qnrmse_std_train
qnrmse['mean_dev'] = qnrmse_mean_dev
qnrmse['std_dev'] = qnrmse_std_dev
qnrmse['mean_test'] = qnrmse_mean_test
qnrmse['std_test'] = qnrmse_std_test
result['qnrmse'] = qnrmse
#MAE
mae = dict()
mae['mean_train'] = mae_mean_train
mae['std_train'] = mae_std_train
mae['mean_dev'] = mae_mean_dev
mae['std_dev'] = mae_std_dev
mae['mean_test'] = mae_mean_test
mae['std_test'] = mae_std_test
result['mae'] = mae
rmae = dict()
rmae['mean_train'] = rmae_mean_train
rmae['std_train'] = rmae_std_train
rmae['mean_dev'] = rmae_mean_dev
rmae['std_dev'] = rmae_std_dev
rmae['mean_test'] = rmae_mean_test
rmae['std_test'] = rmae_std_test
result['rmae'] = rmae
mnrmae = dict()
mnrmae['mean_train'] = mnrmae_mean_train
mnrmae['std_train'] = mnrmae_std_train
mnrmae['mean_dev'] = mnrmae_mean_dev
mnrmae['std_dev'] = mnrmae_std_dev
mnrmae['mean_test'] = mnrmae_mean_test
mnrmae['std_test'] = mnrmae_std_test
result['mnrmae'] = mnrmae
mmnrmae = dict()
mmnrmae['mean_train'] = mmnrmae_mean_train
mmnrmae['std_train'] = mmnrmae_std_train
mmnrmae['mean_dev'] = mmnrmae_mean_dev
mmnrmae['std_dev'] = mmnrmae_std_dev
mmnrmae['mean_test'] = mmnrmae_mean_test
mmnrmae['std_test'] = mmnrmae_std_test
result['mmnrmae'] = mmnrmae
snrmae = dict()
snrmae['mean_train'] = snrmae_mean_train
snrmae['std_train'] = snrmae_std_train
snrmae['mean_dev'] = snrmae_mean_dev
snrmae['std_dev'] = snrmae_std_dev
snrmae['mean_test'] = snrmae_mean_test
snrmae['std_test'] = snrmae_std_test
result['snrmae'] = snrmae
qnrmae = dict()
qnrmae['mean_train'] = qnrmae_mean_train
qnrmae['std_train'] = qnrmae_std_train
qnrmae['mean_dev'] = qnrmae_mean_dev
qnrmae['std_dev'] = qnrmae_std_dev
qnrmae['mean_test'] = qnrmae_mean_test
qnrmae['std_test'] = qnrmae_std_test
result['qnrmae'] = qnrmae
#Save BERT_runx_prediction_report.mat
#save([path, 'BERT_run' num2str(run) '_prediction_report.mat'], 'out_results')
savemat(args.output_dir + 'run' + str(run) + '_prediction_report_LSTM.mat', result, oned_as='row')
if __name__ == '__main__':
main()
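# Illustrative only: the four normalised-RMSE variants computed above all follow the
# same pattern, RMSE divided by a scale statistic of the observed sequence. A compact
# sketch of that idea (hypothetical helper, not called by the script above; assumes
# numpy imported as np, as elsewhere in this file):
def normalized_rmse(rmse_value, obs_seq, mode='mean'):
    """Return rmse_value divided by a scale statistic of obs_seq (sketch only)."""
    if mode == 'mean':        # mnrmse
        scale = np.mean(obs_seq)
    elif mode == 'range':     # mmnrmse
        scale = max(obs_seq) - min(obs_seq)
    elif mode == 'std':       # snrmse
        scale = np.std(obs_seq)
    elif mode == 'iqr':       # qnrmse
        scale = np.quantile(obs_seq, 0.75) - np.quantile(obs_seq, 0.25)
    else:
        raise ValueError('unknown mode: ' + mode)
    return rmse_value / scale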
|
StarcoderdataPython
|
5091873
|
<filename>tests/cli/test_indirect.py<gh_stars>0
# -*- coding: utf-8 -*-
#
# Copyright 2019 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test behavior of indirect inputs/outputs files list."""
def test_indirect_inputs(cli, client):
"""Test indirect inputs that are programmatically created."""
# Set up a script that creates indirect inputs
cli('run', '--no-output', 'mkdir', 'foo')
cli('run', '--no-output', 'mkdir', '.renku/tmp')
cli('run', 'touch', 'foo/bar')
cli('run', 'touch', 'baz')
cli('run', 'touch', 'qux')
cli(
'run', 'sh', '-c',
'echo "echo foo > .renku/tmp/inputs.txt" > script.sh'
)
cli(
'run', 'sh', '-c',
'echo "echo baz >> .renku/tmp/inputs.txt" >> script.sh'
)
cli(
'run', 'sh', '-c',
'echo "echo qux > .renku/tmp/outputs.txt" >> script.sh'
)
exit_code, cwl = cli('run', 'sh', '-c', 'sh script.sh')
assert exit_code == 0
assert len(cwl.inputs) == 3
cwl.inputs.sort(key=lambda e: e.type)
assert str(cwl.inputs[0].default) == '../../foo'
assert cwl.inputs[0].type == 'Directory'
assert cwl.inputs[0].inputBinding is None
assert str(cwl.inputs[1].default) == '../../baz'
assert cwl.inputs[1].type == 'File'
assert cwl.inputs[1].inputBinding is None
assert len(cwl.outputs) == 1
assert cwl.outputs[0].outputBinding.glob == 'qux'
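# For reference, the script.sh built up above ends up containing, in order:
#
#   echo foo > .renku/tmp/inputs.txt
#   echo baz >> .renku/tmp/inputs.txt
#   echo qux > .renku/tmp/outputs.txt
#
# so after `sh script.sh` the indirect-inputs file lists foo and baz and the
# indirect-outputs file lists qux; the assertions on cwl.inputs and cwl.outputs then
# verify that those paths were picked up.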
|
StarcoderdataPython
|
4846810
|
<reponame>kasohrab/robocup-software
import main
import robocup
import behavior
import constants
import enum
import math
import standard_play
import evaluation
from situations import Situation
import tactics.coordinated_pass
import skills.move
import skills.capture
import random
## Basic Offensive Pileup play
# Has one robot capture the ball
# One robot goes to a dropback point behind the pileup
# One robot goes to the side of the pileup
#
class BasicOffensivePileup(standard_play.StandardPlay):
_situation_list = [
Situation.OFFENSIVE_PILEUP,
Situation.DEFENSIVE_PILEUP
] # yapf: disable
def __init__(self):
super().__init__(continuous=False)
self.add_transition(behavior.Behavior.State.start,
behavior.Behavior.State.running, lambda: True,
'Immediately')
def on_enter_running(self):
ball = main.ball().pos
dropback_point = robocup.Point(
ball.x, 2 * ball.y / 3
        )  #The sign of ball.x is used below (copysign), so if it's zero, make it slightly positive
if (ball.x == 0):
ball.x += 0.01
side_point = robocup.Point(
math.copysign(0.4 * constants.Field.Width, -ball.x) +
ball.x, #Send a robot to the left or right of the ball
ball.y)
self.add_subbehavior(skills.move.Move(dropback_point),
'move to drop back')
self.add_subbehavior(skills.move.Move(side_point),
'move to side point')
self.add_subbehavior(skills.capture.Capture(), 'capture ball')
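# Rough numeric illustration of the geometry above (hypothetical ball position and a
# field width of 6.0 used purely as an example): with ball = (x=1.0, y=3.0),
#   dropback_point = (1.0, 2 * 3.0 / 3)             -> (1.0, 2.0)
#   side x         = copysign(0.4 * 6.0, -1.0) + 1.0 -> -1.4
# i.e. one robot drops back behind the pileup and another sets up on the far side of it,
# while the third captures the ball.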
|
StarcoderdataPython
|
8098335
|
# -*- coding: utf-8 -*-
# pylint: disable=missing-docstring
import datetime as dti
import pytest # type: ignore
import nineties.parser as p
VALID_FUTURE_ISO = "3210-09-08T07:06:05.432"
VALID_FUTURE_ISO_CEST = VALID_FUTURE_ISO + "+0200"
VALID_FUTURE_ISO_CEST_COLON = VALID_FUTURE_ISO + "+02:00"
VALID_FUTURE_DT_UTC_FROM_CEST = dti.datetime.strptime(
"3210-09-08T05:06:05.432", p.ISO_FMT
)
VALID_FUTURE_DT_UTC_FROM_UTC = dti.datetime.strptime(VALID_FUTURE_ISO, p.ISO_FMT)
def test_parse_timestamp_ok():
assert p.parse_timestamp(None) is None
def test_parse_timestamp_ok_null():
assert p.parse_timestamp(p.JR_NULL) is None
def test_parse_timestamp_ok_future_iso_cest():
then = VALID_FUTURE_DT_UTC_FROM_CEST
assert p.parse_timestamp(VALID_FUTURE_ISO_CEST) == then
assert p.parse_timestamp(VALID_FUTURE_ISO_CEST_COLON) == then
def test_parse_timestamp_ok_future_iso_implicit_utc():
then = VALID_FUTURE_DT_UTC_FROM_UTC
assert p.parse_timestamp(VALID_FUTURE_ISO) == then
assert p.parse_timestamp(VALID_FUTURE_ISO + "+00:00") == then
def test_parse_timestamp_ok_future_iso_utc():
then = VALID_FUTURE_DT_UTC_FROM_UTC
assert p.parse_timestamp(VALID_FUTURE_ISO + "+0000") == then
assert p.parse_timestamp(VALID_FUTURE_ISO + "+00:00") == then
assert p.parse_timestamp(VALID_FUTURE_ISO + "-0000") == then
assert p.parse_timestamp(VALID_FUTURE_ISO + "-00:00") == then
def test_parse_timestamp_nok_outer_length():
with pytest.raises(ValueError):
p.parse_timestamp("" * 23)
def test_parse_timestamp_nok_offset_length():
with pytest.raises(AssertionError):
p.parse_timestamp(VALID_FUTURE_ISO + "+12:34X")
with pytest.raises(AssertionError):
p.parse_timestamp(VALID_FUTURE_ISO + "+1234X")
def test_parse_timestamp_nok_offset_sign():
with pytest.raises(AssertionError):
p.parse_timestamp(VALID_FUTURE_ISO + "*1234")
def test_parse_timestamp_nok_offset_hours_value():
with pytest.raises(ValueError):
p.parse_timestamp(VALID_FUTURE_ISO + "+XX34")
def test_parse_timestamp_nok_offset_minutes_value():
with pytest.raises(ValueError):
p.parse_timestamp(VALID_FUTURE_ISO + "+12XX")
def test_parse_dsl_entry_nok_no_content():
assert p.parse_dsl_entry(None) == {}
assert p.parse_dsl_entry("") == {}
def test_parse_dsl_entry_nok_start_data_wrong():
text_entry = '"tag@42(a=b,e=' + "do_not_care" + ']"'
with pytest.raises(ValueError):
p.parse_dsl_entry(text_entry, final_key="e")
def test_parse_dsl_entry_nok_only_kv_seps():
text_entry = (
'"tag@42[=,=,=,=,==,===,=,' + p.FINAL_DSL_KEY + "=" + "do_not_care" + ']"'
)
assert p.parse_dsl_entry(text_entry) == {"final": "do_not_care"}
def test_parse_dsl_entry_nok_start_data_multiple():
text_entry = '"tag@42[[a=b,' + p.FINAL_DSL_KEY + "=" + "do_not_care" + ']"'
assert p.parse_dsl_entry(text_entry) == {"[a": "b", "final": "do_not_care"}
def test_parse_dsl_entry_nok_missing_kv_sep_and_value():
text_entry = '"tag@42(a=b,c,e=' + "do_not_care" + ']"'
with pytest.raises(ValueError):
p.parse_dsl_entry(text_entry, final_key="e")
def test_parse_dsl_entry_nok_rococo_wrong():
text_entry = "'tag@42[a=b,e=" + "no_strip" + "]'"
assert p.parse_dsl_entry(text_entry, final_key="e") == {"a": "b", "e": "no_strip]'"}
def test_parse_dsl_entry_ok_final_value_contains_key():
text_entry = '"tag@42[a=b,e=e=e,e=e,date=2]"'
assert p.parse_dsl_entry(text_entry, final_key="e") == {
"a": "b",
"e": "e=e,e=e,date=2",
}
def test_parse_dsl_entry_nok_final_key_wrong_implicit():
text_entry = '"tag@42[a=b,wrong=' + "do_not_care" + ']"'
with pytest.raises(ValueError):
p.parse_dsl_entry(text_entry)
def test_parse_dsl_entry_nok_final_key_wrong_explicit():
text_entry = '"tag@42[a=b,right=' + "do_not_care" + ']"'
with pytest.raises(ValueError):
p.parse_dsl_entry(text_entry, final_key="wrong")
def test_parse_dsl_entry_ok():
text_entry = '"tag@42[a=b,' + p.FINAL_DSL_KEY + "=" + p.JR_NULL + ']"'
assert p.parse_dsl_entry(text_entry) == {"a": "b", p.FINAL_DSL_KEY: p.NA}
def test_parse_dsl_entry_ok_final_data():
text_entry = '"tag@42[a=b,' + p.FINAL_DSL_KEY + "=" + ",,,,,===" + ']"'
assert p.parse_dsl_entry(text_entry) == {"a": "b", p.FINAL_DSL_KEY: ",,,,,==="}
def test_parse_dsl_entry_ok_date():
text_entry = (
'"tag@42[daTE='
+ VALID_FUTURE_ISO_CEST_COLON
+ ","
+ p.FINAL_DSL_KEY
+ "="
+ p.JR_NULL
+ ']"'
)
assert p.parse_dsl_entry(text_entry) == {
"daTE": VALID_FUTURE_DT_UTC_FROM_CEST,
p.FINAL_DSL_KEY: p.NA,
}
def test_parse_dsl_entry_nok_date_value_invalid():
text_entry = (
'"tag@42[daTE=9999'
+ VALID_FUTURE_ISO_CEST_COLON
+ ","
+ p.FINAL_DSL_KEY
+ "="
+ p.JR_NULL
+ ']"'
)
with pytest.raises(ValueError, match=r"time data '9999.*"):
assert p.parse_dsl_entry(text_entry)
def test_parse_dsl_entry_ok_id_sequence():
text_entry = (
'"tag@42[iD=42,myID=-1,someSequence=0,'
+ p.FINAL_DSL_KEY
+ "="
+ p.JR_NULL
+ ']"'
)
assert p.parse_dsl_entry(text_entry) == {
"iD": 42,
"myID": -1,
"someSequence": 0,
p.FINAL_DSL_KEY: p.NA,
}
def test_parse_dsl_entry_nok_id_value_no_int():
text_entry = '"tag@42[iD=no_int,' + p.FINAL_DSL_KEY + "=" + p.JR_NULL + ']"'
message = r"invalid literal for int\(\) with base 10: 'no_int'"
with pytest.raises(ValueError, match=message):
assert p.parse_dsl_entry(text_entry)
def test_parser_split_kv_ok():
assert p.split_kv("pragma", "ag") == ("pr", "ma")
assert p.split_kv("start", "s") == (None,) * 2
assert p.split_kv("+==", "=") == ("+", "=")
assert p.split_kv("==", "=") == (None,) * 2
assert p.split_kv("=", "=") == (None,) * 2
assert p.split_kv("=", "+") == (None, "=")
assert p.split_kv("", "+") == (None, "")
assert p.split_kv("", "ä") == (None, "")
def test_parser_split_kv_nok():
assert p.split_kv("", 42) == (None, "")
def test_parser_split_issue_key_ok():
text_key = "BAZ-42"
assert p.split_issue_key(text_key) == ("BAZ", 42)
def test_parser_split_issue_key_ok_negative():
text_key = "BAZ--42"
assert p.split_issue_key(text_key) == ("BAZ", -42)
def test_parser_split_issue_key_nok_wrong_sep():
text_key = "BAZ_42"
message = text_key + r" is not a valid issue key composed of project and serial"
with pytest.raises(ValueError, match=message):
assert p.split_issue_key(text_key)
def test_parser_split_issue_key_nok_no_serial():
no_serial = "Bar"
text_key = "Foo-" + no_serial
message = r"invalid literal for int\(\) with base 10: '" + no_serial + "'"
with pytest.raises(ValueError, match=message):
assert p.split_issue_key(text_key)
def test_parser_sorted_issue_keys_ok():
text_keys = ("BAZ-42", "BAR-999", "BAZ-41", "A-1")
sorted_keys = ("A-1", "BAR-999", "BAZ-41", "BAZ-42")
assert tuple(p.sorted_issue_keys_gen(text_keys)) == sorted_keys
def test_parser_sorted_issue_keys_ok_corner_min():
text_keys = ("BAZ-42",)
sorted_keys = text_keys
assert tuple(p.sorted_issue_keys_gen(text_keys)) == sorted_keys
def test_parser_sorted_issue_keys_ok_empty():
text_keys = tuple()
sorted_keys = text_keys
assert tuple(p.sorted_issue_keys_gen(text_keys)) == sorted_keys
def test_parser_sorted_issue_keys_ok_empty_string():
text_keys = ""
sorted_keys = tuple()
assert tuple(p.sorted_issue_keys_gen(text_keys)) == sorted_keys
def test_parser_sorted_issue_keys_nok_non_iterable():
data = 42
message = r"'int' object is not iterable"
with pytest.raises(TypeError, match=message):
tuple(p.sorted_issue_keys_gen(data))
def test_parser_most_common_issue_projects_ok():
text_keys = ("BAZ-42", "BAR-999", "BAZ-41", "A-1")
most_common = [("BAZ", 2), ("BAR", 1), ("A", 1)]
assert p.most_common_issue_projects(text_keys) == most_common
def test_parser_most_common_issue_projects_ok_duplicates():
text_keys = ("BAZ-42", "BAR-999", "BAZ-42", "A-1")
most_common = [("BAZ", 2), ("BAR", 1), ("A", 1)]
assert p.most_common_issue_projects(text_keys) == most_common
def test_parser_stable_make_unique_ok():
data = ("BAZ-41", "BAR-999", "BAZ-41", "A-1")
stable_unique = ("BAZ-41", "BAR-999", "A-1")
assert tuple(p.stable_make_unique(data)) == stable_unique
def test_parser_stable_make_unique_ok_empty():
data = tuple()
stable_unique = tuple()
assert tuple(p.stable_make_unique(data)) == stable_unique
def test_parser_stable_make_unique_nok_non_iterable():
data = 42
message = r"'int' object is not iterable"
with pytest.raises(TypeError, match=message):
tuple(p.stable_make_unique(data))
def test_parser_stable_make_unique_nok_non_hashable():
data = ({}, [], set())
message = r"unhashable type: 'dict'"
with pytest.raises(TypeError, match=message):
tuple(p.stable_make_unique(data))
|
StarcoderdataPython
|
5075988
|
<filename>dbconn.py
from sqlalchemy import *
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import configparser
config = configparser.ConfigParser()
config.read("config.txt")
login_string = config.get("configuration", "password")
engine = create_engine(login_string, echo=False)
conn = engine.connect()
Session = sessionmaker(bind=engine)
Base = declarative_base()
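# A minimal config.txt matching the keys read above could look like this (the DSN is
# illustrative only; any SQLAlchemy connection string works):
#
#   [configuration]
#   password = postgresql://user:secret@localhost:5432/mydb
#
# configparser returns that string via config.get("configuration", "password") and it
# is passed straight to create_engine().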
|
StarcoderdataPython
|
1921167
|
<gh_stars>0
L = int(input())
a, g = [int(x) for x in input().split()]
pa, pg = [float(x) for x in input().split()]
pa = L / (a / pa)
pg = L / (g / pg)
print(f'{pa:.2f}')
print(f'{pg:.2f}')
if pa < pg: print('Alcool')
elif pa > pg: print('Gasolina')
else: print('Indiferente')
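# Worked example with made-up figures: for L = 300 km, alcohol at 8 km/l costing 3.00
# per litre and petrol at 12 km/l costing 5.00 per litre:
#   pa = 300 / (8 / 3.00)  = 112.50   # total spend on alcohol
#   pg = 300 / (12 / 5.00) = 125.00   # total spend on petrol
# so the program prints 112.50, 125.00 and then 'Alcool', because pa < pg.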
|
StarcoderdataPython
|
9789254
|
<reponame>tejas1995/nmtpytorch
import torch
class TFEmbedding(torch.nn.Embedding):
"""Position-aware embeddings for Transformer models.
Adapted from OpenNMT-py & original `Attention is all you need` paper.
"""
def __init__(self, num_embeddings, embedding_dim, max_len=1024, dropout=0.1):
self.num_embeddings = num_embeddings
self.embedding_dim = embedding_dim
self.max_len = max_len
self.dropout = dropout
# pos_embs: (max_len, emb_dim)
pos_embs = torch.zeros(self.max_len, self.embedding_dim)
# pos: (max_len, 1)
pos = torch.arange(self.max_len).unsqueeze(1)
# divs:
divs = torch.pow(10000,
torch.arange(self.embedding_dim).float().div(self.embedding_dim))
pos_embs[:, 0::2] = torch.sin(pos / divs[0::2])
pos_embs[:, 1::2] = torch.cos(pos / divs[1::2])
# pos_embs: (max_len, 1, emb_dim)
pos_embs.unsqueeze_(1)
sqrt_dim = torch.scalar_tensor(self.embedding_dim).sqrt()
# Call parent's init() first
super().__init__(num_embeddings, embedding_dim, padding_idx=0)
# Register non-learnable params as buffers
self.register_buffer('pos_embs', pos_embs)
self.register_buffer('sqrt_dim', sqrt_dim)
# Create dropout layer
self.dropout_layer = torch.nn.Dropout(p=self.dropout)
def forward(self, x):
# Get the embeddings from parent's forward first
embs = super().forward(x)
return self.dropout_layer(
embs.mul(self.sqrt_dim) + self.pos_embs[:embs.size(0)])
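# Minimal usage sketch. The input is assumed to be time-major token ids of shape
# (seq_len, batch), which matches the pos_embs[:embs.size(0)] indexing above; index 0
# is the padding id. Sizes below are arbitrary examples.
if __name__ == "__main__":
    emb = TFEmbedding(num_embeddings=1000, embedding_dim=512)
    tokens = torch.randint(1, 1000, (20, 8))   # (seq_len=20, batch=8) token ids
    out = emb(tokens)                          # scaled embeddings + positional encodings
    print(out.shape)                           # torch.Size([20, 8, 512])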
|
StarcoderdataPython
|
8310
|
from logging import warning
from requests import get
from .info import Info
from .provider import Provider
from .providers import get_provider
class Parser:
def __init__(self, args: dict):
self.params = args
def init_provider(
self,
chapter_progress: callable = None,
global_progress: callable = None,
log: callable = None,
quest: callable = None,
info: Info = None,
quest_password: callable = None,
):
original_url = self.params.get('url', '')
provider_url = self.params.get('force_provider', None)
provider = get_provider(provider_url or original_url)
if isinstance(provider, bool):
raise AttributeError('Provider not found')
# update url (if redirect)
self.provider = provider(info) # type: Provider
self.provider.original_url = original_url
real_url = self.check_url(original_url)
if self.provider.allow_auto_change_url():
if real_url != original_url:
warning('Manga url changed! New url: {}'.format(real_url))
self.params['url'] = real_url
self.provider.quiet = self.params.get('quiet', False)
self.provider.set_chapter_progress_callback(chapter_progress)
self.provider.set_global_progress_callback(global_progress)
self.provider.set_log_callback(log)
self.provider.set_quest_callback(quest)
self.provider.set_quest_password_callback(quest_password)
def start(self):
self.provider.process(self.params['url'], self.params)
def check_url(self, url):
proxy = self.params.get('proxy', None)
proxies = {
'http': proxy,
'https': proxy,
} if proxy else None
with get(url, stream=True, proxies=proxies) as response:
_url = response.url
if url != _url:
url = _url
return url
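# Hypothetical usage sketch (the URL is a placeholder and init_provider() performs a
# real HTTP request via check_url(), so this is illustrative only):
if __name__ == "__main__":
    parser = Parser({"url": "https://example.com/manga/some-title", "quiet": True})
    parser.init_provider(log=print)
    parser.start()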
|
StarcoderdataPython
|
8140437
|
<filename>Code/API/funibot_api/funiserial.py
from __future__ import annotations
from traceback import print_exc
from benedict import benedict
from json import JSONDecoder, JSONEncoder, JSONDecodeError
from enum import Enum
from typing import Union, Tuple, List, Optional
from funibot_api.funimock import IMockSerial
from serial import Serial
class eFuniType(Enum):
"""Type de communication (GET/SET/ACK)"""
GET = 'get'
SET = 'set'
ACK = 'ack'
class eFuniModeDeplacement(Enum):
"""Mode de déplacement pour 'dep' (START/STOP/DISTANCE)"""
START = 'start'
STOP = 'stop'
DISTANCE = 'distance'
class eFuniModeCalibration(Enum):
"""Mode de calibration pour 'cal' (CABLE/SOL)"""
CABLE = 'cable'
SOL = 'sol'
class eFuniModeMoteur(Enum):
"""Mode de calibration pour 'mot' (ON/OFF/RESET)"""
ON = 'on'
OFF = 'off'
RESET = 'reset'
class eFuniRegime(Enum):
"""Régime du Funibot 'reg' (ARRET/DIRECTION/POSITION)"""
ARRET = 'arr'
DIRECTION = 'dir'
POSITION = 'pos'
class FuniCommException(Exception):
"""Exception lancée lors d'une erreur de communication ou de paramètres"""
pass
class ErrSupEstNone(Exception):
"""Levé lorsque 'err_sup' est None dans 'err' (par exemple, avec le MockSerial)"""
def __init__(self, erreurs: List[FuniErreur], *args: object) -> None:
super().__init__(*args)
self.erreurs = erreurs
class ReponseACK(Exception):
"""Levée par FuniSerial.envoyer lorsque le type est ACK.
Signifie aux méthode gérant les commandes de retourner None.
Toujours attrapé dans la classe FuniSerial, ne devrait jamais sortir.
"""
pass
FUNI_ERREUR_MESSAGES =\
[
"AUCUNE_ERREUR",
"ADD_POLE_DEPASSEMENT",
"SET_POLE_INEXISTANT",
"SET_LONGUEUR_CABLE_INEXISTANT",
"GET_POSITION_PAS_DE_POLES",
"GET_POSITION_TROP_COURTS_2D",
"GET_POSITION_0_TROP_LONG_2D",
"GET_POSITION_1_TROP_LONG_2D",
"GET_POSITION_TROP_COURTS_3D",
"GET_POSITION_ALIGNES_3D",
"GET_POSITION_RACINE_ESTIMEE_3D",
"DEPLACEMENT_DIRECTIONNEL_ERREUR_MAJEURE",
"DEPLACEMENT_DIRECTIONNEL_ERREUR_MINEURE",
"DEPLACEMENT_POSITION_ERREUR_MAJEURE",
"DEPLACEMENT_POSITION_ERREUR_MINEURE",
"GET_POLE_INEXISTANT",
"GET_ACCROCHE_INEXISTANTE",
"GET_LONGUEUR_CABLE_INEXISTANT",
"GET_POLE_RELATIF_INEXISTANT",
"POLES_CONFONDUES_2D",
"POLES_CONFONDUES_3D",
"SETUP_SECURITE_AVEC_MOINS_DE_3_POLES",
"SECURITE_AVEC_MOINS_DE_3_POLES",
"SORTIE_DE_ZONE_DE_SECURITE",
# Doit rester la dernière pour avoir l'indice -1
"ERREUR_INCONNUE"
]
FUNI_ERREUR_MAJ =\
[
False, # 0
True, # 1
True, # 2
True, # 3
True, # 4
True, # 5
False, # 6
False, # 7
True, # 8
True, # 9
False, # 10
True, # 11
False, # 12
True, # 13
False, # 14
True, # 15
True, # 16
True, # 17
True, # 18
True, # 19
True, # 20
True, # 21
True, # 22
False, # 23
]
class eFuniErreur(Enum):
"""Énumération des erreurs du OpenCR"""
AUCUNE_ERREUR = 0
ADD_POLE_DEPASSEMENT = 1
SET_POLE_INEXISTANT = 2
SET_LONGUEUR_CABLE_INEXISTANT = 3
GET_POSITION_PAS_DE_POLES = 4
GET_POSITION_TROP_COURTS_2D = 5
GET_POSITION_0_TROP_LONG_2D = 6
GET_POSITION_1_TROP_LONG_2D = 7
GET_POSITION_TROP_COURTS_3D = 8
GET_POSITION_ALIGNES_3D = 9
GET_POSITION_RACINE_ESTIMEE_3D = 10
DEPLACEMENT_DIRECTIONNEL_ERREUR_MAJEURE = 11
DEPLACEMENT_DIRECTIONNEL_ERREUR_MINEURE = 12
DEPLACEMENT_POSITION_ERREUR_MAJEURE = 13
DEPLACEMENT_POSITION_ERREUR_MINEURE = 14
GET_POLE_INEXISTANT = 15
GET_ACCROCHE_INEXISTANTE = 16
GET_LONGUEUR_CABLE_INEXISTANT = 17
GET_POLE_RELATIF_INEXISTANT = 18
POLES_CONFONDUES_2D = 19
POLES_CONFONDUES_3D = 20
SETUP_SECURITE_AVEC_MOINS_DE_3_POLES = 21
SECURITE_AVEC_MOINS_DE_3_POLES = 22
SORTIE_DE_ZONE_DE_SECURITE = 23
ERREUR_INCONNUE_VOIR_DICTIONNAIRE = -1
class FuniErreur:
"""Représente une erreur du Funibot"""
def __init__(self, erreur: Union[int, eFuniErreur], temps: int, maj: bool) -> None:
"""Initialise une FuniErreur à partir de son eFuniErreur ou de son id"""
if isinstance(erreur, int):
self.id = erreur
try:
self.erreur = eFuniErreur(erreur)
except ValueError:
self.erreur = eFuniErreur(-1)
else:
self.id = erreur.value
self.erreur = erreur
self.maj = maj
self.t = temps
def __repr__(self) -> str:
"""Affiche une FuniErreur"""
return f"FuniErreur<{self.t}>{'(M)' if self.maj else ''}[{self.id}:{self.erreur.name}]"
class FuniSerial():
"""Objet Serial possédant des méthodes pour envoyer et recevoir du JSON en lien avec le Funibot"""
def __init__(self, serial: Union[Serial, IMockSerial]):
"""Initialise le port série"""
self.serial = serial
self.json_encoder = JSONEncoder()
self.json_decoder = JSONDecoder()
self.python_type_func = type
def __repr__(self):
if isinstance(self.serial, Serial):
return f"{self.serial.portstr}@{self.serial.baudrate}"
else:
return "Mock"
def envoyer(self, json: dict) -> dict:
"""Envoie du json sous forme de dict"""
self.serial.write(
bytes(self.json_encoder.encode(json), encoding='utf8'))
if json["type"] == eFuniType.ACK.value:
raise ReponseACK
try:
reponse = self.serial.readline()
reponse = self.json_decoder.decode(reponse.decode("utf8"))
except JSONDecodeError:
print_exc()
raise FuniCommException("erreur serial lors du décodage")
if reponse["type"] != eFuniType.ACK.value:
raise FuniCommException(
f"{reponse['type']} au lieu de {eFuniType.ACK.value}")
try:
self._valider_reponse(json_envoye=json, json_recu=reponse)
except FuniCommException:
self.serial.reset_input_buffer()
raise
return reponse
@staticmethod
def _valider_reponse(json_envoye: dict, json_recu: dict) -> None:
"""Compare les documents JSON envoyé et reçu pour valider que la communication a réussi"""
json_envoye_flat: dict = benedict(json_envoye).flatten("/")
json_recu_flat: dict = benedict(json_recu).flatten("/")
for key, value in json_recu_flat.items():
if not key in json_envoye_flat:
raise FuniCommException(
f"{key} est présente dans la réponse mais pas dans le message d'origine")
if key != "type" and json_envoye_flat[key] is not None and json_envoye_flat[key] != value:
raise FuniCommException(
f"{key}: Reçu <{value}>, attendu <{json_envoye_flat[key]}>")
elif key == "type" and json_envoye_flat[key] not in {"get", "set"}:
raise FuniCommException(
f"type: Envoyé <{json_envoye_flat[key]}>, attendu <get | set>")
def pot(self, type: eFuniType, id: int, position: Tuple[float, float, float] = None) -> Optional[Tuple[float, float, float]]:
"""S'occupe de la communication série pour la commande JSON 'pot'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est pas un FuniType")
json = {}
json["comm"] = "pot"
json["type"] = type.value
args = {}
if not isinstance(id, int):
raise TypeError("id n'est pas un entier")
if id < 0:
raise ValueError("id est inférieur à 0")
args["id"] = id
if type is eFuniType.SET:
if position is None:
raise ValueError("position est None")
args["pos_x"] = position[0]
args["pos_y"] = position[1]
args["pos_z"] = position[2]
else:
args["pos_x"] = None
args["pos_y"] = None
args["pos_z"] = None
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
return (retour["args"]["pos_x"], retour["args"]["pos_y"], retour["args"]["pos_z"])
def cal(self, type: eFuniType, mode: eFuniModeCalibration, id: Optional[int] = None, longueur: Optional[float] = None) -> Optional[float]:
"""S'occupe de la communication série pour la commande JSON 'cal'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est pas un FuniType")
json = {}
json["comm"] = "cal"
json["type"] = type.value
args = {}
args["mode"] = mode.value
if mode is not eFuniModeCalibration.SOL:
if not isinstance(id, int):
raise TypeError("id n'est pas un entier")
if id < 0:
raise ValueError("id est inférieur à 0")
else:
id = None
args["id"] = id
if type is eFuniType.SET:
if longueur is None:
raise ValueError("longueur est None")
elif longueur < 0 and mode is not eFuniModeCalibration.SOL:
raise ValueError("longueur est inférieure à zéro")
args["long"] = longueur
else:
args["long"] = None
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
return retour["args"]["long"]
def pos(self, type: eFuniType, position: Tuple[float, float, float] = None) -> Optional[Tuple[float, float, float]]:
"""S'occupe de la communication série pour la commande JSON 'pos'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est pas un FuniType")
json = {}
json["comm"] = "pos"
json["type"] = type.value
args = {}
if type is eFuniType.SET:
if position is None:
raise ValueError("position est None")
args["pos_x"] = position[0]
args["pos_y"] = position[1]
args["pos_z"] = position[2]
else:
args["pos_x"] = None
args["pos_y"] = None
args["pos_z"] = None
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
return (retour["args"]["pos_x"], retour["args"]["pos_y"], retour["args"]["pos_z"])
def dep(self, type: eFuniType, mode: eFuniModeDeplacement, direction: Tuple[float, float, float] = None) -> Optional[Tuple[float, float, float]]:
"""S'occupe de la communication série pour la commande JSON 'dep'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est pas un FuniType")
if type is eFuniType.GET:
raise ValueError("GET n'est pas supporté")
json = {}
json["comm"] = "dep"
json["type"] = type.value
args = {}
args["mode"] = mode.value
if type is eFuniType.SET and \
(mode == eFuniModeDeplacement.DISTANCE or mode == eFuniModeDeplacement.START):
if direction is None:
raise ValueError("direction est None")
args["axe_x"] = direction[0]
args["axe_y"] = direction[1]
args["axe_z"] = direction[2]
else:
args["axe_x"] = None
args["axe_y"] = None
args["axe_z"] = None
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
return (retour["args"]["axe_x"], retour["args"]["axe_y"], retour["args"]["axe_z"])
def err(self, type: eFuniType, code: Union[None, int, eFuniErreur] = None, temps: int = None, err_sup: int = None) -> List[FuniErreur]:
"""S'occupe de la communication série pour la commande JSON 'err'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est pas un FuniType")
if type is eFuniType.SET:
raise ValueError("SET n'est pas supporté")
json = {}
json["comm"] = "err"
json["type"] = type.value
if type is eFuniType.GET:
args = {}
args["id"] = None
args["maj"] = None
args["t"] = None
args["err_sup"] = None
else:
args = {}
if isinstance(code, eFuniErreur):
code = code.value
if not isinstance(code, int):
raise TypeError("code n'est pas une eFuniErreur ou un entier")
if code <= 0:
raise ValueError(
"code est un entier négatif, il devrait être positif")
if not isinstance(temps, int):
raise TypeError("temps n'est pas un entier")
if not isinstance(err_sup, int):
raise TypeError("err_sup n'est pas un entier")
if err_sup < 0:
raise ValueError("err_sup est négatif")
args["id"] = code
args["maj"] = FUNI_ERREUR_MAJ[code]
args["t"] = temps
args["err_sup"] = err_sup
json["args"] = args
encore = True
erreurs = []
err_limite = 10
while encore:
try:
retour = self.envoyer(json)
except ReponseACK:
break
except FuniCommException:
print_exc()
err_limite -= 1
if err_limite == 0:
break
else:
continue
try:
encore = (retour["args"]["err_sup"] > 0)
except KeyError:
print_exc()
break
except TypeError as e:
raise ErrSupEstNone(
erreurs, f"-> {self.python_type_func(e).__name__}: {str(e)}")
erreurs.append(FuniErreur(
retour["args"]["id"], retour["args"]["t"], retour["args"]["maj"]))
return erreurs
def log(self, type: eFuniType, msg: str = None) -> Optional[str]:
"""S'occupe de la communication série pour la commande JSON 'log'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est pas un FuniType")
if type is eFuniType.SET:
raise ValueError("SET n'est pas supporté")
json = {}
json["comm"] = "log"
json["type"] = type.value
if type is eFuniType.GET:
args = {}
args["msg"] = None
else:
args = {}
msg_corrige: str = msg if msg is not None else ""
msg_corrige = msg_corrige if msg_corrige != "" else "__vide__"
msg_corrige = msg_corrige.replace('\n', '\r')
args["msg"] = msg_corrige
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
msg_retour: str = retour["args"]["msg"] if retour["args"]["msg"] is not None else ""
msg_retour = msg_retour if msg_retour != "" else "__vide__"
msg_retour = msg_retour.replace('\r', '\n').rstrip()
return msg_retour
def mot(self, type: eFuniType, mode: Optional[eFuniModeMoteur] = None) -> Optional[eFuniModeMoteur]:
"""S'occupe de la communication série pour la commande JSON 'mot'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est pas un FuniType")
json = {}
json["comm"] = "mot"
json["type"] = type.value
args = {}
if type is not eFuniType.GET:
if mode is None:
raise ValueError("mode est None")
args["mode"] = mode.value
else:
args["mode"] = None
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
try:
return eFuniModeMoteur(retour["args"]["mode"])
except ValueError:
return None
def reg(self, type: eFuniType, regime: Optional[eFuniRegime] = None) -> Optional[eFuniRegime]:
"""S'occupe de la communication série pour la commande JSON 'reg'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est psa un FuniType")
if type is eFuniType.SET:
raise ValueError("SET n'est pas supporté")
json = {}
json["comm"] = "reg"
json["type"] = type.value
args = {}
if type is not eFuniType.GET:
if regime is None:
raise ValueError("regime est None")
args["tache"] = regime.value
else:
args["tache"] = None
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
try:
return eFuniRegime(retour["args"]["tache"])
except ValueError:
return None
def dur(self, type: eFuniType, duree: Optional[float] = None) -> Optional[float]:
"""S'occupe de la communication série pour la commande JSON 'dur'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est psa un FuniType")
if type is eFuniType.SET:
raise ValueError("SET n'est pas supporté")
json = {}
json["comm"] = "dur"
json["type"] = type.value
args = {}
if type is not eFuniType.GET:
if duree is None:
raise ValueError("duree est None")
elif duree < 0:
raise ValueError("duree est inférieure à 0")
args["tmp"] = duree
else:
args["tmp"] = None
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
return retour["args"]["tmp"]
def att(self, type: eFuniType, fin: bool, valide: Optional[bool] = None) -> Optional[Tuple[bool, bool]]:
"""S'occupe de la communication série pour la commande JSON 'dur'"""
if not isinstance(type, eFuniType):
raise TypeError("type n'est psa un FuniType")
if type is eFuniType.GET:
raise ValueError("GET n'est pas supporté")
json = {}
json["comm"] = "att"
json["type"] = type.value
args = {}
if fin is None:
raise ValueError("fin est None")
args["fin"] = fin
if type is not eFuniType.SET:
if valide is None:
raise ValueError(f"valide est None")
args["val"] = valide
else:
args["val"] = None
json["args"] = args
try:
retour = self.envoyer(json)
except ReponseACK:
return None
except FuniCommException:
print_exc()
return None
return (retour["args"]["val"], retour["args"]["fin"])
|
StarcoderdataPython
|
5063255
|
import pyxel
import constants
from random import *
from enum import Enum
class GameStatus(Enum):
    GAME = 0
    Main_UI = 1
def define_sound():
pyxel.sound(0).set(
notes="c3e3g3c4c4", tones="s", volumes="4", effects=("n" * 4 + "f"), speed=7
)
pyxel.sound(1).set(
notes="f3 b2 f2 b1 f1 f1 f1 f1",
tones="p",
volumes=("4" * 4 + "4321"),
effects=("n" * 7 + "f"),
speed=9,
)
class Drawable:
def __init__(self, x, y, u, v, w, h, name=None):
self.name = name
self.x = x
self.y = y
self.u = u
self.v = v
self.width = w
self.height = h
def draw(self):
pyxel.blt(self.x, self.y, 0, self.u, self.v, self.width, self.height,0)
class Movable(Drawable):
def __init__(self, x, y, u, v, w, h, direction, name):
super().__init__(x, y, u, v, w, h, name)
self.direction = direction
self.speed = 4
def update_position(self):
if self.direction == constants.UP:
self.y -= self.speed
elif self.direction == constants.DOWN:
self.y += self.speed
elif self.direction == constants.LEFT:
self.x -= self.speed
elif self.direction == constants.RIGHT:
self.x += self.speed
def is_outside(self):
return self.x < 4 or self.x > 156 or self.y < 4 or self.y > 156
def collide_with(self, item: Drawable):
return abs(self.x - item.x) < self.width and abs(self.y - item.y) < self.height
class Enemy(Movable):
def __init__(self, x, y, direction):
super().__init__(x, y, 0, 8, 6, 6, direction, "Enemy")
self.speed = 3
class Bullet(Movable):
def __init__(self, x, y, direction):
super().__init__(x, y, 8, 0, 4, 4, direction, "Bullet")
class Player(Drawable):
def __init__(self, x, y):
super().__init__(x, y, 0, 0, 8, 8, "Player")
self.direction = constants.UP
def draw(self):
super(Player, self).draw()
if self.direction == constants.UP:
pyxel.rect(self.x, self.y - 4, 8, 4, 3)
elif self.direction == constants.DOWN:
pyxel.rect(self.x, self.y + self.height, 8, 4, 3)
elif self.direction == constants.LEFT:
pyxel.rect(self.x - 4, self.y, 4, 8, 3)
elif self.direction == constants.RIGHT:
pyxel.rect(self.x + self.width, self.y, 4, 8, 3)
class MainGame:
def init_game(self):
self.player = Player((160 - 8) / 2, (160 - 8) / 2)
self.bullets: list[Bullet] = [None, None, None, None]
self.enemies: list[Enemy] = []
self.enemy_count = 16
self.lives = [Drawable(140-i*8,10,8,8,8,8) for i in range(3)]
self.score = 0
def __init__(self, width=160, height=160):
pyxel.init(width, height, title="Battle Square")
pyxel.load("assets/sqb.pyxres")
self.status = GameStatus.Main_UI
self.arrow = Drawable(0,0,0,16,16,16)
self.selected = 0
self.message = ""
define_sound()
pyxel.run(self.update, self.draw)
def add_enemy(self):
idx = randint(0,3)
if idx == constants.UP:
self.enemies.append(Enemy((160 - 6) / 2, self.player.height, constants.DOWN))
elif idx == constants.DOWN:
self.enemies.append(Enemy((160 - 6) / 2, 160 - self.player.height, constants.UP))
elif idx == constants.LEFT:
self.enemies.append(Enemy(4, (160 - 6) / 2, constants.RIGHT))
elif idx == constants.RIGHT:
self.enemies.append(Enemy(160 - 6, (160 - 6) / 2, constants.LEFT))
def check_collision(self):
for cnt in range(len(self.bullets)):
item = self.bullets[cnt]
if item:
item.update_position()
if item.is_outside():
self.bullets[cnt] = None
for cnt in range(len(self.enemies)):
item = self.enemies[cnt]
if item:
item.update_position()
if item.is_outside():
self.enemies[cnt] = None
collide = False
for bc in range(len(self.bullets)):
b = self.bullets[bc]
if b and item.collide_with(b):
collide = True
self.enemies[cnt] = None
self.bullets[bc] = None
self.score += 1
pyxel.play(0, 0)
if not collide:
if item.collide_with(self.player):
self.enemies[cnt] = None
pyxel.play(0, 1)
if len(self.lives) >1:
self.lives.pop()
else:
self.message = "GAME OVER: SCORE: {}".format(self.score)
self.status= GameStatus.Main_UI
self.enemies = [k for k in self.enemies if k is not None]
def update_game(self):
if pyxel.btnp(pyxel.KEY_DOWN):
self.player.direction = constants.DOWN
if self.bullets[constants.DOWN] is None:
self.bullets[constants.DOWN] = Bullet(self.player.x + 2,
self.player.y + self.player.height, constants.DOWN)
elif pyxel.btnp(pyxel.KEY_UP):
self.player.direction = constants.UP
if self.bullets[constants.UP] is None:
self.bullets[constants.UP] = Bullet(self.player.x + 2,
self.player.y - 4, constants.UP)
elif pyxel.btnp(pyxel.KEY_LEFT):
self.player.direction = constants.LEFT
if self.bullets[constants.LEFT] is None:
self.bullets[constants.LEFT] = Bullet(self.player.x - 4,
self.player.y + 2, constants.LEFT)
elif pyxel.btnp(pyxel.KEY_RIGHT):
self.player.direction = constants.RIGHT
if self.bullets[constants.RIGHT] is None:
self.bullets[constants.RIGHT] = Bullet(self.player.x + self.player.width,
self.player.y + 2, constants.RIGHT)
def update_ui(self):
if pyxel.btnp(pyxel.KEY_UP):
self.selected = abs((self.selected-1) %2)
elif pyxel.btnp(pyxel.KEY_DOWN):
self.selected = abs((self.selected+1) %2)
elif pyxel.btnp(pyxel.KEY_RETURN):
if self.selected == 0:
self.status = GameStatus.GAME
self.init_game()
else:
pyxel.quit()
def update(self):
if pyxel.btnp(pyxel.KEY_Q):
pyxel.quit()
if self.status == GameStatus.GAME:
self.update_game()
elif self.status == GameStatus.Main_UI:
self.update_ui()
def draw_border(self):
pyxel.rect(0, 0, 160, 4, 1)
pyxel.rect(0, 160 - 4, 160, 4, 1)
pyxel.rect(0, 0, 4, 160, 1)
pyxel.rect(160 - 4, 0, 4, 160, 1)
def game_draw(self):
pyxel.cls(0)
pyxel.rect((160 - 8) / 2, 0, 8, 160, 7)
pyxel.rect(0, (160 - 8) / 2, 160, 8, 7)
self.check_collision()
self.player.draw()
for item in self.bullets:
if item is not None:
item.draw()
for item in self.enemies:
if item is not None:
item.draw()
self.draw_border()
s = "Score: {}".format(self.score)
for item in self.lives:
item.draw()
pyxel.text(1,1,s,6)
if pyxel.frame_count % self.enemy_count == 0:
self.add_enemy()
def ui_draw(self):
pyxel.cls(0)
if self.selected == 0:
self.arrow.x = 20
self.arrow.y = 60
elif self.selected == 1:
self.arrow.x = 20
self.arrow.y = 80
self.arrow.draw()
pyxel.text(40,30, "S Q U A R E B R E A K", pyxel.frame_count % 16)
pyxel.text(60,60, "NEW GAME", 3)
pyxel.text(60,80, "QUIT", 4)
if len(self.message)>0:
pyxel.text(40,100, self.message, pyxel.frame_count % 5)
def draw(self):
if self.status == GameStatus.GAME:
self.game_draw()
elif self.status == GameStatus.Main_UI:
self.ui_draw()
if __name__ == "__main__":
MainGame()
|
StarcoderdataPython
|
1699359
|
from rest_framework import serializers
from apps.stt_tests.models import SttTest
class SttTestSerializer(serializers.ModelSerializer):
language = serializers.CharField(
source='language.code'
)
class Meta:
model = SttTest
fields = '__all__'
|
StarcoderdataPython
|
3245944
|
import os
from pathlib import Path
import pytest
from s3fetch import __version__
from s3fetch.command import S3Fetch
from s3fetch.exceptions import DirectoryDoesNotExistError, NoObjectsFoundError
@pytest.fixture(scope="function")
def aws_credentials():
"""Mocked AWS Credentials for moto."""
os.environ["AWS_ACCESS_KEY_ID"] = "testing"
os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
os.environ["AWS_SECURITY_TOKEN"] = "testing"
os.environ["AWS_SESSION_TOKEN"] = "testing"
@pytest.fixture
def s3fetch(aws_credentials):
bucket = "my-test-bucket"
prefix = "my/test/objects/"
regex = "_mytestobject_"
s3_path = f"{bucket}/{prefix}"
s3fetch = S3Fetch(s3_uri=s3_path, regex=regex, debug=False)
s3fetch._objects = [
"one_mytestobject_one",
"two_mytestobject_two",
"three_mytestobject_three",
"four*mytestobject*four",
"five)mytestobject_five",
"six!mytestdirectoryobject!six/",
]
return s3fetch
def test_s3fetch_obj(s3fetch):
assert s3fetch._bucket == "my-test-bucket"
assert s3fetch._prefix == "my/test/objects/"
assert s3fetch._regex == "_mytestobject_"
assert s3fetch._objects == [
"one_mytestobject_one",
"two_mytestobject_two",
"three_mytestobject_three",
"four*mytestobject*four",
"five)mytestobject_five",
"six!mytestdirectoryobject!six/",
]
def test_filter_object(s3fetch):
expected_objects = [
"one_mytestobject_one",
"two_mytestobject_two",
"three_mytestobject_three",
]
s3fetch._regex = r"^\w+_\w+_\w+$"
tmp_list = []
for key in filter(s3fetch._filter_object, expected_objects):
tmp_list.append(key)
assert tmp_list == expected_objects
def test_filter_object_no_regex(s3fetch):
expected_objects = [
"one_mytestobject_one",
"two_mytestobject_two",
"three_mytestobject_three",
"four*mytestobject*four",
"five)mytestobject_five",
"six!mytestdirectoryobject!six/",
]
s3fetch._regex = None
tmp_list = []
for key in filter(s3fetch._filter_object, (obj for obj in expected_objects)):
tmp_list.append(key)
assert tmp_list == expected_objects[0:-1]
# TODO: Fixup once moto tests are working.
# NoObjectsFoundError now raised by_retrieve_list_of_objects
# def test_filter_object_no_matching_objects(s3fetch):
# s3fetch._regex = r"^sdfasdfasdfsa$"
# with pytest.raises(NoObjectsFoundError):
# s3fetch._filter_object()
# TODO: Fixup once moto tests are working.
# NoObjectsFoundError now raised by_retrieve_list_of_objects
# def test_filter_object_empty_object_list(s3fetch):
# s3fetch._objects = []
# s3fetch._regex = r"^\w+_\w+_\w+$"
# with pytest.raises(NoObjectsFoundError):
# s3fetch._filter_object()
def test_check_for_failed_downloads(s3fetch, capfd):
s3fetch._failed_downloads = [
(
"my/test/objects/one_mytestobject_one",
"reason",
)
]
s3fetch._check_for_failed_downloads()
out, _ = capfd.readouterr()
assert "objects failed to download" in out
s3fetch._debug = True
s3fetch._check_for_failed_downloads()
out, _ = capfd.readouterr()
assert f"my/test/objects/one_mytestobject_one: reason" in out
def test_check_for_failed_downloads_no_failures(s3fetch, capfd):
s3fetch._failed_downloads = []
s3fetch._check_for_failed_downloads()
out, _ = capfd.readouterr()
assert "objects failed to download" not in out
def test_dry_run_detected(s3fetch, capfd):
s3_path = "s3://my-test-bucket/my/test/objects/"
S3Fetch(s3_uri=s3_path, dry_run=True, debug=True)
out, _ = capfd.readouterr()
assert "Operating in dry run mode. Will not download objects." in out
def test_determine_download_dir_none_dir_specified(s3fetch, mocker):
os_mock = mocker.patch("os.getcwd")
expected_directory = Path("/home/test")
os_mock.return_value = expected_directory
assert s3fetch._determine_download_dir(None) == expected_directory
def test_determine_download_dir_dir_specified_and_exists(s3fetch, mocker):
is_dir_mock = mocker.patch("pathlib.Path.is_dir")
is_dir_mock.return_value = True
expected_directory = Path("/home/test/Downloads")
assert s3fetch._determine_download_dir("/home/test/Downloads") == expected_directory
def test_determine_download_dir_dir_specified_and_raises(s3fetch, mocker):
is_dir_mock = mocker.patch("pathlib.Path.is_dir")
is_dir_mock.return_value = False
expected_directory = "/home/test/Downloads"
with pytest.raises(DirectoryDoesNotExistError):
s3fetch._determine_download_dir(expected_directory)
def test_remove_directories(s3fetch):
expected_objects = [
"five)mytestobject_five",
"six!mytestdirectoryobject!six/",
]
s3fetch._regex = None
tmp_list = []
for key in filter(s3fetch._filter_object, (obj for obj in expected_objects)):
tmp_list.append(key)
assert tmp_list == ["five)mytestobject_five"]
def test_parse_and_split_s3_uri_full_path(s3fetch):
bucket, prefix = s3fetch._parse_and_split_s3_uri(
s3_uri="s3://testbucket/files", delimiter="/"
)
assert bucket == "testbucket"
assert prefix == "files"
bucket, prefix = s3fetch._parse_and_split_s3_uri(
s3_uri="s3://testbucket/files/", delimiter="/"
)
assert bucket == "testbucket"
assert prefix == "files/"
def test_parse_and_split_s3_uri_no_prefix(s3fetch):
bucket, prefix = s3fetch._parse_and_split_s3_uri(
s3_uri="s3://testbucket", delimiter="/"
)
assert bucket == "testbucket"
assert prefix == ""
bucket, prefix = s3fetch._parse_and_split_s3_uri(
s3_uri="s3://testbucket/", delimiter="/"
)
assert bucket == "testbucket"
assert prefix == ""
def test_rollup_prefix(s3fetch):
# (prefix, object_key, expected directory, expected filename)
prefix_and_keys = [
("", "object1", None, "object1"),
("storage", "storage/object1", "storage", "object1"),
("sto", "storage/object1", "storage", "object1"),
("storage/obj", "storage/object1", None, "object1"),
("test/an", "test/another_folder/console", "another_folder", "console"),
("", "test/another_folder/console", "test/another_folder", "console"),
]
for prefix, key, directory, filename in prefix_and_keys:
s3fetch._prefix = prefix
tmp_directory, tmp_filename = s3fetch._rollup_prefix(key)
assert (directory, filename) == (tmp_directory, tmp_filename)
|
StarcoderdataPython
|
6517910
|
from django.conf import settings
def global_settings(request):
return {
'OTP_PRIMARY_COLOR': settings.OTP_PRIMARY_COLOR,
'OTP_BACKGROUND_BTN': settings.OTP_BACKGROUND_BTN,
'OTP_BACKGROUND_BTN_HOVER': settings.OTP_BACKGROUND_BTN_HOVER,
'OTP_COLOR_TEXT_BTN': settings.OTP_COLOR_TEXT_BTN,
'OTP_COLOR_TEXT_BTN_HOVER': settings.OTP_COLOR_TEXT_BTN_HOVER,
'OTP_BRAND_NAME': settings.OTP_BRAND_NAME,
'OTP_BRAND_IMG': settings.OTP_BRAND_IMG,
}
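# The processor above only works if settings defines every OTP_* key it reads and if it
# is registered as a template context processor; a sketch of the assumed settings.py
# entries (values and module path are illustrative):
#
#   OTP_PRIMARY_COLOR = "#0d6efd"
#   OTP_BRAND_NAME = "My Project"
#   ...  # plus the remaining OTP_* values referenced above
#
#   TEMPLATES[0]["OPTIONS"]["context_processors"].append(
#       "myapp.context_processors.global_settings")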
|
StarcoderdataPython
|
5163061
|
from math import ceil, floor
# Define some colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
BLUE = (0, 0, 255)
BROWN = (95, 95, 0)
GRAY = (95, 95, 95)
WINDOW_WIDTH = 1024
WINDOW_HEIGHT = 768
# Time since last logic update
DELTATIME = 5 # In milliseconds; 5 ms = physics update @ 200 Hz
LEVEL_WIDTH = 10
LEVEL_HEIGHT = 10
WALL_THICKNESS = 6
GAMEAREA_WIDTH = ceil(WINDOW_WIDTH * 0.95)
GAMEAREA_HEIGHT = ceil(WINDOW_HEIGHT * 0.9)
GAMEAREA_X = ceil((WINDOW_WIDTH / 2.0) - (GAMEAREA_WIDTH / 2.0))
GAMEAREA_Y = ceil((WINDOW_HEIGHT - GAMEAREA_HEIGHT - (WALL_THICKNESS / 2.0)))
GAME_SPEED = 2.0
GAME_STARTINGSHIPS = 3
GAME_SCOREEXTRASHIP = 1000 # How many points is needed for an extra ship
GAME_STARTINGASTEROIDS = 10 # How many asteroids there are in level 1
GAME_ASTEROIDMINSPEED = 0.15
GAME_ASTEROIDMAXSPEED = 1.0
GAME_SHIELDDURATION = 2.5
BULLET_SPEED = 6.0
BULLET_TIMETOLIVE = 5.0 # How many seconds bullets stay on screen
SHIP_MAXSPEED = 3.0
SHIP_SHOOTDELAY = 0.5 # How often can the ship shoot (in seconds)
STATE_MAINMENU = 1
STATE_GAME = 2
STATE_PAUSE = 3
STATE_GAMEOVER = 4
STATE_VICTORY = 5
ACTION_NEWGAME = 1
ACTION_QUIT = 2
GAMEOBJECT_SHIP = 1
GAMEOBJECT_WALL = 2
GAMEOBJECT_ASTEROID = 3
GAMEOBJECT_BULLET = 4
DIRECTION_UP = 1
DIRECTION_DOWN = 2
DIRECTION_LEFT = 3
DIRECTION_RIGHT = 4
SHIP_SPEED = 0.5
SHIP_WIDTH = 10
SHIP_HEIGHT = 15
ASTEROID_COLOR = BLACK
SHIP_COLOR = BROWN
WALL_COLOR = RED
BULLET_COLOR = BLUE
SHIELD_COLOR = GREEN
ASTEROID_THICKNESS = 0
ASTEROID_SIZE = 15
BULLET_THICKNESS = 0
BULLET_SIZE = 2
SHIELD_THICKNESS = 3
SHAPE_RECT = 1
SHAPE_CIRCLE = 2
SHAPE_SHIP = 3
|
StarcoderdataPython
|
6657317
|
#!/usr/bin/env python
import pdfminer.psparser as p
p.PSLiteral
p.PSLiteralTable.intern('Example')  # intern() requires a literal name; 'Example' is an arbitrary placeholder
|
StarcoderdataPython
|
6511720
|
__description__ = \
"""
Submodule with dialog boxes raised by main window.
"""
__author__ = "<NAME>"
__date__ = "2017-06-01"
from .aic_test import AICTest
from .documentation import Documentation
from .about import About
from .fit_options import FitOptions
from .add_experiment import AddExperiment
|
StarcoderdataPython
|
3469372
|
<gh_stars>0
from flask import request
from flask_restful import Resource
# from app.components.process_ingestion_helper import process_ingestion_helper
from app.components.process_ingestion_voter import process_ingestion_voter
from app.components.process_compare_voter import process_compare_voter
from app.components.process_query_counts import process_query_counts
from app.components.process_file_polling import process_file_polling
from app.components.process_copy_raw import process_copy_raw
import logging
logger = logging.getLogger(__name__)
class Process(Resource):
def post(self):
# TODO: payload validation (marshmellow?)
payload = request.get_json()
if not payload:
logger.debug('no payload')
return 'no payload'
if payload['type'] == 'polling':
task = process_file_polling.delay(payload)
logger.debug('polling job running')
return "polling..."
if payload['type'] == 'copy_raw':
task = process_copy_raw.delay(payload)
logger.debug(f"copying {payload['file_type']}")
return f"copying {payload['file_type']}"
# TODO: set up reference tables (full county name, etc..)
"""
if payload['type'] == 'ingest_helper':
task = process_ingestion_helper.delay(payload)
logger.debug('ingesting helper tables')
return "processing helper tables..."
"""
if payload['type'] == 'ingest_voterfile':
task = process_ingestion_voter.delay(payload)
logger.debug('ingesting voter file')
return "processing voter tables..."
if payload['type'] == 'vf_compare':
task = process_compare_voter.delay(payload)
logger.debug("comparing voter files...")
return "comparing voter files..."
if payload['type'] == 'query_counts':
task = process_query_counts.delay(payload)
logger.debug("pulling counts")
return "pulling counts..."
|
StarcoderdataPython
|
4994131
|
import os
import zipfile
import rastervision as rv
from rastervision.utils.files import (download_if_needed, make_dir,
load_json_config, save_json_config)
from rastervision.protos.command_pb2 import CommandConfig as CommandConfigMsg
from rastervision.data.raster_source import ChannelOrderError
class Predictor():
"""Class for making predictions based off of a prediction package."""
def __init__(self,
prediction_package_uri,
tmp_dir,
update_stats=False,
channel_order=None):
"""Creates a new Predictor.
Args:
prediction_package_uri: The URI of the prediction package to use.
Can be any type of URI that Raster Vision can read.
tmp_dir: Temporary directory in which to store files that are used
by the Predictor. This directory is not cleaned up by this
class.
update_stats: Option indicating if any Analyzers should be run on
the image to be predicted on. Otherwise, the Predictor will use
the output of Analyzers that are bundled with the predict
package. This is useful, for instance, if you are predicting
against imagery that needs to be normalized with a
StatsAnalyzer, and the color profile of the new imagery is
significantly different then the imagery the model was trained
on.
channel_order: Option for a new channel order to use for the
imagery being predicted against. If not present, the
channel_order from the original configuration in the predict
package will be used.
"""
self.tmp_dir = tmp_dir
self.update_stats = update_stats
self.model_loaded = False
package_zip_path = download_if_needed(prediction_package_uri, tmp_dir)
package_dir = os.path.join(tmp_dir, 'package')
make_dir(package_dir)
with zipfile.ZipFile(package_zip_path, 'r') as package_zip:
package_zip.extractall(path=package_dir)
# Read bundle command config
bundle_config_path = os.path.join(package_dir, 'bundle_config.json')
msg = load_json_config(bundle_config_path, CommandConfigMsg())
bundle_config = msg.bundle_config
self.task_config = rv.TaskConfig.from_proto(bundle_config.task) \
.load_bundle_files(package_dir)
self.backend_config = rv.BackendConfig.from_proto(bundle_config.backend) \
.load_bundle_files(package_dir)
scene_config = rv.SceneConfig.from_proto(bundle_config.scene)
scene_builder = scene_config.load_bundle_files(package_dir) \
.to_builder() \
.clear_label_source() \
.clear_aois() \
.with_id('PREDICTOR')
# If the scene does not have a label store, generate a default one.
if not scene_config.label_store:
scene_builder = scene_builder.with_task(self.task_config) \
.with_label_store()
if channel_order:
raster_source = scene_builder.config['raster_source'] \
.to_builder() \
.with_channel_order(channel_order) \
.build()
scene_builder = scene_builder.with_raster_source(raster_source)
self.scene_config = scene_builder.build()
self.analyzer_configs = []
if update_stats:
for analyzer in bundle_config.analyzers:
a = rv.AnalyzerConfig.from_proto(analyzer) \
.load_bundle_files(package_dir)
self.analyzer_configs.append(a)
self.bundle_config = rv.command.CommandConfig \
.from_proto(msg) \
.to_builder() \
.with_task(self.task_config) \
.with_backend(self.backend_config) \
.with_scene(self.scene_config) \
.with_analyzers(self.analyzer_configs) \
.build()
def load_model(self):
"""Load the model for this Predictor.
This is useful if you are going to make multiple predictions with the
model, and want it to be fast on the first prediction.
Note: This is called implicitly on the first call of 'predict' if it
hasn't been called already.
"""
self.backend = self.backend_config.create_backend(self.task_config)
self.backend.load_model(self.tmp_dir)
self.task = self.task_config.create_task(self.backend)
self.analyzers = []
for analyzer_config in self.analyzer_configs:
self.analyzers.append(analyzer_config.create_analyzer())
self.model_loaded = True
def predict(self, image_uri, label_uri=None, config_uri=None):
"""Generate predictions for the given image.
Args:
image_uri: URI of the image to make predictions against.
This can be any type of URI readable by Raster Vision
FileSystems.
label_uri: Optional URI to save labels off into.
config_uri: Optional URI in which to save the bundle_config,
which can be useful to client applications for understanding
how to interpret the labels.
Returns:
rastervision.data.labels.Labels containing the predicted labels.
"""
if not self.model_loaded:
self.load_model()
scene_config = self.scene_config.for_prediction(image_uri, label_uri) \
.create_local(self.tmp_dir)
try:
scene = scene_config.create_scene(self.task_config, self.tmp_dir)
# If we are analyzing per scene, run analyzers
# Analyzers should overwrite files in the tmp_dir
if self.update_stats:
for analyzer in self.analyzers:
analyzer.process([scene], self.tmp_dir)
# Reload scene to refresh any new analyzer config
scene = scene_config.create_scene(self.task_config,
self.tmp_dir)
except ChannelOrderError:
raise ValueError(
'The predict package is using a channel_order '
'with channels unavailable in the imagery.\nTo set a new '
'channel_order that only uses channels available in the '
'imagery, use the --channel-order option.')
with scene.activate():
labels = self.task.predict_scene(scene, self.tmp_dir)
if label_uri:
scene.prediction_label_store.save(labels)
if config_uri:
msg = self.bundle_config.to_builder() \
.with_scene(scene_config) \
.build() \
.to_proto()
save_json_config(msg, config_uri)
return labels
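# Minimal usage sketch based on the public interface above (all URIs are placeholders):
if __name__ == '__main__':
    predictor = Predictor(
        prediction_package_uri='s3://my-bucket/predict_package.zip',
        tmp_dir='/tmp/rv',
        update_stats=False)
    labels = predictor.predict(
        image_uri='s3://my-bucket/scene.tif',
        label_uri='s3://my-bucket/predictions.json')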
|
StarcoderdataPython
|
8180240
|
<filename>Data/ReplayScripts/rendering_attributes.py
import g
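# ``g`` is assumed to be the scripting interface exposed by the host
# visualization tool that replays this script: each function below puts the
# renderer into one dataset / transfer-function state, and ``replay()`` chains
# those states into a timed sequence via ``g.set_duration``.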
def init_scene():
g.set_duration(0)
g.set_dataset('Femur (Vis2021, Teaser)')
g.set_camera_checkpoint('TeaserA-2')
g.set_rendering_algorithm_settings({
'line_width': 0.005,
'band_width': 0.020,
'depth_cue_strength': 0.8
})
g.set_dataset_settings({
'attribute': "Principal Stress Magnitude",
'major_on': True,
'medium_on': False,
'minor_on': True,
'major_lod': 0.3,
'minor_lod': 0.5,
'major_use_bands': True,
'minor_use_bands': False,
'thick_bands': True,
'smoothed_bands': True,
'use_principal_stress_direction_index': True,
})
g.set_transfer_functions(['qualitative-pale-lilac.xml', 'qualitative-emerald.xml', 'qualitative-ocher.xml'])
g.set_transfer_functions_ranges([(0.0, 1.594), (0.0, 1.594), (0.0, 1.594)])
#g.set_transfer_functions_ranges([(-0.06, 1.274), (-0.213, 0.157), (-0.96, 0.019)])
g.set_duration(0.001)
g.set_transfer_functions_ranges([(0.0, 1.594), (0.03, 1.594), (0.0, 1.594)])
g.set_duration(6)
def use_principal_stress():
g.set_duration(0)
g.set_transfer_functions(['blues.xml', 'greens.xml', 'reds.xml'])
#g.set_transfer_functions_ranges([(-0.071, 0.551), (-0.213, 0.157), (-0.665, 0.096)])
#g.set_transfer_functions_ranges([(-0.06, 1.274), (-0.213, 0.157), (-0.96, 0.019)])
g.set_duration(6)
def use_von_mises_stress():
g.set_duration(0)
g.set_dataset_settings({
'attribute': "von Mises Stress",
'use_principal_stress_direction_index': False
})
#g.set_transfer_function_range((0.031, 1.124))
g.set_transfer_function_range((0.075, 1.236))
g.set_duration(6)
def replay():
init_scene()
use_principal_stress()
use_von_mises_stress()
|
StarcoderdataPython
|
66867
|
<filename>cinder/tests/unit/volume/drivers/dell_emc/powermax/test_powermax_common.py
# Copyright (c) 2017-2019 Dell Inc. or its subsidiaries.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
from copy import deepcopy
import mock
import six
from cinder import exception
from cinder.objects import fields
from cinder import test
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit.volume.drivers.dell_emc.powermax import (
powermax_data as tpd)
from cinder.tests.unit.volume.drivers.dell_emc.powermax import (
powermax_fake_objects as tpfo)
from cinder.volume.drivers.dell_emc.powermax import common
from cinder.volume.drivers.dell_emc.powermax import fc
from cinder.volume.drivers.dell_emc.powermax import masking
from cinder.volume.drivers.dell_emc.powermax import provision
from cinder.volume.drivers.dell_emc.powermax import rest
from cinder.volume.drivers.dell_emc.powermax import utils
from cinder.volume import volume_utils
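# The tests below drive PowerMaxCommon through a fake FC driver: setUp()
# replaces PowerMaxRest._establish_rest_session with a FakeRequestsSession,
# so no live Unisphere/PowerMax endpoint is contacted and REST responses are
# served from the canned data in powermax_data / powermax_fake_objects.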
class PowerMaxCommonTest(test.TestCase):
def setUp(self):
self.data = tpd.PowerMaxData()
super(PowerMaxCommonTest, self).setUp()
self.mock_object(volume_utils, 'get_max_over_subscription_ratio',
return_value=1.0)
configuration = tpfo.FakeConfiguration(
None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
vmax_array=self.data.array, vmax_srp='SRP_1', san_password='<PASSWORD>',
san_api_port=8443, vmax_port_groups=[self.data.port_group_name_f])
rest.PowerMaxRest._establish_rest_session = mock.Mock(
return_value=tpfo.FakeRequestsSession())
driver = fc.PowerMaxFCDriver(configuration=configuration)
self.driver = driver
self.common = self.driver.common
self.masking = self.common.masking
self.provision = self.common.provision
self.rest = self.common.rest
self.utils = self.common.utils
self.utils.get_volumetype_extra_specs = (
mock.Mock(return_value=self.data.vol_type_extra_specs))
@mock.patch.object(rest.PowerMaxRest, 'get_array_ucode_version',
return_value=tpd.PowerMaxData.next_gen_ucode)
@mock.patch.object(rest.PowerMaxRest, 'get_array_model_info',
return_value=('PowerMax 2000', True))
@mock.patch.object(rest.PowerMaxRest, 'set_rest_credentials')
@mock.patch.object(common.PowerMaxCommon, '_get_slo_workload_combinations',
return_value=[])
@mock.patch.object(common.PowerMaxCommon,
'get_attributes_from_cinder_config',
side_effect=[[], tpd.PowerMaxData.array_info_wl])
def test_gather_info_tests(self, mck_parse, mck_combo, mck_rest,
mck_nextgen, mck_ucode):
# Use-Case 1: Gather info no-opts
configuration = tpfo.FakeConfiguration(
None, 'config_group', None, None)
fc.PowerMaxFCDriver(configuration=configuration)
# Use-Case 2: Gather info next-gen with ucode/version
self.common._gather_info()
self.assertTrue(self.common.next_gen)
self.assertEqual(self.common.ucode_level, self.data.next_gen_ucode)
def test_get_slo_workload_combinations_powermax(self):
array_info = self.common.get_attributes_from_cinder_config()
finalarrayinfolist = self.common._get_slo_workload_combinations(
array_info)
self.assertTrue(len(finalarrayinfolist) > 1)
@mock.patch.object(
rest.PowerMaxRest, 'get_vmax_model',
return_value=(tpd.PowerMaxData.vmax_model_details['model']))
@mock.patch.object(
rest.PowerMaxRest, 'get_slo_list',
return_value=(tpd.PowerMaxData.vmax_slo_details['sloId']))
def test_get_slo_workload_combinations_vmax(self, mck_slo, mck_model):
array_info = self.common.get_attributes_from_cinder_config()
finalarrayinfolist = self.common._get_slo_workload_combinations(
array_info)
self.assertTrue(len(finalarrayinfolist) > 1)
@mock.patch.object(
rest.PowerMaxRest, 'get_vmax_model',
return_value=tpd.PowerMaxData.powermax_model_details['model'])
@mock.patch.object(rest.PowerMaxRest, 'get_workload_settings',
return_value=[])
@mock.patch.object(
rest.PowerMaxRest, 'get_slo_list',
return_value=tpd.PowerMaxData.powermax_slo_details['sloId'])
def test_get_slo_workload_combinations_next_gen(self, mck_slo, mck_wl,
mck_model):
self.common.next_gen = True
self.common.array_model = 'PowerMax 2000'
finalarrayinfolist = self.common._get_slo_workload_combinations(
self.data.array_info_no_wl)
self.assertTrue(len(finalarrayinfolist) == 14)
@mock.patch.object(
rest.PowerMaxRest, 'get_vmax_model',
return_value=tpd.PowerMaxData.vmax_model_details['model'])
@mock.patch.object(rest.PowerMaxRest, 'get_workload_settings',
return_value=[])
@mock.patch.object(
rest.PowerMaxRest, 'get_slo_list',
return_value=tpd.PowerMaxData.powermax_slo_details['sloId'])
def test_get_slo_workload_combinations_next_gen_vmax(
self, mck_slo, mck_wl, mck_model):
self.common.next_gen = True
finalarrayinfolist = self.common._get_slo_workload_combinations(
self.data.array_info_no_wl)
self.assertTrue(len(finalarrayinfolist) == 18)
def test_get_slo_workload_combinations_failed(self):
array_info = {}
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._get_slo_workload_combinations, array_info)
@mock.patch.object(
common.PowerMaxCommon, 'get_volume_metadata',
return_value={'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'})
def test_create_volume(self, mck_meta):
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location),
'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}})
volume = deepcopy(self.data.test_volume)
volume.metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
model_update = self.common.create_volume(volume)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_create_volume_qos(self, mck_meta):
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location),
'metadata': ''})
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs['qos'] = {
'total_iops_sec': '4000', 'DistributionType': 'Always'}
with mock.patch.object(self.utils, 'get_volumetype_extra_specs',
return_value=extra_specs):
model_update = self.common.create_volume(self.data.test_volume)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_create_volume_from_snapshot(self, mck_meta, mck_clone_chk):
ref_model_update = ({'provider_location': six.text_type(
deepcopy(self.data.provider_location_snapshot))})
model_update = self.common.create_volume_from_snapshot(
self.data.test_clone_volume, self.data.test_snapshot)
self.assertEqual(
ast.literal_eval(ref_model_update['provider_location']),
ast.literal_eval(model_update['provider_location']))
# Test from legacy snapshot
ref_model_update = (
{'provider_location': six.text_type(
deepcopy(self.data.provider_location_clone))})
model_update = self.common.create_volume_from_snapshot(
self.data.test_clone_volume, self.data.test_legacy_snapshot)
self.assertEqual(
ast.literal_eval(ref_model_update['provider_location']),
ast.literal_eval(model_update['provider_location']))
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_cloned_volume(self, mck_meta, mck_clone_chk):
ref_model_update = ({'provider_location': six.text_type(
self.data.provider_location_clone)})
model_update = self.common.create_cloned_volume(
self.data.test_clone_volume, self.data.test_volume)
self.assertEqual(
ast.literal_eval(ref_model_update['provider_location']),
ast.literal_eval(model_update['provider_location']))
def test_delete_volume(self):
with mock.patch.object(self.common, '_delete_volume') as mock_delete:
self.common.delete_volume(self.data.test_volume)
mock_delete.assert_called_once_with(self.data.test_volume)
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
@mock.patch.object(
common.PowerMaxCommon, 'get_snapshot_metadata',
return_value={'snap-meta-key-1': 'snap-meta-value-1',
'snap-meta-key-2': 'snap-meta-value-2'})
def test_create_snapshot(self, mck_meta, mck_clone_chk):
ref_model_update = (
{'provider_location': six.text_type(self.data.snap_location),
'metadata': {'snap-meta-key-1': 'snap-meta-value-1',
'snap-meta-key-2': 'snap-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}})
snapshot = deepcopy(self.data.test_snapshot_manage)
snapshot.metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
model_update = self.common.create_snapshot(
snapshot, self.data.test_volume)
self.assertEqual(ref_model_update, model_update)
def test_delete_snapshot(self):
snap_name = self.data.snap_location['snap_name']
sourcedevice_id = self.data.snap_location['source_id']
generation = 0
with mock.patch.object(
self.provision, 'delete_volume_snap') as mock_delete_snap:
self.common.delete_snapshot(
self.data.test_snapshot, self.data.test_volume)
mock_delete_snap.assert_called_once_with(
self.data.array, snap_name, [sourcedevice_id],
restored=False, generation=generation)
def test_delete_snapshot_not_found(self):
with mock.patch.object(self.common, '_parse_snap_info',
return_value=(None, 'Something')):
with mock.patch.object(
self.provision, 'delete_volume_snap') as mock_delete_snap:
self.common.delete_snapshot(self.data.test_snapshot,
self.data.test_volume)
mock_delete_snap.assert_not_called()
def test_delete_legacy_snap(self):
with mock.patch.object(self.common, '_delete_volume') as mock_del:
self.common.delete_snapshot(self.data.test_legacy_snapshot,
self.data.test_legacy_vol)
mock_del.assert_called_once_with(self.data.test_legacy_snapshot)
@mock.patch.object(masking.PowerMaxMasking,
'return_volume_to_fast_managed_group')
@mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
def test_remove_members(self, mock_rm, mock_return):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
self.common._remove_members(
array, volume, device_id, extra_specs, self.data.connector, False)
mock_rm.assert_called_once_with(
array, volume, device_id, volume_name,
extra_specs, True, self.data.connector, async_grp=None)
@mock.patch.object(masking.PowerMaxMasking,
'return_volume_to_fast_managed_group')
@mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
def test_remove_members_multiattach_case(self, mock_rm, mock_return):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
self.common._remove_members(
array, volume, device_id, extra_specs, self.data.connector, True)
mock_rm.assert_called_once_with(
array, volume, device_id, volume_name,
extra_specs, False, self.data.connector, async_grp=None)
mock_return.assert_called_once()
def test_unmap_lun(self):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
connector = self.data.connector
with mock.patch.object(self.common, '_remove_members') as mock_remove:
self.common._unmap_lun(volume, connector)
mock_remove.assert_called_once_with(
array, volume, device_id, extra_specs,
connector, False, async_grp=None)
@mock.patch.object(common.PowerMaxCommon, '_remove_members')
def test_unmap_lun_attachments(self, mock_rm):
volume1 = deepcopy(self.data.test_volume)
volume1.volume_attachment.objects = [self.data.test_volume_attachment]
connector = self.data.connector
self.common._unmap_lun(volume1, connector)
mock_rm.assert_called_once()
mock_rm.reset_mock()
volume2 = deepcopy(volume1)
volume2.volume_attachment.objects.append(
self.data.test_volume_attachment)
self.common._unmap_lun(volume2, connector)
mock_rm.assert_not_called()
def test_unmap_lun_qos(self):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
extra_specs['qos'] = {
'total_iops_sec': '4000', 'DistributionType': 'Always'}
connector = self.data.connector
with mock.patch.object(self.common, '_remove_members') as mock_remove:
with mock.patch.object(self.utils, 'get_volumetype_extra_specs',
return_value=extra_specs):
self.common._unmap_lun(volume, connector)
mock_remove.assert_called_once_with(
array, volume, device_id, extra_specs,
connector, False, async_grp=None)
def test_unmap_lun_not_mapped(self):
volume = self.data.test_volume
connector = self.data.connector
with mock.patch.object(self.common, 'find_host_lun_id',
return_value=({}, False)):
with mock.patch.object(
self.common, '_remove_members') as mock_remove:
self.common._unmap_lun(volume, connector)
mock_remove.assert_not_called()
def test_unmap_lun_connector_is_none(self):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs['storagetype:portgroupname'] = (
self.data.port_group_name_f)
with mock.patch.object(self.common, '_remove_members') as mock_remove:
self.common._unmap_lun(volume, None)
mock_remove.assert_called_once_with(
array, volume, device_id, extra_specs, None,
False, async_grp=None)
def test_initialize_connection_already_mapped(self):
volume = self.data.test_volume
connector = self.data.connector
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0][
'host_lun_address'])
ref_dict = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
device_info_dict = self.common.initialize_connection(volume, connector)
self.assertEqual(ref_dict, device_info_dict)
def test_initialize_connection_already_mapped_next_gen(self):
with mock.patch.object(self.rest, 'is_next_gen_array',
return_value=True):
volume = self.data.test_volume
connector = self.data.connector
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0][
'host_lun_address'])
ref_dict = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
device_info_dict = self.common.initialize_connection(volume,
connector)
self.assertEqual(ref_dict, device_info_dict)
@mock.patch.object(common.PowerMaxCommon, 'find_host_lun_id',
return_value=({}, False))
@mock.patch.object(
common.PowerMaxCommon, '_attach_volume',
return_value=({}, tpd.PowerMaxData.port_group_name_f))
def test_initialize_connection_not_mapped(self, mock_attach, mock_id):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
masking_view_dict[utils.IS_MULTIATTACH] = False
device_info_dict = self.common.initialize_connection(
volume, connector)
self.assertEqual({}, device_info_dict)
mock_attach.assert_called_once_with(
volume, connector, extra_specs, masking_view_dict)
@mock.patch.object(rest.PowerMaxRest, 'is_next_gen_array',
return_value=True)
@mock.patch.object(common.PowerMaxCommon, 'find_host_lun_id',
return_value=({}, False))
@mock.patch.object(
common.PowerMaxCommon, '_attach_volume',
return_value=({}, tpd.PowerMaxData.port_group_name_f))
def test_initialize_connection_not_mapped_next_gen(self, mock_attach,
mock_id, mck_gen):
volume = self.data.test_volume
connector = self.data.connector
device_info_dict = self.common.initialize_connection(
volume, connector)
self.assertEqual({}, device_info_dict)
@mock.patch.object(
masking.PowerMaxMasking, 'pre_multiattach',
return_value=tpd.PowerMaxData.masking_view_dict_multiattach)
@mock.patch.object(common.PowerMaxCommon, 'find_host_lun_id',
return_value=({}, True))
@mock.patch.object(
common.PowerMaxCommon, '_attach_volume',
return_value=({}, tpd.PowerMaxData.port_group_name_f))
def test_initialize_connection_multiattach_case(
self, mock_attach, mock_id, mock_pre):
volume = self.data.test_volume
connector = self.data.connector
self.common.initialize_connection(volume, connector)
mock_attach.assert_called_once()
mock_pre.assert_called_once()
def test_attach_volume_success(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0][
'host_lun_address'])
ref_dict = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
with mock.patch.object(self.masking, 'setup_masking_view',
return_value={
utils.PORTGROUPNAME:
self.data.port_group_name_f}):
device_info_dict, pg = self.common._attach_volume(
volume, connector, extra_specs, masking_view_dict)
self.assertEqual(ref_dict, device_info_dict)
@mock.patch.object(masking.PowerMaxMasking,
'check_if_rollback_action_for_masking_required')
@mock.patch.object(masking.PowerMaxMasking, 'setup_masking_view',
return_value={})
@mock.patch.object(common.PowerMaxCommon, 'find_host_lun_id',
return_value=({}, False))
def test_attach_volume_failed(self, mock_lun, mock_setup, mock_rollback):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertRaises(exception.VolumeBackendAPIException,
self.common._attach_volume, volume,
connector, extra_specs, masking_view_dict)
device_id = self.data.device_id
(mock_rollback.assert_called_once_with(
self.data.array, volume, device_id, {}))
def test_terminate_connection(self):
volume = self.data.test_volume
connector = self.data.connector
with mock.patch.object(self.common, '_unmap_lun') as mock_unmap:
self.common.terminate_connection(volume, connector)
mock_unmap.assert_called_once_with(
volume, connector)
@mock.patch.object(provision.PowerMaxProvision, 'extend_volume')
@mock.patch.object(common.PowerMaxCommon, '_array_ode_capabilities_check',
return_value=[True] * 4)
@mock.patch.object(common.PowerMaxCommon, '_extend_vol_validation_checks')
def test_extend_vol_no_rep_success(self, mck_val_chk, mck_ode_chk,
mck_extend):
volume = self.data.test_volume
array = self.data.array
device_id = self.data.device_id
new_size = self.data.test_volume.size
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.common.extend_volume(volume, new_size)
mck_extend.assert_called_once_with(
array, device_id, new_size, ref_extra_specs, None)
@mock.patch.object(provision.PowerMaxProvision, 'extend_volume')
@mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
return_value=(10, None))
@mock.patch.object(common.PowerMaxCommon, '_array_ode_capabilities_check',
return_value=[True] * 4)
@mock.patch.object(common.PowerMaxCommon, '_extend_vol_validation_checks')
def test_extend_vol_rep_success(self, mck_val_chk, mck_ode_chk,
mck_get_rdf, mck_extend):
volume = self.data.test_volume
array = self.data.array
device_id = self.data.device_id
new_size = self.data.test_volume.size
ref_extra_specs = deepcopy(self.data.rep_extra_specs_ode)
with mock.patch.object(self.common, '_initial_setup',
return_value=self.data.rep_extra_specs_ode):
self.common.next_gen = True
self.common.rep_config = deepcopy(ref_extra_specs)
self.common.extend_volume(volume, new_size)
mck_extend.assert_called_with(
array, device_id, new_size, ref_extra_specs, 10)
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_volume_failed_snap_src(self, mck_sync):
volume = self.data.test_volume
new_size = self.data.test_volume.size
with mock.patch.object(self.rest, 'is_vol_in_rep_session',
return_value=(False, True, None)):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.extend_volume, volume, new_size)
def test_extend_volume_failed_no_device_id(self):
volume = self.data.test_volume
new_size = self.data.test_volume.size
with mock.patch.object(self.common, '_find_device_on_array',
return_value=None):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.extend_volume, volume, new_size)
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_volume_failed_wrong_size(self, mck_sync):
volume = self.data.test_volume
new_size = 1
self.assertRaises(exception.VolumeBackendAPIException,
self.common.extend_volume, volume, new_size)
def test_update_volume_stats(self):
data = self.common.update_volume_stats()
self.assertEqual('CommonTests', data['volume_backend_name'])
def test_update_volume_stats_no_wlp(self):
with mock.patch.object(self.common, '_update_srp_stats',
return_value=('123s#SRP_1#None#None',
100, 90, 90, 10)):
data = self.common.update_volume_stats()
self.assertEqual('CommonTests', data['volume_backend_name'])
def test_update_srp_stats_with_wl(self):
with mock.patch.object(self.rest, 'get_srp_by_name',
return_value=self.data.srp_details):
location_info, __, __, __, __ = self.common._update_srp_stats(
self.data.array_info_wl)
self.assertEqual(location_info, '000197800123#SRP_1#Diamond#OLTP')
def test_update_srp_stats_no_wl(self):
with mock.patch.object(self.rest, 'get_srp_by_name',
return_value=self.data.srp_details):
location_info, __, __, __, __ = self.common._update_srp_stats(
self.data.array_info_no_wl)
self.assertEqual(location_info, '000197800123#SRP_1#Diamond')
def test_find_device_on_array_success(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
ref_device_id = self.data.device_id
founddevice_id = self.common._find_device_on_array(volume, extra_specs)
self.assertEqual(ref_device_id, founddevice_id)
def test_find_device_on_array_provider_location_not_string(self):
volume = fake_volume.fake_volume_obj(
context='cxt', provider_location=None)
extra_specs = self.data.extra_specs
founddevice_id = self.common._find_device_on_array(
volume, extra_specs)
self.assertIsNone(founddevice_id)
def test_find_legacy_device_on_array(self):
volume = self.data.test_legacy_vol
extra_specs = self.data.extra_specs
ref_device_id = self.data.device_id
founddevice_id = self.common._find_device_on_array(volume, extra_specs)
self.assertEqual(ref_device_id, founddevice_id)
def test_find_host_lun_id_attached(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
host = 'HostX'
host_lun = (
self.data.maskingview[0]['maskingViewConnection'][0][
'host_lun_address'])
ref_masked = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
maskedvols, __ = self.common.find_host_lun_id(volume, host,
extra_specs)
self.assertEqual(ref_masked, maskedvols)
def test_find_host_lun_id_not_attached(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
host = 'HostX'
with mock.patch.object(self.rest, 'find_mv_connections_for_vol',
return_value=None):
maskedvols, __ = self.common.find_host_lun_id(
volume, host, extra_specs)
self.assertEqual({}, maskedvols)
@mock.patch.object(
common.PowerMaxCommon, '_get_masking_views_from_volume',
return_value=([], [tpd.PowerMaxData.masking_view_name_f]))
def test_find_host_lun_id_multiattach(self, mock_mask):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
__, is_multiattach = self.common.find_host_lun_id(
volume, 'HostX', extra_specs)
self.assertTrue(is_multiattach)
@mock.patch.object(common.PowerMaxCommon, 'get_remote_target_device',
return_value=tpd.PowerMaxData.device_id2)
@mock.patch.object(rest.PowerMaxRest, 'get_volume',
return_value=tpd.PowerMaxData.volume_details[0])
def test_find_host_lun_id_rep_extra_specs(self, mock_vol, mock_tgt):
self.common.find_host_lun_id(
self.data.test_volume, 'HostX',
self.data.extra_specs, self.data.rep_extra_specs)
mock_tgt.assert_called_once()
def test_get_masking_views_from_volume(self):
array = self.data.array
device_id = self.data.device_id
host = 'HostX'
ref_mv_list = [self.data.masking_view_name_f]
maskingview_list, __ = self.common.get_masking_views_from_volume(
array, self.data.test_volume, device_id, host)
self.assertEqual(ref_mv_list, maskingview_list)
# is metro
with mock.patch.object(self.utils, 'is_metro_device',
return_value=True):
__, is_metro = self.common.get_masking_views_from_volume(
array, self.data.test_volume, device_id, host)
self.assertTrue(is_metro)
def test_get_masking_views_from_volume_wrong_host(self):
array = self.data.array
device_id = self.data.device_id
host = 'DifferentHost'
maskingview_list, __ = self.common.get_masking_views_from_volume(
array, self.data.test_volume, device_id, host)
self.assertEqual([], maskingview_list)
def test_find_host_lun_id_no_host_check(self):
volume = self.data.test_volume
extra_specs = self.data.extra_specs
host_lun = (self.data.maskingview[0]['maskingViewConnection'][0][
'host_lun_address'])
ref_masked = {'hostlunid': int(host_lun, 16),
'maskingview': self.data.masking_view_name_f,
'array': self.data.array,
'device_id': self.data.device_id}
maskedvols, __ = self.common.find_host_lun_id(
volume, None, extra_specs)
self.assertEqual(ref_masked, maskedvols)
def test_initial_setup_success(self):
volume = self.data.test_volume
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
extra_specs = self.common._initial_setup(volume)
self.assertEqual(ref_extra_specs, extra_specs)
def test_initial_setup_failed(self):
volume = self.data.test_volume
with mock.patch.object(
self.common, 'get_attributes_from_cinder_config',
return_value=None):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._initial_setup, volume)
@mock.patch.object(common.PowerMaxCommon, 'get_remote_target_device',
return_value=tpd.PowerMaxData.device_id2)
def test_populate_masking_dict(self, mock_tgt):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
extra_specs[utils.WORKLOAD] = self.data.workload
ref_mv_dict = self.data.masking_view_dict
self.common.next_gen = False
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertEqual(ref_mv_dict, masking_view_dict)
# Metro volume, pass in rep_extra_specs and retrieve target device
rep_extra_specs = deepcopy(self.data.rep_extra_specs)
rep_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.common._populate_masking_dict(
volume, connector, extra_specs, rep_extra_specs)
mock_tgt.assert_called_once()
# device_id is None
with mock.patch.object(self.common, '_find_device_on_array',
return_value=None):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._populate_masking_dict,
volume, connector, extra_specs)
def test_populate_masking_dict_no_slo(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = {'slo': None, 'workload': None, 'srp': self.data.srp,
'array': self.data.array,
utils.PORTGROUPNAME: self.data.port_group_name_f}
ref_mv_dict = self.data.masking_view_dict_no_slo
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertEqual(ref_mv_dict, masking_view_dict)
def test_populate_masking_dict_compr_disabled(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
extra_specs[utils.DISABLECOMPRESSION] = "true"
ref_mv_dict = self.data.masking_view_dict_compression_disabled
extra_specs[utils.WORKLOAD] = self.data.workload
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertEqual(ref_mv_dict, masking_view_dict)
def test_populate_masking_dict_next_gen(self):
volume = self.data.test_volume
connector = self.data.connector
extra_specs = deepcopy(self.data.extra_specs)
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.common.next_gen = True
masking_view_dict = self.common._populate_masking_dict(
volume, connector, extra_specs)
self.assertEqual('NONE', masking_view_dict[utils.WORKLOAD])
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
def test_create_cloned_volume(self, mck_clone_chk):
volume = self.data.test_clone_volume
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
ref_dict = self.data.provider_location_clone
clone_dict = self.common._create_cloned_volume(
volume, source_volume, extra_specs)
self.assertEqual(ref_dict, clone_dict)
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
def test_create_cloned_volume_is_snapshot(self, mck_clone_chk):
volume = self.data.test_snapshot
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
ref_dict = self.data.snap_location
clone_dict = self.common._create_cloned_volume(
volume, source_volume, extra_specs, True, False)
self.assertEqual(ref_dict, clone_dict)
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
def test_create_cloned_volume_from_snapshot(self, mck_clone_chk):
volume = self.data.test_clone_volume
source_volume = self.data.test_snapshot
extra_specs = self.data.extra_specs
ref_dict = self.data.provider_location_snapshot
clone_dict = self.common._create_cloned_volume(
volume, source_volume, extra_specs, False, True)
self.assertEqual(ref_dict, clone_dict)
def test_create_cloned_volume_not_licenced(self):
volume = self.data.test_clone_volume
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
with mock.patch.object(self.rest, 'is_snapvx_licensed',
return_value=False):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_cloned_volume,
volume, source_volume, extra_specs)
@mock.patch.object(common.PowerMaxCommon,
'_find_device_on_array')
def test_create_cloned_volume_not_licenced_2(self, mock_device):
volume = self.data.test_clone_volume
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
with mock.patch.object(self.rest, 'is_snapvx_licensed',
return_value=False):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_cloned_volume,
volume, source_volume, extra_specs,
False, False)
mock_device.assert_not_called()
@mock.patch.object(common.PowerMaxCommon,
'_find_device_on_array',
return_value=None)
@mock.patch.object(common.PowerMaxCommon,
'_clone_check')
def test_create_cloned_volume_source_not_found(
self, mock_check, mock_device):
volume = self.data.test_clone_volume
source_volume = self.data.test_volume
extra_specs = self.data.extra_specs
with mock.patch.object(self.rest, 'is_snapvx_licensed',
return_value=True):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_cloned_volume,
volume, source_volume, extra_specs,
False, False)
mock_check.assert_not_called()
def test_parse_snap_info_found(self):
ref_device_id = self.data.device_id
ref_snap_name = self.data.snap_location['snap_name']
sourcedevice_id, foundsnap_name = self.common._parse_snap_info(
self.data.array, self.data.test_snapshot)
self.assertEqual(ref_device_id, sourcedevice_id)
self.assertEqual(ref_snap_name, foundsnap_name)
def test_parse_snap_info_not_found(self):
ref_snap_name = None
with mock.patch.object(self.rest, 'get_volume_snap',
return_value=None):
__, foundsnap_name = self.common._parse_snap_info(
self.data.array, self.data.test_snapshot)
            self.assertEqual(ref_snap_name, foundsnap_name)
def test_parse_snap_info_exception(self):
with mock.patch.object(
self.rest, 'get_volume_snap',
side_effect=exception.VolumeBackendAPIException):
__, foundsnap_name = self.common._parse_snap_info(
self.data.array, self.data.test_snapshot)
self.assertIsNone(foundsnap_name)
def test_parse_snap_info_provider_location_not_string(self):
snapshot = fake_snapshot.fake_snapshot_obj(
context='ctxt', provider_loaction={'not': 'string'})
sourcedevice_id, foundsnap_name = self.common._parse_snap_info(
self.data.array, snapshot)
self.assertIsNone(foundsnap_name)
def test_create_snapshot_success(self):
array = self.data.array
snapshot = self.data.test_snapshot
source_device_id = self.data.device_id
extra_specs = self.data.extra_specs
ref_dict = {'snap_name': self.data.test_snapshot_snap_name,
'source_id': self.data.device_id}
snap_dict = self.common._create_snapshot(
array, snapshot, source_device_id, extra_specs)
self.assertEqual(ref_dict, snap_dict)
def test_create_snapshot_exception(self):
array = self.data.array
snapshot = self.data.test_snapshot
source_device_id = self.data.device_id
extra_specs = self.data.extra_specs
with mock.patch.object(
self.provision, 'create_volume_snapvx',
side_effect=exception.VolumeBackendAPIException):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_snapshot,
array, snapshot, source_device_id, extra_specs)
@mock.patch.object(masking.PowerMaxMasking,
'remove_vol_from_storage_group')
def test_delete_volume_from_srp(self, mock_rm):
array = self.data.array
device_id = self.data.device_id
volume_name = self.data.test_volume.name
ref_extra_specs = self.data.extra_specs_intervals_set
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
volume = self.data.test_volume
with mock.patch.object(self.common, '_sync_check'):
with mock.patch.object(
self.common, '_delete_from_srp') as mock_delete:
self.common._delete_volume(volume)
mock_delete.assert_called_once_with(
array, device_id, volume_name, ref_extra_specs)
def test_delete_volume_not_found(self):
volume = self.data.test_volume
with mock.patch.object(self.common, '_find_device_on_array',
return_value=None):
with mock.patch.object(
self.common, '_delete_from_srp') as mock_delete:
self.common._delete_volume(volume)
mock_delete.assert_not_called()
def test_create_volume_success(self):
volume_name = '1'
volume_size = self.data.test_volume.size
extra_specs = self.data.extra_specs
ref_dict = self.data.provider_location
with mock.patch.object(self.rest, 'get_volume',
return_value=self.data.volume_details[0]):
volume_dict = self.common._create_volume(
volume_name, volume_size, extra_specs)
self.assertEqual(ref_dict, volume_dict)
def test_create_volume_success_next_gen(self):
volume_name = '1'
volume_size = self.data.test_volume.size
extra_specs = self.data.extra_specs
self.common.next_gen = True
with mock.patch.object(
self.utils, 'is_compression_disabled', return_value=True):
with mock.patch.object(
self.rest, 'get_array_model_info',
return_value=('PowerMax 2000', True)):
with mock.patch.object(
self.masking,
'get_or_create_default_storage_group') as mock_get:
self.common._create_volume(
volume_name, volume_size, extra_specs)
mock_get.assert_called_once_with(
extra_specs['array'], extra_specs[utils.SRP],
extra_specs[utils.SLO], 'NONE', extra_specs, True,
False, None)
def test_create_volume_failed(self):
volume_name = self.data.test_volume.name
volume_size = self.data.test_volume.size
extra_specs = self.data.extra_specs
with mock.patch.object(
self.masking, 'get_or_create_default_storage_group',
return_value=self.data.failed_resource):
with mock.patch.object(
self.rest, 'delete_storage_group') as mock_delete:
# path 1: not last vol in sg
with mock.patch.object(
self.rest, 'get_num_vols_in_sg', return_value=2):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_volume,
volume_name, volume_size, extra_specs)
mock_delete.assert_not_called()
# path 2: last vol in sg, delete sg
with mock.patch.object(self.rest, 'get_num_vols_in_sg',
return_value=0):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_volume,
volume_name, volume_size, extra_specs)
mock_delete.assert_called_once_with(
self.data.array, self.data.failed_resource)
def test_create_volume_incorrect_slo(self):
volume_name = self.data.test_volume.name
volume_size = self.data.test_volume.size
extra_specs = {'slo': 'Diamondz',
'workload': 'DSSSS',
'srp': self.data.srp,
'array': self.data.array}
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._create_volume,
volume_name, volume_size, extra_specs)
@mock.patch.object(rest.PowerMaxRest, 'is_next_gen_array',
return_value=False)
@mock.patch.object(provision.PowerMaxProvision, 'verify_slo_workload',
return_value=(True, True))
@mock.patch.object(provision.PowerMaxProvision, 'create_volume_from_sg')
def test_create_volume_in_use_replication_enabled(self, mock_create,
mock_verify,
mock_nextgen):
volume_name = '1'
volume_size = self.data.test_volume.size
rep_extra_specs = self.data.rep_extra_specs3
with mock.patch.object(
self.masking,
'get_or_create_default_storage_group') as mck_sg:
self.common._create_volume(
volume_name, volume_size, rep_extra_specs, in_use=True)
mck_sg.assert_called_once_with(
rep_extra_specs['array'], rep_extra_specs['srp'],
rep_extra_specs['slo'], rep_extra_specs['workload'],
rep_extra_specs, False, True, rep_extra_specs['rep_mode'])
def test_set_vmax_extra_specs(self):
srp_record = self.common.get_attributes_from_cinder_config()
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.assertEqual(ref_extra_specs, extra_specs)
def test_set_vmax_extra_specs_no_srp_name(self):
srp_record = self.common.get_attributes_from_cinder_config()
with mock.patch.object(self.rest, 'get_slo_list',
return_value=[]):
extra_specs = self.common._set_vmax_extra_specs({}, srp_record)
self.assertIsNone(extra_specs['slo'])
def test_set_vmax_extra_specs_compr_disabled(self):
with mock.patch.object(self.rest, 'is_compression_capable',
return_value=True):
srp_record = self.common.get_attributes_from_cinder_config()
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs_compr_disabled, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
ref_extra_specs[utils.DISABLECOMPRESSION] = "true"
self.assertEqual(ref_extra_specs, extra_specs)
def test_set_vmax_extra_specs_compr_disabled_not_compr_capable(self):
srp_record = self.common.get_attributes_from_cinder_config()
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs_compr_disabled, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.assertEqual(ref_extra_specs, extra_specs)
def test_set_vmax_extra_specs_portgroup_as_spec(self):
srp_record = self.common.get_attributes_from_cinder_config()
extra_specs = self.common._set_vmax_extra_specs(
{utils.PORTGROUPNAME: 'extra_spec_pg'}, srp_record)
self.assertEqual('extra_spec_pg', extra_specs[utils.PORTGROUPNAME])
def test_set_vmax_extra_specs_no_portgroup_set(self):
srp_record = {
'srpName': 'SRP_1', 'RestServerIp': '1.1.1.1',
'RestPassword': '<PASSWORD>', 'SSLCert': None, 'RestServerPort': 8443,
'SSLVerify': False, 'RestUserName': 'smc',
'SerialNumber': '000197800123'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common._set_vmax_extra_specs,
{}, srp_record)
def test_set_vmax_extra_specs_next_gen(self):
srp_record = self.common.get_attributes_from_cinder_config()
self.common.next_gen = True
extra_specs = self.common._set_vmax_extra_specs(
self.data.vol_type_extra_specs, srp_record)
ref_extra_specs = deepcopy(self.data.extra_specs_intervals_set)
ref_extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
self.assertEqual('NONE', extra_specs[utils.WORKLOAD])
def test_delete_volume_from_srp_success(self):
array = self.data.array
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(
self.provision, 'delete_volume_from_srp') as mock_del:
self.common._delete_from_srp(array, device_id, volume_name,
extra_specs)
mock_del.assert_called_once_with(array, device_id, volume_name)
def test_delete_volume_from_srp_failed(self):
array = self.data.array
device_id = self.data.failed_resource
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(
self.masking,
'add_volume_to_default_storage_group') as mock_add:
self.assertRaises(exception.VolumeBackendAPIException,
self.common._delete_from_srp, array,
device_id, volume_name, extra_specs)
mock_add.assert_not_called()
@mock.patch.object(utils.PowerMaxUtils, 'is_replication_enabled',
side_effect=[False, True])
def test_remove_vol_and_cleanup_replication(self, mock_rep_enabled):
array = self.data.array
device_id = self.data.device_id
volume = self.data.test_volume
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(
self.masking, 'remove_and_reset_members') as mock_rm:
with mock.patch.object(
self.common, 'cleanup_lun_replication') as mock_clean:
self.common._remove_vol_and_cleanup_replication(
array, device_id, volume_name, extra_specs, volume)
mock_rm.assert_called_once_with(
array, volume, device_id, volume_name, extra_specs, False)
mock_clean.assert_not_called()
self.common._remove_vol_and_cleanup_replication(
array, device_id, volume_name, extra_specs, volume)
mock_clean.assert_called_once_with(
volume, volume_name, device_id, extra_specs)
@mock.patch.object(utils.PowerMaxUtils, 'is_volume_failed_over',
side_effect=[True, False])
@mock.patch.object(common.PowerMaxCommon, '_get_replication_extra_specs',
return_value=tpd.PowerMaxData.rep_extra_specs)
def test_get_target_wwns_from_masking_view(self, mock_rep_specs, mock_fo):
ref_wwns = [self.data.wwnn1]
for x in range(0, 2):
target_wwns = self.common._get_target_wwns_from_masking_view(
self.data.device_id, self.data.connector['host'],
self.data.extra_specs)
self.assertEqual(ref_wwns, target_wwns)
def test_get_target_wwns_from_masking_view_no_mv(self):
with mock.patch.object(self.common, '_get_masking_views_from_volume',
return_value=([], None)):
target_wwns = self.common._get_target_wwns_from_masking_view(
self.data.device_id, self.data.connector['host'],
self.data.extra_specs)
self.assertEqual([], target_wwns)
@mock.patch.object(common.PowerMaxCommon, '_get_replication_extra_specs',
return_value=tpd.PowerMaxData.rep_extra_specs)
@mock.patch.object(common.PowerMaxCommon, 'get_remote_target_device',
return_value=(tpd.PowerMaxData.device_id2,))
@mock.patch.object(utils.PowerMaxUtils, 'is_metro_device',
side_effect=[False, True])
def test_get_target_wwns(self, mock_metro, mock_tgt, mock_specs):
__, metro_wwns = self.common.get_target_wwns_from_masking_view(
self.data.test_volume, self.data.connector)
self.assertEqual([], metro_wwns)
# Is metro volume
__, metro_wwns = self.common.get_target_wwns_from_masking_view(
self.data.test_volume, self.data.connector)
self.assertEqual([self.data.wwnn1], metro_wwns)
def test_get_port_group_from_masking_view(self):
array = self.data.array
maskingview_name = self.data.masking_view_name_f
with mock.patch.object(self.rest,
'get_element_from_masking_view') as mock_get:
self.common.get_port_group_from_masking_view(
array, maskingview_name)
mock_get.assert_called_once_with(
array, maskingview_name, portgroup=True)
def test_get_initiator_group_from_masking_view(self):
array = self.data.array
maskingview_name = self.data.masking_view_name_f
with mock.patch.object(
self.rest, 'get_element_from_masking_view') as mock_get:
self.common.get_initiator_group_from_masking_view(
array, maskingview_name)
mock_get.assert_called_once_with(
array, maskingview_name, host=True)
def test_get_common_masking_views(self):
array = self.data.array
portgroup_name = self.data.port_group_name_f
initiator_group_name = self.data.initiatorgroup_name_f
with mock.patch.object(
self.rest, 'get_common_masking_views') as mock_get:
self.common.get_common_masking_views(
array, portgroup_name, initiator_group_name)
mock_get.assert_called_once_with(
array, portgroup_name, initiator_group_name)
def test_get_ip_and_iqn(self):
ref_ip_iqn = [{'iqn': self.data.initiator,
'ip': self.data.ip}]
director = self.data.portgroup[1]['symmetrixPortKey'][0]['directorId']
port = self.data.portgroup[1]['symmetrixPortKey'][0]['portId']
dirport = "%s:%s" % (director, port)
ip_iqn_list = self.common._get_ip_and_iqn(self.data.array, dirport)
self.assertEqual(ref_ip_iqn, ip_iqn_list)
def test_find_ip_and_iqns(self):
ref_ip_iqn = [{'iqn': self.data.initiator,
'ip': self.data.ip}]
ip_iqn_list = self.common._find_ip_and_iqns(
self.data.array, self.data.port_group_name_i)
self.assertEqual(ref_ip_iqn, ip_iqn_list)
def test_create_replica_snap_name(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
snap_name = self.data.snap_location['snap_name']
ref_dict = self.data.provider_location_snapshot
clone_dict = self.common._create_replica(
array, clone_volume, source_device_id,
self.data.extra_specs, snap_name)
self.assertEqual(ref_dict, clone_dict)
def test_create_replica_no_snap_name(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
snap_name = "temp-" + source_device_id + "-snapshot_for_clone"
ref_dict = self.data.provider_location_clone
with mock.patch.object(
self.utils, 'get_temp_snap_name',
return_value=snap_name) as mock_get_snap:
clone_dict = self.common._create_replica(
array, clone_volume, source_device_id,
self.data.extra_specs)
self.assertEqual(ref_dict, clone_dict)
mock_get_snap.assert_called_once_with(source_device_id)
def test_create_replica_failed_cleanup_target(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
device_id = self.data.device_id
snap_name = self.data.failed_resource
clone_name = 'OS-' + clone_volume.id
extra_specs = self.data.extra_specs
with mock.patch.object(
self.common, '_cleanup_target') as mock_cleanup:
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._create_replica, array, clone_volume, device_id,
self.data.extra_specs, snap_name)
mock_cleanup.assert_called_once_with(
array, device_id, device_id, clone_name, snap_name,
extra_specs, target_volume=clone_volume)
def test_create_replica_failed_no_target(self):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
snap_name = self.data.failed_resource
with mock.patch.object(self.common, '_create_volume',
return_value={'device_id': None}):
with mock.patch.object(
self.common, '_cleanup_target') as mock_cleanup:
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._create_replica, array, clone_volume,
source_device_id, self.data.extra_specs, snap_name)
mock_cleanup.assert_not_called()
@mock.patch.object(
utils.PowerMaxUtils,
'compare_cylinders',
side_effect=exception.VolumeBackendAPIException)
def test_create_replica_cylinder_mismatch(self, mock_cyl):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
snap_name = self.data.snap_location['snap_name']
clone_name = 'OS-' + clone_volume.id
with mock.patch.object(
self.common, '_cleanup_target') as mock_cleanup:
self.assertRaises(
Exception, self.common._create_replica, array,
clone_volume, source_device_id,
self.data.extra_specs, snap_name) # noqa: ignore=H202
mock_cleanup.assert_called_once_with(
array, source_device_id, source_device_id,
clone_name, snap_name, self.data.extra_specs,
target_volume=clone_volume)
@mock.patch.object(
masking.PowerMaxMasking,
'remove_and_reset_members')
def test_cleanup_target_sync_present(self, mock_remove):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
target_device_id = self.data.device_id2
snap_name = self.data.failed_resource
clone_name = clone_volume.name
extra_specs = self.data.extra_specs
generation = 0
with mock.patch.object(self.rest, 'get_sync_session',
return_value='session'):
with mock.patch.object(
self.provision,
'break_replication_relationship') as mock_break:
self.common._cleanup_target(
array, target_device_id, source_device_id,
clone_name, snap_name, extra_specs)
mock_break.assert_called_with(
array, target_device_id, source_device_id,
snap_name, extra_specs, generation)
@mock.patch.object(masking.PowerMaxMasking, 'remove_volume_from_sg')
def test_cleanup_target_no_sync(self, mock_remove):
array = self.data.array
clone_volume = self.data.test_clone_volume
source_device_id = self.data.device_id
target_device_id = self.data.device_id2
snap_name = self.data.failed_resource
clone_name = clone_volume.name
extra_specs = self.data.extra_specs
with mock.patch.object(self.rest, 'get_sync_session',
return_value=None):
with mock.patch.object(
self.common, '_delete_from_srp') as mock_delete:
self.common._cleanup_target(
array, target_device_id, source_device_id,
clone_name, snap_name, extra_specs)
mock_delete.assert_called_once_with(
array, target_device_id, clone_name,
extra_specs)
@mock.patch.object(
common.PowerMaxCommon, 'get_volume_metadata',
return_value={'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'})
def test_manage_existing_success(self, mck_meta):
external_ref = {u'source-name': u'00002'}
provider_location = {'device_id': u'00002', 'array': u'000197800123'}
ref_update = {'provider_location': six.text_type(provider_location),
'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}}
volume = deepcopy(self.data.test_volume)
volume.metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
with mock.patch.object(
self.common, '_check_lun_valid_for_cinder_management',
return_value=('vol1', 'test_sg')):
model_update = self.common.manage_existing(volume, external_ref)
self.assertEqual(ref_update, model_update)
@mock.patch.object(
rest.PowerMaxRest, 'get_masking_views_from_storage_group',
return_value=None)
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(False, False, None))
def test_check_lun_valid_for_cinder_management(self, mock_rep, mock_mv):
external_ref = {u'source-name': u'00003'}
vol, source_sg = self.common._check_lun_valid_for_cinder_management(
self.data.array, self.data.device_id3,
self.data.test_volume.id, external_ref)
self.assertEqual(vol, '123')
self.assertIsNone(source_sg)
@mock.patch.object(
rest.PowerMaxRest, 'get_masking_views_from_storage_group',
return_value=None)
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(False, False, None))
def test_check_lun_valid_for_cinder_management_multiple_sg_exception(
self, mock_rep, mock_mv):
external_ref = {u'source-name': u'00004'}
self.assertRaises(
exception.ManageExistingInvalidReference,
self.common._check_lun_valid_for_cinder_management,
self.data.array, self.data.device_id4,
self.data.test_volume.id, external_ref)
@mock.patch.object(rest.PowerMaxRest, 'get_volume',
side_effect=[None,
tpd.PowerMaxData.volume_details[2],
tpd.PowerMaxData.volume_details[2],
tpd.PowerMaxData.volume_details[1]])
@mock.patch.object(
rest.PowerMaxRest, 'get_masking_views_from_storage_group',
side_effect=[tpd.PowerMaxData.sg_details[1]['maskingview'],
None])
@mock.patch.object(
rest.PowerMaxRest, 'get_storage_groups_from_volume',
return_value=([tpd.PowerMaxData.defaultstoragegroup_name]))
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
side_effect=[(True, False, []), (False, False, None)])
def test_check_lun_valid_for_cinder_management_exception(
self, mock_rep, mock_sg, mock_mvs, mock_get_vol):
external_ref = {u'source-name': u'00003'}
for x in range(0, 3):
self.assertRaises(
exception.ManageExistingInvalidReference,
self.common._check_lun_valid_for_cinder_management,
self.data.array, self.data.device_id3,
self.data.test_volume.id, external_ref)
self.assertRaises(exception.ManageExistingAlreadyManaged,
self.common._check_lun_valid_for_cinder_management,
self.data.array, self.data.device_id3,
self.data.test_volume.id, external_ref)
def test_manage_existing_get_size(self):
external_ref = {u'source-name': u'00001'}
size = self.common.manage_existing_get_size(
self.data.test_volume, external_ref)
self.assertEqual(2, size)
def test_manage_existing_get_size_exception(self):
external_ref = {u'source-name': u'00001'}
with mock.patch.object(self.rest, 'get_size_of_device_on_array',
return_value=3.5):
self.assertRaises(exception.ManageExistingInvalidReference,
self.common.manage_existing_get_size,
self.data.test_volume, external_ref)
@mock.patch.object(common.PowerMaxCommon,
'_remove_vol_and_cleanup_replication')
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_unmanage_success(self, mck_sync, mock_rm):
volume = self.data.test_volume
with mock.patch.object(self.rest, 'rename_volume') as mock_rename:
self.common.unmanage(volume)
mock_rename.assert_called_once_with(
self.data.array, self.data.device_id,
self.data.test_volume.id)
# Test for success when create storage group fails
with mock.patch.object(self.rest, 'rename_volume') as mock_rename:
with mock.patch.object(
self.provision, 'create_storage_group',
side_effect=exception.VolumeBackendAPIException):
self.common.unmanage(volume)
mock_rename.assert_called_once_with(
self.data.array, self.data.device_id,
self.data.test_volume.id)
def test_unmanage_device_not_found(self):
volume = self.data.test_volume
with mock.patch.object(self.common, '_find_device_on_array',
return_value=None):
with mock.patch.object(self.rest, 'rename_volume') as mock_rename:
self.common.unmanage(volume)
mock_rename.assert_not_called()
@mock.patch.object(common.PowerMaxCommon, '_slo_workload_migration')
def test_retype(self, mock_migrate):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs_intervals_set
extra_specs[utils.PORTGROUPNAME] = self.data.port_group_name_f
volume = self.data.test_volume
new_type = {'extra_specs': {}}
host = {'host': self.data.new_host}
self.common.retype(volume, new_type, host)
mock_migrate.assert_called_once_with(
device_id, volume, host, volume_name, new_type, extra_specs)
with mock.patch.object(
self.common, '_find_device_on_array', return_value=None):
self.assertFalse(self.common.retype(volume, new_type, host))
def test_retype_attached_vol(self):
host = {'host': self.data.new_host}
new_type = {'extra_specs': {}}
with mock.patch.object(
self.common, '_find_device_on_array', return_value=True):
with mock.patch.object(self.common,
'_slo_workload_migration') as mock_retype:
self.common.retype(self.data.test_attached_volume,
new_type, host)
mock_retype.assert_called_once()
@mock.patch.object(
rest.PowerMaxRest, 'get_volume',
return_value=tpd.PowerMaxData.volume_details_attached)
@mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
return_value=tpd.PowerMaxData.sg_details[1])
@mock.patch.object(utils.PowerMaxUtils, 'get_child_sg_name',
return_value=('OS-Test-SG', '', '', ''))
@mock.patch.object(rest.PowerMaxRest, 'is_child_sg_in_parent_sg',
return_value=True)
@mock.patch.object(masking.PowerMaxMasking,
'move_volume_between_storage_groups')
@mock.patch.object(rest.PowerMaxRest, 'is_volume_in_storagegroup',
return_value=True)
def test_retype_inuse_volume_tgt_sg_exist(self, mck_vol_in_sg, mck_sg_move,
mck_child_sg_in_sg,
mck_get_sg_name,
mck_get_sg, mck_get_vol):
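        """Retype an in-use volume when the target child SG already exists.

        The volume should be moved into the existing child storage group
        and the retype should succeed.
        """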
array = self.data.array
srp = self.data.srp
slo = self.data.slo
workload = self.data.workload
device_id = self.data.device_id
volume = self.data.test_attached_volume
rep_mode = 'Synchronous'
src_extra_specs = self.data.extra_specs_migrate
interval = src_extra_specs['interval']
retries = src_extra_specs['retries']
tgt_extra_specs = {
'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
success = self.common._retype_inuse_volume(
array, srp, volume, device_id, src_extra_specs, slo, workload,
tgt_extra_specs, False)[0]
self.assertTrue(success)
mck_sg_move.assert_called()
mck_vol_in_sg.assert_called()
@mock.patch.object(
rest.PowerMaxRest, 'get_volume',
return_value=tpd.PowerMaxData.volume_details_attached)
@mock.patch.object(utils.PowerMaxUtils, 'get_child_sg_name',
return_value=('OS-Test-SG', '', '', ''))
@mock.patch.object(provision.PowerMaxProvision, 'create_storage_group')
@mock.patch.object(masking.PowerMaxMasking, 'add_child_sg_to_parent_sg')
@mock.patch.object(rest.PowerMaxRest, 'is_child_sg_in_parent_sg',
return_value=True)
@mock.patch.object(masking.PowerMaxMasking,
'move_volume_between_storage_groups')
@mock.patch.object(rest.PowerMaxRest, 'is_volume_in_storagegroup',
return_value=True)
def test_retype_inuse_volume_no_tgt_sg(self, mck_vol_in_sg, mck_move_vol,
mck_sg_in_sg, mck_add_sg_to_sg,
mck_create_sg, mck_get_csg_name,
mck_get_vol):
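        """Retype an in-use volume when the target child SG does not exist.

        A new child storage group should be created and added to the
        parent before the volume is moved, and the retype should succeed.
        """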
array = self.data.array
srp = self.data.srp
slo = self.data.slo
workload = self.data.workload
device_id = self.data.device_id
volume = self.data.test_attached_volume
rep_mode = 'Synchronous'
src_extra_specs = self.data.extra_specs_migrate
interval = src_extra_specs['interval']
retries = src_extra_specs['retries']
tgt_extra_specs = {
'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
with mock.patch.object(self.rest, 'get_storage_group',
side_effect=[self.data.sg_details[1], None,
self.data.sg_details[1]]):
success = self.common._retype_inuse_volume(
array, srp, volume, device_id, src_extra_specs, slo, workload,
tgt_extra_specs, False)[0]
mck_create_sg.assert_called()
mck_add_sg_to_sg.assert_called()
self.assertTrue(success)
@mock.patch.object(provision.PowerMaxProvision, 'create_storage_group',
return_value=None)
@mock.patch.object(rest.PowerMaxRest, 'get_volume',
return_value=tpd.PowerMaxData.volume_details_attached)
@mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
side_effect=[tpd.PowerMaxData.sg_details[1], None])
@mock.patch.object(utils.PowerMaxUtils, 'get_child_sg_name',
return_value=('OS-Test-SG', '', '', ''))
@mock.patch.object(rest.PowerMaxRest, 'is_child_sg_in_parent_sg',
return_value=False)
@mock.patch.object(masking.PowerMaxMasking,
'move_volume_between_storage_groups')
@mock.patch.object(rest.PowerMaxRest, 'is_volume_in_storagegroup',
return_value=False)
def test_retype_inuse_volume_fail(self, mck_vol_in_sg, mck_sg_move,
mck_child_sg_in_sg, mck_get_sg_name,
mck_get_sg, mck_get_vol, mck_create_sg):
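        """Failure path for retyping an in-use volume.

        The mocked REST responses prevent the retype from completing, so
        it should return False without the volume move being attempted.
        """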
array = self.data.array
srp = self.data.srp
slo = self.data.slo
workload = self.data.workload
device_id = self.data.device_id
volume = self.data.test_attached_volume
rep_mode = 'Synchronous'
src_extra_specs = self.data.extra_specs_migrate
interval = src_extra_specs['interval']
retries = src_extra_specs['retries']
tgt_extra_specs = {
'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
success = self.common._retype_inuse_volume(
array, srp, volume, device_id, src_extra_specs, slo, workload,
tgt_extra_specs, False)[0]
self.assertFalse(success)
mck_vol_in_sg.assert_not_called()
mck_sg_move.assert_not_called()
@mock.patch.object(
rest.PowerMaxRest, 'get_volume',
return_value=tpd.PowerMaxData.volume_details_attached)
@mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
return_value=tpd.PowerMaxData.sg_details[1])
@mock.patch.object(utils.PowerMaxUtils, 'get_volume_attached_hostname',
return_value=None)
def test_retype_inuse_volume_fail_no_attached_host(self, mck_get_hostname,
mck_get_sg,
mck_get_vol):
array = self.data.array
srp = self.data.srp
slo = self.data.slo
workload = self.data.workload
device_id = self.data.device_id
volume = self.data.test_attached_volume
rep_mode = 'Synchronous'
src_extra_specs = self.data.extra_specs_migrate
interval = src_extra_specs['interval']
retries = src_extra_specs['retries']
tgt_extra_specs = {
'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
success = self.common._retype_inuse_volume(
array, srp, volume, device_id, src_extra_specs, slo, workload,
tgt_extra_specs, False)[0]
self.assertFalse(success)
def test_slo_workload_migration_valid(self):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
new_type = {'extra_specs': {}}
volume = self.data.test_volume
host = {'host': self.data.new_host}
with mock.patch.object(self.common, '_migrate_volume') as mock_migrate:
self.common._slo_workload_migration(
device_id, volume, host, volume_name, new_type, extra_specs)
mock_migrate.assert_called_once_with(
extra_specs[utils.ARRAY], volume, device_id,
extra_specs[utils.SRP], 'Silver',
'OLTP', volume_name, new_type, extra_specs)
def test_slo_workload_migration_not_valid(self):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
volume = self.data.test_volume
new_type = {'extra_specs': {}}
host = {'host': self.data.new_host}
with mock.patch.object(
self.common, '_is_valid_for_storage_assisted_migration',
return_value=(False, 'Silver', 'OLTP')):
migrate_status = self.common._slo_workload_migration(
device_id, volume, host, volume_name, new_type, extra_specs)
self.assertFalse(migrate_status)
def test_slo_workload_migration_same_hosts(self):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
volume = self.data.test_volume
host = {'host': self.data.fake_host}
new_type = {'extra_specs': {'slo': 'Bronze'}}
migrate_status = self.common._slo_workload_migration(
device_id, volume, host, volume_name, new_type, extra_specs)
self.assertFalse(migrate_status)
def test_slo_workload_migration_same_host_change_compression(self):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
volume = self.data.test_volume
host = {'host': self.data.fake_host}
new_type = {'extra_specs': {utils.DISABLECOMPRESSION: "true"}}
with mock.patch.object(
self.common, '_is_valid_for_storage_assisted_migration',
return_value=(True, self.data.slo, self.data.workload)):
with mock.patch.object(
self.common, '_migrate_volume') as mock_migrate:
migrate_status = self.common._slo_workload_migration(
device_id, volume, host, volume_name, new_type,
extra_specs)
self.assertTrue(bool(migrate_status))
mock_migrate.assert_called_once_with(
extra_specs[utils.ARRAY], volume, device_id,
extra_specs[utils.SRP], self.data.slo,
self.data.workload, volume_name, new_type, extra_specs)
@mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_migrate_volume_success(self, mck_meta, mock_remove):
with mock.patch.object(self.rest, 'is_volume_in_storagegroup',
return_value=True):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
volume = self.data.test_volume
new_type = {'extra_specs': {}}
migrate_status = self.common._migrate_volume(
self.data.array, volume, device_id, self.data.srp,
self.data.slo, self.data.workload, volume_name,
new_type, extra_specs)[0]
self.assertTrue(migrate_status)
target_extra_specs = {
'array': self.data.array, 'interval': 3,
'retries': 120, 'slo': self.data.slo,
'srp': self.data.srp, 'workload': self.data.workload}
mock_remove.assert_called_once_with(
self.data.array, volume, device_id, volume_name,
target_extra_specs, reset=True)
mock_remove.reset_mock()
with mock.patch.object(
self.rest, 'get_storage_groups_from_volume',
return_value=[]):
migrate_status = self.common._migrate_volume(
self.data.array, volume, device_id, self.data.srp,
self.data.slo, self.data.workload, volume_name,
new_type, extra_specs)[0]
self.assertTrue(migrate_status)
mock_remove.assert_not_called()
@mock.patch.object(common.PowerMaxCommon, 'cleanup_lun_replication')
@mock.patch.object(common.PowerMaxCommon, '_retype_inuse_volume',
return_value=(True, 'Test'))
@mock.patch.object(common.PowerMaxCommon,
'setup_inuse_volume_replication',
return_value=('Status', 'Data', 'Info'))
@mock.patch.object(common.PowerMaxCommon, '_retype_remote_volume',
return_value=True)
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
@mock.patch.object(utils.PowerMaxUtils, 'get_async_rdf_managed_grp_name')
@mock.patch.object(rest.PowerMaxRest, 'get_storage_group',
return_value=True)
@mock.patch.object(masking.PowerMaxMasking, 'add_volume_to_storage_group')
def test_migrate_in_use_volume(
self, mck_add_vol, mck_get_sg, mck_get_rdf_name, mck_meta,
mck_remote_retype, mck_setup, mck_retype, mck_cleanup):
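        """Test _migrate_volume for an attached (in-use) volume.

        Covers the four replication transitions: no_rep => no_rep,
        rep => no_rep, no_rep => rep (Metro) and rep => rep.
        """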
# Array/Volume info
array = self.data.array
srp = self.data.srp
slo = self.data.slo
workload = self.data.workload
device_id = self.data.device_id
volume = self.data.test_attached_volume
volume_name = self.data.test_attached_volume.name
# Rep Config
rep_mode = 'Synchronous'
self.common.rep_config = {'mode': rep_mode, 'metro_use_bias': True}
# Extra Specs
new_type = {'extra_specs': {}}
src_extra_specs = self.data.extra_specs_migrate
interval = src_extra_specs['interval']
retries = src_extra_specs['retries']
tgt_extra_specs = {
'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
'interval': interval, 'retries': retries, 'rep_mode': rep_mode}
def _reset_mocks():
mck_cleanup.reset_mock()
mck_setup.reset_mock()
mck_retype.reset_mock()
mck_remote_retype.reset_mock()
# Scenario 1: no_rep => no_rep
with mock.patch.object(self.utils, 'is_replication_enabled',
side_effect=[False, False]):
success = self.common._migrate_volume(
array, volume, device_id, srp, slo, workload, volume_name,
new_type, src_extra_specs)[0]
mck_retype.assert_called_once_with(
array, srp, volume, device_id, src_extra_specs, slo, workload,
tgt_extra_specs, False)
mck_cleanup.assert_not_called()
mck_setup.assert_not_called()
mck_remote_retype.assert_not_called()
self.assertTrue(success)
_reset_mocks()
# Scenario 2: rep => no_rep
with mock.patch.object(self.utils, 'is_replication_enabled',
side_effect=[True, False]):
success = self.common._migrate_volume(
array, volume, device_id, srp, slo, workload, volume_name,
new_type, src_extra_specs)[0]
cleanup_specs = src_extra_specs
cleanup_specs['force_vol_add'] = True
mck_cleanup.assert_called_once_with(
volume, volume_name, device_id, cleanup_specs)
mck_retype.assert_called_once_with(
array, srp, volume, device_id, src_extra_specs, slo, workload,
tgt_extra_specs, False)
mck_setup.assert_not_called()
mck_remote_retype.assert_not_called()
self.assertTrue(success)
_reset_mocks()
# Scenario 3: no_rep => rep
with mock.patch.object(self.utils, 'is_replication_enabled',
side_effect=[False, True]):
tgt_extra_specs['rep_mode'] = utils.REP_METRO
self.common.rep_config['mode'] = utils.REP_METRO
success = self.common._migrate_volume(
array, volume, device_id, srp, slo, workload, volume_name,
new_type, src_extra_specs)[0]
mck_setup_specs = src_extra_specs
mck_setup_specs[utils.METROBIAS] = self.common.rep_config[
'metro_use_bias']
mck_setup.assert_called_once_with(
self.data.array, volume, device_id, mck_setup_specs)
mck_retype.assert_called_once_with(
array, srp, volume, device_id, src_extra_specs, slo,
workload, tgt_extra_specs, False)
mck_add_vol.assert_called_once()
mck_get_sg.assert_called_once()
mck_get_rdf_name.assert_called_once()
mck_cleanup.assert_not_called()
mck_remote_retype.assert_not_called()
self.assertTrue(success)
_reset_mocks()
# Scenario 4: rep => rep
with mock.patch.object(self.utils, 'is_replication_enabled',
side_effect=[True, True]):
success = self.common._migrate_volume(
array, volume, device_id, srp, slo, workload, volume_name,
new_type, src_extra_specs)[0]
mck_retype.assert_called_once_with(
array, srp, volume, device_id, src_extra_specs, slo, workload,
tgt_extra_specs, False)
mck_remote_retype.assert_called_once_with(
array, volume, device_id, volume_name, utils.REP_METRO, True,
tgt_extra_specs)
mck_cleanup.assert_not_called()
mck_setup.assert_not_called()
self.assertTrue(success)
@mock.patch.object(common.PowerMaxCommon, 'setup_volume_replication',
return_value=('Status', 'Data', 'Info'))
@mock.patch.object(common.PowerMaxCommon, '_retype_volume',
return_value=True)
@mock.patch.object(common.PowerMaxCommon, 'cleanup_lun_replication')
@mock.patch.object(common.PowerMaxCommon, '_retype_inuse_volume',
return_value=(True, 'test'))
@mock.patch.object(common.PowerMaxCommon,
'setup_inuse_volume_replication',
return_value=('Status', 'Data', 'Info'))
@mock.patch.object(common.PowerMaxCommon, '_retype_remote_volume',
return_value=True)
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_migrate_volume_attachment_path(
self, mck_meta, mck_remote_retype, mck_setup_use, mck_inuse_retype,
mck_cleanup, mck_retype, mck_setup):
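        """Test that _migrate_volume picks the correct retype path.

        An attached volume should go through _retype_inuse_volume, a
        detached volume through _retype_volume, and enabling replication
        on retype should also call setup_volume_replication.
        """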
# Array/Volume info
array = self.data.array
srp = self.data.srp
slo = self.data.slo
workload = self.data.workload
device_id = self.data.device_id
volume_attached = self.data.test_attached_volume
volume_attached_name = self.data.test_attached_volume.name
volume_not_attached = self.data.test_volume
volume_not_attached_name = self.data.test_volume.name
# Extra Specs
new_type = {'extra_specs': {}}
self.common.rep_config = {'mode': None}
src_extra_specs = self.data.extra_specs_migrate
# Scenario 1: Volume attached
with mock.patch.object(self.utils, 'is_replication_enabled',
side_effect=[False, False]):
success = self.common._migrate_volume(
array, volume_attached, device_id, srp, slo, workload,
volume_attached_name, new_type, src_extra_specs)[0]
mck_inuse_retype.assert_called_once()
self.assertTrue(success)
mck_cleanup.reset_mock()
mck_setup_use.reset_mock()
# Scenario 2: Volume not attached
with mock.patch.object(self.utils, 'is_replication_enabled',
side_effect=[False, False]):
success = self.common._migrate_volume(
array, volume_not_attached, device_id, srp, slo, workload,
volume_not_attached_name, new_type, src_extra_specs)[0]
mck_retype.assert_called_once()
self.assertTrue(success)
# Scenario 3: Volume not attached, enable RDF
tgt_extra_specs = {
'srp': srp, 'array': array, 'slo': slo, 'workload': workload,
'interval': src_extra_specs['interval'],
'retries': src_extra_specs['retries'],
utils.METROBIAS: True}
self.common.rep_config[utils.METROBIAS] = True
with mock.patch.object(self.utils, 'is_replication_enabled',
side_effect=[False, True]):
success = self.common._migrate_volume(
array, volume_not_attached, device_id, srp, slo, workload,
volume_not_attached_name, new_type, src_extra_specs)[0]
mck_setup.assert_called_once_with(array, volume_not_attached,
device_id, tgt_extra_specs)
mck_retype.assert_called_once()
self.assertTrue(success)
@mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
def test_migrate_volume_failed_get_new_sg_failed(self, mock_remove):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
new_type = {'extra_specs': {}}
with mock.patch.object(
self.masking, 'get_or_create_default_storage_group',
side_effect=exception.VolumeBackendAPIException):
migrate_status = self.common._migrate_volume(
self.data.array, self.data.test_volume, device_id,
self.data.srp, self.data.slo,
self.data.workload, volume_name, new_type, extra_specs)
self.assertFalse(migrate_status)
def test_migrate_volume_failed_vol_not_added(self):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
extra_specs = self.data.extra_specs
new_type = {'extra_specs': {}}
with mock.patch.object(
self.rest, 'is_volume_in_storagegroup',
return_value=False):
migrate_status = self.common._migrate_volume(
self.data.array, self.data.test_volume, device_id,
self.data.srp, self.data.slo,
self.data.workload, volume_name, new_type, extra_specs)[0]
self.assertFalse(migrate_status)
def test_is_valid_for_storage_assisted_migration_true(self):
device_id = self.data.device_id
host = {'host': self.data.new_host}
volume_name = self.data.test_volume.name
ref_return = (True, 'Silver', 'OLTP')
return_val = self.common._is_valid_for_storage_assisted_migration(
device_id, host, self.data.array,
self.data.srp, volume_name, False, False)
self.assertEqual(ref_return, return_val)
# No current sgs found
with mock.patch.object(self.rest, 'get_storage_groups_from_volume',
return_value=None):
return_val = self.common._is_valid_for_storage_assisted_migration(
device_id, host, self.data.array, self.data.srp,
volume_name, False, False)
self.assertEqual(ref_return, return_val)
host = {'host': 'HostX@Backend#Silver+SRP_1+000197800123'}
ref_return = (True, 'Silver', 'NONE')
return_val = self.common._is_valid_for_storage_assisted_migration(
device_id, host, self.data.array,
self.data.srp, volume_name, False, False)
self.assertEqual(ref_return, return_val)
def test_is_valid_for_storage_assisted_migration_false(self):
device_id = self.data.device_id
volume_name = self.data.test_volume.name
ref_return = (False, None, None)
# IndexError
host = {'host': 'HostX@Backend#Silver+SRP_1+000197800123+dummy+data'}
return_val = self.common._is_valid_for_storage_assisted_migration(
device_id, host, self.data.array,
self.data.srp, volume_name, False, False)
self.assertEqual(ref_return, return_val)
# Wrong array
host2 = {'host': 'HostX@Backend#Silver+OLTP+SRP_1+00012345678'}
return_val = self.common._is_valid_for_storage_assisted_migration(
device_id, host2, self.data.array,
self.data.srp, volume_name, False, False)
self.assertEqual(ref_return, return_val)
# Wrong srp
host3 = {'host': 'HostX@Backend#Silver+OLTP+SRP_2+000197800123'}
return_val = self.common._is_valid_for_storage_assisted_migration(
device_id, host3, self.data.array,
self.data.srp, volume_name, False, False)
self.assertEqual(ref_return, return_val)
# Already in correct sg
host4 = {'host': self.data.fake_host}
return_val = self.common._is_valid_for_storage_assisted_migration(
device_id, host4, self.data.array,
self.data.srp, volume_name, False, False)
self.assertEqual(ref_return, return_val)
def test_is_valid_for_storage_assisted_migration_next_gen(self):
device_id = self.data.device_id
host = {'host': self.data.new_host}
volume_name = self.data.test_volume.name
ref_return = (True, 'Silver', 'NONE')
with mock.patch.object(self.rest, 'is_next_gen_array',
return_value=True):
return_val = self.common._is_valid_for_storage_assisted_migration(
device_id, host, self.data.array,
self.data.srp, volume_name, False, False)
self.assertEqual(ref_return, return_val)
def test_find_volume_group(self):
group = self.data.test_group_1
array = self.data.array
volume_group = self.common._find_volume_group(array, group)
ref_group = self.data.sg_details_rep[0]
self.assertEqual(ref_group, volume_group)
def test_get_volume_device_ids(self):
array = self.data.array
volumes = [self.data.test_volume]
ref_device_ids = [self.data.device_id]
device_ids = self.common._get_volume_device_ids(volumes, array)
self.assertEqual(ref_device_ids, device_ids)
def test_get_members_of_volume_group(self):
array = self.data.array
group_name = self.data.storagegroup_name_source
ref_volumes = [self.data.device_id, self.data.device_id2]
member_device_ids = self.common._get_members_of_volume_group(
array, group_name)
self.assertEqual(ref_volumes, member_device_ids)
def test_get_members_of_volume_group_empty(self):
array = self.data.array
group_name = self.data.storagegroup_name_source
with mock.patch.object(
self.rest, 'get_volumes_in_storage_group',
return_value=None):
member_device_ids = self.common._get_members_of_volume_group(
array, group_name
)
self.assertIsNone(member_device_ids)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
def test_create_group_replica(self, mock_check):
source_group = self.data.test_group_1
snap_name = self.data.group_snapshot_name
with mock.patch.object(
self.common,
'_create_group_replica') as mock_create_replica:
self.common._create_group_replica(
source_group, snap_name)
mock_create_replica.assert_called_once_with(
source_group, snap_name)
def test_create_group_replica_exception(self):
source_group = self.data.test_group_failed
snap_name = self.data.group_snapshot_name
with mock.patch.object(
volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True):
self.assertRaises(exception.VolumeBackendAPIException,
self.common._create_group_replica,
source_group,
snap_name)
def test_create_group_snapshot(self):
context = None
group_snapshot = self.data.test_group_snapshot_1
snapshots = []
ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
with mock.patch.object(
volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True):
model_update, snapshots_model_update = (
self.common.create_group_snapshot(
context, group_snapshot, snapshots))
self.assertEqual(ref_model_update, model_update)
def test_create_group_snapshot_exception(self):
context = None
group_snapshot = self.data.test_group_snapshot_failed
snapshots = []
with mock.patch.object(
volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.create_group_snapshot,
context,
group_snapshot,
snapshots)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
@mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
def test_create_group(self, mock_type, mock_cg_type):
ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
model_update = self.common.create_group(None, self.data.test_group_1)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(provision.PowerMaxProvision, 'create_volume_group',
side_effect=exception.CinderException)
@mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
def test_create_group_exception(self, mock_type, mock_create):
context = None
group = self.data.test_group_failed
with mock.patch.object(
volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.create_group,
context, group)
def test_delete_group_snapshot(self):
group_snapshot = self.data.test_group_snapshot_1
snapshots = []
context = None
ref_model_update = {'status': fields.GroupSnapshotStatus.DELETED}
with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True):
model_update, snapshots_model_update = (
self.common.delete_group_snapshot(context,
group_snapshot, snapshots))
self.assertEqual(ref_model_update, model_update)
def test_delete_group_snapshot_success(self):
group_snapshot = self.data.test_group_snapshot_1
snapshots = []
ref_model_update = {'status': fields.GroupSnapshotStatus.DELETED}
with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True):
model_update, snapshots_model_update = (
self.common._delete_group_snapshot(group_snapshot,
snapshots))
self.assertEqual(ref_model_update, model_update)
def test_delete_group_snapshot_failed(self):
group_snapshot = self.data.test_group_snapshot_failed
snapshots = []
ref_model_update = (
{'status': fields.GroupSnapshotStatus.ERROR_DELETING})
with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True):
model_update, snapshots_model_update = (
self.common._delete_group_snapshot(group_snapshot,
snapshots))
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(volume_utils, 'is_group_a_type',
return_value=False)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
def test_update_group(self, mock_cg_type, mock_type_check):
group = self.data.test_group_1
add_vols = [self.data.test_volume]
remove_vols = []
ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
model_update, __, __ = self.common.update_group(group,
add_vols,
remove_vols)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(common.PowerMaxCommon, '_find_volume_group',
return_value=None)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
def test_update_group_not_found(self, mock_check, mock_grp):
self.assertRaises(exception.GroupNotFound, self.common.update_group,
self.data.test_group_1, [], [])
@mock.patch.object(common.PowerMaxCommon, '_find_volume_group',
side_effect=exception.VolumeBackendAPIException)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
def test_update_group_exception(self, mock_check, mock_grp):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.update_group,
self.data.test_group_1, [], [])
@mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
def test_delete_group(self, mock_check):
group = self.data.test_group_1
volumes = [self.data.test_volume]
context = None
ref_model_update = {'status': fields.GroupStatus.DELETED}
with mock.patch.object(
volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True), mock.patch.object(
self.rest, 'get_volumes_in_storage_group',
return_value=[]):
model_update, __ = self.common.delete_group(
context, group, volumes)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
def test_delete_group_success(self, mock_check):
group = self.data.test_group_1
volumes = []
ref_model_update = {'status': fields.GroupStatus.DELETED}
with mock.patch.object(
volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True), mock.patch.object(
self.rest, 'get_volumes_in_storage_group',
return_value=[]):
model_update, __ = self.common._delete_group(group, volumes)
self.assertEqual(ref_model_update, model_update)
def test_delete_group_already_deleted(self):
group = self.data.test_group_failed
ref_model_update = {'status': fields.GroupStatus.DELETED}
volumes = []
with mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True):
model_update, __ = self.common._delete_group(group, volumes)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(volume_utils, 'is_group_a_type', return_value=False)
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
def test_delete_group_failed(self, mock_check, mock_type_check):
group = self.data.test_group_1
volumes = []
ref_model_update = {'status': fields.GroupStatus.ERROR_DELETING}
with mock.patch.object(
self.rest, 'delete_storage_group',
side_effect=exception.VolumeBackendAPIException):
model_update, __ = self.common._delete_group(
group, volumes)
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(
common.PowerMaxCommon, '_get_clone_vol_info',
return_value=(tpd.PowerMaxData.device_id,
tpd.PowerMaxData.extra_specs, 1, 'tgt_vol'))
@mock.patch.object(volume_utils, 'is_group_a_cg_snapshot_type',
return_value=True)
@mock.patch.object(volume_utils, 'is_group_a_type',
return_value=False)
@mock.patch.object(common.PowerMaxCommon, 'get_volume_metadata',
return_value='')
def test_create_group_from_src_success(self, mck_meta, mock_type,
mock_cg_type, mock_info):
ref_model_update = {'status': fields.GroupStatus.AVAILABLE}
model_update, volumes_model_update = (
self.common.create_group_from_src(
None, self.data.test_group_1, [self.data.test_volume],
self.data.test_group_snapshot_1, [], None, []))
self.assertEqual(ref_model_update, model_update)
@mock.patch.object(
common.PowerMaxCommon, '_remove_vol_and_cleanup_replication')
@mock.patch.object(
masking.PowerMaxMasking, 'remove_volumes_from_storage_group')
def test_rollback_create_group_from_src(
self, mock_rm, mock_clean):
rollback_dict = {
'target_group_name': self.data.target_group_name,
'snap_name': 'snap1', 'source_group_name': 'src_grp',
'volumes': (self.data.device_id, self.data.extra_specs,
self.data.test_volume),
'device_ids': [self.data.device_id],
'interval_retries_dict': self.data.extra_specs}
for x in range(0, 2):
self.common._rollback_create_group_from_src(
self.data.array, rollback_dict)
self.assertEqual(2, mock_rm.call_count)
def test_get_snap_src_dev_list(self):
src_dev_ids = self.common._get_snap_src_dev_list(
self.data.array, [self.data.test_snapshot])
ref_dev_ids = [self.data.device_id]
self.assertEqual(ref_dev_ids, src_dev_ids)
def test_get_clone_vol_info(self):
ref_dev_id = self.data.device_id
source_vols = [self.data.test_volume,
self.data.test_attached_volume]
src_snapshots = [self.data.test_snapshot]
src_dev_id1, extra_specs1, vol_size1, tgt_vol_name1 = (
self.common._get_clone_vol_info(
self.data.test_clone_volume, source_vols, []))
src_dev_id2, extra_specs2, vol_size2, tgt_vol_name2 = (
self.common._get_clone_vol_info(
self.data.test_clone_volume, [], src_snapshots))
self.assertEqual(ref_dev_id, src_dev_id1)
self.assertEqual(ref_dev_id, src_dev_id2)
def test_get_attributes_from_cinder_config_new_and_old(self):
kwargs_expected = (
{'RestServerIp': '1.1.1.1', 'RestServerPort': 8443,
'RestUserName': 'smc', 'RestPassword': '<PASSWORD>', 'SSLVerify': False,
'SerialNumber': self.data.array, 'srpName': 'SRP_1',
'PortGroup': self.data.port_group_name_i})
old_conf = tpfo.FakeConfiguration(None, 'CommonTests', 1, 1)
configuration = tpfo.FakeConfiguration(
None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
vmax_array=self.data.array, vmax_srp='SRP_1', san_password='<PASSWORD>',
san_api_port=8443, vmax_port_groups=[self.data.port_group_name_i])
self.common.configuration = configuration
kwargs_returned = self.common.get_attributes_from_cinder_config()
self.assertEqual(kwargs_expected, kwargs_returned)
self.common.configuration = old_conf
kwargs = self.common.get_attributes_from_cinder_config()
self.assertIsNone(kwargs)
def test_get_attributes_from_cinder_config_with_port(self):
kwargs_expected = (
{'RestServerIp': '1.1.1.1', 'RestServerPort': 3448,
'RestUserName': 'smc', 'RestPassword': '<PASSWORD>', 'SSLVerify': False,
'SerialNumber': self.data.array, 'srpName': 'SRP_1',
'PortGroup': self.data.port_group_name_i})
configuration = tpfo.FakeConfiguration(
None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
vmax_array=self.data.array, vmax_srp='SRP_1', san_password='<PASSWORD>',
san_api_port=3448, vmax_port_groups=[self.data.port_group_name_i])
self.common.configuration = configuration
kwargs_returned = self.common.get_attributes_from_cinder_config()
self.assertEqual(kwargs_expected, kwargs_returned)
def test_get_attributes_from_cinder_config_no_port(self):
kwargs_expected = (
{'RestServerIp': '1.1.1.1', 'RestServerPort': 8443,
'RestUserName': 'smc', 'RestPassword': '<PASSWORD>', 'SSLVerify': False,
'SerialNumber': self.data.array, 'srpName': 'SRP_1',
'PortGroup': self.data.port_group_name_i})
configuration = tpfo.FakeConfiguration(
None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
vmax_array=self.data.array, vmax_srp='SRP_1', san_password='<PASSWORD>',
vmax_port_groups=[self.data.port_group_name_i])
self.common.configuration = configuration
kwargs_returned = self.common.get_attributes_from_cinder_config()
self.assertEqual(kwargs_expected, kwargs_returned)
def test_get_ssl_attributes_from_cinder_config(self):
conf = tpfo.FakeConfiguration(
None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
vmax_array=self.data.array, vmax_srp='SRP_1', san_password='<PASSWORD>',
vmax_port_groups=[self.data.port_group_name_i],
driver_ssl_cert_verify=True,
driver_ssl_cert_path='/path/to/cert')
self.common.configuration = conf
conf_returned = self.common.get_attributes_from_cinder_config()
self.assertEqual('/path/to/cert', conf_returned['SSLVerify'])
conf.driver_ssl_cert_verify = True
conf.driver_ssl_cert_path = None
conf_returned = self.common.get_attributes_from_cinder_config()
self.assertTrue(conf_returned['SSLVerify'])
conf.driver_ssl_cert_verify = False
conf.driver_ssl_cert_path = None
conf_returned = self.common.get_attributes_from_cinder_config()
self.assertFalse(conf_returned['SSLVerify'])
@mock.patch.object(rest.PowerMaxRest, 'get_size_of_device_on_array',
return_value=2.0)
def test_manage_snapshot_get_size_success(self, mock_get_size):
size = self.common.manage_existing_snapshot_get_size(
self.data.test_snapshot)
self.assertEqual(2, size)
@mock.patch.object(rest.PowerMaxRest, 'get_volume_snap',
return_value={'snap_name': 'snap_name'})
@mock.patch.object(
common.PowerMaxCommon, 'get_snapshot_metadata',
return_value={'snap-meta-key-1': 'snap-meta-value-1',
'snap-meta-key-2': 'snap-meta-value-2'})
def test_manage_snapshot_success(self, mck_meta, mock_snap):
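        """Test managing an existing snapshot.

        The returned update should rename the snapshot with the OS-
        prefix in provider_location and merge the driver's snapshot
        metadata with the user supplied metadata.
        """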
snapshot = deepcopy(self.data.test_snapshot_manage)
snapshot.metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
existing_ref = {u'source-name': u'test_snap'}
updates_response = self.common.manage_existing_snapshot(
snapshot, existing_ref)
prov_loc = {'source_id': self.data.device_id,
'snap_name': 'OS-%s' % existing_ref['source-name']}
updates = {'display_name': 'my_snap',
'provider_location': six.text_type(prov_loc),
'metadata': {'snap-meta-key-1': 'snap-meta-value-1',
'snap-meta-key-2': 'snap-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}}
self.assertEqual(updates_response, updates)
def test_manage_snapshot_fail_already_managed(self):
snapshot = self.data.test_snapshot_manage
existing_ref = {u'source-name': u'OS-test_snap'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common.manage_existing_snapshot,
snapshot, existing_ref)
@mock.patch.object(utils.PowerMaxUtils, 'is_volume_failed_over',
return_value=True)
def test_manage_snapshot_fail_vol_failed_over(self, mock_failed):
snapshot = self.data.test_snapshot_manage
existing_ref = {u'source-name': u'test_snap'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common.manage_existing_snapshot,
snapshot, existing_ref)
@mock.patch.object(rest.PowerMaxRest, 'get_volume_snap',
return_value=False)
def test_manage_snapshot_fail_vol_not_snap_src(self, mock_snap):
snapshot = self.data.test_snapshot_manage
existing_ref = {u'source-name': u'test_snap'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common.manage_existing_snapshot,
snapshot, existing_ref)
@mock.patch.object(utils.PowerMaxUtils, 'modify_snapshot_prefix',
side_effect=exception.VolumeBackendAPIException)
def test_manage_snapshot_fail_add_prefix(self, mock_mod):
snapshot = self.data.test_snapshot_manage
existing_ref = {u'source-name': u'test_snap'}
self.assertRaises(exception.VolumeBackendAPIException,
self.common.manage_existing_snapshot,
snapshot, existing_ref)
@mock.patch.object(rest.PowerMaxRest, 'modify_volume_snap')
    def test_unmanage_snapshot_success(self, mock_mod):
self.common.unmanage_snapshot(self.data.test_snapshot_manage)
mock_mod.assert_called_once()
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
@mock.patch.object(rest.PowerMaxRest, 'modify_volume_snap')
def test_unmanage_snapshot_no_sync_check(self, mock_mod, mock_sync):
self.common.unmanage_snapshot(self.data.test_snapshot_manage)
mock_mod.assert_called_once()
mock_sync.assert_not_called()
@mock.patch.object(utils.PowerMaxUtils, 'is_volume_failed_over',
return_value=True)
def test_unmanage_snapshot_fail_failover(self, mock_failed):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.unmanage_snapshot,
self.data.test_snapshot_manage)
@mock.patch.object(rest.PowerMaxRest, 'modify_volume_snap',
side_effect=exception.VolumeBackendAPIException)
def test_unmanage_snapshot_fail_rename(self, mock_snap):
self.assertRaises(exception.VolumeBackendAPIException,
self.common.unmanage_snapshot,
self.data.test_snapshot_manage)
@mock.patch.object(provision.PowerMaxProvision, 'delete_volume_snap')
@mock.patch.object(provision.PowerMaxProvision, 'is_restore_complete',
return_value=True)
@mock.patch.object(common.PowerMaxCommon, '_clone_check')
@mock.patch.object(provision.PowerMaxProvision, 'revert_volume_snapshot')
def test_revert_to_snapshot(self, mock_revert, mock_clone,
mock_complete, mock_delete):
volume = self.data.test_volume
snapshot = self.data.test_snapshot
array = self.data.array
device_id = self.data.device_id
snap_name = self.data.snap_location['snap_name']
extra_specs = deepcopy(self.data.extra_specs_intervals_set)
extra_specs['storagetype:portgroupname'] = (
self.data.port_group_name_f)
self.common.revert_to_snapshot(volume, snapshot)
mock_revert.assert_called_once_with(
array, device_id, snap_name, extra_specs)
mock_clone.assert_called_once_with(array, device_id, extra_specs)
mock_complete.assert_called_once_with(array, device_id,
snap_name, extra_specs)
mock_delete.assert_called_once_with(array, snap_name, device_id,
restored=True, generation=0)
@mock.patch.object(utils.PowerMaxUtils, 'is_replication_enabled',
return_value=True)
def test_revert_to_snapshot_replicated(self, mock_rep):
volume = self.data.test_volume
snapshot = self.data.test_snapshot
self.assertRaises(exception.VolumeDriverException,
self.common.revert_to_snapshot, volume, snapshot)
def test_get_initiator_check_flag(self):
self.common.configuration.initiator_check = False
initiator_check = self.common._get_initiator_check_flag()
self.assertFalse(initiator_check)
def test_get_initiator_check_flag_true(self):
self.common.configuration.initiator_check = True
initiator_check = self.common._get_initiator_check_flag()
self.assertTrue(initiator_check)
def test_get_manageable_volumes_success(self):
marker = limit = offset = sort_keys = sort_dirs = None
with mock.patch.object(
self.rest, 'get_private_volume_list',
return_value=self.data.priv_vol_func_response_single):
vols_lists = self.common.get_manageable_volumes(
marker, limit, offset, sort_keys, sort_dirs)
expected_response = [
{'reference': {'source-id': '00001'}, 'safe_to_manage': True,
'size': 1.0, 'reason_not_safe': None, 'cinder_id': None,
'extra_info': {'config': 'TDEV', 'emulation': 'FBA'}}]
self.assertEqual(vols_lists, expected_response)
def test_get_manageable_volumes_filters_set(self):
marker, limit, offset = '00002', 2, 1
sort_keys, sort_dirs = 'size', 'desc'
with mock.patch.object(
self.rest, 'get_private_volume_list',
return_value=self.data.priv_vol_func_response_multi):
vols_lists = self.common.get_manageable_volumes(
marker, limit, offset, sort_keys, sort_dirs)
expected_response = [
{'reference': {'source-id': '00003'}, 'safe_to_manage': True,
'size': 300, 'reason_not_safe': None, 'cinder_id': None,
'extra_info': {'config': 'TDEV', 'emulation': 'FBA'}},
{'reference': {'source-id': '00004'}, 'safe_to_manage': True,
'size': 400, 'reason_not_safe': None, 'cinder_id': None,
'extra_info': {'config': 'TDEV', 'emulation': 'FBA'}}]
self.assertEqual(vols_lists, expected_response)
def test_get_manageable_volumes_fail_no_vols(self):
marker = limit = offset = sort_keys = sort_dirs = None
with mock.patch.object(
self.rest, 'get_private_volume_list',
return_value=[]):
expected_response = []
vol_list = self.common.get_manageable_volumes(
marker, limit, offset, sort_keys, sort_dirs)
self.assertEqual(vol_list, expected_response)
def test_get_manageable_volumes_fail_no_valid_vols(self):
marker = limit = offset = sort_keys = sort_dirs = None
with mock.patch.object(
self.rest, 'get_private_volume_list',
return_value=self.data.priv_vol_func_response_multi_invalid):
expected_response = []
vol_list = self.common.get_manageable_volumes(
marker, limit, offset, sort_keys, sort_dirs)
self.assertEqual(vol_list, expected_response)
def test_get_manageable_snapshots_success(self):
marker = limit = offset = sort_keys = sort_dirs = None
with mock.patch.object(
self.rest, 'get_private_volume_list',
return_value=self.data.priv_vol_func_response_single):
snap_list = self.common.get_manageable_snapshots(
marker, limit, offset, sort_keys, sort_dirs)
expected_response = [{
'reference': {'source-name': 'testSnap1'},
'safe_to_manage': True, 'size': 1,
'reason_not_safe': None, 'cinder_id': None,
'extra_info': {
'generation': 0, 'secured': False, 'timeToLive': 'N/A',
'timestamp': mock.ANY},
'source_reference': {'source-id': '00001'}}]
self.assertEqual(snap_list, expected_response)
def test_get_manageable_snapshots_filters_set(self):
marker, limit, offset = 'testSnap2', 2, 1
sort_keys, sort_dirs = 'size', 'desc'
with mock.patch.object(
self.rest, 'get_private_volume_list',
return_value=self.data.priv_vol_func_response_multi):
vols_lists = self.common.get_manageable_snapshots(
marker, limit, offset, sort_keys, sort_dirs)
expected_response = [
{'reference': {'source-name': 'testSnap3'},
'safe_to_manage': True, 'size': 300, 'reason_not_safe': None,
'cinder_id': None, 'extra_info': {
'generation': 0, 'secured': False, 'timeToLive': 'N/A',
'timestamp': mock.ANY},
'source_reference': {'source-id': '00003'}},
{'reference': {'source-name': 'testSnap4'},
'safe_to_manage': True, 'size': 400, 'reason_not_safe': None,
'cinder_id': None, 'extra_info': {
'generation': 0, 'secured': False, 'timeToLive': 'N/A',
'timestamp': mock.ANY},
'source_reference': {'source-id': '00004'}}]
self.assertEqual(vols_lists, expected_response)
def test_get_manageable_snapshots_fail_no_snaps(self):
marker = limit = offset = sort_keys = sort_dirs = None
with mock.patch.object(self.rest, 'get_private_volume_list',
return_value=[]):
expected_response = []
vols_lists = self.common.get_manageable_snapshots(
marker, limit, offset, sort_keys, sort_dirs)
self.assertEqual(vols_lists, expected_response)
def test_get_manageable_snapshots_fail_no_valid_snaps(self):
marker = limit = offset = sort_keys = sort_dirs = None
with mock.patch.object(
self.rest, 'get_private_volume_list',
return_value=self.data.priv_vol_func_response_multi_invalid):
expected_response = []
vols_lists = self.common.get_manageable_snapshots(
marker, limit, offset, sort_keys, sort_dirs)
self.assertEqual(vols_lists, expected_response)
def test_get_slo_workload_combo_from_cinder_conf(self):
self.common.configuration.vmax_service_level = 'Diamond'
self.common.configuration.vmax_workload = 'DSS'
response1 = self.common.get_attributes_from_cinder_config()
self.assertEqual('Diamond', response1['ServiceLevel'])
self.assertEqual('DSS', response1['Workload'])
self.common.configuration.vmax_service_level = 'Diamond'
self.common.configuration.vmax_workload = None
response2 = self.common.get_attributes_from_cinder_config()
self.assertEqual(self.common.configuration.vmax_service_level,
response2['ServiceLevel'])
self.assertIsNone(response2['Workload'])
expected_response = {
'RestServerIp': '1.1.1.1', 'RestServerPort': 8443,
'RestUserName': 'smc', 'RestPassword': '<PASSWORD>', 'SSLVerify': False,
'SerialNumber': '000197800123', 'srpName': 'SRP_1',
'PortGroup': 'OS-fibre-PG'}
self.common.configuration.vmax_service_level = None
self.common.configuration.vmax_workload = 'DSS'
response3 = self.common.get_attributes_from_cinder_config()
self.assertEqual(expected_response, response3)
self.common.configuration.vmax_service_level = None
self.common.configuration.vmax_workload = None
response4 = self.common.get_attributes_from_cinder_config()
self.assertEqual(expected_response, response4)
def test_get_u4p_failover_info(self):
configuration = tpfo.FakeConfiguration(
None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='test',
san_password='<PASSWORD>', san_api_port=8443,
driver_ssl_cert_verify='/path/to/cert',
u4p_failover_target=(self.data.u4p_failover_config[
'u4p_failover_targets']), u4p_failover_backoff_factor='2',
u4p_failover_retries='3', u4p_failover_timeout='10',
u4p_primary='10.10.10.10')
self.common.configuration = configuration
self.common._get_u4p_failover_info()
self.assertTrue(self.rest.u4p_failover_enabled)
self.assertIsNotNone(self.rest.u4p_failover_targets)
def test_update_vol_stats_retest_u4p(self):
self.rest.u4p_in_failover = True
self.rest.u4p_failover_autofailback = True
with mock.patch.object(
self.common, 'retest_primary_u4p') as mock_retest:
self.common.update_volume_stats()
mock_retest.assert_called_once()
self.rest.u4p_in_failover = True
self.rest.u4p_failover_autofailback = False
with mock.patch.object(
self.common, 'retest_primary_u4p') as mock_retest:
self.common.update_volume_stats()
mock_retest.assert_not_called()
@mock.patch.object(rest.PowerMaxRest, 'request', return_value=[200, None])
@mock.patch.object(
common.PowerMaxCommon, 'get_attributes_from_cinder_config',
return_value=tpd.PowerMaxData.u4p_failover_target[0])
def test_retest_primary_u4p(self, mock_primary_u4p, mock_request):
self.common.retest_primary_u4p()
self.assertFalse(self.rest.u4p_in_failover)
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(None, False, None))
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_vol_validation_checks_success(self, mck_sync, mck_rep):
volume = self.data.test_volume
array = self.data.array
device_id = self.data.device_id
new_size = self.data.test_volume.size + 1
extra_specs = deepcopy(self.data.extra_specs)
self.common._extend_vol_validation_checks(
array, device_id, volume.name, extra_specs, volume.size, new_size)
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(None, False, None))
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_vol_val_check_no_device(self, mck_sync, mck_rep):
volume = self.data.test_volume
array = self.data.array
device_id = None
new_size = self.data.test_volume.size + 1
extra_specs = deepcopy(self.data.extra_specs)
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._extend_vol_validation_checks,
array, device_id, volume.name, extra_specs, volume.size, new_size)
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(None, True, None))
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_vol_val_check_snap_src(self, mck_sync, mck_rep):
volume = self.data.test_volume
array = self.data.array
device_id = self.data.device_id
new_size = self.data.test_volume.size + 1
extra_specs = deepcopy(self.data.extra_specs)
self.common.next_gen = False
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._extend_vol_validation_checks,
array, device_id, volume.name, extra_specs, volume.size, new_size)
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(None, False, None))
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_vol_val_check_wrong_size(self, mck_sync, mck_rep):
volume = self.data.test_volume
array = self.data.array
device_id = self.data.device_id
new_size = volume.size - 1
extra_specs = deepcopy(self.data.extra_specs)
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._extend_vol_validation_checks,
array, device_id, volume.name, extra_specs, volume.size, new_size)
def test_array_ode_capabilities_check_non_next_gen_local(self):
"""Rep enabled, neither array next gen, returns F,F,F,F"""
array = self.data.powermax_model_details['symmetrixId']
self.common.next_gen = False
(r1_ode, r1_ode_metro,
r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
array, True)
self.assertFalse(r1_ode)
self.assertFalse(r1_ode_metro)
self.assertFalse(r2_ode)
self.assertFalse(r2_ode_metro)
@mock.patch.object(rest.PowerMaxRest, 'get_array_detail',
return_value={'ucode': '5977.1.1'})
@mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
return_value=(10, tpd.PowerMaxData.remote_array))
def test_array_ode_capabilities_check_next_gen_non_rep_pre_elm(
self, mock_rdf, mock_det):
"""Rep disabled, local array next gen, pre elm, returns T,F,F,F"""
array = self.data.powermax_model_details['symmetrixId']
self.common.ucode_level = '5978.1.1'
self.common.next_gen = True
(r1_ode, r1_ode_metro,
r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
array, False)
self.assertTrue(r1_ode)
self.assertFalse(r1_ode_metro)
self.assertFalse(r2_ode)
self.assertFalse(r2_ode_metro)
@mock.patch.object(rest.PowerMaxRest, 'get_array_detail',
return_value={'ucode': '5977.1.1'})
@mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
return_value=(10, tpd.PowerMaxData.remote_array))
def test_array_ode_capabilities_check_next_gen_remote_rep(
self, mock_rdf, mock_det):
"""Rep enabled, remote not next gen, returns T,T,F,F"""
array = self.data.powermax_model_details['symmetrixId']
self.common.ucode_level = self.data.powermax_model_details['ucode']
self.common.next_gen = True
(r1_ode, r1_ode_metro,
r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
array, True)
self.assertTrue(r1_ode)
self.assertTrue(r1_ode_metro)
self.assertFalse(r2_ode)
self.assertFalse(r2_ode_metro)
@mock.patch.object(rest.PowerMaxRest, 'get_array_detail',
return_value={'ucode': '5978.1.1'})
@mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
return_value=(10, tpd.PowerMaxData.remote_array))
def test_array_ode_capabilities_check_next_gen_pre_elm_rep(
self, mock_rdf, mock_det):
"""Rep enabled, both array next gen, tgt<5978.221, returns T,T,T,F"""
array = self.data.powermax_model_details['symmetrixId']
self.common.ucode_level = self.data.powermax_model_details['ucode']
self.common.next_gen = True
(r1_ode, r1_ode_metro,
r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
array, True)
self.assertTrue(r1_ode)
self.assertTrue(r1_ode_metro)
self.assertTrue(r2_ode)
self.assertFalse(r2_ode_metro)
@mock.patch.object(rest.PowerMaxRest, 'get_array_detail',
return_value=tpd.PowerMaxData.ucode_5978_foxtail)
@mock.patch.object(common.PowerMaxCommon, 'get_rdf_details',
return_value=(10, tpd.PowerMaxData.remote_array))
def test_array_ode_capabilities_check_next_gen_post_elm_rep(
self, mock_rdf, mock_det):
"""Rep enabled, both array next gen, tgt>5978.221 returns T,T,T,T"""
array = self.data.powermax_model_details['symmetrixId']
self.common.ucode_level = self.data.powermax_model_details['ucode']
self.common.next_gen = True
(r1_ode, r1_ode_metro,
r2_ode, r2_ode_metro) = self.common._array_ode_capabilities_check(
array, True)
self.assertTrue(r1_ode)
self.assertTrue(r1_ode_metro)
self.assertTrue(r2_ode)
self.assertTrue(r2_ode_metro)
@mock.patch.object(common.PowerMaxCommon,
'_add_new_volume_to_volume_group')
@mock.patch.object(common.PowerMaxCommon, 'setup_volume_replication')
@mock.patch.object(provision.PowerMaxProvision, 'extend_volume')
@mock.patch.object(rest.PowerMaxRest, 'get_size_of_device_on_array',
return_value=tpd.PowerMaxData.test_volume.size)
@mock.patch.object(provision.PowerMaxProvision, 'break_rdf_relationship')
@mock.patch.object(masking.PowerMaxMasking, 'remove_and_reset_members')
@mock.patch.object(
common.PowerMaxCommon, '_get_replication_extra_specs',
return_value=tpd.PowerMaxData.rep_extra_specs)
@mock.patch.object(
common.PowerMaxCommon, 'get_remote_target_device',
return_value=(
tpd.PowerMaxData.device_id2, tpd.PowerMaxData.remote_array,
tpd.PowerMaxData.rdf_group_vol_details['localRdfGroupNumber'],
tpd.PowerMaxData.rdf_group_vol_details['localVolumeState'],
tpd.PowerMaxData.rdf_group_vol_details['rdfpairState']))
def test_extend_legacy_replicated_vol(self, mck_get_tgt, mck_rdf_specs,
mck_reset, mck_break_rdf, mck_size,
mck_extend, mck_set_rep, mck_add):
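        """Test extending a volume that uses legacy (non-ODE) replication."""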
volume = self.data.test_volume_group_member
array = self.data.array
device_id = self.data.device_id
new_size = volume.size + 1
extra_specs = deepcopy(self.data.extra_specs)
self.common._extend_legacy_replicated_vol(
array, volume, device_id, volume.name, new_size, extra_specs)
@mock.patch.object(
common.PowerMaxCommon, 'get_remote_target_device',
return_value=(None, None, None, None, None))
@mock.patch.object(common.PowerMaxCommon, '_sync_check')
def test_extend_legacy_replicated_vol_fail(self, mck_sync, mck_get_tgt):
volume = self.data.test_volume_group_member
array = self.data.array
device_id = self.data.device_id
new_size = volume.size + 1
extra_specs = deepcopy(self.data.extra_specs)
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._extend_vol_validation_checks,
array, device_id, volume.name, extra_specs, volume.size, new_size)
def test_get_unisphere_port(self):
# Test user set port ID
configuration = tpfo.FakeConfiguration(
None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
vmax_array=self.data.array, vmax_srp='SRP_1', san_password='<PASSWORD>',
san_api_port=1234, vmax_port_groups=[self.data.port_group_name_i])
self.common.configuration = configuration
port = self.common._get_unisphere_port()
self.assertEqual(1234, port)
# Test no set port ID, use default port
configuration = tpfo.FakeConfiguration(
None, 'CommonTests', 1, 1, san_ip='1.1.1.1', san_login='smc',
vmax_array=self.data.array, vmax_srp='SRP_1', san_password='<PASSWORD>',
vmax_port_groups=[self.data.port_group_name_i])
self.common.configuration = configuration
ref_port = utils.DEFAULT_PORT
port = self.common._get_unisphere_port()
self.assertEqual(ref_port, port)
@mock.patch.object(utils.PowerMaxUtils,
'get_replication_config')
def test_get_replication_info(self, mock_config):
self.common._get_replication_info()
mock_config.assert_not_called()
@mock.patch.object(common.PowerMaxCommon,
'_do_sync_check')
def test_sync_check_no_source_device_on_array(self, mock_check):
with mock.patch.object(self.rest, 'get_volume',
side_effect=exception.VolumeBackendAPIException(
"404 00123 does not exist")):
array = self.data.array
device_id = self.data.device_id
extra_specs = self.data.extra_specs
self.common._sync_check(array, device_id, extra_specs,
source_device_id='00123')
mock_check.assert_not_called()
def test_sync_check(self):
array = self.data.array
device_id = self.data.device_id
extra_specs = self.data.extra_specs
with mock.patch.object(self.common, '_do_sync_check') as mck_sync:
self.common._sync_check(array, device_id, extra_specs, False,
self.data.device_id2)
mck_sync.assert_called_with(array, self.data.device_id2,
extra_specs, False)
mck_sync.reset_mock()
with mock.patch.object(self.common, '_get_target_source_device',
return_value=self.data.device_id3):
self.common._sync_check(array, device_id, extra_specs, True)
mck_sync.assert_called_with(array, self.data.device_id3,
extra_specs, True)
mck_sync.reset_mock()
self.common._sync_check(array, device_id, extra_specs)
mck_sync.assert_called_with(array, device_id, extra_specs, False)
@mock.patch.object(common.PowerMaxCommon,
'_unlink_targets_and_delete_temp_snapvx')
@mock.patch.object(rest.PowerMaxRest, 'find_snap_vx_sessions',
return_value=(tpd.PowerMaxData.snap_src_sessions,
tpd.PowerMaxData.snap_tgt_session))
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(True, True, False))
def test_do_sync_check(self, mck_rep, mck_find, mck_unlink):
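        """Test that _do_sync_check cleans up every snapvx session found."""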
array = self.data.array
device_id = self.data.device_id
extra_specs = self.data.extra_specs
self.common._do_sync_check(array, device_id, extra_specs)
self.assertEqual(3, mck_unlink.call_count)
@mock.patch.object(provision.PowerMaxProvision, 'delete_temp_volume_snap')
@mock.patch.object(provision.PowerMaxProvision,
'break_replication_relationship')
def test_unlink_targets_and_delete_temp_snapvx(self, mck_break, mck_del):
array = self.data.array
extra_specs = self.data.extra_specs
session = self.data.snap_tgt_session_cm_enabled
snap_name = session['snap_name']
source = session['source_vol_id']
generation = session['generation']
target = session['target_vol_id']
self.common._unlink_targets_and_delete_temp_snapvx(
session, array, extra_specs)
mck_break.assert_called_with(array, target, source, snap_name,
extra_specs, generation, True)
mck_del.assert_called_once_with(array, snap_name, source, generation)
mck_break.reset_mock()
mck_del.reset_mock()
session['copy_mode'] = False
session['expired'] = True
self.common._unlink_targets_and_delete_temp_snapvx(
session, array, extra_specs)
mck_break.assert_called_with(array, target, source, snap_name,
extra_specs, generation, False)
mck_del.assert_not_called()
@mock.patch.object(rest.PowerMaxRest, 'find_snap_vx_sessions',
return_value=(None, tpd.PowerMaxData.snap_tgt_session))
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(True, False, False))
def test_get_target_source_device(self, mck_rep, mck_find):
array = self.data.array
tgt_device = self.data.device_id2
src_device = self.common._get_target_source_device(array, tgt_device)
self.assertEqual(src_device, self.data.device_id)
@mock.patch.object(common.PowerMaxCommon, '_delete_valid_snapshot')
@mock.patch.object(rest.PowerMaxRest, 'find_snap_vx_sessions',
return_value=(tpd.PowerMaxData.snap_src_sessions,
tpd.PowerMaxData.snap_tgt_session))
@mock.patch.object(rest.PowerMaxRest, 'is_vol_in_rep_session',
return_value=(True, True, False))
def test_clone_check(self, mck_rep, mck_find, mck_del):
array = self.data.array
device_id = self.data.device_id
extra_specs = self.data.extra_specs
self.common.snapvx_unlink_limit = 3
self.common._clone_check(array, device_id, extra_specs)
self.assertEqual(3, mck_del.call_count)
@mock.patch.object(common.PowerMaxCommon,
'_unlink_targets_and_delete_temp_snapvx')
def test_delete_valid_snapshot(self, mck_unlink):
array = self.data.array
extra_specs = self.data.extra_specs
session = {'snap_name': 'EMC_SMI_TEST', 'expired': False}
self.common._delete_valid_snapshot(array, session, extra_specs)
mck_unlink.assert_called_with(session, array, extra_specs)
mck_unlink.reset_mock()
session = {'snap_name': 'temp-000AA-snapshot_for_clone',
'expired': True}
self.common._delete_valid_snapshot(array, session, extra_specs)
mck_unlink.assert_called_with(session, array, extra_specs)
mck_unlink.reset_mock()
session = {'snap_name': 'temp-000AA-snapshot_for_clone',
'expired': False}
self.common._delete_valid_snapshot(array, session, extra_specs)
mck_unlink.assert_not_called()
def test_delete_valid_snapshot_exception(self):
array = self.data.array
extra_specs = self.data.extra_specs
session = {'snap_name': 'temp-000AA-snapshot_for_clone',
'expired': True}
with mock.patch.object(
self.common, '_unlink_targets_and_delete_temp_snapvx',
side_effect=exception.VolumeBackendAPIException(
"404 temp-000AA-snapshot_for_clone does not exist")
) as mck_unlink:
self.common._delete_valid_snapshot(array, session, extra_specs)
mck_unlink.assert_called_with(session, array, extra_specs)
with mock.patch.object(
self.common, '_unlink_targets_and_delete_temp_snapvx',
side_effect=exception.VolumeBackendAPIException(
"500 internal server error")):
self.assertRaises(
exception.VolumeBackendAPIException,
self.common._unlink_targets_and_delete_temp_snapvx,
array, session, extra_specs)
@mock.patch.object(rest.PowerMaxRest, '_get_private_volume',
return_value=tpd.PowerMaxData.priv_vol_response_rep)
@mock.patch.object(rest.PowerMaxRest, 'get_array_model_info',
return_value=(tpd.PowerMaxData.array_model, None))
@mock.patch.object(rest.PowerMaxRest, 'get_rdf_group',
return_value=(tpd.PowerMaxData.rdf_group_details))
def test_get_volume_metadata_rep(self, mck_rdf, mck_model, mck_priv):
ref_metadata = {
'DeviceID': self.data.device_id,
'DeviceLabel': self.data.device_label, 'ArrayID': self.data.array,
'ArrayModel': self.data.array_model, 'ServiceLevel': 'None',
'Workload': 'None', 'Emulation': 'FBA', 'Configuration': 'TDEV',
'CompressionDisabled': 'True', 'ReplicationEnabled': 'True',
'R2-DeviceID': self.data.device_id2,
'R2-ArrayID': self.data.remote_array,
'R2-ArrayModel': self.data.array_model,
'ReplicationMode': 'Synchronized',
'RDFG-Label': self.data.rdf_group_name,
'R1-RDFG': 1, 'R2-RDFG': 1}
array = self.data.array
device_id = self.data.device_id
act_metadata = self.common.get_volume_metadata(array, device_id)
self.assertEqual(ref_metadata, act_metadata)
@mock.patch.object(rest.PowerMaxRest, '_get_private_volume',
return_value=tpd.PowerMaxData.
priv_vol_response_metro_active_rep)
@mock.patch.object(rest.PowerMaxRest, 'get_array_model_info',
return_value=(tpd.PowerMaxData.array_model, None))
@mock.patch.object(rest.PowerMaxRest, 'get_rdf_group',
return_value=(tpd.PowerMaxData.rdf_group_details))
def test_get_volume_metadata_metro_active_rep(self, mck_rdf,
mck_model, mck_priv):
ref_metadata = {
'DeviceID': self.data.device_id,
'DeviceLabel': self.data.device_label, 'ArrayID': self.data.array,
'ArrayModel': self.data.array_model, 'ServiceLevel': 'None',
'Workload': 'None', 'Emulation': 'FBA', 'Configuration': 'TDEV',
'CompressionDisabled': 'True', 'ReplicationEnabled': 'True',
'R2-DeviceID': self.data.device_id2,
'R2-ArrayID': self.data.remote_array,
'R2-ArrayModel': self.data.array_model,
'ReplicationMode': 'Metro',
'RDFG-Label': self.data.rdf_group_name,
'R1-RDFG': 1, 'R2-RDFG': 1}
array = self.data.array
device_id = self.data.device_id
act_metadata = self.common.get_volume_metadata(array, device_id)
self.assertEqual(ref_metadata, act_metadata)
@mock.patch.object(rest.PowerMaxRest, '_get_private_volume',
return_value=tpd.PowerMaxData.priv_vol_response_no_rep)
@mock.patch.object(rest.PowerMaxRest, 'get_array_model_info',
return_value=(tpd.PowerMaxData.array_model, None))
def test_get_volume_metadata_no_rep(self, mck_model, mck_priv):
ref_metadata = {
'DeviceID': self.data.device_id,
'DeviceLabel': self.data.device_label, 'ArrayID': self.data.array,
'ArrayModel': self.data.array_model, 'ServiceLevel': 'None',
'Workload': 'None', 'Emulation': 'FBA', 'Configuration': 'TDEV',
'CompressionDisabled': 'True', 'ReplicationEnabled': 'False'}
array = self.data.array
device_id = self.data.device_id
act_metadata = self.common.get_volume_metadata(array, device_id)
self.assertEqual(ref_metadata, act_metadata)
@mock.patch.object(rest.PowerMaxRest, 'get_volume_snap_info',
return_value=tpd.PowerMaxData.priv_snap_response)
def test_get_snapshot_metadata(self, mck_snap):
array = self.data.array
device_id = self.data.device_id
device_label = self.data.managed_snap_id
snap_name = self.data.test_snapshot_snap_name
ref_metadata = {'SnapshotLabel': snap_name,
'SourceDeviceID': device_id,
'SourceDeviceLabel': device_label}
act_metadata = self.common.get_snapshot_metadata(
array, device_id, snap_name)
self.assertEqual(ref_metadata, act_metadata)
def test_update_metadata(self):
model_update = {'provider_location': six.text_type(
self.data.provider_location)}
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location),
'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}})
existing_metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
object_metadata = {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}
model_update = self.common.update_metadata(
model_update, existing_metadata, object_metadata)
self.assertEqual(ref_model_update, model_update)
def test_update_metadata_no_model(self):
model_update = None
ref_model_update = (
{'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2',
'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}})
existing_metadata = {'user-meta-key-1': 'user-meta-value-1',
'user-meta-key-2': 'user-meta-value-2'}
object_metadata = {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}
model_update = self.common.update_metadata(
model_update, existing_metadata, object_metadata)
self.assertEqual(ref_model_update, model_update)
def test_update_metadata_no_existing_metadata(self):
model_update = {'provider_location': six.text_type(
self.data.provider_location)}
ref_model_update = (
{'provider_location': six.text_type(self.data.provider_location),
'metadata': {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}})
existing_metadata = None
object_metadata = {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}
model_update = self.common.update_metadata(
model_update, existing_metadata, object_metadata)
self.assertEqual(ref_model_update, model_update)
def test_update_metadata_model_list_exception(self):
model_update = [{'provider_location': six.text_type(
self.data.provider_location)}]
existing_metadata = None
object_metadata = {'device-meta-key-1': 'device-meta-value-1',
'device-meta-key-2': 'device-meta-value-2'}
self.assertRaises(
exception.VolumeBackendAPIException,
self.common.update_metadata, model_update, existing_metadata,
object_metadata)
|
StarcoderdataPython
|
9705690
|
from time import sleep
print('-=-=-=-= CHALLENGE 99 -=-=-=-=')
print()
def maior(* num):  # MY SOLUTION USES A TUPLE
print('-='*20)
    print('Analyzing the values passed in...')
maior = 0
    for c, n in enumerate(num):  # TUPLE
if c == 0 or n > maior:
maior = n
print(n, end=' ')
sleep(0.5)
print()
    print(f'{len(num)} values were provided in total.')
    print(f'The largest value provided was {maior}.')
maior(2, 9, 4, 5, 7, 1)
maior(4, 7, 0)
maior(1, 2)
maior(6)
maior()
# INSTRUCTOR'S SOLUTION:
# def maior(* num):
# cont = maior = 0
# for valor in num:
# print(f'{valor} ', end='')
# sleep(0.5)
# if cont == 0:
# maior = valor
# else:
# if valor > maior:
# maior = valor
# cont += 1
#     print(f'{cont} values in total')
#     print(f'The largest one was {maior}')
|
StarcoderdataPython
|
6565576
|
<reponame>crazbot/unet-gan-matting
import os
import cv2
import random
import numpy
import math
def image_fill(img, size, value):
border = [math.ceil((size[0] - img.shape[0])/2),
math.floor((size[0] - img.shape[0])/2),
math.ceil((size[1] - img.shape[1])/2),
math.floor((size[1] - img.shape[1])/2)]
return cv2.copyMakeBorder(img,border[0],border[1],border[2],border[3],cv2.BORDER_CONSTANT,value=value)
def combine_object_background(object_file, background_file, output_name):
border = 20
size = [960, 720]
foreground = cv2.imread(object_file, cv2.IMREAD_UNCHANGED)
if foreground is None:
return False
ratio = numpy.amin(numpy.divide(
numpy.subtract(size, [2*border, 2*border]), foreground.shape[0:2]))
forground_size = numpy.floor(numpy.multiply(foreground.shape[0:2], ratio)).astype(int)
foreground = cv2.resize(foreground, (forground_size[1], forground_size[0]))
foreground = image_fill(foreground,size,[0,0,0,0])
foreground = foreground.astype(float)
cv2.normalize(foreground, foreground, 0.0, 1.0, cv2.NORM_MINMAX)
alpha = cv2.split(foreground)[3]
#foreground = cv2.imread(object_file, cv2.IMREAD_COLOR)
background = cv2.imread(background_file)
if background is None:
return False
ratio = numpy.amax(numpy.divide(foreground.shape[0:2], background.shape[0:2]))
background_size = numpy.ceil(numpy.multiply(background.shape[0:2], ratio)).astype(int)
#print(numpy.multiply(background.shape[0:2], ratio).astype(int))
background = cv2.resize(background, (background_size[1], background_size[0]))
background = background[0:foreground.shape[0], 0:foreground.shape[1]]
background = background.astype(float)
for i in range(0, 3):
foreground[:,:,i] = numpy.multiply(alpha, foreground[:,:,i]*255)
background[:,:,i] = numpy.multiply(1.0 - alpha, background[:,:,i])
outImage = numpy.add(foreground[:,:,0:3], background)
cv2.imwrite(output_name, outImage)
return True
def generate_trimap(object_file, trimap_name):
border = 20
size = [960, 720]
foreground = cv2.imread(object_file, cv2.IMREAD_UNCHANGED)
if foreground is None:
return False
alpha = cv2.split(foreground)[3]
ratio = numpy.amin(numpy.divide(
numpy.subtract(size, [2*border, 2*border]), alpha.shape[0:2]))
forground_size = numpy.floor(numpy.multiply(alpha.shape[0:2], ratio)).astype(int)
alpha = cv2.resize(alpha, (forground_size[1], forground_size[0]))
alpha = image_fill(alpha,size,[0,0,0,0])
alpha = alpha.astype(float)
cv2.normalize(alpha, alpha, 0.0, 1.0, cv2.NORM_MINMAX)
_, inner_map = cv2.threshold(alpha, 0.999, 255, cv2.THRESH_BINARY)
_, outer_map = cv2.threshold(alpha, 0.001, 255, cv2.THRESH_BINARY)
inner_map = cv2.erode(inner_map, numpy.ones((5,5),numpy.uint8), iterations = 3)
outer_map = cv2.dilate(outer_map, numpy.ones((5,5),numpy.uint8), iterations = 3)
cv2.imwrite(trimap_name, inner_map + (outer_map - inner_map) /2)
foreground = cv2.imread(object_file, cv2.IMREAD_UNCHANGED)
def generate_target(object_file, target_name):
border = 20
size = [960, 720]
foreground = cv2.imread(object_file, cv2.IMREAD_UNCHANGED)
if foreground is None:
return False
cv2.normalize(foreground, foreground, 0, 255, cv2.NORM_MINMAX)
foreground = foreground.astype(numpy.uint8)
ratio = numpy.amin(numpy.divide(
numpy.subtract(size, [2*border, 2*border]), foreground.shape[0:2]))
forground_size = numpy.floor(numpy.multiply(foreground.shape[0:2], ratio)).astype(int)
foreground = cv2.resize(foreground, (forground_size[1], forground_size[0]))
foreground = image_fill(foreground,size,[0,0,0,0])
cv2.imwrite(target_name, foreground)
def build_dataset(object_dir, background_dir, input_dir, trimap_dir, target_dir):
object_filenames = os.listdir(object_dir)
background_filenames = os.listdir(background_dir)
for i, object_file in enumerate(object_filenames):
generate_trimap(
os.path.join(object_dir, object_file),
os.path.join(trimap_dir, str(i) + '_trimap.jpg'))
generate_target(
os.path.join(object_dir, object_file),
os.path.join(target_dir, str(i) + '.png'))
backgrounds = random.sample(background_filenames, 20)
for j, background_file in enumerate(backgrounds):
print(i, j, object_file, background_file)
combine_object_background(os.path.join(object_dir, object_file),
os.path.join(background_dir, background_file),
os.path.join(input_dir, str(i) + '_' + str(j) + '.jpg'))
if __name__ == "__main__":
object_dir = os.path.join("data", "matting", "portrait transparent background")
background_dir = os.path.join("data", "matting", "texture background")
input_dir = os.path.join("data", "matting", "input")
trimap_dir = os.path.join("data", "matting", "trimap")
target_dir = os.path.join("data", "matting", "target")
if not os.path.isdir(input_dir):
os.makedirs(input_dir)
if not os.path.isdir(trimap_dir):
os.makedirs(trimap_dir)
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
build_dataset(object_dir, background_dir, input_dir, trimap_dir, target_dir)
|
StarcoderdataPython
|
9772799
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# Note that this is mandatory. If absent, the "autodoc" extension enabled below
# fails with the following build-time error:
# autodoc: failed to import module 'beartype_sphinx'; the following exception was raised:
# No module named 'beartype_sphinx'
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'beartype_sphinx'
copyright = '2021, @leycec'
author = '@leycec'
# The full version, including alpha/beta/rc tags
release = '0.0.1'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
# ..................{ BUILTIN }..................
# Builtin extensions unconditionally available under *ALL* reasonably
# modern versions of Sphinx uniquely prefixed by "sphinx.ext.".
# Builtin extension autogenerating reStructuredText documentation from
# class, callable, and variable docstrings embedded in Python modules,
# documenting this project's public (and optionally also private) API.
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
|
StarcoderdataPython
|
1742166
|
<reponame>grahamgower/demesdraw
"""
Add version string to the navbar and footer.
"""
import demesdraw
def inject_version(app, config):
v = demesdraw.__version__
if v != "undefined":
v_short = v.split("+")[0]
config.html_theme_options["extra_navbar"] = f"demesdraw {v_short}"
config.html_theme_options["extra_footer"] = f"demesdraw {v}"
def setup(app):
app.connect("config-inited", inject_version)
|
StarcoderdataPython
|
11219410
|
<filename>server/app/services/tasks_scheduler/timer_tasks/app/api_count/sql_statements.py
api_hour_count_sql = """
INSERT INTO app_api_logs_hour("createAt", "countTime", "apiCount", "tenantID")
SELECT current_timestamp AS "createAt",
date_trunc('hour', current_timestamp - INTERVAL '1 hour') AS "countTime",
COUNT(*) AS "apiCount",
app_api_logs."tenantID"
FROM app_api_logs
WHERE "createAt" >= date_trunc('hour', current_timestamp - INTERVAL '1 hour')
AND "createAt" < date_trunc('hour', current_timestamp)
GROUP BY app_api_logs."tenantID"
"""
# Daily aggregation based on hourly aggregation
api_day_count_sql = """
INSERT INTO app_api_logs_day(
"createAt", "countTime", "apiCount", "tenantID"
)
SELECT
current_timestamp AS "createAt",
date_trunc('day', current_timestamp - INTERVAL '1 day') AS "countTime",
SUM(app_api_logs_hour."apiCount") AS "apiCount",
app_api_logs_hour."tenantID"
FROM app_api_logs_hour
WHERE
"countTime" >= date_trunc('day', current_timestamp - INTERVAL '1 day')
AND "countTime" < date_trunc('day', current_timestamp)
GROUP BY app_api_logs_hour."tenantID"
"""
# Monthly aggregation based on daily aggregation
api_month_count_sql = """
INSERT INTO app_api_logs_month("createAt", "countTime", "apiCount", "tenantID")
SELECT current_timestamp AS "createAt",
date_trunc('month',
current_timestamp - INTERVAL '1 month') AS "countTime",
SUM(app_api_logs_day."apiCount") AS "apiCount",
app_api_logs_day."tenantID"
FROM app_api_logs_day
WHERE "countTime" >= date_trunc('month', current_timestamp - INTERVAL '1 month')
AND "countTime" < date_trunc('month', current_timestamp)
GROUP BY app_api_logs_day."tenantID"
"""
|
StarcoderdataPython
|
3216935
|
print(ma<caret>)
|
StarcoderdataPython
|
3286302
|
<reponame>fuh/wechatpy
# -*- coding: utf-8 -*-
"""
wechatpy.exceptions
~~~~~~~~~~~~~~~~~~~~
Basic exceptions definition.
:copyright: (c) 2014 by messense.
:license: MIT, see LICENSE for more details.
"""
class WeChatException(Exception):
"""Base exception for wechatpy"""
def __init__(self, errcode, errmsg):
"""
:param errcode: Error code
:param errmsg: Error message
"""
self.errcode = errcode
self.errmsg = errmsg
def __str__(self):
s = f"Error code: {self.errcode}, message: {self.errmsg}"
return s
def __repr__(self):
_repr = f"{self.__class__.__name__}({self.errcode}, {self.errmsg})"
return _repr
class WeChatClientException(WeChatException):
"""WeChat API client exception class"""
def __init__(self, errcode, errmsg, client=None, request=None, response=None):
super().__init__(errcode, errmsg)
self.client = client
self.request = request
self.response = response
class InvalidSignatureException(WeChatException):
"""Invalid signature exception class"""
def __init__(self, errcode=-40001, errmsg="Invalid signature"):
super().__init__(errcode, errmsg)
class APILimitedException(WeChatClientException):
"""WeChat API call limited exception class"""
pass
class InvalidAppIdException(WeChatException):
"""Invalid app_id exception class"""
def __init__(self, errcode=-40005, errmsg="Invalid AppId"):
super().__init__(errcode, errmsg)
class InvalidMchIdException(WeChatException):
"""Invalid mch_id exception class"""
def __init__(self, errcode=-40006, errmsg="Invalid MchId"):
super().__init__(errcode, errmsg)
class WeChatOAuthException(WeChatClientException):
"""WeChat OAuth API exception class"""
pass
class WeChatComponentOAuthException(WeChatClientException):
"""WeChat Component OAuth API exception class"""
pass
class WeChatPayException(WeChatClientException):
"""WeChat Pay API exception class"""
def __init__(
self,
return_code,
result_code=None,
return_msg=None,
errcode=None,
errmsg=None,
client=None,
request=None,
response=None,
):
"""
        :param return_code: return status code
        :param result_code: business result
        :param return_msg: return message
        :param errcode: error code
        :param errmsg: description of the error code
"""
super().__init__(errcode, errmsg, client, request, response)
self.return_code = return_code
self.result_code = result_code
self.return_msg = return_msg
def __str__(self):
_str = f"Error code: {self.return_code}, message: {self.return_msg}. Pay Error code: {self.errcode}, message: {self.errmsg}"
return _str
def __repr__(self):
_repr = f"{self.__class__.__name__}({self.return_code}, {self.return_msg}). Pay({self.errcode}, {self.errmsg})"
return _repr
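

# Hedged usage sketch (appended for illustration, not part of wechatpy itself):
# how calling code typically tells a rate-limit error apart from other client
# errors. The error code and message below are placeholders.
def _demo_error_handling():
    try:
        raise APILimitedException(45009, "api daily quota reached")
    except APILimitedException as exc:
        # Rate-limit errors are usually retried after a back-off.
        print(f"rate limited: {exc}")
    except WeChatClientException as exc:
        # Any other client-side API failure is surfaced to the caller.
        print(f"API error: {exc}")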
|
StarcoderdataPython
|
9671938
|
<filename>experiments/ERA5/plot.py
try:
from mpl_toolkits.basemap import Basemap
except:
print('Install `basemap` library first!')
import glob
import pickle
import sys
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
def output_preprocessor(
mdl_list=['Ridge', 'KNN', 'SVM', 'ELM', 'RF', 'Adaboost',
'GBDT', 'Xgboost', 'LightGBM', 'ConvLSTM'],
file_path='/hard/lilu/score/',
num_metrics=14):
# init score matrix
score = np.full((len(mdl_list), num_metrics, 180, 360), np.nan)
for i, mdl_name in enumerate(mdl_list):
# generate score path of model
path = file_path + mdl_name.lower() + '_score' + '.pickle'
f = open(path, 'rb')
# load score file
score[i, :, :, :] = pickle.load(f)
# remove polar
score[:, :, 0:32, 299:346] = np.nan
score[:, :, 0:18, 287:300] = np.nan
#dscore[:, :, 150:, :] = np.nan
# generate metrics
bias = score[:, 0, :, :]
rmse = score[:, 1, :, :]
nse = score[:, 2, :, :]
r2 = score[:, 3, :, :]
wi = score[:, 4, :, :]
kge = score[:, 5, :, :]
r = score[:, 6, :, :]
m1 = score[:, 7, :, :] # true
m2 = score[:, 8, :, :] # predict
mae = score[:, 9, :, :]
mse = score[:, 10, :, :]
score_ = score[:, 11, :, :]
std1 = score[:, 12, :, :]
std2 = score[:, 13, :, :]
# remove bad grids.
bias[r2 < 0] = np.nan
nse[r2 < 0] = np.nan
rmse[r2 < 0] = np.nan
wi[r2 < 0] = np.nan
kge[r2 < 0] = np.nan
r[r2 < 0] = np.nan
mae[r2 < 0] = np.nan
mse[r2 < 0] = np.nan
score_[r2 < 0] = np.nan
std1[r2 < 0] = np.nan
std2[r2 < 0] = np.nan
r2[r2 < 0] = np.nan
return bias, rmse, nse, wi, kge, r, m1, m2, mae, mse, score_, std1, std2
def get_na_mask_data(data):
mask = ~np.isnan(data)
data = [d[m] for d, m in zip(data, mask)]
return data
def figure1(
mdl_list=None,
color_list=None,
file_path='/hard/lilu/score/'):
bias, rmse, nse, wi, kge, r, m1, m2, mae, mse, score_, std1, std2 = \
output_preprocessor(
mdl_list=mdl_list,
file_path=file_path)
# boxplot
plt.figure(figsize=(20, 11))
# ----------------------------------
ax1 = plt.subplot(3, 3, 1)
data = get_na_mask_data(bias.reshape(len(mdl_list), -1))
plot1 = ax1.boxplot(data, vert=True, patch_artist=True, # labels=model_list, \
showfliers=False, showmeans=True)
plt.xticks(rotation=300)
ax1.set_title('(a) Bias')
ax1.axhline(y=0, c="black", lw=0.2)
# -------------------------------------
ax2 = plt.subplot(3, 3, 2)
data = get_na_mask_data(rmse.reshape(len(mdl_list), -1))
plot2 = ax2.boxplot(data, vert=True, patch_artist=True, # labels=model_list, \
showfliers=False, showmeans=True)
plt.xticks(rotation=300)
ax2.set_title('(b) Root Mean Squared Error')
# -------------------------------------
ax3 = plt.subplot(3, 3, 3)
data = get_na_mask_data(nse.reshape(len(mdl_list), -1))
plot3 = ax3.boxplot(data,
vert=True,
patch_artist=True, # labels=model_list, \
showfliers=False,
showmeans=True)
plt.xticks(rotation=300)
ax3.set_title('(c) Nash-Sutcliffe Efficiency Coefficient')
# -------------------------------------
ax4 = plt.subplot(3, 3, 4)
data = get_na_mask_data(wi.reshape(len(mdl_list), -1))
plot4 = ax4.boxplot(data, vert=True, patch_artist=True, # labels=mdl_list, \
showfliers=False, showmeans=True)
plt.xticks(rotation=300)
ax4.set_title('(d) Willmott Index')
# -------------------------------------
ax5 = plt.subplot(3, 3, 5)
data = get_na_mask_data(kge.reshape(len(mdl_list), -1))
plot5 = ax5.boxplot(data, vert=True, patch_artist=True, # labels=mdl_list, \
showfliers=False, showmeans=True)
plt.xticks(rotation=300)
ax5.set_title('(e) Kling-Gupta Efficiency')
# -------------------------------------
ax6 = plt.subplot(3, 3, 6)
data = get_na_mask_data(r.reshape(len(mdl_list), -1))
plot6 = ax6.boxplot(data, vert=True, patch_artist=True, # labels=mdl_list, \
showfliers=False, showmeans=True)
plt.xticks(rotation=300)
ax6.set_title('(f) Pearson’s Correlation Index')
# -------------------------------------
ax7 = plt.subplot(3, 3, 7)
data = get_na_mask_data(mae.reshape(len(mdl_list), -1))
plot7 = ax7.boxplot(data, vert=True, patch_artist=True, labels=mdl_list,
showfliers=False, showmeans=True)
plt.xticks(rotation=300)
ax7.set_title('(g) Mean Absolute Error')
# -------------------------------------
ax8 = plt.subplot(3, 3, 8)
data = get_na_mask_data(mse.reshape(len(mdl_list), -1))
plot8 = ax8.boxplot(data, vert=True, patch_artist=True, labels=mdl_list,
showfliers=False, showmeans=True)
plt.xticks(rotation=300)
ax8.set_title('(h) Mean Squared Error')
# -------------------------------------
ax9 = plt.subplot(3, 3, 9)
data = get_na_mask_data(score_.reshape(len(mdl_list), -1))
plot9 = ax9.boxplot(data, vert=True, patch_artist=True, labels=mdl_list,
showfliers=False, showmeans=True)
plt.xticks(rotation=300)
ax9.set_title('(i) MetReg score')
for bplot in (plot1, plot2, plot3, plot4, plot5, plot6, plot7, plot8, plot9):
for patch, color in zip(bplot['boxes'], color_list):
patch.set_facecolor(color)
plt.savefig('/Users/lewlee/Desktop/figure1.pdf')
# plt.savefig('/hard/lilu/boxplot_model_score.pdf')
def figure3(mdl_list=['Ridge', 'KNN', 'SVM', 'ELM', 'RF', 'Adaboost',
'GBDT', 'Xgboost', 'LightGBM', 'ConvLSTM'],
color_list=['pink', 'lightblue', 'gray', 'yellow', 'lightgreen',
'lightgreen', 'lightgreen', 'lightgreen', 'lightgreen',
'red'],
file_type='.npy',
file_path='/hard/lilu/score/'):
koppen_index = np.load('koppen_index.npy').astype('float')
koppen_index[koppen_index == 0] = np.nan
koppen_index = np.concatenate(
(koppen_index[:, 181:], koppen_index[:, :181]), axis=-1)
for i in range(1, 4):
koppen_index[koppen_index == i] = 0
for i in range(4, 6):
koppen_index[koppen_index == i] = 1
for i in range(6, 8):
koppen_index[koppen_index == i] = 2
for i in range(8, 11):
koppen_index[koppen_index == i] = 3
for i in range(11, 14):
koppen_index[koppen_index == i] = 4
for i in range(14, 17):
koppen_index[koppen_index == i] = 5
for i in range(17, 21):
koppen_index[koppen_index == i] = 6
for i in range(21, 25):
koppen_index[koppen_index == i] = 7
for i in range(25, 29):
koppen_index[koppen_index == i] = 8
for i in range(29, 31):
koppen_index[koppen_index == i] = 9
    bias, rmse, nse, wi, kge, r, m1, m2, mae, mse, score_, std1, std2 = \
        output_preprocessor(
            mdl_list=mdl_list,
            file_path=file_path)
plt.figure()
for i in range(10):
print(i)
mask = np.where(koppen_index[:150, :] == i)
score = score_[:, mask[0], mask[1]]
print(score.shape)
data = get_na_mask_data(score)
ax = plt.subplot(5, 2, i+1)
ax.boxplot(data, vert=True, patch_artist=True,
showfliers=False, ) # showmeans=True)
plt.savefig('/hard/lilu/boxplot_koppen_index.pdf')
def figure4(mdl_list=['Ridge', 'KNN', 'SVM', 'ELM', 'RF', 'Adaboost',
'GBDT', 'Xgboost', 'LightGBM', 'ConvLSTM'],
color_list=['pink', 'lightblue', 'gray', 'yellow', 'lightgreen',
'lightgreen', 'lightgreen', 'lightgreen', 'lightgreen',
'red'],
file_type='.npy',
file_path='/hard/lilu/score/'):
    bias, rmse, nse, wi, kge, r, m1, m2, mae, mse, score_, std1, std2 = output_preprocessor(
        mdl_list=mdl_list,
        file_path=file_path)
koppen_index = np.load('koppen_index.npy').astype('float')
koppen_index[koppen_index == 0] = np.nan
koppen_index = np.concatenate(
(koppen_index[:, 181:], koppen_index[:, :181]), axis=-1)
for i in range(1, 4):
koppen_index[koppen_index == i] = 0
for i in range(4, 6):
koppen_index[koppen_index == i] = 1
for i in range(6, 8):
koppen_index[koppen_index == i] = 2
for i in range(8, 11):
koppen_index[koppen_index == i] = 3
for i in range(11, 14):
koppen_index[koppen_index == i] = 4
for i in range(14, 17):
koppen_index[koppen_index == i] = 5
for i in range(17, 21):
koppen_index[koppen_index == i] = 6
for i in range(21, 25):
koppen_index[koppen_index == i] = 7
for i in range(25, 29):
koppen_index[koppen_index == i] = 8
for i in range(29, 31):
koppen_index[koppen_index == i] = 9
#ref_std = np.nanmean(std1)
#std = np.nanmean(std2, axis=(-1, -2))
#nse = np.nanmean(nse, axis=(-1, -2))
color_list = color_list
from MetReg.plot.__plotting import plot_taylor_diagram
for i in range(10):
print(i)
mask = np.where(koppen_index[:150, :] == i)
sns.kdeplot(nse[-1, mask[0], mask[1]])
"""
std_ = np.nanmean(std2[:, mask[0], mask[1]], axis=-1)
ref_std_ = np.nanmean(std1[:, mask[0], mask[1]])
nse_ = np.nanmean(r[:, mask[0], mask[1]], axis=-1)
fig = plt.figure()
plot_taylor_diagram(std_, nse_, ref_std_, fig=fig, colors=color_list)
plt.savefig('/hard/lilu/taylor_diagram'+'_'+str(i)+'.pdf')
"""
plt.savefig('/hard/lilu/kde.pdf')
def select_best_model(metrics):
"""Get best model of models list."""
H = metrics.shape[-2]
W = metrics.shape[-1]
loc = np.full((H, W), np.nan)
for i in range(H):
for j in range(W):
if not np.isnan(metrics[:, i, j]).any():
loc[i, j] = np.argmax(metrics[:, i, j])
return loc
if __name__ == '__main__':
figure1(
mdl_list=['Ridge', 'KNN', 'ELM', 'Adaboost', 'GBDT',
'Xgboost', 'LightGBM', 'RNN', 'GRU', 'LSTM'],
color_list=['pink', 'lightblue', 'yellow', 'lightgreen', 'lightgreen', 'lightgreen',
'lightgreen', 'red', 'red', 'red'],
file_path='/Users/lewlee/Desktop/MetReg/experiments/ERA5/score/7D/')
# figure3()
# figure4()
|
StarcoderdataPython
|
1813733
|
# from app.models.db import db, migrate
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
db = SQLAlchemy()
migrate = Migrate()
from app.models.fact import Fact
from app.models.user import User
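

# Hedged usage sketch (not part of the original module): wiring these extension
# objects into a Flask application factory. The database URI and factory name
# are assumptions for illustration only.
def create_app():
    from flask import Flask

    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///dev.db"
    db.init_app(app)
    migrate.init_app(app, db)
    return app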
|
StarcoderdataPython
|
105481
|
#! /usr/bin/env python3
# coding: utf-8
# modified example from https://weatherstack.com/documentation
import requests, hvac, getpass
# sets up the vault client, prompts for creds, and gather's the data
client = hvac.Client(url='http://localhost:8200')
client.auth.userpass.login(input("username: "), getpass.getpass(prompt='password: ', stream=None),mount_point='userpass')
token = client.secrets.kv.v2.read_secret_version(path='api_key')['data']['data']['token']
params = {
'access_key': token,
# Nashville
'query': '36.1608014,-86.7833216',
    'forecast_days': '1',
'days': '1',
'units': 'f'
}
api_result = requests.get('http://api.weatherstack.com/forecast', params)
api_response = api_result.json()
city = api_response['location']['name']
region = api_response['location']['region']
for i, k in enumerate(api_response['forecast']):
forecast_values = api_response['forecast'][k]
max_temp = forecast_values['maxtemp']
min_temp = forecast_values['mintemp']
avg_temp = forecast_values['avgtemp']
uv_index = forecast_values['uv_index']
print(f'Forecast for {city}, {region} tomorrow is \n\taverage temp: {avg_temp} F \n\tminimum temp: {min_temp} F \n\tmaximum temp: {max_temp} F \n\tUV index: {uv_index}')
|
StarcoderdataPython
|
3287739
|
from django import forms
from spark.articles.models import Article
class ArticleForm(forms.ModelForm):
status = forms.CharField(widget=forms.HiddenInput())
title = forms.CharField(
widget=forms.TextInput(attrs={'class': 'form-control'}),
max_length=255)
content = forms.CharField(
widget=forms.Textarea(attrs={'class': 'form-control'}),
max_length=4000)
tags = forms.CharField(
widget=forms.TextInput(attrs={'class': 'form-control'}),
max_length=255, required=False,
help_text='Use spaces to separate the tags, such as "java jsf primefaces"') # noqa: E501
class Meta:
model = Article
fields = ['title', 'content', 'tags', 'status']
|
StarcoderdataPython
|
11293500
|
<gh_stars>1-10
from .stop_words import STOP_WORDS
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .punctuation import TOKENIZER_PREFIXES, TOKENIZER_INFIXES
from ...language import Language
class ItalianDefaults(Language.Defaults):
tokenizer_exceptions = TOKENIZER_EXCEPTIONS
stop_words = STOP_WORDS
prefixes = TOKENIZER_PREFIXES
infixes = TOKENIZER_INFIXES
class Italian(Language):
lang = "it"
Defaults = ItalianDefaults
__all__ = ["Italian"]
|
StarcoderdataPython
|
5111111
|
<reponame>hixio-mh/plugin-python
a = 12
b = 14
a, b = b, a
a = b, c = 1, 2
a = b = 1, 2
new_file_name = file_name[:-6] + extension
|
StarcoderdataPython
|
6662209
|
<reponame>christiansteinert/tibetan-dictionary
#!/usr/bin/env python3
import sys, re
def getAlternativeValue(value):
value = re.sub(r"\b1\b","one",value)
value = re.sub(r"\b2\b","two",value)
value = re.sub(r"\b3\b","three",value)
value = re.sub(r"\b4\b","four",value)
value = re.sub(r"\b5\b","five",value)
value = re.sub(r"\b6\b","six",value)
value = re.sub(r"\b7\b","seven",value)
value = re.sub(r"\b8\b","eight",value)
value = re.sub(r"\b9\b","nine",value)
value = re.sub(r"\b10\b","ten",value)
value = re.sub(r"\b11\b","eleven",value)
value = re.sub(r"\b12\b","twelve",value)
value = re.sub(r"\b13\b","thirteen",value)
value = re.sub(r"\b14\b","fourteen",value)
value = re.sub(r"\b15\b","fifteen",value)
value = re.sub(r"\b16\b","sixteen",value)
value = re.sub(r"\b17\b","seventeen",value)
value = re.sub(r"\b18\b","eighteen",value)
value = re.sub(r"\b19\b","nineteen",value)
value = re.sub(r"\b20\b","twenty",value)
value = re.sub(r"\b21\b","twenty one",value)
value = re.sub(r"\b22\b","twenty two",value)
value = re.sub(r"\b23\b","twenty three",value)
value = re.sub(r"\b24\b","twenty four",value)
value = re.sub(r"\b25\b","twenty five",value)
value = re.sub(r"\b26\b","twenty six",value)
value = re.sub(r"\b27\b","twenty seven",value)
value = re.sub(r"\b28\b","twenty eight",value)
value = re.sub(r"\b29\b","twenty nine",value)
value = re.sub(r"\b30\b","thirty",value)
value = re.sub(r"\b31\b","thirty one",value)
value = re.sub(r"\b32\b","thirty two",value)
value = re.sub(r"\b33\b","thirty three",value)
value = re.sub(r"\b34\b","thirty four",value)
value = re.sub(r"\b35\b","thirty five",value)
value = re.sub(r"\b36\b","thirty six",value)
value = re.sub(r"\b37\b","thirty seven",value)
value = re.sub(r"\b38\b","thirty eight",value)
value = re.sub(r"\b39\b","thirty nine",value)
value = re.sub(r"\b40\b","fourty",value)
value = re.sub(r"\b41\b","fourty one",value)
value = re.sub(r"\b42\b","fourty two",value)
value = re.sub(r"\b43\b","fourty three",value)
value = re.sub(r"\b44\b","fourty four",value)
value = re.sub(r"\b45\b","fourty five",value)
value = re.sub(r"\b46\b","fourty six",value)
value = re.sub(r"\b47\b","fourty seven",value)
value = re.sub(r"\b48\b","fourty eight",value)
value = re.sub(r"\b49\b","fourty nine",value)
value = re.sub(r"\b50\b","fifty",value)
value = re.sub(r"\b51\b","fifty one",value)
value = re.sub(r"\b52\b","fifty two",value)
value = re.sub(r"\b53\b","fifty three",value)
value = re.sub(r"\b54\b","fifty four",value)
value = re.sub(r"\b55\b","fifty five",value)
value = re.sub(r"\b56\b","fifty six",value)
value = re.sub(r"\b57\b","fifty seven",value)
value = re.sub(r"\b58\b","fifty eight",value)
value = re.sub(r"\b59\b","fifty nine",value)
value = re.sub(r"\b60\b","sixty",value)
value = re.sub(r"\b61\b","sixty one",value)
value = re.sub(r"\b62\b","sixty two",value)
value = re.sub(r"\b63\b","sixty three",value)
value = re.sub(r"\b64\b","sixty four",value)
value = re.sub(r"\b65\b","sixty five",value)
value = re.sub(r"\b66\b","sixty six",value)
value = re.sub(r"\b67\b","sixty seven",value)
value = re.sub(r"\b68\b","sixty eight",value)
value = re.sub(r"\b69\b","sixty nine",value)
value = re.sub(r"\b70\b","seventy",value)
value = re.sub(r"\b71\b","seventy one",value)
value = re.sub(r"\b72\b","seventy two",value)
value = re.sub(r"\b73\b","seventy three",value)
value = re.sub(r"\b74\b","seventy four",value)
value = re.sub(r"\b75\b","seventy five",value)
value = re.sub(r"\b76\b","seventy six",value)
value = re.sub(r"\b77\b","seventy seven",value)
value = re.sub(r"\b78\b","seventy eight",value)
value = re.sub(r"\b79\b","seventy nine",value)
value = re.sub(r"\b80\b","eighty",value)
value = re.sub(r"\b81\b","eighty one",value)
value = re.sub(r"\b82\b","eighty two",value)
value = re.sub(r"\b83\b","eighty three",value)
value = re.sub(r"\b84\b","eighty four",value)
value = re.sub(r"\b85\b","eighty five",value)
value = re.sub(r"\b86\b","eighty six",value)
value = re.sub(r"\b87\b","eighty seven",value)
value = re.sub(r"\b88\b","eighty eight",value)
value = re.sub(r"\b89\b","eighty nine",value)
value = re.sub(r"\b90\b","ninety",value)
value = re.sub(r"\b91\b","ninety one",value)
value = re.sub(r"\b92\b","ninety two",value)
value = re.sub(r"\b93\b","ninety three",value)
value = re.sub(r"\b94\b","ninety four",value)
value = re.sub(r"\b95\b","ninety five",value)
value = re.sub(r"\b96\b","ninety six",value)
value = re.sub(r"\b97\b","ninety seven",value)
value = re.sub(r"\b98\b","ninety eight",value)
value = re.sub(r"\b99\b","ninety nine",value)
value = re.sub(r"\b100\b","hundred",value)
value = re.sub(r"\b1000\b","thousand",value)
if value.startswith("to "):
value=value[3:]
return value
def cleanupValueMinimal(value):
value = value.rstrip("\r\n")
value = value.rstrip("\r")
value = value.rstrip("\n")
value = value.replace("-"," ")
value = re.sub(r"\s+"," ",value)
value = re.sub(r"^\s+","",value)
value = re.sub(r"\s+$","",value)
value = value.replace("\\n",";")
return value
def cleanupValue(value):
value = cleanupValueMinimal(value)
value = re.sub(r"[:]","",value)
value = re.sub(r"\{.*?\}","",value)
value = re.sub(r"\[.*?\]","",value)
value = re.sub(r"\(.*?\)","",value)
value = re.sub(r"\"","",value)
value = re.sub(r"″","",value)
value = re.sub(r"^\s*as verb\s*:\s*","",value)
value = re.sub(r"^\s*verb\s*:\s*","",value)
value = re.sub(r"^\s*as noun\s*:\s*","",value)
value = re.sub(r"^\s*noun\s*:\s*","",value)
value = re.sub(r"^\s*as adjective\s*:\s*","",value)
value = re.sub(r"^\s*adjective\s*:\s*","",value)
value = re.sub(r"^\s*adj\.\s*:\s*","",value)
value = re.sub(r"\.\.\.","",value)
value = value.replace("?","")
value = value.replace("*","")
value = value.replace("—"," ")
value = value.replace("'","")
value = re.sub(r"^\s+","",value)
value = re.sub(r"\s+$","",value)
value = value.replace("ä","a")
value = value.replace("ö","o")
value = value.replace("ü","u")
value = value.replace("Ä","A")
value = value.replace("Ö","O")
value = value.replace("Ü","U")
value = value.replace("ā","A")
value = value.replace("Ā","A")
value = value.replace("ī","I")
value = value.replace("Ī","I")
value = value.replace("ū","U")
value = value.replace("Ū","U")
value = value.replace("ē","e")
value = value.replace("ō","o")
value = value.replace("ṛ","R")
value = value.replace("ṛ","R")
value = value.replace("ṝ","RR")
value = value.replace("Ṛ","R")
value = value.replace("Ṝ","RR")
value = value.replace("ḷ","lR")
value = value.replace("ḹ","lRR")
value = value.replace("Ḷ","lR")
value = value.replace("Ḹ","lRR")
value = value.replace("ṃ","M")
value = value.replace("ḥ","H")
value = value.replace("ṅ","N")
value = value.replace("Ṅ","N")
value = value.replace("ṇ","N")
value = value.replace("Ṇ","N")
value = value.replace("ñ","N")
value = value.replace("Ñ","N")
value = value.replace("ṭ","T")
value = value.replace("Ṭ","T")
value = value.replace("ḍ","D")
value = value.replace("Ḍ","D")
value = value.replace("ś","S")
value = value.replace("Ś","S")
value = value.replace("ṣ","S")
value = value.replace("Ṣ","S")
value = value.replace("","") # delete zero-width non-joiner
value = re.sub(r"\s+"," ",value)
return value
def main():
with open(sys.argv[1], 'r') as inp:
print(sys.argv[1])
with open(sys.argv[2], 'w') as out:
for line in inp:
if ( not line.startswith("#") ) and ( "|" in line ):
tibetanOriginal, englishOriginal = line.split("|");
tibetan = cleanupValueMinimal(tibetanOriginal)
englishTerms = cleanupValue(englishOriginal)
englishTermList = englishTerms.split(";")
for english in englishTermList:
english = cleanupValue(english)
if english != "" and tibetan != "" and ( "/" not in english ) and ("." not in english) and ("," not in english) and (")" not in english) and ("(" not in english) and ("=" not in english):
#print(r"{0}|{{{1}}}: {2}".format(english,tibetan,englishOriginal))
out.write(r"{0}|{{{1}}}: {2}".format(english,tibetan,englishOriginal))
english2 = getAlternativeValue(english)
if english != english2:
out.write(r"{0}|{{{1}}}: {2}".format(english2,tibetan,englishOriginal))
main()
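
# Hedged usage note (not part of the original script): it expects an input
# dictionary file and an output file on the command line, for example
#     python make_index.py Tibetan-English.txt English-index.txt
# where the script and file names are placeholders chosen for illustration.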
|
StarcoderdataPython
|
1828924
|
#!/usr/bin/env python
'''Base class for layout tests.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import unittest
import sys
from pyglet.gl import *
from layout import *
from pyglet.window import *
from pyglet.window.event import *
class LayoutTestBase(unittest.TestCase):
# Supply either XHTML or HTML.
xhtml = None
html = None
def on_expose(self):
glClearColor(1, 1, 1, 1)
glClear(GL_COLOR_BUFFER_BIT)
glLoadIdentity()
self.layout.draw()
self.window.flip()
def test_main(self):
width, height = 800, 600
self.window = w = Window(width, height, visible=False)
w.push_handlers(self)
self.layout = Layout()
w.push_handlers(self.layout)
if self.xhtml:
self.layout.set_xhtml(self.xhtml)
else:
self.layout.set_html(self.html)
w.set_visible()
while not w.has_exit:
w.dispatch_events()
self.on_expose()
w.close()
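

# Hedged usage sketch (illustrative subclass, not part of the original file):
# a concrete test supplies either `html` or `xhtml` on a subclass of the base
# class above; the markup below is made up.
class HelloWorldLayoutTest(LayoutTestBase):
    html = '<p>Hello, <em>layout</em> world!</p>'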
if __name__ == '__main__':
unittest.main()
|
StarcoderdataPython
|
9652295
|
# -*- coding: utf-8 -*-
from nose.tools import eq_
from anytree import Node
from anytree.util import commonancestors
from anytree.util import leftsibling
from anytree.util import rightsibling
def test_commonancestors():
"""commonancestors."""
udo = Node("Udo")
marc = Node("Marc", parent=udo)
lian = Node("Lian", parent=marc)
dan = Node("Dan", parent=udo)
jet = Node("Jet", parent=dan)
joe = Node("Joe", parent=dan)
eq_(commonancestors(jet, joe), (udo, dan))
eq_(commonancestors(jet, marc), (udo,))
eq_(commonancestors(jet), (udo, dan))
eq_(commonancestors(), ())
eq_(commonancestors(jet, lian), (udo, ))
def test_leftsibling():
"""leftsibling."""
dan = Node("Dan")
jet = Node("Jet", parent=dan)
jan = Node("Jan", parent=dan)
joe = Node("Joe", parent=dan)
eq_(leftsibling(dan), None)
eq_(leftsibling(jet), None)
eq_(leftsibling(jan), jet)
eq_(leftsibling(joe), jan)
def test_rightsibling():
"""rightsibling."""
dan = Node("Dan")
jet = Node("Jet", parent=dan)
jan = Node("Jan", parent=dan)
joe = Node("Joe", parent=dan)
eq_(rightsibling(dan), None)
eq_(rightsibling(jet), jan)
eq_(rightsibling(jan), joe)
eq_(rightsibling(joe), None)
|
StarcoderdataPython
|
3458784
|
from typing import Generator
from fastapi import Depends, HTTPException, Security, status
from fastapi.security import OAuth2PasswordBearer
from fastapi.security.api_key import APIKeyCookie, APIKeyHeader, APIKeyQuery
from jose import JWTError, jwt
from sqlalchemy.orm import Session
from starlette.status import HTTP_403_FORBIDDEN
from app.core.config import settings
from app.crud import user as crud
from app.db.session import SessionLocal
from app.schemas import TokenData
# DB
def get_db() -> Generator:
try:
db = SessionLocal()
yield db
finally:
db.close()
# API key
api_key_query = APIKeyQuery(name=settings.API_KEY_NAME, auto_error=False)
api_key_header = APIKeyHeader(name=settings.API_KEY_NAME, auto_error=False)
api_key_cookie = APIKeyCookie(name=settings.API_KEY_NAME, auto_error=False)
async def get_api_key(
api_key_query: str = Security(api_key_query),
api_key_header: str = Security(api_key_header),
api_key_cookie: str = Security(api_key_cookie),
):
if api_key_query == settings.API_KEY:
return api_key_query
elif api_key_header == settings.API_KEY:
return api_key_header
elif api_key_cookie == settings.API_KEY:
return api_key_cookie
else:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN, detail="Could not validate credentials"
)
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/v1/token")
oauth2_scheme_no_error = OAuth2PasswordBearer(
tokenUrl="/api/v1/token", auto_error=False
)
async def get_current_user(
db: Session = Depends(get_db), token: str = Depends(oauth2_scheme)
):
credentials_exception = HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Could not validate credentials",
headers={"WWW-Authenticate": "Bearer"},
)
try:
payload = jwt.decode(
token, settings.JWT_SECRET_KEY, algorithms=[settings.JWT_ALGORITHM]
)
username: str = payload.get("sub")
if username is None:
raise credentials_exception
token_data = TokenData(username=username)
except JWTError:
raise credentials_exception
user = crud.get_user(db, username=token_data.username)
if user is None:
raise credentials_exception
return user
async def oauth2_password_bearer_or_api_key(
db: Session = Depends(get_db),
token: str = Depends(oauth2_scheme_no_error),
api_key_query: str = Security(api_key_query),
api_key_header: str = Security(api_key_header),
api_key_cookie: str = Security(api_key_cookie),
):
if token is not None:
return await get_current_user(db, token)
else:
return await get_api_key(api_key_query, api_key_header, api_key_cookie)
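

# Hedged usage sketch (not part of the original module): how a router might
# consume the combined dependency above. The path and response payload are
# assumptions for illustration only.
from fastapi import APIRouter

demo_router = APIRouter()


@demo_router.get("/whoami")
async def whoami(principal=Depends(oauth2_password_bearer_or_api_key)):
    # `principal` is either the ORM user (JWT path) or the raw API key string.
    return {"authenticated": True}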
|
StarcoderdataPython
|
12802869
|
"""
The main entry point for the web server
"""
from aiohttp import web
import time
import base64
from cryptography import fernet
from aiohttp_session import setup, get_session
from aiohttp_session.cookie_storage import EncryptedCookieStorage
import quakes
import db
import auth
import game
import configparser
routes = web.RouteTableDef()
config = configparser.ConfigParser()
@routes.get("/")
async def index(request):
"""
Provide the index.html at the root
"""
return web.FileResponse("static/index.html")
@routes.post("/user")
async def user_post(request):
"""
Handle login, logout, and account creation
"""
session = await get_session(request)
data = await request.post()
action = data.get("action")
if action == "create" or action == "login":
username = data.get("username")
password = data.get("password")
if (username is None or password is None):
raise web.HTTPBadRequest
if action == "create":
auth.create_account(session, username, password)
elif action == "login":
auth.login(session, username, password)
elif action == "logout":
auth.logout(session)
raise web.HTTPOk
@routes.get("/user")
async def user_get(request):
"""
Allow users to query the current login status
"""
session = await get_session(request)
user = auth.get_user(session, strip_id=True)
return web.json_response(user)
@routes.post("/game")
async def game_post(request):
"""
Handle game guesses
"""
session = await get_session(request)
data = await request.post()
guess = data.get("guess")
    game.upsert_game(session, guess)
    raise web.HTTPOk
@routes.get("/game")
async def game_get(request):
"""
Allows users to grab stats on the game
"""
session = await get_session(request)
leaderboard = await game.get_leaderboard()
history = await game.get_history(session)
payload = {"leaderboard": leaderboard, "history": history}
return web.json_response(payload)
@routes.get("/quakes")
async def quakes_api(request):
"""Returns the data from the GeoJSON quakes API"""
# prevent too many refreshes too quickly
session = await get_session(request)
current_ms = time.time()
last_refresh = session["last_refresh"] if "last_refresh" in session else 0
ratelimited = last_refresh > (current_ms - 10) # 10 second timeout
if not ratelimited:
session["last_refresh"] = current_ms
data = await quakes.get_quakes()
return web.json_response(data)
else:
raise web.HTTPTooManyRequests
@routes.get("/mapbox")
async def mapbox_token(request):
"""Returns the mapbox token from the config"""
mapbox = config["mapbox"]
return web.json_response({
"token": mapbox["token"]
})
def make_app():
"""Neatly creates the webapp"""
app = web.Application()
config.read("config.cfg")
fernet_key = fernet.Fernet.generate_key()
secret_key = base64.urlsafe_b64decode(fernet_key)
setup(app, EncryptedCookieStorage(secret_key, cookie_name="session"))
routes.static("/", "static")
app.add_routes(routes)
db.initialize_db()
return app
web.run_app(make_app())
|
StarcoderdataPython
|
3307461
|
<reponame>johnstcn/whatsnew
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth import authenticate, login
from django.db.models.aggregates import Max
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from django.views import generic
from django.views.decorators.cache import never_cache
from django.utils.decorators import method_decorator
from .models import *
from .forms import *
@login_required
def watch(request, site_id):
return_url = request.META.get('HTTP_REFERER', '/')
if not request.user.is_anonymous():
site = Site.objects.get(pk=site_id)
us = UserSeen.objects.get(user=request.user)
us.seen[site_id] = site.latest_update.id
us.save()
messages.info(request, 'You are now watching %s' % site.name)
else:
messages.warning(request, 'You need to sign in to do that!')
return redirect(return_url)
@login_required
def unwatch(request, site_id):
return_url = request.META.get('HTTP_REFERER', '/')
if not request.user.is_anonymous():
site = Site.objects.get(pk=site_id)
us = UserSeen.objects.get(user=request.user)
del(us.seen[site_id])
us.save()
messages.info(request, 'You are no longer watching %s' % site.name)
else:
messages.warning(request, 'You need to sign in to do that!')
return redirect(return_url)
@never_cache
def redirect_to_update(request, site_update_id):
return_url = request.META.get('HTTP_REFERER', '/')
update = SiteUpdate.objects.filter(pk=site_update_id).get()
if not request.user.is_anonymous():
try:
us = UserSeen.objects.get(user=request.user)
if str(update.site_id) in us.seen:
us.seen[update.site_id] = site_update_id
us.save()
except ObjectDoesNotExist:
messages.error(request, 'Sorry, something went wrong!')
return redirect(return_url)
else:
messages.info(request, 'Sign up to keep track of the comics you like!')
return redirect(update.url)
@method_decorator(never_cache, name='dispatch')
class SitesView(generic.ListView):
template_name = 'all_sites.html'
context_object_name = 'sites'
model = Site
paginate_by = 10
def get_queryset(self):
sites = Site.objects.annotate(Max('siteupdate__date')).order_by('-siteupdate__date__max')
tag = self.kwargs.get("tag", "all")
nsfw = self.kwargs.get("nsfw", "0")
watched = self.kwargs.get("watched", "0")
if tag != "all":
sites = sites.filter(site_tags__name=tag)
if nsfw == "0":
sites = sites.exclude(site_tags__name="nsfw")
if watched == "1":
if self.request.user.is_anonymous():
messages.info(self.request, "You need to sign in to do that!")
else:
seen = self.request.user.userseen.seen
sites = sites.filter(pk__in=seen)
return sites
def get_context_data(self, **kwargs):
context = super(SitesView, self).get_context_data(**kwargs)
tags = [t.name for t in Tag.objects.all() if t.name != 'nsfw']
context["tags"] = tags
context["selected_tag"] = self.kwargs.get("tag", "all")
context["show_nsfw"] = self.kwargs.get("nsfw", "0")
context["only_watched"] = self.kwargs.get("watched", "0")
next_updates = {}
if not self.request.user.is_anonymous():
seen = UserSeen.objects.get(user=self.request.user).seen
for site_id, update_id in seen.iteritems():
try:
update = SiteUpdate.objects.get(pk=update_id)
next_update = SiteUpdate.objects.filter(site_id=site_id, date__gt=update.date).order_by('date').first()
except ObjectDoesNotExist:
next_update = None
if next_update is not None:
next_updates[site_id] = next_update.id
else:
next_updates[site_id] = None
else:
seen = {}
context['next_updates'] = next_updates
context['seen'] = seen
return context
@method_decorator(never_cache, name='dispatch')
class SignUpView(generic.FormView):
template_name = 'sign_in.html'
form_class = SignUpForm
success_url = '/'
def dispatch(self, request):
if self.request.user.is_anonymous():
return super(SignUpView, self).dispatch(request)
else:
messages.warning(self.request, 'You are already signed in!')
return HttpResponseRedirect('/')
def form_valid(self, form):
form.send_signin_email()
messages.info(self.request, 'Check your email for a link to sign in!')
return super(SignUpView, self).form_valid(form)
@method_decorator(never_cache, name='dispatch')
class AuthenticateView(generic.RedirectView):
permanent = False
query_string = False
def get_redirect_url(self, *args, **kwargs):
auth_code = self.kwargs.get('auth_code', '')
try:
user = authenticate(code=auth_code)
login(self.request, user)
messages.success(self.request, 'Welcome %s!' %(user.username))
except ObjectDoesNotExist:
messages.error(self.request, "Sorry, we couldn't figure out who you are.")
finally:
return '/'
|
StarcoderdataPython
|
11274216
|
# Copyright (c) 2021 Qianyun, Inc. All rights reserved.
from cloudify import ctx
from . import constants
from abstract_plugin.platforms.common.base import CommonResource
class CommonEip(CommonResource):
@staticmethod
def get_related_vm():
relationships = ctx.instance.relationships
for relationship in relationships:
if relationship.target.node.type in [constants.COMPUTE_NODE_TYPE, constants.WINDOWS_COMPUTE_NODE_TYPE]:
return relationship.target.instance
return None
|
StarcoderdataPython
|
1685245
|
"""
Matplotlib draw methods for TGraph
The plot and errorbar methods are available.
When used through the decorators with mplbplot.plot (recommended),
the methods will be called rplot and rerrorbar,
and the full documentation is available through
TGraph.__plot__ and TGraph.__errorbar__.
"""
__all__ = ("plot", "errorbar")
from .decorators import points
def plot( graph, fmt=None, axes=None, **kwargs ):
"""
Wrapper around axes.plot for TGraph, replacement for ROOT's P and L options
Point coordinates are taken from the graph.
"""
x, y = zip(*[ (p.x, p.y) for p in points(graph) ])
return axes.plot( x, y, fmt, **kwargs )
def errorbar( graph, axes=None, xErrors=True, kind="bar", removeZero=False, **kwargs ):
"""
Wrapper around axes.errorbar for TGraph, replacement for ROOT's E option (with P and/or L at a time, in case kind is bar)
Point coordinates and errors are taken from the graph.
The type of error visualisation can be set by setting kind="bar", "box" or "band".
x errors can be turned off by setting xErrors to False (ignored in case kind is box; meaningless in case kind is band).
Points with y=0 can be removed by passing the option removeZero=True
"""
if kind == "bar":
x,xle,xue,y,yle,yue = zip(*[ (p.x, p.xLowError, p.xHighError, p.y, p.yLowError, p.yHighError) for p in points(graph) if not removeZero or p.y != 0. ])
return axes.errorbar(x, y, yerr=(yle, yue), xerr=( (xle,xue) if xErrors else None ), **kwargs)
elif kind == "box":
import matplotlib.patches
return [ axes.add_patch( matplotlib.patches.Rectangle(
(p.x-p.xLowError, p.y-p.yLowError), ## left bottom
width=(p.xLowError+p.xHighError), height=(p.yLowError+p.yHighError),
**kwargs ) )
for p in points(graph) if not removeZero or p.y != 0. ]
elif kind == "band":
x,yLow,yHigh = zip(*[ (p.x, p.y-p.yLowError, p.y+p.yHighError) for p in points(graph) if not removeZero or p.y != 0. ])
return axes.fill_between( x, yLow, y2=yHigh, **kwargs )
def _addDecorations():
""" load decorators for draw methods that need dispatch """
from cppyy import gbl
gbl.TGraph.__plot__ = plot
gbl.TGraph.__errorbar__ = errorbar
|
StarcoderdataPython
|