'''
VGDL example: Missile Command.

@author: <NAME> and <NAME>
'''

missilecommand_level = """
w m m m m w
w w
w w
w w
w w
w w
w A w
w w
w w
w w
w c c c w
wwwwwwwwwwwwwwwwwwwwwwww
"""

missilecommand_game = """
BasicGame
    SpriteSet
        city > Immovable color=GREEN
        incoming > Chaser stype=city color=ORANGE speed=0.1
        explosion > Flicker limit=5
        avatar > ShootAvatar stype=explosion
    LevelMapping
        c > city
        m > incoming
    InteractionSet
        movable wall > stepBack
        incoming city > killSprite
        city incoming > killSprite
        incoming explosion > killSprite
    TerminationSet
        SpriteCounter stype=city win=False
        SpriteCounter stype=incoming win=True
"""

if __name__ == "__main__":
    from vgdl.core import VGDLParser
    VGDLParser.playGame(missilecommand_game, missilecommand_level)
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def multiscale_def(image_shape, num_scale, use_flip=True):
    base_name_list = ['image']
    multiscale_def = {}
    ms_def_names = []
    if use_flip:
        num_scale //= 2
        base_name_list.append('image_flip')
        multiscale_def['image_flip'] = {
            'shape': [None] + image_shape,
            'dtype': 'float32',
            'lod_level': 0
        }
        multiscale_def['im_info_image_flip'] = {
            'shape': [None, 3],
            'dtype': 'float32',
            'lod_level': 0
        }
        ms_def_names.append('image_flip')
        ms_def_names.append('im_info_image_flip')
    for base_name in base_name_list:
        for i in range(0, num_scale - 1):
            name = base_name + '_scale_' + str(i)
            multiscale_def[name] = {
                'shape': [None] + image_shape,
                'dtype': 'float32',
                'lod_level': 0
            }
            im_info_name = 'im_info_' + name
            multiscale_def[im_info_name] = {
                'shape': [None, 3],
                'dtype': 'float32',
                'lod_level': 0
            }
            ms_def_names.append(name)
            ms_def_names.append(im_info_name)
    return multiscale_def, ms_def_names
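
# A minimal usage sketch (the image shape and scale count below are
# illustrative assumptions, not values taken from any particular config):
if __name__ == '__main__':
    defs, names = multiscale_def([3, 800, 1333], num_scale=4, use_flip=True)
    # use_flip=True halves num_scale, so this yields the flipped input plus
    # one extra scale for each base name.
    print(sorted(names))
    print(defs['image_flip']['shape'])  # [None, 3, 800, 1333]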
|
from recon.core.module import BaseModule
import csv
import os


class Module(BaseModule):

    meta = {
        'name': 'CSV File Creator',
        'author': '<NAME> (@LaNMaSteR53)',
        'description': 'Creates a CSV file containing the specified harvested data.',
        'options': (
            ('table', 'hosts', True, 'source table of data to export'),
            ('filename', os.path.join(BaseModule.workspace, 'results.csv'), True, 'path and filename for output'),
        ),
    }

    def module_run(self):
        filename = self.options['filename']
        # codecs module not used because the csv module converts to ascii
        with open(filename, 'w') as outfile:
            # build a list of table names
            table = self.options['table']
            rows = self.query('SELECT * FROM "%s" ORDER BY 1' % (table))
            cnt = 0
            for row in rows:
                row = [x if x else '' for x in row]
                if any(row):
                    cnt += 1
                    csvwriter = csv.writer(outfile, quoting=csv.QUOTE_ALL)
                    csvwriter.writerow([s.encode("utf-8") for s in row])
        self.output('%d records added to \'%s\'.' % (cnt, filename))
|
# -*- coding: utf-8 -*-
"""
test_events.py
~~~~~~~~~~~~~~
Specific tests for any function that is logically self-contained as part of
events.py.
"""<import_stmt>inspect<import_stmt>sys<import_from_stmt>hypothesis given<import_from_stmt>hypothesis.strategies integers lists tuples <import_stmt>pytest<import_stmt>h2.errors<import_stmt>h2.events<import_stmt>h2.settings<line_sep># We define a fairly complex Hypothesis strategy here. We want to build a list
# of two tuples of (Setting, value). For Setting we want to make sure we can
# handle settings that the rest of hyper knows nothing about, so we want to
# use integers from 0 to (2**16-1). For values, they're from 0 to (2**32-1).
# Define that strategy here for clarity.
SETTINGS_STRATEGY=lists(tuples(integers(min_value=0 max_value=2<power>16-1) integers(min_value=0 max_value=2<power>32-1) ))<class_stmt>TestRemoteSettingsChanged(object)<block_start>"""
Validate the function of the RemoteSettingsChanged event.
"""<line_sep>@given(SETTINGS_STRATEGY)<def_stmt>test_building_settings_from_scratch self settings_list<block_start>"""
Missing old settings are defaulted to None.
"""<line_sep>settings_dict=dict(settings_list)<line_sep>e=h2.events.RemoteSettingsChanged.from_settings(old_settings={} new_settings=settings_dict )<for_stmt>setting,new_value settings_dict.items()<block_start><assert_stmt>e.changed_settings[setting].setting<eq>setting<assert_stmt>e.changed_settings[setting].original_value<is><none><assert_stmt>e.changed_settings[setting].new_value<eq>new_value<block_end><block_end>@given(SETTINGS_STRATEGY SETTINGS_STRATEGY)<def_stmt>test_only_reports_changed_settings self old_settings_list new_settings_list<block_start>"""
Settings that were not changed are not reported.
"""<line_sep>old_settings_dict=dict(old_settings_list)<line_sep>new_settings_dict=dict(new_settings_list)<line_sep>e=h2.events.RemoteSettingsChanged.from_settings(old_settings=old_settings_dict new_settings=new_settings_dict )<assert_stmt>len(e.changed_settings)<eq>len(new_settings_dict)<assert_stmt>(sorted(list(e.changed_settings.keys()))<eq>sorted(list(new_settings_dict.keys())))<block_end>@given(SETTINGS_STRATEGY SETTINGS_STRATEGY)<def_stmt>test_correctly_reports_changed_settings self old_settings_list new_settings_list<block_start>"""
Settings that are changed are correctly reported.
"""<line_sep>old_settings_dict=dict(old_settings_list)<line_sep>new_settings_dict=dict(new_settings_list)<line_sep>e=h2.events.RemoteSettingsChanged.from_settings(old_settings=old_settings_dict new_settings=new_settings_dict )<for_stmt>setting,new_value new_settings_dict.items()<block_start>original_value=old_settings_dict.get(setting)<assert_stmt>e.changed_settings[setting].setting<eq>setting<assert_stmt>e.changed_settings[setting].original_value<eq>original_value<assert_stmt>e.changed_settings[setting].new_value<eq>new_value<block_end><block_end><block_end><class_stmt>TestEventReprs(object)<block_start>"""
Events have useful representations.
"""<line_sep>example_request_headers=[(':authority' 'example.com') (':path' '/') (':scheme' 'https') (':method' 'GET') ]<line_sep>example_informational_headers=[(':status' '100') ('server' 'fake-serv/0.1.0')]<line_sep>example_response_headers=[(':status' '200') ('server' 'fake-serv/0.1.0')]<def_stmt>test_requestreceived_repr self<block_start>"""
RequestReceived has a useful debug representation.
"""<line_sep>e=h2.events.RequestReceived()<line_sep>e.stream_id=5<line_sep>e.headers=self.example_request_headers<assert_stmt>repr(e)<eq>("<RequestReceived stream_id:5, headers:["<concat>"(':authority', 'example.<EMAIL>'), "<concat>"(':path', '/'), "<concat>"(':scheme', 'https'), "<concat>"(':method', 'GET')]>")<block_end><def_stmt>test_responsereceived_repr self<block_start>"""
ResponseReceived has a useful debug representation.
"""<line_sep>e=h2.events.ResponseReceived()<line_sep>e.stream_id=500<line_sep>e.headers=self.example_response_headers<assert_stmt>repr(e)<eq>("<ResponseReceived stream_id:500, headers:["<concat>"(':status', '200'), "<concat>"('server', 'fake-serv/0.1.0')]>")<block_end><def_stmt>test_trailersreceived_repr self<block_start>"""
TrailersReceived has a useful debug representation.
"""<line_sep>e=h2.events.TrailersReceived()<line_sep>e.stream_id=62<line_sep>e.headers=self.example_response_headers<assert_stmt>repr(e)<eq>("<TrailersReceived stream_id:62, headers:["<concat>"(':status', '200'), "<concat>"('server', 'fake-serv/0.1.0')]>")<block_end><def_stmt>test_informationalresponsereceived_repr self<block_start>"""
InformationalResponseReceived has a useful debug representation.
"""<line_sep>e=h2.events.InformationalResponseReceived()<line_sep>e.stream_id=62<line_sep>e.headers=self.example_informational_headers<assert_stmt>repr(e)<eq>("<InformationalResponseReceived stream_id:62, headers:["<concat>"(':status', '100'), "<concat>"('server', 'fake-serv/0.1.0')]>")<block_end><def_stmt>test_datareceived_repr self<block_start>"""
DataReceived has a useful debug representation.
"""<line_sep>e=h2.events.DataReceived()<line_sep>e.stream_id=888<line_sep>e.data=b"abcdefghijklmnopqrstuvwxyz"<line_sep>e.flow_controlled_length=88<assert_stmt>repr(e)<eq>("<DataReceived stream_id:888, flow_controlled_length:88, "<concat>"data:6162636465666768696a6b6c6d6e6f7071727374>")<block_end><def_stmt>test_windowupdated_repr self<block_start>"""
WindowUpdated has a useful debug representation.
"""<line_sep>e=h2.events.WindowUpdated()<line_sep>e.stream_id=0<line_sep>e.delta=2<power>16<assert_stmt>repr(e)<eq>"<WindowUpdated stream_id:0, delta:65536>"<block_end><def_stmt>test_remotesettingschanged_repr self<block_start>"""
RemoteSettingsChanged has a useful debug representation.
"""<line_sep>e=h2.events.RemoteSettingsChanged()<line_sep>e.changed_settings={h2.settings.SettingCodes.INITIAL_WINDOW_SIZE:h2.settings.ChangedSetting(h2.settings.SettingCodes.INITIAL_WINDOW_SIZE 2<power>16 2<power>15) }<assert_stmt>repr(e)<eq>("<RemoteSettingsChanged changed_settings:{ChangedSetting("<concat>"setting=SettingCodes.INITIAL_WINDOW_SIZE, original_value=65536, "<concat>"new_value=32768)}>")<block_end><def_stmt>test_pingreceived_repr self<block_start>"""
PingReceived has a useful debug representation.
"""<line_sep>e=h2.events.PingReceived()<line_sep>e.ping_data=b'abcdefgh'<assert_stmt>repr(e)<eq>"<PingReceived ping_data:6162636465666768>"<block_end><def_stmt>test_pingackreceived_repr self<block_start>"""
PingAckReceived has a useful debug representation.
"""<line_sep>e=h2.events.PingAckReceived()<line_sep>e.ping_data=b'abcdefgh'<assert_stmt>repr(e)<eq>"<PingAckReceived ping_data:6162636465666768>"<block_end><def_stmt>test_streamended_repr self<block_start>"""
StreamEnded has a useful debug representation.
"""<line_sep>e=h2.events.StreamEnded()<line_sep>e.stream_id=99<assert_stmt>repr(e)<eq>"<StreamEnded stream_id:99>"<block_end><def_stmt>test_streamreset_repr self<block_start>"""
StreamEnded has a useful debug representation.
"""<line_sep>e=h2.events.StreamReset()<line_sep>e.stream_id=919<line_sep>e.error_code=h2.errors.ErrorCodes.ENHANCE_YOUR_CALM<line_sep>e.remote_reset=<false><assert_stmt>repr(e)<eq>("<StreamReset stream_id:919, "<concat>"error_code:ErrorCodes.ENHANCE_YOUR_CALM, remote_reset:False>")<block_end><def_stmt>test_pushedstreamreceived_repr self<block_start>"""
PushedStreamReceived has a useful debug representation.
"""<line_sep>e=h2.events.PushedStreamReceived()<line_sep>e.pushed_stream_id=50<line_sep>e.parent_stream_id=11<line_sep>e.headers=self.example_request_headers<assert_stmt>repr(e)<eq>("<PushedStreamReceived pushed_stream_id:50, parent_stream_id:11, "<concat>"headers:["<concat>"(':authority', 'example.com'), "<concat>"(':path', '/'), "<concat>"(':scheme', 'https'), "<concat>"(':method', 'GET')]>")<block_end><def_stmt>test_settingsacknowledged_repr self<block_start>"""
SettingsAcknowledged has a useful debug representation.
"""<line_sep>e=h2.events.SettingsAcknowledged()<line_sep>e.changed_settings={h2.settings.SettingCodes.INITIAL_WINDOW_SIZE:h2.settings.ChangedSetting(h2.settings.SettingCodes.INITIAL_WINDOW_SIZE 2<power>16 2<power>15) }<assert_stmt>repr(e)<eq>("<SettingsAcknowledged changed_settings:{ChangedSetting("<concat>"setting=SettingCodes.INITIAL_WINDOW_SIZE, original_value=65536, "<concat>"new_value=32768)}>")<block_end><def_stmt>test_priorityupdated_repr self<block_start>"""
PriorityUpdated has a useful debug representation.
"""<line_sep>e=h2.events.PriorityUpdated()<line_sep>e.stream_id=87<line_sep>e.weight=32<line_sep>e.depends_on=8<line_sep>e.exclusive=<true><assert_stmt>repr(e)<eq>("<PriorityUpdated stream_id:87, weight:32, depends_on:8, "<concat>"exclusive:True>")<block_end>@pytest.mark.parametrize("additional_data,data_repr" [(<none> "None") (b'some data' "736f6d652064617461")])<def_stmt>test_connectionterminated_repr self additional_data data_repr<block_start>"""
ConnectionTerminated has a useful debug representation.
"""<line_sep>e=h2.events.ConnectionTerminated()<line_sep>e.error_code=h2.errors.ErrorCodes.INADEQUATE_SECURITY<line_sep>e.last_stream_id=33<line_sep>e.additional_data=additional_data<assert_stmt>repr(e)<eq>("<ConnectionTerminated error_code:ErrorCodes.INADEQUATE_SECURITY, "<concat>"last_stream_id:33, additional_data:%s>"%data_repr)<block_end><def_stmt>test_alternativeserviceavailable_repr self<block_start>"""
AlternativeServiceAvailable has a useful debug representation.
"""<line_sep>e=h2.events.AlternativeServiceAvailable()<line_sep>e.origin=b"example.com"<line_sep>e.field_value=b'h2=":8000"; ma=60'<assert_stmt>repr(e)<eq>('<AlternativeServiceAvailable origin:example.com, '<concat>'field_value:h2=":8000"; ma=60>')<block_end><def_stmt>test_unknownframereceived_repr self<block_start>"""
UnknownFrameReceived has a useful debug representation.
"""<line_sep>e=h2.events.UnknownFrameReceived()<assert_stmt>repr(e)<eq>'<UnknownFrameReceived>'<block_end><block_end><def_stmt>all_events <block_start>"""
Generates all the classes (i.e., events) defined in h2.events.
"""<for_stmt>_,obj inspect.getmembers(sys.modules['h2.events'])# We are only interested in objects that are defined in h2.events;
# objects that are imported from other modules are not of interest.
<block_start><if_stmt>hasattr(obj '__module__')<and>(obj.__module__<ne>'h2.events')<block_start><continue><block_end><if_stmt>inspect.isclass(obj)<block_start><yield>obj<block_end><block_end><block_end>@pytest.mark.parametrize('event' all_events())<def_stmt>test_all_events_subclass_from_event event<block_start>"""
Every event defined in h2.events subclasses from h2.events.Event.
"""<assert_stmt>(event<is>h2.events.Event)<or>issubclass(event h2.events.Event)<block_end>
|
from collections import deque
import random


class ReplayBuffer(object):

    def __init__(self, buffer_size):
        self.buffer_size = buffer_size
        self.num_experiences = 0
        self.buffer = deque()

    def getBatch(self, batch_size):
        # random draw N
        return random.sample(self.buffer, batch_size)

    def size(self):
        return self.buffer_size

    def add(self, state, action, reward, next_action, done):
        new_experience = (state, action, reward, next_action, done)
        if self.num_experiences < self.buffer_size:
            self.buffer.append(new_experience)
            self.num_experiences += 1
        else:
            self.buffer.popleft()
            self.buffer.append(new_experience)

    def count(self):
        # if buffer is full, return buffer size
        # otherwise, return experience counter
        return self.num_experiences

    def erase(self):
        self.buffer = deque()
        self.num_experiences = 0
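
# A minimal usage sketch (the transition values are placeholders, not data
# from any particular environment):
if __name__ == '__main__':
    replay = ReplayBuffer(buffer_size=1000)
    for step in range(5):
        # (state, action, reward, next state, done)
        replay.add(step, 0, 1.0, step + 1, False)
    batch = replay.getBatch(batch_size=3)
    print(replay.count(), len(batch))  # 5 3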
|
from typing import Dict, List, Optional

from ..factories.stack_frame_factory import create_stack
from ..models.heap_object import HeapObject
from ..models.stack import StackFrame
from ..models.trace_step import TraceStep


def create_trace_step(stack_frames: List[StackFrame],
                      heap: Dict[str, HeapObject],
                      line_numbers: List[int],
                      stdout: Optional[str] = None,
                      stderr: Optional[str] = None) -> TraceStep:
    stack = create_stack(stack_frames)
    trace_step = TraceStep(stack, heap)
    trace_step.line_numbers = line_numbers
    trace_step.stdout = stdout
    trace_step.stderr = stderr
    return trace_step
|
import numpy

import cupy
import cupyx.scipy.sparse


def _sparse_frobenius_norm(x):
    if cupy.issubdtype(x.dtype, cupy.complexfloating):
        sqnorm = abs(x).power(2).sum()
    else:
        sqnorm = x.power(2).sum()
    return cupy.sqrt(sqnorm)


def norm(x, ord=None, axis=None):
    """Norm of a cupy.scipy.spmatrix

    This function is able to return one of seven different sparse matrix
    norms, depending on the value of the ``ord`` parameter.

    Args:
        x (sparse matrix) : Input sparse matrix.
        ord (non-zero int, inf, -inf, 'fro', optional) : Order of the norm
            (see table under ``Notes``). inf means numpy's `inf` object.
        axis (int, 2-tuple of ints, None, optional): If `axis` is an
            integer, it specifies the axis of `x` along which to
            compute the vector norms. If `axis` is a 2-tuple, it specifies
            the axes that hold 2-D matrices, and the matrix norms of these
            matrices are computed. If `axis` is None then either a vector
            norm (when `x` is 1-D) or a matrix norm (when `x` is 2-D) is
            returned.

    Returns:
        ndarray : 0-D or 1-D array of norm(s).

    .. seealso:: :func:`scipy.sparse.linalg.norm`
    """
    if not cupyx.scipy.sparse.issparse(x):
        raise TypeError('input is not sparse. use cupy.linalg.norm')

    # Check the default case first and handle it immediately.
    if axis is None and ord in (None, 'fro', 'f'):
        return _sparse_frobenius_norm(x)

    # Some norms require functions that are not implemented for all types.
    x = x.tocsr()

    if axis is None:
        axis = (0, 1)
    elif not isinstance(axis, tuple):
        msg = "'axis' must be None, an integer or a tuple of integers"
        try:
            int_axis = int(axis)
        except TypeError:
            raise TypeError(msg)
        if axis != int_axis:
            raise TypeError(msg)
        axis = (int_axis,)

    nd = 2
    if len(axis) == 2:
        row_axis, col_axis = axis
        if not (-nd <= row_axis < nd and -nd <= col_axis < nd):
            raise ValueError('Invalid axis %r for an array with shape %r' %
                             (axis, x.shape))
        if row_axis % nd == col_axis % nd:
            raise ValueError('Duplicate axes given.')
        if ord == 2:
            raise NotImplementedError
            # return _multi_svd_norm(x, row_axis, col_axis, amax)
        elif ord == -2:
            raise NotImplementedError
            # return _multi_svd_norm(x, row_axis, col_axis, amin)
        elif ord == 1:
            return abs(x).sum(axis=row_axis).max()
        elif ord == numpy.Inf:
            return abs(x).sum(axis=col_axis).max()
        elif ord == -1:
            return abs(x).sum(axis=row_axis).min()
        elif ord == -numpy.Inf:
            return abs(x).sum(axis=col_axis).min()
        elif ord in (None, 'f', 'fro'):
            # The axis order does not matter for this norm.
            return _sparse_frobenius_norm(x)
        else:
            raise ValueError("Invalid norm order for matrices.")
    elif len(axis) == 1:
        a, = axis
        if not (-nd <= a < nd):
            raise ValueError('Invalid axis %r for an array with shape %r' %
                             (axis, x.shape))
        if ord == numpy.Inf:
            return abs(x).max(axis=a).A.ravel()
        elif ord == -numpy.Inf:
            return abs(x).min(axis=a).A.ravel()
        elif ord == 0:
            # Zero norm
            return (x != 0).astype(numpy.float32).sum(
                axis=a).ravel().astype(numpy.int_)
        elif ord == 1:
            # special case for speedup
            return abs(x).sum(axis=a).ravel()
        elif ord in (2, None):
            return cupy.sqrt(abs(x).power(2).sum(axis=a)).ravel()
        else:
            try:
                ord + 1
            except TypeError:
                raise ValueError('Invalid norm order for vectors.')
            return cupy.power(abs(x).power(ord).sum(axis=a), 1 / ord).ravel()
    else:
        raise ValueError("Improper number of dimensions to norm.")
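
# A minimal usage sketch (requires a CUDA device; building the CSR matrix
# from a small dense cupy array is illustrative only):
if __name__ == '__main__':
    a = cupyx.scipy.sparse.csr_matrix(cupy.array([[1., 0.], [3., 4.]]))
    print(norm(a))          # Frobenius norm, sqrt(26)
    print(norm(a, ord=1))   # maximum absolute column sum, 4.0
    print(norm(a, axis=0))  # per-column Euclidean norms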
|
"""
* Copyright 2008 Google Inc.
* Copyright 2011 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http:#www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
"""

import math

from pyjamas.Canvas.ColorStop import ColorStop

"""*
* Gradients for IE6 implementation need some extra meta info.
"""
class CanvasGradientImplIE6:

    def __init__(self, x0, y0, x1, y1):
        self.startX = x0
        self.startY = y0
        self.endX = x1
        self.endY = y1
        self.startRad = 0
        self.endRad = 0

        self.dx = x1 - x0
        self.dy = y1 - y0
        self.length = math.sqrt((self.dx * self.dx) + (self.dy * self.dy))

        if self.dy == 0:
            #self.angle = int(math.atan(self.dx) * 180 / math.pi) + 180
            self.angle = 90
        else:
            self.angle = int(math.atan(self.dx / self.dy) * 180 / math.pi) + 180

        self.colorStops = []

    def addColorStop(self, offset, color):
        newColorStop = ColorStop(offset, color)

        for i in range(len(self.colorStops)):
            cs = self.colorStops[i]
            if offset < cs.offset:
                self.colorStops.insert(i, newColorStop)
                return

        self.colorStops.append(newColorStop)

    """*
    * Creates an equivalent copy of this Gradient object.
    *
    * @return returns an equivalent copy of this gradient object
    """
    def cloneGradient(self):
        pass
|
# -*- coding: utf-8 -*-
<import_stmt>bpy<import_from_stmt>bpy.types PropertyGroup<import_from_stmt>bpy.props BoolProperty EnumProperty FloatProperty FloatVectorProperty IntProperty StringProperty<import_from_stmt>mmd_tools.core material<line_sep>#===========================================
# Property classes
#===========================================
<class_stmt>MMDMaterial(PropertyGroup)<block_start>""" マテリアル
"""<line_sep>name_j=StringProperty(name='Name' description='Japanese Name' default='' )<line_sep>name_e=StringProperty(name='Name(Eng)' description='English Name' default='' )<line_sep>material_id=IntProperty(name='Material ID' default=-1)<line_sep>ambient_color=FloatVectorProperty(name='Ambient' subtype='COLOR' size=3 min=0 max=1 precision=3 step=0.1 default=[0 0 0] )<line_sep>is_double_sided=BoolProperty(name='Double Sided' description='' default=<true> )<line_sep>enabled_drop_shadow=BoolProperty(name='Drop Shadow' description='' default=<true> )<line_sep>enabled_self_shadow_map=BoolProperty(name='Self Shadow Map' description='' default=<true> )<line_sep>enabled_self_shadow=BoolProperty(name='Self Shadow' description='' default=<true> )<line_sep>enabled_toon_edge=BoolProperty(name='Toon Edge' description='' default=<true> )<line_sep>edge_color=FloatVectorProperty(name='Edge Color' subtype='COLOR' size=4 min=0 max=1 precision=3 step=0.1 default=[0 0 0 1] )<line_sep>edge_weight=FloatProperty(name='Edge Weight' min=0 max=100 step=0.1 default=0.5 )<line_sep>sphere_texture_type=EnumProperty(name='Sphere Map Type' description='' items=[(str(material.SPHERE_MODE_OFF) 'Off' '' 1) (str(material.SPHERE_MODE_MULT) 'Multiply' '' 2) (str(material.SPHERE_MODE_ADD) 'Add' '' 3) (str(material.SPHERE_MODE_SUBTEX) 'SubTexture' '' 4) ] )<line_sep>is_shared_toon_texture=BoolProperty(name='Use Shared Toon Texture' description='' default=<false> )<line_sep>toon_texture=StringProperty(name='Toon Texture' subtype='FILE_PATH' description='' default='' )<line_sep>shared_toon_texture=IntProperty(name='Shared Toon Texture' description='' default=0 )<line_sep>comment=StringProperty(name='Comment' )<block_end>
|
"""Tests for toggle command."""

from rust_test_common import *


class TestToggle(TestBase):

    def test_toggle(self):
        window = sublime.active_window()
        self.assertEqual(util.get_setting('rust_syntax_checking', True), True)
        window.run_command('toggle_rust_syntax_setting')
        self.assertEqual(util.get_setting('rust_syntax_checking', True), False)
        window.run_command('toggle_rust_syntax_setting')
        self.assertEqual(util.get_setting('rust_syntax_checking', True), True)
|
def TowerOfHanoi(n, first, last, mid):
    if n == 1:
        print("Move disk 1 from rod", first, "to rod", last)
        return
    TowerOfHanoi(n - 1, first, mid, last)
    print("Move disk", n, "from rod", first, "to rod", last)
    TowerOfHanoi(n - 1, mid, last, first)


n = int(input())
TowerOfHanoi(n, 'F', 'M', 'L')  # First Rod -> F, Middle rod -> M, Last Rod -> L

"""
Complexity of the code
-Time Complexity - O(2^n)
-Space Complexity - O(n) (depth of the recursion stack)
"""
|
import moderngl as mgl
from pathlib import Path

import moderngl_window as mglw
from moderngl_window import geometry


class ComputeRenderToTexture(mglw.WindowConfig):
    """Simple example rendering to a texture with a compute shader"""
    title = "Render Texture Using Compute Shader"
    resource_dir = (Path(__file__) / '../../resources').resolve()
    gl_version = 4, 3
    aspect_ratio = 1.0

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.compute_shader = self.load_compute_shader('programs/compute/render_to_texture.glsl')
        self.compute_shader['destTex'] = 0
        self.texture_program = self.load_program('programs/texture.glsl')
        self.quad_fs = geometry.quad_fs()
        self.texture = self.ctx.texture((256, 256), 4)
        self.texture.filter = mgl.NEAREST, mgl.NEAREST

    def render(self, time, frame_time):
        self.ctx.clear(0.3, 0.3, 0.3)

        w, h = self.texture.size
        gw, gh = 16, 16
        nx, ny, nz = int(w / gw), int(h / gh), 1

        try:
            self.compute_shader['time'] = time
        except Exception:
            pass

        # Automatically binds as a GL_R32F / r32f (read from the texture)
        self.texture.bind_to_image(0, read=False, write=True)
        self.compute_shader.run(nx, ny, nz)

        # Render texture
        self.texture.use(location=0)
        self.quad_fs.render(self.texture_program)


if __name__ == '__main__':
    mglw.run_window_config(ComputeRenderToTexture)
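
# Note on the dispatch size above: the target texture is 256x256 and the
# compute shader is assumed to declare a 16x16 local work group, so
# nx = ny = 256 / 16 = 16 work groups are launched per axis (nz = 1).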
|
import numpy as np

import chainer
import chainer.functions as F
from chainer import Variable
import chainercv


def reconstruction_loss(dis, recon, gt):
    with chainer.using_config('train', False):
        v1 = dis.feature_vector(recon)
        v2 = dis.feature_vector(gt)
    denom = F.sqrt(F.batch_l2_norm_squared(v1) * F.batch_l2_norm_squared(v2))
    return -F.sum(F.reshape(F.batch_matmul(v1, v2, transa=True), (v1.shape[0],)) / denom)


class UpdaterEnc(chainer.training.StandardUpdater):

    def __init__(self, *args, **kwargs):
        self.models = kwargs.pop('models')
        if 'input_size' in kwargs:
            self.input_size = kwargs.pop('input_size')
        else:
            self.input_size = None
        self.loss_func = reconstruction_loss
        super(UpdaterEnc, self).__init__(*args, **kwargs)

    def get_batch(self, xp):
        batch = self.get_iterator('main').next()
        batchsize = len(batch)
        x = []
        gt = []
        c = []
        for j in range(batchsize):
            x.append(np.asarray(batch[j][0]).astype("f"))
            gt.append(np.asarray(batch[j][1]).astype("f"))
            c.append(np.asarray(batch[j][2]).astype(np.int32))
        x = Variable(xp.asarray(x))
        gt = Variable(xp.asarray(gt))
        c = Variable(xp.asarray(c))
        return x, gt, c

    def update_core(self):
        gen = self.models['gen']
        dis = self.models['dis']
        enc = self.models['enc']
        enc_optimizer = self.get_optimizer('opt_enc')
        xp = enc.xp

        # fetch batch
        x, gt, c = self.get_batch(xp)
        if self.input_size is not None:
            _x = []
            for img in x.data.get():
                _x.append(chainercv.transforms.resize(img, (self.input_size, self.input_size)))
            x = Variable(xp.asarray(_x))

        z = enc(x, y=c)
        with chainer.using_config('train', False):
            recon = gen(batchsize=len(z), z=z, y=c)

        loss = reconstruction_loss(dis, recon, gt)

        enc.cleargrads()
        loss.backward()
        enc_optimizer.update()

        chainer.reporter.report({'loss': loss})
        chainer.reporter.report({'min_slope': xp.min(enc.prelu_out.W.data)})
        chainer.reporter.report({'max_slope': xp.max(enc.prelu_out.W.data)})
        chainer.reporter.report({'min_z': xp.min(z.data)})
        chainer.reporter.report({'max_z': xp.max(z.data)})
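
# For reference, a framework-free sketch of the loss used above: it is the
# negative cosine similarity between discriminator feature vectors of the
# reconstruction and the ground truth (NumPy stands in for Chainer here;
# this helper is illustrative only and is not used by the updater).
def cosine_reconstruction_loss_np(v1, v2):
    # v1, v2: arrays of shape (batch, feature_dim)
    dots = np.sum(v1 * v2, axis=1)
    denom = np.sqrt(np.sum(v1 ** 2, axis=1) * np.sum(v2 ** 2, axis=1))
    return -np.sum(dots / denom)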
|
# Generated by Django 3.2.5 on 2021-07-16 14:15
import django.core.serializers.json
from django.db import migrations, models
import django.db.models.deletion
import django.db.models.expressions
import ipam.fields
import taggit.managers


class Migration(migrations.Migration):

    dependencies = [
        ('extras', '0061_extras_change_logging'),
        ('tenancy', '0001_squashed_0012'),
        ('ipam', '0049_prefix_mark_utilized'),
    ]

    operations = [
        migrations.CreateModel(
            name='IPRange',
            fields=[
                ('created', models.DateField(auto_now_add=True, null=True)),
                ('last_updated', models.DateTimeField(auto_now=True, null=True)),
                ('custom_field_data', models.JSONField(blank=True, default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)),
                ('id', models.BigAutoField(primary_key=True, serialize=False)),
                ('start_address', ipam.fields.IPAddressField()),
                ('end_address', ipam.fields.IPAddressField()),
                ('size', models.PositiveIntegerField(editable=False)),
                ('status', models.CharField(default='active', max_length=50)),
                ('description', models.CharField(blank=True, max_length=200)),
                ('role', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='ip_ranges', to='ipam.role')),
                ('tags', taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag')),
                ('tenant', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ip_ranges', to='tenancy.tenant')),
                ('vrf', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='ip_ranges', to='ipam.vrf')),
            ],
            options={
                'verbose_name': 'IP range',
                'verbose_name_plural': 'IP ranges',
                'ordering': (django.db.models.expressions.OrderBy(django.db.models.expressions.F('vrf'), nulls_first=True), 'start_address', 'pk'),
            },
        ),
    ]
|
"""
radish
~~~~~~
the root from red to green. BDD tooling for Python.
:copyright: (c) 2019 by <NAME> <<EMAIL>>
:license: MIT, see LICENSE for more details.
"""<import_stmt>itertools<import_stmt>textwrap<import_from_stmt>pathlib Path<import_from_stmt>lark Transformer<import_from_stmt>radish.models Background ConstantTag DefaultRule Feature PreconditionTag Rule Scenario ScenarioLoop ScenarioOutline Step Tag <import_from_stmt>radish.parser.errors RadishFirstStepMustUseFirstLevelKeyword RadishScenarioOutlineExamplesInconsistentCellCount RadishStepDataTableInconsistentCellCount <class_stmt>RadishGherkinTransformer(Transformer)<block_start><def_stmt>__init__ self *args **kwargs<block_start>super().__init__(*args **kwargs)<line_sep>self.featurefile_contents=<none><line_sep>self.feature_id=<none><line_sep>self.__step_id=<none><line_sep>self.__scenario_id=<none><line_sep>self.__step_keyword_ctx=<none><block_end><def_stmt>prepare self language_spec featurefile_path:Path featurefile_contents:str feature_id:int <block_start>"""Prepare the Transformer for the next transformation"""<line_sep>self.language_spec=language_spec<line_sep>self.featurefile_path=featurefile_path<line_sep>self.featurefile_contents=featurefile_contents.splitlines(<true>)<line_sep>self.feature_id=feature_id<line_sep>self.__step_id=1<line_sep>self.__scenario_id=1<block_end><def_stmt>start self subtree<block_start>"""Transform the root element for the radish AST"""<if_stmt>len(subtree)<g>0<block_start><return>subtree[0]<block_end><return><none><block_end><def_stmt>step_doc_string self subtree<block_start>"""Transform the ``step_doc_string``-subtree for the radish AST"""<line_sep>startline=subtree[0].line<line_sep>endline=subtree[-1].line-1<line_sep>lines="".join(self.featurefile_contents[startline:endline])<line_sep><return>textwrap.dedent(lines)<block_end><def_stmt>_table_cell self subtree<block_start>"""Transform a Table Cell"""<line_sep>raw_cell_value=subtree[0].strip()<line_sep># remove VBAR escape sequences
cell_value=raw_cell_value.replace(r"\|" "|")<line_sep><return>cell_value<block_end><def_stmt>_table_row self subtree<block_start>"""Transform a Table Row"""<line_sep><return>list(subtree)<block_end>#: Transform the ``step_data_table_cell``-subtree for the radish AST
step_data_table_cell=_table_cell<line_sep>#: Transform the ``step_data_table_row``-subtree for the radish AST
step_data_table_row=_table_row<def_stmt>step_data_table self subtree<block_start>"""Transform the ``step_data_table``-subtree for the radish AST"""<line_sep># check if all rows have the same amount of cells
table=list(subtree)<if_stmt>len({len(row)<for>row table})<g>1<block_start><raise>RadishStepDataTableInconsistentCellCount()<block_end><return>table<block_end><def_stmt>step_arguments self subtree<block_start>"""Transform the ``step_arguments``-subtree for the radish AST"""<if_stmt>len(subtree)<eq>0<block_start>doc_string=<none><line_sep>data_table=<none><block_end><elif_stmt>len(subtree)<eq>2<block_start>doc_string,data_table=subtree<block_end><elif_stmt>isinstance(subtree[0] str)<block_start>doc_string=subtree[0]<line_sep>data_table=<none><block_end><else_stmt><block_start>doc_string=<none><line_sep>data_table=subtree[0]<block_end><return>doc_string data_table<block_end><def_stmt>step self subtree<block_start>"""Transform the ``step``-subtree for the radish AST"""<line_sep>keyword,text,(doc_string data_table)=subtree<line_sep>keyword_line=keyword.line<line_sep>keyword=keyword.strip()<if_stmt>self.__step_keyword_ctx<is><none><block_start><if_stmt>keyword<not><in>self.language_spec.first_level_step_keywords<block_start><raise>RadishFirstStepMustUseFirstLevelKeyword()<block_end>self.__step_keyword_ctx=keyword<block_end><else_stmt><block_start><if_stmt>keyword<in>self.language_spec.first_level_step_keywords<block_start><if_stmt>keyword<ne>self.__step_keyword_ctx<block_start>self.__step_keyword_ctx=keyword<block_end><block_end><block_end>english_keyword=next(key<for>key,value self.language_spec.keywords.items()<if>value<eq>self.__step_keyword_ctx)<line_sep>step=Step(self.__step_id english_keyword keyword text doc_string data_table self.featurefile_path keyword_line )<line_sep># increment step id for the next step
self.__step_id<augadd>1<line_sep><return>step<block_end><def_stmt>scenario self subtree<block_start>"""Transform the ``scenario``-subtree for the radish AST"""<line_sep>tags=list(itertools.takewhile(<lambda>t:isinstance(t Tag) subtree))<line_sep>keyword=subtree[len(tags)]<line_sep>short_description,*steps=subtree[len(tags)+1:]<line_sep>scenario=Scenario(self.__scenario_id keyword short_description tags self.featurefile_path short_description.line steps )<line_sep># increment scenario id and reset step id for the next scenario
self.__scenario_id<augadd>1<line_sep>self.__step_id=1<line_sep>self.__step_keyword_ctx=<none><line_sep><return>scenario<block_end>#: Transform the ``example_cell``-subtree for the radish AST
example_cell=_table_cell<line_sep>#: Transform the ``example_row``-subtree for the radish AST
example_row=_table_row<def_stmt>examples self subtree<block_start>"""Transform the ``examples``-subtree for the radish AST"""<line_sep># check if all rows have the same amount of cells
<if_stmt>len({len(row)<for>row subtree})<g>1<block_start><raise>RadishScenarioOutlineExamplesInconsistentCellCount()<block_end>header,*rows=subtree<line_sep><return>[dict(zip(header row))<for>row rows]<block_end><def_stmt>scenario_outline self subtree<block_start>"""Transform the ``scenario_outline``-subtree for the radish AST"""<line_sep># consume Feature Tags
tags=list(itertools.takewhile(<lambda>t:isinstance(t Tag) subtree))<line_sep>keyword=subtree[len(tags)]<line_sep>short_description=subtree[len(tags)+1]<line_sep>steps=list(itertools.takewhile(<lambda>s:isinstance(s Step) subtree[len(tags)+2:]))<line_sep>examples_table=subtree[len(tags)+2+len(steps):][0]<line_sep>scenario_outline=ScenarioOutline(self.__scenario_id keyword short_description tags self.featurefile_path short_description.line steps examples_table )<line_sep># increment scenario id and reset step id for the next scenario
self.__scenario_id<augadd>1+len(examples_table)<line_sep>self.__step_id=1<line_sep>self.__step_keyword_ctx=<none><line_sep><return>scenario_outline<block_end><def_stmt>iterations self subtree<block_start>"""Transform the ``scenario_loop``-subtree for the radish AST"""<line_sep><return>int(subtree[0])<block_end><def_stmt>scenario_loop self subtree<block_start>"""Transform the ``scenario_outline``-subtree for the radish AST"""<line_sep># consume Feature Tags
tags=list(itertools.takewhile(<lambda>t:isinstance(t Tag) subtree))<line_sep>keyword=subtree[len(tags)]<line_sep>short_description=subtree[len(tags)+1]<line_sep>steps=list(itertools.takewhile(<lambda>s:isinstance(s Step) subtree[len(tags)+2:]))<line_sep>iterations=subtree[len(tags)+2+len(steps)]<line_sep>scenario_loop=ScenarioLoop(self.__scenario_id keyword short_description tags self.featurefile_path short_description.line steps iterations )<line_sep># increment scenario id and reset step id for the next scenario
self.__scenario_id<augadd>1+iterations<line_sep>self.__step_id=1<line_sep>self.__step_keyword_ctx=<none><line_sep><return>scenario_loop<block_end><def_stmt>background self subtree<block_start>"""Transform the ``background``-subtree for the radish AST"""<line_sep>keyword=subtree.pop(0)<if_stmt>len(subtree)<eq>0<block_start>short_description=<none><line_sep>steps=[]<block_end><elif_stmt>isinstance(subtree[0] Step)<block_start>short_description=<none><line_sep>steps=subtree<block_end><else_stmt><block_start>short_description,*steps=subtree<block_end>background=Background(keyword short_description self.featurefile_path short_description.line<if>short_description<else>0 steps )<line_sep><return>background<block_end><def_stmt>rule self subtree<block_start>"""Transform the ``rule``-subtree for the radish AST"""<line_sep>keyword=subtree.pop(0)<line_sep>short_description=subtree[0]<if_stmt>len(subtree)<g>1<block_start>scenarios=subtree[1:]<block_end><else_stmt><block_start>scenarios=[]<block_end>rule=Rule(keyword short_description self.featurefile_path short_description.line scenarios )<line_sep># let the Scenarios know to which Rule they belong
<for_stmt>scenario scenarios<block_start>scenario.set_rule(rule)<block_end><return>rule<block_end><def_stmt>description self description_lines<block_start>"""Transform the ``description``-subtree for the radish AST"""<line_sep><return>list((str(line)<for>line description_lines))<block_end><def_stmt>feature_body self subtree<block_start>"""Transform the ``feature_body``-subtree for the radish AST"""<line_sep>description,*scenarios=subtree<line_sep>background,scenarios=self._expand_background_and_scenarios(scenarios)<line_sep># create DefaultRule for scenarios without a Rul.
scenarios_for_default_rule=list(itertools.takewhile(<lambda>s:<not>isinstance(s Rule) scenarios))<line_sep>rules=scenarios[len(scenarios_for_default_rule):]<if_stmt>scenarios_for_default_rule<block_start>default_rule=DefaultRule(scenarios_for_default_rule[0].path scenarios_for_default_rule[0].line scenarios_for_default_rule )<line_sep># let the Scenarios in the DefaultRule know to which Rule they belong
<for_stmt>scenario scenarios_for_default_rule<block_start>scenario.set_rule(default_rule)<block_end>rules=[default_rule]+rules<block_end><return>description background rules<block_end><def_stmt>feature self subtree<block_start>"""Transform the ``feature``-subtree for the radish AST"""<line_sep># consume Feature Tags
tags=list(itertools.takewhile(<lambda>t:isinstance(t Tag) subtree))<line_sep>keyword=subtree[len(tags)]<line_sep>short_description=subtree[len(tags)+1]<if_stmt>len(subtree)<g>len(tags)+2<block_start>description,background,rules=subtree[len(tags)+2:][0]<block_end><else_stmt><block_start>description=<none><line_sep>background=<none><line_sep>rules=[]<block_end>feature=Feature(self.feature_id keyword short_description description tags self.featurefile_path short_description.line background rules self.language_spec )<line_sep># let the Background know to which Feature it belongs to
<if_stmt>background<block_start>background.set_feature(feature)<block_end># let the Rules know to which Feature they belong
<for_stmt>rule rules<block_start>rule.set_feature(feature)<block_end># add Background to all Rules
<for_stmt>rule rules<block_start>rule.set_background(background)<block_end><return>feature<block_end><def_stmt>tag self subtree<block_start>"""Transform the ``tag``-subtree for the radish AST"""<line_sep><return>subtree[0]<block_end>feature_tag=tag<line_sep>scenario_tag=tag<def_stmt>std_tag self subtree<block_start>"""Transform the ``tag``-subtree for the radish AST"""<line_sep>tag_name=subtree[0]<line_sep>tag=Tag(str(tag_name).strip() self.featurefile_path tag_name.line)<line_sep><return>tag<block_end><def_stmt>precondition_tag self subtree<block_start>"""Transform the ``precondition_tag``-subtree for the radish AST"""<line_sep>feature_filename,scenario_short_description=subtree<line_sep>tag=PreconditionTag(str(feature_filename) str(scenario_short_description) self.featurefile_path feature_filename.line )<line_sep><return>tag<block_end><def_stmt>constant_tag self subtree<block_start>"""Transform the ``constant_tag``-subtree for the radish AST"""<line_sep>key,value=subtree<line_sep>tag=ConstantTag(str(key) str(value) self.featurefile_path key.line)<line_sep><return>tag<block_end><def_stmt>_expand_background_and_scenarios self scenarios<block_start>"""Expand the given list of Scenarios into Background and Scenarios if applicable"""<line_sep>background=<none><if_stmt>scenarios<block_start><if_stmt>isinstance(scenarios Background)<block_start>background=scenarios<line_sep>scenarios=[]<block_end><elif_stmt>isinstance(scenarios Scenario)<block_start><pass><block_end><elif_stmt>isinstance(scenarios[0] Background)<block_start>background=scenarios.pop(0)<block_end><block_end><return>background scenarios<block_end><block_end>
|
"""
Extract allele/peptide pairs to exclude from training data.
"""<import_stmt>sys<import_stmt>os<import_stmt>argparse<import_stmt>pandas<import_from_stmt>mhcflurry.common normalize_allele_name<def_stmt>normalize_allele_name_or_return_unknown s<block_start><return>normalize_allele_name(s raise_on_error=<false> default_value="UNKNOWN")<block_end>parser=argparse.ArgumentParser(usage=__doc__)<line_sep>parser.add_argument("data" metavar="CSV" help="Training data")<line_sep>parser.add_argument("--remove-filename" action="append" default=[] metavar="NAME" help="Data to drop" required=<true>)<line_sep>parser.add_argument("--remove-kind" action="append" default=[] metavar="KIND" help="Format of data to drop. For published data, use the PMID." choices=["30377561"# Koşaloğlu-Yalçın, ..., Peters. Oncoimmunology 2018 [PMID 30377561]
] required=<true>)<line_sep>parser.add_argument("--out" metavar="CSV" help="Result data path")<line_sep>parser.add_argument("--out-removed" metavar="CSV" help="Write removed data to given path")<line_sep>pandas.set_option('display.max_columns' 500)<line_sep>LOADERS={}<def_stmt>load_30377561 filename# Koşaloğlu-Yalçın, ..., Peters. Oncoimmunology 2018 [PMID 30377561]
<block_start>dfs=pandas.read_excel(filename sheet_name=<none>)<line_sep>df1=dfs['Supp Table 5 positive & random']<line_sep>result_df=[]<line_sep>result_df.append(df1.rename(columns={"mt.pep":"peptide" "hla":"allele" })[["allele" "peptide"]])<line_sep>result_df.append(df1.rename(columns={"wt.pep":"peptide" "hla":"allele" })[["allele" "peptide"]])<line_sep>df2=dfs["Supp Table 4 viral epitopes"]<line_sep>result_df.append(df2.rename(columns={"Epitope":"peptide" "Restriction":"allele" })[["allele" "peptide"]])<line_sep>result_df=pandas.concat(result_df ignore_index=<true>)<line_sep><return>result_df<block_end>LOADERS["30377561"]=load_30377561<def_stmt>go args<block_start>df=pandas.read_csv(args.data)<line_sep>print("Read training data of length %d: "%len(df))<line_sep>print(df)<line_sep>df["allele_peptide"]=df.allele+"~"+df.peptide<if_stmt>len(args.remove_kind)<ne>len(args.remove_filename)<block_start>parser.error("Number of arguments mismatch: --remove-kind [%d] != "<concat>"--remove-filename [%d]"%(len(args.remove_kind) len(args.remove_filename)))<block_end>removed=[]<for_stmt>(i (kind path)) enumerate(zip(args.remove_kind args.remove_filename))<block_start>print("Processing file %d / %d: %s %s"%(i+1 len(args.remove_kind) kind path))<line_sep>to_remove=LOADERS[kind](path)<line_sep>print("Remove data contains %d entries"%len(to_remove))<line_sep>to_remove["normalized_allele"]=to_remove.allele.map(normalize_allele_name_or_return_unknown)<line_sep>remove_allele_peptides=set(to_remove.normalized_allele+"~"+to_remove.peptide)<line_sep>remove_mask=df.allele_peptide.isin(remove_allele_peptides)<line_sep>print("Will remove %d entries."%remove_mask.sum())<line_sep>removed.append(df.loc[remove_mask].copy())<line_sep>df=df.loc[~remove_mask].copy()<line_sep>print("New training data size: %d"%len(df))<block_end>print("Done processing.")<line_sep>removed_df=pandas.concat(removed)<line_sep>print("Removed %d entries in total:"%len(removed_df))<line_sep>print(removed_df)<if_stmt>args.out_removed<block_start>removed_df.to_csv(args.out_removed index=<false>)<line_sep>print("Wrote: " args.out_removed)<block_end><if_stmt>args.out<block_start>df.to_csv(args.out index=<false>)<line_sep>print("Wrote: " args.out)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>go(parser.parse_args(sys.argv[1:]))<block_end>
|
<import_from_stmt>typing Optional<import_from_stmt>botocore.client BaseClient<import_from_stmt>typing Dict<import_from_stmt>typing Union<import_from_stmt>botocore.paginate Paginator<import_from_stmt>datetime datetime<import_from_stmt>botocore.waiter Waiter<import_from_stmt>typing List<class_stmt>Client(BaseClient)<block_start><def_stmt>can_paginate self operation_name:str=<none><block_start><pass><block_end><def_stmt>create_app self name:str repository:str platform:str oauthToken:str description:str=<none> iamServiceRoleArn:str=<none> environmentVariables:Dict=<none> enableBranchAutoBuild:bool=<none> enableBasicAuth:bool=<none> basicAuthCredentials:str=<none> customRules:List=<none> tags:Dict=<none> buildSpec:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_branch self appId:str branchName:str description:str=<none> stage:str=<none> framework:str=<none> enableNotification:bool=<none> enableAutoBuild:bool=<none> environmentVariables:Dict=<none> basicAuthCredentials:str=<none> enableBasicAuth:bool=<none> tags:Dict=<none> buildSpec:str=<none> ttl:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_domain_association self appId:str domainName:str subDomainSettings:List enableAutoSubDomain:bool=<none><arrow>Dict<block_start><pass><block_end><def_stmt>delete_app self appId:str<arrow>Dict<block_start><pass><block_end><def_stmt>delete_branch self appId:str branchName:str<arrow>Dict<block_start><pass><block_end><def_stmt>delete_domain_association self appId:str domainName:str<arrow>Dict<block_start><pass><block_end><def_stmt>delete_job self appId:str branchName:str jobId:str<arrow>Dict<block_start><pass><block_end><def_stmt>generate_presigned_url self ClientMethod:str=<none> Params:Dict=<none> ExpiresIn:int=<none> HttpMethod:str=<none><block_start><pass><block_end><def_stmt>get_app self appId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_branch self appId:str branchName:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_domain_association self appId:str domainName:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_job self appId:str branchName:str jobId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_paginator self operation_name:str=<none><arrow>Paginator<block_start><pass><block_end><def_stmt>get_waiter self waiter_name:str=<none><arrow>Waiter<block_start><pass><block_end><def_stmt>list_apps self nextToken:str=<none> maxResults:int=<none><arrow>Dict<block_start><pass><block_end><def_stmt>list_branches self appId:str nextToken:str=<none> maxResults:int=<none><arrow>Dict<block_start><pass><block_end><def_stmt>list_domain_associations self appId:str nextToken:str=<none> maxResults:int=<none><arrow>Dict<block_start><pass><block_end><def_stmt>list_jobs self appId:str branchName:str nextToken:str=<none> maxResults:int=<none><arrow>Dict<block_start><pass><block_end><def_stmt>start_job self appId:str branchName:str jobType:str jobId:str=<none> jobReason:str=<none> commitId:str=<none> commitMessage:str=<none> commitTime:datetime=<none><arrow>Dict<block_start><pass><block_end><def_stmt>stop_job self appId:str branchName:str jobId:str<arrow>Dict<block_start><pass><block_end><def_stmt>update_app self appId:str name:str=<none> description:str=<none> platform:str=<none> iamServiceRoleArn:str=<none> environmentVariables:Dict=<none> enableBranchAutoBuild:bool=<none> enableBasicAuth:bool=<none> basicAuthCredentials:str=<none> customRules:List=<none> buildSpec:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_branch self 
appId:str branchName:str description:str=<none> framework:str=<none> stage:str=<none> enableNotification:bool=<none> enableAutoBuild:bool=<none> environmentVariables:Dict=<none> basicAuthCredentials:str=<none> enableBasicAuth:bool=<none> buildSpec:str=<none> ttl:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_domain_association self appId:str domainName:str subDomainSettings:List enableAutoSubDomain:bool=<none><arrow>Dict<block_start><pass><block_end><block_end>
|
import cudarray as ca

from ...base import PhaseMixin
from ..base import UnaryElementWise


class Dropout(UnaryElementWise, PhaseMixin):

    def __init__(self, dropout=0.5):
        self.dropout = dropout
        self._tmp_mask = None
        self.phase = 'train'

    def __call__(self, x):
        if self.dropout == 0.0:
            return x
        return super(Dropout, self).__call__(x)

    def setup(self):
        super(Dropout, self).setup()
        self.mask_shape = self.shape
        self._tmp_mask = ca.zeros(self.mask_shape, dtype=ca.int_)

    def fprop(self):
        if self.phase == 'train':
            ca.less(self.dropout, ca.random.uniform(size=self.mask_shape),
                    self._tmp_mask)
            ca.multiply(self.x.array, self._tmp_mask, self.array)
            self.array *= 1.0 / (1.0 - self.dropout)
        elif self.phase == 'test':
            self.array = self.x.array
        else:
            raise ValueError('Invalid phase: %s' % self.phase)

    def bprop(self):
        ca.multiply(self.grad_array, self._tmp_mask, self.x.grad_array)


class SpatialDropout(Dropout):

    def setup(self):
        super(SpatialDropout, self).setup()
        self.mask_shape = self.shape[:2] + (1, 1)
        self._tmp_mask = ca.zeros(self.mask_shape, dtype=ca.int_)
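
# A framework-free sketch of the same inverted-dropout computation in NumPy
# (illustrative only; the layers above operate on cudarray buffers inside
# their graph framework):
import numpy as np

def inverted_dropout(x, dropout=0.5, train=True, rng=np.random):
    """Zero units with probability `dropout` and scale survivors by
    1 / (1 - dropout) so the expected activation is unchanged."""
    if not train or dropout == 0.0:
        return x
    mask = rng.uniform(size=x.shape) > dropout
    return x * mask / (1.0 - dropout)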
|
import sys
sys.argv.append("--ge_config=profiling")

import os
os.environ['PYTORCH_CUDA_FUSER_DISABLE_FALLBACK'] = '1'
os.environ['PYTORCH_CUDA_FUSER_DISABLE_FMA'] = '1'
os.environ['PYTORCH_CUDA_FUSER_JIT_OPT_LEVEL'] = '0'

from test_jit_cuda_fuser import *

if __name__ == '__main__':
    run_tests()
|
from common import *

sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../..')
from demo import QueryInfo

graph = None
redis_con = None
GRAPH_ID = "G"
NEW_GRAPH_ID = "G2"


class testKeyspaceAccesses(FlowTestsBase):

    def __init__(self):
        self.env = Env(decodeResponses=True)
        global graph
        global redis_con
        redis_con = self.env.getConnection()
        graph = Graph(redis_con, GRAPH_ID)

    def test00_test_data_valid_after_rename(self):
        global graph
        node0 = Node(node_id=0, label="L", properties={'name': 'x', 'age': 1})
        graph.add_node(node0)
        graph.flush()

        redis_con.rename(GRAPH_ID, NEW_GRAPH_ID)
        graph = Graph(redis_con, NEW_GRAPH_ID)

        node1 = Node(node_id=1, label="L", properties={'name': 'x', 'age': 1})
        graph.add_node(node1)
        graph.flush()

        query = "MATCH (n) return n"
        expected_results = [[node0], [node1]]
        query_info = QueryInfo(query=query, description="Tests data is valid after renaming", expected_result=expected_results)
        self._assert_resultset_and_expected_mutually_included(graph.query(query), query_info)

    # Graph queries should fail gracefully on accessing non-graph keys.
    def test01_graph_access_on_invalid_key(self):
        redis_con.set("integer_key", 5)
        graph = Graph(redis_con, "integer_key")
        try:
            query = """MATCH (n) RETURN noneExistingFunc(n.age) AS cast"""
            graph.query(query)
            assert(False)
        except redis.exceptions.ResponseError as e:
            # Expecting an error.
            assert("WRONGTYPE" in str(e))
            pass

    # Fail gracefully on attempting a graph deletion of an empty key.
    def test02_graph_delete_on_empty_key(self):
        graph = Graph(redis_con, "nonexistent_key")
        try:
            graph.delete()
            assert(False)
        except redis.exceptions.ResponseError as e:
            # Expecting an error.
            assert("empty key" in str(e))
            pass
|
"""
=================
Camera Projection
=================

We can see the camera coordinate frame and a grid of points in the camera
coordinate system which will be projected on the sensor. From the coordinates
on the sensor we can compute the corresponding pixels.
"""
print(__doc__)


import numpy as np
import matplotlib.pyplot as plt
from pytransform3d.transformations import plot_transform
from pytransform3d.camera import make_world_grid, cam2sensor, sensor2img


focal_length = 0.2
sensor_size = (0.2, 0.15)
image_size = (640, 480)

plt.figure(figsize=(12, 5))

ax = plt.subplot(121, projection="3d")
ax.set_title("Grid in 3D camera coordinate system")
ax.set_xlim((-1, 1))
ax.set_ylim((-1, 1))
ax.set_zlim((0, 2))
ax.set_xlabel("X")
ax.set_ylabel("Y")
ax.set_zlabel("Z")

cam_grid = make_world_grid(n_points_per_line=11) - np.array([0, 0, -2, 0])
img_grid = cam_grid * focal_length

c = np.arange(len(cam_grid))
ax.scatter(cam_grid[:, 0], cam_grid[:, 1], cam_grid[:, 2], c=c)
ax.scatter(img_grid[:, 0], img_grid[:, 1], img_grid[:, 2], c=c)
plot_transform(ax)

sensor_grid = cam2sensor(cam_grid, focal_length)
img_grid = sensor2img(sensor_grid, sensor_size, image_size)

ax = plt.subplot(122, aspect="equal")
ax.set_title("Grid in 2D image coordinate system")
ax.scatter(img_grid[:, 0], img_grid[:, 1], c=c)
ax.set_xlim((0, image_size[0]))
ax.set_ylim((0, image_size[1]))

plt.show()
|
# encoding: UTF-8
<import_from_stmt>opendatatools.common get_current_day<import_from_stmt>bs4 BeautifulSoup<import_stmt>pandas<as>pd<import_stmt>numpy<as>np<import_from_stmt>opendatatools.common RestAgent<import_from_stmt>opendatatools.aqi.constant city_code_map<class_stmt>AQIAgent(RestAgent)<block_start><def_stmt>__init__ self<block_start>RestAgent.__init__(self)<block_end><def_stmt>handle_visit_limit self<block_start>url=""<block_end><def_stmt>get_daily_aqi self date<block_start>url="http://datacenter.mep.gov.cn/websjzx/report/list.vm"<line_sep>page_no=0<line_sep>aqi_result=list()<while_stmt><true><block_start>page_no=page_no+1<line_sep># 1. 分页爬取数据
data={'pageNum':page_no 'V_DATE':date 'xmlname':1512478367400 'roleType':'CFCD2084' }<line_sep>rsp=self.do_request(url data self.proxies)<if_stmt>rsp<is><none><block_start><return><none><block_end>data=list()<line_sep>soup=BeautifulSoup(rsp "html5lib")<line_sep>divs=soup.find_all('div')<for_stmt>div divs<block_start><if_stmt>div.has_attr('class')<and>'report_main'<in>div['class']<block_start>rows=div.table.findAll('tr')<for_stmt>row rows<block_start>cols=row.findAll('td')<if_stmt>len(cols)<eq>9<block_start>city=cols[3].text<line_sep>aqi=cols[4].text<line_sep>indicator=cols[5].text<line_sep>date=cols[6].text<line_sep>code=cols[7].text<line_sep>level=cols[8].text<line_sep>data.append({"date":date "city":city "aqi":aqi "code":code "level":level "indicator":indicator })<block_end><block_end><block_end><block_end><if_stmt>len(data)<eq>0<block_start><break><line_sep><block_end>aqi_result.extend(data)<block_end>df=pd.DataFrame(aqi_result)<line_sep><return>df<block_end><def_stmt>get_hour_aqi self time<block_start>url="http://datacenter.mep.gov.cn/websjzx/report/list.vm"<line_sep>page_no=0<line_sep>aqi_result=list()<while_stmt><true><block_start>page_no=page_no+1<line_sep># 1. Crawl the data page by page
data={'pageNum':page_no 'xmlname':1512382906122 'roleType':'CFCD2084' 'V_DATE':time 'E_DATE':time }<line_sep>rsp=self.do_request(url data self.proxies)<if_stmt>rsp<is><none><block_start><return><none><block_end>data=list()<line_sep>soup=BeautifulSoup(rsp "html5lib")<line_sep>divs=soup.find_all('div')<for_stmt>div divs<block_start><if_stmt>div.has_attr('class')<and>'report_main'<in>div['class']<block_start>rows=div.table.findAll('tr')<for_stmt>row rows<block_start>cols=row.findAll('td')<if_stmt>len(cols)<eq>8<block_start>city=cols[2].text<line_sep>aqi=cols[3].text<line_sep>indicator=cols[4].text<line_sep>time=cols[5].text<line_sep>code=cols[6].text<line_sep>level=cols[7].text<line_sep>data.append({"time":time "city":city "aqi":aqi "code":code "level":level "indicator":indicator })<block_end><block_end><block_end><block_end><if_stmt>len(data)<eq>0<block_start><break><line_sep><block_end>aqi_result.extend(data)<block_end>df=pd.DataFrame(aqi_result)<line_sep><return>df<block_end><def_stmt>get_daily_aqi_onecity self city<block_start>url='http://datacenter.mep.gov.cn/websjzx/report/list.vm'<if_stmt>city<not><in>city_code_map<block_start>print("this city is not ready !"+city)<line_sep><return><none><block_end>city_code=city_code_map[city]<line_sep>aqi_result=list()<line_sep>page_no=0<while_stmt><true><block_start>page_no=page_no+1<line_sep># 1. Crawl the data page by page
data={'pageNum':page_no 'citycodes':city_code 'citytime':"2000-01-01" 'xmlname':"1513844769596kqzllb"}<line_sep>rsp=self.do_request(url data self.proxies)<line_sep># 2. Parse the returned data and extract the required content from the HTML
data=list()<line_sep>soup=BeautifulSoup(rsp "html5lib")<line_sep>divs=soup.find_all('div')<for_stmt>div divs<block_start><if_stmt>div.has_attr('class')<and>'report_main'<in>div['class']<block_start>rows=div.table.findAll('tr')<for_stmt>row rows<block_start>cols=row.findAll('td')<if_stmt>len(cols)<eq>7<block_start>date=cols[1].text<line_sep>aqi=cols[3].text<line_sep>level=cols[5].text<line_sep>indicator=cols[4].text<line_sep>data.append({"date":date "aqi":aqi "level":level "indicator":indicator })<block_end><block_end><block_end><block_end><if_stmt>len(data)<eq>0<block_start><break><line_sep><block_end>aqi_result.extend(data)<block_end>df=pd.DataFrame(aqi_result)<line_sep><return>df<block_end><def_stmt>get_recent_daily_aqi_onecity self city<block_start>url='http://datacenter.mep.gov.cn/websjzx/report!list.vm'<if_stmt>city<not><in>city_code_map<block_start>print("this city is not ready !"+city)<line_sep><return><none><block_end>city_code=city_code_map[city]<line_sep>data={'citycodes':city_code 'citytime':get_current_day() 'xmlname':"1513844769596kqzllb"}<line_sep>rsp=self.do_request(url data self.proxies)<line_sep># 2. Parse the returned data and extract the required content from the HTML
data=list()<line_sep>soup=BeautifulSoup(rsp "html5lib")<line_sep>divs=soup.find_all('div')<for_stmt>div divs<block_start><if_stmt>div.has_attr('class')<and>'report_main'<in>div['class']<block_start>rows=div.table.findAll('tr')<for_stmt>row rows<block_start>cols=row.findAll('td')<if_stmt>len(cols)<eq>7<block_start>date=cols[1].text<line_sep>aqi=cols[3].text<line_sep>level=cols[5].text<line_sep>indicator=cols[4].text<line_sep>data.append({"date":date "aqi":aqi "level":level "indicator":indicator })<block_end><block_end><block_end><block_end>df=pd.DataFrame(data)<line_sep><return>df<block_end><def_stmt>get_hour_aqi_onecity self city date<block_start>url='http://datacenter.mep.gov.cn/websjzx/report/list.vm'<if_stmt>city<not><in>city_code_map<block_start>print("this city is not ready !"+city)<line_sep><return><none><block_end>city_code=city_code_map[city]<line_sep>aqi_result=list()<line_sep>page_no=0<while_stmt><true><block_start>page_no=page_no+1<line_sep># 1. Crawl the data page by page
data={'pageNum':page_no 'ctiycodes':city_code 'citytime':date 'xmlname':"1511257916552" "queryflag":"close" "customquery":"false" "isdesignpatterns":"false" }<line_sep>rsp=self.do_request(url data self.proxies)<line_sep># 2. Parse the returned data and extract the required content from the HTML
data=list()<line_sep>soup=BeautifulSoup(rsp "html5lib")<line_sep>divs=soup.find_all('div')<for_stmt>div divs<block_start><if_stmt>div.has_attr('class')<and>'report_main'<in>div['class']<block_start>rows=div.table.findAll('tr')<for_stmt>row rows<block_start>cols=row.findAll('td')<if_stmt>len(cols)<eq>7<block_start>time=cols[2].text<line_sep>aqi=cols[4].text<line_sep>city=cols[3].text<line_sep>level=cols[5].text<line_sep>indicator=cols[6].text<line_sep>data.append({"time":time "aqi":aqi "city":city "level":level "indicator":indicator })<block_end><block_end><block_end><block_end>aqi_result.extend(data)<if_stmt>len(data)<l>10<block_start><break><line_sep><block_end><block_end>df=pd.DataFrame(aqi_result)<line_sep><return>df<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>aqi=AQIAgent()<line_sep>result=aqi.get_hour_aqi_onecity('北京市' '2018-05-26')<line_sep>print(result)<block_end>
|
# Copyright 2020 The SQLFlow Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_stmt>os<import_stmt>pathlib<import_stmt>subprocess<import_stmt>sys<import_from_stmt>runtime db<import_from_stmt>runtime.dbapi table_writer<import_from_stmt>runtime.feature.derivation get_ordered_field_descs infer_feature_columns <import_from_stmt>runtime.model.db read_metadata_from_db<import_from_stmt>runtime.model.model EstimatorType Model<import_from_stmt>runtime.step.create_result_table create_evaluate_table create_explain_table create_predict_table <import_from_stmt>runtime.step.tensorflow.evaluate evaluate_step<as>tf_evaluate<import_from_stmt>runtime.step.tensorflow.explain explain_step<as>tf_explain<import_from_stmt>runtime.step.tensorflow.explain print_image_as_base64_html<import_from_stmt>runtime.step.tensorflow.predict predict_step<as>tf_pred<import_from_stmt>runtime.step.tensorflow.train train_step<as>tf_train<import_from_stmt>runtime.step.xgboost.evaluate evaluate<as>xgboost_evaluate<import_from_stmt>runtime.step.xgboost.explain explain<as>xgboost_explain<import_from_stmt>runtime.step.xgboost.predict predict<as>xgboost_pred<import_from_stmt>runtime.step.xgboost.train train<as>xgboost_train<def_stmt>submit_local_train datasource original_sql select validation_select estimator_string model_image feature_column_map label_column model_params train_params validation_params save load user=""<block_start>"""This function runs the train task locally.
Args:
datasource: string
Like: odps://access_id:[email protected]/api?
curr_project=test_ci&scheme=http
select: string
The SQL statement for selecting data for train
validation_select: string
The SQL statement for selecting data for validation
estimator_string: string
TensorFlow estimator name, Keras class name, or XGBoost
model_image: string
Docker image used to train this model,
default: sqlflow/sqlflow:step
feature_column_map: string
A map of Python feature column IR.
label_column: string
Feature column instance describing the label.
model_params: dict
Params for training, corresponding to the WITH clause
train_params: dict
Extra train params, will be passed to runtime.tensorflow.train
or runtime.xgboost.train. Optional fields:
- disk_cache: Use dmatrix disk cache if True, default: False.
- batch_size: Split data to batches and train, default: 1.
- epoch: Epochs to train, default: 1.
validation_params: dict
Params for validation.
save: string
Model name to be saved.
load: string
The pre-trained model name to load
user: string
Not used for local submitter, used in runtime.pai only.
"""<if_stmt>estimator_string.lower().startswith("xgboost")<block_start>train_func=xgboost_train<block_end><else_stmt><block_start>train_func=tf_train<block_end><with_stmt>db.connect_with_data_source(datasource)<as>conn<block_start>feature_column_map,label_column=infer_feature_columns(conn select feature_column_map label_column n=1000)<block_end><return>train_func(original_sql=original_sql model_image=model_image estimator_string=estimator_string datasource=datasource select=select validation_select=validation_select model_params=model_params train_params=train_params validation_params=validation_params feature_column_map=feature_column_map label_column=label_column save=save load=load)<block_end><def_stmt>submit_local_pred datasource original_sql select model label_name pred_params result_table user=""<block_start>model=Model.load_from_db(datasource model)<if_stmt>model.get_type()<eq>EstimatorType.XGBOOST<block_start>pred_func=xgboost_pred<block_end><else_stmt><block_start>pred_func=tf_pred<block_end><if_stmt>model.get_meta("label")<is><none><block_start>train_label_desc=<none><block_end><else_stmt><block_start>train_label_desc=model.get_meta("label").get_field_desc()[0]<block_end><if_stmt>pred_params<is><none><block_start>extra_result_cols=[]<block_end><else_stmt><block_start>extra_result_cols=pred_params.get("predict.extra_outputs" "")<line_sep>extra_result_cols=[c.strip()<for>c extra_result_cols.split(",")<if>c.strip()]<block_end><with_stmt>db.connect_with_data_source(datasource)<as>conn<block_start>result_column_names,train_label_idx=create_predict_table(conn select result_table train_label_desc label_name extra_result_cols)<block_end>pred_func(datasource=datasource select=select result_table=result_table result_column_names=result_column_names train_label_idx=train_label_idx model=model extra_result_cols=extra_result_cols)<block_end><def_stmt>submit_local_evaluate datasource original_sql select label_name model model_params result_table user=""<block_start>model=Model.load_from_db(datasource model)<if_stmt>model.get_type()<eq>EstimatorType.XGBOOST<block_start>evaluate_func=xgboost_evaluate<line_sep>validation_metrics=model_params.get("validation.metrics" "accuracy_score")<block_end><else_stmt><block_start>evaluate_func=tf_evaluate<line_sep>validation_metrics=model_params.get("validation.metrics" "Accuracy")<block_end>conn=db.connect_with_data_source(datasource)<line_sep>validation_metrics=[m.strip()<for>m validation_metrics.split(",")]<line_sep>result_column_names=create_evaluate_table(conn result_table validation_metrics)<line_sep>conn.close()<line_sep>evaluate_func(datasource=datasource select=select result_table=result_table model=model label_name=label_name model_params=model_params result_column_names=result_column_names)<block_end><def_stmt>submit_local_explain datasource original_sql select model model_params result_table explainer="TreeExplainer" user=""<block_start>model=Model.load_from_db(datasource model)<if_stmt>model.get_type()<eq>EstimatorType.XGBOOST<block_start>explain_func=xgboost_explain<block_end><else_stmt><block_start>explain_func=tf_explain<block_end><if_stmt>result_table<block_start>feature_columns=model.get_meta("features")<line_sep>estimator_string=model.get_meta("class_name")<line_sep>field_descs=get_ordered_field_descs(feature_columns)<line_sep>feature_column_names=[fd.name<for>fd field_descs]<with_stmt>db.connect_with_data_source(datasource)<as>conn<block_start>create_explain_table(conn model.get_type() explainer estimator_string result_table 
feature_column_names)<block_end><block_end>explain_func(datasource=datasource select=select explainer=explainer model_params=model_params result_table=result_table model=model)<if_stmt><not>result_table<block_start>print_image_as_base64_html("summary.png")<block_end><block_end>SQLFLOW_TO_RUN_CONTEXT_KEY_SELECT="SQLFLOW_TO_RUN_SELECT"<line_sep>SQLFLOW_TO_RUN_CONTEXT_KEY_INTO="SQLFLOW_TO_RUN_INTO"<line_sep>SQLFLOW_TO_RUN_CONTEXT_KEY_IMAGE="SQLFLOW_TO_RUN_IMAGE"<def_stmt>submit_local_run datasource select image_name params into<block_start><if_stmt><not>params<block_start><raise>ValueError("params should not be None or empty.")<block_end>subprocess_env=os.environ.copy()<line_sep>update_env={SQLFLOW_TO_RUN_CONTEXT_KEY_SELECT:select SQLFLOW_TO_RUN_CONTEXT_KEY_INTO:into SQLFLOW_TO_RUN_CONTEXT_KEY_IMAGE:image_name}<line_sep>subprocess_env.update(update_env)<line_sep>program_file_path=pathlib.Path(params[0])<if_stmt><not>program_file_path.is_file<block_start><raise>ValueError("{} is not a file".format(params[0]))<block_end>sub_process=<none><line_sep>file_ext=program_file_path.suffix<if_stmt><not>file_ext<block_start>args=[program_file_path]<line_sep>args.extend(params[1:])<line_sep>sub_process=subprocess.run(args=args env=subprocess_env stdout=subprocess.PIPE stderr=subprocess.PIPE)<block_end><elif_stmt>file_ext.lower()<eq>".py"<block_start>args=["python" "-m" program_file_path.stem]<line_sep>args.extend(params[1:])<line_sep>sub_process=subprocess.run(args=args env=subprocess_env stdout=subprocess.PIPE stderr=subprocess.PIPE)<block_end><else_stmt><block_start><raise>RuntimeError("The other executable except Python program is not supported yet")<block_end><if_stmt>sub_process<block_start>print(sub_process.stdout.decode("utf-8"))<if_stmt>sub_process.returncode<ne>0<block_start>print(sub_process.stderr.decode("utf-8") file=sys.stderr)<line_sep><raise>RuntimeError("Executing {} failed.".format(params[0]))<block_end><block_end><block_end><def_stmt>submit_local_show_train datasource model_name<block_start>meta=read_metadata_from_db(datasource model_name)<line_sep>original_sql=meta.get("original_sql")<if_stmt><not>original_sql<block_start><raise>ValueError("cannot find the train SQL statement")<block_end>result_set=[(model_name original_sql)]<line_sep>header=["Model" "Train Statement"]<line_sep>writer=table_writer.ProtobufWriter(result_set header)<for_stmt>line writer.dump_strings()<block_start>print(line)<block_end><block_end>
|
<import_stmt>pytest<import_from_stmt>indy_common.constants GET_ATTR GET_NYM GET_SCHEMA GET_CLAIM_DEF CLAIM_DEF_FROM CLAIM_DEF_SCHEMA_REF CLAIM_DEF_SIGNATURE_TYPE SCHEMA_NAME SCHEMA_VERSION SCHEMA_ATTR_NAMES JSON_LD_CONTEXT RICH_SCHEMA RICH_SCHEMA_ENCODING RICH_SCHEMA_MAPPING RICH_SCHEMA_CRED_DEF RS_CRED_DEF_TYPE_VALUE RS_ENCODING_TYPE_VALUE RS_MAPPING_TYPE_VALUE RS_SCHEMA_TYPE_VALUE RS_CONTEXT_TYPE_VALUE RS_ID GET_RICH_SCHEMA_OBJECT_BY_ID GET_RICH_SCHEMA_OBJECT_BY_METADATA RS_NAME RS_VERSION RS_TYPE RICH_SCHEMA_PRES_DEF RS_PRES_DEF_TYPE_VALUE<import_from_stmt>indy_node.test.rich_schema.templates W3C_BASE_CONTEXT RICH_SCHEMA_EX1<import_from_stmt>indy_node.test.state_proof.helper check_valid_proof sdk_submit_operation_and_get_result<import_from_stmt>plenum.common.constants TARGET_NYM TXN_TYPE RAW DATA<line_sep># fixtures, do not remove
<import_from_stmt>indy_node.test.attrib_txn.test_nym_attrib sdk_added_raw_attribute attributeName attributeValue attributeData<import_from_stmt>plenum.common.util randomString<def_stmt>check_no_data_and_valid_proof result<block_start><assert_stmt>result.get(DATA)<is><none><line_sep>check_valid_proof(result)<block_end><def_stmt>test_state_proof_returned_for_missing_attr looper nodeSetWithOneNodeResponding attributeName sdk_pool_handle sdk_wallet_endorser<block_start>"""
Tests that state proof is returned in the reply for GET_ATTR transactions
"""<line_sep>_,dest=sdk_wallet_endorser<line_sep>get_attr_operation={TARGET_NYM:dest TXN_TYPE:GET_ATTR RAW:attributeName}<line_sep>result=sdk_submit_operation_and_get_result(looper sdk_pool_handle sdk_wallet_endorser get_attr_operation)<line_sep>check_no_data_and_valid_proof(result)<block_end><def_stmt>test_state_proof_returned_for_missing_nym looper nodeSetWithOneNodeResponding sdk_pool_handle sdk_wallet_endorser sdk_user_wallet_a<block_start>"""
Tests that state proof is returned in the reply for GET_NYM transactions
"""<line_sep># Make not existing id
_,dest=sdk_user_wallet_a<line_sep>dest=dest[:-3]<line_sep>dest<augadd>"fff"<line_sep>get_nym_operation={TARGET_NYM:dest TXN_TYPE:GET_NYM}<line_sep>result=sdk_submit_operation_and_get_result(looper sdk_pool_handle sdk_wallet_endorser get_nym_operation)<line_sep>check_no_data_and_valid_proof(result)<block_end><def_stmt>test_state_proof_returned_for_missing_schema looper nodeSetWithOneNodeResponding sdk_pool_handle sdk_wallet_endorser<block_start>"""
Tests that state proof is returned in the reply for GET_SCHEMA transactions
"""<line_sep>_,dest=sdk_wallet_endorser<line_sep>schema_name="test_schema"<line_sep>schema_version="1.0"<line_sep>get_schema_operation={TARGET_NYM:dest TXN_TYPE:GET_SCHEMA DATA:{SCHEMA_NAME:schema_name SCHEMA_VERSION:schema_version }}<line_sep>result=sdk_submit_operation_and_get_result(looper sdk_pool_handle sdk_wallet_endorser get_schema_operation)<assert_stmt>SCHEMA_ATTR_NAMES<not><in>result[DATA]<line_sep>check_valid_proof(result)<block_end><def_stmt>test_state_proof_returned_for_missing_claim_def looper nodeSetWithOneNodeResponding sdk_pool_handle sdk_wallet_endorser<block_start>"""
Tests that state proof is returned in the reply for GET_CLAIM_DEF
transactions
"""<line_sep>_,dest=sdk_wallet_endorser<line_sep>get_claim_def_operation={CLAIM_DEF_FROM:dest TXN_TYPE:GET_CLAIM_DEF CLAIM_DEF_SCHEMA_REF:12 CLAIM_DEF_SIGNATURE_TYPE:'CL'}<line_sep>result=sdk_submit_operation_and_get_result(looper sdk_pool_handle sdk_wallet_endorser get_claim_def_operation)<line_sep>check_no_data_and_valid_proof(result)<block_end>@pytest.mark.skip# TODO fix this test so it does not rely on Indy-SDK,
# or, fix this test once Rich Schema objects are part of Indy-SDK
<def_stmt>test_state_proof_returned_for_missing_get_rich_schema_obj_by_id looper nodeSetWithOneNodeResponding sdk_wallet_endorser sdk_pool_handle sdk_wallet_client<block_start>"""
Tests that state proof is returned in the reply for GET_RICH_SCHEMA_OBJECT_BY_ID.
Use different submitter and reader!
"""<line_sep>rs_id=randomString()<line_sep>get_rich_schema_by_id_operation={TXN_TYPE:GET_RICH_SCHEMA_OBJECT_BY_ID RS_ID:rs_id }<line_sep>result=sdk_submit_operation_and_get_result(looper sdk_pool_handle sdk_wallet_client get_rich_schema_by_id_operation)<line_sep>check_no_data_and_valid_proof(result)<block_end>@pytest.mark.skip# TODO fix this test so it does not rely on Indy-SDK,
# or, fix this test once Rich Schema objects are part of Indy-SDK
@pytest.mark.parametrize('rs_type' [RS_CONTEXT_TYPE_VALUE RS_SCHEMA_TYPE_VALUE RS_ENCODING_TYPE_VALUE RS_MAPPING_TYPE_VALUE RS_CRED_DEF_TYPE_VALUE RS_PRES_DEF_TYPE_VALUE])<def_stmt>test_state_proof_returned_for_missing_get_rich_schema_obj_by_metadata looper nodeSetWithOneNodeResponding sdk_wallet_endorser sdk_pool_handle sdk_wallet_client rs_type<block_start>"""
Tests that state proof is returned in the reply for GET_RICH_SCHEMA_OBJECT_BY_METADATA.
Use different submitter and reader!
"""<line_sep>rs_name=randomString()<line_sep>rs_version='1.0'<line_sep>get_rich_schema_by_metadata_operation={TXN_TYPE:GET_RICH_SCHEMA_OBJECT_BY_METADATA RS_NAME:rs_name RS_VERSION:rs_version RS_TYPE:rs_type}<line_sep>result=sdk_submit_operation_and_get_result(looper sdk_pool_handle sdk_wallet_client get_rich_schema_by_metadata_operation)<line_sep>check_no_data_and_valid_proof(result)<block_end>
|
"""
The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""<import_from_stmt>typing Union<import_from_stmt>objects.components.ownable_component OwnableComponent<import_from_stmt>objects.game_object GameObject<import_from_stmt>server_commands.argument_helpers OptionalTargetParam<import_from_stmt>sims.sim_info SimInfo<import_from_stmt>sims4.commands Command CommandType CheatOutput<import_from_stmt>sims4communitylib.exceptions.common_exceptions_handler CommonExceptionHandler<import_from_stmt>sims4communitylib.modinfo ModInfo<import_from_stmt>sims4communitylib.utils.sims.common_sim_name_utils CommonSimNameUtils<import_from_stmt>sims4communitylib.utils.sims.common_sim_utils CommonSimUtils<class_stmt>CommonObjectOwnershipUtils<block_start>"""Utilities for manipulating the ownership of Objects.
"""<line_sep>@staticmethod<def_stmt>set_owning_household_id game_object:GameObject household_id:int<arrow>bool<block_start>"""set_owning_household_id(game_object, household_id)
Set the Household that owns the Object.
:param game_object: An instance of an Object.
:type game_object: GameObject
:param household_id: The decimal identifier of a Household.
:type household_id: int
:return: True, if the Household was successfully set as the owner. False, if not.
:rtype: bool
"""<if_stmt>game_object<is><none><or>household_id<eq>-1<block_start><return><false><block_end>game_object.set_household_owner_id(household_id)<line_sep><return><true><block_end>@staticmethod<def_stmt>get_owning_household_id game_object:GameObject<arrow>int<block_start>"""get_owning_household_id(game_object)
Retrieve the decimal identifier of the Household that owns the Object.
:param game_object: An instance of an Object.
:type game_object: GameObject
:return: The decimal identifier of the Household that owns the object.
:rtype: int
"""<if_stmt>game_object<is><none><block_start><return>-1<block_end><return>game_object.get_household_owner_id()<block_end>@staticmethod<def_stmt>set_owning_sim game_object:GameObject sim_info:SimInfo make_sim_sole_owner:bool=<true><arrow>bool<block_start>"""set_owning_sim(game_object, sim_info, make_sim_sole_owner=True)
Change the ownership of an Object to become owned by the household of a Sim and optionally by the Sim themselves.
:param game_object: An instance of an Object.
:type game_object: GameObject
:param sim_info: An instance of a Sim.
:type sim_info: SimInfo
:param make_sim_sole_owner: If True, the Sim will become the sole owner in their household of the Object (In addition to the household owning it). If False, only the household will own the Object. Default is True.
:type make_sim_sole_owner: bool, optional
:return: True, if ownership was transferred successfully. False, if not.
:rtype: bool
"""<if_stmt>game_object<is><none><or>sim_info<is><none><block_start><return><false><block_end>sim=CommonSimUtils.get_sim_instance(sim_info)<if_stmt>sim<is><none><block_start><return><false><block_end>game_object.update_ownership(sim make_sim_owner=make_sim_sole_owner)<line_sep><return><true><block_end>@staticmethod<def_stmt>get_owning_sim game_object:GameObject<arrow>Union[SimInfo <none>]<block_start>"""get_owning_sim(game_object)
Retrieve the Sim that owns an Object, if a Sim owns the Object.
:param game_object: An instance of an Object.
:type game_object: GameObject
:return: The SimInfo of the Sim that owns the specified Object or None if no Sim owns the Object.
:rtype: Union[SimInfo, None]
"""<if_stmt>game_object<is><none><block_start><return><none><block_end>ownable_component:OwnableComponent=CommonObjectOwnershipUtils.get_ownable_component(game_object)<if_stmt>ownable_component<is><none><block_start><return><none><block_end><return>CommonSimUtils.get_sim_info(ownable_component.get_sim_owner_id())<block_end>@staticmethod<def_stmt>get_ownable_component game_object:GameObject<arrow>Union[OwnableComponent <none>]<block_start>"""get_ownable_component(game_object)
Retrieve the Ownable Component of an Object if it has one.
:param game_object: An instance of an Object.
:type game_object: GameObject
:return: The OwnableComponent of the Object or None if no OwnableComponent was found.
:rtype: Union[OwnableComponent, None]
"""<if_stmt>game_object<is><none><block_start><return><none><block_end><if_stmt><not>hasattr(game_object 'ownable_component')<block_start><return><none><block_end><return>game_object.ownable_component<block_end><block_end>@Command('s4clib.change_ownership' command_type=CommandType.Live)<def_stmt>_common_change_ownership object_id:str='20359' opt_sim:OptionalTargetParam=<none> _connection:int=<none><block_start><import_from_stmt>server_commands.argument_helpers get_optional_target<line_sep>output=CheatOutput(_connection)<line_sep>sim_info=CommonSimUtils.get_sim_info(get_optional_target(opt_sim _connection))<if_stmt>sim_info<is><none><block_start>output('Failed, no Sim was specified or the specified Sim was not found!')<line_sep><return><block_end># noinspection PyBroadException
<try_stmt><block_start>object_id=int(object_id)<block_end><except_stmt>Exception<block_start>output('ERROR: object_id must be a number.')<line_sep><return><block_end><if_stmt>object_id<l>0<block_start>output('ERROR: object_id must be a positive number.')<line_sep><return><block_end>output('Attempting to change the ownership of object with id \'{}\'.'.format(object_id))<import_from_stmt>sims4communitylib.utils.objects.common_object_utils CommonObjectUtils<line_sep>game_object=CommonObjectUtils.get_game_object(object_id)<if_stmt>game_object<is><none><block_start>output('ERROR: No object was found with id \'{}\''.format(object_id))<line_sep><return><block_end>output('Object found, attempting to change the ownership of it to {}. {}'.format(CommonSimNameUtils.get_full_name(sim_info) game_object))<try_stmt><block_start><if_stmt>CommonObjectOwnershipUtils.set_owning_sim(game_object sim_info make_sim_sole_owner=<true>)<block_start>output('Successfully changed the ownership of the object.')<block_end><else_stmt><block_start>output('ERROR: Failed to change the ownership of the object.')<block_end><block_end><except_stmt>Exception<as>ex<block_start>output('ERROR: A problem occurred while attempting to change the ownership of the object. {}'.format(object_id))<line_sep>CommonExceptionHandler.log_exception(ModInfo.get_identity() 'Error occurred attempting to change the ownership of object. {}'.format(object_id) exception=ex)<block_end>output('Done changing the ownership of the object.')<block_end>
|
# zwei 12/03/13
# subscript a list by index
<import_stmt>time<def_stmt>index_list ll num<block_start>ll_len=len(ll)<line_sep>item=0<for_stmt>t range(num)<block_start><for_stmt>i range(ll_len)<block_start>item=(item+ll[i])%7<block_end><block_end><return>item<block_end><def_stmt>measure <block_start>print("Start timing...")<line_sep>start=time.time()<line_sep>ll=[x<times>2<for>x range(1000)]<line_sep>last_item=index_list(ll 1000000)#1000000
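# index_list walks the list num times, folding each element into a running value modulo 7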
print("Last item " last_item)<line_sep>duration="%.3f\n"%(time.time()-start)<line_sep>print("list-indexing: "+duration)<block_end>#warm up
<for_stmt>run range(1200)<block_start>index_list([1 2 3 4 5 6 7 8 9 10] 10000)<block_end>measure()<line_sep>
|
<import_from_stmt>requests post<line_sep>headers={'ipNum':'5'}<line_sep>payload={'0':'1.1.1.1:8080' '1':'2.2.2.2:8080' '2':'2.2.2.2:8080' '3':'2.2.2.2:8080' '4':'2.2.2.2:8080' }<line_sep>response=post(url='http://127.0.0.1:8999/main' headers=headers json=payload)<line_sep><pass><line_sep>
|
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 <NAME>, <NAME>
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""<import_from_stmt>sleekxmpp.plugins.base register_plugin<import_from_stmt>sleekxmpp.plugins.xep_0059.stanza Set<import_from_stmt>sleekxmpp.plugins.xep_0059.rsm ResultIterator XEP_0059<line_sep>register_plugin(XEP_0059)<line_sep># Retain some backwards compatibility
xep_0059=XEP_0059<line_sep>
|
<import_stmt>pytest<import_from_stmt>dissect.cstruct.expression Expression<line_sep>testdata=[('1 * 0' 0) ('1 * 1' 1) ('7 * 8' 56) ('7*8' 56) ('7 *8' 56) (' 7 * 8 ' 56) ('0 / 1' 0) ('1 / 1' 1) ('2 / 2' 1) ('3 / 2' 1) ('4 / 2' 2) ('1 % 1' 0) ('1 % 2' 1) ('5 % 3' 2) ('0 + 0' 0) ('1 + 0' 1) ('1 + 3' 4) ('0 - 0' 0) ('1 - 0' 1) ('0 - 1' -1) ('1 - 3' -2) ('3 - 1' 2) ('0x0 >> 0' 0x0) ('0x1 >> 0' 0x1) ('0x1 >> 1' 0x0) ('0xf0 >> 4' 0xf) ('0x0 << 4' 0) ('0x1 << 0' 1) ('0xf << 4' 0xf0) ('0 & 0' 0) ('1 & 0' 0) ('1 & 1' 1) ('1 & 2' 0) ('1 ^ 1' 0) ('1 ^ 0' 1) ('1 ^ 3' 2) ('0 | 0' 0) ('0 | 1' 1) ('1 | 1' 1) ('1 | 2' 3) # This type of expression is not supported by the parser and will fail.
# ('4 * 1 + 1', 5),
('-42' -42) ('42 + (-42)' 0) ('A + 5' 13) ('21 - B' 8) ('A + B' 21) ]<class_stmt>Consts(object)<block_start>consts={'A':8 'B':13 }<block_end><def_stmt>id_fn val<block_start><if_stmt>isinstance(val (str ))<block_start><return>val<block_end><block_end>@pytest.mark.parametrize('expression, answer' testdata ids=id_fn)<def_stmt>test_expression expression answer<block_start>parser=Expression(Consts() expression)<assert_stmt>parser.evaluate()<eq>answer<block_end>
|
<import_from_stmt>gym.spaces Discrete<class_stmt>Discretized(Discrete)<block_start><def_stmt>__init__ self n min_action max_action<block_start>super().__init__(n)<line_sep>self.min_action=min_action<line_sep>self.max_action=max_action<block_end><def_stmt>to_continuous self discrete_action<block_start>step=(self.max_action-self.min_action)/(self.n-1)# min=-1, max=1, n=11, step=0.2
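# linear mapping from the discrete index to the continuous range: index 0 -> min_action, index n-1 -> max_action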
action=self.min_action+discrete_action<times>step<line_sep><return>action<block_end><block_end>
|
<import_stmt>os<line_sep># Environment variables (required for each respective report)
ALGO_HIST_INDEXER_NODE=os.environ.get("ALGO_HIST_INDEXER_NODE" "https://indexer.algoexplorerapi.io")<line_sep>ALGO_INDEXER_NODE=os.environ.get("ALGO_INDEXER_NODE" "https://algoindexer.algoexplorerapi.io")<line_sep>ALGO_NFDOMAINS=os.environ.get("ALGO_NFDOMAINS" "https://api.nf.domains")<line_sep>ATOM_NODE=os.environ.get("ATOM_NODE" "")<line_sep>COVALENT_NODE=os.environ.get("COVALENT_NODE" "https://api.covalenthq.com")<line_sep>DVPN_LCD_NODE=os.environ.get("DVPN_LCD_NODE" "https://lcd.sentinel.co")<line_sep>DVPN_RPC_NODE=os.environ.get("DVPN_RPC_NODE" "https://rpc.sentinel.co")<line_sep>FET_NODE=os.environ.get("FET_NODE" "https://rest-fetchhub.fetch.ai")<line_sep>HUAHUA_NODE=os.environ.get("HUAHUA_NODE" "")<line_sep>JUNO_NODE=os.environ.get("JUNO_NODE" "")<line_sep>BTSG_NODE=os.environ.get("BTSG_NODE" "https://lcd.explorebitsong.com")<line_sep>STARS_NODE=os.environ.get("STARS_NODE" "")<line_sep>SOL_NODE=os.environ.get("SOL_NODE" "")<line_sep>TERRA_LCD_NODE=os.environ.get("TERRA_LCD_NODE" "")<line_sep>LUNA2_LCD_NODE=os.environ.get("LUNA2_LCD_NODE" "https://phoenix-lcd.terra.dev")<line_sep># Optional environment variables
COVALENT_API_KEY=os.environ.get("COVALENT_API_KEY" "")<line_sep># #############################################################################
TICKER_ALGO="ALGO"<line_sep>TICKER_ATOM="ATOM"<line_sep>TICKER_DVPN="DVPN"<line_sep>TICKER_FET="FET"<line_sep>TICKER_HUAHUA="HUAHUA"<line_sep>TICKER_IOTEX="IOTX"<line_sep>TICKER_JUNO="JUNO"<line_sep>TICKER_BTSG="BTSG"<line_sep>TICKER_STARS="STARS"<line_sep>TICKER_LUNA1="LUNA1"<line_sep>TICKER_LUNA2="LUNA2"<line_sep>TICKER_OSMO="OSMO"<line_sep>TICKER_SOL="SOL"<line_sep>DONATION_WALLETS=set([os.environ.get("DONATION_WALLET_ALGO" "") os.environ.get("DONATION_WALLET_ATOM" "") os.environ.get("DONATION_WALLET_FET" "") os.environ.get("DONATION_WALLET_HUAHUA" "") os.environ.get("DONATION_WALLET_IOTX" "") os.environ.get("DONATION_WALLET_JUNO" "") os.environ.get("DONATION_WALLET_BTSG" "") os.environ.get("DONATION_WALLET_STARS" "") os.environ.get("DONATION_WALLET_LUNA" "") os.environ.get("DONATION_WALLET_OSMO" "") os.environ.get("DONATION_WALLET_SOL" "") ])<line_sep>MESSAGE_ADDRESS_NOT_FOUND="Wallet address not found"<line_sep>MESSAGE_STAKING_ADDRESS_FOUND="Staking address found. Please input the main wallet address instead."<line_sep>REPORTS_DIR=os.path.dirname(os.path.realpath(__file__))+"/_reports"<line_sep>
|
# coding: utf-8
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""sequence labeling model
"""<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_from_future_stmt> unicode_literals<import_from_future_stmt> absolute_import<import_stmt>os<import_stmt>time<import_stmt>json<import_stmt>argparse<import_stmt>numpy<as>np<import_stmt>multiprocessing<import_stmt>paddle<import_stmt>logging<import_stmt>paddle.fluid<as>fluid<import_from_stmt>six.moves xrange<import_from_stmt>model.ernie ErnieModel<line_sep>log=logging.getLogger(__name__)<def_stmt>create_model args pyreader_name ernie_config is_prediction=<false><block_start>"""func"""<line_sep>pyreader=fluid.layers.py_reader(capacity=50 shapes=[[-1 args.max_seq_len 1] [-1 args.max_seq_len 1] [-1 args.max_seq_len 1] [-1 args.max_seq_len 1] [-1 args.max_seq_len 1] [-1 args.max_seq_len 1] [-1]] dtypes=['int64' 'int64' 'int64' 'int64' 'float32' 'int64' 'int64'] lod_levels=[0 0 0 0 0 0 0] name=pyreader_name use_double_buffer=<true>)<line_sep>(src_ids sent_ids pos_ids task_ids input_mask labels seq_lens)=fluid.layers.read_file(pyreader)<line_sep>ernie=ErnieModel(src_ids=src_ids position_ids=pos_ids sentence_ids=sent_ids task_ids=task_ids input_mask=input_mask config=ernie_config use_fp16=args.use_fp16)<line_sep>enc_out=ernie.get_sequence_output()<line_sep>emission=fluid.layers.fc(input=enc_out size=args.num_labels param_attr=fluid.ParamAttr(initializer=fluid.initializer.Uniform(low=-0.1 high=0.1) regularizer=fluid.regularizer.L2DecayRegularizer(regularization_coeff=1e-4)) num_flatten_dims=2)<line_sep>crf_cost=fluid.layers.linear_chain_crf(input=emission label=labels param_attr=fluid.ParamAttr(name='crfw' learning_rate=args.crf_learning_rate) length=seq_lens)<line_sep>loss=fluid.layers.mean(x=crf_cost)<line_sep>crf_decode=fluid.layers.crf_decoding(input=emission param_attr=fluid.ParamAttr(name='crfw') length=seq_lens)<line_sep>lod_labels=fluid.layers.squeeze(labels axes=[-1])<line_sep>num_chunk_types=((args.num_labels-1)<floordiv>(len(args.chunk_scheme)-1))<line_sep># IOB配置
(_ _ _ num_infer num_label num_correct)=fluid.layers.chunk_eval(input=crf_decode label=lod_labels chunk_scheme=args.chunk_scheme num_chunk_types=num_chunk_types seq_length=seq_lens)<line_sep>"""
enc_out = fluid.layers.dropout(x=enc_out,
dropout_prob=0.1,
dropout_implementation="upscale_in_train")
logits = fluid.layers.fc(
input=enc_out,
size=args.num_labels,
num_flatten_dims=2,
param_attr=fluid.ParamAttr(
name="cls_seq_label_out_w",
initializer=fluid.initializer.TruncatedNormal(scale=0.02)),
bias_attr=fluid.ParamAttr(name="cls_seq_label_out_b",
initializer=fluid.initializer.Constant(0.)))
infers = fluid.layers.argmax(logits, axis=2)
ret_infers = fluid.layers.reshape(x=infers, shape=[-1, 1])
lod_labels = fluid.layers.sequence_unpad(labels, seq_lens)
lod_infers = fluid.layers.sequence_unpad(infers, seq_lens)
num_chunk_types = (
(args.num_labels - 1) // (len(args.chunk_scheme) - 1)) # IOB配置
(_, _, _, num_infer, num_label,
num_correct) = fluid.layers.chunk_eval(input=lod_infers,
label=lod_labels,
chunk_scheme=args.chunk_scheme,
num_chunk_types=num_chunk_types)
labels = fluid.layers.flatten(labels, axis=2)
ce_loss, probs = fluid.layers.softmax_with_cross_entropy(
logits=fluid.layers.flatten(logits, axis=2),
label=labels,
return_softmax=True)
input_mask = fluid.layers.flatten(input_mask, axis=2)
ce_loss = ce_loss * input_mask
loss = fluid.layers.mean(x=ce_loss)
"""<line_sep>graph_vars={"inputs":src_ids "loss":loss "seqlen":seq_lens "crf_decode":crf_decode "num_infer":num_infer "num_label":num_label "num_correct":num_correct }<for_stmt>k,v graph_vars.items()<block_start>v.persistable=<true><block_end><return>pyreader graph_vars<block_end><def_stmt>calculate_f1 num_label num_infer num_correct<block_start>"""calculate_f1"""<if_stmt>num_infer<eq>0<block_start>precision=0.0<block_end><else_stmt><block_start>precision=num_correct<times>1.0/num_infer<block_end><if_stmt>num_label<eq>0<block_start>recall=0.0<block_end><else_stmt><block_start>recall=num_correct<times>1.0/num_label<block_end><if_stmt>num_correct<eq>0<block_start>f1=0.0<block_end><else_stmt><block_start>f1=2<times>precision<times>recall/(precision+recall)<block_end><return>precision recall f1<block_end><def_stmt>evaluate exe program pyreader graph_vars tag_num dev_count=1<block_start>"""func"""<line_sep>fetch_list=[graph_vars["num_infer"].name graph_vars["num_label"].name graph_vars["num_correct"].name]<line_sep>total_label,total_infer,total_correct=0.0 0.0 0.0<line_sep>time_begin=time.time()<line_sep>pyreader.start()<while_stmt><true><block_start><try_stmt><block_start>np_num_infer,np_num_label,np_num_correct=exe.run(program=program fetch_list=fetch_list)<line_sep>total_infer<augadd>np.sum(np_num_infer)<line_sep>total_label<augadd>np.sum(np_num_label)<line_sep>total_correct<augadd>np.sum(np_num_correct)<block_end><except_stmt>fluid.core.EOFException<block_start>pyreader.reset()<line_sep><break><block_end><block_end>precision,recall,f1=calculate_f1(total_label total_infer total_correct)<line_sep><return>precision recall f1<block_end><def_stmt>parse_crf_ret np_inputs crf_decodes np_lens<block_start>"""parse_crf_ret"""<line_sep>np_inputs=np.squeeze(np_inputs)<line_sep>out=[]<for_stmt>index range(len(np_lens))<block_start>src_ids=[_id<for>_id np_inputs[index][1:np_lens[index]-1]]<line_sep>tag_ids=[_id<for>_id crf_decodes[index][1:np_lens[index]-1]]<line_sep>out.append((list(src_ids) list(tag_ids)))<block_end><return>out<block_end><def_stmt>predict exe test_program test_pyreader graph_vars dev_count=1<block_start>"""func"""<line_sep>fetch_list=[graph_vars["inputs"].name graph_vars["crf_decode"].name graph_vars["seqlen"].name ]<line_sep>test_pyreader.start()<line_sep>res=[]<while_stmt><true><block_start><try_stmt><block_start>inputs,crf_decodes,np_lens=exe.run(program=test_program fetch_list=fetch_list)<line_sep>#r = chunk_predict(inputs, probs, np_lens, dev_count)
res<augadd>parse_crf_ret(inputs crf_decodes np_lens)<block_end><except_stmt>fluid.core.EOFException<block_start>test_pyreader.reset()<line_sep><break><block_end><block_end><return>res<block_end>
|
# Tencent is pleased to support the open source community by making IoTHunter available.
# Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
# Licensed under the MIT License (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://opensource.org/licenses/MIT
#
# Unless required by applicable law or agreed to in writing, software distributed under the
# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
<import_stmt>subprocess<import_stmt>time<import_stmt>logging<import_stmt>os<import_stmt>re<import_stmt>json<import_stmt>shutil<import_stmt>hashlib<import_stmt>VMControl<import_stmt>ConfigManager<class_stmt>DynamicBehaviors(object)<block_start>"""Dynamic Behaviors Class"""<def_stmt>__init__ self<block_start>self.filename=''<line_sep>self.md5_hash=''<line_sep>self.file_log={'read':[] 'write':[] 'open':[] 'unlink':[]}<line_sep>self.socket_log={'connect':[] 'recvfrom':[] 'sendto':[] 'bind':[]}<line_sep>self.tcp_log=[]<line_sep>self.http_log=[]<line_sep>self.udp_log=[]<line_sep>self.irc_log=[]<line_sep>self.dns_log=[]<line_sep>self.file_read_data={}<line_sep>self.recvfrom_data={}<line_sep>self.plugins_result={}<line_sep>self.proc_log={'execve':[] 'clone':[]}<line_sep>self.packets=[]<block_end><def_stmt>to_report self<block_start>report={'md5_hash':self.md5_hash 'filename':self.filename 'file_log':self.file_log 'socket_log':self.socket_log 'file_read_data':self.file_read_data 'recvfrom_data':self.recvfrom_data 'tcp_info':self.tcp_log 'udp_info':self.udp_log 'http_info':self.http_log 'irc_info':self.irc_log 'dns_info':self.dns_log 'plugin_info':self.plugins_result 'proc_info':self.proc_log 'packets_info':self.packets}<line_sep><return>json.dumps(report indent=4)<block_end><block_end><class_stmt>DynamicAnalyzer<block_start>"""Dynamic Analyzer for Iot Malware
"""<def_stmt>__init__ self<block_start>self.strace_log=[]<line_sep>self.analyze_timeout=10<block_end><def_stmt>init_vm self<block_start>"""init vm controller configuration. """<line_sep>self.vm_control=VMControl.VMController()<line_sep>self.vm_control.init_config()<line_sep>vmc=ConfigManager.ConfigManager()<line_sep>self.strace_log_max_lines=vmc.get('analyzer' 'max_strace_lines')<line_sep>self.strace_log_path=vmc.get('analyzer' 'strace_log_path')<line_sep>self.tshark_path=vmc.get('analyzer' 'tshark_path')<line_sep>self.tcpdump_log_path=vmc.get('analyzer' 'host_log_tcpdump')<block_end><def_stmt>set_log_path self logpath<block_start>"""set log path"""<line_sep>self.log_path=logpath<block_end><def_stmt>parse_strace_log self log_path<block_start>"""parse strace log."""<line_sep>line_count=0<line_sep>self.strace_log_path=log_path<with_stmt>open(self.strace_log_path 'r')<as>log_file<block_start><for_stmt>line log_file.readlines()<block_start>self.strace_log.append(line)<line_sep>line_count=line_count+1<if_stmt>line_count<ge>20000<block_start><break><block_end><block_end><block_end><block_end><def_stmt>parse_proc_log self behavior_obj<block_start><for_stmt>line self.strace_log<block_start><if_stmt>'execve('<in>line<block_start>behavior_obj.proc_log['execve'].append(line[line.find('execve'):-1])<block_end><if_stmt>'clone('<in>line<block_start>behavior_obj.proc_log['clone'].append(line[line.find('clone'):-1])<block_end><block_end><block_end><def_stmt>parse_file_log self behavior_obj<block_start>"""Parse file related log from strace."""<for_stmt>line self.strace_log# if 'read(' in line:
# behavior_obj.file_log['read'].append(
# line[line.find('read'):-1])
<block_start><if_stmt>'openat('<in>line<block_start>behavior_obj.file_log['open'].append(line[line.find('openat'):-1])<block_end><if_stmt>'unlink('<in>line<block_start>behavior_obj.file_log['unlink'].append(line[line.find('unlink'):-1])<block_end># if 'write(' in line:
# behavior_obj.file_log['write'].append(
# line[line.find('write'):-1])
<block_end><block_end><def_stmt>format_recvfrom_str self line recefrom_data<block_start>"""format recvfrom function string."""<line_sep>read_data_pattern=re.compile(r'recvfrom\(.+,.+,.+,.+,.+\)')<line_sep>read_func_find=read_data_pattern.search(line)<if_stmt>read_func_find<block_start>read_func_str=read_func_find.group(0)<line_sep>fd=read_func_str.split(',')[0][9:]<line_sep>read_byte=read_func_str.split(',')[1][2:-1]<if_stmt>fd<ne>''<and>read_byte<ne>' '<block_start><if_stmt>recefrom_data.has_key(fd)<block_start>recefrom_data[fd]=recefrom_data[fd]+read_byte<block_end><else_stmt><block_start>recefrom_data[fd]=read_byte<block_end><block_end><block_end><block_end><def_stmt>format_read_str self line file_read_data<block_start>"""format read function args."""<line_sep>read_data_pattern=re.compile(r'read\(.+,.+,.+\)')<line_sep>read_func_find=read_data_pattern.search(line)<if_stmt>read_func_find<block_start>read_func_str=read_func_find.group(0)<line_sep>fd=read_func_str.split(',')[0][5:]<line_sep>read_byte=read_func_str.split(',')[1][2:-1]<if_stmt>fd<ne>''<and>read_byte<ne>' '<block_start><if_stmt>file_read_data.has_key(fd)<block_start>file_read_data[fd]=file_read_data[fd]+read_byte<block_end><else_stmt><block_start>file_read_data[fd]=read_byte<block_end><block_end><block_end><block_end><def_stmt>parse_file_read_data self behavior_obj<block_start>"""parse file data from strace."""<for_stmt>line self.strace_log<block_start><if_stmt>'read('<in>line<block_start>self.format_read_str(line behavior_obj.file_read_data)<block_end><block_end><block_end><def_stmt>parse_recvfrom_data self behavior_obj<block_start>"""parse recvfrom data ."""<for_stmt>line self.strace_log<block_start><if_stmt>'recvfrom('<in>line<block_start>self.format_recvfrom_str(line behavior_obj.recvfrom_data)<block_end><block_end><block_end><def_stmt>parse_socket_log self behavior_obj<block_start>"""parse socket related log from starce."""<for_stmt>line self.strace_log<block_start><if_stmt>'connect('<in>line<block_start>behavior_obj.socket_log['connect'].append(self.parse_ip_port(line))<block_end><if_stmt>'bind('<in>line<block_start>behavior_obj.socket_log['bind'].append(self.parse_ip_port(line))<block_end><if_stmt>'sendto('<in>line<block_start>behavior_obj.socket_log['sendto'].append({'port':self.parse_ip_port(line)['port'] 'addr':self.parse_ip_port(line)['addr'] 'info':line[line.find('sendto'):-1]})<block_end><if_stmt>'recvfrom('<in>line<block_start>behavior_obj.socket_log['recvfrom'].append({'port':self.parse_ip_port(line)['port'] 'addr':self.parse_ip_port(line)['addr'] 'info':line[line.find('recvfrom'):-1]})<block_end><block_end><block_end><def_stmt>parse_ip_port self log_str<block_start>"""parse ip port from socket log."""<line_sep>connect_info={'port':'' 'addr':''}<line_sep>port_pattern=re.compile(r'sin_port=htons\(\d+\)')<line_sep>addr_pattern=re.compile(r'inet_addr\(".+"\)')<line_sep>port_result=port_pattern.search(log_str)<if_stmt>port_result<block_start>connect_info['port']=port_result.group(0)[15:-1]<block_end>addr_result=addr_pattern.search(log_str)<if_stmt>addr_result<block_start>connect_info['addr']=addr_result.group(0)[11:-2]<block_end><return>connect_info<block_end><def_stmt>fetch_strace_log self guest_vm<block_start>"""fetch strace log from guset os."""<if_stmt>os.path.isfile(self.strace_log_path)<block_start>os.remove(self.strace_log_path)<block_end>self.vm_control.vm_copyfrom(guest_vm.name guest_vm.vm_log_path guest_vm.host_log_path guest_vm.user guest_vm.password)<block_end><def_stmt>fetch_tcpdump_log self 
guest_vm<block_start>"""fetch tcpdump log from guest os."""<if_stmt>os.path.isfile(self.tcpdump_log_path)<block_start>os.remove(self.tcpdump_log_path)<block_end>self.vm_control.vm_copyfrom(guest_vm.name guest_vm.vm_log_tcpdump guest_vm.host_log_tcpdump guest_vm.user guest_vm.password)<block_end><def_stmt>get_analyze_file_md5 self filepath<block_start><try_stmt><block_start>m=hashlib.md5(open(filepath 'rb').read())<line_sep><return>m.hexdigest()<block_end><except_stmt>Exception<as>e<block_start><return>''<line_sep>logger.error("get file md5 error." exc_info=<true>)<block_end><block_end><def_stmt>analyze_file self filepath<block_start>"""main analyze function. """<line_sep>self.strace_log=[]<line_sep>guest_vm=VMControl.GuestVM()<line_sep>guest_vm.init_config()<line_sep>self.init_vm()<line_sep># calculate md5
self.md5_hash=self.get_analyze_file_md5(filepath)<line_sep># get guest analyzer path
file_root=os.path.dirname(__file__)<line_sep>guest_analyzer_path=os.path.join(file_root 'GuestAnalyzer.py')<line_sep>file_name=os.path.split(filepath)[1]<line_sep>self.file_name=file_name<if_stmt>self.vm_control.start_vm(guest_vm.name)<eq><false><block_start>logging.error('Start Guest VM Failed')<block_end>self.vm_control.vm_copyto(guest_vm.name filepath guest_vm.runpath guest_vm.user guest_vm.password)<line_sep>self.vm_control.vm_copyto(guest_vm.name guest_analyzer_path guest_vm.runpath guest_vm.user guest_vm.password)<line_sep>self.vm_control.vm_guest_run(guest_vm.name '/bin/chmod' ' +x %s/%s'%(guest_vm.runpath file_name) guest_vm.user guest_vm.password)<line_sep>self.vm_control.vm_guest_run(guest_vm.name '/usr/bin/python' '%s/GuestAnalyzer.py %s/%s'%(guest_vm.guest_analyzer_path guest_vm.guest_analyzer_path file_name) guest_vm.user guest_vm.password)<line_sep>time.sleep(10)<line_sep>self.fetch_strace_log(guest_vm)<line_sep>self.fetch_tcpdump_log(guest_vm)<line_sep>self.vm_control.control_vm(guest_vm.name 'poweroff')<line_sep>time.sleep(5)<line_sep>self.vm_control.vm_snap_control(guest_vm.name 'restore' 'analysis')<block_end><def_stmt>do_log_parse self behaviors<block_start>"""main log parse function."""<line_sep>behaviors.md5_hash=self.md5_hash<line_sep>behaviors.file_name=self.file_name<line_sep># parse strace log
<if_stmt>os.path.isfile(self.strace_log_path)<block_start>self.parse_strace_log(self.strace_log_path)<line_sep>os.remove(self.strace_log_path)<block_end>self.parse_socket_log(behaviors)<line_sep>self.parse_file_log(behaviors)<line_sep>self.parse_proc_log(behaviors)<line_sep>self.parse_recvfrom_data(behaviors)<line_sep>self.parse_file_read_data(behaviors)<line_sep># parse tcpdump info
<if_stmt>os.path.isfile(self.tcpdump_log_path)<block_start>self.parse_tcpdump_log(behaviors)<line_sep>os.remove(self.tcpdump_log_path)<block_end><block_end><def_stmt>parse_tcpdump_log self behaviors<block_start>"""parse tcpdump pcap file. """<if_stmt>os.path.isfile(self.tcpdump_log_path)<block_start>behaviors.tcp_log=self.tcp_info(self.tcpdump_log_path)<line_sep>behaviors.http_log=self.http_info(self.tcpdump_log_path)<line_sep>behaviors.udp_log=self.udp_info(self.tcpdump_log_path)<line_sep>behaviors.dns_log=self.dns_info(self.tcpdump_log_path)<line_sep>behaviors.irc_log=self.irc_info(self.tcpdump_log_path)<line_sep>behaviors.packets=self.packets_info(self.tcpdump_log_path)<block_end><block_end><def_stmt>packets_info self tcpdumpfile<block_start>cmd=[self.tshark_path '-n' '-ta' '-r' tcpdumpfile]<line_sep>cmd_output=self.check_output_safe(cmd)<line_sep>packet_list=[]<for_stmt>line cmd_output.splitlines()<block_start>packet_list.append(line.strip().replace('\xe2\x86\x92 ' ' '))<block_end><return>packet_list<block_end><def_stmt>check_output_safe self cmd<block_start>output=""<try_stmt><block_start>output=subprocess.check_output(cmd)<block_end><except_stmt>subprocess.CalledProcessError<as>e<block_start>logging.error("CalledProcessError: %s" str(e))<line_sep>output=e.output<block_end><return>output<block_end><def_stmt>filter_packets_by_protocal self tcpdumpfile protocal<block_start>"""use tshark to analyze tcpdump pcap file"""<if_stmt>os.path.isfile(tcpdumpfile)<block_start>cmd=[self.tshark_path '-Tjson' '-n' '-ta' '-r' tcpdumpfile protocal]<line_sep>cmd_output=self.check_output_safe(cmd)<line_sep>json_data=json.loads(cmd_output)<line_sep>packet_list=[]<for_stmt>line json_data<block_start>packet_data={}<if_stmt>'ip'<in>line['_source']['layers'].keys()<block_start>packet_data['ip.src']=line['_source']['layers']['ip']['ip.src']<line_sep>packet_data['ip.dst']=line['_source']['layers']['ip']['ip.dst']<block_end><if_stmt>protocal<eq>'irc'<block_start>irc_info=line['_source']['layers']['irc']<if_stmt>'irc.response'<in>irc_info.keys()<block_start>packet_data['irc.response']=irc_info['irc.response']<block_end><block_end><if_stmt>protocal<eq>'http'<block_start>http_info=line['_source']['layers']['http']<if_stmt>'http.host'<in>http_info.keys()<block_start>packet_data['http.host']=http_info['http.host']<block_end><if_stmt>'http.request'<in>http_info.keys()<block_start>packet_data['http.request']=http_info['http.request.full_uri']<block_end><block_end><if_stmt>protocal<eq>'dns'<block_start>packet_data.clear()<if_stmt>'dns'<in>line['_source']['layers'].keys()<block_start>dns_info=line['_source']['layers']['dns']<if_stmt>'Queries'<in>dns_info.keys()<block_start><for_stmt>dns_query dns_info['Queries'].values()<block_start>packet_data['dns_query']=dns_query['dns.qry.name']<block_end><block_end><block_end><block_end>packet_list.append(packet_data)<block_end><return>packet_list<block_end><block_end><def_stmt>tcp_info self tcpdumpfile<block_start>"""get tcp info"""<line_sep><return>self.filter_packets_by_protocal(tcpdumpfile 'tcp')<block_end><def_stmt>udp_info self tcpdumpfile<block_start>"""get udp info"""<line_sep><return>self.filter_packets_by_protocal(tcpdumpfile 'udp')<block_end><def_stmt>irc_info self tcpdumpfile<block_start><return>self.filter_packets_by_protocal(tcpdumpfile 'irc')<block_end><def_stmt>http_info self tcpdumpfile<block_start><return>self.filter_packets_by_protocal(tcpdumpfile 'http')<block_end><def_stmt>dns_info self 
tcpdumpfile<block_start>dns_packet=self.filter_packets_by_protocal(tcpdumpfile 'dns')<line_sep>dns_query_list=[]<for_stmt>line dns_packet<block_start><if_stmt>line['dns_query']<in>dns_query_list<block_start><continue><block_end>dns_query_list.append(line['dns_query'])<block_end><return>dns_query_list<block_end><block_end><if_stmt>__name__<eq>'__main__'# test code here
<block_start><pass><block_end>
|
# Copyright (C) 2012 Ion Torrent Systems, Inc. All Rights Reserved
# Sequence and read streaming utilities
<import_stmt>sys<import_stmt>time<import_stmt>pysam<line_sep># decorators section
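# the decorators below let stream functions accept a filename or file object, yield stripped/split values, and report progress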
<def_stmt>_use_and_del d k v<block_start>"""
Use a value from a dict and remove it.
"""<if_stmt>k<in>d<block_start>v=d[k]<del_stmt>d[k]<block_end><return>v<block_end><def_stmt>open_read_stream fn<block_start>"""
Take a string or file argument and pass it to the function as an open file.
"""<def_stmt>wrapped f *args **kargs<block_start><if_stmt>type(f)<is>file<block_start><return>fn(f *args **kargs)<block_end><elif_stmt>type(f)<is>str<block_start><return>fn(open(f 'r' 16384) *args **kargs)<block_end><else_stmt><block_start><raise>Exception(str(f)+' is not a string or file')<block_end><block_end><return>wrapped<block_end><def_stmt>value_stream fn<block_start>"""
Take an iterator that iterates over lines (e.g., from a file) and
return the lines stripped and split into values using the delimiter.
Skip blank lines and lines that start with characters in the
skip_lines string.
"""<def_stmt>wrapped f *args **kargs<block_start>skip_header=_use_and_del(kargs 'skip_header' <true>)<line_sep>skip_blank=_use_and_del(kargs 'skip_blank' <true>)<line_sep>skip_lines=_use_and_del(kargs 'skip_lines' '#')<line_sep>delimiter=_use_and_del(kargs 'delimiter' <none>)<line_sep>lines=fn(f *args **kargs)<if_stmt>skip_header<block_start>header=lines.next()<block_end><for_stmt>line lines<block_start>values=line.strip().split(delimiter)<if_stmt>skip_blank<and>len(values)<eq>0<block_start><continue><block_end><if_stmt>values[0][0]<in>skip_lines<block_start><continue><block_end><yield>values<block_end><block_end><return>wrapped<block_end><def_stmt>metered_stream fn<block_start>"""
Display a progress meter to standard out for a stream.
"""<def_stmt>wrapped *args **kargs<block_start>meter=_use_and_del(kargs 'meter' <true>)<line_sep>freq=_use_and_del(kargs 'meter_freq' 10000)<line_sep>stream=fn(*args **kargs)<line_sep># Hijack the stream and keep count of the items
<if_stmt>meter<block_start>count=0<line_sep>start=time.time()<block_end><for_stmt>record stream<block_start><yield>record<if_stmt>meter<block_start>count<augadd>1<if_stmt>count%freq<eq>0<block_start>tick=time.time()<line_sep>sys.stderr.write('\rProcessed %d lines %.2f lines/sec'%(count count/(tick-start)))<line_sep>sys.stderr.flush()<block_end><block_end><block_end><if_stmt>meter<block_start>tick=time.time()<line_sep>sys.stderr.write('\rProcessed %d lines %.2f lines/sec\n'%(count count/(tick-start)))<line_sep>sys.stderr.flush()<block_end><block_end><return>wrapped<block_end># end of decorators section
@open_read_stream@value_stream<def_stmt>file_values f *args **kargs<block_start><return>f<block_end>@metered_stream<def_stmt>sam_stream sam meter=<true> skip_unmapped=<false> skip_reverse=<false><block_start>"""
Use pysam instead...
"""<line_sep>stream=pysam.Samfile(sam)<for_stmt>read stream<block_start><if_stmt>skip_unmapped<and>read.is_unmapped<block_start><continue># skip umapped reads
<block_end><if_stmt>skip_reverse<and>read.is_reverse<block_start><continue># skip reverse reads
<block_end><if_stmt>read.rname<ne>-1<block_start>rname=stream.getrname(read.tid)# make rname visible to the caller
<block_end><else_stmt><block_start>rname=''<block_end><yield>read rname<block_end><return><block_end>
|
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# import all the meta_arch, so they will be registered
<import_from_stmt>.semantic_seg SemanticSegmentor<import_from_stmt>.dynamic4seg DynamicNet4Seg<line_sep>
|
<import_stmt>copy<import_from_stmt>cStringIO StringIO<import_from_stmt>nativedroid.analyses.resolver.annotation *<import_from_stmt>nativedroid.analyses.resolver.armel_resolver ArmelResolver<import_from_stmt>nativedroid.analyses.resolver.jni.jni_helper *<import_from_stmt>nativedroid.analyses.resolver.model.__android_log_print *<import_from_stmt>nativedroid.protobuf.jnsaf_grpc_pb2 *<line_sep>__author__="<NAME>, <NAME>"<line_sep>__copyright__="Copyright 2018, The Argus-SAF Project"<line_sep>__license__="Apache v2.0"<line_sep>nativedroid_logger=logging.getLogger('AnnotationBasedAnalysis')<line_sep>nativedroid_logger.setLevel(logging.INFO)<line_sep>annotation_location={'from_reflection_call':'~' 'from_native':'~' 'from_class':'~'}<class_stmt>AnnotationBasedAnalysis(angr.Analysis)<block_start>"""
This class performs taint analysis based upon angr's annotation technique.
"""<def_stmt>__init__ self analysis_center jni_method_addr jni_method_arguments is_native_pure native_pure_info=<none><block_start>"""
init
:param AnalysisCenter analysis_center:
:param str jni_method_addr: address of jni method
:param str jni_method_arguments:
:param bool is_native_pure: whether the method is pure native (android_main type or direct type).
:param Object native_pure_info: initial SimState and native pure argument
"""<if_stmt>self.project.arch.name<is>'ARMEL'<block_start>self._resolver=ArmelResolver(self.project analysis_center)<block_end><else_stmt><block_start><raise>ValueError('Unsupported architecture: %d'%self.project.arch.name)<block_end>self._hook_system_calls()<line_sep>self._analysis_center=analysis_center<line_sep>self._jni_method_signature=analysis_center.get_signature()<line_sep>self._jni_method_addr=jni_method_addr<if_stmt>is_native_pure<block_start>self._state=self._resolver.prepare_native_pure_state(native_pure_info)<line_sep>self._arguments_summary=<none><block_end><else_stmt><block_start>self._state,self._arguments_summary=self._resolver.prepare_initial_state(jni_method_arguments)<block_end><if_stmt>is_native_pure<block_start>self.cfg=self.project.analyses.CFGAccurate(fail_fast=<true> starts=[self._jni_method_addr] initial_state=self._state context_sensitivity_level=1 keep_state=<true> normalize=<true> call_depth=5)<block_end><else_stmt><block_start>self.cfg=self.project.analyses.CFGAccurate(fail_fast=<true> starts=[self._jni_method_addr] initial_state=self._state context_sensitivity_level=1 keep_state=<true> normalize=<true> call_depth=5)<block_end><block_end><def_stmt>_hook_system_calls self<block_start><if_stmt>'__android_log_print'<in>self.project.loader.main_object.imports<block_start>self.project.hook_symbol('__android_log_print' AndroidLogPrint() replace=<true>)<block_end><block_end><def_stmt>count_cfg_instructions self<block_start>"""
Count the total number of instructions in the CFG.
:return: total instruction count
:rtype: int
"""<line_sep>total_instructions=0<for_stmt>func_addr,func self.cfg.kb.functions.iteritems()<block_start>func_instructions=0<line_sep># print func.name
<for_stmt>block func.blocks<block_start>block_instructions=len(block.instruction_addrs)<line_sep># print block, block_instructions
func_instructions<augadd>block_instructions<block_end>total_instructions<augadd>func_instructions<block_end># print('Total INS: %d' % total_instructions)
<return>total_instructions<block_end><def_stmt>_collect_taint_sources self<block_start>"""
Collect source nodes from CFG.
:return: A set of source annotations collected from the method arguments and return value.
:rtype: set
"""<line_sep>sources_annotation=set()<if_stmt>self._arguments_summary<block_start><for_stmt>_,arg_summary self._arguments_summary.iteritems()<block_start><for_stmt>annotation arg_summary.annotations<block_start><if_stmt>isinstance(annotation JobjectAnnotation)<block_start>worklist=list(annotation.fields_info)<while_stmt>worklist<block_start>field_info=worklist[0]<line_sep>worklist=worklist[1:]<if_stmt>isinstance(field_info JobjectAnnotation)<block_start><if_stmt>field_info.taint_info['is_taint']<and>field_info.taint_info['taint_type'][0]<eq>'_SOURCE_'<and>'_ARGUMENT_'<not><in>field_info.taint_info['taint_type'][1]<block_start>sources_annotation.add(annotation)<block_end><else_stmt><block_start>worklist.extend(field_info.fields_info)<block_end><block_end><block_end><block_end><block_end><block_end><block_end><if_stmt><not>self._jni_method_signature.endswith(")V")<block_start><for_stmt>node self.cfg.nodes()<block_start><if_stmt><not>node.is_simprocedure<and>node.block.vex.jumpkind<eq>'Ijk_Ret'<and>node.function_address<eq>self._jni_method_addr<block_start><for_stmt>final_state node.final_states<block_start>return_value=final_state.regs.r0<for_stmt>annotation return_value.annotations<block_start><if_stmt>isinstance(annotation JobjectAnnotation)<block_start><if_stmt>annotation.taint_info['is_taint']<and>annotation.taint_info['taint_type'][0]<eq>'_SOURCE_'<and>'_ARGUMENT_'<not><in>annotation.taint_info['taint_type'][1]<block_start>sources_annotation.add(annotation)<block_end><block_end><block_end><block_end><block_end><block_end><block_end><return>sources_annotation<block_end><def_stmt>_collect_taint_sinks self<block_start>"""
Collect sink nodes from CFG.
:return: A set of sink annotations collected from calls to known sink functions.
:rtype: set
"""<line_sep>sink_nodes={}<line_sep>sink_annotations=set()<for_stmt>node self.cfg.nodes()<block_start><if_stmt>node.is_simprocedure<and>node.name.startswith('Call')<block_start><for_stmt>final_state node.final_states<block_start>regs=[final_state.regs.r0 final_state.regs.r1 final_state.regs.r2 final_state.regs.r3 final_state.regs.r4 final_state.regs.r5 final_state.regs.r6 final_state.regs.r7 final_state.regs.r8 final_state.regs.r9 final_state.regs.r10]<for_stmt>reg regs# node_return_value = final_state.regs.r0
<block_start><for_stmt>annotation reg.annotations<block_start><if_stmt>isinstance(annotation JobjectAnnotation)<block_start><if_stmt>annotation.taint_info['is_taint']<and>annotation.taint_info['taint_type'][0]<eq>'_SINK_'<block_start>sink_annotations.add(annotation)<block_end><block_end><block_end><block_end><block_end><block_end>fn=self.cfg.project.kb.functions.get(node.addr)<if_stmt>fn<block_start>ssm=self._analysis_center.get_source_sink_manager()<if_stmt>ssm.is_sink(fn.name)<block_start>sink_tag=ssm.get_sink_tags(fn.name)<line_sep>sink_nodes[node]=sink_tag<block_end><block_end><block_end><for_stmt>sink,(positions tags) sink_nodes.iteritems()<block_start>input_state=sink.input_state<line_sep>final_states=sink.final_states<line_sep>args=self._resolver.get_taint_args(input_state final_states positions tags)<if_stmt>args<block_start>nativedroid_logger.debug('tainted: %s, belong_obj: %s'%(args sink.final_states[0].regs.r0))<for_stmt>arg args<block_start><for_stmt>annotation arg.annotations<block_start>sink_annotation=copy.deepcopy(annotation)<line_sep>sink_annotation.taint_info['taint_type'][0]='_SINK_'<if_stmt>annotation.taint_info['is_taint']<and>annotation.taint_info['taint_type']<eq>['_SOURCE_' '_API_']<block_start>sink_annotation.taint_info['taint_type'][1]='_SOURCE_'<block_end>sink_annotations.add(sink_annotation)<block_end><block_end><block_end><block_end>annotations=set()<for_stmt>annotation sink_annotations<block_start><if_stmt>annotation.taint_info['is_taint']<and>annotation.taint_info['taint_type'][1]<eq>'_SOURCE_'<block_start>nativedroid_logger.info('Found taint in function %s.' self._jni_method_signature)<line_sep>jnsaf_client=self._analysis_center.get_jnsaf_client()<if_stmt>jnsaf_client<block_start>request=RegisterTaintRequest(apk_digest=jnsaf_client.apk_digest signature=self._analysis_center.get_signature() source_kind=annotation.taint_info['source_kind'] sink_kind=annotation.taint_info['sink_kind'])<line_sep>response=jnsaf_client.RegisterTaint(request)<if_stmt>response.status<block_start>nativedroid_logger.info('Registered %s as taint.' self._jni_method_signature)<block_end><block_end><block_end><else_stmt><block_start>annotations.add(annotation)<block_end><block_end><return>annotations<block_end><def_stmt>gen_taint_analysis_report self sources sinks<block_start>"""
Generate the taint analysis report
:param sources: Source annotations
:param sinks: Sink annotations
:return: taint analysis report
"""<line_sep>report_file=StringIO()<if_stmt>sinks<block_start>report_file.write(self._jni_method_signature)<line_sep>report_file.write(' -> _SINK_ ')<line_sep>args=set([])<for_stmt>sink_annotation sinks<block_start><if_stmt>sink_annotation.array_info['is_element']<block_start><if_stmt>sink_annotation.array_info['base_annotation'].source.startswith('arg')<block_start>arg_index=re.split('arg|_' sink_annotation.array_info['base_annotation'].source)[1]<line_sep>sink_location=arg_index<line_sep>args.add(str(sink_location))<block_end><block_end><else_stmt><block_start>taint_field_name=''<line_sep>anno=sink_annotation<while_stmt>anno<block_start><if_stmt>anno.field_info['is_field']<block_start>taint_field_name='.'+anno.field_info['field_name']+taint_field_name<block_end><if_stmt>anno.taint_info['is_taint']<and>anno.source<and>anno.source.startswith('arg')<block_start>args.add(anno.source.split('arg')[-1]+taint_field_name)<line_sep><break><block_end>anno=anno.field_info['base_annotation']<block_end><block_end><block_end>report_file.write('|'.join(args))<line_sep>report_file.write('\n')<block_end><if_stmt>sources<block_start>report_file.write(self._jni_method_signature)<line_sep>report_file.write(' -> _SOURCE_ ')<for_stmt>source_annotation sources<block_start><if_stmt>isinstance(source_annotation JobjectAnnotation)<and>source_annotation.source.startswith('arg')<block_start>source_location=source_annotation.source<line_sep>taint_field_name=''<line_sep>worklist=list(source_annotation.fields_info)<while_stmt>worklist<block_start>field_info=worklist[0]<line_sep>worklist=worklist[1:]<if_stmt>field_info.taint_info['is_taint']<and>'_ARGUMENT_'<not><in>field_info.taint_info['taint_type'][1]<block_start>taint_field_name<augadd>'.'+field_info.field_info['field_name']<line_sep><break><block_end><elif_stmt>isinstance(field_info JobjectAnnotation)<block_start>taint_field_name<augadd>'.'+field_info.field_info['field_name']<line_sep>worklist.extend(field_info.fields_info)<block_end><block_end><if_stmt>taint_field_name<block_start>report_file.write(source_location.split('arg')[-1]+taint_field_name)<block_end><block_end><block_end><block_end><return>report_file.getvalue().strip()<block_end><def_stmt>gen_saf_summary_report self<block_start>"""
Generate SAF summary report
:return: summary report
"""<line_sep>args_safsu=dict()<line_sep>rets_safsu=list()<if_stmt>self._arguments_summary<block_start><for_stmt>arg_index,arg_summary self._arguments_summary.iteritems()<block_start>arg_safsu=dict()<for_stmt>annotation arg_summary.annotations<block_start><if_stmt>isinstance(annotation JobjectAnnotation)<and>annotation.fields_info<block_start><for_stmt>field_info annotation.fields_info<block_start>field_name=field_info.field_info['field_name']<line_sep>field_type=field_info.obj_type.replace('/' '.')<line_sep>field_locations=list()<if_stmt>field_info.source<in>annotation_location<block_start>field_location=annotation_location[field_info.source]<line_sep>field_locations.append(field_location)<block_end><elif_stmt>field_info.source.startswith('arg')<block_start>field_location=field_info.heap<line_sep>field_locations.append(field_location)<block_end>arg_safsu[field_name]=(field_type field_locations)<block_end><block_end><block_end>args_safsu[arg_index]=arg_safsu<block_end><block_end>return_nodes=list()<for_stmt>node self.cfg.nodes()<block_start><if_stmt><not>node.is_simprocedure<block_start><if_stmt>node.block.vex.jumpkind<eq>'Ijk_Ret'<and>node.function_address<eq>self._jni_method_addr<block_start>return_nodes.append(node)<block_end><block_end><block_end><if_stmt><not>self._jni_method_signature.endswith(")V")<block_start><for_stmt>return_node return_nodes<block_start><for_stmt>final_state return_node.final_states<block_start>return_value=final_state.regs.r0<for_stmt>annotation return_value.annotations<block_start><if_stmt>isinstance(annotation JstringAnnotation)# ret_type = annotation.primitive_type.split('L')[-1].replace('/', '.')
<block_start>ret_type='java.lang.String'<line_sep>ret_location=annotation_location[annotation.source]<line_sep>ret_value=annotation.value<if_stmt>ret_value<is><not><none><block_start>ret_safsu=' ret = "'+ret_value+'"@'+ret_location<block_end><else_stmt><block_start>ret_safsu=' ret = '+ret_type+'@'+ret_location<block_end>rets_safsu.append(ret_safsu)<block_end><elif_stmt>isinstance(annotation JobjectAnnotation)<block_start><if_stmt>annotation.heap<block_start>ret_value=annotation.heap<line_sep>ret_safsu=' ret = '+ret_value<line_sep>rets_safsu.append(ret_safsu)<block_end><else_stmt><block_start>ret_type=annotation.obj_type.replace('/' '.')<line_sep>ret_location=annotation_location[annotation.source]<line_sep>ret_safsu=' ret = '+ret_type+'@'+ret_location<line_sep>rets_safsu.append(ret_safsu)<block_end><block_end><block_end><block_end><block_end><block_end>report_file=StringIO()<line_sep>report_file.write('`'+self._jni_method_signature+'`:'+'\n')<if_stmt>args_safsu<block_start><for_stmt>arg_index,fields_safsu args_safsu.iteritems()<block_start>arg_index='arg:'+str(re.split('arg|_' arg_index)[1])<for_stmt>field_name,field_su fields_safsu.iteritems()<block_start>field_type=field_su[0]<line_sep>field_locations=field_su[1]<if_stmt>field_locations[0]<eq>'~'<block_start>field_safsu=arg_index+'.'+field_name+' = '+field_type+'@'+field_locations[0]<block_end><else_stmt><block_start>field_safsu=arg_index+'.'+field_name+' = '+field_locations[0]<block_end>report_file.write(' '+field_safsu.strip()+'\n')<block_end><block_end><block_end><if_stmt>rets_safsu<block_start><for_stmt>ret_safsu rets_safsu<block_start>report_file.write(ret_safsu+'\n')<block_end><block_end>report_file.write(';\n')<line_sep><return>report_file.getvalue().strip()<block_end><def_stmt>run self<block_start>"""
Run the analysis.
:return: (sources, sinks) sets of taint annotations.
"""<line_sep>sources=self._collect_taint_sources()<line_sep>sinks=self._collect_taint_sinks()<line_sep><return>sources sinks<block_end><block_end>
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""<import_from_stmt>twilio.base.version Version<import_from_stmt>twilio.rest.preview.deployed_devices.fleet FleetList<class_stmt>DeployedDevices(Version)<block_start><def_stmt>__init__ self domain<block_start>"""
Initialize the DeployedDevices version of Preview
:returns: DeployedDevices version of Preview
:rtype: twilio.rest.preview.deployed_devices.DeployedDevices.DeployedDevices
"""<line_sep>super(DeployedDevices self).__init__(domain)<line_sep>self.version='DeployedDevices'<line_sep>self._fleets=<none><block_end>@property<def_stmt>fleets self<block_start>"""
:rtype: twilio.rest.preview.deployed_devices.fleet.FleetList
"""<if_stmt>self._fleets<is><none><block_start>self._fleets=FleetList(self)<block_end><return>self._fleets<block_end><def_stmt>__repr__ self<block_start>"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""<line_sep><return>'<Twilio.Preview.DeployedDevices>'<block_end><block_end>
|
<import_from_stmt>libcity.pipeline.pipeline run_model hyper_parameter objective_function<line_sep>__all__=["run_model" "hyper_parameter" "objective_function"]<line_sep>
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides functions for parsing and outputting Zulu time."""<import_stmt>datetime<import_stmt>pytz<import_from_stmt>infra_libs.time_functions timestamp<def_stmt>parse_zulu_time string<block_start>"""Parses a Zulu time string, returning None if unparseable."""<line_sep># Ugh https://bugs.python.org/issue19475.
zulu_format="%Y-%m-%dT%H:%M:%S"<if_stmt>'.'<in>string<block_start>zulu_format<augadd>".%f"<block_end>zulu_format<augadd>"Z"<try_stmt><block_start><return>datetime.datetime.strptime(string zulu_format)<block_end><except_stmt>ValueError<block_start><return><none><block_end><block_end><def_stmt>parse_zulu_ts string<block_start>"""Parses Zulu time and converts into a timestamp or None."""<line_sep>zuluparse=parse_zulu_time(string)<if_stmt>zuluparse<is><none><block_start><return><none><block_end><return>timestamp.utctimestamp(zuluparse)<block_end><def_stmt>to_zulu_string dt<block_start>"""Returns a Zulu time string from a datetime.
Assumes naive datetime objects are in UTC.
Ensures the output always has a floating-point number of seconds.
"""<line_sep># Assume non-tz-aware datetimes are in UTC.
<if_stmt>dt.tzinfo<is><none><or>dt.tzinfo.utcoffset(dt)<is><none><block_start>dt=dt.replace(tzinfo=pytz.UTC)<block_end># Convert datetime into UTC.
isodate=dt.astimezone(pytz.UTC).isoformat().split('+')[0]<line_sep># Add fractional seconds if not present.
<if_stmt>'.'<not><in>isodate<block_start>isodate<augadd>'.0'<block_end><return>isodate+'Z'<block_end>
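# Round-trip sketch (editorial addition; the timestamp value is illustrative):
#
#     >>> dt = parse_zulu_time('2015-06-01T12:34:56.789Z')
#     >>> to_zulu_string(dt)
#     '2015-06-01T12:34:56.789000Z'
#
# parse_zulu_ts() returns the same instant as a float POSIX timestamp, or None
# when the string cannot be parsed.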
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
#
# Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/main/tools/configen
# fmt: off
# isort:skip_file
# flake8: noqa
<import_from_stmt>packaging version<import_from_stmt>pkg_resources get_distribution<import_stmt>warnings<import_stmt>torchvision<line_sep>CONFIGS_VERSION=get_distribution('hydra-configs-torchvision').version<line_sep># checks if major.minor versions are matched. patch version is always different
<if_stmt>version.parse(torchvision.__version__).release[:2]<ne>version.parse(CONFIGS_VERSION).release[:2]<block_start>warnings.warn(f'Your config and library versions are mismatched. \n HYDRA-CONFIGS-TORCHVISION VERSION: {CONFIGS_VERSION}, \n TORCHVISION VERSION: {torchvision.__version__}. \n Please install the matching configs for reliable functionality.')<block_end>
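# For example, version.parse('0.9.1').release[:2] == (0, 9), so a 0.9.x configs
# package matches any torchvision 0.9.* build, while torchvision 0.10.0 against
# 0.9.x configs triggers the warning above. (Editorial note; the versions are
# illustrative only.)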
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
<import_from_stmt>spack *<class_stmt>Librmm(CMakePackage)<block_start>"""RMM: RAPIDS Memory Manager. Achieving optimal
performance in GPU-centric workflows frequently requires
customizing how host and device memory are allocated."""<line_sep>homepage="https://github.com/rapidsai/rmm"<line_sep>url="https://github.com/rapidsai/rmm/archive/v0.15.0.tar.gz"<line_sep>version('0.15.0' sha256='599f97b95d169a90d11296814763f7e151a8a1e060ba10bc6c8f4684a5cd7972')<line_sep>depends_on('[email protected]:')<block_end>
|
<import_stmt>math<import_stmt>random<import_stmt>cv2<line_sep>cv2.setNumThreads(0)<import_stmt>numpy<as>np<import_stmt>torch<import_from_stmt>numpy.core.multiarray ndarray<line_sep>_DEFAULT_ALPHASTD=0.1<line_sep>_DEFAULT_EIGVAL=torch.Tensor([0.2175 0.0188 0.0045])<line_sep>_DEFAULT_EIGVEC=torch.Tensor([[-0.5675 0.7192 0.4009] [-0.5808 -0.0045 -0.8140] [-0.5836 -0.6948 0.4203]])<line_sep>_DEFAULT_BCS=[0.2 0.2 0.2]<class_stmt>Normalize(object)<block_start><def_stmt>__init__ self mean std<block_start>self.mean=mean<line_sep>self.std=std<block_end><def_stmt>__call__ self sample<block_start>sample["img"]=self.normalize(sample["img"] self.mean self.std)<if_stmt>"imgs"<in>sample<block_start>sample["imgs"]=[self.normalize(img self.mean self.std)<for>img sample["imgs"]]<block_end><return>sample<block_end><def_stmt>normalize self tensor mean std<block_start><if_stmt><not>(torch.is_tensor(tensor)<and>tensor.ndimension()<eq>3)<block_start><raise>TypeError('tensor is not a torch image.')<block_end><for_stmt>t,m,s zip(tensor mean std)<block_start>t.sub_(m).div_(s)<block_end><return>tensor<block_end><block_end><class_stmt>HFlip(object)<block_start><def_stmt>__call__ self sample<block_start><if_stmt>random.random()<l>0.5<block_start><try_stmt><block_start>sample["img"]=cv2.flip(sample["img"] 1)<block_end><except_stmt>Exception<as>e<block_start>print(sample["img_name"])<line_sep><raise>e<block_end><if_stmt>sample["mask"]<is><not><none><block_start>sample["mask"]=cv2.flip(sample["mask"] 1)<block_end><block_end><return>sample<block_end><block_end><class_stmt>VFlip(object)<block_start><def_stmt>__call__ self sample<block_start><if_stmt>random.random()<l>0.5<block_start>sample["img"]=cv2.flip(sample["img"] 0)<if_stmt>sample["mask"]<is><not><none><block_start>sample["mask"]=cv2.flip(sample["mask"] 0)<block_end><block_end><return>sample<block_end><block_end><def_stmt>rot90 img factor<block_start>img=np.rot90(img factor)<line_sep><return>np.ascontiguousarray(img)<block_end><class_stmt>Rotate90(object)<block_start><def_stmt>__call__ self sample<block_start><if_stmt>random.random()<l>0.5<block_start>factor=random.randint(0 4)<line_sep>sample["img"]=rot90(sample["img"] factor)<if_stmt>sample["mask"]<is><not><none><block_start>sample["mask"]=rot90(sample["mask"] factor)<block_end><block_end><return>sample<block_end><block_end><class_stmt>Pad(object)<block_start><def_stmt>__init__ self block=32 mode='reflect'<block_start>super().__init__()<line_sep>self.block=block<line_sep>self.mode=mode<block_end><def_stmt>__call__ self sample<block_start>sample["img"]=pad(sample["img"] self.block type='reflect')<if_stmt>sample["mask"]<is><not><none><and>sample["mask"]<ne>[]<block_start>sample["mask"]=pad(sample["mask"] self.block type='reflect')<block_end><return>sample<block_end><block_end><def_stmt>pad image block type='reflect' **kwargs<block_start>params={}<if_stmt>type<eq>'zero'<block_start>params={'constant_values':0}<line_sep>type='constant'<block_end>x0,x1,y0,y1=0 0 0 0<if_stmt>(image.shape[1]%block)<ne>0<block_start>x0=int((block-image.shape[1]%block)/2)<line_sep>x1=(block-image.shape[1]%block)-x0<block_end><if_stmt>(image.shape[0]%block)<ne>0<block_start>y0=int((block-image.shape[0]%block)/2)<line_sep>y1=(block-image.shape[0]%block)-y0<block_end><if_stmt>len(image.shape)<g>1<block_start><return>np.pad(image ((y0 y1) (x0 x1) (0 0)) type **params **kwargs)<block_end><else_stmt><block_start><return>np.pad(image ((y0 y1) (x0 x1)) type **params 
**kwargs)<block_end><block_end><class_stmt>ToTensor(object)<block_start><def_stmt>__call__ self sample<block_start>sample["img"]=torch.from_numpy(sample["img"].transpose((2 0 1))).float()<line_sep>sample["angle"]=torch.from_numpy(sample["angle"].transpose((2 0 1))).float()<if_stmt>isinstance(sample["mask"] ndarray)<block_start>sample["mask"]=torch.from_numpy(sample["mask"].transpose((2 0 1))).float()<block_end><return>sample<block_end><block_end><class_stmt>ColorJitterImage(object)<block_start><def_stmt>__init__ self<block_start>self.transform=ColorJitter()<block_end><def_stmt>__call__ self sample<block_start><if_stmt>random.random()<l>0.5<block_start>sample["img"]=self.transform(sample['img'])<block_end><return>sample<block_end><block_end><class_stmt>LightingImage(object)<block_start><def_stmt>__init__ self<block_start>self.transform=Lighting()<block_end><def_stmt>__call__ self sample<block_start><if_stmt>random.random()<l>0.5<block_start>sample["img"]=self.transform(sample['img'])<block_end><return>sample<block_end><block_end><class_stmt>RandomCropAndScale(object)<block_start><def_stmt>__init__ self height width scale_range=(0.5 2.0) rescale_prob=0.5 prob=1.<block_start>self.prob=prob<line_sep>self.height=height<line_sep>self.width=width<line_sep>self.scale_range=scale_range<line_sep>self.rescale_prob=rescale_prob<block_end><def_stmt>__call__ self sample<block_start><if_stmt>random.random()<g>self.prob<block_start><return>sample<block_end>scale=random.uniform(self.scale_range[0] self.scale_range[1])<if_stmt>random.random()<g>self.rescale_prob<block_start>scale=1.<block_end>random_state=np.random.randint(0 10000)<line_sep>sample["img"]=random_crop(sample['img'] self.height self.width scale np.random.RandomState(random_state))<if_stmt>sample["mask"]<is><not><none><and>sample["mask"]<ne>[]<block_start>sample["mask"]=random_crop(sample['mask'] self.height self.width scale np.random.RandomState(random_state) mode=cv2.INTER_NEAREST)<block_end><return>sample<block_end><block_end><def_stmt>random_crop img height width scale random_state mode=<none><block_start><if_stmt>random_state<is><none><block_start>random_state=np.random.RandomState(1234)<block_end>crop_height=height<line_sep>crop_width=width<line_sep>img_height,img_width=img.shape[:2]<line_sep>max_height=int(min(crop_height<times>scale img_height))<line_sep>max_width=int(min(crop_width<times>scale img_width))<line_sep>adjusted_scale=scale<times>min(max_width/(crop_width<times>scale) max_height/(crop_height<times>scale))<line_sep>crop_width=int(adjusted_scale<times>width)<line_sep>crop_height=int(adjusted_scale<times>height)<line_sep>start_y=random_state.randint(0 max(img_height-crop_height 1))<line_sep>start_x=random_state.randint(0 max(img_width-crop_width 1))<line_sep>crop=img[start_y:start_y+crop_height start_x:start_x+crop_width]<if_stmt>mode<is><none><block_start><if_stmt>1/adjusted_scale<l>1.<block_start>mode=cv2.INTER_AREA<block_end><else_stmt><block_start>mode=cv2.INTER_CUBIC<block_end><block_end><if_stmt>scale<ne>1.<block_start>img=cv2.resize(crop (width height) interpolation=mode)<block_end><else_stmt><block_start>img=crop<block_end><return>img<block_end><def_stmt>shift_scale_rotate img angle scale dx dy borderMode=cv2.BORDER_CONSTANT<block_start>height,width=img.shape[:2]<line_sep>cc=math.cos(angle/180<times>math.pi)<times>scale<line_sep>ss=math.sin(angle/180<times>math.pi)<times>scale<line_sep>rotate_matrix=np.array([[cc -ss] [ss cc]])<line_sep>box0=np.array([[0 0] [width 0] [width height] [0 height] 
])<line_sep>box1=box0-np.array([width/2 height/2])<line_sep>box1=np.dot(box1 rotate_matrix.T)+np.array([width/2+dx<times>width height/2+dy<times>height])<line_sep>box0=box0.astype(np.float32)<line_sep>box1=box1.astype(np.float32)<line_sep>mat=cv2.getPerspectiveTransform(box0 box1)<line_sep>img=cv2.warpPerspective(img mat (width height) flags=cv2.INTER_NEAREST borderMode=borderMode)<line_sep><return>img<block_end><class_stmt>RandomRotate(object)<block_start><def_stmt>__init__ self angle=15 prob=0.3<block_start>self.prob=prob<line_sep>self.angle=angle<block_end><def_stmt>__call__ self sample<block_start><if_stmt>random.random()<g>self.prob<block_start><return>sample<block_end>angle=random.uniform(-self.angle self.angle)<if_stmt>angle<eq>0<block_start><return>sample<block_end>sample["img"]=shift_scale_rotate(sample['img'] angle=angle scale=1 dx=0 dy=0)<if_stmt>sample["mask"]<is><not><none><and>sample["mask"]<ne>[]<block_start>sample["mask"]=shift_scale_rotate(sample['mask'] angle=angle scale=1 dx=0 dy=0)<block_end><return>sample<block_end><block_end><def_stmt>_grayscale img<block_start>alpha=torch.Tensor([0.299 0.587 0.114])<line_sep><return>(alpha.view(3 1 1)<times>img).sum(0 keepdim=<true>)<block_end><def_stmt>_blend img1 img2 alpha<block_start><return>img1<times>alpha+(1-alpha)<times>img2<block_end><class_stmt>Lighting(object)<block_start><def_stmt>__init__ self alphastd=_DEFAULT_ALPHASTD eigval=_DEFAULT_EIGVAL eigvec=_DEFAULT_EIGVEC<block_start>self._alphastd=alphastd<line_sep>self._eigval=eigval<line_sep>self._eigvec=eigvec<block_end><def_stmt>__call__ self img<block_start><if_stmt>self._alphastd<eq>0.<block_start><return>img<block_end>alpha=torch.normal(torch.zeros(3) self._alphastd)<line_sep>rgb=(self._eigvec<times>alpha<times>self._eigval).sum(dim=1)<line_sep><return>img+rgb.view(3 1 1)<block_end><block_end><class_stmt>Saturation(object)<block_start><def_stmt>__init__ self var<block_start>self._var=var<block_end><def_stmt>__call__ self img<block_start>gs=_grayscale(img)<line_sep>alpha=torch.FloatTensor(1).uniform_(-self._var self._var)+1.0<line_sep><return>_blend(img gs alpha)<block_end><block_end><class_stmt>Brightness(object)<block_start><def_stmt>__init__ self var<block_start>self._var=var<block_end><def_stmt>__call__ self img<block_start>gs=torch.zeros(img.size())<line_sep>alpha=torch.FloatTensor(1).uniform_(-self._var self._var)+1.0<line_sep><return>_blend(img gs alpha)<block_end><block_end><class_stmt>Contrast(object)<block_start><def_stmt>__init__ self var<block_start>self._var=var<block_end><def_stmt>__call__ self img<block_start>gs=_grayscale(img)<line_sep>gs=torch.FloatTensor(1 1 1).fill_(gs.mean())<line_sep>alpha=torch.FloatTensor(1).uniform_(-self._var self._var)+1.0<line_sep><return>_blend(img gs alpha)<block_end><block_end><class_stmt>ColorJitter(object)<block_start><def_stmt>__init__ self saturation=_DEFAULT_BCS[0] brightness=_DEFAULT_BCS[1] contrast=_DEFAULT_BCS[2]<block_start>self._transforms=[]<if_stmt>saturation<is><not><none><block_start>self._transforms.append(Saturation(saturation))<block_end><if_stmt>brightness<is><not><none><block_start>self._transforms.append(Brightness(brightness))<block_end><if_stmt>contrast<is><not><none><block_start>self._transforms.append(Contrast(contrast))<block_end><block_end><def_stmt>__call__ self img<block_start><if_stmt>len(self._transforms)<eq>0<block_start><return>img<block_end><for_stmt>t random.sample(self._transforms len(self._transforms))<block_start>img[:3 <ellipsis>]=t(img[:3 
<ellipsis>])<block_end><return>img<block_end><block_end>
|
<import_stmt>time<import_stmt>pytest<import_from_stmt>cadence.exceptions QueryFailureException<import_from_stmt>cadence.workerfactory WorkerFactory<import_from_stmt>cadence.workflow workflow_method signal_method Workflow WorkflowClient query_method<line_sep>TASK_LIST="TestQueryWorkflow"<line_sep>DOMAIN="sample"<class_stmt>GreetingException(Exception)<block_start><pass><block_end><class_stmt>TestQueryWorkflow<block_start>@query_method()<async_keyword><def_stmt>get_message self<arrow>str<block_start><raise>NotImplementedError<block_end>@query_method()<async_keyword><def_stmt>get_message_fail self<arrow>str<block_start><raise>NotImplementedError<block_end>@signal_method()<async_keyword><def_stmt>put_message self message<block_start><raise>NotImplementedError<block_end>@workflow_method(task_list=TASK_LIST)<async_keyword><def_stmt>get_greetings self<arrow>list<block_start><raise>NotImplementedError<block_end><block_end><class_stmt>TestQueryWorkflowImpl(TestQueryWorkflow)<block_start><def_stmt>__init__ self<block_start>self.message=""<block_end><async_keyword><def_stmt>get_message self<arrow>str<block_start><return>self.message<block_end><async_keyword><def_stmt>get_message_fail self<arrow>str<block_start><raise>GreetingException("error from query")<block_end><async_keyword><def_stmt>put_message self message<block_start>self.message=message<block_end><async_keyword><def_stmt>get_greetings self<arrow>list<block_start>self.message="initial-message"<line_sep><await>Workflow.await_till(<lambda>:self.message<eq>"done")<block_end><block_end><def_stmt>test_query_workflow <block_start>factory=WorkerFactory("localhost" 7933 DOMAIN)<line_sep>worker=factory.new_worker(TASK_LIST)<line_sep>worker.register_workflow_implementation_type(TestQueryWorkflowImpl)<line_sep>factory.start()<line_sep>client=WorkflowClient.new_client(domain=DOMAIN)<line_sep>workflow:TestQueryWorkflow=client.new_workflow_stub(TestQueryWorkflow)<line_sep>workflow_ec=WorkflowClient.start(workflow.get_greetings)<assert_stmt>workflow.get_message()<eq>"initial-message"<line_sep>workflow.put_message("second-message")<assert_stmt>workflow.get_message()<eq>"second-message"<with_stmt>pytest.raises(QueryFailureException)<as>exc_info<block_start>workflow.get_message_fail()<block_end>ex=exc_info.value<assert_stmt>isinstance(ex.__cause__ GreetingException)<line_sep>workflow.put_message("done")<line_sep>client.wait_for_close(workflow_ec)<assert_stmt>workflow.get_message()<eq>"done"<line_sep>print("Stopping workers")<line_sep>worker.stop()<block_end>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
<import_from_future_stmt> print_function<line_sep>'''These are unit tests that are derived from the rfc at
http://www.robotstxt.org/norobots-rfc.txt'''<import_stmt>codecs<import_stmt>unittest<import_stmt>mock<import_from_stmt>requests.exceptions SSLError<import_from_stmt>reppy robots<import_from_stmt>.util requests_fixtures<class_stmt>RobotsTest(unittest.TestCase)<block_start>'''Tests about our Robots class.'''<def_stmt>test_expired self<block_start>'''Returns true if expired.'''<with_stmt>mock.patch.object(robots.time 'time' return_value=10)<block_start>robot=robots.Robots.parse('http://example.com/robots.txt' '' expires=5)<line_sep>self.assertTrue(robot.expired)<block_end><block_end><def_stmt>test_not_expired self<block_start>'''Returns false if not expired.'''<with_stmt>mock.patch.object(robots.time 'time' return_value=10)<block_start>robot=robots.Robots.parse('http://example.com/robots.txt' '' expires=15)<line_sep>self.assertFalse(robot.expired)<block_end><block_end><def_stmt>test_ttl self<block_start>'''Returns the time remaining until expiration.'''<with_stmt>mock.patch.object(robots.time 'time' return_value=10)<block_start>robot=robots.Robots.parse('http://example.com/robots.txt' '' expires=15)<line_sep>self.assertEqual(robot.ttl 5)<block_end><block_end><def_stmt>test_no_leading_user_agent self<block_start>'''Treats missing User-Agent as default user agent'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
Disallow: /path
Allow: /path/exception
Crawl-delay: 7
''')<line_sep>self.assertNotEqual(robot.agent('agent') <none>)<line_sep>self.assertTrue(robot.allowed('/path/exception' 'agent'))<line_sep>self.assertFalse(robot.allowed('/path' 'agent'))<line_sep>self.assertTrue(robot.allowed('/' 'agent'))<line_sep>self.assertEquals(robot.agent('agent').delay 7)<block_end><def_stmt>test_malformed_crawl_delay self<block_start>'''Handles a malformed delay.'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-agent: *
Crawl-delay: word
''')<line_sep>self.assertEqual(robot.agent('agent').delay <none>)<block_end><def_stmt>test_honors_default_agents self<block_start>'''Honors the default user agent when that's all that's available.'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-agent: *
Disallow: /tmp
User-agent: other-agent
Allow: /tmp
''')<line_sep>self.assertFalse(robot.allowed('/tmp' 'agent'))<line_sep>self.assertTrue(robot.allowed('/path' 'agent'))<block_end><def_stmt>test_honors_specific_agent self<block_start>'''Honors the specific user agent if a match is found.'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-agent: *
Disallow: /tmp
User-agent: agent
Allow: /tmp
''')<line_sep>self.assertTrue(robot.allowed('/tmp' 'agent'))<line_sep>self.assertTrue(robot.allowed('/path' 'agent'))<block_end><def_stmt>test_grouping self<block_start>'''Multiple consecutive User-Agent lines are allowed.'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-agent: one
User-agent: two
Disallow: /tmp
''')<line_sep>self.assertFalse(robot.allowed('/tmp' 'one'))<line_sep>self.assertFalse(robot.allowed('/tmp' 'two'))<block_end><def_stmt>test_grouping_unknown_keys self<block_start>'''
When we encounter unknown keys, we should disregard any grouping that may have
happened between user agent rules.
This is an example from the wild. Despite `Noindex` not being a valid directive,
we'll not consider the '*' and 'ia_archiver' rules together.
'''<line_sep>rules=robots.Robots.parse('http://example.com/robots.txt' '''
User-agent: *
Disallow: /content/2/
User-agent: *
Noindex: /gb.html
Noindex: /content/2/
User-agent: ia_archiver
Disallow: /
''')<line_sep>self.assertTrue(rules.allowed('/foo' 'agent'))<line_sep>self.assertTrue(<not>rules.allowed('/bar' 'ia_archiver'))<block_end><def_stmt>test_separates_agents self<block_start>'''Hands back an appropriate agent.'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-agent: one
Crawl-delay: 1
User-agent: two
Crawl-delay: 2
''')<line_sep>self.assertNotEqual(robot.agent('one').delay robot.agent('two').delay)<block_end><def_stmt>test_exposes_sitemaps self<block_start>'''Finds and exposes sitemaps.'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
Sitemap: http://a.com/sitemap.xml
Sitemap: http://b.com/sitemap.xml
''')<line_sep>self.assertEqual(robot.sitemaps ['http://a.com/sitemap.xml' 'http://b.com/sitemap.xml'])<block_end><def_stmt>test_case_insensitivity self<block_start>'''Make sure user agent matches are case insensitive'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-agent: Agent
Disallow: /path
''')<line_sep>self.assertFalse(robot.allowed('/path' 'agent'))<line_sep>self.assertFalse(robot.allowed('/path' 'aGeNt'))<block_end><def_stmt>test_empty self<block_start>'''Makes sure we can parse an empty robots.txt'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '')<line_sep>self.assertEqual(robot.sitemaps [])<line_sep>self.assertTrue(robot.allowed('/' 'agent'))<block_end><def_stmt>test_comments self<block_start>'''Robust against comments.'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-Agent: * # comment saying it's the default agent
Allow: /
''')<line_sep>self.assertNotEqual(robot.agent('agent') <none>)<block_end><def_stmt>test_accepts_full_url self<block_start>'''Can accept a url string.'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-Agent: agent
Disallow: /
''')<line_sep>self.assertFalse(robot.allowed('http://example.com/path' 'agent'))<block_end><def_stmt>test_skip_malformed_line self<block_start>'''If there is no colon in a line, then we must skip it'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' '''
User-Agent: agent
Disallow /no/colon/in/this/line
''')<line_sep>self.assertTrue(robot.allowed('/no/colon/in/this/line' 'agent'))<block_end><def_stmt>test_fetch_status_200 self<block_start>'''A 200 parses things normally.'''<with_stmt>requests_fixtures('test_fetch_status_200')<block_start>robot=robots.Robots.fetch('http://localhost:8080/robots.txt')<line_sep>self.assertFalse(robot.allowed('/path' 'agent'))<block_end><block_end><def_stmt>test_fetch_status_401 self<block_start>'''A 401 gives us an AllowNone Robots.'''<with_stmt>requests_fixtures('test_fetch_status_401')<block_start>robot=robots.Robots.fetch('http://localhost:8080/robots.txt')<line_sep>self.assertIsInstance(robot robots.AllowNone)<block_end><block_end><def_stmt>test_fetch_status_403 self<block_start>'''A 403 gives us an AllowNone Robots.'''<with_stmt>requests_fixtures('test_fetch_status_403')<block_start>robot=robots.Robots.fetch('http://localhost:8080/robots.txt')<line_sep>self.assertIsInstance(robot robots.AllowNone)<block_end><block_end><def_stmt>test_fetch_status_4XX self<block_start>'''A 4XX gives us an AllowAll Robots.'''<with_stmt>requests_fixtures('test_fetch_status_4XX')<block_start>robot=robots.Robots.fetch('http://localhost:8080/robots.txt')<line_sep>self.assertIsInstance(robot robots.AllowAll)<block_end><block_end><def_stmt>test_fetch_status_5XX self<block_start>'''A server error raises an exception.'''<with_stmt>requests_fixtures('test_fetch_status_5XX')<block_start><with_stmt>self.assertRaises(robots.exceptions.BadStatusCode)<block_start>robots.Robots.fetch('http://localhost:8080/robots.txt')<block_end><block_end><block_end><def_stmt>test_content_too_big self<block_start>'''Raises an exception if the content is too big.'''<with_stmt>requests_fixtures('test_content_too_big')<block_start><with_stmt>self.assertRaises(robots.exceptions.ReppyException)<block_start>robots.Robots.fetch('http://localhost:8080/robots.txt' max_size=5)<block_end><block_end><block_end><def_stmt>test_ssl_exception self<block_start>'''Raises a ReppyException on SSL errors.'''<with_stmt>mock.patch.object(robots.requests 'get' side_effect=SSLError('Kaboom'))<block_start><with_stmt>self.assertRaises(robots.exceptions.SSLException)<block_start>robots.Robots.fetch('https://localhost:8080/robots.txt')<block_end><block_end><block_end><def_stmt>test_connection_exception self<block_start>'''Raises a ReppyException on connection errors.'''<with_stmt>self.assertRaises(robots.exceptions.ConnectionException)<block_start>robots.Robots.fetch('http://localhost:8080/robots.txt')<block_end><block_end><def_stmt>test_malformed_url self<block_start>'''Raises a ReppyException on malformed URLs.'''<with_stmt>self.assertRaises(robots.exceptions.MalformedUrl)<block_start>robots.Robots.fetch('gobbledygook')<block_end><block_end><def_stmt>test_excessive_redirects self<block_start>'''Raises a ReppyException on too many redirects.'''<with_stmt>requests_fixtures('test_excessive_redirects')<block_start><with_stmt>self.assertRaises(robots.exceptions.ExcessiveRedirects)<block_start>robots.Robots.fetch('http://localhost:8080/robots.txt')<block_end><block_end><block_end><def_stmt>test_robots_url_http self<block_start>'''Works with a http URL.'''<line_sep>url='http://[email protected]:80/path;params?query#fragment'<line_sep>expected='http://example.com/robots.txt'<line_sep>self.assertEqual(robots.Robots.robots_url(url) expected)<block_end><def_stmt>test_robots_url_https self<block_start>'''Works with a https URL.'''<line_sep>url='https://[email 
protected]:443/path;params?query#fragment'<line_sep>expected='https://example.com/robots.txt'<line_sep>self.assertEqual(robots.Robots.robots_url(url) expected)<block_end><def_stmt>test_robots_url_non_default_port self<block_start>'''Works with a URL with a non-default port.'''<line_sep>url='http://[email protected]:8080/path;params?query#fragment'<line_sep>expected='http://example.com:8080/robots.txt'<line_sep>self.assertEqual(robots.Robots.robots_url(url) expected)<block_end><def_stmt>test_robots_url_invalid_port self<block_start>'''Raises exception when given an invalid port.'''<line_sep>url='http://:::cnn.com/'<with_stmt>self.assertRaises(ValueError)<block_start>robots.Robots.robots_url(url)<block_end><block_end><def_stmt>test_utf8_bom self<block_start>'''If there's a utf-8 BOM, we should parse it as such'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' codecs.BOM_UTF8+b'''
User-Agent: agent
Allow: /path
User-Agent: other
Disallow: /path
''')<line_sep>self.assertTrue(robot.allowed('http://example.com/path' 'agent'))<line_sep>self.assertFalse(robot.allowed('http://example.com/path' 'other'))<block_end><def_stmt>test_str_function self<block_start>'''
If there is valid UTF-8, str() should return a representation of the
directives.
This came out of a UnicodeDecodeError happening in Python 2, when we
were unduly decoding the bytes (via UTF-8) to unicode, then implictly
converting back to bytes via UTF-8.
'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' codecs.BOM_UTF8+b'''
User-Agent: \xc3\xa4gent
Allow: /swedish-chef
''')<line_sep>s=str(robot)<line_sep>self.assertTrue('ägent'<in>s)<block_end><def_stmt>test_utf16_bom self<block_start>'''If there's a utf-16 BOM, we should parse it as such'''<line_sep>robot=robots.Robots.parse('http://example.com/robots.txt' codecs.BOM_UTF16+b'''
User-Agent: agent
Allow: /path
User-Agent: other
Disallow: /path
''')<line_sep>self.assertTrue(robot.allowed('http://example.com/path' 'agent'))<line_sep>self.assertFalse(robot.allowed('http://example.com/path' 'other'))<block_end><def_stmt>test_rfc_example self<block_start>'''Tests the example provided by the RFC.'''<line_sep>robot=robots.Robots.parse('http://www.fict.org' '''
# /robots.txt for http://www.fict.org/
# comments to <EMAIL>
User-agent: unhipbot
Disallow: /
User-agent: webcrawler
User-agent: excite
Disallow:
User-agent: *
Disallow: /org/plans.html
Allow: /org/
Allow: /serv
Allow: /~mak
Disallow: /
''')<line_sep># The unhip bot
self.assertFalse(robot.allowed('/' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/index.html' 'unhipbot'))<line_sep>self.assertTrue(robot.allowed('/robots.txt' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/server.html' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/services/fast.html' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/services/slow.html' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/orgo.gif' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/org/about.html' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/org/plans.html' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/%7Ejim/jim.html' 'unhipbot'))<line_sep>self.assertFalse(robot.allowed('/%7Emak/mak.html' 'unhipbot'))<line_sep># The webcrawler agent
self.assertTrue(robot.allowed('/' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/index.html' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/robots.txt' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/server.html' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/services/fast.html' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/services/slow.html' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/orgo.gif' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/org/about.html' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/org/plans.html' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/%7Ejim/jim.html' 'webcrawler'))<line_sep>self.assertTrue(robot.allowed('/%7Emak/mak.html' 'webcrawler'))<line_sep># The excite agent
self.assertTrue(robot.allowed('/' 'excite'))<line_sep>self.assertTrue(robot.allowed('/index.html' 'excite'))<line_sep>self.assertTrue(robot.allowed('/robots.txt' 'excite'))<line_sep>self.assertTrue(robot.allowed('/server.html' 'excite'))<line_sep>self.assertTrue(robot.allowed('/services/fast.html' 'excite'))<line_sep>self.assertTrue(robot.allowed('/services/slow.html' 'excite'))<line_sep>self.assertTrue(robot.allowed('/orgo.gif' 'excite'))<line_sep>self.assertTrue(robot.allowed('/org/about.html' 'excite'))<line_sep>self.assertTrue(robot.allowed('/org/plans.html' 'excite'))<line_sep>self.assertTrue(robot.allowed('/%7Ejim/jim.html' 'excite'))<line_sep>self.assertTrue(robot.allowed('/%7Emak/mak.html' 'excite'))<line_sep># All others
self.assertFalse(robot.allowed('/' 'anything'))<line_sep>self.assertFalse(robot.allowed('/index.html' 'anything'))<line_sep>self.assertTrue(robot.allowed('/robots.txt' 'anything'))<line_sep>self.assertTrue(robot.allowed('/server.html' 'anything'))<line_sep>self.assertTrue(robot.allowed('/services/fast.html' 'anything'))<line_sep>self.assertTrue(robot.allowed('/services/slow.html' 'anything'))<line_sep>self.assertFalse(robot.allowed('/orgo.gif' 'anything'))<line_sep>self.assertTrue(robot.allowed('/org/about.html' 'anything'))<line_sep>self.assertFalse(robot.allowed('/org/plans.html' 'anything'))<line_sep>self.assertFalse(robot.allowed('/%7Ejim/jim.html' 'anything'))<line_sep>self.assertTrue(robot.allowed('/%7Emak/mak.html' 'anything'))<block_end><def_stmt>test_after_response_hook self<block_start>'''Calls after_response_hook when response is received'''<line_sep>state={"called":<false>}<def_stmt>hook response<block_start>state["called"]=<true><line_sep>self.assertEquals(response.status_code 200)<block_end><with_stmt>requests_fixtures('test_after_response_hook')<block_start>robots.Robots.fetch('http://example.com/robots.txt' after_response_hook=hook)<line_sep>self.assertTrue(state["called"])<block_end><block_end><def_stmt>test_after_response_hook_on_error self<block_start>'''Calls after_response_hook when error occurs during fetch'''<line_sep>state={"called":<false>}<line_sep>expected_url='http://localhost:8080/robots.txt'<def_stmt>hook response<block_start>state["called"]=<true><line_sep>self.assertIsInstance(response robots.exceptions.ConnectionException)<line_sep>self.assertEquals(response.url expected_url)<block_end><with_stmt>self.assertRaises(robots.exceptions.ConnectionException)<block_start>robots.Robots.fetch(expected_url after_response_hook=hook)<block_end>self.assertTrue(state["called"])<block_end><def_stmt>test_after_parse_hook self<block_start>'''Calls after_parse_hook after parsing robots.txt'''<line_sep>state={"called":<false>}<def_stmt>hook robots<block_start>state["called"]=<true><line_sep>self.assertFalse(robots.allowed('/disallowed' 'me'))<block_end><with_stmt>requests_fixtures('test_after_parse_hook')<block_start>robots.Robots.fetch('http://example.com/robots.txt' after_parse_hook=hook)<line_sep>self.assertTrue(state["called"])<block_end><block_end><block_end><class_stmt>AllowNoneTest(unittest.TestCase)<block_start>'''Tests about the AllowNone Robots class.'''<def_stmt>test_allow self<block_start>'''Allows nothing.'''<line_sep>robot=robots.AllowNone('http://example.com/robots.txt')<line_sep>self.assertFalse(robot.allowed('/' 'agent'))<block_end><def_stmt>test_allow_robots_txt self<block_start>'''Allows robots.txt.'''<line_sep>robot=robots.AllowNone('http://example.com/robots.txt')<line_sep>self.assertTrue(robot.allowed('/robots.txt' 'agent'))<block_end><block_end><class_stmt>AllowAllTest(unittest.TestCase)<block_start>'''Tests about the AllowAll Robots class.'''<def_stmt>test_allow self<block_start>'''Allows nothing.'''<line_sep>robot=robots.AllowAll('http://example.com/robots.txt')<line_sep>self.assertTrue(robot.allowed('/' 'agent'))<block_end><block_end>
|
<import_stmt>pytest<import_from_stmt>osbrain run_agent<import_from_stmt>osbrain run_logger<import_from_stmt>osbrain run_nameserver<import_from_stmt>osbrain.helper sync_agent_logger<line_sep>@pytest.fixture(scope='function')<def_stmt>nsproxy request<block_start>ns=run_nameserver()<line_sep><yield>ns<line_sep>ns.shutdown()<block_end>@pytest.fixture(scope='function')<def_stmt>agent_logger request<block_start>ns=run_nameserver()<line_sep>agent=run_agent('a0')<line_sep>logger=run_logger('logger')<line_sep>agent.set_logger(logger)<line_sep>sync_agent_logger(agent=agent logger=logger)<line_sep><yield>agent logger<line_sep>ns.shutdown()<block_end>
|
'''
Things this test checks:
- frame.f_trace is None when there are only regular breakpoints.
- The no-op tracing function is set by default (otherwise when set tracing functions have no effect).
- When stepping in, frame.f_trace must be set by the frame eval.
- When stepping over/return, the frame.f_trace must not be set on intermediate callers.
TODO:
- When frame.f_trace is set to the default tracing function, it'll become None again in frame
eval mode if not stepping (if breakpoints weren't changed).
- The tracing function in the frames that deal with unhandled exceptions must be set when dealing
with unhandled exceptions.
- The tracing function in the frames that deal with unhandled exceptions must NOT be set when
NOT dealing with unhandled exceptions.
- If handled exceptions should be dealt with, the proper tracing should be set in frame.f_trace.
'''<import_stmt>sys<import_from_stmt>_pydevd_frame_eval pydevd_frame_tracing<def_stmt>check_with_no_trace <block_start><if_stmt><false><block_start>print('break on check_with_trace')<block_end>frame=sys._getframe()<if_stmt>frame.f_trace<is><not><none><block_start><raise>AssertionError('Expected %s to be None'%(frame.f_trace ))<block_end><if_stmt>sys.gettrace()<is><not>pydevd_frame_tracing.dummy_tracing_holder.dummy_trace_func<block_start><raise>AssertionError('Expected %s to be dummy_trace_func'%(sys.gettrace() ))<block_end><block_end><def_stmt>check_step_in_then_step_return <block_start>frame=sys._getframe()<line_sep>f_trace=frame.f_trace<if_stmt>f_trace.__class__.__name__<ne>'SafeCallWrapper'<block_start><raise>AssertionError('Expected %s to be SafeCallWrapper'%(f_trace.__class__.__name__ ))<block_end>check_with_no_trace()<block_end><def_stmt>check_revert_to_dummy <block_start>check_with_no_trace()<block_end><if_stmt>__name__<eq>'__main__'# Check how frame eval works.
<block_start><if_stmt>sys.version_info[0:2]<l>(3 6)<block_start><raise>AssertionError('Only available for Python 3.6 onwards. Found: %s'%(sys.version_info[0:2] ))<block_end>check_with_no_trace()# break on global (step over)
check_step_in_then_step_return()<import_stmt>pydevd_tracing<import_stmt>pydevd<line_sep># This is what a remote attach would do (should revert to the frame eval mode).
pydevd_tracing.SetTrace(pydevd.get_global_debugger().trace_dispatch)<line_sep>check_revert_to_dummy()<line_sep>print('TEST SUCEEDED!')<block_end>
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_stmt>scipy.sparse<as>sp<import_stmt>numpy<as>np<import_from_stmt>time time<import_stmt>argparse<def_stmt>parse_args <block_start>parser=argparse.ArgumentParser(description="Run GMF.")<line_sep>parser.add_argument('--path' nargs='?' default='Data/' help='Input data path.')<line_sep>parser.add_argument('--dataset' nargs='?' default='ml-1m' help='Choose a dataset.')<line_sep>parser.add_argument('--num_neg' type=int default=4 help='Number of negative instances to pair with a positive instance.')<line_sep>parser.add_argument('--train_data_path' type=str default="Data/train_data.csv" help='train_data_path')<line_sep><return>parser.parse_args()<block_end><def_stmt>get_train_data filename write_file num_negatives<block_start>'''
Read a .rating file (user\t item\t rating per line), build a dok matrix of the
positive interactions, and write them to a CSV together with num_negatives
randomly sampled negative instances per positive one.
'''<line_sep># Get number of users and items
num_users,num_items=0 0<with_stmt>open(filename "r")<as>f<block_start>line=f.readline()<while_stmt>line<ne><none><and>line<ne>""<block_start>arr=line.split("\t")<line_sep>u,i=int(arr[0]) int(arr[1])<line_sep>num_users=max(num_users u)<line_sep>num_items=max(num_items i)<line_sep>line=f.readline()<block_end><block_end>print("users_num:" num_users "items_num:" num_items)<line_sep># Construct matrix
mat=sp.dok_matrix((num_users+1 num_items+1) dtype=np.float32)<with_stmt>open(filename "r")<as>f<block_start>line=f.readline()<while_stmt>line<ne><none><and>line<ne>""<block_start>arr=line.split("\t")<line_sep>user,item,rating=int(arr[0]) int(arr[1]) float(arr[2])<if_stmt>(rating<g>0)<block_start>mat[user item]=1.0<block_end>line=f.readline()<block_end><block_end>file=open(write_file 'w')<line_sep>print("writing "+write_file)<for_stmt>(u i) mat.keys()# positive instance
<block_start>user_input=str(u)<line_sep>item_input=str(i)<line_sep>label=str(1)<line_sep>sample="{0},{1},{2}".format(user_input item_input label)+"\n"<line_sep>file.write(sample)<line_sep># negative instances
<for_stmt>t range(num_negatives)<block_start>j=np.random.randint(num_items)<while_stmt>(u j)<in>mat.keys()<block_start>j=np.random.randint(num_items)<block_end>user_input=str(u)<line_sep>item_input=str(j)<line_sep>label=str(0)<line_sep>sample="{0},{1},{2}".format(user_input item_input label)+"\n"<line_sep>file.write(sample)<block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>args=parse_args()<line_sep>get_train_data(args.path+args.dataset+".train.rating" args.train_data_path args.num_neg)<block_end>
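# Data format sketch (editorial addition; the concrete numbers are made up):
# each input line of ml-1m.train.rating is "user\titem\trating\ttimestamp",
# e.g. "0\t32\t4\t978300760"; the emitted train_data.csv holds one positive
# row per interaction followed by num_neg sampled negatives, e.g.:
#   0,32,1
#   0,1187,0
#   0,453,0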
|
"""Package containing classes that represent a source of CloudTrail records, e.g. from an API or disk storage"""<line_sep>
|
"""Amazon QuickSight Delete Module."""<import_stmt>logging<import_from_stmt>typing Any Callable Dict Optional<import_stmt>boto3<import_from_stmt>awswrangler _utils exceptions sts<import_from_stmt>awswrangler.quicksight._get_list get_dashboard_id get_data_source_id get_dataset_id get_template_id list_dashboards list_data_sources list_datasets list_templates <line_sep>_logger:logging.Logger=logging.getLogger(__name__)<def_stmt>_delete func_name:str account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none> **kwargs:Any<arrow><none><block_start>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>account_id<is><none><block_start>account_id=sts.get_account_id(boto3_session=session)<block_end>client:boto3.client=_utils.client(service_name="quicksight" session=session)<line_sep>func:Callable[<ellipsis> <none>]=getattr(client func_name)<line_sep>func(AwsAccountId=account_id **kwargs)<block_end><def_stmt>delete_dashboard name:Optional[str]=<none> dashboard_id:Optional[str]=<none> version_number:Optional[int]=<none> account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none> <arrow><none><block_start>"""Delete a dashboard.
Note
----
You must pass a not None ``name`` or ``dashboard_id`` argument.
Parameters
----------
name : str, optional
Dashboard name.
dashboard_id : str, optional
The ID for the dashboard.
version_number : int, optional
The version number of the dashboard. If the version number property is provided,
only the specified version of the dashboard is deleted.
account_id : str, optional
If None, the account ID will be inferred from your boto3 session.
boto3_session : boto3.Session(), optional
Boto3 Session. The default boto3 session will be used if boto3_session receives None.
Returns
-------
None
None.
Examples
--------
>>> import awswrangler as wr
>>> wr.quicksight.delete_dashboard(name="...")
"""<if_stmt>(name<is><none>)<and>(dashboard_id<is><none>)<block_start><raise>exceptions.InvalidArgument("You must pass a not None name or dashboard_id argument.")<block_end>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>(dashboard_id<is><none>)<and>(name<is><not><none>)<block_start>dashboard_id=get_dashboard_id(name=name account_id=account_id boto3_session=session)<block_end>args:Dict[str Any]={"func_name":"delete_dashboard" "account_id":account_id "boto3_session":session "DashboardId":dashboard_id }<if_stmt>version_number<is><not><none><block_start>args["VersionNumber"]=version_number<block_end>_delete(**args)<block_end><def_stmt>delete_dataset name:Optional[str]=<none> dataset_id:Optional[str]=<none> account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none> <arrow><none><block_start>"""Delete a dataset.
Note
----
You must pass a not None ``name`` or ``dataset_id`` argument.
Parameters
----------
name : str, optional
Dataset name.
dataset_id : str, optional
The ID for the dataset.
account_id : str, optional
If None, the account ID will be inferred from your boto3 session.
boto3_session : boto3.Session(), optional
Boto3 Session. The default boto3 session will be used if boto3_session receives None.
Returns
-------
None
None.
Examples
--------
>>> import awswrangler as wr
>>> wr.quicksight.delete_dataset(name="...")
"""<if_stmt>(name<is><none>)<and>(dataset_id<is><none>)<block_start><raise>exceptions.InvalidArgument("You must pass a not None name or dataset_id argument.")<block_end>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>(dataset_id<is><none>)<and>(name<is><not><none>)<block_start>dataset_id=get_dataset_id(name=name account_id=account_id boto3_session=session)<block_end>args:Dict[str Any]={"func_name":"delete_data_set" "account_id":account_id "boto3_session":session "DataSetId":dataset_id }<line_sep>_delete(**args)<block_end><def_stmt>delete_data_source name:Optional[str]=<none> data_source_id:Optional[str]=<none> account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none> <arrow><none><block_start>"""Delete a data source.
Note
----
You must pass a not None ``name`` or ``data_source_id`` argument.
Parameters
----------
name : str, optional
Data source name.
data_source_id : str, optional
The ID for the data source.
account_id : str, optional
If None, the account ID will be inferred from your boto3 session.
boto3_session : boto3.Session(), optional
Boto3 Session. The default boto3 session will be used if boto3_session receives None.
Returns
-------
None
None.
Examples
--------
>>> import awswrangler as wr
>>> wr.quicksight.delete_data_source(name="...")
"""<if_stmt>(name<is><none>)<and>(data_source_id<is><none>)<block_start><raise>exceptions.InvalidArgument("You must pass a not None name or data_source_id argument.")<block_end>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>(data_source_id<is><none>)<and>(name<is><not><none>)<block_start>data_source_id=get_data_source_id(name=name account_id=account_id boto3_session=session)<block_end>args:Dict[str Any]={"func_name":"delete_data_source" "account_id":account_id "boto3_session":session "DataSourceId":data_source_id }<line_sep>_delete(**args)<block_end><def_stmt>delete_template name:Optional[str]=<none> template_id:Optional[str]=<none> version_number:Optional[int]=<none> account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none> <arrow><none><block_start>"""Delete a tamplate.
Note
----
You must pass a not None ``name`` or ``template_id`` argument.
Parameters
----------
name : str, optional
Template name.
template_id : str, optional
The ID for the template.
version_number : int, optional
Specifies the version of the template that you want to delete.
If you don't provide a version number, it deletes all versions of the template.
account_id : str, optional
If None, the account ID will be inferred from your boto3 session.
boto3_session : boto3.Session(), optional
Boto3 Session. The default boto3 session will be used if boto3_session receives None.
Returns
-------
None
None.
Examples
--------
>>> import awswrangler as wr
>>> wr.quicksight.delete_template(name="...")
"""<if_stmt>(name<is><none>)<and>(template_id<is><none>)<block_start><raise>exceptions.InvalidArgument("You must pass a not None name or template_id argument.")<block_end>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>(template_id<is><none>)<and>(name<is><not><none>)<block_start>template_id=get_template_id(name=name account_id=account_id boto3_session=session)<block_end>args:Dict[str Any]={"func_name":"delete_template" "account_id":account_id "boto3_session":session "TemplateId":template_id }<if_stmt>version_number<is><not><none><block_start>args["VersionNumber"]=version_number<block_end>_delete(**args)<block_end><def_stmt>delete_all_dashboards account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none><arrow><none><block_start>"""Delete all dashboards.
Parameters
----------
account_id : str, optional
If None, the account ID will be inferred from your boto3 session.
boto3_session : boto3.Session(), optional
Boto3 Session. The default boto3 session will be used if boto3_session receives None.
Returns
-------
None
None.
Examples
--------
>>> import awswrangler as wr
>>> wr.quicksight.delete_all_dashboards()
"""<line_sep>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>account_id<is><none><block_start>account_id=sts.get_account_id(boto3_session=session)<block_end><for_stmt>dashboard list_dashboards(account_id=account_id boto3_session=session)<block_start>delete_dashboard(dashboard_id=dashboard["DashboardId"] account_id=account_id boto3_session=session)<block_end><block_end><def_stmt>delete_all_datasets account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none><arrow><none><block_start>"""Delete all datasets.
Parameters
----------
account_id : str, optional
If None, the account ID will be inferred from your boto3 session.
boto3_session : boto3.Session(), optional
Boto3 Session. The default boto3 session will be used if boto3_session receives None.
Returns
-------
None
None.
Examples
--------
>>> import awswrangler as wr
>>> wr.quicksight.delete_all_datasets()
"""<line_sep>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>account_id<is><none><block_start>account_id=sts.get_account_id(boto3_session=session)<block_end><for_stmt>dataset list_datasets(account_id=account_id boto3_session=session)<block_start>delete_dataset(dataset_id=dataset["DataSetId"] account_id=account_id boto3_session=session)<block_end><block_end><def_stmt>delete_all_data_sources account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none><arrow><none><block_start>"""Delete all data sources.
Parameters
----------
account_id : str, optional
If None, the account ID will be inferred from your boto3 session.
boto3_session : boto3.Session(), optional
Boto3 Session. The default boto3 session will be used if boto3_session receives None.
Returns
-------
None
None.
Examples
--------
>>> import awswrangler as wr
>>> wr.quicksight.delete_all_data_sources()
"""<line_sep>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>account_id<is><none><block_start>account_id=sts.get_account_id(boto3_session=session)<block_end><for_stmt>data_source list_data_sources(account_id=account_id boto3_session=session)<block_start>delete_data_source(data_source_id=data_source["DataSourceId"] account_id=account_id boto3_session=session)<block_end><block_end><def_stmt>delete_all_templates account_id:Optional[str]=<none> boto3_session:Optional[boto3.Session]=<none><arrow><none><block_start>"""Delete all templates.
Parameters
----------
account_id : str, optional
If None, the account ID will be inferred from your boto3 session.
boto3_session : boto3.Session(), optional
Boto3 Session. The default boto3 session will be used if boto3_session receives None.
Returns
-------
None
None.
Examples
--------
>>> import awswrangler as wr
>>> wr.quicksight.delete_all_templates()
"""<line_sep>session:boto3.Session=_utils.ensure_session(session=boto3_session)<if_stmt>account_id<is><none><block_start>account_id=sts.get_account_id(boto3_session=session)<block_end><for_stmt>template list_templates(account_id=account_id boto3_session=session)<block_start>delete_template(template_id=template["TemplateId"] account_id=account_id boto3_session=session)<block_end><block_end>
|
"""API 接口的设置文件"""<import_from_stmt>typing Optional<import_from_stmt>flask abort<import_from_stmt>..core error<class_stmt>Authorization<block_start>"""权限验证中的设置"""<line_sep>ExecuteAPMissing=<true># 在未找到接入点信息时是否允许
@staticmethod<def_stmt>UnAuthorized _reason:error.Error<block_start>"""
验证失败时的返回值:
_reason: 原因-错误类型
"""<line_sep><return>abort(403)<block_end>@staticmethod<def_stmt>NotPermitted _diff:int _strict:Optional[bool]=<false><block_start>"""
权限不足时的返回值:
_diff: 所需权限与拥有权限的差值
_strict: 是否指定需要某一级别权限值
"""<line_sep><return>abort(403)<block_end><block_end><class_stmt>HTTPResponseHeader<block_start>"""HTTP响应头部分的设置"""<line_sep>AddCORSSupport=<true># 是否启用 CORS 请求支持
CORSDomain='*'# 启用支持 CORS 的域设置
SupportMethods=['GET' 'HEAD' 'POST' 'PUT' 'DELETE' 'CONNECT' 'OPTIONS' 'TRACE' 'PATCH']<block_end># 支持的请求类型 - 如非必要请勿修改
<class_stmt>Response<block_start>"""响应中的设置"""<line_sep>Code="code"# 错误代码键
Description="description"# 错误解释键
Message="message"# 错误消息键
<class_stmt>Codes<block_start>"""响应代码设置"""<line_sep>Success=0# 未发生错误的成功代码
Unknown=-1<block_end># 未知错误代码
<class_stmt>Messages<block_start>"""响应消息设置"""<line_sep>Success="success"# 未发生错误时的成功消息
Unknown="undefined"<block_end># 未知错误消息
<class_stmt>Descriptions<block_start>"""响应解释设置"""<line_sep>Success="成功"# 成功时的解释
Unknown="发生未知错误"<block_end><block_end># 未知错误解释
|
<import_from_stmt>datapackage Package<class_stmt>DataStream<block_start><def_stmt>__init__ self dp=<none> res_iter=<none> stats=<none><block_start>self.dp=dp<if>dp<is><not><none><else>Package()<line_sep>self.res_iter=res_iter<if>res_iter<is><not><none><else>[]<line_sep>self.stats=stats<if>stats<is><not><none><else>[]<block_end><def_stmt>merge_stats self<block_start>ret={}<for_stmt>s self.stats<block_start>ret.update(s)<block_end><return>ret<block_end><def_stmt>_process self<block_start><return>self<block_end><block_end>
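# Usage sketch (values are illustrative, not from the original module): merge_stats() folds the per-processor
# stat dicts into one, e.g. DataStream(stats=[{'rows': 10}, {'bytes': 120}]).merge_stats() == {'rows': 10, 'bytes': 120}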
|
"""Concertina: small Python Workflow execution handler."""<import_stmt>datetime<import_stmt>graphviz<import_from_stmt>IPython.display display<import_from_stmt>IPython.display update_display<class_stmt>ConcertinaQueryEngine(object)<block_start><def_stmt>__init__ self final_predicates sql_runner<block_start>self.final_predicates=final_predicates<line_sep>self.final_result={}<line_sep>self.sql_runner=sql_runner<block_end><def_stmt>Run self action<block_start><assert_stmt>action['launcher']<in>('query' 'none')<if_stmt>action['launcher']<eq>'query'<block_start>predicate=action['predicate']<line_sep>print('Running predicate:' predicate end='')<line_sep>start=datetime.datetime.now()<line_sep>result=self.sql_runner(action['sql'] action['engine'] is_final=(predicate<in>self.final_predicates))<line_sep>end=datetime.datetime.now()<line_sep>print(' (%d seconds)'%(end-start).seconds)<if_stmt>predicate<in>self.final_predicates<block_start>self.final_result[predicate]=result<block_end><block_end><block_end><block_end><class_stmt>ConcertinaDryRunEngine(object)<block_start><def_stmt>Run self action<block_start>print(action)<block_end><block_end><class_stmt>Concertina(object)<block_start>DISPLAY_COUNT=0<line_sep>@classmethod<def_stmt>GetDisplayId cls<block_start>cls.DISPLAY_COUNT=cls.DISPLAY_COUNT+1<line_sep><return>'Concertina_%d'%cls.DISPLAY_COUNT<block_end><def_stmt>SortActions self<block_start>actions_to_assign={a['name']<for>a self.config}<line_sep>complete=set()<line_sep>result=[]<while_stmt>actions_to_assign<block_start>remains=len(actions_to_assign)<for_stmt>a list(actions_to_assign)<block_start><if_stmt>complete<ge>set(self.action[a]["requires"])<block_start>result.append(a)<line_sep>complete<augor>{a}<line_sep>actions_to_assign<augsub>{a}<block_end><block_end><if_stmt>len(actions_to_assign)<eq>remains<block_start><assert_stmt><false> "Could not schedule: %s"%self.config<block_end><block_end><return>result<block_end><def_stmt>__init__ self config engine<block_start>self.config=config<line_sep>self.action={a["name"]:a<for>a self.config}<line_sep>self.actions_to_run=self.SortActions()<line_sep>self.engine=engine<assert_stmt>len(self.action)<eq>len(self.config)<line_sep>self.all_actions={a["name"]<for>a self.config}<line_sep>self.complete_actions=set()<line_sep>self.running_actions=set()<line_sep>self.display_id=self.GetDisplayId()<line_sep>self.Display()<block_end><def_stmt>RunOneAction self<block_start>self.UpdateDisplay()<line_sep>one_action=self.actions_to_run[0]<del_stmt>self.actions_to_run[0]<line_sep>self.running_actions<augor>{one_action}<line_sep>self.UpdateDisplay()<line_sep>self.engine.Run(self.action[one_action].get('action' {}))<line_sep>self.running_actions<augsub>{one_action}<line_sep>self.complete_actions<augor>{one_action}<line_sep>self.UpdateDisplay()<block_end><def_stmt>Run self<block_start><while_stmt>self.actions_to_run<block_start>self.RunOneAction()<block_end><block_end><def_stmt>ActionColor self a<block_start><if_stmt>self.action[a].get('type')<eq>'data'<block_start><return>'lightskyblue1'<block_end><if_stmt>a<in>self.complete_actions<block_start><return>'darkolivegreen1'<block_end><if_stmt>a<in>self.running_actions<block_start><return>'gold'<block_end><return>'gray'<block_end><def_stmt>ActionShape self 
a<block_start><if_stmt>'type'<in>self.action[a]<block_start>action_type=self.action[a]['type']<if_stmt>action_type<eq>'data'<block_start><return>'cylinder'<block_end><if_stmt>action_type<eq>'final'<block_start><return>'diamond'<block_end><block_end><return>'box'<block_end><def_stmt>AsGraphViz self<block_start>g=graphviz.Digraph('Concertina')<for_stmt>a self.all_actions<block_start>color=self.ActionColor(a)<line_sep>shape=self.ActionShape(a)<line_sep>styles=['filled']<line_sep>g.node(a shape=shape fillcolor=color style='filled,rounded' color='gray34')<for_stmt>prerequisite self.action[a]['requires']<block_start>g.edge(prerequisite a)<block_end><block_end><return>g<block_end><def_stmt>Display self<block_start>display(self.AsGraphViz() display_id=self.display_id)<block_end><def_stmt>UpdateDisplay self<block_start>update_display(self.AsGraphViz() display_id=self.display_id)<block_end><block_end><def_stmt>RenamePredicate table_to_export_map dependency_edges data_dependency_edges from_name to_name<block_start>new_table_to_export_map={}<line_sep>new_dependency_edges=set()<line_sep>new_data_dependency_edges=set()<for_stmt>k,v table_to_export_map.items()<block_start><if_stmt>k<eq>from_name<block_start>new_table_to_export_map[to_name]=v<block_end><else_stmt><block_start>new_table_to_export_map[k]=v<block_end><block_end><for_stmt>a,b dependency_edges<block_start><if_stmt>a<eq>from_name<block_start>a=to_name<block_end><if_stmt>b<eq>from_name<block_start>b=to_name<block_end>new_dependency_edges.add((a b))<block_end><for_stmt>a,b data_dependency_edges<block_start><if_stmt>a<eq>from_name<block_start>a=to_name<block_end><if_stmt>b<eq>from_name<block_start>b=to_name<block_end>new_data_dependency_edges.add((a b))<block_end><return>new_table_to_export_map new_dependency_edges new_data_dependency_edges<block_end><def_stmt>ExecuteLogicaProgram logica_executions sql_runner sql_engine<block_start><def_stmt>ConcertinaConfig table_to_export_map dependency_edges data_dependency_edges final_predicates<block_start>depends_on={}<for_stmt>source,target dependency_edges|data_dependency_edges<block_start>depends_on[target]=depends_on.get(target set())|{source}<block_end>data={d<for>d,_ data_dependency_edges}<line_sep>data<augor>{d<for>d,_ dependency_edges<if>d<not><in>table_to_export_map}<line_sep>result=[]<for_stmt>d data<block_start>result.append({'name':d 'type':'data' 'requires':[] 'action':{'predicate':d 'launcher':'none'}})<block_end><for_stmt>t,sql table_to_export_map.items()<block_start>result.append({'name':t 'type':('final'<if>t<in>final_predicates<else>'intermediate') 'requires':list(depends_on.get(t set())) 'action':{'predicate':t 'launcher':'query' 'engine':sql_engine 'sql':sql}})<block_end><return>result<block_end>table_to_export_map={}<line_sep>dependency_edges=set()<line_sep>data_dependency_edges=set()<line_sep>final_predicates={e.main_predicate<for>e logica_executions}<for_stmt>e logica_executions<block_start>p_table_to_export_map,p_dependency_edges,p_data_dependency_edges=(e.table_to_export_map e.dependency_edges e.data_dependency_edges)<for_stmt>p final_predicates<block_start><if_stmt>e.main_predicate<ne>p<and>p<in>e.table_to_export_map<block_start>p_table_to_export_map,p_dependency_edges,p_data_dependency_edges=(RenamePredicate(p_table_to_export_map p_dependency_edges p_data_dependency_edges p '⤓'+p))<block_end><block_end><for_stmt>k,v p_table_to_export_map.items()<block_start>table_to_export_map[k]=e.PredicateSpecificPreamble(e.main_predicate)+v<block_end><for_stmt>a,b 
p_dependency_edges<block_start>dependency_edges.add((a b))<block_end><for_stmt>a,b p_data_dependency_edges<block_start>data_dependency_edges.add((a b))<block_end><block_end>config=ConcertinaConfig(table_to_export_map dependency_edges data_dependency_edges final_predicates)<line_sep>engine=ConcertinaQueryEngine(final_predicates=final_predicates sql_runner=sql_runner)<line_sep>preambles=set(e.preamble<for>e logica_executions)<assert_stmt>len(preambles)<eq>1 'Inconsistent preambles: %s'%preambles<line_sep>[preamble]=list(preambles)<if_stmt>preamble<block_start>sql_runner(preamble sql_engine is_final=<false>)<block_end>concertina=Concertina(config engine)<line_sep>concertina.Run()<line_sep><return>engine.final_result<block_end>
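# Shape of the config consumed by Concertina, as built by ConcertinaConfig above (names and engine are illustrative):
# [{'name': 'MyData', 'type': 'data', 'requires': [], 'action': {'predicate': 'MyData', 'launcher': 'none'}},
# {'name': 'MyTable', 'type': 'final', 'requires': ['MyData'], 'action': {'predicate': 'MyTable', 'launcher': 'query', 'engine': 'bigquery', 'sql': '...'}}]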
|
<import_from_stmt>plugin.core.helpers.variable merge<import_from_stmt>subprocess Popen<import_stmt>json<import_stmt>logging<import_stmt>os<import_stmt>subprocess<import_stmt>sys<line_sep>CURRENT_PATH=os.path.abspath(__file__)<line_sep>HOST_PATH=os.path.join(os.path.dirname(CURRENT_PATH) 'host.py')<line_sep>log=logging.getLogger(__name__)<class_stmt>BaseTest(object)<block_start>name=<none><line_sep>optional=<false><line_sep>@classmethod<def_stmt>run cls search_paths<block_start>metadata={}<line_sep>message=<none><line_sep>success=<none><line_sep># Retrieve names of test functions
names=[name<for>name dir(cls)<if>name.startswith('test_')]<if_stmt><not>names<block_start><return>cls.build_failure('No tests defined')<block_end># Run tests
<for_stmt>name names# Ensure function exists
<block_start><if_stmt><not>hasattr(cls name)<block_start><return>cls.build_failure('Unable to find function: %r'%name)<block_end># Run test
<try_stmt><block_start>result=cls.spawn(name search_paths)<line_sep># Merge test result into `metadata`
merge(metadata result recursive=<true>)<line_sep># Test successful
message=<none><line_sep>success=<true><block_end><except_stmt>Exception<as>ex<block_start><if_stmt>success<block_start><continue><block_end>message=ex.message<line_sep>success=<false><block_end><block_end><if_stmt><not>success# Trigger event
<block_start>cls.on_failure(message)<line_sep># Build result
<return>cls.build_failure(message)<block_end># Trigger event
cls.on_success(metadata)<line_sep># Build result
<return>cls.build_success(metadata)<block_end>@classmethod<def_stmt>spawn cls name search_paths# Find path to python executable
<block_start>python_exe=cls.find_python_executable()<if_stmt><not>python_exe<block_start><raise>Exception('Unable to find python executable')<block_end># Ensure test host exists
<if_stmt><not>os.path.exists(HOST_PATH)<block_start><raise>Exception('Unable to find "host.py" script')<block_end># Build test process arguments
args=[python_exe HOST_PATH '--module' cls.__module__ '--name' name '--search-paths="%s"'%(';'.join(search_paths)) ]<line_sep># Spawn test (in sub-process)
log.debug('Starting test: %s:%s' cls.__module__ name)<line_sep>process=Popen(args stdout=subprocess.PIPE stderr=subprocess.PIPE)<line_sep># Wait for test to complete
stdout,stderr=process.communicate()<if_stmt>stderr<block_start>log.debug('Test returned messages:\n%s' stderr.replace("\r\n" "\n"))<block_end># Parse output
result=<none><if_stmt>stdout<block_start><try_stmt><block_start>result=json.loads(stdout)<block_end><except_stmt>Exception<as>ex<block_start>log.warn('Invalid output returned %r - %s' stdout ex exc_info=<true>)<block_end><block_end># Build result
<if_stmt>process.returncode<ne>0# Test failed
<block_start><if_stmt>result<and>result.get('message')<block_start><if_stmt>result.get('traceback')<block_start>log.info('%s - %s' result['message'] result['traceback'])<block_end><raise>Exception(result['message'])<block_end><raise>Exception('Unknown error (code: %s)'%process.returncode)<block_end># Test successful
<return>result<block_end>@classmethod<def_stmt>find_python_executable cls<block_start>candidates=[sys.executable]<line_sep># Add candidates based on the script path in `sys.argv`
<if_stmt>sys.argv<and>len(sys.argv)<g>0<and>os.path.exists(sys.argv[0])<block_start>bootstrap_path=sys.argv[0]<line_sep>resources_pos=bootstrap_path.lower().find('resources')<if_stmt>resources_pos<g>0<block_start>pms_path=bootstrap_path[:resources_pos]<line_sep>cls._add_python_home_candidates(candidates pms_path)<block_end><block_end># Add candidates relative to `PLEX_MEDIA_SERVER_HOME`
pms_home=os.environ.get('PLEX_MEDIA_SERVER_HOME')<if_stmt>pms_home<and>os.path.exists(pms_home)<block_start>cls._add_python_home_candidates(candidates pms_home)<block_end># Add candidates relative to `PYTHONHOME`
python_home=os.environ.get('PYTHONHOME')<if_stmt>python_home<and>os.path.exists(python_home)<block_start>candidates.append(os.path.join(python_home 'bin' 'python'))<block_end># Use first candidate that exists
<for_stmt>path candidates<block_start><if_stmt>os.path.exists(path)<block_start><return>path<block_end><block_end>log.warn('Unable to find python executable' extra={'candidates':candidates})<line_sep><return><none><block_end>@staticmethod<def_stmt>_add_python_home_candidates candidates path# Windows
<block_start>candidates.append(os.path.join(path 'PlexScriptHost.exe'))<line_sep># *nix
candidates.append(os.path.join(path 'Plex Script Host'))<line_sep>candidates.append(os.path.join(path 'Resources' 'Plex Script Host'))<line_sep>candidates.append(os.path.join(path 'Resources' 'Python' 'bin' 'python'))<block_end>#
# Events
#
@classmethod<def_stmt>on_failure cls message<block_start><pass><block_end>@classmethod<def_stmt>on_success cls metadata<block_start><pass><block_end>#
# Helpers
#
@classmethod<def_stmt>build_exception cls message exc_info=<none><block_start><if_stmt>exc_info<is><none><block_start>exc_info=sys.exc_info()<block_end><return>cls.build_failure(message exc_info=exc_info)<block_end>@classmethod<def_stmt>build_failure cls message **kwargs<block_start>result={'success':<false> 'message':message}<line_sep># Merge extra attributes
merge(result kwargs)<line_sep><return>result<block_end>@staticmethod<def_stmt>build_success metadata<block_start><return>{'success':<true> 'metadata':metadata}<block_end><block_end>
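# Result shapes produced by the helpers above (values are illustrative): build_failure('boom', traceback='...')
# returns {'success': False, 'message': 'boom', 'traceback': '...'} while build_success({'version': 1})
# returns {'success': True, 'metadata': {'version': 1}}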
|
"""
Tests for fig.logo.
"""<import_stmt>pytest<import_from_stmt>pygmt Figure<line_sep>@pytest.mark.mpl_image_compare<def_stmt>test_logo <block_start>"""
Plot the GMT logo as a stand-alone plot.
"""<line_sep>fig=Figure()<line_sep>fig.logo()<line_sep><return>fig<block_end>@pytest.mark.mpl_image_compare<def_stmt>test_logo_on_a_map <block_start>"""
Plot the GMT logo at the upper right corner of a map.
"""<line_sep>fig=Figure()<line_sep>fig.basemap(region=[-90 -70 0 20] projection="M15c" frame=<true>)<line_sep>fig.logo(position="jTR+o0.25c/0.25c+w7.5c" box=<true>)<line_sep><return>fig<block_end>
|
<import_stmt>os<import_stmt>re<import_from_stmt>itertools izip<import_stmt>inflection<def_stmt>preprocess <block_start>"splits _sources/reference.rst into separate files"<line_sep>text=open("./_sources/reference.rst" "r").read()<line_sep>os.remove("./_sources/reference.rst")<if_stmt><not>os.path.exists("./_sources/reference")<block_start>os.makedirs("./_sources/reference")<block_end><def_stmt>pairwise iterable<block_start>"s -> (s0, s1), (s2, s3), (s4, s5), ..."<line_sep>iteration=iter(iterable)<line_sep><return>izip(iteration iteration)<block_end>sections=map(str.strip re.split(r"<!--\s*(.+)\s*-->" text))<for_stmt>section,content pairwise(sections[1:])<block_start><if_stmt>section.endswith(".proto")<block_start>section_name=section[:-len(".proto")]<line_sep>file_name="./_sources/reference/{0}.rst".format(section_name)<with_stmt>open(file_name "w")<as>f<block_start>f.truncate()<line_sep>f.write(content)<line_sep>f.close()<block_end><block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>preprocess()<block_end>
|
# -*- coding: utf-8 -*-
<import_stmt>sys<import_stmt>argparse<import_from_stmt>mamba application_factory __version__<def_stmt>main <block_start>arguments=_parse_arguments()<if_stmt>arguments.version<block_start>print(__version__)<line_sep><return><block_end>factory=application_factory.ApplicationFactory(arguments)<line_sep>runner=factory.runner()<line_sep>runner.run()<if_stmt>runner.has_failed_examples<block_start>sys.exit(1)<block_end><block_end><def_stmt>_parse_arguments <block_start>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('--version' '-v' default=<false> action='store_true' help='display the version')<line_sep>parser.add_argument('--slow' '-s' default=0.075 type=float help='slow test threshold in seconds (default: %(default)s)')<line_sep>parser.add_argument('--enable-coverage' default=<false> action='store_true' help='enable code coverage measurement (default: %(default)s)')<line_sep>parser.add_argument('--coverage-file' default='.coverage' action='store' help='name of coverage data file (default: %(default)s)')<line_sep>parser.add_argument('--format' '-f' default='progress' action='store' help='output format (default: %(default)s)')<line_sep>parser.add_argument('specs' default=['./spec' './specs'] nargs='*' help='paths to specs to run or directories with specs to run (default: %(default)s)')<line_sep>parser.add_argument('--no-color' default=<false> action='store_true' help='turn off all output coloring (default: %(default)s)')<line_sep>parser.add_argument('--tags' '-t' default=<none> type=<lambda>x:[tag.strip()<for>tag x.split(',')] action='store' help='run examples with specified tags (example: -t unit,integration)')<line_sep><return>parser.parse_args()<block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
|
<import_stmt>os<import_stmt>sys<def_stmt>_write_message kind message<block_start>program=os.path.basename(sys.argv[0])<line_sep>sys.stderr.write('%s: %s: %s\n'%(program kind message))<block_end>note=<lambda>message:_write_message('note' message)<line_sep>warning=<lambda>message:_write_message('warning' message)<line_sep>error=<lambda>message:_write_message('error' message)<line_sep>fatal=<lambda>message:(_write_message('fatal error' message) sys.exit(1))<line_sep>__all__=['note' 'warning' 'error' 'fatal']<line_sep>
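# Example (assuming the script is invoked as "mytool"): warning('config file missing, using defaults') writes
# "mytool: warning: config file missing, using defaults" to stderr; fatal(...) prints the same way and exits with status 1.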
|
<import_stmt>filters<as>f<import_from_stmt>iota TransactionHash<import_from_stmt>iota.commands FilterCommand RequestFilter<import_from_stmt>iota.filters Trytes<line_sep>__all__=['CheckConsistencyCommand' ]<class_stmt>CheckConsistencyCommand(FilterCommand)<block_start>"""
Executes ``checkConsistency`` extended API command.
See :py:meth:`iota.api.Iota.check_consistency` for more info.
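Rough usage sketch (the node URI is a placeholder and ``tail_hash`` is assumed to be the
``TransactionHash`` of a tail transaction; the referenced method above is the authoritative API):
``api = Iota('https://localhost:14265')`` followed by ``api.check_consistency(tails=[tail_hash])``.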
"""<line_sep>command='checkConsistency'<def_stmt>get_request_filter self<block_start><return>CheckConsistencyRequestFilter()<block_end><def_stmt>get_response_filter self<block_start><pass><block_end><block_end><class_stmt>CheckConsistencyRequestFilter(RequestFilter)<block_start><def_stmt>__init__ self<arrow><none><block_start>super(CheckConsistencyRequestFilter self).__init__({'tails':f.Required|f.Array|f.FilterRepeater(f.Required|Trytes(TransactionHash)) })<block_end><block_end>
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_stmt>os<import_stmt>argparse<import_stmt>time<import_from_stmt>functools partial<import_stmt>numpy<as>np<import_stmt>tqdm<import_stmt>pgl<import_stmt>paddle<import_from_stmt>pgl.utils.logger log<import_from_stmt>pgl.utils.data Dataloader<import_from_stmt>model GraphSage<import_from_stmt>dataset ShardedDataset batch_fn<def_stmt>train dataloader model feature criterion optim log_per_step=100<block_start>model.train()<line_sep>batch=0<line_sep>total_loss=0.<line_sep>total_acc=0.<line_sep>total_sample=0<for_stmt>g,sample_index,index,label dataloader<block_start>batch<augadd>1<line_sep>num_samples=len(index)<line_sep>g.tensor()<line_sep>sample_index=paddle.to_tensor(sample_index)<line_sep>index=paddle.to_tensor(index)<line_sep>label=paddle.to_tensor(label)<line_sep>feat=paddle.gather(feature sample_index)<line_sep>pred=model(g feat)<line_sep>pred=paddle.gather(pred index)<line_sep>loss=criterion(pred label)<line_sep>loss.backward()<line_sep>acc=paddle.metric.accuracy(input=pred label=label k=1)<line_sep>optim.step()<line_sep>optim.clear_grad()<line_sep>total_loss<augadd>loss.numpy()<times>num_samples<line_sep>total_acc<augadd>acc.numpy()<times>num_samples<line_sep>total_sample<augadd>num_samples<if_stmt>batch%log_per_step<eq>0<block_start>log.info("Batch %s %s-Loss %s %s-Acc %s"%(batch "train" loss.numpy() "train" acc.numpy()))<block_end><block_end><return>total_loss/total_sample total_acc/total_sample<block_end>@paddle.no_grad()<def_stmt>eval dataloader model feature criterion<block_start>model.eval()<line_sep>loss_all,acc_all=[] []<for_stmt>g,sample_index,index,label dataloader<block_start>g.tensor()<line_sep>sample_index=paddle.to_tensor(sample_index)<line_sep>index=paddle.to_tensor(index)<line_sep>label=paddle.to_tensor(label)<line_sep>feat=paddle.gather(feature sample_index)<line_sep>pred=model(g feat)<line_sep>pred=paddle.gather(pred index)<line_sep>loss=criterion(pred label)<line_sep>acc=paddle.metric.accuracy(input=pred label=label k=1)<line_sep>loss_all.append(loss.numpy())<line_sep>acc_all.append(acc.numpy())<block_end><return>np.mean(loss_all) np.mean(acc_all)<block_end><def_stmt>main args<block_start><if_stmt>paddle.distributed.get_world_size()<g>1<block_start>paddle.distributed.init_parallel_env()<block_end>data=pgl.dataset.RedditDataset(args.normalize args.symmetry)<line_sep>log.info("Preprocess finish")<line_sep>log.info("Train Examples: %s"%len(data.train_index))<line_sep>log.info("Val Examples: %s"%len(data.val_index))<line_sep>log.info("Test Examples: %s"%len(data.test_index))<line_sep>log.info("Num nodes %s"%data.graph.num_nodes)<line_sep>log.info("Num edges %s"%data.graph.num_edges)<line_sep>log.info("Average Degree %s"%np.mean(data.graph.indegree()))<line_sep>graph=data.graph<line_sep>train_index=data.train_index<line_sep>val_index=data.val_index<line_sep>test_index=data.test_index<line_sep>train_label=data.train_label<line_sep>val_label=data.val_label<line_sep>test_label=data.test_label<line_sep>model=GraphSage(input_size=data.feature.shape[-1] num_class=data.num_classes hidden_size=args.hidden_size num_layers=len(args.samples))<line_sep>model=paddle.DataParallel(model)<line_sep>criterion=paddle.nn.loss.CrossEntropyLoss()<line_sep>optim=paddle.optimizer.Adam(learning_rate=args.lr parameters=model.parameters() weight_decay=0.001)<line_sep>feature=paddle.to_tensor(data.feature)<line_sep>train_ds=ShardedDataset(train_index train_label)<line_sep>val_ds=ShardedDataset(val_index val_label)<line_sep>test_ds=ShardedDataset(test_index 
test_label)<line_sep>collate_fn=partial(batch_fn graph=graph samples=args.samples)<line_sep>train_loader=Dataloader(train_ds batch_size=args.batch_size shuffle=<true> num_workers=args.sample_workers collate_fn=collate_fn)<line_sep>val_loader=Dataloader(test_ds batch_size=args.batch_size shuffle=<false> num_workers=args.sample_workers collate_fn=collate_fn)<line_sep>test_loader=Dataloader(test_ds batch_size=args.batch_size shuffle=<false> num_workers=args.sample_workers collate_fn=collate_fn)<line_sep>cal_val_acc=[]<line_sep>cal_test_acc=[]<line_sep>cal_val_loss=[]<for_stmt>epoch tqdm.tqdm(range(args.epoch))<block_start>train_loss,train_acc=train(train_loader model feature criterion optim)<line_sep>log.info("Runing epoch:%s\t train_loss:%s\t train_acc:%s" epoch train_loss train_acc)<line_sep>val_loss,val_acc=eval(val_loader model feature criterion)<line_sep>cal_val_acc.append(val_acc)<line_sep>cal_val_loss.append(val_loss)<line_sep>log.info("Runing epoch:%s\t val_loss:%s\t val_acc:%s" epoch val_loss val_acc)<line_sep>test_loss,test_acc=eval(test_loader model feature criterion)<line_sep>cal_test_acc.append(test_acc)<line_sep>log.info("Runing epoch:%s\t test_loss:%s\t test_acc:%s" epoch test_loss test_acc)<block_end>log.info("Runs %s: Model: %s Best Test Accuracy: %f"%(0 "graphsage" cal_test_acc[np.argmax(cal_val_acc)]))<block_end><if_stmt>__name__<eq>"__main__"<block_start>parser=argparse.ArgumentParser(description='graphsage')<line_sep>parser.add_argument("--normalize" action='store_true' help="normalize features")<line_sep>parser.add_argument("--symmetry" action='store_true' help="undirect graph")<line_sep>parser.add_argument("--sample_workers" type=int default=5)<line_sep>parser.add_argument("--epoch" type=int default=10)<line_sep>parser.add_argument("--hidden_size" type=int default=128)<line_sep>parser.add_argument("--batch_size" type=int default=128)<line_sep>parser.add_argument("--lr" type=float default=0.01)<line_sep>parser.add_argument('--samples' nargs='+' type=int default=[25 10])<line_sep>args=parser.parse_args()<line_sep>log.info(args)<line_sep>main(args)<block_end>
|
<import_stmt>gym<import_from_stmt>gym.spaces Discrete Box<import_from_stmt>gym.utils seeding<import_stmt>numpy<as>np<import_stmt>random<class_stmt>MarketBandit(gym.Env)<block_start><def_stmt>__init__ self config={}<block_start>self.max_inflation=config.get('max-inflation' DEFAULT_MAX_INFLATION)<line_sep>self.tickers=config.get('tickers' DEFAULT_TICKERS)<line_sep>self.data_file=config.get('data-file' DEFAULT_DATA_FILE)<line_sep>print(f"MarketBandit: max_inflation: {self.max_inflation}, tickers: {self.tickers}, data file: {self.data_file} (config: {config})")<line_sep>self.action_space=Discrete(4)<line_sep>self.observation_space=Box(low=-self.max_inflation high=self.max_inflation shape=(1 ))<line_sep>self.df=load_market_data(self.data_file)<line_sep>self.cur_context=<none><block_end><def_stmt>reset self<block_start>self.year=self.df["year"].min()<line_sep>self.cur_context=self.df.loc[self.df["year"]<eq>self.year]["inflation"][0]<line_sep>self.done=<false><line_sep>self.info={}<line_sep><return>[self.cur_context]<block_end><def_stmt>step self action<block_start><if_stmt>self.done<block_start>reward=0.<line_sep>regret=0.<block_end><else_stmt><block_start>row=self.df.loc[self.df["year"]<eq>self.year]<line_sep># calculate reward
ticker=self.tickers[action]<line_sep>reward=float(row[ticker])<line_sep># calculate regret
max_reward=max(map(<lambda>t:float(row[t]) self.tickers))<line_sep>regret=round(max_reward-reward)<line_sep># update the context
self.cur_context=float(row["inflation"])<line_sep># increment the year
self.year<augadd>1<if_stmt>self.year<ge>self.df["year"].max()<block_start>self.done=<true><block_end><block_end>context=[self.cur_context]<line_sep>#context = self.observation_space.sample()
self.info={"regret":regret "year":self.year}<line_sep><return>[context reward self.done self.info]<block_end><def_stmt>seed self seed=<none><block_start>"""Sets the seed for this env's random number generator(s).
Note:
Some environments use multiple pseudorandom number generators.
We want to capture all such seeds used in order to ensure that
there aren't accidental correlations between multiple generators.
Returns:
list<bigint>: Returns the list of seeds used in this env's random
number generators. The first value in the list should be the
"main" seed, or the value which a reproducer should pass to
'seed'. Often, the main seed equals the provided 'seed', but
this won't be true if seed=None, for example.
"""<line_sep>self.np_random,seed=seeding.np_random(seed)<line_sep><return>[seed]<block_end><block_end>
|
<import_stmt>numpy<as>np<import_from_stmt>BoxBlur BoxBlur_random<import_from_stmt>DefocusBlur DefocusBlur_random<import_from_stmt>GaussianBlur GaussianBlur_random<import_from_stmt>LinearMotionBlur LinearMotionBlur_random<import_from_stmt>PsfBlur PsfBlur_random<line_sep>blurFunctions={"0":BoxBlur_random "1":DefocusBlur_random "2":GaussianBlur_random "3":LinearMotionBlur_random "4":PsfBlur_random}<def_stmt>RandomizedBlur img<block_start>blurToApply=blurFunctions[str(np.random.randint(0 len(blurFunctions)))]<line_sep><return>blurToApply(img)<block_end>
|
"""
Benchmark inference speed on ImageNet
Example (run on Firefly RK3399):
python mali_imagenet_bench.py --target-host 'llvm -target=aarch64-linux-gnu' --host 192.168.0.100 --port 9090 --model mobilenet
"""<import_stmt>time<import_stmt>argparse<import_stmt>numpy<as>np<import_stmt>tvm<import_stmt>nnvm.compiler<import_stmt>nnvm.testing<import_from_stmt>tvm.contrib util rpc<import_from_stmt>tvm.contrib graph_runtime<as>runtime<def_stmt>run_case model dtype# load model
<block_start><if_stmt>model<eq>'vgg16'<block_start>net,params=nnvm.testing.vgg.get_workload(num_layers=16 batch_size=1 image_shape=image_shape dtype=dtype)<block_end><elif_stmt>model<eq>'resnet18'<block_start>net,params=nnvm.testing.resnet.get_workload(num_layers=18 batch_size=1 image_shape=image_shape dtype=dtype)<block_end><elif_stmt>model<eq>'mobilenet'<block_start>net,params=nnvm.testing.mobilenet.get_workload(batch_size=1 image_shape=image_shape dtype=dtype)<block_end><else_stmt><block_start><raise>ValueError('no benchmark prepared for {}.'.format(model))<block_end># compile
opt_level=2<if>dtype<eq>'float32'<else>1<with_stmt>nnvm.compiler.build_config(opt_level=opt_level)<block_start>graph,lib,params=nnvm.compiler.build(net tvm.target.mali() shape={"data":data_shape} params=params dtype=dtype target_host=args.target_host)<block_end># upload model to remote device
tmp=util.tempdir()<line_sep>lib_fname=tmp.relpath('net.tar')<line_sep>lib.export_library(lib_fname)<if_stmt>args.host<is><not><none><block_start>remote=rpc.connect(args.host args.port)<line_sep>remote.upload(lib_fname)<line_sep>ctx=remote.cl(0)<line_sep>rlib=remote.load_module('net.tar')<line_sep>rparams={k:tvm.nd.array(v ctx)<for>k,v params.items()}<block_end><else_stmt><block_start>ctx=tvm.cl(0)<line_sep>rlib=lib<line_sep>rparams=params<block_end># create graph runtime
module=runtime.create(graph rlib ctx)<line_sep>module.set_input('data' tvm.nd.array(np.random.uniform(size=(data_shape)).astype(dtype)))<line_sep>module.set_input(**rparams)<line_sep># benchmark
# print("============================================================")
# print("model: %s, dtype: %s" % (model, dtype))
# the num of runs for warm up and test
num_warmup=10<line_sep>num_test=60<if_stmt>model<eq>'mobilenet'# mobilenet is fast, so use more runs for a stable measurement
<block_start>num_warmup<augmul>5<line_sep>num_test<augmul>5<block_end># perform some warm up runs
# print("warm up..")
warm_up_timer=module.module.time_evaluator("run" ctx num_warmup)<line_sep>warm_up_timer()<line_sep># test
# print("test..")
ftimer=module.module.time_evaluator("run" ctx num_test)<line_sep>prof_res=ftimer()<line_sep># print("cost per image: %.4fs" % prof_res.mean)
print("backend: TVM-mali\tmodel: %s\tdtype: %s\tcost:%.4f"%(model dtype prof_res.mean))<block_end><if_stmt>__name__<eq>'__main__'<block_start>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('--model' type=str required=<true> choices=['vgg16' 'resnet18' 'mobilenet' 'all'] help="The model type.")<line_sep>parser.add_argument('--dtype' type=str default='float32' choices=['float16' 'float32'])<line_sep>parser.add_argument('--host' type=str help="The host address of your arm device." default=<none>)<line_sep>parser.add_argument('--port' type=int help="The port number of your arm device" default=<none>)<line_sep>parser.add_argument('--target-host' type=str help="The compilation target of host device." default=<none>)<line_sep>args=parser.parse_args()<line_sep># set parameter
batch_size=1<line_sep>num_classes=1000<line_sep>image_shape=(3 224 224)<line_sep># load model
data_shape=(batch_size )+image_shape<line_sep>out_shape=(batch_size num_classes)<if_stmt>args.model<eq>'all'# test all
<block_start><for_stmt>model ['vgg16' 'resnet18' 'mobilenet']<block_start><for_stmt>dtype ['float32' 'float16']<block_start>run_case(model dtype)<line_sep>time.sleep(10)<block_end><block_end><block_end><else_stmt># test single
<block_start>run_case(args.model args.dtype)<block_end><block_end>
|
<import_from_stmt>torch nn<import_from_stmt>torch.nn.modules.conv _ConvNd<import_from_stmt>torch.nn.modules.pooling _MaxPoolNd<import_from_stmt>onnx2pytorch.operations BatchNormWrapper InstanceNormWrapper Loop LSTMWrapper Split TopK <line_sep>COMPOSITE_LAYERS=(nn.Sequential )<line_sep>MULTIOUTPUT_LAYERS=(_MaxPoolNd Loop LSTMWrapper Split TopK)<line_sep>STANDARD_LAYERS=(_ConvNd BatchNormWrapper InstanceNormWrapper LSTMWrapper nn.Linear )<line_sep>
|
"""Events"""<import_from_stmt>._core PreambleInsn ConstructorInsn SingleCommandInsn<import_from_stmt>..core_types VirtualString AdvEventRef TagEventRef EventRef Selector SelectorType <import_from_stmt>..core IRFunction VisibleFunction<import_stmt>commands<as>c<class_stmt>CreateAdvEvent(ConstructorInsn)<block_start>"""Creates an advancement-based event object."""<line_sep>args=[VirtualString]<line_sep>argnames='event_name'<line_sep>argdocs=["The event name"]<line_sep>rettype=AdvEventRef<line_sep>insn_name='adv_event'<def_stmt>construct self<block_start><return>AdvEventRef(str(self.event_name))<block_end><block_end><class_stmt>CreateTagEvent(ConstructorInsn)<block_start>"""Creates a tag-based event object."""<line_sep>args=[VirtualString]<line_sep>argnames='tag_name'<line_sep>argdocs=["The function tag name"]<line_sep>rettype=TagEventRef<line_sep>insn_name='tag_event'<def_stmt>construct self<block_start><return>TagEventRef(str(self.tag_name))<block_end><block_end><class_stmt>AddEventCondition(PreambleInsn)<block_start>"""Add a condition to an event that must be true for the event handler
to be invoked."""<line_sep>args=[AdvEventRef VirtualString VirtualString]<line_sep>argnames='event path value'<line_sep>argdocs=["Event to add the condition to" "JSON path in the advancement" "Value that must match"]<line_sep>insn_name='add_event_condition'<def_stmt>preapply self preamble<block_start>self.event.add_condition(tuple(str(self.path).split('.')) str(self.value))<block_end><block_end><class_stmt>EventHandler(PreambleInsn)<block_start>"""Add an event handler to the given event specification."""<line_sep>args=[IRFunction EventRef]<line_sep>argnames='handler event'<line_sep>argdocs=["Event handler" "Event"]<line_sep>top_preamble_only=<true><line_sep>insn_name='event_handler'<def_stmt>preapply self preamble<block_start><if_stmt><not>self.event.is_tag<block_start>self.handler.add_advancement_revoke(self.event)<block_end><block_end><def_stmt>declare self<block_start>self.handler.usage()<block_end><def_stmt>postapply self out top<block_start><assert_stmt><not>self.handler.is_inline<line_sep>out.write_event_handler(self.handler self.event)<block_end><block_end><class_stmt>FireEventInsn(SingleCommandInsn)<block_start>"""Fires a tag-based event to all listeners."""<line_sep>args=[TagEventRef]<line_sep>argnames='event'<line_sep>argdocs=["Tag event to fire"]<line_sep>insn_name='fire_event'<def_stmt>get_cmd self func<block_start><return>c.FunctionTag(c.NSName(self.event.name))<block_end><block_end><class_stmt>RevokeEventAdvancement(SingleCommandInsn)<block_start>"""(Internal) Revokes an advancement to allow an event to re-fire."""<line_sep>args=[IRFunction]<line_sep>argnames='func'<line_sep>argdocs=["Handler"]<line_sep>insn_name='revoke_event_adv'<def_stmt>get_cmd self func# Advancement name = handler func name
<block_start><return>c.Advancement('revoke' Selector.new(SelectorType.SENDER).as_resolve() 'only' c.AdvancementRef(self.func.global_name))<block_end><block_end><class_stmt>SetupInsn(PreambleInsn)<block_start>"""Tags a function as being part of the setup phase. It is called whenever
the datapack is reloaded."""<line_sep>args=[VisibleFunction]<line_sep>argnames='func'<line_sep>argdocs=["The setup function"]<line_sep>top_preamble_only=<true><line_sep>insn_name='setupfn'<def_stmt>declare self<block_start>self.func.usage()<block_end><def_stmt>preapply self preamble<block_start><pass><block_end><def_stmt>postapply self out top<block_start><assert_stmt><not>self.func.is_inline<line_sep>out.write_setup_function(self.func)<block_end><block_end>
|
# Problem : https://practice.geeksforgeeks.org/problems/print-anagrams-together/1
# Input:
# N = 5
# words[] = {act,god,cat,dog,tac}
# Output:
# god dog
# act cat tac
# Explanation:
# There are 2 groups of
# anagrams "god", "dog" make group 1.
# "act", "cat", "tac" make group 2.
<import_from_stmt>collections defaultdict<def_stmt>Anagrams words n<block_start>'''
words: list of words
n: number of words
return: list of groups of anagrams (the outer list is sorted in the driver code, not the words within a group)
'''<line_sep>#code here
anagrams=defaultdict(list)<for_stmt>word words<block_start>anagrams["".join(sorted(word))].append(word)<block_end><return>anagrams.values()<block_end># Driver Code
<if_stmt>__name__<eq>'__main__'<block_start>t=int(input())<for_stmt>tcs range(t)<block_start>n=int(input())<line_sep>words=input().split()<line_sep>ans=Anagrams(words n)<for_stmt>grp sorted(ans)<block_start><for_stmt>word grp<block_start>print(word end=' ')<block_end>print()<block_end><block_end><block_end># Used default dict from collections module . It does not raise key value error
|
<import_from_stmt>copy deepcopy<import_from_stmt>typing Tuple<import_stmt>GPy<import_stmt>numpy<as>np<import_from_stmt>emukit.model_wrappers.gpy_model_wrappers GPyModelWrapper<class_stmt>FabolasKernel(GPy.kern.Kern)<block_start><def_stmt>__init__ self input_dim basis_func a=1. b=1. active_dims=<none><block_start>super(FabolasKernel self).__init__(input_dim active_dims "fabolas_kernel")<assert_stmt>input_dim<eq>1<line_sep>self.basis_func=basis_func<line_sep>self.a=GPy.core.parameterization.Param("a" a)<line_sep>self.b=GPy.core.parameterization.Param("b" b)<line_sep>self.link_parameters(self.a self.b)<block_end><def_stmt>K self X X2<block_start><if_stmt>X2<is><none><block_start>X2=X<block_end>X_=self.basis_func(X)<line_sep>X2_=self.basis_func(X2)<line_sep>k=np.dot(X_<times>self.b X2_.T)+self.a<line_sep><return>k<block_end><def_stmt>update_gradients_full self dL_dK X X2<block_start><if_stmt>X2<is><none><block_start>X2=X<block_end>X_=self.basis_func(X)<line_sep>X2_=self.basis_func(X2)<line_sep>self.a.gradient=np.sum(dL_dK)<line_sep>self.b.gradient=np.sum(np.dot(np.dot(X_ X2_.T) dL_dK))<block_end><def_stmt>Kdiag self X<block_start><return>np.diag(self.K(X X))<block_end><block_end><def_stmt>linear s<block_start><return>s<block_end><def_stmt>quad s<block_start><return>(1-s)<power>2<block_end><def_stmt>transform s s_min s_max<block_start>s_transform=(np.log2(s)-np.log2(s_min))/(np.log2(s_max)-np.log2(s_min))<line_sep><return>s_transform<block_end><def_stmt>retransform s_transform s_min s_max<block_start>s=np.rint(2<power>(s_transform<times>(np.log2(s_max)-np.log2(s_min))+np.log2(s_min)))<line_sep><return>s<block_end><class_stmt>FabolasModel(GPyModelWrapper)<block_start><def_stmt>__init__ self X_init:np.ndarray Y_init:np.ndarray s_min:float s_max:float basis_func=linear noise:float=1e-6<block_start>"""
Fabolas Gaussian processes model which models the validation error / cost of
hyperparameter configurations across training dataset subsets.
:param X_init: training data points
:param Y_init: training targets
:param basis_func: basis function which describes the change in performance across dataset subsets
:param noise: observation noise added to the diagonal of the kernel matrix
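:param s_min: assumed to be the smallest training subset size; together with ``s_max`` it is used by
``transform`` to map the last input column onto [0, 1] on a log2 scale (e.g. transform(100, 100, 10000) == 0.0
and transform(10000, 100, 10000) == 1.0)
:param s_max: assumed to be the full training dataset size, i.e. the upper end of the log2 scaling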
"""<line_sep>self.noise=noise<line_sep>self.s_min=s_min<line_sep>self.s_max=s_max<line_sep>self._X=deepcopy(X_init)<line_sep>self._X[: -1]=transform(self._X[: -1] self.s_min self.s_max)<line_sep>self._Y=Y_init<line_sep>self.basis_func=basis_func<line_sep>kernel=GPy.kern.Matern52(input_dim=self._X.shape[1]-1 active_dims=[i<for>i range(self._X.shape[1]-1)] variance=np.var(self._Y) ARD=<true>)<line_sep>kernel<augmul>FabolasKernel(input_dim=1 active_dims=[self._X.shape[1]-1] basis_func=basis_func)<line_sep>kernel<augadd>GPy.kern.White(input_dim=1 active_dims=[self._X.shape[1]-1] variance=1e-6)<line_sep>gp=GPy.models.GPRegression(self._X self._Y kernel=kernel noise_var=noise)<line_sep>gp.kern.set_prior(GPy.priors.Uniform(0 5))<line_sep>gp.likelihood.constrain_positive()<line_sep>super(FabolasModel self).__init__(gpy_model=gp n_restarts=3)<block_end><def_stmt>predict self X<block_start>"""
:param X: (n_points x n_dimensions) array containing locations at which to get predictions
:return: (mean, variance) Arrays of size n_points x 1 of the predictive distribution at each input location
"""<line_sep>X_=deepcopy(X)<line_sep>X_[: -1]=transform(X_[: -1] self.s_min self.s_max)<line_sep><return>super(FabolasModel self).predict(X_)<block_end><def_stmt>set_data self X Y<block_start>"""
Sets training data in model
:param X: New training features
:param Y: New training outputs
"""<line_sep>self._X=deepcopy(X)<line_sep>self._X[: -1]=transform(self._X[: -1] self.s_min self.s_max)<line_sep>self._Y=Y<try_stmt><block_start>self.model.set_XY(self._X self.Y)<block_end><except_stmt><block_start>kernel=GPy.kern.Matern52(input_dim=self._X.shape[1]-1 active_dims=[i<for>i range(self._X.shape[1]-1)] variance=np.var(self.Y) ARD=<true>)<line_sep>kernel<augmul>FabolasKernel(input_dim=1 active_dims=[self._X.shape[1]-1] basis_func=self.basis_func)<line_sep>kernel<augmul>GPy.kern.OU(input_dim=1 active_dims=[self._X.shape[1]-1])<line_sep>self.model=GPy.models.GPRegression(self._X self.Y kernel=kernel noise_var=self.noise)<line_sep>self.model.likelihood.constrain_positive()<block_end><block_end><def_stmt>get_f_minimum self<block_start>"""
Predicts for all observed data points the validation error on the full dataset and returns
the smallest mean prediction
:return: Array of size 1 x 1
"""<line_sep>proj_X=deepcopy(self._X)<line_sep>proj_X[: -1]=np.ones(proj_X.shape[0])<times>self.s_max<line_sep>mean_highest_dataset=self.model.predict(proj_X)<line_sep><return>np.min(mean_highest_dataset axis=0)<block_end>@property<def_stmt>X self<block_start>X=deepcopy(self._X)<line_sep>X[: -1]=retransform(X[: -1] self.s_min self.s_max)<line_sep><return>X<block_end>@property<def_stmt>Y self<block_start><return>self._Y<block_end><def_stmt>get_prediction_gradients self X:np.ndarray<arrow>Tuple[np.ndarray np.ndarray]<block_start>"""
:param X: (n_points x n_dimensions) array containing locations at which to get gradient of the predictions
:return: (mean gradient, variance gradient) n_points x n_dimensions arrays of the gradients of the predictive
distribution at each input location
"""<line_sep>X_=deepcopy(X)<line_sep>X_[: -1]=transform(X_[: -1] self.s_min self.s_max)<line_sep><return>super(FabolasModel self).get_prediction_gradients(X_)<block_end><def_stmt>predict_covariance self X:np.ndarray with_noise:bool=<true><arrow>np.ndarray<block_start>"""
Calculates posterior covariance between points in X
:param X: Array of size n_points x n_dimensions containing input locations to compute posterior covariance at
:param with_noise: Whether to include likelihood noise in the covariance matrix
:return: Posterior covariance matrix of size n_points x n_points
"""<line_sep>X_=deepcopy(X)<line_sep>X_[: -1]=transform(X_[: -1] self.s_min self.s_max)<line_sep><return>super(FabolasModel self).predict_covariance(X_ with_noise)<block_end><def_stmt>get_covariance_between_points self X1:np.ndarray X2:np.ndarray<arrow>np.ndarray<block_start>"""
Calculate posterior covariance between two points
:param X1: An array of shape 1 x n_dimensions that contains a single data point. It is the first argument of the
posterior covariance function
:param X2: An array of shape n_points x n_dimensions that may contain multiple data points. This is the second
argument to the posterior covariance function.
:return: An array of shape n_points x 1 of posterior covariances between X1 and X2
"""<line_sep>X_1=deepcopy(X1)<line_sep>X_1[: -1]=transform(X_1[: -1] self.s_min self.s_max)<line_sep>X_2=deepcopy(X2)<line_sep>X_2[: -1]=transform(X_2[: -1] self.s_min self.s_max)<line_sep><return>super(FabolasModel self).get_covariance_between_points(X1 X2)<block_end><block_end>
|
<import_stmt>tarfile<import_from_stmt>io BytesIO<import_from_stmt>.record Record<class_stmt>Tar(Record)<block_start>__attributes__=['path']<line_sep>mode='r'<def_stmt>__init__ self path<block_start>self.path=path<block_end><def_stmt>__enter__ self<block_start>self.tar=tarfile.open(self.path self.mode)<line_sep><return>self<block_end><def_stmt>__exit__ self *args<block_start>self.tar.close()<block_end><def_stmt>load self filename<block_start>member=self.tar.getmember(filename)<line_sep><return>self.tar.extractfile(member)<block_end><block_end><class_stmt>DumpTar(Tar)<block_start>mode='w'<def_stmt>dump self bytes filename<block_start>file=BytesIO(bytes)<line_sep>info=tarfile.TarInfo(filename)<line_sep>info.size=len(bytes)<line_sep>self.tar.addfile(tarinfo=info fileobj=file)<block_end><block_end>
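# Usage sketch (file name and payload are illustrative): with DumpTar('records.tar') as archive: archive.dump(b'{"id": 1}', '1.json')
# writes one member, and later with Tar('records.tar') as archive: archive.load('1.json').read() returns those bytes.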
|
<import_from_stmt>decimal Decimal<import_stmt>unittest.mock<import_stmt>hummingbot.strategy.perpetual_market_making.start<as>strategy_start<import_from_stmt>hummingbot.connector.exchange_base ExchangeBase<import_from_stmt>hummingbot.strategy.perpetual_market_making.perpetual_market_making_config_map perpetual_market_making_config_map<as>c_map <import_from_stmt>test.hummingbot.strategy assign_config_default<class_stmt>PerpetualMarketMakingStartTest(unittest.TestCase)<block_start><def_stmt>setUp self<arrow><none><block_start>super().setUp()<line_sep>self.strategy=<none><line_sep>self.markets={"binance":ExchangeBase()}<line_sep>self.notifications=[]<line_sep>self.log_errors=[]<line_sep>assign_config_default(c_map)<line_sep>c_map.get("derivative").value="binance"<line_sep>c_map.get("market").value="ETH-USDT"<line_sep>c_map.get("leverage").value=Decimal("5")<line_sep>c_map.get("order_amount").value=Decimal("1")<line_sep>c_map.get("order_refresh_time").value=60.<line_sep>c_map.get("bid_spread").value=Decimal("1")<line_sep>c_map.get("ask_spread").value=Decimal("2")<block_end><def_stmt>_initialize_market_assets self market trading_pairs<block_start><return>[("ETH" "USDT")]<block_end><def_stmt>_initialize_markets self market_names<block_start><pass><block_end><def_stmt>_notify self message<block_start>self.notifications.append(message)<block_end><def_stmt>logger self<block_start><return>self<block_end><def_stmt>error self message exc_info<block_start>self.log_errors.append(message)<block_end><def_stmt>test_strategy_creation self<block_start>strategy_start.start(self)<line_sep>self.assertEqual(self.strategy.order_amount Decimal("1"))<line_sep>self.assertEqual(self.strategy.order_refresh_time 60.)<line_sep>self.assertEqual(self.strategy.bid_spread Decimal("0.01"))<line_sep>self.assertEqual(self.strategy.ask_spread Decimal("0.02"))<block_end><block_end>
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for Batch Normalization Layer."""<import_stmt>os<import_stmt>unittest<import_stmt>objax<import_stmt>numpy<as>np<line_sep># Split CPU cores into 8 devices for tests of sync batch norm
os.environ['XLA_FLAGS']=' '.join(os.environ.get('XLA_FLAGS' '').split(' ')+['--xla_force_host_platform_device_count=8'])<class_stmt>TestBatchnorm(unittest.TestCase)<block_start><def_stmt>test_batchnorm_0d self<block_start>x=objax.random.normal((64 8))<line_sep>bn=objax.nn.BatchNorm0D(8)<line_sep># run batch norm in training mode
yt=bn(x training=<true>)<line_sep>self.assertEqual(yt.shape x.shape)<line_sep># run batch norm in eval mode
ye=bn(x training=<false>)<line_sep>self.assertEqual(ye.shape x.shape)<block_end><def_stmt>test_batchnorm_1d self<block_start>x=objax.random.normal((64 4 16))<line_sep>bn=objax.nn.BatchNorm1D(4)<line_sep># run batch norm in training mode
yt=bn(x training=<true>)<line_sep>self.assertEqual(yt.shape x.shape)<line_sep># run batch norm in eval mode
ye=bn(x training=<false>)<line_sep>self.assertEqual(ye.shape x.shape)<block_end><def_stmt>test_batchnorm_2d self<block_start>x=objax.random.normal((64 3 16 16))<line_sep>bn=objax.nn.BatchNorm2D(3)<line_sep># run batch norm in training mode
yt=bn(x training=<true>)<line_sep>self.assertEqual(yt.shape x.shape)<line_sep># run batch norm in eval mode
ye=bn(x training=<false>)<line_sep>self.assertEqual(ye.shape x.shape)<block_end><block_end><class_stmt>TestSyncBatchnorm(unittest.TestCase)<block_start><def_stmt>assertTensorsAlmostEqual self a b<block_start>a=np.array(a)<line_sep>b=np.array(b)<line_sep>np.testing.assert_almost_equal(a b decimal=5)<block_end><def_stmt>assertTensorsNotEqual self a b<block_start>a=np.array(a)<line_sep>b=np.array(b)<line_sep>self.assertGreater(((a-b)<power>2).sum() 1e-5)<block_end><def_stmt>helper_test_syncbn self x bn_fn syncbn_fn# run regular batch norm in train and eval mode
<block_start>bn=bn_fn()<line_sep>yt=bn(x training=<true>)<line_sep>ye=bn(x training=<false>)<line_sep># run replicated sync batch norm in train and eval mode
sync_bn=syncbn_fn()<line_sep>sync_bn_train=objax.Parallel(<lambda>x:sync_bn(x training=<true>) vc=sync_bn.vars())<line_sep>sync_bn_eval=objax.Parallel(<lambda>x:sync_bn(x training=<false>) vc=sync_bn.vars())<with_stmt>sync_bn.vars().replicate()<block_start>yt_syncbn=sync_bn_train(x)<line_sep>ye_syncbn=sync_bn_eval(x)<block_end># replicated sync bn should have the same behavior as non-replicated regular bn
self.assertTensorsAlmostEqual(yt yt_syncbn)<line_sep>self.assertTensorsAlmostEqual(ye ye_syncbn)<line_sep>self.assertTensorsAlmostEqual(yt yt_syncbn)<line_sep>self.assertTensorsAlmostEqual(bn.running_mean.value sync_bn.running_mean.value)<line_sep>self.assertTensorsAlmostEqual(bn.running_var.value sync_bn.running_var.value)<line_sep># run replicated non-sync batch norm - it should yield different result
non_sync_bn=bn_fn()<line_sep>non_sync_bn_train=objax.Parallel(<lambda>x:non_sync_bn(x training=<true>) vc=non_sync_bn.vars())<line_sep>non_sync_bn_eval=objax.Parallel(<lambda>x:non_sync_bn(x training=<false>) vc=non_sync_bn.vars())<with_stmt>non_sync_bn.vars().replicate()<block_start>yt_non_syncbn=non_sync_bn_train(x)<line_sep>ye_non_syncbn=non_sync_bn_eval(x)<block_end>self.assertTensorsNotEqual(yt yt_non_syncbn)<line_sep>self.assertTensorsNotEqual(ye ye_non_syncbn)<block_end><def_stmt>test_syncbn_0d self<block_start>x=objax.random.normal((64 8))<line_sep>self.helper_test_syncbn(x <lambda>:objax.nn.BatchNorm0D(8) <lambda>:objax.nn.SyncedBatchNorm0D(8))<block_end><def_stmt>test_syncbn_1d self<block_start>x=objax.random.normal((64 4 16))<line_sep>self.helper_test_syncbn(x <lambda>:objax.nn.BatchNorm1D(4) <lambda>:objax.nn.SyncedBatchNorm1D(4))<block_end><def_stmt>test_syncbn_2d self<block_start>x=objax.random.normal((64 3 16 16))<line_sep>self.helper_test_syncbn(x <lambda>:objax.nn.BatchNorm2D(3) <lambda>:objax.nn.SyncedBatchNorm2D(3))<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
|
width=10<line_sep>precision=4<line_sep>value=decimal.Decimal("12.34567")<line_sep>f"result: {value:{width}.{precision}}"<line_sep>rf"result: {value:{width}.{precision}}"<line_sep>foo(f'this SHOULD be a multi-line string because it is '<concat>f'very long and does not fit on one line. And {value} is the value.')<line_sep>foo('this SHOULD be a multi-line string, but not reflowed because it is '<concat>f'very long and and also unusual. And {value} is the value.')<line_sep>foo(fR"this should NOT be \t "<concat>rF'a multi-line string \n')<line_sep>
|
<import_stmt>grpc<import_stmt>sys<import_stmt>os<import_stmt>socket<import_stmt>logging<import_stmt>importlib<import_from_stmt>concurrent futures<import_stmt>jet_to_python_pb2<import_stmt>jet_to_python_pb2_grpc<line_sep>logger=logging.getLogger('Python PID %d'%os.getpid())<class_stmt>JetToPythonServicer(jet_to_python_pb2_grpc.JetToPythonServicer)<block_start><def_stmt>__init__ self handler_function<block_start>self._handler_function=handler_function<block_end><def_stmt>streamingCall self request_iterator context<block_start><for_stmt>request request_iterator<block_start>output_list=self._handler_function(request.inputValue)<line_sep>output_item=jet_to_python_pb2.OutputMessage(outputValue=output_list)<line_sep><yield>output_item<block_end>logger.info('gRPC call completed')<block_end><block_end><def_stmt>load_handler_function handler_module_name handler_function_name<block_start><try_stmt><block_start>handler_module=importlib.import_module(handler_module_name)<block_end><except_stmt>ImportError<as>e<block_start><raise>RuntimeError("Cannot import module %s"%(handler_module_name) e)<block_end><if_stmt><not>hasattr(handler_module handler_function_name)<block_start><raise>RuntimeError("Handler function %s.%s doesn't exist"%(handler_module_name handler_function_name))<block_end><return>getattr(handler_module handler_function_name)<block_end><def_stmt>serve phoneback_port handler_module_name handler_function_name# Fail as soon as possible for any simple problem with passed-in arguments
<block_start>phoneback_port_int=int(phoneback_port)<line_sep>handler_function=load_handler_function(handler_module_name handler_function_name)<line_sep>server=grpc.server(futures.ThreadPoolExecutor(max_workers=1) options=[('grpc.max_send_message_length' 100<times>1024<times>1024) ('grpc.max_receive_message_length' 100<times>1024<times>1024) ('grpc.so_reuseport' 0)])<line_sep>jet_to_python_pb2_grpc.add_JetToPythonServicer_to_server(JetToPythonServicer(handler_function) server)<line_sep>listen_port=server.add_insecure_port('localhost:0')<if_stmt>listen_port<eq>0<block_start>logger.error("Couldn't find a port to bind to")<line_sep><return><block_end>phoneback_message=('%d\n'%listen_port).encode('utf-8')<line_sep>server.start()<line_sep>logger.info('started listening on port %d' listen_port)<with_stmt>socket.socket(socket.AF_INET socket.SOCK_STREAM)<as>s<block_start>s.connect(('localhost' phoneback_port_int))<line_sep>s.sendall(phoneback_message)<block_end># Wait for a stop signal in stdin
stdin_message=input()<if_stmt>stdin_message<eq>'stop'<block_start>logger.info('Received a "stop" message from stdin. Stopping the server.')<block_end><else_stmt><block_start>logger.info('Received an unexpected message from stdin: "%s"'%stdin_message)<block_end>server.stop(0).wait()<block_end><if_stmt>__name__<eq>'__main__'<block_start>logging.basicConfig(stream=sys.stdout format='%(asctime)s %(levelname)s [%(name)s] %(threadName)s - %(message)s' level=logging.INFO)<line_sep># Expecting these command-line parameters:
# - $1 is the port where Jet is listening for the Python process to
# 'phone back' and tell Jet on which port it started its gRPC endpoint.
# - $2.$3 is the module.function of the handler function that will handle
# the input from Jet.
serve(phoneback_port=sys.argv[1] handler_module_name=sys.argv[2] handler_function_name=sys.argv[3])<block_end>
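A sketch of the handler side this server expects to load. The module and function names (echo_handler.handle) are made up for illustration; Jet would pass them as the second and third command-line arguments described above.

# echo_handler.py
def handle(input_value):
    # Receives one input value from the Jet pipeline and must return a list of output values.
    return [input_value]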
|
<import_stmt>clr<line_sep>clr.AddReference('RevitAPI')<import_from_stmt>Autodesk.Revit.DB *<import_from_stmt>System.Collections.Generic *<line_sep>clr.AddReference("RevitServices")<import_stmt>RevitServices<import_from_stmt>RevitServices.Persistence DocumentManager<import_from_stmt>RevitServices.Transactions TransactionManager<line_sep>doc=DocumentManager.Instance.CurrentDBDocument<line_sep>groups=UnwrapElement(IN[0])<line_sep>elementlist=list()<line_sep>TransactionManager.Instance.EnsureInTransaction(doc)<for_stmt>group groups<block_start><try_stmt><block_start>ids=group.UngroupMembers()<line_sep>ungrouped=list()<for_stmt>id ids<block_start>ungrouped.append(group.Document.GetElement(id))<block_end>elementlist.append(ungrouped)<block_end><except_stmt><block_start>elementlist.append(list())<block_end><block_end>TransactionManager.Instance.TransactionTaskDone()<line_sep>OUT=elementlist<line_sep>
|
expected_output={"BDI3147":{"interface":"BDI3147" "redirects_disable":<false> "address_family":{"ipv4":{"version":{1:{"groups":{31:{"group_number":31 "hsrp_router_state":"active" "statistics":{"num_state_changes":17} "last_state_change":"12w6d" "primary_ipv4_address":{"address":"10.190.99.49"} "virtual_mac_address":"0000.0c07.ac1f" "virtual_mac_address_mac_in_use":<true> "local_virtual_mac_address":"0000.0c07.ac1f" "local_virtual_mac_address_conf":"v1 default" "timers":{"hello_msec_flag":<false> "hello_sec":3 "hold_msec_flag":<false> "hold_sec":10 "next_hello_sent":1.856} "active_router":"local" "standby_priority":90 "standby_expires_in":11.504 "standby_router":"10.190.99.51" "standby_ip_address":"10.190.99.51" "priority":110 "configured_priority":110 "session_name":"hsrp-BD3147-31"} 32:{"group_number":32 "hsrp_router_state":"active" "statistics":{"num_state_changes":17} "last_state_change":"12w6d" "primary_ipv4_address":{"address":"10.188.109.1"} "virtual_mac_address":"0000.0c07.ac20" "virtual_mac_address_mac_in_use":<true> "local_virtual_mac_address":"0000.0c07.ac20" "local_virtual_mac_address_conf":"v1 default" "timers":{"hello_msec_flag":<false> "hello_sec":3 "hold_msec_flag":<false> "hold_sec":10 "next_hello_sent":2.496} "active_router":"local" "standby_priority":90 "standby_expires_in":10.576 "standby_router":"10.188.109.3" "standby_ip_address":"10.188.109.3" "priority":110 "configured_priority":110 "session_name":"hsrp-BD3147-32"}}}}}} "use_bia":<false>}}<line_sep>
|
<import_from_stmt>flask_admin tools<def_stmt>test_encode_decode <block_start><assert_stmt>tools.iterdecode(tools.iterencode([1 2 3]))<eq>(u'1' u'2' u'3')<assert_stmt>tools.iterdecode(tools.iterencode([',' ',' ',']))<eq>(u',' u',' u',')<assert_stmt>tools.iterdecode(tools.iterencode(['.hello.,' ',' ',']))<eq>(u'.hello.,' u',' u',')<assert_stmt>tools.iterdecode(tools.iterencode(['.....,,,.,,..,.,,.,']))<eq>(u'.....,,,.,,..,.,,.,' )<assert_stmt>tools.iterdecode(tools.iterencode([]))<eq>tuple()<line_sep># Malformed inputs should not crash
<assert_stmt>tools.iterdecode('.')<assert_stmt>tools.iterdecode(',')<eq>(u'' u'')<block_end>
|
"""
This example script shows how to read button state with
debouncing that does not rely on time.sleep().
"""<import_stmt>board<import_from_stmt>digitalio DigitalInOut Direction Pull<line_sep>btn=DigitalInOut(board.SWITCH)<line_sep>btn.direction=Direction.INPUT<line_sep>btn.pull=Pull.UP<line_sep>prev_state=btn.value<while_stmt><true><block_start>cur_state=btn.value<if_stmt>cur_state<ne>prev_state<block_start><if_stmt><not>cur_state<block_start>print("BTN is down")<block_end><else_stmt><block_start>print("BTN is up")<block_end><block_end>prev_state=cur_state<block_end>
|
<import_from_stmt>nndet.inference.detection.wbc batched_wbc wbc<import_from_stmt>nndet.inference.detection.model batched_nms_model<import_from_stmt>nndet.inference.detection.ensemble batched_wbc_ensemble batched_nms_ensemble wbc_nms_no_label_ensemble<line_sep>
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""domain_utilstes extract test class."""<import_stmt>pytest_check<as>check<import_from_stmt>msticpy.sectools domain_utils<def_stmt>test_validate_domain <block_start>test_dom_val=domain_utils.DomainValidator()<line_sep>valid_tld=test_dom_val.validate_tld("www.microsoft.com")<line_sep>resolvable=test_dom_val.is_resolvable("www.microsoft.com")<line_sep>blacklisted=test_dom_val.in_abuse_list("www.microsoft.com")<assert_stmt>valid_tld<assert_stmt>resolvable<assert_stmt><not>blacklisted[0]<block_end><def_stmt>test_validate_domain_fail <block_start>test_dom_val=domain_utils.DomainValidator()<line_sep>valid_tld=test_dom_val.validate_tld("www.contoso.garbage")<line_sep>resolvable=test_dom_val.is_resolvable("www.contoso.garbage")<line_sep>blacklisted=test_dom_val.in_abuse_list("www.contoso.garbage")<assert_stmt><not>valid_tld<assert_stmt><not>resolvable<assert_stmt><not>blacklisted[0]<assert_stmt>blacklisted[1]<is><none><block_end><def_stmt>test_resolver_funcs <block_start>"""Test domain utils functions."""<line_sep>result=domain_utils.dns_resolve("www.microsoft.com")<line_sep>check.is_not_none(result["qname"])<line_sep>check.is_true(result["rrset"])<line_sep>ip=result["rrset"][0]<line_sep>result=domain_utils.dns_resolve("www.contoso.garbage")<line_sep>check.is_not_none(result)<line_sep>check.is_false(result.get("rrset"))<line_sep>result=domain_utils.ip_rev_resolve(ip)<line_sep>check.is_not_none(result)<line_sep>result=domain_utils.dns_components("www.microsoft.com")<line_sep>check.equal(result["subdomain"] "www")<line_sep>check.equal(result["domain"] "microsoft")<line_sep>check.equal(result["suffix"] "com")<line_sep>result=domain_utils.url_components("http://www.microsoft.com")<line_sep>check.equal(result["scheme"] "http")<line_sep>check.equal(result["host"] "www.microsoft.com")<block_end>
|
<import_from_stmt>.geom_abline geom_abline<import_from_stmt>.geom_area geom_area<import_from_stmt>.geom_bar geom_bar<import_from_stmt>.geom_bin2d geom_bin2d<import_from_stmt>.geom_blank geom_blank<import_from_stmt>.geom_boxplot geom_boxplot<import_from_stmt>.geom_density geom_density<import_from_stmt>.geom_errorbar geom_errorbar<import_from_stmt>.geom_histogram geom_histogram<import_from_stmt>.geom_hline geom_hline<import_from_stmt>.geom_jitter geom_jitter<import_from_stmt>.geom_line geom_line<import_from_stmt>.geom_now_its_art geom_now_its_art<import_from_stmt>.geom_path geom_path<import_from_stmt>.geom_point geom_point<import_from_stmt>.geom_polygon geom_polygon<import_from_stmt>.geom_rect geom_rect<import_from_stmt>.geom_ribbon geom_ribbon<import_from_stmt>.geom_step geom_step<import_from_stmt>.geom_text geom_text<import_from_stmt>.geom_tile geom_tile<import_from_stmt>.geom_violin geom_violin<import_from_stmt>.geom_vline geom_vline<line_sep>
|
<import_stmt>codecs<import_stmt>json<def_stmt>dump_json dictionary file_path<block_start>"""
:param dictionary: dict object to serialize
:param file_path: destination path of the JSON file
:return: None
"""<with_stmt>codecs.open(file_path 'w+' encoding='utf-8')<as>fp<block_start>json.dump(dictionary fp)<block_end><block_end><def_stmt>load_json file_path<block_start>"""
:param file_path: path of the JSON file to read
:return: dict object
"""<with_stmt>codecs.open(file_path 'r' encoding='utf-8')<as>fp<block_start><return>json.load(fp)<block_end><block_end>
|
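A short usage sketch for the dump_json/load_json helpers above (the path is illustrative):

config = {'name': 'example', 'retries': 3}
dump_json(config, '/tmp/config.json')
assert load_json('/tmp/config.json') == config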
<import_stmt>pytest<import_from_stmt>iotedgedev.buildoptionsparser BuildOptionsParser<line_sep>pytestmark=pytest.mark.unit<def_stmt>test_filter_build_options <block_start>build_options=["--rm" "-f test" "--file test" "-t image" "--tag image"]<line_sep>build_options_parser=BuildOptionsParser(build_options)<assert_stmt><not>build_options_parser.parse_build_options()<block_end><def_stmt>test_parse_to_dict <block_start>build_options=["--add-host=github.com:192.168.127.12" "--add-host=ports.ubuntu.com:172.16.58.3" "--build-arg a=b" "--build-arg c=d" "--label e=f" "--label g"]<line_sep>sdk_options={'extra_hosts':{'github.com':'192.168.127.12' 'ports.ubuntu.com':'172.16.58.3'} 'buildargs':{'a':'b' 'c':'d'} 'labels':{'e':'f' 'g':''}}<line_sep>build_options_parser=BuildOptionsParser(build_options)<assert_stmt>sdk_options<eq>build_options_parser.parse_build_options()<block_end><def_stmt>test_parse_to_list <block_start>build_options=["--cache-from a" "--cache-from b"]<line_sep>sdk_options={'cache_from':['a' 'b']}<line_sep>build_options_parser=BuildOptionsParser(build_options)<assert_stmt>sdk_options<eq>build_options_parser.parse_build_options()<block_end><def_stmt>test_parse_val <block_start>build_options=["--network bridge" "--platform Linux" "--shm-size 1000000" "--target target"]<line_sep>sdk_options={'network_mode':'bridge' 'platform':'Linux' 'shmsize':'1000000' 'target':'target'}<line_sep>build_options_parser=BuildOptionsParser(build_options)<assert_stmt>sdk_options<eq>build_options_parser.parse_build_options()<block_end><def_stmt>test_parse_container_limits <block_start>build_options=["--cpu-shares 50" "--cpuset-cpus 0-1" "--memory 10000000" "--memory-swap 2000000"]<line_sep>sdk_options={'container_limits':{'cpushares':'50' 'cpusetcpus':'0-1' 'memory':'10000000' 'memswap':'2000000'}}<line_sep>build_options_parser=BuildOptionsParser(build_options)<assert_stmt>sdk_options<eq>build_options_parser.parse_build_options()<block_end><def_stmt>test_parse_flag <block_start>build_options=["--pull=true" "-q=false" "--no-cache"]<line_sep>sdk_options={'pull':<true> 'quiet':<false> 'nocache':<true>}<line_sep>build_options_parser=BuildOptionsParser(build_options)<assert_stmt>sdk_options<eq>build_options_parser.parse_build_options()<block_end><def_stmt>test_invalid_build_options <block_start><with_stmt>pytest.raises(KeyError)<block_start>build_options=["--cgroup-parent" "--compress" "--cpu-period" "--cpuset-mems 10" ]<line_sep>build_options_parser=BuildOptionsParser(build_options)<line_sep>build_options_parser.parse_build_options()<block_end><block_end><def_stmt>test_filtered_valid_build_options <block_start>build_options=["--rm" "--file test" "--tag image" "--add-host=github.com:192.168.127.12" "--add-host=ports.ubuntu.com:172.16.58.3" "--cache-from a" "--cache-from b" "--network bridge" "--platform Linux" "--cpu-shares 50" "--memory 10000000" "--pull=true" "-q=false" "--no-cache"]<line_sep>sdk_options={'extra_hosts':{'github.com':'192.168.127.12' 'ports.ubuntu.com':'172.16.58.3'} 'cache_from':['a' 'b'] 'network_mode':'bridge' 'platform':'Linux' 'container_limits':{'cpushares':'50' 'memory':'10000000' } 'pull':<true> 'quiet':<false> 'nocache':<true>}<line_sep>build_options_parser=BuildOptionsParser(build_options)<assert_stmt>sdk_options<eq>build_options_parser.parse_build_options()<block_end>
|
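Based on the test cases above, a typical call to BuildOptionsParser might look like the sketch below. The option values are illustrative, and passing the result to the Docker SDK's build call is an assumption about how the parsed dictionary is consumed:

build_options = ['--no-cache', '--build-arg VERSION=1.0', '--add-host=example.com:10.0.0.1']
sdk_options = BuildOptionsParser(build_options).parse_build_options()
# e.g. docker.APIClient().build(path='.', tag='my-module:latest', **sdk_options)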
<import_from_stmt>unreal_engine FSlowTask<import_stmt>time<line_sep># Create an FSlowTask object, defining the amount of work that
# will be done, and the initial message.
t=FSlowTask(10 "Doing Something")<line_sep>t.initialize()<line_sep># Make the dialog, and include a Cancel button (default is not to
# allow a cancel button).
t.make_dialog(<true>)<line_sep>time.sleep(1)<for_stmt>i range(10)# Update the progress bar. Note that the first argument is the
# amount of work to be done this frame, not the overall work
# done so far.
<block_start>t.enter_progress_frame(1 "Progress Position : {}".format(i))<line_sep>time.sleep(0.2)<line_sep># If there was a cancel button included, we can check if it was
# pressed.
<if_stmt>t.received_user_cancel()<block_start>print("Cancelled")<line_sep><break><block_end><block_end>t.destroy()<line_sep>
|
##########################################################################
# Copyright (c) 2009, 2010, ETH Zurich.
# All rights reserved.
#
# This file is distributed under the terms in the attached LICENSE file.
# If you do not find this file, copies can be found by writing to:
# ETH Zurich D-INFK, Universitaetstrasse 6, CH-8092 Zurich. Attn: Systems Group.
##########################################################################
<import_stmt>re<import_stmt>debug tests<import_from_stmt>common TestCommon TimeoutError<import_from_stmt>results RawResults PassFailResult<class_stmt>MultihopTestCommon(TestCommon)<block_start><def_stmt>get_module_name self<block_start><raise>NotImplementedError<block_end><def_stmt>get_modules self build machine<block_start>modules=super(MultihopTestCommon self).get_modules(build machine)<line_sep>modules.add_module(self.get_module_name())<line_sep><return>modules<block_end><block_end>@tests.add_test<class_stmt>MultihopTest(MultihopTestCommon)<block_start>''' Test whether multi-hop messaging is working '''<line_sep>name="multihop_test"<def_stmt>get_module_name self<block_start><return>"multihoptest"<block_end><def_stmt>get_finish_string self<block_start><return>"server all done"<block_end><def_stmt>get_modules self build machine<block_start>modules=super(MultihopTestCommon self).get_modules(build machine)<line_sep>modules.add_module(self.get_module_name() ["core=0" "server"])<line_sep>modules.add_module(self.get_module_name() ["core=1" "client"])<line_sep><return>modules<block_end><def_stmt>process_data self testdir rawiter# the test passed iff we see the finish string
<block_start>passed=<false><for_stmt>line rawiter<block_start><if_stmt>self.get_finish_string()<in>line<block_start>passed=<true><line_sep><return>PassFailResult(<true>)<block_end><block_end><return>PassFailResult(<false>)<block_end><block_end>@tests.add_test<class_stmt>MultihopLatencyTest(MultihopTestCommon)<block_start>''' Multihop Transport Throughput microbenchmark '''<line_sep>name="multihop_throughput_latency"<def_stmt>get_module_name self<block_start><return>"multihop_latency_bench"<block_end><def_stmt>process_data self testdir rawiter<block_start>results=RawResults('message type')<line_sep>times=[]<line_sep>iteration=<none><for_stmt>line rawiter<block_start>m=re.match("Running latency test for message (.*)...." line)<if_stmt>m<block_start><if_stmt>times<block_start>results.add_group(iteration times)<block_end>iteration=m.group(1)<line_sep>times=[]<line_sep><continue><block_end>m=re.match("page \d+ took (\d+)" line)<if_stmt>m<block_start><assert_stmt>(iteration<is><not><none>)<line_sep>times.append(int(m.group(1)))<block_end><block_end><if_stmt>len(times)<ne>0<block_start>results.add_group(iteration times)<block_end><return>results<block_end><block_end>
|
# -*- coding: utf-8 -*-
# Copyright 2019 Shenzhen Ricequant Technology Co., Ltd. (hereinafter "Ricequant")
#
# This software may not be used except in compliance with the current license.
#
# * Non-commercial use (non-commercial use means an individual using this software for non-commercial purposes, or a non-profit institution such as a university or research institute using it for purposes such as education and research):
# Follow the Apache License 2.0 (hereinafter the "Apache 2.0 License"); a copy of the Apache 2.0 License can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# Unless required by law or agreed to in writing, this software must be distributed under the current license "as is", and no additional conditions may be attached.
#
# * Commercial use (commercial use means an individual using this software for any commercial purpose, or a legal person or other organization using this software for any purpose):
# Without authorization from Ricequant, no individual may use this software for any commercial purpose (including but not limited to providing, selling, renting, lending or transferring to third parties this software, derivative products of this software, or products or services that reference or draw on the functionality or source code of this software), and no legal person or other organization may use this software for any purpose; otherwise Ricequant has the right to pursue the corresponding intellectual property infringement liability.
# On this premise, use of this software must also comply with the Apache 2.0 License; where the Apache 2.0 License conflicts with this license, this license prevails.
# For the detailed authorization process, please contact <EMAIL>.
<import_from_stmt>.order Order<import_from_stmt>.trade Trade<import_from_stmt>.instrument Instrument<import_from_stmt>.bar BarMap BarObject PartialBarObject<import_from_stmt>.tick TickObject<line_sep>
|
# -*- coding: utf-8 -*-
<import_from_stmt>io StringIO<import_from_stmt>unittest.mock MagicMock Mock PropertyMock call patch<import_from_stmt>django.core.management CommandError call_command<import_from_stmt>django.test TestCase<import_from_stmt>django.test.utils override_settings<line_sep># Database testing configurations
UNKOWN_ENGINE={'default':{'ENGINE':'django.db.backends.unknown' 'NAME':'unknown' }}<line_sep>NO_TEST_NAME={'default':{'ENGINE':'django.db.backends.mysql' 'NAME':'test' 'TEST':{'NAME':'' }}}<line_sep>SQLITE={'default':{'ENGINE':'django.db.backends.sqlite3' 'NAME':'db.sqlite3' }}<line_sep>MYSQL_HOST_PORT={'default':{'ENGINE':'django.db.backends.mysql' 'NAME':'test' 'USER':'test' 'PASSWORD':'<PASSWORD>' 'HOST':'localhost' 'PORT':'3306' } }<line_sep>MYSQL_SOCKET={'default':{'ENGINE':'django.db.backends.mysql' 'NAME':'test' 'USER':'test' 'PASSWORD':'<PASSWORD>' 'HOST':'/var/run/mysqld/mysql.sock' } }<line_sep>POSTGRES={'default':{'ENGINE':'django.db.backends.postgresql_psycopg2' 'NAME':'test' 'USER':'test' 'PASSWORD':'<PASSWORD>' 'PORT':'5432' 'HOST':'localhost' } }<class_stmt>DropTestDatabaseExceptionsTests(TestCase)<block_start>"""Test for drop_test_database command."""<def_stmt>test_should_raise_CommandError_if_database_is_unknown self<block_start><with_stmt>self.assertRaisesRegex(CommandError "Unknown database unknown")<block_start>call_command('drop_test_database' '--database=unknown')<block_end><block_end>@override_settings(DATABASES=UNKOWN_ENGINE)@patch('django_extensions.management.commands.drop_test_database.input')<def_stmt>test_should_raise_CommandError_if_unknown_database_engine self m_input<block_start>m_input.return_value='yes'<with_stmt>self.assertRaisesRegex(CommandError "Unknown database engine django.db.backends.unknown")<block_start>call_command('drop_test_database')<block_end><block_end>@override_settings(DATABASES=NO_TEST_NAME)<def_stmt>test_should_raise_CommandError_if_test_database_name_is_empty self<block_start><with_stmt>self.assertRaisesRegex(CommandError "You need to specify DATABASE_NAME in your Django settings file.")<block_start>call_command('drop_test_database')<block_end><block_end><block_end><class_stmt>DropTestDatabaseTests(TestCase)<block_start>"""Test for drop_test_database command."""<line_sep>@patch('sys.stdout' new_callable=StringIO)@patch('django_extensions.management.commands.drop_test_database.input')<def_stmt>test_should_raise_CommandError_if_database_is_unknown self m_input m_stdout<block_start>m_input.return_value='no'<line_sep>call_command('drop_test_database')<line_sep>self.assertEqual("Reset cancelled.\n" m_stdout.getvalue())<block_end>@override_settings(DATABASES=SQLITE)@patch('sys.stdout' new_callable=StringIO)@patch('os.path.isfile')@patch('os.unlink')<def_stmt>test_sqlite3_should_unlink_primary_test_database self m_unlink m_isfile m_stdout# Indicate that no clone databases exist
<block_start>m_isfile.side_effect=(<true> <false>)<line_sep>call_command('drop_test_database' '--noinput' verbosity=2)<with_stmt>self.subTest('Should check for test database names until failure')<block_start>self.assertListEqual(m_isfile.call_args_list # See production code comments regarding double dots
[call('test_db.sqlite3') call('test_db_1..sqlite3')] )<block_end><with_stmt>self.subTest('Should unlink only primary test database')<block_start>self.assertListEqual(m_unlink.call_args_list [call('test_db.sqlite3')] )<block_end><with_stmt>self.subTest('Should report successful message')<block_start>self.assertIn("Reset successful." m_stdout.getvalue())<block_end><block_end>@override_settings(DATABASES=SQLITE)@patch('os.path.isfile')@patch('os.unlink')<def_stmt>test_sqlite3_should_unlink_all_existing_clone_databases self m_unlink m_isfile<block_start>"""Test cloned test databases created via 'manage.py test --parallel'."""<line_sep># Indicate that clone databases exist up to test_db_2.sqlite3
m_isfile.side_effect=(<true> <true> <true> <false>)<line_sep>call_command('drop_test_database' '--noinput')<with_stmt>self.subTest('Should check for test database names until failure')<block_start>self.assertListEqual(m_isfile.call_args_list [call('test_db.sqlite3') # See production code comments regarding double dots
call('test_db_1..sqlite3') call('test_db_2..sqlite3') call('test_db_3..sqlite3') ] )<block_end><with_stmt>self.subTest('Should unlink all existing test databases')<block_start>self.assertListEqual(m_unlink.call_args_list [call('test_db.sqlite3') # See production code comments regarding double dots
call('test_db_1..sqlite3') call('test_db_2..sqlite3') ] )<block_end><block_end>@override_settings(DATABASES=SQLITE)@patch('sys.stdout' new_callable=StringIO)@patch('os.path.isfile')@patch('os.unlink')<def_stmt>test_sqlite3_should_not_print_Reset_successful_when_OSError_exception self m_unlink m_isfile m_stdout<block_start>m_isfile.return_value=<true><line_sep>m_unlink.side_effect=OSError<line_sep>call_command('drop_test_database' '--noinput' verbosity=2)<line_sep>self.assertNotIn("Reset successful." m_stdout.getvalue())<block_end>@override_settings(DATABASES=MYSQL_HOST_PORT)@patch('sys.stdout' new_callable=StringIO)<def_stmt>test_mysql_should_drop_database_with_host_and_port self m_stdout<block_start>m_database=MagicMock()<line_sep># Indicate that no clone databases exist
# DROP queries return None while SELECT queries return a row count
m_database.connect.return_value.cursor.return_value.execute.side_effect=(1 <none> 0)<with_stmt>patch.dict("sys.modules" MySQLdb=m_database)<block_start>call_command('drop_test_database' '--noinput' verbosity=2)<block_end><with_stmt>self.subTest('Should check for and remove test database names until failure')<block_start>exists_query="SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME="<line_sep>self.assertListEqual(m_database.connect.return_value.cursor.return_value.execute.call_args_list [call(exists_query+"'test_test';") call('DROP DATABASE IF EXISTS `test_test`') call(exists_query+"'test_test_1';") ] )<block_end><with_stmt>self.subTest('Should report successful message')<block_start>self.assertIn("Reset successful." m_stdout.getvalue())<block_end><block_end>@override_settings(DATABASES=MYSQL_SOCKET)@patch('sys.stdout' new_callable=StringIO)<def_stmt>test_mysql_should_drop_database_with_unix_socket self m_stdout<block_start>m_database=MagicMock()<line_sep># Indicate that no clone databases exist
# DROP queries return None while SELECT queries return a row count
m_database.connect.return_value.cursor.return_value.execute.side_effect=(1 <none> 0)<with_stmt>patch.dict("sys.modules" MySQLdb=m_database)<block_start>call_command('drop_test_database' '--noinput' verbosity=2)<block_end><with_stmt>self.subTest('Should check for and remove test database names until failure')<block_start>exists_query="SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME="<line_sep>self.assertListEqual(m_database.connect.return_value.cursor.return_value.execute.call_args_list [call(exists_query+"'test_test';") call('DROP DATABASE IF EXISTS `test_test`') call(exists_query+"'test_test_1';") ] )<block_end><with_stmt>self.subTest('Should report successful message')<block_start>self.assertIn("Reset successful." m_stdout.getvalue())<block_end><block_end>@override_settings(DATABASES=MYSQL_HOST_PORT)<def_stmt>test_mysql_should_drop_all_existing_clone_databases self<block_start>"""Test cloned test databases created via 'manage.py test --parallel'."""<line_sep>m_database=MagicMock()<line_sep># Indicate that clone databases exist up to test_test_2
# DROP queries return None while SELECT queries return a row count
m_database.connect.return_value.cursor.return_value.execute.side_effect=(1 <none> 1 <none> 1 <none> 0)<with_stmt>patch.dict("sys.modules" MySQLdb=m_database)<block_start>call_command('drop_test_database' '--noinput')<block_end>exists_query="SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME="<line_sep>self.assertListEqual(m_database.connect.return_value.cursor.return_value.execute.call_args_list [call(exists_query+"'test_test';") call('DROP DATABASE IF EXISTS `test_test`') call(exists_query+"'test_test_1';") call('DROP DATABASE IF EXISTS `test_test_1`') call(exists_query+"'test_test_2';") call('DROP DATABASE IF EXISTS `test_test_2`') call(exists_query+"'test_test_3';") ] )<block_end>@override_settings(DATABASES=POSTGRES)@patch('sys.stdout' new_callable=StringIO)<def_stmt>test_postgresql_should_drop_database self m_stdout<block_start>m_database=MagicMock()<line_sep>m_cursor=Mock()<line_sep>m_database.connect.return_value.cursor.return_value=m_cursor<line_sep># Indicate that no clone databases exist
type(m_cursor).rowcount=PropertyMock(side_effect=(1 0))<with_stmt>patch.dict("sys.modules" psycopg2=m_database)<block_start>call_command('drop_test_database' '--noinput' verbosity=2)<block_end><with_stmt>self.subTest('Should check for and remove test database names until failure')<block_start>exists_query="SELECT datname FROM pg_catalog.pg_database WHERE datname="<line_sep>self.assertListEqual(m_cursor.execute.call_args_list [call(exists_query+"'test_test';") call('DROP DATABASE IF EXISTS "test_test";') call(exists_query+"'test_test_1';") ] )<block_end><with_stmt>self.subTest('Should report successful message')<block_start>self.assertIn("Reset successful." m_stdout.getvalue())<block_end><block_end>@override_settings(DATABASES=POSTGRES)<def_stmt>test_postgresql_should_drop_all_existing_cloned_databases self<block_start>"""Test cloned test databases created via 'manage.py test --parallel'."""<line_sep>m_database=MagicMock()<line_sep>m_cursor=Mock()<line_sep>m_database.connect.return_value.cursor.return_value=m_cursor<line_sep># Indicate that clone databases exist up to test_test_2
type(m_cursor).rowcount=PropertyMock(side_effect=(1 1 1 0))<with_stmt>patch.dict("sys.modules" psycopg2=m_database)<block_start>call_command('drop_test_database' '--noinput')<block_end>exists_query="SELECT datname FROM pg_catalog.pg_database WHERE datname="<line_sep>self.assertListEqual(m_cursor.execute.call_args_list [call(exists_query+"'test_test';") call('DROP DATABASE IF EXISTS "test_test";') call(exists_query+"'test_test_1';") call('DROP DATABASE IF EXISTS "test_test_1";') call(exists_query+"'test_test_2';") call('DROP DATABASE IF EXISTS "test_test_2";') call(exists_query+"'test_test_3';") ] )<block_end>@override_settings(DATABASES=POSTGRES)@patch('sys.stdout' new_callable=StringIO)<def_stmt>test_postgresql_should_not_print_Reset_successful_when_exception_occured self m_stdout<block_start>m_database=MagicMock()<line_sep>m_database.ProgrammingError=Exception<line_sep>m_cursor=Mock()<line_sep>m_cursor.execute.side_effect=m_database.ProgrammingError<line_sep>m_database.connect.return_value.cursor.return_value=m_cursor<with_stmt>patch.dict("sys.modules" psycopg2=m_database)<block_start>call_command('drop_test_database' '--noinput' verbosity=2)<block_end>self.assertNotIn("Reset successful." m_stdout.getvalue())<block_end><block_end>
|
# Generated by Django 3.2.5 on 2021-09-04 14:30
<import_from_stmt>django.conf settings<import_from_stmt>django.db migrations models<import_stmt>django.db.models.deletion<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[('auth_api' '0003_githubidentity') ]<line_sep>operations=[migrations.AlterModelOptions(name='githubidentity' options={'verbose_name':'GitHub Identity' 'verbose_name_plural':'GitHub Identities'} ) migrations.AddField(model_name='githubidentity' name='avatar_url' field=models.CharField(max_length=200 null=<true>) ) migrations.AddField(model_name='githubidentity' name='db_id' field=models.IntegerField(default=-1 help_text='The numeric database ID of the user on GitHub') preserve_default=<false> ) migrations.AddField(model_name='githubidentity' name='user_handle' field=models.CharField(default='' max_length=100) preserve_default=<false> ) migrations.AlterField(model_name='githubidentity' name='uid' field=models.CharField(help_text='The newer string ID of the user on GitHub' max_length=100) ) migrations.AlterField(model_name='githubidentity' name='user' field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE related_name='github_ids' to=settings.AUTH_USER_MODEL) ) ]<block_end>
|
<import_stmt>json<import_from_stmt>urlparse parse_qs<def_stmt>handler event context<block_start><with_stmt>open('.context' 'r')<as>f<block_start>gordon_context=json.loads(f.read())<block_end>expected_token=gordon_context['token']<line_sep>req_body=event['body']<line_sep>params=parse_qs(req_body)<line_sep># Check if the token is the correct one
token=params['token'][0]<if_stmt>token<ne>expected_token<block_start><raise>Exception("Invalid request token")<block_end>user=params['user_name'][0]<line_sep>command=params['command'][0]<line_sep>channel=params['channel_name'][0]<line_sep>command_text=params['text'][0]<line_sep>response={'response_type':'in_channel' 'text':"Hello {}! you invoked {} while you were in {} with the following text: {}".format(user command channel command_text) "attachments":[{"text":"This is some extra information!"}]}<line_sep><return>response<block_end>
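For reference, a quick check of how parse_qs unpacks a slash-command body like the one the handler above receives; the field values and token here are made up:

from urlparse import parse_qs  # Python 2, matching the handler above
body = 'token=abc123&user_name=alice&command=%2Fhello&channel_name=general&text=hi+there'
params = parse_qs(body)
# parse_qs maps each field to a list of values, hence the [0] indexing in the handler
assert params['user_name'][0] == 'alice'
assert params['command'][0] == '/hello'
assert params['text'][0] == 'hi there'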
|
'''
TODO:
def get_wrapper
def get_optimizer
'''<line_sep>
|
# Importing the Kratos Library
<import_stmt>KratosMultiphysics<def_stmt>Factory settings Model<block_start><if_stmt><not>isinstance(settings KratosMultiphysics.Parameters)<block_start><raise>Exception("expected input shall be a Parameters object, encapsulating a json string")<block_end><return>TimerProcess(Model settings["Parameters"])<block_end># All the processes python processes should be derived from "Process"
<class_stmt>TimerProcess(KratosMultiphysics.Process)<block_start>"""This process helps to measure the time consumed on the simulations
Only the member variables listed below should be accessed directly.
Public member variables:
Model -- the container of the different model parts.
settings -- Kratos parameters containing solver settings.
"""<def_stmt>__init__ self Model settings<block_start>""" The default constructor of the class
Keyword arguments:
self -- It signifies an instance of a class.
Model -- the container of the different model parts.
settings -- Kratos parameters containing solver settings.
"""<line_sep>KratosMultiphysics.Process.__init__(self)<line_sep>#The value can be a double or a string (function)
default_settings=KratosMultiphysics.Parameters("""
{
"help" : "This process helps to measure the time consumed on the simulations",
"output_filename" : "",
"print_interval_information" : false,
"interval_name" : "Analysis"
}
""")<line_sep># Assign this here since it will change the "interval" prior to validation
settings.ValidateAndAssignDefaults(default_settings)<line_sep>self.interval_name=settings["interval_name"].GetString()<line_sep>self.output_filename=settings["output_filename"].GetString()<line_sep># Defining timer
self.timer=KratosMultiphysics.Timer()<line_sep># Interval information
self.timer.SetPrintIntervalInformation(settings["print_interval_information"].GetBool())<line_sep># Output file
<if_stmt>self.output_filename<ne>""<block_start>self.timer.SetOuputFile(self.output_filename)<block_end><else_stmt><block_start>self.timer.SetPrintOnScreen(<true>)<block_end># Starting timer
self.timer.Start(self.interval_name)<block_end><def_stmt>ExecuteFinalize self<block_start>""" This function is designed for being called at the end of the computations
Keyword arguments:
self -- It signifies an instance of a class.
"""<line_sep>self.timer.Stop(self.interval_name)<if_stmt>self.output_filename<ne>""<block_start>self.timer.PrintTimingInformation(self.timer)<line_sep>self.timer.CloseOuputFile()<block_end><block_end><block_end>
|
<import_from_stmt>keras_squeezenet.squeezenet SqueezeNet<import_from_stmt>keras_squeezenet.version __version__<line_sep>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Compute v2 API Library Tests"""<import_from_stmt>keystoneauth1 session<import_from_stmt>osc_lib exceptions<as>osc_lib_exceptions<import_from_stmt>requests_mock.contrib fixture<import_from_stmt>openstackclient.api compute_v2<as>compute<import_from_stmt>openstackclient.tests.unit utils<line_sep>FAKE_PROJECT='xyzpdq'<line_sep>FAKE_URL='http://gopher.com/v2'<class_stmt>TestComputeAPIv2(utils.TestCase)<block_start><def_stmt>setUp self<block_start>super(TestComputeAPIv2 self).setUp()<line_sep>sess=session.Session()<line_sep>self.api=compute.APIv2(session=sess endpoint=FAKE_URL)<line_sep>self.requests_mock=self.useFixture(fixture.Fixture())<block_end><block_end><class_stmt>TestFloatingIP(TestComputeAPIv2)<block_start>FAKE_FLOATING_IP_RESP={'id':1 'ip':'203.0.113.11' # TEST-NET-3
'fixed_ip':'198.51.100.11' # TEST-NET-2
'pool':'nova' 'instance_id':<none> }<line_sep>FAKE_FLOATING_IP_RESP_2={'id':2 'ip':'203.0.113.12' # TEST-NET-3
'fixed_ip':'198.51.100.12' # TEST-NET-2
'pool':'nova' 'instance_id':<none> }<line_sep>LIST_FLOATING_IP_RESP=[FAKE_FLOATING_IP_RESP FAKE_FLOATING_IP_RESP_2 ]<line_sep>FAKE_SERVER_RESP_1={'id':1 'name':'server1' }<def_stmt>test_floating_ip_add_id self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/servers/1/action' json={'server':{}} status_code=200 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/servers/1' json={'server':self.FAKE_SERVER_RESP_1} status_code=200 )<line_sep>ret=self.api.floating_ip_add('1' '192.168.3.11')<line_sep>self.assertEqual(200 ret.status_code)<block_end><def_stmt>test_floating_ip_add_name self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/servers/1/action' json={'server':{}} status_code=200 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/servers/server1' json={'server':self.FAKE_SERVER_RESP_1} status_code=200 )<line_sep>ret=self.api.floating_ip_add('server1' '192.168.3.11')<line_sep>self.assertEqual(200 ret.status_code)<block_end><def_stmt>test_floating_ip_create self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-floating-ips' json={'floating_ip':self.FAKE_FLOATING_IP_RESP} status_code=200 )<line_sep>ret=self.api.floating_ip_create('nova')<line_sep>self.assertEqual(self.FAKE_FLOATING_IP_RESP ret)<block_end><def_stmt>test_floating_ip_create_not_found self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-floating-ips' status_code=404 )<line_sep>self.assertRaises(osc_lib_exceptions.NotFound self.api.floating_ip_create 'not-nova' )<block_end><def_stmt>test_floating_ip_delete self<block_start>self.requests_mock.register_uri('DELETE' FAKE_URL+'/os-floating-ips/1' status_code=202 )<line_sep>ret=self.api.floating_ip_delete('1')<line_sep>self.assertEqual(202 ret.status_code)<line_sep>self.assertEqual("" ret.text)<block_end><def_stmt>test_floating_ip_delete_none self<block_start>ret=self.api.floating_ip_delete()<line_sep>self.assertIsNone(ret)<block_end><def_stmt>test_floating_ip_find_id self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-floating-ips/1' json={'floating_ip':self.FAKE_FLOATING_IP_RESP} status_code=200 )<line_sep>ret=self.api.floating_ip_find('1')<line_sep>self.assertEqual(self.FAKE_FLOATING_IP_RESP ret)<block_end><def_stmt>test_floating_ip_find_ip self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-floating-ips/'+self.FAKE_FLOATING_IP_RESP['ip'] status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-floating-ips' json={'floating_ips':self.LIST_FLOATING_IP_RESP} status_code=200 )<line_sep>ret=self.api.floating_ip_find(self.FAKE_FLOATING_IP_RESP['ip'])<line_sep>self.assertEqual(self.FAKE_FLOATING_IP_RESP ret)<block_end><def_stmt>test_floating_ip_find_not_found self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-floating-ips/1.2.3.4' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-floating-ips' json={'floating_ips':self.LIST_FLOATING_IP_RESP} status_code=200 )<line_sep>self.assertRaises(osc_lib_exceptions.NotFound self.api.floating_ip_find '192.168.3.11' )<block_end><def_stmt>test_floating_ip_list self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-floating-ips' json={'floating_ips':self.LIST_FLOATING_IP_RESP} status_code=200 )<line_sep>ret=self.api.floating_ip_list()<line_sep>self.assertEqual(self.LIST_FLOATING_IP_RESP ret)<block_end><def_stmt>test_floating_ip_remove_id self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/servers/1/action' status_code=200 
)<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/servers/1' json={'server':self.FAKE_SERVER_RESP_1} status_code=200 )<line_sep>ret=self.api.floating_ip_remove('1' '192.168.3.11')<line_sep>self.assertEqual(200 ret.status_code)<block_end><def_stmt>test_floating_ip_remove_name self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/servers/1/action' status_code=200 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/servers/server1' json={'server':self.FAKE_SERVER_RESP_1} status_code=200 )<line_sep>ret=self.api.floating_ip_remove('server1' '192.168.3.11')<line_sep>self.assertEqual(200 ret.status_code)<block_end><block_end><class_stmt>TestFloatingIPPool(TestComputeAPIv2)<block_start>LIST_FLOATING_IP_POOL_RESP=[{"name":"tide"} {"name":"press"} ]<def_stmt>test_floating_ip_pool_list self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-floating-ip-pools' json={'floating_ip_pools':self.LIST_FLOATING_IP_POOL_RESP} status_code=200 )<line_sep>ret=self.api.floating_ip_pool_list()<line_sep>self.assertEqual(self.LIST_FLOATING_IP_POOL_RESP ret)<block_end><block_end><class_stmt>TestHost(TestComputeAPIv2)<block_start>FAKE_HOST_RESP_1={"zone":"internal" "host_name":"myhost" "service":"conductor" }<line_sep>FAKE_HOST_RESP_2={"zone":"internal" "host_name":"myhost" "service":"scheduler" }<line_sep>FAKE_HOST_RESP_3={"zone":"nova" "host_name":"myhost" "service":"compute" }<line_sep>LIST_HOST_RESP=[FAKE_HOST_RESP_1 FAKE_HOST_RESP_2 FAKE_HOST_RESP_3 ]<def_stmt>test_host_list_no_options self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-hosts' json={'hosts':self.LIST_HOST_RESP} status_code=200 )<line_sep>ret=self.api.host_list()<line_sep>self.assertEqual(self.LIST_HOST_RESP ret)<block_end><def_stmt>test_host_list_zone self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-hosts?zone=nova' json={'hosts':[self.FAKE_HOST_RESP_3]} status_code=200 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-hosts' json={'hosts':[self.FAKE_HOST_RESP_3]} status_code=200 )<line_sep>ret=self.api.host_list(zone='nova')<line_sep>self.assertEqual([self.FAKE_HOST_RESP_3] ret)<block_end><def_stmt>test_host_set_none self<block_start>ret=self.api.host_set(host='myhost')<line_sep>self.assertIsNone(ret)<block_end><def_stmt>test_host_set self<block_start>self.requests_mock.register_uri('PUT' FAKE_URL+'/os-hosts/myhost' json={} status_code=200 )<line_sep>ret=self.api.host_set(host='myhost' status='enabled')<line_sep>self.assertEqual({} ret)<block_end><def_stmt>test_host_show self<block_start>FAKE_RESOURCE_1={"cpu":2 "disk_gb":1028 "host":"c1a7de0ac9d94e4baceae031d05caae3" "memory_mb":8192 "project":"(total)" }<line_sep>FAKE_RESOURCE_2={"cpu":0 "disk_gb":0 "host":"c1a7de0ac9d94e4baceae031d05caae3" "memory_mb":512 "project":"(used_now)" }<line_sep>FAKE_RESOURCE_3={"cpu":0 "disk_gb":0 "host":"c1a7de0ac9d94e4baceae031d05caae3" "memory_mb":0 "project":"(used_max)" }<line_sep>FAKE_HOST_RESP=[{'resource':FAKE_RESOURCE_1} {'resource':FAKE_RESOURCE_2} {'resource':FAKE_RESOURCE_3} ]<line_sep>FAKE_HOST_LIST=[FAKE_RESOURCE_1 FAKE_RESOURCE_2 FAKE_RESOURCE_3 ]<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-hosts/myhost' json={'host':FAKE_HOST_RESP} status_code=200 )<line_sep>ret=self.api.host_show(host='myhost')<line_sep>self.assertEqual(FAKE_HOST_LIST ret)<block_end><block_end><class_stmt>TestNetwork(TestComputeAPIv2)<block_start>FAKE_NETWORK_RESP={'id':'1' 'label':'label1' 'cidr':'1.2.3.0/24' }<line_sep>FAKE_NETWORK_RESP_2={'id':'2' 'label':'label2' 
'cidr':'4.5.6.0/24' }<line_sep>LIST_NETWORK_RESP=[FAKE_NETWORK_RESP FAKE_NETWORK_RESP_2 ]<def_stmt>test_network_create_default self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-networks' json={'network':self.FAKE_NETWORK_RESP} status_code=200 )<line_sep>ret=self.api.network_create('label1')<line_sep>self.assertEqual(self.FAKE_NETWORK_RESP ret)<block_end><def_stmt>test_network_create_options self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-networks' json={'network':self.FAKE_NETWORK_RESP} status_code=200 )<line_sep>ret=self.api.network_create(name='label1' subnet='1.2.3.0/24' )<line_sep>self.assertEqual(self.FAKE_NETWORK_RESP ret)<block_end><def_stmt>test_network_delete_id self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks/1' json={'network':self.FAKE_NETWORK_RESP} status_code=200 )<line_sep>self.requests_mock.register_uri('DELETE' FAKE_URL+'/os-networks/1' status_code=202 )<line_sep>ret=self.api.network_delete('1')<line_sep>self.assertEqual(202 ret.status_code)<line_sep>self.assertEqual("" ret.text)<block_end><def_stmt>test_network_delete_name self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks/label1' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks' json={'networks':self.LIST_NETWORK_RESP} status_code=200 )<line_sep>self.requests_mock.register_uri('DELETE' FAKE_URL+'/os-networks/1' status_code=202 )<line_sep>ret=self.api.network_delete('label1')<line_sep>self.assertEqual(202 ret.status_code)<line_sep>self.assertEqual("" ret.text)<block_end><def_stmt>test_network_delete_not_found self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks/label3' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks' json={'networks':self.LIST_NETWORK_RESP} status_code=200 )<line_sep>self.assertRaises(osc_lib_exceptions.NotFound self.api.network_delete 'label3' )<block_end><def_stmt>test_network_find_id self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks/1' json={'network':self.FAKE_NETWORK_RESP} status_code=200 )<line_sep>ret=self.api.network_find('1')<line_sep>self.assertEqual(self.FAKE_NETWORK_RESP ret)<block_end><def_stmt>test_network_find_name self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks/label2' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks' json={'networks':self.LIST_NETWORK_RESP} status_code=200 )<line_sep>ret=self.api.network_find('label2')<line_sep>self.assertEqual(self.FAKE_NETWORK_RESP_2 ret)<block_end><def_stmt>test_network_find_not_found self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks/label3' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks' json={'networks':self.LIST_NETWORK_RESP} status_code=200 )<line_sep>self.assertRaises(osc_lib_exceptions.NotFound self.api.network_find 'label3' )<block_end><def_stmt>test_network_list_no_options self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-networks' json={'networks':self.LIST_NETWORK_RESP} status_code=200 )<line_sep>ret=self.api.network_list()<line_sep>self.assertEqual(self.LIST_NETWORK_RESP ret)<block_end><block_end><class_stmt>TestSecurityGroup(TestComputeAPIv2)<block_start>FAKE_SECURITY_GROUP_RESP={'id':'1' 'name':'sg1' 'description':'test security group' 'tenant_id':'0123456789' 'rules':[]}<line_sep>FAKE_SECURITY_GROUP_RESP_2={'id':'2' 'name':'sg2' 'description':'another test 
security group' 'tenant_id':'0123456789' 'rules':[]}<line_sep>LIST_SECURITY_GROUP_RESP=[FAKE_SECURITY_GROUP_RESP_2 FAKE_SECURITY_GROUP_RESP ]<def_stmt>test_security_group_create_default self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-security-groups' json={'security_group':self.FAKE_SECURITY_GROUP_RESP} status_code=200 )<line_sep>ret=self.api.security_group_create('sg1')<line_sep>self.assertEqual(self.FAKE_SECURITY_GROUP_RESP ret)<block_end><def_stmt>test_security_group_create_options self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-security-groups' json={'security_group':self.FAKE_SECURITY_GROUP_RESP} status_code=200 )<line_sep>ret=self.api.security_group_create(name='sg1' description='desc' )<line_sep>self.assertEqual(self.FAKE_SECURITY_GROUP_RESP ret)<block_end><def_stmt>test_security_group_delete_id self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups/1' json={'security_group':self.FAKE_SECURITY_GROUP_RESP} status_code=200 )<line_sep>self.requests_mock.register_uri('DELETE' FAKE_URL+'/os-security-groups/1' status_code=202 )<line_sep>ret=self.api.security_group_delete('1')<line_sep>self.assertEqual(202 ret.status_code)<line_sep>self.assertEqual("" ret.text)<block_end><def_stmt>test_security_group_delete_name self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups/sg1' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups' json={'security_groups':self.LIST_SECURITY_GROUP_RESP} status_code=200 )<line_sep>self.requests_mock.register_uri('DELETE' FAKE_URL+'/os-security-groups/1' status_code=202 )<line_sep>ret=self.api.security_group_delete('sg1')<line_sep>self.assertEqual(202 ret.status_code)<line_sep>self.assertEqual("" ret.text)<block_end><def_stmt>test_security_group_delete_not_found self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups/sg3' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups' json={'security_groups':self.LIST_SECURITY_GROUP_RESP} status_code=200 )<line_sep>self.assertRaises(osc_lib_exceptions.NotFound self.api.security_group_delete 'sg3' )<block_end><def_stmt>test_security_group_find_id self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups/1' json={'security_group':self.FAKE_SECURITY_GROUP_RESP} status_code=200 )<line_sep>ret=self.api.security_group_find('1')<line_sep>self.assertEqual(self.FAKE_SECURITY_GROUP_RESP ret)<block_end><def_stmt>test_security_group_find_name self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups/sg2' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups' json={'security_groups':self.LIST_SECURITY_GROUP_RESP} status_code=200 )<line_sep>ret=self.api.security_group_find('sg2')<line_sep>self.assertEqual(self.FAKE_SECURITY_GROUP_RESP_2 ret)<block_end><def_stmt>test_security_group_find_not_found self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups/sg3' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups' json={'security_groups':self.LIST_SECURITY_GROUP_RESP} status_code=200 )<line_sep>self.assertRaises(osc_lib_exceptions.NotFound self.api.security_group_find 'sg3' )<block_end><def_stmt>test_security_group_list_no_options self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups' json={'security_groups':self.LIST_SECURITY_GROUP_RESP} 
status_code=200 )<line_sep>ret=self.api.security_group_list()<line_sep>self.assertEqual(self.LIST_SECURITY_GROUP_RESP ret)<block_end><def_stmt>test_security_group_set_options_id self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups/1' json={'security_group':self.FAKE_SECURITY_GROUP_RESP} status_code=200 )<line_sep>self.requests_mock.register_uri('PUT' FAKE_URL+'/os-security-groups/1' json={'security_group':self.FAKE_SECURITY_GROUP_RESP} status_code=200 )<line_sep>ret=self.api.security_group_set(security_group='1' description='desc2')<line_sep>self.assertEqual(self.FAKE_SECURITY_GROUP_RESP ret)<block_end><def_stmt>test_security_group_set_options_name self<block_start>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups/sg2' status_code=404 )<line_sep>self.requests_mock.register_uri('GET' FAKE_URL+'/os-security-groups' json={'security_groups':self.LIST_SECURITY_GROUP_RESP} status_code=200 )<line_sep>self.requests_mock.register_uri('PUT' FAKE_URL+'/os-security-groups/2' json={'security_group':self.FAKE_SECURITY_GROUP_RESP_2} status_code=200 )<line_sep>ret=self.api.security_group_set(security_group='sg2' description='desc2')<line_sep>self.assertEqual(self.FAKE_SECURITY_GROUP_RESP_2 ret)<block_end><block_end><class_stmt>TestSecurityGroupRule(TestComputeAPIv2)<block_start>FAKE_SECURITY_GROUP_RULE_RESP={'id':'1' 'name':'sgr1' 'tenant_id':'proj-1' 'ip_protocol':'TCP' 'from_port':1 'to_port':22 'group':{} # 'ip_range': ,
# 'cidr': ,
# 'parent_group_id': ,
}<def_stmt>test_security_group_create_no_options self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-security-group-rules' json={'security_group_rule':self.FAKE_SECURITY_GROUP_RULE_RESP} status_code=200 )<line_sep>ret=self.api.security_group_rule_create(security_group_id='1' ip_protocol='tcp' )<line_sep>self.assertEqual(self.FAKE_SECURITY_GROUP_RULE_RESP ret)<block_end><def_stmt>test_security_group_create_options self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-security-group-rules' json={'security_group_rule':self.FAKE_SECURITY_GROUP_RULE_RESP} status_code=200 )<line_sep>ret=self.api.security_group_rule_create(security_group_id='1' ip_protocol='tcp' from_port=22 to_port=22 remote_ip='1.2.3.4/24' )<line_sep>self.assertEqual(self.FAKE_SECURITY_GROUP_RULE_RESP ret)<block_end><def_stmt>test_security_group_create_port_errors self<block_start>self.requests_mock.register_uri('POST' FAKE_URL+'/os-security-group-rules' json={'security_group_rule':self.FAKE_SECURITY_GROUP_RULE_RESP} status_code=200 )<line_sep>self.assertRaises(compute.InvalidValue self.api.security_group_rule_create security_group_id='1' ip_protocol='tcp' from_port='' to_port=22 remote_ip='1.2.3.4/24' )<line_sep>self.assertRaises(compute.InvalidValue self.api.security_group_rule_create security_group_id='1' ip_protocol='tcp' from_port=0 to_port=[] remote_ip='1.2.3.4/24' )<block_end><def_stmt>test_security_group_rule_delete self<block_start>self.requests_mock.register_uri('DELETE' FAKE_URL+'/os-security-group-rules/1' status_code=202 )<line_sep>ret=self.api.security_group_rule_delete('1')<line_sep>self.assertEqual(202 ret.status_code)<line_sep>self.assertEqual("" ret.text)<block_end><block_end>
|
"""Ability to link issues to other tech type items
Revision ID: <KEY>
Revises: 67ea2aac5ea0
Create Date: 2016-02-23 18:52:45.024716
"""<line_sep># revision identifiers, used by Alembic.
revision='<KEY>'<line_sep>down_revision='<KEY>'<import_from_stmt>alembic op<import_stmt>sqlalchemy<as>sa<def_stmt>upgrade ### commands auto generated by Alembic - please adjust! ###
<block_start>op.create_table('issue_item_association' sa.Column('super_issue_id' sa.Integer() nullable=<true>) sa.Column('sub_item_id' sa.Integer() nullable=<true>) sa.ForeignKeyConstraint(['sub_item_id'] ['item.id'] ) sa.ForeignKeyConstraint(['super_issue_id'] ['itemaudit.id'] ))<line_sep>### end Alembic commands ###
<block_end><def_stmt>downgrade ### commands auto generated by Alembic - please adjust! ###
<block_start>op.drop_table('issue_item_association')<line_sep>### end Alembic commands ###
<block_end>
|
"""Simple XML marshaling (serializing) and
unmarshaling (de-serializing) module using Python
dictionaries and the marshal module.
"""<import_from_stmt>xml.sax.handler ContentHandler<import_from_stmt>xml.sax.saxutils XMLGenerator<import_from_stmt>xml.sax.xmlreader XMLReader<import_from_stmt>xml.sax make_parser<import_stmt>marshal<import_stmt>os sys zlib<class_stmt>XMLDictionaryHandler(ContentHandler)<block_start>"""SAX Handler class which converts an XML
file to a corresponding Python dictionary """<def_stmt>__init__ self<block_start>self.curr=''<line_sep>self.parent=''<line_sep>self.count=0<line_sep>self.d={}<line_sep>self.currd={}<line_sep>self.parentd={}<line_sep>self.stack=[]<line_sep>self.stack2=[]<block_end><def_stmt>startElement self name attrs<block_start>""" Start element handler """<if_stmt>self.count<eq>0<block_start>self.parent=name<line_sep>self.d[name]=[dict(attrs) '' []]<line_sep>self.currd=self.d<block_end><else_stmt><block_start>chld={name:[dict(attrs) '' []]}<line_sep>self.parent=self.stack[-1]<line_sep>self.parentd=self.stack2[-1]<line_sep>chldlist=(self.parentd[self.parent])[2]<line_sep>chldlist.append(chld)<line_sep>self.currd=chld<block_end>self.stack.append(name)<line_sep>self.stack2.append(self.currd)<line_sep>self.curr=name<line_sep>self.count<augadd>1<block_end><def_stmt>endElement self name<block_start>""" End element handler """<line_sep>self.stack.remove(name)<for_stmt>item self.stack2<block_start><if_stmt>item.has_key(name)<block_start>self.stack2.remove(item)<block_end><block_end><block_end><def_stmt>characters self content<block_start>""" Character handler """<line_sep>content=(content.encode('utf-8')).strip()<if_stmt>content<block_start>myd=((self.parentd[self.parent])[2])[-1]<line_sep>currcontent=(myd[self.curr])[1]<line_sep>(myd[self.curr])[1]="".join((currcontent content))<block_end><block_end><def_stmt>endDocument self<block_start>""" End document handler """<line_sep># Compress all text items
self.packtext(self.d)<block_end><def_stmt>packtext self map<block_start><for_stmt>key,value map.items()<block_start>text=value[1]<line_sep>value[1]=zlib.compress(text)<line_sep>children=value[2]<for_stmt>submap children<block_start>self.packtext(submap)<block_end><block_end><block_end><block_end><class_stmt>BinXMLSAXParser(XMLReader)<block_start>"""A parser for Python binary marshal files representing
XML information using SAX interfaces """<def_stmt>__init__ self<block_start>XMLReader.__init__(self)<line_sep>self.depth=0<block_end><def_stmt>parse self stream<block_start>""" Parse Method """<line_sep># Check if it is a file object
<if_stmt>type(stream)<is>file<block_start><try_stmt><block_start>self.d=marshal.load(stream)<block_end><except_stmt>Exception e<block_start>sys.exit(e)<block_end><block_end># Check if it is a file path
<elif_stmt>os.path.exists(stream)<block_start><try_stmt><block_start>self.d=marshal.load(open(stream 'rb'))<block_end><except_stmt>Exception e<block_start>sys.exit(e)<block_end><block_end><else_stmt><block_start><raise>'BinXMLSAXParserException: Invalid Input Source'<block_end>self._cont_handler.startDocument()<line_sep>self.__parse(self.d)<line_sep>self._cont_handler.endDocument()<block_end><def_stmt>__parse self map<block_start>""" Recursive parse method for
XML dictionary """<for_stmt>key,value map.items()# For pretty printing
<block_start>self._cont_handler.ignorableWhitespace(" "<times>self.depth)<line_sep>attrs=value[0]<line_sep>text=value[1]<line_sep>children=value[2]<line_sep># Fire startElement handler event for key
self._cont_handler.startElement(key attrs)<line_sep># Fire character handler event for value
self._cont_handler.characters(zlib.decompress(text))<line_sep># Nested element, recursively call
# this function...
self.depth<augadd>1<line_sep># For pretty printing
self._cont_handler.ignorableWhitespace('\n')<for_stmt>child children<block_start>self.__parse(child)<block_end>self.depth<augsub>1<line_sep># For pretty printing
self._cont_handler.ignorableWhitespace(" "<times>self.depth)<line_sep># Fire end element handler event
self._cont_handler.endElement(key)<line_sep># For pretty printing
self._cont_handler.ignorableWhitespace('\n')<block_end><block_end><block_end><class_stmt>XMLMarshal(object)<block_start>""" The XML marshalling class """<def_stmt>dump stream xmlfile<block_start>""" Serialize XML data to a file """<try_stmt><block_start>p=make_parser()<line_sep>h=XMLDictionaryHandler()<line_sep>p.setContentHandler(h)<line_sep>p.parse(open(xmlfile))<line_sep># print h.d
marshal.dump(h.d stream)<block_end><except_stmt>Exception e<block_start>sys.exit(e)<block_end><block_end><def_stmt>dumps stream xmlfile<block_start>""" Serialize XML data to a string """<try_stmt><block_start>p=make_parser()<line_sep>p.setContentHandler()<line_sep>h=XMLDictionaryHandler()<line_sep>p.parse(open(xmlfile))<line_sep><return>marshal.dumps(h.d stream)<block_end><except_stmt>Exception e<block_start>sys.exit(e)<block_end><return><none><block_end><def_stmt>load stream out=sys.stdout<block_start>""" Load an XML binary stream
and send XML text to the output
stream 'out' """<try_stmt><block_start>p=BinXMLSAXParser()<line_sep>p.setContentHandler(XMLGenerator(out))<line_sep>p.parse(stream)<block_end><except_stmt>Exception e<block_start>sys.exit(e)<block_end><block_end><def_stmt>loads stream<block_start>""" Load an XML binary stream
and return XML text as string """<import_stmt>cStringIO<line_sep>c=cStringIO.StringIO()<try_stmt><block_start>p=BinXMLSAXParser()<line_sep>p.setContentHandler(XMLGenerator(c))<line_sep>p.parse(stream)<block_end><except_stmt>Exception e<block_start>sys.exit(e)<block_end><return>c.getvalue()<block_end>dump=staticmethod(dump)<line_sep>dumps=staticmethod(dumps)<line_sep>load=staticmethod(load)<line_sep>loads=staticmethod(loads)<block_end><if_stmt>__name__<eq>'__main__'<block_start>fname='sample.xml'<line_sep>binname=os.path.splitext(fname)[0]+'.bin'<line_sep># Dump XML text to binary
XMLMarshal.dump(open(binname 'wb') fname)<line_sep># Dump XML binary to text
XMLMarshal.load(open(binname 'rb') open('sample.xml' 'w'))<block_end>
|
<import_stmt>abc<import_from_stmt>nuplan.common.actor_state.dynamic_car_state DynamicCarState<import_from_stmt>nuplan.common.actor_state.ego_state EgoState<import_from_stmt>nuplan.planning.simulation.simulation_time_controller.simulation_iteration SimulationIteration<import_from_stmt>nuplan.planning.simulation.trajectory.abstract_trajectory AbstractTrajectory<class_stmt>AbstractTracker(abc.ABC)<block_start>"""
Interface for a generic tracker.
"""<line_sep>@abc.abstractmethod<def_stmt>initialize self<arrow><none><block_start>"""
Initializes the tracker.
"""<line_sep><pass><block_end>@abc.abstractmethod<def_stmt>track_trajectory self current_iteration:SimulationIteration next_iteration:SimulationIteration initial_state:EgoState trajectory:AbstractTrajectory <arrow>DynamicCarState<block_start>"""
Return an ego state with updated dynamics according to the controller commands.
:param current_iteration: The current simulation iteration.
:param next_iteration: The desired next simulation iteration.
        :param initial_state: The current ego state.
:param trajectory: The reference trajectory to track.
:return: The ego state to be propagated
"""<line_sep><pass><block_end><block_end>
|
"""Tests for the CPU Speed integration."""<line_sep>
|
<import_stmt>numpy<as>np<import_stmt>pytest<import_stmt>mbuild<as>mb<import_from_stmt>mbuild.tests.base_test BaseTest<class_stmt>TestLattice(BaseTest)<block_start>"""
Unit Tests for Lattice class functionality.
"""<line_sep>@pytest.mark.parametrize("spacing" [([1 1 1]) ([0.1 0.1 0.1]) (["1" "1" "1"]) (["1" 0.1 "0.1"]) ] )<def_stmt>test_spacing_success self spacing<block_start>spacing=np.asarray(spacing dtype=np.float64)<line_sep>spacing=np.reshape(spacing (3 ))<line_sep>test_lattice=mb.Lattice(lattice_spacing=spacing)<line_sep>np.testing.assert_allclose(spacing test_lattice.lattice_spacing rtol=1e-7 atol=0 equal_nan=<true> )<block_end>@pytest.mark.parametrize("dim, spacing" [(3 [1 1 1]) (3 [1 1 0]) (3 [1 0 0])])<def_stmt>test_dimension_set self dim spacing<block_start>test_lattice=mb.Lattice(lattice_spacing=spacing)<assert_stmt>test_lattice.dimension<eq>dim<block_end>@pytest.mark.parametrize("spacing" [([1]) (1) ([1 1]) ([-1 1 1]) ([1 1 1 1]) ([1 "a"]) (<none>) ([]) ([<none> <none> <none>]) ] )<def_stmt>test_spacing_incorrect self spacing<block_start><with_stmt>pytest.raises(ValueError)<block_start>mb.Lattice(lattice_spacing=spacing)<block_end><block_end>@pytest.mark.parametrize("spacing" [([0.1 0.1 0.1]) ([1 2 3]) (["1" "2" "3"]) ([1 2 "3"]) ([1 0 0]) ([1 1 0]) ] )<def_stmt>test_spacing_correct self spacing<block_start>mb.Lattice(lattice_spacing=spacing)<block_end>@pytest.mark.parametrize("vectors" [([[1 2] [0 1 0] [0 0 1]]) ([[1 0 0] [0 1 0] [0 1 0]]) (np.identity(4 dtype=np.float64)) ([[1 2 3] [3 2 1] [2 1 3]]) ] )<def_stmt>test_incorrect_lattice_vectors self vectors<block_start><with_stmt>pytest.raises(ValueError)<block_start>mb.Lattice(lattice_spacing=[1 1 1] lattice_vectors=vectors)<block_end><block_end>@pytest.mark.parametrize("vectors" [([[1 0 0] [0 1 0] [0 0 1]]) ([[1 0 0] [-0.5 0.85 0] [0 0 1]]) ] )<def_stmt>test_correct_lattice_vectors self vectors<block_start>mb.Lattice(lattice_spacing=[1 1 1] lattice_vectors=vectors)<block_end><def_stmt>test_overdefinied_inputs self<block_start>space=[1 1 1]<line_sep>vectors=[[1 0 0] [0 1 0] [0 0 1]]<line_sep>angles=[90 90 90]<with_stmt>pytest.raises(ValueError)<block_start>mb.Lattice(lattice_spacing=space lattice_vectors=vectors angles=angles)<block_end><block_end>@pytest.mark.parametrize("the_type" [(list()) (tuple()) (str()) ([])])<def_stmt>test_lattice_points_input_type self the_type<block_start><with_stmt>pytest.raises(TypeError)<block_start>mb.Lattice(lattice_spacing=[1 1 1] lattice_points=the_type)<block_end><block_end>@pytest.mark.parametrize("incorrect" [({"A":[[0.2 0.3 0.2 0.1]]}) ({"A":[[<none>]]}) ({"A":[[0.2 0.3 <none>]]}) ({"A":[[0.2 0.3 -0.5]]}) ({"A":[[0.2 0.3 1]]}) ({"A":[[0.2 0.3 0.1] [0.2 0.3 0.1]]}) ] )<def_stmt>test_lattice_points_input_type self incorrect<block_start><with_stmt>pytest.raises(ValueError)<block_start>mb.Lattice(lattice_spacing=[1 1 1] lattice_points=incorrect)<block_end><block_end>@pytest.mark.parametrize("angles" [([150 150 150]) ([90 90 -90]) ([90 90 180]) ([90 90 0]) ([90 90 90 90]) ([97 3 120]) ] )<def_stmt>test_improper_angles self angles<block_start><with_stmt>pytest.raises(ValueError)<block_start>mb.Lattice(lattice_spacing=[1 1 1] angles=angles)<block_end><block_end>@pytest.mark.parametrize("vectors, angles" [([[1 0 0] [0 1 0] [0 0 1]] [90 90 90]) ([[1.0 0.0 0.0] [-0.45399049973954675 0.8910065241883679 0.0] [-0.034899496702500955 -0.037369475398893195 0.9986919181801381 ] ] [91 92 117] ) ] )<def_stmt>test_proper_angles self vectors angles<block_start>testlattice=mb.Lattice(lattice_spacing=[1 1 1] lattice_vectors=vectors)<line_sep>np.testing.assert_allclose(testlattice.angles np.asarray(angles dtype=np.float64) rtol=1e-05 atol=1e-08 equal_nan=<false> )<block_end>@pytest.mark.parametrize("x, y, z" [(<none> 1 
0) (1 <none> 1) (1 1 <none>) (-1 1 1) (1 -1 1) (1 1 -1) (1 1 np.NaN) ] )<def_stmt>test_incorrect_populate_inputs self x y z<block_start><with_stmt>pytest.raises(ValueError)<block_start>test_lattice=mb.Lattice(lattice_spacing=[1 1 1])<line_sep>test_lattice.populate(compound_dict={"id":mb.Compound()} x=x y=y z=z)<block_end><block_end>@pytest.mark.parametrize("my_type" [([]) (()) (np.array) (np.ndarray)])<def_stmt>test_populate_basis_type_incorrect self my_type<block_start>test_lattice=mb.Lattice(lattice_spacing=[1 1 1])<with_stmt>pytest.raises(TypeError)<block_start>test_lattice.populate(compound_dict=my_type)<block_end><block_end>@pytest.mark.parametrize("not_compound" [(1) (mb.Box(lengths=[1 1 1] angles=[90.0 90.0 90.0])) ("aLattice") ] )<def_stmt>test_populate_not_compound self not_compound<block_start>test_lattice=mb.Lattice(lattice_spacing=[1 1 1])<line_sep>particle_dict={"id":not_compound}<with_stmt>pytest.raises(TypeError)<block_start>test_lattice.populate(compound_dict=particle_dict)<block_end><block_end><def_stmt>test_proper_populate self<block_start>values_to_check=[[0 0 0] [1 0 0] [0 1 0] [0 0 1] [1 1 0] [0 1 1] [1 0 1] [1 1 1] ]<line_sep>test_lattice=mb.Lattice(lattice_spacing=[1 1 1] angles=[90 90 90])<line_sep>new_compound=test_lattice.populate(x=2 y=2 z=2)<line_sep>values_to_check=np.asarray(values_to_check dtype=np.float64)<line_sep>is_true=[]<for_stmt>pos1 np.split(values_to_check 8 axis=0)<block_start><for_stmt>pos2 np.split(new_compound.xyz 8 axis=0)<block_start><if_stmt>np.allclose(pos1 pos2)<block_start>is_true.append(<true>)<block_end><block_end><block_end><assert_stmt>len(is_true)<eq>len(values_to_check)<block_end><def_stmt>test_box self<block_start>lattice=mb.Lattice(lattice_spacing=[1 1 1] angles=[90 90 90] lattice_points={"A":[[0 0 0]]} )<line_sep>compound_test=lattice.populate(compound_dict={"A":mb.Compound()} x=2 y=5 z=9)<line_sep>replication=[2 5 9]<line_sep>np.testing.assert_allclose(compound_test.box.lengths np.asarray([x<times>y<for>x,y zip(replication lattice.lattice_spacing)]) )<line_sep>np.testing.assert_allclose(compound_test.box.angles np.asarray([90.0 90.0 90.0]))<block_end><def_stmt>test_box_non_rectangular self<block_start>lattice=mb.Lattice(lattice_spacing=[0.5 0.5 1] angles=[90 90 120] lattice_points={"A":[[0 0 0]]} )<line_sep>compound_test=lattice.populate(compound_dict={"A":mb.Compound()} x=2 y=2 z=1)<line_sep>replication=[2 2 1]<line_sep>np.testing.assert_allclose(compound_test.box.lengths np.asarray([x<times>y<for>x,y zip(replication lattice.lattice_spacing)]) )<line_sep>np.testing.assert_allclose(compound_test.box.angles np.asarray([90.0 90.0 120.0]))<block_end><def_stmt>test_get_box self<block_start>lattice=mb.Lattice(lattice_spacing=[1 1 1] angles=[90 90 90] lattice_points={"A":[[0 0 0]]} )<line_sep>replication=[5 4 3]<line_sep>expected_lengths=[x<times>y<for>x,y zip(replication lattice.lattice_spacing)]<line_sep>mylat=lattice.populate(x=5 y=4 z=3)<assert_stmt>isinstance(mylat.box mb.Box)<line_sep>np.testing.assert_allclose([90 90 90] mylat.box.angles)<line_sep>np.testing.assert_allclose(expected_lengths mylat.box.lengths)<block_end><def_stmt>test_get_box_non_rectangular self<block_start>lattice=mb.Lattice(lattice_spacing=[0.5 0.5 1] angles=[90 90 120] lattice_points={"A":[[0 0 0]]} )<line_sep>replication=[2 2 1]<line_sep>expected_lengths=[x<times>y<for>x,y zip(replication lattice.lattice_spacing)]<line_sep>mylat=lattice.populate(x=2 y=2 z=1)<assert_stmt>isinstance(mylat.box mb.Box)<line_sep>np.testing.assert_allclose([90 90 120] 
mylat.box.angles)<line_sep>np.testing.assert_allclose(expected_lengths mylat.box.lengths)<block_end><block_end>
|
<import_stmt>pytest<import_from_stmt>py_backwards.transformers.six_moves SixMovesTransformer<line_sep>@pytest.mark.parametrize('before, after' [('from functools import reduce' '''
try:
from functools import reduce
except ImportError:
from six.moves import reduce as reduce
''') ('from shlex import quote' '''
try:
from shlex import quote
except ImportError:
from six.moves import shlex_quote as quote
''') ('from itertools import zip_longest' '''
try:
from itertools import zip_longest
except ImportError:
from six.moves import zip_longest as zip_longest
''') ('from urllib.request import Request, pathname2url' '''
try:
from urllib.request import Request, pathname2url
except ImportError:
from six.moves.urllib.request import Request as Request
from six.moves.urllib.request import pathname2url as pathname2url
''')])<def_stmt>test_transform transform ast before after<block_start>code=transform(SixMovesTransformer before)<assert_stmt>ast(code)<eq>ast(after)<block_end>
|
<import_stmt>math<import_stmt>numpy<as>np<import_stmt>scipy.misc<import_stmt>tensorflow<as>tf<class_stmt>Container(object)<block_start>"""Dumb container object"""<def_stmt>__init__ self dictionary<block_start>self.__dict__.update(dictionary)<block_end><block_end><def_stmt>_edge_filter <block_start>"""Returns a 3x3 edge-detection functionally filter similar to Sobel"""<line_sep># See https://en.wikipedia.org/w/index.php?title=Talk:Sobel_operator&oldid=737772121#Scharr_not_the_ultimate_solution
a=.5<times>(1-math.sqrt(.5))<line_sep>b=math.sqrt(.5)<line_sep># Horizontal filter as a 4-D tensor suitable for tf.nn.conv2d()
h=np.zeros([3 3 3 3])<for_stmt>d range(3)# I.e. each RGB channel is processed independently
<block_start>h[0 : d d]=[a b a]<line_sep>h[2 : d d]=[-a -b -a]<block_end># Vertical filter
v=np.transpose(h axes=[1 0 2 3])<line_sep><return>h v<block_end><def_stmt>total_variation_loss images name='total_variation_loss'<block_start>"""Returns a loss function that penalizes high-frequency features in the image.
Similar to the 'total variation loss' but using a different high-pass filter."""<line_sep>filter_h,filter_v=_edge_filter()<line_sep>strides=[1 1 1 1]<line_sep>hor_edges=tf.nn.conv2d(images filter_h strides padding='VALID' name='horizontal_edges')<line_sep>ver_edges=tf.nn.conv2d(images filter_v strides padding='VALID' name='vertical_edges')<line_sep>l2_edges=tf.add(hor_edges<times>hor_edges ver_edges<times>ver_edges name='L2_edges')<line_sep>total_variation_loss=tf.reduce_mean(l2_edges name=name)<line_sep><return>total_variation_loss<block_end><def_stmt>distort_image image<block_start>"""Perform random distortions to the given 4D image and return result"""<line_sep># Switch to 3D as that's what these operations require
slices=tf.unpack(image)<line_sep>output=[]<line_sep># Perform pixel-wise distortions
<for_stmt>image slices<block_start>image=tf.image.random_flip_left_right(image)<line_sep>image=tf.image.random_saturation(image .2 2.)<line_sep>image<augadd>tf.truncated_normal(image.get_shape() stddev=.05)<line_sep>image=tf.image.random_contrast(image .85 1.15)<line_sep>image=tf.image.random_brightness(image .3)<line_sep>output.append(image)<block_end># Go back to 4D
image=tf.pack(output)<line_sep><return>image<block_end><def_stmt>downscale images K<block_start>"""Differentiable image downscaling by a factor of K"""<line_sep>arr=np.zeros([K K 3 3])<line_sep>arr[: : 0 0]=1.0/(K<times>K)<line_sep>arr[: : 1 1]=1.0/(K<times>K)<line_sep>arr[: : 2 2]=1.0/(K<times>K)<line_sep>dowscale_weight=tf.constant(arr dtype=tf.float32)<line_sep>downscaled=tf.nn.conv2d(images dowscale_weight strides=[1 K K 1] padding='SAME')<line_sep><return>downscaled<block_end><def_stmt>upscale images K<block_start>"""Differentiable image upscaling by a factor of K"""<line_sep>prev_shape=images.get_shape()<line_sep>size=[K<times>int(s)<for>s prev_shape[1:3]]<line_sep>out=tf.image.resize_nearest_neighbor(images size)<line_sep><return>out<block_end><def_stmt>save_image image filename verbose=<true><block_start>"""Saves a (height,width,3) numpy array into a file"""<line_sep>scipy.misc.toimage(image cmin=0. cmax=1.).save(filename)<line_sep>print(" Saved %s"%(filename ))<block_end>
|
<import_stmt>numpy<as>np<import_from_stmt>pyray.rotation planar_rotation<class_stmt>Line(object)<block_start><def_stmt>__init__ self pt1 pt2<block_start>"""
"""<line_sep>self.pt1=pt1<line_sep>self.pt2=pt2<line_sep>self.vec_along=(pt2-pt1)<line_sep>r=planar_rotation(np.pi/2)<line_sep>self.w=np.dot(r self.vec_along)<line_sep>mod_w_sq=np.dot(self.w self.w)<line_sep>self.w=self.w/np.sqrt(mod_w_sq)<line_sep># Eqn of line is assumed to be w^T x+b=0
self.b=-np.dot(self.w self.pt1)<line_sep>self.closest_pt_from_origin=-self.b<times>self.w<block_end><block_end>
|
"""
Utility to generate terminology.py from PITerminology.h and
PIStringTerminology.h in the Photoshop SDK. Use with Python 3.
Usage:
python3 tools/extract_terminology.py \
photoshopsdk/PITerminology.h \
src/psd_tools/terminology.py
.. note:: Adobe explicitly prohibits Photoshop SDK to be used for Open source
software. Therefore, psd-tools does not bundle the official header files.
https://www.adobe.com/devnet/photoshop/sdk/eula.html
"""<import_from_future_stmt> print_function<import_stmt>re<import_stmt>sys<line_sep>FILE_HEADER='''"""
Constants for descriptor.
This file is automatically generated by tools/extract_terminology.py
"""
from enum import Enum as _Enum'''<line_sep>TERM_DEF='''class {0}(_Enum):
"""
{0} definitions extracted from PITerminology.h.
See https://www.adobe.com/devnet/photoshop/sdk.html
"""'''<line_sep>TERM_PATTERN=re.compile(r"^#define\s+(?P<key>\w+)\s+'(?P<value>....)'(\s+//\s(?P<comment>.*))?$")<line_sep>STERM_DEF='''class StringTerm(Enum):
"""
String terms extracted from PIStringTerminology.h in Photoshop SDK.
This defines constants for the strings used to access descriptor events,
keys, classes, enum types, and enum values.
See https://www.adobe.com/devnet/photoshop/sdk.html
"""'''<line_sep>STERM_PATTERN=re.compile(r'^#define\s+(?P<key>\w+)\s+"(?P<value>[^"]+)"(\s+//\s(?P<comment>.*))?$')<def_stmt>extract_terminology filepath pattern=<none><block_start>terms={}<line_sep>keys=set()<with_stmt>open(filepath 'r')<as>f<block_start><for_stmt>line f<block_start>m=re.match(pattern<or>TERM_PATTERN line)<if_stmt>m<block_start>key=m.group('key')<if_stmt>key<in>keys<block_start><continue><block_end>upper=re.search(r'[0-9A-Z]' key)<line_sep>kls,name=key[:upper.start()] key[upper.start():]<if_stmt>re.match(r'[0-9]' name[0])<or>name<in>('None' 'True' 'False')<block_start>name='_'+name<block_end>kls=kls.capitalize().replace('Class' 'Klass')<if_stmt>kls<in>terms<block_start>terms[kls].append((name m.group('value')))<block_end><else_stmt><block_start>terms[kls]=[(name m.group('value'))]<block_end><block_end><block_end><block_end><return>terms<block_end><def_stmt>print_class name fields header=<none> **kwargs<block_start>print('\n' **kwargs)<line_sep>print((header<or>"class {0}:").format(name) **kwargs)<for_stmt>field fields<block_start>print(' %s = b%r'%field **kwargs)<block_end><block_end><def_stmt>main <block_start>terms=extract_terminology(sys.argv[1])<with_stmt>open(sys.argv[2] 'w')<as>f<block_start>print(FILE_HEADER file=f)<for_stmt>name terms<block_start>print_class(name terms[name] TERM_DEF file=f)<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
|
<import_stmt>h5py<import_stmt>os<import_from_stmt>tqdm tqdm<line_sep>input_dump_dir='dumps/sbcd_sqd_ftinb84_kl_x4_20181220_concat/dump/phrase/'<line_sep>select=6<line_sep>print(f'************** {select} *****************')<line_sep>input_dump_paths=sorted([os.path.join(input_dump_dir name)<for>name os.listdir(input_dump_dir)<if>'hdf5'<in>name])[select:select+1]<line_sep>print(input_dump_paths)<line_sep>input_dumps=[h5py.File(path 'r')<for>path input_dump_paths]<line_sep>dump_names=[os.path.splitext(os.path.basename(path))[0]<for>path input_dump_paths]<line_sep>dump_ranges=[list(map(int name.split('-')))<for>name dump_names]<line_sep>new_ranges=[]<for_stmt>range_ dump_ranges# print(range_)
<block_start>middle=sum(range_)<floordiv>2# split by half
new_range_=[[range_[0] middle] [middle range_[1]]]<line_sep># print(new_range_)
new_ranges.append(new_range_)<block_end>output_dumps=[[h5py.File(f'dumps/sbcd_sqd_ftinb84_kl_x4_20181220_concat/dump/phrase/{ra[0]}-{ra[1]}.hdf5' 'w')<for>ra range_]<for>range_ new_ranges]<line_sep>print(input_dumps)<line_sep>print(output_dumps)<line_sep>print(new_ranges)<line_sep># dev-100M-c 160408
# dev_wiki_noise 250000
<for_stmt>dump_idx,(input_dump new_range output_dump) tqdm(enumerate(zip(input_dumps new_ranges output_dumps)))<block_start>print(f'splitting {input_dump} to {output_dump}')<for_stmt>idx,(key val) tqdm(enumerate(input_dump.items()))# if idx < 250000/2:
<block_start><if_stmt>int(key)<l>new_range[0][1]<times>1000<block_start>output_dump[0].copy(val key)<block_end><else_stmt><block_start>output_dump[1].copy(val key)<block_end><block_end>input_dump.close()<line_sep>output_dump[0].close()<line_sep>output_dump[1].close()<block_end>print('copy done')<line_sep>
|