"""Base DirecTV Entity."""<import_from_future_stmt> annotations<import_from_stmt>directv DIRECTV<import_from_stmt>homeassistant.helpers.entity DeviceInfo Entity<import_from_stmt>.const DOMAIN<class_stmt>DIRECTVEntity(Entity)<block_start>"""Defines a base DirecTV entity."""<def_stmt>__init__ self * dtv:DIRECTV address:str="0"<arrow><none><block_start>"""Initialize the DirecTV entity."""<line_sep>self._address=address<line_sep>self._device_id=address<if>address<ne>"0"<else>dtv.device.info.receiver_id<line_sep>self._is_client=address<ne>"0"<line_sep>self.dtv=dtv<block_end>@property<def_stmt>device_info self<arrow>DeviceInfo<block_start>"""Return device information about this DirecTV receiver."""<line_sep><return>DeviceInfo(identifiers={(DOMAIN self._device_id)} manufacturer=self.dtv.device.info.brand name=self.name sw_version=self.dtv.device.info.version via_device=(DOMAIN self.dtv.device.info.receiver_id) )<block_end><block_end>
|
# Copyright 2016-2021 The <NAME> at the California Institute of
# Technology (Caltech), with support from the Paul Allen Family Foundation,
# Google, & National Institutes of Health (NIH) under Grant U24CA224309-01.
# All rights reserved.
#
# Licensed under a modified Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.github.com/vanvalenlab/deepcell-tf/LICENSE
#
# The Work provided may be used for non-commercial academic purposes only.
# For any other use of the Work, including commercial use, please contact:
# <EMAIL>
#
# Neither the name of Caltech nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Layers to encode location data"""<import_from_future_stmt> absolute_import<import_from_future_stmt> print_function<import_from_future_stmt> division<import_stmt>tensorflow<as>tf<import_from_stmt>tensorflow.keras backend<as>K<import_from_stmt>tensorflow.keras.layers Layer<import_from_stmt>tensorflow.python.keras.utils conv_utils<import_from_stmt>tensorflow.python.framework tensor_shape<line_sep>logger=tf.get_logger()<class_stmt>Location2D(Layer)<block_start>"""Location Layer for 2D cartesian coordinate locations.
Args:
data_format (str): A string, one of ``channels_last`` (default)
or ``channels_first``. The ordering of the dimensions in the
inputs. ``channels_last`` corresponds to inputs with shape
``(batch, height, width, channels)`` while ``channels_first``
corresponds to inputs with shape
``(batch, channels, height, width)``.
"""<def_stmt>__init__ self data_format=<none> **kwargs<block_start>in_shape=kwargs.pop('in_shape' <none>)<if_stmt>in_shape<is><not><none><block_start>logger.warn('in_shape (from deepcell.layerse.location) is '<concat>'deprecated and will be removed in a future version.')<block_end>super(Location2D self).__init__(**kwargs)<line_sep>self.data_format=conv_utils.normalize_data_format(data_format)<block_end><def_stmt>compute_output_shape self input_shape<block_start>input_shape=tensor_shape.TensorShape(input_shape).as_list()<line_sep>channel_axis=1<if>self.data_format<eq>'channels_first'<else>3<line_sep>input_shape[channel_axis]=2<line_sep><return>tensor_shape.TensorShape(input_shape)<block_end><def_stmt>call self inputs<block_start>input_shape=K.shape(inputs)<if_stmt>self.data_format<eq>'channels_first'<block_start>x=K.arange(0 input_shape[2] dtype=inputs.dtype)<line_sep>y=K.arange(0 input_shape[3] dtype=inputs.dtype)<block_end><else_stmt><block_start>x=K.arange(0 input_shape[1] dtype=inputs.dtype)<line_sep>y=K.arange(0 input_shape[2] dtype=inputs.dtype)<block_end>x=x/K.max(x)<line_sep>y=y/K.max(y)<line_sep>loc_x,loc_y=tf.meshgrid(x y indexing='ij')<if_stmt>self.data_format<eq>'channels_first'<block_start>loc=K.stack([loc_x loc_y] axis=0)<block_end><else_stmt><block_start>loc=K.stack([loc_x loc_y] axis=-1)<block_end>location=K.expand_dims(loc axis=0)<if_stmt>self.data_format<eq>'channels_first'<block_start>location=K.permute_dimensions(location pattern=[0 2 3 1])<block_end>location=tf.tile(location [input_shape[0] 1 1 1])<if_stmt>self.data_format<eq>'channels_first'<block_start>location=K.permute_dimensions(location pattern=[0 3 1 2])<block_end><return>location<block_end><def_stmt>get_config self<block_start>config={'data_format':self.data_format}<line_sep>base_config=super(Location2D self).get_config()<line_sep><return>dict(list(base_config.items())+list(config.items()))<block_end><block_end><class_stmt>Location3D(Layer)<block_start>"""Location Layer for 3D cartesian coordinate locations.
Args:
data_format (str): A string, one of ``channels_last`` (default)
or ``channels_first``. The ordering of the dimensions in the
inputs. ``channels_last`` corresponds to inputs with shape
``(batch, height, width, channels)`` while ``channels_first``
corresponds to inputs with shape
``(batch, channels, height, width)``.
"""<def_stmt>__init__ self data_format=<none> **kwargs<block_start>in_shape=kwargs.pop('in_shape' <none>)<if_stmt>in_shape<is><not><none><block_start>logger.warn('in_shape (from deepcell.layerse.location) is '<concat>'deprecated and will be removed in a future version.')<block_end>super(Location3D self).__init__(**kwargs)<line_sep>self.data_format=conv_utils.normalize_data_format(data_format)<block_end><def_stmt>compute_output_shape self input_shape<block_start>input_shape=tensor_shape.TensorShape(input_shape).as_list()<line_sep>channel_axis=1<if>self.data_format<eq>'channels_first'<else>4<line_sep>input_shape[channel_axis]=3<line_sep><return>tensor_shape.TensorShape(input_shape)<block_end><def_stmt>call self inputs<block_start>input_shape=K.shape(inputs)<if_stmt>self.data_format<eq>'channels_first'<block_start>z=K.arange(0 input_shape[2] dtype=inputs.dtype)<line_sep>x=K.arange(0 input_shape[3] dtype=inputs.dtype)<line_sep>y=K.arange(0 input_shape[4] dtype=inputs.dtype)<block_end><else_stmt><block_start>z=K.arange(0 input_shape[1] dtype=inputs.dtype)<line_sep>x=K.arange(0 input_shape[2] dtype=inputs.dtype)<line_sep>y=K.arange(0 input_shape[3] dtype=inputs.dtype)<block_end>x=x/K.max(x)<line_sep>y=y/K.max(y)<line_sep>z=z/K.max(z)<line_sep>loc_z,loc_x,loc_y=tf.meshgrid(z x y indexing='ij')<if_stmt>self.data_format<eq>'channels_first'<block_start>loc=K.stack([loc_z loc_x loc_y] axis=0)<block_end><else_stmt><block_start>loc=K.stack([loc_z loc_x loc_y] axis=-1)<block_end>location=K.expand_dims(loc axis=0)<if_stmt>self.data_format<eq>'channels_first'<block_start>location=K.permute_dimensions(location pattern=[0 2 3 4 1])<block_end>location=tf.tile(location [input_shape[0] 1 1 1 1])<if_stmt>self.data_format<eq>'channels_first'<block_start>location=K.permute_dimensions(location pattern=[0 4 1 2 3])<block_end><return>location<block_end><def_stmt>get_config self<block_start>config={'data_format':self.data_format}<line_sep>base_config=super(Location3D self).get_config()<line_sep><return>dict(list(base_config.items())+list(config.items()))<block_end><block_end>
|
import util


class test_collect:
    def init(self):
        for t in util.TYPES.ALL:
            cmd = "a = M.arange(%d, dtype=%s); " % (100, t)
            yield cmd

    def test_contract(self, cmd):
        cmd += "res = a / 180.0 * 3.14"
        return cmd

    def test_contract_reverse(self, cmd):
        cmd += "res = a * 3.14 / 180.0"
        return cmd
|
class DumbCRC32(object):
    def __init__(self):
        self._remainder = 0xffffffff
        self._reversed_polynomial = 0xedb88320
        self._final_xor = 0xffffffff

    def update(self, data):
        bit_count = len(data) * 8
        for bit_n in range(bit_count):
            bit_in = data[bit_n >> 3] & (1 << (bit_n & 7))
            self._remainder ^= 1 if bit_in != 0 else 0
            bit_out = (self._remainder & 1)
            self._remainder >>= 1
            if bit_out != 0:
                self._remainder ^= self._reversed_polynomial

    def digest(self):
        return self._remainder ^ self._final_xor

    def hexdigest(self):
        return '%08x' % self.digest()
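# A minimal usage sketch (not in the original file): the bitwise loop above
# should agree with the C implementation in the standard library's zlib.
if __name__ == '__main__':
    import zlib

    crc = DumbCRC32()
    crc.update(b'hello world')
    assert crc.hexdigest() == '%08x' % (zlib.crc32(b'hello world') & 0xffffffff)
    print(crc.hexdigest())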
|
"""Compute Anomaly map."""<line_sep># Original Code
# Copyright (c) 2022 hq-deng
# https://github.com/hq-deng/RD4AD
# SPDX-License-Identifier: MIT
#
# Modified
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
<import_from_stmt>typing List Tuple Union<import_stmt>torch<import_stmt>torch.nn.functional<as>F<import_from_stmt>kornia.filters gaussian_blur2d<import_from_stmt>omegaconf ListConfig<import_from_stmt>torch Tensor<class_stmt>AnomalyMapGenerator<block_start>"""Generate Anomaly Heatmap.
Args:
image_size (Union[ListConfig, Tuple]): Size of original image used for upscaling the anomaly map.
sigma (int): Standard deviation of the gaussian kernel used to smooth anomaly map.
mode (str, optional): Operation used to generate anomaly map. Options are `add` and `multiply`.
Defaults to "multiply".
Raises:
ValueError: In case modes other than multiply and add are passed.
"""<def_stmt>__init__ self image_size:Union[ListConfig Tuple] sigma:int=4 mode:str="multiply"<block_start>self.image_size=image_size<if>isinstance(image_size tuple)<else>tuple(image_size)<line_sep>self.sigma=sigma<line_sep>self.kernel_size=2<times>int(4.0<times>sigma+0.5)+1<if_stmt>mode<not><in>("add" "multiply")<block_start><raise>ValueError(f"Found mode {mode}. Only multiply and add are supported.")<block_end>self.mode=mode<block_end><def_stmt>__call__ self student_features:List[Tensor] teacher_features:List[Tensor]<arrow>Tensor<block_start>"""Computes anomaly map given encoder and decoder features.
Args:
student_features (List[Tensor]): List of encoder features
teacher_features (List[Tensor]): List of decoder features
Returns:
Tensor: Anomaly maps of length batch.
"""<if_stmt>self.mode<eq>"multiply"<block_start>anomaly_map=torch.ones([student_features[0].shape[0] 1 *self.image_size] device=student_features[0].device)<line_sep># b c h w
<block_end><elif_stmt>self.mode<eq>"add"<block_start>anomaly_map=torch.zeros([student_features[0].shape[0] 1 *self.image_size] device=student_features[0].device)<block_end><for_stmt>student_feature,teacher_feature zip(student_features teacher_features)<block_start>distance_map=1-F.cosine_similarity(student_feature teacher_feature)<line_sep>distance_map=torch.unsqueeze(distance_map dim=1)<line_sep>distance_map=F.interpolate(distance_map size=self.image_size mode="bilinear" align_corners=<true>)<if_stmt>self.mode<eq>"multiply"<block_start>anomaly_map<augmul>distance_map<block_end><elif_stmt>self.mode<eq>"add"<block_start>anomaly_map<augadd>distance_map<block_end><block_end>anomaly_map=gaussian_blur2d(anomaly_map kernel_size=(self.kernel_size self.kernel_size) sigma=(self.sigma self.sigma))<line_sep><return>anomaly_map<block_end><block_end>
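# A minimal usage sketch (not in the original file; feature shapes are
# illustrative). Each student/teacher feature pair contributes one upscaled
# cosine-distance map, combined multiplicatively here.
if __name__ == "__main__":
    generator = AnomalyMapGenerator(image_size=(64, 64), sigma=4, mode="multiply")
    student = [torch.rand(2, 8, 16, 16), torch.rand(2, 16, 8, 8)]
    teacher = [torch.rand(2, 8, 16, 16), torch.rand(2, 16, 8, 8)]
    print(generator(student, teacher).shape)  # torch.Size([2, 1, 64, 64])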
|
from resotolib.config import Config
from resoto_plugin_cleanup_untagged import CleanupUntaggedPlugin


def test_config():
    config = Config("dummy", "dummy")
    CleanupUntaggedPlugin.add_config(config)
    Config.init_default_config()
    assert Config.plugin_cleanup_untagged.enabled is False
    assert (
        Config.plugin_cleanup_untagged.validate(Config.plugin_cleanup_untagged)
        is True
    )
|
# Copyright 2021 The Layout Parser team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from layoutparser.elements import *
from layoutparser.ocr import *
from layoutparser.visualization import *
import cv2
import numpy as np


def test_viz():
    image = cv2.imread("tests/fixtures/ocr/test_gcv_image.jpg")

    ocr_agent = GCVAgent.with_credential(
        "tests/fixtures/ocr/test_gcv_credential.json", languages=["en"]
    )
    res = ocr_agent.load_response("tests/fixtures/ocr/test_gcv_response.json")

    draw_box(image, Layout([]))
    draw_text(image, Layout([]))

    draw_box(
        image,
        Layout(
            [
                Interval(0, 10, axis="x"),
                Rectangle(0, 50, 100, 80),
                Quadrilateral(np.array([[10, 10], [30, 40], [90, 40], [10, 20]])),
            ]
        ),
    )

    draw_text(
        image,
        Layout(
            [
                Interval(0, 10, axis="x"),
                Rectangle(0, 50, 100, 80),
                Quadrilateral(np.array([[10, 10], [30, 40], [90, 40], [10, 20]])),
            ]
        ),
    )

    for idx, level in enumerate(
        [
            GCVFeatureType.SYMBOL,
            GCVFeatureType.WORD,
            GCVFeatureType.PARA,
            GCVFeatureType.BLOCK,
            GCVFeatureType.PAGE,
        ]
    ):
        layout = ocr_agent.gather_full_text_annotation(res, level)
        draw_text(
            image,
            layout,
            arrangement="ud" if idx % 2 else "ud",
            font_size=15,
            text_color="pink",
            text_background_color="grey",
            text_background_alpha=0.1,
            with_box_on_text=True,
            text_box_width=2,
            text_box_color="yellow",
            text_box_alpha=0.2,
            with_layout=True,
            box_width=1,
            color_map={None: "blue"},
            show_element_id=True,
            id_font_size=8,
            box_alpha=0.25,
            id_text_background_alpha=0.25,
        )
        draw_box(image, layout)
        draw_text(image, layout)
|
import requests
import yaml
import os

# Special thanks to https://github.com/jhao104/proxy_pool for providing the
# proxy pool service. This class is a thin wrapper around that project's HTTP
# API to make it easier to call.

StopEvent = object()


class Proxy_pool():
    # Defaults to the local machine; 5010 is the default port of the
    # https://github.com/jhao104/proxy_pool project.
    host = "127.0.0.1"
    port = "5010"

    def __init__(self):
        config = open(os.path.join(os.getcwd(), "config.yaml"), mode="r", encoding="utf-8")
        cfg = config.read()
        yaml_line = yaml.load(stream=cfg, Loader=yaml.FullLoader)
        self.host = yaml_line["host"]
        self.port = yaml_line["port"]

    def get_proxy(self):
        return requests.get("http://{host}:{port}/get/".format(host=self.host, port=self.port)).json()

    def delete_proxy(self, proxy):
        requests.get(
            "http://{host}:{port}/delete/?proxy={proxy}".format(
                host=self.host, port=self.port, proxy=proxy
            )
        )

    # your spider code
    def get_response(self, url, headers, https=False, cookie="", retry_count=5):
        if https:
            is_https = self.get_proxy().get("https")
            print(is_https)
        proxy = self.get_proxy().get("proxy")
        response = None
        while retry_count > 0:
            try:
                # Request through the proxy.
                response = requests.get(url=url, headers=headers, cookies=cookie,
                                        proxies={"http": "http://{}".format(proxy)})
                return response
            except Exception:
                retry_count -= 1
        # Remove the failing proxy from the pool.
        self.delete_proxy(proxy)
        return response

    def post_response(self, url, headers, cookie, data, retry_count=5):
        proxy = self.get_proxy().get("proxy")
        response = None
        while retry_count > 0:
            try:
                # Request through the proxy.
                response = requests.post(url=url, headers=headers, data=data, cookies=cookie,
                                         proxies={"http": "http://{}".format(proxy)})
                return response
            except Exception:
                retry_count -= 1
        # Remove the failing proxy from the pool.
        self.delete_proxy(proxy)
        return response
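# A minimal usage sketch (assumptions: a proxy_pool server is reachable at the
# configured host/port and config.yaml provides "host" and "port"; the target
# URL and headers are illustrative).
if __name__ == "__main__":
    pool = Proxy_pool()
    resp = pool.get_response("http://httpbin.org/ip", headers={"User-Agent": "demo"})
    if resp is not None:
        print(resp.text)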
|
import os
from flask import Flask, render_template
from azure.eventhub import EventHubConsumerClient
from flask_socketio import SocketIO
from threading import Thread

# Load the environment variables
maps_key = os.environ["MAPS_KEY"]
connection_string = os.environ["CONNECTION_STRING"]
consumer_group_name = "$Default"

# Create the website with Flask
app = Flask(__name__)

# Create a secret key to keep the client side socket.io sessions secure.
# We are generating a random 24 digit Hex Key.
app.config["SECRET_KEY"] = os.urandom(24)

# Create the socket.io app
socketio = SocketIO(app, async_mode="threading")
thread = Thread()


# When a message is received from IoT hub, broadcast it to all clients that are listening through socket
def on_event_batch(partition_context, events):
    # Loop through all the events on the event hub - each event is a message from IoT Hub
    for event in events:
        # Send the event over the socket
        socketio.emit("mapdata", {"data": event.body_as_str()}, namespace="/get_data", broadcast=True)

    # Update the event hub checkpoint so we don't get the same messages again if we reconnect
    partition_context.update_checkpoint()


# A background method that is triggered by socket.io. This method connects to the Event Hub compatible endpoint
# on the IoT Hub and listens for messages
def event_hub_task():
    # Create the event hub client to receive messages from IoT hub
    client = EventHubConsumerClient.from_connection_string(
        conn_str=connection_string, consumer_group=consumer_group_name
    )

    # Set up the batch receiving of messages
    with client:
        client.receive_batch(on_event_batch=on_event_batch)


# This method is called when a request comes in for the root page
@app.route("/")
def root():
    # Create data for the home page to pass the maps key
    data = {"maps_key": maps_key}

    # Return the rendered HTML page
    return render_template("index.html", data=data)


# This is called when the socket on the web page connects to the socket.
# This starts a background thread that listens on the event hub
@socketio.on("connect", namespace="/get_data")
def socketio_connect():
    global thread
    print("Client connected")

    # If the thread is not already running, start it as a socket.io background task to
    # listen on messages from IoT Hub
    if not thread.is_alive():
        thread = socketio.start_background_task(event_hub_task)


# The main method - if this app is run via the command line, it starts the socket.io app.
def main():
    socketio.run(app)


if __name__ == "__main__":
    main()
|
"""Objects that compare equal to other objects for testing."""<import_from_stmt>h_matchers Any<import_from_stmt>pyramid.response Response<def_stmt>temporary_redirect_to location<block_start>"""Return a matcher for any `HTTP 302 Found` redirect to the given URL."""<line_sep><return>Any.instance_of(Response).with_attrs({"status_code":302 "location":location})<block_end>
|
from anonymization.base_anonymization import BaseAnonymization
from PIL import ImageFilter, Image


def find_boxes(bbox):
    nb = []
    for i in bbox:
        nb.append(i)
    return nb


class DetectionAnonymization(BaseAnonymization):
    def __init__(self):
        pass

    def blurring(self, image, response, degree=None, id=None, mask=None):
        """
        Blur the detected objects based on the user's requirements
        :param image: input image
        :param response: The response parsed from the object detection api
        :param degree: The degree of the anonymization (specified in the user_configuration file)
        :param id:
        :param mask:
        :return: The anonymized image
        """
        boxes = find_boxes(response)
        for i in boxes:
            cropped_image = image.crop((i[0], i[1], i[2], i[3]))
            blurred_image = cropped_image.filter(ImageFilter.GaussianBlur(25 * float(degree)))
            image.paste(blurred_image, (i[0], i[1], i[2], i[3]))
        return image

    def pixelating(self, image, response, degree=None, id=None, mask=None):
        """
        Pixelate the detected objects based on the user's requirements
        :param image: input image
        :param response: The response parsed from the object detection api
        :param degree: The degree of the anonymization (specified in the user_configuration file)
        :param id:
        :param mask:
        :return: The anonymized image
        """
        boxes = find_boxes(response)
        for i in boxes:
            cropped_image = image.crop((i[0], i[1], i[2], i[3]))
            w, h = cropped_image.size
            small = cropped_image.resize(
                (int(w / (float(degree) * w)), int(h / (float(degree) * h))),
                Image.BILINEAR)
            result = small.resize(cropped_image.size, Image.NEAREST)
            image.paste(result, (i[0], i[1], i[2], i[3]))
        return image

    def blackening(self, image, response, degree=None, id=None, mask=None):
        """
        Blacken the detected objects based on the user's requirements
        :param image: input image
        :param response: The response parsed from the object detection api
        :param degree: The degree of the anonymization (specified in the user_configuration file)
        :param id:
        :param mask:
        :return: The anonymized image
        """
        boxes = find_boxes(response)
        for i in boxes:
            cropped = image.crop((i[0], i[1], i[2], i[3]))
            h, w = cropped.size
            black = Image.new(str(image.mode), (h, w), 'black')
            result = Image.blend(cropped, black, float(degree))
            cropped.paste(result)
            image.paste(cropped, (i[0], i[1], i[2], i[3]))
        return image
|
# Copyright (c) 2017 The Verde Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
#
# This code is part of the Fatiando a Terra project (https://www.fatiando.org)
#
"""
Testing utilities.
"""<import_stmt>pytest<try_stmt><block_start><import_stmt>numba<block_end><except_stmt>ImportError<block_start>numba=<none><block_end><def_stmt>requires_numba function<block_start>"""
Skip the decorated test if numba is not installed.
"""<line_sep>mark=pytest.mark.skipif(numba<is><none> reason="requires numba")<line_sep><return>mark(function)<block_end>
|
# Simple n-gram (Markov chain) model for character-based text generation.
#
# Only tested with Python 3.6+
#
# <NAME> (http://eli.thegreenplace.net)
# This code is in the public domain
from __future__ import print_function
from collections import defaultdict, Counter
import random
import sys

# This is the length of the "state" the current character is predicted from.
# For Markov chains with memory, this is the "order" of the chain. For n-grams,
# the n is STATE_LEN+1 since it includes the predicted character as well.
STATE_LEN = 4


def weighted_from_counter(c):
    total = sum(c.values())
    idx = random.randrange(total)
    for elem, count in c.most_common():
        idx -= count
        if idx < 0:
            return elem


def main():
    filename = sys.argv[1]
    with open(filename, 'r') as f:
        data = f.read()

    states = defaultdict(Counter)
    print('Learning model...')
    for i in range(len(data) - STATE_LEN - 1):
        state = data[i:i + STATE_LEN]
        next = data[i + STATE_LEN]
        states[state][next] += 1

    print('Model has {0} states'.format(len(states)))

    j = 0
    for k, v in states.items():
        print(k, v)
        if j > 9:
            break
        j += 1

    print('Sampling...')
    state = random.choice(list(states))
    sys.stdout.write(state)
    for i in range(200):
        nextc = weighted_from_counter(states[state])
        sys.stdout.write(nextc)
        state = state[1:] + nextc
    print()


if __name__ == '__main__':
    main()
|
import linecache
import numpy
import json

from ..sources import ChunkSource, DerivedSource


###############################################################################
class TextLength(DerivedSource):
    """ Data source for audio lengths.
    """
    def __init__(self, source, num_entries):
        super().__init__()
        self.source = source
        self.num_entries = num_entries

    def derive(self, inputs):
        text_chunks, = inputs
        return numpy.array([[len(x)] for x in text_chunks], dtype='int32')

    def shape(self):
        return (1,)

    def requires(self):
        return (self.source,)

    def __len__(self):
        return self.num_entries


###############################################################################
class TextSource(DerivedSource):
    """ Data source for neat (non-ragged) one-hot represented text arrays.
    """
    def __init__(self, source, vocab, num_entries, seq_len,
                 padding='right', pad_with=None):
        super().__init__()
        self.num_entries = num_entries
        self.source = source
        self.seq_len = seq_len
        self.padding = padding
        self.pad_with = pad_with
        self.vocab = vocab
        self.char_to_int = {c: i for i, c in enumerate(self.vocab)}

    def _encode(self, char_seq):
        output = numpy.zeros(shape=(len(char_seq), len(self.vocab),))
        for i in range(len(char_seq)):
            output[i][self.char_to_int[char_seq[i]]] = 1
        assert output.shape[0] == len(char_seq)
        return output

    def derive(self, inputs):
        text_chunk, = inputs
        output = numpy.zeros(
            shape=(len(text_chunk), self.seq_len, len(self.vocab),),
            dtype='int32'
        )
        for i, char_seq in enumerate(text_chunk):
            char_seq = list(char_seq)
            if self.padding == 'right':
                if self.pad_with is not None:
                    char_seq = char_seq + (self.seq_len - len(char_seq)) * [self.pad_with]
                encoded_char_seq = self._encode(char_seq)
                assert len(encoded_char_seq) == len(char_seq)
                for j in range(len(encoded_char_seq)):
                    output[i][j] = encoded_char_seq[j]
            elif self.padding == 'left':
                if self.pad_with is not None:
                    char_seq = (self.seq_len - len(char_seq)) * [self.pad_with] + char_seq
                encoded_char_seq = self._encode(char_seq)
                assert len(encoded_char_seq) == len(char_seq)
                for j in range(len(encoded_char_seq)):
                    output[i][-len(char_seq) + j] = encoded_char_seq[j]
            else:
                raise ValueError('Padding must be left or right, not %s' % self.padding)
        return output

    def shape(self):
        """ Return the shape of the tensor (excluding batch size) returned by
            this data source.
        """
        return (self.seq_len, len(self.vocab),)

    def requires(self):
        return (self.source,)

    def __len__(self):
        return self.num_entries


###############################################################################
class RawText(ChunkSource):
    """ Data source for text stored in JSONL format like:
        ['a', 'p', 'p', 'l', 'e', ' ', 'p', 'i', 'e']
    """

    ###########################################################################
    @classmethod
    def default_chunk_size(cls):
        """ Returns the default chunk size for this source.
        """
        return 256

    ###########################################################################
    def shape(self):
        return (None,)

    ###########################################################################
    def __init__(self, source, key, num_entries, *args, **kwargs):
        """ Creates a new Text source for file named `source`.
        """
        super().__init__(*args, **kwargs)
        self.source = source
        self.num_entries = num_entries
        self.key = key
        self.indices = numpy.arange(len(self))

    ###########################################################################
    def __iter__(self):
        """ Return an iterator to the data. Yield the value for self.key
            from each object
        """
        start = 0
        while start < self.num_entries:
            end = min(self.num_entries, start + self.chunk_size)
            # linecache line numbering starts at 1
            batch = [
                json.loads(linecache.getline(self.source, i + 1).strip())[self.key]
                for i in self.indices[start:end]
            ]
            yield batch
            start = end

    ###########################################################################
    def __len__(self):
        """ Returns the total number of entries that this source can return, if
            known.
        """
        return self.num_entries

    ###########################################################################
    def can_shuffle(self):
        """ This source can be shuffled.
        """
        return True

    ###########################################################################
    def shuffle(self, indices):
        """ Applies a permutation to the data.
        """
        if len(indices) > len(self):
            raise ValueError('Shuffleable was asked to apply permutation, but '
                             'the permutation is longer than the length of the data set.')
        self.indices[:len(indices)] = self.indices[:len(indices)][indices]
|
import numpy as np

from helpers import bbox_helper


def test_lr2cw():
    lr_bbox = np.array([[1, 3], [2, 7], [19, 50]])
    output = bbox_helper.lr2cw(lr_bbox)
    answer = np.array([[2, 2], [4.5, 5], [34.5, 31]])
    assert np.isclose(output, answer).all()

    lr_bbox = np.array([[1.25, 2.75], [1.485, 3.123]])
    output = bbox_helper.lr2cw(lr_bbox)
    answer = np.array([[2, 1.5], [2.304, 1.638]])
    assert np.isclose(output, answer).all()


def test_cw2lr():
    cw_bbox = np.array([[2, 8], [6, 7]])
    output = bbox_helper.cw2lr(cw_bbox)
    answer = np.array([[-2, 6], [2.5, 9.5]])
    assert np.isclose(output, answer).all()

    cw_bbox = np.array([[1.524, 9.428], [4.518, 1.025]])
    output = bbox_helper.cw2lr(cw_bbox)
    answer = np.array([[-3.19, 6.238], [4.0055, 5.0305]])
    assert np.isclose(output, answer).all()


def test_seq2bbox():
    sequence = np.array([0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1])
    output = bbox_helper.seq2bbox(sequence)
    answer = [[1, 5], [8, 10], [15, 18]]
    assert np.isclose(output, answer).all()

    assert not bbox_helper.seq2bbox(np.array([0, 0, 0])).any()
    assert not bbox_helper.seq2bbox(np.array([])).any()


class TestIou(object):
    def setup(self):
        self.anchor_lr = np.array(
            [[1, 5], [1, 5], [1, 5], [1, 5], [1, 5]], dtype=np.float32)
        self.target_lr = np.array(
            [[1, 5], [0, 6], [2, 4], [3, 8], [8, 9]], dtype=np.float32)
        self.anchor_cw = bbox_helper.lr2cw(self.anchor_lr)
        self.target_cw = bbox_helper.lr2cw(self.target_lr)
        self.answer = np.array([1, 4 / 6, 2 / 4, 2 / 7, 0])

    def test_iou_lr(self):
        output = bbox_helper.iou_lr(self.anchor_lr, self.target_lr)
        assert np.isclose(output, self.answer).all()

    def test_iou_cw(self):
        output = bbox_helper.iou_cw(self.anchor_cw, self.target_cw)
        assert np.isclose(output, self.answer).all()


def test_nms():
    scores = np.array([0.9, 0.8, 0.7, 0.6])
    bboxes = np.array([[1, 5], [2, 4], [4, 8], [5, 9]])
    keep_scores, keep_bboxes = bbox_helper.nms(scores, bboxes, 0.5)
    ans_scores = [0.9, 0.7]
    ans_bboxes = [[1, 5], [4, 8]]
    assert np.isclose(keep_scores, ans_scores).all()
    assert np.isclose(keep_bboxes, ans_bboxes).all()
|
from ..models import Location
from .base import BaseLocationBroadcast


class LocationBroadcast(BaseLocationBroadcast):
    model = Location
|
"""Sorts GO IDs or user-provided sections containing GO IDs."""<line_sep>__copyright__="Copyright (C) 2016-2019, <NAME>, <NAME>, All rights reserved."<line_sep>__author__="<NAME>"<import_stmt>sys<import_stmt>collections<as>cx<import_from_stmt>goatools.wr_tbl prt_txt<import_from_stmt>goatools.grouper.sorter_nts SorterNts<import_from_stmt>goatools.grouper.sorter_gos SorterGoIds<import_from_stmt>goatools.grouper.wr_sections WrSectionsTxt<class_stmt>Sorter(object)<block_start>"""Sorts GO IDs or user-provided sections containing GO IDs.
User GO IDs grouped under header GO IDs are not sorted by the Grouper class.
Sort both user GO IDs in a group and header GO IDs across groups with these:
S: use_sections
s: section_sortby (T=True, F=False, S=lambda sort function)
h: hdrgo_sortby Sorts hdr GO IDs
u: sortby Sorts user GO IDs
P: hdrgo_prt If True, Removes GO IDs used as GO group headers; Leaves list in
sorted order, but removes header GO IDs which are not user GO IDs.
rm_h hdr_sort usr_sort S s h u p
--- ------------ ------------ -- -- -- -- --
case 1: NO hdrgo_sortby usrgo_sortby N T H U T
case 2: YES hdrgo_sortby usrgo_sortby N T H U F
case 3: NO section_order usrgo_sortby S F - U T
case 4: YES section_order usrgo_sortby S F - U F
case 5: YES |<--- section_sortby --->| S S - - -
|print|
sec usesec prthdr prtsec 1d 2d hdr usr
---- ------ ------ ------ -- -- --- ---
none - true - y . hdr usr A
none - false - y . ... usr B
yes False true - y . hdr usr A
yes False false - y . ... usr B
yes True True False . y hdr usr
yes True False False . y ... usr
"""<line_sep># Keywords for creating desc2nts
keys_nts=set(["hdrgo_prt" "section_prt" "top_n" "use_sections"])<def_stmt>__init__ self grprobj **kws# Keyword arguments:
<block_start>_sortby=kws.get('sortby')<line_sep>_hdrgo_sortby=kws.get('hdrgo_sortby')<line_sep>_section_sortby=kws.get('section_sortby')<line_sep># GO IDs are grouped, but not yet sorted
# print('SSSSSSSSSSS Sorter(sortby={} hdrgo_sortby={}, section_sortby={}'.format(
# _sortby, _hdrgo_sortby, _section_sortby))
self.grprobj=grprobj<line_sep># SorterGoIds can return either a 2-D list of sorted GO IDs or a flat sorted GO list
self.sortgos=SorterGoIds(grprobj _sortby _hdrgo_sortby)<line_sep>self.sectobj=SorterNts(self.sortgos _section_sortby)<if>grprobj.hdrobj.sections<else><none><block_end><def_stmt>prt_gos self prt=sys.stdout **kws_usr<block_start>"""Sort user GO ids, grouped under broader GO terms or sections. Print to screen."""<line_sep># deprecated
# Keyword arguments (control content): hdrgo_prt section_prt use_sections
# desc2nts contains: (sections hdrgo_prt sortobj) or (flat hdrgo_prt sortobj)
desc2nts=self.get_desc2nts(**kws_usr)<line_sep># Keyword arguments (control print format): prt prtfmt
self.prt_nts(desc2nts prt kws_usr.get('prtfmt'))<line_sep><return>desc2nts<block_end><def_stmt>prt_nts self desc2nts prt=sys.stdout prtfmt=<none><block_start>"""Print grouped and sorted GO IDs."""<line_sep># deprecated
# Set print format string
<if_stmt>prtfmt<is><none><block_start>prtfmt="{{hdr1usr01:2}} {FMT}\n".format(FMT=self.grprobj.gosubdag.prt_attr['fmt'])<block_end># 1-D: data to print is a flat list of namedtuples
<if_stmt>'flat'<in>desc2nts<block_start>prt_txt(prt desc2nts['flat'] prtfmt=prtfmt)<block_end># 2-D: data to print is a list of [(section, nts), ...
<else_stmt><block_start>WrSectionsTxt.prt_sections(prt desc2nts['sections'] prtfmt)<block_end><block_end><def_stmt>get_desc2nts self **kws_usr<block_start>"""Return grouped, sorted namedtuples in either format: flat, sections."""<line_sep># desc2nts contains: (sections hdrgo_prt sortobj) or (flat hdrgo_prt sortobj)
# keys_nts: hdrgo_prt section_prt top_n use_sections
kws_nts={k:v<for>k,v kws_usr.items()<if>k<in>self.keys_nts}<line_sep><return>self.get_desc2nts_fnc(**kws_nts)<block_end><def_stmt>get_desc2nts_fnc self hdrgo_prt=<true> section_prt=<none> top_n=<none> use_sections=<true><block_start>"""Return grouped, sorted namedtuples in either format: flat, sections."""<line_sep># RETURN: flat list of namedtuples
nts_flat=self.get_nts_flat(hdrgo_prt use_sections)<if_stmt>nts_flat<block_start>flds=nts_flat[0]._fields<if_stmt><not>use_sections<block_start><return>{'sortobj':self 'flat':nts_flat 'hdrgo_prt':hdrgo_prt 'flds':flds 'num_items':len(nts_flat) 'num_sections':1}<block_end><else_stmt><block_start><return>{'sortobj':self 'sections':[(self.grprobj.hdrobj.secdflt nts_flat)] 'hdrgo_prt':hdrgo_prt 'flds':flds 'num_items':len(nts_flat) 'num_sections':1}<block_end><block_end># print('FFFF Sorter:get_desc2nts_fnc: nts_flat is None')
# RETURN: 2-D list [(section_name0, namedtuples0), (section_name1, namedtuples1), ...
# kws: top_n hdrgo_prt section_sortby
# Over-ride hdrgo_prt depending on top_n value
<assert_stmt>top_n<is><not><true><and>top_n<is><not><false> "top_n({T}) MUST BE None OR AN int".format(T=top_n)<assert_stmt>self.sectobj<is><not><none> "SECTIONS OBJECT DOES NOT EXIST"<line_sep>sec_sb=self.sectobj.section_sortby<line_sep># Override hdrgo_prt, if sorting by sections or returning a subset of GO IDs in section
hdrgo_prt_curr=hdrgo_prt<is><true><if_stmt>sec_sb<is><true><or>(sec_sb<is><not><false><and>sec_sb<is><not><none>)<or>top_n<is><not><none><block_start>hdrgo_prt_curr=<false><block_end># print('GGGG Sorter:get_desc2nts_fnc: hdrgo_prt_curr({}) sec_sb({}) top_n({})'.format(
# hdrgo_prt_curr, sec_sb, top_n))
nts_section=self.sectobj.get_sorted_nts_keep_section(hdrgo_prt_curr)<line_sep># print('HHHH Sorter:get_desc2nts_fnc: nts_section')
# Take top_n in each section, if requested
<if_stmt>top_n<is><not><none><block_start>nts_section=[(s nts[:top_n])<for>s,nts nts_section]<if_stmt>section_prt<is><none><block_start>nts_flat=self.get_sections_flattened(nts_section)<line_sep>flds=nts_flat[0]._fields<if>nts_flat<else>[]<line_sep><return>{'sortobj':self 'flat':nts_flat 'hdrgo_prt':hdrgo_prt_curr 'flds':flds 'num_items':len(nts_flat) 'num_sections':1}<block_end><block_end># Send flat list of sections nts back, as requested
<if_stmt>section_prt<is><false><block_start>nts_flat=self.get_sections_flattened(nts_section)<line_sep>flds=nts_flat[0]._fields<if>nts_flat<else>[]<line_sep><return>{'sortobj':self 'flat':nts_flat 'hdrgo_prt':hdrgo_prt_curr 'flds':flds 'num_items':len(nts_flat) 'num_sections':len(nts_section)}<block_end># Send 2-D sections nts back
# print('IIII Sorter:get_desc2nts_fnc: nts_section')
flds=nts_section[0][1][0]._fields<if>nts_section<else>[]<line_sep><return>{'sortobj':self 'sections':nts_section 'hdrgo_prt':hdrgo_prt_curr 'flds':flds 'num_items':sum(len(nts)<for>_,nts nts_section) 'num_sections':len(nts_section)}<block_end>@staticmethod<def_stmt>get_sections_flattened section_nts<block_start>"""Convert [(section0, nts0), (section1, nts1), ... to [*nts0, *nts1, ..."""<line_sep>nt_flds=list(section_nts[0][1][0]._fields)<line_sep># Flatten section_nts 2-D list
<if_stmt>'section'<in>nt_flds<block_start><return>[nt<for>_,nts section_nts<for>nt nts]<block_end># Flatten section_nts 2-D list, and add sections to each namedtuple
nt_flds.append('section')<line_sep>nts_flat=[]<line_sep>ntobj=cx.namedtuple("Nt" " ".join(nt_flds))<for_stmt>section_name,nts section_nts<block_start><for_stmt>nt_go nts<block_start>vals=list(nt_go)+[section_name]<line_sep>nts_flat.append(ntobj._make(vals))<block_end><block_end><return>nts_flat<block_end><def_stmt>get_nts_flat self hdrgo_prt=<true> use_sections=<true><block_start>"""Return a flat list of sorted nts."""<line_sep># Either there are no sections OR we are not using them
<if_stmt>self.sectobj<is><none><or><not>use_sections<block_start><return>self.sortgos.get_nts_sorted(hdrgo_prt hdrgos=self.grprobj.get_hdrgos() hdrgo_sort=<true>)<block_end><if_stmt><not>use_sections<block_start><return>self.sectobj.get_sorted_nts_omit_section(hdrgo_prt hdrgo_sort=<true>)<block_end><return><none><block_end>@staticmethod<def_stmt>get_fields desc2nts<block_start>"""Return grouped, sorted namedtuples in either format: flat, sections."""<if_stmt>'flat'<in>desc2nts<block_start>nts_flat=desc2nts.get('flat')<if_stmt>nts_flat<block_start><return>nts_flat[0]._fields<block_end><block_end><if_stmt>'sections'<in>desc2nts<block_start>nts_sections=desc2nts.get('sections')<if_stmt>nts_sections<block_start><return>nts_sections[0][1][0]._fields<block_end><block_end><block_end><block_end># Copyright (C) 2016-2019, <NAME>, <NAME>, All rights reserved.
|
# Function to calculate the mask of a number.
def split(n):
    b = []
    # Iterate over the number digit by digit.
    while n > 0:
        # If the digit is a lucky digit, append it to the list.
        if n % 10 == 4 or n % 10 == 7:
            b.append(n % 10)
        n //= 10
    # Return the mask.
    return b


# Read the two input values.
x, y = [int(x) for x in input().split()]
# Calculate the mask of 'y'.
a = split(y)
# Iterate over values greater than 'x'.
for i in range(x + 1, 1000000):
    # If the masks are equal, output the integer and break the loop.
    if split(i) == a:
        print(i)
        break
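# A worked example (illustrative, not part of the original script): for input
# "1 47" the mask of 47 is [7, 4] (lucky digits collected from the least
# significant digit up), and the smallest i > 1 with the same mask is 47
# itself, so the program prints 47.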
|
"""
Script for splitting a dataset hdf5 file into training and validation trajectories.
Args:
dataset (str): path to hdf5 dataset
filter_key (str): if provided, split the subset of trajectories
in the file that correspond to this filter key into a training
and validation set of trajectories, instead of splitting the
full set of trajectories
ratio (float): validation ratio, in (0, 1). Defaults to 0.1, which is 10%.
Example usage:
python split_train_val.py --dataset /path/to/demo.hdf5 --ratio 0.1
"""<import_stmt>argparse<import_stmt>h5py<import_stmt>numpy<as>np<import_from_stmt>robomimic.utils.file_utils create_hdf5_filter_key<def_stmt>split_train_val_from_hdf5 hdf5_path val_ratio=0.1 filter_key=<none><block_start>"""
Splits data into training set and validation set from HDF5 file.
Args:
hdf5_path (str): path to the hdf5 file
to load the transitions from
val_ratio (float): ratio of validation demonstrations to all demonstrations
filter_key (str): if provided, split the subset of demonstration keys stored
under mask/@filter_key instead of the full set of demonstrations
"""<line_sep># retrieve number of demos
f=h5py.File(hdf5_path "r")<if_stmt>filter_key<is><not><none><block_start>print("using filter key: {}".format(filter_key))<line_sep>demos=sorted([elem.decode("utf-8")<for>elem np.array(f["mask/{}".format(filter_key)])])<block_end><else_stmt><block_start>demos=sorted(list(f["data"].keys()))<block_end>num_demos=len(demos)<line_sep>f.close()<line_sep># get random split
num_demos=len(demos)<line_sep>num_val=int(val_ratio<times>num_demos)<line_sep>mask=np.zeros(num_demos)<line_sep>mask[:num_val]=1.<line_sep>np.random.shuffle(mask)<line_sep>mask=mask.astype(int)<line_sep>train_inds=(1-mask).nonzero()[0]<line_sep>valid_inds=mask.nonzero()[0]<line_sep>train_keys=[demos[i]<for>i train_inds]<line_sep>valid_keys=[demos[i]<for>i valid_inds]<line_sep>print("{} validation demonstrations out of {} total demonstrations.".format(num_val num_demos))<line_sep># pass mask to generate split
name_1="train"<line_sep>name_2="valid"<if_stmt>filter_key<is><not><none><block_start>name_1="{}_{}".format(filter_key name_1)<line_sep>name_2="{}_{}".format(filter_key name_2)<block_end>train_lengths=create_hdf5_filter_key(hdf5_path=hdf5_path demo_keys=train_keys key_name=name_1)<line_sep>valid_lengths=create_hdf5_filter_key(hdf5_path=hdf5_path demo_keys=valid_keys key_name=name_2)<line_sep>print("Total number of train samples: {}".format(np.sum(train_lengths)))<line_sep>print("Average number of train samples {}".format(np.mean(train_lengths)))<line_sep>print("Total number of valid samples: {}".format(np.sum(valid_lengths)))<line_sep>print("Average number of valid samples {}".format(np.mean(valid_lengths)))<block_end><if_stmt>__name__<eq>"__main__"<block_start>parser=argparse.ArgumentParser()<line_sep>parser.add_argument("--dataset" type=str help="path to hdf5 dataset" )<line_sep>parser.add_argument("--filter_key" type=str default=<none> help="if provided, split the subset of trajectories in the file that correspond to\
this filter key into a training and validation set of trajectories, instead of\
splitting the full set of trajectories" )<line_sep>parser.add_argument("--ratio" type=float default=0.1 help="validation ratio, in (0, 1)")<line_sep>args=parser.parse_args()<line_sep># seed to make sure results are consistent
np.random.seed(0)<line_sep>split_train_val_from_hdf5(args.dataset val_ratio=args.ratio filter_key=args.filter_key)<block_end>
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
The information of network in mindspore_hub.
"""<import_from_stmt>._utils.check ValidMarkdown<class_stmt>CellInfo<block_start>"""
Information of network.
"""<def_stmt>__init__ self md_path<block_start>json_dict=ValidMarkdown(md_path).check_markdown_file()<line_sep>self.name=json_dict.get('model-name')<line_sep>self.backbone_name=json_dict.get('backbone-name')<line_sep>self.type=json_dict.get('module-type')<line_sep>self.fine_tunable=json_dict.get('fine-tunable')<line_sep>self.input_shape=json_dict.get('input-shape')<line_sep>self.author=json_dict.get('author')<line_sep>self.update_time=json_dict.get('update-time')<line_sep>self.repo_link=json_dict.get('repo-link')<line_sep>self.user_id=json_dict.get('user-id')<line_sep>self.backend=json_dict.get('backend')<if_stmt>json_dict.get('allow-cache-ckpt')<is><not><none><block_start>self.allow_cache_ckpt=json_dict.get('allow-cache-ckpt')<block_end>self.dataset=json_dict.get('train-dataset')<line_sep>self.license=json_dict.get('license')<line_sep>self.accuracy=json_dict.get('accuracy')<line_sep>self.used_for=json_dict.get('used-for')<line_sep>self.model_version=json_dict.get('model-version')<line_sep>self.mindspore_version=json_dict.get('mindspore-version')<line_sep>self.asset=json_dict.get('asset')<line_sep>self.asset_id=json_dict.get('asset-id')<block_end><block_end>
|
from time import sleep
from kivy.logger import Logger
import zipfile
from os.path import exists


class Waiter(object):
    """
    This class simply waits for the update to finish and then closes.
    """

    def __init__(self):
        super(Waiter, self).__init__()
        ''' Hold a reference to the display wrapper class'''
        self.counter = 0

    def wait(self):
        """ Start the loop where we wait for messages. """
        while self.counter < 3:
            Logger.info("service_one/main.py: counter = {0}".format(self.counter))
            sleep(.5)
            self.counter += 1
        Logger.info("service_one/main.py: count ended. About to open")
        self.open_zip()

    def open_zip(self):
        """ Open a standard zip file. """
        file_name = "service_one/main.zip"
        if exists(file_name):
            Logger.info("service_one/main.py: zip found. About to open.")
            my_zip = zipfile.ZipFile(file_name, "r")
            Logger.info("service_one/main.py: zip open. contains {0}".format(my_zip.filelist))
            Logger.info("service_one/main.py: zip examined. Exiting.")
        else:
            Logger.info("service_one/main.py: zip not found. Exiting.")


if __name__ == '__main__':
    Waiter().wait()
|
import os
import shutil
from tempfile import mkdtemp
from typing import List, Callable, Tuple, Union

import pandas as pd
from typing_extensions import Literal

from dtale_desktop.settings import settings

__all__ = ["fs"]

_SENTINEL = object()

_TimeStampFormat = Literal["pandas", "unix_seconds", "unix_milliseconds"]


class _FileSystem:
    ROOT_DIR: str
    LOADERS_DIR: str
    ADDITIONAL_LOADERS_DIRS: List[str]
    CACHE_DIR: str
    DATA_DIR: str
    PROFILE_REPORTS_DIR: str

    _instance = _SENTINEL

    def __init__(self):
        if self._instance is not _SENTINEL:
            raise Exception("_Files is a singleton")
        self._instance = self
        self.ROOT_DIR = settings.ROOT_DIR
        self.LOADERS_DIR = os.path.join(self.ROOT_DIR, "loaders")
        self.ADDITIONAL_LOADERS_DIRS = settings.ADDITIONAL_LOADERS_DIRS
        self.CACHE_DIR = os.path.join(self.ROOT_DIR, "cache")
        self.DATA_DIR = os.path.join(self.CACHE_DIR, "data")
        self.PROFILE_REPORTS_DIR = os.path.join(self.CACHE_DIR, "profile_reports")
        self.create_directory(self.ROOT_DIR)
        self.create_directory(self.CACHE_DIR)
        self.create_directory(self.DATA_DIR)
        self.create_directory(self.PROFILE_REPORTS_DIR)
        self.create_python_package(self.LOADERS_DIR)

    def create_directory(self, path: str) -> None:
        os.makedirs(path, exist_ok=True)

    def create_file(self, path: str, contents: str = "") -> None:
        file = open(path, "w")
        file.write(contents)
        file.close()

    def delete_file(self, path: str) -> None:
        if os.path.exists(path):
            os.remove(path)

    def get_file_last_modified(
        self, path: str, format: _TimeStampFormat = "pandas"
    ) -> Union[int, pd.Timestamp]:
        ts = os.path.getmtime(path)
        if format == "pandas":
            return pd.Timestamp.fromtimestamp(ts)
        elif format == "unix_seconds":
            return int(ts)
        else:
            return int(ts) * 1000

    @staticmethod
    def _format_data_file_name(name: str):
        return name if name.endswith(".pkl") else f"{name}.pkl"

    def data_path(self, data_id: str) -> str:
        return os.path.join(self.DATA_DIR, self._format_data_file_name(data_id))

    def save_data(self, data_id: str, data: pd.DataFrame) -> None:
        data.to_pickle(self.data_path(data_id))

    def data_exists(self, data_id: str) -> bool:
        return os.path.exists(self.data_path(data_id))

    def read_data(self, data_id: str) -> pd.DataFrame:
        return pd.read_pickle(self.data_path(data_id))

    def delete_data(self, data_id: str) -> None:
        self.delete_file(self.data_path(data_id))

    @staticmethod
    def _format_profile_report_name(name: str):
        return name if name.endswith(".html") else f"{name}.html"

    def profile_report_path(self, data_id: str):
        return os.path.join(
            self.PROFILE_REPORTS_DIR, self._format_profile_report_name(data_id)
        )

    def profile_report_exists(self, data_id: str) -> bool:
        return os.path.exists(self.profile_report_path(data_id))

    def read_profile_report(self, data_id: str) -> str:
        with open(self.profile_report_path(data_id), encoding="utf-8") as f:
            return f.read()

    def delete_profile_report(self, data_id: str) -> None:
        self.delete_file(self.profile_report_path(data_id))

    def delete_all_cached_data(self, data_id: str) -> None:
        self.delete_data(data_id)
        self.delete_profile_report(data_id)

    def create_temp_directory(
        self, folder_name: str = "temp"
    ) -> Tuple[str, Callable[[], None]]:
        temp_dir = os.path.join(mkdtemp(), folder_name)
        return temp_dir, lambda: shutil.rmtree(temp_dir)

    def create_python_package(self, path: str) -> None:
        if not os.path.exists(path):
            self.create_directory(path)
            init_file = os.path.join(path, "__init__.py")
            if not os.path.exists(init_file):
                self.create_file(init_file)


fs = _FileSystem()
|
import pytest
import torch

from torchphysics.models.activation_fn import AdaptiveActivationFunction


def test_create_adaptive_with_tanh():
    adap_fn = AdaptiveActivationFunction(torch.nn.Tanh())
    assert isinstance(adap_fn.activation_fn, torch.nn.Tanh)
    assert adap_fn.a == 1.0
    assert adap_fn.scaling == 1.0


def test_create_adaptive_with_ReLu():
    adap_fn = AdaptiveActivationFunction(torch.nn.ReLU(), inital_a=5.0, scaling=10.0)
    assert isinstance(adap_fn.activation_fn, torch.nn.ReLU)
    assert adap_fn.a == 5.0
    assert adap_fn.a.requires_grad
    assert adap_fn.scaling == 10.0


def test_forward_of_adaptive_activation():
    input_x = torch.tensor([[1.0], [2.0], [-5.0]])
    adap_fn = AdaptiveActivationFunction(torch.nn.ReLU(), inital_a=5.0, scaling=10.0)
    output_x = adap_fn(input_x)
    assert len(output_x) == 3
    assert output_x[0] == 50.0
    assert output_x[1] == 100.0
    assert output_x[2] == 0.0
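# A minimal sketch (not part of the test file) of the behavior these tests
# imply: forward(x) = scaling * fn(a * x), with `a` a trainable parameter.
# The real torchphysics implementation may differ in detail.
class _AdaptiveActivationSketch(torch.nn.Module):
    def __init__(self, activation_fn, inital_a=1.0, scaling=1.0):
        super().__init__()
        self.activation_fn = activation_fn
        self.a = torch.nn.Parameter(torch.tensor(inital_a))  # learnable slope
        self.scaling = scaling                               # fixed gain

    def forward(self, x):
        return self.scaling * self.activation_fn(self.a * x)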
|
<import_stmt>multiprocessing<import_stmt>os<import_from_stmt>datetime datetime<import_stmt>six<import_from_stmt>fabric api<as>fab<import_from_stmt>fabric.contrib files<import_stmt>fabricio<import_from_stmt>fabricio docker utils<import_from_stmt>fabricio.docker.base Attribute<import_from_stmt>fabricio.utils Options<class_stmt>PostgresqlBackupMixin(docker.BaseService)<block_start>"""
Your Docker image must have pg_dump and pg_restore installed in order
to run backup and restore respectively
(usually this requires `postgresql-client-common` package for Ubuntu/Debian)
"""<line_sep>db_name=Attribute()<line_sep>db_user=Attribute(default='postgres')<line_sep>db_host=Attribute()<line_sep>db_port=Attribute()<line_sep>db_backup_dir=Attribute()<line_sep>db_backup_format=Attribute(default='c')<line_sep>db_backup_compress_level=Attribute()# 0-9 (0 - no compression, 9 - max)
db_backup_workers=Attribute(default=1)<line_sep>db_restore_workers=Attribute(default=4)<line_sep>db_backup_filename=Attribute(default='{datetime:%Y-%m-%dT%H:%M:%S.%f}.dump')<line_sep>@property<def_stmt>db_connection_options self<block_start><return>Options([('username' self.db_user) ('host' self.db_host) ('port' self.db_port) ])<block_end>@property<def_stmt>db_backup_options self<block_start><return>Options([('if-exists' <true>) ('create' <true>) ('clean' <true>) ])<block_end><def_stmt>make_backup_command self<block_start>options=Options(self.db_connection_options)<line_sep>options.update(self.db_backup_options)<line_sep>options.update([('format' self.db_backup_format) ('dbname' self.db_name) ('compress' self.db_backup_compress_level) ('jobs' self.db_backup_workers) ('file' os.path.join(self.db_backup_dir self.db_backup_filename.format(datetime=datetime.utcnow()))) ])<line_sep><return>'pg_dump {options}'.format(options=options)<block_end>@fabricio.once_per_task<def_stmt>backup self<block_start><if_stmt>self.db_backup_dir<is><none><block_start>fab.abort('db_backup_dir not set, can\'t continue with backup')<block_end>command=self.make_backup_command()<line_sep>self.image.run(command=command quiet=<false> options=self.safe_options )<block_end>@property<def_stmt>db_restore_options self<block_start><return>self.db_backup_options<block_end><def_stmt>make_restore_command self backup_filename<block_start>options=Options(self.db_connection_options)<line_sep>options.update(self.db_restore_options)<line_sep>options.update([('dbname' 'template1') # use any existing DB
('jobs' self.db_restore_workers) ('file' os.path.join(self.db_backup_dir backup_filename)) ])<line_sep><return>'pg_restore {options}'.format(options=options)<block_end>@fabricio.once_per_task<def_stmt>restore self backup_name=<none><block_start>"""
Before run this method you have somehow to disable incoming connections,
e.g. by stopping all database client containers:
client_container.stop()
pg_container.restore()
client_container.start()
"""<if_stmt>self.db_backup_dir<is><none><block_start>fab.abort('db_backup_dir not set, can\'t continue with restore')<block_end><if_stmt>backup_name<is><none><block_start><raise>ValueError('backup_filename not provided')<block_end>command=self.make_restore_command(backup_name)<line_sep>self.image.run(command=command quiet=<false> options=self.safe_options )<block_end><block_end><class_stmt>PostgresqlContainer(docker.Container)<block_start>pg_conf=Attribute(default='postgresql.conf')<line_sep>pg_hba=Attribute(default='pg_hba.conf')<line_sep>pg_data=Attribute(default=NotImplemented)<line_sep>sudo=Attribute(default=<false>)<line_sep>stop_signal='INT'<line_sep>stop_timeout=30<def_stmt>update_config self content path<block_start>old_file=six.BytesIO()<if_stmt>files.exists(path use_sudo=self.sudo)<block_start>fab.get(remote_path=path local_path=old_file use_sudo=self.sudo)<block_end>old_content=old_file.getvalue()<line_sep>need_update=content<ne>old_content<if_stmt>need_update<block_start>fabricio.move_file(path_from=path path_to=path+'.backup' sudo=self.sudo ignore_errors=<true> )<line_sep>fab.put(six.BytesIO(content) path use_sudo=self.sudo mode='0644')<line_sep>fabricio.log('{path} updated'.format(path=path))<block_end><else_stmt><block_start>fabricio.log('{path} not changed'.format(path=path))<block_end><return>need_update<block_end><def_stmt>db_exists self<block_start><return>files.exists(os.path.join(self.pg_data 'PG_VERSION') use_sudo=self.sudo )<block_end><def_stmt>create_db self tag=<none> registry=<none> account=<none><block_start>"""
The official PostgreSQL Docker image executes 'postgres initdb' before
any command starting with 'postgres' (see /docker-entrypoint.sh);
therefore, if you use a custom image, you probably have to implement
your own `create_db()`
"""<line_sep>fabricio.log('PostgreSQL database not found, creating new...')<line_sep>self.image[registry:tag:account].run('postgres --version' # create new DB (see method description)
options=self.safe_options quiet=<false> )<block_end><def_stmt>update self tag=<none> registry=<none> account=<none> force=<false><block_start><if_stmt><not>any(map(self.options.__contains__ ['volume' 'mount']))# TODO better check if volume or mount properly defined
<block_start>fab.abort('Make sure you properly define volume or mount for DB data, '<concat>'Fabricio cannot work properly without it')<block_end><if_stmt><not>self.db_exists()<block_start>self.create_db(tag=tag registry=registry account=account)<block_end>main_conf=os.path.join(self.pg_data 'postgresql.conf')<line_sep>hba_conf=os.path.join(self.pg_data 'pg_hba.conf')<line_sep>main_config_updated=self.update_config(content=open(self.pg_conf 'rb').read() path=main_conf )<line_sep>hba_config_updated=self.update_config(content=open(self.pg_hba 'rb').read() path=hba_conf )<line_sep>container_updated=super(PostgresqlContainer self).update(force=force tag=tag registry=registry account=account )<if_stmt><not>container_updated<block_start><if_stmt>main_config_updated<block_start>self.reload()<block_end><elif_stmt>hba_config_updated<block_start>self.signal('HUP')<block_end><else_stmt><block_start><return><false># nothing updated
<block_end><try_stmt># remove container backup to prevent reverting to old version
<block_start>self.get_backup_version().delete(delete_image=<true>)<block_end><except_stmt>docker.ContainerNotFoundError<block_start><pass><block_end><block_end><if_stmt><not>main_config_updated# remove main config backup to prevent reverting to old version
<block_start>main_conf_backup=main_conf+'.backup'<line_sep>fabricio.remove_file(main_conf_backup ignore_errors=<true> sudo=self.sudo )<block_end><if_stmt><not>hba_config_updated# remove pg_hba config backup to prevent reverting to old version
<block_start>hba_conf_backup=hba_conf+'.backup'<line_sep>fabricio.remove_file(hba_conf_backup ignore_errors=<true> sudo=self.sudo )<block_end><return><true><block_end><def_stmt>revert self<block_start>main_conf=os.path.join(self.pg_data 'postgresql.conf')<line_sep>main_conf_backup=main_conf+'.backup'<line_sep>hba_conf=os.path.join(self.pg_data 'pg_hba.conf')<line_sep>hba_conf_backup=hba_conf+'.backup'<line_sep>main_config_reverted=fabricio.move_file(path_from=main_conf_backup path_to=main_conf ignore_errors=<true> sudo=self.sudo ).succeeded<line_sep>hba_config_reverted=fabricio.move_file(path_from=hba_conf_backup path_to=hba_conf ignore_errors=<true> sudo=self.sudo ).succeeded<try_stmt><block_start>super(PostgresqlContainer self).revert()<block_end><except_stmt>docker.ContainerError<block_start><if_stmt>main_config_reverted<block_start>self.reload()<block_end><elif_stmt>hba_config_reverted<block_start>self.signal('HUP')<block_end><else_stmt><block_start><raise><block_end><block_end><block_end><def_stmt>destroy self delete_data=<false><block_start>super(PostgresqlContainer self).destroy()<if_stmt>utils.strtobool(delete_data)<block_start>fabricio.remove_file(self.pg_data sudo=self.sudo force=<true> recursive=<true> )<block_end><block_end><block_end><class_stmt>StreamingReplicatedPostgresqlContainer(PostgresqlContainer)<block_start>pg_recovery=Attribute(default='recovery.conf')<line_sep>pg_recovery_primary_conninfo=Attribute(default="primary_conninfo = 'host={host} port={port} user={user}'")<line_sep># type: str
pg_recovery_port=Attribute(default=5432)<line_sep>pg_recovery_user=Attribute(default='postgres')<line_sep>pg_recovery_revert_enabled=Attribute(default=<false>)<line_sep>pg_recovery_master_promotion_enabled=Attribute(default=<false>)<line_sep>pg_recovery_wait_for_master_seconds=Attribute(default=30)<def_stmt>__init__ self *args **kwargs<block_start>super(StreamingReplicatedPostgresqlContainer self).__init__(*args **kwargs)<line_sep>self.master_obtained=multiprocessing.Event()<line_sep>self.master_lock=multiprocessing.Lock()<line_sep>self.multiprocessing_data=data=multiprocessing.Manager().Namespace()<line_sep>data.db_exists=<false><line_sep>data.exception=<none><line_sep>self.instances=multiprocessing.JoinableQueue()<block_end><def_stmt>copy_data_from_master self tag=<none> registry=<none> account=<none><block_start>pg_basebackup_command=('pg_basebackup'<concat>' --progress'<concat>' --write-recovery-conf'<concat>' -X stream'<concat>' --pgdata=$PGDATA'<concat>' --host={host}'<concat>' --username={user}'<concat>' --port={port}'<concat>''.format(host=self.multiprocessing_data.master user=self.pg_recovery_user port=self.pg_recovery_port ))<line_sep>command="/bin/bash -c '{pg_basebackup_command}'".format(pg_basebackup_command=pg_basebackup_command )<line_sep>self.image[registry:tag:account].run(command=command options=self.options quiet=<false> )<block_end><def_stmt>get_recovery_config self<block_start>recovery_config=open(self.pg_recovery).read()<line_sep>primary_conninfo=self.pg_recovery_primary_conninfo.format(host=self.multiprocessing_data.master port=self.pg_recovery_port user=self.pg_recovery_user )<line_sep>recovery_config_items=[row<for>row recovery_config.splitlines()<if><not>row.startswith('primary_conninfo')]<line_sep>recovery_config_items.append(primary_conninfo)<line_sep><return>('\n'.join(recovery_config_items)+'\n').encode()<block_end><def_stmt>set_master_info self<block_start><if_stmt>self.multiprocessing_data.exception<is><not><none><block_start>fab.abort('Task aborted due to an exception: {exception}'.format(exception=self.multiprocessing_data.exception ))<block_end>fabricio.log('Found master: {host}'.format(host=fab.env.host))<line_sep>self.multiprocessing_data.master=fab.env.host<block_end><def_stmt>update_recovery_config self tag=<none> registry=<none> account=<none><block_start>db_exists=self.db_exists()<line_sep>recovery_conf_file=os.path.join(self.pg_data 'recovery.conf')<if_stmt>db_exists<block_start>self.multiprocessing_data.db_exists=<true><if_stmt><not>files.exists(recovery_conf_file use_sudo=self.sudo)# master found
<block_start>self.set_master_info()<line_sep><return><false><block_end><block_end>fabricio.log('Waiting for master info ({seconds} seconds)...'.format(seconds=self.pg_recovery_wait_for_master_seconds ))<line_sep>self.master_obtained.wait(self.pg_recovery_wait_for_master_seconds)<if_stmt><not>self.master_obtained.is_set()<block_start><if_stmt>db_exists<and><not>self.pg_recovery_master_promotion_enabled<block_start>fab.abort('Database exists but master not found. This probably '<concat>'means master failure. New master promotion disabled '<concat>'by default, but can be enabled by setting attribute '<concat>'\'pg_recovery_master_promotion_enabled\' to True.')<block_end>self.master_lock.acquire()<if_stmt><not>self.master_obtained.is_set()<block_start><if_stmt>db_exists<block_start>fabricio.move_file(path_from=recovery_conf_file path_to=recovery_conf_file+'.backup' sudo=self.sudo )<line_sep>self.set_master_info()<line_sep><return><true><block_end><elif_stmt><not>self.multiprocessing_data.db_exists<block_start>self.set_master_info()<line_sep><return><false><block_end><block_end>self.master_lock.release()<line_sep>self.master_obtained.wait()<block_end><if_stmt><not>db_exists<block_start>self.copy_data_from_master(tag=tag registry=registry account=account )<block_end><return>self.update_config(content=self.get_recovery_config() path=os.path.join(self.pg_data 'recovery.conf') )<block_end><def_stmt>update self tag=<none> registry=<none> account=<none> force=<false><block_start><if_stmt><not>fab.env.parallel<block_start>fab.abort('Master-slave configuration update requires parallel mode. '<concat>'Use Fabric\'s `--parallel` option to enable this mode '<concat>'for a current session.')<block_end>self.instances.put(<none>)<try_stmt><block_start>recovery_config_updated=self.update_recovery_config(tag=tag registry=registry account=account )<line_sep>container_updated=super(StreamingReplicatedPostgresqlContainer self ).update(force=force tag=tag registry=registry account=account)<if_stmt><not>container_updated<and>recovery_config_updated<block_start>self.reload()<block_end>self.master_obtained.set()# one who first comes here is master
<return>container_updated<or>recovery_config_updated<block_end><except_stmt>Exception<as>exception<block_start>self.multiprocessing_data.exception=exception<line_sep><raise><block_end><finally_stmt><block_start><try_stmt><block_start>self.master_lock.release()<block_end><except_stmt>ValueError# ignore "released too many times" error
<block_start><pass><block_end>self.instances.get()<line_sep>self.instances.task_done()<line_sep>self.instances.join()# wait until all instances are updated
# reset state at the end to prevent failure of the next Fabric command
self.master_obtained.clear()<block_end><block_end><def_stmt>revert self<block_start><if_stmt><not>self.pg_recovery_revert_enabled<block_start>fab.abort("StreamingReplicatedPostgresqlContainer cannot be reverted by "<concat>"default. You can change this behaviour by setting the attribute "<concat>"'pg_recovery_revert_enabled'. Note that whether or not this "<concat>"attribute is set, recovery configs (master-slave configuration) "<concat>"will not be reverted anyway.")<block_end>super(StreamingReplicatedPostgresqlContainer self).revert()<block_end><block_end>
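# --- Added illustration (not part of the original module) ---
# A minimal sketch of how the backup/restore mixin above might be wired into
# a fabfile. `PostgresBackup` stands in for the (unnamed) mixin class defined
# above, and every value below is an invented example; only the attribute and
# method names come from the code itself.
#
# class Postgres(PostgresBackup, PostgresqlContainer):
#     pass
#
# db = Postgres(
#     'db',
#     pg_data='/var/lib/postgresql/data',
#     options={'volume': '/data:/var/lib/postgresql/data'},
# )
# db.backup()    # builds and runs a pg_dump command, roughly (illustrative):
#                # pg_dump --username=... --host=... --port=... --if-exists \
#                #     --create --clean --format=... --dbname=... \
#                #     --compress=... --jobs=1 --file=<db_backup_dir>/<timestamp>.dump
# db.restore(backup_name='2018-01-01T00:00:00.000000.dump')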
|
<import_stmt>logging<import_from_stmt>google.protobuf.text_format ParseError<import_from_stmt>sawtooth_sdk.processor.exceptions InternalError<import_from_stmt>sawtooth_sdk.protobuf state_context_pb2<import_from_stmt>remme.settings STATE_TIMEOUT_SEC<line_sep>logger=logging.getLogger(__name__)<class_stmt>CacheContextService<block_start><def_stmt>__init__ self context<block_start>self._storage={}<line_sep>self._context=context<block_end><def_stmt>preload_state self addresses<block_start>addresses=list(filter(<lambda>a:len(a)<eq>70 addresses))<line_sep>entries=self.get_state(addresses)<for_stmt>i,entry enumerate(entries)<block_start>self._storage[entry.address]=entry.data<block_end>logger.debug(f'Stored data for addresses: {self._storage}')<block_end><def_stmt>get_cached_data self resolvers timeout=STATE_TIMEOUT_SEC<block_start><for_stmt>address,pb_class resolvers<block_start><try_stmt><block_start>data=self._storage[address]<line_sep>logger.debug('Got loaded data for address '<concat>f'"{address}": {data}')<block_end><except_stmt>KeyError<block_start><try_stmt><block_start>data=self.get_state([address])[0].data<line_sep>self._storage[address]=data<line_sep>logger.debug('Got pre-loaded data for address '<concat>f'"{address}": {data}')<block_end><except_stmt>IndexError<block_start><yield><none><line_sep><continue><block_end><except_stmt>Exception<as>e<block_start>logger.exception(e)<line_sep><raise>InternalError(f'Address "{address}" does not '<concat>'have access to data')<block_end><block_end><if_stmt>data<is><none><block_start><yield>data<line_sep><continue><block_end><try_stmt><block_start>pb=pb_class()<line_sep>pb.ParseFromString(data)<line_sep><yield>pb<block_end><except_stmt>ParseError<block_start><raise>InternalError('Failed to deserialize data')<block_end><except_stmt>Exception<as>e<block_start>logger.exception(e)<line_sep><yield><none><block_end><block_end><block_end><def_stmt>get_state self addresses timeout=STATE_TIMEOUT_SEC<block_start><return>self._context.get_state(addresses timeout)<block_end><def_stmt>set_state self entries timeout=STATE_TIMEOUT_SEC<block_start><return>self._context.set_state(entries timeout)<block_end><def_stmt>delete_state self addresses timeout=STATE_TIMEOUT_SEC<block_start><return>self._context.delete_state(addresses timeout)<block_end><def_stmt>add_receipt_data self data timeout=STATE_TIMEOUT_SEC<block_start><return>self._context.add_receipt_data(data timeout)<block_end><def_stmt>add_event self event_type attributes=<none> data=<none> timeout=STATE_TIMEOUT_SEC<block_start><return>self._context.add_event(event_type attributes data timeout)<block_end><block_end>
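# --- Added illustration (not part of the original module) ---
# A minimal sketch of driving CacheContextService with a stubbed context; the
# FakeContext class and the 70-character address are assumptions made purely
# for this example.
class FakeContext:
    def get_state(self, addresses, timeout):
        return []  # pretend nothing is stored yet

service = CacheContextService(FakeContext())
service.preload_state(['a' * 70])          # only 70-char addresses are kept
print(list(service.get_cached_data([])))   # no resolvers -> no results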
|
<try_stmt><block_start><import_stmt>ctypes<block_end><except_stmt>ImportError<block_start>ctypes=<none><block_end><import_stmt>os<import_stmt>platform<import_stmt>sys<import_stmt>types<import_stmt>warnings<import_stmt>pretend<import_stmt>pytest<import_from_stmt>packaging _manylinux<import_from_stmt>packaging._manylinux _ELFFileHeader _get_elf_header _get_glibc_version _glibc_version_string _glibc_version_string_confstr _glibc_version_string_ctypes _is_compatible _is_linux_armhf _is_linux_i686 _parse_glibc_version <line_sep>@pytest.fixture(autouse=<true>)<def_stmt>clear_lru_cache <block_start><yield><line_sep>_get_glibc_version.cache_clear()<block_end>@pytest.fixture<def_stmt>manylinux_module monkeypatch<block_start>monkeypatch.setattr(_manylinux "_get_glibc_version" <lambda>*args:(2 20))<line_sep>module_name="_manylinux"<line_sep>module=types.ModuleType(module_name)<line_sep>monkeypatch.setitem(sys.modules module_name module)<line_sep><return>module<block_end>@pytest.mark.parametrize("tf" (<true> <false>))@pytest.mark.parametrize("attribute,glibc" (("1" (2 5)) ("2010" (2 12)) ("2014" (2 17))))<def_stmt>test_module_declaration monkeypatch manylinux_module attribute glibc tf<block_start>manylinux=f"manylinux{attribute}_compatible"<line_sep>monkeypatch.setattr(manylinux_module manylinux tf raising=<false>)<line_sep>res=_is_compatible(manylinux "x86_64" glibc)<assert_stmt>tf<is>res<block_end>@pytest.mark.parametrize("attribute,glibc" (("1" (2 5)) ("2010" (2 12)) ("2014" (2 17))))<def_stmt>test_module_declaration_missing_attribute monkeypatch manylinux_module attribute glibc<block_start>manylinux=f"manylinux{attribute}_compatible"<line_sep>monkeypatch.delattr(manylinux_module manylinux raising=<false>)<assert_stmt>_is_compatible(manylinux "x86_64" glibc)<block_end>@pytest.mark.parametrize("version,compatible" (((2 0) <true>) ((2 5) <true>) ((2 10) <false>)))<def_stmt>test_is_manylinux_compatible_glibc_support version compatible monkeypatch<block_start>monkeypatch.setitem(sys.modules "_manylinux" <none>)<line_sep>monkeypatch.setattr(_manylinux "_get_glibc_version" <lambda>:(2 5))<assert_stmt>bool(_is_compatible("manylinux1" "any" version))<eq>compatible<block_end>@pytest.mark.parametrize("version_str" ["glibc-2.4.5" "2"])<def_stmt>test_check_glibc_version_warning version_str<block_start><with_stmt>warnings.catch_warnings(record=<true>)<as>w<block_start>_parse_glibc_version(version_str)<assert_stmt>len(w)<eq>1<assert_stmt>issubclass(w[0].category RuntimeWarning)<block_end><block_end>@pytest.mark.skipif(<not>ctypes reason="requires ctypes")@pytest.mark.parametrize("version_str,expected" [# Be very explicit about bytes and Unicode for Python 2 testing.
(b"2.4" "2.4") ("2.4" "2.4") ] )<def_stmt>test_glibc_version_string version_str expected monkeypatch<block_start><class_stmt>LibcVersion<block_start><def_stmt>__init__ self version_str<block_start>self.version_str=version_str<block_end><def_stmt>__call__ self<block_start><return>version_str<block_end><block_end><class_stmt>ProcessNamespace<block_start><def_stmt>__init__ self libc_version<block_start>self.gnu_get_libc_version=libc_version<block_end><block_end>process_namespace=ProcessNamespace(LibcVersion(version_str))<line_sep>monkeypatch.setattr(ctypes "CDLL" <lambda>_:process_namespace)<line_sep>monkeypatch.setattr(_manylinux "_glibc_version_string_confstr" <lambda>:<false>)<assert_stmt>_glibc_version_string()<eq>expected<del_stmt>process_namespace.gnu_get_libc_version<assert_stmt>_glibc_version_string()<is><none><block_end><def_stmt>test_glibc_version_string_confstr monkeypatch<block_start>monkeypatch.setattr(os "confstr" <lambda>x:"glibc 2.20" raising=<false>)<assert_stmt>_glibc_version_string_confstr()<eq>"2.20"<block_end><def_stmt>test_glibc_version_string_fail monkeypatch<block_start>monkeypatch.setattr(os "confstr" <lambda>x:<none> raising=<false>)<line_sep>monkeypatch.setitem(sys.modules "ctypes" <none>)<assert_stmt>_glibc_version_string()<is><none><assert_stmt>_get_glibc_version()<eq>(-1 -1)<block_end>@pytest.mark.parametrize("failure" [pretend.raiser(ValueError) pretend.raiser(OSError) <lambda>x:"XXX"] )<def_stmt>test_glibc_version_string_confstr_fail monkeypatch failure<block_start>monkeypatch.setattr(os "confstr" failure raising=<false>)<assert_stmt>_glibc_version_string_confstr()<is><none><block_end><def_stmt>test_glibc_version_string_confstr_missing monkeypatch<block_start>monkeypatch.delattr(os "confstr" raising=<false>)<assert_stmt>_glibc_version_string_confstr()<is><none><block_end><def_stmt>test_glibc_version_string_ctypes_missing monkeypatch<block_start>monkeypatch.setitem(sys.modules "ctypes" <none>)<assert_stmt>_glibc_version_string_ctypes()<is><none><block_end><def_stmt>test_glibc_version_string_ctypes_raise_oserror monkeypatch<block_start><def_stmt>patched_cdll name<block_start><raise>OSError("Dynamic loading not supported")<block_end>monkeypatch.setattr(ctypes "CDLL" patched_cdll)<assert_stmt>_glibc_version_string_ctypes()<is><none><block_end>@pytest.mark.skipif(platform.system()<ne>"Linux" reason="requires Linux")<def_stmt>test_is_manylinux_compatible_old # Assuming no one is running this test with a version of glibc released in
# 1997.
<block_start><assert_stmt>_is_compatible("any" "any" (2 0))<block_end><def_stmt>test_is_manylinux_compatible monkeypatch<block_start>monkeypatch.setattr(_manylinux "_glibc_version_string" <lambda>:"2.4")<assert_stmt>_is_compatible("" "any" (2 4))<block_end><def_stmt>test_glibc_version_string_none monkeypatch<block_start>monkeypatch.setattr(_manylinux "_glibc_version_string" <lambda>:<none>)<assert_stmt><not>_is_compatible("any" "any" (2 4))<block_end><def_stmt>test_is_linux_armhf_not_elf monkeypatch<block_start>monkeypatch.setattr(_manylinux "_get_elf_header" <lambda>:<none>)<assert_stmt><not>_is_linux_armhf()<block_end><def_stmt>test_is_linux_i686_not_elf monkeypatch<block_start>monkeypatch.setattr(_manylinux "_get_elf_header" <lambda>:<none>)<assert_stmt><not>_is_linux_i686()<block_end>@pytest.mark.parametrize("machine, abi, elf_class, elf_data, elf_machine" [("x86_64" "x32" _ELFFileHeader.ELFCLASS32 _ELFFileHeader.ELFDATA2LSB _ELFFileHeader.EM_X86_64 ) ("x86_64" "i386" _ELFFileHeader.ELFCLASS32 _ELFFileHeader.ELFDATA2LSB _ELFFileHeader.EM_386 ) ("x86_64" "amd64" _ELFFileHeader.ELFCLASS64 _ELFFileHeader.ELFDATA2LSB _ELFFileHeader.EM_X86_64 ) ("armv7l" "armel" _ELFFileHeader.ELFCLASS32 _ELFFileHeader.ELFDATA2LSB _ELFFileHeader.EM_ARM ) ("armv7l" "armhf" _ELFFileHeader.ELFCLASS32 _ELFFileHeader.ELFDATA2LSB _ELFFileHeader.EM_ARM ) ("s390x" "s390x" _ELFFileHeader.ELFCLASS64 _ELFFileHeader.ELFDATA2MSB _ELFFileHeader.EM_S390 ) ] )<def_stmt>test_get_elf_header monkeypatch machine abi elf_class elf_data elf_machine<block_start>path=os.path.join(os.path.dirname(__file__) "manylinux" f"hello-world-{machine}-{abi}" )<line_sep>monkeypatch.setattr(sys "executable" path)<line_sep>elf_header=_get_elf_header()<assert_stmt>elf_header.e_ident_class<eq>elf_class<assert_stmt>elf_header.e_ident_data<eq>elf_data<assert_stmt>elf_header.e_machine<eq>elf_machine<block_end>@pytest.mark.parametrize("content" [<none> "invalid-magic" "invalid-class" "invalid-data" "too-short"])<def_stmt>test_get_elf_header_bad_executable monkeypatch content<block_start><if_stmt>content<block_start>path=os.path.join(os.path.dirname(__file__) "manylinux" f"hello-world-{content}" )<block_end><else_stmt><block_start>path=<none><block_end>monkeypatch.setattr(sys "executable" path)<assert_stmt>_get_elf_header()<is><none><block_end>
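# --- Added illustration (not part of the original test module) ---
# A short sketch of how the helpers under test fit together; the printed
# values are assumptions (they depend on the platform and on any _manylinux
# module importable at runtime), so the demo is kept out of test collection.
def _demo():
    print(_parse_glibc_version("2.20"))                    # e.g. -> (2, 20)
    print(_is_compatible("manylinux1", "x86_64", (2, 5)))  # glibc >= 2.5 and allowed?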
|
proc_name="not-fooey"<line_sep>
|
<import_from_stmt>PyObjCTools.TestSupport *<import_from_stmt>Quartz.QuartzCore *<try_stmt><block_start>long<block_end><except_stmt>NameError<block_start>long=int<block_end><try_stmt><block_start>unicode<block_end><except_stmt>NameError<block_start>unicode=str<block_end><class_stmt>TestCIImage(TestCase)<block_start><def_stmt>testConstants self<block_start>self.assertIsInstance(kCIFormatARGB8 (int long))<line_sep>self.assertIsInstance(kCIFormatRGBA16 (int long))<line_sep>self.assertIsInstance(kCIFormatRGBAf (int long))<line_sep>self.assertIsInstance(kCIImageColorSpace unicode)<block_end><def_stmt>testMethods self<block_start>self.assertArgIsBOOL(CIImage.imageWithTexture_size_flipped_colorSpace_ 2)<line_sep>self.assertArgIsBOOL(CIImage.initWithTexture_size_flipped_colorSpace_ 2)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<block_end>
|
# -*- coding: UTF-8 -*-
"""
This script randomly generates training data for a linear regression model
"""<import_stmt>os<import_stmt>matplotlib.pyplot<as>plt<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<def_stmt>generate_data <block_start>"""
Randomly generate the data
"""<line_sep># Fix the seed of the random number generator
np.random.seed(4889)<line_sep># Python 2's range is not compatible with Python 3's, so use list(range(10, 29))
x=np.array([10]+list(range(10 29)))<line_sep>error=np.round(np.random.randn(20) 2)<line_sep>y=x+error<line_sep><return>pd.DataFrame({"x":x "y":y})<block_end><def_stmt>visualize_data data<block_start>"""
Visualize the data
"""<line_sep># Create a figure frame and draw a single plot inside it
fig=plt.figure(figsize=(6 6) dpi=80)<line_sep>ax=fig.add_subplot(111)<line_sep># Configure the axes
ax.set_xlabel("$x$")<line_sep>ax.set_xticks(range(10 31 5))<line_sep>ax.set_ylabel("$y$")<line_sep>ax.set_yticks(range(10 31 5))<line_sep># Draw a scatter plot with blue points
ax.scatter(data.x data.y color="b" label="$y = x + \epsilon$")<line_sep>plt.legend(shadow=<true>)<line_sep># Show the figure drawn above. The figure blocks execution until all figures are closed
# In a Python shell, you can pass "block=False" to disable this blocking behavior.
plt.show()<block_end><if_stmt>__name__<eq>"__main__"<block_start>data=generate_data()<line_sep>home_path=os.path.dirname(os.path.abspath(__file__))<line_sep># Save the data; the storage path on Windows differs from Linux
<if_stmt>os.name<eq>"nt"<block_start>data.to_csv("%s\\simple_example.csv"%home_path index=<false>)<block_end><else_stmt><block_start>data.to_csv("%s/simple_example.csv"%home_path index=<false>)<block_end>visualize_data(data)<block_end>
|
<import_stmt>FWCore.ParameterSet.Config<as>cms<import_stmt>os<line_sep>simMuonQualityAdjusterDigis=cms.EDProducer('L1TMuonQualityAdjuster' bmtfInput=cms.InputTag("simBmtfDigis" "BMTF") omtfInput=cms.InputTag("simOmtfDigis" "OMTF") emtfInput=cms.InputTag("simEmtfDigis" "EMTF") bmtfBxOffset=cms.int32(0) )<line_sep>
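# --- Added illustration (not part of the original fragment) ---
# A hedged sketch of attaching the producer above to a cmsRun process; the
# process name "L1T" is an invented example.
#
# process = cms.Process("L1T")
# process.simMuonQualityAdjusterDigis = simMuonQualityAdjusterDigis
# process.p = cms.Path(process.simMuonQualityAdjusterDigis)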
|
<import_from_stmt>datetime datetime<import_stmt>time<import_from_stmt>pathlib Path<import_stmt>json<import_from_stmt>api.remarkable_client RemarkableClient<import_stmt>utils.config<line_sep>#
# DEFINITIONS
#
STATE_SYNCING=1<line_sep>STATE_SYNCED=2<line_sep>STATE_DELETED=170591<line_sep>RFC3339Nano="%Y-%m-%dT%H:%M:%SZ"<line_sep>#
# HELPER
#
<def_stmt>get_path id<block_start><return>"%s/%s"%(utils.config.PATH id)<block_end><def_stmt>get_path_remapy id<block_start><return>"%s/.remapy"%get_path(id)<block_end><def_stmt>get_path_metadata_local id<block_start><return>"%s/metadata.local"%get_path_remapy(id)<block_end><def_stmt>now_rfc3339 <block_start><return>datetime.utcnow().strftime(RFC3339Nano)<block_end>#
# CLASS
#
<class_stmt>Item(object)#
# CTOR
#
<block_start><def_stmt>__init__ self metadata parent=<none><block_start>self.metadata=metadata<line_sep>self._parent=parent<line_sep>self._children=[]<line_sep>self.path=get_path(self.id())<line_sep>self.path_remapy=get_path_remapy(self.id())<line_sep>self.path_metadata_local=get_path_metadata_local(self.id())<line_sep>self.rm_client=RemarkableClient()<line_sep>self.state_listener=[]<block_end>#
# Getter and setter
#
<def_stmt>is_trash self<block_start><return>self.id()<eq>"trash"<block_end><def_stmt>is_root self<block_start><return>self.metadata<is><none><block_end><def_stmt>id self<block_start><return>self._meta_value("ID")<block_end><def_stmt>name self<block_start><return>self._meta_value("VissibleName")<block_end><def_stmt>version self<block_start><return>self._meta_value("Version" -1)<block_end><def_stmt>bookmarked self<block_start><return>self._meta_value("Bookmarked" <false>)<block_end><def_stmt>is_document self<block_start><return>self._meta_value("Type" "CollectionType")<eq>"DocumentType"<block_end><def_stmt>is_collection self<block_start><return>self._meta_value("Type" "CollectionType")<ne>"DocumentType"<block_end><def_stmt>modified_time self<block_start>modified=self.metadata["ModifiedClient"]<if_stmt>modified<eq><none><block_start><return><none><block_end><try_stmt><block_start>utc=datetime.strptime(modified "%Y-%m-%dT%H:%M:%S.%fZ")<block_end><except_stmt><block_start>utc=datetime.strptime(modified "%Y-%m-%dT%H:%M:%SZ")<block_end><try_stmt><block_start>epoch=time.mktime(utc.timetuple())<line_sep>offset=datetime.fromtimestamp(epoch)-datetime.utcfromtimestamp(epoch)<block_end><except_stmt><block_start>print("(Warning) Failed to parse datetime for item %s"%self.id())<line_sep><return>datetime(1970 1 1 0 0 0)<block_end><return>utc+offset<block_end><def_stmt>parent self<block_start><return>self._parent<block_end><def_stmt>children self<block_start><return>self._children<block_end><def_stmt>_meta_value self key root_value=""<block_start><if_stmt>self.is_root()<block_start><return>root_value<block_end><return>self.metadata[key]<block_end>#
# Functions
#
<def_stmt>set_bookmarked self bookmarked<block_start><if_stmt>self.is_trash()<or>self.is_root()<block_start><return><block_end>self.metadata["Bookmarked"]=bookmarked<line_sep>self.metadata["ModifiedClient"]=now_rfc3339()<line_sep>self.metadata["Version"]<augadd>1<line_sep>self.rm_client.update_metadata(self.metadata)<line_sep>self._write_remapy_file()<line_sep>self._update_state_listener()<block_end><def_stmt>rename self new_name<block_start><if_stmt>self.is_trash()<or>self.is_root()<block_start><return><block_end>self.metadata["VissibleName"]=new_name<line_sep>self.metadata["ModifiedClient"]=now_rfc3339()<line_sep>self.metadata["Version"]<augadd>1<line_sep>self.rm_client.update_metadata(self.metadata)<line_sep>self._write_remapy_file()<line_sep>self._update_state_listener()<block_end><def_stmt>move self new_parent<block_start><if_stmt>self.is_trash()<or>self.is_root()<block_start><return><block_end>self._parent=new_parent<line_sep>self.metadata["Parent"]=new_parent.id()<line_sep>self.metadata["ModifiedClient"]=now_rfc3339()<line_sep>self.metadata["Version"]<augadd>1<line_sep>self.rm_client.update_metadata(self.metadata)<line_sep>self._write_remapy_file()<line_sep>self._update_state_listener()<block_end><def_stmt>add_state_listener self listener<block_start>self.state_listener.append(listener)<block_end><def_stmt>_update_state_listener self<block_start><for_stmt>listener self.state_listener<block_start>listener(self)<block_end><block_end><def_stmt>_write_remapy_file self<block_start><if_stmt>self.is_root()<block_start><return><block_end>Path(self.path_remapy).mkdir(parents=<true> exist_ok=<true>)<with_stmt>open(self.path_metadata_local "w")<as>out<block_start>out.write(json.dumps(self.metadata indent=4))<block_end><block_end><block_end>
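# --- Added illustration (not part of the original module) ---
# A hedged sketch of constructing an Item from a metadata dict; the field
# values are invented (only the keys appear in the code above), and running
# it requires a configured utils.config.PATH plus a reachable RemarkableClient.
#
# meta = {
#     "ID": "doc-1", "VissibleName": "Notes", "Version": 1,
#     "Bookmarked": False, "Type": "DocumentType",
#     "ModifiedClient": "2020-01-01T00:00:00Z",
# }
# item = Item(meta)
# print(item.name(), item.is_document(), item.modified_time())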
|
<import_stmt>math<import_stmt>numpy<as>np<import_stmt>torch<import_from_stmt>collections defaultdict<import_from_stmt>hover.utils.torch_helper cross_entropy_with_probs<def_stmt>loss_coteaching_directed y_student y_teacher target forget_rate<block_start>"""
Subroutine for loss_coteaching_graph.
"""<line_sep>num_remember=math.ceil((1-forget_rate)<times>target.size(0))<assert_stmt>(num_remember<g>0) f"Expected at least one remembered target, got {num_remember}"<line_sep>loss_teacher_detail=cross_entropy_with_probs(y_teacher target reduction="none")<line_sep>idx_to_learn=np.argsort(loss_teacher_detail.data)[:num_remember]<line_sep>loss_student=cross_entropy_with_probs(y_student[idx_to_learn] target[idx_to_learn] reduction="mean").unsqueeze(0)<line_sep><return>loss_student<block_end><def_stmt>prediction_disagreement pred_list reduce=<false><block_start>"""
Compute disagreements between predictions.
"""<line_sep>disagreement=defaultdict(dict)<for_stmt>i range(0 len(pred_list))<block_start><for_stmt>j range(i len(pred_list))<block_start>_disagreed=np.not_equal(pred_list[i] pred_list[j])<if_stmt>reduce<block_start>_disagreed=np.mean(_disagreed)<block_end>disagreement[i][j]=_disagreed<line_sep>disagreement[j][i]=_disagreed<block_end><block_end><return>dict(disagreement)<block_end><def_stmt>loss_coteaching_graph y_list target tail_head_adjacency_list forget_rate<block_start>"""
Co-teaching from differences.
Generalized to a graph representation where each vertex is a classifier and each edge points to a source to check differences against and learn from.
y_list: list of logits from different classifiers.
target: the supervision target, which is allowed to be probabilistic.
tail_head_adjacency_list: the 'tail' classifier learns from the 'head'.
forget_rate: the proportion of high-loss contributions to discard.
"""<line_sep># initialize co-teaching losses
loss_list=[]<for_stmt>i range(0 len(y_list))<block_start><assert_stmt>tail_head_adjacency_list[i] f"Expected at least one teacher for {i}"<line_sep>_losses=[]<for_stmt>j tail_head_adjacency_list[i]# fetch yi as student(tail), yj as teacher(head)
<block_start>_yi,_yj=y_list[i] y_list[j]<line_sep>_tar=target<line_sep># add loss contribution to list
_contribution=loss_coteaching_directed(_yi _yj _tar forget_rate)<line_sep>_losses.append(_contribution)<block_end># concatenate and average up
_loss=torch.mean(torch.cat(_losses))<line_sep>loss_list.append(_loss)<block_end><return>loss_list<block_end><def_stmt>identity_adjacency info_dict<block_start>"""
Each node points to itself.
"""<line_sep>refs=[]<line_sep>acc_list=info_dict["accuracy"]<for_stmt>i range(0 len(acc_list))<block_start>refs.append([i])<block_end><return>refs<block_end><def_stmt>cyclic_adjacency info_dict acc_bar=0.5<block_start>"""
Nodes form a cycle.
Triggers if accuracies are high enough.
"""<line_sep>refs=[]<line_sep>acc_list=info_dict["accuracy"]<for_stmt>i range(0 len(acc_list))<block_start>candidate=(i+1)%(len(acc_list))<if_stmt>acc_list[i]<g>acc_bar<and>acc_list[candidate]<g>acc_bar<block_start>refs.append([candidate])<block_end><else_stmt><block_start>refs.append([i])<block_end><block_end><return>refs<block_end><def_stmt>cyclic_except_last info_dict acc_bar=0.5<block_start>"""
Cyclic except the last member.
Triggers if accuracies are high enough.
"""<line_sep>refs=[]<line_sep>acc_list=info_dict["accuracy"]<for_stmt>i range(0 len(acc_list)-1)<block_start>candidate=(i+1)%(len(acc_list)-1)<if_stmt>acc_list[i]<g>acc_bar<and>acc_list[candidate]<g>acc_bar<block_start>refs.append([candidate])<block_end><else_stmt><block_start>refs.append([i])<block_end><block_end>refs.append([len(acc_list)-1])<line_sep><return>refs<block_end><def_stmt>accuracy_priority info_dict acc_bar=0.5<block_start>"""
Every node points at the most accurate member that is not itself.
Triggers if accuracies are high enough.
"""<line_sep>refs=[]<line_sep>acc_list=info_dict["accuracy"]<for_stmt>i range(0 len(acc_list))<block_start>top_candidates=sorted(range(len(acc_list)) key=<lambda>j:acc_list[j] reverse=<true>)<line_sep>candidate=top_candidates[0]<if>top_candidates[0]<ne>i<else>top_candidates[1]<if_stmt>acc_list[i]<g>acc_bar<and>acc_list[candidate]<g>acc_bar<block_start>refs.append([candidate])<block_end><else_stmt><block_start>refs.append([i])<block_end><block_end><return>refs<block_end><def_stmt>disagreement_priority info_dict acc_bar=0.5<block_start>"""
Every node points at the most different member that is not itself.
Triggers if accuracies are high enough.
"""<line_sep>refs=[]<line_sep>acc_list=info_dict["accuracy"]<line_sep>disagree_dict=info_dict["disagreement_rate"]<for_stmt>i range(0 len(acc_list))<block_start>top_candidates=sorted(disagree_dict[i].keys() key=<lambda>j:disagree_dict[i][j] reverse=<true>)<line_sep>candidate=top_candidates[0]<if>top_candidates[0]<ne>i<else>top_candidates[1]<if_stmt>acc_list[i]<g>acc_bar<and>acc_list[candidate]<g>acc_bar<block_start>refs.append([candidate])<block_end><else_stmt><block_start>refs.append([i])<block_end><block_end><return>refs<block_end>
|
<import_from_stmt>pynitro NitroFrame<import_from_stmt>diesel.protocols.nitro DieselNitroSocket<import_from_stmt>diesel quickstart quickstop<line_sep>#loc = "tcp://127.0.0.1:4444"
loc="inproc://foobar"<def_stmt>server <block_start><with_stmt>DieselNitroSocket(bind=loc)<as>sock<block_start><while_stmt><true><block_start>m=sock.recv()<line_sep>sock.send(NitroFrame("you said: "+m.data))<block_end><block_end><block_end><def_stmt>client <block_start><with_stmt>DieselNitroSocket(connect=loc)<as>sock<block_start><for_stmt>x xrange(100000)<block_start>sock.send(NitroFrame("Hello, dude!"))<line_sep>m=sock.recv()<assert_stmt>m.data<eq>"you said: Hello, dude!"<block_end>quickstop()<block_end><block_end>quickstart(server client)<line_sep>
|
# Here's a challenge to help you practice
# See if you can fix the code below
# print the message
# There was a single quote inside the string!
# Use double quotes to enclose the string
print("Why won't this line of code print")<line_sep># print the message
# There was a mistake in the function name
print('This line fails too!')<line_sep># print the message
# Need to add the () around the string
print("I think I know how to fix this one")<line_sep># print the name entered by the user
# You need to store the value returned by the input statement
# in a variable
name=input('Please tell me your name: ')<line_sep>print(name)<line_sep>
|
#
# Copyright 2019 <NAME> <<EMAIL>>
#
# This file is part of Salus
# (see https://github.com/SymbioticLab/Salus).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
<import_from_future_stmt> absolute_import print_function division<import_stmt>sqlite3<as>sql<import_stmt>struct<import_stmt>pandas<as>pd<import_from_stmt>datetime datetime<import_from_stmt>tqdm tqdm<line_sep>metric_name={19923062:'achieved_occupancy' 19923058:'executed_ipc'}<line_sep>event_name={83886184:'active_warps' 83886182:'active_cycles' 83886183:'elapsed_cycles_sm' }<def_stmt>parseMetricValue binstr<block_start><return>struct.unpack('d' binstr)[0]<block_end><def_stmt>parseEventValue s<block_start><return>int(s)<block_end><def_stmt>parseNanoTime nanotime<block_start><return>datetime.utcfromtimestamp(nanotime/1e9)<block_end><class_stmt>Metric(object)<block_start><def_stmt>__init__ self row<block_start>super(Metric self).__init__()<block_end><block_end><class_stmt>Kernel(object)<block_start><def_stmt>__init__ self row refpoint<block_start>super(Kernel self).__init__()<line_sep>self.id=row['correlationId']<line_sep>self.start=row['start']-refpoint<line_sep>self.end=row['end']-refpoint<line_sep>self.duration=self.end-self.start<line_sep>self.name=row['name']<block_end><block_end><class_stmt>NvvpReader(object)<block_start><def_stmt>__init__ self filepath progress=<false><block_start>super(NvvpReader self).__init__()<line_sep>self.filepath=filepath<line_sep>self.dbLoaded=<false><line_sep>self.loadDB(progress)<block_end><def_stmt>loadDB self progress<block_start><if_stmt>self.dbLoaded<block_start><return><block_end>self.dbLoaded=<true><line_sep>self.conn=sql.connect(self.filepath)<line_sep>self.conn.row_factory=sql.Row<line_sep>prog_wrapper=tqdm<if>progress<else><lambda>x *args **kwargs:x<line_sep>cursor=self.conn.cursor()<line_sep># get timeline reference point (start time of the first overhead event is 0ns)
cursor.execute("""select start from CUPTI_ACTIVITY_KIND_OVERHEAD order by start""")<line_sep>(self.refpoint )=cursor.fetchone()<line_sep>self.refpoint=parseNanoTime(self.refpoint)<line_sep># get all kernels
total_amount=0<if_stmt>progress<block_start>cursor.execute('select count(*) from CUPTI_ACTIVITY_KIND_KERNEL')<line_sep>(total_amount )=cursor.fetchone()<block_end>cursor.execute("""select strings.value as strname, kernel.*
from CUPTI_ACTIVITY_KIND_KERNEL as kernel, StringTable as strings
where kernel.name = strings._id_""")<line_sep># create dataset
data=[]<line_sep>cursor2=self.conn.cursor()<for_stmt>row prog_wrapper(cursor total=total_amount)<block_start>correlationId=row['correlationId']<line_sep>kernel={'id':correlationId 'start':parseNanoTime(row['start']) 'end':parseNanoTime(row['end']) 'duration':row['end']-row['start'] 'name':row['strname'] }<line_sep># fetch all per-instance metrics on this kernel
<for_stmt>ins,val,metric_id cursor2.execute("""select instance, value, id
from CUPTI_ACTIVITY_KIND_METRIC_INSTANCE
where correlationId=?""" [correlationId])<block_start>val=parseMetricValue(val)<line_sep>observation={'metric':metric_name[metric_id] 'sm':ins 'metric_val':val}<line_sep>observation.update(kernel)<line_sep>data.append(observation)<block_end># fetch all aggregated metric
<for_stmt>val,metric_id cursor2.execute("""select value, id
from CUPTI_ACTIVITY_KIND_METRIC
where correlationId=?""" [correlationId])<block_start>val=parseMetricValue(val)<line_sep>observation={'metric':metric_name[metric_id] 'sm':-1 'metric_val':val}<line_sep>observation.update(kernel)<line_sep>data.append(observation)<block_end># fetch all instances events on this kernel
<for_stmt>ins,val,event_id cursor2.execute("""select instance, value, id
from CUPTI_ACTIVITY_KIND_EVENT_INSTANCE
where correlationId=?""" [correlationId])<block_start>val=parseEventValue(val)<line_sep>observation={'event':event_name[event_id] 'sm':ins 'event_val':val}<line_sep>observation.update(kernel)<line_sep>data.append(observation)<block_end># fetch all aggregated events on this kernel
<for_stmt>val,event_id cursor2.execute("""select value, id
from CUPTI_ACTIVITY_KIND_EVENT
where correlationId=?""" [correlationId])<block_start>val=parseEventValue(val)<line_sep>observation={'event':event_name[event_id] 'sm':-1 'event_val':val}<line_sep>observation.update(kernel)<line_sep>data.append(observation)<block_end><block_end>self.kernels=pd.DataFrame(data)<block_end><block_end>
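# --- Added illustration (not part of the original module) ---
# A brief usage sketch; 'profile.nvvp' is a placeholder path to an nvprof
# database.
#
# reader = NvvpReader('profile.nvvp', progress=True)
# print(reader.kernels.head())   # one row per (kernel, metric/event) observation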
|
<import_from_future_stmt> print_function<import_stmt>unittest<import_stmt>numpy<as>np<import_from_stmt>discretize TensorMesh<import_from_stmt>SimPEG maps data_misfit regularization inversion optimization inverse_problem tests <import_from_stmt>SimPEG.utils mkvc<import_from_stmt>SimPEG.electromagnetics resistivity<as>dc<line_sep>np.random.seed(40)<line_sep>TOL=1e-5<line_sep>FLR=1e-20# "zero", so if residual below this --> pass regardless of order
<class_stmt>DC1DSimulation(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>ntx=31<line_sep>xtemp_txP=np.logspace(1 3 ntx)<line_sep>xtemp_txN=-xtemp_txP<line_sep>ytemp_tx=np.zeros(ntx)<line_sep>xtemp_rxP=-5<line_sep>xtemp_rxN=5<line_sep>ytemp_rx=0.0<line_sep>abhalf=abs(xtemp_txP-xtemp_txN)<times>0.5<line_sep>a=xtemp_rxN-xtemp_rxP<line_sep>b=((xtemp_txN-xtemp_txP)-a)<times>0.5<line_sep># We generate tx and rx lists:
srclist=[]<for_stmt>i range(ntx)<block_start>rx=dc.receivers.Dipole(np.r_[xtemp_rxP ytemp_rx -12.5] np.r_[xtemp_rxN ytemp_rx -12.5])<line_sep>locA=np.r_[xtemp_txP[i] ytemp_tx[i] -12.5]<line_sep>locB=np.r_[xtemp_txN[i] ytemp_tx[i] -12.5]<line_sep>src=dc.sources.Dipole([rx] locA locB)<line_sep>srclist.append(src)<block_end>survey=dc.survey.Survey(srclist)<line_sep>rho=np.r_[10 10 10]<line_sep>dummy_hz=100.0<line_sep>hz=np.r_[10 10 dummy_hz]<line_sep>mesh=TensorMesh([hz])<line_sep>simulation=dc.simulation_1d.Simulation1DLayers(survey=survey rhoMap=maps.ExpMap(mesh) thicknesses=hz[:-1] data_type="apparent_resistivity" )<line_sep>simulation.dpred(np.log(rho))<line_sep>mSynth=np.log(rho)<line_sep>dobs=simulation.make_synthetic_data(mSynth add_noise=<true>)<line_sep># Now set up the problem to do some minimization
dmis=data_misfit.L2DataMisfit(simulation=simulation data=dobs)<line_sep>reg=regularization.Tikhonov(mesh)<line_sep>opt=optimization.InexactGaussNewton(maxIterLS=20 maxIter=10 tolF=1e-6 tolX=1e-6 tolG=1e-6 maxIterCG=6)<line_sep>invProb=inverse_problem.BaseInvProblem(dmis reg opt beta=0.0)<line_sep>inv=inversion.BaseInversion(invProb)<line_sep>self.inv=inv<line_sep>self.reg=reg<line_sep>self.p=simulation<line_sep>self.mesh=mesh<line_sep>self.m0=mSynth<line_sep>self.survey=survey<line_sep>self.dmis=dmis<line_sep>self.dobs=dobs<block_end><def_stmt>test_misfit self<block_start>passed=tests.checkDerivative(<lambda>m:[self.p.dpred(m) <lambda>mx:self.p.Jvec(self.m0 mx)] self.m0 plotIt=<false> num=3 )<line_sep>self.assertTrue(passed)<block_end><def_stmt>test_adjoint self# Adjoint Test
# u = np.random.rand(self.mesh.nC*self.survey.nSrc)
<block_start>v=np.random.rand(self.mesh.nC)<line_sep>w=np.random.rand(mkvc(self.dobs).shape[0])<line_sep>wtJv=w.dot(self.p.Jvec(self.m0 v))<line_sep>vtJtw=v.dot(self.p.Jtvec(self.m0 w))<line_sep>passed=np.abs(wtJv-vtJtw)<l>1e-8<line_sep>print("Adjoint Test" np.abs(wtJv-vtJtw) passed)<line_sep>self.assertTrue(passed)<block_end><def_stmt>test_dataObj self<block_start>passed=tests.checkDerivative(<lambda>m:[self.dmis(m) self.dmis.deriv(m)] self.m0 plotIt=<false> num=3)<line_sep>self.assertTrue(passed)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end>
|
<class_stmt>FunctionMetadata<block_start><def_stmt>__init__ self<block_start>self._constantReturnValue=()<block_end><def_stmt>setConstantReturnValue self value<block_start>self._constantReturnValue=(value )<block_end><def_stmt>hasConstantReturnValue self<block_start><return>self._constantReturnValue<block_end><def_stmt>getConstantReturnValue self<block_start><return>self._constantReturnValue[0]<if>self._constantReturnValue<else><none><block_end><block_end>
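# --- Added illustration (not part of the original module) ---
# A minimal usage sketch. The one-element tuple lets even None be recorded as
# a constant return value, while hasConstantReturnValue() is falsy only when
# nothing was recorded.
meta = FunctionMetadata()
print(bool(meta.hasConstantReturnValue()))   # False: nothing recorded yet
meta.setConstantReturnValue(None)
print(bool(meta.hasConstantReturnValue()))   # True: None was explicitly recorded
print(meta.getConstantReturnValue())         # None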
|
<import_from_stmt>uq360.utils.transformers.feature_transformer FeatureTransformer<class_stmt>OriginalFeaturesTransformer(FeatureTransformer)<block_start>'''
Dummy/identity transformer which passes the data array through unchanged.
'''<def_stmt>__init__ self<block_start>super(OriginalFeaturesTransformer self).__init__()<block_end>@classmethod<def_stmt>name cls<block_start><return>('original_features')<block_end><def_stmt>transform self x predictions<block_start><return>x<block_end><def_stmt>save self output_dir=<none><block_start><pass><block_end><def_stmt>load self input_dir=<none><block_start><pass><block_end><block_end>
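# --- Added illustration (not part of the original module) ---
# Identity behavior: transform() returns its input unchanged; the array shape
# is an arbitrary example.
import numpy as np
t = OriginalFeaturesTransformer()
x = np.ones((2, 3))
assert (t.transform(x, predictions=None) == x).all()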
|
array=['a' 'b' 'c']<def_stmt>decorator func<block_start><def_stmt>newValueOf pos<block_start><if_stmt>pos<ge>len(array)<block_start>print("Oops! Array index is out of range")<line_sep><return><block_end>func(pos)<block_end><return>newValueOf<block_end>@decorator<def_stmt>valueOf index<block_start>print(array[index])<block_end>valueOf(10)<line_sep>
|
"""Tails server related errors."""<import_from_stmt>..core.error BaseError<class_stmt>TailsServerNotConfiguredError(BaseError)<block_start>"""Error indicating the tails server plugin hasn't been configured."""<block_end>
|
"""
The Sims 4 Community Library is licensed under the Creative Commons Attribution 4.0 International public license (CC BY 4.0).
https://creativecommons.org/licenses/by/4.0/
https://creativecommons.org/licenses/by/4.0/legalcode
Copyright (c) COLONOLNUTTY
"""<import_from_stmt>typing Tuple Dict Callable Iterator<import_stmt>sims4.collections<import_from_stmt>rewards.reward Reward<import_from_stmt>sims4.resources Types<import_from_stmt>sims4communitylib.utils.common_resource_utils CommonResourceUtils<import_from_stmt>sims4communitylib.utils.whims.common_satisfaction_reward_store_item CommonSatisfactionRewardStoreItem<import_from_stmt>whims.whims_tracker WhimsTracker<class_stmt>CommonSatisfactionRewardStoreUtils<block_start>"""Utilities for manipulating the Satisfaction Rewards Store.
"""<line_sep>@staticmethod<def_stmt>add_reward_trait_to_rewards_store reward_trait_definition_id:int reward_point_cost:int<arrow>bool<block_start>"""add_reward_trait_to_rewards_store(reward_trait_definition_id, reward_point_cost)
Add a Reward Trait to the Satisfaction Rewards Store.
:param reward_trait_definition_id: The decimal identifier of a Reward Trait.
:type reward_trait_definition_id: int
:param reward_point_cost: The amount of Satisfaction Reward Points the Reward Trait will cost the Sim to receive.
:type reward_point_cost: int
:return: True, if the Trait was added to the Rewards Store successfully. False, if not.
:rtype: bool
"""<line_sep><return>CommonSatisfactionRewardStoreUtils._add_reward_to_rewards_store(reward_trait_definition_id reward_point_cost WhimsTracker.WhimAwardTypes.TRAIT)<block_end>@staticmethod<def_stmt>add_reward_buff_to_rewards_store reward_buff_definition_id:int reward_point_cost:int<arrow>bool<block_start>"""add_reward_buff_to_rewards_store(reward_buff_definition_id, reward_point_cost)
Add a Reward Buff to the Satisfaction Rewards Store.
:param reward_buff_definition_id: The decimal identifier of a Reward Buff.
:type reward_buff_definition_id: int
:param reward_point_cost: The amount of Satisfaction Reward Points the Reward Buff will cost the Sim to receive.
:type reward_point_cost: int
:return: True, if the Reward Buff was added to the Rewards Store successfully. False, if not.
:rtype: bool
"""<line_sep><return>CommonSatisfactionRewardStoreUtils._add_reward_to_rewards_store(reward_buff_definition_id reward_point_cost WhimsTracker.WhimAwardTypes.BUFF)<block_end>@staticmethod<def_stmt>add_reward_object_to_rewards_store reward_object_definition_id:int reward_point_cost:int<arrow>bool<block_start>"""add_reward_object_to_rewards_store(reward_object_definition_id, reward_point_cost)
Add a Reward Object to the Satisfaction Rewards Store.
:param reward_object_definition_id: The decimal identifier of a Reward Object.
:type reward_object_definition_id: int
:param reward_point_cost: The amount of Satisfaction Reward Points the Reward Object will cost the Sim to receive.
:type reward_point_cost: int
:return: True, if the Reward Object was added to the Rewards Store successfully. False, if not.
:rtype: bool
"""<line_sep><return>CommonSatisfactionRewardStoreUtils._add_reward_to_rewards_store(reward_object_definition_id reward_point_cost WhimsTracker.WhimAwardTypes.OBJECT)<block_end>@staticmethod<def_stmt>add_reward_cas_part_to_rewards_store reward_cas_part_definition_id:int reward_point_cost:int<arrow>bool<block_start>"""add_reward_cas_part_to_rewards_store(reward_cas_part_definition_id, reward_point_cost)
Add a Reward CAS Part to the Satisfaction Rewards Store.
:param reward_cas_part_definition_id: The decimal identifier of a Reward CAS Part.
:type reward_cas_part_definition_id: int
:param reward_point_cost: The amount of Satisfaction Reward Points the Reward CAS Part will cost the Sim to receive.
:type reward_point_cost: int
:return: True, if the Reward CAS Part was added to the Rewards Store successfully. False, if not.
:rtype: bool
"""<line_sep><return>CommonSatisfactionRewardStoreUtils._add_reward_to_rewards_store(reward_cas_part_definition_id reward_point_cost WhimsTracker.WhimAwardTypes.CASPART)<block_end>@staticmethod<def_stmt>remove_reward_from_rewards_store reward_item_definition_id:int<arrow>bool<block_start>"""remove_reward_from_rewards_store(reward_item_definition_id)
Remove a Reward Item from the Satisfaction Rewards Store.
:param reward_item_definition_id: The decimal identifier of a Reward Item.
:type reward_item_definition_id: int
:return: True, if the Reward Item was removed from the Rewards Store successfully. False, if not.
:rtype: bool
"""<line_sep><return>CommonSatisfactionRewardStoreUtils._remove_reward_from_rewards_store(reward_item_definition_id)<block_end>@staticmethod<def_stmt>get_all_satisfaction_reward_store_items_generator include_satisfaction_reward_callback:Callable[[CommonSatisfactionRewardStoreItem] bool]=<none><arrow>Iterator[CommonSatisfactionRewardStoreItem]<block_start>"""get_all_satisfaction_reward_store_items_generator(include_satisfaction_reward_callback=None)
Retrieve all Satisfaction Rewards in the Satisfaction Rewards Store.
:param include_satisfaction_reward_callback: If the result of this callback is True, the Satisfaction Reward\
and Satisfaction Reward Data (Cost, Award Type) will be included in the results. If set to None, all Satisfaction Rewards will be included.
:type include_satisfaction_reward_callback: Callable[[CommonSatisfactionRewardStoreItem], bool], optional
:return: All items from the satisfaction reward store.
:rtype: Iterator[CommonSatisfactionRewardStoreItem]
"""<line_sep>satisfaction_reward_store_items:Dict[Reward Tuple[int WhimsTracker.WhimAwardTypes]]=dict(WhimsTracker.SATISFACTION_STORE_ITEMS)<for_stmt>(reward data) satisfaction_reward_store_items<block_start>reward_cost=data[0]<line_sep>reward_type=data[1]<line_sep>reward_store_item=CommonSatisfactionRewardStoreItem(reward reward_cost reward_type)<if_stmt>include_satisfaction_reward_callback<is><not><none><and><not>include_satisfaction_reward_callback(reward_store_item)<block_start><continue><block_end><yield>reward_store_item<block_end><block_end>@staticmethod<def_stmt>_add_reward_to_rewards_store reward_definition_id:int reward_point_cost:int reward_type:WhimsTracker.WhimAwardTypes<arrow>bool<block_start>sim_reward_instance=CommonSatisfactionRewardStoreUtils._load_reward_instance(reward_definition_id)<if_stmt>sim_reward_instance<is><none><block_start><return><false><block_end>sim_reward_data_immutable_slots_cls=sims4.collections.make_immutable_slots_class(['cost' 'award_type'])<line_sep>reward_data=sim_reward_data_immutable_slots_cls(dict(cost=reward_point_cost award_type=reward_type))<line_sep>store_items=dict(WhimsTracker.SATISFACTION_STORE_ITEMS)<line_sep>store_items[sim_reward_instance]=reward_data<line_sep>WhimsTracker.SATISFACTION_STORE_ITEMS=sims4.collections.FrozenAttributeDict(store_items)<line_sep><return><true><block_end>@staticmethod<def_stmt>_remove_reward_from_rewards_store reward_definition_id:int<arrow>bool<block_start>sim_reward_instance=CommonSatisfactionRewardStoreUtils._load_reward_instance(reward_definition_id)<if_stmt>sim_reward_instance<is><none><block_start><return><false><block_end>store_items=dict(WhimsTracker.SATISFACTION_STORE_ITEMS)<if_stmt>sim_reward_instance<in>store_items<block_start><del_stmt>store_items[sim_reward_instance]<line_sep>WhimsTracker.SATISFACTION_STORE_ITEMS=sims4.collections.FrozenAttributeDict(store_items)<block_end><return><true><block_end>@staticmethod<def_stmt>_load_reward_instance reward_definition_id:int<arrow>Reward<block_start><return>CommonResourceUtils.load_instance(Types.REWARD reward_definition_id)<block_end><block_end>
|
<import_from_stmt>vpp_object VppObject<class_stmt>VppDHCPProxy(VppObject)<block_start><def_stmt>__init__ self test dhcp_server dhcp_src_address rx_vrf_id=0 server_vrf_id=0 <block_start>self._test=test<line_sep>self._rx_vrf_id=rx_vrf_id<line_sep>self._server_vrf_id=server_vrf_id<line_sep>self._dhcp_server=dhcp_server<line_sep>self._dhcp_src_address=dhcp_src_address<block_end><def_stmt>set_proxy self dhcp_server dhcp_src_address rx_vrf_id=0 server_vrf_id=0<block_start><if_stmt>self.query_vpp_config()<block_start><raise>Exception('Vpp config present')<block_end>self._rx_vrf_id=rx_vrf_id<line_sep>self._server_vrf_id=server_vrf_id<line_sep>self._dhcp_server=dhcp_server<line_sep>self._dhcp_src_address=dhcp_src_address<block_end><def_stmt>add_vpp_config self<block_start>self._test.vapi.dhcp_proxy_config(is_add=1 rx_vrf_id=self._rx_vrf_id server_vrf_id=self._server_vrf_id dhcp_server=self._dhcp_server dhcp_src_address=self._dhcp_src_address)<line_sep>self._test.registry.register(self self._test.logger)<block_end><def_stmt>remove_vpp_config self<block_start>self._test.vapi.dhcp_proxy_config(rx_vrf_id=self._rx_vrf_id server_vrf_id=self._server_vrf_id dhcp_server=self._dhcp_server dhcp_src_address=self._dhcp_src_address is_add=0)<block_end><def_stmt>get_vpp_dump self<block_start>dump=self._test.vapi.dhcp_proxy_dump()<for_stmt>entry dump<block_start><if_stmt>entry.rx_vrf_id<eq>self._rx_vrf_id<block_start><return>entry<block_end><block_end><block_end><def_stmt>query_vpp_config self<block_start>dump=self.get_vpp_dump()<line_sep><return><true><if>dump<else><false><block_end><def_stmt>object_id self<block_start><return>"dhcp-proxy-%d"%self._rx_vrf_id<block_end><block_end><class_stmt>VppDHCPClient(VppObject)<block_start><def_stmt>__init__ self test sw_if_index hostname id=<none> want_dhcp_event=<false> set_broadcast_flag=<true> dscp=<none> pid=<none><block_start>self._test=test<line_sep>self._sw_if_index=sw_if_index<line_sep>self._hostname=hostname<line_sep>self._id=id<line_sep>self._want_dhcp_event=want_dhcp_event<line_sep>self._set_broadcast_flag=set_broadcast_flag<line_sep>self._dscp=dscp<line_sep>self._pid=pid<block_end><def_stmt>set_client self sw_if_index hostname id=<none> want_dhcp_event=<false> set_broadcast_flag=<true> dscp=<none> pid=<none><block_start><if_stmt>self.query_vpp_config()<block_start><raise>Exception('Vpp config present')<block_end>self._sw_if_index=sw_if_index<line_sep>self._hostname=hostname<line_sep>self._id=id<line_sep>self._want_dhcp_event=want_dhcp_event<line_sep>self._set_broadcast_flag=set_broadcast_flag<line_sep>self._dscp=dscp<line_sep>self._pid=pid<block_end><def_stmt>add_vpp_config self<block_start>id=self._id.encode('ascii')<if>self._id<else><none><line_sep>client={'sw_if_index':self._sw_if_index 'hostname':self._hostname 'id':id 'want_dhcp_event':self._want_dhcp_event 'set_broadcast_flag':self._set_broadcast_flag 'dscp':self._dscp 'pid':self._pid}<line_sep>self._test.vapi.dhcp_client_config(is_add=1 client=client)<line_sep>self._test.registry.register(self self._test.logger)<block_end><def_stmt>remove_vpp_config self<block_start>client=client={'sw_if_index':self._sw_if_index 'hostname':self._hostname}<line_sep>self._test.vapi.dhcp_client_config(client=client is_add=0)<block_end><def_stmt>get_vpp_dump self<block_start>dump=self._test.vapi.dhcp_client_dump()<for_stmt>entry dump<block_start><if_stmt>entry.client.sw_if_index<eq>self._sw_if_index<block_start><return>entry<block_end><block_end><block_end><def_stmt>query_vpp_config 
self<block_start>dump=self.get_vpp_dump()<line_sep><return><true><if>dump<else><false><block_end><def_stmt>object_id self<block_start><return>"dhcp-client-%s/%d"%(self._hostname self._sw_if_index)<block_end><block_end>
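# --- Added illustration (not part of the original module) ---
# A hedged sketch of how a test case might exercise VppDHCPProxy; `test` is a
# placeholder for a VppTestCase instance and the addresses are invented.
#
# proxy = VppDHCPProxy(test, dhcp_server='10.0.0.2', dhcp_src_address='10.0.0.1')
# proxy.add_vpp_config()
# assert proxy.query_vpp_config()
# proxy.remove_vpp_config()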
|
#####################################################
# Copyright (c) <NAME> [GitHub D-X-Y], 2021.05 #
#####################################################
<import_from_stmt>.math_static_funcs LinearSFunc QuadraticSFunc CubicSFunc QuarticSFunc ConstantFunc ComposedSinSFunc ComposedCosSFunc <import_from_stmt>.math_dynamic_funcs LinearDFunc QuadraticDFunc SinQuadraticDFunc BinaryQuadraticDFunc <import_from_stmt>.math_dynamic_generator UniformDGenerator GaussianDGenerator<line_sep>
|
<import_from_stmt>lib action<class_stmt>ConsulQueryNodeAction(action.ConsulBaseAction)<block_start><def_stmt>run self node<block_start>index,node=self.consul.catalog.node(node)<line_sep><return>node<block_end><block_end>
|
<import_stmt>heterocl<as>hcl<def_stmt>kernel_gemm A B<block_start>k=hcl.reduce_axis(0 A.shape[1] "k")<line_sep><return>hcl.compute((A.shape[0] B.shape[1]) <lambda>i j:hcl.sum(A[i k]<times>B[k j] axis=k) "C")<block_end><def_stmt>main <block_start>M=512<line_sep>N=512<line_sep>K=512<line_sep>A=hcl.placeholder((M K) dtype="float32" name="A")<line_sep>B=hcl.placeholder((K N) dtype="float32" name="B")<line_sep>s=hcl.create_schedule([A B] kernel_gemm)<line_sep># split
C=kernel_gemm.C<line_sep>m,n,k=s[C].op.axis<line_sep>mo,mi=s[C].split(m factor=16)<line_sep>no,ni=s[C].split(n factor=32)<line_sep>ko,ki=s[C].split(k factor=8)<line_sep># reorder shuffle
s[C].reorder(mo no mi ni ko ki)<line_sep># reorder local
s[C].reorder(mi ko ki ni)<line_sep># reshape
s.reshape(C [512<floordiv>16 16 512<floordiv>32 32])<line_sep># partition
s.partition(A dim=3)<line_sep># pipeline
s[C].pipeline(mi)<line_sep># reuse_at
# nothing to do
print(hcl.build(s target="vhls"))<block_end><if_stmt>__name__<eq>"__main__"<block_start>main()<line_sep>"""
// result:
#include <ap_int.h>
#include <ap_fixed.h>
#include <math.h>
void default_function(float A[512][512], float B[512][512], ap_int<32> C[32][16][16][32]) {
#pragma HLS array_partition variable=A complete dim=3
for (ap_int<32> i_outer = 0; i_outer < 32; ++i_outer) {
for (ap_int<32> j_outer = 0; j_outer < 16; ++j_outer) {
for (ap_int<32> i_inner = 0; i_inner < 16; ++i_inner) {
#pragma HLS pipeline
for (ap_int<32> k_outer = 0; k_outer < 64; ++k_outer) {
ap_int<32> sum;
sum = 0;
for (ap_int<32> k_inner = 0; k_inner < 8; ++k_inner) {
for (ap_int<32> j_inner = 0; j_inner < 32; ++j_inner) {
sum = ((ap_int<32>)((A[(i_inner + (i_outer * 16))][(k_inner + (k_outer * 8))] * B[(k_inner + (k_outer * 8))][(j_inner + (j_outer * 32))]) + ((float)sum)));
}
}
C[i_outer][i_inner][j_outer][j_inner] = sum;
}
}
}
}
"""<block_end>
|
<import_stmt>os<import_from_stmt>eth_utils to_checksum_address<as>eth_utils_checksum<import_from_stmt>raiden.utils.formatting to_checksum_address<import_from_stmt>raiden.utils.typing Address<def_stmt>test_random_addresses <block_start><for_stmt>_ range(100)<block_start>address_bytes=Address(os.urandom(20))<assert_stmt>eth_utils_checksum(address_bytes)<eq>to_checksum_address(address_bytes)<block_end><block_end>
|
#
# SPDX-FileCopyrightText: Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
"""Layer for implementing the channel in the time domain"""<import_stmt>tensorflow<as>tf<import_from_stmt>. GenerateTimeChannel ApplyTimeChannel<import_from_stmt>.utils time_lag_discrete_time_channel<class_stmt>TimeChannel(tf.keras.layers.Layer)# pylint: disable=line-too-long
<block_start>r"""TimeChannel(channel_model, bandwidth, num_time_samples, maximum_delay_spread=3e-6, l_min=None, l_max=None, normalize_channel=False, add_awgn=True, return_channel=False, dtype=tf.complex64, **kwargs)
Generate channel responses and apply them to channel inputs in the time domain.
This class inherits from the Keras `Layer` class and can be used as layer
in a Keras model.
The channel output consists of ``num_time_samples`` + ``l_max`` - ``l_min``
time samples, as it is the result of filtering the channel input of length
``num_time_samples`` with the time-variant channel filter of length
``l_max`` - ``l_min`` + 1. In the case of a single-input single-output link and given a sequence of channel
inputs :math:`x_0,\cdots,x_{N_B}`, where :math:`N_B` is ``num_time_samples``, this
layer outputs
.. math::
y_b = \sum_{\ell = L_{\text{min}}}^{L_{\text{max}}} x_{b-\ell} \bar{h}_{b,\ell} + w_b
where :math:`L_{\text{min}}` corresponds to ``l_min``, :math:`L_{\text{max}}` to ``l_max``, :math:`w_b` to
the additive noise, and :math:`\bar{h}_{b,\ell}` to the
:math:`\ell^{th}` tap of the :math:`b^{th}` channel sample.
This layer outputs :math:`y_b` for :math:`b` ranging from :math:`L_{\text{min}}` to
:math:`N_B + L_{\text{max}} - 1`, and :math:`x_{b}` is set to 0 for :math:`b < 0` or :math:`b \geq N_B`.
The channel taps :math:`\bar{h}_{b,\ell}` are computed assuming a sinc filter
is used for pulse shaping and receive filtering. Therefore, given a channel impulse response
:math:`(a_{m}(t), \tau_{m}), 0 \leq m \leq M-1`, generated by the ``channel_model``,
the channel taps are computed as follows:
.. math::
\bar{h}_{b, \ell}
= \sum_{m=0}^{M-1} a_{m}\left(\frac{b}{W}\right)
\text{sinc}\left( \ell - W\tau_{m} \right)
for :math:`\ell` ranging from ``l_min`` to ``l_max``, and where :math:`W` is
the ``bandwidth``.
For multiple-input multiple-output (MIMO) links, the channel output is computed for each antenna of each receiver by summing over all the antennas of all transmitters.
Parameters
----------
channel_model : :class:`~sionna.channel.ChannelModel` object
An instance of a :class:`~sionna.channel.ChannelModel`, such as
:class:`~sionna.channel.RayleighBlockFading` or
:class:`~sionna.channel.tr38901.UMi`.
bandwidth : float
Bandwidth (:math:`W`) [Hz]
num_time_samples : int
Number of time samples forming the channel input (:math:`N_B`)
maximum_delay_spread : float
Maximum delay spread [s].
Used to compute the default value of ``l_max`` if ``l_max`` is set to
`None`. If a value is given for ``l_max``, this parameter is not used.
It defaults to 3us, which was found
to be large enough to include most significant paths with all channel
models included in Sionna assuming a nominal delay spread of 100ns.
l_min : int
Smallest time-lag for the discrete complex baseband channel (:math:`L_{\text{min}}`).
If set to `None`, defaults to the value given by :func:`time_lag_discrete_time_channel`.
l_max : int
Largest time-lag for the discrete complex baseband channel (:math:`L_{\text{max}}`).
If set to `None`, it is computed from ``bandwidth`` and ``maximum_delay_spread``
using :func:`time_lag_discrete_time_channel`. If it is not set to `None`,
then the parameter ``maximum_delay_spread`` is not used.
add_awgn : bool
If set to `False`, no white Gaussian noise is added.
Defaults to `True`.
normalize_channel : bool
If set to `True`, the channel is normalized over the block size
to ensure unit average energy per time step. Defaults to `False`.
return_channel : bool
If set to `True`, the channel response is returned in addition to the
channel output. Defaults to `False`.
dtype : tf.DType
Complex datatype to use for internal processing and output.
Defaults to `tf.complex64`.
Input
-----
(x, no) or x:
Tuple or Tensor:
x : [batch size, num_tx, num_tx_ant, num_time_samples], tf.complex
Channel inputs
no : Scalar or Tensor, tf.float
Scalar or tensor whose shape can be broadcast to the shape of the
channel outputs: [batch size, num_rx, num_rx_ant, num_time_samples].
Only required if ``add_awgn`` is set to `True`.
The noise power ``no`` is per complex dimension. If ``no`` is a scalar,
noise of the same variance will be added to the outputs.
If ``no`` is a tensor, it must have a shape that can be broadcast to
the shape of the channel outputs. This allows, e.g., adding noise of
different variance to each example in a batch. If ``no`` has a lower
rank than the channel outputs, then ``no`` will be broadcast to the
shape of the channel outputs by adding dummy dimensions after the last
axis.
Output
-------
y : [batch size, num_rx, num_rx_ant, num_time_samples + l_max - l_min], tf.complex
Channel outputs
The channel output consists of ``num_time_samples`` + ``l_max`` - ``l_min``
time samples, as it is the result of filtering the channel input of length
``num_time_samples`` with the time-variant channel filter of length
``l_max`` - ``l_min`` + 1.
h_time : [batch size, num_rx, num_rx_ant, num_tx, num_tx_ant, num_time_samples + l_max - l_min, l_max - l_min + 1], tf.complex
(Optional) Channel responses. Returned only if ``return_channel``
is set to `True`.
For each batch example, ``num_time_samples`` + ``l_max`` - ``l_min`` time
steps of the channel realizations are generated to filter the channel input.
"""<def_stmt>__init__ self channel_model bandwidth num_time_samples maximum_delay_spread=3e-6 l_min=<none> l_max=<none> normalize_channel=<false> add_awgn=<true> return_channel=<false> dtype=tf.complex64 **kwargs<block_start>super().__init__(trainable=<false> dtype=dtype **kwargs)<line_sep># Setting l_min and l_max to default values if not given by the user
l_min_default,l_max_default=time_lag_discrete_time_channel(bandwidth maximum_delay_spread)<if_stmt>l_min<is><none><block_start>l_min=l_min_default<block_end><if_stmt>l_max<is><none><block_start>l_max=l_max_default<block_end>self._cir_sampler=channel_model<line_sep>self._bandwidth=bandwidth<line_sep>self._num_time_steps=num_time_samples<line_sep>self._l_min=l_min<line_sep>self._l_max=l_max<line_sep>self._l_tot=l_max-l_min+1<line_sep>self._normalize_channel=normalize_channel<line_sep>self._add_awgn=add_awgn<line_sep>self._return_channel=return_channel<block_end><def_stmt>build self input_shape#pylint: disable=unused-argument
<block_start>self._generate_channel=GenerateTimeChannel(self._cir_sampler self._bandwidth self._num_time_steps self._l_min self._l_max self._normalize_channel)<line_sep>self._apply_channel=ApplyTimeChannel(self._num_time_steps self._l_tot self._add_awgn tf.as_dtype(self.dtype))<block_end><def_stmt>call self inputs<block_start><if_stmt>self._add_awgn<block_start>x,no=inputs<block_end><else_stmt><block_start>x=inputs<block_end>h_time=self._generate_channel(tf.shape(x)[0])<if_stmt>self._add_awgn<block_start>y=self._apply_channel([x h_time no])<block_end><else_stmt><block_start>y=self._apply_channel([x h_time])<block_end><if_stmt>self._return_channel<block_start><return>y h_time<block_end><else_stmt><block_start><return>y<block_end><block_end><block_end>
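# --- Hedged usage sketch (editor addition, not part of the original module) ---
# A minimal illustration of wiring the layer up, based only on the docstring above:
# the (x, no) tuple input, the documented tensor shapes, and RayleighBlockFading as
# one of the ChannelModel instances the docstring names. All parameter values here
# are illustrative assumptions, not recommendations.
if __name__ == "__main__":
    from sionna.channel import RayleighBlockFading

    channel_model = RayleighBlockFading(num_rx=1, num_rx_ant=1, num_tx=1, num_tx_ant=1)
    channel = TimeChannel(channel_model, bandwidth=30.72e6, num_time_samples=1024,
                          return_channel=True)
    x = tf.zeros([8, 1, 1, 1024], tf.complex64)  # [batch, num_tx, num_tx_ant, num_time_samples]
    no = 0.1                                     # noise power per complex dimension
    y, h_time = channel((x, no))                 # y: [8, 1, 1, 1024 + l_max - l_min]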
|
<import_stmt>salt.modules.config<as>config<import_stmt>salt.modules.zenoss<as>zenoss<import_from_stmt>tests.support.mixins LoaderModuleMockMixin<import_from_stmt>tests.support.mock MagicMock call patch<import_from_stmt>tests.support.unit TestCase<class_stmt>ZenossTestCase(TestCase LoaderModuleMockMixin)<block_start>"""
Test cases for salt.modules.zenoss
"""<def_stmt>setup_loader_modules self<block_start><return>{zenoss:{"__salt__":{"config.option":config.option}} config:{"__opts__":{}} }<block_end><def_stmt>test_zenoss_session self<block_start>"""
test zenoss._session when using verify_ssl
"""<line_sep>zenoss_conf={"zenoss":{"hostname":"https://test.zenoss.com" "username":"admin" "password":"<PASSWORD>" }}<for_stmt>verify [<true> <false> <none>]<block_start>zenoss_conf["zenoss"]["verify_ssl"]=verify<if_stmt>verify<is><none><block_start>zenoss_conf["zenoss"].pop("verify_ssl")<line_sep>verify=<true><block_end>patch_opts=patch.dict(config.__opts__ zenoss_conf)<line_sep>mock_http=MagicMock(return_value=<none>)<line_sep>patch_http=patch("salt.utils.http.session" mock_http)<with_stmt>patch_http patch_opts<block_start>zenoss._session()<line_sep>self.assertEqual(mock_http.call_args_list [call(ca_bundle=<none> headers={"Content-type":"application/json; charset=utf-8"} password="<PASSWORD>" user="admin" verify_ssl=verify )] )<block_end><block_end><block_end><block_end>
|
<import_from_stmt>.data_audit license_file_is_valid<import_stmt>sys<def_stmt>print_tree n indent=0<block_start><while_stmt>n<block_start>print(" "<times>indent n)<line_sep>print_tree(n.firstChild indent+4)<line_sep>n=n.nextSibling<block_end><block_end>fid=open(sys.argv[1])<line_sep>license_file_is_valid(fid '.')<line_sep>fid.close()<line_sep>
|
<import_from_stmt>exceptions PydolonsError<class_stmt>CantAffordActiveError(PydolonsError)<block_start><def_stmt>__init__ self active missing<block_start><assert_stmt>missing<in>["mana" "stamina" "health"]<line_sep>self.active=active<line_sep>self.missing=missing<block_end><def_stmt>__repr__ self<block_start><return>f"Need more {self.missing} to activate {self.active}"<block_end><block_end>
|
# Copyright 2020 JD.com, Inc. Galileo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
<import_stmt>tensorflow<as>tf<import_from_stmt>tensorflow.keras.layers Layer Dropout<import_from_stmt>galileo.platform.export export<import_from_stmt>galileo.framework.python.base_message_passing BaseMessagePassing<import_from_stmt>galileo.framework.tf.python.layers.aggregators get_aggregator<line_sep>@export('galileo.tf')<class_stmt>SAGELayer(Layer BaseMessagePassing)<block_start>r'''
\brief graphSAGE convolution tf layer
`"Inductive Representation Learning on Large Graphs"
<https://arxiv.org/abs/1706.02216>`
'''<def_stmt>__init__ self output_dim:int aggregator_name:str='mean' use_concat_in_aggregator:bool=<true> bias:bool=<true> dropout_rate:float=0.0 activation=<none> normalization=<none> **kwargs<block_start>r'''
\param output_dim output dim of layer
\param aggregator_name aggregator name, one of
"mean, gcn, meanpool, maxpool, lstm"
\param use_concat_in_aggregator concat if True else sum when aggregate
\param bias bias of layer
\param dropout_rate feature dropout rate
\param activation callable, apply activation to
the updated vertices features
\param normalization callable, apply normalization to
the updated vertices features
'''<line_sep># tensorflow replaces the base class with base_layer_v1.Layer
# when using estimator, so we can't call Layer.__init__()
self.__class__.__bases__[0].__init__(self **kwargs)<line_sep>BaseMessagePassing.__init__(self)<line_sep>self.output_dim=output_dim<line_sep>self.aggregator_name=aggregator_name<line_sep>self.use_concat_in_aggregator=use_concat_in_aggregator<line_sep>self.bias=bias<line_sep>self.dropout_rate=dropout_rate<line_sep>self.activation=activation<line_sep>self.normalization=normalization<line_sep>aggregator_class=get_aggregator(aggregator_name)<line_sep>self.aggregator=aggregator_class(output_dim use_concat_in_aggregator bias)<line_sep>self.feature_dropout=Dropout(dropout_rate)<block_end><def_stmt>call self inputs training=<none><block_start>r'''
\param inputs
tensors of inputs shape (batch_size, *, fanouts, feature_dim)
'''<line_sep><return>BaseMessagePassing.__call__(self inputs training=training)<block_end><def_stmt>message self inputs training=<none><block_start>src=inputs['src_feature']<line_sep>dst=inputs['dst_feature']<line_sep>edge_weight=inputs.get('edge_weight')<if_stmt>edge_weight<is><not><none><block_start>dst=dst<times>edge_weight<block_end>src=self.feature_dropout(src training=training)<line_sep>dst=self.feature_dropout(dst training=training)<line_sep><return>dict(src_feature=src dst_feature=dst)<block_end><def_stmt>aggregate self inputs<block_start>src=inputs['src_feature']<line_sep>dst=inputs['dst_feature']<line_sep># dst -> src is direction of aggregation
<return>self.aggregator((src dst))<block_end><def_stmt>update self inputs<block_start><if_stmt>callable(self.activation)<block_start>inputs=self.activation(inputs)<block_end><if_stmt>callable(self.normalization)<block_start>inputs=self.normalization(inputs)<block_end><return>inputs<block_end><def_stmt>get_config self<block_start>config=super().get_config()<line_sep>config.update(dict(output_dim=self.output_dim aggregator_name=self.aggregator_name use_concat_in_aggregator=self.use_concat_in_aggregator bias=self.bias dropout_rate=self.dropout_rate activation=self.activation normalization=self.normalization ))<line_sep><return>config<block_end><block_end>
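# --- Hedged usage sketch (editor addition, not part of the original module) ---
# A rough illustration of calling the layer, inferred from message()/aggregate()
# above: the input is a dict with 'src_feature' and 'dst_feature' (plus an optional
# 'edge_weight'). The tensor shapes below are illustrative assumptions; the exact
# ranks expected by each aggregator are defined in galileo's aggregator classes.
if __name__ == '__main__':
    layer = SAGELayer(output_dim=16, aggregator_name='mean')
    src = tf.random.normal([32, 64])  # features of the target vertices
    dst = tf.random.normal([32, 64])  # features aggregated from their neighbours
    out = layer({'src_feature': src, 'dst_feature': dst}, training=False)
    print(out.shape)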
|
__version__="0.14"<line_sep>VERSION=__version__<line_sep>
|
# -*- coding: UTF-8 -*-
# run: $gunicorn -c conf.py upload_api:app
<import_stmt>sys<import_stmt>os<import_stmt>multiprocessing<line_sep>sys.path.append(os.path.abspath('upload_api.py'))<line_sep>sys.path.append('.')<line_sep>sys.path.append('..')<import_from_stmt>upload_api crons_start<line_sep>path_of_current_file=os.path.abspath(__file__)<line_sep>path_of_current_dir=os.path.split(path_of_current_file)[0]<line_sep>_file_name=os.path.basename(__file__)<line_sep>bind='0.0.0.0:5000'<line_sep>workers=3<line_sep># workers = multiprocessing.cpu_count() * 2 + 1
worker_class='sync'<line_sep>timeout=100<line_sep># debug=True
loglevel='debug'<line_sep># pidfile = '%s/run/%s.pid' % (path_of_current_dir, _file_name)
errorlog='%s/logs/%s_error.log'%(path_of_current_dir _file_name)<line_sep>accesslog='%s/logs/%s_access.log'%(path_of_current_dir _file_name)<def_stmt>on_starting server# runs before the gunicorn master process starts
<block_start>crons_start()<block_end>
|
#This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
<import_stmt>datetime<import_stmt>os<import_stmt>pytest<import_stmt>sys<import_stmt>botocore<import_from_stmt>botocore.stub Stubber ANY<import_from_stmt>. context<import_from_stmt>auditors.aws.Amazon_EFS_Auditor efs_filesys_encryption_check describe_file_systems efs_filesys_policy_check efs <line_sep>describe_file_systems={"FileSystems":[{"FileSystemId":"MyEFS" "OwnerId":"Owner12345" "CreationToken":'egCreationToken' "CreationTime":'2015-01-01' "LifeCycleState":'available' "NumberOfMountTargets":1 "SizeInBytes":{'Value':123 'Timestamp':'2015-01-01' 'ValueInIA':123 'ValueInStandard':123} "PerformanceMode":"generalPurpose" "Encrypted":<true> "Tags":[{'Key':'EgKey' 'Value':'EgValue'}]}]}<line_sep>describe_file_systems_blank={"FileSystems":[]}<line_sep>describe_file_systems_enc_false={"FileSystems":[{"FileSystemId":"MyEFS" "OwnerId":"Owner12345" "CreationToken":'egCreationToken' "CreationTime":'2015-01-01' "LifeCycleState":'available' "NumberOfMountTargets":1 "SizeInBytes":{'Value':123 'Timestamp':'2015-01-01' 'ValueInIA':123 'ValueInStandard':123} "PerformanceMode":"generalPurpose" "Encrypted":<false> "Tags":[{'Key':'EgKey' 'Value':'EgValue'}]}]}<line_sep>file_system_policy={"FileSystemId":'MyEFS' "Policy":'{"Version": "2012-10-17", \
"Id": "ExamplePolicy01", \
"Statement": [ \
{ "Sid": "ExampleSatement01", \
"Effect": "Allow", \
"Principal": { \
"AWS": "arn:aws:iam::111122223333:user/CarlosSalazar"}, \
"Action": [ \
"elasticfilesystem:ClientMount", \
"elasticfilesystem:ClientWrite"], \
"Resource": "arn:aws:elasticfilesystem:us-east-2:111122223333:file-system/MyEFS", \
"Condition": {"Bool": {"aws:SecureTransport": "true"}}}]}'}<line_sep>@pytest.fixture(scope="function")<def_stmt>efs_stubber <block_start>efs_stubber=Stubber(efs)<line_sep>efs_stubber.activate()<line_sep><yield>efs_stubber<line_sep>efs_stubber.deactivate()<block_end><def_stmt>test_efs_encryption_true efs_stubber<block_start>efs_stubber.add_response("describe_file_systems" describe_file_systems)<line_sep>results=efs_filesys_encryption_check(cache={} awsAccountId="012345678901" awsRegion="us-east-1" awsPartition="aws")<for_stmt>result results<block_start><if_stmt>"MyEFS"<in>result["Id"]<block_start><assert_stmt>result["RecordState"]<eq>"ARCHIVED"<block_end><else_stmt><block_start><assert_stmt><false><block_end><block_end>efs_stubber.assert_no_pending_responses()<block_end><def_stmt>test_efs_encryption_false efs_stubber<block_start>efs_stubber.add_response("describe_file_systems" describe_file_systems_enc_false)<line_sep>results=efs_filesys_encryption_check(cache={} awsAccountId="012345678901" awsRegion="us-east-1" awsPartition="aws")<for_stmt>result results<block_start><if_stmt>"MyEFS"<in>result["Id"]<block_start><assert_stmt>result["RecordState"]<eq>"ACTIVE"<block_end><else_stmt><block_start><assert_stmt><false><block_end><block_end>efs_stubber.assert_no_pending_responses()<block_end><def_stmt>test_efs_policy efs_stubber<block_start>efs_stubber.add_response("describe_file_systems" describe_file_systems)<line_sep>efs_stubber.add_response("describe_file_system_policy" file_system_policy)<line_sep>results=efs_filesys_policy_check(cache={} awsAccountId="012345678901" awsRegion="us-east-1" awsPartition="aws")<for_stmt>result results<block_start><if_stmt>"MyEFS"<in>result["Id"]<block_start><assert_stmt>result["RecordState"]<eq>"ARCHIVED"<block_end><else_stmt><block_start><assert_stmt><false><block_end><block_end>efs_stubber.assert_no_pending_responses()<block_end><def_stmt>test_efs_no_policy efs_stubber<block_start>efs_stubber.add_response("describe_file_systems" describe_file_systems)<line_sep>efs_stubber.add_client_error("describe_file_system_policy" 'FileSystemNotFound')<line_sep>results=efs_filesys_policy_check(cache={} awsAccountId="012345678901" awsRegion="us-east-1" awsPartition="aws")<for_stmt>result results<block_start><if_stmt>"MyEFS"<in>result["Id"]<block_start><assert_stmt>result["RecordState"]<eq>"ACTIVE"<block_end><else_stmt><block_start><assert_stmt><false><block_end><block_end>efs_stubber.assert_no_pending_responses()<block_end><def_stmt>test_efs_no_fs efs_stubber<block_start>efs_stubber.add_response("describe_file_systems" describe_file_systems_blank)<line_sep>results=efs_filesys_policy_check(cache={} awsAccountId="012345678901" awsRegion="us-east-1" awsPartition="aws")<assert_stmt>len(list(results))<eq>0<line_sep>efs_stubber.assert_no_pending_responses()<block_end>
|
# Python 3.6 introduced a new implementation of dict.
# dict now keeps its items ordered as well.
# An OrderedDict is a dictionary subclass that remembers the order in which its contents are added.
<import_from_stmt>collections OrderedDict<if_stmt>__name__<eq>"__main__"<block_start>order_dic=OrderedDict()<line_sep>order_dic['a']='A'<line_sep>order_dic['b']='B'<line_sep>order_dic['c']='C'<line_sep>order_dic['d']='D'<line_sep>order_dic['e']='E'<for_stmt>k,v order_dic.items()<block_start>print(k v)<block_end># https://docs.python.org/3/library/collections.html#collections.OrderedDict.popitem
# last=True -> LIFO
# last=False -> FIFO
print(order_dic.popitem(last=<true>)# -> ('e', 'E')
)<line_sep>print(order_dic.popitem(last=<false>)# -> ('a', 'A')
)<line_sep># https://docs.python.org/3/library/collections.html#collections.OrderedDict.move_to_end
# last=True -> The item is moved to the right end
# last=False -> The item is moved to the beginning
print('origin:' order_dic)# OrderedDict([('b', 'B'), ('c', 'C'), ('d', 'D')])
order_dic.move_to_end('b' last=<true>)<line_sep>print(order_dic)# OrderedDict([('c', 'C'), ('d', 'D'), ('b', 'B')])
order_dic.move_to_end('b' last=<false>)<line_sep>print(order_dic)<block_end># OrderedDict([('b', 'B'), ('c', 'C'), ('d', 'D')])
|
<import_from_stmt>.unittest_runner UnittestTestRunner<def_stmt>pytest_installed <block_start><import_stmt>importlib<line_sep>pytest_loader=importlib.find_loader('pytest')<line_sep><return>pytest_loader<is><not><none><block_end><class_stmt>TestRunnerNotInstalledException(Exception)<block_start><pass><block_end><def_stmt>__pytest_not_installed *args **kwargs<block_start><raise>TestRunnerNotInstalledException('Pytest is not installed. Please run "pip install pytest" to resolve this issue.')<block_end><if_stmt>pytest_installed()<block_start><import_from_stmt>.pytest_runner PytestTestRunner<block_end><else_stmt><block_start>PytestTestRunner=__pytest_not_installed<block_end>
|
<import_from_stmt>pathlib Path<line_sep>BASE_PATH=Path(__file__).parent.absolute()<line_sep>RESOURCE_PATH=BASE_PATH/"resources"<line_sep>
|
# Generated by Django 2.1.3 on 2018-11-08 15:45
<import_from_stmt>django.db migrations models<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[("peering" "0018_auto_20181014_1612")]<line_sep>operations=[migrations.AddField(model_name="router" name="netbox_device_id" field=models.PositiveIntegerField(blank=<true> default=0) )]<block_end>
|
<import_stmt>requests<line_sep>aes_url='http://tool.chacuo.net/cryptaes'<line_sep>header={'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '<concat>'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.67 Safari/537.36'}<line_sep>ECB='ecb'<line_sep>PKC='pkcs5'<line_sep>BLOCK='128'<line_sep>PWD='<PASSWORD>'<line_sep>IV='123456'<line_sep>O='0'<line_sep>S='gb2312'<line_sep>T='0'<line_sep>ARG='m='+ECB+'_pad='+PKC+'_block='+BLOCK+'_p='+PWD+'_i='+IV+'_o='+O+'_s='+S+'_t='+T<line_sep>print(ARG)<line_sep>data={'data':'土豆' 'type':'aes' 'arg':ARG}<line_sep>result=requests.post(url=aes_url headers=header data=data)<line_sep>print(result.text)<line_sep>
|
<import_from_future_stmt> print_function<import_stmt>sys<import_from_stmt>google.auth.credentials AnonymousCredentials<import_from_stmt>google.cloud storage<if_stmt>len(sys.argv)-1<ne>3<block_start>print('args: PROJECT BUCKET record|replay')<line_sep>sys.exit(1)<block_end>project=sys.argv[1]<line_sep>bucket_name=sys.argv[2]<line_sep>mode=sys.argv[3]<if_stmt>mode<eq>'record'<block_start>creds=<none># use default creds for demo purposes; not recommended
<block_end><elif_stmt>mode<eq>'replay'<block_start>creds=AnonymousCredentials()<block_end><else_stmt><block_start>print('want record or replay')<line_sep>sys.exit(1)<block_end>client=storage.Client(project=project credentials=creds)<line_sep>bucket=client.get_bucket(bucket_name)<line_sep>print('bucket %s created %s'%(bucket.id bucket.time_created))<line_sep>
|
<import_stmt>itertools<as>it<import_stmt>operator<import_stmt>pint<import_from_stmt>. util<line_sep>units=("meter" "kilometer" "second" "minute" "angstrom")<line_sep>all_values=("int" "float" "complex")<line_sep>all_values_q=tuple("%s_%s"%(a b)<for>a,b it.product(all_values ("meter" "kilometer")))<line_sep>op1=(operator.neg operator.truth)<line_sep>op2_cmp=(operator.eq )# operator.lt)
op2_math=(operator.add operator.sub operator.mul operator.truediv)<line_sep>ureg=<none><line_sep>data={}<def_stmt>setup *args<block_start><global>ureg data<line_sep>data["int"]=1<line_sep>data["float"]=1.0<line_sep>data["complex"]=complex(1 2)<line_sep>ureg=pint.UnitRegistry(util.get_tiny_def())<for_stmt>key all_values<block_start>data[key+"_meter"]=data[key]<times>ureg.meter<line_sep>data[key+"_kilometer"]=data[key]<times>ureg.kilometer<block_end><block_end><def_stmt>time_build_by_mul key<block_start>data[key]<times>ureg.meter<block_end>time_build_by_mul.params=all_values<def_stmt>time_op1 key op<block_start>op(data[key])<block_end>time_op1.params=[all_values_q op1]<def_stmt>time_op2 keys op<block_start>key1,key2=keys<line_sep>op(data[key1] data[key2])<block_end>time_op2.params=[tuple(it.product(all_values_q all_values_q)) op2_math+op2_cmp]<line_sep>
|
expected_output={"vrf":{"L3VPN-1538":{"index":{1:{"address_type":"Interface" "ip_address":"192.168.10.254"}}}}}<line_sep>
|
#
# Copyright (c) 2020 Bitdefender
# SPDX-License-Identifier: Apache-2.0
#
<import_stmt>argparse<import_stmt>pathlib<import_stmt>subprocess<import_stmt>sys<import_stmt>platform<import_from_stmt>build_info write_build_info<def_stmt>get_argparser <block_start>parser=argparse.ArgumentParser()<line_sep>parser.add_argument("--meta-file" help="Path to the project-meta-info.in file" required=<true>)<line_sep>parser.add_argument("--build" help="Build number" required=<false> default=<none>)<line_sep>parser.add_argument("--out" help="Path to the output JSON file" required=<true>)<line_sep>parser.add_argument("--overwrite" help="Overwrite current ver.h file. Default is False" action="store_true" default=<false> )<line_sep><return>parser<block_end><def_stmt>get_version metainfo<block_start><with_stmt>open(metainfo mode="r")<as>metafile<block_start><for_stmt>line metafile<block_start><if_stmt>"project_version"<in>line<block_start>raw_ver=line.strip().split()[-1]<line_sep>ver_components=raw_ver.split(".")<if_stmt>len(ver_components)<ne>3<block_start><return><none><block_end>major=ver_components[0]<line_sep>minor=ver_components[1]<line_sep>revision=ver_components[2].replace(")" "")<line_sep><return>(major minor revision)<block_end><block_end><block_end><return><none><block_end><def_stmt>get_git_branch <block_start><return>(subprocess.check_output(["git" "rev-parse" "--abbrev-ref" "HEAD"]).decode("utf-8").strip())<block_end><def_stmt>get_git_revision <block_start><return>(subprocess.check_output(["git" "rev-parse" "--short" "HEAD"]).decode("utf-8").strip())<block_end><def_stmt>get_git_commit_count <block_start><return>(subprocess.check_output(["git" "rev-list" "--count" "HEAD"]).decode("utf-8").strip())<block_end><def_stmt>get_machine_name <block_start><return>platform.node()<block_end><def_stmt>main argv<block_start>args=get_argparser().parse_args(argv)<line_sep>out_file=pathlib.Path(args.out)<if_stmt>out_file.exists()<block_start><if_stmt><not>args.overwrite<block_start>print(f"Version file at {out_file} already exists! Will not overwrite it")<line_sep><return>0<block_end>print(f"Version file at {out_file} already exists! Will overwrite it")<block_end>ver=get_version(args.meta_file)<if_stmt><not>ver<block_start>print(f"ERROR: Could not extract version info from {args.meta_file}")<line_sep><return>1<block_end>(major minor revision)=ver<line_sep>build=args.build<if_stmt>build<is><none># "Sensible" default for local builds
<block_start>build=get_git_commit_count()<block_end>branch=get_git_branch()<line_sep>changeset=get_git_revision()<line_sep>build_machine=get_machine_name()<line_sep>build_info={}<line_sep>build_info["major"]=major<line_sep>build_info["minor"]=minor<line_sep>build_info["revision"]=revision<line_sep>build_info["build"]=build<line_sep>build_info["changeset"]=changeset<line_sep>build_info["branch"]=branch<line_sep>build_info["build_machine"]=build_machine<line_sep>write_build_info(out_file build_info)<line_sep><return>0<block_end><if_stmt>__name__<eq>"__main__"<block_start>sys.exit(main(sys.argv[1:]))<block_end>
|
<import_from_stmt>lightbus.api Api Event<class_stmt>LightbusStateApi(Api)<block_start>"""The API for the state plugin"""<line_sep>worker_started=Event(parameters=["service_name" "process_name" "metrics_enabled" "api_names" "listening_for" "timestamp" "ping_interval" ])<line_sep>worker_ping=Event(parameters=["service_name" "process_name" "metrics_enabled" "api_names" "listening_for" "timestamp" "ping_interval" ])<line_sep>worker_stopped=Event(parameters=["process_name" "timestamp"])<class_stmt>Meta<block_start>name="internal.state"<line_sep>internal=<true><block_end><block_end><class_stmt>LightbusMetricsApi(Api)<block_start>"""The API for the metrics plugin"""<line_sep>rpc_call_sent=Event(parameters=["service_name" "process_name" "id" "api_name" "procedure_name" "kwargs" "timestamp" ])<line_sep>rpc_call_received=Event(parameters=["service_name" "process_name" "id" "api_name" "procedure_name" "timestamp"])<line_sep>rpc_response_sent=Event(parameters=["service_name" "process_name" "id" "api_name" "procedure_name" "result" "timestamp" ])<line_sep>rpc_response_received=Event(parameters=["service_name" "process_name" "id" "api_name" "procedure_name" "timestamp"])<line_sep>event_fired=Event(parameters=["service_name" "process_name" "event_id" "api_name" "event_name" "kwargs" "timestamp" ])<line_sep>event_received=Event(parameters=["service_name" "process_name" "event_id" "api_name" "event_name" "kwargs" "timestamp" ])<line_sep>event_processed=Event(parameters=["service_name" "process_name" "event_id" "api_name" "event_name" "kwargs" "timestamp" ])<class_stmt>Meta<block_start>name="internal.metrics"<line_sep>internal=<true><block_end><block_end>
|
# Command line interface for the `py2deb' program.
#
# Author: <NAME> <<EMAIL>>
# Last Change: May 22, 2017
# URL: https://py2deb.readthedocs.io
"""
Usage: py2deb [OPTIONS] ...
Convert Python packages to Debian packages according to the given
command line options (see below). The command line arguments are the
same as accepted by the `pip install' command because py2deb invokes
pip during the conversion process. This means you can name the
package(s) to convert on the command line but you can also use
`requirement files' if you prefer.
If you want to pass command line options to pip (e.g. because you want
to use a custom index URL or a requirements file) then you will need
to tell py2deb where the options for py2deb stop and the options for
pip begin. In such cases you can use the following syntax:
$ py2deb -r /tmp -- -r requirements.txt
So the `--' marker separates the py2deb options from the pip options.
Supported options:
-c, --config=FILENAME
Load a configuration file. Because the command line arguments are processed
in the given order, you have the choice and responsibility to decide if
command line options override configuration file options or vice versa.
Refer to the documentation for details on the configuration file format.
The default configuration files /etc/py2deb.ini and ~/.py2deb.ini are
automatically loaded if they exist. This happens before environment
variables and command line options are processed.
Can also be set using the environment variable $PY2DEB_CONFIG.
-r, --repository=DIRECTORY
Change the directory where *.deb archives are stored. Defaults to
the system wide temporary directory (which is usually /tmp). If
this directory doesn't exist py2deb refuses to run.
Can also be set using the environment variable $PY2DEB_REPOSITORY.
--use-system-package=PYTHON_PACKAGE_NAME,DEBIAN_PACKAGE_NAME
Exclude a Python package (the name before the comma) from conversion and
replace references to the Python package with a specific Debian package
name. This allows you to use system packages for specific Python
requirements.
--name-prefix=PREFIX
Set the name prefix used during the name conversion from Python to
Debian packages. Defaults to `python'. The name prefix and package
names are always delimited by a dash.
Can also be set using the environment variable $PY2DEB_NAME_PREFIX.
--no-name-prefix=PYTHON_PACKAGE_NAME
Exclude a Python package from having the name prefix applied
during the package name conversion. This is useful to avoid
awkward repetitions.
--rename=PYTHON_PACKAGE_NAME,DEBIAN_PACKAGE_NAME
Override the package name conversion algorithm for the given pair
of package names. Useful if you don't agree with the algorithm :-)
--install-prefix=DIRECTORY
Override the default system wide installation prefix. By setting
this to anything other than `/usr' or `/usr/local' you change the
way py2deb works. It will build packages with a file system layout
similar to a Python virtual environment, except there will not be
a Python executable: The packages are meant to be loaded by
modifying Python's module search path. Refer to the documentation
for details.
Can also be set using the environment variable $PY2DEB_INSTALL_PREFIX.
--install-alternative=LINK,PATH
Use Debian's `update-alternatives' system to add an executable
that's installed in a custom installation prefix (see above) to
the system wide executable search path. Refer to the documentation
for details.
--python-callback=EXPRESSION
Set a Python callback to be called during the conversion process. Refer to
the documentation for details about the use of this feature and the syntax
of EXPRESSION.
Can also be set using the environment variable $PY2DEB_CALLBACK.
--report-dependencies=FILENAME
Add the Debian relationships needed to depend on the converted
package(s) to the given control file. If the control file already
contains relationships the additional relationships will be added
to the control file; they won't overwrite existing relationships.
-y, --yes
Instruct pip-accel to automatically install build time dependencies
where possible. Refer to the pip-accel documentation for details.
Can also be set using the environment variable $PY2DEB_AUTO_INSTALL.
-v, --verbose
Make more noise :-).
-h, --help
Show this message and exit.
"""<line_sep># Standard library modules.
<import_stmt>getopt<import_stmt>logging<import_stmt>os<import_stmt>sys<line_sep># External dependencies.
<import_stmt>coloredlogs<import_from_stmt>deb_pkg_tools.control patch_control_file<import_from_stmt>humanfriendly.terminal usage warning<line_sep># Modules included in our package.
<import_from_stmt>py2deb.converter PackageConverter<line_sep># Initialize a logger.
logger=logging.getLogger(__name__)<def_stmt>main <block_start>"""Command line interface for the ``py2deb`` program."""<line_sep># Configure terminal output.
coloredlogs.install()<try_stmt># Initialize a package converter.
<block_start>converter=PackageConverter()<line_sep># Parse and validate the command line options.
options,arguments=getopt.getopt(sys.argv[1:] 'c:r:yvh' ['config=' 'repository=' 'use-system-package=' 'name-prefix=' 'no-name-prefix=' 'rename=' 'install-prefix=' 'install-alternative=' 'python-callback=' 'report-dependencies=' 'yes' 'verbose' 'help' ])<line_sep>control_file_to_update=<none><for_stmt>option,value options<block_start><if_stmt>option<in>('-c' '--config')<block_start>converter.load_configuration_file(value)<block_end><elif_stmt>option<in>('-r' '--repository')<block_start>converter.set_repository(value)<block_end><elif_stmt>option<eq>'--use-system-package'<block_start>python_package_name,_,debian_package_name=value.partition(',')<line_sep>converter.use_system_package(python_package_name debian_package_name)<block_end><elif_stmt>option<eq>'--name-prefix'<block_start>converter.set_name_prefix(value)<block_end><elif_stmt>option<eq>'--no-name-prefix'<block_start>converter.rename_package(value value)<block_end><elif_stmt>option<eq>'--rename'<block_start>python_package_name,_,debian_package_name=value.partition(',')<line_sep>converter.rename_package(python_package_name debian_package_name)<block_end><elif_stmt>option<eq>'--install-prefix'<block_start>converter.set_install_prefix(value)<block_end><elif_stmt>option<eq>'--install-alternative'<block_start>link,_,path=value.partition(',')<line_sep>converter.install_alternative(link path)<block_end><elif_stmt>option<eq>'--python-callback'<block_start>converter.set_python_callback(value)<block_end><elif_stmt>option<eq>'--report-dependencies'<block_start>control_file_to_update=value<if_stmt><not>os.path.isfile(control_file_to_update)<block_start>msg="The given control file doesn't exist! (%s)"<line_sep><raise>Exception(msg%control_file_to_update)<block_end><block_end><elif_stmt>option<in>('-y' '--yes')<block_start>converter.set_auto_install(<true>)<block_end><elif_stmt>option<in>('-v' '--verbose')<block_start>coloredlogs.increase_verbosity()<block_end><elif_stmt>option<in>('-h' '--help')<block_start>usage(__doc__)<line_sep><return><block_end><else_stmt><block_start><assert_stmt><false> "Unhandled option!"<block_end><block_end><block_end><except_stmt>Exception<as>e<block_start>warning("Failed to parse command line arguments: %s" e)<line_sep>sys.exit(1)<block_end># Convert the requested package(s).
<try_stmt><block_start><if_stmt>arguments<block_start>archives,relationships=converter.convert(arguments)<if_stmt>relationships<and>control_file_to_update<block_start>patch_control_file(control_file_to_update dict(depends=relationships))<block_end><block_end><else_stmt><block_start>usage(__doc__)<block_end><block_end><except_stmt>Exception<block_start>logger.exception("Caught an unhandled exception!")<line_sep>sys.exit(1)<block_end><block_end>
|
# pylint: disable=missing-docstring,too-few-public-methods
<class_stmt>AbstractFoo<block_start><def_stmt>kwonly_1 self first * second third<block_start>"Normal positional with two keyword-only params."<block_end><def_stmt>kwonly_2 self * first second<block_start>"Two keyword-only parameters."<block_end><def_stmt>kwonly_3 self * first second<block_start>"Two keyword-only params."<block_end><def_stmt>kwonly_4 self * first second=<none><block_start>"One keyword-only and another with a default."<block_end><def_stmt>kwonly_5 self * first **kwargs<block_start>"Keyword only and keyword variadics."<block_end><def_stmt>kwonly_6 self first second * third<block_start>"Two positional and one keyword-only param."<block_end><block_end><class_stmt>Foo(AbstractFoo)<block_start><def_stmt>kwonly_1 self first * second# [arguments-differ]
<block_start>"One positional and only one keyword-only param."<block_end><def_stmt>kwonly_2 self first# [arguments-differ]
<block_start>"Only one positional parameter instead of two keyword-only parameters."<block_end><def_stmt>kwonly_3 self first second# [arguments-differ]
<block_start>"Two positional params."<block_end><def_stmt>kwonly_4 self first second# [arguments-differ]
<block_start>"Two positional params."<block_end><def_stmt>kwonly_5 self * first# [arguments-differ]
<block_start>"Keyword only, but no variadics."<block_end><def_stmt>kwonly_6 self *args **kwargs# valid override
<block_start>"Positional and keyword variadics to pass through parent params"<block_end><block_end><class_stmt>Foo2(AbstractFoo)<block_start><def_stmt>kwonly_6 self first *args **kwargs# valid override
<block_start>"One positional with the rest variadics to pass through parent params"<block_end><block_end>
|
<import_stmt>argparse<import_stmt>pandas<as>pd<import_stmt>numpy<as>np<import_stmt>string<import_stmt>pickle<import_from_stmt>nltk.util ngrams<def_stmt>generate_csv input_file='predicted_titles.csv' output_file='submission.csv' voc_file='vocs.pkl'<block_start>'''
Generates a file in the format required for submitting results to Kaggle
Parameters:
input_file (str) : path to csv file with your predicted titles.
Should have two fields: abstract and title
output_file (str) : path to output submission file
voc_file (str) : path to vocs.pkl file
'''<line_sep>data=pd.read_csv(input_file)<with_stmt>open(voc_file 'rb')<as>voc_file<block_start>vocs=pickle.load(voc_file)<block_end><with_stmt>open(output_file 'w')<as>res_file<block_start>res_file.write('Id,Predict\n')<block_end>output_idx=0<for_stmt>row_idx,row data.iterrows()<block_start>trg=row['title']<line_sep>trg=trg.translate(str.maketrans('' '' string.punctuation)).lower().split()<line_sep>trg.extend(['_'.join(ngram)<for>ngram list(ngrams(trg 2))+list(ngrams(trg 3))])<line_sep>VOCAB_stoi=vocs[row_idx]<line_sep>trg_intersection=set(VOCAB_stoi.keys()).intersection(set(trg))<line_sep>trg_vec=np.zeros(len(VOCAB_stoi))<for_stmt>word trg_intersection<block_start>trg_vec[VOCAB_stoi[word]]=1<block_end><with_stmt>open(output_file 'a')<as>res_file<block_start><for_stmt>is_word trg_vec<block_start>res_file.write('{0},{1}\n'.format(output_idx int(is_word)))<line_sep>output_idx<augadd>1<block_end><block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('--input_file' help='Path to input .csv file (abstract, title)' type=str )<line_sep>parser.add_argument('--output_file' help='Path to kaggle submission file' type=str )<line_sep>parser.add_argument('--voc_file' help='Path to voc.pkl file' type=str )<line_sep>args=parser.parse_args()<line_sep>generate_csv(args.input_file args.output_file args.voc_file)<block_end>
|
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: sample_authentication.py
DESCRIPTION:
This sample demonstrates how to authenticate to the Form Recognizer service.
There are two supported methods of authentication:
1) Use a Form Recognizer API key with AzureKeyCredential from azure.core.credentials
2) Use a token credential from azure-identity to authenticate with Azure Active Directory
See more details about authentication here:
https://docs.microsoft.com/azure/cognitive-services/authentication
USAGE:
python sample_authentication.py
Set the environment variables with your own values before running the sample:
1) AZURE_FORM_RECOGNIZER_ENDPOINT - the endpoint to your Form Recognizer resource.
2) AZURE_FORM_RECOGNIZER_KEY - your Form Recognizer API key
3) AZURE_CLIENT_ID - the client ID of your active directory application.
4) AZURE_TENANT_ID - the tenant ID of your active directory application.
5) AZURE_CLIENT_SECRET - the secret of your active directory application.
"""<import_stmt>os<line_sep>url="https://raw.githubusercontent.com/Azure/azure-sdk-for-python/main/sdk/formrecognizer/azure-ai-formrecognizer/tests/sample_forms/forms/Form_1.jpg"<def_stmt>authentication_with_api_key_credential_document_analysis_client # [START create_da_client_with_key]
<block_start><import_from_stmt>azure.core.credentials AzureKeyCredential<import_from_stmt>azure.ai.formrecognizer DocumentAnalysisClient<line_sep>endpoint=os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]<line_sep>key=os.environ["AZURE_FORM_RECOGNIZER_KEY"]<line_sep>document_analysis_client=DocumentAnalysisClient(endpoint AzureKeyCredential(key))<line_sep># [END create_da_client_with_key]
poller=document_analysis_client.begin_analyze_document_from_url("prebuilt-layout" url)<line_sep>result=poller.result()<block_end><def_stmt>authentication_with_azure_active_directory_document_analysis_client # [START create_da_client_with_aad]
<block_start>"""DefaultAzureCredential will use the values from these environment
variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, AZURE_CLIENT_SECRET
"""<import_from_stmt>azure.ai.formrecognizer DocumentAnalysisClient<import_from_stmt>azure.identity DefaultAzureCredential<line_sep>endpoint=os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]<line_sep>credential=DefaultAzureCredential()<line_sep>document_analysis_client=DocumentAnalysisClient(endpoint credential)<line_sep># [END create_da_client_with_aad]
poller=document_analysis_client.begin_analyze_document_from_url("prebuilt-layout" url)<line_sep>result=poller.result()<block_end><def_stmt>authentication_with_api_key_credential_document_model_admin_client # [START create_dt_client_with_key]
<block_start><import_from_stmt>azure.core.credentials AzureKeyCredential<import_from_stmt>azure.ai.formrecognizer DocumentModelAdministrationClient<line_sep>endpoint=os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]<line_sep>key=os.environ["AZURE_FORM_RECOGNIZER_KEY"]<line_sep>document_model_admin_client=DocumentModelAdministrationClient(endpoint AzureKeyCredential(key))<line_sep># [END create_dt_client_with_key]
info=document_model_admin_client.get_account_info()<block_end><def_stmt>authentication_with_azure_active_directory_document_model_admin_client # [START create_dt_client_with_aad]
<block_start>"""DefaultAzureCredential will use the values from these environment
variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, AZURE_CLIENT_SECRET
"""<import_from_stmt>azure.ai.formrecognizer DocumentModelAdministrationClient<import_from_stmt>azure.identity DefaultAzureCredential<line_sep>endpoint=os.environ["AZURE_FORM_RECOGNIZER_ENDPOINT"]<line_sep>credential=DefaultAzureCredential()<line_sep>document_model_admin_client=DocumentModelAdministrationClient(endpoint credential)<line_sep># [END create_dt_client_with_aad]
info=document_model_admin_client.get_account_info()<block_end><if_stmt>__name__<eq>"__main__"<block_start>authentication_with_api_key_credential_document_analysis_client()<line_sep>authentication_with_azure_active_directory_document_analysis_client()<line_sep>authentication_with_api_key_credential_document_model_admin_client()<line_sep>authentication_with_azure_active_directory_document_model_admin_client()<block_end>
|
<import_from_stmt>.efficient_tts EfficientTTSCNN<import_from_stmt>.duration_model DurationModel<line_sep>
|
# pylint: disable=missing-docstring
<def_stmt>baz # [disallowed-name]
<block_start><pass><block_end>
|
# Copyright (c) 2010 Resolver Systems Ltd, PythonAnywhere LLP
# See LICENSE.md
#
<try_stmt><block_start><import_stmt>unittest2<as>unittest<block_end><except_stmt>ImportError<block_start><import_stmt>unittest<block_end><import_stmt>datetime<import_from_stmt>sheet.dirigible_datetime DateTime<import_from_stmt>dirigible.test_utils ResolverTestCase<class_stmt>DateTimeTest(ResolverTestCase)<block_start><def_stmt>test_DateTime_subclasses_datetime_dot_datetime self<block_start>self.assertTrue(isinstance(DateTime(1979 10 8) datetime.datetime))<block_end><block_end>
|
<import_from_stmt>unittest.mock MagicMock<import_stmt>pytest<import_from_stmt>briefcase.integrations.subprocess Subprocess<line_sep>@pytest.fixture<def_stmt>mock_sub <block_start>command=MagicMock()<line_sep>command.verbosity=0<line_sep>sub=Subprocess(command)<line_sep>sub._subprocess=MagicMock()<line_sep><return>sub<block_end>
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
<import_from_stmt>..surface ExtractROIBasedSurfaceMeasures<def_stmt>test_ExtractROIBasedSurfaceMeasures_inputs <block_start>input_map=dict(lh_roi_atlas=dict(copyfile=<false> field="rdata" mandatory=<true> ) lh_surface_measure=dict(copyfile=<false> field="cdata" mandatory=<true> ) matlab_cmd=dict() mfile=dict(usedefault=<true> ) paths=dict() rh_roi_atlas=dict(copyfile=<false> mandatory=<false> ) rh_surface_measure=dict(copyfile=<false> mandatory=<false> ) surface_files=dict(copyfile=<false> mandatory=<false> ) use_mcr=dict() use_v8struct=dict(min_ver="8" usedefault=<true> ) )<line_sep>inputs=ExtractROIBasedSurfaceMeasures.input_spec()<for_stmt>key,metadata list(input_map.items())<block_start><for_stmt>metakey,value list(metadata.items())<block_start><assert_stmt>getattr(inputs.traits()[key] metakey)<eq>value<block_end><block_end><block_end><def_stmt>test_ExtractROIBasedSurfaceMeasures_outputs <block_start>output_map=dict(label_files=dict() )<line_sep>outputs=ExtractROIBasedSurfaceMeasures.output_spec()<for_stmt>key,metadata list(output_map.items())<block_start><for_stmt>metakey,value list(metadata.items())<block_start><assert_stmt>getattr(outputs.traits()[key] metakey)<eq>value<block_end><block_end><block_end>
|
# coding=utf-8
<import_stmt>os<import_stmt>re<import_stmt>time<import_stmt>base64<import_stmt>logging<import_stmt>tempfile<class_stmt>AdbExt(object)<block_start><def_stmt>__init__ self util<block_start>self.util=util<line_sep>self.is_helper_ready=<false><line_sep>self.width,self.height=<none> <none><line_sep>self.dir_path=os.path.dirname(os.path.abspath(__file__))# absolute path of the directory containing this file
self.temp_device_dir_path='/data/local/tmp'<block_end><def_stmt>init_device_size self<block_start><if_stmt>self.width<and>self.height<block_start><return><block_end>out=self.util.shell('wm size')# out like 'Physical size: 1080x1920'
out=re.findall(r'\d+' out)<line_sep>self.width=int(out[0])<line_sep>self.height=int(out[1])<block_end><def_stmt>dump self<block_start><for_stmt>i range(5)<block_start>xml_str=self.__dump_xml()<if_stmt>xml_str<block_start><return>xml_str<block_end>time.sleep(1)<block_end><raise>NameError('dump xml fail!')<block_end><def_stmt>__dump_xml self# first try to get the xml via the helper
<block_start>xml_str=self.run_helper_cmd('layout')<line_sep># fall back to uiautomator in compressed mode
<if_stmt><not>xml_str<block_start>xml_str=self.util.adb('exec-out uiautomator dump --compressed /dev/tty' encoding='')<block_end># then try non-compressed mode
<if_stmt><not>xml_str<block_start>xml_str=self.util.adb('exec-out uiautomator dump /dev/tty' encoding='')<block_end><if_stmt>isinstance(xml_str bytes)<block_start>xml_str=xml_str.decode('utf-8')<block_end><if_stmt>'hierarchy'<in>xml_str<block_start>start=xml_str.find('<hierarchy')<line_sep>end=xml_str.rfind('>')+1<line_sep>xml_str=xml_str[start:end].strip()<line_sep><return>xml_str<block_end><block_end><def_stmt>run_helper_cmd self cmd<block_start>"""
Run a helper command; the helper currently supports dump xml and screenshot.
:param cmd:
:return:
"""<if_stmt><not>self.is_helper_ready<block_start>file_names=self.util.shell('ls {}'.format(self.temp_device_dir_path))<if_stmt>'adbui'<not><in>file_names<block_start>helper_path=os.path.join(self.dir_path 'static' 'adbui')<line_sep>self.push(helper_path self.temp_device_dir_path)<block_end>self.is_helper_ready=<true><block_end>arg='app_process -Djava.class.path=/data/local/tmp/adbui /data/local/tmp com.ysbing.yadb.Main -{}'.format(cmd)<line_sep><return>self.util.shell(arg)<block_end><def_stmt>delete_from_device self path<block_start>self.util.shell('rm -rf {}'.format(path))<block_end><def_stmt>screenshot self pc_path=<none><block_start>out=self.run_helper_cmd('screenshot')<if_stmt>out<and>len(out)<g>50<block_start>out=base64.b64decode(out)<block_end><else_stmt># helper 截图失败,使用 screencap 截图
<block_start>logging.warning('helper screenshot failed')<line_sep>arg='exec-out screencap -p'<line_sep>out=self.util.adb(arg encoding=<none>)<block_end># here out is a png bytes string
# save the screenshot if a pc path was given
<if_stmt>pc_path<block_start><if_stmt>self.util.is_py2<block_start>pc_path=pc_path.decode('utf-8')<block_end><if_stmt>os.path.exists(pc_path)# remove any existing file on the pc
<block_start>os.remove(pc_path)<block_end><with_stmt>open(pc_path 'wb')<as>f<block_start>f.write(out)<block_end><return>pc_path<block_end><return>out<block_end><def_stmt>pull self device_path=<none> pc_path=<none><block_start><return>self.util.adb('pull "{}" "{}"'.format(device_path pc_path))<block_end><def_stmt>push self pc_path=<none> device_path=<none><block_start><return>self.util.adb('push "{}" "{}"'.format(pc_path device_path))<block_end><def_stmt>click self x y<block_start>self.util.shell('input tap {} {}'.format(x y))<block_end><def_stmt>long_click self x y duration=''<block_start>"""
Long press.
:param x: x coordinate
:param y: y coordinate
:param duration: press duration (ms)
:return:
"""<line_sep>self.util.shell('input touchscreen swipe {} {} {} {} {}'.format(x y x y duration))<block_end><def_stmt>start self pkg<block_start>"""
Use monkey to launch an app; only the package name is needed.
:param pkg:
:return:
"""<line_sep>self.util.shell('monkey -p {} 1'.format(pkg))<block_end><def_stmt>stop self pkg<block_start>self.util.shell('am force-stop {}'.format(pkg))<block_end><def_stmt>input self text<block_start>self.util.shell('input text "{}"'.format(text.replace('&' '\&')))<block_end><def_stmt>back self times=1<block_start><while_stmt>times<block_start>self.util.shell('input keyevent 4')<line_sep>times<augsub>1<block_end><block_end><def_stmt>home self<block_start>self.util.shell('input keyevent 3')<block_end><def_stmt>enter self times=1<block_start><while_stmt>times<block_start>self.util.shell('input keyevent 66')<line_sep>times<augsub>1<block_end><block_end><def_stmt>swipe self e1=<none> e2=<none> start_x=<none> start_y=<none> end_x=<none> end_y=<none> duration=" "<block_start>"""
Swipe event; duration (ms) is optional on Android 4.4+.
usage: swipe(e1, e2)
swipe(e1, end_x=200, end_y=500)
swipe(start_x=0.5, start_y=0.5, e2)
"""<line_sep>self.init_device_size()<if_stmt>e1<is><not><none><block_start>start_x=e1[0]<line_sep>start_y=e1[1]<block_end><if_stmt>e2<is><not><none><block_start>end_x=e2[0]<line_sep>end_y=e2[1]<block_end><if_stmt>0<l>start_x<l>1<block_start>start_x=start_x<times>self.width<block_end><if_stmt>0<l>start_y<l>1<block_start>start_y=start_y<times>self.height<block_end><if_stmt>0<l>end_x<l>1<block_start>end_x=end_x<times>self.width<block_end><if_stmt>0<l>end_y<l>1<block_start>end_y=end_y<times>self.height<block_end>self.util.shell('input swipe %s %s %s %s %s'%(str(start_x) str(start_y) str(end_x) str(end_y) str(duration)))<block_end><def_stmt>clear self pkg<block_start>"""
Clear the app's data (reset it).
:param pkg:
:return:
"""<line_sep>self.util.shell('pm clear {}'.format(pkg))<block_end><def_stmt>wake_up self<block_start>"""
Wake up the screen.
:return:
"""<line_sep>self.util.shell('input keyevent KEYCODE_WAKEUP')<block_end><def_stmt>unlock self<block_start>"""
Unlock the screen.
:return:
"""<line_sep>self.util.shell('input keyevent 82')<block_end><def_stmt>grant self pkg permission<block_start>"""
Grant a permission to an app, e.g. adb shell pm grant [PACKAGE_NAME] android.permission.PACKAGE_USAGE_STATS
:return:
"""<line_sep>self.util.shell('pm grant {} {}'.format(pkg permission))<block_end><def_stmt>install self apk_path with_g=<true> with_r=<false> user=<none><block_start>"""
Install an apk.
:param apk_path:
:param with_g: -g auto-grants runtime permissions on some devices, defaults to True
:param with_r: -r reinstalls over an existing app, defaults to False
:param user:
:return:
"""<line_sep>arg='install'<if_stmt>user<block_start>arg=arg+' -user {}'.format(user)<block_end><if_stmt>with_g<block_start>arg=arg+' -g'<block_end><if_stmt>with_r<block_start>arg=arg+' -r'<block_end>self.util.adb('{} "{}"'.format(arg apk_path) timeout=60<times>5)<block_end># 安装较大的包可能比较耗时
<def_stmt>uninstall self pkg<block_start>"""
Uninstall a package.
:param pkg:
:return:
"""<line_sep>self.util.adb('uninstall {}'.format(pkg))<block_end><def_stmt>get_name self remove_blank=<false><block_start>name=self.util.shell('getprop ro.config.marketing_name').strip()<if_stmt><not>name<block_start>name=self.util.shell('getprop ro.product.nickname').strip()<block_end><if_stmt>remove_blank<block_start>name=name.replace(' ' '')<block_end><return>name<block_end><def_stmt>switch_user self user_id wait_time=5<block_start>self.util.shell('am switch-user {}'.format(user_id))<line_sep>time.sleep(wait_time)<block_end><def_stmt>list_packages self system=<false><block_start>"""
Return the packages installed on the device.
:param system: whether to include system packages
:return:
"""<line_sep>with_system=''<if>system<else>'-3'<line_sep><return>self.util.shell('pm list packages {}'.format(with_system))<block_end><block_end>
|
<import_from_stmt>math ceil floor<import_from_stmt>pygears.typing Fixp Ufixp Uint Int<def_stmt>test_abs <block_start>uq2_3=Ufixp[2 3]<line_sep>q2_3=Fixp[2 3]<line_sep>q3_4=Fixp[3 4]<assert_stmt>abs(uq2_3.max)<eq>uq2_3.max<assert_stmt>abs(q2_3.min)<eq>q3_4(abs(float(q2_3.min)))<block_end><def_stmt>test_add <block_start>uq2_3=Ufixp[2 3]<line_sep>uq2_4=Ufixp[2 4]<line_sep>uq3_4=Ufixp[3 4]<line_sep>uq3_5=Ufixp[3 5]<line_sep>uq4_5=Ufixp[4 5]<line_sep>uq4_6=Ufixp[4 6]<line_sep>q2_3=Fixp[2 3]<line_sep>q2_4=Fixp[2 4]<line_sep>q3_4=Fixp[3 4]<line_sep>q3_5=Fixp[3 5]<line_sep>q4_5=Fixp[4 5]<line_sep>q4_6=Fixp[4 6]<line_sep>q5_6=Fixp[5 6]<line_sep>q5_7=Fixp[5 7]<assert_stmt>uq2_3.quant+uq3_4.quant<eq>uq4_5(float(uq2_3.quant)+float(uq3_4.quant))<assert_stmt>uq2_3.max+uq3_4.max<eq>uq4_5(11.0)<assert_stmt>uq3_4.max+uq3_4.max<eq>uq4_5(15.0)<assert_stmt>uq2_4.quant+uq3_4.quant<eq>uq4_6(float(uq2_4.quant)+float(uq3_4.quant))<assert_stmt>uq2_4.max+uq3_4.max<eq>uq4_6(11.25)<assert_stmt>uq3_4.max+uq3_5.max<eq>uq4_6(15.25)<assert_stmt>q2_3.quant+q3_4.quant<eq>q4_5(float(q2_3.quant)+float(q3_4.quant))<assert_stmt>q2_3.max+q3_4.max<eq>q4_5(5.0)<assert_stmt>q3_4.max+q3_4.max<eq>q4_5(7.0)<assert_stmt>q2_4.quant+q3_4.quant<eq>q4_6(float(q2_4.quant)+float(q3_4.quant))<assert_stmt>q2_4.max+q3_4.max<eq>q4_6(5.25)<assert_stmt>q3_4.max+q3_5.max<eq>q4_6(7.25)<assert_stmt>uq2_3.quant+q3_4.quant<eq>q4_5(float(uq2_3.quant)+float(q3_4.quant))<assert_stmt>uq2_3.max+q3_4.max<eq>q4_5(7.0)<assert_stmt>q2_3.max+uq3_4.max<eq>q5_6(9.0)<assert_stmt>uq3_4.max+q3_4.max<eq>q5_6(11.0)<assert_stmt>uq2_4.quant+q3_4.quant<eq>q4_6(float(uq2_4.quant)+float(q3_4.quant))<assert_stmt>uq2_4.max+q3_4.max<eq>q4_6(7.25)<assert_stmt>uq3_4.max+q3_5.max<eq>q5_7(11.25)<assert_stmt>q2_4.max+uq3_4.max<eq>q5_7(9.25)<assert_stmt>q2_3.min+q3_4.max<eq>q4_5(1.5)<assert_stmt>q3_4.min+q3_4.max<eq>q4_5(-0.5)<assert_stmt>q2_4.min+q3_4.max<eq>q4_6(1.5)<assert_stmt>q3_4.min+q3_5.max<eq>q4_6(-0.25)<assert_stmt>uq2_3.max+q3_4.min<eq>q4_5(-0.5)<assert_stmt>q2_3.min+uq3_4.max<eq>q5_6(5.5)<assert_stmt>uq3_4.max+q3_4.min<eq>q5_6(3.5)<assert_stmt>uq2_4.max+q3_4.min<eq>q4_6(-0.25)<assert_stmt>uq3_4.max+q3_5.min<eq>q5_7(3.5)<assert_stmt>q2_4.min+uq3_4.max<eq>q5_7(5.5)<block_end><def_stmt>test_ceil <block_start>uq2_4=Ufixp[2 4]<line_sep>q2_3=Fixp[2 3]<line_sep>uq4_4=Ufixp[4 4]<line_sep>q6_3=Fixp[6 3]<assert_stmt>ceil(uq2_4.max)<eq>Ufixp[3 5](4.0)<assert_stmt>ceil(uq2_4(3.25))<eq>Ufixp[3 5](4.0)<assert_stmt>ceil(q2_3.min)<eq>Fixp[3 4](-2.0)<assert_stmt>ceil(q2_3(-1.5))<eq>Fixp[3 4](-1.0)<assert_stmt>ceil(uq4_4.max)<eq>uq4_4.max<assert_stmt>ceil(q6_3.min)<eq>q6_3.min<block_end><def_stmt>test_floor <block_start>uq2_4=Ufixp[2 4]<line_sep>q2_3=Fixp[2 3]<line_sep>uq4_4=Ufixp[4 4]<line_sep>q6_3=Fixp[6 3]<assert_stmt>floor(uq2_4.max)<eq>uq2_4(3.0)<assert_stmt>floor(uq2_4(3.25))<eq>uq2_4(3.0)<assert_stmt>floor(q2_3.min)<eq>q2_3(-2.0)<assert_stmt>floor(q2_3(-1.5))<eq>q2_3(-2.0)<assert_stmt>floor(uq4_4.max)<eq>uq4_4.max<assert_stmt>floor(q6_3.min)<eq>q6_3.min<block_end><def_stmt>test_ge <block_start>uq2_3=Ufixp[2 3]<line_sep>q2_3=Fixp[2 3]<assert_stmt>uq2_3(1.5)<ge>q2_3(1.5)<assert_stmt>q2_3(1.5)<ge>uq2_3(1.5)<assert_stmt>uq2_3.max<ge>q2_3.min<assert_stmt>q2_3.max<ge>uq2_3.min<block_end><def_stmt>test_gt <block_start>uq2_3=Ufixp[2 3]<line_sep>q2_3=Fixp[2 3]<assert_stmt>uq2_3(2.0)<g>q2_3(1.5)<assert_stmt>q2_3(1.5)<g>uq2_3(1.0)<assert_stmt>uq2_3.max<g>q2_3.min<assert_stmt>q2_3.max<g>uq2_3.min<block_end><def_stmt>test_le <block_start>uq2_3=Ufixp[2 3]<line_sep>q2_3=Fixp[2 
3]<assert_stmt>uq2_3(1.5)<le>q2_3(1.5)<assert_stmt>q2_3(1.5)<le>uq2_3(1.5)<assert_stmt>uq2_3.min<le>q2_3.max<assert_stmt>q2_3.min<le>uq2_3.max<block_end><def_stmt>test_lt <block_start>uq2_3=Ufixp[2 3]<line_sep>q2_3=Fixp[2 3]<assert_stmt>uq2_3(1.0)<l>q2_3(1.5)<assert_stmt>q2_3(1.0)<l>uq2_3(1.5)<assert_stmt>uq2_3.min<l>q2_3.max<assert_stmt>q2_3.min<l>uq2_3.max<block_end><def_stmt>test_lshift <block_start>uq2_3=Ufixp[2 3]<line_sep>uq4_3=Ufixp[4 3]<line_sep>q2_3=Fixp[2 3]<line_sep>q4_3=Fixp[4 3]<assert_stmt>uq2_3.max<lshift>2<eq>uq4_3(14.0)<assert_stmt>q2_3.min<lshift>2<eq>q4_3.min<assert_stmt>uq2_3.max<lshift>0<eq>uq2_3.max<assert_stmt>q2_3.min<lshift>0<eq>q2_3.min<block_end><def_stmt>test_neg <block_start>uq2_3=Ufixp[2 3]<line_sep>q2_3=Fixp[2 3]<line_sep>q3_4=Fixp[3 4]<assert_stmt>-uq2_3.max<eq>q3_4(-float(uq2_3.max))<assert_stmt>-q2_3.min<eq>q3_4(-float(q2_3.min))<block_end><def_stmt>test_rshift <block_start>uq2_3=Ufixp[2 3]<line_sep>uq4_3=Ufixp[4 3]<line_sep>q2_3=Fixp[2 3]<line_sep>q4_3=Fixp[4 3]<assert_stmt>uq4_3(14.0)<rshift>2<eq>uq2_3.max<assert_stmt>q4_3.min<rshift>2<eq>q2_3.min<assert_stmt>uq2_3.max<rshift>0<eq>uq2_3.max<assert_stmt>q2_3.min<rshift>0<eq>q2_3.min<block_end><def_stmt>test_round <block_start>uq2_4=Ufixp[2 4]<line_sep>q2_3=Fixp[2 3]<line_sep>uq4_4=Ufixp[4 4]<line_sep>q6_3=Fixp[6 3]<assert_stmt>round(uq2_4.max)<eq>Ufixp[3 5](4.0)<assert_stmt>round(uq2_4(3.25))<eq>Ufixp[3 5](3.0)<assert_stmt>round(q2_3.min)<eq>Fixp[3 4](-2.0)<assert_stmt>round(q2_3(-1.5))<eq>Fixp[3 4](-1.0)<assert_stmt>round(uq4_4.max)<eq>uq4_4.max<assert_stmt>round(q6_3.min)<eq>q6_3.min<block_end><def_stmt>test_sub_val <block_start>uq2_3=Ufixp[2 3]<line_sep>uq2_4=Ufixp[2 4]<line_sep>uq3_4=Ufixp[3 4]<line_sep>uq3_5=Ufixp[3 5]<line_sep>q2_3=Fixp[2 3]<line_sep>q2_4=Fixp[2 4]<line_sep>q3_4=Fixp[3 4]<line_sep>q3_5=Fixp[3 5]<line_sep>q4_5=Fixp[4 5]<line_sep>q4_6=Fixp[4 6]<line_sep>q5_6=Fixp[5 6]<line_sep>q5_7=Fixp[5 7]<assert_stmt>uq2_3.quant-uq3_4.quant<eq>q4_5(0.0)<assert_stmt>uq2_3.min-uq3_4.max<eq>q4_5(-7.5)<assert_stmt>uq2_4.quant-uq3_4.quant<eq>q4_6(float(uq2_4.quant)-float(uq3_4.quant))<assert_stmt>uq2_4.min-uq3_4.max<eq>q4_6(-7.5)<assert_stmt>uq3_4.min-uq3_5.max<eq>q4_6(-7.75)<assert_stmt>q2_3.quant-q3_4.quant<eq>q4_5(0.0)<assert_stmt>q2_3.min-q3_4.max<eq>q4_5(-5.5)<assert_stmt>q3_4.min-q3_4.max<eq>q4_5(-7.5)<assert_stmt>q3_4.max-q3_4.min<eq>q4_5(7.5)<assert_stmt>q2_4.quant-q3_4.quant<eq>q4_6(float(q2_4.quant)-float(q3_4.quant))<assert_stmt>q2_4.min-q3_4.max<eq>q4_6(-5.5)<assert_stmt>q2_4.max-q3_4.min<eq>q4_6(5.75)<assert_stmt>q3_4.min-q3_5.max<eq>q4_6(-7.75)<assert_stmt>q3_4.max-q3_5.min<eq>q4_6(7.5)<assert_stmt>uq2_3.quant-q3_4.quant<eq>q4_5(0.0)<assert_stmt>uq2_3.max-q3_4.min<eq>q4_5(7.5)<assert_stmt>q2_3.min-uq3_4.max<eq>q5_6(-9.5)<assert_stmt>uq3_4.max-q3_4.min<eq>q5_6(11.5)<assert_stmt>q3_4.min-uq3_4.max<eq>q5_6(-11.5)<assert_stmt>uq2_4.quant-q3_4.quant<eq>q4_6(float(uq2_4.quant)-float(q3_4.quant))<assert_stmt>uq2_4.max-q3_4.min<eq>q4_6(7.75)<assert_stmt>uq3_4.max-q3_5.min<eq>q5_7(11.5)<assert_stmt>q2_4.min-uq3_4.max<eq>q5_7(-9.5)<block_end>
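# The expected result types above encode pygears' fixed-point widening rules for
# Fixp[I, W]/Ufixp[I, W] (I integer bits, W total width): add/sub grows the integer part
# by one bit and keeps the wider of the two fractional parts, and mixing signed with
# unsigned operands yields a signed result wide enough to hold both ranges.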
|
<import_stmt>numpy<as>np<import_stmt>yaml<import_stmt>argparse<import_stmt>os<import_stmt>random<import_stmt>matplotlib.pyplot<as>plt<def_stmt>save_img path spec_song=<none> spec_hum=<none><block_start><if_stmt>spec_song<is><none><or>spec_hum<is><none><block_start><if_stmt>spec_song<is><not><none><block_start>plt.imshow(spec_song origin="lower")<line_sep>plt.title("song" fontsize="medium")<line_sep>plt.ylim(0 spec_song.shape[0])<block_end><if_stmt>spec_hum<is><not><none><block_start>plt.imshow(spec_hum origin="lower")<line_sep>plt.title("hum" fontsize="medium")<line_sep>plt.ylim(0 spec_hum.shape[0])<block_end><block_end><else_stmt><block_start>fig,axes=plt.subplots(2 1 squeeze=<false>)<line_sep>axes[0 0].imshow(spec_song origin="lower")<line_sep>axes[0 0].set_title("song" fontsize="medium")<line_sep>axes[0 0].set_ylim(0 spec_song.shape[0])<line_sep>axes[1 0].imshow(spec_hum origin="lower")<line_sep>axes[1 0].set_title("hum" fontsize="medium")<line_sep>axes[1 0].set_ylim(0 spec_hum.shape[0])<block_end>plt.savefig(path)<line_sep>plt.close()<block_end><def_stmt>visualize dataset in_dir out_dir num<block_start>random.seed(1234)<line_sep>files=os.listdir(os.path.join(in_dir dataset "hum"))<line_sep>random.shuffle(files)<line_sep>files=random.sample(files k=min(num len(files)))<line_sep>os.makedirs(os.path.join(out_dir dataset) exist_ok=<true>)<if_stmt>dataset<eq>"train"<or>dataset<eq>"val"<block_start><for_stmt>file files<block_start>spec_hum=np.load(os.path.join(in_dir dataset "hum" file))<line_sep>spec_song=np.load(os.path.join(in_dir dataset "song" file))<line_sep>save_img(os.path.join(out_dir dataset file[:-4]+".jpg") spec_song.T spec_hum.T)<block_end><block_end><elif_stmt>dataset<eq>"public_test"<block_start>os.makedirs(os.path.join(out_dir dataset "hum") exist_ok=<true>)<for_stmt>file files<block_start>spec_hum=np.load(os.path.join(in_dir dataset "hum" file))<line_sep>save_img(os.path.join(out_dir dataset "hum" file[:-4]+".jpg") spec_hum=spec_hum.T)<block_end>files=os.listdir(os.path.join(in_dir dataset "full_song"))<line_sep>random.shuffle(files)<line_sep>files=random.sample(files k=min(num len(files)))<line_sep>os.makedirs(os.path.join(out_dir dataset "full_song") exist_ok=<true>)<for_stmt>file files<block_start>spec_song=np.load(os.path.join(in_dir dataset "full_song" file))<line_sep>save_img(os.path.join(out_dir dataset "full_song" file[:-4]+".jpg") spec_song=spec_song.T)<block_end><block_end><block_end><def_stmt>main config num<block_start>in_dir=config["path"]["preprocessed_path"]<line_sep>out_dir=config["path"]["visualization_path"]<line_sep>dataset=["train" "val" "public_test"]<line_sep>os.makedirs(out_dir exist_ok=<true>)<for_stmt>data dataset<block_start>visualize(data in_dir out_dir num)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>parser=argparse.ArgumentParser()<line_sep>parser.add_argument("--config" type=str required=<false> default="config/preprocess.yaml" help="path to preprocess.yaml")<line_sep>parser.add_argument("--indir" type=str required=<false> help="path to input")<line_sep>parser.add_argument("--outdir" type=str required=<false> help="path to output")<line_sep>parser.add_argument("--num" type=int required=<false> default=5 help="num of samples")<line_sep>args=parser.parse_args()<line_sep>config=yaml.load(open(args.config "r") 
Loader=yaml.FullLoader)<if_stmt>args.indir<is><not><none><block_start>config["path"]["preprocessed_path"]=args.indir<block_end><if_stmt>args.outdir<is><not><none><block_start>config["path"]["visualization_path"]=args.outdir<block_end>main(config num=args.num)<block_end>
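# Typical invocation (the script name is illustrative):
#   python visualize.py --config config/preprocess.yaml --num 10
# This dumps up to 10 randomly sampled hum/song spectrogram pairs per split as .jpg
# images under the configured visualization_path.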
|
# Copyright 2019 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define metadata constants."""<line_sep>LABEL_COLUMN='labelArray'<line_sep>KEY_COLUMN='fullVisitorId'<line_sep># columns to omit from model features
NON_FEATURE_COLUMNS=[LABEL_COLUMN KEY_COLUMN]<line_sep>NUM_INTERVALS=4# number of bounded churn duration intervals
SEED=123<line_sep>
|
"""Tests for the violations.Error class."""<import_stmt>pytest<import_stmt>collections<import_stmt>textwrap<import_from_stmt>pydocstyle.violations Error<line_sep>MockDefinition=collections.namedtuple('MockDefinition' ['source' 'start'])<def_stmt>test_message_without_context <block_start>"""Test a simple error message without parameters."""<line_sep>error=Error('CODE' 'an error' <none>)<assert_stmt>error.message<eq>'CODE: an error'<block_end><def_stmt>test_message_with_context <block_start>"""Test an error message with parameters."""<line_sep>error=Error('CODE' 'an error' 'got {}' 0)<assert_stmt>error.message<eq>'CODE: an error (got 0)'<block_end><def_stmt>test_message_with_insufficient_parameters <block_start>"""Test an error message with invalid parameter invocation."""<line_sep>error=Error('CODE' 'an error' 'got {}')<with_stmt>pytest.raises(IndexError)<block_start><assert_stmt>error.message<block_end><block_end><def_stmt>test_lines <block_start>"""Test proper printing of source lines, including blank line trimming."""<line_sep>error=Error('CODE' 'an error' <none>)<line_sep>definition=MockDefinition(source=['def foo():\n' ' """A docstring."""\n' '\n' ' pass\n' '\n' '\n'] start=424)<line_sep>error.set_context(definition <none>)<line_sep>print(error.lines)<assert_stmt>error.lines<eq>textwrap.dedent('''\
424: def foo():
425: """A docstring."""
426:
427: pass
''')<block_end>
|
<import_stmt>FWCore.ParameterSet.Config<as>cms<line_sep>pfTauBenchmarkElecRejection=cms.EDAnalyzer("PFTauElecRejectionBenchmarkAnalyzer" OutputFile=cms.untracked.string('tauBenchmarkElecRejection.root') InputTruthLabel=cms.InputTag('generatorSmeared') BenchmarkLabel=cms.string('PFTauElecRejection') minRecoPt=cms.double(15.0) maxRecoAbsEta=cms.double(2.5) minMCPt=cms.double(10.0) maxMCAbsEta=cms.double(2.5) maxDeltaR=cms.double(0.3) PFTauProducer=cms.InputTag('shrinkingConePFTauProducer') PFTauDiscriminatorByIsolationProducer=cms.InputTag('shrinkingConePFTauDiscriminationByIsolation') PFTauDiscriminatorAgainstElectronProducer=cms.InputTag('shrinkingConePFTauDiscriminationAgainstElectron') ApplyEcalCrackCut=cms.bool(<true>) GenMatchObjectLabel=cms.string('tau')# match with hadronic 'tau' or electron "e"
)<line_sep>
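# CMSSW configuration note: cms.EDAnalyzer names the C++ analyzer module and binds its
# parameter set; cms.InputTag parameters reference upstream event products, while
# untracked parameters (e.g. OutputFile) are excluded from provenance tracking.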
|
_base_=['./classification_dynamic.py' '../_base_/backends/onnxruntime.py']<line_sep>
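# MMDeploy composes configs through _base_ inheritance: this file simply merges the
# dynamic-shape classification task settings with the ONNX Runtime backend defaults.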
|
<import_stmt>os<import_from_stmt>django.conf settings<import_from_stmt>django.core.management.base BaseCommand<import_from_stmt>django.contrib.auth.models User<import_from_stmt>pyjobs.profiler.models ProfilerData<import_from_stmt>django.db IntegrityError<import_from_stmt>linkedin_scraper Person actions<import_from_stmt>pprint pprint<import_from_stmt>selenium webdriver<import_from_stmt>selenium.webdriver.chrome.options Options<import_from_stmt>tqdm tqdm<import_from_stmt>time sleep<def_stmt>driver_factory <block_start>chrome_options=Options()<line_sep>chrome_options.add_argument("--headless")<line_sep>driver=webdriver.Chrome(options=chrome_options)<line_sep>actions.login(driver settings.LINKEDIN_EMAIL settings.LINKEDIN_PASSWORD)<line_sep><return>driver<block_end><class_stmt>Command(BaseCommand)<block_start><def_stmt>handle self *args **options<block_start>driver=driver_factory()<for_stmt>user tqdm(User.objects.filter(profile__linkedin__isnull=<false>).exclude(profile__linkedin__in=["" " " "\n"]))<block_start>user_data={"name":"" "about":"" "experiences":[] "educations":[] "interests":[] }<try_stmt><block_start>person=Person(user.profile.linkedin contacts=[] driver=driver close_on_complete=<false> )<line_sep>user_data["name"]=person.name<line_sep>user_data["about"]=person.about<for_stmt>experience person.experiences<block_start>user_data["experiences"].append({"description":experience.description "position_title":experience.position_title.replace("Nome da empresa\n" "") "duration":experience.duration })<block_end><for_stmt>education person.educations<block_start>user_data["educations"].append({"from_date":education.from_date "to_date":education.to_date "degree":education.degree "company":education.company })<block_end>user_data["interests"]=[interest.title<for>interest person.interests]<line_sep>ProfilerData.objects.get_or_create(user=user linkedin_data=user_data)<block_end><except_stmt>Exception<as>e<block_start><pass><block_end><block_end>driver.close()<block_end><block_end>
|
<import_stmt>abc<import_stmt>typing<import_stmt>weakref<import_from_stmt>magma.compatibility IntegerTypes<class_stmt>Ref<block_start>@abc.abstractmethod<def_stmt>__str__ self<block_start><raise>NotImplementedError()<block_end><def_stmt>__repr__ self<block_start><return>self.qualifiedname()<block_end>@abc.abstractmethod<def_stmt>qualifiedname self sep="."<block_start><raise>NotImplementedError()<block_end>@abc.abstractmethod<def_stmt>anon self<block_start><raise>NotImplementedError()<block_end><def_stmt>parent self<block_start><return>self<block_end><def_stmt>root self<arrow>typing.Optional['Ref']<block_start>parent=self.parent()<if_stmt>parent<is>self<block_start><return>self<block_end><return>parent.root()<block_end><block_end><class_stmt>AnonRef(Ref)<block_start><def_stmt>__init__ self<block_start>self.name=<none><block_end><def_stmt>__str__ self<block_start><return>f"AnonymousValue_{id(self)}"<block_end><def_stmt>qualifiedname self sep='.'<block_start><return>f"AnonymousValue_{id(self)}"<block_end><def_stmt>anon self<block_start><return><true><block_end><block_end><class_stmt>NamedRef(Ref)<block_start><def_stmt>__init__ self name value=<none><block_start><if_stmt><not>isinstance(name (str int))<block_start><raise>TypeError("Expected string or int")<block_end>self.name=name<line_sep>self._value=value<if>value<is><none><else>weakref.ref(value)<block_end><def_stmt>__str__ self<block_start><return>self.name<block_end><def_stmt>qualifiedname self sep="."<block_start><return>self.name<block_end><def_stmt>anon self<block_start><return><false><block_end><def_stmt>value self<block_start><return>self._value<if>self._value<is><none><else>self._value()<block_end><block_end><class_stmt>TempNamedRef(NamedRef)<block_start><pass><block_end><class_stmt>InstRef(NamedRef)<block_start><def_stmt>__init__ self inst name<block_start>super().__init__(name)<if_stmt><not>inst<block_start><raise>ValueError(f"Bad inst: {inst}")<block_end>self.inst=inst<block_end><def_stmt>qualifiedname self sep="."<block_start>name=self.name<if_stmt>isinstance(self.name IntegerTypes)# Hack, Hack, Hack!
# NOTE: This is used for verilog instances that don't use named
# port (wired by index instead), so the ports are referred to by
# index instead of name and we use the array indexing syntax to
# represent them
# See mantle's generic verilog target for example use case
<block_start><if_stmt>sep<eq>"."<block_start><return>f"{self.inst.name}[{self.name}]"<block_end><block_end><return>self.inst.name+sep+str(name)<block_end><block_end><class_stmt>LazyInstRef(InstRef)<block_start><def_stmt>__init__ self name<block_start>self.name=name<line_sep>self._inst=<none><block_end>@property<def_stmt>inst self<block_start><if_stmt>self._inst<is><not><none><block_start><return>self._inst<block_end><return>LazyCircuit<block_end><def_stmt>qualifiedname self sep="."<block_start><return>super().qualifiedname(sep)<block_end><def_stmt>set_inst self inst<block_start><if_stmt>self._inst<is><not><none><block_start><raise>Exception("Can only set definition of LazyInstRef once")<block_end>self._inst=inst<block_end><block_end><class_stmt>DefnRef(NamedRef)<block_start><def_stmt>__init__ self defn name<block_start>super().__init__(name)<if_stmt><not>defn<block_start><raise>ValueError(f"Bad defn: {defn}")<block_end>self.defn=defn<block_end><def_stmt>qualifiedname self sep="."<block_start><if_stmt>sep<eq>"."<block_start><return>self.defn.__name__+sep+self.name<block_end><return>self.name<block_end><block_end><class_stmt>LazyCircuit<block_start>name=""<block_end><class_stmt>LazyDefnRef(DefnRef)<block_start><def_stmt>__init__ self name<block_start>self.name=name<line_sep>self._defn=<none><block_end>@property<def_stmt>defn self<block_start><if_stmt>self._defn<is><not><none><block_start><return>self._defn<block_end><return>LazyCircuit<block_end><def_stmt>qualifiedname self sep="."<block_start><return>super().qualifiedname(sep)<block_end><def_stmt>set_defn self defn<block_start><if_stmt>self._defn<is><not><none><block_start><raise>Exception("Can only set definition of LazyDefnRef once")<block_end>self._defn=defn<block_end><block_end><class_stmt>ArrayRef(Ref)<block_start><def_stmt>__init__ self array index<block_start>self.array=array<line_sep>self.index=index<block_end><def_stmt>__str__ self<block_start><return>self.qualifiedname()<block_end><def_stmt>qualifiedname self sep="."<block_start><return>f"{self.array.name.qualifiedname(sep=sep)}[{self.index}]"<block_end><def_stmt>anon self<block_start><return>self.array.name.anon()<block_end><def_stmt>parent self<block_start><return>self.array.name<block_end><block_end><class_stmt>TupleRef(Ref)<block_start><def_stmt>__init__ self tuple index<block_start>self.tuple=tuple<line_sep>self.index=index<block_end><def_stmt>__str__ self<block_start><return>self.qualifiedname()<block_end><def_stmt>qualifiedname self sep="."<block_start><try_stmt><block_start>int(self.index)<line_sep><return>(self.tuple.name.qualifiedname(sep=sep)+"["+str(self.index)+"]")<block_end><except_stmt>ValueError<block_start><return>(self.tuple.name.qualifiedname(sep=sep)+sep+str(self.index))<block_end><block_end><def_stmt>anon self<block_start><return>self.tuple.name.anon()<block_end><def_stmt>parent self<block_start><return>self.tuple.name<block_end><block_end><class_stmt>PortViewRef(Ref)<block_start>"""
Used for values that are connection references to a hierarchical value
(using the view logic)
"""<def_stmt>__init__ self view<block_start>self.view=view<block_end><def_stmt>qualifiedname self sep="."<block_start><return>self.view.port.name.qualifiedname(sep)<block_end><def_stmt>anon self<block_start><return>self.view.port.anon()<block_end><def_stmt>__str__ self<block_start><return>str(self.view.port.name)<block_end><def_stmt>root self<block_start><return>self.view.root()<block_end><block_end><def_stmt>get_ref_inst ref<block_start>"""
If value is part of a port on an instance, return that instance,
otherwise None.
"""<line_sep>root=ref.root()<if_stmt><not>isinstance(root InstRef)<block_start><return><none><block_end><return>root.inst<block_end><def_stmt>get_ref_defn ref<block_start>"""
If value is part of a port on an definition, return that definition,
otherwise None.
"""<line_sep>root=ref.root()<if_stmt><not>isinstance(root DefnRef)<block_start><return><none><block_end><return>root.defn<block_end><def_stmt>is_temp_ref ref<block_start>root=ref.root()<line_sep><return>isinstance(root (TempNamedRef AnonRef))<block_end>
|
<import_from_stmt>typing List Tuple<import_from_stmt>pyrep.objects Dummy<import_from_stmt>pyrep.objects.shape Shape<import_from_stmt>pyrep.objects.proximity_sensor ProximitySensor<import_from_stmt>rlbench.backend.task Task<import_from_stmt>rlbench.backend.conditions DetectedCondition NothingGrasped<class_stmt>PutBooksOnBookshelf(Task)<block_start><def_stmt>init_task self<arrow><none><block_start>self._success_sensor=ProximitySensor('success')<line_sep>self._books=[Shape('book2') Shape('book1') Shape('book0')]<line_sep>self._waypoints_idxs=[5 11 -1]<line_sep>self.register_graspable_objects(self._books)<block_end><def_stmt>init_episode self index:int<arrow>List[str]<block_start>self.register_success_conditions([DetectedCondition(b self._success_sensor)<for>b self._books[:index+1]])<line_sep>self.register_stop_at_waypoint(self._waypoints_idxs[index])<line_sep><return>['put %d books on bookshelf'%(index+1) 'pick up %d books and place them on the top shelf'%(index+1) 'stack %d books up on the top shelf'%(index+1)]<block_end><def_stmt>variation_count self<arrow>int<block_start><return>3<block_end><def_stmt>base_rotation_bounds self<arrow>Tuple[List[float] List[float]]<block_start><return>[0.0 0.0 -3.14/2] [0.0 0.0 3.14/2]<block_end><block_end>
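# Variation v of this task places v+1 books: the success conditions require the
# proximity sensor to detect each of those books, and the scripted demo is truncated
# at the waypoint index matching the book count.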
|
# -*- coding: utf-8 -*-
<import_from_future_stmt> absolute_import<import_from_stmt>itertools groupby<import_from_stmt>vilya.libs.generated Generated<import_from_stmt>vilya.libs.text is_image<import_from_stmt>vilya.models.consts LINECOMMENT_INDEX_EMPTY<import_from_stmt>vilya.models.ngit.hunk Hunk<line_sep>MAX_PATCH_MOD_LINES=2000<line_sep>INVALID_OID=b'0'<times>40<class_stmt>Patch(object)# libgit2 status definition
# GIT_DELTA_ADDED: code = 'A'
# GIT_DELTA_DELETED: code = 'D'
# GIT_DELTA_MODIFIED: code = 'M'
# GIT_DELTA_RENAMED: code = 'R'
# GIT_DELTA_COPIED: code = 'C'
# GIT_DELTA_IGNORED: code = 'I'
# GIT_DELTA_UNTRACKED: code = '?'
# default: code = ' '
<block_start><def_stmt>__init__ self repo diff patch linecomments=[] is_limit_lines=<true><block_start>self.repo=repo<line_sep>self.diff=diff<line_sep>self._patch=patch<line_sep>self._old_file_length=<none><line_sep>self._new_file_length=<none><line_sep># patch sha == diff sha
# FIXME: commit diff old_sha seems to be None
self.old_sha=patch['old_sha']<line_sep>self.new_sha=patch['new_sha']<line_sep># oids # an oid encoded in hex (40 bytes) # invalid oid = '0000...'
self.old_file_sha=patch['old_oid']<line_sep>self.new_file_sha=patch['new_oid']<line_sep>self.status=patch['status']<line_sep>self.old_file_path=patch['old_file_path']<line_sep>self.new_file_path=patch['new_file_path']<line_sep># TODO: remove self.filepath
self.filepath=self.old_file_path<line_sep>self.additions=patch['additions']<line_sep>self.deletions=patch['deletions']<line_sep>self.similarity=patch['similarity']<line_sep>self.binary=patch['binary']<line_sep>self._generated=<none><line_sep># TODO: move to def init_comment_groups
<def_stmt>func_filter l<block_start><if_stmt>l.has_oids<block_start><return>l.from_oid<eq>self.old_file_sha<and>l.to_oid<eq>self.new_file_sha<block_end><else_stmt><block_start><return>l.from_sha<eq>self.new_sha<block_end><block_end>self.linecomments=filter(func_filter linecomments)<line_sep>self.linecomments_has_linenum=[]<line_sep>self.linecomments_has_pos=[]<for_stmt>l self.linecomments<block_start>(self.linecomments_has_pos self.linecomments_has_linenum)[l.has_linenum].append(l)<block_end><if_stmt>is_limit_lines<and>self.additions+self.deletions<g>MAX_PATCH_MOD_LINES<block_start>self.is_toobig=<true><line_sep>self.hunks=[]<block_end><else_stmt><block_start>self.is_toobig=<false><line_sep>self.init_comment_groups()<line_sep>self.init_hunks(patch)<block_end><block_end><def_stmt>init_comment_groups self# TODO: use oids to filter linecomments
<block_start>keyfunc_pos=<lambda>x:x.position<line_sep>keyfunc_line=<lambda>x:x.linenum<line_sep>self.comments_by_pos={}<line_sep>self.comments_by_line={}<if_stmt>self.linecomments_has_pos<block_start>self.linecomments_has_pos.sort(key=keyfunc_pos)<line_sep>self.comments_by_pos=dict((k list(v))<for>k,v groupby(self.linecomments_has_pos key=keyfunc_pos))<block_end><if_stmt>self.linecomments_has_linenum<block_start>self.linecomments_has_linenum.sort(key=keyfunc_line)<line_sep>self.comments_by_line=dict((k list(v))<for>k,v groupby(self.linecomments_has_linenum key=keyfunc_line))<block_end><block_end># TODO: refactor this! T^T
<def_stmt>init_hunks self raw_patch<block_start>''' init Hunks, add extra_contexts when there're linecomments not
involved '''<line_sep>EXTRE_CONTEXT_LINES=3<def_stmt>expand_hunk hunk last_hunk_old_end type MAX_LINE_NUM=99999 MIN_LINE_NUM=0<block_start><if_stmt>type<eq>'up'<block_start>min_old_not_involved=MAX_LINE_NUM<for_stmt>linecomment self.linecomments_has_linenum<block_start>not_involved=<false><line_sep>old,new=linecomment.linenum<if_stmt>old<ne>LINECOMMENT_INDEX_EMPTY<and>new<ne>LINECOMMENT_INDEX_EMPTY<block_start>not_involved=last_hunk_old_end<l>old<and>old<l>hunk.old_start<block_end><if_stmt>not_involved<block_start>min_old_not_involved=min(min_old_not_involved old)<block_end><block_end><if_stmt>min_old_not_involved<ne>MAX_LINE_NUM<block_start>contexts=self.get_contexts(min_old_not_involved-EXTRE_CONTEXT_LINES hunk.old_start)<if_stmt>contexts<block_start>hunk.expand_top_contexts(contexts)<block_end><block_end><block_end><elif_stmt>type<eq>'bottom'<block_start>max_old_not_involved=MIN_LINE_NUM<for_stmt>linecomment self.linecomments_has_linenum<block_start>not_involved=<false><line_sep>old,new=linecomment.linenum<if_stmt>old<ne>LINECOMMENT_INDEX_EMPTY<and>new<ne>LINECOMMENT_INDEX_EMPTY<block_start>not_involved=last_hunk_old_end<l>old<block_end><if_stmt>not_involved<block_start>max_old_not_involved=max(max_old_not_involved old)<block_end><block_end><if_stmt>max_old_not_involved<ne>MIN_LINE_NUM<block_start>contexts=self.get_contexts(hunk.old_end+1 max_old_not_involved+1+EXTRE_CONTEXT_LINES)<if_stmt>contexts<block_start>hunk.expand_bottom_contexts(contexts)<block_end><block_end><block_end><block_end>self.hunks=[Hunk(self h)<for>h raw_patch['hunks']]<if_stmt><not>self.hunks<block_start><return><block_end># TODO: further split by pull/new, pull/discussion, compare, etc.?
<if_stmt>self.linecomments_has_linenum<and>self.repo.provide('project')<block_start>last_hunk_old_end=0<for_stmt>hunk self.hunks<block_start>expand_hunk(hunk last_hunk_old_end type='up')<line_sep>last_hunk_old_end=hunk.old_end<block_end>expand_hunk(hunk last_hunk_old_end type='bottom')<block_end><if_stmt>self.repo.provide('project')<block_start>first_hunk=self.hunks[0]<line_sep>last_hunk=self.hunks[-1]<line_sep># add top_hunk
<if_stmt>first_hunk.old_start<g>EXTRE_CONTEXT_LINES+1<block_start>contexts=self.get_contexts(1 EXTRE_CONTEXT_LINES+1)<if_stmt>contexts<block_start>top_hunk=Hunk(self old_start=1 new_start=1 old_lines=EXTRE_CONTEXT_LINES new_lines=EXTRE_CONTEXT_LINES contexts=contexts)<line_sep>self.hunks.insert(0 top_hunk)<block_end><block_end><elif_stmt>first_hunk.old_start<g>1<block_start>contexts=self.get_contexts(1 first_hunk.old_start)<if_stmt>contexts<block_start>first_hunk.expand_top_contexts(contexts)<block_end><block_end># add bottom_hunk
<if_stmt>last_hunk.old_end+EXTRE_CONTEXT_LINES<l>self.old_file_length<block_start>bottom_hunk_old_start=self.old_file_length-EXTRE_CONTEXT_LINES+1# noqa
bottom_hunk_new_start=self.new_file_length-EXTRE_CONTEXT_LINES+1# noqa
contexts=self.get_contexts(bottom_hunk_old_start self.old_file_length+1)<if_stmt>contexts<block_start>bottom_hunk=Hunk(self old_start=bottom_hunk_old_start new_start=bottom_hunk_new_start old_lines=EXTRE_CONTEXT_LINES new_lines=EXTRE_CONTEXT_LINES contexts=contexts)<line_sep>self.hunks.append(bottom_hunk)<block_end><block_end><elif_stmt>last_hunk.old_end<l>self.old_file_length<block_start>contexts=self.get_contexts(last_hunk.old_end+1 self.old_file_length+1)<if_stmt>contexts<block_start>last_hunk.expand_bottom_contexts(contexts)<block_end><block_end><block_end># update hunks
pos=1<for_stmt>i,hunk enumerate(self.hunks)<block_start>hunk.start_pos=pos<line_sep>pos<augadd>hunk.n_lines+1# +1 means hunk_heading
<if_stmt>i<g>0<block_start>last=self.hunks[i-1]<line_sep>hunk.skipped_old_start=last.old_end+1<line_sep>hunk.skipped_new_start=last.new_end+1<line_sep>hunk.skipped_old_end=hunk.old_start-1<line_sep>hunk.skipped_new_end=hunk.new_start-1<block_end><block_end><block_end><def_stmt>get_contexts self start end<block_start>''' get patch's context lines in [start, end) '''<if_stmt>self.old_file_sha<eq>INVALID_OID<block_start>ref=self.new_sha<block_end><elif_stmt>self.new_file_sha<eq>INVALID_OID<block_start>ref=self.old_sha<block_end><else_stmt><block_start>ref=self.old_sha<or>self.new_sha<block_end>contexts=self.repo.get_contexts(ref self.old_file_path start end)<line_sep><return>contexts<block_end>@property<def_stmt>old_file_length self<block_start><if_stmt>self._old_file_length<is><not><none><block_start><return>self._old_file_length<block_end><if_stmt>self.old_file_sha<eq>INVALID_OID<block_start>self._old_file_length=0<line_sep><return>self._old_file_length<block_end>ref=self.old_sha<or>self.new_sha<line_sep>self._old_file_length=self.repo.get_file_n_lines(ref self.old_file_path)<line_sep><return>self._old_file_length<block_end>@property<def_stmt>new_file_length self<block_start><if_stmt>self._new_file_length<is><not><none><block_start><return>self._new_file_length<block_end><if_stmt>self.new_file_sha<eq>INVALID_OID<block_start>self._new_file_length=0<line_sep><return>self._new_file_length<block_end>ref=self.new_sha<line_sep>self._new_file_length=self.repo.get_file_n_lines(ref self.new_file_path)<line_sep><return>self._new_file_length<block_end>@property<def_stmt>image self<block_start><return>is_image(self.old_file_path)<block_end>@property<def_stmt>generated self# FIXME: generated has performance issues
<block_start><if_stmt>self._generated<is><not><none><block_start><return>self._generated<block_end><def_stmt>get_data <block_start>data=''<try_stmt><block_start><if_stmt>self.status<eq>'D'<block_start>blob=self.repo.get_file(self.old_sha self.old_file_path)<block_end><else_stmt><block_start>blob=self.repo.get_file(self.new_sha self.new_file_path)<block_end><if_stmt>blob<block_start>data=blob.data<block_end><block_end><except_stmt># very first commit ??
<block_start>data=''<block_end><return>data<block_end>generated=Generated.is_generated(self.new_file_path get_data)<line_sep>self._generated=generated<line_sep><return>generated<block_end>#@property
# def n_lines(self):
# return sum([hunk.n_lines for hunk in self.hunks])
# TODO: remove this
@property<def_stmt>content self<block_start>content=[]<for_stmt>h self.hunks<block_start>content.append(h.heading)<for_stmt>l h.lines<block_start>content.append(l)<block_end><block_end><return>content<block_end># TODO: rewrite
# FIXME: more explanation
<def_stmt>smart_slice self num<block_start>content=self.content[:num+1]<if_stmt>len(content)<g>15<block_start>tip_pos=0<for_stmt>idx,line enumerate(content)<block_start><if_stmt>line.old<is><none><and>line.new<is><none><block_start>tip_pos=idx<block_end><block_end>content=content[tip_pos:]<if_stmt>len(content)<g>25<block_start><return>content[-25:]<block_end><else_stmt><block_start><return>content<block_end><block_end><return>content<block_end><block_end>
|
#
# Copyright 2019 <NAME>, <NAME>, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ======================================================================
"""
Authors: <NAME>
OmniNet transformer sub layers
"""<import_stmt>torch<import_stmt>torch.nn<as>nn<import_stmt>torch.nn.functional<as>F<import_stmt>numpy<as>np<class_stmt>ScaledDotProductAttention(nn.Module)<block_start>''' Scaled Dot-Product Attention '''<def_stmt>__init__ self temperature attn_dropout=0.1<block_start>super().__init__()<line_sep>self.temperature=temperature<line_sep>self.dropout=nn.Dropout(attn_dropout)<line_sep>self.softmax=nn.Softmax(dim=2)<block_end><def_stmt>forward self q k v mask=<none> k_gate=<none><block_start>attn=torch.bmm(q k.transpose(1 2))<line_sep>attn=attn/self.temperature<if_stmt>k_gate<is><not><none><block_start>attn=torch.mul(attn k_gate)<block_end><if_stmt>mask<is><not><none><block_start>attn=attn.masked_fill(mask -np.inf)<block_end>attn=self.softmax(attn)<line_sep>attn=self.dropout(attn)<line_sep>output=torch.bmm(attn v)<line_sep><return>output attn<block_end><block_end><class_stmt>MultiHeadAttention(nn.Module)<block_start>''' Multi-Head Attention module '''<def_stmt>__init__ self n_head d_model d_k d_v dropout=0.1<block_start>super().__init__()<line_sep>self.n_head=n_head<line_sep>self.d_k=d_k<line_sep>self.d_v=d_v<line_sep>self.w_qs=nn.Linear(d_model n_head<times>d_k)<line_sep>self.w_ks=nn.Linear(d_model n_head<times>d_k)<line_sep>self.w_vs=nn.Linear(d_model n_head<times>d_v)<line_sep>nn.init.normal_(self.w_qs.weight mean=0 std=np.sqrt(2.0/(d_model+d_k)))<line_sep>nn.init.normal_(self.w_ks.weight mean=0 std=np.sqrt(2.0/(d_model+d_k)))<line_sep>nn.init.normal_(self.w_vs.weight mean=0 std=np.sqrt(2.0/(d_model+d_v)))<line_sep>self.attention=ScaledDotProductAttention(temperature=np.power(d_k 0.5))<line_sep>self.layer_norm=nn.LayerNorm(d_model)<line_sep>self.fc=nn.Linear(n_head<times>d_v d_model)<line_sep>nn.init.xavier_normal_(self.fc.weight)<line_sep>self.dropout=nn.Dropout(dropout)<block_end><def_stmt>forward self q k v mask=<none> k_gate=<none><block_start>d_k,d_v,n_head=self.d_k self.d_v self.n_head<line_sep>sz_b,len_q,_=q.size()<line_sep>sz_b,len_k,_=k.size()<line_sep>sz_b,len_v,_=v.size()<if_stmt>k_gate<is><not><none><block_start>k_gate=k_gate.transpose(0 1)<line_sep>k_gate=k_gate.reshape(n_head<times>sz_b len_q len_v)<block_end>residual=q<line_sep>q=self.w_qs(q).view(sz_b len_q n_head d_k)<line_sep>k=self.w_ks(k).view(sz_b len_k n_head d_k)<line_sep>v=self.w_vs(v).view(sz_b len_v n_head d_v)<line_sep>#A Weighting score for the keys is provided
q=q.permute(2 0 1 3).contiguous().view(-1 len_q d_k)# (n*b) x lq x dk
k=k.permute(2 0 1 3).contiguous().view(-1 len_k d_k)# (n*b) x lk x dk
v=v.permute(2 0 1 3).contiguous().view(-1 len_v d_v)# (n*b) x lv x dv
<if_stmt>mask<is><not><none><block_start>mask=mask.repeat(n_head 1 1)# (n*b) x .. x ..
<block_end>output,attn=self.attention(q k v mask=mask k_gate=k_gate)<line_sep>output=output.view(n_head sz_b len_q d_v)<line_sep>output=output.permute(1 2 0 3).contiguous().view(sz_b len_q -1)# b x lq x (n*dv)
output=self.dropout(self.fc(output))<line_sep>output=self.layer_norm(output+residual)<line_sep>attn=attn.view(n_head sz_b len_q len_v).transpose(0 1)<line_sep><return>output attn<block_end><block_end><class_stmt>PositionwiseFeedForward(nn.Module)<block_start>''' A two-feed-forward-layer module '''<def_stmt>__init__ self d_in d_hid dropout=0.1<block_start>super().__init__()<line_sep>self.w_1=nn.Linear(d_in d_hid)# position-wise
self.w_2=nn.Linear(d_hid d_in)# position-wise
self.layer_norm=nn.LayerNorm(d_in)<line_sep>self.dropout=nn.Dropout(dropout)<block_end><def_stmt>forward self x<block_start>residual=x<line_sep>output=x<line_sep>output=self.w_2(F.relu(self.w_1(output)))<line_sep>output=self.dropout(output)<line_sep>output=self.layer_norm(output+residual)<line_sep><return>output<block_end><block_end>
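# The attention above is standard scaled dot-product attention, softmax(q @ k^T / temperature) @ v
# with temperature = sqrt(d_k); the optional k_gate tensor multiplies the pre-softmax
# scores, letting a caller reweight individual keys per head.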
|
<import_stmt>cv2#needed for histogram plotting and preview window display
#need to build and install opencv version 3 to support frame blending
<import_stmt>threading<import_stmt>struct<import_stmt>logging<import_stmt>config<import_stmt>numpy<as>np<import_stmt>io<import_from_stmt>time sleep<import_from_stmt>fractions Fraction<import_from_stmt>PyQt5 QtCore<as>qtcore<import_from_stmt>PyQt5 QtGui<line_sep>mask_pct=.8#this determines what (center) portion of the image is used for histogram calculations. (Avoid using black borders)
blender=cv2.createMergeMertens()#COMMENT OUT IF NOT USING opencv version 3+ and bracketing
<def_stmt>thefilename i suffix=""<block_start>fname=str(config.folder)+"/img%.5d%s.jpg"%(i suffix)<line_sep>logging.debug(fname)<line_sep><return>fname<block_end><def_stmt>subDims amt fraction#sub dimensions: Pass it a width or height and get the origin and width/height of the center portion
<block_start><return>(int(amt<times>(1-fraction)/2) int(amt<times>(1+fraction)/2))<block_end><def_stmt>getMask img fraction<block_start>mask=np.zeros(img.shape[:2] np.uint8)<line_sep>(x x2 y y2)=subDims(img.shape[0] fraction)+subDims(img.shape[1] fraction)<line_sep>mask[x:x2 y:y2]=255<line_sep><return>mask<block_end><def_stmt>saveable_img img<block_start><return>np.array(img dtype=float)<times>float(255)<block_end><def_stmt>adjustable_img img<block_start><return>cv2.convertScaleAbs(img alpha=255)<line_sep>#return np.array(img*float(250),dtype=np.uint8)
<block_end><def_stmt>saveable_255_img img<block_start><return>np.array(img dtype=float)<block_end><def_stmt>quickBrightness img#make a thumbnail, convert to grayscale, get avg value
<block_start>brt=cv2.mean(cv2.cvtColor(cv2.resize(img (120 90)) cv2.COLOR_BGR2GRAY))<line_sep>brt=int(brt[0]<times>100)<line_sep>logging.debug("Brt="+str(brt))<line_sep><return>brt<block_end><def_stmt>correctLens img w h<block_start>distCoeff=np.zeros((4 1) np.float64)<line_sep># TODO: add your coefficients here!
k1=config.lensCorrValue# negative to remove barrel distortion
k2=0.0<line_sep>p1=0.0<line_sep>p2=0.0<line_sep>distCoeff[0 0]=k1<line_sep>distCoeff[1 0]=k2<line_sep>distCoeff[2 0]=p1<line_sep>distCoeff[3 0]=p2<line_sep># assume unit matrix for camera
cam=np.eye(3 dtype=np.float32)<line_sep>cam[0 2]=w/2.0# define center x
cam[1 2]=h/2.0# define center y
cam[0 0]=100.# define focal length x
cam[1 1]=100.# define focal length y
# here the undistortion will be computed
dst=cv2.undistort(img cam distCoeff)<line_sep><return>dst<block_end><def_stmt>adjustLevels img<block_start>h,w=img.shape[:2]<line_sep>#perform lens correction if selected
logging.debug("AdjustingLevels")<line_sep>logging.debug(np.shape(img))<line_sep>logging.debug(np.shape(config.flatFieldImg))<if_stmt>config.antiVignetting<and>(len(config.flatFieldImg)<g>0)<and>(np.shape(img)[0]<eq>np.shape(config.flatFieldImg)[0])<block_start>img=flatFieldCorrection(img)<block_end><if_stmt>config.lensCorr<block_start>img=correctLens(img w h)<block_end>#perform rotation if selected
<if_stmt>config.rotation<block_start>M=cv2.getRotationMatrix2D((w/2 h/2) config.rotationValue 1)<line_sep>img=cv2.warpAffine(img M (w h))<block_end>#perform cropping if selected
<if_stmt>config.cropping<block_start>img=img[config.cropT:h-config.cropB config.cropL:w-config.cropR]<block_end><return>img<block_end># cv2.LUT(img, config.lut)
#def fadj(img, ff):
# img/ff*exp((max(0,img-128)+128)/256)
<def_stmt>flatFieldCorrection img<block_start>logging.debug("Performing Flat Field Correction")<line_sep>logging.debug(np.shape(img))<line_sep>logging.debug(img[1000][1000])<line_sep>img=img/config.flatFieldImg<line_sep>img=np.clip(img 0 255)<line_sep>img=img.astype(np.uint8)<line_sep><return>img<block_end><def_stmt>genFlatFieldImg <block_start>logging.debug("Generating Flat Field Image")<line_sep>avgimg=np.mean(config.calibrationImages axis=0)<line_sep>config.flatFieldImg=np.clip(np.true_divide(avgimg np.mean(avgimg)) .5 2.0)<line_sep>#config.flatFieldImg=np.clip(np.true_divide(config.calibrationImages[0],np.mean(config.calibrationImages[0])), .8, 1.2)
<block_end><class_stmt>imgThread(qtcore.QThread)#(threading.Thread):
<block_start>updateFrameNumSig=qtcore.pyqtSignal(int)<line_sep>updateSSSig=qtcore.pyqtSignal(int int int)<line_sep>updateGainsSig=qtcore.pyqtSignal(int int)<line_sep>updateStatusSig=qtcore.pyqtSignal(str)<line_sep>plotHistogramSig=qtcore.pyqtSignal(list np.ndarray float)<line_sep>displayWashoutsSig=qtcore.pyqtSignal(list float float)<line_sep>displayImgSig=qtcore.pyqtSignal(np.ndarray str)<def_stmt>__init__ self connection app<block_start>qtcore.QThread.__init__(self parent=app)<line_sep>self.threadID=1<line_sep>self.name="ImgThread"<line_sep>self.conn=connection<block_end><def_stmt>updateFrameNum self i<block_start>self.updateFrameNumSig.emit(i)<line_sep>#self.emit(qtcore.SIGNAL("updateFrameNum(int)"), i)
<block_end><def_stmt>updateSS self ss again dgain<block_start>self.updateSSSig.emit(ss again dgain)<line_sep>#self.emit(qtcore.SIGNAL("updateSS(int, int, int)"), ss, again, dgain)
<block_end><def_stmt>updateGains self r b<block_start>self.updateGainsSig.emit(r b)<line_sep>#self.emit(qtcore.SIGNAL("updateGains(int, int)"), r, b)
<block_end><def_stmt>updateStatus self status<block_start>self.updateStatusSig.emit(status)<line_sep>#self.emit(qtcore.SIGNAL("updateStatus(QString)"), status)
<block_end><def_stmt>blendImgList self imList show fnum<block_start>logging.debug("Starting blend Thread")<line_sep>cvimg=blender.process(imList)<line_sep>logging.debug("Done Blending")<line_sep>cvimg=adjustLevels(adjustable_img(cvimg))<line_sep>#cvimg=adjustable_img(cvimg)
title=thefilename(fnum)<if_stmt>config.wait_for_test<block_start>title="TEST"<block_end><else_stmt><block_start>cv2.imwrite(thefilename(fnum) cvimg [int(cv2.IMWRITE_JPEG_QUALITY) 97])<block_end><if_stmt>config.wait_for_test<or>show<block_start>self.showImage(cvimg title)<block_end>self.plothist(cvimg)<block_end>#,True)
<def_stmt>plothist self img fScale=<false>#perhaps this should be called in a separate thread?
<block_start>bins=256<line_sep>rangetop=1.0<if>fScale<else>256<line_sep>imgsize=img.shape<line_sep>mask=getMask(img mask_pct)<line_sep>bwimg=cv2.cvtColor(img cv2.COLOR_BGR2GRAY)<line_sep>bhist=cv2.calcHist([bwimg] [0] mask [256] [0 rangetop])<line_sep>bhist[0]=0<line_sep>over=[0 0 0 0]<line_sep>over[3]=sum(bhist[:10])<line_sep>px=imgsize[0]<times>imgsize[1]<times>mask_pct<times>mask_pct<line_sep>ylim=px/128#arbitrary value to keep y limit consistent and reasonable
hists=[]<for_stmt>i range(0 3)<block_start>hist=cv2.calcHist([img] [i] mask [256] [0 rangetop])<line_sep>over[i]=sum(hist[252:])<line_sep>hists.append(hist)<block_end>avg=int(cv2.mean(bwimg)[0]<times>100.0/rangetop)<line_sep>#logging.debug("Sending Signal")
self.plotHistogramSig.emit(hists bhist px)<line_sep>self.displayWashoutsSig.emit(over px avg)<line_sep>#self.emit(qtcore.SIGNAL("plotHistogram(PyQt_PyObject, PyQt_PyObject, float)"), hists, bhist, px)
#self.emit(qtcore.SIGNAL("displayWashouts(PyQt_PyObject, float, float)"), over, px, avg)
<block_end><def_stmt>showImage self im title="Image"<block_start>im2=cv2.cvtColor(im cv2.COLOR_RGB2BGR)<line_sep>self.displayImgSig.emit(im2 title)<line_sep>#self.emit(qtcore.SIGNAL("displayImg(PyQt_PyObject, QString)"), im2, title)
<block_end><def_stmt>run self<block_start>logging.debug("Imgthread running fn")<line_sep>image_stream=io.BytesIO()<line_sep>imglist=[]<line_sep>pframe=0#counter for preview frames, so we only generate histogram 1 in 10 times
<try_stmt><block_start><while_stmt><not>config.exitFlag<block_start>logging.debug("looping")<while_stmt>config.prevOn<or>config.captureOn<block_start>logging.debug("waiting on img")<line_sep>imgflag=self.conn.read(1)<line_sep>imgflag=imgflag.decode("utf-8")<line_sep>logging.debug(imgflag)<if_stmt>imgflag<eq>"q"<block_start><break><block_end><if_stmt>imgflag<eq>"f"<block_start>ss=struct.unpack('<L' self.conn.read(struct.calcsize('<L')))[0]<line_sep>again=struct.unpack('<L' self.conn.read(struct.calcsize('<L')))[0]<line_sep>dgain=struct.unpack('<L' self.conn.read(struct.calcsize('<L')))[0]<line_sep>logging.debug("SS:"+str(ss))<line_sep>self.updateSS(ss again dgain)<block_end><elif_stmt>imgflag<eq>"g"<block_start>r=struct.unpack('<L' self.conn.read(struct.calcsize('<L')))[0]<line_sep>b=struct.unpack('<L' self.conn.read(struct.calcsize('<L')))[0]<line_sep>logging.debug("Gains "+str(r)+" "+str(b))<line_sep>self.updateGains(r b)<block_end><elif_stmt>imgflag<eq>"t"<block_start>config.prevOn=<false><line_sep>self.showImage(cvimg2 "TEST")<line_sep><break><block_end><else_stmt><block_start><if_stmt>imgflag<eq>"s"<or>imgflag<eq>"b"<block_start>self.updateFrameNum(config.frame_number)<block_end>image_len=struct.unpack('<L' self.conn.read(struct.calcsize('<L')))[0]<line_sep>logging.debug("Image:"+str(image_len))<if_stmt><not>image_len<block_start>logging.debug("Quit Signal (0 Length image) received from client")<line_sep><break><block_end>logging.debug(imgflag+str(image_len))<line_sep>image_stream.write(self.conn.read(image_len))<line_sep>image_stream.seek(0)<if_stmt>imgflag<eq>"s"#single image
<block_start>cvimg=cv2.imdecode(np.fromstring(image_stream.read(image_len) dtype=np.uint8) 1)<if_stmt>config.flatFieldCalibration<eq><true><block_start>config.calibrationImages.append(cvimg)<line_sep>logging.debug("Collecting CalibrationData")<if_stmt>len(config.calibrationImages)<g>4<block_start>genFlatFieldImg()<line_sep>config.flatFieldCalibration=<false><block_end><continue><block_end>cvimg2=adjustLevels(cvimg)<line_sep>#tmp=image_stream.read(image_len)
<if_stmt>config.wait_for_test<block_start>self.showImage(cvimg2 "TEST")<block_end>#else:
# process_for_brightness(cvimg)
self.plothist(cvimg2)<line_sep>#logging.debug("Single Shown")
<if_stmt><not>config.wait_for_test<block_start>filename=thefilename(config.frame_number)<with_stmt>open(filename 'w')<as>imfile<block_start>self.showImage(cvimg2 filename)<line_sep>cv2.imwrite(filename cvimg2 [int(cv2.IMWRITE_JPEG_QUALITY) 97])<block_end>self.updateFrameNum(config.frame_number)<line_sep>config.frame_number<augadd>1<line_sep>#logging.debug("Single Written to "+filename)
<block_end><block_end><if_stmt>imgflag<eq>"p"#preview image
<block_start>cvimg=cv2.imdecode(np.fromstring(image_stream.read(image_len) dtype=np.uint8) 1)<line_sep>logging.debug(cvimg.dtype)<line_sep>cvimg2=adjustLevels(cvimg)<line_sep>self.showImage(cvimg2 "Live Preview")<line_sep>pframe<augadd>1<if_stmt>pframe<g>10<block_start>pframe=0<line_sep>self.plothist(cvimg2)<block_end><block_end><if_stmt>imgflag<eq>"a"#one of several blended images
#save image data in variable, dont increment or update display
<block_start>logging.debug('start a')<line_sep>imglist.append(cv2.imdecode(np.fromstring(image_stream.read(image_len) dtype=np.uint8) 1))<block_end><if_stmt>imgflag<eq>"b"#the last of several blended images
#logging.debug('start read final')
<block_start>imglist.append(cv2.imdecode(np.fromstring(image_stream.read(image_len) dtype=np.uint8) 1))<line_sep>self.updateStatus(str(config.frame_number)+' '+' '.join(map(str list(map(quickBrightness imglist)))))<line_sep>thd=threading.Thread(target=self.blendImgList args=(imglist[:] <true> config.frame_number))#colon in brackets makes new copy of list
thd.start()#tried this using multiprocessing, but it hung when processing merge_mertens
imglist=[]<if_stmt><not>config.wait_for_test<block_start>config.frame_number<augadd>1<block_end><block_end><block_end>image_stream.seek(0)<line_sep>image_stream.truncate()<block_end>#logging.debug("Waiting for prevOn...")
sleep(1)<block_end><block_end><finally_stmt><block_start>logging.debug("Thread closing %.1d"%config.exitFlag)<line_sep>cv2.destroyAllWindows()<line_sep>self.conn.close()<block_end><block_end><block_end>
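# Note on the 'a'/'b' flags: frames of an exposure-bracketed burst are accumulated and
# then fused with OpenCV's MergeMertens exposure fusion (which needs no tone mapping)
# in a background thread, so capture can continue while the merged frame is written out.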
|
# -*- coding: utf-8 -*-
#
# Copyright 2020 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_stmt>numpy<as>np<import_stmt>pytest<import_from_stmt>stellargraph IndexedArray<def_stmt>test_indexed_array_empty <block_start>frame=IndexedArray()<assert_stmt>frame.index<eq>range(0)<line_sep>np.testing.assert_array_equal(frame.values np.empty((0 0)))<block_end><def_stmt>test_indexed_array_non_empty <block_start>list_ids=["a" "b" "c"]<line_sep>array_ids=np.array([10 -1 2])<line_sep>range_ids=range(106 100 -2)<line_sep>values=np.random.rand(3 4 5)<line_sep># this test uses 'is' checks to validate that there's no copying of data
frame=IndexedArray(values)<assert_stmt>frame.index<eq>range(3)<assert_stmt>frame.values<is>values<line_sep>frame=IndexedArray(values index=list_ids)<assert_stmt>frame.index<is>list_ids<assert_stmt>frame.values<is>values<line_sep>frame=IndexedArray(values index=array_ids)<assert_stmt>frame.index<is>array_ids<assert_stmt>frame.values<is>values<line_sep>frame=IndexedArray(values index=range_ids)<assert_stmt>frame.index<is>range_ids<assert_stmt>frame.values<is>values<block_end><def_stmt>test_indexed_array_invalid <block_start>values=np.random.rand(3 4 5)<with_stmt>pytest.raises(TypeError match="values: expected a NumPy array .* found int")<block_start>IndexedArray(123)<block_end><with_stmt>pytest.raises(ValueError match=r"values: expected an array with shape .* found shape \(\) of length 0" )<block_start>IndexedArray(np.zeros(()))<block_end><with_stmt>pytest.raises(ValueError match=r"values: expected an array with shape .* found shape \(123,\) of length 1" )<block_start>IndexedArray(np.zeros(123))<block_end># check that the index `len`-failure works with or without index inference
<with_stmt>pytest.raises(TypeError match="index: expected a sequence .* found int")<block_start>IndexedArray(index=0)<block_end><with_stmt>pytest.raises(TypeError match="index: expected a sequence .* found int")<block_start>IndexedArray(values index=123)<block_end><with_stmt>pytest.raises(ValueError match="values: expected the index length 2 .* found 3 rows")<block_start>IndexedArray(values index=range(0 3 2))<block_end><block_end>
|
# Tests a concurrent server, by connecting multiple clients sending pre-set
# messages, and comparing the echoes with expected values.
#
# Run with -h for full usage.
#
# <NAME> [http://eli.thegreenplace.net]
# This code is in the public domain.
<import_stmt>argparse<import_stmt>itertools<import_stmt>logging<import_stmt>queue<import_stmt>socket<import_stmt>subprocess<import_stmt>sys<import_stmt>threading<import_stmt>time<def_stmt>server_runner path args stop_event<block_start>"""Runs the server as a subprocess until stop is requested.
Run this function in a separate thread!
path is the path to the server to run, with the given args. If 'path' ends
with .py, a python interpreter is prepended. The args have to be a (possibly
empty) iterable.
stop_event is a threading.Event object; when it's set, the subprocess is
killed and this function returns.
"""<line_sep>runcmd=['python3.6' '-u' path]<if>path.endswith('.py')<else>[path]<line_sep>runcmd.extend(args)<line_sep>logging.info('server_runner: executing subprocess "{0}"'.format(runcmd))<line_sep>proc=subprocess.Popen(runcmd)<line_sep>logging.info('server_runner waiting for stop event')<line_sep>stop_event.wait()<line_sep>logging.info('server_runner sending kill to subprocess')<line_sep>proc.terminate()<try_stmt><block_start>proc.wait(timeout=0.2)<block_end><except_stmt>subprocess.TimeoutExpired<block_start>logging.info('server_runner: subprocess did not die within timeout')<block_end><block_end><def_stmt>socket_reader sockobj outq exit_event<block_start>"""Reads from sockobj, 1 byte at a time; places results in outq.
This function runs in a loop until the sockobj connection is closed or until
exit_event is set.
"""<while_stmt><not>exit_event.is_set()<block_start><try_stmt><block_start>buf=sockobj.recv(1)<if_stmt>len(buf)<l>1<block_start><break><block_end>outq.put(buf)<block_end><except_stmt>socket.timeout<block_start><continue><block_end><except_stmt>OSError<block_start><break><block_end><block_end><block_end><def_stmt>assert_queue_contains q val timeout=0.1<block_start><try_stmt><block_start>v=q.get(timeout=timeout)<assert_stmt>v<eq>val<block_end><except_stmt>queue.Empty<block_start><assert_stmt><false> f'queue was empty with timeout={timeout}'<block_end><block_end><def_stmt>assert_queue_empty q wait=0.1<block_start>time.sleep(wait)<assert_stmt>q.empty() 'queue had {0} with wait={1}'.format(q.get() wait)<block_end><def_stmt>client_thread_runner client_body_func port initial_timeout=0.1<block_start>"""Abstracts the function running within a client thread.
Connects to the port with a socket, launches a reading thread and makes sure
to shut down properly. client_body_func is the actual interaction with a
socket, once connected.
"""<line_sep>sockobj=socket.socket(socket.AF_INET socket.SOCK_STREAM)<line_sep>sockobj.settimeout(initial_timeout)<line_sep>sockobj.connect(('localhost' port))<line_sep>logging.info('{0} connected to server'.format(client_body_func.__name__))<line_sep>readq=queue.Queue()<line_sep>exit_event=threading.Event()<line_sep>tread=threading.Thread(target=socket_reader args=(sockobj readq exit_event))<line_sep>tread.start()<try_stmt><block_start>client_body_func(sockobj readq initial_timeout)<block_end><finally_stmt># Closing the socket before killing the server helps the bound socket be
# fully released on the server side; otherwise it may be kept alive by
# the kernel for a while after the server process exits.
<block_start>sockobj.shutdown(socket.SHUT_RDWR)<line_sep>sockobj.close()<line_sep>exit_event.set()<line_sep>tread.join()<block_end><block_end><def_stmt>client0 sock readq initial_timeout<block_start>assert_queue_contains(readq b'*' timeout=initial_timeout)<line_sep>assert_queue_empty(readq)<block_end><def_stmt>client1 sock readq initial_timeout<block_start>assert_queue_contains(readq b'*' timeout=initial_timeout)<line_sep>sock.send(b'abcdef')<line_sep>assert_queue_empty(readq)<line_sep>sock.send(b'^')<line_sep>assert_queue_empty(readq)<line_sep>sock.send(b'f')<line_sep>assert_queue_contains(readq b'g')<line_sep>sock.send(b'1234')<line_sep>assert_queue_contains(readq b'2')<line_sep>assert_queue_contains(readq b'3')<line_sep>assert_queue_contains(readq b'4')<line_sep>assert_queue_contains(readq b'5')<line_sep>sock.send(b'$')<line_sep>assert_queue_empty(readq)<line_sep>sock.send(b'1234')<line_sep>assert_queue_empty(readq)<line_sep>sock.send(b'^')<line_sep>sock.send(b'xy')<line_sep>assert_queue_contains(readq b'y')<line_sep>assert_queue_contains(readq b'z')<block_end><def_stmt>client2 sock readq initial_timeout<block_start>assert_queue_contains(readq b'*' timeout=initial_timeout)<line_sep>sock.send(b'^ab$^kl$^80$50')<for_stmt>b [b'b' b'c' b'l' b'm' b'9' b'1']<block_start>assert_queue_contains(readq b)<block_end>assert_queue_empty(readq)<block_end><def_stmt>client3 sock readq initial_timeout<block_start>assert_queue_contains(readq b'*' timeout=initial_timeout)<line_sep>sock.send(b'^$^$^$^$^$^$$^$$$$foobarjoemoedoe^$$')<line_sep>assert_queue_empty(readq)<block_end><def_stmt>test_main <block_start>argparser=argparse.ArgumentParser('Server test')<line_sep>argparser.add_argument('server_path' help='path to the server executable')<line_sep>argparser.add_argument('-p' '--server-port' default=9090 type=int help='the server listens on this port')<line_sep>argparser.add_argument('--timeout-bump' default=0.0 type=float help='amount of time (in sec) by which to bump the '<concat>'timeout between consecutive clients')<line_sep>argparser.add_argument('-n' '--num-clients' default=2 type=int help='number of clients to launch simultaneously; ')<line_sep>argparser.add_argument('--loop' default=1 type=int help='launch test in a loop')<line_sep>args=argparser.parse_args()<assert_stmt>args.num_clients<ge>1<line_sep>logging.basicConfig(level=logging.DEBUG format='%(levelname)s:%(asctime)s:%(message)s')<line_sep># Launch the server in a thread, listening on the port.
stop_event=threading.Event()<line_sep>server_thread=threading.Thread(target=server_runner args=(args.server_path [str(args.server_port)] stop_event))<line_sep>server_thread.start()<line_sep>time.sleep(0.3)<line_sep>TIMEOUT=0.5+(args.num_clients-1)<times>args.timeout_bump<for_stmt>i range(args.loop)<block_start>logging.info('** Test iteration {}'.format(i))<line_sep>client_iter=itertools.cycle([client0 client1 client2 client3])<line_sep>threads=[]<for_stmt>i range(args.num_clients)<block_start>tester_thread=threading.Thread(target=client_thread_runner args=(next(client_iter) args.server_port TIMEOUT))<line_sep>tester_thread.start()<line_sep>threads.append(tester_thread)<block_end>time.sleep(TIMEOUT)<for_stmt>thread threads<block_start>thread.join()<block_end><block_end>stop_event.set()<block_end><if_stmt>__name__<eq>'__main__'<block_start>test_main()<block_end>
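# Example run (the tester and server paths are illustrative):
#   python3.6 tester.py ./server.py -n 4 --timeout-bump 0.2 --loop 2
# starts the server subprocess once and exercises it with 4 concurrent scripted clients, twice over.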
|
<import_from_stmt>frictionless Resource<line_sep># Loader
<def_stmt>test_buffer_loader <block_start>source=b"header1,header2\nvalue1,value2\nvalue3,value4"<with_stmt>Resource(source format="csv")<as>resource<block_start><assert_stmt>resource.header<eq>["header1" "header2"]<assert_stmt>resource.read_rows()<eq>[{"header1":"value1" "header2":"value2"} {"header1":"value3" "header2":"value4"} ]<block_end><block_end><def_stmt>test_buffer_loader_recursion_error_issue_647 <block_start><with_stmt>open("data/issue-647.csv.txt" "rb")<as>file<block_start><with_stmt>Resource(file.read() format="csv" encoding="iso-8859-1")<as>resource<block_start><assert_stmt>len(resource.read_lists())<eq>883<block_end><block_end><block_end><def_stmt>test_buffer_loader_write <block_start>source=Resource("data/table.csv")<line_sep>target=source.write(Resource(scheme="buffer" format="csv"))<assert_stmt>target.data<eq>"id,name\r\n1,english\r\n2,中国人\r\n".encode("utf-8")<block_end>
|
# encoding: utf-8
<import_stmt>os<import_stmt>platform<import_stmt>datetime<import_stmt>random<import_stmt>time<import_from_stmt>emails.compat to_unicode<line_sep>DEFAULT_FROM=os.environ.get('SMTP_TEST_FROM_EMAIL')<or>'<EMAIL>'<line_sep>SUBJECT_SUFFIX=os.environ.get('SMTP_TEST_SUBJECT_SUFFIX')<def_stmt>as_bool value default=<false><block_start><if_stmt>value<is><none><block_start><return>default<block_end><return>value.lower()<in>('1' 'yes' 'true' 'on')<block_end>"""
Take environment variables if exists and send test letter
SMTP_TEST_SETS=GMAIL,OUTLOOK,YAMAIL
SMTP_TEST_GMAIL_TO=<EMAIL>
SMTP_TEST_GMAIL_USER=myuser
SMTP_TEST_GMAIL_PASSWORD=<PASSWORD>
SMTP_TEST_GMAIL_WITH_TLS=true
SMTP_TEST_GMAIL_WITHOUT_TLS=false
SMTP_TEST_GMAIL_HOST=alt1.gmail-smtp-in.l.google.com
SMTP_TEST_GMAIL_PORT=25
...
"""<def_stmt>smtp_server_from_env name='GMAIL'<block_start><def_stmt>_var param default=<none><block_start>v=os.environ.get('SMTP_TEST_{}_{}'.format(name param) default)<line_sep><return>v<block_end><def_stmt>_valid_smtp data<block_start><return>data['host']<block_end>smtp_info=dict(from_email=_var("FROM" default=DEFAULT_FROM) to_email=_var("TO") host=_var('HOST') port=_var('PORT' default=25) user=_var('USER') password=_var('PASSWORD'))<if_stmt>_valid_smtp(smtp_info)<block_start><if_stmt>as_bool(_var('WITH_TLS'))<block_start>smtp_info['tls']=<true><line_sep>sys_name='{}_WITH_TLS'.format(name)<line_sep><yield>sys_name smtp_info<block_end><if_stmt>as_bool(_var('WITHOUT_TLS'))<block_start>smtp_info['tls']=<false><line_sep>sys_name='{}_WITHOUT_TLS'.format(name)<line_sep><yield>sys_name smtp_info<block_end><block_end><block_end><class_stmt>SMTPTestParams(object)<block_start>subject_prefix='[python-emails]'<def_stmt>__init__ self from_email=<none> to_email=<none> defaults=<none> **kw<block_start>params={'fail_silently':<false> 'debug':1 'timeout':25}<line_sep>params.update(defaults<or>{})<line_sep>params.update(kw)<line_sep>self.params=params<line_sep>self.from_email=from_email<line_sep>self.to_email=to_email<block_end><def_stmt>patch_message self message<block_start>"""
Some SMTP servers require explicit from and to addresses
"""<if_stmt>self.from_email<block_start>message.mail_from=(message.mail_from[0] self.from_email)<block_end><if_stmt>self.to_email<block_start>message.mail_to=self.to_email<block_end># TODO: this code breaks template in subject; fix it
<if_stmt><not>to_unicode(message.subject).startswith(self.subject_prefix)<block_start>message.subject=" ".join([self.subject_prefix message.subject '// %s'%SUBJECT_SUFFIX])<block_end>message._headers['X-Test-Date']=datetime.datetime.utcnow().isoformat()<line_sep>message._headers['X-Python-Version']="%s/%s"%(platform.python_version() platform.platform())<line_sep>message._headers['X-Build-Data']=SUBJECT_SUFFIX<line_sep><return>message<block_end><def_stmt>__str__ self<block_start><return>u'SMTPTestParams({user}@{host}:{port})'.format(host=self.params.get('host') port=self.params.get('port') user=self.params.get('user' ''))<block_end><def_stmt>sleep self<block_start><if_stmt>'mailtrap'<in>self.params.get('host' '')<block_start>t=2+random.randint(0 2)<block_end><else_stmt><block_start>t=0.5<block_end>time.sleep(t)<block_end><block_end><def_stmt>get_servers <block_start>names=os.environ.get('SMTP_TEST_SETS' <none>)<if_stmt>names<block_start><for_stmt>name names.split(',')<block_start><for_stmt>sys_name,params smtp_server_from_env(name)<block_start><yield>sys_name SMTPTestParams(**params)<block_end><block_end><block_end><block_end>
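For instance, a test run that exercises a single TLS relay could be configured like this before iterating `get_servers()`; the host and addresses below are placeholders, not values from this module:

import os

os.environ['SMTP_TEST_SETS'] = 'GMAIL'
os.environ['SMTP_TEST_GMAIL_HOST'] = 'smtp.example.com'    # placeholder host
os.environ['SMTP_TEST_GMAIL_PORT'] = '587'
os.environ['SMTP_TEST_GMAIL_TO'] = '[email protected]'      # placeholder recipient
os.environ['SMTP_TEST_GMAIL_WITH_TLS'] = 'true'

for sys_name, params in get_servers():
    print(sys_name, params)  # GMAIL_WITH_TLS SMTPTestParams(@smtp.example.com:587)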
|
<import_from_future_stmt> absolute_import<import_stmt>pytest<import_from_stmt>billiard Value RawValue Lock Process<class_stmt>test_values<block_start>codes_values=[('i' 4343 24234) ('d' 3.625 -4.25) ('h' -232 234) ('c' 'x'.encode('latin') 'y'.encode('latin'))]<def_stmt>test_issue_229 self<block_start>"""Test fix for issue #229"""<line_sep>a=Value('i' 0)<line_sep>b=Value('i' 0)<line_sep>a.value=5<assert_stmt>a.value<eq>5<assert_stmt>b.value<eq>0<block_end>@classmethod<def_stmt>_test cls values<block_start><for_stmt>sv,cv zip(values cls.codes_values)<block_start>sv.value=cv[2]<block_end><block_end><def_stmt>test_value self raw=<false><block_start><if_stmt>raw<block_start>values=[RawValue(code value)<for>code,value,_ self.codes_values]<block_end><else_stmt><block_start>values=[Value(code value)<for>code,value,_ self.codes_values]<block_end><for_stmt>sv,cv zip(values self.codes_values)<block_start><assert_stmt>sv.value<eq>cv[1]<block_end>proc=Process(target=self._test args=(values ))<line_sep>proc.daemon=<true><line_sep>proc.start()<line_sep>proc.join()<for_stmt>sv,cv zip(values self.codes_values)<block_start><assert_stmt>sv.value<eq>cv[2]<block_end><block_end><def_stmt>test_rawvalue self<block_start>self.test_value(raw=<true>)<block_end><def_stmt>test_getobj_getlock self<block_start>val1=Value('i' 5)<line_sep>lock1=val1.get_lock()<line_sep>obj1=val1.get_obj()<line_sep>val2=Value('i' 5 lock=<none>)<line_sep>lock2=val2.get_lock()<line_sep>obj2=val2.get_obj()<line_sep>lock=Lock()<line_sep>val3=Value('i' 5 lock=lock)<line_sep>lock3=val3.get_lock()<line_sep>obj3=val3.get_obj()<assert_stmt>lock<eq>lock3<line_sep>arr4=Value('i' 5 lock=<false>)<assert_stmt><not>hasattr(arr4 'get_lock')<assert_stmt><not>hasattr(arr4 'get_obj')<with_stmt>pytest.raises(AttributeError)<block_start>Value('i' 5 lock='navalue')<block_end>arr5=RawValue('i' 5)<assert_stmt><not>hasattr(arr5 'get_lock')<assert_stmt><not>hasattr(arr5 'get_obj')<block_end><block_end>
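The pattern these tests guard is the usual shared-counter idiom; a minimal sketch using the same billiard API (`Value` plus `get_lock()`):

from billiard import Process, Value

def bump(counter, n):
    # get_lock() serializes read-modify-write updates across processes.
    for _ in range(n):
        with counter.get_lock():
            counter.value += 1

if __name__ == '__main__':
    counter = Value('i', 0)
    procs = [Process(target=bump, args=(counter, 1000)) for _ in range(4)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print(counter.value)  # 4000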
|
<import_from_stmt>common *<line_sep>
|
<import_stmt>pkg_resources<line_sep>__version__=pkg_resources.require("ckanapi")[0].version<line_sep>
|
# Copyright 2021, <NAME>, mailto:<EMAIL>
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Nodes representing more trusted imports. """<import_from_stmt>nuitka.importing.Importing locateModule<import_from_stmt>nuitka.utils.ModuleNames ModuleName<import_from_stmt>.ExpressionBases ExpressionBase<class_stmt>ExpressionImportHardBase(ExpressionBase)# Base classes can be abstract, pylint: disable=abstract-method
#
<block_start>__slots__=("module_name" "finding" "module_filename")<def_stmt>__init__ self module_name source_ref<block_start>ExpressionBase.__init__(self source_ref=source_ref)<line_sep>self.module_name=ModuleName(module_name)<line_sep>self.finding=<none><line_sep>self.module_filename=<none><line_sep>_module_name,self.module_filename,self.finding=locateModule(module_name=self.module_name parent_package=<none> level=0 )<line_sep># Expect to find them and to match the name of course.
<assert_stmt>self.finding<ne>"not-found" self.module_name<assert_stmt>_module_name<eq>self.module_name<block_end><def_stmt>getUsedModule self<block_start><return>self.module_name self.module_filename self.finding<block_end><block_end><class_stmt>ExpressionImportModuleNameHardBase(ExpressionImportHardBase)<block_start>"""Hard import names base class."""<line_sep># Base classes can be abstract, pylint: disable=I0021,abstract-method
__slots__=("import_name" "finding" "module_filename")<def_stmt>__init__ self module_name import_name source_ref<block_start>ExpressionImportHardBase.__init__(self module_name=module_name source_ref=source_ref)<line_sep>self.import_name=import_name<block_end># Derived ones have the same interface.
@staticmethod<def_stmt>isExpressionImportModuleNameHard <block_start><return><true><block_end><def_stmt>finalize self<block_start><del_stmt>self.parent<block_end><def_stmt>getDetails self<block_start><return>{"module_name":self.module_name "import_name":self.import_name}<block_end><def_stmt>getModuleName self<block_start><return>self.module_name<block_end><def_stmt>getImportName self<block_start><return>self.import_name<block_end><block_end><class_stmt>ExpressionImportModuleNameHardMaybeExists(ExpressionImportModuleNameHardBase)<block_start>"""Hard coded import names, e.g. of "site.something"
These are created for attributes of hard imported modules for which it is not
known whether they exist.
"""<line_sep>kind="EXPRESSION_IMPORT_MODULE_NAME_HARD_MAYBE_EXISTS"<def_stmt>computeExpressionRaw self trace_collection<block_start>trace_collection.onExceptionRaiseExit(AttributeError)<line_sep><return>self <none> <none><block_end>@staticmethod<def_stmt>mayHaveSideEffects <block_start><return><true><block_end>@staticmethod<def_stmt>mayRaiseException exception_type<block_start><return><true><block_end><block_end><class_stmt>ExpressionImportModuleNameHardExists(ExpressionImportModuleNameHardBase)<block_start>"""Hard coded import names, e.g. of "sys.stdout"
These are directly created for some Python mechanics.
"""<line_sep>kind="EXPRESSION_IMPORT_MODULE_NAME_HARD_EXISTS"<def_stmt>computeExpressionRaw self trace_collection# As good as it gets.
<block_start><return>self <none> <none><block_end>@staticmethod<def_stmt>mayHaveSideEffects <block_start><return><false><block_end>@staticmethod<def_stmt>mayRaiseException exception_type<block_start><return><false><block_end><block_end>
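Going by the `locateModule` call in `ExpressionImportHardBase.__init__`, resolving a module outside the node classes would look roughly like this; a sketch under the signature assumptions visible above, not checked against any particular Nuitka release:

from nuitka.importing.Importing import locateModule
from nuitka.utils.ModuleNames import ModuleName

# Returns the resolved name, the filename on disk, and a "finding" tag.
module_name, module_filename, finding = locateModule(
    module_name=ModuleName("os"),
    parent_package=None,
    level=0,
)
assert finding != "not-found"
print(module_name, module_filename, finding)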
|
<import_from_stmt>os listdir<import_from_stmt>os.path isfile join abspath<line_sep>image_path=abspath('./images')<line_sep>onlyfiles=[f<for>f listdir(image_path)<if>isfile(join(image_path f))]<line_sep>readme='''# awesome-video-chat-backgrounds
Just in case you're at home on a video call and you haven't had time to tidy up your REAL background, here are some awesome backgrounds to help you get through your next video chat.
## Contributing
* Please submit pull requests to add additional photos/images to this collection!
* Images should be a minimum of 1080 (width) x 550 (height) pixels
## Image List
'''<for_stmt>file onlyfiles<block_start>title=file.split('.')[0].replace('_' ' ').title()<line_sep>readme<augadd>'<a href="./images/{}" title="{}"> <img align="center" src="./images/{}" width="540px"/></a>\n'.format(file title file)<block_end><with_stmt>open('README.md' 'w+')<as>f<block_start>f.write(readme)<block_end>
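The filename-to-title transformation used above, shown in isolation with a made-up filename:

file = 'beach_sunset.jpg'    # hypothetical image name
title = file.split('.')[0].replace('_', ' ').title()
print(title)                 # Beach Sunset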
|
"""Animations of common operations in signal processing."""<import_stmt>matplotlib.pyplot<as>plt<import_from_stmt>matplotlib.animation FuncAnimation<def_stmt>animate_discrete_convolution x h y k kappa interval=100<block_start><def_stmt>update_stem stem x y<block_start>stem.markerline.set_data(x y)<for_stmt>idx,stem_line enumerate(stem.stemlines)<block_start>stem_line.set_data([x[idx] x[idx]] [0 y[idx]])<block_end><block_end><def_stmt>animate kappa_i<block_start>update_stem(stem_x k x(k))<line_sep>update_stem(stem_h k h(kappa_i-k))<line_sep>dot.set_data(kappa_i y[-k[0]+kappa_i])<block_end># setup plot and define objects
default_figsize=plt.rcParams.get('figure.figsize')<line_sep>fig,ax=plt.subplots(2 1 figsize=(default_figsize[0] 1.5<times>default_figsize[1]))<line_sep>fig.subplots_adjust(hspace=0.2)<line_sep>plt.close()# suppresses empty plot in notebook
stem_x=ax[0].stem(k x(k) linefmt='C0-' markerfmt='C0o' basefmt=' ' label=r'$x[\kappa]$')<line_sep>stem_h=ax[0].stem(k h(kappa[0]-k) linefmt='C1-' markerfmt='C1o' basefmt=' ' label=r'$h(k - \kappa)$')<line_sep>ax[0].set_xlabel(r'$\kappa$')<line_sep>ax[0].legend(loc='upper right')<line_sep>ax[0].grid()<line_sep>y=y(k)<line_sep>ax[1].stem(k y linefmt='C2-' markerfmt='C2o' basefmt=' ' label=r'$y[k]$')<line_sep>dot,=ax[1].plot([] 'ro')<line_sep>ax[1].set_xlabel(r'$k$')<line_sep>ax[1].legend(loc='upper right')<line_sep>ax[1].grid()<line_sep><return>FuncAnimation(fig animate kappa interval=interval)<block_end>
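A usage sketch for `animate_discrete_convolution`: `x`, `h`, and `y` are passed as callables over the sample axis, and `kappa` supplies one frame index per animation step. The rectangular pulses here are made up for illustration:

import numpy as np

k = np.arange(-10, 30)       # sample axis
kappa = np.arange(-10, 30)   # one frame per output sample

x = lambda k: np.where((k >= 0) & (k < 8), 1.0, 0.0)  # input: rectangular pulse
h = lambda k: np.where((k >= 0) & (k < 4), 0.5, 0.0)  # impulse response

def y(k):
    # Brute-force evaluation of the convolution sum on the given axis.
    kk = np.arange(-20, 40)
    return np.array([np.sum(x(kk) * h(k0 - kk)) for k0 in k])

anim = animate_discrete_convolution(x, h, y, k, kappa, interval=100)
# In a notebook: from IPython.display import HTML; HTML(anim.to_jshtml())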
|
#
#
#
<import_from_future_stmt> absolute_import division print_function unicode_literals<import_from_stmt>.base BaseProcessor<class_stmt>TypeAllowlistFilter(BaseProcessor)<block_start><def_stmt>__init__ self name allowlist<block_start>super(TypeAllowlistFilter self).__init__(name)<line_sep>self.allowlist=set(allowlist)<block_end><def_stmt>_process self zone *args **kwargs<block_start><for_stmt>record zone.records<block_start><if_stmt>record._type<not><in>self.allowlist<block_start>zone.remove_record(record)<block_end><block_end><return>zone<block_end>process_source_zone=_process<line_sep>process_target_zone=_process<block_end><class_stmt>TypeRejectlistFilter(BaseProcessor)<block_start><def_stmt>__init__ self name rejectlist<block_start>super(TypeRejectlistFilter self).__init__(name)<line_sep>self.rejectlist=set(rejectlist)<block_end><def_stmt>_process self zone *args **kwargs<block_start><for_stmt>record zone.records<block_start><if_stmt>record._type<in>self.rejectlist<block_start>zone.remove_record(record)<block_end><block_end><return>zone<block_end>process_source_zone=_process<line_sep>process_target_zone=_process<block_end>
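To see the allowlist behaviour in isolation, here is a sketch with a minimal stand-in for the zone object; the real octodns `Zone` is richer, and `FakeZone`/`FakeRecord` are invented for the demo:

class FakeRecord:
    def __init__(self, _type):
        self._type = _type

class FakeZone:
    def __init__(self, records):
        self._records = set(records)

    @property
    def records(self):
        # octodns' Zone.records hands back a copy, which is what makes
        # remove_record() safe inside the filter's iteration loop.
        return set(self._records)

    def remove_record(self, record):
        self._records.discard(record)

zone = FakeZone([FakeRecord('A'), FakeRecord('TXT')])
TypeAllowlistFilter('only-a', ['A']).process_source_zone(zone)
print([r._type for r in zone.records])  # ['A']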
|
<import_stmt>rospy<import_from_stmt>geometry_msgs.msg Pose Twist<import_stmt>sys select os<import_stmt>tty termios<import_from_stmt>std_msgs.msg String<line_sep>MAX_LINEAR=1000<line_sep>MAX_ANG_VEL=0.5<line_sep>LINEAR_STEP_SIZE=0.1<line_sep>ANG_VEL_STEP_SIZE=0.01<line_sep>ctrl_leader=<false><line_sep>send_flag=<false><line_sep>transition_state='multirotor'<line_sep>msg2all="""
Control Your XTDrone!
To all drones (press g to control the leader)
---------------------------
1 2 3 4 5 6 7 8 9 0
w r t y i
a s d g j k l
x v b n ,
w/x : increase/decrease north setpoint
a/d : increase/decrease east setpoint
i/, : increase/decrease upward setpoint
j/l : increase/decrease orientation
r : return home
t/y : arm/disarm
v/n : takeoff/land
b : offboard
s : hover(multirotor mode), loiter(plane mode)
k : hover(multirotor mode), idle(plane mode)
0~9 : extendable mission (e.g. different formation configurations)
this will mask the keyboard control
g : control the leader
o : transition
CTRL-C to quit
"""<line_sep>msg2leader="""
Control Your XTDrone!
To the leader (press g to control all drones)
---------------------------
1 2 3 4 5 6 7 8 9 0
w r t y i
a s d g j k l
x v b n ,
w/x : increase/decrease north setpoint
a/d : increase/decrease east setpoint
i/, : increase/decrease upward setpoint
j/l : increase/decrease orientation
r : return home
t/y : arm/disarm
v/n : takeoff/land
b : offboard
s : hover(multirotor mode), loiter(plane mode)
k : hover(multirotor mode), idle(plane mode)
0~9 : extendable mission (e.g. different formation configurations)
g : control all drones
o : transition
CTRL-C to quit
"""<def_stmt>getKey <block_start>tty.setraw(sys.stdin.fileno())<line_sep>rlist,_,_=select.select([sys.stdin] [] [] 0.1)<if_stmt>rlist<block_start>key=sys.stdin.read(1)<block_end><else_stmt><block_start>key=''<block_end>termios.tcsetattr(sys.stdin termios.TCSADRAIN settings)<line_sep><return>key<block_end><def_stmt>print_msg <block_start><if_stmt>ctrl_leader<block_start>print(msg2leader)<block_end><else_stmt><block_start>print(msg2all)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>settings=termios.tcgetattr(sys.stdin)<line_sep>vehicle_type=sys.argv[1]<line_sep>vehicle_num=int(sys.argv[2])<line_sep>control_type=sys.argv[3]<line_sep>rospy.init_node('vtol_keyboard_multi_control')<line_sep>multi_cmd_pose_enu_pub=[<none>]<times>vehicle_num<line_sep>multi_cmd_vel_flu_pub=[<none>]<times>vehicle_num<line_sep>multi_cmd_pub=[<none>]<times>vehicle_num<for_stmt>i range(vehicle_num)<block_start>multi_cmd_pose_enu_pub[i]=rospy.Publisher('/xtdrone/'+vehicle_type+'_'+str(i)+'/cmd_pose_enu' Pose queue_size=10)<if_stmt>control_type<eq>'vel'<block_start>multi_cmd_vel_flu_pub[i]=rospy.Publisher('/xtdrone/'+vehicle_type+'_'+str(i)+'/cmd_vel_flu' Twist queue_size=10)<block_end><else_stmt><block_start>multi_cmd_accel_flu_pub[i]=rospy.Publisher('/xtdrone/'+multirotor_type+'_'+str(i)+'/cmd_accel_flu' Twist queue_size=10)<block_end>multi_cmd_pub[i]=rospy.Publisher('/xtdrone/'+vehicle_type+'_'+str(i)+'/cmd' String queue_size=10)<block_end>leader_cmd_pose_enu_pub=rospy.Publisher("/xtdrone/leader/cmd_pose_enu" Pose queue_size=10)<if_stmt>control_type<eq>'vel'<block_start>leader_cmd_vel_flu_pub=rospy.Publisher("/xtdrone/leader/cmd_vel_flu" Twist queue_size=10)<block_end><else_stmt><block_start>leader_cmd_accel_flu_pub=rospy.Publisher("/xtdrone/leader/cmd_accel_flu" Twist queue_size=10)<block_end>leader_cmd_pub=rospy.Publisher("/xtdrone/leader_cmd" String queue_size=10)<line_sep>cmd=String()<line_sep>pose=Pose()<line_sep>twist=Twist()<line_sep>forward=0.0<line_sep>leftward=0.0<line_sep>upward=0.0<line_sep>angular=0.0<line_sep>print_msg()<while_stmt>(1)<block_start>key=getKey()<if_stmt>key<eq>'w'<block_start>forward=forward+LINEAR_STEP_SIZE<line_sep>print_msg()<if_stmt>control_type<eq>'vel'<block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><else_stmt><block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><block_end><elif_stmt>key<eq>'x'<block_start>forward=forward-LINEAR_STEP_SIZE<line_sep>print_msg()<if_stmt>control_type<eq>'vel'<block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><else_stmt><block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><block_end><elif_stmt>key<eq>'a'<block_start>leftward=leftward+LINEAR_STEP_SIZE<line_sep>print_msg()<if_stmt>control_type<eq>'vel'<block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><else_stmt><block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward 
angular))<block_end><block_end><elif_stmt>key<eq>'d'<block_start>leftward=leftward-LINEAR_STEP_SIZE<line_sep>print_msg()<if_stmt>control_type<eq>'vel'<block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><else_stmt><block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><block_end><elif_stmt>key<eq>'i'<block_start>upward=upward+LINEAR_STEP_SIZE<line_sep>print_msg()<if_stmt>control_type<eq>'vel'<block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><else_stmt><block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><block_end><elif_stmt>key<eq>','<block_start>upward=upward-LINEAR_STEP_SIZE<line_sep>print_msg()<if_stmt>control_type<eq>'vel'<block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><else_stmt><block_start>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><block_end><elif_stmt>key<eq>'j'<block_start>angular=angular+ANG_VEL_STEP_SIZE<line_sep>print_msg()<line_sep>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><elif_stmt>key<eq>'l'<block_start>angular=angular-ANG_VEL_STEP_SIZE<line_sep>print_msg()<line_sep>print("currently:\t forward vel %.2f\t leftward vel %.2f\t upward vel %.2f\t angular %.2f "%(forward leftward upward angular))<block_end><elif_stmt>key<eq>'r'<block_start>cmd='AUTO.RTL'<line_sep>print_msg()<line_sep>print('Returning home')<block_end><elif_stmt>key<eq>'t'<block_start>cmd='ARM'<line_sep>print_msg()<line_sep>print('Arming')<block_end><elif_stmt>key<eq>'y'<block_start>cmd='DISARM'<line_sep>print_msg()<line_sep>print('Disarming')<block_end><elif_stmt>key<eq>'v'<block_start>cmd='AUTO.TAKEOFF'<line_sep>print_msg()<line_sep>print('AUTO.TAKEOFF')<block_end><elif_stmt>key<eq>'b'<block_start>cmd='OFFBOARD'<line_sep>print_msg()<line_sep>print('Offboard')<block_end><elif_stmt>key<eq>'n'<block_start>cmd='AUTO.LAND'<line_sep>print_msg()<line_sep>print('AUTO.LAND')<block_end><elif_stmt>key<eq>'g'<block_start>ctrl_leader=<not>ctrl_leader<line_sep>print_msg()<block_end><elif_stmt>key<eq>'s'<block_start><if_stmt>transition_state<eq>'multirotor'<block_start>forward=0.0<line_sep>leftward=0.0<line_sep>upward=0.0<line_sep>angular=0.0<line_sep>cmd='HOVER'<block_end><else_stmt><block_start>cmd='loiter'<block_end>print_msg()<line_sep>print(cmd)<block_end><elif_stmt>key<eq>'k'<block_start><if_stmt>transition_state<eq>'multirotor'<block_start>forward=0.0<line_sep>leftward=0.0<line_sep>upward=0.0<line_sep>angular=0.0<line_sep>cmd='HOVER'<block_end><else_stmt><block_start>cmd='idle'<block_end>print_msg()<line_sep>print(cmd)<block_end><elif_stmt>key<eq>'o'<block_start><if_stmt>transition_state<eq>'multirotor'<block_start>transition_state='plane'<line_sep>cmd=transition_state<block_end><else_stmt><block_start>transition_state='multirotor'<line_sep>cmd=transition_state<block_end>print_msg()<line_sep>print(cmd)<block_end><else_stmt><block_start><for_stmt>i 
range(10)<block_start><if_stmt>key<eq>str(i)<block_start>cmd='mission'+key<line_sep>print_msg()<line_sep>print(cmd)<block_end><block_end><if_stmt>(key<eq>'\x03')<block_start><break><block_end><block_end><if_stmt>forward<g>MAX_LINEAR<block_start>forward=MAX_LINEAR<block_end><elif_stmt>forward<l>-MAX_LINEAR<block_start>forward=-MAX_LINEAR<block_end><if_stmt>leftward<g>MAX_LINEAR<block_start>leftward=MAX_LINEAR<block_end><elif_stmt>leftward<l>-MAX_LINEAR<block_start>leftward=-MAX_LINEAR<block_end><if_stmt>upward<g>MAX_LINEAR<block_start>upward=MAX_LINEAR<block_end><elif_stmt>upward<l>-MAX_LINEAR<block_start>upward=-MAX_LINEAR<block_end><if_stmt>angular<g>MAX_ANG_VEL<block_start>angular=MAX_ANG_VEL<block_end><elif_stmt>angular<l>-MAX_ANG_VEL<block_start>angular=-MAX_ANG_VEL<block_end><if_stmt>transition_state<eq>'plane'<block_start>pose.position.x=forward<line_sep>pose.position.y=leftward<line_sep>pose.position.z=upward<line_sep>pose.orientation.x=0.0<line_sep>pose.orientation.y=0.0<line_sep>pose.orientation.z=angular<block_end><else_stmt><block_start>twist.linear.x=forward<line_sep>twist.linear.y=leftward<line_sep>twist.linear.z=upward<line_sep>twist.angular.x=0.0<line_sep>twist.angular.y=0.0<line_sep>twist.angular.z=angular<block_end><for_stmt>i range(vehicle_num)<block_start><if_stmt>ctrl_leader<block_start><if_stmt>transition_state<eq>'plane'<block_start>leader_cmd_pose_enu_pub.publish(pose)<block_end><else_stmt><block_start><if_stmt>control_type<eq>'vel'<block_start>leader_cmd_vel_flu_pub.publish(twist)<block_end><else_stmt><block_start>leader_cmd_aceel_flu_pub.publish(twist)<block_end><block_end>leader_cmd_pub.publish(cmd)<block_end><else_stmt><block_start><if_stmt>transition_state<eq>'plane'<block_start>multi_cmd_pose_enu_pub[i].publish(pose)<block_end><else_stmt><block_start><if_stmt>control_type<eq>'vel'<block_start>multi_cmd_vel_flu_pub[i].publish(twist)<block_end><else_stmt><block_start>multi_cmd_accel_flu_pub[i].publish(twist)<block_end><block_end>multi_cmd_pub[i].publish(cmd)<block_end><block_end>cmd=''<block_end>termios.tcsetattr(sys.stdin termios.TCSADRAIN settings)<block_end>
|
<import_stmt>pytest<try_stmt><block_start><import_from_stmt>bots.stocks.technical_analysis.aroon aroon_command<block_end><except_stmt>ImportError<block_start>pytest.skip(allow_module_level=<true>)<block_end>@pytest.fixture(scope="module")<def_stmt>vcr_config <block_start><return>{"filter_headers":[("User-Agent" <none>)] "filter_query_parameters":[("period1" "MOCK_PERIOD_1") ("period2" "MOCK_PERIOD_2") ("date" "MOCK_DATE") ] }<block_end>@[email protected]@pytest.mark.parametrize("start, end, extended, heikin, news" [("" "" <false> <false> <false>) ("2022-01-01" "2022-04-01" <true> <true> <true>) ] )<def_stmt>test_aroon_command recorder start end extended heikin news<block_start>value=aroon_command("TSLA" start=start end=end extended_hours=extended heikin_candles=heikin news=news )<line_sep>value["imagefile"]=value["imagefile"][-4:]<line_sep>recorder.capture(value)<block_end>@[email protected]@pytest.mark.parametrize("ticker" ["" "ZZZZ"])<def_stmt>test_aroon_invalid ticker<block_start><with_stmt>pytest.raises(Exception)<block_start>aroon_command(ticker)<block_end><block_end>
|
<import_stmt>math<import_stmt>operator<import_stmt>random<import_stmt>pytest<import_stmt>hail<as>hl<import_stmt>hail.expr.aggregators<as>agg<import_from_stmt>hail.utils.java Env<import_from_stmt>hail.utils.misc new_temp_file<import_from_stmt>..helpers *<line_sep>setUpModule=startTestHailContext<line_sep>tearDownModule=stopTestHailContext<class_stmt>Tests(unittest.TestCase)<block_start><def_stmt>get_mt self min_partitions=<none><arrow>hl.MatrixTable<block_start><return>hl.import_vcf(resource("sample.vcf") min_partitions=min_partitions)<block_end><def_stmt>test_range_count self<block_start>self.assertEqual(hl.utils.range_matrix_table(7 13).count() (7 13))<block_end><def_stmt>test_row_key_field_show_runs self<block_start>ds=self.get_mt()<line_sep>ds.locus.show()<block_end><def_stmt>test_update self<block_start>mt=self.get_mt()<line_sep>mt=mt.select_entries(dp=mt.DP gq=mt.GQ)<line_sep>self.assertTrue(schema_eq(mt.entry.dtype hl.tstruct(dp=hl.tint32 gq=hl.tint32)))<block_end><def_stmt>test_annotate self<block_start>mt=self.get_mt()<line_sep>mt=mt.annotate_globals(foo=5)<line_sep>self.assertEqual(mt.globals.dtype hl.tstruct(foo=hl.tint32))<line_sep>mt=mt.annotate_rows(x1=agg.count() x2=agg.fraction(<false>) x3=agg.count_where(<true>) x4=mt.info.AC+mt.foo)<line_sep>mt=mt.annotate_cols(apple=6)<line_sep>mt=mt.annotate_cols(y1=agg.count() y2=agg.fraction(<false>) y3=agg.count_where(<true>) y4=mt.foo+mt.apple)<line_sep>expected_schema=hl.tstruct(s=hl.tstr apple=hl.tint32 y1=hl.tint64 y2=hl.tfloat64 y3=hl.tint64 y4=hl.tint32)<line_sep>self.assertTrue(schema_eq(mt.col.dtype expected_schema) "expected: "+str(mt.col.dtype)+"\nactual: "+str(expected_schema))<line_sep>mt=mt.select_entries(z1=mt.x1+mt.foo z2=mt.x1+mt.y1+mt.foo)<line_sep>self.assertTrue(schema_eq(mt.entry.dtype hl.tstruct(z1=hl.tint64 z2=hl.tint64)))<block_end><def_stmt>test_annotate_globals self<block_start>mt=hl.utils.range_matrix_table(1 1)<line_sep>ht=hl.utils.range_table(1 1)<line_sep>data=[(5 hl.tint operator.eq) (float('nan') hl.tfloat32 <lambda>x y:str(x)<eq>str(y)) (float('inf') hl.tfloat64 <lambda>x y:str(x)<eq>str(y)) (float('-inf') hl.tfloat64 <lambda>x y:str(x)<eq>str(y)) (1.111 hl.tfloat64 operator.eq) ([hl.Struct(**{'a':<none> 'b':5}) hl.Struct(**{'a':'hello' 'b':10})] hl.tarray(hl.tstruct(a=hl.tstr b=hl.tint)) operator.eq)]<for_stmt>x,t,f data<block_start>self.assertTrue(f(hl.eval(mt.annotate_globals(foo=hl.literal(x t)).foo) x) f"{x}, {t}")<line_sep>self.assertTrue(f(hl.eval(ht.annotate_globals(foo=hl.literal(x t)).foo) x) f"{x}, {t}")<block_end><block_end><def_stmt>test_head self# no empty partitions
<block_start>mt1=hl.utils.range_matrix_table(10 10)<line_sep># empty partitions at front
mt2=hl.utils.range_matrix_table(20 10 20)<line_sep>mt2=mt2.filter_rows(mt2.row_idx<g>9)<line_sep>mts=[mt1 mt2]<for_stmt>mt mts<block_start>tmp_file=new_temp_file(extension='mt')<line_sep>mt.write(tmp_file)<line_sep>mt_readback=hl.read_matrix_table(tmp_file)<for_stmt>mt_ [mt mt_readback]<block_start><assert_stmt>mt_.head(1).count_rows()<eq>1<assert_stmt>mt_.head(1)._force_count_rows()<eq>1<assert_stmt>mt_.head(100).count_rows()<eq>10<assert_stmt>mt_.head(100)._force_count_rows()<eq>10<block_end><block_end><block_end><def_stmt>test_head_cols self<block_start>mt1=hl.utils.range_matrix_table(10 10)<assert_stmt>mt1.head(1 2).count()<eq>(1 2)<assert_stmt>mt1.head(1 <none>).count()<eq>(1 10)<assert_stmt>mt1.head(<none> 1).count()<eq>(10 1)<block_end><def_stmt>test_tail self# no empty partitions
<block_start>mt1=hl.utils.range_matrix_table(10 10)<line_sep># empty partitions at front
mt2=hl.utils.range_matrix_table(20 10 20)<line_sep>mt2=mt2.filter_rows(mt2.row_idx<g>9)<line_sep>mts=[mt1 mt2]<for_stmt>mt mts<block_start>tmp_file=new_temp_file(extension='mt')<line_sep>mt.write(tmp_file)<line_sep>mt_readback=hl.read_matrix_table(tmp_file)<for_stmt>mt_ [mt mt_readback]<block_start><assert_stmt>mt_.tail(1).count_rows()<eq>1<assert_stmt>mt_.tail(1)._force_count_rows()<eq>1<assert_stmt>mt_.tail(100).count_rows()<eq>10<assert_stmt>mt_.tail(100)._force_count_rows()<eq>10<block_end><block_end><block_end><def_stmt>test_tail_cols self<block_start>mt1=hl.utils.range_matrix_table(10 10)<assert_stmt>mt1.tail(1 2).count()<eq>(1 2)<assert_stmt>mt1.tail(1 <none>).count()<eq>(1 10)<assert_stmt>mt1.tail(<none> 1).count()<eq>(10 1)<block_end>@fails_service_backend()<def_stmt>test_tail_entries self<block_start>mt=hl.utils.range_matrix_table(100 30)<line_sep>mt=mt.filter_cols(mt.col_idx<ne>29)<def_stmt>tail *args<block_start>ht=mt.tail(*args).entries()<line_sep><return>ht.aggregate(hl.agg.collect_as_set(hl.tuple([ht.row_idx ht.col_idx])))<block_end><def_stmt>expected n m<block_start><return>set((i j)<for>i range(100-n 100)<for>j range(29-m 29))<block_end><assert_stmt>tail(<none> 10)<eq>expected(100 10)<assert_stmt>tail(30 <none>)<eq>expected(30 29)<assert_stmt>tail(30 10)<eq>expected(30 10)<block_end>@fails_service_backend()<def_stmt>test_tail_scan self<block_start>mt=hl.utils.range_matrix_table(30 40)<line_sep>mt=mt.annotate_rows(i=hl.scan.count())<line_sep>mt=mt.annotate_cols(j=hl.scan.count())<line_sep>mt=mt.tail(10 11)<line_sep>ht=mt.entries()<assert_stmt>ht.aggregate(agg.collect_as_set(hl.tuple([ht.i ht.j])))<eq>set((i j)<for>i range(20 30)<for>j range(29 40))<block_end><def_stmt>test_filter self<block_start>mt=self.get_mt()<line_sep>mt=mt.annotate_globals(foo=5)<line_sep>mt=mt.annotate_rows(x1=agg.count())<line_sep>mt=mt.annotate_cols(y1=agg.count())<line_sep>mt=mt.annotate_entries(z1=mt.DP)<line_sep>mt=mt.filter_rows((mt.x1<eq>5)&(agg.count()<eq>3)&(mt.foo<eq>2))<line_sep>mt=mt.filter_cols((mt.y1<eq>5)&(agg.count()<eq>3)&(mt.foo<eq>2))<line_sep>mt=mt.filter_entries((mt.z1<l>5)&(mt.y1<eq>3)&(mt.x1<eq>5)&(mt.foo<eq>2))<line_sep>mt.count_rows()<block_end>@fails_service_backend()<def_stmt>test_aggregate self<block_start>mt=self.get_mt()<line_sep>mt=mt.annotate_globals(foo=5)<line_sep>mt=mt.annotate_rows(x1=agg.count())<line_sep>mt=mt.annotate_cols(y1=agg.count())<line_sep>mt=mt.annotate_entries(z1=mt.DP)<line_sep>qv=mt.aggregate_rows(agg.count())<line_sep>qs=mt.aggregate_cols(agg.count())<line_sep>qg=mt.aggregate_entries(agg.count())<line_sep>self.assertIsNotNone(mt.aggregate_entries(hl.agg.take(mt.s 1)[0]))<line_sep>self.assertEqual(qv 346)<line_sep>self.assertEqual(qs 100)<line_sep>self.assertEqual(qg qv<times>qs)<line_sep>qvs=mt.aggregate_rows(hl.Struct(x=agg.collect(mt.locus.contig) y=agg.collect(mt.x1)))<line_sep>qss=mt.aggregate_cols(hl.Struct(x=agg.collect(mt.s) y=agg.collect(mt.y1)))<line_sep>qgs=mt.aggregate_entries(hl.Struct(x=agg.filter(<false> agg.collect(mt.y1)) y=agg.filter(hl.rand_bool(0.1) agg.collect(mt.GT))))<block_end><def_stmt>test_aggregate_rows_array_agg self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=mt.annotate_rows(maf_flag=hl.empty_array('bool'))<line_sep>mt.aggregate_rows(hl.agg.array_agg(<lambda>x:hl.agg.counter(x) mt.maf_flag))<block_end><def_stmt>test_aggregate_rows_bn_counter self<block_start>r=hl.balding_nichols_model(3 10 10).rows()<line_sep>r.aggregate(hl.agg.counter(r.locus.in_x_nonpar()))<block_end><def_stmt>test_col_agg_no_rows 
self<block_start>mt=hl.utils.range_matrix_table(3 3).filter_rows(<false>)<line_sep>mt=mt.annotate_cols(x=hl.agg.count())<assert_stmt>mt.x.collect()<eq>[0 0 0]<block_end><def_stmt>test_col_collect self<block_start>mt=hl.utils.range_matrix_table(3 3)<line_sep>mt.cols().collect()<block_end><def_stmt>test_aggregate_ir self<block_start>ds=(hl.utils.range_matrix_table(5 5).annotate_globals(g1=5).annotate_entries(e1=3))<line_sep>x=[("col_idx" <lambda>e:ds.aggregate_cols(e)) ("row_idx" <lambda>e:ds.aggregate_rows(e))]<for_stmt>name,f x<block_start>r=f(hl.struct(x=agg.sum(ds[name])+ds.g1 y=agg.filter(ds[name]%2<ne>0 agg.sum(ds[name]+2))+ds.g1 z=agg.sum(ds.g1+ds[name])+ds.g1 mean=agg.mean(ds[name])))<line_sep>self.assertEqual(convert_struct_to_dict(r) {u'x':15 u'y':13 u'z':40 u'mean':2.0})<line_sep>r=f(5)<line_sep>self.assertEqual(r 5)<line_sep>r=f(hl.missing(hl.tint32))<line_sep>self.assertEqual(r <none>)<line_sep>r=f(agg.filter(ds[name]%2<ne>0 agg.sum(ds[name]+2))+ds.g1)<line_sep>self.assertEqual(r 13)<block_end>r=ds.aggregate_entries(agg.filter((ds.row_idx%2<ne>0)&(ds.col_idx%2<ne>0) agg.sum(ds.e1+ds.g1+ds.row_idx+ds.col_idx))+ds.g1)<line_sep>self.assertTrue(r 48)<block_end><def_stmt>test_select_entries self<block_start>mt=hl.utils.range_matrix_table(10 10 n_partitions=4)<line_sep>mt=mt.annotate_entries(a=hl.struct(b=mt.row_idx c=mt.col_idx) foo=mt.row_idx<times>10+mt.col_idx)<line_sep>mt=mt.select_entries(mt.a.b mt.a.c mt.foo)<line_sep>mt=mt.annotate_entries(bc=mt.b<times>10+mt.c)<line_sep>mt_entries=mt.entries()<assert_stmt>(mt_entries.all(mt_entries.bc<eq>mt_entries.foo))<block_end><def_stmt>test_select_cols self<block_start>mt=hl.utils.range_matrix_table(3 5 n_partitions=4)<line_sep>mt=mt.annotate_entries(e=mt.col_idx<times>mt.row_idx)<line_sep>mt=mt.annotate_globals(g=1)<line_sep>mt=mt.annotate_cols(sum=agg.sum(mt.e+mt.col_idx+mt.row_idx+mt.g)+mt.col_idx+mt.g count=agg.count_where(mt.e%2<eq>0) foo=agg.count())<line_sep>result=convert_struct_to_dict(mt.cols().collect()[-2])<line_sep>self.assertEqual(result {'col_idx':3 'sum':28 'count':2 'foo':3})<block_end><def_stmt>test_drop self<block_start>mt=self.get_mt()<line_sep>mt=mt.annotate_globals(foo=5)<line_sep>mt=mt.annotate_cols(bar=5)<line_sep>mt1=mt.drop('GT' 'info' 'foo' 'bar')<line_sep>self.assertTrue('foo'<not><in>mt1.globals)<line_sep>self.assertTrue('info'<not><in>mt1.row)<line_sep>self.assertTrue('bar'<not><in>mt1.col)<line_sep>self.assertTrue('GT'<not><in>mt1.entry)<line_sep>mt1._force_count_rows()<line_sep>mt2=mt.drop(mt.GT mt.info mt.foo mt.bar)<line_sep>self.assertTrue('foo'<not><in>mt2.globals)<line_sep>self.assertTrue('info'<not><in>mt2.row)<line_sep>self.assertTrue('bar'<not><in>mt2.col)<line_sep>self.assertTrue('GT'<not><in>mt2.entry)<line_sep>mt2._force_count_rows()<block_end><def_stmt>test_explode_rows self<block_start>mt=hl.utils.range_matrix_table(4 4)<line_sep>mt=mt.annotate_entries(e=mt.row_idx<times>10+mt.col_idx)<line_sep>self.assertTrue(mt.annotate_rows(x=[1]).explode_rows('x').drop('x')._same(mt))<line_sep>self.assertEqual(mt.annotate_rows(x=hl.empty_array('int')).explode_rows('x').count_rows() 0)<line_sep>self.assertEqual(mt.annotate_rows(x=hl.missing('array<int>')).explode_rows('x').count_rows() 0)<line_sep>self.assertEqual(mt.annotate_rows(x=hl.range(0 mt.row_idx)).explode_rows('x').count_rows() 6)<line_sep>mt=mt.annotate_rows(x=hl.struct(y=hl.range(0 mt.row_idx)))<line_sep>self.assertEqual(mt.explode_rows(mt.x.y).count_rows() 6)<block_end><def_stmt>test_explode_cols self<block_start>mt=hl.utils.range_matrix_table(4 
4)<line_sep>mt=mt.annotate_entries(e=mt.row_idx<times>10+mt.col_idx)<line_sep>self.assertTrue(mt.annotate_cols(x=[1]).explode_cols('x').drop('x')._same(mt))<line_sep>self.assertEqual(mt.annotate_cols(x=hl.empty_array('int')).explode_cols('x').count_cols() 0)<line_sep>self.assertEqual(mt.annotate_cols(x=hl.missing('array<int>')).explode_cols('x').count_cols() 0)<line_sep>self.assertEqual(mt.annotate_cols(x=hl.range(0 mt.col_idx)).explode_cols('x').count_cols() 6)<block_end><def_stmt>test_explode_key_errors self<block_start>mt=hl.utils.range_matrix_table(1 1).key_cols_by(a=[1]).key_rows_by(b=[1])<with_stmt>self.assertRaises(ValueError)<block_start>mt.explode_cols('a')<block_end><with_stmt>self.assertRaises(ValueError)<block_start>mt.explode_rows('b')<block_end><block_end><def_stmt>test_group_by_field_lifetimes self<block_start>mt=hl.utils.range_matrix_table(3 3)<line_sep>mt2=(mt.group_rows_by(row_idx='100').aggregate(x=hl.agg.collect_as_set(mt.row_idx+5)))<assert_stmt>mt2.aggregate_entries(hl.agg.all(mt2.x<eq>hl.set({5 6 7})))<line_sep>mt3=(mt.group_cols_by(col_idx='100').aggregate(x=hl.agg.collect_as_set(mt.col_idx+5)))<assert_stmt>mt3.aggregate_entries(hl.agg.all(mt3.x<eq>hl.set({5 6 7})))<block_end><def_stmt>test_aggregate_cols_by self<block_start>mt=hl.utils.range_matrix_table(2 4)<line_sep>mt=(mt.annotate_cols(group=mt.col_idx<l>2).annotate_globals(glob=5))<line_sep>grouped=mt.group_cols_by(mt.group)<line_sep>result=grouped.aggregate(sum=hl.agg.sum(mt.row_idx<times>2+mt.col_idx+mt.glob)+3)<line_sep>expected=(hl.Table.parallelize([{'row_idx':0 'group':<true> 'sum':14} {'row_idx':0 'group':<false> 'sum':18} {'row_idx':1 'group':<true> 'sum':18} {'row_idx':1 'group':<false> 'sum':22}] hl.tstruct(row_idx=hl.tint group=hl.tbool sum=hl.tint64)).annotate_globals(glob=5).key_by('row_idx' 'group'))<line_sep>self.assertTrue(result.entries()._same(expected))<block_end><def_stmt>test_aggregate_cols_by_init_op self<block_start>mt=hl.import_vcf(resource('sample.vcf'))<line_sep>cs=mt.group_cols_by(mt.s).aggregate(cs=hl.agg.call_stats(mt.GT mt.alleles))<line_sep>cs._force_count_rows()<block_end># should run without error
<def_stmt>test_aggregate_cols_scope_violation self<block_start>mt=get_dataset()<with_stmt>pytest.raises(hl.expr.ExpressionException)<as>exc<block_start>mt.aggregate_cols(hl.agg.filter(<false> hl.agg.sum(mt.GT.is_non_ref())))<block_end><assert_stmt>"scope violation"<in>str(exc.value)<block_end><def_stmt>test_aggregate_rows_by self<block_start>mt=hl.utils.range_matrix_table(4 2)<line_sep>mt=(mt.annotate_rows(group=mt.row_idx<l>2).annotate_globals(glob=5))<line_sep>grouped=mt.group_rows_by(mt.group)<line_sep>result=grouped.aggregate(sum=hl.agg.sum(mt.col_idx<times>2+mt.row_idx+mt.glob)+3)<line_sep>expected=(hl.Table.parallelize([{'col_idx':0 'group':<true> 'sum':14} {'col_idx':1 'group':<true> 'sum':18} {'col_idx':0 'group':<false> 'sum':18} {'col_idx':1 'group':<false> 'sum':22}] hl.tstruct(group=hl.tbool col_idx=hl.tint sum=hl.tint64)).annotate_globals(glob=5).key_by('group' 'col_idx'))<line_sep>self.assertTrue(result.entries()._same(expected))<block_end><def_stmt>test_collect_cols_by_key self<block_start>mt=hl.utils.range_matrix_table(3 3)<line_sep>col_dict=hl.literal({0:[1] 1:[2 3] 2:[4 5 6]})<line_sep>mt=mt.annotate_cols(foo=col_dict.get(mt.col_idx)).explode_cols('foo')<line_sep>mt=mt.annotate_entries(bar=mt.row_idx<times>mt.foo)<line_sep>grouped=mt.collect_cols_by_key()<line_sep>self.assertListEqual(grouped.cols().order_by('col_idx').collect() [hl.Struct(col_idx=0 foo=[1]) hl.Struct(col_idx=1 foo=[2 3]) hl.Struct(col_idx=2 foo=[4 5 6])])<line_sep>self.assertListEqual(grouped.entries().select('bar').order_by('row_idx' 'col_idx').collect() [hl.Struct(row_idx=0 col_idx=0 bar=[0]) hl.Struct(row_idx=0 col_idx=1 bar=[0 0]) hl.Struct(row_idx=0 col_idx=2 bar=[0 0 0]) hl.Struct(row_idx=1 col_idx=0 bar=[1]) hl.Struct(row_idx=1 col_idx=1 bar=[2 3]) hl.Struct(row_idx=1 col_idx=2 bar=[4 5 6]) hl.Struct(row_idx=2 col_idx=0 bar=[2]) hl.Struct(row_idx=2 col_idx=1 bar=[4 6]) hl.Struct(row_idx=2 col_idx=2 bar=[8 10 12])])<block_end><def_stmt>test_weird_names self<block_start>ds=self.get_mt()<line_sep>exprs={'a':5 ' a ':5 r'\%!^!@#&#&$%#$%':[5] '$':5 'ß':5}<line_sep>ds.annotate_globals(**exprs)<line_sep>ds.select_globals(**exprs)<line_sep>ds.annotate_cols(**exprs)<line_sep>ds1=ds.select_cols(**exprs)<line_sep>ds.annotate_rows(**exprs)<line_sep>ds2=ds.select_rows(**exprs)<line_sep>ds.annotate_entries(**exprs)<line_sep>ds.select_entries(**exprs)<line_sep>ds1.explode_cols(r'\%!^!@#&#&$%#$%')<line_sep>ds1.explode_cols(ds1[r'\%!^!@#&#&$%#$%'])<line_sep>ds1.group_cols_by(ds1.a).aggregate(**{'*``81':agg.count()})<line_sep>ds1.drop(r'\%!^!@#&#&$%#$%')<line_sep>ds1.drop(ds1[r'\%!^!@#&#&$%#$%'])<line_sep>ds2.explode_rows(r'\%!^!@#&#&$%#$%')<line_sep>ds2.explode_rows(ds2[r'\%!^!@#&#&$%#$%'])<line_sep>ds2.group_rows_by(ds2.a).aggregate(**{'*``81':agg.count()})<block_end><def_stmt>test_semi_anti_join_rows self<block_start>mt=hl.utils.range_matrix_table(10 3)<line_sep>ht=hl.utils.range_table(3)<assert_stmt>mt.semi_join_rows(ht).count()<eq>(3 3)<assert_stmt>mt.anti_join_rows(ht).count()<eq>(7 3)<block_end><def_stmt>test_semi_anti_join_cols self<block_start>mt=hl.utils.range_matrix_table(3 10)<line_sep>ht=hl.utils.range_table(3)<assert_stmt>mt.semi_join_cols(ht).count()<eq>(3 3)<assert_stmt>mt.anti_join_cols(ht).count()<eq>(3 7)<block_end>@fails_service_backend()<def_stmt>test_joins self<block_start>mt=self.get_mt().select_rows(x1=1 y1=1)<line_sep>mt2=mt.select_rows(x2=1 y2=2)<line_sep>mt2=mt2.select_cols(c1=1 
c2=2)<line_sep>mt=mt.annotate_rows(y2=mt2.index_rows(mt.row_key).y2)<line_sep>mt=mt.annotate_cols(c2=mt2.index_cols(mt.s).c2)<line_sep>mt=mt.annotate_cols(c2=mt2.index_cols(hl.str(mt.s)).c2)<line_sep>rt=mt.rows()<line_sep>ct=mt.cols()<line_sep>mt.annotate_rows(**rt[mt.locus mt.alleles])<line_sep>self.assertTrue(rt.all(rt.y2<eq>2))<line_sep>self.assertTrue(ct.all(ct.c2<eq>2))<block_end>@fails_service_backend()<def_stmt>test_joins_with_key_structs self<block_start>mt=self.get_mt()<line_sep>rows=mt.rows()<line_sep>cols=mt.cols()<line_sep>self.assertEqual(rows[mt.locus mt.alleles].take(1) rows[mt.row_key].take(1))<line_sep>self.assertEqual(cols[mt.s].take(1) cols[mt.col_key].take(1))<line_sep>self.assertEqual(mt.index_rows(mt.row_key).take(1) mt.index_rows(mt.locus mt.alleles).take(1))<line_sep>self.assertEqual(mt.index_cols(mt.col_key).take(1) mt.index_cols(mt.s).take(1))<line_sep>self.assertEqual(mt[mt.row_key mt.col_key].take(1) mt[(mt.locus mt.alleles) mt.s].take(1))<block_end><def_stmt>test_index_keyless self<block_start>mt=hl.utils.range_matrix_table(3 3)<with_stmt>self.assertRaisesRegex(hl.expr.ExpressionException "MatrixTable row key: *<<<empty key>>>")<block_start>mt.key_rows_by().index_rows(mt.row_idx)<block_end><with_stmt>self.assertRaisesRegex(hl.expr.ExpressionException "MatrixTable col key: *<<<empty key>>>")<block_start>mt.key_cols_by().index_cols(mt.col_idx)<block_end><block_end><def_stmt>test_table_join self<block_start>ds=self.get_mt()<line_sep># test different row schemas
self.assertTrue(ds.union_cols(ds.drop(ds.info)).count_rows() 346)<block_end>@skip_when_service_backend('''The Service and Shuffler have no way of knowing the order in which rows appear in the original
dataset; as such, it is impossible to guarantee the ordering in `matches`.
https://hail.zulipchat.com/#narrow/stream/123011-Hail-Dev/topic/test_drop/near/235425714''')<def_stmt>test_table_product_join self<block_start>left=hl.utils.range_matrix_table(5 1)<line_sep>right=hl.utils.range_table(5)<line_sep>right=right.annotate(i=hl.range(right.idx+1 5)).explode('i').key_by('i')<line_sep>left=left.annotate_rows(matches=right.index(left.row_key all_matches=<true>))<line_sep>rows=left.rows()<line_sep>self.assertTrue(rows.all(rows.matches.map(<lambda>x:x.idx)<eq>hl.range(0 rows.row_idx)))<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_naive_coalesce self<block_start>mt=self.get_mt(min_partitions=8)<line_sep>self.assertEqual(mt.n_partitions() 8)<line_sep>repart=mt.naive_coalesce(2)<line_sep>self.assertTrue(mt._same(repart))<block_end><def_stmt>test_coalesce_with_no_rows self<block_start>mt=self.get_mt().filter_rows(<false>)<line_sep>self.assertEqual(mt.repartition(1).count_rows() 0)<block_end><def_stmt>test_literals_rebuild self<block_start>mt=hl.utils.range_matrix_table(1 1)<line_sep>mt=mt.annotate_rows(x=hl.if_else(hl.literal([1 2 3])[mt.row_idx]<l>hl.rand_unif(10 11) mt.globals hl.struct()))<line_sep>mt._force_count_rows()<block_end>@fails_service_backend()<def_stmt>test_globals_lowering self<block_start>mt=hl.utils.range_matrix_table(1 1).annotate_globals(x=1)<line_sep>lit=hl.literal(hl.utils.Struct(x=0))<line_sep>mt.annotate_rows(foo=hl.agg.collect(mt.globals<eq>lit))._force_count_rows()<line_sep>mt.annotate_cols(foo=hl.agg.collect(mt.globals<eq>lit))._force_count_rows()<line_sep>mt.filter_rows(mt.globals<eq>lit)._force_count_rows()<line_sep>mt.filter_cols(mt.globals<eq>lit)._force_count_rows()<line_sep>mt.filter_entries(mt.globals<eq>lit)._force_count_rows()<line_sep>(mt.group_rows_by(mt.row_idx).aggregate_rows(foo=hl.agg.collect(mt.globals<eq>lit)).aggregate(bar=hl.agg.collect(mt.globals<eq>lit))._force_count_rows())<line_sep>(mt.group_cols_by(mt.col_idx).aggregate_cols(foo=hl.agg.collect(mt.globals<eq>lit)).aggregate(bar=hl.agg.collect(mt.globals<eq>lit))._force_count_rows())<block_end>@skip_when_service_backend('ShuffleRead non-deterministically causes segfaults')<def_stmt>test_unions self<block_start>dataset=hl.import_vcf(resource('sample2.vcf'))<line_sep># test union_rows
ds1=dataset.filter_rows(dataset.locus.position%2<eq>1)<line_sep>ds2=dataset.filter_rows(dataset.locus.position%2<eq>0)<line_sep>datasets=[ds1 ds2]<line_sep>r1=ds1.union_rows(ds2)<line_sep>r2=hl.MatrixTable.union_rows(*datasets)<line_sep>self.assertTrue(r1._same(r2))<with_stmt>self.assertRaises(ValueError)<block_start>ds1.filter_cols(ds1.s.endswith('5')).union_rows(ds2)<block_end># test union_cols
ds=dataset.union_cols(dataset).union_cols(dataset)<for_stmt>s,count ds.aggregate_cols(agg.counter(ds.s)).items()<block_start>self.assertEqual(count 3)<block_end><block_end>@skip_when_service_backend('Shuffler encoding/decoding is broken.')<def_stmt>test_union_cols_example self<block_start>joined=hl.import_vcf(resource('joined.vcf'))<line_sep>left=hl.import_vcf(resource('joinleft.vcf'))<line_sep>right=hl.import_vcf(resource('joinright.vcf'))<line_sep>self.assertTrue(left.union_cols(right)._same(joined))<block_end><def_stmt>test_union_cols_distinct self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=mt.key_rows_by(x=mt.row_idx<floordiv>2)<assert_stmt>mt.union_cols(mt).count_rows()<eq>5<block_end>@skip_when_service_backend('flaky https://hail.zulipchat.com/#narrow/stream/127527-team/topic/CI.20Deploy.20Failure/near/237593731')<def_stmt>test_union_cols_outer self<block_start>r,c=10 10<line_sep>mt=hl.utils.range_matrix_table(2<times>r c)<line_sep>mt=mt.annotate_entries(entry=hl.tuple([mt.row_idx mt.col_idx]))<line_sep>mt2=hl.utils.range_matrix_table(2<times>r c)<line_sep>mt2=mt2.key_rows_by(row_idx=mt2.row_idx+r)<line_sep>mt2=mt2.key_cols_by(col_idx=mt2.col_idx+c)<line_sep>mt2=mt2.annotate_entries(entry=hl.tuple([mt2.row_idx mt2.col_idx]))<line_sep>expected=hl.utils.range_matrix_table(3<times>r 2<times>c)<line_sep>missing=hl.missing(hl.ttuple(hl.tint hl.tint))<line_sep>expected=expected.annotate_entries(entry=hl.if_else(expected.col_idx<l>c hl.if_else(expected.row_idx<l>2<times>r hl.tuple([expected.row_idx expected.col_idx]) missing) hl.if_else(expected.row_idx<ge>r hl.tuple([expected.row_idx expected.col_idx]) missing)))<assert_stmt>mt.union_cols(mt2 row_join_type='outer')._same(expected)<block_end><def_stmt>test_union_rows_different_col_schema self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt2=hl.utils.range_matrix_table(10 10)<line_sep>mt2=mt2.annotate_cols(x=mt2.col_idx+1)<line_sep>mt2=mt2.annotate_globals(g="foo")<line_sep>self.assertEqual(mt.union_rows(mt2).count_rows() 20)<block_end><def_stmt>test_index self<block_start>ds=self.get_mt(min_partitions=8)<line_sep>self.assertEqual(ds.n_partitions() 8)<line_sep>ds=ds.add_row_index('rowidx').add_col_index('colidx')<for_stmt>i,struct enumerate(ds.cols().select('colidx').collect())<block_start>self.assertEqual(i struct.colidx)<block_end><for_stmt>i,struct enumerate(ds.rows().select('rowidx').collect())<block_start>self.assertEqual(i struct.rowidx)<block_end><block_end><def_stmt>test_choose_cols self<block_start>ds=self.get_mt()<line_sep>indices=list(range(ds.count_cols()))<line_sep>random.shuffle(indices)<line_sep>old_order=ds.key_cols_by()['s'].collect()<line_sep>self.assertEqual(ds.choose_cols(indices).key_cols_by()['s'].collect() [old_order[i]<for>i indices])<line_sep>self.assertEqual(ds.choose_cols(list(range(10))).s.collect() old_order[:10])<block_end>@skip_when_service_backend('Shuffler encoding/decoding is broken.')<def_stmt>test_choose_cols_vs_explode self<block_start>ds=self.get_mt()<line_sep>ds2=ds.annotate_cols(foo=[0 0]).explode_cols('foo').drop('foo')<line_sep>self.assertTrue(ds.choose_cols(sorted(list(range(ds.count_cols()))<times>2))._same(ds2))<block_end><def_stmt>test_distinct_by_row self<block_start>orig_mt=hl.utils.range_matrix_table(10 
10)<line_sep>mt=orig_mt.key_rows_by(row_idx=orig_mt.row_idx<floordiv>2)<line_sep>self.assertTrue(mt.distinct_by_row().count_rows()<eq>5)<line_sep>self.assertTrue(orig_mt.union_rows(orig_mt).distinct_by_row()._same(orig_mt))<block_end><def_stmt>test_distinct_by_col self<block_start>orig_mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=orig_mt.key_cols_by(col_idx=orig_mt.col_idx<floordiv>2)<line_sep>self.assertTrue(mt.distinct_by_col().count_cols()<eq>5)<line_sep>self.assertTrue(orig_mt.union_cols(orig_mt).distinct_by_col()._same(orig_mt))<block_end><def_stmt>test_aggregation_with_no_aggregators self<block_start>mt=hl.utils.range_matrix_table(3 3)<line_sep>self.assertEqual(mt.group_rows_by(mt.row_idx).aggregate().count_rows() 3)<line_sep>self.assertEqual(mt.group_cols_by(mt.col_idx).aggregate().count_cols() 3)<block_end>@fails_service_backend()<def_stmt>test_computed_key_join_1 self<block_start>ds=self.get_mt()<line_sep>kt=hl.Table.parallelize([{'key':0 'value':<true>} {'key':1 'value':<false>}] hl.tstruct(key=hl.tint32 value=hl.tbool) key=['key'])<line_sep>ds=ds.annotate_rows(key=ds.locus.position%2)<line_sep>ds=ds.annotate_rows(value=kt[ds['key']]['value'])<line_sep>rt=ds.rows()<line_sep>self.assertTrue(rt.all(((rt.locus.position%2)<eq>0)<eq>rt['value']))<block_end>@fails_service_backend()<def_stmt>test_computed_key_join_2 self# multiple keys
<block_start>ds=self.get_mt()<line_sep>kt=hl.Table.parallelize([{'key1':0 'key2':0 'value':0} {'key1':1 'key2':0 'value':1} {'key1':0 'key2':1 'value':-2} {'key1':1 'key2':1 'value':-1}] hl.tstruct(key1=hl.tint32 key2=hl.tint32 value=hl.tint32) key=['key1' 'key2'])<line_sep>ds=ds.annotate_rows(key1=ds.locus.position%2 key2=ds.info.DP%2)<line_sep>ds=ds.annotate_rows(value=kt[ds.key1 ds.key2]['value'])<line_sep>rt=ds.rows()<line_sep>self.assertTrue(rt.all((rt.locus.position%2)-2<times>(rt.info.DP%2)<eq>rt['value']))<block_end>@fails_service_backend()<def_stmt>test_computed_key_join_3 self# duplicate row keys
<block_start>ds=self.get_mt()<line_sep>kt=hl.Table.parallelize([{'culprit':'InbreedingCoeff' 'foo':'bar' 'value':'IB'}] hl.tstruct(culprit=hl.tstr foo=hl.tstr value=hl.tstr) key=['culprit' 'foo'])<line_sep>ds=ds.annotate_rows(dsfoo='bar' info=ds.info.annotate(culprit=[ds.info.culprit "foo"]))<line_sep>ds=ds.explode_rows(ds.info.culprit)<line_sep>ds=ds.annotate_rows(value=kt[ds.info.culprit ds.dsfoo]['value'])<line_sep>rt=ds.rows()<line_sep>self.assertTrue(rt.all(hl.if_else(rt.info.culprit<eq>"InbreedingCoeff" rt['value']<eq>"IB" hl.is_missing(rt['value']))))<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_interval_join self<block_start>left=hl.utils.range_matrix_table(50 1 n_partitions=10)<line_sep>intervals=hl.utils.range_table(4)<line_sep>intervals=intervals.key_by(interval=hl.interval(intervals.idx<times>10 intervals.idx<times>10+5))<line_sep>left=left.annotate_rows(interval_matches=intervals.index(left.row_key))<line_sep>rows=left.rows()<line_sep>self.assertTrue(rows.all(hl.case().when(rows.row_idx%10<l>5 rows.interval_matches.idx<eq>rows.row_idx<floordiv>10).default(hl.is_missing(rows.interval_matches))))<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_interval_product_join self<block_start>left=hl.utils.range_matrix_table(50 1 n_partitions=8)<line_sep>intervals=hl.utils.range_table(25)<line_sep>intervals=intervals.key_by(interval=hl.interval(1+(intervals.idx<floordiv>5)<times>10+(intervals.idx%5) (1+intervals.idx<floordiv>5)<times>10-(intervals.idx%5)))<line_sep>intervals=intervals.annotate(i=intervals.idx%5)<line_sep>left=left.annotate_rows(interval_matches=intervals.index(left.row_key all_matches=<true>))<line_sep>rows=left.rows()<line_sep>self.assertTrue(rows.all(hl.sorted(rows.interval_matches.map(<lambda>x:x.i))<eq>hl.range(0 hl.min(rows.row_idx%10 10-rows.row_idx%10))))<block_end><def_stmt>test_entry_join_self self<block_start>mt1=hl.utils.range_matrix_table(10 10 n_partitions=4).choose_cols([9 8 7 6 5 4 3 2 1 0])<line_sep>mt1=mt1.annotate_entries(x=10<times>mt1.row_idx+mt1.col_idx)<line_sep>self.assertEqual(mt1[mt1.row_idx mt1.col_idx].dtype mt1.entry.dtype)<line_sep>mt_join=mt1.annotate_entries(x2=mt1[mt1.row_idx mt1.col_idx].x)<line_sep>mt_join_entries=mt_join.entries()<line_sep>self.assertTrue(mt_join_entries.all(mt_join_entries.x<eq>mt_join_entries.x2))<block_end>@fails_service_backend()<def_stmt>test_entry_join_const self<block_start>mt1=hl.utils.range_matrix_table(10 10 n_partitions=4)<line_sep>mt1=mt1.annotate_entries(x=mt1.row_idx+mt1.col_idx)<line_sep>mt2=hl.utils.range_matrix_table(1 1 n_partitions=1)<line_sep>mt2=mt2.annotate_entries(foo=10101)<line_sep>mt_join=mt1.annotate_entries(**mt2[mt1.row_idx<floordiv>100 mt1.col_idx<floordiv>100])<line_sep>mt_join_entries=mt_join.entries()<line_sep>self.assertTrue(mt_join_entries.all(mt_join_entries['foo']<eq>10101))<block_end><def_stmt>test_entry_join_missingness self<block_start>mt1=hl.utils.range_matrix_table(10 10 n_partitions=4)<line_sep>mt1=mt1.annotate_entries(x=mt1.row_idx+mt1.col_idx)<line_sep>mt2=mt1.filter_cols(mt1.col_idx%2<eq>0)<line_sep>mt2=mt2.filter_rows(mt2.row_idx%2<eq>0)<line_sep>mt_join=mt1.annotate_entries(x2=mt2[mt1.row_idx 
mt1.col_idx].x<times>10)<line_sep>mt_join_entries=mt_join.entries()<line_sep>kept=mt_join_entries.filter((mt_join_entries.row_idx%2<eq>0)&(mt_join_entries.col_idx%2<eq>0))<line_sep>removed=mt_join_entries.filter(~((mt_join_entries.row_idx%2<eq>0)&(mt_join_entries.col_idx%2<eq>0)))<line_sep>self.assertTrue(kept.all(hl.is_defined(kept.x2)&(kept.x2<eq>kept.x<times>10)))<line_sep>self.assertTrue(removed.all(hl.is_missing(removed.x2)))<block_end><def_stmt>test_entries_table_length_and_fields self<block_start>mt=hl.utils.range_matrix_table(10 10 n_partitions=4)<line_sep>mt=mt.annotate_entries(x=mt.col_idx+mt.row_idx)<line_sep>et=mt.entries()<line_sep>self.assertEqual(et.count() 100)<line_sep>self.assertTrue(et.all(et.x<eq>et.col_idx+et.row_idx))<block_end><def_stmt>test_entries_table_no_keys self<block_start>mt=hl.utils.range_matrix_table(2 2)<line_sep>mt=mt.annotate_entries(x=(mt.row_idx mt.col_idx))<line_sep>original_order=[hl.utils.Struct(row_idx=0 col_idx=0 x=(0 0)) hl.utils.Struct(row_idx=0 col_idx=1 x=(0 1)) hl.utils.Struct(row_idx=1 col_idx=0 x=(1 0)) hl.utils.Struct(row_idx=1 col_idx=1 x=(1 1)) ]<assert_stmt>mt.entries().collect()<eq>original_order<assert_stmt>mt.key_cols_by().entries().collect()<eq>original_order<assert_stmt>mt.key_rows_by().key_cols_by().entries().collect()<eq>original_order<assert_stmt>mt.key_rows_by().entries().collect()<eq>sorted(original_order key=<lambda>x:x.col_idx)<block_end>@fails_service_backend()<def_stmt>test_entries_table_with_out_of_order_row_key_fields self<block_start>mt=hl.utils.range_matrix_table(10 10 1)<line_sep>mt=mt.select_rows(key2=0 key1=mt.row_idx)<line_sep>mt=mt.key_rows_by(mt.key1 mt.key2)<line_sep>mt.entries()._force_count()<block_end><def_stmt>test_filter_cols_required_entries self<block_start>mt1=hl.utils.range_matrix_table(10 10 n_partitions=4)<line_sep>mt1=mt1.filter_cols(mt1.col_idx<l>3)<line_sep>self.assertEqual(len(mt1.entries().collect()) 30)<block_end><def_stmt>test_filter_cols_with_global_references self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>s=hl.literal({1 3 5 7})<line_sep>self.assertEqual(mt.filter_cols(s.contains(mt.col_idx)).count_cols() 4)<block_end><def_stmt>test_filter_cols_agg self<block_start>mt=hl.utils.range_matrix_table(10 10)<assert_stmt>mt.filter_cols(hl.agg.count()<g>5).count_cols()<eq>10<block_end><def_stmt>test_vcf_regression self<block_start>ds=hl.import_vcf(resource('33alleles.vcf'))<line_sep>self.assertEqual(ds.filter_rows(ds.alleles.length()<eq>2).count_rows() 0)<block_end><def_stmt>test_field_groups self<block_start>ds=self.get_mt()<line_sep>df=ds.annotate_rows(row_struct=ds.row).rows()<line_sep>self.assertTrue(df.all((df.info<eq>df.row_struct.info)&(df.qual<eq>df.row_struct.qual)))<line_sep>ds2=ds.add_col_index()<line_sep>df=ds2.annotate_cols(col_struct=ds2.col).cols()<line_sep>self.assertTrue(df.all((df.col_idx<eq>df.col_struct.col_idx)))<line_sep>df=ds.annotate_entries(entry_struct=ds.entry).entries()<line_sep>self.assertTrue(df.all(((hl.is_missing(df.GT)|(df.GT<eq>df.entry_struct.GT))&(df.AD<eq>df.entry_struct.AD))))<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_filter_partitions self<block_start>ds=self.get_mt(min_partitions=8)<line_sep>self.assertEqual(ds.n_partitions() 8)<line_sep>self.assertEqual(ds._filter_partitions([0 1 4]).n_partitions() 3)<line_sep>self.assertEqual(ds._filter_partitions(range(3)).n_partitions() 3)<line_sep>self.assertEqual(ds._filter_partitions([4 5 7] keep=<false>).n_partitions() 
5)<line_sep>self.assertTrue(ds._same(hl.MatrixTable.union_rows(ds._filter_partitions([0 3 7]) ds._filter_partitions([0 3 7] keep=<false>))))<block_end>@skip_when_service_backend('Shuffler encoding/decoding is broken.')<def_stmt>test_from_rows_table self<block_start>mt=hl.import_vcf(resource('sample.vcf'))<line_sep>mt=mt.annotate_globals(foo='bar')<line_sep>rt=mt.rows()<line_sep>rm=hl.MatrixTable.from_rows_table(rt)<line_sep>self.assertTrue(rm._same(mt.filter_cols(<false>).select_entries().key_cols_by().select_cols()))<block_end><def_stmt>test_sample_rows self<block_start>ds=self.get_mt()<line_sep>ds_small=ds.sample_rows(0.01)<line_sep>self.assertTrue(ds_small.count_rows()<l>ds.count_rows())<block_end>@fails_service_backend()<def_stmt>test_read_stored_cols self<block_start>ds=self.get_mt()<line_sep>ds=ds.annotate_globals(x='foo')<line_sep>f=new_temp_file(extension='mt')<line_sep>ds.write(f)<line_sep>t=hl.read_table(f+'/cols')<line_sep>self.assertTrue(ds.cols().key_by()._same(t))<block_end>@skip_when_service_backend('Shuffler encoding/decoding is broken.')<def_stmt>test_read_stored_rows self<block_start>ds=self.get_mt()<line_sep>ds=ds.annotate_globals(x='foo')<line_sep>f=new_temp_file(extension='mt')<line_sep>ds.write(f)<line_sep>t=hl.read_table(f+'/rows')<line_sep>self.assertTrue(ds.rows()._same(t))<block_end><def_stmt>test_read_stored_globals self<block_start>ds=self.get_mt()<line_sep>ds=ds.annotate_globals(x=5 baz='foo')<line_sep>f=new_temp_file(extension='mt')<line_sep>ds.write(f)<line_sep>t=hl.read_table(f+'/globals')<line_sep>self.assertTrue(ds.globals_table()._same(t))<block_end>@fails_service_backend()<def_stmt>test_indexed_read self<block_start>mt=hl.utils.range_matrix_table(2000 100 10)<line_sep>f=new_temp_file(extension='mt')<line_sep>mt.write(f)<line_sep>mt2=hl.read_matrix_table(f _intervals=[hl.Interval(start=150 end=250 includes_start=<true> includes_end=<false>) hl.Interval(start=250 end=500 includes_start=<true> includes_end=<false>) ])<line_sep>self.assertEqual(mt2.n_partitions() 2)<line_sep>self.assertTrue(mt.filter_rows((mt.row_idx<ge>150)&(mt.row_idx<l>500))._same(mt2))<line_sep>mt2=hl.read_matrix_table(f _intervals=[hl.Interval(start=150 end=250 includes_start=<true> includes_end=<false>) hl.Interval(start=250 end=500 includes_start=<true> includes_end=<false>) ] _filter_intervals=<true>)<line_sep>self.assertEqual(mt2.n_partitions() 3)<line_sep>self.assertTrue(mt.filter_rows((mt.row_idx<ge>150)&(mt.row_idx<l>500))._same(mt2))<block_end>@fails_service_backend()<def_stmt>test_indexed_read_vcf self<block_start>vcf=self.get_mt(10)<line_sep>f=new_temp_file(extension='mt')<line_sep>vcf.write(f)<line_sep>l1,l2,l3,l4=hl.Locus('20' 10000000) hl.Locus('20' 11000000) hl.Locus('20' 13000000) hl.Locus('20' 14000000)<line_sep>mt=hl.read_matrix_table(f _intervals=[hl.Interval(start=l1 end=l2) hl.Interval(start=l3 end=l4) ])<line_sep>self.assertEqual(mt.n_partitions() 2)<line_sep>p=(vcf.locus<ge>l1)&(vcf.locus<l>l2)<line_sep>q=(vcf.locus<ge>l3)&(vcf.locus<l>l4)<line_sep>self.assertTrue(vcf.filter_rows(p|q)._same(mt))<block_end>@fails_service_backend()<def_stmt>test_codecs_matrix self<block_start><import_from_stmt>hail.utils.java scala_object<line_sep>supported_codecs=scala_object(Env.hail().io 'BufferSpec').specs()<line_sep>ds=self.get_mt()<line_sep>temp=new_temp_file(extension='mt')<for_stmt>codec supported_codecs<block_start>ds.write(temp overwrite=<true> 
_codec_spec=codec.toString())<line_sep>ds2=hl.read_matrix_table(temp)<line_sep>self.assertTrue(ds._same(ds2))<block_end><block_end>@fails_service_backend()<def_stmt>test_codecs_table self<block_start><import_from_stmt>hail.utils.java scala_object<line_sep>supported_codecs=scala_object(Env.hail().io 'BufferSpec').specs()<line_sep>rt=self.get_mt().rows()<line_sep>temp=new_temp_file(extension='ht')<for_stmt>codec supported_codecs<block_start>rt.write(temp overwrite=<true> _codec_spec=codec.toString())<line_sep>rt2=hl.read_table(temp)<line_sep>self.assertTrue(rt._same(rt2))<block_end><block_end>@fails_service_backend()<def_stmt>test_fix3307_read_mt_wrong self<block_start>mt=hl.import_vcf(resource('sample2.vcf'))<line_sep>mt=hl.split_multi_hts(mt)<line_sep>mt.write('/tmp/foo.mt' overwrite=<true>)<line_sep>mt2=hl.read_matrix_table('/tmp/foo.mt')<line_sep>t=hl.read_table('/tmp/foo.mt/rows')<line_sep>self.assertTrue(mt.rows()._same(t))<line_sep>self.assertTrue(mt2.rows()._same(t))<line_sep>self.assertTrue(mt._same(mt2))<block_end><def_stmt>test_rename self<block_start>dataset=self.get_mt()<line_sep>renamed1=dataset.rename({'locus':'locus2' 'info':'info2' 's':'info'})<line_sep>self.assertEqual(renamed1['locus2']._type dataset['locus']._type)<line_sep>self.assertEqual(renamed1['info2']._type dataset['info']._type)<line_sep>self.assertEqual(renamed1['info']._type dataset['s']._type)<line_sep>self.assertEqual(renamed1['info']._indices renamed1._col_indices)<line_sep>self.assertFalse('locus'<in>renamed1._fields)<line_sep>self.assertFalse('s'<in>renamed1._fields)<with_stmt>self.assertRaises(ValueError)<block_start>dataset.rename({'locus':'info'})<block_end><with_stmt>self.assertRaises(ValueError)<block_start>dataset.rename({'locus':'a' 's':'a'})<block_end><with_stmt>self.assertRaises(LookupError)<block_start>dataset.rename({'foo':'a'})<block_end><block_end><def_stmt>test_range self<block_start>ds=hl.utils.range_matrix_table(100 10)<line_sep>self.assertEqual(ds.count_rows() 100)<line_sep>self.assertEqual(ds.count_cols() 10)<line_sep>et=ds.annotate_entries(entry_idx=10<times>ds.row_idx+ds.col_idx).entries().add_index()<line_sep>self.assertTrue(et.all(et.idx<eq>et.entry_idx))<block_end><def_stmt>test_filter_entries self<block_start>ds=hl.utils.range_matrix_table(100 10)<line_sep>ds=ds.annotate_rows(foo=5)# triggered a RV bug
ds=ds.annotate_cols(bar=5)<line_sep>ds=ds.filter_entries((ds.col_idx<times>ds.row_idx)%4<eq>0)<line_sep>entries=ds.entries()<line_sep>self.assertTrue(entries.all((entries.col_idx<times>entries.row_idx)%4<eq>0))<block_end><def_stmt>test_filter_na self<block_start>mt=hl.utils.range_matrix_table(1 1)<line_sep>self.assertEqual(mt.filter_rows(hl.missing(hl.tbool)).count_rows() 0)<line_sep>self.assertEqual(mt.filter_cols(hl.missing(hl.tbool)).count_cols() 0)<line_sep>self.assertEqual(mt.filter_entries(hl.missing(hl.tbool)).entries().count() 0)<block_end>@fails_service_backend()<def_stmt>test_to_table_on_various_fields self<block_start>mt=hl.utils.range_matrix_table(3 4)<line_sep>globe='the globe!'<line_sep>sample_ids=['Bob' 'Alice' 'David' 'Carol']<line_sep>entries=[1 0 3 2]<line_sep>rows=['1:3:A:G' '1:2:A:G' '1:0:A:G']<line_sep>sorted_rows=sorted(rows)<line_sep>mt=mt.annotate_globals(globe=globe)<line_sep>mt=mt.annotate_cols(s=hl.array(sample_ids)[mt.col_idx]).key_cols_by('s')<line_sep>mt=mt.annotate_entries(e=hl.array(entries)[mt.col_idx])<line_sep>mt=mt.annotate_rows(r=hl.array(rows)[mt.row_idx]).key_rows_by('r')<line_sep>self.assertEqual(mt.globe.collect() [globe])<line_sep>self.assertEqual(mt.s.collect() sample_ids)<line_sep>self.assertEqual((mt.s+'1').collect() [s+'1'<for>s sample_ids])<line_sep>self.assertEqual(('1'+mt.s).collect() ['1'+s<for>s sample_ids])<line_sep>self.assertEqual(mt.s.take(1) [sample_ids[0]])<line_sep>self.assertEqual(mt.e.collect() entries<times>3)<line_sep>self.assertEqual(mt.e.take(1) [entries[0]])<line_sep>self.assertEqual(mt.row_idx.collect() [2 1 0])<line_sep>self.assertEqual(mt.r.collect() sorted_rows)<line_sep>self.assertEqual(mt.r.take(1) [sorted_rows[0]])<line_sep>self.assertEqual(mt.col_key.collect() [hl.Struct(s=s)<for>s sample_ids])<line_sep>self.assertEqual(mt.col.collect() [hl.Struct(s=s col_idx=i)<for>i,s enumerate(sample_ids)])<line_sep>self.assertEqual(mt.row_key.collect() [hl.Struct(r=r)<for>r sorted_rows])<line_sep>self.assertEqual(mt.row.collect() sorted([hl.Struct(r=r row_idx=i)<for>i,r enumerate(rows)] key=<lambda>x:x.r))<line_sep>self.assertEqual(mt.entry.collect() [hl.Struct(e=e)<for>_ sorted_rows<for>e entries])<line_sep>self.assertEqual(mt.cols().s.collect() sorted(sample_ids))<line_sep>self.assertEqual(mt.cols().s.take(1) [sorted(sample_ids)[0]])<line_sep>self.assertEqual(mt.entries().e.collect() sorted(entries)<times>3)<line_sep>self.assertEqual(mt.entries().e.take(1) [sorted(entries)[0]])<line_sep>self.assertEqual(mt.rows().row_idx.collect() [2 1 0])<line_sep>self.assertEqual(mt.rows().r.collect() sorted_rows)<line_sep>self.assertEqual(mt.rows().r.take(1) [sorted_rows[0]])<block_end>@fails_service_backend()<def_stmt>test_order_by self<block_start>ht=hl.utils.range_table(10)<line_sep>self.assertEqual(ht.order_by('idx').idx.collect() list(range(10)))<line_sep>self.assertEqual(ht.order_by(hl.asc('idx')).idx.collect() list(range(10)))<line_sep>self.assertEqual(ht.order_by(hl.desc('idx')).idx.collect() list(range(10))[::-1])<block_end><def_stmt>test_order_by_complex_exprs self<block_start>ht=hl.utils.range_table(10)<assert_stmt>ht.order_by(-ht.idx).idx.collect()<eq>list(range(10))[::-1]<block_end>@fails_service_backend()<def_stmt>test_order_by_intervals self<block_start>intervals={0:hl.Interval(0 3 includes_start=<true> includes_end=<false>) 1:hl.Interval(0 4 includes_start=<true> includes_end=<true>) 2:hl.Interval(1 4 includes_start=<true> includes_end=<false>) 3:hl.Interval(0 4 includes_start=<false> includes_end=<false>) 4:hl.Interval(0 4 
includes_start=<true> includes_end=<false>)}<line_sep>ht=hl.utils.range_table(5)<line_sep>ht=ht.annotate_globals(ilist=intervals)<line_sep>ht=ht.annotate(interval=ht['ilist'][ht['idx']])<line_sep>ht=ht.order_by(ht['interval'])<line_sep>ordered=ht['interval'].collect()<line_sep>expected=[intervals[i]<for>i [0 4 1 3 2]]<line_sep>self.assertEqual(ordered expected)<block_end><def_stmt>test_range_matrix_table self<block_start>mt=hl.utils.range_matrix_table(13 7 n_partitions=5)<line_sep>self.assertEqual(mt.globals.dtype hl.tstruct())<line_sep>self.assertEqual(mt.row.dtype hl.tstruct(row_idx=hl.tint32))<line_sep>self.assertEqual(mt.col.dtype hl.tstruct(col_idx=hl.tint32))<line_sep>self.assertEqual(mt.entry.dtype hl.tstruct())<line_sep>self.assertEqual(list(mt.row_key) ['row_idx'])<line_sep>self.assertEqual(list(mt.col_key) ['col_idx'])<line_sep>self.assertEqual([r.row_idx<for>r mt.rows().collect()] list(range(13)))<line_sep>self.assertEqual([r.col_idx<for>r mt.cols().collect()] list(range(7)))<block_end><def_stmt>test_range_matrix_table_0_rows_0_cols self<block_start>mt=hl.utils.range_matrix_table(0 0)<line_sep>self.assertEqual(mt.col_idx.collect() [])<line_sep>self.assertEqual(mt.row_idx.collect() [])<line_sep>mt=mt.annotate_entries(x=mt.row_idx<times>mt.col_idx)<line_sep>self.assertEqual(mt.x.collect() [])<block_end><def_stmt>test_make_table self<block_start>mt=hl.utils.range_matrix_table(3 2)<line_sep>mt=mt.select_entries(x=mt.row_idx<times>mt.col_idx)<line_sep>mt=mt.key_cols_by(col_idx=hl.str(mt.col_idx))<line_sep>t=hl.Table.parallelize([{'row_idx':0 '0.x':0 '1.x':0} {'row_idx':1 '0.x':0 '1.x':1} {'row_idx':2 '0.x':0 '1.x':2}] hl.tstruct(**{'row_idx':hl.tint32 '0.x':hl.tint32 '1.x':hl.tint32}) key='row_idx')<line_sep>self.assertTrue(mt.make_table()._same(t))<block_end><def_stmt>test_make_table_empty_entry_field self<block_start>mt=hl.utils.range_matrix_table(3 2)<line_sep>mt=mt.select_entries(**{'':mt.row_idx<times>mt.col_idx})<line_sep>mt=mt.key_cols_by(col_idx=hl.str(mt.col_idx))<line_sep>t=mt.make_table()<line_sep>self.assertEqual(t.row.dtype hl.tstruct(**{'row_idx':hl.tint32 '0':hl.tint32 '1':hl.tint32}))<block_end><def_stmt>test_make_table_sep self<block_start>mt=hl.utils.range_matrix_table(3 2)<line_sep>mt=mt.select_entries(x=mt.row_idx<times>mt.col_idx)<line_sep>mt=mt.key_cols_by(col_idx=hl.str(mt.col_idx))<line_sep>t=mt.make_table()<assert_stmt>list(t.row)<eq>['row_idx' '0.x' '1.x']<line_sep>t=mt.make_table(separator='__')<assert_stmt>list(t.row)<eq>['row_idx' '0__x' '1__x']<block_end><def_stmt>test_make_table_row_equivalence self<block_start>mt=hl.utils.range_matrix_table(3 3)<line_sep>mt=mt.annotate_rows(r1=hl.rand_norm() r2=hl.rand_norm())<line_sep>mt=mt.annotate_entries(e1=hl.rand_norm() e2=hl.rand_norm())<line_sep>mt=mt.key_cols_by(col_idx=hl.str(mt.col_idx))<assert_stmt>mt.make_table().select(*mt.row_value)._same(mt.rows())<block_end><def_stmt>test_make_table_na_error self<block_start>mt=hl.utils.range_matrix_table(3 3).key_cols_by(s=hl.missing('str'))<line_sep>mt=mt.annotate_entries(e1=1)<with_stmt>pytest.raises(ValueError)<block_start>mt.make_table()<block_end><block_end><def_stmt>test_transmute self<block_start>mt=(hl.utils.range_matrix_table(1 1).annotate_globals(g1=0 g2=0).annotate_cols(c1=0 c2=0).annotate_rows(r1=0 r2=0).annotate_entries(e1=0 e2=0))<line_sep>self.assertEqual(mt.transmute_globals(g3=mt.g2+1).globals.dtype hl.tstruct(g1=hl.tint g3=hl.tint))<line_sep>self.assertEqual(mt.transmute_rows(r3=mt.r2+1).row_value.dtype hl.tstruct(r1=hl.tint 
r3=hl.tint))<line_sep>self.assertEqual(mt.transmute_cols(c3=mt.c2+1).col_value.dtype hl.tstruct(c1=hl.tint c3=hl.tint))<line_sep>self.assertEqual(mt.transmute_entries(e3=mt.e2+1).entry.dtype hl.tstruct(e1=hl.tint e3=hl.tint))<block_end><def_stmt>test_transmute_agg self<block_start>mt=hl.utils.range_matrix_table(1 1).annotate_entries(x=5)<line_sep>mt=mt.transmute_rows(y=hl.agg.mean(mt.x))<block_end><def_stmt>test_agg_explode self<block_start>t=hl.Table.parallelize([hl.struct(a=[1 2]) hl.struct(a=hl.empty_array(hl.tint32)) hl.struct(a=hl.missing(hl.tarray(hl.tint32))) hl.struct(a=[3]) hl.struct(a=[hl.missing(hl.tint32)])])<line_sep>self.assertCountEqual(t.aggregate(hl.agg.explode(<lambda>elt:hl.agg.collect(elt) t.a)) [1 2 <none> 3])<block_end><def_stmt>test_agg_call_stats self<block_start>t=hl.Table.parallelize([hl.struct(c=hl.call(0 0)) hl.struct(c=hl.call(0 1)) hl.struct(c=hl.call(0 2 phased=<true>)) hl.struct(c=hl.call(1)) hl.struct(c=hl.call(0)) hl.struct(c=hl.call())])<line_sep>actual=t.aggregate(hl.agg.call_stats(t.c ['A' 'T' 'G']))<line_sep>expected=hl.struct(AC=[5 2 1] AF=[5.0/8.0 2.0/8.0 1.0/8.0] AN=8 homozygote_count=[1 0 0])<line_sep>self.assertTrue(hl.Table.parallelize([actual]) hl.Table.parallelize([expected]))<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_hardy_weinberg_test self<block_start>mt=hl.import_vcf(resource('HWE_test.vcf'))<line_sep>mt_two_sided=mt.select_rows(**hl.agg.hardy_weinberg_test(mt.GT one_sided=<false>))<line_sep>rt_two_sided=mt_two_sided.rows()<line_sep>expected_two_sided=hl.Table.parallelize([hl.struct(locus=hl.locus('20' pos) alleles=alleles het_freq_hwe=r p_value=p)<for>(pos alleles r p) [(1 ['A' 'G'] 0.0 0.5) (2 ['A' 'G'] 0.25 0.5) (3 ['T' 'C'] 0.5357142857142857 0.21428571428571427) (4 ['T' 'A'] 0.5714285714285714 0.6571428571428573) (5 ['G' 'A'] 0.3333333333333333 0.5)]] key=['locus' 'alleles'])<line_sep>self.assertTrue(rt_two_sided.filter(rt_two_sided.locus.position<ne>6)._same(expected_two_sided))<line_sep>rt6_two_sided=rt_two_sided.filter(rt_two_sided.locus.position<eq>6).collect()[0]<line_sep>self.assertEqual(rt6_two_sided['p_value'] 0.5)<line_sep>self.assertTrue(math.isnan(rt6_two_sided['het_freq_hwe']))<line_sep>mt_one_sided=mt.select_rows(**hl.agg.hardy_weinberg_test(mt.GT one_sided=<true>))<line_sep>rt_one_sided=mt_one_sided.rows()<line_sep>expected_one_sided=hl.Table.parallelize([hl.struct(locus=hl.locus('20' pos) alleles=alleles het_freq_hwe=r p_value=p)<for>(pos alleles r p) [(1 ['A' 'G'] 0.0 0.5) (2 ['A' 'G'] 0.25 0.5) (3 ['T' 'C'] 0.5357142857142857 0.7857142857142857) (4 ['T' 'A'] 0.5714285714285714 0.5714285714285715) (5 ['G' 'A'] 0.3333333333333333 0.5)]] key=['locus' 'alleles'])<line_sep>self.assertTrue(rt_one_sided.filter(rt_one_sided.locus.position<ne>6)._same(expected_one_sided))<line_sep>rt6_one_sided=rt_one_sided.filter(rt_one_sided.locus.position<eq>6).collect()[0]<line_sep>self.assertEqual(rt6_one_sided['p_value'] 0.5)<line_sep>self.assertTrue(math.isnan(rt6_one_sided['het_freq_hwe']))<block_end><def_stmt>test_hw_func_and_agg_agree self<block_start>mt=hl.import_vcf(resource('sample.vcf'))<line_sep>mt_two_sided=mt.annotate_rows(stats=hl.agg.call_stats(mt.GT mt.alleles) hw=hl.agg.hardy_weinberg_test(mt.GT one_sided=<false>))<line_sep>mt_two_sided=mt_two_sided.annotate_rows(hw2=hl.hardy_weinberg_test(mt_two_sided.stats.homozygote_count[0] mt_two_sided.stats.AC[1]-2<times>mt_two_sided.stats.homozygote_count[1] mt_two_sided.stats.homozygote_count[1] 
one_sided=<false>))<line_sep>rt_two_sided=mt_two_sided.rows()<line_sep>self.assertTrue(rt_two_sided.all(rt_two_sided.hw<eq>rt_two_sided.hw2))<line_sep>mt_one_sided=mt.annotate_rows(stats=hl.agg.call_stats(mt.GT mt.alleles) hw=hl.agg.hardy_weinberg_test(mt.GT one_sided=<true>))<line_sep>mt_one_sided=mt_one_sided.annotate_rows(hw2=hl.hardy_weinberg_test(mt_one_sided.stats.homozygote_count[0] mt_one_sided.stats.AC[1]-2<times>mt_one_sided.stats.homozygote_count[1] mt_one_sided.stats.homozygote_count[1] one_sided=<true>))<line_sep>rt_one_sided=mt_one_sided.rows()<line_sep>self.assertTrue(rt_one_sided.all(rt_one_sided.hw<eq>rt_one_sided.hw2))<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_write_stage_locally self<block_start>mt=self.get_mt()<line_sep>f=new_temp_file(extension='mt')<line_sep>mt.write(f stage_locally=<true>)<line_sep>mt2=hl.read_matrix_table(f)<line_sep>self.assertTrue(mt._same(mt2))<block_end>@skip_when_service_backend('ShuffleRead non-deterministically causes segfaults')<def_stmt>test_write_checkpoint_file self<block_start>mt=self.get_mt()<line_sep>f=new_temp_file(extension='mt')<line_sep>cp=new_temp_file()<line_sep>mt.write(f _checkpoint_file=cp)<line_sep>mt2=hl.read_matrix_table(f)<line_sep>self.assertTrue(mt._same(mt2))<block_end>@fails_service_backend()<def_stmt>test_write_no_parts self<block_start>mt=hl.utils.range_matrix_table(10 10 2).filter_rows(<false>)<line_sep>path=new_temp_file(extension='mt')<line_sep>path2=new_temp_file(extension='mt')<assert_stmt>mt.checkpoint(path)._same(mt)<line_sep>hl.read_matrix_table(path _drop_rows=<true>).write(path2)<block_end><def_stmt>test_nulls_in_distinct_joins self# MatrixAnnotateRowsTable uses left distinct join
<block_start>mr=hl.utils.range_matrix_table(7 3 4)<line_sep>matrix1=mr.key_rows_by(new_key=hl.if_else((mr.row_idx<eq>3)|(mr.row_idx<eq>5) hl.missing(hl.tint32) mr.row_idx))<line_sep>matrix2=mr.key_rows_by(new_key=hl.if_else((mr.row_idx<eq>4)|(mr.row_idx<eq>6) hl.missing(hl.tint32) mr.row_idx))<line_sep>joined=matrix1.select_rows(idx1=matrix1.row_idx idx2=matrix2.rows()[matrix1.new_key].row_idx)<def_stmt>row new_key idx1 idx2<block_start><return>hl.Struct(new_key=new_key idx1=idx1 idx2=idx2)<block_end>expected=[row(0 0 0) row(1 1 1) row(2 2 2) row(4 4 <none>) row(6 6 <none>) row(<none> 3 <none>) row(<none> 5 <none>)]<line_sep>self.assertEqual(joined.rows().collect() expected)<line_sep># union_cols uses inner distinct join
matrix1=matrix1.annotate_entries(ridx=matrix1.row_idx cidx=matrix1.col_idx)<line_sep>matrix2=matrix2.annotate_entries(ridx=matrix2.row_idx cidx=matrix2.col_idx)<line_sep>matrix2=matrix2.key_cols_by(col_idx=matrix2.col_idx+3)<line_sep>expected=hl.utils.range_matrix_table(3 6 1)<line_sep>expected=expected.key_rows_by(new_key=expected.row_idx)<line_sep>expected=expected.annotate_entries(ridx=expected.row_idx cidx=expected.col_idx%3)<line_sep>self.assertTrue(matrix1.union_cols(matrix2)._same(expected))<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_row_joins_into_table self<block_start>rt=hl.utils.range_matrix_table(9 13 3)<line_sep>mt1=rt.key_rows_by(idx=rt.row_idx)<line_sep>mt1=mt1.select_rows(v=mt1.idx+2)<line_sep>mt2=rt.key_rows_by(idx=rt.row_idx idx2=rt.row_idx+1)<line_sep>mt2=mt2.select_rows(v=mt2.idx+2)<line_sep>t1=hl.utils.range_table(10 3)<line_sep>t2=t1.key_by(t1.idx idx2=t1.idx+1)<line_sep>t1=t1.select(v=t1.idx+2)<line_sep>t2=t2.select(v=t2.idx+2)<line_sep>tinterval1=t1.key_by(k=hl.interval(t1.idx t1.idx <true> <true>))<line_sep>tinterval1=tinterval1.select(v=tinterval1.idx+2)<line_sep>tinterval2=t2.key_by(k=hl.interval(t2.key t2.key <true> <true>))<line_sep>tinterval2=tinterval2.select(v=tinterval2.idx+2)<line_sep>values=[hl.Struct(v=i+2)<for>i range(9)]<line_sep># join on mt row key
self.assertEqual(t1.index(mt1.row_key).collect() values)<line_sep>self.assertEqual(t2.index(mt2.row_key).collect() values)<line_sep>self.assertEqual(t1.index(mt1.idx).collect() values)<line_sep>self.assertEqual(t2.index(mt2.idx mt2.idx2).collect() values)<line_sep>self.assertEqual(t1.index(mt2.idx).collect() values)<with_stmt>self.assertRaises(hl.expr.ExpressionException)<block_start>t2.index(mt2.idx).collect()<block_end><with_stmt>self.assertRaises(hl.expr.ExpressionException)<block_start>t2.index(mt1.row_key).collect()<block_end># join on not mt row key
self.assertEqual(t1.index(mt1.v).collect() [hl.Struct(v=i+2)<for>i range(2 10)]+[<none>])<line_sep>self.assertEqual(t2.index(mt2.idx2 mt2.v).collect() [hl.Struct(v=i+2)<for>i range(1 10)])<with_stmt>self.assertRaises(hl.expr.ExpressionException)<block_start>t2.index(mt2.v).collect()<block_end># join on interval of first field of mt row key
self.assertEqual(tinterval1.index(mt1.idx).collect() values)<line_sep>self.assertEqual(tinterval1.index(mt1.row_key).collect() values)<line_sep>self.assertEqual(tinterval1.index(mt2.idx).collect() values)<with_stmt>self.assertRaises(hl.expr.ExpressionException)<block_start>tinterval1.index(mt2.row_key).collect()<block_end><with_stmt>self.assertRaises(hl.expr.ExpressionException)<block_start>tinterval2.index(mt2.idx).collect()<block_end><with_stmt>self.assertRaises(hl.expr.ExpressionException)<block_start>tinterval2.index(mt2.row_key).collect()<block_end><with_stmt>self.assertRaises(hl.expr.ExpressionException)<block_start>tinterval2.index(mt2.idx mt2.idx2).collect()<block_end><block_end><def_stmt>test_refs_with_process_joins self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=mt.annotate_entries(a_literal=hl.literal(['a']) a_col_join=hl.is_defined(mt.cols()[mt.col_key]) a_row_join=hl.is_defined(mt.rows()[mt.row_key]) an_entry_join=hl.is_defined(mt[mt.row_key mt.col_key]) the_global_failure=hl.if_else(<true> mt.globals hl.missing(mt.globals.dtype)) the_row_failure=hl.if_else(<true> mt.row hl.missing(mt.row.dtype)) the_col_failure=hl.if_else(<true> mt.col hl.missing(mt.col.dtype)) the_entry_failure=hl.if_else(<true> mt.entry hl.missing(mt.entry.dtype)) )<line_sep>mt.count()<block_end><def_stmt>test_aggregate_localize_false self<block_start>dim1,dim2=10 10<line_sep>mt=hl.utils.range_matrix_table(dim1 dim2)<line_sep>mt=mt.annotate_entries(x=mt.aggregate_rows(hl.agg.max(mt.row_idx) _localize=<false>)+mt.aggregate_cols(hl.agg.max(mt.col_idx) _localize=<false>)+mt.aggregate_entries(hl.agg.max(mt.row_idx<times>mt.col_idx) _localize=<false>))<assert_stmt>mt.x.take(1)[0]<eq>(dim1-1)+(dim2-1)+(dim1-1)<times>(dim2-1)<block_end><def_stmt>test_agg_cols_filter self<block_start>t=hl.utils.range_matrix_table(1 10)<line_sep>tests=[(agg.filter(t.col_idx<g>7 agg.collect(t.col_idx+1).append(0)) [9 10 0]) (agg.filter(t.col_idx<g>7 agg.explode(<lambda>elt:agg.collect(elt+1).append(0) [t.col_idx t.col_idx+1])) [9 10 10 11 0]) (agg.filter(t.col_idx<g>7 agg.group_by(t.col_idx%3 hl.array(agg.collect_as_set(t.col_idx+1)).append(0))) {0:[10 0] 2:[9 0]})]<for_stmt>aggregation,expected tests<block_start>self.assertEqual(t.select_rows(result=aggregation).result.collect()[0] expected)<block_end><block_end><def_stmt>test_agg_cols_explode self<block_start>t=hl.utils.range_matrix_table(1 10)<line_sep>tests=[(agg.explode(<lambda>elt:agg.collect(elt+1).append(0) hl.if_else(t.col_idx<g>7 [t.col_idx t.col_idx+1] hl.empty_array(hl.tint32))) [9 10 10 11 0]) (agg.explode(<lambda>elt:agg.explode(<lambda>elt2:agg.collect(elt2+1).append(0) [elt elt+1]) hl.if_else(t.col_idx<g>7 [t.col_idx t.col_idx+1] hl.empty_array(hl.tint32))) [9 10 10 11 10 11 11 12 0]) (agg.explode(<lambda>elt:agg.filter(elt<g>8 agg.collect(elt+1).append(0)) hl.if_else(t.col_idx<g>7 [t.col_idx t.col_idx+1] hl.empty_array(hl.tint32))) [10 10 11 0]) (agg.explode(<lambda>elt:agg.group_by(elt%3 agg.collect(elt+1).append(0)) hl.if_else(t.col_idx<g>7 [t.col_idx t.col_idx+1] hl.empty_array(hl.tint32))) {0:[10 10 0] 1:[11 0] 2:[9 0]})]<for_stmt>aggregation,expected tests<block_start>self.assertEqual(t.select_rows(result=aggregation).result.collect()[0] expected)<block_end><block_end><def_stmt>test_agg_cols_group_by self<block_start>t=hl.utils.range_matrix_table(1 10)<line_sep>tests=[(agg.group_by(t.col_idx%2 hl.array(agg.collect_as_set(t.col_idx+1)).append(0)) {0:[1 3 5 7 9 0] 1:[2 4 6 8 10 0]}) (agg.group_by(t.col_idx%3 agg.filter(t.col_idx<g>7 
hl.array(agg.collect_as_set(t.col_idx+1)).append(0))) {0:[10 0] 1:[0] 2:[9 0]}) (agg.group_by(t.col_idx%3 agg.explode(<lambda>elt:agg.collect(elt+1).append(0) hl.if_else(t.col_idx<g>7 [t.col_idx t.col_idx+1] hl.empty_array(hl.tint32)))) {0:[10 11 0] 1:[0] 2:[9 10 0]}) ]<for_stmt>aggregation,expected tests<block_start>self.assertEqual(t.select_rows(result=aggregation).result.collect()[0] expected)<block_end><block_end><def_stmt>localize_entries_with_both_none_is_rows_table self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=mt.select_entries(x=mt.row_idx<times>mt.col_idx)<line_sep>localized=mt.localize_entries(entries_array_field_name=<none> columns_array_field_name=<none>)<line_sep>rows_table=mt.rows()<assert_stmt>rows_table.collect()<eq>localized.collect()<assert_stmt>rows_table.globals_table().collect()<eq>localized.globals_table().collect()<block_end><def_stmt>localize_entries_with_none_cols_adds_no_globals self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=mt.select_entries(x=mt.row_idx<times>mt.col_idx)<line_sep>localized=mt.localize_entries(entries_array_field_name=Env.get_uid() columns_array_field_name=<none>)<assert_stmt>mt.globals_table().collect()<eq>localized.globals_table().collect()<block_end><def_stmt>localize_entries_with_none_entries_changes_no_rows self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=mt.select_entries(x=mt.row_idx<times>mt.col_idx)<line_sep>localized=mt.localize_entries(entries_array_field_name=<none> columns_array_field_name=Env.get_uid())<line_sep>rows_table=mt.rows()<assert_stmt>rows_table.collect()<eq>localized.collect()<block_end><def_stmt>localize_entries_creates_arrays_of_entries_and_array_of_cols self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=mt.select_entries(x=mt.row_idx<times>mt.col_idx)<line_sep>localized=mt.localize_entries(entries_array_field_name='entries' columns_array_field_name='cols')<assert_stmt>[[x<times>y<for>x range(0 10)]<for>y range(0 10)]<eq>localized.entries.collect()<assert_stmt>range(0 10)<eq>localized.cols.collect()<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_multi_write self<block_start>mt=self.get_mt()<line_sep>f=new_temp_file()<line_sep>hl.experimental.write_matrix_tables([mt mt] f)<line_sep>path1=f+'0.mt'<line_sep>path2=f+'1.mt'<line_sep>mt1=hl.read_matrix_table(path1)<line_sep>mt2=hl.read_matrix_table(path2)<line_sep>self.assertTrue(mt._same(mt1))<line_sep>self.assertTrue(mt._same(mt2))<line_sep>self.assertTrue(mt1._same(mt2))<block_end><def_stmt>test_matrix_type_equality self<block_start>mt=hl.utils.range_matrix_table(1 1)<line_sep>mt2=mt.annotate_entries(foo=1)<assert_stmt>mt._type<eq>mt._type<assert_stmt>mt._type<ne>mt2._type<block_end><def_stmt>test_entry_filtering self<block_start>mt=hl.utils.range_matrix_table(10 10)<line_sep>mt=mt.filter_entries((mt.col_idx+mt.row_idx)%2<eq>0)<assert_stmt>mt.aggregate_entries(hl.agg.count())<eq>50<assert_stmt>all(x<eq>5<for>x mt.annotate_cols(x=hl.agg.count()).x.collect())<assert_stmt>all(x<eq>5<for>x mt.annotate_rows(x=hl.agg.count()).x.collect())<line_sep>mt=mt.unfilter_entries()<assert_stmt>mt.aggregate_entries(hl.agg.count())<eq>100<assert_stmt>all(x<eq>10<for>x mt.annotate_cols(x=hl.agg.count()).x.collect())<assert_stmt>all(x<eq>10<for>x mt.annotate_rows(x=hl.agg.count()).x.collect())<block_end><def_stmt>test_entry_filter_stats self<block_start>mt=hl.utils.range_matrix_table(40 20)<line_sep>mt=mt.filter_entries((mt.row_idx%4<eq>0)&(mt.col_idx%4<eq>0) 
keep=<false>)<line_sep>mt=mt.compute_entry_filter_stats()<line_sep>row_expected=hl.dict({<true>:hl.struct(n_filtered=5 n_remaining=15 fraction_filtered=hl.float32(0.25)) <false>:hl.struct(n_filtered=0 n_remaining=20 fraction_filtered=hl.float32(0.0))})<assert_stmt>mt.aggregate_rows(hl.agg.all(mt.entry_stats_row<eq>row_expected[mt.row_idx%4<eq>0]))<line_sep>col_expected=hl.dict({<true>:hl.struct(n_filtered=10 n_remaining=30 fraction_filtered=hl.float32(0.25)) <false>:hl.struct(n_filtered=0 n_remaining=40 fraction_filtered=hl.float32(0.0))})<assert_stmt>mt.aggregate_cols(hl.agg.all(mt.entry_stats_col<eq>col_expected[mt.col_idx%4<eq>0]))<block_end><def_stmt>test_annotate_col_agg_lowering self<block_start>mt=hl.utils.range_matrix_table(10 10 2)<line_sep>mt=mt.annotate_cols(c1=[mt.col_idx mt.col_idx<times>2])<line_sep>mt=mt.annotate_entries(e1=mt.col_idx+mt.row_idx e2=[mt.col_idx<times>mt.row_idx mt.col_idx<times>mt.row_idx<power>2])<line_sep>common_ref=mt.c1[1]<line_sep>mt=mt.annotate_cols(exploded=hl.agg.explode(<lambda>e:common_ref+hl.agg.sum(e) mt.e2) array=hl.agg.array_agg(<lambda>e:common_ref+hl.agg.sum(e) mt.e2) filt=hl.agg.filter(mt.e1<l>5 hl.agg.sum(mt.e1)+common_ref) grouped=hl.agg.group_by(mt.e1%5 hl.agg.sum(mt.e1)+common_ref))<line_sep>mt.cols()._force_count()<block_end><def_stmt>test_annotate_rows_scan_lowering self<block_start>mt=hl.utils.range_matrix_table(10 10 2)<line_sep>mt=mt.annotate_rows(r1=[mt.row_idx mt.row_idx<times>2])<line_sep>common_ref=mt.r1[1]<line_sep>mt=mt.annotate_rows(exploded=hl.scan.explode(<lambda>e:common_ref+hl.scan.sum(e) mt.r1) array=hl.scan.array_agg(<lambda>e:common_ref+hl.scan.sum(e) mt.r1) filt=hl.scan.filter(mt.row_idx<l>5 hl.scan.sum(mt.row_idx)+common_ref) grouped=hl.scan.group_by(mt.row_idx%5 hl.scan.sum(mt.row_idx)+common_ref) an_agg=hl.agg.sum(mt.row_idx<times>mt.col_idx))<line_sep>mt.cols()._force_count()<block_end><def_stmt>test_show_runs self<block_start>mt=self.get_mt()<line_sep>mt.show()<block_end><def_stmt>test_show_header self<block_start>mt=hl.utils.range_matrix_table(1 1)<line_sep>mt=mt.annotate_entries(x=1)<line_sep>mt=mt.key_cols_by(col_idx=mt.col_idx+10)<line_sep>expected=('+---------+-------+\n'<concat>'| row_idx | 10.x |\n'<concat>'+---------+-------+\n'<concat>'| int32 | int32 |\n'<concat>'+---------+-------+\n'<concat>'| 0 | 1 |\n'<concat>'+---------+-------+\n')<line_sep>actual=mt.show(handler=str)<assert_stmt>actual<eq>expected<block_end><def_stmt>test_partitioned_write self<block_start>mt=hl.utils.range_matrix_table(40 3 5)<def_stmt>test_parts parts expected=mt<block_start>parts=[hl.Interval(start=hl.Struct(row_idx=s) end=hl.Struct(row_idx=e) includes_start=_is includes_end=ie)<for>(s e _is ie) parts]<line_sep>tmp=new_temp_file(extension='mt')<line_sep>mt.write(tmp _partitions=parts)<line_sep>mt2=hl.read_matrix_table(tmp)<line_sep>self.assertEqual(mt2.n_partitions() len(parts))<line_sep>self.assertTrue(mt2._same(expected))<block_end>test_parts([(0 40 <true> <false>)])<line_sep>test_parts([(-34 -31 <true> <true>) (-30 9 <true> <true>) (10 107 <true> <true>) (108 1000 <true> <true>)])<line_sep>test_parts([(0 5 <true> <false>) (35 40 <true> <true>)] mt.filter_rows((mt.row_idx<l>5)|(mt.row_idx<ge>35)))<line_sep>test_parts([(5 35 <true> <false>)] mt.filter_rows((mt.row_idx<ge>5)&(mt.row_idx<l>35)))<block_end>@skip_when_service_backend('Shuffler encoding/decoding is broken.')<def_stmt>test_partitioned_write_coerce self<block_start>mt=hl.import_vcf(resource('sample.vcf'))<line_sep>parts=[hl.Interval(hl.Locus('20' 10277621) 
hl.Locus('20' 11898992))]<line_sep>tmp=new_temp_file(extension='mt')<line_sep>mt.write(tmp _partitions=parts)<line_sep>mt2=hl.read_matrix_table(tmp)<assert_stmt>mt2.n_partitions()<eq>len(parts)<assert_stmt>hl.filter_intervals(mt parts)._same(mt2)<block_end><def_stmt>test_overwrite self<block_start>mt=hl.utils.range_matrix_table(1 1)<line_sep>f=new_temp_file(extension='mt')<line_sep>mt.write(f)<with_stmt>pytest.raises(hl.utils.FatalError match="file already exists")<block_start>mt.write(f)<block_end>mt.write(f overwrite=<true>)<block_end><def_stmt>test_invalid_metadata self<block_start><with_stmt>pytest.raises(hl.utils.FatalError match='metadata does not contain file version')<block_start>hl.read_matrix_table(resource('0.1-1fd5cc7.vds'))<block_end><block_end><def_stmt>test_legacy_files_with_required_globals self<block_start>hl.read_table(resource('required_globals.ht'))._force_count()<line_sep>hl.read_matrix_table(resource('required_globals.mt'))._force_count_rows()<block_end><def_stmt>test_matrix_native_write_range self<block_start>mt=hl.utils.range_matrix_table(11 3 n_partitions=3)<line_sep>f=new_temp_file()<line_sep>mt.write(f)<assert_stmt>hl.read_matrix_table(f)._same(mt)<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_matrix_multi_write_range self<block_start>mts=[hl.utils.range_matrix_table(11 27 n_partitions=10) hl.utils.range_matrix_table(11 3 n_partitions=10)]<line_sep>f=new_temp_file()<line_sep>hl.experimental.write_matrix_tables(mts f)<assert_stmt>hl.read_matrix_table(f+'0.mt')._same(mts[0])<assert_stmt>hl.read_matrix_table(f+'1.mt')._same(mts[1])<block_end><def_stmt>test_key_cols_by_extract_issue self<block_start>mt=hl.utils.range_matrix_table(1000 100)<line_sep>mt=mt.key_cols_by(col_id=hl.str(mt.col_idx))<line_sep>mt=mt.add_col_index()<line_sep>mt.show()<block_end><def_stmt>test_filtered_entries_group_rows_by self<block_start>mt=hl.utils.range_matrix_table(1 1)<line_sep>mt=mt.filter_entries(<false>)<line_sep>mt=mt.group_rows_by(x=mt.row_idx<floordiv>10).aggregate(c=hl.agg.count())<assert_stmt>mt.entries().collect()<eq>[hl.Struct(x=0 col_idx=0 c=0)]<block_end><def_stmt>test_filtered_entries_group_cols_by self<block_start>mt=hl.utils.range_matrix_table(1 1)<line_sep>mt=mt.filter_entries(<false>)<line_sep>mt=mt.group_cols_by(x=mt.col_idx<floordiv>10).aggregate(c=hl.agg.count())<assert_stmt>mt.entries().collect()<eq>[hl.Struct(row_idx=0 x=0 c=0)]<block_end><def_stmt>test_invalid_field_ref_error self<block_start>mt=hl.balding_nichols_model(2 5 5)<line_sep>mt2=hl.balding_nichols_model(2 5 5)<with_stmt>pytest.raises(hl.expr.ExpressionException match='Found fields from 2 objects:')<block_start>mt.annotate_entries(x=mt.GT.n_alt_alleles()<times>mt2.af)<block_end><block_end><def_stmt>test_invalid_field_ref_annotate self<block_start>mt=hl.balding_nichols_model(2 5 5)<line_sep>mt2=hl.balding_nichols_model(2 5 5)<with_stmt>pytest.raises(hl.expr.ExpressionException match='source mismatch')<block_start>mt.annotate_entries(x=mt2.af)<block_end><block_end><block_end><def_stmt>test_read_write_all_types <block_start>mt=create_all_values_matrix_table()<line_sep>tmp_file=new_temp_file()<line_sep>mt.write(tmp_file)<assert_stmt>hl.read_matrix_table(tmp_file)._same(mt)<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_read_write_balding_nichols_model <block_start>mt=hl.balding_nichols_model(3 10 
10)<line_sep>tmp_file=new_temp_file()<line_sep>mt.write(tmp_file)<assert_stmt>hl.read_matrix_table(tmp_file)._same(mt)<block_end>@fails_service_backend()@fails_local_backend()<def_stmt>test_read_partitions <block_start>ht=hl.utils.range_matrix_table(n_rows=100 n_cols=10 n_partitions=3)<line_sep>path=new_temp_file()<line_sep>ht.write(path)<assert_stmt>hl.read_matrix_table(path _n_partitions=10).n_partitions()<eq>10<block_end>
|