# Copyright (c) 2021 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
load("//bazel_tools:versions.bzl", "version_to_name")

def _build_dar(
        name,
        package_name,
        srcs,
        data_dependencies,
        sdk_version):
    daml = "@daml-sdk-{sdk_version}//:daml".format(
        sdk_version = sdk_version,
    )
    native.genrule(
        name = name,
        srcs = srcs + data_dependencies,
        outs = ["%s.dar" % name],
        tools = [daml],
        cmd = """\
set -euo pipefail
TMP_DIR=$$(mktemp -d)
cleanup() {{ rm -rf $$TMP_DIR; }}
trap cleanup EXIT
mkdir -p $$TMP_DIR/src $$TMP_DIR/dep
for src in {srcs}; do
  cp -L $$src $$TMP_DIR/src
done
DATA_DEPS=
for dep in {data_dependencies}; do
  cp -L $$dep $$TMP_DIR/dep
  DATA_DEPS="$$DATA_DEPS\n  - dep/$$(basename $$dep)"
done
cat <<EOF >$$TMP_DIR/daml.yaml
sdk-version: {sdk_version}
name: {name}
source: src
version: 0.0.1
dependencies:
  - daml-prim
  - daml-script
data-dependencies:$$DATA_DEPS
EOF
$(location {daml}) build --project-root=$$TMP_DIR -o $$PWD/$(OUTS)
""".format(
            daml = daml,
            name = package_name,
            data_dependencies = " ".join([
                "$(location %s)" % dep
                for dep in data_dependencies
            ]),
            sdk_version = sdk_version,
            srcs = " ".join(["$(locations %s)" % src for src in srcs]),
        ),
    )

def data_dependencies_coins(sdk_version):
    """Build the coin1 and coin2 packages with the given SDK version."""
    _build_dar(
        name = "data-dependencies-coin1-{sdk_version}".format(
            sdk_version = sdk_version,
        ),
        package_name = "data-dependencies-coin1",
        srcs = ["//bazel_tools/data_dependencies:example/CoinV1.daml"],
        data_dependencies = [],
        sdk_version = sdk_version,
    )

    _build_dar(
        name = "data-dependencies-coin2-{sdk_version}".format(
            sdk_version = sdk_version,
        ),
        package_name = "data-dependencies-coin2",
        srcs = ["//bazel_tools/data_dependencies:example/CoinV2.daml"],
        data_dependencies = [],
        sdk_version = sdk_version,
    )

def data_dependencies_upgrade_test(old_sdk_version, new_sdk_version):
    """Build and validate the coin-upgrade package using the new SDK version.

    The package will have data-dependencies on the coin1 and coin2 packages
    built with the old SDK version.
    """
    daml_new = "@daml-sdk-{sdk_version}//:daml".format(
        sdk_version = new_sdk_version,
    )
    dar_name = "data-dependencies-upgrade-old-{old_sdk_version}-new-{new_sdk_version}".format(
        old_sdk_version = old_sdk_version,
        new_sdk_version = new_sdk_version,
    )
    _build_dar(
        name = dar_name,
        package_name = "data-dependencies-upgrade",
        srcs = ["//bazel_tools/data_dependencies:example/UpgradeFromCoinV1.daml"],
        data_dependencies = [
            "data-dependencies-coin1-{sdk_version}".format(
                sdk_version = old_sdk_version,
            ),
            "data-dependencies-coin2-{sdk_version}".format(
                sdk_version = old_sdk_version,
            ),
        ],
        sdk_version = new_sdk_version,
    )
    native.sh_test(
        name = "data-dependencies-test-old-{old_sdk_version}-new-{new_sdk_version}".format(
            old_sdk_version = old_sdk_version,
            new_sdk_version = new_sdk_version,
        ),
        srcs = ["//bazel_tools/data_dependencies:validate_dar.sh"],
        args = [
            "$(rootpath %s)" % daml_new,
            "$(rootpath %s)" % dar_name,
        ],
        data = [daml_new, dar_name],
        deps = ["@bazel_tools//tools/bash/runfiles"],
    )
|
import asyncio
import contextlib
import logging
import time
import unittest
from decimal import Decimal
from typing import List

import conf
from hummingbot.connector.derivative.binance_perpetual.binance_perpetual_derivative import BinancePerpetualDerivative
from hummingbot.core.clock import Clock
from hummingbot.core.clock_mode import ClockMode
from hummingbot.core.data_type.common import OrderType
from hummingbot.core.event.event_logger import EventLogger
from hummingbot.core.event.events import (
    BuyOrderCompletedEvent,
    BuyOrderCreatedEvent,
    MarketEvent,
    OrderCancelledEvent,
    SellOrderCompletedEvent,
    SellOrderCreatedEvent,
)
from hummingbot.core.network_iterator import NetworkStatus
from hummingbot.core.utils.async_utils import safe_ensure_future, safe_gather
from hummingbot.logger.struct_logger import METRICS_LOG_LEVEL

logging.basicConfig(level=METRICS_LOG_LEVEL)


class BinancePerpetualMarketUnitTest(unittest.TestCase):
    events: List[MarketEvent] = [
        MarketEvent.ReceivedAsset,
        MarketEvent.BuyOrderCompleted,
        MarketEvent.SellOrderCompleted,
        MarketEvent.OrderFilled,
        MarketEvent.TransactionFailure,
        MarketEvent.BuyOrderCreated,
        MarketEvent.SellOrderCreated,
        MarketEvent.OrderCancelled,
        MarketEvent.OrderFailure,
    ]

    market: BinancePerpetualDerivative
    market_logger: EventLogger
    stack: contextlib.ExitStack

    @classmethod
    def setUpClass(cls) -> None:
        cls._ev_loop = asyncio.get_event_loop()
        cls.clock: Clock = Clock(ClockMode.REALTIME)
        cls.market: BinancePerpetualDerivative = BinancePerpetualDerivative(
            api_key=conf.binance_perpetual_api_key,
            api_secret=conf.binance_perpetual_api_secret,
            trading_pairs=["ETH-USDT"])
        print("Initializing Binance Perpetual market... this will take about a minute.")
        cls.ev_loop: asyncio.BaseEventLoop = asyncio.get_event_loop()
        cls.clock.add_iterator(cls.market)
        cls.stack: contextlib.ExitStack = contextlib.ExitStack()
        cls._clock = cls.stack.enter_context(cls.clock)
        cls.ev_loop.run_until_complete(cls.wait_till_ready())
        print("Market Ready.")

    @classmethod
    async def wait_till_ready(cls):
        while True:
            now = time.time()
            next_iteration = now // 1.0 + 1
            if cls.market.ready:
                break
            else:
                await cls._clock.run_til(next_iteration)
            await asyncio.sleep(1.0)

    def setUp(self) -> None:
        self.market_logger = EventLogger()
        for event_tag in self.events:
            self.market.add_listener(event_tag, self.market_logger)

    def tearDown(self):
        for event_tag in self.events:
            self.market.remove_listener(event_tag, self.market_logger)
        self.market_logger = None

    @classmethod
    def tearDownClass(cls) -> None:
        cls.stack.close()

    async def run_parallel_async(self, *tasks):
        future: asyncio.Future = safe_ensure_future(safe_gather(*tasks))
        while not future.done():
            now = time.time()
            next_iteration = now // 1.0 + 1
            await self._clock.run_til(next_iteration)
            await asyncio.sleep(1.0)
        return future.result()

    def run_parallel(self, *tasks):
        return self.ev_loop.run_until_complete(self.run_parallel_async(*tasks))

    @unittest.skip("Too Simple, Unnecessary")
    def test_network_status(self):
        network_status: NetworkStatus = self.ev_loop.run_until_complete(self.market.check_network())
        self.assertEqual(NetworkStatus.CONNECTED, network_status)

    @unittest.skip("")
    def test_buy_and_sell_order_then_cancel_individually(self):
        trading_pair = "ETH-USDT"

        # Create Buy Order
        buy_order_id = self.market.buy(trading_pair=trading_pair, amount=Decimal(0.01), order_type=OrderType.LIMIT, price=Decimal(300))
        [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent))
        order_created_event: BuyOrderCreatedEvent = order_created_event
        self.assertEqual(buy_order_id, order_created_event.order_id)
        self.assertEqual(trading_pair, order_created_event.trading_pair)
        self.assertEqual(1, len(self.market.in_flight_orders))
        self.assertTrue(buy_order_id in self.market.in_flight_orders)

        # Create Sell Order
        sell_order_id = self.market.sell(trading_pair=trading_pair, amount=Decimal(0.01), order_type=OrderType.LIMIT, price=Decimal(500))
        [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent))
        order_created_event: SellOrderCreatedEvent = order_created_event
        self.assertEqual(sell_order_id, order_created_event.order_id)
        self.assertEqual(trading_pair, order_created_event.trading_pair)
        self.assertEqual(2, len(self.market.in_flight_orders))
        self.assertTrue(sell_order_id in self.market.in_flight_orders)
        self.assertTrue(buy_order_id in self.market.in_flight_orders)

        # Cancel Buy Order
        self.market.cancel(trading_pair, buy_order_id)
        [order_cancelled_event] = self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent))
        order_cancelled_event: OrderCancelledEvent = order_cancelled_event
        self.assertEqual(buy_order_id, order_cancelled_event.order_id)
        self.assertEqual(1, len(self.market.in_flight_orders))
        self.assertTrue(sell_order_id in self.market.in_flight_orders)
        self.assertTrue(buy_order_id not in self.market.in_flight_orders)

        # Cancel Sell Order
        self.market.cancel(trading_pair, sell_order_id)
        [order_cancelled_event] = self.run_parallel(self.market_logger.wait_for(OrderCancelledEvent))
        order_cancelled_event: OrderCancelledEvent = order_cancelled_event
        self.assertEqual(sell_order_id, order_cancelled_event.order_id)
        self.assertEqual(0, len(self.market.in_flight_orders))
        self.assertTrue(sell_order_id not in self.market.in_flight_orders)
        self.assertTrue(buy_order_id not in self.market.in_flight_orders)

    @unittest.skip("")
    def test_buy_and_sell_order_then_cancel_all(self):
        trading_pair = "ETH-USDT"

        # Create Buy Order
        buy_order_id = self.market.buy(trading_pair=trading_pair, amount=Decimal(0.01), order_type=OrderType.LIMIT, price=Decimal(300))
        [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent))
        order_created_event: BuyOrderCreatedEvent = order_created_event
        self.assertEqual(buy_order_id, order_created_event.order_id)
        self.assertEqual(trading_pair, order_created_event.trading_pair)
        self.assertEqual(1, len(self.market.in_flight_orders))
        self.assertTrue(buy_order_id in self.market.in_flight_orders)

        # Create Sell Order
        sell_order_id = self.market.sell(trading_pair=trading_pair, amount=Decimal(0.01), order_type=OrderType.LIMIT, price=Decimal(500))
        [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent))
        order_created_event: SellOrderCreatedEvent = order_created_event
        self.assertEqual(sell_order_id, order_created_event.order_id)
        self.assertEqual(trading_pair, order_created_event.trading_pair)
        self.assertEqual(2, len(self.market.in_flight_orders))
        self.assertTrue(sell_order_id in self.market.in_flight_orders)
        self.assertTrue(buy_order_id in self.market.in_flight_orders)

        # Cancel All Orders
        [cancellation_results] = self.run_parallel(self.market.cancel_all(5))
        for cancel_result in cancellation_results:
            self.assertEqual(cancel_result.success, True)
        self.assertEqual(0, len(self.market.in_flight_orders))
        self.assertTrue(sell_order_id not in self.market.in_flight_orders)
        self.assertTrue(buy_order_id not in self.market.in_flight_orders)

    @unittest.skip("")
    def test_buy_and_sell_order_then_cancel_account_orders(self):
        trading_pair = "ETH-USDT"

        # Create Buy Order
        buy_order_id = self.market.buy(trading_pair=trading_pair, amount=Decimal(0.01), order_type=OrderType.LIMIT, price=Decimal(300))
        [order_created_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCreatedEvent))
        order_created_event: BuyOrderCreatedEvent = order_created_event
        self.assertEqual(buy_order_id, order_created_event.order_id)
        self.assertEqual(trading_pair, order_created_event.trading_pair)
        self.assertEqual(1, len(self.market.in_flight_orders))
        self.assertTrue(buy_order_id in self.market.in_flight_orders)

        # Create Sell Order
        sell_order_id = self.market.sell(trading_pair=trading_pair, amount=Decimal(0.01), order_type=OrderType.LIMIT, price=Decimal(500))
        [order_created_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCreatedEvent))
        order_created_event: SellOrderCreatedEvent = order_created_event
        self.assertEqual(sell_order_id, order_created_event.order_id)
        self.assertEqual(trading_pair, order_created_event.trading_pair)
        self.assertEqual(2, len(self.market.in_flight_orders))
        self.assertTrue(sell_order_id in self.market.in_flight_orders)
        self.assertTrue(buy_order_id in self.market.in_flight_orders)

        # Cancel All Open Orders on Account (specified by trading pair)
        self.ev_loop.run_until_complete(safe_ensure_future(self.market.cancel_all_account_orders(trading_pair)))
        self.assertEqual(0, len(self.market.in_flight_orders))
        self.assertTrue(sell_order_id not in self.market.in_flight_orders)
        self.assertTrue(buy_order_id not in self.market.in_flight_orders)

    @unittest.skip("")
    def test_order_fill_event(self):
        trading_pair = "ETH-USDT"
        amount: Decimal = Decimal(0.01)
        quantized_amount: Decimal = self.market.quantize_order_amount(trading_pair, amount)

        # Initialize Pricing (Buy)
        price: Decimal = self.market.get_price(trading_pair, True) * Decimal("1.01")
        quantized_price: Decimal = self.market.quantize_order_price(trading_pair, price)

        # Create Buy Order
        buy_order_id = self.market.buy(trading_pair=trading_pair, amount=quantized_amount, order_type=OrderType.LIMIT, price=quantized_price)
        [order_completed_event] = self.run_parallel(self.market_logger.wait_for(BuyOrderCompletedEvent))
        self.assertEqual(buy_order_id, order_completed_event.order_id)
        self.assertEqual(quantized_amount, order_completed_event.base_asset_amount)
        self.assertEqual("ETH", order_completed_event.base_asset)
        self.assertEqual("USDT", order_completed_event.quote_asset)
        self.assertTrue(any([isinstance(event, BuyOrderCreatedEvent) and event.order_id == buy_order_id
                             for event in self.market_logger.event_log]))

        # Initialize Pricing (Sell)
        price = self.market.get_price(trading_pair, False) * Decimal("0.99")
        quantized_price = self.market.quantize_order_price(trading_pair, price)

        # Create Sell Order
        sell_order_id = self.market.sell(trading_pair=trading_pair, amount=quantized_amount, order_type=OrderType.LIMIT, price=quantized_price)
        [order_completed_event] = self.run_parallel(self.market_logger.wait_for(SellOrderCompletedEvent))
        self.assertEqual(sell_order_id, order_completed_event.order_id)
        self.assertEqual(quantized_amount, order_completed_event.base_asset_amount)
        self.assertEqual("ETH", order_completed_event.base_asset)
        self.assertEqual("USDT", order_completed_event.quote_asset)
        self.assertTrue(any([isinstance(event, SellOrderCreatedEvent) and event.order_id == sell_order_id
                             for event in self.market_logger.event_log]))


def main():
    logging.getLogger("hummingbot.core.event.event_reporter").setLevel(logging.WARNING)
    unittest.main()


if __name__ == "__main__":
    main()
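# Hypothetical sketch (an assumption, not part of this file): the `import conf`
# above expects a local module exposing the two credentials read in setUpClass,
# for example a conf.py along these lines:
#
#   binance_perpetual_api_key = "YOUR_BINANCE_FUTURES_API_KEY"
#   binance_perpetual_api_secret = "YOUR_BINANCE_FUTURES_API_SECRET"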
|
import requests
import json

from api_client.url_helpers.apps_url import get_apps_search_url
from config import config
from Logs.log_configuration import configure_logger
from models.api_header_model import RequestHeader

log = configure_logger('default')


def search_application(bundle_id):
    """
    Search for applications with the given Bundle ID.

    :param bundle_id: Bundle ID (App Identifier)
    :return: True/False indicating Success/Failure and the application list that matches the given Bundle ID
    """
    api_url = get_apps_search_url()
    headers = RequestHeader().header
    api_params = {
        'type': 'App',
        'applicationtype': 'Internal',
        'bundleid': bundle_id,
        'locationgroupid': config.TENANT_GROUP_ID,
        'productcomponentappsonly': 'False'
    }
    try:
        response = requests.get(api_url, headers=headers, params=api_params)
        if not response.ok:
            log.error(f'{response.status_code}, {response.reason}, {response.content}')  # HTTP error
            return False, 0
        else:
            response_data = json.loads(response.content)
            app_list = response_data['Application']
            return True, app_list
    except Exception as e:
        log.error('Application Search failed: {}'.format(str(e)))
        # Return the same two-value shape as the other failure path so callers can unpack safely.
        return False, 0
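# Hypothetical usage sketch (the bundle ID and the response field name below are
# assumptions for illustration; app_list is whatever the endpoint stores under 'Application'):
#
#   ok, apps = search_application('com.example.myapp')
#   if ok:
#       for app in apps:
#           log.info(app.get('ApplicationName'))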
|
#
# Base class for convection submodels
#
import pybamm


class BaseModel(pybamm.BaseSubModel):
    """Base class for convection submodels.

    Parameters
    ----------
    param : parameter class
        The parameters to use for this submodel
    options : dict, optional
        A dictionary of options to be passed to the model.

    **Extends:** :class:`pybamm.BaseSubModel`
    """

    def __init__(self, param, options=None):
        super().__init__(param, options=options)

    def _get_standard_whole_cell_velocity_variables(self, variables):
        """
        A private function to obtain the standard variables which
        can be derived from the fluid velocity.

        Parameters
        ----------
        variables : dict
            The existing variables in the model

        Returns
        -------
        variables : dict
            The variables which can be derived from the volume-averaged
            velocity.
        """
        vel_scale = self.param.velocity_scale

        if self.half_cell:
            v_box_n = None
        else:
            v_box_n = variables["Negative electrode volume-averaged velocity"]
        v_box_s = variables["Separator volume-averaged velocity"]
        v_box_p = variables["Positive electrode volume-averaged velocity"]
        v_box = pybamm.concatenation(v_box_n, v_box_s, v_box_p)

        variables = {
            "Volume-averaged velocity": v_box,
            "Volume-averaged velocity [m.s-1]": vel_scale * v_box,
        }
        return variables

    def _get_standard_whole_cell_acceleration_variables(self, variables):
        """
        A private function to obtain the standard variables which
        can be derived from the fluid acceleration.

        Parameters
        ----------
        variables : dict
            The existing variables in the model

        Returns
        -------
        variables : dict
            The variables which can be derived from the volume-averaged
            acceleration.
        """
        acc_scale = self.param.velocity_scale / self.param.L_x

        if self.half_cell:
            div_v_box_n = None
        else:
            div_v_box_n = variables["Negative electrode volume-averaged acceleration"]
        div_v_box_s = variables["Separator volume-averaged acceleration"]
        div_v_box_p = variables["Positive electrode volume-averaged acceleration"]
        div_v_box = pybamm.concatenation(div_v_box_n, div_v_box_s, div_v_box_p)
        div_v_box_av = pybamm.x_average(div_v_box)

        variables = {
            "Volume-averaged acceleration": div_v_box,
            "X-averaged volume-averaged acceleration": div_v_box_av,
            "Volume-averaged acceleration [m.s-1]": acc_scale * div_v_box,
            "X-averaged volume-averaged acceleration [m.s-1]": acc_scale * div_v_box_av,
        }
        return variables

    def _get_standard_whole_cell_pressure_variables(self, variables):
        """
        A private function to obtain the standard variables which
        can be derived from the pressure in the fluid.

        Parameters
        ----------
        variables : dict
            The existing variables in the model

        Returns
        -------
        variables : dict
            The variables which can be derived from the pressure.
        """
        if self.half_cell:
            p_n = None
        else:
            p_n = variables["Negative electrode pressure"]
        p_s = variables["Separator pressure"]
        p_p = variables["Positive electrode pressure"]
        p = pybamm.concatenation(p_n, p_s, p_p)

        variables = {"Pressure": p}
        return variables
|
# -*- encoding: utf-8 -*-
<import_stmt>sys re os argparse heapq<import_from_stmt>datetime datetime<import_from_stmt>collections namedtuple defaultdict<import_stmt>numpy<as>np<import_from_stmt>PIL Image<import_from_stmt>scipy ndimage<line_sep>######################################################################
DIR_RIGHT=0<line_sep>DIR_DOWN=1<line_sep>DIR_LEFT=2<line_sep>DIR_UP=3<line_sep>NEIGHBOR_OFFSET=np.array([[0 1] [1 0] [0 -1] [-1 0]])<line_sep>TURN_RIGHT=np.array([DIR_DOWN DIR_LEFT DIR_UP DIR_RIGHT])<line_sep>TURN_LEFT=np.array([DIR_UP DIR_RIGHT DIR_DOWN DIR_LEFT])<line_sep>VMAP_OFFSET=np.array([[-1 0 0] [0 0 1] [0 0 0] [0 -1 1]])<line_sep>DIAG_OFFSET=NEIGHBOR_OFFSET+NEIGHBOR_OFFSET[TURN_LEFT]<line_sep>OPP_OFFSET=NEIGHBOR_OFFSET[TURN_LEFT]<line_sep>CROSS_ELEMENT=np.array([[0 1 0] [1 1 1] [0 1 0]] dtype=np.bool)<line_sep>BOX_ELEMENT=np.ones((3 3) dtype=np.bool)<line_sep>######################################################################
# Some helper classes
EdgeInfo=namedtuple('EdgeInfo' ['node0' 'node1' 'label0' 'label1'])<line_sep>EdgeRef=namedtuple('EdgeRef' ['edge_index' 'opp_label' 'step'])<line_sep>######################################################################
# Class to store boundary representation for our map
<class_stmt>BoundaryRepresentation(object)<block_start><def_stmt>__init__ self# list of nodes (points) or None for deleted
<block_start>self.node_list=[]<line_sep># list of sets of edge indices
self.node_edges=[]<line_sep># list of point arrays (or empty for deleted edges)
self.edge_list=[]<line_sep># list of EdgeInfo (or None for deleted edges)
self.edge_infolist=[]<line_sep># map from point to node index
self.node_lookup=dict()<line_sep># map from EdgeInfo to edge index
self.edge_lookup=dict()<line_sep># map from label to list of list of EdgeRef
self.label_lookup=defaultdict(list)<block_end><def_stmt>lookup_node self point insert=<false><block_start>key=tuple(map(float point))<if_stmt>insert<and>key<not><in>self.node_lookup<block_start>node_idx=len(self.node_list)<line_sep>self.node_list.append(point.copy())<line_sep>self.node_edges.append(set())<line_sep>self.node_lookup[key]=node_idx<block_end><else_stmt><block_start>node_idx=self.node_lookup[key]<block_end><return>node_idx<block_end><def_stmt>add_edges self cur_label contour_edges<block_start>edge_refs=[]<for_stmt>opp_label,edge contour_edges<block_start><assert_stmt>cur_label<ne>opp_label<assert_stmt>cur_label<ne>0<line_sep>label0=min(cur_label opp_label)<line_sep>label1=max(cur_label opp_label)<if_stmt>label0<eq>cur_label<block_start>step=1<block_end><else_stmt><block_start>step=-1<block_end>edge_to_add=edge[::step]<line_sep>node0=self.lookup_node(edge_to_add[0] insert=<true>)<line_sep>node1=self.lookup_node(edge_to_add[-1] insert=<true>)<line_sep>edge_info=EdgeInfo(node0 node1 label0 label1)<if_stmt>edge_info<in>self.edge_lookup<block_start>edge_idx=self.edge_lookup[edge_info]<line_sep>stored_edge=self.edge_list[edge_idx]<assert_stmt>self.edge_infolist[edge_idx]<eq>edge_info<assert_stmt>np.all(stored_edge<eq>edge_to_add)<assert_stmt>edge_idx<in>self.node_edges[node0]<assert_stmt>edge_idx<in>self.node_edges[node1]<block_end><else_stmt><block_start>edge_idx=len(self.edge_list)<line_sep>self.edge_list.append(edge_to_add)<line_sep>self.edge_infolist.append(edge_info)<line_sep>self.edge_lookup[edge_info]=edge_idx<line_sep>self.node_edges[node0].add(edge_idx)<line_sep>self.node_edges[node1].add(edge_idx)<block_end>edge_refs.append(EdgeRef(edge_idx opp_label step))<block_end>self.label_lookup[cur_label].append(edge_refs)<block_end><def_stmt>replace_endpoints self edge_idx na nb nc<block_start>edge=self.edge_list[edge_idx]<line_sep>edge_info=self.edge_infolist[edge_idx]<assert_stmt>(edge_info.node0<eq>na<or>edge_info.node0<eq>nb<or>edge_info.node1<eq>na<or>edge_info.node1<eq>nb)<line_sep>n0=<none><line_sep>n1=<none><if_stmt>edge_info.node0<eq>na<block_start>n0=na<line_sep>new_n0=nc<block_end><elif_stmt>edge_info.node0<eq>nb<block_start>n0=nb<line_sep>new_n0=nc<block_end><else_stmt><block_start>new_n0=edge_info.node0<block_end><if_stmt>edge_info.node1<eq>na<block_start>n1=na<line_sep>new_n1=nc<block_end><elif_stmt>edge_info.node1<eq>nb<block_start>n1=nb<line_sep>new_n1=nc<block_end><else_stmt><block_start>new_n1=edge_info.node1<block_end><if_stmt>n0<is><not><none><and>n1<is><not><none><block_start>self.edge_list[edge_idx]=edge[:0]<line_sep>self.edge_infolist[edge_idx]=<none><line_sep># NB we will rebuild label_lookup after all merges
<return><block_end>self.node_edges[nc].add(edge_idx)<line_sep>pc=self.node_list[nc]<for_stmt>node_idx,which_end,lo,hi [(n0 0 1 0) (n1 -1 0 1)]<block_start><if_stmt>node_idx<is><none><block_start><continue><block_end>p=self.node_list[node_idx]<line_sep>delta=(pc-p).reshape(1 2)<line_sep>u=np.linspace(lo hi len(edge)).reshape(-1 1)<line_sep>edge=edge+delta<times>u<line_sep>edge[which_end]=pc<block_end>edge_info=EdgeInfo(new_n0 new_n1 edge_info.label0 edge_info.label1)<line_sep>self.edge_list[edge_idx]=edge<line_sep>self.edge_infolist[edge_idx]=edge_info<assert_stmt>np.all(edge[0]<eq>self.node_list[edge_info.node0])<assert_stmt>np.all(edge[-1]<eq>self.node_list[edge_info.node1])<block_end><def_stmt>merge_nodes self tol<block_start>node_points=np.array(self.node_list)<line_sep>rng=range(len(node_points))<line_sep>i,j=np.meshgrid(rng rng)<line_sep>use=i<g>j<line_sep>i=i[use]<line_sep>j=j[use]<line_sep>ni=node_points[i]<line_sep>nj=node_points[j]<line_sep>dists=np.linalg.norm(ni-nj axis=1)<line_sep>heap=list(zip(dists i j))<line_sep>heapq.heapify(heap)<line_sep>retired_nodes=set()<line_sep>active_nodes=set(rng)<while_stmt>len(heap)<block_start>dmin,na,nb=heapq.heappop(heap)<assert_stmt>na<g>nb<if_stmt>dmin<g>tol<block_start><break><block_end><if_stmt>na<in>retired_nodes<or>nb<in>retired_nodes<block_start><continue><block_end>print(' merge nodes {} and {} with distance {}'.format(na nb dmin))<line_sep>pa=self.node_list[na]<line_sep>pb=self.node_list[nb]<line_sep>pc=0.5<times>(pa+pb)<line_sep>nc=len(self.node_list)<line_sep>nkey=tuple(map(float pc))<line_sep>self.node_list.append(pc.copy())<line_sep>self.node_edges.append(set())<line_sep>self.node_lookup[nkey]=nc<assert_stmt>self.lookup_node(pc)<eq>nc<for_stmt>node_idx (na nb)<block_start><for_stmt>edge_idx self.node_edges[node_idx]<block_start><if_stmt>self.edge_infolist[edge_idx]<is><not><none><block_start>self.replace_endpoints(edge_idx na nb nc)<block_end><block_end><block_end><for_stmt>node_idx (na nb)<block_start>p=self.node_list[node_idx]<line_sep>pkey=tuple(map(float p))<del_stmt>self.node_lookup[pkey]<line_sep>self.node_list[node_idx]=<none><line_sep>self.node_edges[node_idx]=set()<line_sep>retired_nodes.add(node_idx)<line_sep>active_nodes.remove(node_idx)<block_end><for_stmt>nj active_nodes<block_start>pj=self.node_list[nj]<line_sep>dcj=np.linalg.norm(pc-pj)<line_sep>hkey=(dcj nc nj)<line_sep>heapq.heappush(heap hkey)<block_end>active_nodes.add(nc)<block_end># rebuild label lookup
new_label_lookup=dict()<for_stmt>label,contours self.label_lookup.items()<block_start>new_contours=[]<for_stmt>contour contours<block_start>new_contour=[]<for_stmt>edge_ref contour<block_start>idx,_,_=edge_ref<if_stmt>self.edge_infolist[idx]<is><not><none><block_start>new_contour.append(edge_ref)<block_end><block_end><if_stmt>len(new_contour)<block_start>new_contours.append(new_contour)<block_end><block_end><if_stmt>len(new_contours)<block_start>new_label_lookup[label]=new_contours<block_end><else_stmt><block_start>print('totally deleted label {}!'.format(label))<block_end><block_end>self.label_lookup=new_label_lookup<block_end><def_stmt>save_debug_image self opts orig_shape colors name<block_start>filename=opts.basename+'_debug_'+name+'.svg'<with_stmt>open(filename 'w')<as>svg<block_start>svg.write('<svg width="{}" height="{}" '<concat>'xmlns="http://www.w3.org/2000/svg">\n'.format(orig_shape[1] orig_shape[0]))<line_sep>svg.write(' <rect width="100%" height="100%" fill="#eee" />\n')<for_stmt>ilabel range(2)<block_start><if_stmt>ilabel<eq>0<block_start>svg.write(' <g stroke-linejoin="miter" stroke-width="4" fill="none">\n')<block_end><else_stmt><block_start>svg.write(' <g stroke-linejoin="miter" stroke-width="4" fill="none" stroke-dasharray="8, 8" >\n')<block_end><for_stmt>edge,einfo zip(self.edge_list self.edge_infolist)<block_start>svg.write(' <path d="')<line_sep>last=np.array([0 0])<for_stmt>i,pt enumerate(edge)<block_start>pt=pt.astype(int)<if_stmt>i<eq>0<block_start>svg.write('M{},{}'.format(pt[0] pt[1]))<block_end><else_stmt><block_start>diff=pt-last<if_stmt>diff[1]<eq>0<block_start>svg.write('h{}'.format(diff[0]))<block_end><elif_stmt>diff[0]<eq>0<block_start>svg.write('v{}'.format(diff[1]))<block_end><else_stmt><block_start>svg.write('l{},{}'.format(*diff))<block_end><block_end>last=pt<block_end>color=colors[einfo.label0<if>ilabel<eq>0<else>einfo.label1]<line_sep>svg.write('" stroke="#{:02x}{:02x}{:02x}" />\n'.format(*color))<block_end>svg.write(' </g>\n')<block_end>svg.write(' <g stroke="none" fill="#000">\n')<for_stmt>pt self.node_list<block_start>svg.write(' <circle cx="{}" cy="{}" r="4" />\n'.format(*pt))<block_end>svg.write(' </g>\n')<line_sep>svg.write('</svg>\n')<block_end>print('wrote' filename)<block_end><block_end>######################################################################
# Input is string, output is pair (string, lambda image -> image)
<def_stmt>filter_type fstr<block_start>m=re.match(r'^\s*([a-z]+)\s*:\s*([a-z]+)\s*,\s*([1-9][0-9]*)\s*$' fstr)<if_stmt>m<is><none><block_start><raise>argparse.ArgumentTypeError('invalid filter string')<block_end>operation=m.group(1)<line_sep>element=m.group(2)<line_sep>iterations=int(m.group(3))<line_sep>fnmap=dict(open=ndimage.binary_opening close=ndimage.binary_closing dilate=ndimage.binary_dilation erode=ndimage.binary_erosion)<if_stmt>operation<not><in>fnmap.keys()<block_start><raise>argparse.ArgumentTypeError('invalid operation '+operation)<block_end><if_stmt>element<eq>'box'<block_start>element=BOX_ELEMENT<block_end><elif_stmt>element<eq>'cross'<block_start>element=CROSS_ELEMENT<block_end><else_stmt><block_start><raise>argparse.ArgumentTypeError('invalid element '+element)<block_end>f=<lambda>img:fnmap[operation](img element iterations=iterations)<line_sep><return>fstr f<block_end>######################################################################
# Confirm with [y/n]
<def_stmt>confirm prompt<block_start><while_stmt><true><block_start>print(prompt+' [y/n]: ' end='')<line_sep>sys.stdout.flush()<line_sep>choice=input().lower()<if_stmt>choice<in>['y' 'yes']<block_start><return><true><block_end><elif_stmt>choice<in>['n' 'no']<block_start><return><false><block_end><else_stmt><block_start>print('invalid choice')<block_end><block_end><block_end>######################################################################
# Parse command-line options, return namespace containing results
<def_stmt>get_options <block_start>parser=argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)<line_sep>parser.add_argument('image' type=argparse.FileType('rb') metavar='IMAGE.png' nargs='?' help='image to approximate')<line_sep>parser.add_argument('-z' '--zoom' type=float metavar='ZOOM' default=1.0 help='amount to resize image on load')<line_sep>parser.add_argument('-t' '--threshold' type=int metavar='T' default=64 help='intensity threshold for outlines')<line_sep>parser.add_argument('-a' '--alpha-threshold' type=int metavar='T' default=127 help='threshold for alpha channel')<line_sep>parser.add_argument('-C' '--connectivity' choices=('4' '8') default='4' help='connectivity of non-outline regions')<line_sep>parser.add_argument('-f' '--filter' type=filter_type default=<none> help='filter for preprocessing outline map '<concat>'after thresholding but before connected '<concat>'component analysis; must be of the format '<concat>'(erode|dilate|open|close):(box|cross),ITERATIONS '<concat>'e.g., erode:cross,1')<line_sep>parser.add_argument('-e' '--edge-tol' type=float metavar='E' default='1.42' help='tolerance in px for simplifying edges')<line_sep>parser.add_argument('-n' '--node-tol' type=float metavar='N' default=0 help='tolerance in px for merging nodes')<line_sep>parser.add_argument('-o' '--output-file' type=str metavar='FILENAME.svg' default=<none> help='output SVG file name')<line_sep>parser.add_argument('-s' '--stroke-width' type=float metavar='S' default=1.0 help='output SVG stroke width')<line_sep>parser.add_argument('-b' '--bg-stroke-width' type=float metavar='S' default=<none> help='output SVG stroke width for largest region')<line_sep>parser.add_argument('-d' '--debug-images' action='store_true' help='generate debug images')<line_sep>parser.add_argument('-D' '--allow-dark-colors' action='store_true' help='flag to prevent applying grayscale threshold '<concat>'to image supplied with -c')<line_sep>parser.add_argument('-m' '--min-area' type=int metavar='A' default=1 help='minimum region area in pixels')<line_sep>parser.add_argument('-c' '--color-image' type=argparse.FileType('rb') default=<none> help='image to supply color for output map')<line_sep>parser.add_argument('-q' '--color-quantize-bits' type=int default=8 help='quantization for finding region '<concat>'colors with -c')<line_sep>parser.add_argument('-r' '--random-colors' action='store_true' help='color regions randomly')<line_sep>parser.add_argument('-R' '--random-seed' type=int help='random seed for colors')<line_sep>parser.add_argument('-y' '--overwrite' action='store_true' help='overwrite output')<line_sep>parser.add_argument('-S' '--solid-colors' action='store_true' help='input image is solid colors with no outlines')<line_sep>opts=parser.parse_args()<if_stmt>opts.image<is><none><block_start><if_stmt>opts.color_image<is><none><block_start>print('error: must provide image filename or set color image with -c')<line_sep>sys.exit(1)<block_end><else_stmt><block_start>opts.image=open(opts.color_image.name 'rb')<block_end><block_end>basename=os.path.basename(opts.image.name)<line_sep>opts.basename,_=os.path.splitext(basename)<if_stmt>opts.bg_stroke_width<is><none><block_start>opts.bg_stroke_width=opts.stroke_width<block_end><if_stmt>opts.output_file<is><none><block_start>opts.output_file=opts.basename+'.svg'<block_end><if_stmt>os.path.exists(opts.output_file)<and><not>opts.overwrite<block_start><if_stmt><not>confirm(opts.output_file+' exists. 
Overwrite?')<block_start>print('will not overwite output, exiting')<line_sep>sys.exit(1)<block_end><block_end><return>opts<block_end>######################################################################
# Downsample pixel values, rounding to center of bins.
<def_stmt>quantize image bits_per_channel=<none><block_start><if_stmt>bits_per_channel<is><none><block_start>bits_per_channel=8<block_end><assert_stmt>image.dtype<eq>np.uint8<line_sep>shift=8-bits_per_channel<line_sep>halfbin=(1<lshift>shift)<rshift>1<line_sep><return>((image.astype(int)<rshift>shift)<lshift>shift)+halfbin<block_end>######################################################################
# Pack RGB triplets into ints
<def_stmt>pack_rgb rgb<block_start>orig_shape=<none><if_stmt>isinstance(rgb np.ndarray)<block_start><assert_stmt>rgb.shape[-1]<eq>3<line_sep>orig_shape=rgb.shape[:-1]<block_end><else_stmt><block_start><assert_stmt>len(rgb)<eq>3<line_sep>rgb=np.array(rgb)<block_end>rgb=rgb.astype(int).reshape((-1 3))<line_sep>packed=(rgb[: 0]<lshift>16|rgb[: 1]<lshift>8|rgb[: 2])<if_stmt>orig_shape<is><none><block_start><return>packed<block_end><else_stmt><block_start><return>packed.reshape(orig_shape)<block_end><block_end>######################################################################
# Unpack ints to RGB triplets
<def_stmt>unpack_rgb packed<block_start>orig_shape=<none><if_stmt>isinstance(packed np.ndarray)<block_start><assert_stmt>packed.dtype<eq>int<line_sep>orig_shape=packed.shape<line_sep>packed=packed.reshape((-1 1))<block_end>rgb=((packed<rshift>16)&0xff (packed<rshift>8)&0xff (packed)&0xff)<if_stmt>orig_shape<is><none><block_start><return>rgb<block_end><else_stmt><block_start><return>np.hstack(rgb).reshape(orig_shape+(3 ))<block_end><block_end>######################################################################
# Get the dominant color in a list of colors (with optional
# quantization)
<def_stmt>get_dominant_color colors bits_per_channel=<none><block_start><assert_stmt>colors.shape[-1]<eq>3<line_sep>quantized=quantize(colors bits_per_channel).astype(int)<line_sep>packed=pack_rgb(quantized)<line_sep>unique,counts=np.unique(packed return_counts=<true>)<line_sep>packed_mode=unique[counts.argmax()]<line_sep><return>unpack_rgb(packed_mode)<block_end>######################################################################
# Save a debug image if allowed
<def_stmt>save_debug_image opts name image<block_start><if_stmt><not>opts.debug_images<block_start><return><block_end><if_stmt>isinstance(image np.ndarray)<block_start><if_stmt>image.dtype<eq>np.bool<block_start>image=(image.astype(np.uint8)<times>255)<block_end><if_stmt>len(image.shape)<eq>2<block_start>mode='L'<block_end><else_stmt><block_start>mode='RGB'<block_end>image=Image.fromarray(image mode)<block_end>filename=opts.basename+'_debug_'+name+'.png'<line_sep>image.save(filename)<line_sep>print('wrote' filename)<block_end>######################################################################
# Open an input image and get the RGB colors as well as the mask
<def_stmt>get_mask input_image opts<block_start>rgb=input_image<line_sep>alpha=<none><if_stmt>(rgb.mode<eq>'LA'<or>(rgb.mode<eq>'P'<and>'transparency'<in>rgb.info))<block_start>rgb=rgb.convert('RGBA')<block_end><if_stmt>rgb.mode<eq>'RGBA'<block_start>alpha=np.array(rgb.split()[-1])<block_end>rgb=rgb.convert('RGB')<line_sep>rgb=np.array(rgb)<line_sep>gray=rgb.max(axis=2)<line_sep>mask=(gray<g>opts.threshold)<if_stmt>alpha<is><not><none><block_start>mask=mask|(alpha<l>opts.alpha_threshold)<block_end>save_debug_image(opts 'mask' mask)<if_stmt>opts.filter<is><not><none><block_start>print('applying filter:' opts.filter[0])<line_sep>mask=opts.filter[1](mask)<line_sep>save_debug_image(opts 'mask_filtered' mask)<block_end><return>mask<block_end>######################################################################
<def_stmt>printp *args<block_start>print(*args end='')<line_sep>sys.stdout.flush()<block_end>######################################################################
<def_stmt>get_labels_and_colors_outlined mask opts<block_start><if_stmt>opts.connectivity<eq>'8'<block_start>structure=BOX_ELEMENT<block_end><else_stmt><block_start>structure=CROSS_ELEMENT<block_end>labels,num_labels=ndimage.label(mask structure=structure)<line_sep>print('found {} labels'.format(num_labels))<line_sep>unlabeled=~mask<line_sep>printp('computing areas... ')<line_sep>start=datetime.now()<line_sep>areas,bins=np.histogram(labels.flatten() bins=num_labels range=(1 num_labels+1))<line_sep>elapsed=(datetime.now()-start).total_seconds()<line_sep>print('finished computing areas in {} seconds.'.format(elapsed))<line_sep>idx=np.hstack(([0] np.argsort(-areas)+1))<line_sep>replace=np.zeros_like(idx)<line_sep>replace[idx]=range(len(idx))<line_sep>labels=replace[labels]<line_sep>areas=areas[idx[1:]-1]<line_sep>print('min area is {}, max is {}'.format(areas[-1] areas[0]))<if_stmt>opts.min_area<g>areas[-1]<block_start>print('killing all labels with area < {} px'.format(opts.min_area))<line_sep>kill_labels=np.nonzero(areas<l>opts.min_area)[0]<line_sep>num_labels=kill_labels.min()<line_sep>kill_mask=(labels<g>num_labels)<line_sep>save_debug_image(opts 'kill_labels' kill_mask)<line_sep>unlabeled=unlabeled|kill_mask<line_sep>print('killed {} labels, now at {} total'.format(len(kill_labels) num_labels))<block_end>colors=255<times>np.ones((num_labels+1 3) dtype=np.uint8)<if_stmt>opts.color_image<is><not><none><block_start>color_image=Image.open(opts.color_image)<line_sep>labels_size=labels.shape[::-1]<if_stmt>color_image.size<ne>labels_size<block_start>color_image=color_image.resize(labels_size Image.NEAREST)<block_end>color_image=np.array(color_image.convert('RGB'))<line_sep>print('assigning colors from {}...'.format(opts.color_image.name))<line_sep>slices=ndimage.find_objects(labels num_labels)<for_stmt>label,(yslc xslc) zip(range(1 num_labels+1) slices)<block_start>print(' coloring label {}/{}'.format(label num_labels))<line_sep>lmask=(labels[yslc xslc]<eq>label)<line_sep>crect=color_image[yslc xslc]<if_stmt><not>opts.allow_dark_colors<block_start>lmask=lmask&(crect.max(axis=2)<g>opts.threshold)<block_end><if_stmt><not>np.any(lmask)<block_start>print('no colors available for label {}, '<concat>'try running with -D?'.format(label))<block_end><else_stmt><block_start>colors[label]=get_dominant_color(crect[lmask] opts.color_quantize_bits)<block_end><block_end><block_end><elif_stmt>opts.random_colors<block_start><if_stmt>opts.random_seed<is><not><none><block_start>np.random.seed(opts.random_seed)<block_end>colors=np.random.randint(128 size=(num_labels+1 3) dtype=np.uint8)+128<line_sep>colors[0 :]=255<block_end>save_debug_image(opts 'regions' colors[labels])<line_sep>printp('running DT... ')<line_sep>start=datetime.now()<line_sep>result=ndimage.distance_transform_edt(unlabeled return_distances=opts.debug_images return_indices=<true>)<if_stmt>opts.debug_images<block_start>dist,idx=result<line_sep>dist<augdiv>dist.max()<line_sep>dist=(dist<times>255).astype(np.uint8)<line_sep>save_debug_image(opts 'dist' dist)<block_end><else_stmt><block_start>idx=result<block_end>elapsed=(datetime.now()-start).total_seconds()<line_sep>print('ran DT in {} seconds'.format(elapsed))<line_sep>labels=labels[tuple(idx)]<assert_stmt><not>np.any(labels<eq>0)<line_sep>labels_big=np.zeros((labels.shape[0]+2 labels.shape[1]+2) dtype=labels.dtype)<line_sep>labels_big[1:-1 1:-1]=labels<line_sep>start=datetime.now()<line_sep>printp('finding objects... 
')<line_sep>slices=ndimage.find_objects(labels num_labels)<line_sep>elapsed=(datetime.now()-start).total_seconds()<line_sep>print('found all objects in {} seconds'.format(elapsed))<line_sep>slices_big=[]<for_stmt>spair slices<block_start>spair_big=[]<for_stmt>s spair<block_start>spair_big.append(slice(s.start s.stop+2))<block_end>slices_big.append(tuple(spair_big))<block_end><assert_stmt>labels_big.min()<eq>0<and>labels_big.max()<eq>num_labels<assert_stmt>len(slices)<eq>num_labels<line_sep>save_debug_image(opts 'regions_expanded' colors[labels_big[1:-1 1:-1]])<line_sep><return>num_labels labels_big slices_big colors<block_end>######################################################################
<def_stmt>get_labels_and_colors_solid input_image opts<block_start>array=np.array(input_image)<line_sep>print(array.shape array.dtype)<if_stmt>len(array.shape)<eq>2<block_start>flattened=array.flatten()<line_sep>axis=<none><block_end><else_stmt><block_start><assert_stmt>len(array.shape)<eq>3<line_sep>flattened=array.reshape(-1 array.shape[2])<line_sep>axis=0<block_end>unique,ulabels=np.unique(flattened axis=axis return_inverse=<true>)<line_sep>ucount=len(unique)<line_sep># go from bright to dark
unique=unique[::-1]<line_sep>ulabels=ucount-ulabels-1<line_sep>ulabels=ulabels.reshape(array.shape[:2])<line_sep>print('unique:' unique)<line_sep>print('ulabels:' ulabels)<line_sep>rgb=np.array(input_image.convert('RGB'))<line_sep>colors=[]<line_sep>labels=np.zeros(array.shape[:2] dtype=int)<line_sep>max_label=0<line_sep>slices=[]<for_stmt>ulabel range(ucount)<block_start>mask=(ulabels<eq>ulabel)<line_sep>yidx,xidx=np.nonzero(mask)<line_sep>color=rgb[yidx[0] xidx[0]]<if_stmt>ulabel<eq>0# background
<block_start>colors.append(color)<block_end><else_stmt><block_start>sublabels,num_features=ndimage.label(mask)<line_sep>print('found {} sublabels for {}'.format(num_features color))<line_sep>subslices=ndimage.find_objects(sublabels num_features)<line_sep>labels[mask]=sublabels[mask]+max_label<line_sep>max_label<augadd>num_features<assert_stmt>labels.max()<eq>max_label<line_sep>slices.extend(subslices)<line_sep>colors.extend([color]<times>num_features)<block_end><block_end>colors=np.array(colors)<line_sep>colors[0 :]=255<line_sep>randocolors=np.random.randint(128 size=(max_label+1 3) dtype=np.uint8)+128<if_stmt>opts.random_colors<block_start>colors=randocolors<block_end>save_debug_image(opts 'labels' randocolors[labels])<line_sep>slices_big=[]<for_stmt>spair slices<block_start>spair_big=[]<for_stmt>s spair<block_start>spair_big.append(slice(s.start s.stop+2))<block_end>slices_big.append(tuple(spair_big))<block_end><return>max_label labels slices_big colors<block_end>######################################################################
<def_stmt>follow_contour l_subrect cur_label startpoints pos<block_start>start=pos<line_sep>cur_dir=DIR_RIGHT<line_sep>contour_info=[]<while_stmt><true><block_start>ooffs=OPP_OFFSET[cur_dir]<line_sep>noffs=NEIGHBOR_OFFSET[cur_dir]<line_sep>doffs=DIAG_OFFSET[cur_dir]<line_sep>neighbor=tuple(pos+noffs)<line_sep>diag=tuple(pos+doffs)<line_sep>opp=tuple(pos+ooffs)<assert_stmt>l_subrect[pos]<eq>cur_label<assert_stmt>l_subrect[opp]<ne>cur_label<line_sep>contour_info.append(pos+(cur_dir l_subrect[opp]))<line_sep>startpoints[pos]=<false><if_stmt>l_subrect[neighbor]<ne>cur_label<block_start>cur_dir=TURN_RIGHT[cur_dir]<block_end><elif_stmt>l_subrect[diag]<eq>cur_label<block_start>pos=diag<line_sep>cur_dir=TURN_LEFT[cur_dir]<block_end><else_stmt><block_start>pos=neighbor<block_end><if_stmt>pos<eq>start<and>cur_dir<eq>DIR_RIGHT<block_start><break><block_end><block_end>n=len(contour_info)<line_sep>contour_info=np.array(contour_info)<line_sep>clabels=contour_info[: 3]<line_sep># set of unique labels for this contour
opp_label_set=set(clabels)<assert_stmt>cur_label<not><in>opp_label_set<line_sep># if multiple labels and one wraps around, correct this
<if_stmt>len(opp_label_set)<g>1<and>clabels[0]<eq>clabels[-1]<block_start>idx=np.nonzero(clabels<ne>clabels[0])[0][0]<line_sep>perm=np.hstack((np.arange(idx n) np.arange(idx)))<line_sep>contour_info=contour_info[perm]<line_sep>clabels=contour_info[: 3]<block_end># make sure no wraparound
<assert_stmt>len(opp_label_set)<eq>1<or>clabels[0]<ne>clabels[-1]<line_sep># apply offset to get contour points
cpoints=contour_info[: :2].astype(np.float32)<line_sep>cdirs=contour_info[: 2]<line_sep>cpoints<augadd>0.5<times>(OPP_OFFSET[cdirs]-NEIGHBOR_OFFSET[cdirs]+1)<line_sep># put points in xy format
cpoints=cpoints[: ::-1]<if_stmt>len(opp_label_set)<eq>1<block_start>idx=np.arange(len(cpoints))<line_sep>xyi=zip(cpoints[: 0] cpoints[: 1] idx)<line_sep>imin=min(xyi)<line_sep>i=imin[-1]<line_sep>cpoints=np.vstack((cpoints[i:] cpoints[:i]))<assert_stmt>np.all(clabels<eq>clabels[0])<block_end><return>cpoints clabels<block_end>######################################################################
<def_stmt>split_contour cpoints clabels<block_start>edges=[]<line_sep>shifted=np.hstack(([-1] clabels[:-1]))<line_sep>istart=np.nonzero(clabels-shifted)[0]<line_sep>iend=np.hstack((istart[1:] len(clabels)))<for_stmt>start,end zip(istart iend)<block_start><assert_stmt>start<eq>0<or>clabels[start]<ne>clabels[start-1]<assert_stmt>clabels[end-1]<eq>clabels[start]<line_sep>opp_label=clabels[start]<if_stmt>end<l>len(cpoints)<block_start>edge=cpoints[start:end+1]<block_end><else_stmt><block_start>edge=np.vstack((cpoints[start:end] cpoints[0]))<block_end>edges.append((opp_label edge))<line_sep>start=end<block_end><return>edges<block_end>######################################################################
<def_stmt>store_contour_edges opts labels edge_lookup edge_list cur_label contour_edges<block_start>edge_refs=[]<for_stmt>opp_label,edge contour_edges<block_start><assert_stmt>cur_label<ne>opp_label<assert_stmt>cur_label<ne>0<line_sep>print(' storing contour edge with cur={}, opp={}'.format(cur_label opp_label))<line_sep>lmin=min(cur_label opp_label)<line_sep>lmax=max(cur_label opp_label)<if_stmt>lmin<eq>cur_label<block_start>step=1<block_end><else_stmt><block_start>step=-1<block_end>edge_to_add=edge[::step]<line_sep>p0=tuple(map(float edge_to_add[0]))<line_sep>p1=tuple(map(float edge_to_add[1]))<line_sep>key=(lmin lmax p0 p1)<if_stmt>key<in>edge_lookup<block_start>idx=edge_lookup[key]<if_stmt><not>np.all(edge_list[idx]<eq>edge_to_add)<block_start>debug=255<times>np.ones(labels.shape+(3 ) dtype=np.uint8)<line_sep>debug[labels<eq>cur_label]=(255 0 0)<line_sep>debug[labels<eq>opp_label]=(0 0 255)<line_sep>save_debug_image(opts 'debug_edge' debug)<line_sep>print('not forward/backward symmetric!')<line_sep>print(type(edge_to_add))<line_sep>print(type(edge_list[idx]))<line_sep>print(edge_list[idx].shape edge_list[idx].dtype)<line_sep>print(edge_to_add.shape edge_to_add.dtype)<line_sep>print(edge_to_add<eq>edge_list[idx])<assert_stmt>np.all(edge_list[idx]<eq>edge_to_add)<block_end><block_end><else_stmt><block_start>idx=len(edge_list)<line_sep>edge_list.append(edge_to_add)<line_sep>edge_lookup[key]=idx<block_end>edge_refs.append((idx opp_label step))<block_end><return>edge_refs<block_end>######################################################################
<def_stmt>_simplify_r opts p0 edge output_list<block_start><assert_stmt>np.all(output_list[-1][-1]<eq>p0)<assert_stmt><not>np.all(edge[0]<eq>p0)<line_sep>p1=edge[-1]<if_stmt>len(edge)<eq>1<block_start>output_list.append(edge)<line_sep><return><block_end>l=np.cross([p0[0] p0[1] 1] [p1[0] p1[1] 1])<line_sep>n=l[:2]<line_sep>w=np.linalg.norm(n)<if_stmt>w<eq>0<block_start>dist=np.linalg.norm(edge-p0 axis=1)<line_sep>idx=dist.argmax()<line_sep>dmax=np.inf<block_end><else_stmt><block_start>l<augdiv>w<line_sep>dist=np.abs(np.dot(edge l[:2])+l[2])<line_sep>idx=dist.argmax()<line_sep>dmax=dist[idx]<block_end><if_stmt>dmax<l>opts.edge_tol<block_start>output_list.append(np.array([p1]))<block_end><elif_stmt>len(edge)<g>3<block_start>_simplify_r(opts p0 edge[:idx+1] output_list)<line_sep>_simplify_r(opts edge[idx] edge[idx+1:] output_list)<block_end><else_stmt><block_start>output_list.append(edge)<block_end><block_end>######################################################################
<def_stmt>simplify opts edge<block_start><if_stmt><not>len(edge)<block_start><return>edge<block_end>p0=edge[0]<line_sep>output_list=[edge[[0]]]<line_sep>_simplify_r(opts p0 edge[1:] output_list)<line_sep><return>np.vstack(tuple(output_list))<block_end>######################################################################
<def_stmt>build_brep opts num_labels labels slices colors<block_start>brep=BoundaryRepresentation()<line_sep>label_range=range(1 num_labels+1)<line_sep>print('building boundary representation...')<line_sep># for each object
<for_stmt>cur_label,(yslc xslc) zip(label_range slices)<block_start>p0=(xslc.start-1 yslc.start-1)<line_sep># extract sub-rectangle for this label
l_subrect=labels[yslc xslc]<line_sep># get binary map of potential start points for contour in
# rightward direction
mask_subrect=(l_subrect<eq>cur_label)<line_sep>mask_shifted_down=np.vstack((np.zeros_like(mask_subrect[0].reshape(1 -1)) mask_subrect[:-1]))<line_sep>startpoints=mask_subrect&~mask_shifted_down<line_sep>print(' processing label {}/{} with area {}'.format(cur_label num_labels (l_subrect<eq>cur_label).sum()))<line_sep># while there are candidate locations to start at
<while_stmt>np.any(startpoints)# get the first one
<block_start>i,j=np.nonzero(startpoints)<line_sep>pos=(i[0] j[0])<line_sep># extract points and adjacent labels along contour,
# this modifies startpoints
cpoints,clabels=follow_contour(l_subrect cur_label startpoints pos)<line_sep>cpoints<augadd>p0<line_sep># split contour into (opp_label, points) pairs
contour_edges=split_contour(cpoints clabels)<line_sep># add them to our boundary representation
brep.add_edges(cur_label contour_edges)<block_end><block_end><if_stmt>opts.debug_images<block_start>orig_shape=(labels.shape[0]-2 labels.shape[1]-2)<line_sep>brep.save_debug_image(opts orig_shape colors 'brep')<block_end>simplified=<false><if_stmt>opts.node_tol<g>0<block_start>print('merging all nodes closer than {} px...'.format(opts.node_tol))<line_sep>brep.merge_nodes(opts.node_tol)<line_sep>simplified=<true><block_end><if_stmt>opts.edge_tol<g>0<block_start>print('simplifying edges...')<line_sep>brep.edge_list=[simplify(opts edge)<for>edge brep.edge_list]<line_sep>simplified=<true><block_end><if_stmt>opts.debug_images<and>simplified<block_start>orig_shape=(labels.shape[0]-2 labels.shape[1]-2)<line_sep>brep.save_debug_image(opts orig_shape colors 'brep_simplified')<block_end><return>brep<block_end>######################################################################
<def_stmt>num_fmt n<block_start>s='{:.2f}'.format(n)<if_stmt>'.'<in>s<block_start>s=re.sub(r'\.?0+$' '' s)<block_end><return>s<block_end><def_stmt>output_svg opts orig_shape brep colors<block_start><with_stmt>open(opts.output_file 'w')<as>svg<block_start>svg.write('<svg width="{}" height="{}" '<concat>'xmlns="http://www.w3.org/2000/svg">\n'.format(orig_shape[1] orig_shape[0]))<line_sep>svg.write(' <g stroke="#000" stroke-linejoin="bevel" '<concat>'stroke-width="{}">\n'.format(opts.stroke_width))<line_sep>cpacked=pack_rgb(colors.astype(int))<line_sep>cset=set(cpacked)<line_sep>lsets=[]<for_stmt>c cset<block_start>idx=np.nonzero(cpacked<eq>c)[0]<if_stmt>1<in>idx<block_start>lsets.insert(0 idx)<block_end><else_stmt><block_start>lsets.append(idx)<block_end><block_end><assert_stmt>1<in>lsets[0]<for_stmt>lset lsets<block_start>svg.write(' <g fill="#{:02x}{:02x}{:02x}">\n'.format(*colors[lset[0]]))<for_stmt>cur_label lset<block_start><if_stmt>cur_label<not><in>brep.label_lookup<block_start><continue><block_end>contours=brep.label_lookup[cur_label]<line_sep>svg.write(' <path d="')<for_stmt>i,contour enumerate(contours)<block_start><for_stmt>j,(edge_idx _ step) enumerate(contour)<block_start>edge=brep.edge_list[edge_idx][::step]<line_sep>iedge=edge.astype(int)<if_stmt>np.all(edge<eq>iedge)<block_start>pprev=iedge[0]<if_stmt>j<eq>0<block_start>svg.write('M{:d},{:d}'.format(*pprev))<block_end><for_stmt>pt iedge[1:]<block_start>svg.write('l{:d},{:d}'.format(*(pt-pprev)))<line_sep>pprev=pt<block_end><block_end><else_stmt><block_start><if_stmt>j<eq>0<block_start>svg.write('M{},{}'.format(*map(num_fmt edge[0])))<block_end><for_stmt>pt edge[1:]<block_start>svg.write('L{},{}'.format(*map(num_fmt pt)))<block_end><block_end><block_end>svg.write('Z')<block_end>svg.write('"')<if_stmt>cur_label<eq>1<and>opts.stroke_width<ne>opts.bg_stroke_width<block_start>svg.write(' stroke-width="{}"'.format(opts.bg_stroke_width))<block_end>svg.write('/>\n')<block_end>svg.write(' </g>\n')<block_end>svg.write(' </g>\n')<line_sep>svg.write('</svg>\n')<block_end>print('wrote' opts.output_file)<block_end>######################################################################
<def_stmt>main <block_start>opts=get_options()<line_sep>input_image=Image.open(opts.image)<if_stmt>opts.zoom<ne>1<block_start>w,h=input_image.size<line_sep>wnew=int(round(w<times>opts.zoom))<line_sep>hnew=int(round(h<times>opts.zoom))<line_sep>resample=Image.LANCZOS<if>opts.zoom<g>1<else>Image.LANCZOS<line_sep>input_image=input_image.resize((wnew hnew) resample)<line_sep>save_debug_image(opts 'resized' input_image)<block_end><if_stmt><not>opts.solid_colors<block_start>mask=get_mask(input_image opts)<line_sep># labels is a 2D array that ranges from 0 (background) to
# num_labels (inclusive), and slices are bounding rectangles for
# each non-zero label.
num_labels,labels,slices,colors=get_labels_and_colors_outlined(mask opts)<block_end><else_stmt><block_start>num_labels,labels,slices,colors=get_labels_and_colors_solid(input_image opts)<block_end><assert_stmt>len(slices)<eq>num_labels<assert_stmt>len(colors)<eq>num_labels+1<line_sep>brep=build_brep(opts num_labels labels slices colors)<line_sep>output_svg(opts labels.shape brep colors)<block_end>######################################################################
<if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
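# Hypothetical invocations (the script file name is an assumption; the flags are the
# ones defined in get_options above):
#
#   python outline_to_svg.py map_outlines.png -z 2 -e 1.5 -o map.svg
#   python outline_to_svg.py map_outlines.png -c map_colors.png -q 6
#   python outline_to_svg.py flat_color_map.png -S -r -R 42 -d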
|
import time

from core.security import session_required
from flask import Blueprint, Response, stream_with_context

stream = Blueprint('stream', __name__, template_folder='templates')


@stream.route('/log')
@session_required
def view_stream():
    def generate():
        with open('logs/nerve.log') as f:
            while True:
                yield f.read()
                time.sleep(1)

    return Response(stream_with_context(generate()), mimetype='text/plain')
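# Hypothetical usage sketch: because the endpoint streams text/plain indefinitely, a
# client can follow the log with an unbuffered request (host, port and URL prefix are
# assumptions for illustration):
#
#   curl -N http://localhost:8080/log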
|
"""
提目:输入两棵二叉树A和B,判断B是不是A的子结构。
总结:使用递归,注意判断好结束条件
"""<import_stmt>unittest<import_from_stmt>collections deque<class_stmt>TreeNode(object)<block_start><def_stmt>__init__ self x<block_start>self.val=x<line_sep>self.left=<none><line_sep>self.right=<none><block_end><def_stmt>__repr__ self<block_start><return>f'<{self.val}, {self.left}, {self.right}>'<block_end><block_end># 树的一些基本算法
<class_stmt>BinaryTree(object)<block_start><def_stmt>__init__ self tree=<none><block_start>self.tree=tree<block_end><def_stmt>construct_tree self l:TreeNode d:TreeNode r:TreeNode<block_start><if_stmt><not>self.tree<block_start>self.tree=d<block_end>d.left=l<line_sep>d.right=r<block_end><def_stmt>pre_traversal self<block_start>r=[]<def_stmt>f t<block_start><if_stmt><not>t<block_start><return><block_end>r.append(t.val)<line_sep>f(t.left)<line_sep>f(t.right)<block_end>f(self.tree)<line_sep><return>r<block_end><def_stmt>in_traversal self<block_start>r=[]<def_stmt>f t<block_start><if_stmt><not>t<block_start><return><block_end>f(t.left)<line_sep>r.append(t.val)<line_sep>f(t.right)<block_end>f(self.tree)<line_sep><return>r<block_end><def_stmt>post_traversal self<block_start>r=[]<def_stmt>f t<block_start><if_stmt><not>t<block_start><return><block_end>f(t.left)<line_sep>f(t.right)<line_sep>r.append(t.val)<block_end>f(self.tree)<line_sep><return>r<block_end><def_stmt>bfs self<block_start>r=[]<line_sep>q=deque([self.tree])<while_stmt>q<block_start>n=q.popleft()<if_stmt>n<block_start>r.append(n.val)<line_sep>q.append(n.left)<line_sep>q.append(n.right)<block_end><block_end><return>r<block_end><block_end><def_stmt>is_subtree t1:TreeNode t2:TreeNode<block_start>r=<false><if_stmt>t1<and>t2# 若根节点值相同,则判断此根节点下所有节点值是否相同,并保留结果 r
<block_start><if_stmt>t1.val<eq>t2.val<block_start>r=has_subtree(t1 t2)<block_end># If the check above did not succeed, try t1's children
<if_stmt><not>r<block_start>r=is_subtree(t1.left t2)<or>is_subtree(t1.right t2)<block_end><block_end><return>r<block_end><def_stmt>has_subtree t1 t2<block_start><if_stmt><not>t2<block_start><return><true><block_end><if_stmt><not>t1<block_start><return><false><block_end><if_stmt>t1.val<ne>t2.val<block_start><return><false><block_end><return>has_subtree(t1.left t2.left)<and>has_subtree(t1.right t2.right)<block_end><class_stmt>Test(unittest.TestCase)<block_start><def_stmt>test self<block_start>n1=TreeNode(8)<line_sep>n2=TreeNode(8)<line_sep>n3=TreeNode(7)<line_sep>n4=TreeNode(9)<line_sep>n5=TreeNode(2)<line_sep>n6=TreeNode(4)<line_sep>n7=TreeNode(7)<line_sep>m1=TreeNode(8)<line_sep>m2=TreeNode(9)<line_sep>m3=TreeNode(2)<line_sep>t1=BinaryTree()<line_sep>t1.construct_tree(n2 n1 n3)<line_sep>t1.construct_tree(n4 n2 n5)<line_sep>t1.construct_tree(n6 n5 n7)<line_sep>t2=BinaryTree()<line_sep>t2.construct_tree(m2 m1 m3)<line_sep>self.assertEqual(<true> is_subtree(t1.tree t2.tree))<block_end><block_end>
|
<import_stmt>argparse<import_stmt>logging<import_stmt>sys<import_from_stmt>pathlib Path<import_stmt>torch<import_stmt>torch.nn<as>nn<import_stmt>torch.nn.functional<as>F<import_stmt>wandb<import_from_stmt>torch optim<import_from_stmt>torch.utils.data DataLoader random_split<import_from_stmt>tqdm tqdm<import_from_stmt>utils.data_loading BasicDataset CarvanaDataset<import_from_stmt>utils.dice_score dice_loss<import_from_stmt>evaluate evaluate<import_from_stmt>unet UNet<line_sep>dir_img=Path('./data/imgs/')<line_sep>dir_mask=Path('./data/masks/')<line_sep>dir_checkpoint=Path('./checkpoints/')<def_stmt>train_net net device epochs:int=5 batch_size:int=1 learning_rate:float=0.001 val_percent:float=0.1 save_checkpoint:bool=<true> img_scale:float=0.5 amp:bool=<false># 1. Create dataset
<block_start><try_stmt><block_start>dataset=CarvanaDataset(dir_img dir_mask img_scale)<block_end><except_stmt>(AssertionError RuntimeError)<block_start>dataset=BasicDataset(dir_img dir_mask img_scale)<block_end># 2. Split into train / validation partitions
n_val=int(len(dataset)<times>val_percent)<line_sep>n_train=len(dataset)-n_val<line_sep>train_set,val_set=random_split(dataset [n_train n_val] generator=torch.Generator().manual_seed(0))<line_sep># 3. Create data loaders
loader_args=dict(batch_size=batch_size num_workers=4 pin_memory=<true>)<line_sep>train_loader=DataLoader(train_set shuffle=<true> **loader_args)<line_sep>val_loader=DataLoader(val_set shuffle=<false> drop_last=<true> **loader_args)<line_sep># (Initialize logging)
experiment=wandb.init(project='U-Net' resume='allow' anonymous='must')<line_sep>experiment.config.update(dict(epochs=epochs batch_size=batch_size learning_rate=learning_rate val_percent=val_percent save_checkpoint=save_checkpoint img_scale=img_scale amp=amp))<line_sep>logging.info(f'''Starting training:
Epochs: {epochs}
Batch size: {batch_size}
Learning rate: {learning_rate}
Training size: {n_train}
Validation size: {n_val}
Checkpoints: {save_checkpoint}
Device: {device.type}
Images scaling: {img_scale}
Mixed Precision: {amp}
''')<line_sep># 4. Set up the optimizer, the loss, the learning rate scheduler and the loss scaling for AMP
optimizer=optim.RMSprop(net.parameters() lr=learning_rate weight_decay=1e-8 momentum=0.9)<line_sep>scheduler=optim.lr_scheduler.ReduceLROnPlateau(optimizer 'max' patience=2)# goal: maximize Dice score
grad_scaler=torch.cuda.amp.GradScaler(enabled=amp)<line_sep>criterion=nn.CrossEntropyLoss()<line_sep>global_step=0<line_sep># 5. Begin training
<for_stmt>epoch range(epochs)<block_start>net.train()<line_sep>epoch_loss=0<with_stmt>tqdm(total=n_train desc=f'Epoch {epoch+1}/{epochs}' unit='img')<as>pbar<block_start><for_stmt>batch train_loader<block_start>images=batch['image']<line_sep>true_masks=batch['mask']<assert_stmt>images.shape[1]<eq>net.n_channels f'Network has been defined with {net.n_channels} input channels, '<concat>f'but loaded images have {images.shape[1]} channels. Please check that '<concat>'the images are loaded correctly.'<line_sep>images=images.to(device=device dtype=torch.float32)<line_sep>true_masks=true_masks.to(device=device dtype=torch.long)<with_stmt>torch.cuda.amp.autocast(enabled=amp)<block_start>masks_pred=net(images)<line_sep>loss=criterion(masks_pred true_masks)+dice_loss(F.softmax(masks_pred dim=1).float() F.one_hot(true_masks net.n_classes).permute(0 3 1 2).float() multiclass=<true>)<block_end>optimizer.zero_grad(set_to_none=<true>)<line_sep>grad_scaler.scale(loss).backward()<line_sep>grad_scaler.step(optimizer)<line_sep>grad_scaler.update()<line_sep>pbar.update(images.shape[0])<line_sep>global_step<augadd>1<line_sep>epoch_loss<augadd>loss.item()<line_sep>experiment.log({'train loss':loss.item() 'step':global_step 'epoch':epoch})<line_sep>pbar.set_postfix(**{'loss (batch)':loss.item()})<line_sep># Evaluation round
<if_stmt>global_step%(n_train<floordiv>(10<times>batch_size))<eq>0<block_start>histograms={}<for_stmt>tag,value net.named_parameters()<block_start>tag=tag.replace('/' '.')<line_sep>histograms['Weights/'+tag]=wandb.Histogram(value.data.cpu())<line_sep>histograms['Gradients/'+tag]=wandb.Histogram(value.grad.data.cpu())<block_end>val_score=evaluate(net val_loader device)<line_sep>scheduler.step(val_score)<line_sep>logging.info('Validation Dice score: {}'.format(val_score))<line_sep>experiment.log({'learning rate':optimizer.param_groups[0]['lr'] 'validation Dice':val_score 'images':wandb.Image(images[0].cpu()) 'masks':{'true':wandb.Image(true_masks[0].float().cpu()) 'pred':wandb.Image(torch.softmax(masks_pred dim=1)[0].float().cpu()) } 'step':global_step 'epoch':epoch **histograms})<block_end><block_end><block_end><if_stmt>save_checkpoint<block_start>Path(dir_checkpoint).mkdir(parents=<true> exist_ok=<true>)<line_sep>torch.save(net.state_dict() str(dir_checkpoint/'checkpoint_epoch{}.pth'.format(epoch+1)))<line_sep>logging.info(f'Checkpoint {epoch+1} saved!')<block_end><block_end><block_end><def_stmt>get_args <block_start>parser=argparse.ArgumentParser(description='Train the UNet on images and target masks')<line_sep>parser.add_argument('--epochs' '-e' metavar='E' type=int default=5 help='Number of epochs')<line_sep>parser.add_argument('--batch-size' '-b' dest='batch_size' metavar='B' type=int default=1 help='Batch size')<line_sep>parser.add_argument('--learning-rate' '-l' metavar='LR' type=float default=0.00001 help='Learning rate' dest='lr')<line_sep>parser.add_argument('--load' '-f' type=str default=<false> help='Load model from a .pth file')<line_sep>parser.add_argument('--scale' '-s' type=float default=0.5 help='Downscaling factor of the images')<line_sep>parser.add_argument('--validation' '-v' dest='val' type=float default=10.0 help='Percent of the data that is used as validation (0-100)')<line_sep>parser.add_argument('--amp' action='store_true' default=<false> help='Use mixed precision')<line_sep><return>parser.parse_args()<block_end><if_stmt>__name__<eq>'__main__'<block_start>args=get_args()<line_sep>logging.basicConfig(level=logging.INFO format='%(levelname)s: %(message)s')<line_sep>device=torch.device('cuda'<if>torch.cuda.is_available()<else>'cpu')<line_sep>logging.info(f'Using device {device}')<line_sep># Change here to adapt to your data
# n_channels=3 for RGB images
# n_classes is the number of probabilities you want to get per pixel
net=UNet(n_channels=3 n_classes=2 bilinear=<true>)<line_sep>logging.info(f'Network:\n'<concat>f'\t{net.n_channels} input channels\n'<concat>f'\t{net.n_classes} output channels (classes)\n'<concat>f'\t{"Bilinear"<if>net.bilinear<else>"Transposed conv"} upscaling')<if_stmt>args.load<block_start>net.load_state_dict(torch.load(args.load map_location=device))<line_sep>logging.info(f'Model loaded from {args.load}')<block_end>net.to(device=device)<try_stmt><block_start>train_net(net=net epochs=args.epochs batch_size=args.batch_size learning_rate=args.lr device=device img_scale=args.scale val_percent=args.val/100 amp=args.amp)<block_end><except_stmt>KeyboardInterrupt<block_start>torch.save(net.state_dict() 'INTERRUPTED.pth')<line_sep>logging.info('Saved interrupt')<line_sep>sys.exit(0)<block_end><block_end>
|
#!flask/bin/python
<import_from_stmt>app app<line_sep>
|
<import_stmt>os<import_from_stmt>clang.cindex Config<if_stmt>'CLANG_LIBRARY_PATH'<in>os.environ<block_start>Config.set_library_path(os.environ['CLANG_LIBRARY_PATH'])<block_end><import_from_stmt>clang.cindex TokenKind<import_stmt>unittest<class_stmt>TestTokenKind(unittest.TestCase)<block_start><def_stmt>test_constructor self<block_start>"""Ensure TokenKind constructor works as expected."""<line_sep>t=TokenKind(5 'foo')<line_sep>self.assertEqual(t.value 5)<line_sep>self.assertEqual(t.name 'foo')<block_end><def_stmt>test_bad_register self<block_start>"""Ensure a duplicate value is rejected for registration."""<with_stmt>self.assertRaises(ValueError)<block_start>TokenKind.register(2 'foo')<block_end><block_end><def_stmt>test_unknown_value self<block_start>"""Ensure trying to fetch an unknown value raises."""<with_stmt>self.assertRaises(ValueError)<block_start>TokenKind.from_value(-1)<block_end><block_end><def_stmt>test_registration self<block_start>"""Ensure that items registered appear as class attributes."""<line_sep>self.assertTrue(hasattr(TokenKind 'LITERAL'))<line_sep>literal=TokenKind.LITERAL<line_sep>self.assertIsInstance(literal TokenKind)<block_end><def_stmt>test_from_value self<block_start>"""Ensure registered values can be obtained from from_value()."""<line_sep>t=TokenKind.from_value(3)<line_sep>self.assertIsInstance(t TokenKind)<line_sep>self.assertEqual(t TokenKind.LITERAL)<block_end><def_stmt>test_repr self<block_start>"""Ensure repr() works."""<line_sep>r=repr(TokenKind.LITERAL)<line_sep>self.assertEqual(r 'TokenKind.LITERAL')<block_end><block_end>
|
"""
This script calculates the minimum spanning tree of a shapefile network
"""<import_stmt>math<import_stmt>os<import_stmt>networkx<as>nx<import_stmt>pandas<as>pd<import_from_stmt>geopandas GeoDataFrame<as>gdf<import_from_stmt>networkx.algorithms.approximation.steinertree steiner_tree<import_from_stmt>shapely.geometry LineString<import_from_stmt>typing List<import_stmt>cea.config<import_stmt>cea.inputlocator<import_from_stmt>cea.constants SHAPEFILE_TOLERANCE<line_sep>__author__="<NAME>"<line_sep>__copyright__="Copyright 2017, Architecture and Building Systems - ETH Zurich"<line_sep>__credits__=["<NAME>"]<line_sep>__license__="MIT"<line_sep>__version__="0.1"<line_sep>__maintainer__="<NAME>"<line_sep>__email__="<EMAIL>"<line_sep>__status__="Production"<def_stmt>calc_steiner_spanning_tree crs_projected temp_path_potential_network_shp output_network_folder temp_path_building_centroids_shp path_output_edges_shp path_output_nodes_shp weight_field type_mat_default pipe_diameter_default type_network total_demand_location allow_looped_networks optimization_flag plant_building_names disconnected_building_names<block_start>"""
Calculate the minimum spanning tree of the network. Note that this function can't be run in parallel in its
present form.
:param str crs_projected: e.g. "+proj=utm +zone=48N +ellps=WGS84 +datum=WGS84 +units=m +no_defs"
:param str temp_path_potential_network_shp: e.g. "TEMP/potential_network.shp"
:param str output_network_folder: "{general:scenario}/inputs/networks/DC"
:param str temp_path_building_centroids_shp: e.g. "%TEMP%/nodes_buildings.shp"
:param str path_output_edges_shp: "{general:scenario}/inputs/networks/DC/edges.shp"
:param str path_output_nodes_shp: "{general:scenario}/inputs/networks/DC/nodes.shp"
:param str weight_field: e.g. "Shape_Leng"
:param str type_mat_default: e.g. "T1"
:param float pipe_diameter_default: e.g. 150
:param str type_network: "DC" or "DH"
:param str total_demand_location: "{general:scenario}/outputs/data/demand/Total_demand.csv"
:param bool create_plant: e.g. True
:param bool allow_looped_networks:
:param bool optimization_flag:
:param List[str] plant_building_names: e.g. ``['B001']``
:param List[str] disconnected_building_names: e.g. ``['B002', 'B010', 'B004', 'B005', 'B009']``
:return: ``(mst_edges, mst_nodes)``
"""<line_sep># read shapefile into networkx format into a directed potential_network_graph, this is the potential network
potential_network_graph=nx.read_shp(temp_path_potential_network_shp)<line_sep>building_nodes_graph=nx.read_shp(temp_path_building_centroids_shp)<line_sep># transform to an undirected potential_network_graph
iterator_edges=potential_network_graph.edges(data=<true>)<line_sep>G=nx.Graph()<for_stmt>(x y data) iterator_edges<block_start>x=(round(x[0] SHAPEFILE_TOLERANCE) round(x[1] SHAPEFILE_TOLERANCE))<line_sep>y=(round(y[0] SHAPEFILE_TOLERANCE) round(y[1] SHAPEFILE_TOLERANCE))<line_sep>G.add_edge(x y weight=data[weight_field])<block_end># get the building nodes and coordinates
iterator_nodes=building_nodes_graph.nodes(data=<true>)<line_sep>terminal_nodes_coordinates=[]<line_sep>terminal_nodes_names=[]<for_stmt>coordinates,data iterator_nodes._nodes.items()<block_start>building_name=data['Name']<if_stmt>building_name<in>disconnected_building_names<block_start>print("Building {} is considered to be disconnected and it is not included".format(building_name))<block_end><else_stmt><block_start>terminal_nodes_coordinates.append((round(coordinates[0] SHAPEFILE_TOLERANCE) round(coordinates[1] SHAPEFILE_TOLERANCE)))<line_sep>terminal_nodes_names.append(data['Name'])<block_end><block_end># calculate steiner spanning tree of undirected potential_network_graph
<try_stmt><block_start>mst_non_directed=nx.Graph(steiner_tree(G terminal_nodes_coordinates))<line_sep>nx.write_shp(mst_non_directed output_network_folder)# need to write to disk and then import again
mst_nodes=gdf.from_file(path_output_nodes_shp)<line_sep>mst_edges=gdf.from_file(path_output_edges_shp)<block_end><except_stmt><block_start><raise>ValueError('There was an error while creating the Steiner tree. '<concat>'Check the streets.shp for isolated/disconnected streets (lines) and erase them, '<concat>'the Steiner tree does not support disconnected graphs. '<concat>'If no disconnected streets can be found, try increasing the SHAPEFILE_TOLERANCE in cea.constants and run again. '<concat>'Otherwise, try using the Feature to Line tool of ArcMap with a tolerance of around 10m to solve the issue.')<block_end># POPULATE FIELDS IN NODES
pointer_coordinates_building_names=dict(zip(terminal_nodes_coordinates terminal_nodes_names))<def_stmt>populate_fields coordinate<block_start><if_stmt>coordinate<in>terminal_nodes_coordinates<block_start><return>pointer_coordinates_building_names[coordinate]<block_end><else_stmt><block_start><return>"NONE"<block_end><block_end>mst_nodes['coordinates']=mst_nodes['geometry'].apply(<lambda>x:(round(x.coords[0][0] SHAPEFILE_TOLERANCE) round(x.coords[0][1] SHAPEFILE_TOLERANCE)))<line_sep>mst_nodes['Building']=mst_nodes['coordinates'].apply(<lambda>x:populate_fields(x))<line_sep>mst_nodes['Name']=mst_nodes['FID'].apply(<lambda>x:"NODE"+str(x))<line_sep>mst_nodes['Type']=mst_nodes['Building'].apply(<lambda>x:'CONSUMER'<if>x<ne>"NONE"<else>"NONE")<line_sep># do some checks to see that the building names was not compromised
<if_stmt>len(terminal_nodes_names)<ne>(len(mst_nodes['Building'].unique())-1)<block_start><raise>ValueError('There was an error while populating the nodes fields. '<concat>'One or more buildings could not be matched to nodes of the network. '<concat>'Try changing the constant SNAP_TOLERANCE in cea/constants.py to try to fix this')<block_end># POPULATE FIELDS IN EDGES
mst_edges.loc[: 'Type_mat']=type_mat_default<line_sep>mst_edges.loc[: 'Pipe_DN']=pipe_diameter_default<line_sep>mst_edges.loc[: 'Name']=["PIPE"+str(x)<for>x mst_edges.index]<if_stmt>allow_looped_networks# add loops to the network by connecting None nodes that exist in the potential network
<block_start>mst_edges,mst_nodes=add_loops_to_network(G mst_non_directed mst_nodes mst_edges type_mat_default pipe_diameter_default)<line_sep># mst_edges.drop(['weight'], inplace=True, axis=1)
<block_end><if_stmt>optimization_flag<block_start><for_stmt>building plant_building_names<block_start>building_anchor=building_node_from_name(building mst_nodes)<line_sep>mst_nodes,mst_edges=add_plant_close_to_anchor(building_anchor mst_nodes mst_edges type_mat_default pipe_diameter_default)<block_end><block_end><elif_stmt>os.path.exists(total_demand_location)<block_start><if_stmt>len(plant_building_names)<g>0<block_start>building_anchor=mst_nodes[mst_nodes['Building'].isin(plant_building_names)]<block_end><else_stmt><block_start>building_anchor=calc_coord_anchor(total_demand_location mst_nodes type_network)<block_end>mst_nodes,mst_edges=add_plant_close_to_anchor(building_anchor mst_nodes mst_edges type_mat_default pipe_diameter_default)<block_end># GET COORDINATE AND SAVE FINAL VERSION TO DISK
mst_edges.crs=crs_projected<line_sep>mst_nodes.crs=crs_projected<line_sep>mst_edges['length_m']=mst_edges['weight']<line_sep>mst_edges[['geometry' 'length_m' 'Type_mat' 'Name' 'Pipe_DN']].to_file(path_output_edges_shp driver='ESRI Shapefile')<line_sep>mst_nodes[['geometry' 'Building' 'Name' 'Type']].to_file(path_output_nodes_shp driver='ESRI Shapefile')<block_end><def_stmt>add_loops_to_network G mst_non_directed new_mst_nodes mst_edges type_mat pipe_dn<block_start>added_a_loop=<false><line_sep># Identify all NONE type nodes in the steiner tree
<for_stmt>node_number,node_coords zip(new_mst_nodes.index new_mst_nodes['coordinates'])<block_start><if_stmt>new_mst_nodes['Type'][node_number]<eq>'NONE'# find neighbours of nodes in the potential network and steiner network
<block_start>potential_neighbours=G[node_coords]<line_sep>steiner_neighbours=mst_non_directed[node_coords]<line_sep># check if there are differences, if yes, an edge was deleted here
<if_stmt><not>set(potential_neighbours.keys())<eq>set(steiner_neighbours.keys())<block_start>new_neighbour_list=[]<for_stmt>a potential_neighbours.keys()<block_start><if_stmt>a<not><in>steiner_neighbours.keys()<block_start>new_neighbour_list.append(a)<block_end><block_end># check if the node that is additional in the potential network also exists in the steiner network
<for_stmt>new_neighbour new_neighbour_list<block_start><if_stmt>new_neighbour<in>list(new_mst_nodes['coordinates'].values)# check if it is a none type
# write out index of this node
<block_start>node_index=list(new_mst_nodes['coordinates'].values).index(new_neighbour)<if_stmt>new_mst_nodes['Type'][node_index]<eq>'NONE'# create new edge
<block_start>line=LineString((node_coords new_neighbour))<if_stmt><not>line<in>mst_edges['geometry']<block_start>mst_edges=mst_edges.append({"geometry":line "Pipe_DN":pipe_dn "Type_mat":type_mat "Name":"PIPE"+str(mst_edges.Name.count())} ignore_index=<true>)<line_sep>added_a_loop=<true><block_end>mst_edges.reset_index(inplace=<true> drop=<true>)<block_end><block_end><block_end><block_end><block_end><block_end><if_stmt><not>added_a_loop<block_start>print('No first degree loop added. Trying two nodes apart.')<line_sep># Identify all NONE type nodes in the steiner tree
<for_stmt>node_number,node_coords zip(new_mst_nodes.index new_mst_nodes['coordinates'])<block_start><if_stmt>new_mst_nodes['Type'][node_number]<eq>'NONE'# find neighbours of nodes in the potential network and steiner network
<block_start>potential_neighbours=G[node_coords]<line_sep>steiner_neighbours=mst_non_directed[node_coords]<line_sep># check if there are differences, if yes, an edge was deleted here
<if_stmt><not>set(potential_neighbours.keys())<eq>set(steiner_neighbours.keys())<block_start>new_neighbour_list=[]<for_stmt>a potential_neighbours.keys()<block_start><if_stmt>a<not><in>steiner_neighbours.keys()<block_start>new_neighbour_list.append(a)<block_end><block_end># check if the node that is additional in the potential network does not exist in the steiner network
<for_stmt>new_neighbour new_neighbour_list<block_start><if_stmt>new_neighbour<not><in>list(new_mst_nodes['coordinates'].values)# find neighbours of that node
<block_start>second_degree_pot_neigh=list(G[new_neighbour].keys())<for_stmt>potential_second_deg_neighbour second_degree_pot_neigh<block_start><if_stmt>potential_second_deg_neighbour<in>list(new_mst_nodes['coordinates'].values)<and>potential_second_deg_neighbour<ne>node_coords# check if it is a none type
# write out index of this node
<block_start>node_index=list(new_mst_nodes['coordinates'].values).index(potential_second_deg_neighbour)<if_stmt>new_mst_nodes['Type'][node_index]<eq>'NONE'# create new edge
<block_start>line=LineString((node_coords new_neighbour))<if_stmt>line<not><in>mst_edges['geometry']<block_start>mst_edges=mst_edges.append({"geometry":line "Pipe_DN":pipe_dn "Type_mat":type_mat "Name":"PIPE"+str(mst_edges.Name.count())} ignore_index=<true>)<block_end># Add new node from potential network to steiner tree
# create copy of selected node and add to list of all nodes
copy_of_new_mst_nodes=new_mst_nodes.copy()<line_sep>x_distance=new_neighbour[0]-node_coords[0]<line_sep>y_distance=new_neighbour[1]-node_coords[1]<line_sep>copy_of_new_mst_nodes.geometry=copy_of_new_mst_nodes.translate(xoff=x_distance yoff=y_distance)<line_sep>selected_node=copy_of_new_mst_nodes[copy_of_new_mst_nodes["coordinates"]<eq>node_coords]<line_sep>selected_node.loc[: "Name"]="NODE"+str(new_mst_nodes.Name.count())<line_sep>selected_node.loc[: "Type"]="NONE"<line_sep>selected_node["coordinates"]=selected_node.geometry.values[0].coords<if_stmt>selected_node["coordinates"].values<not><in>new_mst_nodes["coordinates"].values<block_start>new_mst_nodes=new_mst_nodes.append(selected_node)<block_end>new_mst_nodes.reset_index(inplace=<true> drop=<true>)<line_sep>line2=LineString((new_neighbour potential_second_deg_neighbour))<if_stmt>line2<not><in>mst_edges['geometry']<block_start>mst_edges=mst_edges.append({"geometry":line2 "Pipe_DN":pipe_dn "Type_mat":type_mat "Name":"PIPE"+str(mst_edges.Name.count())} ignore_index=<true>)<line_sep>added_a_loop=<true><block_end>mst_edges.reset_index(inplace=<true> drop=<true>)<block_end><block_end><block_end><block_end><block_end><block_end><block_end><block_end><block_end><if_stmt><not>added_a_loop<block_start>print('No loops added.')<block_end><return>mst_edges new_mst_nodes<block_end><def_stmt>calc_coord_anchor total_demand_location nodes_df type_network<block_start>total_demand=pd.read_csv(total_demand_location)<line_sep>nodes_names_demand=nodes_df.merge(total_demand left_on="Building" right_on="Name" how="inner")<if_stmt>type_network<eq>"DH"<block_start>field="QH_sys_MWhyr"<block_end><elif_stmt>type_network<eq>"DC"<block_start>field="QC_sys_MWhyr"<block_end><else_stmt><block_start><raise>ValueError("Invalid value for variable 'type_network': {type_network}".format(type_network=type_network))<block_end>max_value=nodes_names_demand[field].max()<line_sep>building_series=nodes_names_demand[nodes_names_demand[field]<eq>max_value]<line_sep><return>building_series<block_end><def_stmt>building_node_from_name building_name nodes_df<block_start>building_series=nodes_df[nodes_df['Building']<eq>building_name]<line_sep><return>building_series<block_end><def_stmt>add_plant_close_to_anchor building_anchor new_mst_nodes mst_edges type_mat pipe_dn# find closest node
<block_start>copy_of_new_mst_nodes=new_mst_nodes.copy()<line_sep>building_coordinates=building_anchor.geometry.values[0].coords<line_sep>x1=building_coordinates[0][0]<line_sep>y1=building_coordinates[0][1]<line_sep>delta=10E24# big number
<for_stmt>node copy_of_new_mst_nodes.iterrows()<block_start><if_stmt>node[1]['Type']<eq>'NONE'<block_start>x2=node[1].geometry.coords[0][0]<line_sep>y2=node[1].geometry.coords[0][1]<line_sep>distance=math.sqrt((x2-x1)<power>2+(y2-y1)<power>2)<if_stmt>0<l>distance<l>delta<block_start>delta=distance<line_sep>node_id=node[1].Name<block_end><block_end><block_end>pd.options.mode.chained_assignment=<none># avoid warning
# create copy of selected node and add to list of all nodes
copy_of_new_mst_nodes.geometry=copy_of_new_mst_nodes.translate(xoff=1 yoff=1)<line_sep>selected_node=copy_of_new_mst_nodes[copy_of_new_mst_nodes["Name"]<eq>node_id]<line_sep>selected_node.loc[: "Name"]="NODE"+str(new_mst_nodes.Name.count())<line_sep>selected_node.loc[: "Type"]="PLANT"<line_sep>new_mst_nodes=new_mst_nodes.append(selected_node)<line_sep>new_mst_nodes.reset_index(inplace=<true> drop=<true>)<line_sep># create new edge
point1=(selected_node.geometry.x selected_node.geometry.y)<line_sep>point2=(new_mst_nodes[new_mst_nodes["Name"]<eq>node_id].geometry.x new_mst_nodes[new_mst_nodes["Name"]<eq>node_id].geometry.y)<line_sep>line=LineString((point1 point2))<line_sep>mst_edges=mst_edges.append({"geometry":line "Pipe_DN":pipe_dn "Type_mat":type_mat "Name":"PIPE"+str(mst_edges.Name.count())} ignore_index=<true>)<line_sep>mst_edges.reset_index(inplace=<true> drop=<true>)<line_sep><return>new_mst_nodes mst_edges<block_end><def_stmt>main config<block_start><assert_stmt>os.path.exists(config.scenario) 'Scenario not found: %s'%config.scenario<line_sep>locator=cea.inputlocator.InputLocator(scenario=config.scenario)<line_sep>weight_field='Shape_Leng'<line_sep>type_mat_default=config.network_layout.type_mat<line_sep>pipe_diameter_default=config.network_layout.pipe_diameter<line_sep>type_network=config.network_layout.network_type<line_sep>create_plant=config.network_layout.create_plant<line_sep>output_substations_shp=locator.get_temporary_file("nodes_buildings.shp")<line_sep>path_potential_network=locator.get_temporary_file("potential_network.shp")# shapefile, location of output.
output_edges=locator.get_network_layout_edges_shapefile(type_network '')<line_sep>output_nodes=locator.get_network_layout_nodes_shapefile(type_network '')<line_sep>output_network_folder=locator.get_input_network_folder(type_network '')<line_sep>total_demand_location=locator.get_total_demand()<line_sep>calc_steiner_spanning_tree(path_potential_network output_network_folder output_substations_shp output_edges output_nodes weight_field type_mat_default pipe_diameter_default type_network total_demand_location create_plant)<block_end><if_stmt>__name__<eq>'__main__'<block_start>main(cea.config.Configuration())<block_end>
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Trains and Evaluates the MNIST network using a feed dictionary."""<line_sep># pylint: disable=missing-docstring
<import_stmt>os<import_stmt>time<import_stmt>numpy<import_from_stmt>six.moves xrange# pylint: disable=redefined-builtin
<import_stmt>tensorflow<as>tf<import_stmt>math<import_stmt>input_data<import_stmt>numpy<as>np<import_from_stmt>multiprocessing Pool<import_stmt>threading<import_from_stmt>tqdm tqdm trange<def_stmt>placeholder_inputs batch_size=16 num_frame_per_clib=16 crop_size=224 rgb_channels=3 flow_channels=2<block_start>"""Generate placeholder variables to represent the input tensors.
These placeholders are used as inputs by the rest of the model building
code and will be fed from the downloaded data in the .run() loop, below.
Args:
batch_size: The batch size will be baked into both placeholders.
num_frame_per_clib: The number of frames per clip.
crop_size: The crop size of each clip.
channels: The number of input channels of each clip.
Returns:
images_placeholder: Images placeholder.
labels_placeholder: Labels placeholder.
"""<line_sep># Note that the shapes of the placeholders match the shapes of the full
# image and label tensors, except the first dimension is now batch_size
# rather than the full size of the train or test data sets.
rgb_images_placeholder=tf.placeholder(tf.float32 shape=(batch_size num_frame_per_clib crop_size crop_size rgb_channels))<line_sep>flow_images_placeholder=tf.placeholder(tf.float32 shape=(batch_size num_frame_per_clib crop_size crop_size flow_channels))<line_sep>labels_placeholder=tf.placeholder(tf.int64 shape=(batch_size))<line_sep>is_training=tf.placeholder(tf.bool)<line_sep><return>rgb_images_placeholder flow_images_placeholder labels_placeholder is_training<block_end><def_stmt>rgb_placeholder_inputs batch_size=16 num_frame_per_clib=16 crop_size=224 rgb_channels=3 flow_channels=2<block_start>"""Generate placeholder variables to represent the input tensors.
These placeholders are used as inputs by the rest of the model building
code and will be fed from the downloaded data in the .run() loop, below.
Args:
batch_size: The batch size will be baked into both placeholders.
num_frame_per_clib: The number of frames per clip.
crop_size: The crop size of each clip.
channels: The number of input channels of each clip.
Returns:
images_placeholder: Images placeholder.
labels_placeholder: Labels placeholder.
"""<line_sep># Note that the shapes of the placeholders match the shapes of the full
# image and label tensors, except the first dimension is now batch_size
# rather than the full size of the train or test data sets.
rgb_images_placeholder=tf.placeholder(tf.float32 shape=(batch_size num_frame_per_clib crop_size crop_size rgb_channels))<line_sep>labels_placeholder=tf.placeholder(tf.int64 shape=(batch_size))<line_sep>is_training=tf.placeholder(tf.bool)<line_sep><return>rgb_images_placeholder labels_placeholder is_training<block_end><def_stmt>Normalization clips frames_num<block_start>new_clips=[]<for_stmt>index range(frames_num)<block_start>clip=tf.image.per_image_standardization(clips[index])<line_sep>new_clips.append(clip)<block_end><return>new_clips<block_end><def_stmt>average_gradients tower_grads<block_start>average_grads=[]<for_stmt>grad_and_vars zip(*tower_grads)<block_start>grads=[]<for_stmt>g,_ grad_and_vars<block_start>expanded_g=tf.expand_dims(g 0)<line_sep>grads.append(expanded_g)<block_end>grad=tf.concat(grads 0)<line_sep>grad=tf.reduce_mean(grad 0)<line_sep>v=grad_and_vars[0][1]<line_sep>grad_and_var=(grad v)<line_sep>average_grads.append(grad_and_var)<block_end><return>average_grads<block_end><def_stmt>l2_loss weight_decay weighyt_list<block_start>l2_reg=tf.contrib.layers.l2_regularizer(weight_decay)<line_sep><return>tf.contrib.layers.apply_regularization(regularizer=l2_reg weights_list=weighyt_list)<block_end><def_stmt>tower_loss logit labels wd<block_start>print(logit.shape)<line_sep>print(labels.shape)<line_sep>weight_map=[]<for_stmt>variable tf.global_variables()<block_start><if_stmt>'conv_3d/w'<in>variable.name<or>'kernel'<in>variable.name<block_start>weight_map.append(variable)<block_end><block_end>cross_entropy_mean=tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels logits=logit))<line_sep>weight_decay=l2_loss(wd weight_map)<line_sep>#tf.summary.scalar('sgd_weight_decay_loss', weight_decay)
# Calculate the total loss for the current tower.
total_loss=cross_entropy_mean+weight_decay<line_sep><return>total_loss<block_end><def_stmt>tower_acc logit labels<block_start>correct_pred=tf.equal(tf.argmax(logit 1) labels)<line_sep>accuracy=tf.reduce_mean(tf.cast(correct_pred tf.float32))<line_sep><return>accuracy<block_end><def_stmt>_variable_on_cpu name shape initializer<block_start><with_stmt>tf.device('/cpu:0')<block_start>var=tf.get_variable(name shape initializer=initializer)<block_end><return>var<block_end><def_stmt>_variable_with_weight_decay name shape wd<block_start>var=_variable_on_cpu(name shape tf.contrib.layers.xavier_initializer())<if_stmt>wd<is><not><none><block_start>weight_decay=tf.nn.l2_loss(var)<times>wd<line_sep>tf.add_to_collection('weightdecay_losses' weight_decay)<block_end><return>var<block_end><def_stmt>data_to_feed_dict data<block_start>rgb_train_images=[]<line_sep>train_labels=[]<for_stmt>i data<block_start>tmp_train_images=i.get_result()[0]<line_sep>tmp_labels=i.get_result()[1]<line_sep>rgb_train_images.extend(tmp_train_images)<line_sep>train_labels.extend(tmp_labels)<block_end><return>np.array(rgb_train_images) np.array(train_labels)<block_end><def_stmt>get_data filename batch_size num_frames_per_clip=64 sample_rate=4 crop_size=224 shuffle=<false> add_flow=<false><block_start>rgb_train_images,flow_train_images,train_labels,_,_,_=input_data.read_clip_and_label(filename=filename batch_size=batch_size num_frames_per_clip=num_frames_per_clip sample_rate=sample_rate crop_size=crop_size shuffle=shuffle add_flow=add_flow)<line_sep><return>rgb_train_images train_labels<block_end><class_stmt>MyThread(threading.Thread)<block_start><def_stmt>__init__ self func args=()<block_start>super(MyThread self).__init__()<line_sep>self.func=func<line_sep>self.args=args<block_end><def_stmt>run self<block_start>self.result=self.func(*self.args)<block_end><def_stmt>get_result self<block_start><try_stmt><block_start><return>self.result<block_end><except_stmt>Exception<block_start><return><none><block_end><block_end><block_end><def_stmt>load_data filename batch_size num_frames_per_clip sample_rate crop_size shuffle=<false> add_flow=<false><block_start>data=[]<line_sep>'''
p = Pool(batch_size/8)
for i in range(batch_size):
data.append(p.apply_async(get_data, args=(
filename,
8,
num_frames_per_clip,
sample_rate,
crop_size,
shuffle,
add_flow
)))
p.close()
#p.join()
'''<for_stmt>i range(batch_size/4)<block_start>t=MyThread(get_data args=(filename 4 num_frames_per_clip sample_rate crop_size shuffle add_flow))<line_sep>data.append(t)<line_sep>t.start()<block_end><for_stmt>t data<block_start>t.join()<block_end>print('DATA_LOAD_COMP: enqueue......')<line_sep>rgb_train_images,train_labels=data_to_feed_dict(data)<line_sep><return>rgb_train_images train_labels<block_end><def_stmt>topk predicts labels ids<block_start>scores={}<line_sep>top1_list=[]<line_sep>top5_list=[]<line_sep>clips_top1_list=[]<line_sep>clips_top5_list=[]<line_sep>start_time=time.time()<line_sep>print('Results process..............')<for_stmt>index tqdm(range(len(predicts)))<block_start>id=ids[index]<line_sep>score=predicts[index]<if_stmt>str(id)<not><in>scores.keys()<block_start>scores['%d'%id]=[]<line_sep>scores['%d'%id].append(score)<block_end><else_stmt><block_start>scores['%d'%id].append(score)<block_end>avg_pre_index=np.argsort(score).tolist()<line_sep>top1=(labels[id]<in>avg_pre_index[-1:])<line_sep>top5=(labels[id]<in>avg_pre_index[-5:])<line_sep>clips_top1_list.append(top1)<line_sep>clips_top5_list.append(top5)<block_end>print('Clips-----TOP_1_ACC in test: %f'%np.mean(clips_top1_list))<line_sep>print('Clips-----TOP_5_ACC in test: %f'%np.mean(clips_top5_list))<line_sep>print('..............')<for_stmt>_id range(len(labels)-1)<block_start>avg_pre_index=np.argsort(np.mean(scores['%d'%_id] axis=0)).tolist()<line_sep>top1=(labels[_id]<in>avg_pre_index[-1:])<line_sep>top5=(labels[_id]<in>avg_pre_index[-5:])<line_sep>top1_list.append(top1)<line_sep>top5_list.append(top5)<block_end>print('TOP_1_ACC in test: %f'%np.mean(top1_list))<line_sep>print('TOP_5_ACC in test: %f'%np.mean(top5_list))<line_sep>duration=time.time()-start_time<line_sep>print('Time use: %.3f'%duration)<block_end>
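A hypothetical sketch (not part of the original file) of how the placeholders and data loader above might be wired into a TF1-style feed_dict, as the docstrings describe. The 'train.list' path and the downstream op are assumptions; the real training loop lives in a separate script.

# Hypothetical wiring: build one feed_dict from the helpers above.
rgb_ph, labels_ph, is_training = rgb_placeholder_inputs(batch_size=16)
images, labels = load_data('train.list', batch_size=16, num_frames_per_clip=64,
                           sample_rate=4, crop_size=224, shuffle=True)
feed_dict = {rgb_ph: images, labels_ph: labels, is_training: True}
# A tf.Session would then evaluate any op built on these placeholders, e.g.
# sess.run(accuracy_op, feed_dict=feed_dict)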
|
<import_stmt>argparse<import_stmt>logging<import_from_stmt>progress.bar Bar<import_from_stmt>metrics ErrorMetric<import_from_stmt>util format_results load_predictions load_gold_data<line_sep>logging.basicConfig(format="%(asctime)s - %(levelname)s - %(name)s - %(message)s" datefmt="%m/%d/%Y %H:%M:%S" level=logging.INFO )<line_sep>logger=logging.getLogger(__name__)<if_stmt>__name__<eq>"__main__"<block_start>parser=argparse.ArgumentParser(description="SLURP evaluation script")<line_sep>parser.add_argument("-g" "--gold-data" required=<true> type=str help="Gold data in SLURP jsonl format" )<line_sep>parser.add_argument("-p" "--prediction-file" type=str required=<true> help="Predictions file")<line_sep>parser.add_argument("--load-gold" action="store_true" help="When evaluating against gold transcriptions\
(gold_*_predictions.jsonl), this flag must be true." )<line_sep>parser.add_argument("--average" type=str default="micro" help="The averaging modality {micro, macro}." )<line_sep>parser.add_argument("--full" action="store_true" help="Print the full results, including per-label metrics." )<line_sep>parser.add_argument("--errors" action="store_true" help="Print TPs, FPs, and FNs in each row.")<line_sep>parser.add_argument("--table-layout" type=str default="fancy_grid" help="The results table layout {fancy_grid (DEFAULT), csv, tsv}." )<line_sep>args=parser.parse_args()<line_sep>logger.info("Loading data")<line_sep>pred_examples=load_predictions(args.prediction_file args.load_gold)<line_sep>gold_examples=load_gold_data(args.gold_data args.load_gold)<line_sep>n_gold_examples=len(gold_examples)<line_sep>logger.info("Initializing metrics")<line_sep>scenario_f1=ErrorMetric.get_instance(metric="f1" average=args.average)<line_sep>action_f1=ErrorMetric.get_instance(metric="f1" average=args.average)<line_sep>intent_f1=ErrorMetric.get_instance(metric="f1" average=args.average)<line_sep>span_f1=ErrorMetric.get_instance(metric="span_f1" average=args.average)<line_sep>distance_metrics={}<for_stmt>distance ["word" "char"]<block_start>distance_metrics[distance]=ErrorMetric.get_instance(metric="span_distance_f1" average=args.average distance=distance)<block_end>slu_f1=ErrorMetric.get_instance(metric="slu_f1" average=args.average)<line_sep>bar=Bar(message="Evaluating metrics" max=len(gold_examples))<for_stmt>gold_id list(gold_examples)<block_start><if_stmt>gold_id<in>pred_examples<block_start>gold_example=gold_examples.pop(gold_id)<line_sep>pred_example=pred_examples.pop(gold_id)<line_sep>scenario_f1(gold_example["scenario"] pred_example["scenario"])<line_sep>action_f1(gold_example["action"] pred_example["action"])<line_sep>intent_f1("{}_{}".format(gold_example["scenario"] gold_example["action"]) "{}_{}".format(pred_example["scenario"] pred_example["action"]) )<line_sep># Filtering below has been added to original code
# because of the way punctuation is handled in data preparation
<for_stmt>k gold_example["entities"]<block_start>k["filler"]=k["filler"].replace(" '" "'")<block_end>span_f1(gold_example["entities"] pred_example["entities"])<for_stmt>distance,metric distance_metrics.items()<block_start>metric(gold_example["entities"] pred_example["entities"])<block_end><block_end>bar.next()<block_end>bar.finish()<line_sep>logger.info("Results:")<line_sep>results=scenario_f1.get_metric()<line_sep>print(format_results(results=results label="scenario" full=args.full errors=args.errors table_layout=args.table_layout ) "\n" )<line_sep>results=action_f1.get_metric()<line_sep>print(format_results(results=results label="action" full=args.full errors=args.errors table_layout=args.table_layout ) "\n" )<line_sep>results=intent_f1.get_metric()<line_sep>print(format_results(results=results label="intent (scen_act)" full=args.full errors=args.errors table_layout=args.table_layout ) "\n" )<line_sep>results=span_f1.get_metric()<line_sep>print(format_results(results=results label="entities" full=args.full errors=args.errors table_layout=args.table_layout ) "\n" )<for_stmt>distance,metric distance_metrics.items()<block_start>results=metric.get_metric()<line_sep>slu_f1(results)<line_sep>print(format_results(results=results label="entities (distance {})".format(distance) full=args.full errors=args.errors table_layout=args.table_layout ) "\n" )<block_end>results=slu_f1.get_metric()<line_sep>print(format_results(results=results label="SLU F1" full=args.full errors=args.errors table_layout=args.table_layout ) "\n" )<line_sep>logger.warning("Gold examples not predicted: {} (out of {})".format(len(gold_examples) n_gold_examples))<block_end>
|
#
# Copyright 2015 <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
<import_stmt>unittest<import_stmt>testbase<import_stmt>util<import_stmt>gateway_mgmt<import_stmt>config<import_stmt>default_cluster<class_stmt>TestCRC16(unittest.TestCase)<block_start>cluster=config.clusters[0]<line_sep>@classmethod<def_stmt>setUpClass cls<block_start>cls.conf_checker=default_cluster.initialize_starting_up_smr_before_redis(cls.cluster)<assert_stmt>cls.conf_checker<ne><none> 'failed to initialize cluster'<block_end>@classmethod<def_stmt>tearDownClass cls<block_start>testbase.defaultTearDown(cls)<block_end><def_stmt>setUp self<block_start>util.set_process_logfile_prefix('TestCRC16_%s'%self._testMethodName)<line_sep><return>0<block_end><def_stmt>tearDown self<block_start><return>0<block_end><def_stmt>test_single_thread_input self<block_start>util.print_frame()<line_sep>self.cluster=config.clusters[0]<line_sep>result={}<line_sep>ip,port=util.get_rand_gateway(self.cluster)<line_sep>gw=gateway_mgmt.Gateway(ip)<line_sep>self.assertEquals(0 gw.connect(ip port))<line_sep>max=5<for_stmt>idx range(max)<block_start>cmd='set key%d 0\r\n'%(idx)<line_sep>gw.write(cmd)<line_sep>result[idx]=gw.read_until('\r\n')<block_end>data_max=65535<for_stmt>idx range(max)<block_start><for_stmt>cnt range(0 data_max)<block_start>gw.write('crc16 key%d %d\r\n'%(idx cnt))<line_sep>result[idx]=gw.read_until('\r\n')<block_end><block_end><for_stmt>idx range(max-1)<block_start>self.assertEquals(result[idx] result[idx+1])<block_end><block_end><block_end>
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""V1 tests for Stack and ParallelStack Ops."""<import_from_stmt>tensorflow.python.framework dtypes<import_from_stmt>tensorflow.python.framework errors_impl<import_from_stmt>tensorflow.python.framework ops<import_from_stmt>tensorflow.python.framework test_util<import_from_stmt>tensorflow.python.ops array_ops<import_from_stmt>tensorflow.python.ops variables<import_from_stmt>tensorflow.python.platform test<class_stmt>AutomaticStackingTest(test.TestCase)<block_start>@test_util.run_deprecated_v1# Tests symbolic tensor semantics
<def_stmt>testVariable self<block_start><with_stmt>self.session()<block_start>v=variables.Variable(17)<line_sep>result=ops.convert_to_tensor([[0 0 0] [0 v 0] [0 0 0]])<line_sep>self.evaluate(v.initializer)<line_sep>self.assertAllEqual([[0 0 0] [0 17 0] [0 0 0]] self.evaluate(result))<line_sep>v.assign(38).op.run()<line_sep>self.assertAllEqual([[0 0 0] [0 38 0] [0 0 0]] self.evaluate(result))<block_end><block_end>@test_util.run_deprecated_v1# Placeholders are V1 only.
<def_stmt>testPlaceholder self<block_start><with_stmt>self.session()# Test using placeholder with a defined shape.
<block_start>ph_0=array_ops.placeholder(dtypes.int32 shape=[])<line_sep>result_0=ops.convert_to_tensor([[0 0 0] [0 ph_0 0] [0 0 0]])<line_sep>self.assertAllEqual([[0 0 0] [0 1 0] [0 0 0]] result_0.eval(feed_dict={ph_0:1}))<line_sep>self.assertAllEqual([[0 0 0] [0 2 0] [0 0 0]] result_0.eval(feed_dict={ph_0:2}))<line_sep># Test using placeholder with an undefined shape.
ph_1=array_ops.placeholder(dtypes.int32)<line_sep>result_1=ops.convert_to_tensor([[0 0 0] [0 ph_1 0] [0 0 0]])<line_sep>self.assertAllEqual([[0 0 0] [0 1 0] [0 0 0]] result_1.eval(feed_dict={ph_1:1}))<line_sep>self.assertAllEqual([[0 0 0] [0 2 0] [0 0 0]] result_1.eval(feed_dict={ph_1:2}))<block_end><block_end>@test_util.run_deprecated_v1# Placeholders and shape inference are only applicable in Graph mode.
<def_stmt>testShapeErrors self# Static shape error.
<block_start>ph_0=array_ops.placeholder(dtypes.int32 shape=[1])<with_stmt>self.assertRaises(ValueError)<block_start>ops.convert_to_tensor([[0 0 0] [0 ph_0 0] [0 0 0]])<block_end># Dynamic shape error.
ph_1=array_ops.placeholder(dtypes.int32)<line_sep>result_1=ops.convert_to_tensor([[0 0 0] [0 ph_1 0] [0 0 0]])<with_stmt>self.session()<block_start><with_stmt>self.assertRaises(errors_impl.InvalidArgumentError)<block_start>result_1.eval(feed_dict={ph_1:[1]})<block_end><block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>test.main()<block_end>
|
# coding: utf-8
<import_from_future_stmt> absolute_import division print_function unicode_literals <import_stmt>os<import_stmt>sys<import_stmt>django<import_from_stmt>django.conf settings<import_from_stmt>django.test.utils get_runner<def_stmt>runtests <block_start>os.environ['DJANGO_SETTINGS_MODULE']='test_project.settings'<line_sep>django.setup()<line_sep>TestRunner=get_runner(settings)<line_sep>test_runner=TestRunner()<line_sep>failures=test_runner.run_tests(["tests"])<line_sep>sys.exit(bool(failures))<block_end><if_stmt>__name__<eq>'__main__'<block_start>runtests()<block_end>
|
<import_stmt>FWCore.ParameterSet.Config<as>cms<line_sep>run2_CSC_2018=cms.Modifier()<line_sep>
|
<import_from_future_stmt> unicode_literals<import_from_stmt>psycopg2 IntegrityError<import_from_stmt>gratipay.testing Harness D<import_from_stmt>gratipay.models.exchange_route ExchangeRoute<class_stmt>TestRecordAnExchange(Harness)# fixture
# =======
<block_start><def_stmt>make_participants self<block_start>self.make_participant('alice' claimed_time='now' is_admin=<true>)<line_sep>self.bob=self.make_participant('bob' claimed_time='now')<block_end><def_stmt>record_an_exchange self data make_participants=<true><block_start><if_stmt>make_participants<block_start>self.make_participants()<block_end>data.setdefault('status' 'succeeded')<line_sep>data.setdefault('note' 'noted')<if_stmt>'route_id'<not><in>data<block_start><try_stmt><block_start>data['route_id']=ExchangeRoute.insert(self.bob 'paypal' '<EMAIL>').id<block_end><except_stmt>IntegrityError<block_start>data['route_id']=ExchangeRoute.from_network(self.bob 'paypal').id<block_end><block_end><if_stmt>data['status']<is><none><block_start><del_stmt>(data['status'])<block_end><if_stmt>data['route_id']<is><none><block_start><del_stmt>(data['route_id'])<block_end><if_stmt>'ref'<not><in>data<block_start>data['ref']='N/A'<block_end><return>self.client.PxST('/~bob/history/record-an-exchange' data auth_as='alice')<block_end># tests
# =====
<def_stmt>test_success_is_302 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'0'})<assert_stmt>response.code<eq>302<assert_stmt>response.headers['location']<eq>'/bob/history/'<block_end><def_stmt>test_non_admin_is_403 self<block_start>self.make_participant('alice' claimed_time='now')<line_sep>self.bob=self.make_participant('bob' claimed_time='now')<line_sep>actual=self.record_an_exchange({'amount':'10' 'fee':'0'} <false>).code<assert_stmt>actual<eq>403<block_end><def_stmt>test_bad_amount_is_400 self<block_start>response=self.record_an_exchange({'amount':'cheese' 'fee':'0'})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Invalid amount/fee"<block_end><def_stmt>test_bad_fee_is_400 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'cheese'})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Invalid amount/fee"<block_end><def_stmt>test_no_note_is_400 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'0' 'note':''})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Invalid note"<block_end><def_stmt>test_whitespace_note_is_400 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'0' 'note':' '})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Invalid note"<block_end><def_stmt>test_no_route_id_is_400 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'0' 'route_id':<none>})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Invalid route_id"<block_end><def_stmt>test_bad_route_id_is_400 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'0' 'route_id':'foo'})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Invalid route_id"<block_end><def_stmt>test_non_existent_route_id_is_400 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'0' 'route_id':'123456'})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Route doesn't exist"<block_end><def_stmt>test_route_should_belong_to_user_else_400 self<block_start>alice=self.make_participant('alice' claimed_time='now' is_admin=<true>)<line_sep>self.make_participant('bob' claimed_time='now')<line_sep>route=ExchangeRoute.insert(alice 'paypal' '<EMAIL>')<line_sep>response=self.record_an_exchange({'amount':'10' 'fee':'0' 'route_id':route.id} <false>)<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Route doesn't exist"<block_end><def_stmt>test_no_ref_is_400 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'0' 'ref':''})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Invalid Reference"<block_end><def_stmt>test_whitespace_ref_is_400 self<block_start>response=self.record_an_exchange({'amount':'10' 'fee':'0' 'ref':' '})<assert_stmt>response.code<eq>400<assert_stmt>response.body<eq>"Invalid Reference"<block_end><def_stmt>test_dropping_balance_below_zero_is_allowed_in_this_context self<block_start>self.record_an_exchange({'amount':'-10' 'fee':'0'})<line_sep>actual=self.db.one("SELECT balance FROM participants WHERE username='bob'")<assert_stmt>actual<eq>D('-10.00')<block_end><def_stmt>test_success_records_exchange self<block_start>self.record_an_exchange({'amount':'10' 'fee':'0.50' 'ref':"605BSOC6G855L15OO"})<line_sep>expected={"amount":D('10.00') "fee":D('0.50') "participant":"bob" "recorder":"alice" "note":"noted" "ref":"605BSOC6G855L15OO" "route":ExchangeRoute.from_network(self.bob 'paypal').id}<line_sep>SQL="SELECT amount, fee, participant, recorder, note, route, ref 
"<concat>"FROM exchanges"<line_sep>actual=self.db.one(SQL back_as=dict)<assert_stmt>actual<eq>expected<block_end><def_stmt>test_success_updates_balance self<block_start>self.record_an_exchange({'amount':'10' 'fee':'0'})<line_sep>expected=D('10.00')<line_sep>SQL="SELECT balance FROM participants WHERE username='bob'"<line_sep>actual=self.db.one(SQL)<assert_stmt>actual<eq>expected<block_end><def_stmt>test_withdrawals_work self<block_start>self.make_participant('alice' claimed_time='now' is_admin=<true>)<line_sep>self.bob=self.make_participant('bob' claimed_time='now' balance=20)<line_sep>self.record_an_exchange({'amount':'-7' 'fee':'0'} make_participants=<false>)<line_sep>expected=D('13.00')<line_sep>SQL="SELECT balance FROM participants WHERE username='bob'"<line_sep>actual=self.db.one(SQL)<assert_stmt>actual<eq>expected<block_end><def_stmt>test_withdrawals_take_fee_out_of_balance self<block_start>self.make_participant('alice' claimed_time='now' is_admin=<true>)<line_sep>self.bob=self.make_participant('bob' claimed_time='now' balance=20)<line_sep>self.bob=self.record_an_exchange({'amount':'-7' 'fee':'1.13'} <false>)<line_sep>SQL="SELECT balance FROM participants WHERE username='bob'"<assert_stmt>self.db.one(SQL)<eq>D('11.87')<block_end><def_stmt>test_can_set_status self<block_start>self.make_participants()<for_stmt>status ('pre' 'pending' 'failed' 'succeeded')<block_start>self.record_an_exchange({'amount':'10' 'fee':'0' 'status':status} <false>)<line_sep>actual=self.db.one("SELECT status FROM exchanges ORDER BY timestamp desc LIMIT 1")<assert_stmt>actual<eq>status<block_end><block_end><def_stmt>test_cant_record_new_exchanges_with_None_status self<block_start>r=self.record_an_exchange({'amount':'10' 'fee':'0' 'status':<none>})<assert_stmt>r.code<eq>400<assert_stmt>self.db.one("SELECT count(*) FROM exchanges")<eq>0<block_end><def_stmt>test_succeeded_affects_balance self<block_start>self.make_participants()<line_sep>balance=0<for_stmt>amount ('10' '-10')<block_start>self.record_an_exchange({'amount':amount 'fee':'0'} <false>)<line_sep>balance<augadd>int(amount)<assert_stmt>self.db.one("SELECT balance FROM participants WHERE username='bob'")<eq>balance<block_end><block_end><def_stmt>test_failed_doesnt_affect_balance self<block_start>self.make_participants()<for_stmt>amount ('10' '-10')<block_start>self.record_an_exchange({'amount':amount 'fee':'0' 'status':'failed'} <false>)<assert_stmt>self.db.one("SELECT balance FROM participants WHERE username='bob'")<eq>0<block_end><block_end><def_stmt>test_other_statuses_dont_affect_balance_for_payins self<block_start>self.make_participants()<for_stmt>status ('pre' 'pending')<block_start>self.record_an_exchange({'amount':'10' 'fee':'0' 'status':status} <false>)<assert_stmt>self.db.one("SELECT balance FROM participants WHERE username='bob'")<eq>0<block_end><block_end><def_stmt>test_other_statuses_affect_balance_for_payouts self<block_start>self.make_participants()<line_sep>balance=0<for_stmt>status ('pre' 'pending')<block_start>self.record_an_exchange({'amount':'-10' 'fee':'0' 'status':status} <false>)<line_sep>balance<augsub>10<assert_stmt>self.db.one("SELECT balance FROM participants WHERE username='bob'")<eq>balance<block_end><block_end><block_end>
|
# Copyright 2021 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commands for importing timelines."""<import_stmt>sys<import_stmt>time<import_stmt>click<import_from_stmt>timesketch_import_client importer<as>import_client<line_sep>@click.command('import')@click.option('--name' help='Name of the timeline.')@click.option('--timeout' type=int default=600 help='Seconds to wait for indexing.')@click.argument('file_path' type=click.Path(exists=<true>))@click.pass_context<def_stmt>importer ctx name timeout file_path<block_start>"""Import timeline.
Args:
ctx: Click CLI context object.
name: Name of the timeline to create.
timeout: Seconds to wait for indexing.
file_path: File path to the file to import.
"""<line_sep>sketch=ctx.obj.sketch<if_stmt><not>name<block_start>name=click.format_filename(file_path shorten=<true>)<block_end>timeline=<none><with_stmt>import_client.ImportStreamer()<as>streamer<block_start>click.echo('Uploading to server .. ' nl=<false>)<line_sep>streamer.set_sketch(sketch)<line_sep>streamer.set_timeline_name(name)<line_sep>streamer.set_provider('Timesketch CLI client')<line_sep># TODO: Consider using the whole command as upload context instead
# of the file path.
streamer.set_upload_context(file_path)<line_sep>streamer.add_file(file_path)<line_sep>timeline=streamer.timeline<if_stmt><not>timeline<block_start>click.echo('Error creating timeline, please try again.')<line_sep>sys.exit(1)<block_end>click.echo('Done')<block_end># Poll the timeline status and wait for the timeline to be ready
click.echo('Indexing .. ' nl=<false>)<line_sep>max_time_seconds=timeout<line_sep>sleep_time_seconds=5# Sleep between API calls
max_retries=max_time_seconds/sleep_time_seconds<line_sep>retry_count=0<while_stmt><true><block_start><if_stmt>retry_count<ge>max_retries<block_start>click.echo(('WARNING: The command timed out before indexing finished. '<concat>'The timeline will continue to be indexed in the background'))<line_sep><break><block_end>status=timeline.status<line_sep># TODO: Do something with other statuses? (e.g. failed)
<if_stmt>status<eq>'ready'<block_start>click.echo('Done')<line_sep><break><block_end>retry_count<augadd>1<line_sep>time.sleep(sleep_time_seconds)<block_end>click.echo(f'Timeline imported: {timeline.name}')<block_end>
|
<import_from_stmt>mitmproxy.contentviews hex<import_from_stmt>. full_eval<def_stmt>test_view_hex <block_start>v=full_eval(hex.ViewHex())<assert_stmt>v(b"foo")<block_end><def_stmt>test_render_priority <block_start>v=hex.ViewHex()<assert_stmt><not>v.render_priority(b"ascii")<assert_stmt>v.render_priority(b"\xFF")<assert_stmt><not>v.render_priority(b"")<block_end>
|
"""Matplotlib dotplot."""<import_stmt>math<import_stmt>warnings<import_stmt>numpy<as>np<import_stmt>matplotlib.pyplot<as>plt<import_from_stmt>matplotlib _pylab_helpers<import_from_stmt>...plot_utils _scale_fig_size<import_from_stmt>. backend_kwarg_defaults create_axes_grid backend_show<import_from_stmt>...plot_utils plot_point_interval<import_from_stmt>...dotplot wilkinson_algorithm layout_stacks<def_stmt>plot_dot values binwidth dotsize stackratio hdi_prob quartiles rotated dotcolor intervalcolor markersize markercolor marker figsize linewidth point_estimate nquantiles point_interval ax show backend_kwargs plot_kwargs <block_start>"""Matplotlib dotplot."""<if_stmt>backend_kwargs<is><none><block_start>backend_kwargs={}<block_end>backend_kwargs={**backend_kwarg_defaults() **backend_kwargs}<line_sep>backend_kwargs.setdefault("figsize" figsize)<line_sep>backend_kwargs["squeeze"]=<true><line_sep>(figsize _ _ _ auto_linewidth auto_markersize)=_scale_fig_size(figsize <none>)<if_stmt>plot_kwargs<is><none><block_start>plot_kwargs={}<line_sep>plot_kwargs.setdefault("color" dotcolor)<block_end><if_stmt>linewidth<is><none><block_start>linewidth=auto_linewidth<block_end><if_stmt>markersize<is><none><block_start>markersize=auto_markersize<block_end><if_stmt>ax<is><none><block_start>fig_manager=_pylab_helpers.Gcf.get_active()<if_stmt>fig_manager<is><not><none><block_start>ax=fig_manager.canvas.figure.gca()<block_end><else_stmt><block_start>_,ax=create_axes_grid(1 backend_kwargs=backend_kwargs )<block_end><block_end><if_stmt>point_interval<block_start>ax=plot_point_interval(ax values point_estimate hdi_prob quartiles linewidth markersize markercolor marker rotated intervalcolor "matplotlib" )<block_end><if_stmt>nquantiles<g>values.shape[0]<block_start>warnings.warn("nquantiles must be less than or equal to the number of data points" UserWarning)<line_sep>nquantiles=values.shape[0]<block_end><else_stmt><block_start>qlist=np.linspace(1/(2<times>nquantiles) 1-1/(2<times>nquantiles) nquantiles)<line_sep>values=np.quantile(values qlist)<block_end><if_stmt>binwidth<is><none><block_start>binwidth=math.sqrt((values[-1]-values[0]+1)<power>2/(2<times>nquantiles<times>np.pi))<block_end>## Wilkinson's Algorithm
stack_locs,stack_count=wilkinson_algorithm(values binwidth)<line_sep>x,y=layout_stacks(stack_locs stack_count binwidth stackratio rotated)<for_stmt>(x_i y_i) zip(x y)<block_start>dot=plt.Circle((x_i y_i) dotsize<times>binwidth/2 **plot_kwargs)<line_sep>ax.add_patch(dot)<block_end><if_stmt>rotated<block_start>ax.tick_params(bottom=<false> labelbottom=<false>)<block_end><else_stmt><block_start>ax.tick_params(left=<false> labelleft=<false>)<block_end>ax.set_aspect("equal" adjustable="box")<line_sep>ax.autoscale()<if_stmt>backend_show(show)<block_start>plt.show()<block_end><return>ax<block_end>
|
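plot_dot above delegates the dot binning to wilkinson_algorithm. The following is only a rough illustration of Wilkinson-style binning (group sorted values into bins of width binwidth and stack each group), not the arviz implementation:

import numpy as np

def wilkinson_bins(values, binwidth):
    values = np.sort(np.asarray(values, dtype=float))
    stack_locs, stack_counts = [], []
    i = 0
    while i < len(values):
        j = i
        while j < len(values) and values[j] < values[i] + binwidth:
            j += 1                        # everything within one binwidth joins the stack
        stack_locs.append(values[i:j].mean())
        stack_counts.append(j - i)
        i = j
    return stack_locs, stack_counts

print(wilkinson_bins([0.1, 0.15, 0.2, 0.9, 1.0], binwidth=0.3))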
"""
Copyright 2020 Nvidia Corporation
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""<import_from_stmt>collections defaultdict<import_from_stmt>contextlib contextmanager<import_from_stmt>shutil copyfile<import_stmt>csv<import_stmt>os<import_stmt>re<import_stmt>shlex<import_stmt>subprocess<import_stmt>time<try_stmt><block_start><import_from_stmt>torch.utils.tensorboard SummaryWriter<block_end><except_stmt>ModuleNotFoundError<block_start><import_from_stmt>tensorboardX SummaryWriter<block_end><import_stmt>torch<try_stmt><block_start><import_from_stmt>.utils get_logroot save_hparams trn_names val_names ConditionalProxy <block_end><except_stmt>ImportError# This is to allow the unit tests to run properly
<block_start><import_from_stmt>utils get_logroot save_hparams trn_names val_names ConditionalProxy <block_end><def_stmt>is_list x<block_start><return>isinstance(x (list tuple))<block_end><def_stmt>get_gpu_utilization_pct <block_start>'''
Use nvidia-smi to capture the GPU utilization, which is reported as an
integer in range 0-100.
'''<line_sep>util=subprocess.check_output(shlex.split('nvidia-smi --query-gpu="utilization.gpu" '<concat>'--format=csv,noheader,nounits -i 0'))<line_sep>util=util.decode('utf-8')<line_sep>util=util.replace('\n' '')<line_sep><return>int(util)<block_end><class_stmt>LogX(object)<block_start><def_stmt>__init__ self rank=0<block_start>self.initialized=<false><block_end><def_stmt>initialize self logdir=<none> coolname=<false> hparams=<none> tensorboard=<false> no_timestamp=<false> global_rank=0 eager_flush=<true><block_start>'''
Initialize logx
inputs
- logdir - where to write logfiles
- tensorboard - whether to write to tensorboard file
- global_rank - must set this if using distributed training, so we only
log from rank 0
- coolname - generate a unique directory name underneath logdir, else
use logdir as output directory
- hparams - only use if not launching jobs with runx, which also saves
the hparams.
- eager_flush - call `flush` after every tensorboard write
'''<line_sep>self.rank0=(global_rank<eq>0)<line_sep>self.initialized=<true><if_stmt>logdir<is><not><none><block_start>self.logdir=logdir<block_end><else_stmt><block_start>logroot=get_logroot()<if_stmt>coolname<block_start><import_from_stmt>coolname generate_slug<line_sep>self.logdir=os.path.join(logroot generate_slug(2))<block_end><else_stmt><block_start>self.logdir=os.path.join(logroot 'default')<block_end><block_end># confirm target log directory exists
<if_stmt><not>os.path.isdir(self.logdir)<block_start>os.makedirs(self.logdir exist_ok=<true>)<block_end><if_stmt>hparams<is><not><none><and>self.rank0<block_start>save_hparams(hparams self.logdir)<block_end># Tensorboard file
<if_stmt>self.rank0<and>tensorboard<block_start>self.tb_writer=SummaryWriter(log_dir=self.logdir flush_secs=1)<block_end><else_stmt><block_start>self.tb_writer=<none><block_end>self.eager_flush=eager_flush<line_sep># This allows us to use the tensorboard with automatic checking of both
# the `tensorboard` condition, as well as ensuring writes only happen
# on rank0. Any function supported by `SummaryWriter` is supported by
# `ConditionalProxy`. Additionally, flush will be called after any call
# to this.
self.tensorboard=ConditionalProxy(self.tb_writer tensorboard<and>self.rank0 post_hook=self._flush_tensorboard )<if_stmt><not>self.rank0<block_start><return><block_end># Metrics file
metrics_fn=os.path.join(self.logdir 'metrics.csv')<line_sep>self.metrics_fp=open(metrics_fn mode='a+')<line_sep>self.metrics_writer=csv.writer(self.metrics_fp delimiter=',')<line_sep># Log file
log_fn=os.path.join(self.logdir 'logging.log')<line_sep>self.log_file=open(log_fn mode='a+')<line_sep># save metric
self.save_metric=<none><line_sep>self.best_metric=<none><line_sep>self.save_ckpt_fn=''<line_sep># Find the existing best checkpoint, and update `best_metric`,
# if available
self.best_ckpt_fn=self.get_best_checkpoint()<or>''<if_stmt>self.best_ckpt_fn<block_start>best_chk=torch.load(self.best_ckpt_fn map_location='cpu')<line_sep>self.best_metric=best_chk.get('__metric' <none>)<block_end>self.epoch=defaultdict(<lambda>:0)<line_sep>self.no_timestamp=no_timestamp<line_sep># Initial timestamp, so that epoch time calculation is correct
phase='start'<line_sep>csv_line=[phase]<line_sep># add epoch/iter
csv_line.append('{}/step'.format(phase))<line_sep>csv_line.append(0)<line_sep># add timestamp
<if_stmt><not>self.no_timestamp# this feature is useful for testing
<block_start>csv_line.append('timestamp')<line_sep>csv_line.append(time.time())<block_end>self.metrics_writer.writerow(csv_line)<line_sep>self.metrics_fp.flush()<block_end><def_stmt>__del__ self<block_start><if_stmt>self.initialized<and>self.rank0<block_start>self.metrics_fp.close()<line_sep>self.log_file.close()<block_end><block_end><def_stmt>msg self msg<block_start>'''
Print out message to stdout and to a logfile
'''<if_stmt><not>self.rank0<block_start><return><block_end>print(msg)<line_sep>self.log_file.write(msg+'\n')<line_sep>self.log_file.flush()<block_end><def_stmt>add_image self path img step=<none><block_start>'''
Write an image to the tensorboard file
'''<line_sep>self.tensorboard.add_image(path img step)<block_end><def_stmt>add_scalar self name val idx<block_start>'''
Write a scalar to the tensorboard file
'''<line_sep>self.tensorboard.add_scalar(name val idx)<block_end><def_stmt>_flush_tensorboard self<block_start><if_stmt>self.eager_flush<and>self.tb_writer<is><not><none><block_start>self.tb_writer.flush()<block_end><block_end>@contextmanager<def_stmt>suspend_flush self flush_at_end=<true><block_start>prev_flush=self.eager_flush<line_sep>self.eager_flush=<false><line_sep><yield><line_sep>self.eager_flush=prev_flush<if_stmt>flush_at_end<block_start>self._flush_tensorboard()<block_end><block_end><def_stmt>metric self phase metrics epoch=<none><block_start>"""Record train/val metrics. This serves the dual-purpose to write these
metrics to both a tensorboard file and a csv file, for each parsing by
sumx.
Arguments:
phase: 'train' or 'val'. sumx will only summarize val metrics.
metrics: dictionary of metrics to record
global_step: (optional) epoch or iteration number
"""<if_stmt><not>self.rank0<block_start><return><block_end># define canonical phase
<if_stmt>phase<in>trn_names<block_start>canonical_phase='train'<block_end><elif_stmt>phase<in>val_names<block_start>canonical_phase='val'<block_end><else_stmt><block_start><raise>ValueError('expected phase to be one of {} {}'.format(val_names trn_names))<block_end><if_stmt>epoch<is><not><none><block_start>self.epoch[canonical_phase]=epoch<block_end># Record metrics to csv file
csv_line=[canonical_phase]<for_stmt>k,v metrics.items()<block_start>csv_line.append(k)<line_sep>csv_line.append(v)<block_end># add epoch/iter
csv_line.append('epoch')<line_sep>csv_line.append(self.epoch[canonical_phase])<line_sep># add timestamp
<if_stmt><not>self.no_timestamp# this feature is useful for testing
<block_start>csv_line.append('timestamp')<line_sep>csv_line.append(time.time())<block_end># To save a bit of disk space, only save validation metrics
<if_stmt>canonical_phase<eq>'val'<block_start>self.metrics_writer.writerow(csv_line)<line_sep>self.metrics_fp.flush()<block_end># Write updates to tensorboard file
<with_stmt>self.suspend_flush()<block_start><for_stmt>k,v metrics.items()<block_start>self.add_scalar('{}/{}'.format(phase k) v self.epoch[canonical_phase])<block_end><block_end># if no step, then keep track of it automatically
<if_stmt>epoch<is><none><block_start>self.epoch[canonical_phase]<augadd>1<block_end><block_end>@staticmethod<def_stmt>is_better save_metric best_metric higher_better<block_start><return>best_metric<is><none><or>higher_better<and>(save_metric<g>best_metric)<or><not>higher_better<and>(save_metric<l>best_metric)<block_end><def_stmt>save_model self save_dict metric epoch higher_better=<true> delete_old=<true><block_start>"""Saves a model to disk. Keeps a separate copy of latest and best models.
Arguments:
save_dict: dictionary to save to checkpoint
epoch: epoch number, used to name checkpoint
metric: metric value to be used to evaluate whether this is the
best result
higher_better: True if higher valued metric is better, False
otherwise
delete_old: Delete prior 'lastest' checkpoints. By setting to
false, you'll get a checkpoint saved every time this
function is called.
"""<if_stmt><not>self.rank0<block_start><return><block_end>save_dict['__metric']=metric<if_stmt>os.path.exists(self.save_ckpt_fn)<and>delete_old<block_start>os.remove(self.save_ckpt_fn)<block_end># Save out current model
self.save_ckpt_fn=os.path.join(self.logdir 'last_checkpoint_ep{}.pth'.format(epoch))<line_sep>torch.save(save_dict self.save_ckpt_fn)<line_sep>self.save_metric=metric<line_sep>is_better=self.is_better(self.save_metric self.best_metric higher_better)<if_stmt>is_better<block_start><if_stmt>os.path.exists(self.best_ckpt_fn)<block_start>os.remove(self.best_ckpt_fn)<block_end>self.best_ckpt_fn=os.path.join(self.logdir 'best_checkpoint_ep{}.pth'.format(epoch))<line_sep>self.best_metric=self.save_metric<line_sep>copyfile(self.save_ckpt_fn self.best_ckpt_fn)<block_end><return>is_better<block_end><def_stmt>get_best_checkpoint self<block_start>"""
Finds the checkpoint in `self.logdir` that is considered best.
If, for some reason, there are multiple best checkpoint files, then
the one with the highest epoch will be preferred.
Returns:
None - If there is no best checkpoint file
path (str) - The full path to the best checkpoint otherwise.
"""<line_sep>match_str=r'^best_checkpoint_ep([0-9]+).pth$'<line_sep>best_epoch=-1<line_sep>best_checkpoint=<none><for_stmt>filename os.listdir(self.logdir)<block_start>match=re.fullmatch(match_str filename)<if_stmt>match<is><not><none># Extract the epoch number
<block_start>epoch=int(match.group(1))<if_stmt>epoch<g>best_epoch<block_start>best_epoch=epoch<line_sep>best_checkpoint=filename<block_end><block_end><block_end><if_stmt>best_checkpoint<is><none><block_start><return><none><block_end><return>os.path.join(self.logdir best_checkpoint)<block_end><def_stmt>load_model self path<block_start>"""Restore a model and return a dict with any meta data included in
the snapshot
"""<line_sep>checkpoint=torch.load(path)<line_sep>state_dict=checkpoint['state_dict']<line_sep>meta={k:v<for>k,v checkpoint.items()<if>k<ne>'state_dict'}<line_sep><return>state_dict meta<block_end><block_end># Importing logx gives you access to this shared object
logx=LogX()<line_sep>
|
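A short usage sketch for the LogX helper defined above, assuming the module is importable as runx.logx as in the runx project; the log directory and metric values are illustrative.

from runx.logx import logx

logx.initialize(logdir='/tmp/example_run', tensorboard=True,
                hparams={'lr': 1e-3}, global_rank=0)
for epoch in range(3):
    logx.msg('starting epoch %d' % epoch)
    logx.metric('train', {'loss': 1.0 / (epoch + 1)}, epoch)
    logx.metric('val', {'top1': 50.0 + epoch}, epoch)
    # keep whichever checkpoint has the best validation top1
    logx.save_model({'state_dict': {}}, metric=50.0 + epoch,
                    epoch=epoch, higher_better=True)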
<import_from_stmt>os environ<import_from_stmt>loader Loader<import_stmt>actions<line_sep>LOADER=Loader()<def_stmt>lambda_handler event context<block_start><try_stmt><block_start>response=LOADER.personalize_cli.delete_dataset(datasetArn=event['datasetArn'])<block_end><except_stmt>Exception<as>e<block_start>LOADER.logger.error(f'Error deleting dataset: {e}')<line_sep><raise>e<block_end><block_end>
|
# coding: utf-8
#
<import_from_stmt>collections namedtuple<def_stmt>test_session sess<block_start>sess.wlan_ip<line_sep>sess.widget<line_sep>sess.watcher<line_sep>sess.image<line_sep>sess.jsonrpc<line_sep>sess.open_identify<line_sep>sess.shell<line_sep>sess.set_new_command_timeout<line_sep>sess.settings<line_sep>sess.taobao<line_sep>sess.xpath<block_end><def_stmt>test_session_app sess package_name<block_start>sess.app_start(package_name)<assert_stmt>sess.app_current()['package']<eq>package_name<line_sep>sess.app_wait(package_name)<assert_stmt>package_name<in>sess.app_list()<assert_stmt>package_name<in>sess.app_list_running()<assert_stmt>sess.app_info(package_name)['packageName']<eq>package_name<block_end><def_stmt>test_session_window_size sess<block_start><assert_stmt>isinstance(sess.window_size() tuple)<block_end>
|
"""Unittests for Slices."""<import_from_stmt>unittest TestCase<import_from_stmt>robustnessgym.core.slice SliceDataPanel<import_from_stmt>tests.testbeds MockTestBedv0<class_stmt>TestSlice(TestCase)<block_start><def_stmt>setUp self<block_start>self.testbed=MockTestBedv0()<block_end><def_stmt>test_from_dataset self# Create a slice
<block_start>sl=SliceDataPanel(self.testbed.dataset)<line_sep># Compare the slice identifier
self.assertEqual(str(sl) "RGSlice[num_rows: 6](MockDataset(version=1.0))")<line_sep># Length of the slice
self.assertEqual(len(sl) 6)<line_sep># Lineage of the slice
self.assertEqual(sl.lineage [("Dataset" "MockDataset(version=1.0)")])<block_end><block_end>
|
<import_from_stmt>flask request<import_from_stmt>operator or_<import_from_stmt>zeus.models FileCoverage Revision<import_from_stmt>zeus.utils.builds fetch_build_for_revision<import_from_stmt>.base_revision BaseRevisionResource<import_from_stmt>..schemas FileCoverageSchema<line_sep>filecoverage_schema=FileCoverageSchema(many=<true>)<class_stmt>RevisionFileCoverageResource(BaseRevisionResource)<block_start><def_stmt>get self revision:Revision<block_start>"""
Return a list of file coverage objects for a given revision.
"""<line_sep>build=fetch_build_for_revision(revision)<if_stmt><not>build<block_start><return>self.respond(status=404)<block_end>build_ids=[original.id<for>original build.original]<line_sep>query=FileCoverage.query.filter(FileCoverage.build_id.in_(build_ids))<line_sep>diff_only=request.args.get("diff_only")<in>("1" "yes" "true")<if_stmt>diff_only<block_start>query=query.filter(or_(FileCoverage.diff_lines_covered<g>0 FileCoverage.diff_lines_uncovered<g>0 ))<block_end>query=query.order_by((FileCoverage.diff_lines_covered+FileCoverage.diff_lines_uncovered<g>0).desc() FileCoverage.filename.asc() )<line_sep><return>self.respond_with_schema(filecoverage_schema query)<block_end><block_end>
|
# Copyright 2017 CodiLime
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_from_future_stmt> unicode_literals<import_stmt>socket<import_stmt>ssl<import_stmt>msgpack<import_from_stmt>veles.proto messages msgpackwrap<import_from_stmt>veles.proto.messages PROTO_VERSION<import_from_stmt>veles.schema nodeid<import_from_stmt>veles.util helpers<class_stmt>Client(object)<block_start><def_stmt>__init__ self sock key name='scli' version='1.0' description='' type='scli' quit_on_close=<false><block_start>self.sock=sock<line_sep>wrapper=msgpackwrap.MsgpackWrapper()<line_sep>self.unpacker=wrapper.unpacker<line_sep>self.packer=wrapper.packer<line_sep>self.client_name=name<line_sep>self.client_version=version<line_sep>self.client_description=description<line_sep>self.client_type=type<line_sep>self.quit_on_close=quit_on_close<line_sep>self._authorize(helpers.prepare_auth_key(key))<block_end><def_stmt>_authorize self key<block_start>self.sock.sendall(key)<line_sep>self.send_msg(messages.MsgConnect(proto_version=PROTO_VERSION client_name=self.client_name client_version=self.client_version client_description=self.client_description client_type=self.client_type quit_on_close=self.quit_on_close ))<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgConnected)<block_start>print('Connected to server: {}'.format(pkt.server_name))<block_end><elif_stmt>isinstance(pkt messages.MsgConnectionError)<block_start><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply when attempting to connect')<block_end><block_end><def_stmt>getpkt self<block_start><while_stmt><true><block_start><try_stmt><block_start><return>messages.MsgpackMsg.load(self.unpacker.unpack())<block_end><except_stmt>msgpack.OutOfData<block_start><pass><block_end>data=self.sock.recv(1024)<if_stmt><not>data<block_start><raise>Exception("end of file")<block_end>self.unpacker.feed(data)<block_end><block_end><def_stmt>send_msg self msg<block_start>self.sock.sendall(self.packer.pack(msg.dump()))<block_end><def_stmt>request self msg<block_start>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgRequestAck)<and>pkt.rid<eq>0<block_start><return>msg.id<block_end><elif_stmt>isinstance(pkt messages.MsgRequestError)<and>pkt.rid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to request')<block_end><block_end><def_stmt>create self parent tags=set() attr={} data={} bindata={} pos=(<none> <none>)<block_start>msg=messages.MsgCreate(id=nodeid.NodeID() parent=parent pos_start=pos[0] pos_end=pos[1] tags=tags attr=attr data=data bindata=bindata rid=0 )<line_sep>self.request(msg)<line_sep><return>msg.id<block_end><def_stmt>delete self obj<block_start>msg=messages.MsgDelete(id=obj rid=0)<line_sep>self.request(msg)<block_end><def_stmt>set_parent self obj parent<block_start>msg=messages.MsgSetParent(id=obj parent=parent rid=0)<line_sep>self.request(msg)<block_end><def_stmt>set_pos self obj start end<block_start>msg=messages.MsgSetPos(id=obj pos_start=start pos_end=end rid=0)<line_sep>self.request(msg)<block_end><def_stmt>add_tag self obj tag<block_start>msg=messages.MsgAddTag(id=obj tag=tag rid=0)<line_sep>self.request(msg)<block_end><def_stmt>del_tag self obj tag<block_start>msg=messages.MsgDelTag(id=obj tag=tag rid=0)<line_sep>self.request(msg)<block_end><def_stmt>set_attr self obj key data<block_start>msg=messages.MsgSetAttr(id=obj key=key data=data rid=0)<line_sep>self.request(msg)<block_end><def_stmt>set_data self obj key data<block_start>msg=messages.MsgSetData(id=obj rid=0 key=key data=data 
)<line_sep>self.request(msg)<block_end><def_stmt>set_bindata self obj key start data truncate=<false><block_start>msg=messages.MsgSetBinData(id=obj rid=0 key=key start=start data=data truncate=truncate )<line_sep>self.request(msg)<block_end><def_stmt>get self obj<block_start>msg=messages.MsgGet(id=obj qid=0 )<line_sep>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetReply)<and>pkt.qid<eq>0<block_start><return>pkt.obj<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start><raise>Exception('weird reply to get')<block_end><block_end><def_stmt>get_sub self obj<block_start>msg=messages.MsgGet(id=obj qid=0 sub=<true> )<line_sep>self.send_msg(msg)<while_stmt><true><block_start>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetReply)<and>pkt.qid<eq>0<block_start><yield>pkt.obj<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start><raise>Exception('weird reply to get')<block_end><block_end><block_end><def_stmt>get_data self obj key<block_start>msg=messages.MsgGetData(id=obj qid=0 key=key )<line_sep>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetDataReply)<and>pkt.qid<eq>0<block_start><return>pkt.data<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start><raise>Exception('weird reply to get_data')<block_end><block_end><def_stmt>get_data_sub self obj key<block_start>msg=messages.MsgGetData(id=obj qid=0 key=key sub=<true>)<line_sep>self.send_msg(msg)<while_stmt><true><block_start>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetDataReply)<and>pkt.qid<eq>0<block_start><yield>pkt.data<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start><raise>Exception('weird reply to get_data')<block_end><block_end><block_end><def_stmt>get_bindata self obj key start=0 end=<none><block_start>msg=messages.MsgGetBinData(id=obj qid=0 key=key start=start end=end )<line_sep>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetBinDataReply)<and>pkt.qid<eq>0<block_start><return>pkt.data<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start><raise>Exception('weird reply to get_bindata')<block_end><block_end><def_stmt>get_bindata_sub self obj key start=0 end=<none><block_start>msg=messages.MsgGetBinData(id=obj qid=0 key=key start=start end=end sub=<true> )<line_sep>self.send_msg(msg)<while_stmt><true><block_start>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetBinDataReply)<and>pkt.qid<eq>0<block_start><yield>pkt.data<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start><raise>Exception('weird reply to get_bindata')<block_end><block_end><block_end><def_stmt>list self obj<block_start>msg=messages.MsgGetList(qid=0 parent=obj )<line_sep>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetListReply)<and>pkt.qid<eq>0<block_start><return>pkt.objs<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to list')<block_end><block_end><def_stmt>list_sub self 
obj<block_start>msg=messages.MsgGetList(qid=0 parent=obj sub=<true>)<line_sep>self.send_msg(msg)<while_stmt><true><block_start>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetListReply)<and>pkt.qid<eq>0<block_start><yield>pkt<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to list')<block_end><block_end><block_end><def_stmt>query self obj sig params checks=<none><block_start>params=sig.params.dump(params)<line_sep>msg=messages.MsgGetQuery(qid=0 node=obj query=sig.name params=params trace=checks<is><not><none>)<line_sep>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetQueryReply)<and>pkt.qid<eq>0<block_start><if_stmt>checks<is><not><none><block_start>checks<augadd>pkt.checks<block_end><return>sig.result.load(pkt.result)<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><if_stmt>checks<is><not><none><block_start>checks<augadd>pkt.checks<block_end><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to get_query')<block_end><block_end><def_stmt>query_sub self obj sig params checks=<none><block_start>params=sig.params.dump(params)<line_sep>msg=messages.MsgGetQuery(qid=0 node=obj query=sig.name params=params trace=checks<is><not><none> sub=<true>)<line_sep>self.send_msg(msg)<while_stmt><true><block_start>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgGetQueryReply)<and>pkt.qid<eq>0<block_start><if_stmt>checks<is><not><none><block_start>checks<augadd>pkt.checks<block_end><yield>sig.result.load(pkt.result)<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><if_stmt>checks<is><not><none><block_start>checks<augadd>pkt.checks<block_end><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to get_query')<block_end><block_end><block_end><def_stmt>run_method self obj sig params<block_start>params=sig.params.dump(params)<line_sep>msg=messages.MsgMethodRun(mid=0 node=obj method=sig.name params=params)<line_sep>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgMethodResult)<and>pkt.mid<eq>0<block_start><return>sig.result.load(pkt.result)<block_end><elif_stmt>isinstance(pkt messages.MsgMethodError)<and>pkt.mid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to run_method')<block_end><block_end><def_stmt>run_broadcast self sig params<block_start>params=sig.params.dump(params)<line_sep>msg=messages.MsgBroadcastRun(bid=0 broadcast=sig.name params=params)<line_sep>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgBroadcastResult)<and>pkt.bid<eq>0<block_start><return>[sig.result.load(result)<for>result pkt.results]<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to run_broadcast')<block_end><block_end><def_stmt>list_connections self<block_start>msg=messages.MsgListConnections(qid=0 )<line_sep>self.send_msg(msg)<line_sep>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgConnectionsReply)<and>pkt.qid<eq>0<block_start><return>pkt.connections<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to list_connections')<block_end><block_end><def_stmt>list_connections_sub 
self<block_start>msg=messages.MsgListConnections(qid=0 sub=<true>)<line_sep>self.send_msg(msg)<while_stmt><true><block_start>pkt=self.getpkt()<if_stmt>isinstance(pkt messages.MsgConnectionsReply)<and>pkt.qid<eq>0<block_start><yield>pkt<block_end><elif_stmt>isinstance(pkt messages.MsgQueryError)<and>pkt.qid<eq>0<block_start><raise>pkt.err<block_end><else_stmt><block_start>print(pkt)<line_sep><raise>Exception('weird reply to list_connections')<block_end><block_end><block_end><block_end><class_stmt>UnixClient(Client)<block_start><def_stmt>__init__ self path key **kwargs<block_start>sock=socket.socket(socket.AF_UNIX socket.SOCK_STREAM)<line_sep>sock.connect(path)<line_sep>super(UnixClient self).__init__(sock key **kwargs)<block_end><block_end><class_stmt>TcpClient(Client)<block_start><def_stmt>__init__ self ip port key **kwargs<block_start>sock=socket.create_connection((ip port))<line_sep>super(TcpClient self).__init__(sock key **kwargs)<block_end><block_end><class_stmt>SslClient(Client)<block_start><def_stmt>__init__ self ip port key fingerprint **kwargs<block_start>sock=socket.create_connection((ip port))<line_sep>sc=ssl.SSLContext()<line_sep>sock=sc.wrap_socket(sock)<line_sep>cert=sock.getpeercert(<true>)<line_sep>helpers.validate_cert(cert fingerprint)<line_sep>super(SslClient self).__init__(sock key **kwargs)<block_end><block_end><def_stmt>create_client url<block_start>url=helpers.parse_url(url)<if_stmt>url.scheme<eq>helpers.UrlScheme.UNIX_SCHEME<block_start><return>UnixClient(url.path url.auth_key)<block_end><elif_stmt>url.scheme<eq>helpers.UrlScheme.TCP_SCHEME<block_start><return>TcpClient(url.host url.port url.auth_key)<block_end><elif_stmt>url.scheme<eq>helpers.UrlScheme.SSL_SCHEME<block_start><return>SslClient(url.host url.port url.auth_key url.fingerprint)<block_end><else_stmt><block_start><raise>ValueError('Wrong scheme provided!')<block_end><block_end>
|
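A sketch of a round trip through the Client above, using only methods it defines; server_url and parent_id are placeholders for a running veles server and an existing node, and the function assumes it lives in the same module so create_client is in scope.

def demo_roundtrip(server_url, parent_id):
    """Create a child node, attach a value, read it back, then delete it."""
    client = create_client(server_url)        # scheme picks Unix/Tcp/Ssl client
    child = client.create(parent_id, tags={'example'}, attr={'name': 'demo'})
    client.set_data(child, 'note', 'hello')   # request() blocks until the server acks
    value = client.get_data(child, 'note')
    client.delete(child)
    return value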
<import_stmt>bpy<import_stmt>numpy<as>np<import_from_stmt>os listdir path<def_stmt>fbx2bvh data_path file<block_start>sourcepath=data_path+"/"+file<line_sep>bvh_path=data_path+"/"+file.split(".fbx")[0]+".bvh"<line_sep>bpy.ops.import_scene.fbx(filepath=sourcepath)<line_sep>frame_start=9999<line_sep>frame_end=-9999<line_sep>action=bpy.data.actions[-1]<if_stmt>action.frame_range[1]<g>frame_end<block_start>frame_end=action.frame_range[1]<block_end><if_stmt>action.frame_range[0]<l>frame_start<block_start>frame_start=action.frame_range[0]<block_end>frame_end=np.max([60 frame_end])<line_sep>bpy.ops.export_anim.bvh(filepath=bvh_path frame_start=frame_start frame_end=frame_end root_transform_only=<true>)<line_sep>bpy.data.actions.remove(bpy.data.actions[-1])<line_sep>print(data_path+"/"+file+" processed.")<block_end><if_stmt>__name__<eq>'__main__'<block_start>data_path="./fbx/"<line_sep>directories=sorted([f<for>f listdir(data_path)<if><not>f.startswith(".")])<for_stmt>d directories<block_start>files=sorted([f<for>f listdir(data_path+d)<if>f.endswith(".fbx")])<for_stmt>file files<block_start>fbx2bvh(path.join(data_path d) file)<block_end><block_end><block_end>
|
<import_from_stmt>social_django.config PythonSocialAuthConfig<line_sep>
|
#!/usr/bin/env python3
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
'''
Utilities to help process files containing kernel registrations.
'''<import_stmt>os<import_stmt>sys<import_stmt>typing<import_from_stmt>logger get_logger<line_sep>log=get_logger("op_registration_utils")<def_stmt>map_ort_constant_to_domain ort_constant_name:str<block_start>'''
Map the name of the internal ONNX Runtime constant used in operator kernel registrations to the domain name
used in ONNX models and configuration files.
:param ort_constant_name: ONNX Runtime constant name for the domain from a kernel registration entry.
:return: String with public domain name.
'''<line_sep># constants are defined in <ORT root>/include/onnxruntime/core/graph/constants.h
constant_to_domain_map={'kOnnxDomain':'ai.onnx' 'kMLDomain':'ai.onnx.ml' 'kMSDomain':'com.microsoft' 'kMSExperimentalDomain':'com.microsoft.experimental' 'kMSNchwcDomain':'com.microsoft.nchwc' 'kMSFeaturizersDomain':'com.microsoft.mlfeaturizers' 'kMSDmlDomain':'com.microsoft.dml' 'kNGraphDomain':'com.intel.ai' 'kVitisAIDomain':'com.xilinx'}<if_stmt>ort_constant_name<in>constant_to_domain_map<block_start><return>constant_to_domain_map[ort_constant_name]<block_end><else_stmt><block_start>log.warning('Unknown domain for ONNX Runtime constant of {}.'.format(ort_constant_name))<line_sep><return><none><block_end><block_end><def_stmt>get_kernel_registration_files ort_root=<none> include_cuda=<false><block_start>'''
Return paths to files containing kernel registrations for CPU and CUDA providers.
:param ort_root: ORT repository root directory. Inferred from the location of this script if not provided.
:param include_cuda: Include the CUDA registrations in the list of files.
:return: list[str] containing the kernel registration filenames.
'''<if_stmt><not>ort_root<block_start>ort_root=os.path.dirname(os.path.abspath(__file__))+'/../..'<block_end>provider_path=ort_root+'/onnxruntime/core/providers/{ep}/{ep}_execution_provider.cc'<line_sep>contrib_provider_path=ort_root+'/onnxruntime/contrib_ops/{ep}/{ep}_contrib_kernels.cc'<line_sep>training_provider_path=ort_root+'/orttraining/orttraining/training_ops/{ep}/{ep}_training_kernels.cc'<line_sep>provider_paths=[provider_path.format(ep='cpu') contrib_provider_path.format(ep='cpu') training_provider_path.format(ep='cpu')]<if_stmt>include_cuda<block_start>provider_paths.append(provider_path.format(ep='cuda'))<line_sep>provider_paths.append(contrib_provider_path.format(ep='cuda'))<line_sep>provider_paths.append(training_provider_path.format(ep='cuda'))<block_end>provider_paths=[os.path.abspath(p)<for>p provider_paths]<line_sep><return>provider_paths<block_end><class_stmt>RegistrationProcessor<block_start>'''
Class to process lines that are extracted from a kernel registration file.
For each kernel registration, process_registration is called.
For all other lines, process_other_line is called.
'''<def_stmt>process_registration self lines:typing.List[str] domain:str operator:str start_version:int end_version:typing.Optional[int]=<none> type:typing.Optional[str]=<none><block_start>'''
Process lines that contain a kernel registration.
:param lines: Array containing the original lines containing the kernel registration.
:param domain: Domain for the operator
:param operator: Operator type
:param start_version: Start version
:param end_version: End version or None if unversioned registration
:param type: Type used in registration, if this is a typed registration
'''<line_sep><pass><block_end><def_stmt>process_other_line self line<block_start>'''
Process a line that does not contain a kernel registration
:param line: Original line
'''<line_sep><pass><block_end><def_stmt>ok self<block_start>'''
Get overall status for processing
:return: True if successful. False if not. Error will be logged as the registrations are processed.
'''<line_sep><return><false><block_end><block_end># return False as the derived class must override to report the real status
<def_stmt>_process_lines lines:typing.List[str] offset:int registration_processor:RegistrationProcessor<block_start>'''
Process one or more lines that contain a kernel registration.
Merge lines if split over multiple, and call registration_processor.process_registration with the original lines
and the registration information.
:return: Offset for first line that was not consumed.
'''<line_sep>onnx_op='ONNX_OPERATOR_KERNEL_CLASS_NAME'<line_sep>onnx_op_len=len(onnx_op)<line_sep>onnx_typed_op='ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME'<line_sep>onnx_typed_op_len=len(onnx_typed_op)<line_sep>onnx_versioned_op='ONNX_OPERATOR_VERSIONED_KERNEL_CLASS_NAME'<line_sep>onnx_versioned_op_len=len(onnx_versioned_op)<line_sep>onnx_versioned_typed_op='ONNX_OPERATOR_VERSIONED_TYPED_KERNEL_CLASS_NAME'<line_sep>onnx_versioned_typed_op_len=len(onnx_versioned_typed_op)<line_sep>end_marks=tuple([');' ')>' ')>,' ')>,};' ')>};'])<line_sep>end_mark=''<line_sep>lines_to_process=[]<line_sep># merge line if split over multiple.
# original lines will be in lines_to_process. merged and stripped line will be in code_line
<while_stmt><true><block_start>lines_to_process.append(lines[offset])<line_sep>stripped=lines[offset].strip()<line_sep>line_end=<false><for_stmt>mark end_marks<block_start><if_stmt>stripped.endswith(mark)<block_start>end_mark=mark<line_sep>line_end=<true><line_sep><break><block_end><block_end><if_stmt>line_end<block_start><break><block_end>offset<augadd>1<if_stmt>offset<g>len(lines)<block_start>log.error('Past end of input lines looking for line terminator.')<line_sep>sys.exit(-1)<block_end><block_end>code_line=''.join([line.strip()<for>line lines_to_process])<if_stmt>onnx_op<in>code_line# e.g. BuildKernelCreateInfo<ONNX_OPERATOR_KERNEL_CLASS_NAME(
# kCpuExecutionProvider, kOnnxDomain, 7, Cos)>,
<block_start>trim_at=code_line.index(onnx_op)+onnx_op_len+1<line_sep>*_,domain,start_version,op_type=[arg.strip()<for>arg code_line[trim_at:-len(end_mark)].split(',')]<line_sep>registration_processor.process_registration(lines_to_process domain op_type int(start_version) <none> <none>)<block_end><elif_stmt>onnx_typed_op<in>code_line# e.g. BuildKernelCreateInfo<ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(
# kCpuExecutionProvider, kOnnxDomain, 7, double, Sin)>,
<block_start>trim_at=code_line.index(onnx_typed_op)+onnx_typed_op_len+1<line_sep>*_,domain,start_version,type,op_type=[arg.strip()<for>arg code_line[trim_at:-len(end_mark)].split(',')]<line_sep>registration_processor.process_registration(lines_to_process domain op_type int(start_version) <none> type)<block_end><elif_stmt>onnx_versioned_op<in>code_line# e.g. BuildKernelCreateInfo<ONNX_OPERATOR_VERSIONED_KERNEL_CLASS_NAME(
# kCpuExecutionProvider, kOnnxDomain, 1, 10, Hardmax)>,
<block_start>trim_at=code_line.index(onnx_versioned_op)+onnx_versioned_op_len+1<line_sep>*_,domain,start_version,end_version,op_type=[arg.strip()<for>arg code_line[trim_at:-len(end_mark)].split(',')]<line_sep>registration_processor.process_registration(lines_to_process domain op_type int(start_version) int(end_version) <none>)<block_end><elif_stmt>onnx_versioned_typed_op<in>code_line# e.g. BuildKernelCreateInfo<ONNX_OPERATOR_VERSIONED_TYPED_KERNEL_CLASS_NAME(
# kCpuExecutionProvider, kOnnxDomain, 1, 10, float, LogSoftmax)>,
<block_start>trim_at=code_line.index(onnx_versioned_typed_op)+onnx_versioned_typed_op_len+1<line_sep>*_,domain,start_version,end_version,type,op_type=[arg.strip()<for>arg code_line[trim_at:-len(end_mark)].split(',')]<line_sep>registration_processor.process_registration(lines_to_process domain op_type int(start_version) int(end_version) type)<block_end><else_stmt><block_start>log.warning("Ignoring unhandled kernel registration variant: {}".format(code_line))<for_stmt>line lines_to_process<block_start>registration_processor.process_other_line(line)<block_end><block_end><return>offset+1<block_end><def_stmt>process_kernel_registration_file filename:str registration_processor:RegistrationProcessor<block_start>'''
Process a kernel registration file using registration_processor.
:param filename: Path to file containing kernel registrations.
:param registration_processor: Processor to be used.
:return True if processing was successful.
'''<if_stmt><not>os.path.isfile(filename)<block_start>log.error('File not found: {}'.format(filename))<line_sep><return><false><block_end>lines=[]<with_stmt>open(filename 'r')<as>file_to_read<block_start>lines=file_to_read.readlines()<block_end>offset=0<while_stmt>offset<l>len(lines)<block_start>line=lines[offset]<line_sep>stripped=line.strip()<if_stmt>stripped.startswith('BuildKernelCreateInfo<ONNX')<block_start>offset=_process_lines(lines offset registration_processor)<block_end><else_stmt><block_start>registration_processor.process_other_line(line)<line_sep>offset<augadd>1<block_end><block_end><block_end>
|
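A concrete RegistrationProcessor is left to the caller above; the sketch below simply counts registrations per public domain using the helpers from the same module, and is not part of the ONNX Runtime tooling itself.

class CountingProcessor(RegistrationProcessor):
    """Tallies kernel registrations per public domain name."""

    def __init__(self):
        self.counts = {}

    def process_registration(self, lines, domain, operator,
                             start_version, end_version=None, type=None):
        key = map_ort_constant_to_domain(domain) or domain
        self.counts[key] = self.counts.get(key, 0) + 1

    def process_other_line(self, line):
        pass  # non-registration lines are ignored

    def ok(self):
        return True

processor = CountingProcessor()
for path in get_kernel_registration_files(include_cuda=False):
    process_kernel_registration_file(path, processor)
print(processor.counts)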
"""This module contains some general purpose utilities that are used across
SymPy.
"""<import_from_stmt>.iterables flatten group take subsets variations numbered_symbols cartes capture dict_merge prefixes postfixes sift topological_sort unflatten has_dups has_variety reshape default_sort_key ordered rotations <import_from_stmt>.misc filldedent<import_from_stmt>.lambdify lambdify<import_from_stmt>.source source<import_from_stmt>.decorator threaded xthreaded public memoize_property<import_from_stmt>.timeutils timed<line_sep>__all__=['flatten' 'group' 'take' 'subsets' 'variations' 'numbered_symbols' 'cartes' 'capture' 'dict_merge' 'prefixes' 'postfixes' 'sift' 'topological_sort' 'unflatten' 'has_dups' 'has_variety' 'reshape' 'default_sort_key' 'ordered' 'rotations' 'filldedent' 'lambdify' 'source' 'threaded' 'xthreaded' 'public' 'memoize_property' 'timed' ]<line_sep>
|
<import_from_stmt>acceptance_test_case AcceptanceTestCase# @UnusedImport
<import_from_stmt>flask_test_case FlaskTestCase# @UnusedImport
<import_from_stmt>model_test_case ModelTestCase# @UnusedImport
<import_from_stmt>fixtures_test_case FixturesTestCase# @UnusedImport
|
"""
Database module
"""<class_stmt>Database(object)<block_start>"""
Defines data structures and methods to store article content.
"""<def_stmt>save self article<block_start>"""
Saves an article.
Args:
article: article metadata and text content
"""<block_end><def_stmt>complete self<block_start>"""
Signals processing is complete and runs final storage methods.
"""<block_end><def_stmt>close self<block_start>"""
Commits and closes the database.
"""<block_end><block_end>
|
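The Database class above is an interface with empty hooks; a minimal in-memory implementation, purely for illustration, might look like this:

class MemoryDatabase(Database):
    """Keeps saved articles in a list instead of a real store."""

    def __init__(self):
        self.articles = []

    def save(self, article):
        self.articles.append(article)

    def complete(self):
        print("stored %d articles" % len(self.articles))

    def close(self):
        self.articles.clear()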
<import_from_stmt>pytest raises<import_from_stmt>vyper.exceptions UndeclaredDefinition<def_stmt>test_permanent_variables_test get_contract_with_gas_estimation<block_start>permanent_variables_test="""
struct Var:
a: int128
b: int128
var: Var
@external
def __init__(a: int128, b: int128):
self.var.a = a
self.var.b = b
@external
def returnMoose() -> int128:
return self.var.a * 10 + self.var.b
"""<line_sep>c=get_contract_with_gas_estimation(permanent_variables_test *[5 7])<assert_stmt>c.returnMoose()<eq>57<line_sep>print("Passed init argument and variable member test")<block_end><def_stmt>test_missing_global get_contract<block_start>code="""
@external
def a() -> int128:
return self.b
"""<with_stmt>raises(UndeclaredDefinition)<block_start>get_contract(code)<block_end><block_end>
|
<import_from_future_stmt> print_function absolute_import division<import_from_stmt>future.builtins *<import_from_stmt>future standard_library<line_sep>standard_library.install_aliases()<line_sep># Copyright 2017 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_from_stmt>.constants *<class_stmt>UnitSystem(object)<block_start>""" Class for standardizing units - specifies preferred units for length, mass, energy etc.
In MDT, many methods will automatically convert output using the UnitSystem at
``moldesign.units.default``
Args:
length (MdtUnit): length units
mass (MdtUnit): mass units
time (MdtUnit): time units
energy (MdtUnit): energy units
temperature (MdtUnit): temperature units (default: kelvin)
force (MdtUnit): force units (default: energy/length)
momentum (MdtUnit): momentum units (default: mass * length / time)
angle (MdtUnit): angle units (default: radians)
charge (MdtUnit): charge units (default: fundamental charge)
"""<def_stmt>__init__ self length mass time energy temperature=kelvin force=<none> momentum=<none> angle=radians charge=q_e<block_start>self.length=length<line_sep>self.mass=mass<line_sep>self.time=time<line_sep>self.energy=energy<line_sep>self.temperature=temperature<line_sep>self.force=force<line_sep>self.momentum=momentum<line_sep>self.angle=angle<line_sep>self.charge=charge<block_end><def_stmt>__getitem__ self item<block_start>""" For convenience when using pint dimensionality descriptions.
This aliases self['item'] = self['[item]'] = self.item,
e.g. self['length'] = self['[length]'] = self.length
"""<line_sep>itemname=item.lstrip('[').rstrip(']')<line_sep><return>getattr(self itemname)<block_end>@property<def_stmt>force self<block_start><if_stmt>self._force<is><none><block_start><return>self.energy/self.length<block_end><else_stmt><block_start><return>self._force<block_end><block_end>@force.setter<def_stmt>force self f<block_start>self._force=f<block_end>@property<def_stmt>momentum self<block_start><if_stmt>self._momentum<is><none><block_start><return>self.mass<times>self.length/self.time<block_end><else_stmt><block_start><return>self._momentum<block_end><block_end>@momentum.setter<def_stmt>momentum self f<block_start>self._momentum=f<block_end><def_stmt>convert self quantity<block_start>""" Convert a quantity into this unit system.
Args:
quantity (MdtQuantity or MdtUnit): quantity to convert
"""<line_sep>baseunit=self.get_baseunit(quantity)<if_stmt>baseunit<eq>ureg.dimensionless<block_start><return>quantity<times>ureg.dimensionless<block_end><else_stmt><block_start>result=quantity.to(baseunit)<line_sep><return>result<block_end><block_end><def_stmt>get_default self q<block_start>""" Return the default unit system for objects with these dimensions
Args:
q (MdtQuantity or MdtUnit): quantity to get default units for
Returns:
MdtUnit: Proper units for this quantity
"""<line_sep><return>self.get_baseunit(1.0<times>q).units<block_end><def_stmt>convert_if_possible self quantity<block_start><if_stmt>isinstance(quantity MdtQuantity)<block_start><return>self.convert(quantity)<block_end><else_stmt><block_start><return>quantity<block_end><block_end><def_stmt>get_baseunit self quantity<block_start>""" Get units of a quantity, list or array
Args:
quantity (Any): any number or list-like object with units
Raises:
TypeError: if the passed object cannot have units (e.g., it's a string or ``None``)
Returns:
MdtUnit: units found in the passed object
"""<try_stmt><block_start>dims=dict(quantity.dimensionality)<block_end><except_stmt>AttributeError<block_start><try_stmt><block_start>q=quantity[0]<block_end><except_stmt>(TypeError StopIteration)<block_start><if_stmt>isinstance(quantity (int float complex))<block_start><return>ureg.dimensionless<block_end><raise>TypeError('This type of object cannot have physical units')<block_end><if_stmt>isinstance(q str)<block_start><raise>TypeError('This type of object cannot have physical units')<block_end><try_stmt><block_start><return>self.get_baseunit(q)<block_end><except_stmt>(IndexError TypeError)# Assume dimensionless
<block_start><return>ureg.dimensionless<block_end><block_end>baseunit=ureg.dimensionless<line_sep># Factor out force units
<if_stmt>self._force<block_start><if_stmt>'[length]'<in>dims<and>'[mass]'<in>dims<and>'[time]'<in>dims<block_start><while_stmt>dims['[length]']<ge>1<and>dims['[mass]']<ge>1<and>dims['[time]']<le>-2<block_start>baseunit<augmul>self['force']<line_sep>dims['[length]']<augsub>1<line_sep>dims['[mass]']<augsub>1<line_sep>dims['[time]']<augadd>2<block_end><block_end><block_end># Factor out energy units
<if_stmt>'[length]'<in>dims<and>'[mass]'<in>dims<and>'[time]'<in>dims<block_start><while_stmt>dims['[length]']<ge>1<and>dims['[mass]']<ge>1<and>dims['[time]']<le>-2<block_start>baseunit<augmul>self['energy']<line_sep>dims['[length]']<augsub>2<line_sep>dims['[mass]']<augsub>1<line_sep>dims['[time]']<augadd>2<block_end><block_end># Factor out momentum units
<if_stmt>self._momentum<block_start><if_stmt>'[length]'<in>dims<and>'[mass]'<in>dims<and>'[time]'<in>dims<block_start><while_stmt>dims['[length]']<ge>1<and>dims['[mass]']<ge>1<and>dims['[time]']<le>-1<block_start>baseunit<augmul>self['momentum']<line_sep>dims['[length]']<augsub>1<line_sep>dims['[mass]']<augsub>1<line_sep>dims['[time]']<augadd>1<block_end><block_end><block_end><if_stmt>'[current]'<in>dims<block_start>dims.setdefault('[charge]' 0)<line_sep>dims.setdefault('[time]' 0)<line_sep>dims['[charge]']<augadd>dims['[current]']<line_sep>dims['[time]']<augsub>dims['[current]']<line_sep>dims.pop('[current]')<block_end># Otherwise, just use the units
<for_stmt>unit dims<block_start><if_stmt>dims[unit]<eq>0<block_start><continue><block_end><try_stmt><block_start>baseunit<augmul>self[unit]<power>dims[unit]<block_end><except_stmt>AttributeError<block_start>baseunit<augmul>ureg[unit]<power>dims[unit]<block_end><block_end><return>baseunit.units<block_end><block_end>default=UnitSystem(length=angstrom mass=amu time=fs energy=eV)<line_sep>atomic_units=UnitSystem(length=a0 mass=m_e time=t0 energy=hartree)<line_sep>nano_si=UnitSystem(length=nm mass=dalton time=fs energy=kjpermol)<line_sep>
|
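A small usage sketch for the unit systems defined above; it assumes the surrounding moldesign.units package, whose constants (nm, angstrom, eV, hartree) are pulled in by the star import at the top of the module.

length = 2.5 * nm
print(default.convert(length))       # reported in angstrom, the default length unit
print(atomic_units.get_default(eV))  # hartree, the energy unit of the atomic system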
"""
Secret file eraser.
"""<import_from_stmt>itertools islice<import_from_stmt>random random<import_from_stmt>.bitmap Bitmap<def_stmt>erase bitmap:Bitmap<arrow><none><block_start>"""Scramble a previously hidden data."""<if_stmt>bitmap.reserved_field<g>0<block_start><for_stmt>byte_offset islice(bitmap.byte_offsets bitmap.reserved_field)<block_start>bitmap[byte_offset]=randomize_lsb(bitmap[byte_offset])<block_end>bitmap.reserved_field=0<line_sep>print("Erased a secret file from the bitmap")<block_end><else_stmt><block_start>print("Secret file not found in the bitmap")<block_end><block_end><def_stmt>randomize_lsb value:int<arrow>int<block_start>"""Set a random bit on the least-significant position."""<line_sep><return>value&~1<if>random()<l>0.5<else>value|1<block_end>
|
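A tiny check of randomize_lsb from the module above (assumed to be importable); it only confirms that the upper seven bits survive while the lowest bit is randomized.

byte = 0b10110010
for _ in range(5):
    scrambled = randomize_lsb(byte)
    assert scrambled & ~1 == byte & ~1   # only the least-significant bit may change
    print(bin(scrambled))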
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
<import_from_stmt>page_sets.desktop_ui.browser_element_identifiers kTabCounterButtonElementId<import_from_stmt>page_sets.desktop_ui.custom_metric_utils SetMetricNames<import_from_stmt>page_sets.desktop_ui.js_utils MEASURE_JS_MEMORY<import_from_stmt>page_sets.desktop_ui.multitab_story MultiTabStory<import_from_stmt>page_sets.desktop_ui.ui_devtools_utils ClickOn<import_from_stmt>page_sets.desktop_ui.url_list TOP_URL<import_from_stmt>page_sets.desktop_ui.webui_utils Inspect<import_from_stmt>page_sets.desktop_ui story_tags<line_sep>WEBUI_TAB_STRIP_BENCHMARK_UMA=['TabStrip.Tab.Views.ActivationAction' 'TabStrip.Tab.WebUI.ActivationAction' 'WebUITabStrip.CloseAction' 'WebUITabStrip.CloseTabAction' 'WebUITabStrip.LoadCompletedTime' 'WebUITabStrip.LoadDocumentTime' 'WebUITabStrip.OpenAction' 'WebUITabStrip.OpenDuration' 'WebUITabStrip.TabActivation' 'WebUITabStrip.TabCreation' 'WebUITabStrip.TabDataReceived' ]<line_sep>WEBUI_TAB_STRIP_CUSTOM_METRIC_NAMES=['Jank' 'Tab.Preview.CompressJPEG' 'Tab.Preview.CompressJPEGWithFlow' 'Tab.Preview.VideoCapture' 'Tab.Preview.VideoCaptureFrameReceived' 'TabStripPageHandler:HandleGetGroupVisualData' 'TabStripPageHandler:HandleGetLayout' 'TabStripPageHandler:HandleGetTabs' 'TabStripPageHandler:HandleGetThemeColors' 'TabStripPageHandler:HandleSetThumbnailTracked' 'TabStripPageHandler:HandleThumbnailUpdate' 'TabStripPageHandler:NotifyLayoutChanged' 'TabStripPageHandler:OnTabGroupChanged' 'TabStripPageHandler:OnTabStripModelChanged' 'TabStripPageHandler:TabChangedAt' 'TabStripPageHandler:TabGroupedStateChanged' ]<line_sep>WEBUI_TAB_STRIP_URL='chrome://tab-strip.top-chrome/'<class_stmt>WebUITabStripStory(MultiTabStory)<block_start>"""Base class for webui tab strip stories"""<def_stmt>RunPageInteractions self action_runner<block_start>SetMetricNames(action_runner WEBUI_TAB_STRIP_CUSTOM_METRIC_NAMES)<line_sep>ClickOn(self._devtools element_id=kTabCounterButtonElementId)<line_sep>action_runner=Inspect(action_runner.tab.browser WEBUI_TAB_STRIP_URL)<line_sep>action_runner.ExecuteJavaScript(MEASURE_JS_MEMORY%'webui_tab_strip:used_js_heap_size_begin')<line_sep>self.InteractWithPage(action_runner)<line_sep>action_runner.ExecuteJavaScript(MEASURE_JS_MEMORY%'webui_tab_strip:used_js_heap_size_end')<block_end><def_stmt>InteractWithPage self action_runner<block_start>self.ScrollTabs(action_runner)<line_sep>action_runner.Wait(5)<block_end><def_stmt>ScrollTabs self action_runner<block_start>action_runner.Wait(1)<line_sep>self.StartMeasuringFrameTime(action_runner 'webui_tab_strip:frame_time_on_scroll')<line_sep>action_runner.ScrollElement(element_function=SCROLL_ELEMENT_FUNCTION direction='left')<line_sep>self.StopMeasuringFrameTime(action_runner)<line_sep>action_runner.Wait(1)<block_end><def_stmt>WillStartTracing self chrome_trace_config<block_start>super(WebUITabStripStory 
self).WillStartTracing(chrome_trace_config)<line_sep>chrome_trace_config.category_filter.AddIncludedCategory('benchmark')<line_sep>chrome_trace_config.category_filter.AddIncludedCategory('ui')<line_sep>chrome_trace_config.EnableUMAHistograms(*WEBUI_TAB_STRIP_BENCHMARK_UMA)<block_end><block_end><class_stmt>WebUITabStripStoryCleanSlate(WebUITabStripStory)<block_start>NAME='webui_tab_strip:clean_slate'<line_sep>URL_LIST=[]<line_sep>URL='about:blank'<line_sep>TAGS=[story_tags.SMOKE_TEST]<line_sep>WAIT_FOR_NETWORK_QUIESCENCE=<false><block_end><class_stmt>WebUITabStripStoryTop10(WebUITabStripStory)<block_start>NAME='webui_tab_strip:top10:2020'<line_sep>URL_LIST=TOP_URL[:10]<line_sep>URL=URL_LIST[0]<line_sep>WAIT_FOR_NETWORK_QUIESCENCE=<true><block_end><class_stmt>WebUITabStripStoryTop10Loading(WebUITabStripStory)<block_start>NAME='webui_tab_strip:top10:loading:2020'<line_sep>URL_LIST=TOP_URL[:10]<line_sep>URL=URL_LIST[0]<line_sep>WAIT_FOR_NETWORK_QUIESCENCE=<false><block_end><class_stmt>WebUITabStripStoryMeasureMemory(WebUITabStripStory)<block_start>NAME='webui_tab_strip:measure_memory'<line_sep>URL_LIST=[]<line_sep>URL='about:blank'<line_sep>WAIT_FOR_NETWORK_QUIESCENCE=<false><def_stmt>WillStartTracing self chrome_trace_config<block_start>super(WebUITabStripStoryMeasureMemory self).WillStartTracing(chrome_trace_config)<line_sep>chrome_trace_config.category_filter.AddExcludedCategory('*')<line_sep>chrome_trace_config.category_filter.AddIncludedCategory('blink.console')<line_sep>chrome_trace_config.category_filter.AddDisabledByDefault('disabled-by-default-memory-infra')<block_end><def_stmt>GetExtraTracingMetrics self<block_start><return>super(WebUITabStripStoryMeasureMemory self).GetExtraTracingMetrics()+['memoryMetric']<block_end><def_stmt>InteractWithPage self action_runner<block_start>action_runner.MeasureMemory(deterministic_mode=<true>)<block_end><block_end><class_stmt>WebUITabStripStoryMeasureMemory2Window(WebUITabStripStoryMeasureMemory)<block_start>NAME='webui_tab_strip:measure_memory:2window'<line_sep>URL_LIST=[]<line_sep>URL='about:blank'<line_sep>WAIT_FOR_NETWORK_QUIESCENCE=<false><def_stmt>InteractWithPage self action_runner<block_start>action_runner.tab.browser.tabs.New(url='about:blank' in_new_window=<true>)<line_sep>action_runner.Wait(1)<line_sep>action_runner.MeasureMemory(deterministic_mode=<true>)<block_end><block_end>SCROLL_ELEMENT_FUNCTION='''
document.querySelector('tabstrip-tab-list')
'''<line_sep>
|
"""
weasyprint.tests.test_draw.svg.test_visibility
----------------------------------------------
Test how the visibility is controlled with "visibility" and "display"
attributes.
"""<import_from_stmt>...testing_utils assert_no_logs<import_from_stmt>.. assert_pixels<line_sep>@assert_no_logs<def_stmt>test_visibility_visible <block_start>assert_pixels('visibility_visible' 9 9 '''
_________
_________
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
_________
_________
''' '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect visibility="visible"
x="2" y="2" width="5" height="5" fill="red" />
</svg>
''')<block_end>@assert_no_logs<def_stmt>test_visibility_hidden <block_start>assert_pixels('visibility_hidden' 9 9 '''
_________
_________
_________
_________
_________
_________
_________
_________
_________
''' '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect visibility="hidden"
x="2" y="2" width="5" height="5" fill="red" />
</svg>
''')<block_end>@assert_no_logs<def_stmt>test_visibility_inherit_hidden <block_start>assert_pixels('visibility_inherit_hidden' 9 9 '''
_________
_________
_________
_________
_________
_________
_________
_________
_________
''' '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<g visibility="hidden">
<rect x="2" y="2" width="5" height="5" fill="red" />
</g>
</svg>
''')<block_end>@assert_no_logs<def_stmt>test_visibility_inherit_visible <block_start>assert_pixels('visibility_inherit_visible' 9 9 '''
_________
_________
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
_________
_________
''' '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<g visibility="hidden">
<rect visibility="visible"
x="2" y="2" width="5" height="5" fill="red" />
</g>
</svg>
''')<block_end>@assert_no_logs<def_stmt>test_display_inline <block_start>assert_pixels('display_inline' 9 9 '''
_________
_________
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
__RRRRR__
_________
_________
''' '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect display="inline"
x="2" y="2" width="5" height="5" fill="red" />
</svg>
''')<block_end>@assert_no_logs<def_stmt>test_display_none <block_start>assert_pixels('display_none' 9 9 '''
_________
_________
_________
_________
_________
_________
_________
_________
_________
''' '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<rect display="none"
x="2" y="2" width="5" height="5" fill="red" />
</svg>
''')<block_end>@assert_no_logs<def_stmt>test_display_inherit_none <block_start>assert_pixels('display_inherit_none' 9 9 '''
_________
_________
_________
_________
_________
_________
_________
_________
_________
''' '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<g display="none">
<rect x="2" y="2" width="5" height="5" fill="red" />
</g>
</svg>
''')<block_end>@assert_no_logs<def_stmt>test_display_inherit_inline <block_start>assert_pixels('display_inherit_inline' 9 9 '''
_________
_________
_________
_________
_________
_________
_________
_________
_________
''' '''
<style>
@page { size: 9px }
svg { display: block }
</style>
<svg width="9px" height="9px" xmlns="http://www.w3.org/2000/svg">
<g display="none">
<rect display="inline"
x="2" y="2" width="5" height="5" fill="red" />
</g>
</svg>
''')<block_end>
|
# Author: <NAME> (<EMAIL>)
# Center for Machine Perception, Czech Technical University in Prague
<import_stmt>os<import_stmt>sys<import_stmt>glob<import_stmt>numpy<as>np<line_sep>sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))<import_from_stmt>pysixd inout<import_from_stmt>params.dataset_params get_dataset_params<line_sep>par=get_dataset_params('hinterstoisser')<line_sep># data_ids = range(1, par.obj_count + 1)
data_ids=range(1 par['scene_count']+1)<line_sep># depth_mpath = par.train_depth_mpath
depth_mpath=par['test_depth_mpath']<line_sep>scale=0.1<for_stmt>data_id data_ids<block_start>print('Processing id: '+str(data_id))<line_sep>depth_paths=sorted(glob.glob(os.path.join(os.path.dirname(depth_mpath.format(data_id 0)) '*')))<for_stmt>depth_path depth_paths<block_start>d=inout.load_depth(depth_path)<line_sep>d<augmul>scale<line_sep>d=np.round(d).astype(np.uint16)<line_sep>inout.save_depth(depth_path d)<block_end><block_end>
|
# coding=utf-8
# Copyright 2021 The Robustness Metrics Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Module-level convenience functions."""<import_from_stmt>robustness_metrics.datasets base<import_from_stmt>robustness_metrics.datasets ood_detection<import_from_stmt>robustness_metrics.datasets tfds<def_stmt>get dataset_spec<arrow>base.Dataset<block_start>"""Fetches a dataset from the dataset registry."""<line_sep><return>base.registry.get_instance(dataset_spec)<block_end><def_stmt>get_available_datasets <block_start>"""Fetches dataset constructor from the dataset registry."""<line_sep><return>base.registry.get_registered_subclasses()<block_end>
|
# Copyright 2009 by <NAME>. All rights reserved.
# Revisions copyright 2009-2010 by <NAME>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Tests for parsing Compass output."""<import_stmt>os<import_stmt>unittest<import_from_stmt>Bio Compass<class_stmt>CompassTest(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>file_dir=os.path.join("Compass")<line_sep>self.test_files=[os.path.join(file_dir "comtest1") os.path.join(file_dir "comtest2") ]<block_end><def_stmt>testCompassScanAndConsume self<block_start><with_stmt>open(self.test_files[0])<as>handle<block_start>com_record=Compass.read(handle)<block_end>self.assertEqual("60456.blo.gz.aln" com_record.query)<line_sep>self.assertEqual("60456.blo.gz.aln" com_record.hit)<line_sep>self.assertEqual(0.5 com_record.gap_threshold)<line_sep>self.assertEqual(388 com_record.query_length)<line_sep>self.assertEqual(386 com_record.query_filtered_length)<line_sep>self.assertEqual(388 com_record.hit_length)<line_sep>self.assertEqual(386 com_record.hit_filtered_length)<line_sep>self.assertEqual(399 com_record.query_nseqs)<line_sep>self.assertEqual(12.972 com_record.query_neffseqs)<line_sep>self.assertEqual(399 com_record.hit_nseqs)<line_sep>self.assertEqual(12.972 com_record.hit_neffseqs)<line_sep>self.assertEqual(2759 com_record.sw_score)<line_sep>self.assertEqual(float("0.00e+00") com_record.evalue)<block_end><def_stmt>testCompassParser self<block_start><with_stmt>open(self.test_files[0])<as>handle<block_start>com_record=Compass.read(handle)<block_end>self.assertEqual("60456.blo.gz.aln" com_record.query)<block_end><def_stmt>testCompassIteratorEasy self<block_start><with_stmt>open(self.test_files[0])<as>handle<block_start>records=Compass.parse(handle)<line_sep>com_record=next(records)<block_end>self.assertEqual("60456.blo.gz.aln" com_record.query)<line_sep>self.assertRaises(StopIteration next records)<block_end><def_stmt>testCompassIteratorHard self<block_start><with_stmt>open(self.test_files[1])<as>handle<block_start>records=Compass.parse(handle)<line_sep>com_record=next(records)<line_sep>self.assertEqual("allscop//14982.blo.gz.aln" com_record.hit)<line_sep>self.assertEqual(float("1.01e+03") com_record.evalue)<line_sep>com_record=next(records)<line_sep>self.assertEqual("allscop//14983.blo.gz.aln" com_record.hit)<line_sep>self.assertEqual(float("1.01e+03") com_record.evalue)<line_sep>com_record=next(records)<line_sep>self.assertEqual("allscop//14984.blo.gz.aln" com_record.hit)<line_sep>self.assertEqual(float("5.75e+02") com_record.evalue)<block_end><block_end><def_stmt>testAlignmentParsingOne self<block_start><with_stmt>open(self.test_files[1])<as>handle<block_start>records=Compass.parse(handle)<line_sep>com_record=next(records)<line_sep>self.assertEqual(178 com_record.query_start)<line_sep>self.assertEqual("KKDLEEIAD" com_record.query_aln)<line_sep>self.assertEqual(9 com_record.hit_start)<line_sep>self.assertEqual("QAAVQAVTA" com_record.hit_aln)<line_sep>self.assertEqual("++ ++++++" com_record.positives)<line_sep>com_record=next(records)<line_sep>com_record=next(records)<line_sep>self.assertEqual(371 com_record.query_start)<line_sep>self.assertEqual("LEEAMDRMER~~~V" com_record.query_aln)<line_sep>self.assertEqual(76 com_record.hit_start)<line_sep>self.assertEqual("LQNFIDQLDNpddL" com_record.hit_aln)<line_sep>self.assertEqual("+ ++++ + + +" com_record.positives)<block_end><block_end><def_stmt>testAlignmentParsingTwo self<block_start><with_stmt>open(self.test_files[0])<as>handle<block_start>records=Compass.parse(handle)<line_sep>com_record=next(records)<block_end>self.assertEqual(2 com_record.query_start)<line_sep>self.assertEqual(2 
com_record.hit_start)<line_sep>self.assertEqual("LKERKL" com_record.hit_aln[-6:])<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>runner=unittest.TextTestRunner(verbosity=2)<line_sep>unittest.main(testRunner=runner)<block_end>
|
# Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Simple bidirectional model definitions."""<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_stmt>tensorflow<as>tf<import_from_stmt>regularization variational_dropout<line_sep>FLAGS=tf.app.flags.FLAGS<def_stmt>discriminator hparams sequence is_training reuse=<none> initial_state=<none><block_start>"""Define the Discriminator graph."""<line_sep>sequence=tf.cast(sequence tf.int32)<if_stmt>FLAGS.dis_share_embedding<block_start><assert_stmt>hparams.dis_rnn_size<eq>hparams.gen_rnn_size ('If you wish to share Discriminator/Generator embeddings, they must be'<concat>' same dimension.')<with_stmt>tf.variable_scope('gen/decoder/rnn' reuse=<true>)<block_start>embedding=tf.get_variable('embedding' [FLAGS.vocab_size hparams.gen_rnn_size])<block_end><block_end><with_stmt>tf.variable_scope('dis' reuse=reuse)<block_start><def_stmt>lstm_cell <block_start><return>tf.contrib.rnn.BasicLSTMCell(hparams.dis_rnn_size forget_bias=0.0 state_is_tuple=<true> reuse=reuse)<block_end>attn_cell=lstm_cell<if_stmt>is_training<and>hparams.dis_vd_keep_prob<l>1<block_start><def_stmt>attn_cell <block_start><return>variational_dropout.VariationalDropoutWrapper(lstm_cell() FLAGS.batch_size hparams.dis_rnn_size hparams.dis_vd_keep_prob hparams.dis_vd_keep_prob)<block_end><block_end>cell_fwd=tf.contrib.rnn.MultiRNNCell([attn_cell()<for>_ range(hparams.dis_num_layers)] state_is_tuple=<true>)<line_sep>cell_bwd=tf.contrib.rnn.MultiRNNCell([attn_cell()<for>_ range(hparams.dis_num_layers)] state_is_tuple=<true>)<line_sep># print initial_state
# print cell_fwd.zero_state(FLAGS.batch_size, tf.float32)
<if_stmt>initial_state<block_start>state_fwd=[[tf.identity(x)<for>x inner_initial_state]<for>inner_initial_state initial_state]<line_sep>state_bwd=cell_bwd.zero_state(FLAGS.batch_size tf.float32)<block_end><else_stmt><block_start>state_fwd=cell_fwd.zero_state(FLAGS.batch_size tf.float32)<line_sep>state_bwd=cell_bwd.zero_state(FLAGS.batch_size tf.float32)<block_end><def_stmt>make_mask keep_prob units<block_start>random_tensor=keep_prob<line_sep># 0. if [keep_prob, 1.0) and 1. if [1.0, 1.0 + keep_prob)
random_tensor<augadd>tf.random_uniform(tf.stack([FLAGS.batch_size units]))<line_sep><return>tf.floor(random_tensor)/keep_prob<block_end><if_stmt>is_training<block_start>output_mask=make_mask(hparams.dis_vd_keep_prob 2<times>hparams.dis_rnn_size)<block_end><if_stmt><not>FLAGS.dis_share_embedding<block_start>embedding=tf.get_variable('embedding' [FLAGS.vocab_size hparams.dis_rnn_size])<block_end>rnn_inputs=tf.nn.embedding_lookup(embedding sequence)<line_sep>rnn_inputs=tf.unstack(rnn_inputs axis=1)<with_stmt>tf.variable_scope('rnn')<as>vs<block_start>outputs,_,_=tf.contrib.rnn.static_bidirectional_rnn(cell_fwd cell_bwd rnn_inputs state_fwd state_bwd scope=vs)<if_stmt>is_training<block_start>outputs<augmul>output_mask<block_end># Prediction is linear output for Discriminator.
predictions=tf.contrib.layers.linear(outputs 1 scope=vs)<line_sep>predictions=tf.transpose(predictions [1 0 2])<block_end><block_end><if_stmt>FLAGS.baseline_method<eq>'critic'<block_start><with_stmt>tf.variable_scope('critic' reuse=reuse)<as>critic_scope<block_start>values=tf.contrib.layers.linear(outputs 1 scope=critic_scope)<line_sep>values=tf.transpose(values [1 0 2])<block_end><return>tf.squeeze(predictions axis=2) tf.squeeze(values axis=2)<block_end><else_stmt><block_start><return>tf.squeeze(predictions axis=2) <none><block_end><block_end>
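# Numerical illustration (not part of the model above): make_mask() draws a
# Bernoulli(keep_prob) mask and rescales it by 1/keep_prob -- the usual
# inverted-dropout scaling -- using only a uniform draw and floor().
import numpy as np

def make_mask_np(keep_prob, batch_size, units, rng=None):
    rng = rng or np.random.default_rng()
    random_tensor = keep_prob + rng.random((batch_size, units))
    # floor() maps [keep_prob, 1.0) -> 0. and [1.0, 1.0 + keep_prob) -> 1.
    return np.floor(random_tensor) / keep_prob

# Each entry is either 0 or 1/keep_prob, so the mask has expectation 1 and the
# expected activation scale is preserved when it multiplies the RNN outputs.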
|
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
# TEST SCENARIO COVERAGE
# ----------------------
# Methods Total : 16
# Methods Covered : 16
# Examples Total : 21
# Examples Tested : 21
# Coverage % : 100
# ----------------------
# current method cover: 15/16
<import_stmt>os<import_stmt>unittest<import_stmt>azure.mgmt.databox<import_from_stmt>devtools_testutils AzureMgmtTestCase ResourceGroupPreparer<line_sep>AZURE_LOCATION='eastus'<class_stmt>MgmtDataBoxTest(AzureMgmtTestCase)<block_start><def_stmt>setUp self<block_start>super(MgmtDataBoxTest self).setUp()<line_sep>self.mgmt_client=self.create_mgmt_client(azure.mgmt.databox.DataBoxManagementClient)<block_end>@unittest.skip("unavailable in track2")@ResourceGroupPreparer(location=AZURE_LOCATION)<def_stmt>test_databox self resource_group<block_start>SUBSCRIPTION_ID=<none><if_stmt>self.is_live<block_start>SUBSCRIPTION_ID=os.environ.get("AZURE_SUBSCRIPTION_ID" <none>)<block_end><if_stmt><not>SUBSCRIPTION_ID<block_start>SUBSCRIPTION_ID=self.settings.SUBSCRIPTION_ID<block_end>RESOURCE_GROUP=resource_group.name<line_sep>STORAGE_ACCOUNT_NAME='databoxaccountabc'<line_sep>JOB_NAME='testjob'<line_sep>LOCATION_NAME="westus"<line_sep># JobsCreate[put]
BODY={"details":{"job_details_type":"DataBox" "contact_details":{"contact_name":"<NAME>" "phone":"1234567890" "phone_extension":"1234" "email_list":["<EMAIL>"]} "shipping_address":{"street_address1":"16 TOWNSEND ST" "street_address2":"Unit 1" "city":"San Francisco" "state_or_province":"CA" "country":"US" "postal_code":"94107" "company_name":"Microsoft" "address_type":"Commercial"} "destination_account_details":[{"storage_account_id":"/subscriptions/"+SUBSCRIPTION_ID+"/resourceGroups/"+RESOURCE_GROUP+"/providers/Microsoft.Storage/storageAccounts/"+STORAGE_ACCOUNT_NAME+"" "data_destination_type":"StorageAccount"}]} "location":"westus" "sku":{"name":"DataBox"}}<line_sep>result=self.mgmt_client.jobs.create(resource_group.name JOB_NAME BODY)<line_sep>result=result.result()<line_sep># JobsGet5[get]
result=self.mgmt_client.jobs.get(resource_group.name JOB_NAME)<line_sep># JobsGet4[get]
result=self.mgmt_client.jobs.get(resource_group.name JOB_NAME)<line_sep># JobsGet3[get]
result=self.mgmt_client.jobs.get(resource_group.name JOB_NAME)<line_sep># JobsGet2[get]
result=self.mgmt_client.jobs.get(resource_group.name JOB_NAME)<line_sep># JobsGet1[get]
result=self.mgmt_client.jobs.get(resource_group.name JOB_NAME)<line_sep># JobsGet[get]
result=self.mgmt_client.jobs.get(resource_group.name JOB_NAME)<line_sep># JobsListByResourceGroup[get]
result=self.mgmt_client.jobs.list_by_resource_group(resource_group.name)<line_sep># JobsList[get]
result=self.mgmt_client.jobs.list()<line_sep># OperationsGet[get]
result=self.mgmt_client.operations.list()<line_sep># ServiceValidateInputsByResourceGroup[post]
BODY={"validation_category":"JobCreationValidation" "individual_request_details":[{"validation_type":"ValidateDataDestinationDetails" "location":"westus" "destination_account_details":[{"storage_account_id":"/subscriptions/"+SUBSCRIPTION_ID+"/resourceGroups/"+RESOURCE_GROUP+"/providers/Microsoft.Storage/storageAccounts/"+STORAGE_ACCOUNT_NAME+"" "data_destination_type":"StorageAccount"}]} {"validation_type":"ValidateAddress" "shipping_address":{"street_address1":"16 TOWNSEND ST" "street_address2":"Unit 1" "city":"San Francisco" "state_or_province":"CA" "country":"US" "postal_code":"94107" "company_name":"Microsoft" "address_type":"Commercial"} "device_type":"DataBox"}]}<line_sep>result=self.mgmt_client.service.validate_inputs_by_resource_group(resource_group.name LOCATION_NAME BODY)<line_sep># AvailableSkusByResourceGroup[post]
BODY={"country":"US" "location":"westus" "transfer_type":"ImportToAzure"}<line_sep>result=self.mgmt_client.service.list_available_skus_by_resource_group(resource_group.name LOCATION_NAME BODY)<line_sep>"""
# BookShipmentPickupPost[post]
now = dt.datetime.now()
BODY = {
# For new test, change the start time as current date
# and end time as start_time + 2 days
"start_time": now,
"end_time": now + dt.timedelta(days=2),
"shipment_location": "Front desk"
}
self.mgmt_client.jobs.book_shipment_pick_up(resource_group.name, JOB_NAME, BODY)
"""<line_sep># JobsListCredentials[post]
result=self.mgmt_client.jobs.list_credentials(resource_group.name JOB_NAME)<line_sep># JobsPatch[patch]
BODY={"details":{"contact_details":{"contact_name":"<NAME>" "phone":"1234567890" "phone_extension":"1234" "email_list":["<EMAIL>"]} "shipping_address":{"street_address1":"16 TOWNSEND ST" "street_address2":"Unit 1" "city":"San Francisco" "state_or_province":"CA" "country":"US" "postal_code":"94107" "company_name":"Microsoft" "address_type":"Commercial"}}}<line_sep>result=self.mgmt_client.jobs.update(resource_group.name JOB_NAME BODY)<line_sep>result=result.result()<line_sep># ServiceRegionConfiguration[post]
# TODO: SKUs are not available in live test
# BODY = {
# "storage_location": "westus",
# "sku_name": "DataBox"
# }
BODY=<none><line_sep>result=self.mgmt_client.service.region_configuration(LOCATION_NAME BODY)<line_sep># ValidateAddressPost[post]
BODY={"validation_type":"ValidateAddress" "shipping_address":{"street_address1":"16 TOWNSEND ST" "street_address2":"Unit 1" "city":"San Francisco" "state_or_province":"CA" "country":"US" "postal_code":"94107" "company_name":"Microsoft" "address_type":"Commercial"} "device_type":"DataBox"}<line_sep>result=self.mgmt_client.service.validate_address_method(LOCATION_NAME BODY)<line_sep># ServiceValidateInputs[post]
BODY={"validation_category":"JobCreationValidation" "individual_request_details":[{"validation_type":"ValidateDataDestinationDetails" "location":"westus" "destination_account_details":[{"storage_account_id":"/subscriptions/"+SUBSCRIPTION_ID+"/resourceGroups/"+RESOURCE_GROUP+"/providers/Microsoft.Storage/storageAccounts/"+STORAGE_ACCOUNT_NAME+"" "data_destination_type":"StorageAccount"}]} {"validation_type":"ValidateAddress" "shipping_address":{"street_address1":"16 TOWNSEND ST" "street_address2":"Unit 1" "city":"San Francisco" "state_or_province":"CA" "country":"US" "postal_code":"94107" "company_name":"Microsoft" "address_type":"Commercial"} "device_type":"DataBox"}]}<line_sep>result=self.mgmt_client.service.validate_inputs(LOCATION_NAME BODY)<line_sep># AvailableSkusPost[post]
BODY={"country":"US" "location":"westus" "transfer_type":"ImportToAzure"}<line_sep>result=self.mgmt_client.service.list_available_skus(LOCATION_NAME BODY)<line_sep># JobsCancelPost[post]
BODY={"reason":"CancelTest"}<line_sep>result=self.mgmt_client.jobs.cancel(resource_group.name JOB_NAME BODY)<line_sep># JobsDelete[delete]
result=self.mgmt_client.jobs.delete(resource_group.name JOB_NAME)<line_sep>result=result.result()<block_end><block_end>#------------------------------------------------------------------------------
<if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
|
# __about__.py
#
# Copyright (C) 2006-2020 wolfSSL Inc.
#
# This file is part of wolfSSL.
#
# wolfSSL is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# wolfSSL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
#/
metadata=dict(__name__="wolfcrypt" __version__="0.1.9" __license__="GPLv2 or Commercial License" __author__="wolfSSL Inc." __author_email__="<EMAIL>" __url__="https://wolfssl.github.io/wolfcrypt-py" __description__=u"A Python library that encapsulates wolfSSL's wolfCrypt API." __keywords__="security, cryptography, ssl, embedded, embedded ssl" __classifiers__=[u"License :: OSI Approved :: GNU General Public License v2 (GPLv2)" u"License :: Other/Proprietary License" u"Operating System :: OS Independent" u"Programming Language :: Python :: 2.7" u"Programming Language :: Python :: 3.5" u"Topic :: Security" u"Topic :: Security :: Cryptography" u"Topic :: Software Development"])<line_sep>globals().update(metadata)<line_sep>__all__=list(metadata.keys())<line_sep>
|
<import_stmt>EoN<import_stmt>networkx<as>nx<import_stmt>matplotlib.pyplot<as>plt<import_stmt>scipy<import_stmt>random<def_stmt>get_deg_seq N Pk<block_start><while_stmt><true>#run until degree sequence has even sum of N entries
<block_start>deg_seq=[]<for_stmt>counter range(N)<block_start>r=random.random()<for_stmt>k Pk<block_start><if_stmt>Pk[k]<g>r<block_start><break><block_end><else_stmt><block_start>r<augsub>Pk[k]<block_end><block_end>deg_seq.append(k)<block_end><if_stmt>sum(deg_seq)%2<eq>0<block_start><break><block_end><block_end><return>deg_seq<block_end><def_stmt>sim_and_plot G tau gamma rho tmax tcount ax<block_start>t,S,I=EoN.fast_SIS(G tau gamma rho=rho tmax=tmax)<line_sep>report_times=scipy.linspace(0 tmax tcount)<line_sep>I=EoN.subsample(report_times t I)<line_sep>ax.plot(report_times I/N color='grey' linewidth=5 alpha=0.3)<line_sep>t,S,I,=EoN.SIS_heterogeneous_meanfield_from_graph(G tau gamma rho=rho tmax=tmax tcount=tcount)<line_sep>ax.plot(t I/N '--')<line_sep>t,S,I=EoN.SIS_compact_pairwise_from_graph(G tau gamma rho=rho tmax=tmax tcount=tcount)<line_sep>ax.plot(t I/N)<line_sep>t,S,I=EoN.SIS_homogeneous_pairwise_from_graph(G tau gamma rho=rho tmax=tmax tcount=tcount)<line_sep>ax.plot(t I/N '-.')<block_end>N=10000<line_sep>gamma=1<line_sep>rho=0.05<line_sep>tmax=10<line_sep>tcount=1001<line_sep>kmin=1<line_sep>kmax=40<line_sep>Pk={}<for_stmt>k range(kmin kmax+1)<block_start>Pk[k]=k<power>(-2.)<block_end>norm_factor=sum(Pk.values())<for_stmt>k Pk<block_start>Pk[k]<augdiv>norm_factor<block_end>deg_seq=get_deg_seq(N Pk)<line_sep>G=nx.configuration_model(deg_seq)<line_sep>kave=sum(deg_seq)/N<line_sep>tau=1.5<times>gamma/kave<line_sep>fig=plt.figure(1)<line_sep>main=plt.axes()<line_sep>sim_and_plot(G tau gamma rho tmax tcount main)<line_sep>kmin=10<line_sep>kmax=150<line_sep>Pk={}<for_stmt>k range(kmin kmax+1)<block_start>Pk[k]=k<power>(-2.)<block_end>norm_factor=sum(Pk.values())<for_stmt>k Pk<block_start>Pk[k]<augdiv>norm_factor<block_end>deg_seq=get_deg_seq(N Pk)<line_sep>G=nx.configuration_model(deg_seq)<line_sep>kave=(sum(deg_seq)/N)<line_sep>tau=1.5<times>gamma/kave<line_sep>fig=plt.figure(1)<line_sep>ax1=plt.gca()<line_sep>inset=plt.axes([0.45 0.175 0.45 0.45])<line_sep>sim_and_plot(G tau gamma rho tmax tcount inset)<line_sep>ax1.set_xlabel('$t$')<line_sep>ax1.set_ylabel('Prevalence')<line_sep>plt.savefig('fig5p4.png')<line_sep>
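# Compact alternative sketch (not used above): the same degree-distribution
# sampling can be done in one call with numpy, again resampling until the sum
# is even as the configuration model requires. Assumes the Pk values sum to 1,
# as they do after the normalisation above.
import numpy as np

def get_deg_seq_np(N, Pk, rng=None):
    rng = rng or np.random.default_rng()
    ks = np.array(list(Pk.keys()))
    ps = np.array(list(Pk.values()))
    while True:
        deg_seq = rng.choice(ks, size=N, p=ps)
        if deg_seq.sum() % 2 == 0:
            return deg_seq.tolist()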
|
"""
Tests for LocalDAL
"""<import_from_future_stmt> division<import_from_future_stmt> print_function<import_from_future_stmt> unicode_literals<import_stmt>os<import_stmt>tempfile<import_stmt>platform<import_from_stmt>datetime datetime<import_from_stmt>datmo.core.storage.driver.blitzdb_dal_driver BlitzDBDALDriver<import_from_stmt>datmo.core.storage.local.dal LocalDAL<import_from_stmt>datmo.core.entity.model Model<import_from_stmt>datmo.core.entity.environment Environment<import_from_stmt>datmo.core.util.exceptions EntityNotFound InvalidArgumentType<class_stmt>TestLocalDAL()<block_start><def_stmt>setup_method self# provide mountable tmp directory for docker
<block_start>tempfile.tempdir="/tmp"<if><not>platform.system()<eq>"Windows"<else><none><line_sep>test_datmo_dir=os.environ.get('TEST_DATMO_DIR' tempfile.gettempdir())<line_sep>self.temp_dir=tempfile.mkdtemp(dir=test_datmo_dir)<line_sep>self.driver_type="blitzdb"<line_sep>self.driver_options={"driver_type":"file" "connection_string":self.temp_dir}<line_sep>self.dal=LocalDAL(self.driver_type self.driver_options)<line_sep>model_name="model_1"<line_sep>model=self.dal.model.create(Model({"name":model_name}))<line_sep>self.environment_input_dict={"model_id":model.id "driver_type":"docker" "file_collection_id":"test_file_id" "definition_filename":"Dockerfile" "hardware_info":{"system":"macosx"} "unique_hash":"slkdjfa23dk" "language":"python3"}<block_end><def_stmt>teardown_method self<block_start><pass><block_end># TODO: Add tests for other variables once figured out.
<def_stmt>test_create_environment_by_dictionary self<block_start>environment=self.dal.environment.create(Environment(self.environment_input_dict))<assert_stmt>environment.id<assert_stmt>environment.driver_type<eq>self.environment_input_dict['driver_type']<assert_stmt>environment.file_collection_id<eq>self.environment_input_dict['file_collection_id']<assert_stmt>environment.definition_filename<eq>self.environment_input_dict['definition_filename']<assert_stmt>environment.hardware_info<eq>self.environment_input_dict['hardware_info']<assert_stmt>environment.unique_hash<eq>self.environment_input_dict['unique_hash']<assert_stmt>environment.created_at<assert_stmt>environment.updated_at<line_sep>environment_2=self.dal.environment.create(Environment(self.environment_input_dict))<assert_stmt>environment_2.id<ne>environment.id<line_sep>test_environment_input_dict=self.environment_input_dict.copy()<line_sep>test_environment_input_dict['id']="environment_id"<line_sep>environment_3=self.dal.environment.create(Environment(test_environment_input_dict))<assert_stmt>environment_3.id<eq>test_environment_input_dict['id']<block_end><def_stmt>test_get_by_id_environment self<block_start>environment=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>result=self.dal.environment.get_by_id(environment.id)<assert_stmt>environment.id<eq>result.id<block_end><def_stmt>test_get_by_shortened_id_environment self<block_start>environment=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>result=self.dal.environment.get_by_shortened_id(environment.id[:10])<assert_stmt>environment.id<eq>result.id<block_end><def_stmt>test_get_by_id_environment_new_driver_instance self<block_start>environment=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep># create new dal with new driver instance (success)
new_driver_instance=BlitzDBDALDriver("file" self.temp_dir)<line_sep>new_dal_instance=LocalDAL(self.driver_type self.driver_options driver=new_driver_instance)<line_sep>new_environment_1=new_dal_instance.environment.get_by_id(environment.id)<assert_stmt>new_environment_1.id<eq>environment.id<line_sep># create new dal instance with same driver (success)
new_dal_instance=LocalDAL(self.driver_type self.driver_options)<line_sep>new_environment_2=new_dal_instance.environment.get_by_id(environment.id)<assert_stmt>new_environment_2.id<eq>environment.id<block_end><def_stmt>test_update_environment self<block_start>environment=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep># Update required and optional parameters
updated_environment_input_dict=self.environment_input_dict.copy()<line_sep>updated_environment_input_dict['id']=environment.id<line_sep>updated_environment_input_dict['driver_type']="new_driver"<line_sep>updated_environment_input_dict['created_at']=datetime.utcnow()<line_sep>updated_environment=self.dal.environment.update(updated_environment_input_dict)<assert_stmt>environment.id<eq>updated_environment.id<assert_stmt>environment.updated_at<l>updated_environment.updated_at<assert_stmt>updated_environment.driver_type<eq>updated_environment_input_dict['driver_type']<assert_stmt>updated_environment.created_at<eq>updated_environment_input_dict['created_at']<block_end><def_stmt>test_delete_environment self<block_start>environment=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>self.dal.environment.delete(environment.id)<line_sep>deleted=<false><try_stmt><block_start>self.dal.environment.get_by_id(environment.id)<block_end><except_stmt>EntityNotFound<block_start>deleted=<true><block_end><assert_stmt>deleted<block_end><def_stmt>test_query_environments_basic self<block_start>environment=self.dal.environment.create(Environment(self.environment_input_dict))<assert_stmt>len(self.dal.environment.query({"id":environment.id}))<eq>1<block_end><def_stmt>test_query_environments_multiple self<block_start>environment_1=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>environment_2=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>environment_3=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>results=self.dal.environment.query({} sort_key="created_at" sort_order="ascending")<assert_stmt>len(results)<eq>3<assert_stmt>results[0].created_at<eq>environment_1.created_at<assert_stmt>results[1].created_at<eq>environment_2.created_at<line_sep>results=self.dal.environment.query({} sort_key="created_at" sort_order="descending")<assert_stmt>len(results)<eq>3<assert_stmt>results[0].created_at<eq>environment_3.created_at<assert_stmt>results[1].created_at<eq>environment_2.created_at<line_sep># Wrong order being passed in
failed=<false><try_stmt><block_start>_=self.dal.environment.query({} sort_key='created_at' sort_order='wrong_order')<block_end><except_stmt>InvalidArgumentType<block_start>failed=<true><block_end><assert_stmt>failed<line_sep># Wrong key and order being passed in
failed=<false><try_stmt><block_start>_=self.dal.environment.query({} sort_key='wrong_key' sort_order='wrong_order')<block_end><except_stmt>InvalidArgumentType<block_start>failed=<true><block_end><assert_stmt>failed<line_sep># wrong key and right order being passed in
expected_items=self.dal.environment.query({} sort_key='created_at' sort_order='ascending')<line_sep>items=self.dal.environment.query({} sort_key='wrong_key' sort_order='ascending')<line_sep>expected_ids=[item.id<for>item expected_items]<line_sep>ids=[item.id<for>item items]<assert_stmt>set(expected_ids)<eq>set(ids)<block_end><def_stmt>test_query_environments_range_query self<block_start>_=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>_=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>_=self.dal.environment.create(Environment(self.environment_input_dict))<line_sep>environments=self.dal.environment.query({} sort_key="created_at" sort_order="descending")<line_sep>result=self.dal.environment.query({"created_at":{"$lt":environments[1].created_at.strftime('%Y-%m-%dT%H:%M:%S.%fZ')}})<assert_stmt>len(environments)<eq>3<assert_stmt>len(result)<eq>1<block_end><block_end>
|
<import_from_stmt>setuptools setup find_packages<import_from_stmt>rolling __version__<line_sep>long_description="""**rolling** is a collection of computationally efficient
rolling window iterators for Python.
Many useful arithmetical, logical and statistical functions are implemented
to allow each window value to be computed in sub-linear time (and in many
instances constant time). These include:
- Sum
- Min and Max
- All and Any
- Mean, Median and Mode
- Variance and Standard deviation
There's also a more general 'apply' mode where any specific function can be
applied to the window. Both fixed-length and variable-length windows are supported.
"""<line_sep>setup(name='rolling' version=__version__ description='Efficient rolling window algorithms' long_description=long_description classifiers=['Development Status :: 3 - Alpha' 'Intended Audience :: Developers' 'Topic :: Software Development :: Libraries :: Python Modules' 'License :: OSI Approved :: MIT License' 'Programming Language :: Python :: 3' 'Programming Language :: Python :: 3.6' 'Programming Language :: Python :: 3.7' 'Programming Language :: Python :: 3.8' 'Programming Language :: Python :: 3.9' ] keywords='rolling window iterator algorithms' project_urls={'Source':'https://github.com/ajcr/rolling/' 'Tracker':'https://github.com/ajcr/rolling/issues' } python_requires='>=3.6.0' author='<NAME>' license='MIT' packages=find_packages(include=["rolling" "rolling.*"]) tests_require=['pytest>=2.8.0'] zip_safe=<false> )<line_sep>
|
<import_from_stmt>uuid uuid4<import_from_stmt>ee.clickhouse.client sync_execute<import_from_stmt>ee.clickhouse.models.event create_event<import_from_stmt>ee.clickhouse.util ClickhouseTestMixin<import_from_stmt>posthog.api.test.test_person factory_test_person<import_from_stmt>posthog.models Event Person<import_from_stmt>posthog.models.person PersonDistinctId<def_stmt>_create_event **kwargs<block_start>kwargs.update({"event_uuid":uuid4()})<line_sep><return>Event(pk=create_event(**kwargs))<block_end><def_stmt>_get_events team_id<block_start><return>sync_execute("SELECT * FROM events WHERE team_id = %(team_id)s" {"team_id":team_id})<block_end><def_stmt>_create_person **kwargs<block_start><return>Person.objects.create(**kwargs)<block_end><class_stmt>ClickhouseTestPersonApi(ClickhouseTestMixin factory_test_person(_create_event _create_person _get_events)# type: ignore
)<block_start><def_stmt>test_split_person_clickhouse self<block_start>person=_create_person(team=self.team distinct_ids=["1" "2" "3"] properties={"$browser":"whatever" "$os":"Mac OS X"})<line_sep>response=self.client.post("/api/person/%s/split/"%person.pk ).json()<line_sep>self.assertTrue(response["success"])<line_sep>people=Person.objects.all().order_by("id")<line_sep>clickhouse_people=sync_execute("SELECT id FROM person FINAL WHERE team_id = %(team_id)s" {"team_id":self.team.pk})<line_sep>self.assertCountEqual(clickhouse_people [(person.uuid )<for>person people])<line_sep>distinct_id_rows=PersonDistinctId.objects.all().order_by("person_id")<line_sep>pdis=sync_execute("SELECT person_id, distinct_id FROM person_distinct_id FINAL WHERE team_id = %(team_id)s" {"team_id":self.team.pk} )<line_sep>self.assertCountEqual(pdis [(pdi.person.uuid pdi.distinct_id)<for>pdi distinct_id_rows])<line_sep>pdis2=sync_execute("SELECT person_id, distinct_id FROM person_distinct_id2 FINAL WHERE team_id = %(team_id)s" {"team_id":self.team.pk} )<line_sep>self.assertCountEqual(pdis2 [(pdi.person.uuid pdi.distinct_id)<for>pdi distinct_id_rows])<block_end><block_end>
|
<import_stmt>math<import_stmt>ee<import_from_stmt>sepal.ee.image replace<line_sep># Volumetric model (Hoekman & Reiche 2015)
<def_stmt>apply image<block_start>geometry=image.geometry()<line_sep>srtm=ee.Image('USGS/SRTMGL1_003').clip(geometry)<line_sep># convert Sigma0 dB to Power
sigma0_pow=ee.Image.constant(10).pow(image.divide(10.0))<line_sep># Article ( numbers relate to chapters)
# 2.1.1 Radar geometry
theta_i=image.select('angle')<line_sep>phi_i=ee.Terrain.aspect(theta_i).reduceRegion(reducer=ee.Reducer.mean() geometry=geometry scale=100).get('aspect')<line_sep># 2.1.2 Terrain geometry
alpha_s=ee.Terrain.slope(srtm).select('slope')<line_sep>phi_s=ee.Terrain.aspect(srtm).select('aspect')<line_sep># 2.1.3 Model geometry
# reduce to 3 angle
phi_r=ee.Image.constant(phi_i).subtract(phi_s)<line_sep># convert all to radians
phi_rRad=phi_r.multiply(math.pi/180)<line_sep>alpha_sRad=alpha_s.multiply(math.pi/180)<line_sep>theta_iRad=theta_i.multiply(math.pi/180)<line_sep>ninetyRad=ee.Image.constant(90).multiply(math.pi/180)<line_sep># slope steepness in range (eq. 2)
alpha_r=(alpha_sRad.tan().multiply(phi_rRad.cos())).atan()<line_sep># slope steepness in azimuth (eq 3)
alpha_az=(alpha_sRad.tan().multiply(phi_rRad.sin())).atan()<line_sep># local incidence angle (eq. 4)
theta_lia=(alpha_az.cos().multiply((theta_iRad.subtract(alpha_r)).cos())).acos()<line_sep>theta_liaDeg=theta_lia.multiply(180/math.pi)<line_sep># 2.2
# Gamma_nought_flat
gamma0=sigma0_pow.divide(theta_iRad.cos())<line_sep>gamma0dB=ee.Image.constant(10).multiply(gamma0.log10())<line_sep>ratio_1=gamma0dB.select('VV').subtract(gamma0dB.select('VH'))<line_sep># Volumetric Model
nominator=(ninetyRad.subtract(theta_iRad).add(alpha_r)).tan()<line_sep>denominator=(ninetyRad.subtract(theta_iRad)).tan()<line_sep>volModel=(nominator.divide(denominator)).abs()<line_sep># apply model
gamma0_Volume=gamma0.divide(volModel)<line_sep>gamma0_VolumeDB=ee.Image.constant(10).multiply(gamma0_Volume.log10())<line_sep># we add a layover/shadow mask to the original implementation
# layover, where slope > radar viewing angle
alpha_rDeg=alpha_r.multiply(180/math.pi)<line_sep>layover=alpha_rDeg.lt(theta_i)<line_sep># shadow where LIA > 90
shadow=theta_liaDeg.lt(85)<line_sep># calculate the ratio for RGB vis
ratio=gamma0_VolumeDB.select('VV').subtract(gamma0_VolumeDB.select('VH'))<line_sep>output=gamma0_VolumeDB.addBands(ratio).addBands(alpha_r).addBands(phi_s).addBands(theta_iRad).addBands(layover).addBands(shadow).addBands(gamma0dB).addBands(ratio_1)<line_sep># rename bands for output
<return>replace(image output.select(['VV' 'VH' 'slope_1' 'slope_2'] ['VV' 'VH' 'layover' 'shadow']).addBands(image.select('angle')))<block_end>
|
#########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
<import_stmt>os<import_stmt>json<import_stmt>tarfile<import_stmt>uuid<import_stmt>wagon<import_stmt>yaml<import_stmt>shutil<import_stmt>zipfile<import_stmt>tempfile<import_stmt>requests<import_stmt>traceback<import_from_stmt>setuptools archive_util<import_from_stmt>flask request current_app<import_from_stmt>flask_restful.reqparse Argument<import_from_stmt>flask_restful.inputs boolean<import_from_stmt>cloudify.models_states SnapshotState BlueprintUploadState<import_from_stmt>manager_rest.manager_exceptions ArchiveTypeError<import_from_stmt>manager_rest.constants FILE_SERVER_PLUGINS_FOLDER FILE_SERVER_SNAPSHOTS_FOLDER FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER FILE_SERVER_BLUEPRINTS_FOLDER <import_from_stmt>manager_rest.archiving get_archive_type<import_from_stmt>manager_rest.storage.models Blueprint Plugin<import_from_stmt>manager_rest config chunked manager_exceptions workflow_executor<import_from_stmt>manager_rest.utils mkdirs get_formatted_timestamp current_tenant unzip files_in_folder remove <import_from_stmt>manager_rest.resource_manager get_resource_manager<import_from_stmt>manager_rest.constants SUPPORTED_ARCHIVE_TYPES <import_from_stmt>manager_rest.rest.rest_utils get_args_and_verify_arguments<line_sep>_PRIVATE_RESOURCE='private_resource'<line_sep>_VISIBILITY='visibility'<class_stmt>UploadedDataManager(object)<block_start><def_stmt>receive_uploaded_data self data_id=<none> **kwargs<block_start>file_server_root=config.instance.file_server_root<line_sep>resource_target_path=tempfile.mktemp()<try_stmt><block_start>additional_inputs=self._save_file_locally_and_extract_inputs(resource_target_path self._get_data_url_key() self._get_kind())<line_sep>doc,dest_file_name=self._prepare_and_process_doc(data_id file_server_root resource_target_path additional_inputs=additional_inputs **kwargs)<if_stmt><not>os.path.isfile(resource_target_path)# if the archive is a folder, we're copying its content,
# so there is no meaning to a specific archive file name...
<block_start>dest_file_name=<none><block_end>self._move_archive_to_uploaded_dir(doc.id file_server_root resource_target_path dest_file_name=dest_file_name)<line_sep><return>doc 201<block_end><finally_stmt><block_start>remove(resource_target_path)<block_end><block_end>@classmethod<def_stmt>_extract_file_to_file_server cls archive_path destination_root<block_start>"""
Extract a package archive into the given destination.
:param destination_root: the root destination for the unzipped archive
:param archive_path: the archive path
:return: the generated (unique) name of the directory, under
    destination_root, that the archive was extracted into
"""<line_sep># extract application to file server
tempdir=tempfile.mkdtemp('-blueprint-submit')<try_stmt><block_start><try_stmt><block_start>archive_util.unpack_archive(archive_path tempdir)<block_end><except_stmt>archive_util.UnrecognizedFormat<block_start><raise>manager_exceptions.BadParametersError('Blueprint archive is of an unrecognized format. '<concat>'Supported formats are: {0}'.format(SUPPORTED_ARCHIVE_TYPES))<block_end>archive_file_list=os.listdir(tempdir)<if_stmt>len(archive_file_list)<ne>1<or><not>os.path.isdir(os.path.join(tempdir archive_file_list[0]))<block_start><raise>manager_exceptions.BadParametersError('archive must contain exactly 1 directory')<block_end>application_dir_base_name=archive_file_list[0]<line_sep># generating temporary unique name for app dir, to allow multiple
# uploads of apps with the same name (as it appears in the file
# system, not the app name field inside the blueprint.
# the latter is guaranteed to be unique).
generated_app_dir_name='{0}-{1}'.format(application_dir_base_name uuid.uuid4())<line_sep>temp_application_dir=os.path.join(tempdir application_dir_base_name)<line_sep>temp_application_target_dir=os.path.join(tempdir generated_app_dir_name)<line_sep>shutil.move(temp_application_dir temp_application_target_dir)<line_sep>shutil.move(temp_application_target_dir destination_root)<line_sep><return>generated_app_dir_name<block_end><finally_stmt><block_start>shutil.rmtree(tempdir)<block_end><block_end>@staticmethod<def_stmt>_save_file_from_url archive_target_path url data_type<block_start><if_stmt>request.data<or>'Transfer-Encoding'<in>request.headers<or>'blueprint_archive'<in>request.files<block_start><raise>manager_exceptions.BadParametersError("Can pass {0} as only one of: URL via query parameters, "<concat>"request body, multi-form or chunked.".format(data_type))<block_end><try_stmt><block_start><with_stmt>requests.get(url stream=<true> timeout=(5 <none>))<as>resp<block_start>resp.raise_for_status()<with_stmt>open(archive_target_path 'wb')<as>f<block_start><for_stmt>chunk resp.iter_content(chunk_size=8192)<block_start><if_stmt>chunk<block_start>f.write(chunk)<block_end><block_end><block_end><block_end><block_end><except_stmt>requests.exceptions.RequestException<as>e<block_start><raise>manager_exceptions.BadParametersError("Cannot fetch {0}: {1}".format(url e))<block_end><block_end>@staticmethod<def_stmt>_save_file_from_chunks archive_target_path data_type<block_start><if_stmt>request.data<or>'blueprint_archive'<in>request.files<block_start><raise>manager_exceptions.BadParametersError("Can pass {0} as only one of: request body, multi-form or "<concat>"chunked.".format(data_type))<block_end><with_stmt>open(archive_target_path 'w')<as>f<block_start><for_stmt>buffered_chunked chunked.decode(request.input_stream)<block_start>f.write(buffered_chunked)<block_end><block_end><block_end>@staticmethod<def_stmt>_save_file_content archive_target_path data_type<block_start><if_stmt>'blueprint_archive'<in>request.files<block_start><raise>manager_exceptions.BadParametersError("Can't pass {0} both as URL via request body and multi-form".format(data_type))<block_end>uploaded_file_data=request.data<with_stmt>open(archive_target_path 'wb')<as>f<block_start>f.write(uploaded_file_data)<block_end><block_end><def_stmt>_save_files_multipart self archive_target_path<block_start>inputs={}<for_stmt>file_key request.files<block_start><if_stmt>file_key<eq>'inputs'<block_start>content=request.files[file_key]<line_sep># The file is a binary
<if_stmt>'application'<in>content.content_type<block_start>content_payload=self._save_bytes(content)<line_sep># Handling yaml
<if_stmt>content.content_type<eq>'application/octet-stream'<block_start>inputs=yaml.load(content_payload)<block_end># Handling json
<elif_stmt>content.content_type<eq>'application/json'<block_start>inputs=json.load(content_payload)<block_end><block_end># The file is raw json
<elif_stmt>'text'<in>content.content_type<block_start>inputs=json.load(content)<block_end><block_end><elif_stmt>file_key<eq>'blueprint_archive'<block_start>self._save_bytes(request.files[file_key] archive_target_path)<block_end><block_end><return>inputs<block_end>@staticmethod<def_stmt>_save_bytes content target_path=<none><block_start>"""
content should support read() and getvalue(); if target_path isn't
supplied, the content is returned as a string instead of written to disk
:param content:
:param target_path:
:return:
"""<if_stmt><not>target_path<block_start><return>content.getvalue().decode("utf-8")<block_end><else_stmt><block_start><with_stmt>open(target_path 'wb')<as>f<block_start>f.write(content.read())<block_end><block_end><block_end><def_stmt>_save_file_locally_and_extract_inputs self archive_target_path url_key data_type='unknown'<block_start>"""
Retrieves the file specified by the request to the local machine.
:param archive_target_path: the target of the archive
:param data_type: the kind of the data (e.g. 'blueprint')
:param url_key: if the data is passed as a url to an online resource,
the url_key specifies what header points to the requested url.
:return: None
"""<line_sep>inputs={}<line_sep># Handling importing blueprint through url
<if_stmt>url_key<in>request.args<block_start>self._save_file_from_url(archive_target_path request.args[url_key] data_type)<block_end># handle receiving chunked blueprint
<elif_stmt>'Transfer-Encoding'<in>request.headers<block_start>self._save_file_from_chunks(archive_target_path data_type)<block_end># handler receiving entire content through data
<elif_stmt>request.data<block_start>self._save_file_content(archive_target_path data_type)<block_end># handle inputs from form-data (for both the blueprint and inputs
# in body in form-data format)
<if_stmt>request.files<block_start>inputs=self._save_files_multipart(archive_target_path)<block_end><return>inputs<block_end><def_stmt>_move_archive_to_uploaded_dir self data_id root_path archive_path dest_file_name=<none><block_start><if_stmt><not>os.path.exists(archive_path)<block_start><raise>RuntimeError("Archive [{0}] doesn't exist - Cannot move "<concat>"archive to uploaded {1}s "<concat>"directory".format(archive_path self._get_kind()))<block_end>uploaded_dir=os.path.join(root_path self._get_target_dir_path() data_id)<if_stmt><not>os.path.isdir(uploaded_dir)<block_start>os.makedirs(uploaded_dir)<block_end>current_app.logger.info('uploading archive to: {0}'.format(uploaded_dir))<if_stmt>os.path.isfile(archive_path)<block_start><if_stmt><not>dest_file_name<block_start><try_stmt><block_start>archive_type=self._get_archive_type(archive_path)<block_end><except_stmt>ArchiveTypeError<block_start><raise>manager_exceptions.BadParametersError('Blueprint archive is of an unrecognized format. '<concat>'Supported formats are: {0}'.format(SUPPORTED_ARCHIVE_TYPES))<block_end>dest_file_name='{0}.{1}'.format(data_id archive_type)<block_end>shutil.move(archive_path os.path.join(uploaded_dir dest_file_name))<block_end><else_stmt><block_start><for_stmt>item os.listdir(archive_path)<block_start>shutil.copy(os.path.join(archive_path item) uploaded_dir)<block_end>shutil.rmtree(archive_path)<block_end><block_end>@classmethod<def_stmt>_zip_dir cls dir_to_zip target_zip_path<block_start>zipf=zipfile.ZipFile(target_zip_path 'w' zipfile.ZIP_DEFLATED)<try_stmt><block_start>plugin_dir_base_name=os.path.basename(dir_to_zip)<line_sep>rootlen=len(dir_to_zip)-len(plugin_dir_base_name)<for_stmt>base,dirs,files os.walk(dir_to_zip)<block_start><for_stmt>entry files<block_start>fn=os.path.join(base entry)<line_sep>zipf.write(fn fn[rootlen:])<block_end><block_end><block_end><finally_stmt><block_start>zipf.close()<block_end><block_end><def_stmt>_get_kind self<block_start><raise>NotImplementedError('Subclass responsibility')<block_end><def_stmt>_get_data_url_key self<block_start><raise>NotImplementedError('Subclass responsibility')<block_end><def_stmt>_get_target_dir_path self<block_start><raise>NotImplementedError('Subclass responsibility')<block_end><def_stmt>_get_archive_type self archive_path<block_start><raise>NotImplementedError('Subclass responsibility')<block_end><def_stmt>_prepare_and_process_doc self data_id file_server_root archive_target_path additional_inputs **kwargs<block_start><raise>NotImplementedError('Subclass responsibility')<block_end><block_end><class_stmt>UploadedSnapshotsManager(UploadedDataManager)<block_start><def_stmt>_get_kind self<block_start><return>'snapshot'<block_end><def_stmt>_get_data_url_key self<block_start><return>'snapshot_archive_url'<block_end><def_stmt>_get_target_dir_path self<block_start><return>FILE_SERVER_SNAPSHOTS_FOLDER<block_end><def_stmt>_get_archive_type self archive_path<block_start><return>'zip'<block_end><def_stmt>_prepare_and_process_doc self data_id file_server_root archive_target_path **kwargs<block_start><return>get_resource_manager().create_snapshot_model(data_id status=SnapshotState.UPLOADED) <none><block_end><block_end><class_stmt>UploadedBlueprintsManager(UploadedDataManager)<block_start><def_stmt>receive_uploaded_data self data_id=<none> **kwargs<block_start>blueprint_url=<none><line_sep>visibility=kwargs.get(_VISIBILITY <none>)<line_sep>labels=kwargs.get('labels' <none>)<line_sep>override_failed_blueprint=kwargs.get('override_failed' 
<false>)<line_sep>args=get_args_and_verify_arguments([Argument('private_resource' type=boolean) Argument('application_file_name' default='')])<line_sep># Handle importing blueprint through url
<if_stmt>self._get_data_url_key()<in>request.args<block_start><if_stmt>request.data<or>'Transfer-Encoding'<in>request.headers<or>'blueprint_archive'<in>request.files<block_start><raise>manager_exceptions.BadParametersError("Can pass {0} as only one of: URL via query parameters, "<concat>"request body, multi-form or "<concat>"chunked.".format(self._get_kind()))<block_end>blueprint_url=request.args[self._get_data_url_key()]<block_end>visibility=get_resource_manager().get_resource_visibility(Blueprint data_id visibility args.private_resource)<line_sep>new_blueprint=self._prepare_and_process_doc(data_id visibility blueprint_url application_file_name=args.application_file_name override_failed_blueprint=override_failed_blueprint labels=labels)<line_sep><return>new_blueprint 201<block_end><def_stmt>_prepare_and_process_doc self data_id visibility blueprint_url application_file_name override_failed_blueprint labels=<none># Put a new blueprint entry in DB
<block_start>now=get_formatted_timestamp()<line_sep>rm=get_resource_manager()<if_stmt>override_failed_blueprint<block_start>new_blueprint=rm.sm.get(Blueprint data_id)<line_sep>new_blueprint.plan=<none><line_sep>new_blueprint.description=<none><line_sep>new_blueprint.created_at=now<line_sep>new_blueprint.updated_at=now<line_sep>new_blueprint.main_file_name=<none><line_sep>new_blueprint.visibility=visibility<line_sep>new_blueprint.state=BlueprintUploadState.PENDING<line_sep>rm.sm.update(new_blueprint)<block_end><else_stmt><block_start>new_blueprint=rm.sm.put(Blueprint(plan=<none> id=data_id description=<none> created_at=now updated_at=now main_file_name=<none> visibility=visibility state=BlueprintUploadState.PENDING))<block_end><if_stmt><not>blueprint_url<block_start>new_blueprint.state=BlueprintUploadState.UPLOADING<line_sep>rm.sm.update(new_blueprint)<line_sep>self.upload_archive_to_file_server(data_id)<block_end><try_stmt><block_start>new_blueprint.upload_execution,messages=rm.upload_blueprint(data_id application_file_name blueprint_url config.instance.file_server_root # for the import resolver
labels=labels)<line_sep>rm.sm.update(new_blueprint)<line_sep>workflow_executor.execute_workflow(messages)<block_end><except_stmt>manager_exceptions.ExistingRunningExecutionError<as>e<block_start>new_blueprint.state=BlueprintUploadState.FAILED_UPLOADING<line_sep>new_blueprint.error=str(e)<line_sep>new_blueprint.error_traceback=traceback.format_exc()<line_sep>rm.sm.update(new_blueprint)<line_sep>self.cleanup_blueprint_archive_from_file_server(data_id current_tenant.name)<line_sep><raise><block_end><return>new_blueprint<block_end><def_stmt>upload_archive_to_file_server self blueprint_id<block_start>file_server_root=config.instance.file_server_root<line_sep>archive_target_path=tempfile.mktemp()<try_stmt><block_start>self._save_file_locally_and_extract_inputs(archive_target_path <none> self._get_kind())<line_sep>self._move_archive_to_uploaded_dir(blueprint_id file_server_root archive_target_path)<block_end><except_stmt>Exception<as>e<block_start>sm=get_resource_manager().sm<line_sep>blueprint=sm.get(Blueprint blueprint_id)<line_sep>blueprint.state=BlueprintUploadState.FAILED_UPLOADING<line_sep>blueprint.error=str(e)<line_sep>sm.update(blueprint)<line_sep>self.cleanup_blueprint_archive_from_file_server(blueprint_id blueprint.tenant.name)<line_sep><raise><block_end><finally_stmt><block_start>remove(archive_target_path)<block_end><block_end><def_stmt>extract_blueprint_archive_to_file_server self blueprint_id tenant<block_start>sm=get_resource_manager().sm<line_sep>file_server_root=config.instance.file_server_root<line_sep>local_path=os.path.join(config.instance.file_server_root FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER tenant blueprint_id)<for_stmt>arc_type SUPPORTED_ARCHIVE_TYPES# attempting to find the archive file on the file system
<block_start>local_file_path=os.path.join(local_path '{0}.{1}'.format(blueprint_id arc_type))<if_stmt>os.path.isfile(local_file_path)<block_start><break><block_end><block_end><else_stmt><block_start>error_msg="Could not find blueprint's archive; "<concat>"Blueprint ID: {0}".format(blueprint_id)<line_sep>blueprint=sm.get(Blueprint blueprint_id)<line_sep>blueprint.state=BlueprintUploadState.FAILED_EXTRACTING_TO_FILE_SERVER<line_sep>blueprint.error=error_msg<line_sep>sm.update(blueprint)<line_sep><raise>manager_exceptions.NotFoundError(error_msg)<block_end><try_stmt><block_start>app_dir=self._extract_file_to_file_server(local_file_path file_server_root)<block_end><except_stmt>Exception<as>e<block_start>blueprint=sm.get(Blueprint blueprint_id)<line_sep>blueprint.state=BlueprintUploadState.FAILED_EXTRACTING_TO_FILE_SERVER<line_sep>blueprint.error=str(e)<line_sep>sm.update(blueprint)<line_sep>remove(local_path)<line_sep><raise>e<block_end>tenant_dir=os.path.join(file_server_root FILE_SERVER_BLUEPRINTS_FOLDER tenant)<line_sep>mkdirs(tenant_dir)<line_sep>bp_from=os.path.join(file_server_root app_dir)<line_sep>bp_dir=os.path.join(tenant_dir blueprint_id)<try_stmt># use os.rename - bp_from is already in file_server_root, ie.
# same filesystem as the target dir
<block_start>os.rename(bp_from bp_dir)<block_end><except_stmt>OSError<as>e# eg. directory not empty
<block_start>shutil.rmtree(bp_from)<line_sep><raise>manager_exceptions.ConflictError(str(e))<block_end>self._process_plugins(file_server_root blueprint_id)<block_end>@staticmethod<def_stmt>cleanup_blueprint_archive_from_file_server blueprint_id tenant<block_start>remove(os.path.join(config.instance.file_server_root FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER tenant blueprint_id))<block_end><def_stmt>_get_kind self<block_start><return>'blueprint'<block_end><def_stmt>_get_data_url_key self<block_start><return>'blueprint_archive_url'<block_end><def_stmt>_get_target_dir_path self<block_start><return>os.path.join(FILE_SERVER_UPLOADED_BLUEPRINTS_FOLDER current_tenant.name)<block_end><def_stmt>_get_archive_type self archive_path<block_start><return>get_archive_type(archive_path)<block_end>@classmethod<def_stmt>_process_plugins cls file_server_root blueprint_id<block_start>plugins_directory=os.path.join(file_server_root FILE_SERVER_BLUEPRINTS_FOLDER current_tenant.name blueprint_id "plugins")<if_stmt><not>os.path.isdir(plugins_directory)<block_start><return><block_end>plugins=[os.path.join(plugins_directory directory)<for>directory os.listdir(plugins_directory)<if>os.path.isdir(os.path.join(plugins_directory directory))]<for_stmt>plugin_dir plugins<block_start>final_zip_name='{0}.zip'.format(os.path.basename(plugin_dir))<line_sep>target_zip_path=os.path.join(plugins_directory final_zip_name)<line_sep>cls._zip_dir(plugin_dir target_zip_path)<block_end><block_end><block_end><class_stmt>UploadedBlueprintsValidator(UploadedBlueprintsManager)<block_start><def_stmt>receive_uploaded_data self data_id=<none> **kwargs<block_start>blueprint_url=<none><line_sep># avoid clashing with existing blueprint names
blueprint_id=data_id+uuid.uuid4().hex[:16]<line_sep>args=get_args_and_verify_arguments([Argument('application_file_name' default='')])<line_sep># Handle importing blueprint through url
<if_stmt>self._get_data_url_key()<in>request.args<block_start><if_stmt>request.data<or>'Transfer-Encoding'<in>request.headers<or>'blueprint_archive'<in>request.files<block_start><raise>manager_exceptions.BadParametersError("Can pass {0} as only one of: URL via query parameters, "<concat>"request body, multi-form or "<concat>"chunked.".format(self._get_kind()))<block_end>blueprint_url=request.args[self._get_data_url_key()]<block_end>self._prepare_and_process_doc(blueprint_id blueprint_url application_file_name=args.application_file_name)<line_sep><return>"" 204<block_end><def_stmt>_prepare_and_process_doc self data_id blueprint_url application_file_name# Put a temporary blueprint entry in DB
<block_start>rm=get_resource_manager()<line_sep>now=get_formatted_timestamp()<line_sep>temp_blueprint=rm.sm.put(Blueprint(plan=<none> id=data_id description=<none> created_at=now updated_at=now main_file_name=<none> visibility=<none> state=BlueprintUploadState.VALIDATING))<if_stmt><not>blueprint_url<block_start>self.upload_archive_to_file_server(data_id)<block_end><try_stmt><block_start>temp_blueprint.upload_execution,messages=rm.upload_blueprint(data_id application_file_name blueprint_url config.instance.file_server_root # for the import resolver
validate_only=<true> )<line_sep>workflow_executor.execute_workflow(messages)<block_end><except_stmt>manager_exceptions.ExistingRunningExecutionError<block_start>rm.sm.delete(temp_blueprint)<line_sep>self.cleanup_blueprint_archive_from_file_server(data_id current_tenant.name)<line_sep><raise><block_end><block_end><block_end><class_stmt>UploadedPluginsManager(UploadedDataManager)<block_start><def_stmt>_get_kind self<block_start><return>'plugin'<block_end><def_stmt>_get_data_url_key self<block_start><return>'plugin_archive_url'<block_end><def_stmt>_get_target_dir_path self<block_start><return>FILE_SERVER_PLUGINS_FOLDER<block_end><def_stmt>_get_archive_type self archive_path<block_start><return>'tar.gz'<block_end><def_stmt>_prepare_and_process_doc self data_id file_server_root archive_target_path **kwargs# support previous implementation
<block_start>wagon_target_path=archive_target_path<line_sep># handle the archive_target_path, which may be zip or wagon
<if_stmt><not>self._is_wagon_file(archive_target_path)<block_start><if_stmt><not>zipfile.is_zipfile(archive_target_path)<block_start><raise>manager_exceptions.InvalidPluginError('input can be only a wagon or a zip file.')<block_end>archive_name=unzip(archive_target_path logger=current_app.logger)<line_sep>os.remove(archive_target_path)<line_sep>shutil.move(archive_name archive_target_path)<try_stmt><block_start>wagon_target_path,_=self._verify_archive(archive_target_path)<block_end><except_stmt>RuntimeError<as>re<block_start><raise>manager_exceptions.InvalidPluginError(str(re))<block_end><block_end>args=get_args_and_verify_arguments([Argument('title') Argument('private_resource' type=boolean) Argument('visibility')])<line_sep>visibility=kwargs.get(_VISIBILITY <none>)<line_sep>new_plugin=self._create_plugin_from_archive(data_id args.title wagon_target_path args.private_resource visibility)<line_sep>filter_by_name={'package_name':new_plugin.package_name}<line_sep>sm=get_resource_manager().sm<line_sep>plugins=sm.list(Plugin filters=filter_by_name)<for_stmt>plugin plugins<block_start><if_stmt>plugin.archive_name<eq>new_plugin.archive_name<block_start><raise>manager_exceptions.ConflictError('a plugin archive by the name of {archive_name} already '<concat>'exists for package with name {package_name} and version '<concat>'{version}'.format(archive_name=new_plugin.archive_name package_name=new_plugin.package_name version=new_plugin.package_version))<block_end><block_end>dest_path=new_plugin.archive_name<line_sep>sm.put(new_plugin)<line_sep><return>new_plugin dest_path<block_end><def_stmt>_is_wagon_file self file_path<block_start><try_stmt><block_start>self._load_plugin_package_json(file_path)<block_end><except_stmt>Exception<block_start><return><false><block_end><else_stmt><block_start><return><true><block_end><block_end>@staticmethod<def_stmt>_verify_archive archive_path<block_start>wagons=files_in_folder(archive_path '*.wgn')<line_sep>yamls=files_in_folder(archive_path '*.yaml')<if_stmt>len(wagons)<ne>1<or>len(yamls)<ne>1<block_start><raise>RuntimeError("Archive must include one wgn file "<concat>"and one yaml file")<block_end><return>wagons[0] yamls[0]<block_end><def_stmt>_create_plugin_from_archive self plugin_id plugin_title archive_path private_resource visibility<block_start>plugin=self._load_plugin_package_json(archive_path)<line_sep>build_props=plugin.get('build_server_os_properties')<line_sep>plugin_info={'package_name':plugin.get('package_name') 'archive_name':plugin.get('archive_name')}<line_sep>resource_manager=get_resource_manager()<line_sep>visibility=resource_manager.get_resource_visibility(Plugin plugin_id visibility private_resource plugin_info)<line_sep><return>Plugin(id=plugin_id title=plugin_title<or>plugin.get('package_name') package_name=plugin.get('package_name') package_version=plugin.get('package_version') archive_name=plugin.get('archive_name') package_source=plugin.get('package_source') supported_platform=plugin.get('supported_platform') distribution=build_props.get('distribution') distribution_version=build_props.get('distribution_version') distribution_release=build_props.get('distribution_release') wheels=plugin.get('wheels') excluded_wheels=plugin.get('excluded_wheels') supported_py_versions=plugin.get('supported_python_versions') uploaded_at=get_formatted_timestamp() visibility=visibility)<block_end>@staticmethod<def_stmt>_load_plugin_package_json wagon_source# Disable validation for now - seems to break in certain
# circumstances.
# if wagon.validate(wagon_source):
# # wagon returns a list of validation issues.
# raise manager_exceptions.InvalidPluginError(
# 'the provided wagon can not be read.')
<block_start><try_stmt><block_start><return>wagon.show(wagon_source)<block_end><except_stmt>wagon.WagonError<as>e<block_start><raise>manager_exceptions.InvalidPluginError('The provided wagon archive can not be read.\n{0}'.format(str(e)))<block_end><block_end><block_end><class_stmt>UploadedCaravanManager(UploadedPluginsManager)<block_start><class_stmt>InvalidCaravanException(Exception)<block_start><pass><block_end><class_stmt>Caravan(object)<block_start><def_stmt>__init__ self caravan_path<block_start>self._caravan_path=caravan_path<line_sep>self._tempdir=tempfile.mkdtemp()<line_sep>self._cvn_dir=<none><line_sep>self._metadata=<none><block_end><def_stmt>__enter__ self<block_start><return>self<block_end><def_stmt>__exit__ self *_<block_start>remove(self._tempdir)<block_end><def_stmt>init_metadata self<block_start>self._cvn_dir=self._extract(self._caravan_path self._tempdir)<line_sep>self._metadata=self._get_metadata(self._cvn_dir)<block_end>@property<def_stmt>root_dir self<block_start><return>self._cvn_dir<block_end>@staticmethod<def_stmt>_get_metadata path<block_start><try_stmt><block_start><with_stmt>open(os.path.join(path 'METADATA'))<as>metadata_file<block_start>metadata=yaml.load(metadata_file)<block_end><block_end><except_stmt>Exception<block_start><raise>UploadedCaravanManager.InvalidCaravanException('Failed to get caravan metadata')<block_end><return>metadata<block_end>@property<def_stmt>metadata self<block_start><return>self._metadata<block_end><def_stmt>__iter__ self<block_start><for_stmt>wgn_path,yaml_path self._metadata.items()<block_start><yield>os.path.join(self._cvn_dir wgn_path) os.path.join(self._cvn_dir yaml_path)<block_end><block_end><def_stmt>__getitem__ self item<block_start><return>os.path.join(self._cvn_dir self._metadata[item])<block_end>@staticmethod<def_stmt>_extract src dest<block_start><try_stmt><block_start>tarfile_=tarfile.open(name=src)<block_end><except_stmt>tarfile.ReadError<block_start><raise>UploadedCaravanManager.InvalidCaravanException('Failed to load caravan file')<block_end><try_stmt># Get the top level dir
<block_start>root_dir=tarfile_.getmembers()[0]<line_sep>tarfile_.extractall(path=dest members=tarfile_.getmembers())<block_end><finally_stmt><block_start>tarfile_.close()<block_end><return>os.path.join(dest root_dir.path)<block_end><block_end><def_stmt>_get_kind self<block_start><return>'caravan'<block_end><def_stmt>receive_uploaded_data self data_id=<none> **kwargs<block_start>file_server_root=config.instance.file_server_root<line_sep>resource_target_path=tempfile.mktemp(dir=file_server_root)<try_stmt><block_start>self._save_file_locally_and_extract_inputs(resource_target_path self._get_data_url_key() self._get_kind())<with_stmt>self.Caravan(resource_target_path)<as>caravan_instance<block_start>caravan_instance.init_metadata()<line_sep>plugins=self._prepare_and_process_doc(file_server_root resource_target_path caravan_instance=caravan_instance **kwargs)<line_sep>docs=[]<for_stmt>doc,plugin_dir plugins<block_start>self._move_archive_to_uploaded_dir(doc.id file_server_root plugin_dir )<line_sep>docs.append(doc)<block_end><block_end><return>docs 201<block_end><finally_stmt><block_start>remove(resource_target_path)<block_end><block_end><def_stmt>_prepare_and_process_doc self file_server_root archive_target_path **kwargs<block_start>plugins=[]<line_sep>caravan_=kwargs['caravan_instance']<for_stmt>wgn_path,_ caravan_<block_start>files_dir=os.path.dirname(wgn_path)<line_sep>archive_path=shutil.make_archive(os.path.join(caravan_.root_dir os.path.basename(files_dir)) 'zip' files_dir)<try_stmt><block_start>new_plugin,_=super(UploadedCaravanManager self)._prepare_and_process_doc(str(uuid.uuid4()) file_server_root archive_path **kwargs)<line_sep>plugins.append((new_plugin files_dir))<block_end><except_stmt>manager_exceptions.ConflictError<block_start><pass><block_end><block_end><return>plugins<block_end><block_end>
|
# Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Learning rate policies."""<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_from_future_stmt> unicode_literals<import_stmt>numpy<as>np<import_from_stmt>detectron.core.config cfg<def_stmt>get_lr_at_iter it<block_start>"""Get the learning rate at iteration it according to the cfg.SOLVER
settings.
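Illustrative warm-up behaviour (a sketch with hypothetical values, assuming
WARM_UP_METHOD = 'linear' and a base policy that returns 0.02 at these iterations):
WARM_UP_ITERS: 500, WARM_UP_FACTOR: 0.1
it = 0 -> lr = 0.02 * 0.1 = 0.002
it = 250 -> lr = 0.02 * (0.1 * 0.5 + 0.5) = 0.011
it = 500 -> lr = 0.02 (warm-up finished)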
"""<line_sep>lr=get_lr_func()(it)<if_stmt>it<l>cfg.SOLVER.WARM_UP_ITERS<block_start>method=cfg.SOLVER.WARM_UP_METHOD<if_stmt>method<eq>'constant'<block_start>warmup_factor=cfg.SOLVER.WARM_UP_FACTOR<block_end><elif_stmt>method<eq>'linear'<block_start>alpha=it/cfg.SOLVER.WARM_UP_ITERS<line_sep>warmup_factor=cfg.SOLVER.WARM_UP_FACTOR<times>(1-alpha)+alpha<block_end><else_stmt><block_start><raise>KeyError('Unknown SOLVER.WARM_UP_METHOD: {}'.format(method))<block_end>lr<augmul>warmup_factor<block_end><return>np.float32(lr)<block_end># ---------------------------------------------------------------------------- #
# Learning rate policy functions
# ---------------------------------------------------------------------------- #
<def_stmt>lr_func_steps_with_lrs cur_iter<block_start>"""For cfg.SOLVER.LR_POLICY = 'steps_with_lrs'
Change the learning rate to specified values at specified iterations.
Example:
cfg.SOLVER.MAX_ITER: 90
cfg.SOLVER.STEPS: [0, 60, 80]
cfg.SOLVER.LRS: [0.02, 0.002, 0.0002]
for cur_iter in [0, 59] use 0.02
in [60, 79] use 0.002
in [80, inf] use 0.0002
"""<line_sep>ind=get_step_index(cur_iter)<line_sep><return>cfg.SOLVER.LRS[ind]<block_end><def_stmt>lr_func_steps_with_decay cur_iter<block_start>"""For cfg.SOLVER.LR_POLICY = 'steps_with_decay'
Change the learning rate specified iterations based on the formula
lr = base_lr * gamma ** lr_step_count.
Example:
cfg.SOLVER.MAX_ITER: 90
cfg.SOLVER.STEPS: [0, 60, 80]
cfg.SOLVER.BASE_LR: 0.02
cfg.SOLVER.GAMMA: 0.1
for cur_iter in [0, 59] use 0.02 = 0.02 * 0.1 ** 0
in [60, 79] use 0.002 = 0.02 * 0.1 ** 1
in [80, inf] use 0.0002 = 0.02 * 0.1 ** 2
"""<line_sep>ind=get_step_index(cur_iter)<line_sep><return>cfg.SOLVER.BASE_LR<times>cfg.SOLVER.GAMMA<power>ind<block_end><def_stmt>lr_func_step cur_iter<block_start>"""For cfg.SOLVER.LR_POLICY = 'step'
"""<line_sep><return>(cfg.SOLVER.BASE_LR<times>cfg.SOLVER.GAMMA<power>(cur_iter<floordiv>cfg.SOLVER.STEP_SIZE))<block_end># ---------------------------------------------------------------------------- #
# Helpers
# ---------------------------------------------------------------------------- #
<def_stmt>get_step_index cur_iter<block_start>"""Given an iteration, find which learning rate step we're at."""<assert_stmt>cfg.SOLVER.STEPS[0]<eq>0 'The first step should always start at 0.'<line_sep>steps=cfg.SOLVER.STEPS+[cfg.SOLVER.MAX_ITER]<for_stmt>ind,step enumerate(steps)# NoQA
<block_start><if_stmt>cur_iter<l>step<block_start><break><block_end><block_end><return>ind-1<block_end><def_stmt>get_lr_func <block_start>policy='lr_func_'+cfg.SOLVER.LR_POLICY<if_stmt>policy<not><in>globals()<block_start><raise>NotImplementedError('Unknown LR policy: {}'.format(cfg.SOLVER.LR_POLICY))<block_end><else_stmt><block_start><return>globals()[policy]<block_end><block_end>
|
<import_stmt>json<import_stmt>os<import_from_stmt>pathlib Path<line_sep>jhkaggle_config={}<def_stmt>load_config profile filename=<none><block_start><global>jhkaggle_config<if_stmt><not>filename<block_start>home=str(Path.home())<line_sep>filename=os.path.join(home ".jhkaggleConfig.json")<if_stmt><not>os.path.isfile(filename)<block_start><raise>Exception(f"If no 'filename' paramater specifed, assume '.jhkaggleConfig.json' exists at HOME: {home}")<block_end><block_end><with_stmt>open(filename)<as>f<block_start>data=json.load(f)<if_stmt>profile<not><in>data<block_start><raise>Exception(f"Undefined profile '{profile}' in file '{filename}'")<block_end>jhkaggle_config=data[profile]<block_end><block_end>
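# Illustrative usage (a sketch; the profile name and config path are hypothetical):
# load_config("my-profile")  # reads ~/.jhkaggleConfig.json and selects that profile
# load_config("my-profile", "/path/to/config.json")  # or read an explicit file
# print(jhkaggle_config)  # the selected profile's settings as a dict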
|
<import_stmt>warnings<import_from_stmt>collections OrderedDict defaultdict<import_stmt>numpy<as>np<import_from_stmt>sklearn.utils.multiclass type_of_target<import_from_stmt>sklearn.utils.validation column_or_1d<import_stmt>torch<line_sep>TORCH_MAJOR=int(torch.__version__.split('.')[0])<line_sep>TORCH_MINOR=int(torch.__version__.split('.')[1])<if_stmt>TORCH_MAJOR<eq>1<and>TORCH_MINOR<l>8<block_start><import_from_stmt>torch._six container_abcs<block_end><else_stmt><block_start><import_stmt>collections.abc<as>container_abcs<block_end>collate_with_pre_batching_err_msg_format=("collate_with_pre_batched_map: "<concat>"batch must be a list with one map element; found {}")<def_stmt>collate_with_pre_batching batch<block_start>r"""
Collate function used by our PyTorch dataloader (in both distributed and
serial settings).
We avoid adding a batch dimension, as for NPT we have pre-batched data,
where each element of the dataset is a map.
:arg batch: List[Dict] (not as general as the default collate fn)
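Illustrative usage (a sketch; `dataset` is assumed to yield pre-batched dicts):
loader = torch.utils.data.DataLoader(
    dataset, batch_size=1, collate_fn=collate_with_pre_batching)
batch = next(iter(loader))  # the dict itself, no extra batch dimension added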
"""<if_stmt>len(batch)<g>1<block_start><raise>NotImplementedError<block_end>elem=batch[0]<line_sep>elem_type=type(elem)<if_stmt>isinstance(elem container_abcs.Mapping)<block_start><return>elem<block_end># Just return the dict, as there will only be one in NPT
<raise>TypeError(collate_with_pre_batching_err_msg_format.format(elem_type))<block_end># TODO: batching over features?
<class_stmt>StratifiedIndexSampler<block_start><def_stmt>__init__ self y n_splits shuffle=<true> label_col=<none> train_indices=<none><block_start>self.y=y<line_sep>self.n_splits=n_splits<line_sep>self.shuffle=shuffle<line_sep>self.label_col=label_col<line_sep>self.train_indices=train_indices<if_stmt>label_col<is><not><none><and>train_indices<is><not><none><block_start>self.stratify_class_labels=<true><line_sep>print('Stratifying train rows in each batch on the class label.')<block_end><else_stmt><block_start>self.stratify_class_labels=<false><block_end><block_end><def_stmt>_make_test_folds self labels<block_start>"""
Slight alterations from sklearn (StratifiedKFold)
"""<line_sep>y,n_splits,shuffle=labels self.n_splits self.shuffle<line_sep>y=np.asarray(y)<line_sep>type_of_target_y=type_of_target(y)<line_sep>allowed_target_types=('binary' 'multiclass')<if_stmt>type_of_target_y<not><in>allowed_target_types<block_start><raise>ValueError('Supported target types are: {}. Got {!r} instead.'.format(allowed_target_types type_of_target_y))<block_end>y=column_or_1d(y)<line_sep>_,y_idx,y_inv=np.unique(y return_index=<true> return_inverse=<true>)<line_sep># y_inv encodes y according to lexicographic order. We invert y_idx to
# map the classes so that they are encoded by order of appearance:
# 0 represents the first label appearing in y, 1 the second, etc.
_,class_perm=np.unique(y_idx return_inverse=<true>)<line_sep>y_encoded=class_perm[y_inv]<line_sep>n_classes=len(y_idx)<line_sep>y_counts=np.bincount(y_encoded)<line_sep>min_groups=np.min(y_counts)<if_stmt>np.all(n_splits<g>y_counts)<block_start><raise>ValueError("n_splits=%d cannot be greater than the"<concat>" number of members in each class."%(n_splits))<block_end><if_stmt>n_splits<g>min_groups<block_start>warnings.warn(("The least populated class in y has only %d"<concat>" members, which is less than n_splits=%d."%(min_groups n_splits)) UserWarning)<block_end># Determine the optimal number of samples from each class in each fold,
# using round robin over the sorted y. (This can be done direct from
# counts, but that code is unreadable.)
y_order=np.sort(y_encoded)<line_sep>allocation=np.asarray([np.bincount(y_order[i::n_splits] minlength=n_classes)<for>i range(n_splits)])<line_sep># To maintain the data order dependencies as best as possible within
# the stratification constraint, we assign samples from each class in
# blocks (and then mess that up when shuffle=True).
test_folds=np.empty(len(y) dtype='i')<for_stmt>k range(n_classes)# since the kth column of allocation stores the number of samples
# of class k in each test set, this generates blocks of fold
# indices corresponding to the allocation for class k.
<block_start>folds_for_class=np.arange(n_splits).repeat(allocation[: k])<if_stmt>shuffle<block_start>np.random.shuffle(folds_for_class)<block_end>test_folds[y_encoded<eq>k]=folds_for_class<block_end><return>test_folds<block_end><def_stmt>get_stratified_test_array self X<block_start>"""
Based on sklearn function StratifiedKFold._iter_test_masks.
"""<if_stmt>self.stratify_class_labels<block_start><return>self.get_train_label_stratified_test_array(X)<block_end>test_folds=self._make_test_folds(self.y)<line_sep># Inefficient for huge arrays, particularly when we need to materialize
# the index order.
# for i in range(n_splits):
# yield test_folds == i
batch_index_to_row_indices=OrderedDict()<line_sep>batch_index_to_row_index_count=defaultdict(int)<for_stmt>row_index,batch_index enumerate(test_folds)<block_start><if_stmt>batch_index<not><in>batch_index_to_row_indices.keys()<block_start>batch_index_to_row_indices[batch_index]=[row_index]<block_end><else_stmt><block_start>batch_index_to_row_indices[batch_index].append(row_index)<block_end>batch_index_to_row_index_count[batch_index]<augadd>1<block_end># Keep track of the batch sizes for each batch -- this can vary
# towards the end of the epoch, and will not be precisely what the
# user specified. Doesn't matter because the model is equivariant
# w.r.t. rows.
batch_sizes=[]<for_stmt>batch_index batch_index_to_row_indices.keys()<block_start>batch_sizes.append(batch_index_to_row_index_count[batch_index])<block_end><return>(X[np.concatenate(list(batch_index_to_row_indices.values()))] batch_sizes)<block_end><def_stmt>get_train_label_stratified_test_array self X<block_start>train_class_folds=self._make_test_folds(self.label_col[self.train_indices])<line_sep># Mapping from the size of a stratified batch of training rows
# to the index of the batch.
train_batch_size_to_train_batch_indices=defaultdict(list)<line_sep># Mapping from a train batch index to all of the actual train indices
train_batch_index_to_train_row_indices=OrderedDict()<for_stmt>train_row_index,train_batch_index enumerate(train_class_folds)<block_start><if_stmt>(train_batch_index<not><in>train_batch_index_to_train_row_indices.keys())<block_start>train_batch_index_to_train_row_indices[train_batch_index]=[train_row_index]<block_end><else_stmt><block_start>train_batch_index_to_train_row_indices[train_batch_index].append(train_row_index)<block_end><block_end><for_stmt>train_batch_index,train_row_indices (train_batch_index_to_train_row_indices.items())<block_start>train_batch_size_to_train_batch_indices[len(train_row_indices)].append(train_batch_index)<block_end>test_folds=self._make_test_folds(self.y)<line_sep># Mapping our actual batch indices to the val and test rows which
# have been successfully assigned
batch_index_to_val_test_row_indices=OrderedDict()<line_sep># Mapping our actual batch indices to the total number of row indices
# in each batch. We will have to assign the stratified train batches
# to fulfill this constraint.
batch_index_to_row_index_count=defaultdict(int)<line_sep># Mapping our actual batch indices to how many train spots are
# "vacant" in each batch. These we will fill with our stratified
# train batches.
batch_index_to_train_row_index_count=defaultdict(int)<for_stmt>row_index,(batch_index dataset_mode) enumerate(zip(test_folds self.y))<block_start>batch_index_to_row_index_count[batch_index]<augadd>1<if_stmt>dataset_mode<eq>0# Train
<block_start>batch_index_to_train_row_index_count[batch_index]<augadd>1<block_end><else_stmt><block_start><if_stmt>batch_index<not><in>(batch_index_to_val_test_row_indices.keys())<block_start>batch_index_to_val_test_row_indices[batch_index]=[row_index]<block_end><else_stmt><block_start>batch_index_to_val_test_row_indices[batch_index].append(row_index)<block_end><block_end><block_end># For all of our actual batches, let's find a suitable batch
# of stratified training data for us to use.
<for_stmt>batch_index,train_row_index_count batch_index_to_train_row_index_count.items()<block_start><try_stmt><block_start>train_batch_index=(train_batch_size_to_train_batch_indices[train_row_index_count].pop())<block_end><except_stmt>Exception<as>e<block_start><raise>e<block_end>batch_index_to_val_test_row_indices[batch_index]<augadd>(train_batch_index_to_train_row_indices[train_batch_index])<block_end><for_stmt>train_batch_arr train_batch_size_to_train_batch_indices.values()<block_start><if_stmt>len(train_batch_arr)<ne>0<block_start><raise>Exception<block_end><block_end>batch_sizes=[]<for_stmt>batch_index batch_index_to_val_test_row_indices.keys()<block_start>batch_sizes.append(batch_index_to_row_index_count[batch_index])<block_end>batch_order_sorted_row_indices=X[np.concatenate(list(batch_index_to_val_test_row_indices.values()))]<assert_stmt>(len(set(batch_order_sorted_row_indices))<eq>len(batch_order_sorted_row_indices))<line_sep><return>batch_order_sorted_row_indices batch_sizes<block_end><block_end>
|
"""chunk module"""<import_from_stmt>._loader chunk_loader synchronous_loading wait_for_async<import_from_stmt>._request ChunkLocation ChunkRequest LayerRef OctreeLocation<line_sep>__all__=['ChunkLocation' 'OctreeLocation' 'ChunkRequest' 'LayerRef' 'chunk_loader' 'wait_for_async' 'synchronous_loading' ]<line_sep>
|
#coding:utf-8
<import_from_future_stmt> unicode_literals<import_stmt>os<import_stmt>shutil<import_from_stmt>six.moves http_client urllib<import_from_stmt>cactus.site Site<import_from_stmt>cactus.plugin.manager PluginManager<import_from_stmt>cactus.utils.helpers CaseInsensitiveDict<import_from_stmt>cactus.utils.parallel PARALLEL_DISABLED<import_from_stmt>cactus.tests BaseBootstrappedTestCase<class_stmt>DummyPluginManager(PluginManager)<block_start>"""
Doesn't do anything
"""<def_stmt>call self method *args **kwargs<block_start>"""
Trap the call
"""<line_sep><pass><block_end><block_end><class_stmt>IntegrationTestCase(BaseBootstrappedTestCase)<block_start><def_stmt>setUp self<block_start>super(IntegrationTestCase self).setUp()<line_sep>self.site=Site(self.path PluginManagerClass=DummyPluginManager DeploymentEngineClass=self.get_deployment_engine_class())<line_sep>self.site._parallel=PARALLEL_DISABLED<line_sep>self.site.config.set('site-url' 'http://example.com/')<line_sep># Clean up the site paths
<for_stmt>path (self.site.page_path self.site.static_path)<block_start>shutil.rmtree(path)<line_sep>os.mkdir(path)<block_end><block_end><def_stmt>get_deployment_engine_class self<block_start>"""
Should return a deployment engine in tests.
"""<line_sep><pass><block_end><block_end><class_stmt>BaseTestHTTPConnection(object)<block_start>last_request=<none><def_stmt>__init__ self host *args **kwargs<block_start>self.host=host<line_sep>self.requests=[]<block_end><def_stmt>connect self<block_start><pass><block_end><def_stmt>close self<block_start><pass><block_end><def_stmt>request self method url body=b'' headers=<none><block_start>"""
Send a full request at once
"""<if_stmt>headers<is><none><block_start>headers={}<block_end>self.last_request=TestHTTPRequest(self method url body headers)<block_end><def_stmt>putrequest self method url *args **kwargs<block_start>"""
Create a new request, but add more things to it later
"""<line_sep>self.current_request=TestHTTPRequest(self method url b'' {})<line_sep>self.current_request.state="headers"<block_end><def_stmt>putheader self header value<block_start>"""
Add a header to a request that's in progress
"""<line_sep>self.current_request.headers[header]=value<block_end><def_stmt>endheaders self data=<none><block_start>"""
End the headers of a request that's in progress
"""<line_sep>self.current_request.state="body"<line_sep>self.last_request=self.current_request<if_stmt>data<is><not><none><block_start>self.send(data)<block_end><block_end><def_stmt>send self data<block_start>"""
Add data to a request that's in progress
"""<line_sep>self.current_request.body<augadd>data<block_end><def_stmt>getresponse self<block_start>request=self.last_request<line_sep>self.requests.append(request)<line_sep><return>self.handle_request(request)<block_end><def_stmt>handle_request self request<block_start>"""
:param request: The request to handle
"""<line_sep><raise>NotImplementedError("handle_request should be implemented by subclasses")<block_end><def_stmt>set_debuglevel self level<block_start><pass><block_end><block_end><class_stmt>DebugHTTPSConnectionFactory(object)<block_start><def_stmt>__init__ self conn_cls<block_start>self.conn_cls=conn_cls<line_sep>self.connections=[]<block_end>@property<def_stmt>requests self<block_start>"""
:returns: A list of all requests made through this connection factory's connections
"""<line_sep>out=[]<for_stmt>connection self.connections<block_start>out.extend(connection.requests)<block_end><return>out<block_end><def_stmt>__call__ self *args **kwargs<block_start>"""
Create a new connection from our connection class
"""<line_sep>connection=self.conn_cls(*args **kwargs)<line_sep>self.connections.append(connection)<line_sep><return>connection<block_end><block_end><class_stmt>TestHTTPRequest(object)<block_start>state=<none><def_stmt>__init__ self connection method url body headers<block_start>self.connection=connection<line_sep>self.method=method<line_sep>self.url=url<line_sep>self.body=body<line_sep>self.headers=CaseInsensitiveDict(headers)<line_sep>u=urllib.parse.urlparse(url)<line_sep>self.path=u.path<line_sep>self.params=urllib.parse.parse_qs(u.query keep_blank_values=<true>)<block_end><block_end><class_stmt>TestHTTPResponse(object)<block_start><def_stmt>__init__ self status reason=<none> headers=<none> body=''<block_start><if_stmt>reason<is><none><block_start>reason=http_client.responses[status]<block_end><if_stmt>headers<is><none><block_start>headers={}<block_end>self.status=status<line_sep>self.reason=reason<line_sep>self.headers=CaseInsensitiveDict(headers)<line_sep>self.body=body<block_end><def_stmt>getheader self header default=<none><block_start><return>self.headers.get(header default)<block_end><def_stmt>getheaders self<block_start><return>self.headers<block_end><def_stmt>read self<block_start><return>self.body<block_end><block_end>
|
# Copyright 2019 <NAME> and collaborators.
# This program is distributed under the MIT license.
<import_stmt>tempfile<import_stmt>shutil<import_stmt>io<import_stmt>sys<import_from_stmt>. pathlib<import_from_stmt>. contextlib<line_sep>@contextlib.contextmanager<def_stmt>BlankContextManager <block_start><yield><block_end>@contextlib.contextmanager<def_stmt>create_temp_folder prefix=tempfile.template suffix='' parent_folder=<none> chmod=<none><block_start>'''
Context manager that creates a temporary folder and deletes it after usage.
After the suite finishes, the temporary folder and all its files and
subfolders will be deleted.
Example:
with create_temp_folder() as temp_folder:
# We have a temporary folder!
assert temp_folder.is_dir()
# We can create files in it:
(temp_folder / 'my_file').open('w')
# The suite is finished, now it's all cleaned:
assert not temp_folder.exists()
Use the `prefix` and `suffix` string arguments to dictate a prefix and/or a
suffix to the temporary folder's name in the filesystem.
If you'd like to set the permissions of the temporary folder, pass them to
the optional `chmod` argument, like this:
create_temp_folder(chmod=0o550)
'''<line_sep>temp_folder=pathlib.Path(tempfile.mkdtemp(prefix=prefix suffix=suffix dir=parent_folder))<try_stmt><block_start><if_stmt>chmod<is><not><none><block_start>temp_folder.chmod(chmod)<block_end><yield>temp_folder<block_end><finally_stmt><block_start>shutil.rmtree(str(temp_folder))<block_end><block_end><class_stmt>NotInDict<block_start>'''Object signifying that the key was not found in the dict.'''<block_end><class_stmt>TempValueSetter(object)<block_start>'''
Context manager for temporarily setting a value to a variable.
The value is set to the variable before the suite starts, and gets reset
back to the old value after the suite finishes.
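Example (a minimal sketch; `some_module` and its `SETTING` attribute are hypothetical):
with TempValueSetter((some_module, 'SETTING'), 'temporary value'):
assert some_module.SETTING == 'temporary value'
# After the suite, some_module.SETTING is back to its old value.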
'''<def_stmt>__init__ self variable value assert_no_fiddling=<true><block_start>'''
Construct the `TempValueSetter`.
`variable` may be either an `(object, attribute_string)` pair, a
`(dict, key)` pair, or a `(getter, setter)` pair.
`value` is the temporary value to set to the variable.
'''<line_sep>self.assert_no_fiddling=assert_no_fiddling<line_sep>#######################################################################
# We let the user input either an `(object, attribute_string)` pair, a
# `(dict, key)` pair, or a `(getter, setter)` pair. So now it's our job
# to inspect `variable` and figure out which one of these options the
# user chose, and then obtain from that a `(getter, setter)` pair that
# we could use.
bad_input_exception=Exception('`variable` must be either an `(object, attribute_string)` pair, '<concat>'a `(dict, key)` pair, or a `(getter, setter)` pair.')<try_stmt><block_start>first,second=variable<block_end><except_stmt>Exception<block_start><raise>bad_input_exception<block_end><if_stmt>hasattr(first '__getitem__')<and>hasattr(first 'get')<and>hasattr(first '__setitem__')<and>hasattr(first '__delitem__')# `first` is a dictoid; so we were probably handed a `(dict, key)`
# pair.
<block_start>self.getter=<lambda>:first.get(second NotInDict)<line_sep>self.setter=<lambda>value:(first.__setitem__(second value)<if>value<is><not>NotInDict<else>first.__delitem__(second))<line_sep>### Finished handling the `(dict, key)` case. ###
<block_end><elif_stmt>callable(second)# `second` is a callable; so we were probably handed a `(getter,
# setter)` pair.
<block_start><if_stmt><not>callable(first)<block_start><raise>bad_input_exception<block_end>self.getter,self.setter=first second<line_sep>### Finished handling the `(getter, setter)` case. ###
<block_end><else_stmt># All that's left is the `(object, attribute_string)` case.
<block_start><if_stmt><not>isinstance(second str)<block_start><raise>bad_input_exception<block_end>parent,attribute_name=first second<line_sep>self.getter=<lambda>:getattr(parent attribute_name)<line_sep>self.setter=<lambda>value:setattr(parent attribute_name value)<line_sep>### Finished handling the `(object, attribute_string)` case. ###
<block_end>#
#
### Finished obtaining a `(getter, setter)` pair from `variable`. #####
self.getter=self.getter<line_sep>'''Getter for getting the current value of the variable.'''<line_sep>self.setter=self.setter<line_sep>'''Setter for setting the variable's value.'''<line_sep>self.value=value<line_sep>'''The value to temporarily set to the variable.'''<line_sep>self.active=<false><block_end><def_stmt>__enter__ self<block_start>self.active=<true><line_sep>self.old_value=self.getter()<line_sep>'''The old value of the variable, before entering the suite.'''<line_sep>self.setter(self.value)<line_sep># In `__exit__` we'll want to check if anyone changed the value of the
# variable in the suite, which is unallowed. But we can't compare to
# `.value`, because sometimes when you set a value to a variable, some
# mechanism modifies that value for various reasons, resulting in a
# supposedly equivalent, but not identical, value. For example this
# happens when you set the current working directory on Mac OS.
#
# So here we record the value right after setting, and after any
# possible processing the system did to it:
self._value_right_after_setting=self.getter()<line_sep><return>self<block_end><def_stmt>__exit__ self exc_type exc_value exc_traceback<block_start><if_stmt>self.assert_no_fiddling# Asserting no-one inside the suite changed our variable:
<block_start><assert_stmt>self.getter()<eq>self._value_right_after_setting<block_end>self.setter(self.old_value)<line_sep>self.active=<false><block_end><block_end><class_stmt>OutputCapturer(object)<block_start>'''
Context manager for catching all system output generated during the suite.
Example:
with OutputCapturer() as output_capturer:
print('woo!')
assert output_capturer.output == 'woo!\n'
The boolean arguments `stdout` and `stderr` determine, respectively,
whether the standard-output and the standard-error streams will be
captured.
'''<def_stmt>__init__ self stdout=<true> stderr=<true><block_start>self.string_io=io.StringIO()<if_stmt>stdout<block_start>self._stdout_temp_setter=TempValueSetter((sys 'stdout') self.string_io)<block_end><else_stmt># not stdout
<block_start>self._stdout_temp_setter=BlankContextManager()<block_end><if_stmt>stderr<block_start>self._stderr_temp_setter=TempValueSetter((sys 'stderr') self.string_io)<block_end><else_stmt># not stderr
<block_start>self._stderr_temp_setter=BlankContextManager()<block_end><block_end><def_stmt>__enter__ self<block_start>'''Manage the `OutputCapturer`'s context.'''<line_sep>self._stdout_temp_setter.__enter__()<line_sep>self._stderr_temp_setter.__enter__()<line_sep><return>self<block_end><def_stmt>__exit__ self exc_type exc_value exc_traceback# Not doing exception swallowing anywhere here.
<block_start>self._stderr_temp_setter.__exit__(exc_type exc_value exc_traceback)<line_sep>self._stdout_temp_setter.__exit__(exc_type exc_value exc_traceback)<block_end>output=property(<lambda>self:self.string_io.getvalue() doc='''The string of output that was captured.''')<block_end><class_stmt>TempSysPathAdder(object)<block_start>'''
Context manager for temporarily adding paths to `sys.path`.
Removes the path(s) after the suite finishes.
Example:
with TempSysPathAdder('path/to/fubar/package'):
import fubar
fubar.do_stuff()
'''<def_stmt>__init__ self addition<block_start>self.addition=[str(addition)]<block_end><def_stmt>__enter__ self<block_start>self.entries_not_in_sys_path=[entry<for>entry self.addition<if>entry<not><in>sys.path]<line_sep>sys.path<augadd>self.entries_not_in_sys_path<line_sep><return>self<block_end><def_stmt>__exit__ self *args **kwargs<block_start><for_stmt>entry self.entries_not_in_sys_path# We don't allow anyone to remove it except for us:
<block_start><assert_stmt>entry<in>sys.path<line_sep>sys.path.remove(entry)<block_end><block_end><block_end>
|
# Download the Python helper library from twilio.com/docs/python/install
<import_stmt>os<import_from_stmt>twilio.rest Client<import_from_stmt>datetime datetime<line_sep># Your Account Sid and Auth Token from twilio.com/user/account
# To set up environmental variables, see http://twil.io/secure
account_sid=os.environ['TWILIO_ACCOUNT_SID']<line_sep>auth_token=os.environ['TWILIO_AUTH_TOKEN']<line_sep>client=Client(account_sid auth_token)<line_sep>new_data={'date_updated':str(datetime.now()) 'movie_title':"On The Line" 'show_times':<none> 'starring':["Lance Bass" "<NAME>"] 'genre':"Romance"}<line_sep>document=client.sync.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").documents("MyFirstDocument").update(data=new_data)<line_sep>print(document.data)<line_sep>
|
<import_stmt>time json<import_stmt>functools logging traceback<import_from_stmt>weakref ref<line_sep>logger=logging.getLogger('itchatmp')<def_stmt>retry n=3 waitTime=3<block_start><def_stmt>_retry fn<block_start>@functools.wraps(fn)<def_stmt>__retry *args **kwargs<block_start><for_stmt>i range(n)<block_start><try_stmt><block_start><return>fn(*args **kwargs)<block_end><except_stmt>Exception<as>e<block_start>logger.debug('%s failed. Count: %s. Info: %r'%(fn.__name__ i+1 e))<if_stmt>i+1<eq>n<block_start>logger.debug('%s failed. Reach max retry'%fn.__name__)<block_end>time.sleep(waitTime)<block_end><block_end><block_end><return>__retry<block_end><return>_retry<block_end><def_stmt>encode_send_dict d<block_start><try_stmt><block_start><return>json.dumps(d).encode('utf8').decode('unicode-escape').encode('utf8')<block_end><except_stmt>(UnicodeDecodeError UnicodeEncodeError)<block_start><return><block_end><block_end><class_stmt>CoreMixin(object)<block_start><def_stmt>__init__ self core<block_start>self.core=core<block_end>@property<def_stmt>core self<block_start><return>getattr(self '_core' <lambda>:<none>)()<block_end>@core.setter<def_stmt>core self v<block_start>self._core=ref(v)<block_end><block_end>
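# Illustrative usage of the retry decorator (a sketch; `fetch_token` is hypothetical):
# @retry(n=5, waitTime=1)
# def fetch_token():
#     ...  # re-run on exception, up to 5 attempts, sleeping 1s between attempts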
|
<import_from_future_stmt> division<import_from_stmt>expan.core.experiment Experiment _choose_threshold_type<import_stmt>pandas<as>pd<import_stmt>numpy<as>np<def_stmt>test_choose_threshold_type_upper <block_start>data=np.array([1 2 3 np.nan])<assert_stmt>_choose_threshold_type(data)<eq>'upper'<block_end><def_stmt>test_choose_threshold_type_upper_zero <block_start>data=np.array([0 1 2 3 np.nan])<assert_stmt>_choose_threshold_type(data)<eq>'upper'<block_end><def_stmt>test_choose_threshold_type_lower <block_start>data=np.array([-3 -2 -1 np.nan])<assert_stmt>_choose_threshold_type(data)<eq>'lower'<block_end><def_stmt>test_choose_threshold_type_lower_zero <block_start>data=np.array([-3 -2 -1 0 np.nan])<assert_stmt>_choose_threshold_type(data)<eq>'lower'<block_end><def_stmt>test_choose_threshold_type_two_sided <block_start>data=np.array([-3 -2 -1 0 1 2 3 np.nan])<assert_stmt>_choose_threshold_type(data)<eq>'two-sided'<block_end><def_stmt>test_quantile_filtering_upper_old <block_start>exp=Experiment({})<line_sep>data=np.array([0 0 1 2])/np.array([0 0 1 1])<line_sep>df=pd.DataFrame.from_dict({'earnings':data})<line_sep>flags=exp._quantile_filtering(df ['earnings'] {'earnings':('upper' 90.0)})<assert_stmt>flags.tolist()<eq>[<false> <false> <false> <true>]<block_end><def_stmt>test_quantile_filtering_lower_old <block_start>exp=Experiment({})<line_sep>data=np.array([0 0 1 2])/np.array([0 0 1 1])<line_sep>df=pd.DataFrame.from_dict({'earnings':data})<line_sep>flags=exp._quantile_filtering(df ['earnings'] {'earnings':('lower' 10.)})<assert_stmt>flags.tolist()<eq>[<false> <false> <true> <false>]<block_end><def_stmt>test_quantile_filtering_upper <block_start>exp=Experiment({})<line_sep>data=np.array([0.0]<times>2+list(range(10)))/np.array([0.0]<times>2+[1.0]<times>10)<line_sep>df=pd.DataFrame.from_dict({'earnings':data})<line_sep>flags=exp._quantile_filtering(df ['earnings'] {'earnings':('upper' 90.0)})<assert_stmt>flags.tolist()<eq>[<false>]<times>11+[<true>]<block_end><def_stmt>test_quantile_filtering_lower <block_start>exp=Experiment({})<line_sep>data=np.array([0.0]<times>2+list(range(10)))/np.array([0.0]<times>2+[1.0]<times>10)<line_sep>df=pd.DataFrame.from_dict({'earnings':data})<line_sep>flags=exp._quantile_filtering(df ['earnings'] {'earnings':('lower' 50.0)})<line_sep>print(flags.tolist())<assert_stmt>flags.tolist()<eq>[<false>]<times>2+[<true>]<times>5+[<false>]<times>5<block_end><def_stmt>test_quantile_filtering_two_sided <block_start>exp=Experiment({})<line_sep>df=pd.DataFrame.from_dict({'earnings':list(range(10))})<line_sep>flags=exp._quantile_filtering(df ['earnings'] {'earnings':('two-sided' 80.0)})<line_sep>results=flags.tolist()<assert_stmt>results<eq>[<true>]+[<false>]<times>8+[<true>]<block_end><def_stmt>test_quantile_filtering_two_sided_asym <block_start>exp=Experiment({})<line_sep>data=list(range(-8 0))+list(range(16))<line_sep>df=pd.DataFrame.from_dict({'earnings':data})<line_sep>flags=exp._quantile_filtering(df ['earnings'] {'earnings':('two-sided-asym' 50.0)})<line_sep>results=flags.tolist()<assert_stmt>results<eq>[<true>]<times>2+[<false>]<times>18+[<true>]<times>4<block_end>
|
<import_from_future_stmt> absolute_import<import_from_stmt>.enginebase TemplateEngine<import_from_stmt>.moyatemplates Template<import_from_stmt>.environment Environment<line_sep>TemplateEngine<line_sep>Template<line_sep>Environment<line_sep>
|
#
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
<import_stmt>pytest<import_from_stmt>tensorflow.core.framework.tensor_pb2 TensorProto<import_from_stmt>tensorflow_serving.apis.get_model_metadata_pb2 GetModelMetadataRequest<import_from_stmt>tensorflow_serving.apis.get_model_status_pb2 GetModelStatusRequest<import_from_stmt>ovmsclient.tfs_compat.grpc.requests GrpcModelMetadataRequest GrpcPredictRequest _check_model_spec make_metadata_request make_predict_request make_status_request GrpcModelStatusRequest <import_from_stmt>config MODEL_SPEC_INVALID MODEL_SPEC_VALID PREDICT_REQUEST_INVALID_INPUTS PREDICT_REQUEST_VALID <import_from_stmt>tensorflow_serving.apis.predict_pb2 PredictRequest<line_sep>@pytest.mark.parametrize("name, version" MODEL_SPEC_VALID)<def_stmt>test_check_model_spec_valid name version<block_start>_check_model_spec(name version)<block_end>@pytest.mark.parametrize("name, version, expected_exception, expected_message" MODEL_SPEC_INVALID)<def_stmt>test_check_model_spec_invalid name version expected_exception expected_message<block_start><with_stmt>pytest.raises(expected_exception)<as>e_info<block_start>_check_model_spec(name version)<block_end><assert_stmt>str(e_info.value)<eq>expected_message<block_end>@pytest.mark.parametrize("name, version" MODEL_SPEC_VALID)<def_stmt>test_make_status_request_valid mocker name version<block_start>mock_method=mocker.patch('ovmsclient.tfs_compat.grpc.requests._check_model_spec')<line_sep>model_status_request=make_status_request(name version)<line_sep>mock_method.assert_called_once()<assert_stmt>isinstance(model_status_request GrpcModelStatusRequest)<assert_stmt>model_status_request.model_version<eq>version<assert_stmt>model_status_request.model_name<eq>name<assert_stmt>isinstance(model_status_request.raw_request GetModelStatusRequest)<block_end>@pytest.mark.parametrize("name, version, expected_exception, expected_message" MODEL_SPEC_INVALID)<def_stmt>test_make_status_request_invalid mocker name version expected_exception expected_message<block_start>mock_method=mocker.patch('ovmsclient.tfs_compat.grpc.requests._check_model_spec' side_effect=expected_exception(expected_message))<with_stmt>pytest.raises(expected_exception)<as>e_info<block_start>make_status_request(name version)<block_end><assert_stmt>str(e_info.value)<eq>expected_message<line_sep>mock_method.assert_called_once()<block_end>@pytest.mark.parametrize("name, version" MODEL_SPEC_VALID)<def_stmt>test_make_metadata_request_vaild mocker name version<block_start>mock_method=mocker.patch('ovmsclient.tfs_compat.grpc.requests._check_model_spec')<line_sep>model_metadata_request=make_metadata_request(name version)<line_sep>mock_method.assert_called_once()<assert_stmt>isinstance(model_metadata_request GrpcModelMetadataRequest)<assert_stmt>model_metadata_request.model_version<eq>version<assert_stmt>model_metadata_request.model_name<eq>name<assert_stmt>isinstance(model_metadata_request.raw_request GetModelMetadataRequest)<assert_stmt>len(model_metadata_request.raw_request.metadata_field)<eq>1<assert_stmt>model_metadata_request.raw_request.metadata_field[0]<eq>'signature_def'<block_end>@pytest.mark.parametrize("name, version, expected_exception, expected_message" MODEL_SPEC_INVALID)<def_stmt>test_make_metadata_request_invalid mocker name version expected_exception expected_message<block_start>mock_method=mocker.patch('ovmsclient.tfs_compat.grpc.requests._check_model_spec' side_effect=expected_exception(expected_message))<with_stmt>pytest.raises(expected_exception)<as>e_info<block_start>make_metadata_request(name 
version)<block_end><assert_stmt>str(e_info.value)<eq>expected_message<line_sep>mock_method.assert_called_once()<block_end>@pytest.mark.parametrize("inputs, expected_proto, name, version" PREDICT_REQUEST_VALID)<def_stmt>test_make_predict_request_valid inputs expected_proto name version<block_start>model_predict_request=make_predict_request(inputs name version)<line_sep>raw_predict_request=model_predict_request.raw_request<assert_stmt>isinstance(model_predict_request GrpcPredictRequest)<assert_stmt>model_predict_request.model_name<eq>name<assert_stmt>model_predict_request.model_version<eq>version<assert_stmt>model_predict_request.inputs<eq>inputs<assert_stmt>isinstance(raw_predict_request PredictRequest)<assert_stmt>raw_predict_request.model_spec.name<eq>name<assert_stmt>raw_predict_request.model_spec.version.value<eq>version<assert_stmt>len(inputs.keys())<eq>len(list(raw_predict_request.inputs.keys()))<for_stmt>key,value inputs.items()<block_start><assert_stmt>isinstance(raw_predict_request.inputs[key] TensorProto)<if_stmt>isinstance(value TensorProto)<block_start><assert_stmt>value<eq>raw_predict_request.inputs[key]<block_end><else_stmt><block_start><assert_stmt>(raw_predict_request.inputs[key].__getattribute__(expected_proto[key]['field'])<eq>expected_proto[key]['value'])<assert_stmt>raw_predict_request.inputs[key].tensor_shape<eq>expected_proto[key]['shape']<assert_stmt>raw_predict_request.inputs[key].dtype<eq>expected_proto[key]['dtype']<block_end><block_end><block_end>@pytest.mark.parametrize("name, version, expected_exception, expected_message" MODEL_SPEC_INVALID)<def_stmt>test_make_predict_request_invalid_model_spec mocker name version expected_exception expected_message<block_start>inputs={"input":[1 2 3]}<line_sep>mock_method=mocker.patch('ovmsclient.tfs_compat.grpc.requests._check_model_spec' side_effect=expected_exception(expected_message))<with_stmt>pytest.raises(expected_exception)<as>e_info<block_start>make_predict_request(inputs name version)<block_end><assert_stmt>str(e_info.value)<eq>expected_message<line_sep>mock_method.assert_called_once()<block_end>@[email protected]("""inputs, name, version,
expected_exception, expected_message""" PREDICT_REQUEST_INVALID_INPUTS)<def_stmt>test_make_predict_request_invalid_inputs mocker inputs name version expected_exception expected_message<block_start>mock_method=mocker.patch('ovmsclient.tfs_compat.grpc.requests._check_model_spec')<with_stmt>pytest.raises(expected_exception)<as>e_info<block_start>make_predict_request(inputs name version)<block_end><assert_stmt>str(e_info.value)<eq>expected_message<line_sep>mock_method.assert_called_once()<block_end>
|
<import_stmt>numba<import_stmt>scipy.special<as>sc<import_from_stmt>. signatures<def_stmt>choose_kernel name all_signatures<block_start><def_stmt>choice_function *args<block_start><for_stmt>signature all_signatures<block_start><if_stmt>args<eq>signature<block_start>f=signatures.name_and_types_to_pointer[(name *signature)]<line_sep><return><lambda>*args:f(*args)<block_end><block_end><block_end><return>choice_function<block_end><def_stmt>add_overloads <block_start><for_stmt>name,all_signatures signatures.name_to_numba_signatures.items()<block_start>sc_function=getattr(sc name)<line_sep>numba.extending.overload(sc_function)(choose_kernel(name all_signatures))<block_end><block_end>
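# Illustrative usage (a sketch; assumes the `signatures` tables map each supported
# scipy.special name to compiled function pointers, as the lookups above require):
# add_overloads()
# @numba.njit
# def f(x):
#     return sc.gammaln(x)  # e.g.; resolves only if gammaln's signature is registered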
|
"""Variant of cmd2's option parsing mechanism (http://www.assembla.com/wiki/show/python-cmd2)
"""<import_stmt>re<import_stmt>pyparsing<import_stmt>optparse<class_stmt>OptionParser(optparse.OptionParser)<block_start><def_stmt>__init__ self option_class=optparse.Option<block_start>optparse.OptionParser.__init__(self add_help_option=<false> option_class=option_class)<block_end><def_stmt>error self msg<block_start>"""error(msg : string)
Print a usage message incorporating 'msg' to stderr and exit.
If you override this in a subclass, it should not return -- it
should either exit or raise an exception.
"""<line_sep><raise>optparse.OptParseError(msg)<block_end><block_end><def_stmt>remaining_args oldArgs newArgList<block_start>"""
Preserves the spacing originally in the argument after
the removal of options.
>>> remaining_args('-f bar bar cow', ['bar', 'cow'])
'bar cow'
"""<line_sep>pattern=r'\s+'.join(re.escape(a)<for>a newArgList)+r'\s*$'<line_sep>matchObj=re.search(pattern oldArgs)<line_sep><return>oldArgs[matchObj.start():]<block_end><def_stmt>_attr_get_ obj attr<block_start>"""Returns an attribute's value, or None (no error) if undefined.
Analogous to .get() for dictionaries. Useful when checking for
value of options that may not have been defined on a given
method."""<try_stmt><block_start><return>getattr(obj attr)<block_end><except_stmt>AttributeError<block_start><return><none><block_end><block_end>optparse.Values.get=_attr_get_<line_sep>options_defined=[]# used to distinguish --options from SQL-style --comments
<def_stmt>options option_list arg_desc="arg"<block_start>"""Used as a decorator and passed a list of optparse-style options,
it alters a method to populate its ``opts`` argument from its
raw text argument.
Example: transform
def do_something(self, arg):
into
@options([make_option('-q', '--quick', action="store_true",
help="Makes things fast")],
"source dest")
def do_something(self, arg, opts):
if opts.quick:
self.fast_button = True
"""<if_stmt><not>isinstance(option_list list)<block_start>option_list=[option_list]<block_end><for_stmt>opt option_list<block_start>options_defined.append(pyparsing.Literal(opt.get_opt_string()))<block_end><def_stmt>option_setup func<block_start>optionParser=OptionParser()<for_stmt>opt option_list<block_start>optionParser.add_option(opt)<block_end>optionParser.set_usage("%s [options] %s"%(func.__name__[3:] arg_desc))<line_sep>optionParser._func=func<def_stmt>new_func instance arg<block_start><try_stmt><block_start>opts,newArgList=optionParser.parse_args(arg.split())<line_sep>newArgs=remaining_args(arg newArgList)<line_sep>arg=newArgs<block_end><except_stmt>optparse.OptParseError<as>e<block_start>instance.sendData(str(e))<line_sep><return>instance.sendData(optionParser.format_help())<block_end><return>func(instance arg opts)<block_end>new_func.__doc__=func.__doc__<line_sep>new_func.__extended_doc__=optionParser.format_help()<line_sep><return>new_func<block_end><return>option_setup<block_end>
|
<import_from_stmt>typing Optional<import_from_stmt>botocore.client BaseClient<import_from_stmt>typing Dict<import_from_stmt>typing Union<import_from_stmt>botocore.paginate Paginator<import_from_stmt>botocore.waiter Waiter<import_from_stmt>typing List<class_stmt>Client(BaseClient)<block_start><def_stmt>can_paginate self operation_name:str=<none><block_start><pass><block_end><def_stmt>create_api self Name:str ProtocolType:str RouteSelectionExpression:str ApiKeySelectionExpression:str=<none> Description:str=<none> DisableSchemaValidation:bool=<none> Version:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_api_mapping self ApiId:str DomainName:str Stage:str ApiMappingKey:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_authorizer self ApiId:str AuthorizerType:str AuthorizerUri:str IdentitySource:List Name:str AuthorizerCredentialsArn:str=<none> AuthorizerResultTtlInSeconds:int=<none> IdentityValidationExpression:str=<none> ProviderArns:List=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_deployment self ApiId:str Description:str=<none> StageName:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_domain_name self DomainName:str DomainNameConfigurations:List=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_integration self ApiId:str IntegrationType:str ConnectionId:str=<none> ConnectionType:str=<none> ContentHandlingStrategy:str=<none> CredentialsArn:str=<none> Description:str=<none> IntegrationMethod:str=<none> IntegrationUri:str=<none> PassthroughBehavior:str=<none> RequestParameters:Dict=<none> RequestTemplates:Dict=<none> TemplateSelectionExpression:str=<none> TimeoutInMillis:int=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_integration_response self ApiId:str IntegrationId:str IntegrationResponseKey:str ContentHandlingStrategy:str=<none> ResponseParameters:Dict=<none> ResponseTemplates:Dict=<none> TemplateSelectionExpression:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_model self ApiId:str Name:str Schema:str ContentType:str=<none> Description:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_route self ApiId:str RouteKey:str ApiKeyRequired:bool=<none> AuthorizationScopes:List=<none> AuthorizationType:str=<none> AuthorizerId:str=<none> ModelSelectionExpression:str=<none> OperationName:str=<none> RequestModels:Dict=<none> RequestParameters:Dict=<none> RouteResponseSelectionExpression:str=<none> Target:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_route_response self ApiId:str RouteId:str RouteResponseKey:str ModelSelectionExpression:str=<none> ResponseModels:Dict=<none> ResponseParameters:Dict=<none><arrow>Dict<block_start><pass><block_end><def_stmt>create_stage self ApiId:str StageName:str AccessLogSettings:Dict=<none> ClientCertificateId:str=<none> DefaultRouteSettings:Dict=<none> DeploymentId:str=<none> Description:str=<none> RouteSettings:Dict=<none> StageVariables:Dict=<none><arrow>Dict<block_start><pass><block_end><def_stmt>delete_api self ApiId:str<block_start><pass><block_end><def_stmt>delete_api_mapping self ApiMappingId:str DomainName:str<block_start><pass><block_end><def_stmt>delete_authorizer self ApiId:str AuthorizerId:str<block_start><pass><block_end><def_stmt>delete_deployment self ApiId:str DeploymentId:str<block_start><pass><block_end><def_stmt>delete_domain_name self DomainName:str<block_start><pass><block_end><def_stmt>delete_integration self ApiId:str 
IntegrationId:str<block_start><pass><block_end><def_stmt>delete_integration_response self ApiId:str IntegrationId:str IntegrationResponseId:str<block_start><pass><block_end><def_stmt>delete_model self ApiId:str ModelId:str<block_start><pass><block_end><def_stmt>delete_route self ApiId:str RouteId:str<block_start><pass><block_end><def_stmt>delete_route_response self ApiId:str RouteId:str RouteResponseId:str<block_start><pass><block_end><def_stmt>delete_stage self ApiId:str StageName:str<block_start><pass><block_end><def_stmt>generate_presigned_url self ClientMethod:str=<none> Params:Dict=<none> ExpiresIn:int=<none> HttpMethod:str=<none><block_start><pass><block_end><def_stmt>get_api self ApiId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_api_mapping self ApiMappingId:str DomainName:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_api_mappings self DomainName:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_apis self MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_authorizer self ApiId:str AuthorizerId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_authorizers self ApiId:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_deployment self ApiId:str DeploymentId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_deployments self ApiId:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_domain_name self DomainName:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_domain_names self MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_integration self ApiId:str IntegrationId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_integration_response self ApiId:str IntegrationId:str IntegrationResponseId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_integration_responses self ApiId:str IntegrationId:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_integrations self ApiId:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_model self ApiId:str ModelId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_model_template self ApiId:str ModelId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_models self ApiId:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_paginator self operation_name:str=<none><arrow>Paginator<block_start><pass><block_end><def_stmt>get_route self ApiId:str RouteId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_route_response self ApiId:str RouteId:str RouteResponseId:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_route_responses self ApiId:str RouteId:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_routes self ApiId:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_stage self ApiId:str StageName:str<arrow>Dict<block_start><pass><block_end><def_stmt>get_stages self ApiId:str MaxResults:str=<none> NextToken:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>get_waiter self waiter_name:str=<none><arrow>Waiter<block_start><pass><block_end><def_stmt>update_api self ApiId:str ApiKeySelectionExpression:str=<none> Description:str=<none> DisableSchemaValidation:bool=<none> Name:str=<none> 
RouteSelectionExpression:str=<none> Version:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_api_mapping self ApiId:str ApiMappingId:str DomainName:str ApiMappingKey:str=<none> Stage:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_authorizer self ApiId:str AuthorizerId:str AuthorizerCredentialsArn:str=<none> AuthorizerResultTtlInSeconds:int=<none> AuthorizerType:str=<none> AuthorizerUri:str=<none> IdentitySource:List=<none> IdentityValidationExpression:str=<none> Name:str=<none> ProviderArns:List=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_deployment self ApiId:str DeploymentId:str Description:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_domain_name self DomainName:str DomainNameConfigurations:List=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_integration self ApiId:str IntegrationId:str ConnectionId:str=<none> ConnectionType:str=<none> ContentHandlingStrategy:str=<none> CredentialsArn:str=<none> Description:str=<none> IntegrationMethod:str=<none> IntegrationType:str=<none> IntegrationUri:str=<none> PassthroughBehavior:str=<none> RequestParameters:Dict=<none> RequestTemplates:Dict=<none> TemplateSelectionExpression:str=<none> TimeoutInMillis:int=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_integration_response self ApiId:str IntegrationId:str IntegrationResponseId:str ContentHandlingStrategy:str=<none> IntegrationResponseKey:str=<none> ResponseParameters:Dict=<none> ResponseTemplates:Dict=<none> TemplateSelectionExpression:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_model self ApiId:str ModelId:str ContentType:str=<none> Description:str=<none> Name:str=<none> Schema:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_route self ApiId:str RouteId:str ApiKeyRequired:bool=<none> AuthorizationScopes:List=<none> AuthorizationType:str=<none> AuthorizerId:str=<none> ModelSelectionExpression:str=<none> OperationName:str=<none> RequestModels:Dict=<none> RequestParameters:Dict=<none> RouteKey:str=<none> RouteResponseSelectionExpression:str=<none> Target:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_route_response self ApiId:str RouteId:str RouteResponseId:str ModelSelectionExpression:str=<none> ResponseModels:Dict=<none> ResponseParameters:Dict=<none> RouteResponseKey:str=<none><arrow>Dict<block_start><pass><block_end><def_stmt>update_stage self ApiId:str StageName:str AccessLogSettings:Dict=<none> ClientCertificateId:str=<none> DefaultRouteSettings:Dict=<none> DeploymentId:str=<none> Description:str=<none> RouteSettings:Dict=<none> StageVariables:Dict=<none><arrow>Dict<block_start><pass><block_end><block_end>
|
<import_from_stmt>config.Config Config<import_from_stmt>engine.component.TemplateModuleComponent TemplateModuleComponent<class_stmt>UnhookComponent(TemplateModuleComponent)<block_start><def_stmt>__init__ self code=<none><block_start>placeholder=Config().get("PLACEHOLDERS" "UNHOOK")<line_sep>super().__init__(code placeholder)<block_end><block_end>
|
<import_from_stmt>stix_shifter_utils.modules.base.stix_translation.base_results_translator BaseResultTranslator<import_from_stmt>stix_shifter_utils.stix_translation.src.utils.exceptions TranslationResultException<import_stmt>json<import_stmt>uuid<line_sep>ERROR_TYPE_TRANSLATE_EXCEPTION='translate_exception'<class_stmt>ResultsTranslator(BaseResultTranslator)<block_start><def_stmt>read_json self filepath options<block_start><return>'{}'<block_end><def_stmt>translate_results self data_source data<block_start>error_type=self.options.get('error_type')<if_stmt>self.options.get('error_type')<eq>ERROR_TYPE_TRANSLATE_EXCEPTION<block_start><raise>TranslationResultException("Test exception in translate_results")<block_end># Wrap data in a STIX bundle and insert the data_source identity object as the first object
bundle={"type":"bundle" "id":"bundle--"+str(uuid.uuid4()) "objects":[]}<line_sep>data_source=json.loads(data_source)<line_sep>bundle['objects']<augadd>[data_source]<line_sep># Data is already STIX and we don't want to touch it
bundle_data=json.loads(data)<for_stmt>obs bundle_data<block_start>obs["created_by_ref"]=data_source['id']<block_end>bundle['objects']<augadd>bundle_data<line_sep><return>bundle<block_end><block_end>
|
WTF_CSRF_ENABLED=<true><line_sep>SECRET_KEY='you-will-never-guess'<line_sep>
|
<import_from_future_stmt> annotations<import_from_stmt>typing Callable Generator<import_stmt>h11<import_stmt>pytest<import_stmt>trio<import_from_stmt>hypercorn.config Config<import_from_stmt>hypercorn.trio.tcp_server TCPServer<import_from_stmt>hypercorn.typing Scope<import_from_stmt>..helpers MockSocket<line_sep>KEEP_ALIVE_TIMEOUT=0.01<line_sep>REQUEST=h11.Request(method="GET" target="/" headers=[(b"host" b"hypercorn")])<async_keyword><def_stmt>slow_framework scope:Scope receive:Callable send:Callable<arrow><none><block_start><while_stmt><true><block_start>event=<await>receive()<if_stmt>event["type"]<eq>"http.disconnect"<block_start><break><block_end><elif_stmt>event["type"]<eq>"lifespan.startup"<block_start><await>send({"type":"lifespan.startup.complete"})<block_end><elif_stmt>event["type"]<eq>"lifespan.shutdown"<block_start><await>send({"type":"lifespan.shutdown.complete"})<block_end><elif_stmt>event["type"]<eq>"http.request"<and><not>event.get("more_body" <false>)<block_start><await>trio.sleep(2<times>KEEP_ALIVE_TIMEOUT)<line_sep><await>send({"type":"http.response.start" "status":200 "headers":[(b"content-length" b"0")] })<line_sep><await>send({"type":"http.response.body" "body":b"" "more_body":<false>})<line_sep><break><block_end><block_end><block_end>@pytest.fixture(name="client_stream" scope="function")<def_stmt>_client_stream nursery:trio._core._run.Nursery <arrow>Generator[trio.testing._memory_streams.MemorySendStream <none> <none>]<block_start>config=Config()<line_sep>config.keep_alive_timeout=KEEP_ALIVE_TIMEOUT<line_sep>client_stream,server_stream=trio.testing.memory_stream_pair()<line_sep>server_stream.socket=MockSocket()<line_sep>server=TCPServer(slow_framework config server_stream)<line_sep>nursery.start_soon(server.run)<line_sep><yield>client_stream<block_end>@pytest.mark.trio<async_keyword><def_stmt>test_http1_keep_alive_pre_request client_stream:trio.testing._memory_streams.MemorySendStream <arrow><none><block_start><await>client_stream.send_all(b"GET")<line_sep><await>trio.sleep(2<times>KEEP_ALIVE_TIMEOUT)<line_sep># Only way to confirm closure is to invoke an error
<with_stmt>pytest.raises(trio.BrokenResourceError)<block_start><await>client_stream.send_all(b"a")<block_end><block_end>@pytest.mark.trio<async_keyword><def_stmt>test_http1_keep_alive_during client_stream:trio.testing._memory_streams.MemorySendStream <arrow><none><block_start>client=h11.Connection(h11.CLIENT)<line_sep><await>client_stream.send_all(client.send(REQUEST))<line_sep><await>trio.sleep(2<times>KEEP_ALIVE_TIMEOUT)<line_sep># Key is that this doesn't error
<await>client_stream.send_all(client.send(h11.EndOfMessage()))<block_end>@pytest.mark.trio<async_keyword><def_stmt>test_http1_keep_alive client_stream:trio.testing._memory_streams.MemorySendStream <arrow><none><block_start>client=h11.Connection(h11.CLIENT)<line_sep><await>client_stream.send_all(client.send(REQUEST))<line_sep><await>trio.sleep(2<times>KEEP_ALIVE_TIMEOUT)<line_sep><await>client_stream.send_all(client.send(h11.EndOfMessage()))<while_stmt><true><block_start>event=client.next_event()<if_stmt>event<eq>h11.NEED_DATA<block_start>data=<await>client_stream.receive_some(2<power>16)<line_sep>client.receive_data(data)<block_end><elif_stmt>isinstance(event h11.EndOfMessage)<block_start><break><block_end><block_end>client.start_next_cycle()<line_sep><await>client_stream.send_all(client.send(REQUEST))<line_sep><await>trio.sleep(2<times>KEEP_ALIVE_TIMEOUT)<line_sep># Key is that this doesn't error
<await>client_stream.send_all(client.send(h11.EndOfMessage()))<block_end>@pytest.mark.trio<async_keyword><def_stmt>test_http1_keep_alive_pipelining client_stream:trio.testing._memory_streams.MemorySendStream <arrow><none><block_start><await>client_stream.send_all(b"GET / HTTP/1.1\r\nHost: hypercorn\r\n\r\nGET / HTTP/1.1\r\nHost: hypercorn\r\n\r\n")<line_sep><await>client_stream.receive_some(2<power>16)<line_sep><await>trio.sleep(2<times>KEEP_ALIVE_TIMEOUT)<line_sep><await>client_stream.send_all(b"")<block_end>
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
# pylint: disable=missing-module-docstring
# pylint: disable=missing-class-docstring
# pylint: disable=missing-function-docstring
""" Test checkpoint_wrapper with normalization layers. """<import_stmt>pytest<import_stmt>torch<import_from_stmt>torch.nn BatchNorm2d LayerNorm Linear Sequential<import_from_stmt>torch.optim SGD<import_from_stmt>fairscale.nn.checkpoint.checkpoint_activations checkpoint_wrapper<import_from_stmt>fairscale.utils torch_version<import_from_stmt>fairscale.utils.testing objects_are_equal<line_sep>NORM_TYPES=[LayerNorm BatchNorm2d]<line_sep>MP_TYPES=["fp32" "fp16" "call_half"]<def_stmt>get_model norm_type checkpointed mixed_precision<block_start><assert_stmt>norm_type<in>NORM_TYPES norm_type<assert_stmt>checkpointed<in>[<true> <false>] checkpointed<assert_stmt>mixed_precision<in>MP_TYPES<line_sep>model=Sequential(Linear(3 2) norm_type(2))<if_stmt>mixed_precision<eq>"fp16"# Set param.data and buffers as fp16
<block_start><for_stmt>p model.parameters()<block_start>p.data=p.data.half()<block_end><for_stmt>m model<block_start><for_stmt>n,b m.named_buffers()<block_start>setattr(m n b.half())<block_end><block_end><block_end><elif_stmt>mixed_precision<eq>"call_half"<block_start>model.half()<block_end><if_stmt>checkpointed<block_start>model=checkpoint_wrapper(model)<block_end><return>model<block_end>@pytest.mark.parametrize("device" ["cpu" "cuda"])@pytest.mark.parametrize("norm_type" NORM_TYPES)@pytest.mark.parametrize("mixed_precision" MP_TYPES)<def_stmt>test_norm device norm_type mixed_precision<block_start>"""Test checkpoint_wrapper with different norm layers."""<if_stmt>device<eq>"cuda"<and><not>torch.cuda.is_available()<block_start>pytest.skip("Skip due to lack of GPU")<block_end># Get input, ref, checkpoint models and make them equal.
in_data=torch.rand(2 2 3 3).to(device)<line_sep>m_ref=get_model(norm_type <false> mixed_precision).to(device)<line_sep>m_cpt=get_model(norm_type <true> mixed_precision).to(device)<line_sep>m_cpt.load_state_dict(m_ref.state_dict())<if_stmt>torch_version()<ge>(1 6 0)# This assert fails on 1.5.1.
<block_start><assert_stmt>objects_are_equal(m_ref.state_dict() m_cpt.state_dict())<block_end><if_stmt>mixed_precision<ne>"fp32"<block_start>in_data=in_data.half()<block_end># Needed due to checkpointing.
in_data.requires_grad=<true><for_stmt>model (m_ref m_cpt)<block_start>optim=SGD(model.parameters() lr=0.1)<if_stmt>device<eq>"cpu"<and>mixed_precision<ne>"fp32"# Got: RuntimeError: "batch_norm"/"layer_norm" not implemented for 'Half'.
<block_start><with_stmt>pytest.raises(RuntimeError)<block_start>out=model(in_data)<block_end><return><block_end><else_stmt># Everything else works.
<block_start>out=model(in_data)<block_end>out.sum().backward()<line_sep>optim.step()<block_end><if_stmt>torch_version()<ge>(1 6 0)<block_start><assert_stmt>objects_are_equal(m_ref.state_dict() m_cpt.state_dict())<block_end><block_end>
|
# Copyright (c) 2013 - 2019 <NAME> and Contributors.
# This file is part of YAWAST which is released under the MIT license.
# See the LICENSE file or go to https://yawast.org/license/ for full license details.
<import_stmt>gc<import_stmt>hashlib<import_stmt>json<import_stmt>os<import_stmt>time<import_stmt>zipfile<import_from_stmt>datetime datetime<import_from_stmt>typing Dict List cast Optional Any Union<import_from_stmt>yawast.external.memory_size Size<import_from_stmt>yawast.external.total_size total_size<import_from_stmt>yawast.reporting.enums Vulnerabilities Severity<import_from_stmt>yawast.reporting.issue Issue<import_from_stmt>yawast.scanner.plugins.result Result<import_from_stmt>yawast.shared output<import_from_stmt>yawast.shared.exec_timer ExecutionTimer<line_sep>_issues:Dict[str Dict[Vulnerabilities List[Issue]]]={}<line_sep>_info:Dict[str Any]={}<line_sep>_data:Dict[str Any]={}<line_sep>_evidence:Dict[str Any]={}<line_sep>_domain:str=""<line_sep>_output_file:str=""<def_stmt>init output_file:Union[str <none>]=<none><arrow><none><block_start><global>_output_file<if_stmt>output_file<is><not><none># if we have something, let's figure out what
<block_start>output_file=os.path.abspath(output_file)<if_stmt>os.path.isdir(output_file)# it's a directory, so we are going to create a name
<block_start>name=f"yawast_{int(time.time())}.json"<line_sep>output_file=os.path.join(output_file name)<block_end><elif_stmt>os.path.isfile(output_file)<or>os.path.isfile(f"{_output_file}.zip")# the file already exists
<block_start>print("WARNING: Output file already exists; it will be replaced.")<block_end>_output_file=output_file<block_end><block_end><def_stmt>save_output spinner=<none># add some extra debug data
<block_start>register_info("memsize_issues" total_size(_issues))<line_sep>register_info("memsize_info" total_size(_info))<line_sep>register_info("memsize_data" total_size(_data))<line_sep>register_info("memsize_evidence" total_size(_evidence))<line_sep>register_info("gc_stats" gc.get_stats())<line_sep>register_info("gc_objects" len(gc.get_objects()))<if_stmt>spinner<block_start>spinner.stop()<block_end>print("Saving...")<if_stmt>spinner<block_start>spinner.start()<block_end>vulns={}<for_stmt>vuln Vulnerabilities<block_start>vulns[vuln.name]={"severity":vuln.severity "description":vuln.description "id":vuln.id }<block_end>data={"_info":_convert_keys(_info) "data":_convert_keys(_data) "issues":_convert_keys(_issues) "evidence":_convert_keys(_evidence) "vulnerabilities":vulns }<line_sep>json_data=json.dumps(data indent=4)<try_stmt><block_start>zf=zipfile.ZipFile(f"{_output_file}.zip" "x" zipfile.ZIP_BZIP2)<with_stmt>ExecutionTimer()<as>tm<block_start>zf.writestr(f"{os.path.basename(_output_file)}" json_data.encode("utf_8" "backslashreplace") )<block_end>zf.close()<line_sep>orig="{0:cM}".format(Size(len(json_data)))<line_sep>comp="{0:cM}".format(Size(os.path.getsize(f"{_output_file}.zip")))<if_stmt>spinner<block_start>spinner.stop()<block_end>print(f"Saved {_output_file}.zip (size reduced from {orig} to {comp} in {tm.to_ms()}ms)")<block_end><except_stmt>Exception<as>error<block_start><if_stmt>spinner<block_start>spinner.stop()<block_end>print(f"Error writing output file: {error}")<block_end><block_end><def_stmt>get_output_file <arrow>str<block_start><if_stmt>len(_output_file)<g>0<block_start><return>f"{_output_file}.zip"<block_end><else_stmt><block_start><return>""<block_end><block_end><def_stmt>setup domain:str<arrow><none><block_start><global>_domain<line_sep>_domain=domain<if_stmt>_domain<not><in>_issues<block_start>_issues[_domain]={}<block_end><if_stmt>_domain<not><in>_data<block_start>_data[_domain]={}<block_end><if_stmt>_domain<not><in>_evidence<block_start>_evidence[_domain]={}<block_end><block_end><def_stmt>is_registered vuln:Vulnerabilities<arrow>bool<block_start><if_stmt>_issues<is><none><block_start><return><false><block_end><else_stmt><block_start><if_stmt>_domain<in>_issues<block_start><if_stmt>_issues[_domain].get(vuln)<is><none><block_start><return><false><block_end><else_stmt><block_start><return><true><block_end><block_end><else_stmt><block_start><return><false><block_end><block_end><block_end><def_stmt>register_info key:str value:Any<block_start><if_stmt>_output_file<is><not><none><and>len(_output_file)<g>0<block_start>_info[key]=value<block_end><block_end><def_stmt>register_data key:str value:Any<block_start><if_stmt>_output_file<is><not><none><and>len(_output_file)<g>0<block_start><if_stmt>_domain<is><not><none><block_start><if_stmt>_domain<in>_data<block_start>_register_data(_data[_domain] key value)<block_end><else_stmt><block_start>_data[_domain]={}<line_sep>_register_data(_data[_domain] key value)<block_end><block_end><else_stmt><block_start>_register_data(_data key value)<block_end><block_end><block_end><def_stmt>register_message value:str kind:str<block_start><if_stmt>_output_file<is><not><none><and>len(_output_file)<g>0<block_start><if_stmt>"messages"<not><in>_info<block_start>_info["messages"]={}<block_end><if_stmt>kind<not><in>_info["messages"]<block_start>_info["messages"][kind]=[]<block_end>_info["messages"][kind].append(f"[{datetime.utcnow()} UTC]: {value}")<block_end><block_end><def_stmt>register issue:Issue<arrow><none># make sure the Dict for _domain exists 
# - this shouldn't normally be an issue, but is for unit tests
<block_start><if_stmt>_domain<not><in>_issues<block_start>_issues[_domain]={}<block_end># add the evidence to the evidence list, and swap the value in the object for its hash.
# the point of this is to minimize cases where we are holding the same (large) string
# multiple times in memory. should reduce memory by up to 20%
<if_stmt>_domain<not><in>_evidence<block_start>_evidence[_domain]={}<block_end><if_stmt>"request"<in>issue.evidence<and>issue.evidence["request"]<is><not><none><block_start>req=str(issue.evidence["request"]).encode("utf-8")<line_sep>req_id=hashlib.blake2b(req digest_size=16).hexdigest()<if_stmt>req_id<not><in>_evidence[_domain]<block_start>_evidence[_domain][req_id]=issue.evidence["request"]<block_end>issue.evidence["request"]=req_id<block_end><if_stmt>"response"<in>issue.evidence<and>issue.evidence["response"]<is><not><none><block_start>res=str(issue.evidence["response"]).encode("utf-8")<line_sep>res_id=hashlib.blake2b(res digest_size=16).hexdigest()<if_stmt>res_id<not><in>_evidence[_domain]<block_start>_evidence[_domain][res_id]=issue.evidence["response"]<block_end>issue.evidence["response"]=res_id<block_end># if we haven't handled this issue yet, create a List for it
<if_stmt><not>is_registered(issue.vulnerability)<block_start>_issues[_domain][issue.vulnerability]=[]<block_end># we need to check to see if we already have this issue, for this URL, so we don't create dups
# TODO: This isn't exactly efficient - refactor
findings=_issues[_domain][issue.vulnerability]<line_sep>findings=cast(List[Issue] findings)<for_stmt>finding findings<block_start><if_stmt>finding.url<eq>issue.url<and>finding.evidence<eq>issue.evidence# just bail out
<block_start>output.debug(f"Duplicate Issue: {issue.id} (duplicate of {finding.id})")<line_sep><return><block_end><block_end>_issues[_domain][issue.vulnerability].append(issue)<block_end><def_stmt>display msg:str issue:Issue<arrow><none><block_start><if_stmt>issue.vulnerability.display_all<or><not>is_registered(issue.vulnerability)<block_start><if_stmt>issue.severity<eq>Severity.CRITICAL<or>issue.severity<eq>Severity.HIGH<block_start>output.vuln(msg)<block_end><elif_stmt>issue.severity<eq>Severity.MEDIUM<block_start>output.warn(msg)<block_end><else_stmt><block_start>output.info(msg)<block_end><block_end># if there's no evidence, default to the msg - better than nothing
<if_stmt>issue.evidence<is><none><block_start>issue.evidence=msg.strip()<block_end>register(issue)<block_end><def_stmt>display_results results:List[Result] padding:Optional[str]=""<block_start><for_stmt>res results<block_start>iss=Issue.from_result(res)<line_sep>display(f"{padding}{res.message}" iss)<block_end><block_end><def_stmt>_register_data data:Dict key:str value:Any<block_start><if_stmt>key<in>data<and>isinstance(data[key] list)<and>isinstance(value list)<block_start>ls=cast(list data[key])<line_sep>ls.extend(value)<block_end><elif_stmt>key<in>data<and>isinstance(data[key] dict)<and>isinstance(value dict)<block_start>dt=cast(dict data[key])<line_sep>dt.update(value)<block_end><else_stmt><block_start>data[key]=value<block_end><block_end><def_stmt>_convert_keys dct:Dict<arrow>Dict<block_start>ret={}<for_stmt>k,v dct.items()<block_start><if_stmt>isinstance(k Vulnerabilities)<block_start>k=k.name<block_end><if_stmt>isinstance(v dict)<block_start>v=_convert_keys(v)<block_end><try_stmt><block_start>_=json.dumps(v)<block_end><except_stmt>Exception<as>error<block_start>output.debug(f"Error serializing data: {str(error)}")<line_sep># convert to string - this may be wrong, but at least it won't fail
v=str(v)<block_end>ret[k]=v<block_end><return>ret<block_end>
|
# Copyright (c) 2016 - 2019 Dell Inc. or its subsidiaries.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
<import_stmt>unittest<import_from_stmt>unittest mock<import_stmt>ddt<import_from_stmt>cinder exception<import_from_stmt>cinder.volume configuration<as>conf<import_from_stmt>cinder.volume.drivers.dell_emc.unity adapter<as>unity_adapter<import_from_stmt>cinder.volume.drivers.dell_emc.unity driver<import_from_stmt>cinder.volume.drivers.dell_emc.unity replication<import_from_stmt>cinder.volume.drivers.san.san san_opts<line_sep>@ddt.ddt<class_stmt>UnityReplicationTest(unittest.TestCase)<block_start>@ddt.data({'version':'1.0.0' 'protocol':'FC' 'expected':unity_adapter.FCAdapter} {'version':'2.0.0' 'protocol':'iSCSI' 'expected':unity_adapter.ISCSIAdapter})@ddt.unpack<def_stmt>test_init_adapter self version protocol expected<block_start>a=replication.init_adapter(version protocol)<line_sep>self.assertIsInstance(a expected)<line_sep>self.assertEqual(version a.version)<block_end><block_end>@ddt.ddt<class_stmt>UnityReplicationDeviceTest(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self.config=conf.Configuration(san_opts config_group='unity-backend')<line_sep>self.config.san_ip='1.1.1.1'<line_sep>self.config.san_login='user1'<line_sep>self.config.san_password='<PASSWORD>'<line_sep>self.driver=driver.UnityDriver(configuration=self.config)<line_sep>conf_dict={'backend_id':'secondary_unity' 'san_ip':'2.2.2.2'}<line_sep>self.mock_adapter=mock.MagicMock(is_setup=<false>)<def_stmt>mock_do_setup *args<block_start>self.mock_adapter.is_setup=<true><block_end>self.mock_adapter.do_setup=mock.MagicMock(side_effect=mock_do_setup)<with_stmt>mock.patch('cinder.volume.drivers.dell_emc.unity.'<concat>'replication.init_adapter' return_value=self.mock_adapter)<block_start>self.replication_device=replication.ReplicationDevice(conf_dict self.driver)<block_end><block_end>@ddt.data({'conf_dict':{'backend_id':'secondary_unity' 'san_ip':'2.2.2.2'} 'expected':['secondary_unity' '2.2.2.2' 'user1' '<PASSWORD>' 60]} {'conf_dict':{'backend_id':'secondary_unity' 'san_ip':'2.2.2.2' 'san_login':'user2' 'san_password':'<PASSWORD>' 'max_time_out_of_sync':180} 'expected':['secondary_unity' '2.2.2.2' 'user2' '<PASSWORD>' 180]} )@ddt.unpack<def_stmt>test_init self conf_dict expected<block_start>self.driver.configuration.replication_device=conf_dict<line_sep>device=replication.ReplicationDevice(conf_dict self.driver)<line_sep>self.assertListEqual([device.backend_id device.san_ip device.san_login device.san_password device.max_time_out_of_sync] expected)<line_sep>self.assertIs(self.driver device.driver)<block_end>@ddt.data({'conf_dict':{'san_ip':'2.2.2.2'} } {'conf_dict':{'backend_id':' ' 'san_ip':'2.2.2.2'} } {'conf_dict':{'backend_id':'secondary_unity'} } {'conf_dict':{'backend_id':'secondary_unity' 'san_ip':' '} } {'conf_dict':{'backend_id':'secondary_unity' 'san_ip':'2.2.2.2' 'san_login':'user2' 'san_password':'<PASSWORD>' 'max_time_out_of_sync':'NOT_A_NUMBER'} } )@ddt.unpack<def_stmt>test_init_raise self conf_dict<block_start>self.driver.configuration.replication_device=conf_dict<line_sep>self.assertRaisesRegex(exception.InvalidConfigurationValue 'Value .* is not valid for configuration '<concat>'option "unity-backend.replication_device"' replication.ReplicationDevice conf_dict self.driver)<block_end>@ddt.data({'conf_dict':{'backend_id':'secondary_unity' 'san_ip':'2.2.2.2'} 'expected':['2.2.2.2' 'user1' '<PASSWORD>']} {'conf_dict':{'backend_id':'secondary_unity' 'san_ip':'2.2.2.2' 'san_login':'user2' 'san_password':'<PASSWORD>' 'max_time_out_of_sync':180} 'expected':['2.2.2.2' 'user2' '<PASSWORD>']} 
)@ddt.unpack<def_stmt>test_device_conf self conf_dict expected<block_start>self.driver.configuration.replication_device=conf_dict<line_sep>device=replication.ReplicationDevice(conf_dict self.driver)<line_sep>c=device.device_conf<line_sep>self.assertListEqual([c.san_ip c.san_login c.san_password] expected)<block_end><def_stmt>test_setup_adapter self<block_start>self.replication_device.setup_adapter()<line_sep># Not call adapter.do_setup after initial setup done.
self.replication_device.setup_adapter()<line_sep>self.mock_adapter.do_setup.assert_called_once()<block_end><def_stmt>test_setup_adapter_fail self<block_start><def_stmt>f *args<block_start><raise>exception.VolumeBackendAPIException('adapter setup failed')<block_end>self.mock_adapter.do_setup=mock.MagicMock(side_effect=f)<with_stmt>self.assertRaises(exception.VolumeBackendAPIException)<block_start>self.replication_device.setup_adapter()<block_end><block_end><def_stmt>test_adapter self<block_start>self.assertIs(self.mock_adapter self.replication_device.adapter)<line_sep>self.mock_adapter.do_setup.assert_called_once()<block_end><def_stmt>test_destination_pool self<block_start>self.mock_adapter.storage_pools_map={'pool-1':'pool-1'}<line_sep>self.assertEqual('pool-1' self.replication_device.destination_pool)<block_end><block_end>@ddt.ddt<class_stmt>UnityReplicationManagerTest(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>self.config=conf.Configuration(san_opts config_group='unity-backend')<line_sep>self.config.san_ip='1.1.1.1'<line_sep>self.config.san_login='user1'<line_sep>self.config.san_password='<PASSWORD>'<line_sep>self.config.replication_device=[{'backend_id':'secondary_unity' 'san_ip':'2.2.2.2'}]<line_sep>self.driver=driver.UnityDriver(configuration=self.config)<line_sep>self.replication_manager=replication.ReplicationManager()<block_end>@mock.patch('cinder.volume.drivers.dell_emc.unity.'<concat>'replication.ReplicationDevice.setup_adapter')<def_stmt>test_do_setup self mock_setup_adapter<block_start>self.replication_manager.do_setup(self.driver)<line_sep>calls=[mock.call() mock.call()]<line_sep>default_device=self.replication_manager.default_device<line_sep>self.assertEqual('1.1.1.1' default_device.san_ip)<line_sep>self.assertEqual('user1' default_device.san_login)<line_sep>self.assertEqual('password1' default_device.san_password)<line_sep>devices=self.replication_manager.replication_devices<line_sep>self.assertEqual(1 len(devices))<line_sep>self.assertIn('secondary_unity' devices)<line_sep>rep_device=devices['secondary_unity']<line_sep>self.assertEqual('2.2.2.2' rep_device.san_ip)<line_sep>self.assertEqual('user1' rep_device.san_login)<line_sep>self.assertEqual('<PASSWORD>' rep_device.san_password)<line_sep>self.assertTrue(self.replication_manager.is_replication_configured)<line_sep>self.assertTrue(self.replication_manager.active_backend_id<is><none><or>self.replication_manager.active_backend_id<eq>'default')<line_sep>self.assertFalse(self.replication_manager.is_service_failed_over)<line_sep>active_adapter=self.replication_manager.active_adapter<line_sep>calls.append(mock.call())<line_sep>self.assertIs(default_device.adapter active_adapter)<line_sep>calls.append(mock.call())<line_sep>mock_setup_adapter.assert_has_calls(calls)<block_end>@mock.patch('cinder.volume.drivers.dell_emc.unity.'<concat>'replication.ReplicationDevice.setup_adapter')<def_stmt>test_do_setup_replication_not_configured self mock_setup_adapter<block_start>self.driver.configuration.replication_device=<none><line_sep>self.replication_manager.do_setup(self.driver)<line_sep>calls=[mock.call()]<line_sep>default_device=self.replication_manager.default_device<line_sep>self.assertEqual('1.1.1.1' default_device.san_ip)<line_sep>self.assertEqual('user1' default_device.san_login)<line_sep>self.assertEqual('<PASSWORD>' default_device.san_password)<line_sep>devices=self.replication_manager.replication_devices<line_sep>self.assertEqual(0 
len(devices))<line_sep>self.assertFalse(self.replication_manager.is_replication_configured)<line_sep>self.assertTrue(self.replication_manager.active_backend_id<is><none><or>self.replication_manager.active_backend_id<eq>'default')<line_sep>self.assertFalse(self.replication_manager.is_service_failed_over)<line_sep>active_adapter=self.replication_manager.active_adapter<line_sep>calls.append(mock.call())<line_sep>self.assertIs(default_device.adapter active_adapter)<line_sep>calls.append(mock.call())<line_sep>mock_setup_adapter.assert_has_calls(calls)<block_end>@mock.patch('cinder.volume.drivers.dell_emc.unity.'<concat>'replication.ReplicationDevice.setup_adapter')<def_stmt>test_do_setup_failed_over self mock_setup_adapter<block_start>self.driver=driver.UnityDriver(configuration=self.config active_backend_id='secondary_unity')<line_sep>self.replication_manager.do_setup(self.driver)<line_sep>calls=[mock.call()]<line_sep>default_device=self.replication_manager.default_device<line_sep>self.assertEqual('1.1.1.1' default_device.san_ip)<line_sep>self.assertEqual('user1' default_device.san_login)<line_sep>self.assertEqual('<PASSWORD>' default_device.san_password)<line_sep>devices=self.replication_manager.replication_devices<line_sep>self.assertEqual(1 len(devices))<line_sep>self.assertIn('secondary_unity' devices)<line_sep>rep_device=devices['secondary_unity']<line_sep>self.assertEqual('2.2.2.2' rep_device.san_ip)<line_sep>self.assertEqual('user1' rep_device.san_login)<line_sep>self.assertEqual('<PASSWORD>' rep_device.san_password)<line_sep>self.assertTrue(self.replication_manager.is_replication_configured)<line_sep>self.assertEqual('secondary_unity' self.replication_manager.active_backend_id)<line_sep>self.assertTrue(self.replication_manager.is_service_failed_over)<line_sep>active_adapter=self.replication_manager.active_adapter<line_sep>calls.append(mock.call())<line_sep>self.assertIs(rep_device.adapter active_adapter)<line_sep>calls.append(mock.call())<line_sep>mock_setup_adapter.assert_has_calls(calls)<block_end>@ddt.data({'rep_device':[{'backend_id':'default' 'san_ip':'2.2.2.2'}]} {'rep_device':[{'backend_id':'secondary_unity' 'san_ip':'2.2.2.2'} {'backend_id':'default' 'san_ip':'3.3.3.3'}]} {'rep_device':[{'backend_id':'secondary_unity' 'san_ip':'2.2.2.2'} {'backend_id':'third_unity' 'san_ip':'3.3.3.3'}]} )@[email protected]('cinder.volume.drivers.dell_emc.unity.'<concat>'replication.ReplicationDevice.setup_adapter')<def_stmt>test_do_setup_raise_invalid_rep_device self mock_setup_adapter rep_device<block_start>self.driver.configuration.replication_device=rep_device<line_sep>self.assertRaises(exception.InvalidConfigurationValue self.replication_manager.do_setup self.driver)<block_end>@mock.patch('cinder.volume.drivers.dell_emc.unity.'<concat>'replication.ReplicationDevice.setup_adapter')<def_stmt>test_do_setup_raise_invalid_active_backend_id self mock_setup_adapter<block_start>self.driver=driver.UnityDriver(configuration=self.config active_backend_id='third_unity')<line_sep>self.assertRaises(exception.InvalidConfigurationValue self.replication_manager.do_setup self.driver)<block_end>@mock.patch('cinder.volume.drivers.dell_emc.unity.'<concat>'replication.ReplicationDevice.setup_adapter')<def_stmt>test_failover_service self mock_setup_adapter<block_start>self.assertIsNone(self.replication_manager.active_backend_id)<line_sep>self.replication_manager.do_setup(self.driver)<line_sep>self.replication_manager.active_adapter<line_sep>self.assertEqual('default' 
self.replication_manager.active_backend_id)<line_sep>self.replication_manager.failover_service('secondary_unity')<line_sep>self.assertEqual('secondary_unity' self.replication_manager.active_backend_id)<block_end><block_end>
|
# coding: UTF-8
<import_from_stmt>math asin atan2 cos sin<import_stmt>numpy<as>np<def_stmt>isRotationMatrix R<block_start>''' checks whether a matrix is a valid rotation matrix (i.e. orthogonal)
'''<line_sep>Rt=np.transpose(R)<line_sep>shouldBeIdentity=np.dot(Rt R)<line_sep>I=np.identity(3 dtype=R.dtype)<line_sep>n=np.linalg.norm(I-shouldBeIdentity)<line_sep><return>n<l>1e-6<block_end><def_stmt>matrix2angle R<block_start>''' compute three Euler angles from a Rotation Matrix. Ref: http://www.gregslabaugh.net/publications/euler.pdf
Args:
R: (3,3). rotation matrix
Returns:
x: yaw
y: pitch
z: roll
'''<line_sep># assert(isRotationMatrix(R))
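# When R[2,0] is exactly +/-1, cos(x) is 0 and the yaw/roll terms cannot be separated (gimbal lock); that case is handled in the else branch below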
<if_stmt>R[2 0]<ne>1<and>R[2 0]<ne>-1<block_start>x=asin(R[2 0])<line_sep>y=atan2(R[2 1]/cos(x) R[2 2]/cos(x))<line_sep>z=atan2(R[1 0]/cos(x) R[0 0]/cos(x))<block_end><else_stmt># Gimbal lock
<block_start>z=0# can be anything
<if_stmt>R[2 0]<eq>-1<block_start>x=np.pi/2<line_sep>y=z+atan2(R[0 1] R[0 2])<block_end><else_stmt><block_start>x=-np.pi/2<line_sep>y=-z+atan2(-R[0 1] -R[0 2])<block_end><block_end><return>x y z<block_end><def_stmt>P2sRt P<block_start>''' decompose camera matrix P.
Args:
P: (3, 4). Affine Camera Matrix.
Returns:
s: scale factor.
R: (3, 3). rotation matrix.
t2d: (2,). 2d translation.
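(s is the mean norm of the first two rows of P[:, :3]; the third row of R is the cross product of the normalized first two rows)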
'''<line_sep>t2d=P[:2 3]<line_sep>R1=P[0:1 :3]<line_sep>R2=P[1:2 :3]<line_sep>s=(np.linalg.norm(R1)+np.linalg.norm(R2))/2.0<line_sep>r1=R1/np.linalg.norm(R1)<line_sep>r2=R2/np.linalg.norm(R2)<line_sep>r3=np.cross(r1 r2)<line_sep>R=np.concatenate((r1 r2 r3) 0)<line_sep><return>s R t2d<block_end><def_stmt>compute_similarity_transform points_static points_to_transform# http://nghiaho.com/?page_id=671
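# Kabsch-style alignment: center both point sets, take the SVD of their cross-covariance to recover the rotation, and use the ratio of RMS distances as the scale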
<block_start>p0=np.copy(points_static).T<line_sep>p1=np.copy(points_to_transform).T<line_sep>t0=-np.mean(p0 axis=1).reshape(3 1)<line_sep>t1=-np.mean(p1 axis=1).reshape(3 1)<line_sep>t_final=t1-t0<line_sep>p0c=p0+t0<line_sep>p1c=p1+t1<line_sep>covariance_matrix=p0c.dot(p1c.T)<line_sep>U,S,V=np.linalg.svd(covariance_matrix)<line_sep>R=U.dot(V)<if_stmt>np.linalg.det(R)<l>0<block_start>R[: 2]<augmul>-1<block_end>rms_d0=np.sqrt(np.mean(np.linalg.norm(p0c axis=0)<power>2))<line_sep>rms_d1=np.sqrt(np.mean(np.linalg.norm(p1c axis=0)<power>2))<line_sep>s=(rms_d0/rms_d1)<line_sep>P=np.c_[s<times>np.eye(3).dot(R) t_final]<line_sep><return>P<block_end><def_stmt>estimate_pose vertices<block_start>canonical_vertices=np.load('face/utils/uv_data/canonical_vertices.npy')<line_sep>P=compute_similarity_transform(vertices canonical_vertices)<line_sep>_,R,_=P2sRt(P)# decompose affine matrix to s, R, t
pose=matrix2angle(R)<line_sep><return>P pose<block_end>
|
"""Add TestGroup and TestSuite
Revision ID: 5177cfff57d7
Revises: <PASSWORD>
Create Date: 2013-11-04 12:42:37.249656
"""<import_from_future_stmt> absolute_import print_function<line_sep># revision identifiers, used by Alembic.
revision='5<PASSWORD>c<PASSWORD>'<line_sep>down_revision='<PASSWORD>'<import_from_stmt>alembic op<import_from_stmt>datetime datetime<import_from_stmt>hashlib sha1<import_from_stmt>sqlalchemy.sql table<import_from_stmt>uuid uuid4<import_stmt>sqlalchemy<as>sa<def_stmt>upgrade <block_start><import_from_stmt>changes.constants Result<line_sep>testsuites_table=table('testsuite' sa.Column('id' sa.GUID() nullable=<false>) sa.Column('build_id' sa.GUID() nullable=<false>) sa.Column('project_id' sa.GUID() nullable=<false>) sa.Column('name_sha' sa.String(length=40) nullable=<false>) sa.Column('name' sa.Text() nullable=<true>) sa.Column('date_created' sa.DateTime() nullable=<true>) )<line_sep>testgroups_table=table('testgroup' sa.Column('id' sa.GUID() nullable=<false>) sa.Column('build_id' sa.GUID() nullable=<false>) sa.Column('project_id' sa.GUID() nullable=<false>) sa.Column('name_sha' sa.String(length=40) nullable=<false>) sa.Column('duration' sa.Integer() nullable=<true>) sa.Column('num_tests' sa.Integer() nullable=<true>) sa.Column('num_failed' sa.Integer() nullable=<true>) sa.Column('name' sa.Text() nullable=<true>) sa.Column('date_created' sa.DateTime() nullable=<true>) )<line_sep>testgroups_m2m_table=table('testgroup_test' sa.Column('group_id' sa.GUID() nullable=<false>) sa.Column('test_id' sa.GUID() nullable=<false>) )<line_sep>testcases_table=table('test' sa.Column('id' sa.GUID() nullable=<false>) sa.Column('build_id' sa.GUID() nullable=<false>) sa.Column('project_id' sa.GUID() nullable=<false>) sa.Column('package' sa.Text() nullable=<true>) sa.Column('name' sa.Text() nullable=<true>) sa.Column('group' sa.Text() nullable=<true>) sa.Column('suite_id' sa.GUID() nullable=<true>) sa.Column('duration' sa.Integer() nullable=<true>) sa.Column('result' sa.Enum() nullable=<true>) )<line_sep>connection=op.get_bind()<line_sep>### commands auto generated by Alembic - please adjust! ###
op.create_table('testsuite' sa.Column('id' sa.GUID() nullable=<false>) sa.Column('build_id' sa.GUID() nullable=<false>) sa.Column('project_id' sa.GUID() nullable=<false>) sa.Column('name_sha' sa.String(length=40) nullable=<false>) sa.Column('name' sa.Text() nullable=<true>) sa.Column('date_created' sa.DateTime() nullable=<true>) sa.ForeignKeyConstraint(['build_id'] ['build.id'] ) sa.ForeignKeyConstraint(['project_id'] ['project.id'] ) sa.PrimaryKeyConstraint('id') sa.UniqueConstraint('build_id' 'name_sha' name='_suite_key'))<line_sep>op.create_table('testgroup' sa.Column('id' sa.GUID() nullable=<false>) sa.Column('build_id' sa.GUID() nullable=<false>) sa.Column('project_id' sa.GUID() nullable=<false>) sa.Column('suite_id' sa.GUID() nullable=<true>) sa.Column('parent_id' sa.GUID() nullable=<true>) sa.Column('name_sha' sa.String(length=40) nullable=<false>) sa.Column('name' sa.Text() nullable=<true>) sa.Column('duration' sa.Integer() default=0 nullable=<true>) sa.Column('num_tests' sa.Integer() default=0 nullable=<true>) sa.Column('num_failed' sa.Integer() default=0 nullable=<true>) sa.Column('data' sa.JSONEncodedDict() nullable=<true>) sa.Column('date_created' sa.DateTime() nullable=<true>) sa.ForeignKeyConstraint(['build_id'] ['build.id'] ) sa.ForeignKeyConstraint(['parent_id'] ['testgroup.id'] ) sa.ForeignKeyConstraint(['project_id'] ['project.id'] ) sa.ForeignKeyConstraint(['suite_id'] ['testsuite.id'] ) sa.PrimaryKeyConstraint('id') sa.UniqueConstraint('build_id' 'suite_id' 'name_sha' name='_group_key'))<line_sep>op.create_table('testgroup_test' sa.Column('group_id' sa.GUID() nullable=<false>) sa.Column('test_id' sa.GUID() nullable=<false>) sa.ForeignKeyConstraint(['group_id'] ['testgroup.id'] ) sa.ForeignKeyConstraint(['test_id'] ['test.id'] ) sa.PrimaryKeyConstraint('group_id' 'test_id'))<line_sep>op.add_column(u'test' sa.Column('suite_id' sa.GUID() nullable=<true>))<line_sep># perform data migrations
<for_stmt>testcase connection.execute(testcases_table.select())# migrate group to suite
<block_start>print("Migrating TestCase %s"%(testcase.id ))<line_sep>suite_name=testcase.group<or>'default'<line_sep>suite_sha=sha1(suite_name).hexdigest()<line_sep>result=connection.execute(testsuites_table.select().where(sa.and_(testsuites_table.c.build_id<eq>testcase.build_id testsuites_table.c.name_sha<eq>suite_sha )).limit(1)).fetchone()<if_stmt><not>result<block_start>suite_id=uuid4()<line_sep>connection.execute(testsuites_table.insert().values(id=suite_id build_id=testcase.build_id project_id=testcase.project_id name=suite_name name_sha=suite_sha date_created=datetime.utcnow() ))<block_end><else_stmt><block_start>suite_id=result[0]<block_end>connection.execute(testcases_table.update().where(testcases_table.c.id<eq>testcase.id ).values({testcases_table.c.suite_id:suite_id }))<line_sep># add package as group
group_name=testcase.package<or>testcase.name.rsplit('.' 1)[1]<line_sep>group_sha=sha1(group_name).hexdigest()<line_sep>result=connection.execute(testgroups_table.select().where(sa.and_(testgroups_table.c.build_id<eq>testcase.build_id testgroups_table.c.name_sha<eq>group_sha )).limit(1)).fetchone()<if_stmt><not>result<block_start>group_id=uuid4()<line_sep>connection.execute(testgroups_table.insert().values(id=group_id build_id=testcase.build_id project_id=testcase.project_id name=group_name name_sha=group_sha date_created=datetime.utcnow() duration=0 num_tests=0 num_failed=0 ))<block_end><else_stmt><block_start>group_id=result[0]<block_end>update_values={testgroups_table.c.num_tests:testgroups_table.c.num_tests+1 testgroups_table.c.duration:testgroups_table.c.duration+testcase.duration }<if_stmt>testcase.result<eq>Result.failed.value<block_start>update_values[testgroups_table.c.num_failed]=testgroups_table.c.num_failed+1<block_end>connection.execute(testgroups_m2m_table.insert().values({testgroups_m2m_table.c.group_id:group_id testgroups_m2m_table.c.test_id:testcase.id }))<line_sep>connection.execute(testgroups_table.update().where(testgroups_table.c.id<eq>group_id ).values(update_values))<block_end>op.drop_column(u'test' u'group')<line_sep>op.drop_column(u'test' u'group_sha')<line_sep>### end Alembic commands ###
<block_end><def_stmt>downgrade ### commands auto generated by Alembic - please adjust! ###
<block_start>op.add_column(u'test' sa.Column(u'group_sha' sa.VARCHAR(length=40) nullable=<true>))<line_sep>op.add_column(u'test' sa.Column(u'group' sa.TEXT() nullable=<true>))<line_sep>op.drop_column(u'test' 'suite_id')<line_sep>op.drop_table('testgroup_test')<line_sep>op.drop_table('testgroup')<line_sep>op.drop_table('testsuite')<line_sep>### end Alembic commands ###
<block_end>
|
<import_stmt>networkx.algorithms.tests.test_threshold<import_stmt>pytest<import_from_stmt>graphscope.nx.utils.compat import_as_graphscope_nx<import_from_stmt>graphscope.nx.utils.compat with_graphscope_nx_context<line_sep>import_as_graphscope_nx(networkx.algorithms.tests.test_threshold decorators=pytest.mark.usefixtures("graphscope_session"))<import_from_stmt>networkx.algorithms.tests.test_threshold TestGeneratorThreshold<line_sep>@pytest.mark.usefixtures("graphscope_session")@with_graphscope_nx_context(TestGeneratorThreshold)<class_stmt>TestGeneratorThreshold<block_start><def_stmt>test_eigenvectors self<block_start>np=pytest.importorskip('numpy')<line_sep>eigenval=np.linalg.eigvals<line_sep>scipy=pytest.importorskip('scipy')<line_sep>cs='ddiiddid'<line_sep>G=nxt.threshold_graph(cs)<line_sep>(tgeval tgevec)=nxt.eigenvectors(cs)<line_sep>dot=np.dot<assert_stmt>[abs(dot(lv lv)-1.0)<l>1e-9<for>lv tgevec]<eq>[<true>]<times>8<block_end><def_stmt>test_create_using self<block_start>cs='ddiiddid'<line_sep>G=nxt.threshold_graph(cs)<assert_stmt>pytest.raises(nx.exception.NetworkXError nxt.threshold_graph cs create_using=nx.DiGraph())<block_end><block_end>
|
<import_stmt>logging<import_from_stmt>pajbot.managers.db DBManager<import_from_stmt>pajbot.models.command Command<import_from_stmt>pajbot.models.command CommandExample<import_from_stmt>pajbot.models.user User<import_from_stmt>pajbot.modules BaseModule<import_from_stmt>pajbot.modules ModuleType<import_from_stmt>pajbot.modules.basic BasicCommandsModule<line_sep>log=logging.getLogger(__name__)<class_stmt>IgnoreModule(BaseModule)<block_start>ID=__name__.split(".")[-1]<line_sep>NAME="Ignore"<line_sep>DESCRIPTION="Ignore all commands from a user"<line_sep>CATEGORY="Feature"<line_sep>ENABLED_DEFAULT=<true><line_sep>MODULE_TYPE=ModuleType.TYPE_ALWAYS_ENABLED<line_sep>PARENT_MODULE=BasicCommandsModule<line_sep>@staticmethod<def_stmt>ignore_command bot source message **rest<block_start><if_stmt><not>message<block_start><return><false><block_end><with_stmt>DBManager.create_session_scope()<as>db_session<block_start>username=message.split(" ")[0]<line_sep>user=User.find_by_user_input(db_session username)<if_stmt>user<eq>source<block_start>bot.whisper(source "You cannot ignore yourself")<line_sep><return><false><block_end><block_end><with_stmt>DBManager.create_session_scope()<as>db_session<block_start>user=User.find_by_user_input(db_session username)<if_stmt>user<is><none><block_start>bot.whisper(source "No user with that name found.")<line_sep><return><false><block_end><if_stmt>user.ignored<block_start>bot.whisper(source "User is already ignored.")<line_sep><return><false><block_end>user.ignored=<true><line_sep>bot.whisper(source f"Now ignoring {user}")<block_end><block_end>@staticmethod<def_stmt>unignore_command bot source message **rest<block_start><if_stmt><not>message<block_start><return><block_end>username=message.split(" ")[0]<with_stmt>DBManager.create_session_scope()<as>db_session<block_start>user=User.find_by_user_input(db_session username)<if_stmt><not>user<block_start>bot.whisper(source "No user with that name found.")<line_sep><return><false><block_end><if_stmt>user.ignored<is><false><block_start>bot.whisper(source "User is not ignored.")<line_sep><return><false><block_end>user.ignored=<false><line_sep>bot.whisper(source f"No longer ignoring {user}")<block_end><block_end><def_stmt>load_commands self **options<block_start>self.commands["ignore"]=Command.raw_command(self.ignore_command level=1000 description="Ignore a user, which means he can't run any commands" examples=[CommandExample(<none> "Default usage" chat="user:!ignore Karl_Kons\n"<concat>"bot>user:Now ignoring Karl_Kons" description="Ignore user Karl_Kons" ).parse()] )<line_sep>self.commands["unignore"]=Command.raw_command(self.unignore_command level=1000 description="Unignore a user" examples=[CommandExample(<none> "Default usage" chat="user:!unignore Karl_Kons\n"<concat>"bot>user:No longer ignoring Karl_Kons" description="Unignore user Karl_Kons" ).parse()] )<block_end><block_end>
|
port="COM3"<line_sep>#
<if_stmt>('virtual'<in>globals()<and>virtual)<block_start>virtualArduino=Runtime.start("virtualArduino" "VirtualArduino")<line_sep>virtualArduino.connect(port)<block_end>ard=Runtime.createAndStart("Arduino" "Arduino")<line_sep>ard.connect(port)<line_sep>#
i2cmux=Runtime.createAndStart("i2cMux" "I2cMux")<line_sep># From version 1.0.2316 use attach instead of setController
# i2cmux.setController(ard,"1","0x70")
i2cmux.attach(ard "1" "0x70")<line_sep>#
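# Attach two MPU6050 IMUs behind the multiplexer: both respond at I2C address 0x68, one on mux channel 0 and one on channel 1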
mpu6050_0=Runtime.createAndStart("Mpu6050-0" "Mpu6050")<line_sep>mpu6050_0.attach(i2cmux "0" "0x68")<line_sep>mpu6050_1=Runtime.createAndStart("Mpu6050-1" "Mpu6050")<line_sep>mpu6050_1.attach(i2cmux "1" "0x68")<line_sep>
|
<import_stmt>sublime<import_stmt>time<import_stmt>pprint<import_stmt>os<import_stmt>csv<import_stmt>json<import_stmt>datetime<import_from_stmt>xml.sax.saxutils unescape quoteattr<import_from_stmt>.. xmltodict<import_from_stmt>..soap SOAP<import_from_stmt>..login soap_login rest_login<import_from_stmt>... requests util<import_from_stmt>..lib.panel Printer<class_stmt>MetadataApi<block_start><def_stmt>__init__ self settings **kwargs<block_start>self.settings=settings<line_sep>self.api_version=settings["api_version"]<line_sep>self.deploy_options=settings["deploy_options"]<line_sep>self.soap=SOAP(settings)<line_sep>self.session=<none><line_sep>self.result=<none><block_end><def_stmt>login self session_id_expired=<false><block_start>""" Login with default project credentials
Arguments:
* session_id_expired -- Optional; set this flag when the session cached in .config/session.json has expired,
i.e. when a request made with the cached session returns INVALID_SESSION_ID;
calling this method with the flag set forces a fresh login instead of reusing the cached session
Returns:
* result -- the session info; if `output_session_info` is True in the plugin settings,
the session info is also written to the console
"""<if_stmt>self.settings["login_type"]<eq>"REST"<block_start>result=rest_login(self.settings session_id_expired)<block_end><else_stmt><block_start>result=soap_login(self.settings session_id_expired)<block_end><if_stmt><not>result["success"]<block_start>self.result=result<line_sep><return>self.result<block_end>self.metadata_url=result["instance_url"]+"/services/Soap/m/%s.0"%self.api_version<line_sep>self.headers={"Content-Type":"text/xml;charset=UTF-8" "SOAPAction":'""'}<line_sep>self.result=result<line_sep><return>result<block_end><def_stmt>_invoke_method self _method options={}<block_start>""" Support below methods:
* describeMetadata
* readMetadata
* renameMetadata
* deleteMetadata
* cancelDeployment
"""<line_sep>result=self.login()<if_stmt><not>result["success"]<block_start>self.result=result<line_sep><return>self.result<block_end># Build soap_body
body=self.soap.create_request(_method options)<try_stmt><block_start>response=requests.post(self.metadata_url body verify=<false> headers=self.headers)<block_end><except_stmt>requests.exceptions.RequestException<as>e<block_start>self.result={"Error Message":"Connection Timeout" "success":<false>}<line_sep><return>self.result<block_end># If status_code is > 399, which means it has error
<if_stmt>response.status_code<g>399# If session is invalid, force to login and execute this again
<block_start><if_stmt>"INVALID_SESSION_ID"<in>response.text<block_start>Printer.get("log").write("Session is expired, need login again")<line_sep>self.login(<true>)<line_sep><return>self._invoke_method(_method options)<block_end>self.result=util.get_response_error(response)<line_sep><return>self.result<block_end>result=xmltodict.parse(response.content)<line_sep>self.result=result["soapenv:Envelope"]["soapenv:Body"][_method+"Response"]["result"]<line_sep>self.result["success"]=<true><line_sep><return>self.result<block_end><def_stmt>check_status self async_process_id timeout=120<block_start>""" Ensure the retrieve request is done and then we can
continue with other work
Arguments:
* async_process_id -- the asyncProcessId returned by the retrieve request
"""<line_sep># Headers and Body
headers=self.headers.copy()<line_sep>headers["Accept-Encoding"]="identity, deflate, compress, gzip"<line_sep>soap_body=self.soap.create_request('check_status' {"async_process_id":async_process_id})<try_stmt><block_start>session=requests.Session()<line_sep>adapter=requests.adapters.HTTPAdapter(max_retries=10)<line_sep>session.mount(self.metadata_url adapter)<line_sep>response=session.post(self.metadata_url soap_body verify=<false> headers=headers timeout=timeout)<block_end><except_stmt>requests.exceptions.RequestException<as>e<block_start>self.result={"Error Message":"Network connection timeout when checking status for retrieve" "success":<false>}<line_sep><return>self.result<block_end># If status_code is > 399, which means it has error
<if_stmt>response.status_code<g>399<block_start>self.result=util.get_response_error(response)<line_sep><return>self.result<block_end>result=xmltodict.parse(response.content)<line_sep>self.result=result["soapenv:Envelope"]["soapenv:Body"]["checkStatusResponse"]["result"]<line_sep>self.result["success"]=<true><line_sep><return>self.result<block_end><def_stmt>check_retrieve_status self async_process_id<block_start>""" After async process is done, post a checkRetrieveStatus to
obtain the zipFile(base64)
Arguments:
* async_process_id -- asyncProcessId of retrieve request
"""<line_sep>headers=self.headers.copy()<line_sep>headers["Accept-Encoding"]="identity, deflate, compress, gzip"<line_sep>soap_body=self.soap.create_request('check_retrieve_status' {"async_process_id":async_process_id})<try_stmt><block_start>session=requests.Session()<line_sep>adapter=requests.adapters.HTTPAdapter(max_retries=10)<line_sep>session.mount(self.metadata_url adapter)<line_sep>response=session.post(self.metadata_url soap_body verify=<false> headers=headers timeout=120)<block_end><except_stmt>requests.exceptions.RequestException<as>e<block_start>self.result={"Error Message":"Network connection timeout when checking retrieve status" "success":<false>}<line_sep><return>self.result<block_end># If status_code is > 399, which means it has error
<if_stmt>response.status_code<g>399<block_start>self.result=util.get_response_error(response)<line_sep><return>self.result<block_end>result=xmltodict.parse(response.content)<line_sep>result=result["soapenv:Envelope"]["soapenv:Body"]["checkRetrieveStatusResponse"]["result"]<line_sep>result["success"]=response.status_code<l>399<line_sep><return>result<block_end><def_stmt>retrieve self options timeout=120<block_start>""" 1. Issue a retrieve request to start the asynchronous retrieval and asyncProcessId is returned
2. Issue a checkRetrieveStatus request to check whether the async process job is completed.
3. After the job is completed, you will get the zipFile(base64)
4. Use the Python base64 library to decode the base64 string into a zip file.
5. Use the Python zipfile library to extract the zip archive to the target path.
Arguments:
* options -- {"types" : types_dict, "package_names": package_names}
"""<line_sep>result=self.login()<if_stmt><not>result<or><not>result["success"]<block_start>self.result=result<line_sep><return>self.result<block_end># Log the StartTime
start_time=datetime.datetime.now()<line_sep># Write a separate line
Printer.get('log').write_start()<line_sep># Before building soap_body, check whether each requested type supports the wildcard (*);
# if a type does not, list its package members explicitly
list_package_for_all=<false><if_stmt>"list_package_for_all"<in>options<and>options["list_package_for_all"]<block_start>list_package_for_all=<true><block_end>records=self.prepare_members(options["types"] list_package_for_all)<line_sep># Add types for options
_types={}<for_stmt>k,values records.items()<block_start><if_stmt>"*"<in>values<block_start>_types[k]=values<block_end><else_stmt><block_start>members=[]<for_stmt>v values<block_start><if_stmt>isinstance(v str)<block_start>members.append(v)<block_end><elif_stmt>isinstance(v dict)<block_start>members.append(v["fullName"])<block_end><block_end>_types[k]=members<block_end><block_end>options["types"]=_types<line_sep># [sf:retrieve]
Printer.get('log').write("[sf:retrieve] Start request for a retrieve...")<line_sep># Build soap_body
soap_body=self.soap.create_request('retrieve' options)<line_sep># Post retrieve request
<try_stmt><block_start>response=requests.post(self.metadata_url soap_body verify=<false> headers=self.headers timeout=120)<block_end><except_stmt>requests.exceptions.RequestException<as>e<block_start>self.result={"Error Message":"Network connection timeout when issuing retrieve request" "success":<false>}<line_sep><return>self.result<block_end># Check whether session_id is expired
<if_stmt>"INVALID_SESSION_ID"<in>response.text<block_start>Printer.get('log').write("[sf:retrieve] Session expired, need login again")<line_sep>result=self.login(<true>)<if_stmt><not>result["success"]<block_start>self.result=result<line_sep><return>self.result<block_end><return>self.retrieve(options)<block_end># If status_code is > 399, which means it has error
<if_stmt>response.status_code<g>399<block_start>self.result=util.get_response_error(response)<line_sep><return>self.result<block_end># [sf:retrieve]
Printer.get('log').write("[sf:retrieve] Request for a retrieve submitted successfully.")<line_sep># Get async process id
async_process_id=util.getUniqueElementValueFromXmlString(response.content "id")<line_sep># [sf:retrieve]
Printer.get('log').write("[sf:retrieve] Request ID for the current retrieve task: "+async_process_id)<line_sep>Printer.get('log').write("[sf:retrieve] Waiting for server to finish processing the request...")<line_sep># Check status until retrieve request is finished
done="false"<while_stmt>done<eq>"false"# Issue a check_status request to retrieve retrieve result
# Since version 31 before, we need to invoke check_status before check_retrieve_status
<block_start><if_stmt>self.settings["api_version"]<ge>31<block_start>result=self.check_retrieve_status(async_process_id)<block_end><else_stmt><block_start>result=self.check_status(async_process_id)<block_end># Catch exception of status retrieving
<if_stmt><not>result<or><not>result["success"]<block_start>self.result=result<line_sep><return>self.result<block_end>status=result["state"]<if>self.api_version<l>31<else>result["status"]<line_sep>done=result["done"]<line_sep># Display retrieve status in the output panel
Printer.get('log').write("[sf:retrieve] Request Status: %s"%status)<line_sep># Defer to issue request according to status
sleep_seconds=2<if>status<in>["Queued" "Pending" "Succeeded"]<else>self.settings["metadata_polling_frequency"]<line_sep>time.sleep(sleep_seconds)<block_end># If check status request failed, this will not be done
<if_stmt>status<eq>"Failed"<block_start>Printer.get('log').write("[sf:retrieve] Request Failed")# [sf:retrieve]
self.result={"success":<false> "Error Message":result["errorMessage"]}<line_sep><return>self.result<block_end># Since version 31, checkRetrieveStatus request is not required
<if_stmt>self.api_version<l>31<block_start>Printer.get('log').write("[sf:retrieve] Obtaining ZipFile...")<line_sep>result=self.check_retrieve_status(async_process_id)<line_sep># Catch exception of status retrieve
<if_stmt><not>result["success"]<block_start>self.result=result<line_sep><return>self.result<block_end><block_end># Output the message if have
<if_stmt>"messages"<in>result<block_start>messages=result["messages"]<if_stmt>isinstance(messages dict)<block_start>messages=[messages]<block_end><for_stmt>message messages<block_start>Printer.get('log').write("[sf:retrieve] %s - %s"%(message["fileName"] message["problem"]))<block_end><block_end># [sf:retrieve]
Printer.get('log').write("[sf:retrieve] Finished request %s successfully."%async_process_id)<line_sep># Build Successful
Printer.get('log').write("\n\nBUILD SUCCESSFUL" <false>)<line_sep># Total time
total_seconds=(datetime.datetime.now()-start_time).seconds<line_sep>Printer.get('log').write("Total time: %s seconds"%total_seconds <false>)<line_sep># print('meta retrive result', result)
self.result=result<block_end><def_stmt>prepare_members self _types list_package_for_all=<false><block_start><if_stmt><not>self.login()<block_start><return><block_end><if_stmt>list_package_for_all<block_start>Printer.get("log").write_start()<block_end># List package for metadata objects which 'inFolder' is true
# EmailFolder, DocumentFolder, DashboardFolder and ReportFolder
records=[]<for_stmt>_type _types<block_start><if_stmt>"*"<not><in>_types[_type]<block_start><continue><block_end><if_stmt>_type<in>self.settings["metadata_objects_in_folder"]# List package for ``suffix.capitalize() + 'Folder'``
<block_start>metadata_object=_type+"Folder"<if>_type<ne>"EmailTemplate"<else>"EmailFolder"<line_sep># Waiting message in output console
Printer.get("log").write("[sf:retrieve] List Folders for %s"%metadata_object)<line_sep># Collect all folders into records
folders=[]<line_sep>elements=[]<for_stmt>record self.list_package({metadata_object:[""]})<block_start>elements.append({"id":record["id"] "fullName":record["fullName"] "lastModifiedDate":record["lastModifiedDate"] "lastModifiedById":record["lastModifiedById"] "lastModifiedByName":record["lastModifiedByName"]})<line_sep>folders.append(record["fullName"])<block_end><for_stmt>_folders util.list_chunks(folders 3)<block_start>Printer.get("log").write("[sf:retrieve] Fetching component metadata for %s Folder: %s"%(_type ", ".join(_folders)))<line_sep># Add file in folders into retrieve list
<for_stmt>record self.list_package({_type:_folders})<block_start>detail={"id":record["id"] "fullName":record["fullName"] "lastModifiedDate":record["lastModifiedDate"] "lastModifiedById":record["lastModifiedById"] "lastModifiedByName":record["lastModifiedByName"]}<line_sep>elements.append(detail)<block_end><block_end>elements=sorted(elements key=<lambda>k:k['fullName'])<line_sep>_types[_type]=elements<block_end><block_end># In order to speed up retrieve request, we will not list package for them
# only when we want a full copy or need to build package.xml do we list_package for every type
# Note: CustomObject must be retrieved via a ``list_package`` request
# List package for metadata objects that support wildcard retrieve
_types_list=[]<line_sep># print("retrieve types: ", _types)
<if_stmt><not>list_package_for_all<block_start><if_stmt>"CustomObject"<in>_types<and>"*"<in>_types["CustomObject"]<block_start>_types_list.append("CustomObject")<block_end><if_stmt>"InstalledPackage"<in>_types<and>"*"<in>_types["InstalledPackage"]<block_start>_types_list.append("InstalledPackage")<block_end><block_end><else_stmt><block_start><for_stmt>_type _types<block_start><if_stmt>"*"<not><in>_types[_type]<block_start><continue><block_end><if_stmt>_type<not><in>self.settings["metadata_objects_in_folder"]<block_start>_types_list.append(_type)<block_end><block_end><block_end># Sort _types_list
_types_list=sorted(_types_list)<line_sep># Each list_package request supports at most three metadata types,
# so we need to split the list into smaller chunks
<for_stmt>_trunked_types_list util.list_chunks(_types_list 3)<block_start>_trunked_types={}<for_stmt>t _trunked_types_list<block_start>_trunked_types[t]=[""]<block_end># Define type_with_elements for keeping files for _trunked_types
type_with_elements={}<line_sep># list package for all non-folder metadata types
Printer.get("log").write("[sf:retrieve] Fetching component metadata for %s"%(", ".join(_trunked_types)))<for_stmt>record self.list_package(_trunked_types)<block_start>_type=record["type"]<line_sep># StandardValueSetTranslation doesn't have type?
<if_stmt>isinstance(record["type"] dict)<block_start>_type="StandardValueSetTranslation"<block_end>detail={"id":record.get("id" "") "fullName":record["fullName"] "lastModifiedDate":record["lastModifiedDate"] "lastModifiedById":record["lastModifiedById"] "lastModifiedByName":record["lastModifiedByName"]}<if_stmt>_type<not><in>type_with_elements<block_start>type_with_elements[_type]=[detail]<block_end><else_stmt><block_start>type_with_elements[_type].append(detail)<block_end><block_end># Order elements
<for_stmt>t type_with_elements<block_start>type_with_elements[t]=sorted(type_with_elements[t] key=<lambda>k:k['fullName'])<block_end># Update _types with result of list_package request
<for_stmt>_type _trunked_types<block_start><if_stmt>_type<in>type_with_elements<block_start>_types[_type]=type_with_elements[_type]<block_end><else_stmt><block_start>_types[_type]=[]<block_end><block_end><block_end># After reload is finished
<if_stmt>list_package_for_all<block_start>Printer.get("log").write("Project cache is saved to local .config/package.json")<block_end># Invoked by thread
self.result={"success":<true> "types":_types}<line_sep># Invoked by retrieve request
<return>_types<block_end><def_stmt>list_package self _types# Build soap_body
<block_start>soap_body=self.soap.create_request('list_package' {"types":_types})<try_stmt><block_start>response=requests.post(self.metadata_url soap_body verify=<false> headers=self.headers)<block_end><except_stmt>requests.exceptions.RequestException<as>e<block_start>Printer.get("log").write("Connection timeout when list package for %s"%(", ".join(list(_types.keys()))))<line_sep><return>[]<block_end># If status_code is > 399, which means it has error
<if_stmt>response.status_code<g>399<block_start><if_stmt>"INVALID_SESSION_ID"<in>response.text<block_start>Printer.get("log").write("Session is expired, need login again")<line_sep>self.login(<true>)<line_sep><return>self.list_package(_types)<block_end>result=util.get_response_error(response)<line_sep>Printer.get("log").write("Error happened when list package for %s, detail reason: %s"%(", ".join(list(_types.keys())) result.get("Error Message" "Unknown Reason")))<line_sep><return>[]<block_end>result=xmltodict.parse(response.content)<line_sep>result=result["soapenv:Envelope"]["soapenv:Body"]["listMetadataResponse"]<if_stmt><not>result<or>"result"<not><in>result<block_start><return>[]<block_end>result=result["result"]<if_stmt>isinstance(result dict)<block_start>result=[result]<block_end>self.result=result<line_sep><return>result<block_end><def_stmt>check_deploy_status self async_process_id<block_start>""" After async process is done, post a checkDeployResult to
get the deploy result
Arguments:
* async_process_id -- asyncProcessId returned by the deploy request
"""<line_sep>soap_body=self.soap.create_request('check_deploy_status' {"async_process_id":async_process_id})<line_sep>response=requests.post(self.metadata_url soap_body headers=self.headers verify=<false> timeout=120)<line_sep># If status_code is > 399, which means it has error
<if_stmt>response.status_code<g>399<block_start>self.result=util.get_response_error(response)<line_sep><return>self.result<block_end>result=xmltodict.parse(response.content)<try_stmt><block_start>header=<none><if_stmt>"soapenv:Header"<in>result["soapenv:Envelope"]<block_start>header=result["soapenv:Envelope"]["soapenv:Header"]["DebuggingInfo"]<block_end>result=result["soapenv:Envelope"]["soapenv:Body"]["checkDeployStatusResponse"]["result"]<line_sep>result={"success":<true> "header":header "body":result}<block_end><except_stmt>KeyError<as>ke<block_start>result={"Message":"Convert Xml to JSON Exception: "+str(ke) "success":<false>}<block_end><return>result<block_end><def_stmt>deploy self base64_zip test_classes=[]<block_start>""" Deploy zip file
Arguments:
* base64_zip -- base64-encoded zip file
"""<line_sep>result=self.login()<if_stmt><not>result<or><not>result["success"]<block_start><return><block_end># Log the StartTime
start_time=datetime.datetime.now()<line_sep># Populate the soap_body with actual options
# the deploy options were moved into class attributes for easier manipulation
# deploy_options = self.settings["deploy_options"]
# If checkOnly is set, label the run as VALIDATE; otherwise label it DEPLOY
deploy_or_validate="validate"<if>self.deploy_options["checkOnly"]<else>"deploy"<line_sep># [sf:deploy]
Printer.get('log').write_start().write("[sf:%s] Start request for a deploy..."%deploy_or_validate)<line_sep>options=self.deploy_options<line_sep>options["zipfile"]=base64_zip<line_sep># If testLevel is Run Specified Test,
# we need to specify the runTests
testLevel=options.get("testLevel" "NoTestRun")<if_stmt>testLevel<eq>"RunSpecifiedTests"<block_start>options["runTests"]="\n".join(["<met:runTests>%s</met:runTests>"%c<for>c test_classes])<block_end>soap_body=self.soap.create_request('deploy' options)<try_stmt><block_start>response=requests.post(self.metadata_url soap_body verify=<false> headers=self.headers)<block_end><except_stmt>requests.exceptions.RequestException<as>e<block_start>self.result={"Error Message":"Network connection timeout when issuing deploy request" "success":<false>}<line_sep><return>self.result<block_end># Check whether session_id is expired
<if_stmt>"INVALID_SESSION_ID"<in>response.text<block_start>Printer.get('log').write("[sf:%s] Session expired, need login again"%deploy_or_validate)<line_sep>result=self.login(<true>)<if_stmt><not>result["success"]<block_start>self.result=result<line_sep><return>self.result<block_end><return>self.deploy(base64_zip)<block_end># If status_code is > 399, which means it has error
<if_stmt>response.status_code<g>399<block_start>self.result=util.get_response_error(response)<line_sep><return>self.result<block_end># [sf:deploy]
Printer.get('log').write("[sf:%s] Request for a deploy submitted successfully."%deploy_or_validate)<line_sep># Get async process id
async_process_id=util.getUniqueElementValueFromXmlString(response.content "id")<line_sep># [sf:deploy]
Printer.get('log').write("[sf:%s] Request ID for the current deploy task: %s"%(deploy_or_validate async_process_id))<line_sep>Printer.get('log').write("[sf:%s] Waiting for server to finish processing the request..."%deploy_or_validate)<line_sep># 2. issue a check status loop request to assure the async
# process is done
result=self.check_deploy_status(async_process_id)<line_sep>body=result["body"]<line_sep>index=1<line_sep>failure_dict={}<while_stmt>body["status"]<in>["Pending" "InProgress" "Canceling"]<block_start><if_stmt>"stateDetail"<in>body<block_start><if_stmt>int(body["numberComponentsDeployed"])<l>int(body["numberComponentsTotal"])<block_start>Printer.get('log').write("[sf:%s] Request Status: %s (%s/%s) -- %s"%(deploy_or_validate body["status"] body["numberComponentsDeployed"] body["numberComponentsTotal"] body["stateDetail"]))<block_end><else_stmt><block_start>Printer.get('log').write("[sf:%s] TestRun Status: %s (%s/%s) -- %s"%(deploy_or_validate body["status"] body["numberTestsCompleted"] body["numberTestsTotal"] body["stateDetail"]))<block_end><block_end><else_stmt><block_start>Printer.get('log').write("[sf:%s] Request Status: %s"%(deploy_or_validate body["status"]))<block_end># Process Test Run Result
<if_stmt>"runTestResult"<in>body["details"]<and>"failures"<in>body["details"]["runTestResult"]<block_start>failures=body["details"]["runTestResult"]["failures"]<if_stmt>isinstance(failures dict)<block_start><if_stmt>failures["id"]<not><in>failure_dict<block_start>failure_dict[failures["id"]]=failures<line_sep>Printer.get('log').write("-"<times>84).write("Test Failures: ")<line_sep>Printer.get('log').write("%s.\t%s"%(index failures["message"]))<for_stmt>msg failures["stackTrace"].split("\n")<block_start>Printer.get('log').write("\t%s"%msg)<block_end># [sf:deploy]
Printer.get('log').write("-"<times>84)<line_sep>index<augadd>1<block_end><block_end><elif_stmt>isinstance(failures list)<block_start><for_stmt>f failures<block_start><if_stmt>f["id"]<not><in>failure_dict<block_start>failure_dict[f["id"]]=f<line_sep>Printer.get('log').write("-"<times>84).write("Test Failures: ")<line_sep>Printer.get('log').write("%s.\t%s"%(index f["message"]))<line_sep># If it is a compile error, there will be no stack trace
<if_stmt>isinstance(f["stackTrace"] str)<block_start><for_stmt>msg f["stackTrace"].split("\n")<block_start>Printer.get('log').write("\t%s"%msg)<block_end>Printer.get('log').write("-"<times>84)<block_end>index<augadd>1<block_end><block_end><block_end><block_end># Thread Wait
sleep_seconds=2<if>body["status"]<eq>"Pending"<else>self.settings["metadata_polling_frequency"]<line_sep>time.sleep(sleep_seconds)<line_sep>result=self.check_deploy_status(async_process_id)<line_sep>body=result["body"]<block_end># Check if job is canceled
<if_stmt>body["status"]<eq>"Canceled"<block_start>Printer.get('log').write("\nBUILD FAILED" <false>)<line_sep>Printer.get('log').write("*********** DEPLOYMENT FAILED ***********" <false>)<line_sep>Printer.get('log').write("Request ID: %s"%async_process_id <false>)<line_sep>Printer.get('log').write("\nRequest Canceled" <false>)<line_sep>Printer.get('log').write("*********** DEPLOYMENT FAILED ***********" <false>)<block_end># If check status request failed, this will not be done
<elif_stmt>body["status"]<eq>"Failed"# Append failure message
<block_start>Printer.get('log').write("[sf:%s] Request Failed\n\nBUILD FAILED"%deploy_or_validate)<line_sep>Printer.get('log').write("*********** DEPLOYMENT FAILED ***********" <false>)<line_sep>Printer.get('log').write("Request ID: %s"%async_process_id <false>)<line_sep># Output Failure Details
failures_messages=[]<if_stmt>"componentFailures"<in>body["details"]<block_start>component_failures=body["details"]["componentFailures"]<if_stmt>isinstance(component_failures dict)<block_start>component_failures=[component_failures]<block_end><for_stmt>index range(len(component_failures))<block_start>component_failure=component_failures[index]<line_sep>failures_messages.append("%s. %s -- %s: %s (line %s column %s)"%(index+1 component_failure["fileName"] component_failure["problemType"] component_failure["problem"] component_failure["lineNumber"]<if>"lineNumber"<in>component_failure<else>"N/A" component_failure["columnNumber"]<if>"columnNumber"<in>component_failure<else>"N/A"))<block_end><block_end><elif_stmt>"runTestResult"<in>body["details"]<block_start>failures=body["details"]["runTestResult"].get("failures" [])<if_stmt>isinstance(failures dict)<block_start>failures=[failures]<block_end><for_stmt>index range(len(failures))<block_start>failure=failures[index]<line_sep>failures_messages.append("%s. %s -- %s: %s"%(index+1 failure.get("type") failure.get("name") failure.get("message")))<block_end><block_end># Unknown exception printer
<if_stmt>"errorMessage"<in>body<block_start>Printer.get('log').write("\n"+body["errorMessage"] <false>)<block_end>warning_messages=[]<if_stmt>"runTestResult"<in>body["details"]<block_start>runTestResult=body["details"]["runTestResult"]<if_stmt>"codeCoverageWarnings"<in>runTestResult<block_start>coverage_warnings=runTestResult["codeCoverageWarnings"]<if_stmt>isinstance(runTestResult["codeCoverageWarnings"] dict)<block_start>coverage_warnings=[coverage_warnings]<block_end><elif_stmt>isinstance(runTestResult["codeCoverageWarnings"] list)<block_start>coverage_warnings=coverage_warnings<block_end><for_stmt>warn coverage_warnings<block_start><if_stmt><not>isinstance(warn["name"] str)<block_start><continue><block_end>warning_messages.append("%s -- %s"%(warn["name"] warn["message"]))<block_end><block_end><block_end># Output failure message
<if_stmt>failures_messages<block_start>Printer.get('log').write("\n\nAll Component Failures:" <false>)<line_sep>Printer.get('log').write("\n"+"\n\n".join(failures_messages) <false>)<block_end># Output warning message
<if_stmt>warning_messages<block_start>Printer.get('log').write("\n\nTest Coverage Warnings:" <false>)<line_sep>Printer.get('log').write("\n"+"\n".join(warning_messages) <false>)<block_end># End for Deploy Result
Printer.get('log').write("\n*********** %s FAILED ***********"%(deploy_or_validate.upper()) <false>)<block_end><else_stmt># Append succeed message
<block_start>Printer.get('log').write("\n[sf:%s] Request Succeed"%deploy_or_validate <false>)<line_sep>Printer.get('log').write("[sf:%s] *********** %s SUCCEEDED ***********"%(deploy_or_validate deploy_or_validate.upper()) <false>)<line_sep>Printer.get('log').write("[sf:%s] Finished request %s successfully."%(deploy_or_validate async_process_id) <false>)<block_end># Total time
total_seconds=(datetime.datetime.now()-start_time).seconds<line_sep>Printer.get('log').write("\n\nTotal time: %s seconds"%total_seconds <false>)<line_sep># # Display debug log message in the new view
# view = sublime.active_window().new_file()
# view.run_command("new_view", {
# "name": "Debugging Information",
# "input": result.get("header", {}).get("debugLog", "")
# })
self.result=result<block_end><block_end>
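# A minimal sketch of the deploy options this method reads (illustrative assumption only;
# the real values come from self.settings["deploy_options"] and may contain further keys):
# deploy_options = {"checkOnly": False, "testLevel": "NoTestRun", "runTests": ""}
# With checkOnly=True the run is reported as VALIDATE instead of DEPLOY, and with
# testLevel="RunSpecifiedTests" the test_classes argument is rendered into <met:runTests> elements.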
|
"""
Looks for parameter values that are reflected in the response.
Author: <EMAIL>
The scan function will be called for each request/response made via ZAP, excluding those from some of the automated tools.
Passive scan rules should not make any requests.
Note that new passive scripts are initially disabled.
Right-click the script in the Scripts tree and select "enable" to activate it.
Refactored & Improved by nil0x42
"""<line_sep># Set to True if you want to see results on a per param basis
# (i.e.: A single URL may be listed more than once)
RESULT_PER_FINDING=<false><line_sep># Ignore parameters whose length is too short
MIN_PARAM_VALUE_LENGTH=8<def_stmt>scan ps msg src# Docs on alert raising function:
# raiseAlert(int risk, int confidence, str name, str description, str uri,
# str param, str attack, str otherInfo, str solution,
# str evidence, int cweId, int wascId, HttpMessage msg)
# risk: 0: info, 1: low, 2: medium, 3: high
# confidence: 0: falsePositive, 1: low, 2: medium, 3: high, 4: confirmed
<block_start>alert_title="Reflected HTTP GET parameter(s) (script)"<line_sep>alert_desc=("Reflected parameter value has been found. "<concat>"Reflected parameter values may introduce an XSS "<concat>"vulnerability or HTTP header injection.")<line_sep>uri=header=body=<none><line_sep>reflected_params=[]<for_stmt>param msg.getUrlParams()<block_start>value=param.getValue()<if_stmt>len(value)<l>MIN_PARAM_VALUE_LENGTH<block_start><continue><block_end><if_stmt><not>header<block_start>uri=msg.getRequestHeader().getURI().toString()<line_sep>header=msg.getResponseHeader().toString()<line_sep>body=msg.getResponseBody().toString()<block_end><if_stmt>value<in>header<or>value<in>body<block_start><if_stmt>RESULT_PER_FINDING<block_start>param_name=param.getName()<line_sep>ps.raiseAlert(0 2 alert_title alert_desc uri param_name <none> <none> <none> value 0 0 msg)<block_end><else_stmt><block_start>reflected_params.append(param.getName())<block_end><block_end><block_end><if_stmt>reflected_params<and><not>RESULT_PER_FINDING<block_start>reflected_params=u",".join(reflected_params)<line_sep>ps.raiseAlert(0 2 alert_title alert_desc uri reflected_params <none> <none> <none> <none> 0 0 msg)<block_end><block_end>
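# Illustrative example (not part of the original script): for a request such as
# GET /search?q=abcdefgh123, the value "abcdefgh123" is long enough to pass the
# MIN_PARAM_VALUE_LENGTH check; if it appears in the response headers or body, an
# informational alert with medium confidence is raised, either once per parameter
# (RESULT_PER_FINDING = True) or once per URL with the reflected parameter names joined by commas.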
|
# -*- coding: utf-8 -*-
<import_stmt>logging<import_from_stmt>apscheduler.schedulers.background BackgroundScheduler<import_from_stmt>examples.report_utils report_top_stats<import_from_stmt>zvt init_log<line_sep>logger=logging.getLogger(__name__)<line_sep>sched=BackgroundScheduler()<line_sep>@sched.scheduled_job("cron" hour=19 minute=30 day_of_week="mon-fri")<def_stmt>report_stats <block_start>report_top_stats(entity_type="stock" entity_provider="em" data_provider="em" periods=[7 30 180 365] ignore_new_stock=<true> adjust_type=<none> top_count=30 turnover_threshold=100000000 turnover_rate_threshold=0.02 em_group_over_write=<true> )<line_sep>report_top_stats(entity_type="stockhk" entity_provider="em" data_provider="em" top_count=30 periods=[7 30 180 365] ignore_new_stock=<true> adjust_type=<none> turnover_threshold=100000000 turnover_rate_threshold=0.005 em_group_over_write=<false> )<block_end><if_stmt>__name__<eq>"__main__"<block_start>init_log("report_stats.log")<line_sep>report_stats()<line_sep>sched.start()<line_sep>sched._thread.join()<block_end>
|
<import_from_stmt>.utils get_handlers<line_sep>
|
# Generated by Django 3.2.13 on 2022-05-26 17:16
<import_from_stmt>django.db migrations<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[("bookwyrm" "0148_alter_user_preferred_language") ("bookwyrm" "0148_merge_20220326_2006") ]<line_sep>operations=[]<block_end>
|
# Copyright 2016 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for grl_ops."""<line_sep>#from models.domain_adaptation.domain_separation import grl_op_grads # pylint: disable=unused-import
#from models.domain_adaptation.domain_separation import grl_op_shapes # pylint: disable=unused-import
<import_stmt>tensorflow<as>tf<import_stmt>grl_op_grads<import_stmt>grl_ops<line_sep>FLAGS=tf.app.flags.FLAGS<class_stmt>GRLOpsTest(tf.test.TestCase)<block_start><def_stmt>testGradientReversalOp self<block_start><with_stmt>tf.Graph().as_default()<block_start><with_stmt>self.test_session()# Test that in forward prop, gradient reversal op acts as the
# identity operation.
<block_start>examples=tf.constant([5.0 4.0 3.0 2.0 1.0])<line_sep>output=grl_ops.gradient_reversal(examples)<line_sep>expected_output=examples<line_sep>self.assertAllEqual(output.eval() expected_output.eval())<line_sep># Test that shape inference works as expected.
self.assertAllEqual(output.get_shape() expected_output.get_shape())<line_sep># Test that in backward prop, gradient reversal op multiplies
# gradients by -1.
examples=tf.constant([[1.0]])<line_sep>w=tf.get_variable(name='w' shape=[1 1])<line_sep>b=tf.get_variable(name='b' shape=[1])<line_sep>init_op=tf.global_variables_initializer()<line_sep>init_op.run()<line_sep>features=tf.nn.xw_plus_b(examples w b)<line_sep># Construct two outputs: features layer passes directly to output1, but
# features layer passes through a gradient reversal layer before
# reaching output2.
output1=features<line_sep>output2=grl_ops.gradient_reversal(features)<line_sep>gold=tf.constant([1.0])<line_sep>loss1=gold-output1<line_sep>loss2=gold-output2<line_sep>opt=tf.train.GradientDescentOptimizer(learning_rate=0.01)<line_sep>grads_and_vars_1=opt.compute_gradients(loss1 tf.trainable_variables())<line_sep>grads_and_vars_2=opt.compute_gradients(loss2 tf.trainable_variables())<line_sep>self.assertAllEqual(len(grads_and_vars_1) len(grads_and_vars_2))<for_stmt>i range(len(grads_and_vars_1))<block_start>g1=grads_and_vars_1[i][0]<line_sep>g2=grads_and_vars_2[i][0]<line_sep># Verify that gradients of loss1 are the negative of gradients of
# loss2.
self.assertAllEqual(tf.negative(g1).eval() g2.eval())<block_end><block_end><block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>tf.test.main()<block_end>
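# In short, the behaviour these tests assert: the forward pass is the identity (y = x),
# while the registered gradient is dL/dx = -dL/dy, so for the same loss the gradients that
# flow through the reversal layer are the exact negatives of those computed without it.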
|
"""Muffin Handlers."""<import_stmt>inspect<import_stmt>typing<as>t<import_from_stmt>http_router Router<import_from_stmt>http_router.typing TYPE_METHODS<import_from_stmt>asgi_tools Request<import_from_stmt>asgi_tools.app HTTPView HTTP_METHODS<import_from_stmt>asgi_tools.utils is_awaitable<class_stmt>HandlerMeta(type)<block_start>"""Prepare handlers."""<def_stmt>__new__ mcs name bases params<block_start>"""Prepare a Handler Class."""<line_sep>cls=super().__new__(mcs name bases params)<line_sep># Ensure that the class methods are exist and iterable
<if_stmt><not>cls.methods<block_start>cls.methods=set(method<for>method HTTP_METHODS<if>method.lower()<in>cls.__dict__)<block_end><elif_stmt>isinstance(cls.methods str)<block_start>cls.methods=[cls.methods]<block_end>cls.methods=set(method.upper()<for>method cls.methods)<for_stmt>m cls.methods<block_start>method=getattr(cls m.lower() <none>)<if_stmt>method<and><not>is_awaitable(method)<block_start><raise>TypeError(f"The method '{method.__qualname__}' has to be awaitable")<block_end><block_end><return>cls<block_end><block_end><def_stmt>route_method *paths:str **params<arrow>t.Callable<block_start>"""Mark a method as a route."""<def_stmt>wrapper method<block_start>"""Wrap a method."""<line_sep>method.__route__=paths params<line_sep><return>method<block_end><return>wrapper<block_end><class_stmt>Handler(HTTPView metaclass=HandlerMeta)<block_start>"""Class-based view pattern for handling HTTP method dispatching.
.. code-block:: python
@app.route('/hello', '/hello/{name}')
class HelloHandler(Handler):
async def get(self, request):
name = request.patch_params.get('name') or 'all'
return "GET: Hello f{name}"
async def post(self, request):
name = request.patch_params.get('name') or 'all'
return "POST: Hello f{name}"
@Handler.route('/hello/custom')
async def custom(self, request):
return 'Custom HELLO'
# ...
async def test_my_endpoint(client):
response = await client.get('/hello')
assert await response.text() == 'GET: Hello all'
response = await client.get('/hello/john')
assert await response.text() == 'GET: Hello john'
response = await client.post('/hello')
assert await response.text() == 'POST: Hello all'
response = await client.get('/hello/custom')
assert await response.text() == 'Custom HELLO'
response = await client.delete('/hello')
assert response.status_code == 405
"""<line_sep>methods:t.Optional[t.Sequence[str]]=<none><line_sep>@classmethod<def_stmt>__route__ cls router:Router *paths:str methods:TYPE_METHODS=<none> **params<block_start>"""Check for registered methods."""<line_sep>router.bind(cls *paths methods=methods<or>cls.methods **params)<for_stmt>_,method inspect.getmembers(cls <lambda>m:hasattr(m '__route__'))<block_start>cpaths,cparams=method.__route__<line_sep>router.bind(cls *cpaths __meth__=method.__name__ **cparams)<block_end><return>cls<block_end><def_stmt>__call__ self request:Request *args **opts<arrow>t.Awaitable<block_start>"""Dispatch the given request by HTTP method."""<line_sep>method=getattr(self opts.get('__meth__')<or>request.method.lower())<line_sep><return>method(request)<block_end>route=route_method<block_end>
|
#!/usr/bin/env python3.5
# -*- mode: python -*-
# =============================================================================
# @@-COPYRIGHT-START-@@
#
# Copyright (c) 2017-2018, Qualcomm Innovation Center, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# @@-COPYRIGHT-END-@@
# =============================================================================
<import_stmt>unittest<import_stmt>unittest.mock<import_from_stmt>unittest.mock create_autospec<import_from_stmt>decimal Decimal<import_stmt>math<import_stmt>os<import_stmt>signal<import_from_stmt>torch nn<import_stmt>torch.nn.functional<as>functional<import_stmt>libpymo<as>pymo<import_from_stmt>aimet_common.defs CostMetric LayerCompRatioPair<import_from_stmt>aimet_common.cost_calculator SpatialSvdCostCalculator WeightSvdCostCalculator<import_from_stmt>aimet_common comp_ratio_select<import_from_stmt>aimet_common.bokeh_plots BokehServerSession<import_from_stmt>aimet_common.bokeh_plots DataTable<import_from_stmt>aimet_common.bokeh_plots ProgressBar<import_from_stmt>aimet_common.utils start_bokeh_server_session<import_from_stmt>aimet_torch.utils create_rand_tensors_given_shapes<import_from_stmt>aimet_torch.examples mnist_torch_model<import_from_stmt>aimet_torch.layer_database Layer LayerDatabase<import_from_stmt>aimet_torch.svd.svd_pruner SpatialSvdPruner<import_from_stmt>aimet_torch pymo_utils<class_stmt>MnistModel(nn.Module)<block_start><def_stmt>__init__ self<block_start>super(MnistModel self).__init__()<line_sep>self.conv1=nn.Conv2d(1 32 kernel_size=5 padding=(2 2))<line_sep>self.conv2=nn.Conv2d(32 64 kernel_size=5 padding=(2 2))<line_sep>self.conv2_drop=nn.Dropout2d()<line_sep>self.fc1=nn.Linear(7<times>7<times>64 1024)<line_sep>self.fc2=nn.Linear(1024 10)<block_end><def_stmt>forward self x<block_start>x=functional.relu(functional.max_pool2d(self.conv1(x) 2))<line_sep>x=functional.relu(functional.max_pool2d(self.conv2_drop(self.conv2(x)) 2))<line_sep>x=x.view(-1 x.size(1)<times>x.size(2)<times>x.size(3))<line_sep>x=functional.relu(self.fc1(x))<line_sep>x=functional.dropout(x training=self.training)<line_sep>x=self.fc2(x)<line_sep><return>functional.log_softmax(x dim=1)<block_end><block_end><class_stmt>TestTrainingExtensionsCompRatioSelect(unittest.TestCase)<block_start><def_stmt>test_per_layer_eval_scores self<block_start>url,process=start_bokeh_server_session(8006)<line_sep>bokeh_session=BokehServerSession(url=url session_id="compression")<line_sep>pruner=unittest.mock.MagicMock()<line_sep>eval_func=unittest.mock.MagicMock()<line_sep>model=mnist_torch_model.Net().to('cpu')<line_sep>input_shape=(1 1 28 28)<line_sep>dummy_input=create_rand_tensors_given_shapes(input_shape)<line_sep>layer_db=LayerDatabase(model dummy_input)<line_sep>layer1=layer_db.find_layer_by_name('conv1')<line_sep>layer_db.mark_picked_layers([layer1])<line_sep>eval_func.side_effect=[90 80 70 60 50 40 30 20 10]<line_sep># Instantiate child
greedy_algo=comp_ratio_select.GreedyCompRatioSelectAlgo(layer_db pruner SpatialSvdCostCalculator() eval_func 20 CostMetric.mac 0.5 10 <true> <none> <none> <false> bokeh_session=<none>)<line_sep>progress_bar=ProgressBar(1 "eval scores" "green" bokeh_session=bokeh_session)<line_sep>data_table=DataTable(num_columns=3 num_rows=1 column_names=['0.1' '0.2' '0.3' '0.4' '0.5' '0.6' '0.7' '0.8' '0.9'] row_index_names=[layer1.name] bokeh_session=bokeh_session)<line_sep>pruner.prune_model.return_value=layer_db<line_sep>eval_dict=greedy_algo._compute_layerwise_eval_score_per_comp_ratio_candidate(data_table progress_bar layer1)<line_sep>self.assertEqual(90 eval_dict[Decimal('0.1')])<line_sep>bokeh_session.server_session.close("test complete")<line_sep>os.killpg(os.getpgid(process.pid) signal.SIGTERM)<block_end><def_stmt>test_eval_scores self<block_start>pruner=unittest.mock.MagicMock()<line_sep>eval_func=unittest.mock.MagicMock()<line_sep>eval_func.side_effect=[90 80 70 60 50 40 30 20 10 91 81 71 61 51 41 31 21 11]<line_sep>model=mnist_torch_model.Net().to('cpu')<line_sep>input_shape=(1 1 28 28)<line_sep>dummy_input=create_rand_tensors_given_shapes(input_shape)<line_sep>layer_db=LayerDatabase(model dummy_input)<line_sep>layer1=layer_db.find_layer_by_name('conv1')<line_sep>layer2=layer_db.find_layer_by_name('conv2')<line_sep>layer_db.mark_picked_layers([layer1 layer2])<line_sep># Instantiate child
greedy_algo=comp_ratio_select.GreedyCompRatioSelectAlgo(layer_db pruner SpatialSvdCostCalculator() eval_func 20 CostMetric.mac 0.5 10 <true> <none> <none> <false> bokeh_session=<none>)<line_sep>eval_dict=greedy_algo._compute_eval_scores_for_all_comp_ratio_candidates()<line_sep>self.assertEqual(50 eval_dict['conv1'][Decimal('0.5')])<line_sep>self.assertEqual(60 eval_dict['conv1'][Decimal('0.4')])<line_sep>self.assertEqual(11 eval_dict['conv2'][Decimal('0.9')])<block_end><def_stmt>test_eval_scores_with_spatial_svd_pruner self<block_start>pruner=SpatialSvdPruner()<line_sep>eval_func=unittest.mock.MagicMock()<line_sep>eval_func.side_effect=[90 80 70 60 50 40 30 20 10 91 81 71 61 51 41 31 21 11]<line_sep>model=mnist_torch_model.Net()<line_sep># Create a layer database
input_shape=(1 1 28 28)<line_sep>dummy_input=create_rand_tensors_given_shapes(input_shape)<line_sep>layer_db=LayerDatabase(model dummy_input)<line_sep>layer1=layer_db.find_layer_by_name('conv1')<line_sep>layer2=layer_db.find_layer_by_name('conv2')<line_sep>layer_db.mark_picked_layers([layer1 layer2])<line_sep># Instantiate child
greedy_algo=comp_ratio_select.GreedyCompRatioSelectAlgo(layer_db pruner SpatialSvdCostCalculator() eval_func 20 CostMetric.mac 0.5 10 <true> <none> <none> <true> bokeh_session=<none>)<line_sep>eval_dict=greedy_algo._compute_eval_scores_for_all_comp_ratio_candidates()<line_sep>print()<line_sep>print(eval_dict)<line_sep>self.assertEqual(90 eval_dict['conv1'][Decimal('0.1')])<line_sep>self.assertEqual(51 eval_dict['conv2'][Decimal('0.5')])<line_sep>self.assertEqual(21 eval_dict['conv2'][Decimal('0.8')])<block_end><def_stmt>test_find_min_max_eval_scores self<block_start>eval_scores_dict={'layer1':{Decimal('0.1'):90 Decimal('0.5'):50 Decimal('0.7'):30 Decimal('0.8'):20} 'layer2':{Decimal('0.2'):91 Decimal('0.3'):45 Decimal('0.7'):30 Decimal('0.9'):11}}<line_sep>min_score,max_score=comp_ratio_select.GreedyCompRatioSelectAlgo._find_min_max_eval_scores(eval_scores_dict)<line_sep>self.assertEqual(11 min_score)<line_sep>self.assertEqual(91 max_score)<line_sep>eval_scores_dict={'layer1':{Decimal('0.1'):10 Decimal('0.5'):92 Decimal('0.7'):30 Decimal('0.8'):20} 'layer2':{Decimal('0.2'):91 Decimal('0.3'):45 Decimal('0.7'):30 Decimal('0.9'):11}}<line_sep>min_score,max_score=comp_ratio_select.GreedyCompRatioSelectAlgo._find_min_max_eval_scores(eval_scores_dict)<line_sep>self.assertEqual(10 min_score)<line_sep>self.assertEqual(92 max_score)<block_end><def_stmt>test_find_layer_comp_ratio_given_eval_score self<block_start>eval_scores_dict={'layer1':{Decimal('0.1'):90 Decimal('0.5'):50 Decimal('0.7'):30 Decimal('0.8'):20} 'layer2':{Decimal('0.1'):11 Decimal('0.3'):23 Decimal('0.5'):47 Decimal('0.7'):85 Decimal('0.9'):89}}<line_sep>layer2=Layer(nn.Conv2d(32 64 3) "layer2" <none>)<line_sep>greedy_algo=comp_ratio_select.GreedyCompRatioSelectAlgo<line_sep>comp_ratio=greedy_algo._find_layer_comp_ratio_given_eval_score(eval_scores_dict 45 layer2)<line_sep>self.assertEqual(Decimal('0.5') comp_ratio)<line_sep>comp_ratio=greedy_algo._find_layer_comp_ratio_given_eval_score(eval_scores_dict 48 layer2)<line_sep>self.assertEqual(Decimal('0.7') comp_ratio)<line_sep>comp_ratio=greedy_algo._find_layer_comp_ratio_given_eval_score(eval_scores_dict 90 layer2)<line_sep>self.assertEqual(<none> comp_ratio)<block_end><def_stmt>test_select_per_layer_comp_ratios self<block_start>pruner=unittest.mock.MagicMock()<line_sep>eval_func=unittest.mock.MagicMock()<line_sep>rounding_algo=unittest.mock.MagicMock()<line_sep>rounding_algo.round.side_effect=[0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9]<line_sep>eval_func.side_effect=[10 20 30 40 50 60 70 80 90 11 21 31 35 40 45 50 55 60]<line_sep>model=mnist_torch_model.Net()<line_sep>input_shape=(1 1 28 28)<line_sep>dummy_input=create_rand_tensors_given_shapes(input_shape)<line_sep>layer_db=LayerDatabase(model dummy_input)<line_sep>layer1=layer_db.find_layer_by_name('conv1')<line_sep>layer2=layer_db.find_layer_by_name('conv2')<line_sep>selected_layers=[layer1 layer2]<line_sep>layer_db.mark_picked_layers([layer1 layer2])<try_stmt><block_start>os.remove('./data/greedy_selection_eval_scores_dict.pkl')<block_end><except_stmt>OSError<block_start><pass><block_end># Instantiate child
greedy_algo=comp_ratio_select.GreedyCompRatioSelectAlgo(layer_db pruner SpatialSvdCostCalculator() eval_func 20 CostMetric.mac Decimal(0.6) 10 <true> <none> rounding_algo <false> bokeh_session=<none>)<line_sep>layer_comp_ratio_list,stats=greedy_algo.select_per_layer_comp_ratios()<line_sep>original_cost=SpatialSvdCostCalculator.compute_model_cost(layer_db)<for_stmt>layer layer_db<block_start><if_stmt>layer<not><in>selected_layers<block_start>layer_comp_ratio_list.append(LayerCompRatioPair(layer <none>))<block_end><block_end>compressed_cost=SpatialSvdCostCalculator.calculate_compressed_cost(layer_db layer_comp_ratio_list CostMetric.mac)<line_sep>rounding_algo.round.side_effect=[0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9]<line_sep>actual_compression_ratio=compressed_cost.mac/original_cost.mac<line_sep>self.assertTrue(math.isclose(Decimal(0.6) actual_compression_ratio abs_tol=0.05))<line_sep>self.assertTrue(os.path.isfile('./data/greedy_selection_eval_scores_dict.pkl'))<line_sep>print('\n')<for_stmt>pair layer_comp_ratio_list<block_start>print(pair)<block_end># lets repeat with a saved eval_dict
greedy_algo=comp_ratio_select.GreedyCompRatioSelectAlgo(layer_db pruner SpatialSvdCostCalculator() eval_func 20 CostMetric.mac Decimal(0.6) 10 <true> './data/greedy_selection_eval_scores_dict.pkl' rounding_algo <false> bokeh_session=<none>)<line_sep>layer_comp_ratio_list,stats=greedy_algo.select_per_layer_comp_ratios()<line_sep>original_cost=SpatialSvdCostCalculator.compute_model_cost(layer_db)<for_stmt>layer layer_db<block_start><if_stmt>layer<not><in>selected_layers<block_start>layer_comp_ratio_list.append(LayerCompRatioPair(layer <none>))<block_end><block_end>compressed_cost=SpatialSvdCostCalculator.calculate_compressed_cost(layer_db layer_comp_ratio_list CostMetric.mac)<line_sep>actual_compression_ratio=compressed_cost.mac/original_cost.mac<line_sep>self.assertTrue(math.isclose(Decimal(0.6) actual_compression_ratio abs_tol=0.05))<line_sep>print('\n')<for_stmt>pair layer_comp_ratio_list<block_start>print(pair)<block_end><block_end><def_stmt>test_select_per_layer_comp_ratios_with_spatial_svd_pruner self<block_start>pruner=SpatialSvdPruner()<line_sep>eval_func=unittest.mock.MagicMock()<line_sep>rounding_algo=unittest.mock.MagicMock()<line_sep>eval_func.side_effect=[10 20 30 40 50 60 70 80 90 11 21 31 35 40 45 50 55 60]<line_sep>rounding_algo.round.side_effect=[0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9]<line_sep>model=mnist_torch_model.Net()<line_sep>input_shape=(1 1 28 28)<line_sep>dummy_input=create_rand_tensors_given_shapes(input_shape)<line_sep>layer_db=LayerDatabase(model dummy_input)<line_sep>selected_layers=[layer<for>layer layer_db<if>isinstance(layer.module nn.Conv2d)]<line_sep>layer_db.mark_picked_layers(selected_layers)<line_sep># Instantiate child
greedy_algo=comp_ratio_select.GreedyCompRatioSelectAlgo(layer_db pruner SpatialSvdCostCalculator() eval_func 20 CostMetric.mac Decimal(0.4) 10 <true> <none> rounding_algo <false> bokeh_session=<none>)<line_sep>layer_comp_ratio_list,stats=greedy_algo.select_per_layer_comp_ratios()<line_sep>original_cost=SpatialSvdCostCalculator.compute_model_cost(layer_db)<for_stmt>layer layer_db<block_start><if_stmt>layer<not><in>selected_layers<block_start>layer_comp_ratio_list.append(LayerCompRatioPair(layer <none>))<block_end><block_end>compressed_cost=SpatialSvdCostCalculator.calculate_compressed_cost(layer_db layer_comp_ratio_list CostMetric.mac)<line_sep>actual_compression_ratio=compressed_cost.mac/original_cost.mac<line_sep>self.assertTrue(math.isclose(Decimal(0.3) actual_compression_ratio abs_tol=0.8))<line_sep>print('\n')<for_stmt>pair layer_comp_ratio_list<block_start>print(pair)<block_end><block_end><def_stmt>test_comp_ratio_select_tar self<block_start>compute_model_cost=unittest.mock.MagicMock()<line_sep>pruner=unittest.mock.MagicMock()<line_sep>eval_func=unittest.mock.MagicMock()<line_sep>eval_func.side_effect=[0.1 0.15 0.2 0.25 0.3 0.35 0.4 0.45 0.5 0.55 0.6 0.65 0.7 0.75 0.8 0.85 0.9 0.95 0.97 1.0 0.1 0.15 0.2 0.25 0.3 0.35 0.4 0.45 0.5 0.55 0.6 0.65 0.7 0.75 0.8 0.85 0.9 0.95 0.97 1.0 0.1 0.15 0.2 0.25 0.3 0.35 0.4 0.45 0.5 0.55 0.6 0.65 0.7 0.75 0.8 0.85 0.9 0.95 0.97 1.0 0.1 0.15 0.2 0.25 0.3 0.35 0.4 0.45 0.5 0.55 0.6 0.65 0.7 0.75 0.8 0.85 0.9 0.95 0.97 1.0]<line_sep>compute_model_cost.return_value=(500 500)<line_sep>compute_network_cost=unittest.mock.MagicMock()<line_sep>compute_network_cost.return_value=(500 500)<line_sep>model=mnist_torch_model.Net().to('cpu')<line_sep>input_shape=(1 1 28 28)<line_sep>dummy_input=create_rand_tensors_given_shapes(input_shape)<line_sep>layer_db=LayerDatabase(model dummy_input)<line_sep>layer1=layer_db.find_layer_by_name('conv2')<line_sep>layer_db.mark_picked_layers([layer1])<line_sep>layer2=layer_db.find_layer_by_name('fc2')<line_sep>layer_db.mark_picked_layers([layer2])<line_sep>layer3=layer_db.find_layer_by_name('fc1')<line_sep>layer_db.mark_picked_layers([layer3])<line_sep># Instantiate child
tar_algo=comp_ratio_select.TarRankSelectAlgo(layer_db=layer_db pruner=pruner cost_calculator=WeightSvdCostCalculator() eval_func=eval_func eval_iterations=20 cost_metric=CostMetric.mac num_rank_indices=20 use_cuda=<false> pymo_utils_lib=pymo_utils)<line_sep>tar_algo._svd_lib_ref=create_autospec(pymo.Svd instance=<true>)<line_sep>tar_algo._svd_lib_ref.SetCandidateRanks=unittest.mock.MagicMock()<line_sep>tar_algo._svd_lib_ref.SetCandidateRanks.return_value=20<line_sep>tar_algo._num_rank_indices=20<with_stmt>unittest.mock.patch('aimet_common.cost_calculator.CostCalculator.calculate_comp_ratio_given_rank')<as>calculate_comp_ratio_given_rank<block_start>calculate_comp_ratio_given_rank.side_effect=[0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0]<line_sep>layer_comp_ratio_list,stats=tar_algo.select_per_layer_comp_ratios()<line_sep>self.assertEqual(layer_comp_ratio_list[2].eval_score 0.97)<line_sep>self.assertEqual(layer_comp_ratio_list[2].comp_ratio 1.0)<block_end><block_end><block_end>
|
"""
Copyright (c) 2022 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""<import_stmt>os<import_from_stmt>typing List<import_from_stmt>typing Optional<import_stmt>numpy<as>np<import_stmt>onnx<import_from_stmt>tqdm tqdm<import_from_stmt>nncf.common.utils.logger logger<as>nncf_logger<import_from_stmt>openvino.tools.accuracy_checker.config ConfigReader<import_from_stmt>openvino.tools.accuracy_checker.argparser build_arguments_parser<import_from_stmt>openvino.tools.accuracy_checker.dataset Dataset<import_from_stmt>openvino.tools.accuracy_checker.evaluators ModelEvaluator<import_stmt>nncf.experimental.post_training.api.dataset<as>ptq_api_dataset<import_from_stmt>nncf.experimental.onnx.engine ONNXEngine<import_from_stmt>nncf.experimental.onnx.samplers create_onnx_sampler<import_from_stmt>time time<import_stmt>pandas<as>pd<class_stmt>OpenVINOAccuracyCheckerDataset(ptq_api_dataset.Dataset)<block_start><def_stmt>__init__ self evaluator:ModelEvaluator batch_size shuffle<block_start>super().__init__(batch_size shuffle)<line_sep>self.model_evaluator=evaluator<block_end><def_stmt>__getitem__ self item<block_start>_,batch_annotation,batch_input,_=self.model_evaluator.dataset[item]<line_sep>filled_inputs,_,_=self.model_evaluator._get_batch_input(batch_annotation batch_input)<assert_stmt>len(filled_inputs)<eq>1<line_sep>dummy_target=0<for_stmt>_,v filled_inputs[0].items()<block_start><return>np.squeeze(v axis=0) dummy_target<block_end><raise>RuntimeError("filled_inputs has no value.")<block_end><def_stmt>__len__ self<block_start><return>len(self.model_evaluator.dataset)<block_end><block_end><def_stmt>run onnx_model_path:str output_file_path:str dataset:Dataset ignored_scopes:Optional[List[str]]=<none> evaluate:Optional[bool]=<false><block_start>num_init_samples=len(dataset)<line_sep>nncf_logger.info("Post-Training Quantization Parameters:")<line_sep>nncf_logger.info(f" number of samples: {num_init_samples}")<line_sep>nncf_logger.info(f" ignored_scopes: {ignored_scopes}")<line_sep>onnx.checker.check_model(onnx_model_path)<line_sep>original_model=onnx.load(onnx_model_path)<line_sep>nncf_logger.info(f"The model is loaded from {onnx_model_path}")<line_sep>onnx.checker.check_model(original_model)<line_sep>engine=ONNXEngine()<line_sep>sampler=create_onnx_sampler(dataset range(len(dataset)))<line_sep>engine.rt_session_options['providers']=["OpenVINOExecutionProvider"]<line_sep>engine.set_model(original_model)<line_sep>engine.set_sampler(sampler)<line_sep>elapsed_times=[]<for_stmt>input_data,_ tqdm(sampler)<block_start>start_time=time()<line_sep>engine.infer(input_data)<line_sep>elapsed_times<augadd>[1000.0<times>(time()-start_time)]<block_end>elapsed_times=np.array(elapsed_times)<line_sep>model_name,_=os.path.splitext(os.path.basename(onnx_model_path))<line_sep>df=pd.DataFrame({"model_name":[model_name] "latency_mean":[np.mean(elapsed_times)] "latency_std":[np.std(elapsed_times)]})<if_stmt>os.path.exists(output_file_path)<block_start>df.to_csv(output_file_path header=<false> mode="a" index=<false>)<block_end><else_stmt><block_start>df.to_csv(output_file_path header=<true> mode="w" index=<false>)<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>parser=build_arguments_parser()<line_sep>parser.add_argument("--output-file-path" "-o" help="Directory path to save output quantized ONNX model" type=str)<line_sep>args=parser.parse_args()<line_sep>config,mode=ConfigReader.merge(args)<assert_stmt>mode<eq>"models"<for_stmt>config_entry 
config[mode]<block_start>model_evaluator=ModelEvaluator.from_configs(config_entry)<assert_stmt>"datasets"<in>config_entry<assert_stmt>len(config_entry["datasets"])<eq>1 "Config should have one dataset."<line_sep>dataset_config=config_entry["datasets"][0]<assert_stmt>"launchers"<in>config_entry<assert_stmt>len(config_entry["launchers"])<eq>1<line_sep>run(onnx_model_path=str(config_entry["launchers"][0]["model"]) output_file_path=args.output_file_path dataset=OpenVINOAccuracyCheckerDataset(model_evaluator batch_size=1 shuffle=<true>))<block_end><block_end>
|
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
<import_from_future_stmt> annotations<import_from_stmt>dataclasses dataclass<import_from_stmt>pants.backend.go.util_rules.sdk GoSdkProcess<import_from_stmt>pants.engine.fs Digest<import_from_stmt>pants.engine.process ProcessResult<import_from_stmt>pants.engine.rules Get collect_rules rule<line_sep>@dataclass(frozen=<true>)<class_stmt>LinkGoBinaryRequest<block_start>"""Link a Go binary from package archives and an import configuration."""<line_sep>input_digest:Digest<line_sep>archives:tuple[str <ellipsis>]<line_sep>import_config_path:str<line_sep>output_filename:str<line_sep>description:str<block_end>@dataclass(frozen=<true>)<class_stmt>LinkedGoBinary<block_start>"""A linked Go binary stored in a `Digest`."""<line_sep>digest:Digest<block_end>@rule<async_keyword><def_stmt>link_go_binary request:LinkGoBinaryRequest<arrow>LinkedGoBinary<block_start>result=<await>Get(ProcessResult GoSdkProcess(input_digest=request.input_digest command=("tool" "link" "-importcfg" request.import_config_path "-o" request.output_filename "-buildmode=exe" # seen in `go build -x` output
*request.archives ) description=f"Link Go binary: {request.output_filename}" output_files=(request.output_filename ) ) )<line_sep><return>LinkedGoBinary(result.output_digest)<block_end><def_stmt>rules <block_start><return>collect_rules()<block_end>
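# For reference, a rough sketch of the command this rule builds (the paths are hypothetical,
# chosen only for illustration): with import_config_path="./importcfg",
# output_filename="./bin" and archives=("./__pkgs__/main.a",), GoSdkProcess runs roughly
#   go tool link -importcfg ./importcfg -o ./bin -buildmode=exe ./__pkgs__/main.a
# and the produced binary is captured from the process output digest.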
|
#=========================================================================
# TestSimpleSink_test.py
#=========================================================================
<import_from_future_stmt> print_function<import_from_stmt>pymtl *<import_from_stmt>TestSimpleSource TestSimpleSource<import_from_stmt>TestSimpleSink TestSimpleSink<line_sep>#-------------------------------------------------------------------------
# TestHarness
#-------------------------------------------------------------------------
<class_stmt>TestHarness(Model)<block_start><def_stmt>__init__ s dtype msgs# Instantiate models
<block_start>s.src=TestSimpleSource(dtype msgs)<line_sep>s.sink=TestSimpleSink(dtype msgs)<line_sep># Connect chain
s.connect(s.src.out.msg s.sink.in_.msg)<line_sep>s.connect(s.src.out.val s.sink.in_.val)<line_sep>s.connect(s.src.out.rdy s.sink.in_.rdy)<block_end><def_stmt>done s<block_start><return>s.src.done<and>s.sink.done<block_end><def_stmt>line_trace s<block_start><return>s.src.line_trace()+" | "+s.sink.line_trace()<block_end><block_end>#-------------------------------------------------------------------------
# test_basics
#-------------------------------------------------------------------------
<def_stmt>test_basics dump_vcd# Test messages
<block_start>test_msgs=[0x0000 0x0a0a 0x0b0b 0x0c0c 0x0d0d 0xf0f0 0xe0e0 0xd0d0 ]<line_sep># Instantiate and elaborate the model
model=TestHarness(16 test_msgs)<line_sep>model.vcd_file=dump_vcd<line_sep>model.elaborate()<line_sep># Create a simulator using the simulation tool
sim=SimulationTool(model)<line_sep># Run the simulation
print()<line_sep>sim.reset()<while_stmt><not>model.done()<block_start>sim.print_line_trace()<line_sep>sim.cycle()<block_end># Add a couple extra ticks so that the VCD dump is nicer
sim.cycle()<line_sep>sim.cycle()<line_sep>sim.cycle()<block_end>
|
<import_stmt>time<import_stmt>board<import_stmt>adafruit_pyportal<line_sep># We can cycle through the latest featured products
#PRODUCTS_TYPE = "featured"
#or we can view the latest new products
PRODUCTS_TYPE="new"<line_sep># Set up where we'll be fetching data from
DATA_SOURCE="https://www.adafruit.com/api/products?format=micro&"+PRODUCTS_TYPE+"=1&random=1"<line_sep># What data we'll be viewing
IMAGE_LOCATION=[0 "image"]<line_sep>NAME_LOCATION=[0 "name"]<line_sep>URL_LOCATION=[0 "url"]<line_sep># determine the current working directory needed so we know where to find files
cwd=("/"+__file__).rsplit('/' 1)[0]<line_sep>pyportal=adafruit_pyportal.PyPortal(url=DATA_SOURCE json_path=(NAME_LOCATION URL_LOCATION) status_neopixel=board.NEOPIXEL default_bg=cwd+"/new_background.bmp" text_font=cwd+"/fonts/Arial-Bold-12.bdf" text_position=((5 35) (5 225)) text_color=(0xFFFFFF 0xFFFFFF) text_wrap=(35 35) # characters to wrap
image_json_path=IMAGE_LOCATION image_resize=(320 240) image_position=(0 0))<line_sep>pyportal.preload_font()<while_stmt><true><block_start>response=<none><try_stmt><block_start>response=pyportal.fetch()<line_sep>print("Response is" response)<block_end><except_stmt>(IndexError RuntimeError ValueError)<as>e<block_start>print("Some error occured, retrying! -" e)<block_end>time.sleep(60)<block_end>
|
working=<true><match_stmt>working<block_start><case_stmt><true><block_start>print("OK")<block_end><case_stmt><false><block_start>print()<block_end><block_end>
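# A minimal demonstration of Python 3.10+ structural pattern matching: matching the
# boolean against literal patterns prints "OK" when working is True and an empty line otherwise.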
|
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
<import_stmt>ast<import_stmt>logging<as>log<import_stmt>os<import_from_stmt>openvino.tools.mo.utils.error Error<class_stmt>SimpleProtoParser(object)<block_start>"""
This is a simple Proto2 parser that has limited functionality and is intended to parse configuration files for the
models created with the Object Detection API only. The result of the parser is a dictionary.
"""<line_sep>_tokens=list()<line_sep>_result=dict()<def_stmt>__init__ self<block_start>self._tokens=list()<line_sep>self._result=dict()<block_end>@staticmethod<def_stmt>_convert_value_to_correct_datatype value:str<block_start>"""
Converts string representation of the token to a value with proper data type.
:param value: string representation to be converted.
:return: converted to a correct data type value.
"""<if_stmt>value<eq>'true'<block_start><return><true><block_end><if_stmt>value<eq>'false'<block_start><return><false><block_end><try_stmt><block_start>result=ast.literal_eval(value)<line_sep><return>result<block_end><except_stmt>Exception# if it is not possible to evaluate the value then consider it as a string
<block_start><return>value<block_end><block_end>@staticmethod<def_stmt>_convert_values_to_correct_datatypes d:dict<block_start>"""
Convert dictionary with values to correct data types.
:param d: dictionary with values.
:return: None
"""<for_stmt>key,value d.items()<block_start><if_stmt>isinstance(value dict)<block_start>__class__._convert_values_to_correct_datatypes(value)<block_end><elif_stmt>isinstance(value list)<block_start>d[key]=[__class__._convert_value_to_correct_datatype(item)<for>item value]<block_end><else_stmt><block_start>d[key]=__class__._convert_value_to_correct_datatype(value)<block_end><block_end><block_end><def_stmt>_add_non_empty_token self token:str<block_start>"""
Add token to the list of tokens if it is non-empty.
:param token: token to add
:return: None
"""<if_stmt>token<ne>""<block_start>self._tokens.append(token)<block_end><block_end><def_stmt>_parse_list self result:list token_ind:int<block_start>prev_token='['<while_stmt>token_ind<l>len(self._tokens)<block_start>cur_token=self._tokens[token_ind]<if_stmt>cur_token<eq>']'<block_start><return>token_ind+1<block_end><if_stmt>cur_token<eq>','<block_start><if_stmt>prev_token<eq>','<or>prev_token<eq>'['<block_start><raise>Error('Missing value in the list at position {}'.format(token_ind))<block_end><block_end><else_stmt><block_start>result.append(cur_token)<block_end>token_ind<augadd>1<line_sep>prev_token=cur_token<block_end><return>token_ind<block_end><def_stmt>_parse_tokens self result:dict token_ind:int depth:int=0<block_start>"""
Internal function that parses tokens.
:param result: current dictionary where to store parse result.
:param token_ind: index of the token from the tokens list to start parsing from.
:return: token index to continue parsing from.
"""<while_stmt>token_ind<l>len(self._tokens)<block_start>cur_token=self._tokens[token_ind]<if_stmt>cur_token<eq>','# redundant commas that we simply ignore everywhere except list "[x, y, z...]"
<block_start>token_ind<augadd>1<line_sep><continue><block_end><if_stmt>cur_token<eq>'}'<block_start><return>token_ind+1<block_end>next_token=self._tokens[token_ind+1]<if_stmt>next_token<eq>'{'<block_start>result[cur_token]=dict()<line_sep>token_ind=self._parse_tokens(result[cur_token] token_ind+2 depth+1)<block_end><elif_stmt>next_token<eq>':'<block_start>next_next_token=self._tokens[token_ind+2]<if_stmt>next_next_token<eq>'['<block_start>result[cur_token]=list()<line_sep>token_ind=self._parse_list(result[cur_token] token_ind+3)<block_end><else_stmt><block_start><if_stmt>cur_token<not><in>result<block_start>result[cur_token]=self._tokens[token_ind+2]<block_end><else_stmt><block_start><if_stmt><not>isinstance(result[cur_token] list)<block_start>old_val=result[cur_token]<line_sep>result[cur_token]=[old_val]<block_end>result[cur_token].append(self._tokens[token_ind+2])<block_end>token_ind<augadd>3<block_end><block_end><else_stmt><block_start><raise>Error('Wrong character "{}" in position {}'.format(next_token token_ind))<block_end><block_end><if_stmt>depth<ne>0<block_start><raise>Error('Input/output braces mismatch.')<block_end><return>token_ind<block_end><def_stmt>_convert_tokens_to_dict self<block_start>"""
Convert list of tokens into a dictionary with proper structure.
Then converts the values in the dictionary to the correct data types. For example, 'false' -> False,
'true' -> True, '0.004' -> 0.004, etc.
:return: True if conversion is successful.
"""<try_stmt><block_start>self._parse_tokens(self._result 0)<block_end><except_stmt>Exception<as>ex<block_start>log.error('Failed to convert tokens to dictionary: {}'.format(str(ex)))<line_sep><return><false><block_end>self._convert_values_to_correct_datatypes(self._result)<line_sep><return><true><block_end><def_stmt>_split_to_tokens self file_content:str<block_start>"""
The function gets the file content as a string and converts it to a list of tokens (all tokens are still strings).
:param file_content: file content as a string
"""<line_sep>cur_token=''<line_sep>string_started=<false><for_stmt>line file_content.split('\n')<block_start>cur_token=''<line_sep>line=line.strip()<if_stmt>line.startswith('#')# skip comments
<block_start><continue><block_end><for_stmt>char line<block_start><if_stmt>string_started<block_start><if_stmt>char<eq>'"'# string ended
<block_start>self._add_non_empty_token(cur_token)<line_sep>cur_token=''# start of a new string
string_started=<false><block_end><else_stmt><block_start>cur_token<augadd>char<block_end><block_end><elif_stmt>char<eq>'"'<block_start>self._add_non_empty_token(cur_token)<line_sep>cur_token=''# start of a new string
string_started=<true><block_end><elif_stmt>(char<eq>" "<and><not>string_started)<or>char<eq>'\n'<block_start>self._add_non_empty_token(cur_token)<line_sep>cur_token=''<block_end><elif_stmt>char<in>[':' '{' '}' '[' ']' ',']<block_start>self._add_non_empty_token(cur_token)<line_sep>self._tokens.append(char)<line_sep>cur_token=''<block_end><else_stmt><block_start>cur_token<augadd>char<block_end><block_end>self._add_non_empty_token(cur_token)<line_sep>cur_token=''<block_end>self._add_non_empty_token(cur_token)<block_end><def_stmt>parse_from_string self file_content:str<block_start>"""
Parses the proto text file passed as a string.
:param file_content: content of the file.
:return: dictionary with file content or None if the file cannot be parsed.
"""<line_sep>self._split_to_tokens(file_content)<if_stmt><not>self._convert_tokens_to_dict()<block_start>log.error('Failed to generate dictionary representation of file.')<line_sep><return><none><block_end><return>self._result<block_end><def_stmt>parse_file self file_name:str<block_start>"""
Parses the specified file and returns its representation as dictionary.
:param file_name: file name to parse.
:return: dictionary with file content or None if the file cannot be parsed.
"""<if_stmt><not>os.path.exists(file_name)<block_start>log.error('File {} does not exist'.format(file_name))<line_sep><return><none><block_end><try_stmt><block_start><with_stmt>open(file_name)<as>file<block_start>file_content=file.readlines()<block_end><block_end><except_stmt>Exception<as>ex<block_start>log.error('Failed to read file {}: {}'.format(file_name str(ex)))<line_sep><return><none><block_end><return>self.parse_from_string(''.join(file_content))<block_end><block_end>
|
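# A minimal usage sketch for the proto-text parser class defined above. The class name
# SimpleProtoParser is an assumption (the class statement is outside this excerpt), and the
# instance is assumed to start with empty _tokens and _result; the sample config string and
# the commented result are illustrative only.
parser = SimpleProtoParser()
config = parser.parse_from_string('model { name: "net" input_dim: [1, 3] enabled: true }')
# config comes back roughly as {'model': {'name': 'net', 'input_dim': [1, 3], 'enabled': True}}
|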
<import_from_stmt>dagster pipeline<import_from_stmt>.baz baz_solid# pylint: disable=import-error
@pipeline<def_stmt>bar_pipeline <block_start>baz_solid()<block_end>
|
<import_stmt>bpy<line_sep>bpy.context.fluid.domain_settings.viscosity_base=5.0<line_sep>bpy.context.fluid.domain_settings.viscosity_exponent=5<line_sep>
|
<import_from_stmt>unittest mock<import_from_stmt>loafer.ext.sentry sentry_handler<def_stmt>test_sentry_handler <block_start>mock_scope=mock.MagicMock()<line_sep>sdk_mocked=mock.Mock()<line_sep>sdk_mocked.push_scope.return_value=mock_scope<line_sep>handler=sentry_handler(sdk_mocked)<line_sep>exc=ValueError("test")<line_sep>exc_info=(type(exc) exc <none>)<line_sep>delete_message=handler(exc_info "test")<assert_stmt>delete_message<is><false><assert_stmt>sdk_mocked.push_scope.called<line_sep>mock_scope.__enter__.return_value.set_extra.assert_called_once_with("message" "test")<line_sep>sdk_mocked.capture_exception.assert_called_once_with(exc_info)<block_end><def_stmt>test_sentry_handler_delete_message <block_start>mock_scope=mock.MagicMock()<line_sep>sdk_mocked=mock.Mock()<line_sep>sdk_mocked.push_scope.return_value=mock_scope<line_sep>handler=sentry_handler(sdk_mocked delete_message=<true>)<line_sep>exc=ValueError("test")<line_sep>exc_info=(type(exc) exc <none>)<line_sep>delete_message=handler(exc_info "test")<assert_stmt>delete_message<is><true><assert_stmt>sdk_mocked.push_scope.called<line_sep>mock_scope.__enter__.return_value.set_extra.assert_called_once_with("message" "test")<line_sep>sdk_mocked.capture_exception.assert_called_once_with(exc_info)<block_end>
|
<import_from_stmt>rentomatic.shared use_case<as>uc<import_from_stmt>rentomatic.shared response_object<as>res<class_stmt>StorageRoomListUseCase(uc.UseCase)<block_start><def_stmt>__init__ self repo<block_start>self.repo=repo<block_end><def_stmt>process_request self request_object<block_start>domain_storageroom=self.repo.list(filters=request_object.filters)<line_sep><return>res.ResponseSuccess(domain_storageroom)<block_end><block_end>
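# A minimal sketch of driving the use case above. The fake repository and request objects
# below are stand-ins, and the truthiness/.value behaviour of ResponseSuccess is assumed to
# follow the rentomatic tutorial this module comes from; none of these helpers exist in the file.
class _FakeRequest:
    filters = None

class _FakeRepo:
    def list(self, filters=None):
        return ['room-1', 'room-2']

response = StorageRoomListUseCase(_FakeRepo()).process_request(_FakeRequest())
print(bool(response), response.value)  # expected: True ['room-1', 'room-2']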
|
<import_stmt>cronjobs<import_from_stmt>airmozilla.cronlogger.decorators capture<import_from_stmt>. eventemails<line_sep>@cronjobs.register@capture<def_stmt>send_new_event_emails <block_start>eventemails.send_new_event_emails(verbose=<true>)<block_end>
|
<import_from_stmt>aws_cdk aws_cloudformation<as>cfn aws_wafv2<as>waf core <class_stmt>Waf(cfn.NestedStack)<block_start><def_stmt>__init__ self scope:core.Construct id:str target_arn **kwargs<arrow><none><block_start>super().__init__(scope id **kwargs)<line_sep>waf_rules=[]<line_sep># 1, AWS general rules
aws_managed_rules=waf.CfnWebACL.RuleProperty(name='AWS-AWSManagedRulesCommonRuleSet' priority=1 override_action=waf.CfnWebACL.OverrideActionProperty(none={}) statement=waf.CfnWebACL.StatementOneProperty(managed_rule_group_statement=waf.CfnWebACL.ManagedRuleGroupStatementProperty(name='AWSManagedRulesCommonRuleSet' vendor_name='AWS' excluded_rules=[waf.CfnWebACL.ExcludedRuleProperty(name='SizeRestrictions_BODY')])) visibility_config=waf.CfnWebACL.VisibilityConfigProperty(cloud_watch_metrics_enabled=<true> metric_name='awsCommonRules' sampled_requests_enabled=<true> ) )<line_sep>waf_rules.append(aws_managed_rules)<line_sep># 2, AWS AnonIPAddress
aws_anoniplist=waf.CfnWebACL.RuleProperty(name='awsAnonymousIP' priority=2 override_action=waf.CfnWebACL.OverrideActionProperty(none={}) statement=waf.CfnWebACL.StatementOneProperty(managed_rule_group_statement=waf.CfnWebACL.ManagedRuleGroupStatementProperty(name='AWSManagedRulesAnonymousIpList' vendor_name='AWS' excluded_rules=[])) visibility_config=waf.CfnWebACL.VisibilityConfigProperty(cloud_watch_metrics_enabled=<true> metric_name='awsAnonymous' sampled_requests_enabled=<true> ))<line_sep>waf_rules.append(aws_anoniplist)<line_sep># 3 AWS ip reputation List
aws_ip_rep_list=waf.CfnWebACL.RuleProperty(name='aws_Ipreputation' priority=3 override_action=waf.CfnWebACL.OverrideActionProperty(none={}) statement=waf.CfnWebACL.StatementOneProperty(managed_rule_group_statement=waf.CfnWebACL.ManagedRuleGroupStatementProperty(name='AWSManagedRulesAmazonIpReputationList' vendor_name='AWS' excluded_rules=[])) visibility_config=waf.CfnWebACL.VisibilityConfigProperty(cloud_watch_metrics_enabled=<true> metric_name='aws_reputation' sampled_requests_enabled=<true> ))<line_sep>waf_rules.append(aws_ip_rep_list)<line_sep># 4 GeoBlock NZ from accessing gateway
geoblock_rule=waf.CfnWebACL.RuleProperty(name='geoblocking_rule' priority=4 action=waf.CfnWebACL.RuleActionProperty(block={}) statement=waf.CfnWebACL.StatementOneProperty(geo_match_statement=waf.CfnWebACL.GeoMatchStatementProperty(country_codes=['NZ'] )) visibility_config=waf.CfnWebACL.VisibilityConfigProperty(cloud_watch_metrics_enabled=<true> metric_name='geoblock' sampled_requests_enabled=<true> ))<line_sep>waf_rules.append(geoblock_rule)<line_sep># Create the Waf ACL
WebACL=waf.CfnWebACL(self 'WebACL' default_action=waf.CfnWebACL.DefaultActionProperty(allow={}) scope="REGIONAL" # vs 'CLOUDFRONT'
visibility_config=waf.CfnWebACL.VisibilityConfigProperty(cloud_watch_metrics_enabled=<true> metric_name='webACL' sampled_requests_enabled=<true>) name='HelloWorldACL' rules=waf_rules)<line_sep># Associate it with the resource provided.
waf.CfnWebACLAssociation(self 'WAFAssnAPI' web_acl_arn=WebACL.attr_arn resource_arn=target_arn)<block_end><block_end>
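# A sketch of wiring this nested stack into a parent CDK v1 app; the app, stack id, and
# target ARN below are placeholders and are not part of the original sample.
from aws_cdk import core

app = core.App()
parent = core.Stack(app, 'ParentStack')
# Associate the WAF with some regional resource, e.g. an ALB (placeholder ARN).
Waf(parent, 'Waf', target_arn='arn:aws:elasticloadbalancing:REGION:ACCOUNT-ID:loadbalancer/app/placeholder/0123456789abcdef')
app.synth()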
|
#! /usr/bin/env python
<import_from_future_stmt> print_function<import_from_stmt>FWCore.PythonUtilities.LumiList LumiList<import_stmt>optparse<if_stmt>__name__<eq>'__main__'<block_start>parser=optparse.OptionParser("Usage: %prog [--options] edm1.root [edm2.root...]" description='Runs over input EDM files and prints out a list of contained lumi sections')<line_sep>parser.add_option('--intLumi' dest='intLumi' action='store_true' help='print out total recorded and delivered integrated luminosity')<line_sep>parser.add_option('--output' dest='output' type='string' help='save lumi sections output to file OUTPUT')<line_sep>(options args)=parser.parse_args()<line_sep># put this here after parsing the arguments since ROOT likes to
# grab command line arguments even when it shouldn't.
<import_from_stmt>DataFormats.FWLite Lumis Handle<if_stmt><not>args<block_start><raise>RuntimeError("Must provide at least one input file")<block_end># do we want to get the luminosity summary?
<if_stmt>options.intLumi<block_start>handle=Handle('LumiSummary')<line_sep>label=('lumiProducer')<block_end><else_stmt><block_start>handle,label=<none> <none><block_end>runsLumisDict={}<line_sep>lumis=Lumis(args)<line_sep>delivered=recorded=0<for_stmt>lum lumis<block_start>runList=runsLumisDict.setdefault(lum.aux().run() [])<line_sep>runList.append(lum.aux().id().luminosityBlock())<line_sep># get the summary and keep track of the totals
<if_stmt>options.intLumi<block_start>lum.getByLabel(label handle)<line_sep>summary=handle.product()<line_sep>delivered<augadd>summary.avgInsDelLumi()<line_sep>recorded<augadd>summary.avgInsRecLumi()<block_end><block_end># print out lumi sections in JSON format
jsonList=LumiList(runsAndLumis=runsLumisDict)<if_stmt>options.output<block_start>jsonList.writeJSON(options.output)<block_end><else_stmt><block_start>print(jsonList)<block_end># print out integrated luminosity numbers if requested
<if_stmt>options.intLumi<block_start>print("\nNote: These numbers should be considered approximate. For official numbers, please use lumiCalc.py")<line_sep>print("delivered %.1f mb, recorded %.1f mb"%(delivered recorded))<block_end><block_end>
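# Rough invocation of the script above, driven by the optparse options it defines;
# the script filename edmLumisInFiles.py is an assumption:
#
#   python edmLumisInFiles.py --intLumi --output lumis.json edm1.root edm2.root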
|
# coding:utf-8
<import_stmt>flanker.mime.message.part<as>part<import_from_stmt>nose.tools eq_<line_sep>STRINGS=(# Some normal strings
(b'' '') (b'hello' 'hello') (b'''hello
there
world''' '''hello
there
world''') (b'''hello
there
world
''' '''hello
there
world
''') (b'\201\202\203' '=81=82=83') # Add some trailing MUST QUOTE strings
(b'hello ' 'hello=20') (b'hello\t' 'hello=09') # Some long lines. First, a single line of 108 characters
(b'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\xd8\xd9\xda\xdb\xdc\xdd\xde\xdfxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' '''xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx=D8=D9=DA=DB=DC=DD=DE=DFx=
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx''') # A line of exactly 76 characters, no soft line break should be needed
(b'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy' 'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy') # A line of 77 characters, forcing a soft line break at position 75,
# and a second line of exactly 2 characters (because the soft line
# break `=' sign counts against the line length limit).
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz' '''zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=
zz''') # A line of 151 characters, forcing a soft line break at position 75,
# with a second line of exactly 76 characters and no trailing =
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz' '''zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz''') # A string containing a hard line break, but which the first line is
# 151 characters and the second line is exactly 76 characters. This
# should leave us with three lines, the first which has a soft line
# break, and which the second and third do not.
(b'''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz''' '''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy=
yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy
zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz''') # Lines that end with space or tab should be quoted
(b'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy ' '''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy=
=20''') # Lines that end with a partial quoted character
(b'yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy=y' '''yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy=
=3Dy''') # Lines that lead with a dot '.' should have the dot quoted
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.z' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2Ez') # Lines that end with a dot '.' are not quoted
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zz' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.=\n'+'zz') # Lines that lead with a dot '.' should have the dot quoted and cut
# if the quoted line is longer than 76 characters.
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2Ezzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\nzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'zz') # Respect quoted characters when considering leading '.'
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'+b'.\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f\x7f' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2E=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=7F=\n'+'=7F=7F=7F') # Should cut somewhere near the middle of the line
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'+b'.quick brown fox, quick brown cat, quick hot dog, quick read dog, quick white bird' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'<concat>'=2Equick brown fox, quick brown cat, qui=\n'+'ck hot dog, quick read dog, quick whi=\n'+'te bird') # Respect quoted character when considering where to cut
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'+b'.quick brown fox, quick brown cat\x7f\x7f\x7f\x7f\x7f, quick read dog, quick white bird' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2Equick brown fox, quick brown cat=7F=7F=\n'+'=7F=7F=7F, quick read dog, quick whi=\n'+'te bird') # Avoid considering non quoted characters when cutting
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'+b'.quick brown fox, quick brown cat=20=================, quick read dog, quick white bird' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2Equick brown fox, quick brown cat=3D20=\n'+'=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=\n'+'=3D=3D=3D=3D=3D, quick read dog, quick white bird') # Should quote leading '.' if the cut results in a '.' on the next line
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'+b'.quick brown fox, quick brown cat..................... quick read dog, quick white bird' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2Equick brown fox, quick brown cat.....=\n'+'=2E............... quick read dog, quic=\n'+'k white bird') # Should quote :space if the cut results in a :space at the end of the next line
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'+b'.quick brown fox, quick brown cat quick read dog, quick white bird' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2Equick brown fox, quick brown cat =20=\n'+' quick read dog, quic=\n'+'k white bird') # Should quote :tab if the cut results in a :tab at the end of the next line
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'+b'.quick brown fox, quick brown cat \t quick read dog, quick white bird' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2Equick brown fox, quick brown cat =09=\n'+' quick read dog, quic=\n'+'k white bird') # Should avoid cutting in the middle of multiple quoted characters near the cut point
(b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz'+b'.foo. \xF0\x9F\x99\x82 also there is \xF0\x9F\x99\x82 more in \xF0\x9F\x99\x82 '+b'this \xF0\x9F\x99\x82 message</body></html>' 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz=\n'+'=2Efoo. =F0=9F=99=82 also there is =F0=9F=\n'+'=99=82 more in =F0=9F=99=82 this =F0=\n'<concat>'=9F=99=82 message</body></html>') )<def_stmt>test_encode <block_start><for_stmt>p,e STRINGS<block_start>enc=part._encode_transfer_encoding('quoted-printable' p)<line_sep>eq_(enc e)<block_end><block_end>
|
<import_from_stmt>osr2mp4.ImageProcess imageproc<import_from_stmt>osr2mp4.ImageProcess.PrepareFrames.YImage YImage<line_sep>rankingreplay="pause-replay"<def_stmt>prepare_rankingreplay scale settings<block_start>img=YImage(rankingreplay settings scale).img<line_sep>img=imageproc.newalpha(img 0.4)<line_sep><return>[img]<block_end>
|
<import_from_stmt>django.test TestCase<import_from_stmt>hc.lib.string replace<class_stmt>StringTestCase(TestCase)<block_start><def_stmt>test_it_works self<block_start>result=replace("$A is $B" {"$A":"aaa" "$B":"bbb"})<line_sep>self.assertEqual(result "aaa is bbb")<block_end><def_stmt>test_it_ignores_placeholders_in_values self<block_start>result=replace("$A is $B" {"$A":"$B" "$B":"$A"})<line_sep>self.assertEqual(result "$B is $A")<block_end><def_stmt>test_it_ignores_overlapping_placeholders self<block_start>result=replace("$$AB" {"$A":"" "$B":"text"})<line_sep>self.assertEqual(result "$B")<block_end><def_stmt>test_it_preserves_non_placeholder_dollar_signs self<block_start>result=replace("$3.50" {"$A":"text"})<line_sep>self.assertEqual(result "$3.50")<block_end><block_end>
|