from mushroom_rl.environments.pybullet_envs.air_hockey.hit import AirHockeyHit
from mushroom_rl.environments.pybullet_envs.air_hockey.defend import AirHockeyDefend

AirHockeyHit.register()
AirHockeyDefend.register()
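# Usage sketch (not part of the original module): once registered, MushroomRL
# environments can typically be constructed by name through Environment.make().
# The registry name 'AirHockeyHit' and the no-argument construction below are
# assumptions based on MushroomRL's default registration behaviour.
if __name__ == '__main__':
    from mushroom_rl.core import Environment

    env = Environment.make('AirHockeyHit')
    print(env.info.observation_space, env.info.action_space)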
# Copyright 2018 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A collection of helper functions for streaming events."""

import os
import json
import time

from google.cloud import pubsub_v1

publisher = pubsub_v1.PublisherClient()
GCP_PROJECT = os.environ.get('GCP_PROJECT')


def stream_event(topic_name, event_type, event_context):
    """Helper function for publishing an event.

    Parameters:
        topic_name (str): The name of the Cloud Pub/Sub topic.
        event_type (str): The type of the event.
        event_context: The context of the event.

    Output:
        None.
    """
    topic_path = publisher.topic_path(GCP_PROJECT, topic_name)
    request = {
        'event_type': event_type,
        'created_time': str(int(time.time())),
        'event_context': event_context
    }
    data = json.dumps(request).encode()
    publisher.publish(topic_path, data)
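# Usage sketch (not part of the original helper): publishes a single event to an
# existing topic. The topic name, event type, and payload below are placeholders,
# and GCP_PROJECT must be set in the environment for the topic path to resolve.
if __name__ == '__main__':
    stream_event(
        topic_name='order-events',           # hypothetical Pub/Sub topic
        event_type='ORDER_CREATED',          # hypothetical event type
        event_context={'order_id': '1234'})  # any JSON-serializable payload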
from __future__ import absolute_import
from __future__ import print_function
import sys
import os

# the next line can be removed after installation
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))))

from veriloggen import *
import veriloggen.dataflow as dataflow


def mkMain():
    m = Module('main')
    clk = m.Input('CLK')
    rst = m.Input('RST')

    df = dataflow.DataflowManager(m, clk, rst)

    x = df.Counter()
    y = df.Counter()
    z = x * 2 - y

    z.output('zdata', 'zvalid')

    return m


def mkTest(numports=8):
    m = Module('test')

    # target instance
    main = mkMain()

    params = m.copy_params(main)
    ports = m.copy_sim_ports(main)

    clk = ports['CLK']
    rst = ports['RST']

    uut = m.Instance(main, 'uut',
                     params=m.connect_params(main),
                     ports=m.connect_ports(main))

    reset_done = m.Reg('reset_done', initval=0)
    reset_stmt = []
    reset_stmt.append(reset_done(0))

    simulation.setup_waveform(m, uut)
    simulation.setup_clock(m, clk, hperiod=5)
    init = simulation.setup_reset(m, rst, reset_stmt, period=100)

    nclk = simulation.next_clock

    init.add(
        Delay(1000), reset_done(1),
        nclk(clk),
        Delay(10000),
        Systask('finish'),
    )

    return m


if __name__ == '__main__':
    test = mkTest()
    verilog = test.to_verilog('tmp.v')
    print(verilog)

    # run simulator (Icarus Verilog)
    sim = simulation.Simulator(test)
    rslt = sim.run()  # display=False
    # rslt = sim.run(display=True)
    print(rslt)

    # launch waveform viewer (GTKwave)
    # sim.view_waveform()  # background=False
    # sim.view_waveform(background=True)
# coding: utf-8 """ Azure Blockchain Workbench REST API The Azure Blockchain Workbench REST API is a Workbench extensibility point, which allows developers to create and manage blockchain applications, manage users and organizations within a consortium, integrate blockchain applications into services and platforms, perform transactions on a blockchain, and retrieve transactional and contract data from a blockchain. # noqa: E501 OpenAPI spec version: v1 Generated by: https://github.com/swagger-api/swagger-codegen.git """<import_from_future_stmt> absolute_import<import_stmt>re# noqa: F401 # python 2 and python 3 compatibility library <import_stmt>six<import_from_stmt>swagger_client.api_client ApiClient<class_stmt>ApplicationsApi(object)<block_start>"""NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen """<def_stmt>__init__ self api_client=<none><block_start><if_stmt>api_client<is><none><block_start>api_client=ApiClient()<block_end>self.api_client=api_client<block_end><def_stmt>application_delete self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Deletes the specified blockchain application. This method can only be performed by users who are Workbench administrators. NOTE: Currently not implemented. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.application_delete(application_id, async=True) >>> result = thread.get() :param async bool :param str application_id: The id of the application. (required) :return: None If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.application_delete_with_http_info(application_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.application_delete_with_http_info(application_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>application_delete_with_http_info self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Deletes the specified blockchain application. This method can only be performed by users who are Workbench administrators. NOTE: Currently not implemented. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.application_delete_with_http_info(application_id, async=True) >>> result = thread.get() :param async bool :param str application_id: The id of the application. (required) :return: None If the method is called asynchronously, returns the request thread. 
"""<line_sep>all_params=['application_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method application_delete"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `application_delete`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationID']=params['application_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationID}' 'DELETE' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type=<none> # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>application_disable self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Disables the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.application_disable(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application. (required) :return: None If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.application_disable_with_http_info(application_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.application_disable_with_http_info(application_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>application_disable_with_http_info self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Disables the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.application_disable_with_http_info(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application. (required) :return: None If the method is called asynchronously, returns the request thread. 
"""<line_sep>all_params=['application_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method application_disable"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `application_disable`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationID}/disable' 'PATCH' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type=<none> # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>application_enable self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Enables the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.application_enable(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application. (required) :return: None If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.application_enable_with_http_info(application_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.application_enable_with_http_info(application_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>application_enable_with_http_info self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Enables the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.application_enable_with_http_info(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application. (required) :return: None If the method is called asynchronously, returns the request thread. 
"""<line_sep>all_params=['application_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method application_enable"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `application_enable`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationID}/enable' 'PATCH' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type=<none> # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>application_get self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Gets the blockchain application matching a specific application ID. Users who are Workbench administrators get the blockchain application. Non-Workbench administrators get the blockchain application if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.application_get(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application. (required) :return: Application If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.application_get_with_http_info(application_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.application_get_with_http_info(application_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>application_get_with_http_info self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Gets the blockchain application matching a specific application ID. Users who are Workbench administrators get the blockchain application. Non-Workbench administrators get the blockchain application if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.application_get_with_http_info(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application. 
(required) :return: Application If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['application_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method application_get"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `application_get`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationId}' 'GET' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='Application' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>applications_get self **kwargs# noqa: E501 <block_start>""" # noqa: E501 Lists all blockchain applications to which a user has access in Workbench. Users who are Workbench administrators get all blockchain applications. Non-Workbench administrators get all blockchain applications for which they have at least one associated application role or an associated smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.applications_get(async=True) >>> result = thread.get() :param async bool :param int top: The maximum number of entries to return in the result set. :param int skip: The number of entries to skip in the result set. :param bool enabled: A Boolean for whether to filter the result set to only enabled applications. :return: ApplicationList If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.applications_get_with_http_info(**kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.applications_get_with_http_info(**kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>applications_get_with_http_info self **kwargs# noqa: E501 <block_start>""" # noqa: E501 Lists all blockchain applications to which a user has access in Workbench. Users who are Workbench administrators get all blockchain applications. Non-Workbench administrators get all blockchain applications for which they have at least one associated application role or an associated smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async=True >>> thread = api.applications_get_with_http_info(async=True) >>> result = thread.get() :param async bool :param int top: The maximum number of entries to return in the result set. :param int skip: The number of entries to skip in the result set. :param bool enabled: A Boolean for whether to filter the result set to only enabled applications. :return: ApplicationList If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['top' 'skip' 'enabled']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method applications_get"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep>collection_formats={}<line_sep>path_params={}<line_sep>query_params=[]<if_stmt>'top'<in>params<block_start>query_params.append(('top' params['top']))# noqa: E501 <block_end><if_stmt>'skip'<in>params<block_start>query_params.append(('skip' params['skip']))# noqa: E501 <block_end><if_stmt>'enabled'<in>params<block_start>query_params.append(('enabled' params['enabled']))<block_end># noqa: E501 header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications' 'GET' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='ApplicationList' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>applications_post self file **kwargs# noqa: E501 <block_start>""" # noqa: E501 Creates a new blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.applications_post(file, async=True) >>> result = thread.get() :param async bool :param file file: Upload File (required) :return: int If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.applications_post_with_http_info(file **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.applications_post_with_http_info(file **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>applications_post_with_http_info self file **kwargs# noqa: E501 <block_start>""" # noqa: E501 Creates a new blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.applications_post_with_http_info(file, async=True) >>> result = thread.get() :param async bool :param file file: Upload File (required) :return: int If the method is called asynchronously, returns the request thread. 
"""<line_sep>all_params=['file']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method applications_post"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'file' is set <if_stmt>('file'<not><in>params<or>params['file']<is><none>)<block_start><raise>ValueError("Missing the required parameter `file` when calling `applications_post`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<line_sep>query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<if_stmt>'file'<in>params<block_start>local_var_files['file']=params['file']<block_end># noqa: E501 body_params=<none><line_sep># HTTP header `Content-Type` header_params['Content-Type']=self.api_client.select_header_content_type(# noqa: E501 ['multipart/form-data'])<line_sep># noqa: E501 # Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications' 'POST' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='int' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>contract_code_delete self contract_code_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Deletes the specified blockchain smart contract implementation of a specific blockchain application. This method can only be performed by users who are Workbench administrators. NOTE: not currently implemented # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.contract_code_delete(contract_code_id, async=True) >>> result = thread.get() :param async bool :param int contract_code_id: The id of the contract code (required) :return: None If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.contract_code_delete_with_http_info(contract_code_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.contract_code_delete_with_http_info(contract_code_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>contract_code_delete_with_http_info self contract_code_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Deletes the specified blockchain smart contract implementation of a specific blockchain application. This method can only be performed by users who are Workbench administrators. NOTE: not currently implemented # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.contract_code_delete_with_http_info(contract_code_id, async=True) >>> result = thread.get() :param async bool :param int contract_code_id: The id of the contract code (required) :return: None If the method is called asynchronously, returns the request thread. 
"""<line_sep>all_params=['contract_code_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method contract_code_delete"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'contract_code_id' is set <if_stmt>('contract_code_id'<not><in>params<or>params['contract_code_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `contract_code_id` when calling `contract_code_delete`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'contract_code_id'<in>params<block_start>path_params['contractCodeId']=params['contract_code_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/contractCode/{contractCodeId}' 'DELETE' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type=<none> # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>contract_code_get self contract_code_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Get the blockchain smart contract implementation matching a specific ledger implementation ID. Users who are Workbench administrators get the specified smart contract implementation. Non-Workbench administrators get the smart contract implementation if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.contract_code_get(contract_code_id, async=True) >>> result = thread.get() :param async bool :param int contract_code_id: The id of the contract code (required) :return: FileStreamResult If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.contract_code_get_with_http_info(contract_code_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.contract_code_get_with_http_info(contract_code_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>contract_code_get_with_http_info self contract_code_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Get the blockchain smart contract implementation matching a specific ledger implementation ID. Users who are Workbench administrators get the specified smart contract implementation. Non-Workbench administrators get the smart contract implementation if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async=True >>> thread = api.contract_code_get_with_http_info(contract_code_id, async=True) >>> result = thread.get() :param async bool :param int contract_code_id: The id of the contract code (required) :return: FileStreamResult If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['contract_code_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method contract_code_get"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'contract_code_id' is set <if_stmt>('contract_code_id'<not><in>params<or>params['contract_code_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `contract_code_id` when calling `contract_code_get`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'contract_code_id'<in>params<block_start>path_params['contractCodeId']=params['contract_code_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/contractCode/{contractCodeId}' 'GET' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='FileStreamResult' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>contract_code_post self application_id file **kwargs# noqa: E501 <block_start>""" # noqa: E501 Uploads one or more smart contracts (ex. .sol or .zip), representing the implementation of the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.contract_code_post(application_id, file, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application (required) :param file file: Upload File (required) :param int ledger_id: The index of the ledger :return: int If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.contract_code_post_with_http_info(application_id file **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.contract_code_post_with_http_info(application_id file **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>contract_code_post_with_http_info self application_id file **kwargs# noqa: E501 <block_start>""" # noqa: E501 Uploads one or more smart contracts (ex. .sol or .zip), representing the implementation of the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async=True >>> thread = api.contract_code_post_with_http_info(application_id, file, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application (required) :param file file: Upload File (required) :param int ledger_id: The index of the ledger :return: int If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['application_id' 'file' 'ledger_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method contract_code_post"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `contract_code_post`")# noqa: E501 <block_end># verify the required parameter 'file' is set <if_stmt>('file'<not><in>params<or>params['file']<is><none>)<block_start><raise>ValueError("Missing the required parameter `file` when calling `contract_code_post`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']<block_end># noqa: E501 query_params=[]<if_stmt>'ledger_id'<in>params<block_start>query_params.append(('ledgerId' params['ledger_id']))<block_end># noqa: E501 header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<if_stmt>'file'<in>params<block_start>local_var_files['file']=params['file']<block_end># noqa: E501 body_params=<none><line_sep># HTTP header `Content-Type` header_params['Content-Type']=self.api_client.select_header_content_type(# noqa: E501 ['multipart/form-data'])<line_sep># noqa: E501 # Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationId}/contractCode' 'POST' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='int' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>contract_codes_get self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 List all blockchain smart contract implementations of the specified blockchain application. Users who are Workbench administrators get all smart contract implementations. Non-Workbench administrators get all smart contract implementations for which they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async=True >>> thread = api.contract_codes_get(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application (required) :param int ledger_id: The index of the chain type :param int top: The maximum number of items to return :param int skip: The number of items to skip before returning :return: ContractCodeList If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.contract_codes_get_with_http_info(application_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.contract_codes_get_with_http_info(application_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>contract_codes_get_with_http_info self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 List all blockchain smart contract implementations of the specified blockchain application. Users who are Workbench administrators get all smart contract implementations. Non-Workbench administrators get all smart contract implementations for which they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.contract_codes_get_with_http_info(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application (required) :param int ledger_id: The index of the chain type :param int top: The maximum number of items to return :param int skip: The number of items to skip before returning :return: ContractCodeList If the method is called asynchronously, returns the request thread. 
"""<line_sep>all_params=['application_id' 'ledger_id' 'top' 'skip']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method contract_codes_get"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `contract_codes_get`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']<block_end># noqa: E501 query_params=[]<if_stmt>'ledger_id'<in>params<block_start>query_params.append(('ledgerId' params['ledger_id']))# noqa: E501 <block_end><if_stmt>'top'<in>params<block_start>query_params.append(('top' params['top']))# noqa: E501 <block_end><if_stmt>'skip'<in>params<block_start>query_params.append(('skip' params['skip']))<block_end># noqa: E501 header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationID}/contractCode' 'GET' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='ContractCodeList' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>role_assignment_delete self application_id role_assignment_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Deletes the specified role assignment. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.role_assignment_delete(application_id, role_assignment_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application (required) :param int role_assignment_id: The id of the role assignment (required) :return: None If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.role_assignment_delete_with_http_info(application_id role_assignment_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.role_assignment_delete_with_http_info(application_id role_assignment_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>role_assignment_delete_with_http_info self application_id role_assignment_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Deletes the specified role assignment. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async=True >>> thread = api.role_assignment_delete_with_http_info(application_id, role_assignment_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application (required) :param int role_assignment_id: The id of the role assignment (required) :return: None If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['application_id' 'role_assignment_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method role_assignment_delete"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `role_assignment_delete`")# noqa: E501 <block_end># verify the required parameter 'role_assignment_id' is set <if_stmt>('role_assignment_id'<not><in>params<or>params['role_assignment_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `role_assignment_id` when calling `role_assignment_delete`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']# noqa: E501 <block_end><if_stmt>'role_assignment_id'<in>params<block_start>path_params['roleAssignmentId']=params['role_assignment_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationId}/roleAssignments/{roleAssignmentId}' 'DELETE' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type=<none> # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>role_assignment_get self application_id role_assignment_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Get a role assignment of the specified blockchain application matching a specific user role assignment ID. Users who are Workbench administrators get the role assignment. Non-Workbench administrators get the role assignment if they are associated in the application. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.role_assignment_get(application_id, role_assignment_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the configuration (required) :param int role_assignment_id: The id of the role assignment (required) :return: RoleAssignment If the method is called asynchronously, returns the request thread. 
"""<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.role_assignment_get_with_http_info(application_id role_assignment_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.role_assignment_get_with_http_info(application_id role_assignment_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>role_assignment_get_with_http_info self application_id role_assignment_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Get a role assignment of the specified blockchain application matching a specific user role assignment ID. Users who are Workbench administrators get the role assignment. Non-Workbench administrators get the role assignment if they are associated in the application. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.role_assignment_get_with_http_info(application_id, role_assignment_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the configuration (required) :param int role_assignment_id: The id of the role assignment (required) :return: RoleAssignment If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['application_id' 'role_assignment_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method role_assignment_get"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `role_assignment_get`")# noqa: E501 <block_end># verify the required parameter 'role_assignment_id' is set <if_stmt>('role_assignment_id'<not><in>params<or>params['role_assignment_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `role_assignment_id` when calling `role_assignment_get`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']# noqa: E501 <block_end><if_stmt>'role_assignment_id'<in>params<block_start>path_params['roleAssignmentId']=params['role_assignment_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationId}/roleAssignments/{roleAssignmentId}' 'GET' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='RoleAssignment' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>role_assignments_get self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 List all role assignments of the 
specified blockchain application. Users who are Workbench administrators get all role assignments. Non-Workbench administrators get all their role assignments. Roles are specified in the Workbench application configuration and can be retrieved from GET /applications/{applicationID}. Also, user information can be retrieved from GET /users/{userID}. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.role_assignments_get(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the configuration (required) :param int application_role_id: The id of the application role :param int top: The maximum number of entries to return in the result set. :param int skip: The number of entries to skip in the result set. :return: RoleAssignmentList If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.role_assignments_get_with_http_info(application_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.role_assignments_get_with_http_info(application_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>role_assignments_get_with_http_info self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 List all role assignments of the specified blockchain application. Users who are Workbench administrators get all role assignments. Non-Workbench administrators get all their role assignments. Roles are specified in the Workbench application configuration and can be retrieved from GET /applications/{applicationID}. Also, user information can be retrieved from GET /users/{userID}. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.role_assignments_get_with_http_info(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the configuration (required) :param int application_role_id: The id of the application role :param int top: The maximum number of entries to return in the result set. :param int skip: The number of entries to skip in the result set. :return: RoleAssignmentList If the method is called asynchronously, returns the request thread. 
"""<line_sep>all_params=['application_id' 'application_role_id' 'top' 'skip']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method role_assignments_get"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `role_assignments_get`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']<block_end># noqa: E501 query_params=[]<if_stmt>'application_role_id'<in>params<block_start>query_params.append(('applicationRoleId' params['application_role_id']))# noqa: E501 <block_end><if_stmt>'top'<in>params<block_start>query_params.append(('top' params['top']))# noqa: E501 <block_end><if_stmt>'skip'<in>params<block_start>query_params.append(('skip' params['skip']))<block_end># noqa: E501 header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationId}/roleAssignments' 'GET' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='RoleAssignmentList' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>role_assignments_post self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Creates a user-to-role mapping in the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.role_assignments_post(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the configuration. (required) :param RoleAssignmentInput role_assignment: New user-to-role mapping. :return: int If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.role_assignments_post_with_http_info(application_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.role_assignments_post_with_http_info(application_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>role_assignments_post_with_http_info self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Creates a user-to-role mapping in the specified blockchain application. This method can only be performed by users who are Workbench administrators. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async=True >>> thread = api.role_assignments_post_with_http_info(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the configuration. (required) :param RoleAssignmentInput role_assignment: New user-to-role mapping. :return: int If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['application_id' 'role_assignment']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method role_assignments_post"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `role_assignments_post`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><if_stmt>'role_assignment'<in>params<block_start>body_params=params['role_assignment']<block_end># HTTP header `Content-Type` header_params['Content-Type']=self.api_client.select_header_content_type(# noqa: E501 ['application/json-patch+json' 'application/json' 'text/json' 'application/*+json'])<line_sep># noqa: E501 # Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationId}/roleAssignments' 'POST' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='int' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>workflow_get self workflow_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Get a workflow matching a specific workflow ID. Users who are Workbench administrators get the workflow. Non-Workbench administrators get the workflow if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.workflow_get(workflow_id, async=True) >>> result = thread.get() :param async bool :param int workflow_id: The id of the workflow (required) :return: Workflow If the method is called asynchronously, returns the request thread. 
"""<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.workflow_get_with_http_info(workflow_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.workflow_get_with_http_info(workflow_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>workflow_get_with_http_info self workflow_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 Get a workflow matching a specific workflow ID. Users who are Workbench administrators get the workflow. Non-Workbench administrators get the workflow if they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.workflow_get_with_http_info(workflow_id, async=True) >>> result = thread.get() :param async bool :param int workflow_id: The id of the workflow (required) :return: Workflow If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['workflow_id']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method workflow_get"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'workflow_id' is set <if_stmt>('workflow_id'<not><in>params<or>params['workflow_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `workflow_id` when calling `workflow_get`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'workflow_id'<in>params<block_start>path_params['workflowId']=params['workflow_id']<block_end># noqa: E501 query_params=[]<line_sep>header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/workflows/{workflowId}' 'GET' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='Workflow' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><def_stmt>workflows_get self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 List all workflows of the specified blockchain application. Users who are Workbench administrators get all workflows. Non-Workbench administrators get all workflows for which they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async=True >>> thread = api.workflows_get(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application (required) :param int top: The maximum number of items to return :param int skip: The number of items to skip before returning :return: WorkflowList If the method is called asynchronously, returns the request thread. """<line_sep>kwargs['_return_http_data_only']=<true><if_stmt>kwargs.get('async')<block_start><return>self.workflows_get_with_http_info(application_id **kwargs)# noqa: E501 <block_end><else_stmt><block_start>(data)=self.workflows_get_with_http_info(application_id **kwargs)# noqa: E501 <return>data<block_end><block_end><def_stmt>workflows_get_with_http_info self application_id **kwargs# noqa: E501 <block_start>""" # noqa: E501 List all workflows of the specified blockchain application. Users who are Workbench administrators get all workflows. Non-Workbench administrators get all workflows for which they have at least one associated application role or is associated with a smart contract instance role. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.workflows_get_with_http_info(application_id, async=True) >>> result = thread.get() :param async bool :param int application_id: The id of the application (required) :param int top: The maximum number of items to return :param int skip: The number of items to skip before returning :return: WorkflowList If the method is called asynchronously, returns the request thread. """<line_sep>all_params=['application_id' 'top' 'skip']# noqa: E501 all_params.append('async')<line_sep>all_params.append('_return_http_data_only')<line_sep>all_params.append('_preload_content')<line_sep>all_params.append('_request_timeout')<line_sep>params=locals()<for_stmt>key,val six.iteritems(params['kwargs'])<block_start><if_stmt>key<not><in>all_params<block_start><raise>TypeError("Got an unexpected keyword argument '%s'"<concat>" to method workflows_get"%key)<block_end>params[key]=val<block_end><del_stmt>params['kwargs']<line_sep># verify the required parameter 'application_id' is set <if_stmt>('application_id'<not><in>params<or>params['application_id']<is><none>)<block_start><raise>ValueError("Missing the required parameter `application_id` when calling `workflows_get`")<block_end># noqa: E501 collection_formats={}<line_sep>path_params={}<if_stmt>'application_id'<in>params<block_start>path_params['applicationId']=params['application_id']<block_end># noqa: E501 query_params=[]<if_stmt>'top'<in>params<block_start>query_params.append(('top' params['top']))# noqa: E501 <block_end><if_stmt>'skip'<in>params<block_start>query_params.append(('skip' params['skip']))<block_end># noqa: E501 header_params={}<line_sep>form_params=[]<line_sep>local_var_files={}<line_sep>body_params=<none><line_sep># Authentication setting auth_settings=[]# noqa: E501 <return>self.api_client.call_api('/api/v1/applications/{applicationId}/workflows' 'GET' path_params query_params header_params body=body_params post_params=form_params files=local_var_files response_type='WorkflowList' # noqa: E501 auth_settings=auth_settings async=params.get('async') _return_http_data_only=params.get('_return_http_data_only') _preload_content=params.get('_preload_content' <true>) _request_timeout=params.get('_request_timeout') collection_formats=collection_formats)<block_end><block_end>
<import_from_stmt>osp.common config<import_from_stmt>osp.common.models BaseModel<import_from_stmt>osp.corpus.models Document<import_from_stmt>osp.citations.models Text<import_from_stmt>osp.institutions.models Institution<import_from_stmt>osp.institutions.models Institution_Document<import_from_stmt>osp.fields.models Subfield<import_from_stmt>osp.fields.models Subfield_Document<import_from_stmt>playhouse.postgres_ext ArrayField<import_from_stmt>peewee ForeignKeyField CharField<class_stmt>Citation(BaseModel)<block_start>text=ForeignKeyField(Text)<line_sep>document=ForeignKeyField(Document)<line_sep>tokens=ArrayField(CharField)<class_stmt>Meta<block_start>database=config.get_table_db('citation')<line_sep>indexes=((('document' 'text') <true>) )<block_end>@property<def_stmt>subfield self<block_start>""" Get the document's subfield, if any. Returns: Subfield """<line_sep><return>(Subfield.select().join(Subfield_Document).join(Document).where(Document.id<eq>self.document).order_by(Subfield_Document.offset.asc()).first())<block_end>@property<def_stmt>institution self<block_start>""" Get the document's institution, if any. Returns: Institution """<line_sep><return>(Institution.select().join(Institution_Document).join(Document).where(Document.id<eq>self.document).first())<block_end><block_end>
load("@bazel_skylib//lib:shell.bzl" "shell")<line_sep>load("@bazel_skylib//lib:paths.bzl" "paths")<line_sep>AsciidocInfo=provider(doc="Information about the asciidoc-generated files." fields={"primary_output_path":"Path of the primary output file beneath {resource_dir}." "resource_dir":"File for the directory containing all of the generated resources." } )<line_sep>_toolchain_type="//tools/build_rules/external_tools:external_tools_toolchain_type"<def_stmt>_asciidoc_impl ctx<block_start>resource_dir=ctx.actions.declare_directory(ctx.label.name+".d")<line_sep>primary_output="{name}.html".format(name=ctx.label.name)<line_sep># Declared as an output, but not saved as part of the default output group. # Build with --output_groups=+asciidoc_logfile to retain. logfile=ctx.actions.declare_file(ctx.label.name+".logfile")<line_sep># Locate the asciidoc binary from the toolchain and construct its args. asciidoc=ctx.toolchains[_toolchain_type].asciidoc<line_sep>args=["--backend" "html" "--no-header-footer"]<for_stmt>key,value ctx.attr.attrs.items()<block_start><if_stmt>value<block_start>args.append("--attribute=%s=%s"%(key value))<block_end><else_stmt><block_start>args.append("--attribute=%s!"%(key ))<block_end><block_end><if_stmt>ctx.attr.example_script<block_start>args.append("--attribute=example_script="+ctx.file.example_script.path)<block_end>args<augadd>["--conf-file=%s"%c.path<for>c ctx.files.confs]<line_sep>args<augadd>["-o" paths.join(resource_dir.path primary_output)]<line_sep>args.append(ctx.file.src.path)<line_sep># Get the path where all our necessary tools are located so it can be set # to PATH in our run_shell command. tool_path=ctx.toolchains[_toolchain_type].path<line_sep># Resolve data targets to get input files and runfiles manifests. data,_,manifests=ctx.resolve_command(tools=ctx.attr.data)<line_sep># Run asciidoc and capture stderr to logfile. If it succeeds, look in the # captured log for error messages and fail if we find any. ctx.actions.run_shell(inputs=([ctx.file.src]+ctx.files.confs+([ctx.file.example_script]<if>ctx.file.example_script<else>[])+data) input_manifests=manifests outputs=[resource_dir logfile] arguments=args command="\n".join(["set -e" "mkdir -p {resource_dir}".format(resource_dir=shell.quote(resource_dir.path)) # Run asciidoc itself, and fail if it returns nonzero. "{asciidoc} \"$@\" 2> >(tee -a {logfile} >&2)".format(logfile=shell.quote(logfile.path) asciidoc=shell.quote(asciidoc) ) # The tool succeeded, but now check for error diagnostics. 'if grep -q -e "filter non-zero exit code" -e "no output from filter" {logfile}; then'.format(logfile=shell.quote(logfile.path) ) "exit 1" "fi" # Move SVGs to the appropriate directory. "find . -name '*.svg' -maxdepth 1 -exec mv '{{}}' {out}/ \\;".format(out=shell.quote(resource_dir.path)) ]) env={"PATH":tool_path} mnemonic="RunAsciidoc" )<line_sep><return>[DefaultInfo(files=depset([resource_dir])) OutputGroupInfo(asciidoc_logfile=depset([logfile])) AsciidocInfo(primary_output_path=primary_output resource_dir=resource_dir) ]<block_end>asciidoc=rule(implementation=_asciidoc_impl toolchains=["//tools/build_rules/external_tools:external_tools_toolchain_type"] attrs={"src":attr.label(doc="asciidoc file to process" allow_single_file=<true> ) "attrs":attr.string_dict(doc="Dict of attributes to pass to asciidoc as --attribute=KEY=VALUE" ) "confs":attr.label_list(doc="`conf-file`s to pass to asciidoc" allow_files=<true> ) "data":attr.label_list(doc="Files/targets used during asciidoc generation. 
Only needed for tools used in example_script." allow_files=<true> ) "example_script":attr.label(doc="Script to pass to asciidoc as --attribute=example_script=VALUE." allow_single_file=<true> ) } doc="Generate asciidoc" )<line_sep>
"""queries for the 'chat' method family"""<import_stmt>snug<import_from_stmt>.query json_post<import_from_stmt>.types Message<line_sep>@json_post('chat.postMessage' rtype=Message key='message')<def_stmt>post_message channel:str text:str<arrow>snug.Query[Message]<block_start><return>{'channel':channel 'text':text}<block_end>
<import_stmt>numpy<as>np<import_from_stmt>sklearn.cluster FeatureAgglomeration<import_from_stmt>sklearn.ensemble ExtraTreesClassifier GradientBoostingClassifier VotingClassifier<import_from_stmt>sklearn.feature_selection SelectFromModel<import_from_stmt>sklearn.model_selection train_test_split<import_from_stmt>sklearn.pipeline make_pipeline make_union<import_from_stmt>sklearn.preprocessing FunctionTransformer<line_sep># NOTE: Make sure that the class is labeled 'class' in the data file tpot_data=np.recfromcsv('PATH/TO/DATA/FILE' delimiter='COLUMN_SEPARATOR' dtype=np.float64)<line_sep>features=np.delete(tpot_data.view(np.float64).reshape(tpot_data.size -1) tpot_data.dtype.names.index('class') axis=1)<line_sep>training_features,testing_features,training_classes,testing_classes=train_test_split(features tpot_data['class'] random_state=42)<line_sep>exported_pipeline=make_pipeline(make_union(make_union(FeatureAgglomeration(affinity="euclidean" linkage="ward") SelectFromModel(estimator=ExtraTreesClassifier(bootstrap=<false> class_weight=<none> criterion='gini' max_depth=<none> max_features=0.02 max_leaf_nodes=<none> min_impurity_split=1e-07 min_samples_leaf=1 min_samples_split=2 min_weight_fraction_leaf=0.0 n_estimators=10 n_jobs=1 oob_score=<false> random_state=<none> verbose=0 warm_start=<false>) threshold=0.06)) FunctionTransformer(<lambda>X:X)) GradientBoostingClassifier(learning_rate=0.21 max_features=0.21 n_estimators=500))<line_sep>exported_pipeline.fit(training_features training_classes)<line_sep>results=exported_pipeline.predict(testing_features)<line_sep>
<import_stmt>os<import_stmt>base64<import_from_stmt>Crypto.PublicKey RSA<import_from_stmt>pyinfraboxutils.secrets encrypt_secret<import_stmt>psycopg2<line_sep>private_key_path=os.environ.get('INFRABOX_RSA_PRIVATE_KEY_PATH' '/var/run/secrets/infrabox.net/rsa/id_rsa')<def_stmt>decrypt_secret s<block_start><with_stmt>open(private_key_path)<as>f<block_start>key=RSA.importKey(f.read())<line_sep>s=base64.b64decode(s)<line_sep><return>key.decrypt(s)<block_end><block_end><def_stmt>migrate conn<block_start>cur=conn.cursor(cursor_factory=psycopg2.extras.DictCursor)<line_sep>cur.execute(''' SELECT id, value FROM secret ''')<line_sep>secrets=cur.fetchall()<line_sep>cur.close()<for_stmt>s secrets<block_start>value=decrypt_secret(s['value'])<line_sep>new_value=encrypt_secret(value)<line_sep>cur=conn.cursor()<line_sep>cur.execute(''' UPDATE secret SET value = %s WHERE id = %s ''' [new_value s['id']])<line_sep>cur.close()<block_end><block_end>
<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_stmt>numpy<as>np<import_stmt>matplotlib.pyplot<as>plt<def_stmt>solve_CVC xmin xmax tmin tmax f g V Nx Nt<block_start>"""Solve u_t + a(x) * u_x = 0 """<line_sep># Case I: Analytical solution for a(x)=1, u(x,0)=V(x) (V,V' periodic) x=np.linspace(xmin xmax Nx)<line_sep>t=np.linspace(tmin tmax Nt)<line_sep>u=V((x[: <none>]-t)%1)<line_sep># Case II: Wendroff for a(x)=1, u(x,0)=f(x), u(0,t)=g(t) (f(0)=g(0)) """ x = np.linspace(xmin, xmax, Nx) t = np.linspace(tmin, tmax, Nt) h = x[1] - x[0] dt = t[1] - t[0] lam = dt / h u = np.zeros([Nx, Nt]) u[:, 0] = f(x) u[0, :] = g(t) r = (1 - lam) / (1 + lam) K = np.eye(Nx - 1, k=0) K_temp = np.eye(Nx - 1, k=0) Trans = np.eye(Nx - 1, k=-1) for _ in range(Nx - 2): K_temp = (-r) * (Trans @ K_temp) K += K_temp D = r * np.eye(Nx - 1, k=0) + np.eye(Nx - 1, k=-1) for n in range(Nt - 1): b = np.zeros(Nx - 1) b[0] = g(n * dt) - r * g((n + 1) * dt) u[1:, n + 1] = K @ (D @ u[1:, n] + b) """<line_sep># Case III: Wendroff for a(x)=1+0.1*V(x), u(x,0)=f(x), u(0,t)=g(t) (f(0)=g(0)) """ x = np.linspace(xmin, xmax, Nx) t = np.linspace(tmin, tmax, Nt) h = x[1] - x[0] dt = t[1] - t[0] lam = dt / h v = 1 + 0.1 * V(x) u = np.zeros([Nx, Nt]) u[:, 0] = f(x) u[0, :] = g(t) a = (v[:-1] + v[1:]) / 2 k = (1 - a * lam) / (1 + a * lam) K = np.eye(Nx - 1, k=0) K_temp = np.eye(Nx - 1, k=0) Trans = np.eye(Nx - 1, k=-1) for _ in range(Nx - 2): K_temp = (-k[:, None]) * (Trans @ K_temp) K += K_temp D = np.diag(k) + np.eye(Nx - 1, k=-1) for n in range(Nt - 1): b = np.zeros(Nx - 1) b[0] = g(n * dt) - k[0] * g((n + 1) * dt) u[1:, n + 1] = K @ (D @ u[1:, n] + b) """<line_sep># Case IV: Wendroff for a(x)=1+0.1*(V(x)+V(1-x))/2, u(x,0)=f(x) (f,f' periodic) """ x = np.linspace(xmin, xmax, Nx) t = np.linspace(tmin, tmax, Nt) h = x[1] - x[0] dt = t[1] - t[0] lam = dt / h v = 1 + 0.1 * (V(x) + V(x)[::-1]) / 2 u = np.zeros([Nx, Nt]) u[:, 0] = f(x) a = (v[:-1] + v[1:]) / 2 I = np.eye(Nx - 1) Ir = np.roll(I, 1, axis=0) D = lam * a[:, None] * (I - Ir) A = I + Ir + D B = I + Ir - D for n in range(Nt - 1): u[1:, n + 1] = np.linalg.solve(A, B @ u[1:, n]) u[0, :] = u[-1, :] """<line_sep><return>x t u<block_end><def_stmt>main # Case I: Analytical solution for a(x)=1, u(x,0)=V(x) (V,V' periodic) <block_start>xmin,xmax=0 1<line_sep>tmin,tmax=0 1<line_sep>V=<lambda>x:np.sin(2<times>np.pi<times>x)<line_sep>f=<none><line_sep>g=<none><line_sep>u_true=<lambda>x t:V(x-t)<line_sep>Nx,Nt=100 100<line_sep>x,t,u=solve_CVC(xmin xmax tmin tmax f g V Nx Nt)<line_sep>print(np.max(abs(u-u_true(x[: <none>] t))))<line_sep>print(np.average(abs(u-u_true(x[: <none>] t))))<line_sep># Case II: Wendroff for a(x)=1, u(x,0)=V(x), u(0,t)=0 (V(0)=0) """ xmin, xmax = 0, 1 tmin, tmax = 0, 1 V = None f = lambda x: (2 * np.pi * x) ** 5 g = lambda t: (2 * np.pi * (-t)) ** 5 u_true = lambda x, t: (2 * np.pi * (x - t)) ** 5 Nx, Nt = 100, 100 x, t, u = solve_CVC(xmin, xmax, tmin, tmax, f, g, V, Nx, Nt) print(np.max(abs(u - u_true(x[:, None], t)))) print(np.average(abs(u - u_true(x[:, None], t)))) """<line_sep># Case III: Wendroff for a(x)=1+0.1*V(x), u(x,0)=f(x), u(0,t)=g(t) (f(0)=g(0)) """ vel = 1 xmin, xmax = 0, 1 tmin, tmax = 0, 1 V = lambda x: np.ones_like(x) * vel f = lambda x: np.sin(2 * np.pi * x) g = lambda t: np.sin(2 * np.pi * (-(1 + 0.1 * vel) * t)) u_true = lambda x, t: np.sin(2 * np.pi * (x - (1 + 0.1 * vel) * t)) Nx, Nt = 100, 100 x, t, u = solve_CVC(xmin, xmax, tmin, tmax, f, g, V, Nx, Nt) print(np.max(abs(u - 
u_true(x[:, None], t)))) print(np.average(abs(u - u_true(x[:, None], t)))) """<line_sep># Case IV: Wendroff for a(x)=1+0.1*(V(x)+V(1-x))/2, u(x,0)=f(x) (f,f' periodic) """ vel = 1 xmin, xmax = 0, 1 tmin, tmax = 0, 1 V = lambda x: np.ones_like(x) * vel f = lambda x: np.sin(2 * np.pi * x) g = lambda t: np.sin(2 * np.pi * (-(1 + 0.1 * vel) * t)) u_true = lambda x, t: np.sin(2 * np.pi * (x - (1 + 0.1 * vel) * t)) Nx, Nt = 100, 100 x, t, u = solve_CVC(xmin, xmax, tmin, tmax, f, g, V, Nx, Nt) print(np.max(abs(u - u_true(x[:, None], t)))) print(np.average(abs(u - u_true(x[:, None], t)))) """<line_sep># plot u_true=u_true(x[: <none>] t)<line_sep>error=abs(u-u_true)<line_sep>axis=plt.subplot(111)<line_sep>plt.imshow(error cmap="rainbow" vmin=0)<line_sep>plt.colorbar()<line_sep>xlabel=[format(i ".1f")<for>i np.linspace(0 1 num=11)]<line_sep>ylabel=[format(i ".1f")<for>i np.linspace(0 1 num=11)]<line_sep>axis.set_xticks(range(0 101 10))<line_sep>axis.set_xticklabels(xlabel)<line_sep>axis.set_yticks(range(0 101 10))<line_sep>axis.set_yticklabels(ylabel)<line_sep>axis.set_xlabel("t")<line_sep>axis.set_ylabel("x")<line_sep>axis.set_title(r"Error" fontdict={"fontsize":30} loc="left")<line_sep><return>error<block_end><if_stmt>__name__<eq>"__main__"<block_start>error=main()<block_end>
<import_from_stmt>.PertFunction PertFunction<import_from_stmt>.Loss *<import_from_stmt>.Reward *<import_from_stmt>.Bleu *<line_sep>
<import_stmt>logging<import_from_stmt>functools partial<import_from_stmt>gdc_client.common.config GDCClientConfigShared<line_sep>logger=logging.getLogger("gdc-client")<line_sep>HELP=("Path to INI-type config file. See what settings will look like if a custom"<concat>" config file is used")<class_stmt>SettingsResolver(object)<block_start><def_stmt>__init__ self config_file<block_start>self.config=GDCClientConfigShared(config_file)<block_end><def_stmt>download self<block_start>logger.info(self.config.to_display_string("download"))<line_sep><return>self.config.to_display_string("download")<block_end><def_stmt>upload self<block_start>logger.info(self.config.to_display_string("upload"))<line_sep><return>self.config.to_display_string("upload")<block_end><block_end><def_stmt>resolve config_file args<block_start>resolver=SettingsResolver(config_file)<line_sep>func=getattr(resolver args.section)<line_sep><return>func()<block_end><def_stmt>config parser config_file=<none><block_start>parser.add_argument("--config" help=HELP metavar="FILE")<line_sep>choices=parser.add_subparsers(title="Settings to display" dest="section")<line_sep>choices.required=<true><line_sep>download_choice=choices.add_parser("download" help="Display download settings")<line_sep>download_choice.add_argument("--config" help=HELP metavar="FILE")<line_sep>download_choice.set_defaults(func=partial(resolve config_file))<line_sep>upload_choice=choices.add_parser("upload" help="Display upload settings")<line_sep>upload_choice.add_argument("--config" help=HELP metavar="FILE")<line_sep>upload_choice.set_defaults(func=partial(resolve config_file))<block_end>
<import_from_stmt>vit.formatter.modified Modified<class_stmt>ModifiedJulian(Modified)<block_start><def_stmt>format self modified task<block_start><return>self.julian(modified)<block_end><block_end>
<import_from_stmt>hendrix.deploy.base HendrixDeploy<import_from_stmt>hendrix.experience hey_joe<line_sep>deployer=HendrixDeploy(options={'wsgi':'example_app.wsgi.application' 'http_port':7575})<line_sep>websocket_service=hey_joe.WebSocketService("127.0.0.1" 9000)<line_sep>deployer.add_non_tls_websocket_service(websocket_service)<line_sep>deployer.run()<line_sep>
<import_from_stmt>mealie.core.config determine_sqlite_path settings<line_sep>DB_URL=determine_sqlite_path(path=<true> suffix="test")<line_sep>DB_URL.unlink(missing_ok=<true>)<if_stmt>settings.DB_ENGINE<ne>"postgres"# Monkeypatch Database Testing <block_start>settings.DB_URL=determine_sqlite_path(path=<false> suffix="test")<block_end>
# Copyright 2017-2020 EPAM Systems, Inc. (https://www.epam.com/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <class_stmt>ShareMountModel(object)<block_start><def_stmt>__init__ self<block_start>self.identifier=<none><line_sep>self.region_id=<none><line_sep>self.mount_root=<none><line_sep>self.mount_type=<none><line_sep>self.mount_options=<none><block_end>@classmethod<def_stmt>load cls json<block_start>instance=ShareMountModel()<if_stmt><not>json<block_start><return><none><block_end>instance.identifier=json['id']<line_sep>instance.region_id=json['regionId']<line_sep>instance.mount_root=json['mountRoot']<line_sep>instance.mount_type=json['mountType']<line_sep>instance.mount_options=json['mountOptions']<if>'mountOptions'<in>json<else><none><line_sep><return>instance<block_end><block_end>
<import_stmt>logging<class_stmt>SigOptExperiment<block_start><def_stmt>__init__ self connection<block_start>self.connection=connection<block_end><def_stmt>initialize_random_experiment self experiment_name project_name parameters_list metrics_list observation_budget metadata parallel_bandwidth=1<block_start><return>self.initialize_experiment(experiment_name project_name parameters_list list() list() metrics_list observation_budget metadata "random" parallel_bandwidth)<block_end><def_stmt>initialize_bayesian_experiment self experiment_name project_name parameters_list metrics_list observation_budget metadata parallel_bandwidth<block_start><return>self.initialize_experiment(experiment_name project_name parameters_list list() list() metrics_list observation_budget metadata "offline" parallel_bandwidth)<block_end><def_stmt>initialize_experiment self experiment_name project_name parameters_list conditionals_list linear_constraints_list metrics_list observation_budget metadata experiment_type parallel_bandwidth=1<block_start>experiment=self.connection.experiments().create(name=experiment_name project=project_name # Define which parameters you would like to tune parameters=parameters_list linear_constraints=linear_constraints_list conditionals=conditionals_list metrics=metrics_list parallel_bandwidth=parallel_bandwidth # Define an Observation Budget for your experiment observation_budget=observation_budget metadata=metadata type=experiment_type)<line_sep>logging.info("Created experiment: https://sigopt.com/experiment/%s" experiment.id)<line_sep><return>experiment<block_end><def_stmt>get_initialized_experiment self experiment_id<block_start><return>self.connection.experiments(experiment_id).fetch()<block_end><def_stmt>get_suggestions self experiment<block_start><return>self.connection.experiments(experiment.id).suggestions().create()<block_end><def_stmt>get_suggestions_meatadata self experiment metadata_dict<block_start><return>self.connection.experiments(experiment.id).suggestions().create(metadata=metadata_dict)<block_end><def_stmt>get_best_suggestions self experiment<block_start><return>self.connection.experiments(experiment.id).best_assignments().fetch()<block_end><def_stmt>update_suggestion self experiment_id suggestion_id metadata_dict<block_start><return>self.connection.experiments(experiment_id).suggestions(suggestion_id).update(metadata=metadata_dict)<block_end><def_stmt>update_experiment self experiment suggestion evaluated_value<block_start>observation=self.connection.experiments(experiment.id).observations().create(suggestion=suggestion.id value=evaluated_value)<line_sep><return>self.connection.experiments(experiment.id).fetch() observation<block_end><def_stmt>update_experiment_multimetric_metadata self experiment suggestion evaluated_value metadata_dict failed=<false><block_start>logging.info("updating experiment %s with metadata %s" experiment.id str(metadata_dict))<line_sep>self.connection.experiments(experiment.id).observations().create(suggestion=suggestion.id values=evaluated_value failed=failed metadata=metadata_dict)<line_sep><return>self.connection.experiments(experiment.id).fetch()<block_end><def_stmt>update_experiment_multimetric self experiment suggestion evaluated_values failed=<false><block_start>self.connection.experiments(experiment.id).observations().create(suggestion=suggestion.id values=evaluated_values failed=failed)<line_sep><return>self.connection.experiments(experiment.id).fetch()<block_end><def_stmt>create_experiment_metadata self experiment 
metadata_dict<block_start>self.connection.experiments(experiment.id).observations().create(metadata=metadata_dict)<line_sep><return>self.connection.experiments(experiment.id).fetch()<block_end><def_stmt>create_observation_metadata self experiment observation metadata_dict<block_start>updated_observation=self.connection.experiments(experiment.id).observations(observation.id).update(metadata=metadata_dict)<line_sep><return>self.connection.experiments(experiment.id).fetch() updated_observation<block_end><def_stmt>get_all_experiments self<block_start><return>self.connection.experiments().fetch()<block_end><def_stmt>get_all_observations self experiment<block_start><return>self.connection.experiments(experiment.id).observations().fetch()<block_end><def_stmt>archive_experiment self experiment<block_start>logging.info("archiving experiment with id: %s" experiment.id)<line_sep>self.connection.experiments(experiment.id).delete()<block_end><block_end>
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserve. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>paddle<import_stmt>paddle.nn<as>nn<import_stmt>math<import_stmt>cv2<import_stmt>numpy<as>np<import_from_stmt>ppgan.utils.download get_path_from_url<import_from_stmt>ppgan.models.generators GPEN<import_from_stmt>ppgan.faceutils.face_detection.detection.blazeface.utils *<line_sep>GPEN_weights='https://paddlegan.bj.bcebos.com/models/GPEN-512.pdparams'<class_stmt>FaceEnhancement(object)<block_start><def_stmt>__init__ self path_to_enhance=<none> size=512 batch_size=1<block_start>super(FaceEnhancement self).__init__()<line_sep># Initialise the face detector <if_stmt>path_to_enhance<is><none><block_start>model_weights_path=get_path_from_url(GPEN_weights)<line_sep>model_weights=paddle.load(model_weights_path)<block_end><else_stmt><block_start>model_weights=paddle.load(path_to_enhance)<block_end>self.face_enhance=GPEN(size=512 style_dim=512 n_mlp=8)<line_sep>self.face_enhance.load_dict(model_weights)<line_sep>self.face_enhance.eval()<line_sep>self.size=size<line_sep>self.mask=np.zeros((512 512) np.float32)<line_sep>cv2.rectangle(self.mask (26 26) (486 486) (1 1 1) -1 cv2.LINE_AA)<line_sep>self.mask=cv2.GaussianBlur(self.mask (101 101) 11)<line_sep>self.mask=cv2.GaussianBlur(self.mask (101 101) 11)<line_sep>self.mask=paddle.tile(paddle.to_tensor(self.mask).unsqueeze(0).unsqueeze(-1) repeat_times=[batch_size 1 1 3]).numpy()<block_end><def_stmt>enhance_from_image self img<block_start><if_stmt>isinstance(img np.ndarray)<block_start>img,_=resize_and_crop_image(img 512)<line_sep>img=paddle.to_tensor(img).transpose([2 0 1])<block_end><else_stmt><block_start><assert_stmt>img.shape<eq>[3 512 512]<block_end><return>self.enhance_from_batch(img.unsqueeze(0))[0]<block_end><def_stmt>enhance_from_batch self img<block_start><if_stmt>isinstance(img np.ndarray)<block_start>img_ori,_=resize_and_crop_batch(img 512)<line_sep>img=paddle.to_tensor(img_ori).transpose([0 3 1 2])<block_end><else_stmt><block_start><assert_stmt>img.shape[1:]<eq>[3 512 512]<line_sep>img_ori=img.transpose([0 2 3 1]).numpy()<block_end>img_t=(img/255.-0.5)/0.5<with_stmt>paddle.no_grad()<block_start>out,__=self.face_enhance(img_t)<block_end>image_tensor=out<times>0.5+0.5<line_sep>image_tensor=image_tensor.transpose([0 2 3 1])# RGB image_numpy=paddle.clip(image_tensor 0 1)<times>255.0<line_sep>out=image_numpy.astype(np.uint8).cpu().numpy()<line_sep><return>out<times>self.mask+(1-self.mask)<times>img_ori<block_end><block_end>
<import_from_stmt>django.db.models CharField ManyToManyField BooleanField<import_from_stmt>open.core.betterself.constants BetterSelfResourceConstants<import_from_stmt>open.core.betterself.models.ingredient_composition IngredientComposition<import_from_stmt>open.utilities.models BaseModelWithUserGeneratedContent<class_stmt>Supplement(BaseModelWithUserGeneratedContent)<block_start>""" Could be a stack like BCAA (which would have 4 ingredient comps) Or could just be something simple like Caffeine. """<line_sep>RESOURCE_NAME=BetterSelfResourceConstants.SUPPLEMENTS<line_sep>name=CharField(max_length=300)<line_sep>ingredient_compositions=ManyToManyField(IngredientComposition blank=<true>)<line_sep>is_taken_with_food=BooleanField(default=<none> blank=<true> null=<true>)<class_stmt>Meta<block_start>unique_together=("user" "name")<line_sep>ordering=["user" "name"]<line_sep>verbose_name="Supplement"<line_sep>verbose_name_plural="Supplements"<block_end><block_end>
""" This is the file running on all the workers. They will run the classifier with the desired hyper parameters And return back the results. """<line_sep># from __future__ import absolute_import, unicode_literals # from __future__ import absolute_import, unicode_literals # from celery import Celery <import_from_future_stmt> absolute_import unicode_literals<import_from_stmt>.celery app<line_sep>#whether sklearn_xgboost models should be enables on Celery include_sklearn_xgboost=<true><line_sep>#whether prophet model should be enabled on Celery include_prophet=<true><line_sep># app = Celery('Mango', # broker='amqp://', # backend='rpc://') # # # Optional configuration # app.conf.update( # result_expires=3600, # broker_heartbeat = 0 # ) <if_stmt>include_sklearn_xgboost<block_start>""" All the Classifier Functions from sklearn """<import_stmt>sklearn<line_sep>""" All the Classifier Functions from xgboost """<import_stmt>xgboost<import_from_stmt>sklearn.model_selection cross_val_score<import_stmt>numpy<as>np<import_from_stmt>importlib import_module<line_sep>#Global variables to identify dataset is loaded by the worker X=<none><line_sep>Y=<none><line_sep>worker_dataset_name=<none><line_sep>#global variables to identify classifier loaded by the worker clf_fxn=<none><line_sep>worker_clf_name=<none><line_sep>num_counter=0<line_sep># load the dataset for the classifier <def_stmt>get_data_loader dataset_name<block_start><global>worker_dataset_name<line_sep>module=import_module('sklearn.datasets')<line_sep>data_loader=getattr(module dataset_name)<line_sep>worker_dataset_name=dataset_name<line_sep><return>data_loader<block_end># load the classifier as passed to the worker <def_stmt>get_clf clf_name<block_start><global>worker_clf_name<line_sep>worker_clf_name=clf_name<for_stmt>module sklearn.__all__<block_start><try_stmt><block_start>module=import_module(f'sklearn.{module}')<try_stmt><block_start><for_stmt>clf module.__all__<block_start><if_stmt>clf<eq>clf_name<block_start>clf_function=getattr(module clf_name)<line_sep><return>clf_function<block_end><block_end><block_end><except_stmt><block_start><pass><block_end><block_end><except_stmt><block_start><pass><block_end><block_end><for_stmt>module xgboost.__all__<block_start><try_stmt><block_start><if_stmt>module<eq>clf_name<block_start>module=import_module(f'xgboost')<line_sep>clf_function=getattr(module clf_name)<line_sep><return>clf_function<block_end><block_end><except_stmt>Exception<as>e<block_start>print(e)<block_end><block_end><block_end>@app.task<def_stmt>run_clf_celery clf_name dataset_name hyper_par=<none><block_start><global>X Y clf_fxn worker_clf_name worker_dataset_name num_counter<line_sep>num_counter=num_counter+1<line_sep>#print('Worked is called:',num_counter) #load dataset if not done already <if_stmt>worker_dataset_name<ne>dataset_name<block_start>data_loader=get_data_loader(dataset_name)<line_sep>X,Y=data_loader(return_X_y=<true>)<block_end>#load classifier if not done already <if_stmt>worker_clf_name<ne>clf_name<block_start>clf_fxn=get_clf(clf_name)<block_end>#Assign the hyper parameters to the classifier <if_stmt>hyper_par<ne><none><block_start>clf=clf_fxn(**hyper_par)<block_end><else_stmt><block_start>clf=clf_fxn()<block_end>accuracy=cross_val_score(clf X Y cv=3 scoring='accuracy').mean()<line_sep>#print('accuracy is:',accuracy) <return>accuracy<block_end><block_end># If include_prophet is set to true <if_stmt>include_prophet<block_start>""" Enabling the functionality of running prophet on PJME 
"""<import_stmt>numpy<as>np<import_from_stmt>.prophet Prophet<import_from_stmt>.xgboosttree Xgboosttree<import_from_stmt>sklearn.model_selection cross_val_score<import_from_stmt>sklearn.metrics mean_squared_error<import_stmt>os<line_sep>data_path=os.path.abspath('.')+'/classifiers/data/'<line_sep>model=Xgboosttree()<line_sep>#X_train, y_train = model.load_train_dataset("data/PJME/train_data") X_train,y_train=model.load_train_dataset(data_path+"PJME/train_data")<line_sep>X_validate,y_validate=model.load_train_dataset(data_path+"PJME/validate_data")<line_sep>@app.task<def_stmt>run_prophet hyper_par<block_start><global>X_train y_train X_validate y_validate<line_sep>clf=Prophet(**hyper_par)<line_sep>clf.fit(X_train y_train.ravel())<line_sep>y_pred=clf.predict(X_validate)<line_sep>mse=mean_squared_error(y_validate y_pred)<line_sep>mse=mse/10e5<line_sep>result=(-1.0)<times>mse<line_sep><return>result<block_end><block_end>
<import_from_stmt>.widget Widget<import_from_stmt>.styles.utils prop<class_stmt>WidgetList<block_start>"""WidgetList Args: widgets (list, Widget): The list of widgets for a layer. default_widget (Widget, optional): The widget to be used by default. """<def_stmt>__init__ self widgets=<none> default_widget=<none><block_start>self._widgets=self._init_widgets(widgets default_widget)<block_end><def_stmt>_init_widgets self widgets default_widget<block_start><if_stmt>isinstance(widgets list)<block_start>widget_list=[]<for_stmt>widget widgets<block_start><if_stmt>isinstance(widget dict)<block_start>widget_list.append(Widget(widget))<block_end><elif_stmt>isinstance(widget Widget)<block_start><if_stmt>widget._type<eq>'default'<and>default_widget<block_start>widget._type=default_widget._type<line_sep>widget._prop=default_widget._prop<line_sep>widget._value=default_widget._value<block_end>widget_list.append(widget)<block_end><block_end><return>widget_list<block_end><if_stmt>isinstance(widgets dict)<block_start><return>[Widget(widgets)]<block_end><else_stmt><block_start><return>[]<block_end><block_end><def_stmt>get_widgets_info self<block_start>widgets_info=[]<for_stmt>widget self._widgets<block_start><if_stmt>widget<block_start>widgets_info.append(widget.get_info())<block_end><block_end><return>widgets_info<block_end><def_stmt>get_variables self<block_start>output={}<for_stmt>widget self._widgets<block_start><if_stmt>widget._variable_name<block_start>output[widget._variable_name]=prop(widget._value)<if>widget.has_bridge()<else>widget._value<block_end><block_end><return>output<block_end><block_end>
<import_from_stmt>django.db models<class_stmt>Flavor(models.Model)<block_start>label=models.CharField(max_length=12)<line_sep>memory=models.IntegerField()<line_sep>vcpu=models.IntegerField()<line_sep>disk=models.IntegerField()<def_stmt>__unicode__ self<block_start><return>self.label<block_end><block_end>
<import_stmt>pytest<import_stmt>time<line_sep>@pytest.mark.slow<def_stmt>test_slow <block_start>time.sleep(2)<assert_stmt>1+1<eq>2<block_end><def_stmt>test_fast <block_start><assert_stmt>1+1<eq>2<block_end>
<import_from_stmt>.base *<import_from_stmt>.presets *<line_sep>
<import_stmt>FWCore.ParameterSet.Config<as>cms<import_from_stmt>IOMC.EventVertexGenerators.VtxSmearedParameters_cfi Early2p2TeVCollisionVtxSmearingParameters VtxSmearedCommon<line_sep>VtxSmeared=cms.EDProducer("BetafuncEvtVtxGenerator" Early2p2TeVCollisionVtxSmearingParameters VtxSmearedCommon)<line_sep>
<import_stmt>numpy<as>np<import_from_stmt>sk_dsp_comm fec_conv<import_from_stmt>sk_dsp_comm digitalcom<as>dc<line_sep>np.random.seed(100)<line_sep>cc=fec_conv.FecConv()<line_sep>print(cc.Nstates)<import_stmt>matplotlib.pyplot<as>plt<import_stmt>numpy<as>np<import_from_stmt>sk_dsp_comm fec_conv<as>fc<line_sep>SNRdB=np.arange(2 12 .1)<line_sep>Pb_uc=fc.conv_Pb_bound(1/2 5 [1 4 12 32 80 192 448 1024] SNRdB 2)<line_sep>Pb_s=fc.conv_Pb_bound(1/2 5 [1 4 12 32 80 192 448 1024] SNRdB 1)<line_sep>plt.figure(figsize=(5 5))<line_sep>plt.semilogy(SNRdB Pb_uc)<line_sep>plt.semilogy(SNRdB Pb_s)<line_sep>plt.axis([2 12 1e-7 1e0])<line_sep>plt.xlabel(r'$E_b/N_0$ (dB)')<line_sep>plt.ylabel(r'Symbol Error Probability')<line_sep>#plt.legend(('Uncoded BPSK','R=1/2, K=5, Soft'),loc='best') plt.grid()<line_sep>plt.show()<line_sep>
"""Package-wide data and code."""<import_from_stmt>os environ<line_sep>UNENCRYPTED_TAGS=environ.get("EXCH_UNENCRYPTED_TAGS" "False").upper()<eq>"TRUE"<line_sep>
# Copyright (c) 2020, 2021, Oracle and/or its affiliates. # # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ # <import_stmt>subprocess<import_stmt>mysqlsh<import_stmt>sys<import_stmt>os<import_stmt>logging<import_stmt>shutil<import_from_stmt>typing cast<import_from_stmt>.controller utils k8sobject<import_from_stmt>.controller.innodbcluster.cluster_api MySQLPod<line_sep>k8sobject.g_component="initconf"<line_sep>k8sobject.g_host=os.getenv("HOSTNAME")<line_sep>mysql=mysqlsh.mysql<def_stmt>init_conf datadir pod cluster logger<block_start>""" Initialize MySQL configuration files and init scripts, which must be mounted in /mnt/mycnfdata. The source config files must be mounted in /mnt/initconf. The config files are them symlinked to /etc to be used by mysqld in the rest of the script. The main container should directly mount them in their final locations. Init scripts are executed by the mysql container entrypoint when it's initializing for the 1st time. """<line_sep>server_id=pod.index+cluster.parsed_spec.baseServerId<line_sep>report_host=f'{os.getenv("MY_POD_NAME")}.{cluster.name}-instances.{cluster.namespace}.svc.cluster.local'<line_sep>logger.info(f"Setting up configurations for {pod.name} server_id={server_id} report_host={report_host}")<line_sep>srcdir="/mnt/initconf/"<line_sep>destdir="/mnt/mycnfdata/"<line_sep>os.makedirs(destdir+"my.cnf.d" exist_ok=<true>)<line_sep>os.makedirs(destdir+"docker-entrypoint-initdb.d" exist_ok=<true>)<with_stmt>open(srcdir+"my.cnf.in")<as>f<block_start>data=f.read()<line_sep>data=data.replace("@@SERVER_ID@@" str(server_id))<line_sep>data=data.replace("@@HOSTNAME@@" str(report_host))<line_sep>data=data.replace("@@DATADIR@@" datadir)<with_stmt>open(destdir+"my.cnf" "w+")<as>mycnf<block_start>mycnf.write(data)<block_end><block_end><for_stmt>f os.listdir(srcdir)<block_start><if_stmt>f.startswith("initdb-")<block_start>shutil.copy(os.path.join(srcdir f) destdir+"docker-entrypoint-initdb.d")<if_stmt>f.endswith(".sh")<block_start>os.chmod(os.path.join(destdir+"docker-entrypoint-initdb.d" f) 0o555)<block_end><block_end><elif_stmt>f.endswith(".cnf")<block_start>shutil.copy(os.path.join(srcdir f) destdir+"my.cnf.d")<block_end><block_end><if_stmt>os.path.exists("/etc/my.cnf")<block_start>logger.info("Replacing /etc/my.cnf, old contents were:")<line_sep>logger.info(open("/etc/my.cnf").read())<line_sep>os.remove("/etc/my.cnf")<block_end>os.symlink(destdir+"my.cnf" "/etc/my.cnf")<if_stmt>os.path.exists("/etc/my.cnf.d")<block_start>os.rmdir("/etc/my.cnf.d")<block_end>os.symlink(destdir+"my.cnf.d" "/etc/my.cnf.d")<line_sep>logger.info(f"Configuration done")<block_end><def_stmt>main argv<block_start>datadir=argv[1]<if>len(argv)<g>1<else>"/var/lib/mysql"<line_sep>mysqlsh.globals.shell.options.useWizards=<false><line_sep>logging.basicConfig(level=logging.DEBUG format='%(asctime)s - [%(levelname)s] [%(name)s] %(message)s' datefmt="%Y-%m-%dT%H:%M:%S")<line_sep>logger=logging.getLogger("initmysql")<line_sep>name=cast(str os.getenv("MY_POD_NAME"))<line_sep>namespace=cast(str os.getenv("MY_POD_NAMESPACE"))<line_sep>utils.log_banner(__file__ logger)<line_sep>logger.info(f"Configuring mysql pod {namespace}/{name}, datadir={datadir}")<line_sep>logger.debug(f"Initial contents of {datadir}:")<line_sep>subprocess.run(["ls" "-l" datadir])<line_sep>logger.debug("Initial contents of /mnt:")<line_sep>subprocess.run(["ls" "-lR" "/mnt"])<try_stmt><block_start>pod=MySQLPod.read(name 
namespace)<line_sep>cluster=pod.get_cluster()<line_sep>init_conf(datadir pod cluster logger)<block_end><except_stmt>Exception<as>e<block_start><import_stmt>traceback<line_sep>traceback.print_exc()<line_sep>logger.critical(f"Unhandled exception while bootstrapping MySQL: {e}")<line_sep># TODO post event to the Pod and the Cluster object if this is the seed <return>1<block_end># TODO support for restoring from clone snapshot or MEB goes in here <return>0<block_end>
<import_stmt>unittest<import_from_stmt>slack_sdk.oauth.installation_store InstallationStore<import_from_stmt>slack_sdk.oauth.installation_store.async_installation_store AsyncInstallationStore <class_stmt>TestInterface(unittest.TestCase)<block_start><def_stmt>setUp self<block_start><pass><block_end><def_stmt>tearDown self<block_start><pass><block_end><def_stmt>test_sync self<block_start>store=InstallationStore()<line_sep>self.assertIsNotNone(store)<block_end><def_stmt>test_async self<block_start>store=AsyncInstallationStore()<line_sep>self.assertIsNotNone(store)<block_end><block_end>
<import_from_stmt>typing List<import_from_stmt>ray workflow<line_sep>@workflow.step<def_stmt>iterate array:List[str] result:str i:int<arrow>str<block_start><if_stmt>i<ge>len(array)<block_start><return>result<block_end><return>iterate.step(array result+array[i] i+1)<block_end><if_stmt>__name__<eq>"__main__"<block_start>workflow.init()<line_sep>print(iterate.step(["foo" "ba" "r"] "" 0).run())<block_end>
<import_stmt>unittest<import_stmt>pytest<import_stmt>hypothesis<import_from_stmt>grapl_analyzerlib.prelude GraphClient<import_from_stmt>grapl_analyzerlib.nodes.lens LensView LensQuery<line_sep>@pytest.mark.integration_test<class_stmt>TestQueryGen(unittest.TestCase)<block_start>@hypothesis.settings(deadline=<none>)@hypothesis.given(lens_name=hypothesis.strategies.text(max_size=100) )<def_stmt>test_weird_chars_in_lens_name self lens_name:str<arrow><none><block_start>""" Roundabout way to ensure some basic properties of filter generation. """<line_sep>client=GraphClient()<line_sep>lens=LensView.get_or_create(gclient=client lens_name=lens_name lens_type="engagement" )<line_sep>requery_lens=LensQuery().with_lens_name(lens_name).query_first(client)<assert_stmt>requery_lens.get_lens_name()<eq>lens_name<block_end><block_end>
<import_stmt>numpy<as>np<import_stmt>torch<import_from_stmt>torch.autograd Variable<import_stmt>const<class_stmt>DataLoader(object)<block_start><def_stmt>__init__ self src_sents max_len batch_size cuda=<true><block_start>self.cuda=cuda<line_sep>self.sents_size=len(src_sents)<line_sep>self._step=0<line_sep>self._stop_step=self.sents_size<floordiv>batch_size<line_sep>self._batch_size=batch_size<line_sep>self._max_len=max_len<line_sep>self.gen_data(src_sents)<block_end><def_stmt>gen_data self src_sents<block_start>src_sents=np.asarray(src_sents)<line_sep>self._src_sents=src_sents[: :-1]<line_sep>self._label=src_sents[: 1:]<block_end><def_stmt>_shuffle self<block_start>indices=np.arange(self._src_sents.shape[0])<line_sep>np.random.shuffle(indices)<line_sep>self._src_sents=self._src_sents[indices]<line_sep>self._label=self._label[indices]<block_end><def_stmt>__iter__ self<block_start><return>self<block_end><def_stmt>__next__ self<block_start><def_stmt>to_longest insts<block_start>inst_data_tensor=Variable(torch.from_numpy(insts))<if_stmt>self.cuda<block_start>inst_data_tensor=inst_data_tensor.cuda()<block_end><return>inst_data_tensor<block_end><if_stmt>self._step<eq>self._stop_step<block_start>self._step=0<line_sep><raise>StopIteration()<block_end>_start=self._step<times>self._batch_size<line_sep>_bsz=self._batch_size<line_sep>self._step<augadd>1<line_sep>data=to_longest(self._src_sents[_start:_start+_bsz])<line_sep>label=to_longest(self._label[_start:_start+_bsz])<line_sep><return>data label.contiguous().view(-1)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>data=torch.load("data/ch_pro_nlg.pt")<line_sep>_data=DataLoader(data['train'] data["max_word_len"] 64)<line_sep>d={v:k<for>k,v data['dict']['src'].items()}<line_sep>print([d[w]<for>s _data._src_sents<for>w s])<line_sep>print([d[w]<for>s _data._label<for>w s])<block_end>
# from icevision.all import * # first_task = tasks.Task("first") # second_task = tasks.Task("second") # record = BaseRecord( # ( # FilepathRecordComponent(), # InstancesLabelsRecordComponent(task=first_task), # BBoxesRecordComponent(task=first_task), # InstancesLabelsRecordComponent(task=second_task), # BBoxesRecordComponent(task=second_task), # ) # ) # record.builder_template() # [ # "record.set_img_size(<ImgSize>)", # "record.set_filepath(<Union[str, Path]>)", # "record.first.add_labels_names(<Sequence[Hashable]>)", # "record.first.add_bboxes(<Sequence[BBox]>)", # "record.second.add_labels_names(<Sequence[Hashable]>)", # "record.second.add_bboxes(<Sequence[BBox]>)", # ]
<import_from_stmt>django.conf.urls url<import_from_stmt>api.brands views<line_sep>app_name='osf'<line_sep>urlpatterns=[url(r'^$' views.BrandList.as_view() name=views.BrandList.view_name) url(r'^(?P<brand_id>\w+)/$' views.BrandDetail.as_view() name=views.BrandDetail.view_name) ]<line_sep>
<import_stmt>os<import_stmt>json<import_stmt>requests<import_stmt>telebot<import_stmt>re<import_from_stmt>telebot types<line_sep>LOKLAK_API_URL="http://loklak.org/api/search.json?q={query}"<line_sep>bot=telebot.TeleBot(os.environ['TELEGRAM_BOT_TOKEN'])<line_sep>user_results={}<def_stmt>get_tweet_rating tweet<block_start>""" Function that count tweet rating based on favourites and retweets """<line_sep><return>(tweet['retweet_count']<times>2)+tweet['favourites_count']<block_end><def_stmt>tweet_answer tweet tweets_left<block_start>""" Function that making text answer from tweet object """<line_sep>answer='"{message}" - {author} \n\n{link}\n\n{more} more tweets.'.format(message=tweet['text'] author=tweet['screen_name'] link=tweet['link'] more=tweets_left)<line_sep><return>answer<block_end>@bot.message_handler(commands=['start' 'help'])<def_stmt>description message<block_start>bot.reply_to(message "loklak.org bot - simple Telegram bot for searching tweets.\n"<concat>"Just send a message with your query and bot will process it, "<concat>"using loklag.org API. \n"<concat>"If you want to contribute, project is open source: "<concat>"https://github.com/sevazhidkov/tweets-search-bot\n"<concat>"You can search a particular user's entire tweets by enter \"/user:USERNAME\"")<block_end>@bot.message_handler(commands=['next-tweet' 'next_tweet'])<def_stmt>next_tweet message<block_start>user_id=message.from_user.id<if_stmt>user_id<in>user_results<and>user_results[user_id]<block_start>tweet=user_results[user_id].pop()<line_sep>bot.reply_to(message tweet_answer(tweet len(user_results[user_id])))<block_end><else_stmt><block_start>bot.reply_to(message "You haven't searched anything.")<block_end><block_end>@bot.message_handler(regexp="/user:.+")<def_stmt>user_search message<block_start>query_msg=message.text<line_sep>baseURL="http://loklak.org/api/search.json?q=from:"<line_sep>base_infoURL="http://loklak.org/api/user.json?screen_name="<line_sep>pattern=re.compile("/user:(.+)")<line_sep>mtch=pattern.match(query_msg)<if_stmt>mtch<block_start>username=mtch.group(1)<line_sep>raw=requests.get(baseURL+username)<line_sep>info_raw=requests.get(base_infoURL+username)<try_stmt><block_start>tweets=json.loads(raw.text)['statuses']<line_sep>info=json.loads(info_raw.text)['user']<line_sep>time_zone=info['time_zone']<line_sep>profile_image=info['profile_image_url']<line_sep>friends_num=info['friends_count']<block_end><except_stmt>ValueError<block_start><return><block_end><if_stmt>tweets<block_start>tweets.sort(key=get_tweet_rating)<line_sep>tweet=tweets.pop()<line_sep>user_results[message.from_user.id]=tweets<line_sep>#show a botton on top of input markup=types.ReplyKeyboardMarkup(row_width=1)<line_sep>markup.add('/next-tweet')<line_sep>full_text=""<line_sep>full_text<augadd>"Username:"+username+"\n"<line_sep>full_text<augadd>"Profile Picture:"+profile_image+"\n"<line_sep>full_text<augadd>"Friends Number:"+str(friends_num)+"\n"<line_sep>full_text<augadd>tweet_answer(tweet len(tweets))<line_sep>bot.reply_to(message full_text reply_markup=markup)<block_end><else_stmt><block_start>bot.reply_to(message "Error in find a user, make sure you are in a correct format. 
\"user:USERNAME\"")<block_end><block_end><else_stmt><block_start>bot.reply_to(message "Error in format, make sure you are in a correct format.")<block_end><block_end>@bot.message_handler(func=<lambda>m:<true>)<def_stmt>search message<block_start>query_msg=message.text<line_sep>result=requests.get(LOKLAK_API_URL.format(query=query_msg))<try_stmt><block_start>tweets=json.loads(result.text)['statuses']<block_end><except_stmt>ValueError<block_start><return><block_end><if_stmt>tweets# Find the best tweet for this search query, # by using sorting <block_start>tweets.sort(key=get_tweet_rating)<line_sep>tweet=tweets.pop()<line_sep>user_results[message.from_user.id]=tweets<line_sep>#show a botton on top of input markup=types.ReplyKeyboardMarkup(row_width=2)<line_sep>markup.add('/next-tweet')<line_sep>bot.reply_to(message tweet_answer(tweet len(tweets)) reply_markup=markup)<block_end><else_stmt># Delete words from message until result is not avaliable #Strategy: keep removing the smallest word in a sentence <block_start>words=query_msg.split()<if_stmt>(len(words)<g>1)<block_start>words.sort(key=len)<del_stmt>words[0]<line_sep>reconstructed=""<for_stmt>word words<block_start>reconstructed<augadd>word+" "<block_end>message.text=reconstructed<line_sep>search(message)<block_end><else_stmt><block_start>bot.reply_to(message '404 Not found')<block_end><block_end><block_end>bot.polling()<line_sep>
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. <class_stmt>ContentType<block_start>O365_CONNECTOR_CARD="application/vnd.microsoft.teams.card.o365connector"<line_sep>FILE_CONSENT_CARD="application/vnd.microsoft.teams.card.file.consent"<line_sep>FILE_DOWNLOAD_INFO="application/vnd.microsoft.teams.file.download.info"<line_sep>FILE_INFO_CARD="application/vnd.microsoft.teams.card.file.info"<block_end><class_stmt>Type<block_start>O365_CONNECTOR_CARD_VIEWACTION="ViewAction"<line_sep>O365_CONNECTOR_CARD_OPEN_URI="OpenUri"<line_sep>O365_CONNECTOR_CARD_HTTP_POST="HttpPOST"<line_sep>O365_CONNECTOR_CARD_ACTION_CARD="ActionCard"<line_sep>O365_CONNECTOR_CARD_TEXT_INPUT="TextInput"<line_sep>O365_CONNECTOR_CARD_DATE_INPUT="DateInput"<line_sep>O365_CONNECTOR_CARD_MULTICHOICE_INPUT="MultichoiceInput"<block_end>
<class_stmt>EncodingApiCommunicator(object)<block_start><def_stmt>__init__ self inner<block_start>self.inner=inner<block_end><def_stmt>call self path command arguments=<none> queries=<none> additional_queries=()<block_start>path=path.encode()<line_sep>command=command.encode()<line_sep>arguments=self.transform_dictionary(arguments<or>{})<line_sep>queries=self.transform_dictionary(queries<or>{})<line_sep>promise=self.inner.call(path command arguments queries additional_queries)<line_sep><return>self.decorate_promise(promise)<block_end><def_stmt>transform_dictionary self dictionary<block_start><return>dict(self.transform_item(item)<for>item dictionary.items())<block_end><def_stmt>transform_item self item<block_start>key,value=item<line_sep><return>(key.encode() value)<block_end><def_stmt>decorate_promise self promise<block_start><return>EncodedPromiseDecorator(promise)<block_end><block_end><class_stmt>EncodedPromiseDecorator(object)<block_start><def_stmt>__init__ self inner<block_start>self.inner=inner<block_end><def_stmt>get self<block_start>response=self.inner.get()<line_sep><return>response.map(self.transform_row)<block_end><def_stmt>__iter__ self<block_start><return>map(self.transform_row self.inner)<block_end><def_stmt>transform_row self row<block_start><return>dict(self.transform_item(item)<for>item row.items())<block_end><def_stmt>transform_item self item<block_start>key,value=item<line_sep><return>(key.decode() value)<block_end><block_end>
<import_from_stmt>collections Sequence<import_from_stmt>.constants LPARA RPARA<def_stmt>issequence obj<block_start><return>isinstance(obj Sequence)<block_end><def_stmt>issequence_except_str obj<block_start><if_stmt>isinstance(obj str)<block_start><return><false><block_end><return>isinstance(obj Sequence)<block_end><def_stmt>is_tuple_or_list obj<block_start><return>type(obj)<in>{tuple list}<block_end><def_stmt>emit_sexp sexpr<block_start>ol=[]<line_sep>stack=[sexpr]<while_stmt>len(stack)<g>0<block_start>sexpr=stack.pop()<if_stmt>is_tuple_or_list(sexpr)<block_start>stack.append(RPARA)<line_sep>rsexpr=[]<for_stmt>sub sexpr<block_start>rsexpr.insert(0 sub)<block_end>stack.extend(rsexpr)<line_sep>stack.append(LPARA)<block_end><else_stmt><block_start>ol.append(sexpr)<block_end><block_end>retval=''<line_sep>oldsitem=''<for_stmt>item ol<block_start>sitem=repr(item)<if_stmt>sitem[0]<eq>"'"<and>sitem[-1]<eq>"'"<block_start>sitem=sitem.replace('"' "\\\"")<line_sep>sitem='"'+sitem[1:-1]+'"'<block_end><if_stmt><not>((sitem<eq>')')<or>(oldsitem<eq>'('))<block_start>oldsitem=sitem<line_sep>sitem=' '+sitem<block_end><else_stmt><block_start>oldsitem=sitem<block_end>retval<augadd>sitem<block_end><return>retval[1:]<block_end>
#This file is part of ElectricEye. #SPDX-License-Identifier: Apache-2.0 #Licensed to the Apache Software Foundation (ASF) under one #or more contributor license agreements. See the NOTICE file #distributed with this work for additional information #regarding copyright ownership. The ASF licenses this file #to you under the Apache License, Version 2.0 (the #"License"); you may not use this file except in compliance #with the License. You may obtain a copy of the License at #http://www.apache.org/licenses/LICENSE-2.0 #Unless required by applicable law or agreed to in writing, #software distributed under the License is distributed on an #"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #KIND, either express or implied. See the License for the #specific language governing permissions and limitations #under the License. <import_stmt>boto3<import_stmt>datetime<import_stmt>os<import_from_stmt>check_register CheckRegister<line_sep>registry=CheckRegister()<line_sep># import boto3 clients licensemanager=boto3.client("license-manager")<line_sep>@registry.register_check("license-manager")<def_stmt>license_manager_hard_count_check cache:dict awsAccountId:str awsRegion:str awsPartition:str<arrow>dict<block_start>"""[LicenseManager.1] License Manager license configurations should be configured to enforce a hard limit"""<try_stmt># TODO: need to catch the case that License Manager is not setup <block_start>response=licensemanager.list_license_configurations()<line_sep>lmCheck=str(response["LicenseConfigurations"])<if_stmt>lmCheck<eq>"[]"<block_start><pass><block_end><else_stmt><block_start>myLiscMgrConfigs=response["LicenseConfigurations"]<for_stmt>lmconfigs myLiscMgrConfigs<block_start>liscConfigArn=str(lmconfigs["LicenseConfigurationArn"])<line_sep># ISO Time iso8601Time=(datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat())<try_stmt><block_start>response=licensemanager.get_license_configuration(LicenseConfigurationArn=liscConfigArn)<line_sep>liscConfigId=str(response["LicenseConfigurationId"])<line_sep>liscConfigName=str(response["Name"])<line_sep>hardLimitCheck=str(response["LicenseCountHardLimit"])<if_stmt>hardLimitCheck<eq>"False"<block_start>finding={"SchemaVersion":"2018-10-08" "Id":liscConfigArn+"/license-manager-enforce-hard-limit-check" "ProductArn":f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default" "GeneratorId":liscConfigArn "AwsAccountId":awsAccountId "Types":["Software and Configuration Checks/AWS Security Best Practices"] "FirstObservedAt":iso8601Time "CreatedAt":iso8601Time "UpdatedAt":iso8601Time "Severity":{"Label":"LOW"} "Confidence":99 "Title":"[LicenseManager.1] License Manager license configurations should be configured to enforce a hard limit" "Description":"License Manager license configuration "+liscConfigName+" does not enforce a hard limit. Enforcing a hard limit prevents new instances from being created that if you have already provisioned all available licenses. 
Refer to the remediation instructions to remediate this behavior" "Remediation":{"Recommendation":{"Text":"For information on hard limits refer to the License Configuration Parameters and Rules section of the AWS License Manager User Guide" "Url":"https://docs.aws.amazon.com/license-manager/latest/userguide/config-overview.html" }} "ProductFields":{"Product Name":"ElectricEye"} "Resources":[{"Type":"AwsLicenseManagerLicenseConfiguration" "Id":liscConfigArn "Partition":awsPartition "Region":awsRegion "Details":{"Other":{"licenseConfigurationId":liscConfigId "licenseConfigurationName":liscConfigName }} }] "Compliance":{"Status":"FAILED" "RelatedRequirements":["NIST CSF ID.AM-2" "NIST SP 800-53 CM-8" "NIST SP 800-53 PM-5" "AICPA TSC CC3.2" "AICPA TSC CC6.1" "ISO 27001:2013 A.8.1.1" "ISO 27001:2013 A.8.1.2" "ISO 27001:2013 A.12.5.1" ] } "Workflow":{"Status":"NEW"} "RecordState":"ACTIVE" }<line_sep><yield>finding<block_end><else_stmt><block_start>finding={"SchemaVersion":"2018-10-08" "Id":liscConfigArn+"/license-manager-enforce-hard-limit-check" "ProductArn":f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default" "GeneratorId":liscConfigArn "AwsAccountId":awsAccountId "Types":["Software and Configuration Checks/AWS Security Best Practices"] "FirstObservedAt":iso8601Time "CreatedAt":iso8601Time "UpdatedAt":iso8601Time "Severity":{"Label":"INFORMATIONAL"} "Confidence":99 "Title":"[LicenseManager.1] License Manager license configurations should be configured to enforce a hard limit" "Description":"License Manager license configuration "+liscConfigName+" enforces a hard limit." "Remediation":{"Recommendation":{"Text":"For information on hard limits refer to the License Configuration Parameters and Rules section of the AWS License Manager User Guide" "Url":"https://docs.aws.amazon.com/license-manager/latest/userguide/config-overview.html" }} "ProductFields":{"Product Name":"ElectricEye"} "Resources":[{"Type":"AwsLicenseManagerLicenseConfiguration" "Id":liscConfigArn "Partition":awsPartition "Region":awsRegion "Details":{"Other":{"licenseConfigurationId":liscConfigId "licenseConfigurationName":liscConfigName }} }] "Compliance":{"Status":"PASSED" "RelatedRequirements":["NIST CSF ID.AM-2" "NIST SP 800-53 CM-8" "NIST SP 800-53 PM-5" "AICPA TSC CC3.2" "AICPA TSC CC6.1" "ISO 27001:2013 A.8.1.1" "ISO 27001:2013 A.8.1.2" "ISO 27001:2013 A.12.5.1" ] } "Workflow":{"Status":"RESOLVED"} "RecordState":"ARCHIVED" }<line_sep><yield>finding<block_end><block_end><except_stmt>Exception<as>e<block_start>print(e)<block_end><block_end><block_end><block_end><except_stmt>Exception<as>e<block_start>print(e)<block_end><block_end>@registry.register_check("license-manager")<def_stmt>license_manager_disassociation_check cache:dict awsAccountId:str awsRegion:str awsPartition:str<arrow>dict<block_start>"""[LicenseManager.2] License Manager license configurations should disassociate hosts when license in scope is not found"""<try_stmt># TODO: need to catch the case that License Manager is not setup <block_start>response=licensemanager.list_license_configurations()<line_sep>lmCheck=str(response["LicenseConfigurations"])<if_stmt>lmCheck<eq>"[]"<block_start><pass><block_end><else_stmt><block_start>myLiscMgrConfigs=response["LicenseConfigurations"]<for_stmt>lmconfigs myLiscMgrConfigs<block_start>liscConfigArn=str(lmconfigs["LicenseConfigurationArn"])<line_sep># ISO Time 
iso8601Time=(datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat())<try_stmt><block_start>response=licensemanager.get_license_configuration(LicenseConfigurationArn=liscConfigArn)<line_sep>liscConfigId=str(response["LicenseConfigurationId"])<line_sep>liscConfigName=str(response["Name"])<line_sep>disassocCheck=str(response["DisassociateWhenNotFound"])<if_stmt>disassocCheck<eq>"False"<block_start>finding={"SchemaVersion":"2018-10-08" "Id":liscConfigArn+"/license-manager-disassociation-check" "ProductArn":f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default" "GeneratorId":liscConfigArn "AwsAccountId":awsAccountId "Types":["Software and Configuration Checks/AWS Security Best Practices"] "FirstObservedAt":iso8601Time "CreatedAt":iso8601Time "UpdatedAt":iso8601Time "Severity":{"Label":"LOW"} "Confidence":99 "Title":"[LicenseManager.2] License Manager license configurations should disassociate hosts when license in scope is not found" "Description":"License Manager license configuration "+liscConfigName+" does not enforce automatic disassociation. Refer to the remediation instructions to remediate this behavior." "Remediation":{"Recommendation":{"Text":"For information on disassociation refer to the Disassociating license configurations and AMIs section of the AWS License Manager User Guide" "Url":"https://docs.aws.amazon.com/license-manager/latest/userguide/license-rules.html#ami-disassociation" }} "ProductFields":{"Product Name":"ElectricEye"} "Resources":[{"Type":"AwsLicenseManagerLicenseConfiguration" "Id":liscConfigArn "Partition":awsPartition "Region":awsRegion "Details":{"Other":{"LicenseConfigurationId":liscConfigId "LicenseConfigurationName":liscConfigName }} }] "Compliance":{"Status":"FAILED" "RelatedRequirements":["NIST CSF ID.AM-2" "NIST SP 800-53 CM-8" "NIST SP 800-53 PM-5" "AICPA TSC CC3.2" "AICPA TSC CC6.1" "ISO 27001:2013 A.8.1.1" "ISO 27001:2013 A.8.1.2" "ISO 27001:2013 A.12.5.1" ] } "Workflow":{"Status":"NEW"} "RecordState":"ACTIVE" }<line_sep><yield>finding<block_end><else_stmt><block_start>finding={"SchemaVersion":"2018-10-08" "Id":liscConfigArn+"/license-manager-disassociation-check" "ProductArn":f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default" "GeneratorId":liscConfigArn "AwsAccountId":awsAccountId "Types":["Software and Configuration Checks/AWS Security Best Practices"] "FirstObservedAt":iso8601Time "CreatedAt":iso8601Time "UpdatedAt":iso8601Time "Severity":{"Label":"INFORMATIONAL"} "Confidence":99 "Title":"[LicenseManager.2] License Manager license configurations should disassociate hosts when license in scope is not found" "Description":"License Manager license configuration "+liscConfigName+" enforces automatic disassociation." 
"Remediation":{"Recommendation":{"Text":"For information on disassociation refer to the Disassociating license configurations and AMIs section of the AWS License Manager User Guide" "Url":"https://docs.aws.amazon.com/license-manager/latest/userguide/license-rules.html#ami-disassociation" }} "ProductFields":{"Product Name":"ElectricEye"} "Resources":[{"Type":"AwsLicenseManagerLicenseConfiguration" "Id":liscConfigArn "Partition":awsPartition "Region":awsRegion "Details":{"Other":{"LicenseConfigurationId":liscConfigId "LicenseConfigurationName":liscConfigName }} }] "Compliance":{"Status":"PASSED" "RelatedRequirements":["NIST CSF ID.AM-2" "NIST SP 800-53 CM-8" "NIST SP 800-53 PM-5" "AICPA TSC CC3.2" "AICPA TSC CC6.1" "ISO 27001:2013 A.8.1.1" "ISO 27001:2013 A.8.1.2" "ISO 27001:2013 A.12.5.1"]} "Workflow":{"Status":"RESOLVED"} "RecordState":"ARCHIVED" }<line_sep><yield>finding<block_end><block_end><except_stmt>Exception<as>e<block_start>print(e)<block_end><block_end><block_end><block_end><except_stmt>Exception<as>e<block_start>print(e)<block_end><block_end>
# -*- coding=utf-8 -*- <import_from_future_stmt> absolute_import unicode_literals<import_stmt>itertools<import_stmt>operator<import_from_stmt>packaging.specifiers SpecifierSet Specifier<import_from_stmt>vistir.misc dedup<def_stmt>_tuplize_version version<block_start><return>tuple(int(x)<for>x version.split("."))<block_end><def_stmt>_format_version version<block_start><return>".".join(str(i)<for>i version)<block_end># Prefer [x,y) ranges. REPLACE_RANGES={">":">=" "<=":"<"}<def_stmt>_format_pyspec specifier<block_start><if_stmt>isinstance(specifier str)<block_start><if_stmt><not>any(op<in>specifier<for>op Specifier._operators.keys())<block_start>specifier="=={0}".format(specifier)<block_end>specifier=Specifier(specifier)<block_end><if_stmt>specifier.operator<eq>"=="<and>specifier.version.endswith(".*")<block_start>specifier=Specifier("=={0}".format(specifier.version[:-2]))<block_end><try_stmt><block_start>op=REPLACE_RANGES[specifier.operator]<block_end><except_stmt>KeyError<block_start><return>specifier<block_end>version=specifier.version.replace(".*" "")<line_sep>curr_tuple=_tuplize_version(version)<try_stmt><block_start>next_tuple=(curr_tuple[0] curr_tuple[1]+1)<block_end><except_stmt>IndexError<block_start>next_tuple=(curr_tuple[0] 1)<block_end>specifier=Specifier("{0}{1}".format(op _format_version(next_tuple)))<line_sep><return>specifier<block_end><def_stmt>_get_specs specset<block_start><if_stmt>isinstance(specset Specifier)<block_start>specset=str(specset)<block_end><if_stmt>isinstance(specset str)<block_start>specset=SpecifierSet(specset.replace(".*" ""))<block_end><return>[(spec._spec[0] _tuplize_version(spec._spec[1]))<for>spec getattr(specset "_specs" [])]<block_end><def_stmt>_group_by_op specs<block_start>specs=[_get_specs(x)<for>x list(specs)]<line_sep>flattened=[(op version)<for>spec specs<for>op,version spec]<line_sep>specs=sorted(flattened key=operator.itemgetter(1))<line_sep>grouping=itertools.groupby(specs key=operator.itemgetter(0))<line_sep><return>grouping<block_end><def_stmt>cleanup_pyspecs specs joiner="or"<block_start>specs={_format_pyspec(spec)<for>spec specs}<line_sep># for != operator we want to group by version # if all are consecutive, join as a list results=set()<for_stmt>op,versions _group_by_op(specs)<block_start>versions=[version[1]<for>version versions]<line_sep>versions=sorted(dedup(versions))<line_sep># if we are doing an or operation, we need to use the min for >= # this way OR(>=2.6, >=2.7, >=3.6) picks >=2.6 # if we do an AND operation we need to use MAX to be more selective <if_stmt>op<in>(">" ">=")<block_start><if_stmt>joiner<eq>"or"<block_start>results.add((op _format_version(min(versions))))<block_end><else_stmt><block_start>results.add((op _format_version(max(versions))))<block_end><block_end># we use inverse logic here so we will take the max value if we are # using OR but the min value if we are using AND <elif_stmt>op<in>("<=" "<")<block_start><if_stmt>joiner<eq>"or"<block_start>results.add((op _format_version(max(versions))))<block_end><else_stmt><block_start>results.add((op _format_version(min(versions))))<block_end><block_end># leave these the same no matter what operator we use <elif_stmt>op<in>("!=" "==" "~=")<block_start>version_list=sorted("{0}".format(_format_version(version))<for>version versions)<line_sep>version=", ".join(version_list)<if_stmt>len(version_list)<eq>1<block_start>results.add((op version))<block_end><elif_stmt>op<eq>"!="<block_start>results.add(("not in" 
version))<block_end><elif_stmt>op<eq>"=="<block_start>results.add(("in" version))<block_end><else_stmt><block_start>specifier=SpecifierSet(",".join(sorted("{0}{1}".format(op v)<for>v version_list)))._specs<for_stmt>s specifier<block_start>results.add((s._spec[0] s._spec[1]))<block_end><block_end><block_end><else_stmt><block_start><if_stmt>len(versions)<eq>1<block_start>results.add((op _format_version(versions[0])))<block_end><else_stmt><block_start>specifier=SpecifierSet(",".join("{0}{1}".format(op _format_version(v))<for>v versions))._specs<for_stmt>s specifier<block_start>results.add((s._spec[0] s._spec[1]))<block_end><block_end><block_end><block_end><return>results<block_end><def_stmt>pyspec_from_markers marker<block_start><if_stmt>marker._markers[0][0]<ne>'python_version'<block_start><return><block_end>op=marker._markers[0][1].value<line_sep>version=marker._markers[0][2].value<line_sep>specset=set()<if_stmt>op<eq>"in"<block_start>specset.update(Specifier("=={0}".format(v.strip()))<for>v version.split(","))<block_end><elif_stmt>op<eq>"not in"<block_start>specset.update(Specifier("!={0}".format(v.strip()))<for>v version.split(","))<block_end><else_stmt><block_start>specset.add(Specifier("".join([op version])))<block_end><if_stmt>specset<block_start><return>specset<block_end><return><none><block_end>
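Editor's note: a small, hedged illustration of the helpers above, derived only from the code as written here (upstream requirementslib/pipenv behaviour may differ); it assumes the functions are imported from, or run inside, the same module.

print(_format_pyspec(">3.6"))    # >=3.7  (open lower bounds become the next minor's inclusive bound)
print(_format_pyspec("<=2.7"))   # <2.8   (inclusive upper bounds become exclusive ones)
print(_format_pyspec("3.4"))     # ==3.4  (bare versions are treated as exact pins)

# With joiner="or", the loosest lower bound wins for >= constraints;
# with joiner="and", the tightest one does.
print(cleanup_pyspecs([">=2.7", ">=3.6"], joiner="or"))   # {('>=', '2.7')}
print(cleanup_pyspecs([">=2.7", ">=3.6"], joiner="and"))  # {('>=', '3.6')}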
<import_from_stmt>keras optimizers<import_from_stmt>keras.layers Input Dense Convolution2D Bidirectional TimeDistributed<import_from_stmt>keras.layers Flatten BatchNormalization Reshape<import_from_stmt>keras.layers.core Activation<import_from_stmt>keras.models Model load_model<import_from_stmt>keras.layers.recurrent LSTM<import_from_stmt>keras.initializers he_normal glorot_uniform<def_stmt>AO_model people_num=2<block_start>model_input=Input(shape=(298 257 2))<line_sep>print('0:' model_input.shape)<line_sep>conv1=Convolution2D(96 kernel_size=(1 7) strides=(1 1) padding='same' dilation_rate=(1 1) name='conv1')(model_input)<line_sep>conv1=BatchNormalization()(conv1)<line_sep>conv1=Activation('relu')(conv1)<line_sep>print('1:' conv1.shape)<line_sep>conv2=Convolution2D(96 kernel_size=(7 1) strides=(1 1) padding='same' dilation_rate=(1 1) name='conv2')(conv1)<line_sep>conv2=BatchNormalization()(conv2)<line_sep>conv2=Activation('relu')(conv2)<line_sep>print('2:' conv2.shape)<line_sep>conv3=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(1 1) name='conv3')(conv2)<line_sep>conv3=BatchNormalization()(conv3)<line_sep>conv3=Activation('relu')(conv3)<line_sep>print('3:' conv3.shape)<line_sep>conv4=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(2 1) name='conv4')(conv3)<line_sep>conv4=BatchNormalization()(conv4)<line_sep>conv4=Activation('relu')(conv4)<line_sep>print('4:' conv4.shape)<line_sep>conv5=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(4 1) name='conv5')(conv4)<line_sep>conv5=BatchNormalization()(conv5)<line_sep>conv5=Activation('relu')(conv5)<line_sep>print('5:' conv5.shape)<line_sep>conv6=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(8 1) name='conv6')(conv5)<line_sep>conv6=BatchNormalization()(conv6)<line_sep>conv6=Activation('relu')(conv6)<line_sep>print('6:' conv6.shape)<line_sep>conv7=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(16 1) name='conv7')(conv6)<line_sep>conv7=BatchNormalization()(conv7)<line_sep>conv7=Activation('relu')(conv7)<line_sep>print('7:' conv7.shape)<line_sep>conv8=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(32 1) name='conv8')(conv7)<line_sep>conv8=BatchNormalization()(conv8)<line_sep>conv8=Activation('relu')(conv8)<line_sep>print('8:' conv8.shape)<line_sep>conv9=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(1 1) name='conv9')(conv8)<line_sep>conv9=BatchNormalization()(conv9)<line_sep>conv9=Activation('relu')(conv9)<line_sep>print('9:' conv9.shape)<line_sep>conv10=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(2 2) name='conv10')(conv9)<line_sep>conv10=BatchNormalization()(conv10)<line_sep>conv10=Activation('relu')(conv10)<line_sep>print('10:' conv10.shape)<line_sep>conv11=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(4 4) name='conv11')(conv10)<line_sep>conv11=BatchNormalization()(conv11)<line_sep>conv11=Activation('relu')(conv11)<line_sep>print('11:' conv11.shape)<line_sep>conv12=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(8 8) name='conv12')(conv11)<line_sep>conv12=BatchNormalization()(conv12)<line_sep>conv12=Activation('relu')(conv12)<line_sep>print('12:' conv12.shape)<line_sep>conv13=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(16 16) 
name='conv13')(conv12)<line_sep>conv13=BatchNormalization()(conv13)<line_sep>conv13=Activation('relu')(conv13)<line_sep>print('13:' conv13.shape)<line_sep>conv14=Convolution2D(96 kernel_size=(5 5) strides=(1 1) padding='same' dilation_rate=(32 32) name='conv14')(conv13)<line_sep>conv14=BatchNormalization()(conv14)<line_sep>conv14=Activation('relu')(conv14)<line_sep>print('14:' conv14.shape)<line_sep>conv15=Convolution2D(8 kernel_size=(1 1) strides=(1 1) padding='same' dilation_rate=(1 1) name='conv15')(conv14)<line_sep>conv15=BatchNormalization()(conv15)<line_sep>conv15=Activation('relu')(conv15)<line_sep>print('15:' conv15.shape)<line_sep>AVfusion=TimeDistributed(Flatten())(conv15)<line_sep>print('AVfusion:' AVfusion.shape)<line_sep>lstm=Bidirectional(LSTM(400 input_shape=(298 8<times>257) return_sequences=<true>) merge_mode='sum')(AVfusion)<line_sep>print('lstm:' lstm.shape)<line_sep>fc1=Dense(600 name="fc1" activation='relu' kernel_initializer=he_normal(seed=27))(lstm)<line_sep>print('fc1:' fc1.shape)<line_sep>fc2=Dense(600 name="fc2" activation='relu' kernel_initializer=he_normal(seed=42))(fc1)<line_sep>print('fc2:' fc2.shape)<line_sep>fc3=Dense(600 name="fc3" activation='relu' kernel_initializer=he_normal(seed=65))(fc2)<line_sep>print('fc3:' fc3.shape)<line_sep>complex_mask=Dense(257<times>2<times>people_num name="complex_mask" kernel_initializer=glorot_uniform(seed=87))(fc3)<line_sep>print('complex_mask:' complex_mask.shape)<line_sep>complex_mask_out=Reshape((298 257 2 people_num))(complex_mask)<line_sep>print('complex_mask_out:' complex_mask_out.shape)<line_sep># --------------------------- AO end --------------------------- AO_model=Model(inputs=model_input outputs=complex_mask_out)<line_sep><return>AO_model<block_end>
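Editor's note: a hedged sketch of building and exercising AO_model above. It uses the same legacy standalone-Keras API that the file imports; the optimizer and loss are illustrative placeholders, not the original training configuration.

import numpy as np

model = AO_model(people_num=2)
model.compile(optimizer=optimizers.Adam(lr=1e-4), loss='mse')
model.summary()

# One dummy STFT: 298 frames x 257 frequency bins x (real, imag)
dummy_spec = np.zeros((1, 298, 257, 2), dtype='float32')
masks = model.predict(dummy_spec)
print(masks.shape)  # (1, 298, 257, 2, 2): one complex mask per speaker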
<import_stmt>matplotlib.pyplot<as>plt<import_stmt>numpy<as>np<import_stmt>scipy<import_stmt>scipy.sparse.linalg<import_stmt>sklearn.linear_model<import_from_stmt>matplotlib gridspec<import_from_stmt>sklearn.feature_extraction image<import_stmt>skvideo.datasets<import_stmt>skvideo.io<try_stmt><block_start>xrange<block_end><except_stmt>NameError<block_start>xrange=range<block_end>np.random.seed(0)<line_sep># use greedy K-SVD algorithm with OMP <def_stmt>code_step X D<block_start>model=sklearn.linear_model.OrthogonalMatchingPursuit(n_nonzero_coefs=5 fit_intercept=<false> normalize=<false>)<line_sep>#C = sklearn. model.fit(D.T X.T)<line_sep><return>model.coef_<block_end><def_stmt>dict_step X C D<block_start>unused_indices=[]<for_stmt>k xrange(D.shape[0])<block_start>usedidx=np.abs(C[: k])<g>0<if_stmt>np.sum(usedidx)<le>1<block_start>print("Skipping filter #%d"%(k ))<line_sep>unused_indices.append(k)<line_sep><continue><block_end>selectNotK=np.arange(D.shape[0])<ne>k<line_sep>used_coef=C[usedidx :][: selectNotK]<line_sep>E_kR=X[usedidx :].T-np.dot(used_coef D[selectNotK :]).T<line_sep>U,S,V=scipy.sparse.linalg.svds(E_kR k=1)<line_sep># choose sign based on largest dot product choicepos=np.dot(D[k :] U[: 0])<line_sep>choiceneg=np.dot(D[k :] -U[: 0])<if_stmt>choicepos<g>choiceneg<block_start>D[k :]=U[: 0]<line_sep>C[usedidx k]=S[0]<times>V[0 :]<block_end><else_stmt><block_start>D[k :]=-U[: 0]<line_sep>C[usedidx k]=-S[0]<times>V[0 :]<block_end><block_end># re-randomize filters that were not used <for_stmt>i unused_indices<block_start>D[i :]=np.random.normal(size=D.shape[1])<line_sep>D[i :]<augdiv>np.sqrt(np.dot(D[i :] D[i :]))<block_end><return>D<block_end><def_stmt>plot_weights basis<block_start>n_filters,n_channels,height,width=basis.shape<line_sep>ncols=10<line_sep>nrows=10<line_sep>fig=plt.figure()<line_sep>gs=gridspec.GridSpec(nrows ncols)<line_sep>rown=0<line_sep>coln=0<for_stmt>filter xrange(n_filters)<block_start>ax=fig.add_subplot(gs[rown coln])<line_sep>mi=np.min(basis[filter 0 : :])<line_sep>ma=np.max(basis[filter 0 : :])<line_sep>ma=np.max((np.abs(mi) np.abs(ma)))<line_sep>mi=-ma<line_sep>ax.imshow(basis[filter 0 : :] vmin=mi vmax=ma cmap='Greys_r' interpolation='none')<line_sep>ax.xaxis.set_major_locator(plt.NullLocator())<line_sep>ax.yaxis.set_major_locator(plt.NullLocator())<line_sep>coln<augadd>1<if_stmt>coln<ge>ncols<block_start>coln=0<line_sep>rown<augadd>1<block_end><block_end>gs.tight_layout(fig pad=0 h_pad=0 w_pad=0)<line_sep>fig.canvas.draw()<line_sep>buf,sz=fig.canvas.print_to_buffer()<line_sep>data=np.frombuffer(buf dtype=np.uint8).reshape(sz[1] sz[0] -1)[: : :3]<line_sep>plt.close()<line_sep><return>data<block_end># a 10 fps video encoded using x264 writer=skvideo.io.FFmpegWriter("sparsity.mp4" inputdict={"-r":"10"} outputdict={'-vcodec':'libx264' '-b':'30000000'})<line_sep># open the first frame of bigbuckbunny filename=skvideo.datasets.bigbuckbunny()<line_sep>vidframe=skvideo.io.vread(filename outputdict={"-pix_fmt":"gray"})[0 : : 0]<line_sep># initialize D D=np.random.normal(size=(100 7<times>7))<for_stmt>i range(D.shape[0])<block_start>D[i :]<augdiv>np.sqrt(np.dot(D[i :] D[i :]))<block_end>X=image.extract_patches_2d(vidframe (7 7))<line_sep>X=X.reshape(X.shape[0] -1).astype(np.float64)<line_sep># subsample about 10000 patches X=X[np.random.permutation(X.shape[0])[:10000]]<for_stmt>i range(200)<block_start>print("Iteration %d / %d"%(i 200))<line_sep>C=code_step(X D)<line_sep>D=dict_step(X C D)<line_sep>frame=plot_weights(D.reshape(100 1 7 7))<line_sep>writer.writeFrame(frame)<block_end>writer.close()<line_sep>
<import_from_future_stmt> absolute_import print_function division<import_stmt>unittest<import_from_stmt>pony.orm.core *<import_from_stmt>pony.orm.tests.testutils *<import_from_stmt>pony.orm.tests setup_database teardown_database<line_sep>db=Database()<class_stmt>Person(db.Entity)<block_start>name=Required(unicode)<line_sep>passport=Optional("Passport")<block_end><class_stmt>Passport(db.Entity)<block_start>code=Required(unicode)<line_sep>person=Required("Person")<block_end><class_stmt>TestOneToOne4(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>setup_database(db)<with_stmt>db_session<block_start>p1=Person(id=1 name='John')<line_sep>Passport(id=1 code='123' person=p1)<block_end><block_end><def_stmt>tearDown self<block_start>teardown_database(db)<block_end>@raises_exception(ConstraintError 'Cannot unlink Passport[1] from previous Person[1] object, because Passport.person attribute is required')@db_session<def_stmt>test1 self<block_start>p2=Person(id=2 name='Mike')<line_sep>pas2=Passport(id=2 code='456' person=p2)<line_sep>commit()<line_sep>p1=Person.get(name='John')<line_sep>pas2.person=p1<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
<import_stmt>numpy<as>np<import_stmt>torch<import_stmt>csv<import_stmt>os<import_stmt>cv2<import_stmt>math<import_stmt>random<import_stmt>json<import_stmt>pickle<import_stmt>os.path<as>osp<import_from_stmt>lietorch SE3<import_from_stmt>.stream RGBDStream<import_from_stmt>.rgbd_utils loadtum<line_sep>intrinsics_dict={'freiburg1':[517.3 516.5 318.6 255.3] 'freiburg2':[520.9 521.0 325.1 249.7] 'freiburg3':[535.4 539.2 320.1 247.6] }<line_sep>distortion_dict={'freiburg1':[0.2624 -0.9531 -0.0054 0.0026 1.1633] 'freiburg2':[0.2312 -0.7849 -0.0033 -0.0001 0.9172] 'freiburg3':[0 0 0 0 0] }<def_stmt>as_intrinsics_matrix intrinsics<block_start>K=np.eye(3)<line_sep>K[0 0]=intrinsics[0]<line_sep>K[1 1]=intrinsics[1]<line_sep>K[0 2]=intrinsics[2]<line_sep>K[1 2]=intrinsics[3]<line_sep><return>K<block_end><class_stmt>TUMStream(RGBDStream)<block_start><def_stmt>__init__ self datapath **kwargs<block_start>super(TUMStream self).__init__(datapath=datapath **kwargs)<block_end><def_stmt>_build_dataset_index self<block_start>""" build list of images, poses, depths, and intrinsics """<line_sep>images,depths,poses,intrinsics=loadtum(self.datapath self.frame_rate)<line_sep>intrinsic,_=TUMStream.calib_read(self.datapath)<line_sep>intrinsics=np.tile(intrinsic[<none>] (len(images) 1))<line_sep># set first pose to identity poses=SE3(torch.as_tensor(poses))<line_sep>poses=poses[[0]].inv()<times>poses<line_sep>poses=poses.data.cpu().numpy()<line_sep>self.images=images<line_sep>self.poses=poses<line_sep>self.depths=depths<line_sep>self.intrinsics=intrinsics<block_end>@staticmethod<def_stmt>calib_read datapath<block_start><if_stmt>'freiburg1'<in>datapath<block_start>intrinsic=intrinsics_dict['freiburg1']<line_sep>d_coef=distortion_dict['freiburg1']<block_end><elif_stmt>'freiburg2'<in>datapath<block_start>intrinsic=intrinsics_dict['freiburg2']<line_sep>d_coef=distortion_dict['freiburg2']<block_end><elif_stmt>'freiburg3'<in>datapath<block_start>intrinsic=intrinsics_dict['freiburg3']<line_sep>d_coef=distortion_dict['freiburg3']<block_end><return>np.array(intrinsic) np.array(d_coef)<block_end>@staticmethod<def_stmt>image_read image_file<block_start>intrinsics,d_coef=TUMStream.calib_read(image_file)<line_sep>K=as_intrinsics_matrix(intrinsics)<line_sep>image=cv2.imread(image_file)<line_sep><return>cv2.undistort(image K d_coef)<block_end>@staticmethod<def_stmt>depth_read depth_file<block_start>depth=cv2.imread(depth_file cv2.IMREAD_ANYDEPTH)<line_sep><return>depth.astype(np.float32)/5000.0<block_end><block_end>
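Editor's note: a short, hedged sketch of the intrinsics helpers above. The dataset path is a placeholder; calib_read only inspects it for the substring "freiburg1", "freiburg2", or "freiburg3".

import numpy as np

K = as_intrinsics_matrix([517.3, 516.5, 318.6, 255.3])
print(K)  # 3x3 pinhole matrix: fx, fy on the diagonal, (cx, cy) in the last column

intrinsic, d_coef = TUMStream.calib_read("datasets/TUM/rgbd_dataset_freiburg1_desk")
print(intrinsic)  # [517.3 516.5 318.6 255.3]
print(d_coef)     # freiburg1 radial/tangential distortion coefficients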
<import_stmt>unittest<import_stmt>os<import_from_stmt>boto3.session Session<import_from_stmt>unittest.mock patch MagicMock ANY<import_from_stmt>common.boto3_manager Boto3Manager<class_stmt>Boto3ManagerTestCase(unittest.TestCase)<block_start><def_stmt>test_assume_default_boto3_session self<block_start>returned_session=Boto3Manager._get_boto3_session("us-east-1")<assert_stmt>isinstance(returned_session Session)<assert_stmt>returned_session.region_name<eq>"us-east-1"<block_end>@patch("common.boto3_manager.DeferredRefreshableCredentials" MagicMock())@patch("common.boto3_manager.AssumeRoleCredentialFetcher" MagicMock())<def_stmt>test_assume_role_boto3_session self<block_start>returned_session=Boto3Manager._get_boto3_session("us-east-1" role_arn="abc123")<assert_stmt>isinstance(returned_session Session)<assert_stmt>returned_session.region_name<eq>"us-east-1"<line_sep># Bury into the internals to ensure our provider was registered correctly our_provider=returned_session._session._components.get_component("credential_provider").providers[0]<assert_stmt>isinstance(our_provider Boto3Manager.AssumeRoleProvider)<block_end><def_stmt>test_assumed_sagemaker_client self<block_start>Boto3Manager._get_boto3_session=MagicMock()<line_sep>mock_sm_client=MagicMock()<line_sep># Mock the client("SageMaker", ...) return value Boto3Manager._get_boto3_session.return_value.client.return_value=(mock_sm_client)<line_sep>client=Boto3Manager.get_sagemaker_client("v1.0.0" "us-east-1" assume_role_arn="abc123")<assert_stmt>client<eq>mock_sm_client<line_sep>Boto3Manager._get_boto3_session.assert_called_once_with("us-east-1" "abc123")<line_sep>Boto3Manager._get_boto3_session.return_value.client.assert_called_once_with("sagemaker" endpoint_url=<none> config=ANY region_name="us-east-1")<block_end><block_end>
""" Implements various synthetic functions on NN architectures. -- <EMAIL> """<line_sep># pylint: disable=invalid-name <import_stmt>numpy<as>np<def_stmt>_get_vals_wo_None iter_of_vals<block_start>""" Returns a list of values without Nones. """<line_sep><return>[x<for>x iter_of_vals<if>x<is><not><none>]<block_end><def_stmt>_num_units_signal num_units_vals bias_val decay<block_start>""" Signal on the number of units. """<line_sep>num_units_vals=np.array(_get_vals_wo_None(num_units_vals))<line_sep><return>np.exp(-decay<times>abs(num_units_vals.mean()-bias_val))<block_end><def_stmt>_degree_signal in_degrees out_degrees bias_val decay<block_start>""" Signal on the degrees. """<line_sep>avg_degree=(in_degrees.mean()+out_degrees.mean())/2.0<line_sep><return>np.exp(-decay<times>abs(avg_degree-bias_val))<block_end><def_stmt>_get_ip_op_distance_signal ip_op_dist bias_val decay<block_start>""" Signal on distance from input to output. """<line_sep><return>np.exp(-decay<times>abs(ip_op_dist-bias_val))<block_end><def_stmt>_get_layer_degree_signal degree_of_layer bias_val decay<block_start>""" A signal based on the degree of a layer. """<line_sep><return>np.exp(-decay<times>abs(degree_of_layer-bias_val))<block_end><def_stmt>_get_num_layers_signal num_layers bias_val decay<block_start>""" A signal based on the number of layers. """<line_sep><return>np.exp(-decay<times>abs(num_layers-bias_val))<block_end><def_stmt>_get_num_edges_signal num_edges bias_val decay<block_start>""" A signal based on the total number of edges. """<line_sep><return>np.exp(-decay<times>abs(num_edges-bias_val))<block_end><def_stmt>_get_stride_signal strides bias_val decay<block_start>""" A signal using the strides. """<line_sep>strides=np.array(_get_vals_wo_None(strides))<line_sep><return>np.exp(-decay<times>abs(strides.mean()-bias_val))<block_end><def_stmt>_get_conv_signal layer_labels<block_start>""" A signal using the convolutional layers. """<line_sep>conv_layers=[ll<for>ll layer_labels<if>ll.startswith('conv')<or>ll.startswith('res')]<line_sep>conv_filter_vals=np.array([float(ll[-1])<for>ll conv_layers])<line_sep><return>(conv_filter_vals<eq>3).sum()/float(len(conv_filter_vals)+1)<block_end><def_stmt>_get_sigmoid_signal layer_labels<block_start>""" A function using the sigmoid layer fraction as the signal. """<line_sep>internal_layers=[ll<for>ll layer_labels<if>ll<not><in>['ip' 'op' 'linear']]<line_sep>good_layers=[ll<in>['logistic' 'relu']<for>ll internal_layers]<line_sep><return>sum(good_layers)/float(len(internal_layers)+1)<block_end><def_stmt>syn_func1_common nn<block_start>""" A synthetic function on NN architectures. """<line_sep><return>_num_units_signal(nn.num_units_in_each_layer 1000 0.002)+_degree_signal(nn.get_in_degrees() nn.get_out_degrees() 5 0.4)+_get_ip_op_distance_signal(nn.get_distances_from_ip()[nn.get_op_layer_idx()] 10 0.2)+_get_layer_degree_signal(nn.get_in_degrees()[nn.get_op_layer_idx()] 3 0.5)+_get_layer_degree_signal(nn.get_out_degrees()[nn.get_ip_layer_idx()] 4 0.5)+_get_num_layers_signal(nn.num_layers 30 0.1)+_get_num_edges_signal(nn.conn_mat.sum() 100 0.05)<block_end><def_stmt>cnn_syn_func1 nn<block_start>""" A synthetic function for CNNs. """<line_sep><return>syn_func1_common(nn)+_num_units_signal(nn.num_units_in_each_layer 500 0.001)+_get_num_layers_signal(nn.num_layers 50 0.3)+_get_stride_signal(nn.strides 1.5 3.0)+_get_conv_signal(nn.layer_labels)<block_end><def_stmt>mlp_syn_func1 nn<block_start>""" A synthetic function for MLPs. 
"""<line_sep><return>syn_func1_common(nn)+_get_num_edges_signal(nn.conn_mat.sum() 50 0.1)+_num_units_signal(nn.num_units_in_each_layer 2000 0.001)+_get_sigmoid_signal(nn.layer_labels)<block_end>
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. <import_from_stmt>oslo_policy policy<import_from_stmt>trove.common.policies.base PATH_DATASTORE PATH_VERSIONS<line_sep>rules=[policy.DocumentedRuleDefault(name='configuration-parameter:index' check_str='rule:admin_or_owner' description='List all parameters bound to a datastore version.' operations=[{'path':PATH_DATASTORE+'/versions/{version}/parameters' 'method':'GET'}]) policy.DocumentedRuleDefault(name='configuration-parameter:show' check_str='rule:admin_or_owner' description='Get a parameter of a datastore version.' operations=[{'path':(PATH_DATASTORE+'/versions/{version}/parameters/{param}') 'method':'GET'}]) policy.DocumentedRuleDefault(name='configuration-parameter:index_by_version' check_str='rule:admin_or_owner' description='List all parameters bound to a datastore version by '<concat>'the id of the version (datastore is not provided).' operations=[{'path':PATH_VERSIONS+'/{version}/paramters' 'method':'GET'}]) policy.DocumentedRuleDefault(name='configuration-parameter:show_by_version' check_str='rule:admin_or_owner' description='Get a parameter of a datastore version by its name and '<concat>'the id of the version (datastore is not provided).' operations=[{'path':PATH_VERSIONS+'/{version}/paramters/{param}' 'method':'GET'}])]<def_stmt>list_rules <block_start><return>rules<block_end>
# # Copyright (c) 2013-2015,2017 - Adjacent Link LLC, Bridgewater, # New Jersey # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of Adjacent Link LLC nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # <import_from_stmt>. event_pb2<import_from_stmt>..ota otaheader_pb2<import_from_stmt>. EventServiceException<import_stmt>os<import_stmt>socket<import_stmt>threading<import_stmt>fcntl<import_stmt>struct<import_stmt>select<import_stmt>time<import_stmt>uuid<import_stmt>sys<def_stmt>get_ip_address ifname# http://code.activestate.com/recipes/439094/ <block_start>s=socket.socket(socket.AF_INET socket.SOCK_DGRAM)<line_sep><return>socket.inet_ntoa(fcntl.ioctl(s.fileno() 0x8915 # SIOCGIFADDR struct.pack('256s' ifname[:15].encode()<if>sys.version_info<ge>(3 0)<else>ifname[:15]))[20:24])<block_end><def_stmt>init_multicast_socket group port device<block_start><try_stmt><block_start>sock=socket.socket(socket.AF_INET socket.SOCK_DGRAM)<block_end><except_stmt>socket.error<as>msg<block_start><if_stmt>sys.version_info<ge>(3 3)<block_start><raise>EventServiceException("event socket failure %s"%str(msg) <true>)<block_end><else_stmt><block_start><raise>EventServiceException("event socket failure %s %s"%(str(msg[0]) msg[1]) <true>)<block_end><block_end><try_stmt><block_start>sock.setsockopt(socket.IPPROTO_IP socket.IP_MULTICAST_TTL 32)<block_end><except_stmt>socket.error<as>msg<block_start><if_stmt>sys.version_info<ge>(3 3)<block_start><raise>EventServiceException("event socket option failure %s"%str(msg) <true>)<block_end><else_stmt><block_start><raise>EventServiceException("event socket option failure %s %s"%(str(msg[0]) msg[1]) <true>)<block_end><block_end><try_stmt><block_start>sock.setsockopt(socket.IPPROTO_IP socket.IP_MULTICAST_LOOP 1)<block_end><except_stmt>socket.error<as>msg<block_start><if_stmt>sys.version_info<ge>(3 3)<block_start><raise>EventServiceException("event socket option failure %s"%str(msg) <true>)<block_end><else_stmt><block_start><raise>EventServiceException("event socket option failure %s %s"%(str(msg[0]) msg[1]) <true>)<block_end><block_end><try_stmt><block_start>sock.setsockopt(socket.SOL_SOCKET socket.SO_REUSEADDR 
1)<block_end><except_stmt>socket.error<as>msg<block_start><if_stmt>sys.version_info<ge>(3 3)<block_start><raise>EventServiceException("event socket option failure %s"%str(msg) <true>)<block_end><else_stmt><block_start><raise>EventServiceException("event socket option failure %s %s"%(str(msg[0]) msg[1]) <true>)<block_end><block_end><try_stmt><block_start>sock.bind((group port))<block_end><except_stmt>socket.error<as>msg<block_start><if_stmt>sys.version_info<ge>(3 3)<block_start><raise>EventServiceException("bind failure %s"%str(msg) <true>)<block_end><else_stmt><block_start><raise>EventServiceException("bind failure %s %s"%(str(msg[0]) msg[1]) <true>)<block_end><block_end><try_stmt><block_start><if_stmt>device<block_start>devAddress=socket.inet_aton(get_ip_address(device))<block_end><else_stmt><block_start>devAddress=socket.inet_aton("0.0.0.0")<block_end>sock.setsockopt(socket.SOL_IP socket.IP_ADD_MEMBERSHIP socket.inet_aton(group)+devAddress)<line_sep>sock.setsockopt(socket.SOL_IP socket.IP_MULTICAST_IF devAddress)<block_end><except_stmt>socket.error<as>msg<block_start><if_stmt>sys.version_info<ge>(3 3)<block_start><raise>EventServiceException("mulicast add membership failure %s"%str(msg) <true>)<block_end><else_stmt><block_start><raise>EventServiceException("mulicast add membership failure %s %s"%(str(msg[0]) msg[1]) <true>)<block_end><block_end><except_stmt>IOError<block_start><raise>EventServiceException("unknown device %s"%device <true>)<block_end><return>sock<block_end><class_stmt>EventService<block_start><def_stmt>__init__ self eventchannel otachannel=<none><block_start>(self._multicastGroup self._port _)=eventchannel<line_sep>self._defaultHandler=<none><line_sep>self._handlers={}<line_sep>self._socket=<none><line_sep>self._readFd,self._writeFd=os.pipe()<line_sep>self._uuid=uuid.uuid4()<line_sep>self._sequenceNumber=0<line_sep>self._socketOTA=<none><line_sep>self._socket=init_multicast_socket(*eventchannel)<if_stmt>otachannel<block_start>self._socketOTA=init_multicast_socket(*otachannel)<block_end>self._lock=threading.Lock()<block_end><def_stmt>breakloop self<block_start>os.write(self._writeFd "\n".encode())<block_end><def_stmt>loop self default=<none><block_start>buffer=""<line_sep>running=<true><while_stmt>running<block_start>rdfds=[self._socket self._readFd]<if_stmt>self._socketOTA<block_start>rdfds.append(self._socketOTA)<block_end><try_stmt><block_start>readable,_,_=select.select(rdfds [] [])<block_end><except_stmt>select.error<block_start><continue><block_end><for_stmt>fd readable<block_start><if_stmt>fd<is>self._socket<block_start>data,_=self._socket.recvfrom(65535)<if_stmt><not>len(data)<block_start>running=<false><line_sep><break><block_end>(length )=struct.unpack_from("!H" data)<if_stmt>length<eq>len(data)-2<block_start>event=event_pb2.Event()<line_sep>event.ParseFromString(data[2:])<for_stmt>serialization event.data.serializations<block_start>self._lock.acquire()<try_stmt><block_start><if_stmt>serialization.eventId<in>self._handlers<block_start>self._handlers[serialization.eventId](serialization.nemId serialization.eventId serialization.data uuid.UUID(bytes=event.uuid) event.sequenceNumber)<block_end><elif_stmt>default<block_start>default(serialization.nemId serialization.eventId serialization.data uuid.UUID(bytes=event.uuid) 
event.sequenceNumber)<block_end><block_end><finally_stmt><block_start>self._lock.release()<block_end><block_end><block_end><block_end><elif_stmt>fd<is>self._readFd<block_start>running=<false><line_sep><break><block_end><elif_stmt>fd<is>self._socketOTA<block_start>data,_=self._socketOTA.recvfrom(65535)<if_stmt><not>len(data)<block_start>running=<false><line_sep><break><block_end>(headerLength )=struct.unpack_from("!H" data)<line_sep>otaHeader=otaheader_pb2.OTAHeader()<line_sep>otaHeader.ParseFromString(data[2:headerLength+2])<line_sep>eventData=event_pb2.Event.Data()<line_sep># currently we only process attached events that # are fully contained in the first part (fragment) # of a one-part (no fragmentation) or multi-part # (fragmented) OTA message # # Notes for below logic: # 2 + headerLength = 2 byte header length field # + header length # # 9 = OTA PartInfo header length. Where PartInfo # is used to support fragmentation. <if_stmt>otaHeader.HasField("payloadInfo")<and>len(data)<ge>2+headerLength+9+otaHeader.payloadInfo.eventLength<block_start>eventData.ParseFromString(data[2+headerLength+9:2+headerLength+9+otaHeader.payloadInfo.eventLength])<for_stmt>serialization eventData.serializations<block_start>self._lock.acquire()<try_stmt><block_start><if_stmt>serialization.eventId<in>self._handlers<block_start>self._handlers[serialization.eventId](serialization.nemId serialization.eventId serialization.data uuid.UUID(bytes=otaHeader.uuid) otaHeader.sequence)<block_end><elif_stmt>default<block_start>default(serialization.nemId serialization.eventId serialization.data uuid.UUID(bytes=otaHeader.uuid) otaHeader.sequence)<block_end><block_end><finally_stmt><block_start>self._lock.release()<block_end><block_end><block_end><block_end><block_end><block_end><block_end><def_stmt>nextEvent self<block_start>events=[]<line_sep>eventId=0<line_sep>running=<true><while_stmt>running<block_start><try_stmt><block_start>rdfds=[self._socket self._readFd]<if_stmt>self._socketOTA<block_start>rdfds.append(self._socketOTA)<block_end>readable,_,_=select.select(rdfds [] [])<block_end><except_stmt>select.error<block_start><continue><block_end><for_stmt>fd readable<block_start><if_stmt>fd<is>self._socket<block_start>data,_=self._socket.recvfrom(65535)<if_stmt><not>len(data)<block_start>running=<false><line_sep><break><block_end>(length )=struct.unpack_from("!H" data)<if_stmt>length<eq>len(data)-2<block_start>event=event_pb2.Event()<line_sep>event.ParseFromString(data[2:])<for_stmt>serialization event.data.serializations<block_start>events.append((serialization.nemId serialization.eventId serialization.data))<block_end><return>(uuid.UUID(bytes=event.uuid) event.sequenceNumber tuple(events))<block_end><block_end><elif_stmt>fd<is>self._readFd<block_start>running=<false><line_sep><break><block_end><elif_stmt>fd<is>self._socketOTA<block_start>data,_=self._socketOTA.recvfrom(65535)<if_stmt><not>len(data)<block_start>running=<false><line_sep><break><block_end>(headerLength )=struct.unpack_from("!H" data)<line_sep>otaHeader=otaheader_pb2.OTAHeader()<line_sep>otaHeader.ParseFromString(data[2:headerLength+2])<line_sep>eventData=event_pb2.Event.Data()<line_sep>eventData.ParseFromString(data[2+headerLength:2+headerLength+otaHeader.eventLength])<for_stmt>serialization eventData.serializations<block_start>events.append((serialization.nemId serialization.eventId serialization.data))<block_end><return>(uuid.UUID(bytes=otaHeader.uuid) otaHeader.sequenceNumber tuple(events))<block_end><block_end><block_end><return>(<none> <none> 
tuple(events))<block_end><def_stmt>subscribe self eventId callback<block_start>self._lock.acquire()<if_stmt>callback<block_start>self._handlers[eventId]=callback<block_end>self._lock.release()<block_end><def_stmt>unsubscribe self eventId<block_start>self._lock.acquire()<if_stmt>eventId<in>self._handlers<block_start><del_stmt>self._handlers[eventId]<block_end>self._lock.release()<block_end><def_stmt>publish self nemId event<block_start>self._sequenceNumber<augadd>1<line_sep>msg=event_pb2.Event()<line_sep>msg.uuid=self._uuid.bytes<line_sep>msg.sequenceNumber=self._sequenceNumber<line_sep>serialization=msg.data.serializations.add()<line_sep>serialization.nemId=nemId<line_sep>serialization.eventId=event.IDENTIFIER<line_sep>serialization.data=event.serialize()<line_sep>buf=msg.SerializeToString()<line_sep>self._socket.sendto(struct.pack("!H" len(buf))+buf (self._multicastGroup self._port))<block_end><block_end>
<import_stmt>argparse<import_stmt>itertools<import_stmt>matplotlib<import_stmt>matplotlib.pyplot<as>plt<import_stmt>matplotlib.patches<as>patches<import_stmt>matplotlib.colors<as>colors<import_stmt>numpy<as>np<import_stmt>os<import_stmt>data<import_from_stmt>PIL Image<line_sep>parser=argparse.ArgumentParser()<line_sep>parser.add_argument("n" type=int nargs="*")<line_sep>parser.add_argument("--keep" type=int nargs="*")<line_sep>args=parser.parse_args()<line_sep>matplotlib.rc("text" usetex=<true>)<line_sep>params={"text.latex.preamble":[r"\usepackage{bm,amsmath,mathtools,amssymb}"]}<line_sep>plt.rcParams.update(params)<line_sep>base_path="clevr/images/val"<line_sep>val_images=sorted(os.listdir(base_path))<def_stmt>take iterable n<block_start>l=[]<for_stmt>_ range(n)<block_start>l.append(next(iterable))<block_end><return>l<block_end><def_stmt>load_file path<block_start><with_stmt>open(path)<as>fd<block_start><for_stmt>f fd<block_start>tokens=iter(f.strip().split(" "))<line_sep>take(tokens 1)<if_stmt>"detect"<in>path<block_start>score=float(take(tokens 1)[0])<if_stmt>score<l>0.5<block_start><continue><block_end><block_end><else_stmt><block_start>score=1.0<block_end>coord=take(tokens 3)<line_sep>material=np.argmax(take(tokens 2))<line_sep>color=np.argmax(take(tokens 8))<line_sep>shape=np.argmax(take(tokens 3))<line_sep>size=np.argmax(take(tokens 2))<line_sep>access=<lambda>x i:data.CLASSES[x][i]<line_sep><yield>("({:.2f}, {:.2f}, {:.2f})".format(*map(<lambda>x:3<times>float(x) coord)) access("size" size) access("color" color) access("material" material) access("shape" shape) )<block_end><block_end><block_end>indices_to_use=args.keep<line_sep>indices_to_use.append(-2)<line_sep>indices_to_use.append(-1)<line_sep>plt.figure(figsize=(12 4))<for_stmt>j,index enumerate(args.n)<block_start>progress=[]<line_sep>path="out/clevr-state/{}-clevr-state-1-{}/{}/{}.txt"<for_stmt>i range(31)<block_start>points=list(load_file(path.format("dspn" "30" "detections" f"{index}-step{i}")))<line_sep>progress.append(points)<block_end>progress.append(list(load_file(path.format("base" "10" "groundtruths" index))))<line_sep>progress.append(list(load_file(path.format("base" "10" "detections" index))))<line_sep>img=Image.open(os.path.join(base_path val_images[int(index)]))<line_sep>img=img.resize((128 128) Image.LANCZOS)<line_sep>plt.imshow(img)<line_sep>plt.xticks([])<line_sep>plt.yticks([])<line_sep>plt.savefig(f"img-{j}.pdf" bbox_inches="tight")<line_sep>matrix=[]<for_stmt>i,progress_n enumerate(indices_to_use)<block_start>column=[]<line_sep>step=progress[progress_n]<if_stmt>progress_n<eq>-2<block_start>header=r"True $\bm{Y}$"<block_end><elif_stmt>progress_n<eq>-1<block_start>header=r"Baseline"<block_end><else_stmt><block_start>header=r"$\hat{\bm{Y}}^{("+str(progress_n)+")}$"<block_end>column.append(header)<for_stmt>object sorted(step key=<lambda>x:[float(x.strip())<for>x x[0][1:-1].split(",")])<block_start>column.append(object[0])<line_sep>column.append(" ".join(object[1:]))<block_end>matrix.append(column)<block_end># transpose matrix=itertools.zip_longest(*matrix fillvalue="")<line_sep># mark mismatched entries contains_words=<lambda>row:"small"<in>row[-2]<or>"large"<in>row[-2]<line_sep># make an attribute red if it isn't correct matrix=[[" ".join((r"\textcolor{red}{"+attribute+"}"<if>attribute<ne>correct_attribute<else>attribute)<for>attribute,correct_attribute zip(state.split(" ") row[-2].split(" ")))<for>state row]<if>contains_words(row)<else>row<for>row matrix]<line_sep>matrix=[" & ".join(row)<for>row 
matrix]<line_sep># format into table template=r""" \includegraphics[width=0.22\linewidth]{{img-{}}} \begin{{tabular}}{} \toprule {}\\ \midrule {}\\ \bottomrule \end{{tabular}} """<line_sep>table=template.format(j "{"+"c"<times>len(indices_to_use)+"}" matrix[0] "\\\\\n".join(matrix[1:]))<line_sep>print(table)<block_end>
<import_stmt>unittest<import_stmt>os<import_stmt>numpy<as>np<import_from_stmt>numpy.testing assert_array_equal<import_from_stmt>sklearn.cluster KMeans<import_from_stmt>sklearn.metrics.pairwise cosine_similarity<import_from_stmt>libact.base.dataset Dataset import_libsvm_sparse<import_from_stmt>libact.models LogisticRegression<import_from_stmt>libact.query_strategies UncertaintySampling<import_from_stmt>libact.labelers IdealLabeler<import_from_stmt>..density_weighted_meta DensityWeightedMeta<import_from_stmt>.utils run_qs<class_stmt>DensityWeightedMetaTestCase(unittest.TestCase)<block_start><def_stmt>setUp self<block_start>dataset_filepath=os.path.join(os.path.dirname(os.path.realpath(__file__)) 'datasets/heart_scale')<line_sep>self.X,self.y=import_libsvm_sparse(dataset_filepath).format_sklearn()<line_sep>self.quota=10<block_end><def_stmt>test_density_weighted_meta_uncertainty_lc self<block_start>trn_ds=Dataset(self.X[:20] np.concatenate([self.y[:6] [<none>]<times>14]))<line_sep>base_qs=UncertaintySampling(trn_ds method='lc' model=LogisticRegression(solver='liblinear' multi_class="ovr"))<line_sep>similarity_metric=cosine_similarity<line_sep>clustering_method=KMeans(n_clusters=3 random_state=1126)<line_sep>qs=DensityWeightedMeta(dataset=trn_ds base_query_strategy=base_qs similarity_metric=similarity_metric clustering_method=clustering_method beta=1.0 random_state=1126)<line_sep>model=LogisticRegression(solver='liblinear' multi_class="ovr")<line_sep>qseq=run_qs(trn_ds qs self.y self.quota)<line_sep>assert_array_equal(qseq np.array([13 18 9 12 8 16 10 19 15 17]))<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
<import_from_stmt>asks BasicAuth<import_from_stmt>unittest TestCase<import_stmt>pytest<import_from_stmt>asks.response_objects Response<import_from_stmt>many_requests.many_requests_ ManyRequests<import_from_stmt>many_requests.common BadResponse<import_from_stmt>.mock_server web_server<line_sep>@pytest.mark.usefixtures("web_server")<class_stmt>TestManyRequestAuth(TestCase)<block_start><def_stmt>test_basic_auth self<block_start>auths=[BasicAuth(auth_info=(("username" "password"))) # ok BasicAuth(auth_info=(("username" "bad_password")))# bad ]<line_sep>url='http://0.0.0.0:8080/basic_auth'<line_sep>responses=ManyRequests(10 2 retries=2 retry_sleep=0)(method='GET' url=url auth=auths)<assert_stmt>len(responses)<eq>2<line_sep>ok_res=responses[0]<assert_stmt>isinstance(ok_res Response)<assert_stmt>ok_res.url<eq>url<line_sep>bad_res=responses[1]<assert_stmt>isinstance(bad_res BadResponse)<assert_stmt>bad_res.response.status_code<eq>401<assert_stmt>bad_res.response.url<eq>url<block_end><block_end>
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ TVMC PassContext Interface """<import_stmt>importlib<import_stmt>tvm<import_from_stmt>tvm.driver.tvmc TVMCException<def_stmt>load_function full_name<block_start>"""Dynamic loading a function by the full name. Parameters ---------- full_name: str The name of a PackedFunc or a string of the form "path.to.module.func" that indicates the module that can be imported. You must be aware of the load order here, it first tries to find it via TVM global function, if not find, try to import it by "importlib.import_module". Returns ------- func: function or PackedFunc The loaded fucntion. """<line_sep>global_func=tvm.get_global_func(full_name allow_missing=<true>)<if_stmt>global_func<is><not><none><block_start><return>global_func<block_end># split full name "path.to.module.func" into two parts ["path.to.module", "func"] module_name,func_name=full_name.rsplit("." 1)<line_sep># import module and find the function module=importlib.import_module(module_name)<if_stmt>hasattr(module func_name)<block_start><return>getattr(module func_name)<block_end><raise>TVMCException(f"No function '{func_name}' found in module '{module_name}'.")<block_end><def_stmt>get_pass_config_value name value config_type<block_start>"""Get a PassContext configuration value, based on its config data type. Parameters ---------- name: str config identifier name. value: str value assigned to the config, provided via command line. config_type: str data type defined to the config, as string. Returns ------- parsed_value: bool, int or str a representation of the input value, converted to the type specified by config_type. 
"""<line_sep>parsed_value=<none><if_stmt>config_type<eq>"IntImm"# "Bool" configurations in the PassContext are recognized as # IntImm, so deal with this case here <block_start>mapping_values={"false":<false> "true":<true> }<if_stmt>value.isdigit()<block_start>parsed_value=int(value)<block_end><else_stmt># if not an int, accept only values on the mapping table, case insensitive <block_start>parsed_value=mapping_values.get(value.lower() <none>)<block_end><if_stmt>parsed_value<is><none><block_start><raise>TVMCException(f"Invalid value '{value}' for configuration '{name}'.")<block_end><block_end><elif_stmt>config_type<eq>"runtime.String"<block_start>parsed_value=value<block_end><elif_stmt>config_type<eq>"Array"<block_start><if_stmt>name<eq>"tir.add_lower_pass"<block_start>pass_list=value.split(",")<if_stmt>len(pass_list)%2<ne>0<block_start><raise>TVMCException(f"The configuration of '{name}' must be of the form "<concat>"'tir.add_lower_pass=opt_level1,pass1,opt_evel2,pass2'")<block_end>parsed_value=[]<for_stmt>i range(0 len(pass_list) 2)<block_start>level,pass_func=pass_list[i].strip() pass_list[i+1].strip()<try_stmt><block_start>level=int(level)<block_end><except_stmt>ValueError<block_start><raise>TVMCException(f"Only integer is allow for configuration '{name}'.")<block_end># TODO (@leeexyz) We should parse configurations of each tir Pass. # For now, we only use the defaults. Currently, There are four config nodes: # `tir.transform.LoopPartitionConfig` # `tir.transform.UnrollLoopConfig` # `tir.transform.HoistIfThenElseConfig` # `tir.transform.InjectDoubleBufferConfig` # loading pass func and calling it to get the Pass pass_func=load_function(pass_func)()<line_sep>parsed_value.append((level pass_func))<block_end><block_end><else_stmt><block_start><raise>TVMCException(f"Unsupported configuration '{name}' for '{config_type}' type.")<block_end><block_end><else_stmt># not raise here cause we alreay checked before calling this function <block_start><pass><block_end><return>parsed_value<block_end><def_stmt>parse_configs input_configs<block_start>"""Parse configuration values set via command line. Parameters ---------- input_configs: list of str list of configurations provided via command line. Returns ------- pass_context_configs: dict a dict containing key-value configs to be used in the PassContext. """<if_stmt><not>input_configs<block_start><return>{}<block_end>all_configs=tvm.ir.transform.PassContext.list_configs()<line_sep>supported_config_types=("IntImm" "runtime.String" "Array")<line_sep>supported_configs=[name<for>name all_configs.keys()<if>all_configs[name]["type"]<in>supported_config_types]<line_sep>pass_context_configs={}<for_stmt>config input_configs<block_start><if_stmt><not>config<block_start><raise>TVMCException(f"Invalid format for configuration '{config}', use <config>=<value>")<block_end># Each config is expected to be provided as "name=value" <try_stmt><block_start>name,value=config.split("=")<line_sep>name=name.strip()<line_sep>value=value.strip()<block_end><except_stmt>ValueError<block_start><raise>TVMCException(f"Invalid format for configuration '{config}', use <config>=<value>")<block_end><if_stmt>name<not><in>all_configs<block_start><raise>TVMCException(f"Configuration '{name}' is not defined in TVM. "<concat>f"These are the existing configurations: {', '.join(all_configs)}")<block_end><if_stmt>name<not><in>supported_configs<block_start><raise>TVMCException(f"Configuration '{name}' uses a data type not supported by TVMC. 
"<concat>f"The following configurations are supported: {', '.join(supported_configs)}")<block_end>config_type=all_configs[name]["type"]<line_sep>parsed_value=get_pass_config_value(name value config_type)<if_stmt>config_type<eq>"Array"<and>name<in>pass_context_configs# merge configs if the configuration exists <block_start>pass_context_configs[name].extend(parsed_value)<block_end><else_stmt><block_start>pass_context_configs[name]=parsed_value<block_end><block_end><return>pass_context_configs<block_end>
<import_from_stmt>pygears.typing Queue typeof<def_stmt>din_data_cat intfs order=<none><block_start><if_stmt>order<is><none><block_start>order=range(len(intfs))<block_end>data=[]<for_stmt>o order<block_start>intf=intfs[o]<if_stmt>intf['modport']<eq>'producer'<block_start><continue><block_end><if_stmt>issubclass(intf['type'] Queue)<block_start><if_stmt>intf['type'][0].width<g>0<block_start>data.append(f'{intf["name"]}_s.data')<block_end><block_end><else_stmt><block_start><if_stmt>intf['type'].width<g>0<block_start>data.append(f'{intf["name"]}_s')<block_end><block_end><block_end><return>f'{{ {", ".join(reversed(data))} }}'<block_end><def_stmt>din_data_cat_value data<block_start>dout=[]<for_stmt>d data<block_start><if_stmt>isinstance(d Queue)<block_start>dout.append(d.data)<block_end><else_stmt><block_start>dout.append(d)<block_end><block_end><return>tuple(dout)<block_end>
<import_stmt>json os sys<line_sep>os.chdir(r'C:\Users\xtrem\Desktop\Electric\Electric Packages\packages')<for_stmt>f os.listdir()<block_start>data=''<try_stmt><block_start><with_stmt>open(f 'r')<as>file<block_start>data=json.load(file)<block_end><block_end><except_stmt><block_start><continue><block_end>linted={'display-name':data['display-name'] 'package-name':data['package-name'] }<line_sep># Change Based On Version # Not Portable <if_stmt>'portable'<in>list(data.keys())<block_start>linted['portable']={'latest-version':data['portable']['latest-version']}<for_stmt>version list(data['portable'].keys())<block_start><if_stmt>version<not><in>['latest-version' 'auto-update' 'package-name' 'display-name']<block_start>linted['portable'][version]={'url':data['portable'][version]['url']}<if_stmt>'checksum'<in>list(data['portable'][version].keys())<block_start>linted['portable'][version]['checksum']=data['portable'][version]['checksum']<block_end><if_stmt>'file-type'<in>list(data['portable'][version].keys())<block_start>linted['portable'][version]['file-type']=data['portable'][version]['file-type']<block_end><if_stmt>'pre-install'<in>list(data['portable'][version].keys())<block_start>linted['portable'][version]['pre-install']=data['portable'][version]['pre-install']<block_end><if_stmt>'post-install'<in>list(data['portable'][version].keys())<block_start>linted['portable'][version]['post-install']=data['portable'][version]['post-install']<block_end><for_stmt>key list(data['portable'][version].keys())<block_start><if_stmt>key<not><in>list(linted['portable'][version].keys())<block_start>linted['portable'][version][key]=data['portable'][version][key]<block_end><block_end>print(json.dumps(linted indent=4))<line_sep># TODO: Handle Portable Section data['portable'] <block_end><block_end><block_end><block_end>
<import_stmt>tensorflow<as>tf<import_from_stmt>onnx_tf.common exception<import_from_stmt>onnx_tf.common data_type<import_from_stmt>onnx_tf.common sys_config<import_from_stmt>onnx_tf.handlers.backend_handler BackendHandler<import_from_stmt>onnx_tf.handlers.handler onnx_op<line_sep>@onnx_op("Max")<class_stmt>Max(BackendHandler)<block_start>supported_types=[tf.bfloat16 tf.float16 tf.float32 tf.float64 tf.int32 tf.int64]<line_sep>cast_map={tf.uint8:tf.int32 tf.uint16:tf.int32 tf.uint32:tf.int64 tf.int8:tf.int32 tf.int16:tf.int32}<line_sep>cast_map[tf.uint64]=tf.int64<if>sys_config.auto_cast<else><none><line_sep>@classmethod<def_stmt>args_check cls node **kwargs<block_start>dtype=kwargs["tensor_dict"][node.inputs[0]].dtype<if_stmt>dtype<in>cls.cast_map<and>cls.cast_map[dtype]<is><none><block_start>exception.DTYPE_NOT_CAST_EXCEPT("Max input "+node.inputs[0]+" with data type '"+data_type.tf_to_np_str(dtype)+"'" data_type.tf_to_np_str_list(cls.supported_types))<block_end><block_end>@classmethod<def_stmt>_common cls node **kwargs<block_start>values=[kwargs["tensor_dict"][inp]<for>inp node.inputs]<line_sep>dtype=values[0].dtype<if_stmt>dtype<in>cls.cast_map<block_start>values=[tf.cast(v cls.cast_map[v.dtype])<for>v values]<block_end>result=values[0]<for_stmt>i range(1 len(values))<block_start>result=tf.maximum(result values[i])<block_end><return>[tf.cast(result dtype)<if>dtype<in>cls.cast_map<else>result]<block_end>@classmethod<def_stmt>version_1 cls node **kwargs<block_start><return>cls._common(node **kwargs)<block_end>@classmethod<def_stmt>version_6 cls node **kwargs<block_start><return>cls._common(node **kwargs)<block_end>@classmethod<def_stmt>version_8 cls node **kwargs<block_start><return>cls._common(node **kwargs)<block_end>@classmethod<def_stmt>version_12 cls node **kwargs<block_start><return>cls._common(node **kwargs)<block_end>@classmethod<def_stmt>version_13 cls node **kwargs<block_start><return>cls._common(node **kwargs)<block_end><block_end>
from typing import List

import editdistance as ed


class BKTree:
    """Burkhard Keller tree: used to find strings within tolerance (w.r.t. edit distance metric) to given query string."""

    def __init__(self, txt_list: List[str]) -> None:
        """Pass list of texts (words) which are inserted into the tree."""
        self.root = None

        for txt in txt_list:
            self._insert(self.root, txt)

    def query(self, txt: str, tolerance: int) -> List[str]:
        """Query strings within given tolerance (w.r.t. edit distance metric)."""
        return self._query(self.root, txt, tolerance)

    def _insert(self, node, txt):
        # insert root node
        if node is None:
            self.root = (txt, {})
            return

        # insert all other nodes
        d = ed.eval(node[0], txt)
        if d in node[1]:
            self._insert(node[1][d], txt)
        else:
            node[1][d] = (txt, {})

    def _query(self, node, txt, tolerance):
        # handle empty root node
        if node is None:
            return []

        # distance between query and current node
        d = ed.eval(node[0], txt)

        # add current node to result if within tolerance
        res = []
        if d <= tolerance:
            res.append(node[0])

        # iterate over children whose edge distance can still satisfy the tolerance
        for (edge, child) in node[1].items():
            if d - tolerance <= edge <= d + tolerance:
                res += self._query(child, txt, tolerance)

        return res
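# A minimal usage sketch, assuming the 'editdistance' package is installed; the
# word list and tolerance below are illustrative choices, not values taken from
# this module.
if __name__ == '__main__':
    tree = BKTree(['hello', 'help', 'hell', 'shell'])
    # returns all stored words within edit distance 1 of the query string
    print(tree.query('helo', tolerance=1))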
""" Additional theano/keras functions. Author: <NAME> Email: <EMAIL> """<line_sep>#import marshal <import_stmt>numpy<line_sep>#import types <import_from_stmt>keras.layers.convolutional Convolution1D<import_from_stmt>keras.layers.convolutional MaxPooling1D<import_from_stmt>keras.layers.core Lambda<import_from_stmt>keras.layers.core MaskedLayer<import_from_stmt>keras.layers.core TimeDistributedMerge<import_from_stmt>keras backend<as>K<line_sep>## functions ## <def_stmt>time_distributed_nonzero_max_pooling x<block_start>""" Computes maximum along the first (time) dimension. It ignores the mask m. In: x - input; a 3D tensor mask_value - value to mask out, if None then no masking; by default 0.0, """<import_stmt>theano.tensor<as>T<line_sep>mask_value=0.0<line_sep>x=T.switch(T.eq(x mask_value) -numpy.inf x)<line_sep>masked_max_x=x.max(axis=1)<line_sep># replace infinities with mask_value masked_max_x=T.switch(T.eq(masked_max_x -numpy.inf) 0 masked_max_x)<line_sep><return>masked_max_x<block_end><def_stmt>time_distributed_masked_ave x m<block_start>""" Computes average along the first (time) dimension. In: x - input; a 3D tensor m - mask """<line_sep>tmp=K.sum(x axis=1)<line_sep>nonzeros=K.sum(m axis=-1)<line_sep><return>tmp/K.expand_dims(K.cast(nonzeros tmp.dtype))<block_end><def_stmt>time_distributed_masked_max x m<block_start>""" Computes max along the first (time) dimension. In: x - input; a 3D tensor m - mask m_value - value for masking """<line_sep># place infinities where mask is off m_value=0.0<line_sep>tmp=K.switch(K.equal(m 0.0) -numpy.inf 0.0)<line_sep>x_with_inf=x+K.expand_dims(tmp)<line_sep>x_max=K.max(x_with_inf axis=1)<line_sep>r=K.switch(K.equal(x_max -numpy.inf) m_value x_max)<line_sep><return>r<block_end>## classes ## # Transforms existing layers to masked layers <class_stmt>MaskedTimeDistributedMerge(MaskedLayer TimeDistributedMerge)<block_start><pass><block_end><class_stmt>MaskedConvolution1D(MaskedLayer Convolution1D)<block_start><pass><block_end><class_stmt>MaskedMaxPooling1D(MaskedLayer MaxPooling1D)<block_start><pass><block_end># auxiliary mask-aware layers <class_stmt>DropMask(MaskedLayer)<block_start>""" Removes a mask from the layer. """<def_stmt>get_output_mask self train=<false><block_start><return><none><block_end><block_end><class_stmt>LambdaWithMask(MaskedLayer Lambda)<block_start>""" Lambda function that takes a two argument function, and returns a value returned by the function applied to the output of the previous layer and the mask. That is: LambdaWithMask(f) = f(previous, mask) """<def_stmt>get_output self train=<false>#func = marshal.loads(self.function) #func = types.FunctionType(func, globals()) <block_start>func=self.function<if_stmt>hasattr(self 'previous')<block_start><return>func(self.previous.get_output(train) self.previous.get_output_mask(train))<block_end><else_stmt><block_start><return>func(self.input self.get_output_mask(train))<block_end><block_end><block_end>
# -*- encoding:utf-8 -*- """Autogenerated file, do not edit. Submit translations on Transifex."""<line_sep>MESSAGES={"%d min remaining to read":"%dminutas de lectura remanente" "(active)":"(active)" "Also available in:":"Anque disponibile in:" "Archive":"Archivo" "Atom feed":"Fluxo Atom" "Authors":"Authores" "Categories":"Categorias" "Comments":"Commentos" "LANGUAGE":"Interlingua" "Languages:":"Linguas:" "More posts about %s":"Plure entratas super %s" "Newer posts":"Entratas plus recente" "Next post":"Entrata successive" "Next":"Successive" "No posts found.":"Nulle entrata esseva trovate." "Nothing found.":"Nihil esseva trovate." "Older posts":"Entratas plus vetule" "Original site":"Sito original" "Posted:":"Publicate:" "Posts about %s":"Entratas super %s" "Posts by %s":"Entratas per %s" "Posts for year %s":"Entratas del anno %s" "Posts for {month_day_year}":"Entratas de {month_day_year}" "Posts for {month_year}":"Entratas de {month_year}" "Previous post":"Entrata precedente" "Previous":"Precendente" "Publication date":"Data de publication" "RSS feed":"Fluxo RSS" "Read in English":"Lege in interlingua" "Read more":"Lege plus" "Skip to main content":"Salta al contento principal" "Source":"Sorgente" "Subcategories:":"Subcategorias:" "Tags and Categories":"Etiquettas e categorias" "Tags":"Etiquettas" "Toggle navigation":"Commuta navigation" "Uncategorized":"Sin categoria" "Up":"In alto" "Updates":"Actualisationes" "Write your page here.":"Scribe tu pagina hic." "Write your post here.":"Scribe tu entrata hic." "old posts, page %d":"Vetule entratas, pagina %d" "page %d":"pagina %d" "updated":"actualisate" }<line_sep>
from cnsenti.emotion import Emotion
from cnsenti.sentiment import Sentiment
<import_from_stmt>spaceNet evalTools<as>eT<import_from_stmt>spaceNet geoTools<as>gT<import_stmt>numpy<as>np<import_stmt>sys<import_stmt>multiprocessing<import_stmt>time<if_stmt>__name__<eq>"__main__"# load Truth and Test File Locations <block_start><if_stmt>len(sys.argv)<g>1<block_start>truth_fp=sys.argv[1]<line_sep>test_fp=sys.argv[2]<block_end><else_stmt><block_start>test_fp='/data/building_extraction/SpaceNet/data/predict_pixelGeoJson/3band_013022223132_Public_img2052_predict.geojson'<line_sep>truth_fp='/data/building_extraction/SpaceNet/data/predict_pixelGeoJson/3band_013022223132_Public_img2052_predict.geojson'<block_end># check for cores available <if_stmt>len(sys.argv)<g>3<block_start>max_cpu=int(sys.argv[3])<block_end><else_stmt><block_start>max_cpu=multiprocessing.cpu_count()<block_end>parallel=<false><line_sep># initialize scene counts true_pos_counts=[]<line_sep>false_pos_counts=[]<line_sep>false_neg_counts=[]<line_sep>t0=time.time()<line_sep># Start Ingest Of Truth and Test Case sol_polys=gT.importgeojson(truth_fp removeNoBuildings=<true>)<line_sep>prop_polys=gT.importgeojson(test_fp)<line_sep>#print('sol_polys{}'.format(sol_polys)) #print('prop_polys{}'.format(prop_polys)) t1=time.time()<line_sep>total=t1-t0<line_sep>print('time of ingest: ' total)<line_sep># Speed up search by preprocessing ImageId and polygonIds test_image_ids=set([item['ImageId']<for>item prop_polys<if>item['ImageId']<g>0])<line_sep>prop_polysIdList=np.asarray([item['ImageId']<for>item prop_polys<if>item["ImageId"]<g>0<and>item['BuildingId']<ne>-1])<line_sep>#print('prop_polysIdLIST{}'.format(prop_polysIdList)) prop_polysPoly=np.asarray([item['poly']<for>item prop_polys<if>item["ImageId"]<g>0<and>item['BuildingId']<ne>-1])<line_sep>#print('prop_polyspoly{}'.format(prop_polysPoly)) sol_polysIdsList=np.asarray([item['ImageId']<for>item sol_polys<if>item["ImageId"]<g>0<and>item['BuildingId']<ne>-1])<line_sep>sol_polysPoly=np.asarray([item['poly']<for>item sol_polys<if>item["ImageId"]<g>0<and>item['BuildingId']<ne>-1])<line_sep>bad_count=0<line_sep>F1ScoreList=[]<line_sep>cpu_count=min(multiprocessing.cpu_count() max_cpu)<line_sep>#print('{}'.format(max_cpu)) p=multiprocessing.Pool(processes=cpu_count)<line_sep>ResultList=[]<line_sep>eval_function_input_list=eT.create_eval_function_input((test_image_ids (prop_polysIdList prop_polysPoly) (sol_polysIdsList sol_polysPoly)))<line_sep># evalFunctionInput = creatEevalFunctionInput((test_image_ids, # (prop_polysIdList, prop_polysPoly), # (sol_polysIdsList, sol_polysPoly))) # Calculate Values t3=time.time()<line_sep>print('time For DataCreation {}s'.format(t3-t1))<line_sep>#result_list = p.map(eT.evalfunction, eval_function_input_list) <if_stmt>parallel<eq><false><block_start>result_list=[]<for_stmt>eval_input eval_function_input_list<block_start>print('eval_input={}'.format(eval_input))<line_sep>result_list.append(eT.evalfunction(eval_input))<block_end><block_end><else_stmt><block_start>result_list=p.map(eT.evalfunction eval_function_input_list)<block_end>result_sum=np.sum(result_list axis=0)<line_sep>true_pos_total=result_sum[1]<line_sep>false_pos_total=result_sum[2]<line_sep>false_neg_total=result_sum[3]<line_sep>print('True_Pos_Total' true_pos_total)<line_sep>print('False_Pos_Total' false_pos_total)<line_sep>print('False_Neg_Total' 
false_neg_total)<line_sep>precision=float(true_pos_total)/(float(true_pos_total)+float(false_pos_total))<line_sep>recall=float(true_pos_total)/(float(true_pos_total)+float(false_neg_total))<line_sep>F1ScoreTotal=2.0<times>precision<times>recall/(precision+recall)<line_sep>print('F1Total' F1ScoreTotal)<line_sep>t2=time.time()<line_sep>total=t2-t0<line_sep>print('time of evaluation: {}'.format(t2-t1))<line_sep>print('time of evaluation {}s/imageId'.format((t2-t1)/len(result_list)))<line_sep>print('Total Time {}s'.format(total))<line_sep>print(result_list)<line_sep>print(np.mean(result_list))<block_end>
<import_from_stmt>.lin_op LinOp<import_stmt>numpy<as>np<import_stmt>cv2<import_from_stmt>proximal.halide.halide Halide<import_from_stmt>proximal.utils.utils Impl<class_stmt>warp(LinOp)<block_start>"""Warp using a homography. """<def_stmt>__init__ self arg H implem=<none><block_start>self.H=H.copy()<line_sep># Compute inverse self.Hinv=np.zeros(H.shape)<if_stmt>len(H.shape)<g>2<block_start><for_stmt>j range(self.H.shape[2])<block_start>self.Hinv[: : j]=np.linalg.pinv(H[: : j])<block_end><block_end><else_stmt><block_start>self.Hinv=np.linalg.pinv(H)<block_end># Check for the shape <if_stmt>len(H.shape)<l>2<or>len(H.shape)<g>3<block_start><raise>Exception('Error, warp supports only up to 4d inputs (expects first 3 to be image).')<block_end># Has to have third dimension #if len(arg.shape) != 3: # raise Exception('Images must have third dimension') shape=arg.shape<if_stmt>len(H.shape)<eq>3<block_start>shape<augadd>(H.shape[2] )<block_end># Temp array for halide self.tmpfwd=np.zeros((shape[0] shape[1] shape[2]<if>(len(shape)<g>2)<else>1 H.shape[2]<if>(len(H.shape)<g>2)<else>1) dtype=np.float32 order='F')<line_sep>self.tmpadj=np.zeros((shape[0] shape[1] shape[2]<if>(len(shape)<g>2)<else>1) dtype=np.float32 order='F')<line_sep>super(warp self).__init__([arg] shape implem)<block_end><def_stmt>forward self inputs outputs<block_start>"""The forward operator. Reads from inputs and writes to outputs. """<if_stmt>self.implementation<eq>Impl['halide']# Halide implementation <block_start>Halide('A_warp').A_warp(inputs[0] self.H self.tmpfwd)# Call np.copyto(outputs[0] np.reshape(self.tmpfwd self.shape))<block_end><else_stmt># CV2 version <block_start>inimg=inputs[0]<if_stmt>len(self.H.shape)<eq>2<block_start>warpedInput=cv2.warpPerspective(np.asfortranarray(inimg) self.H inimg.shape[1::-1] flags=cv2.INTER_LINEAR|cv2.WARP_INVERSE_MAP borderMode=cv2.BORDER_CONSTANT borderValue=0.)<line_sep># Necessary due to array layout in opencv np.copyto(outputs[0] warpedInput)<block_end><else_stmt><block_start><for_stmt>j range(self.H.shape[2])<block_start>warpedInput=cv2.warpPerspective(np.asfortranarray(inimg) self.H[: : j] inimg.shape[1::-1] flags=cv2.INTER_LINEAR|cv2.WARP_INVERSE_MAP borderMode=cv2.BORDER_CONSTANT borderValue=0.)<line_sep># Necessary due to array layout in opencv np.copyto(outputs[0][: : : j] warpedInput)<block_end><block_end><block_end><block_end><def_stmt>adjoint self inputs outputs<block_start>"""The adjoint operator. Reads from inputs and writes to outputs. 
"""<if_stmt>self.implementation<eq>Impl['halide']# Halide implementation <block_start>Halide('At_warp').At_warp(inputs[0] self.Hinv self.tmpadj)# Call <if_stmt>outputs[0].ndim<eq>2<block_start>np.copyto(outputs[0] self.tmpadj[<ellipsis> 0])<block_end><else_stmt><block_start>np.copyto(outputs[0] self.tmpadj)<block_end><block_end><else_stmt># CV2 version <block_start>inimg=inputs[0]<if_stmt>len(self.H.shape)<eq>2# + cv2.WARP_INVERSE_MAP <block_start>warpedInput=cv2.warpPerspective(np.asfortranarray(inimg) self.H inimg.shape[1::-1] flags=cv2.INTER_LINEAR borderMode=cv2.BORDER_CONSTANT borderValue=0.)<line_sep>np.copyto(outputs[0] warpedInput)<block_end><else_stmt><block_start>outputs[0][:]=0.0<for_stmt>j range(self.H.shape[2])<block_start>warpedInput=cv2.warpPerspective(np.asfortranarray(inimg[: : : j]) self.H inimg.shape[1::-1] flags=cv2.INTER_LINEAR borderMode=cv2.BORDER_CONSTANT borderValue=0.)<line_sep># Necessary due to array layout in opencv outputs[0]<augadd>warpedInput<block_end><block_end><block_end><block_end># TODO what is the spectral norm of a warp? <block_end>
from __future__ import print_function
import sys
sys.path.insert(1, "../../")
import h2o
from tests import pyunit_utils


def isax():
    df = h2o.create_frame(rows=1, cols=256, real_fraction=1.0, missing_fraction=0.0, seed=123)
    df2 = df.cumsum(axis=1)
    res = df2.isax(num_words=10, max_cardinality=10)
    res.show()
    answer = "0^10_0^10_0^10_0^10_5^10_7^10_8^10_9^10_9^10_8^10"
    assert answer == res[0, 0], "expected isax index to be " + answer + " but got " + res[0, 0] + " instead."
    h2o.remove(df)
    h2o.remove(df2)
    h2o.remove(res)


if __name__ == "__main__":
    pyunit_utils.standalone_test(isax)
else:
    isax()
import unittest

from aws_allowlister.database.compliance_data import ComplianceData
from aws_allowlister.database.database import connect_db

compliance_data = ComplianceData()
db_session = connect_db()


class HitrustQATestCase(unittest.TestCase):
    def test_gh_51_HITRUST_compliant_services(self):
        results = compliance_data.get_compliant_services(
            db_session=db_session, compliance_standard="HITRUST")
        expected_results = ["athena", "kendra", "guardduty", "sagemaker", "states"]
        # print(len(results))
        for expected_result in expected_results:
            # print(f"{expected_result}: in {expected_result in results}")
            self.assertTrue(expected_result in results)
# Copyright 2020 The 9nFL Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # coding: utf-8 <import_stmt>threading<class_stmt>AppendExamplesManager(object)<block_start><def_stmt>__init__ self queue partition_id<block_start>self._lock=threading.Lock()<line_sep>self._queue=queue<line_sep>self._partition_id=partition_id<line_sep>self._send_example_finished=<false><line_sep>self._next_example_index=0<block_end><def_stmt>get_next_example_index self<block_start><with_stmt>self._lock<block_start><return>self._next_example_index<block_end><block_end><def_stmt>append_batch_examples_into_queue self batch_examples<block_start><with_stmt>self._lock<block_start><assert_stmt>batch_examples "batch_examples is None"<assert_stmt>batch_examples.partition_id<eq>self._partition_id "the partition id of example batch mismatch with "<concat>"partition id of examples appending into queue : {} != {}".format(self._partition_id batch_examples.partition_id)<line_sep>self._next_example_index<augadd>len(batch_examples.example_id)-1<line_sep>self._queue.put(batch_examples)<line_sep><return><true><block_end><block_end><def_stmt>finish_send_examples self<block_start><with_stmt>self._lock<block_start>self._send_example_finished=<true><block_end><block_end><def_stmt>is_send_example_finished self<block_start><with_stmt>self._lock<block_start><return>self._send_example_finished<block_end><block_end><def_stmt>need_append_into_queue self<block_start><with_stmt>self._lock<block_start><if_stmt><not>self._queue.empty()<block_start><return><true><block_end><return><false><block_end><block_end><block_end>
import zeus

if zeus.is_tf_backend():
    from .tensorflow_quant import *
elif zeus.is_torch_backend():
    from .pytorch_quant import *
from AppKit import *
from PyObjCTools.TestSupport import *


class TestNSObjectController(TestCase):
    def testMethods(self):
        self.assertResultIsBOOL(NSObjectController.automaticallyPreparesContent)
        self.assertArgIsBOOL(NSObjectController.setAutomaticallyPreparesContent_, 0)
        self.assertResultIsBOOL(NSObjectController.isEditable)
        self.assertArgIsBOOL(NSObjectController.setEditable_, 0)
        self.assertResultIsBOOL(NSObjectController.canAdd)
        self.assertResultIsBOOL(NSObjectController.canRemove)
        self.assertResultIsBOOL(NSObjectController.validateUserInterfaceItem_)
        self.assertResultIsBOOL(NSObjectController.fetchWithRequest_merge_error_)
        self.assertArgIsBOOL(NSObjectController.fetchWithRequest_merge_error_, 1)
        self.assertArgIsOut(NSObjectController.fetchWithRequest_merge_error_, 2)
        self.assertResultIsBOOL(NSObjectController.usesLazyFetching)
        self.assertArgIsBOOL(NSObjectController.setUsesLazyFetching_, 0)


if __name__ == "__main__":
    main()
from unittest import TestCase

from CombinationSum import CombinationSum


class TestCombinationSum(TestCase):
    def test_combinationSum(self):
        cs = CombinationSum()
        list0 = cs.combinationSum([2, 3, 6, 7], 7)
        self.assertTrue(len(list0) == 2)
        self.assertTrue(list0.__contains__([7]))
        self.assertTrue(list0.__contains__([2, 2, 3]))
        self.assertEqual(cs.combinationSum([1], 3), [[1, 1, 1]])
        list1 = cs.combinationSum([1, 2], 4)
        self.assertTrue(len(list1))
        self.assertTrue(list1.__contains__([1, 1, 1, 1]))
        self.assertTrue(list1.__contains__([1, 1, 2]))
        self.assertTrue(list1.__contains__([2, 2]))
# tests/test_provider_banzaicloud_k8s.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:19:55 UTC)


def test_provider_import():
    import terrascript.provider.banzaicloud.k8s


def test_resource_import():
    from terrascript.resource.banzaicloud.k8s import k8s_manifest


# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
#
# def test_version_source():
#
#     import terrascript.provider.banzaicloud.k8s
#
#     t = terrascript.provider.banzaicloud.k8s.k8s()
#     s = str(t)
#
#     assert 'https://github.com/banzaicloud/terraform-provider-k8s' in s
#     assert '0.9.1' in s
from rest_framework import status
from rest_framework.reverse import reverse

from tests.test_service_catalog.base_test_request import BaseTestRequest
from tests.utils import check_data_in_dict


class TestApiServicePut(BaseTestRequest):

    def setUp(self):
        super(TestApiServicePut, self).setUp()
        self.put_data = {
            'name': "My new name",
            'description': "My new description",
            'billing_group_id': self.service_test.billing_group_id,
            'billing_group_is_shown': self.service_test.billing_group_is_shown,
            'billing_group_is_selectable': self.service_test.billing_group_is_selectable,
            'billing_groups_are_restricted': self.service_test.billing_groups_are_restricted,
            'enabled': self.service_test.enabled
        }
        self.kwargs = {'pk': self.service_test.id}
        self.get_service_details_url = reverse('api_service_details', kwargs=self.kwargs)

    def test_admin_put_service(self):
        response = self.client.put(self.get_service_details_url, data=self.put_data,
                                   content_type="application/json")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        check_data_in_dict(self, [self.put_data], [response.data])

    def test_admin_cannot_put_on_service_not_full(self):
        self.put_data.pop('name')
        response = self.client.put(self.get_service_details_url, data=self.put_data,
                                   content_type="application/json")
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_customer_cannot_put_service(self):
        self.client.force_login(user=self.standard_user)
        response = self.client.put(self.get_service_details_url, data=self.put_data,
                                   content_type="application/json")
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_cannot_put_service_when_logout(self):
        self.client.logout()
        response = self.client.put(self.get_service_details_url, data=self.put_data,
                                   content_type="application/json")
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
from .unet import Unet
from .linknet import Linknet
from .deeplabv3_plus import DeepLabV3
from .segm_fpn import SegmentationFPN
from .segm_bifpn import SegmentationBiFPN
from .hrnet import HRNet
# -*- coding: UTF-8 -*- # @Time : 04/08/2020 15:38 # @Author : QYD # @FileName: dijstra.py # @Software: PyCharm <import_stmt>heapq<import_stmt>numpy<as>np<import_stmt>copy<def_stmt>get_distance p1 p2<block_start><return>np.linalg.norm(np.array(p1)-np.array(p2))<block_end><class_stmt>QueueElement<block_start><def_stmt>__init__ self dis connection:list<block_start>self.dis=dis<line_sep>self.connection=connection<block_end><def_stmt>__lt__ self other<block_start><return>self.dis<l>other.dis<block_end><block_end><def_stmt>correspondence ref infer<block_start>""" :param ref: 参考点 :param infer: :return: """<line_sep>refer_size=len(ref)<line_sep>infer_size=len(infer)<if_stmt>infer<eq>[]<or>ref<eq>[]<block_start>print("data error")<block_end>Done=0<line_sep>OnFront=1<line_sep>NotVisited=2<line_sep>nodeStatus=np.ones((refer_size infer_size))<times>NotVisited# 记录点访问状态的数组 distanceMap=np.ones((refer_size infer_size))<times>float("inf")# 距离数组 prevPointer=np.zeros((refer_size infer_size))# 用以对匹配点进行回溯 # q=[]<line_sep>dist=get_distance(ref[0] infer[0])<line_sep>priorityQueue=QueueElement(dist [0 0])<line_sep>heapq.heappush(q priorityQueue)# 建立优先级队列 nodeStatus[0][0]=OnFront<line_sep>distanceMap[0][0]=dist<while_stmt>q<and>q[0].dis<l>distanceMap[-1][-1]<block_start>queueElem=copy.deepcopy(q[0].connection)<line_sep>dist=q[0].dis<line_sep>heapq.heappop(q)<while_stmt>q<and>nodeStatus[queueElem[0]][queueElem[1]]<eq>Done# 优先级队列不为空而且该点已经被访问过 <block_start>queueElem=copy.deepcopy(q[0].connection)<line_sep>dist=q[0].dis<line_sep>heapq.heappop(q)<block_end><if_stmt>nodeStatus[queueElem[0]][queueElem[1]]<eq>Done<block_start><break><block_end><if_stmt>dist<g>distanceMap[-1][-1]<block_start><break><block_end>nodeStatus[queueElem[0]][queueElem[1]]=Done<line_sep>distanceMap[queueElem[0]][queueElem[1]]=dist<if_stmt>queueElem[1]<l>infer_size-1<block_start>newDist=dist+get_distance(ref[queueElem[0]] infer[queueElem[1]+1])<if_stmt>nodeStatus[queueElem[0]][queueElem[1]+1]<eq>Done<block_start><continue><block_end><elif_stmt>nodeStatus[queueElem[0]][queueElem[1]+1]<eq>OnFront<block_start><if_stmt>newDist<ge>distanceMap[queueElem[0]][queueElem[1]+1]<block_start><continue><block_end><block_end>nodeStatus[queueElem[0]][queueElem[1]+1]=OnFront<line_sep>distanceMap[queueElem[0]][queueElem[1]+1]=newDist<line_sep>prevPointer[queueElem[0]][queueElem[1]+1]=2<line_sep>heapq.heappush(q QueueElement(newDist [queueElem[0] queueElem[1]+1]))<block_end><if_stmt>queueElem[0]<l>refer_size-1<block_start>newDist=dist+get_distance(ref[queueElem[0]+1] infer[queueElem[1]])<if_stmt>nodeStatus[queueElem[0]+1][queueElem[1]]<eq>Done<block_start><continue><block_end><elif_stmt>nodeStatus[queueElem[0]+1][queueElem[1]]<eq>OnFront<block_start><if_stmt>newDist<ge>distanceMap[queueElem[0]+1][queueElem[1]]<block_start><continue><block_end><block_end>nodeStatus[queueElem[0]+1][queueElem[1]]=OnFront<line_sep>distanceMap[queueElem[0]+1][queueElem[1]]=newDist<line_sep>prevPointer[queueElem[0]+1][queueElem[1]]=1<line_sep>heapq.heappush(q QueueElement(newDist [queueElem[0]+1 queueElem[1]]))<block_end><block_end>revPath=[]<line_sep>revPath.append([refer_size-1 infer_size-1])<while_stmt>revPath[-1][0]<or>revPath[-1][1]<block_start>pointer=prevPointer[revPath[-1][0]][revPath[-1][1]]<if_stmt>pointer<eq>1<block_start>revPath.append([revPath[-1][0]-1 revPath[-1][1]])<block_end><elif_stmt>pointer<eq>2<block_start>revPath.append([revPath[-1][0] 
revPath[-1][1]-1])<block_end><else_stmt><block_start><raise>ValueError<block_end><block_end>revPath.reverse()<line_sep><return>revPath<block_end>
# Generated by Django 3.2.2 on 2021-05-31 16:49 <import_stmt>django.db.models.deletion<import_from_stmt>django.db migrations models<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[("zerver" "0331_scheduledmessagenotificationemail") ]<line_sep>operations=[migrations.CreateModel(name="RealmUserDefault" fields=[("id" models.AutoField(auto_created=<true> primary_key=<true> serialize=<false> verbose_name="ID") ) ("enter_sends" models.BooleanField(null=<true> default=<false>)) ("left_side_userlist" models.BooleanField(default=<false>)) ("default_language" models.CharField(default="en" max_length=50)) ("default_view" models.TextField(default="recent_topics")) ("dense_mode" models.BooleanField(default=<true>)) ("fluid_layout_width" models.BooleanField(default=<false>)) ("high_contrast_mode" models.BooleanField(default=<false>)) ("translate_emoticons" models.BooleanField(default=<false>)) ("twenty_four_hour_time" models.BooleanField(default=<false>)) ("starred_message_counts" models.BooleanField(default=<true>)) ("color_scheme" models.PositiveSmallIntegerField(default=1)) ("demote_inactive_streams" models.PositiveSmallIntegerField(default=1)) ("emojiset" models.CharField(choices=[("google" "Google modern") ("google-blob" "Google classic") ("twitter" "Twitter") ("text" "Plain text") ] default="google-blob" max_length=20 ) ) ("enable_stream_desktop_notifications" models.BooleanField(default=<false>)) ("enable_stream_email_notifications" models.BooleanField(default=<false>)) ("enable_stream_push_notifications" models.BooleanField(default=<false>)) ("enable_stream_audible_notifications" models.BooleanField(default=<false>)) ("notification_sound" models.CharField(default="zulip" max_length=20)) ("wildcard_mentions_notify" models.BooleanField(default=<true>)) ("enable_desktop_notifications" models.BooleanField(default=<true>)) ("pm_content_in_desktop_notifications" models.BooleanField(default=<true>)) ("enable_sounds" models.BooleanField(default=<true>)) ("enable_offline_email_notifications" models.BooleanField(default=<true>)) ("message_content_in_email_notifications" models.BooleanField(default=<true>)) ("enable_offline_push_notifications" models.BooleanField(default=<true>)) ("enable_online_push_notifications" models.BooleanField(default=<true>)) ("desktop_icon_count_display" models.PositiveSmallIntegerField(default=1)) ("enable_digest_emails" models.BooleanField(default=<true>)) ("enable_login_emails" models.BooleanField(default=<true>)) ("enable_marketing_emails" models.BooleanField(default=<true>)) ("realm_name_in_notifications" models.BooleanField(default=<false>)) ("presence_enabled" models.BooleanField(default=<true>)) ("realm" models.ForeignKey(on_delete=django.db.models.deletion.CASCADE to="zerver.realm") ) ] options={"abstract":<false> } ) ]<block_end>
# Copyright (c) 2019 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom # the Software is furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. <import_stmt>argparse<import_stmt>grp<import_stmt>pwd<import_stmt>re<import_stmt>sys<import_from_stmt>uxy base<def_stmt>_linux args uxy_args<block_start>parser=argparse.ArgumentParser("__main__.py ls" add_help=<false>)<line_sep>parser.add_argument("--author" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-b" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--escape" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-C" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--color" nargs="?" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-D" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-f" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--format" nargs="?" 
default=argparse.SUPPRESS)<line_sep>parser.add_argument("--full-time" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-g" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-h" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--human-readable" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--si" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-G" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--no-group" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-i" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--inode" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-k" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--kibibytes" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-l" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-m" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-N" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--literal" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-o" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-q" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--hide-control-chars" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-Q" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--quote-name" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--quoting-style" nargs=1 default=argparse.SUPPRESS)<line_sep>parser.add_argument("-s" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--time" nargs=1 default=argparse.SUPPRESS)<line_sep>parser.add_argument("--time-style" nargs=1 default=argparse.SUPPRESS)<line_sep>parser.add_argument("-T" nargs=1 default=argparse.SUPPRESS)<line_sep>parser.add_argument("--tabsize" nargs=1 default=argparse.SUPPRESS)<line_sep>parser.add_argument("-w" nargs=1 default=argparse.SUPPRESS)<line_sep>parser.add_argument("--width" nargs=1 default=argparse.SUPPRESS)<line_sep>parser.add_argument("-x" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-Z" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--context" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("-1" action="store_true" default=argparse.SUPPRESS)<line_sep>parser.add_argument("--help" action="store_true" default=argparse.SUPPRESS)<line_sep>base.check_args(args parser)<if_stmt>uxy_args.long<block_start>fmtargs=['-lnNisZ' '--time-style=full-iso']<line_sep>regexp=re.compile(r'\s*([^\s]*)\s+([^\s]*)\s+(.)([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+(.*)')<line_sep>fmt=base.Format("INODE BLOCKS TYPE PERMISSIONS LINKS OWNER GROUP CONTEXT SIZE TIME NAME")<line_sep>owner_col=6<line_sep>group_col=7<block_end><else_stmt><block_start>fmtargs=['-lnN' '--time-style=full-iso']<line_sep>regexp=re.compile(r'\s*(.)([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+(.*)')<line_sep>fmt=base.Format("TYPE PERMISSIONS LINKS OWNER GROUP SIZE TIME 
NAME")<line_sep>owner_col=4<line_sep>group_col=5<block_end>resolve_ids=<true><if_stmt>"-n"<in>args[1:]<or>"--numeric-uid-gid"<in>args[1:]<block_start>resolve_ids=<false><block_end>proc=base.launch(uxy_args ['ls']+fmtargs+args[1:])<line_sep>base.writeline(fmt.render())<line_sep>path=""<for_stmt>ln proc<block_start><if_stmt>ln.startswith('total')<block_start><continue><block_end><if_stmt>ln<eq>""# When running with -R this is the name of the directory. <block_start>ln=proc.readline()<if_stmt>ln.endswith(":")<block_start>path=ln[:-1]+"/"<block_end><continue><block_end>m=regexp.match(ln)<if_stmt><not>m<block_start><continue><block_end>fields=[]<for_stmt>i range(1 regexp.groups-3)<block_start>field=m.group(i)<line_sep># In general, __main__.py is not supposed to supplant the functionality provided # by the wrapped tool. However, there's little option here: User names # can contain spaces (e.g. when provided by LDAP), but ls tool doesn't # escape spaces in the names even with run with -b parameter. <if_stmt>resolve_ids<block_start><try_stmt><block_start><if_stmt>i<eq>owner_col<block_start>field=pwd.getpwuid(int(field)).pw_name<block_end><elif_stmt>i<eq>group_col<block_start>field=grp.getgrgid(int(field)).gr_name<block_end><block_end><except_stmt>(KeyError ValueError)<block_start><pass><block_end><block_end>fields.append(base.encode_field(field))<block_end># Convert to actual ISO8601 format. time="%sT%s%s:%s"%(m.group(regexp.groups-3) m.group(regexp.groups-2) m.group(regexp.groups-1)[:-2] m.group(regexp.groups-1)[-2:])<line_sep>fields.append(base.encode_field(time))<line_sep>fields.append(base.encode_field(path+m.group(regexp.groups)))<line_sep>base.writeline(fmt.render(fields))<block_end><return>proc.wait()<block_end><def_stmt>_bsd args uxy_args<block_start>fmtargs=['-l']<line_sep># -rw-r--r-- 1 501 20 1025 May 31 07:11:49 2019 LICENSE regexp=re.compile(r'\s*(.)([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*)\s+([^\s]*\s+[^\s]*\s+[^\s]*\s+[^\s]*)\s+(.*)')<line_sep>fmt=base.Format("TYPE PERMISSIONS LINKS OWNER GROUP SIZE TIME NAME")<line_sep>proc=base.launch(uxy_args ['ls']+fmtargs+args[1:])<line_sep>base.writeline(fmt.render())<line_sep>path=""<for_stmt>ln proc<block_start><if_stmt>ln.startswith('total')<block_start><continue><block_end><if_stmt>ln<eq>""# When running with -R this is the name of the directory. <block_start>ln=proc.readline()<if_stmt>ln.endswith(":")<block_start>path=ln[:-1]+"/"<block_end><continue><block_end>m=regexp.match(ln)<if_stmt><not>m<block_start><continue><block_end>fields=[]<for_stmt>i range(1 regexp.groups+1)<block_start>fields.append(base.encode_field(m.group(i)))<block_end>base.writeline(fmt.render(fields))<block_end><return>proc.wait()<block_end><def_stmt>ls args uxy_args<block_start><if_stmt>uxy_args.platform.startswith("linux")<block_start><return>_linux(args uxy_args)<block_end><else_stmt><block_start><return>_bsd(args uxy_args)<block_end><block_end>
from django.urls import re_path

from baserow.contrib.database.api.formula.views import TypeFormulaView

app_name = "baserow.contrib.database.api.export"

urlpatterns = [
    re_path(
        r"(?P<field_id>[0-9]+)/type/$",
        TypeFormulaView.as_view(),
        name="type_formula",
    ),
]
from .rotatewidget import RotateWidget
import json
import sys


def main():
    data = load_data()
    printFields(data)


def load_data():
    data = ""
    filename = sys.argv[1]
    with open(filename, "r") as read:
        data = json.load(read)
    if isinstance(data, list):
        data = data[0]
    return data


def printFields(data):
    fields = sys.argv[2:]
    for i in fields:
        print(data[i])


main()
<import_stmt>pytest<import_stmt>genomepy<import_from_stmt>tests linux travis<line_sep>@pytest.mark.skipif(travis<and>linux reason="FTP does not work on Travis-Linux")<def_stmt>test_gencodeprovider gencode<block_start><assert_stmt>gencode.name<eq>"GENCODE"<assert_stmt>gencode.taxid_fields<eq>["taxonomy_id"]<block_end>@pytest.mark.skipif(travis<and>linux reason="FTP does not work on Travis-Linux")<def_stmt>test_genome_info_tuple gencode<block_start>t=gencode._genome_info_tuple("GRCh37")<assert_stmt>isinstance(t tuple)<assert_stmt>t[0:4]<eq>("GRCh37" "GCA_000001405.1" 9606 <true>)<block_end>@pytest.mark.skipif(travis<and>linux reason="FTP does not work on Travis-Linux")<def_stmt>test_genomes gencode<block_start><assert_stmt>gencode.genomes["GRCh37"]["other_info"]<eq>"GENCODE annotation + UCSC genome"<assert_stmt>gencode.genomes["GRCh38"]["assembly_accession"]<eq>"GCA_000001405.15"<block_end>@pytest.mark.skipif(travis<and>linux reason="FTP does not work on Travis-Linux")<def_stmt>test_get_genome_download_link gencode<block_start>link=gencode.get_genome_download_link("GRCh37" mask="soft")<assert_stmt>link<in>["http://hgdownload.soe.ucsc.edu/goldenPath/hg19/bigZips/chromFa.tar.gz" "http://hgdownload.soe.ucsc.edu/goldenPath/hg19/bigZips/hg19.fa.masked.gz" ]<block_end>@pytest.mark.skipif(travis<and>linux reason="FTP does not work on Travis-Linux")<def_stmt>test_get_annotation_download_links gencode# default annotation filing system <block_start>genome="GRCm39"<line_sep>annots=gencode.get_annotation_download_links(genome)<line_sep>expected=[# release numbers removed "ftp.ebi.ac.uk/pub/databases/gencode/Gencode_mouse/release_M" "/gencode.vM" ".annotation.gtf.gz" ]<assert_stmt>all([exp<in>annots[0]<for>exp expected])<line_sep># GRCh37, the one with the unique filing system. 
genome="GRCh37"<line_sep>annots=gencode.get_annotation_download_links(genome)<line_sep>expected=[# release numbers removed "ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/release_" "/GRCh37_mapping/gencode.v" "lift37.annotation.gtf.gz" ]<assert_stmt>all([exp<in>annots[0]<for>exp expected])<block_end>@pytest.mark.skipif(travis<and>linux reason="FTP does not work on Travis-Linux")<def_stmt>test_download_annotation gencode<block_start>gencode.download_annotation("GRCm39")<block_end># smallest gencode annotation (0.8 GB) <def_stmt>test_get_gencode2ucsc <block_start>genomes={"test1":{"species":"Homo sapiens"} "test2":{"species":"Mus Musculus"} "test3":{"species":"whatever"} }<line_sep>gencode2ucsc=genomepy.providers.gencode.get_gencode2ucsc(genomes)<assert_stmt>gencode2ucsc["test1"]<eq>"hg1"<assert_stmt>gencode2ucsc["test2"]<eq>"mm2"<assert_stmt>gencode2ucsc["test3"]<eq>"mm3"<block_end><def_stmt>test_get_releases <block_start>listing=["/path/to/release_22" "/path/to/mouse/release_M44" "/path/to/mouse/release_M33" "/path/to/release_01" # too old "/path/to/something/else" ]<line_sep>specie="human"<line_sep>releases=genomepy.providers.gencode.get_releases(listing specie)<assert_stmt>releases<eq>["44" "33" "22"]<line_sep>specie="mouse"<line_sep>releases=genomepy.providers.gencode.get_releases(listing specie)<assert_stmt>releases<eq>["M44" "M33" "M22"]<block_end><def_stmt>test_add_grch37 <block_start>release=42<line_sep>genomes={"GRCh11":{} "GRCh22":{"annotations":[f"ftp/to/release_{release}/gtf"]} }<line_sep>genomes=genomepy.providers.gencode.add_grch37(genomes "")<line_sep>expected=(f"/Gencode_human/release_{release}/GRCh37_mapping/"<concat>f"gencode.v{release}lift37.annotation.gtf.gz")<assert_stmt>genomes["GRCh22"]["annotations"]<eq>[f"ftp/to/release_{release}/gtf"]<assert_stmt>genomes["GRCh37"]["annotations"]<eq>[expected]<block_end>
from .textutils import print_progress, statistics_table
from .base import (
    view_image_multiple_landmarks,
    plot_cumulative_error_distribution,
)
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.

import torch
import torch.nn as nn
import torch.nn.functional as F


class MLP(nn.Module):
    def __init__(self, filter_channels, merge_layer=0, res_layers=[], norm='group', last_op=None):
        super(MLP, self).__init__()

        self.filters = nn.ModuleList()
        self.norms = nn.ModuleList()
        self.merge_layer = merge_layer if merge_layer > 0 else len(filter_channels) // 2
        self.res_layers = res_layers
        self.norm = norm
        self.last_op = last_op

        for l in range(0, len(filter_channels) - 1):
            if l in self.res_layers:
                # residual layers also take the raw input features, so widen the input channels
                self.filters.append(nn.Conv1d(
                    filter_channels[l] + filter_channels[0],
                    filter_channels[l + 1],
                    1))
            else:
                self.filters.append(nn.Conv1d(
                    filter_channels[l],
                    filter_channels[l + 1],
                    1))

            if l != len(filter_channels) - 2:
                if norm == 'group':
                    self.norms.append(nn.GroupNorm(32, filter_channels[l + 1]))
                elif norm == 'batch':
                    self.norms.append(nn.BatchNorm1d(filter_channels[l + 1]))

    def forward(self, feature):
        '''
        feature may include multiple view inputs
        args:
            feature: [B, C_in, N]
        return:
            [B, C_out, N] prediction
        '''
        y = feature
        tmpy = feature
        phi = None

        for i, f in enumerate(self.filters):
            y = f(y if i not in self.res_layers else torch.cat([y, tmpy], 1))
            if i != len(self.filters) - 1:
                if self.norm not in ['batch', 'group']:
                    y = F.leaky_relu(y)
                else:
                    y = F.leaky_relu(self.norms[i](y))
            if i == self.merge_layer:
                phi = y.clone()

        if self.last_op is not None:
            y = self.last_op(y)

        return y, phi
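# A minimal usage sketch; the channel sizes, residual layer indices, batch size
# and point count below are illustrative assumptions, not values taken from this
# module or its callers.
if __name__ == '__main__':
    mlp = MLP(filter_channels=[257, 1024, 512, 256, 128, 1],
              res_layers=[2, 3, 4], norm='group', last_op=nn.Sigmoid())
    feat = torch.randn(2, 257, 5000)   # [B, C_in, N] per-point feature vectors
    pred, phi = mlp(feat)              # pred: [B, 1, N]; phi: features at the merge layer
    print(pred.shape, phi.shape)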
#-*- coding:utf-8 -*- # &Author AnFany # 适用于多维输出 <import_from_stmt>BPNN_DATA_Reg model_data<as>R_data<import_stmt>numpy<as>np<import_stmt>tensorflow<as>tf<line_sep>'''第一部分:数据准备'''<line_sep>train_x_data=R_data[0]# 训练输入 train_y_data=R_data[1]# 训练输出 predict_x_data=R_data[2]# 测试输入 predict_y_data=R_data[3]# 测试输出 '''第二部分: 基于TensorFlow构建训练函数'''<line_sep># 创建激活函数 <def_stmt>activate input_layer weights biases actfunc<block_start>layer=tf.add(tf.matmul(input_layer weights) biases)<if_stmt>actfunc<eq>'relu'<block_start><return>tf.nn.relu(layer)<block_end><elif_stmt>actfunc<eq>'tanh'<block_start><return>tf.nn.tanh(layer)<block_end><elif_stmt>actfunc<eq>'sigmoid'<block_start><return>tf.nn.sigmoid(layer)<block_end><block_end># 权重初始化的方式和利用激活函数的关系很大 # sigmoid: xavir tanh: xavir relu: he # 构建训练函数 <def_stmt>Ten_train xdata ydata prexdata hiddenlayers=3 hiddennodes=100 learn_rate=0.05 itertimes=100000 batch_size=200 activate_func='sigmoid' break_error=0.0043# 开始搭建神经网络 <block_start>Input_Dimen=len(xdata[0])<line_sep>Unit_Layers=[Input_Dimen]+[hiddennodes]<times>hiddenlayers+[len(ydata[0])]# 输入的维数,隐层的神经数,输出的维数1 # 创建占位符 x_data=tf.placeholder(shape=[<none> Input_Dimen] dtype=tf.float32)<line_sep>y_target=tf.placeholder(shape=[<none> len(ydata[0])] dtype=tf.float32)<line_sep># 实现动态命名变量 VAR_NAME=locals()<for_stmt>jj range(hiddenlayers+1)<block_start>VAR_NAME['weight%s'%jj]=tf.Variable(np.random.rand(Unit_Layers[jj] Unit_Layers[jj+1]) dtype=tf.float32 name='weight%s'%jj)/np.sqrt(Unit_Layers[jj])<line_sep># sigmoid tanh # VAR_NAME['weight%s'%jj] = tf.Variable(np.random.rand(Unit_Layers[jj], Unit_Layers[jj + 1]), dtype=tf.float32,name='weight%s' % jj) \/ np.sqrt(Unit_Layers[jj] / 2) # relu VAR_NAME['bias%s'%jj]=tf.Variable(tf.random_normal([Unit_Layers[jj+1]] stddev=10 name='bias%s'%jj) dtype=tf.float32)<if_stmt>jj<eq>0<block_start>VAR_NAME['ooutda%s'%jj]=activate(x_data eval('weight%s'%jj) eval('bias%s'%jj) actfunc=activate_func)<block_end><else_stmt><block_start>VAR_NAME['ooutda%s'%jj]=activate(eval('ooutda%s'%(jj-1)) eval('weight%s'%jj) eval('bias%s'%jj) actfunc=activate_func)<block_end><block_end># 均方误差 loss=tf.reduce_mean(tf.reduce_sum(tf.square(y_target-eval('ooutda%s'%(hiddenlayers))) reduction_indices=[1]))<line_sep># 优化的方法 my_opt=tf.train.AdamOptimizer(learn_rate)<line_sep>train_step=my_opt.minimize(loss)<line_sep># 初始化 init=tf.global_variables_initializer()<line_sep>loss_vec=[]# 训练误差 <with_stmt>tf.Session()<as>sess<block_start>saver=tf.train.Saver()<line_sep>sess.run(init)<for_stmt>i range(itertimes)<block_start>rand_index=np.random.choice(len(xdata) size=batch_size replace=<false>)<line_sep>rand_x=xdata[rand_index]<line_sep>rand_y=ydata[rand_index]<line_sep>sess.run(train_step feed_dict={x_data:rand_x y_target:rand_y})<line_sep>temp_loss=sess.run(loss feed_dict={x_data:xdata y_target:ydata})<line_sep>loss_vec.append(temp_loss)<line_sep># 根据输出的误差,判断训练的情况 <if_stmt>(i+1)%25<eq>0<block_start>print('Generation: '+str(i+1)+'. 
归一误差:Loss = '+str(temp_loss))<block_end># 提前退出的判断 <if_stmt>temp_loss<l>break_error# 根据经验获得此数值, 因为采用的是随机下降,因此误差在前期可能出现浮动 <block_start><break><block_end><block_end># 计算预测数据的输出 pre_in_data0=np.array(prexdata dtype=np.float32)<for_stmt>ipre range(hiddenlayers+1)<block_start>VAR_NAME['pre_in_data%s'%(ipre+1)]=activate(eval('pre_in_data%s'%ipre) eval('weight%s'%ipre).eval() eval('bias%s'%ipre).eval() actfunc=activate_func)<block_end># 计算训练数据的输出 train_in_data0=np.array(xdata dtype=np.float32)<for_stmt>ipre range(hiddenlayers+1)<block_start>VAR_NAME['train_in_data%s'%(ipre+1)]=activate(eval('train_in_data%s'%ipre) eval('weight%s'%ipre).eval() eval('bias%s'%ipre).eval() actfunc=activate_func)<block_end><return>eval('train_in_data%s'%(hiddenlayers+1)).eval() eval('pre_in_data%s'%(hiddenlayers+1)).eval() loss_vec<block_end><block_end>'''第三部分: 结果展示函数'''<import_stmt>matplotlib.pyplot<as>plt<import_from_stmt>pylab mpl# 作图显示中文 mpl.rcParams['font.sans-serif']=['FangSong']# 设置中文字体新宋体 # 绘制图像 <def_stmt>figure real net le='训练' real_line='ko-' net_line='r.-' width=3<block_start>length=len(real[0])<line_sep># 绘制每个维度的对比图 <for_stmt>iwe range(length)<block_start>plt.subplot(length 1 iwe+1)<line_sep>plt.plot(list(range(len(real.T[iwe]))) real.T[iwe] real_line linewidth=width)<line_sep>plt.plot(list(range(len(net.T[iwe]))) net.T[iwe] net_line linewidth=width-1)<line_sep>plt.legend(['%s真实值'%le '网络输出值'])<if_stmt>length<eq>1<block_start>plt.title('%s结果对比'%le)<block_end><else_stmt><block_start><if_stmt>iwe<eq>0<block_start>plt.title('%s结果: %s维度对比'%(le iwe))<block_end><else_stmt><block_start>plt.title('%s维度对比'%iwe)<block_end><block_end><block_end>plt.show()<block_end># 绘制成本函数曲线图 <def_stmt>costfig errlist le='成本函数曲线图'<block_start>plt.plot(list(range(len(errlist))) errlist linewidth=3)<line_sep>plt.title(le)<line_sep>plt.xlabel('迭代次数')<line_sep>plt.ylabel('成本函数值')<line_sep>plt.show()<block_end># 因为训练数据较多,为了不影响展示效果,按序随机选取一定数量的数据,便于展示 <def_stmt>select datax datay count=200<block_start>sign=list(range(len(datax)))<line_sep>selectr_sign=np.random.choice(sign count replace=<false>)<line_sep><return>datax[selectr_sign] datay[selectr_sign]<block_end># 将输出的数据转换尺寸,变为原始数据的尺寸 <def_stmt>trans ydata minumber=R_data[4][0] maxumber=R_data[4][1]<block_start><return>ydata<times>(maxumber-minumber)+minumber<block_end><if_stmt>__name__<eq>'__main__'# 训练 <block_start>tfrelu=Ten_train(train_x_data train_y_data predict_x_data)<line_sep># 真实的数据转换尺寸 train_y_data_tran=trans(train_y_data)<line_sep>predict_y_data_tran=trans(predict_y_data)<line_sep># 网络预测的数据转换尺寸 train_output=trans(tfrelu[0])<line_sep>predict_output=trans(tfrelu[1])<line_sep># 数据多影响展示,随机挑选100条数据 random_train_x_data=select(train_output train_y_data_tran 200)<line_sep>random_predict_x_data=select(predict_output predict_y_data_tran 100)<line_sep>figure(random_train_x_data[1] random_train_x_data[0] le='训练')<line_sep>figure(random_predict_x_data[1] random_predict_x_data[0] le='预测')<line_sep>costfig(tfrelu[2])<block_end>
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # United States Government Sponsorship acknowledged. This software is subject to # U.S. export control laws and regulations and has been classified as 'EAR99 NLR' # (No [Export] License Required except when exporting to an embargoed country, # end user, or in support of a prohibited end use). By downloading this software, # the user agrees to comply with all applicable U.S. export laws and regulations. # The user has the responsibility to obtain export licenses, or other export # authority as may be required before exporting this software to any 'EAR99' # embargoed foreign country or citizen of those countries. # # Authors: <NAME>, <NAME> #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Comment: Adapted from InsarProc/runOffoutliers.py <import_stmt>logging<import_stmt>isceobj<line_sep>logger=logging.getLogger('isce.isceProc.runOffoutliers')<def_stmt>runOffoutliers self distance<block_start>refPol=self._isce.refPol<line_sep>stdWriter=self._stdWriter<for_stmt>sceneid1,sceneid2 self._isce.pairsToCoreg<block_start>pair=(sceneid1 sceneid2)<line_sep>rgOffsets=self._isce.refinedOffsetFields[pair]<line_sep>catalog=isceobj.Catalog.createCatalog(self._isce.procDoc.name)<line_sep>sid=self._isce.formatname(pair)<line_sep>offsetField=run(rgOffsets distance stdWriter catalog=catalog sceneid=sid)<line_sep>self._isce.procDoc.addAllFromCatalog(catalog)<line_sep>self._isce.refinedOffsetFields[pair]=offsetField<block_end><block_end><def_stmt>run rgOffsets distance stdWriter catalog=<none> sceneid='NO_ID'#offoutliers returns a list of modified locations #the list of lists is #list[0] = location across #list[1] = location across offset #list[2] = location down #list[3] = location down offset #list[4] = snr #list[5] = sig <block_start>logger.info("Culling offset field outliers: %s"%sceneid)<line_sep>objOff=isceobj.createOffoutliers()<line_sep>objOff.wireInputPort(name='offsets' object=rgOffsets)<line_sep>objOff.setSNRThreshold(2.0)<line_sep>objOff.setDistance(distance)<line_sep>#set the tag used in the outfile. each message is precided by this tag #is the writer is not of "file" type the call has no effect stdWriter.setFileTag("offoutliers" "log")<line_sep>stdWriter.setFileTag("offoutliers" "err")<line_sep>stdWriter.setFileTag("offoutliers" "out")<line_sep>objOff.stdWriter=stdWriter.set_file_tags("offoutliers" "log" "err" "out")<line_sep>objOff.offoutliers()<if_stmt>catalog<is><not><none># Record the inputs and outputs <block_start>isceobj.Catalog.recordInputsAndOutputs(catalog objOff "runOffoutliers.%s"%sceneid logger "runOffoutliers.%s"%sceneid)<block_end><return>objOff.getRefinedOffsetField()<block_end>
#
# Author: <NAME> (aka sch3m4)
# @sch3m4
# https://github.com/thiber-org/userline
#

import json

from lib import config


class JSON():
    def __init__(self, fd, duplicate=False):
        self.fd = fd
        self.duplicate = duplicate

    def add_sequence(self, event):
        evt = []
        aux = dict(event)
        # drop placeholder values before writing the event out
        for k in list(aux.keys()):
            if aux[k] == 'N/A':
                del aux[k]

        if self.duplicate:
            # emit a separate logoff record in addition to the logon record
            logout = dict(aux)
            if 'logoff.datetime' in aux.keys():
                logout['datetime'] = aux['logoff.datetime']
                logout['action'] = 'logoff'
            evt.append(logout)
            aux['datetime'] = aux['logon.datetime']
            aux['action'] = 'logon'

        evt.append(aux)

        for i in evt:
            self.fd.write(json.dumps(i, sort_keys=True, indent=config.JSON_INDENT) + '\n')

    def finish(self):
        self.fd.close()
<import_from_stmt>to_import func<def_stmt>param_func <block_start><pass><block_end>func(param_func)<line_sep>
""" Copyright 2019 Samsung SDS Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """<import_stmt>unittest<import_stmt>numpy<as>np<import_from_stmt>brightics.common.datasets load_iris<import_from_stmt>brightics.function.classification xgb_classification_train xgb_classification_predict<import_stmt>HtmlTestRunner<import_stmt>os<class_stmt>TestXgbClassification(unittest.TestCase)<block_start><def_stmt>test_default self<block_start>df_iris=load_iris()<line_sep>model_train=xgb_classification_train(table=df_iris feature_cols=['sepal_length' 'sepal_width'] label_col='species' max_depth=3 learning_rate=0.1 n_estimators=100 random_state=12345)['model']<line_sep>df_feature_importance=model_train['feature_importance_table']<line_sep>np.testing.assert_array_almost_equal([0.5877061486 0.4122938514] [df_feature_importance.values[i][1]<for>i range(2)] 10 'incorrect feature_importance')<line_sep>df_test=xgb_classification_predict(table=df_iris model=model_train prediction_col='prediction' probability_col='probability' thresholds=<none> suffix='index' output_margin=<false> ntree_limit=<none>)['out_table']<line_sep>self.assertListEqual(['setosa']<times>5 df_test['prediction'].tolist()[:5] 'incorrect prediction')<line_sep>np.testing.assert_array_almost_equal([0.9961014986 0.9555388689 0.9964415431 0.9961314201 0.9970849156] df_test['probability_0'].values[:5].astype('float64') 10 'incorrect probability_0')<line_sep>np.testing.assert_array_almost_equal([0.0029145265 0.0210829079 0.0020782573 0.001782414 0.0019302238] df_test['probability_1'].values[:5].astype('float64') 10 'incorrect probability_1')<line_sep>np.testing.assert_array_almost_equal([0.0009839422 0.0233781971 0.0014802075 0.0020861221 0.0009849136] df_test['probability_2'].values[:5].astype('float64') 10 'incorrect probability_2')<block_end><def_stmt>test_class_weight self<block_start>df_iris=load_iris()<line_sep>model_train=xgb_classification_train(table=df_iris feature_cols=['sepal_length' 'sepal_width' 'petal_length' 'petal_width'] label_col='species' max_depth=3 learning_rate=0.1 n_estimators=100 class_weight=[0 1 1] random_state=12345)['model']<line_sep>df_feature_importance=model_train['feature_importance_table']<line_sep>np.testing.assert_array_almost_equal([0.114977307617 0.234493196010 0.332829058170 0.3177004456520] [df_feature_importance.values[i][1]<for>i range(4)] 10 'incorrect feature_importance')<line_sep>df_test=xgb_classification_predict(table=df_iris model=model_train prediction_col='prediction' probability_col='probability' thresholds=<none> suffix='index' output_margin=<false> ntree_limit=<none>)['out_table']<line_sep>self.assertListEqual(['versicolor']<times>5 df_test['prediction'].tolist()[:5] 'incorrect prediction')<line_sep>np.testing.assert_array_almost_equal([0.0007314461 0.0010454282 0.0010394535 0.0010394285 0.0010394535] df_test['probability_0'].values[:5].astype('float64') 10 'incorrect probability_0')<line_sep>np.testing.assert_array_almost_equal([0.9976045489 0.9954549074 0.9956334233 0.9956094623 0.9956334233] 
df_test['probability_1'].values[:5].astype('float64') 10 'incorrect probability_1')<line_sep>np.testing.assert_array_almost_equal([0.0016639883 0.0034996143 0.0033270852 0.0033510949 0.0033270852] df_test['probability_2'].values[:5].astype('float64') 10 'incorrect probability_2')<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>filepath=os.path.dirname(os.path.abspath(__file__))<line_sep>reportFoler=filepath+"/../../../../../../../reports"<line_sep>unittest.main(testRunner=HtmlTestRunner.HTMLTestRunner(combine_reports=<true> output=reportFoler))<block_end>
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. <import_from_stmt>botbuilder.core ConversationState<import_from_stmt>botbuilder.dialogs.memory scope_path<import_from_stmt>.bot_state_memory_scope BotStateMemoryScope<class_stmt>ConversationMemoryScope(BotStateMemoryScope)<block_start><def_stmt>__init__ self<block_start>super().__init__(ConversationState scope_path.CONVERSATION)<block_end><block_end>
<import_from_stmt>dataclasses dataclass<import_from_stmt>typing Union<import_from_stmt>pathlib Path<import_from_stmt>simple_parsing ArgumentParser<line_sep>@dataclass<class_stmt>Train<block_start>"""Example of a command to start a Training run."""<line_sep># the training directory train_dir:Path=Path("~/train")<def_stmt>execute self<block_start>print(f"Training in directory {self.train_dir}")<block_end><block_end>@dataclass<class_stmt>Test<block_start>"""Example of a command to start a Test run."""<line_sep># the testing directory test_dir:Path=Path("~/train")<def_stmt>execute self<block_start>print(f"Testing in directory {self.test_dir}")<block_end><block_end>@dataclass<class_stmt>Program<block_start>"""Some top-level command"""<line_sep>command:Union[Train Test]<line_sep>verbose:bool=<false># log additional messages in the console. <def_stmt>execute self<block_start>print(f"Program (verbose: {self.verbose})")<line_sep><return>self.command.execute()<block_end><block_end>parser=ArgumentParser()<line_sep>parser.add_arguments(Program dest="prog")<line_sep>args=parser.parse_args()<line_sep>prog:Program=args.prog<line_sep>print("prog:" prog)<line_sep>prog.execute()<line_sep>
<import_from_stmt>..biotools find_specification_label_in_feature<import_from_stmt>.tools install_extras_message<line_sep>DFV_AVAILABLE=<false><try_stmt><block_start><import_from_stmt>dna_features_viewer BiopythonTranslator<line_sep>DFV_AVAILABLE=<true><block_end><except_stmt>ImportError<block_start><class_stmt>BiopythonTranslator<block_start>"Class unavailable. Install DNA Features Viewer."<def_stmt>__init__ self<block_start><raise>ImportError(install_extras_message("DNA Features Viewer"))<block_end><block_end><block_end><class_stmt>SpecAnnotationsTranslator(BiopythonTranslator)<block_start>"""Translator of DnaChisel feature-constraints for DNA Features Viewer"""<line_sep>feature_prefixes_colors={"@":"#ce5454" "~":"#e5be54" "#":"#8edfff" "!":"#fcff75" }<def_stmt>compute_filtered_features self features<block_start>"""Do not display edits."""<line_sep><return>[feature<for>feature features<if>"".join(feature.qualifiers.get("is_edit" "false"))<ne>"true"]<block_end><def_stmt>compute_feature_color self f<block_start>color=f.qualifiers.get("color" <none>)<if_stmt>color<is><not><none><block_start><if_stmt>isinstance(color list)<block_start>color=color[0]<block_end><return>color<block_end><if_stmt>f.type<eq>"misc_feature"<block_start>specification=find_specification_label_in_feature(f)<if_stmt>specification<is><not><none><block_start><return>self.feature_prefixes_colors.get(specification[0] "#f4df42")<block_end><block_end><return>"#eeeafa"<block_end><def_stmt>compute_feature_label self f<block_start>is_edit=f.qualifiers.get("is_edit" "false")<if_stmt>"true"<in>[is_edit is_edit[0]]<block_start><return><none><block_end>default=BiopythonTranslator.compute_feature_label(self f)<line_sep>label=<none><if>(f.type<ne>"misc_feature")<else>default<if_stmt>label<eq>"misc_feature"<block_start>label=<none><block_end><return>label<block_end><block_end>
<import_from_stmt>mythril.laser.ethereum.state.annotation StateAnnotation MergeableStateAnnotation <import_from_stmt>copy copy<import_from_stmt>typing Dict List Set<import_stmt>logging<line_sep>log=logging.getLogger(__name__)<class_stmt>MutationAnnotation(StateAnnotation)<block_start>"""Mutation Annotation This is the annotation used by the MutationPruner plugin to record mutations """<def_stmt>__init__ self<block_start><pass><block_end>@property<def_stmt>persist_over_calls self<arrow>bool<block_start><return><true><block_end><block_end><class_stmt>DependencyAnnotation(MergeableStateAnnotation)<block_start>"""Dependency Annotation This annotation tracks read and write access to the state during each transaction. """<def_stmt>__init__ self<block_start>self.storage_loaded=set()# type: Set self.storage_written={}# type: Dict[int, Set] self.has_call=<false># type: bool self.path=[0]# type: List self.blocks_seen=set()<block_end># type: Set[int] <def_stmt>__copy__ self<block_start>result=DependencyAnnotation()<line_sep>result.storage_loaded=copy(self.storage_loaded)<line_sep>result.storage_written=copy(self.storage_written)<line_sep>result.has_call=self.has_call<line_sep>result.path=copy(self.path)<line_sep>result.blocks_seen=copy(self.blocks_seen)<line_sep><return>result<block_end><def_stmt>get_storage_write_cache self iteration:int<block_start><return>self.storage_written.get(iteration set())<block_end><def_stmt>extend_storage_write_cache self iteration:int value:object<block_start><if_stmt>iteration<not><in>self.storage_written<block_start>self.storage_written[iteration]=set()<block_end>self.storage_written[iteration].add(value)<block_end><def_stmt>check_merge_annotation self other:"DependencyAnnotation"<block_start><if_stmt><not>isinstance(other DependencyAnnotation)<block_start><raise>TypeError("Expected an instance of DependencyAnnotation")<block_end><return>self.has_call<eq>other.has_call<and>self.path<eq>other.path<block_end><def_stmt>merge_annotation self other:"DependencyAnnotation"<block_start>merged_annotation=DependencyAnnotation()<line_sep>merged_annotation.blocks_seen=self.blocks_seen.union(other.blocks_seen)<line_sep>merged_annotation.has_call=self.has_call<line_sep>merged_annotation.path=copy(self.path)<line_sep>merged_annotation.storage_loaded=self.storage_loaded.union(other.storage_loaded)<line_sep>keys=set(list(other.storage_written.keys())+list(self.storage_written.keys()))<for_stmt>key keys<block_start>other_set=other.storage_written.get(key set())<line_sep>merged_annotation.storage_written[key]=self.storage_written.get(key set()).union(other_set)<block_end><return>merged_annotation<block_end><block_end><class_stmt>WSDependencyAnnotation(MergeableStateAnnotation)<block_start>"""Dependency Annotation for World state This world state annotation maintains a stack of state annotations. It is used to transfer individual state annotations from one transaction to the next. 
"""<def_stmt>__init__ self<block_start>self.annotations_stack:List[DependencyAnnotation]=[]<block_end><def_stmt>__copy__ self<block_start>result=WSDependencyAnnotation()<line_sep>result.annotations_stack=copy(self.annotations_stack)<line_sep><return>result<block_end><def_stmt>check_merge_annotation self annotation:"WSDependencyAnnotation"<arrow>bool<block_start><if_stmt>len(self.annotations_stack)<ne>len(annotation.annotations_stack)# We can only merge worldstate annotations that have seen an equal amount of transactions # since the beginning of symbolic execution <block_start><return><false><block_end><for_stmt>a1,a2 zip(self.annotations_stack annotation.annotations_stack)<block_start><if_stmt>a1<eq>a2<block_start><continue><block_end><if_stmt>(isinstance(a1 MergeableStateAnnotation)<and>isinstance(a2 MergeableStateAnnotation)<and>a1.check_merge_annotation(a2)<is><true>)<block_start><continue><block_end>log.debug("Aborting merge between annotations {} and {}".format(a1 a2))<line_sep><return><false><block_end><return><true><block_end><def_stmt>merge_annotation self annotation:"WSDependencyAnnotation"<arrow>"WSDependencyAnnotation"<block_start>merged_annotation=WSDependencyAnnotation()<for_stmt>a1,a2 zip(self.annotations_stack annotation.annotations_stack)<block_start><if_stmt>a1<eq>a2<block_start>merged_annotation.annotations_stack.append(copy(a1))<block_end>merged_annotation.annotations_stack.append(a1.merge_annotation(a2))<block_end><return>merged_annotation<block_end><block_end>
<import_from_stmt>.decorators accepts responds# noqa
<import_from_stmt>django.apps AppConfig<class_stmt>SetupConfig(AppConfig)<block_start>name='setup'<block_end>
<import_stmt>sys getpass<import_stmt>ldap<line_sep>#l = ldap.open("localhost", 31001) l=ldap.open("marta.it.uq.edu.au")<line_sep>login_dn="cn=root,ou=CSEE,o=UQ,c=AU"<line_sep>login_pw=getpass.getpass("Password for %s: "%login_dn)<line_sep>l.simple_bind_s(login_dn login_pw)<line_sep># # create a new sub organisation # <try_stmt><block_start>dn="ou=CSEE,o=UQ,c=AU"<line_sep>print("Adding" repr(dn))<line_sep>l.add_s(dn [("objectclass" ["organizationalUnit"]) ("ou" ["CSEE"]) ("description" ["Department of Computer Science and Electrical Engineering"]) ])<block_end><except_stmt>ldap.LDAPError<block_start><pass><block_end># # create an entry for me # dn="cn=<NAME>,ou=CSEE,o=UQ,c=AU"<line_sep>print("Updating" repr(dn))<try_stmt><block_start>l.delete_s(dn)<block_end><except_stmt><block_start><pass><block_end>l.add_s(dn [("objectclass" ["organizationalPerson"]) ("sn" ["Leonard"]) ("cn" ["<NAME>"]) ("description" ["Ph.D. student"]) ("display-name" ["<NAME>"]) #("commonname", ["<NAME>"]), ("mail" ["<EMAIL>"]) ("othermailbox" ["<EMAIL>"]) ("givenname" ["David"]) ("surname" ["Leonard"]) ("seeAlso" ["http://www.csee.uq.edu.au/~leonard/"]) ("url" ["http://www.csee.uq.edu.au/~leonard/"]) #("homephone", []), #("fax", []), #("otherfacsimiletelephonenumber",[]), #("officefax", []), #("mobile", []), #("otherpager", []), #("officepager", []), #("pager", []), ("info" ["info"]) ("title" ["Mr"]) #("telephonenumber", []), ("l" ["Brisbane"]) ("st" ["Queensland"]) ("c" ["AU"]) ("co" ["co"]) ("o" ["UQ"]) ("ou" ["CSEE"]) #("homepostaladdress", []), #("postaladdress", []), #("streetaddress", []), #("street", []), ("department" ["CSEE"]) ("comment" ["comment"]) #("postalcode", []), ("physicaldeliveryofficename" ["Bldg 78, UQ, St Lucia"]) ("preferredDeliveryMethod" ["email"]) ("initials" ["DRL"]) ("conferenceinformation" ["MS-conferenceinformation"]) #("usercertificate", []), ("labeleduri" ["labeleduri"]) ("manager" ["cn=<NAME>"]) ("reports" ["reports"]) ("jpegPhoto" [open("/www/leonard/leonard.jpg").read()]) ("uid" ["leonard"]) ("userPassword" [""])])<line_sep># # search beneath the CSEE/UQ/AU tree # res=l.search_s("ou=CSEE, o=UQ, c=AU" ldap.SCOPE_SUBTREE "objectclass=*" )<line_sep>print(res)<line_sep>l.unbind()<line_sep>
<import_from_stmt>datetime datetime<import_stmt>pytest<import_from_stmt>openbb_terminal.core.log.collection logging_clock<line_sep>clock=logging_clock.LoggingClock()<line_sep>now=datetime.now()<def_stmt>mock_next **_<block_start><raise>NotImplementedError<block_end>@pytest.mark.parametrize("precision" [logging_clock.Precision.hour logging_clock.Precision.minute])<def_stmt>test_calculate_next_sharp precision<block_start>value=clock.calculate_next_sharp(now precision)<assert_stmt>value<block_end><def_stmt>test_calculate_next_sharp_invalid <block_start><with_stmt>pytest.raises(Exception)<block_start>clock.calculate_next_sharp(now "bad")<block_end><block_end># TODO: find a better way to mock the while loop <def_stmt>test_do_action_every_sharp mocker<block_start>mock=mocker.Mock()<line_sep>mock.count=0<line_sep>mock.mock_next=mock_next<with_stmt>pytest.raises(NotImplementedError)<block_start>clock.do_action_every_sharp(mock.mock_next)<block_end><block_end><def_stmt>test_run mocker<block_start>mocker.patch("openbb_terminal.core.log.collection.logging_clock.LoggingClock.do_action_every_sharp")<line_sep>clock.run()<block_end><def_stmt>test_default_action <block_start>clock.default_action()<block_end>
<import_stmt>logging<import_from_stmt>..errors HTTPException<import_from_stmt>.models ensureQueueObjectType<line_sep>log=logging.getLogger("discodo.client.http")<class_stmt>HTTPClient<block_start><def_stmt>__init__ self client<block_start>self.VoiceClient=client<line_sep>self.Node=client.Node<line_sep>self.loop=client.Node.loop<block_end>@property<def_stmt>headers self<arrow>dict<block_start><return>{"Authorization":self.Node.password "User-ID":str(self.Node.user_id) "Guild-ID":str(self.VoiceClient.guild_id) "VoiceClient-ID":str(self.VoiceClient.id) }<block_end><async_keyword><def_stmt>fetch self method endpoint **kwargs<block_start>URL=self.Node.URL+endpoint<if_stmt>"headers"<not><in>kwargs<block_start>kwargs["headers"]={}<block_end>kwargs["headers"].update(self.headers)<async_keyword><with_stmt>self.Node.session.request(method URL **kwargs)<as>response<block_start>log.debug(f"{method} {URL} with {kwargs} has returned {response.status}")<line_sep>data=ensureQueueObjectType(self.VoiceClient <await>response.json(content_type=<none>))<if_stmt>200<le>response.status<l>300<block_start><return>data<block_end><raise>HTTPException(response.status data)<block_end><block_end><async_keyword><def_stmt>getSource self query<block_start><return><await>self.fetch("GET" "/getSource" params={"query":query})<block_end><async_keyword><def_stmt>searchSources self query<block_start><return><await>self.fetch("GET" "/searchSources" params={"query":query})<block_end><async_keyword><def_stmt>getVCContext self<block_start><return><await>self.fetch("GET" "/context")<block_end><async_keyword><def_stmt>setVCContext self data<block_start><return><await>self.fetch("POST" "/context" json={"context":data})<block_end><async_keyword><def_stmt>putSource self source<block_start><return><await>self.fetch("POST" "/putSource" json={"source":source})<block_end><async_keyword><def_stmt>loadSource self query<block_start><return><await>self.fetch("POST" "/loadSource" json={"query":query})<block_end><async_keyword><def_stmt>getOptions self<block_start><return><await>self.fetch("GET" "/options")<block_end><async_keyword><def_stmt>setOptions self options<block_start><return><await>self.fetch("POST" "/options" json=options)<block_end><async_keyword><def_stmt>getSeek self<block_start><return><await>self.fetch("GET" "/seek")<block_end><async_keyword><def_stmt>seek self offset<block_start><return><await>self.fetch("POST" "/seek" json={"offset":offset})<block_end><async_keyword><def_stmt>skip self offset<block_start><return><await>self.fetch("POST" "/skip" json={"offset":offset})<block_end><async_keyword><def_stmt>pause self<block_start><return><await>self.fetch("POST" "/pause")<block_end><async_keyword><def_stmt>resume self<block_start><return><await>self.fetch("POST" "/resume")<block_end><async_keyword><def_stmt>shuffle self<block_start><return><await>self.fetch("POST" "/shuffle")<block_end><async_keyword><def_stmt>queue self<block_start><return><await>self.fetch("GET" "/queue")<block_end><async_keyword><def_stmt>getCurrent self<block_start><return><await>self.fetch("GET" "/current")<block_end><async_keyword><def_stmt>getQueueSource self tag<block_start><return><await>self.fetch("GET" f"/queue/{tag}")<block_end><async_keyword><def_stmt>setCurrent self data<block_start><return><await>self.fetch("POST" "/current" json=data)<block_end><async_keyword><def_stmt>setQueueSource self tag data<block_start><return><await>self.fetch("POST" f"/queue/{tag}" json=data)<block_end><async_keyword><def_stmt>removeQueueSource self 
tag<block_start><return><await>self.fetch("DELETE" f"/queue/{tag}")<block_end><block_end>
# Examples of valid version strings # __version__ = '1.2.3.dev1' # Development release 1 # __version__ = '1.2.3a1' # Alpha Release 1 # __version__ = '1.2.3b1' # Beta Release 1 # __version__ = '1.2.3rc1' # RC Release 1 # __version__ = '1.2.3' # Final Release # __version__ = '1.2.3.post1' # Post Release 1 __version__="0.3.1.dev1"<line_sep># Import commonly used submodules right away. # The first few imports are only included for clarity. They are not strictly necessary, because the from-imports below # also import the types and runtime modules and implicitly add them to the rubicon.objc namespace. <import_from_stmt>. types# noqa: F401 <import_from_stmt>. runtime# noqa: F401 <import_from_stmt>. api# noqa: F401 # The import of collections is important, however. The classes from collections are not meant to be used directly, # instead they are registered with the runtime module (using the for_objcclass decorator) so they are used in place of # ObjCInstance when representing Foundation collections in Python. If this module is not imported, the registration # will not take place, and Foundation collections will not support the expected methods/operators in Python! <import_from_stmt>. collections# noqa: F401 <import_from_stmt>.types # noqa: F401 CFIndex CFRange CGFloat CGGlyph CGPoint CGPointMake CGRect CGRectMake CGSize CGSizeMake NSEdgeInsets NSEdgeInsetsMake NSInteger NSMakePoint NSMakeRect NSMakeSize NSPoint NSRange NSRect NSSize NSTimeInterval NSUInteger NSZeroPoint UIEdgeInsets UIEdgeInsetsMake UIEdgeInsetsZero UniChar unichar <import_from_stmt>.runtime SEL send_message send_super# noqa: F401 <import_from_stmt>.api # noqa: F401 Block NSArray NSDictionary NSMutableArray NSMutableDictionary NSObject NSObjectProtocol ObjCBlock ObjCClass ObjCInstance ObjCMetaClass ObjCProtocol at ns_from_py objc_classmethod objc_const objc_ivar objc_method objc_property objc_rawmethod py_from_ns <line_sep>
"""Test evil genius labs init."""<import_stmt>pytest<import_from_stmt>homeassistant config_entries<import_from_stmt>homeassistant.components.evil_genius_labs PLATFORMS<line_sep>@pytest.mark.parametrize("platforms" [PLATFORMS])<async_keyword><def_stmt>test_setup_unload_entry hass setup_evil_genius_labs config_entry<block_start>"""Test setting up and unloading a config entry."""<assert_stmt>len(hass.states.async_entity_ids())<eq>1<assert_stmt><await>hass.config_entries.async_unload(config_entry.entry_id)<assert_stmt>config_entry.state<eq>config_entries.ConfigEntryState.NOT_LOADED<block_end>
<import_from_stmt>.cell *<import_from_stmt>.worksheet *<line_sep>
<import_from_stmt>rest_framework viewsets<import_from_stmt>.models ListModel<import_from_stmt>. serializers<import_from_stmt>utils.page MyPageNumberPagination<import_from_stmt>rest_framework.filters OrderingFilter<import_from_stmt>django_filters.rest_framework DjangoFilterBackend<import_from_stmt>.filter Filter<class_stmt>APIViewSet(viewsets.ModelViewSet)<block_start>""" list: Response a data list(all) """<line_sep>pagination_class=MyPageNumberPagination<line_sep>filter_backends=[DjangoFilterBackend OrderingFilter ]<line_sep>ordering_fields=['id' "create_time" "update_time" ]<line_sep>filter_class=Filter<def_stmt>get_queryset self<block_start><if_stmt>self.request.user<block_start><return>ListModel.objects.filter(is_delete=<false>)<block_end><else_stmt><block_start><return>ListModel.objects.none()<block_end><block_end><def_stmt>get_serializer_class self<block_start><if_stmt>self.action<in>['list']<block_start><return>serializers.BinpropertyGetSerializer<block_end><else_stmt><block_start><return>self.http_method_not_allowed(request=self.request)<block_end><block_end><block_end>
"""In this module we test that there is a module settings and is has the required attributes and functionality"""<import_from_stmt>awesome_streamlit.database settings<def_stmt>test_github <block_start>"""Test that there is a GITHUB_URL Setting"""<assert_stmt>settings.GITHUB_URL<block_end><def_stmt>test_github_raw <block_start>"""Test that there is a GITHUB_RAW_URL Setting"""<assert_stmt>settings.GITHUB_RAW_URL<block_end>
<import_stmt>os<import_stmt>sys<import_stmt>torch<import_from_stmt>easyreg.piplines run_one_task<import_stmt>argparse<import_from_stmt>task ModelTask DataTask# DataTask is assumed to be defined in task.py alongside ModelTask <class_stmt>SegmentationTraining()<block_start><def_stmt>__init__ self args<block_start>self.args=args<block_end><def_stmt>_set_environment self<block_start>sys.path.insert(0 os.path.abspath('..'))<line_sep>sys.path.insert(0 os.path.abspath('.'))<line_sep>sys.path.insert(0 os.path.abspath('../easyreg'))<line_sep>torch.backends.cudnn.benchmark=<true><block_end><def_stmt>train self<block_start><return>self.__do_segmentation_train()<block_end><def_stmt>_create_folders self<block_start>self._create_folder(self.output_root_path)<line_sep>self._create_folder(self.task_output_path)<line_sep>self._create_folder(self.data_task_path)<line_sep>self._create_folder(self.setting_backup)<block_end><def_stmt>_create_folder self path<block_start><if_stmt><not>os.path.exists(path)<block_start>os.makedirs(path)<block_end><else_stmt><block_start>print("Warning, {} exists, press Y/y to override, N/n to stop".format(path))<line_sep>user_inp=input()<if_stmt>user_inp<in>["Y" "y"]<block_start>os.makedirs(path exist_ok=<true>)<block_end><elif_stmt>user_inp<in>["N" "n"]<block_start>exit()<block_end><block_end><block_end><def_stmt>__do_segmentation_train self pipeline=<none><block_start>""" set running env and run the task :param pipeline: a Pipeline object :return: a Pipeline object """<line_sep>self.pipeline=pipeline<line_sep>self.output_root_path=self.args.output_root_path<line_sep>self.task_name=self.args.task_name<line_sep>self.data_task_name=self.args.data_task_name<line_sep>self.setting_folder_path=self.args.setting_folder_path<line_sep>self.data_task_path=os.path.join(self.output_root_path self.data_task_name)<line_sep>self.task_output_path=os.path.join(self.data_task_path self.task_name)<line_sep>os.makedirs(self.task_output_path exist_ok=<true>)<line_sep># the settings are first read from the setting folder, then re-saved into the task output folder self.dm_json_path=os.path.join(self.setting_folder_path 'cur_data_setting.json')<line_sep>self.tsm_json_path=os.path.join(self.setting_folder_path 'cur_task_setting.json')<line_sep>dm,tsm=self.init_train_env()<line_sep>tsm.task_par['tsk_set']['gpu_ids']=self.args.gpu_id<line_sep>self.dm_json_path=os.path.join(self.task_output_path 'cur_data_setting.json')<if>dm<is><not><none><else><none><line_sep>self.tsm_json_path=os.path.join(self.task_output_path 'cur_task_setting.json')<line_sep>tsm.save(self.tsm_json_path)<if_stmt>dm<is><not><none><block_start>dm.save(self.dm_json_path)<block_end>data_loaders=self.pipeline.data_loaders<if>self.pipeline<is><not><none><else><none><line_sep>self.pipeline=run_one_task(self.tsm_json_path self.dm_json_path data_loaders)<line_sep><return>self.pipeline<block_end><def_stmt>init_train_env self<block_start><assert_stmt>os.path.isfile(self.tsm_json_path) "task setting not exists"<line_sep>dm=DataTask('task_reg' self.dm_json_path)<if>os.path.isfile(self.dm_json_path)<else><none><line_sep>tsm=ModelTask('task_reg' self.tsm_json_path)<line_sep>self.data_task_name=self.data_task_name<if>len(self.data_task_name)<g>0<else>'custom'<if_stmt>dm<is><not><none><block_start>dm.data_par['datapro']['dataset']['output_path']=self.output_root_path<line_sep>dm.data_par['datapro']['dataset']['task_name']=self.data_task_name<block_end>tsm.task_par['tsk_set']['task_name']=self.task_name<line_sep>tsm.task_par['tsk_set']['output_root_path']=self.data_task_path<line_sep><return>dm tsm<block_end><def_stmt>save_settings self<block_start>self.setting_folder_path=self.args.setting_folder_path<line_sep>self.dm_json_path=os.path.join(self.setting_folder_path 'cur_data_setting.json')<line_sep>self.tsm_json_path=os.path.join(self.setting_folder_path 'cur_task_setting.json')<line_sep>dm=DataTask('task_reg' self.dm_json_path)<if>os.path.isfile(self.dm_json_path)<else><none><line_sep>tsm=ModelTask('task_reg'
self.tsm_json_path)<line_sep># fall back to task_name when no task_name_record argument is given task_name=getattr(self.args 'task_name_record' self.args.task_name)<line_sep>setting_backup=os.path.join(self.setting_folder_path task_name+'_backup')<line_sep>os.makedirs(setting_backup exist_ok=<true>)<line_sep>dm_backup_json_path=os.path.join(setting_backup 'cur_data_setting.json')<line_sep>tsm_backup_json_path=os.path.join(setting_backup 'cur_task_setting.json')<line_sep>tsm.save(tsm_backup_json_path)<if_stmt>dm<is><not><none><block_start>dm.save(dm_backup_json_path)<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>""" An interface for training segmentation methods. This script will generate the three output folders for the training if they are not found in the given path. It is recommended to use CUDA_VISIBLE_DEVICES to control the data parallelism. A three-level folder structure is assumed: output_root_path/ data_task_folder/ task_folder Arguments: --output_root_path/ -o: the path of output folder --data_task_name/ -dtn: data task name i.e. lung_reg_task , oai_reg_task --task_name / -tn: task name i.e. run_training_vsvf_task, run_training_rdmm_task --setting_folder_path/ -ts: path of the folder where settings are saved, should include cur_task_setting.json --gpu_id/ -g: gpu_id to use """<line_sep>parser=argparse.ArgumentParser(description="An easy interface for training segmentation models")<line_sep>parser.add_argument('-o' '--output_root_path' required=<false> type=str default=<none> help='the path of output folder')<line_sep>parser.add_argument('-dtn' '--data_task_name' required=<false> type=str default='' help='the name of the data related task (like subsampling)')<line_sep>parser.add_argument('-tn' '--task_name' required=<false> type=str default=<none> help='the name of the task')<line_sep>parser.add_argument('-ts' '--setting_folder_path' required=<false> type=str default=<none> help='path of the folder where settings are saved, should include cur_task_setting.json')<line_sep>parser.add_argument('-g' "--gpu_id" required=<false> type=int default=0 help='gpu_id to use')<line_sep>args=parser.parse_args()<line_sep>print(args)<line_sep>trainer=SegmentationTraining(args)<line_sep>trainer.train()<line_sep># do_segmentation_train(args) <block_end>
<import_from_stmt>.models *<import_from_stmt>.module *<import_from_stmt>.utils *<line_sep>