Dataset schema:

| column                 | dtype   | range in dataset   |
|------------------------|---------|--------------------|
| body_hash              | string  | 64 to 64 chars     |
| body                   | string  | 23 to 109k chars   |
| docstring              | string  | 1 to 57k chars     |
| path                   | string  | 4 to 198 chars     |
| name                   | string  | 1 to 115 chars     |
| repository_name        | string  | 7 to 111 chars     |
| repository_stars       | float64 | 0 to 191k          |
| lang                   | string  | 1 distinct value   |
| body_without_docstring | string  | 14 to 108k chars   |
| unified                | string  | 45 to 133k chars   |
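The sample rows below follow this schema, one field per line in column order. As a minimal, hypothetical sketch of how records of this shape could be filtered (the helper name, thresholds, and record literal are illustrative and not part of the dataset; field names and values are taken from the schema and the first row below):

```python
from typing import Dict, Iterable, List

def select_records(
    records: Iterable[Dict],
    min_stars: float = 5.0,
    min_docstring_chars: int = 20,
) -> List[Dict]:
    """Keep Python records with enough repository stars and a non-trivial docstring.

    Field names follow the schema above; the thresholds are illustrative defaults.
    """
    selected = []
    for rec in records:
        if rec.get("lang") != "python":
            continue
        if rec.get("repository_stars", 0.0) < min_stars:
            continue
        if len(rec.get("docstring", "")) < min_docstring_chars:
            continue
        selected.append(rec)
    return selected

# Illustrative usage with a record shaped like the first row below (long fields omitted):
example = {
    "body_hash": "4060a941d79f64585301a11a7598cb1ef0ed5502f44553dbd106fb604455f361",
    "docstring": "Restart all magma services on magma_dev VM",
    "path": "lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py",
    "name": "restart_all_services",
    "repository_name": "marosmars/magma",
    "repository_stars": 2.0,
    "lang": "python",
}
print(len(select_records([example])))  # 0: only 2 stars, below the illustrative threshold
```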
4060a941d79f64585301a11a7598cb1ef0ed5502f44553dbd106fb604455f361
def restart_all_services(self): '\n Restart all magma services on magma_dev VM\n ' self.exec_command('sudo service magma@* stop ; sudo service magma@magmad start') time.sleep(10)
Restart all magma services on magma_dev VM
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
restart_all_services
marosmars/magma
2
python
def restart_all_services(self): '\n \n ' self.exec_command('sudo service magma@* stop ; sudo service magma@magmad start') time.sleep(10)
def restart_all_services(self): '\n \n ' self.exec_command('sudo service magma@* stop ; sudo service magma@magmad start') time.sleep(10)<|docstring|>Restart all magma services on magma_dev VM<|endoftext|>
d60af252264686e4bfcccdb3408cc9368e9754c2c37b8a6255138e9157eea34d
def restart_services(self, services): '\n Restart a list of magmad services.\n\n Args:\n services: List of (str) services names\n\n ' self._magmad_client.restart_services(services)
Restart a list of magmad services. Args: services: List of (str) services names
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
restart_services
marosmars/magma
2
python
def restart_services(self, services): '\n Restart a list of magmad services.\n\n Args:\n services: List of (str) services names\n\n ' self._magmad_client.restart_services(services)
def restart_services(self, services): '\n Restart a list of magmad services.\n\n Args:\n services: List of (str) services names\n\n ' self._magmad_client.restart_services(services)<|docstring|>Restart a list of magmad services. Args: services: List of (str) services names<|endoftext|>
c5f8b0cdb159a29cc75bf4b7e4947743cec151d7eb443d1db2cd3d708acf9451
def __init__(self, mobility_client): '\n Initialize mobility util.\n\n Args:\n mobility_client (mobility_service_client.MobilityServiceClient):\n client interacting with our mobility APIs\n ' self._mobility_client = mobility_client
Initialize mobility util. Args: mobility_client (mobility_service_client.MobilityServiceClient): client interacting with our mobility APIs
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
__init__
marosmars/magma
2
python
def __init__(self, mobility_client): '\n Initialize mobility util.\n\n Args:\n mobility_client (mobility_service_client.MobilityServiceClient):\n client interacting with our mobility APIs\n ' self._mobility_client = mobility_client
def __init__(self, mobility_client): '\n Initialize mobility util.\n\n Args:\n mobility_client (mobility_service_client.MobilityServiceClient):\n client interacting with our mobility APIs\n ' self._mobility_client = mobility_client<|docstring|>Initialize mobility util. Args: mobility_client (mobility_service_client.MobilityServiceClient): client interacting with our mobility APIs<|endoftext|>
e4b790c11e43474c1667426f7308b88ee4118cdeef08fcd4e9b0c8580615f5f1
def add_ip_block(self, ip_block): ' Add an ip block\n\n Args:\n ip_block (str | ipaddress.ip_network): the IP block to add\n ' ip_network_block = ipaddress.ip_network(ip_block) self._mobility_client.add_ip_block(ip_network_block)
Add an ip block Args: ip_block (str | ipaddress.ip_network): the IP block to add
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
add_ip_block
marosmars/magma
2
python
def add_ip_block(self, ip_block): ' Add an ip block\n\n Args:\n ip_block (str | ipaddress.ip_network): the IP block to add\n ' ip_network_block = ipaddress.ip_network(ip_block) self._mobility_client.add_ip_block(ip_network_block)
def add_ip_block(self, ip_block): ' Add an ip block\n\n Args:\n ip_block (str | ipaddress.ip_network): the IP block to add\n ' ip_network_block = ipaddress.ip_network(ip_block) self._mobility_client.add_ip_block(ip_network_block)<|docstring|>Add an ip block Args: ip_block (str | ipaddress.ip_network): the IP block to add<|endoftext|>
813f24dff0fc12d317b3714df9f3695fe8a335a80f79162cf6a0ab44a0f339eb
def remove_all_ip_blocks(self): ' Delete all allocated IP blocks. ' self._mobility_client.remove_all_ip_blocks()
Delete all allocated IP blocks.
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
remove_all_ip_blocks
marosmars/magma
2
python
def remove_all_ip_blocks(self): ' ' self._mobility_client.remove_all_ip_blocks()
def remove_all_ip_blocks(self): ' ' self._mobility_client.remove_all_ip_blocks()<|docstring|>Delete all allocated IP blocks.<|endoftext|>
55897a713bf828b7e3c226e2197329a0e9c520125e59ec2a1dcb629a1eafeaa7
def get_subscriber_table(self): ' Retrieve subscriber table from mobilityd ' table = self._mobility_client.get_subscriber_ip_table() return table
Retrieve subscriber table from mobilityd
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
get_subscriber_table
marosmars/magma
2
python
def get_subscriber_table(self): ' ' table = self._mobility_client.get_subscriber_ip_table() return table
def get_subscriber_table(self): ' ' table = self._mobility_client.get_subscriber_ip_table() return table<|docstring|>Retrieve subscriber table from mobilityd<|endoftext|>
9f1ec7dd5c1595f66a37bc09dd452711be8161843fc330690c24c1af50366ef5
def list_ip_blocks(self): ' List all IP blocks in mobilityd ' blocks = self._mobility_client.list_added_blocks() return blocks
List all IP blocks in mobilityd
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
list_ip_blocks
marosmars/magma
2
python
def list_ip_blocks(self): ' ' blocks = self._mobility_client.list_added_blocks() return blocks
def list_ip_blocks(self): ' ' blocks = self._mobility_client.list_added_blocks() return blocks<|docstring|>List all IP blocks in mobilityd<|endoftext|>
e57cdb5522132db4fdae4b9ac3c2a97536442b0a5c128746a1ac3722274ce996
def remove_ip_blocks(self, blocks): ' Attempt to remove the given blocks from mobilityd\n\n Args:\n blocks (tuple(ip_network)): tuple of ipaddress.ip_network objects\n representing the IP blocks to remove.\n Returns:\n removed_blocks (tuple(ip_network)): tuple of ipaddress.ip_netework\n objects representing the removed IP blocks.\n ' removed_blocks = self._mobility_client.remove_ip_blocks(blocks) return removed_blocks
Attempt to remove the given blocks from mobilityd Args: blocks (tuple(ip_network)): tuple of ipaddress.ip_network objects representing the IP blocks to remove. Returns: removed_blocks (tuple(ip_network)): tuple of ipaddress.ip_netework objects representing the removed IP blocks.
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
remove_ip_blocks
marosmars/magma
2
python
def remove_ip_blocks(self, blocks): ' Attempt to remove the given blocks from mobilityd\n\n Args:\n blocks (tuple(ip_network)): tuple of ipaddress.ip_network objects\n representing the IP blocks to remove.\n Returns:\n removed_blocks (tuple(ip_network)): tuple of ipaddress.ip_netework\n objects representing the removed IP blocks.\n ' removed_blocks = self._mobility_client.remove_ip_blocks(blocks) return removed_blocks
def remove_ip_blocks(self, blocks): ' Attempt to remove the given blocks from mobilityd\n\n Args:\n blocks (tuple(ip_network)): tuple of ipaddress.ip_network objects\n representing the IP blocks to remove.\n Returns:\n removed_blocks (tuple(ip_network)): tuple of ipaddress.ip_netework\n objects representing the removed IP blocks.\n ' removed_blocks = self._mobility_client.remove_ip_blocks(blocks) return removed_blocks<|docstring|>Attempt to remove the given blocks from mobilityd Args: blocks (tuple(ip_network)): tuple of ipaddress.ip_network objects representing the IP blocks to remove. Returns: removed_blocks (tuple(ip_network)): tuple of ipaddress.ip_netework objects representing the removed IP blocks.<|endoftext|>
96990df6f3806ee2d812dd6e073d2f051b072e971565ea48190f7f111607889b
def cleanup(self): ' Cleanup added IP blocks ' blocks = self.list_ip_blocks() self.remove_ip_blocks(blocks)
Cleanup added IP blocks
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
cleanup
marosmars/magma
2
python
def cleanup(self): ' ' blocks = self.list_ip_blocks() self.remove_ip_blocks(blocks)
def cleanup(self): ' ' blocks = self.list_ip_blocks() self.remove_ip_blocks(blocks)<|docstring|>Cleanup added IP blocks<|endoftext|>
d7ad8a2df9c10f72742c74b5bf958d4235a6bfa12732f97f5f332b7ca076439a
def __init__(self): '\n Initialize spgw util.\n ' self._stub = SpgwServiceStub(get_rpc_channel('spgw_service'))
Initialize spgw util.
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
__init__
marosmars/magma
2
python
def __init__(self): '\n \n ' self._stub = SpgwServiceStub(get_rpc_channel('spgw_service'))
def __init__(self): '\n \n ' self._stub = SpgwServiceStub(get_rpc_channel('spgw_service'))<|docstring|>Initialize spgw util.<|endoftext|>
73505e7ba052038ef254e250e83ede30d58b2a94a5e758443fd9555a7da532cf
def create_bearer(self, imsi, lbi, qci_val=1): '\n Sends a CreateBearer Request to SPGW service\n ' print('Sending CreateBearer request to spgw service') req = CreateBearerRequest(sid=SIDUtils.to_pb(imsi), link_bearer_id=lbi, policy_rules=[PolicyRule(qos=FlowQos(qci=qci_val, gbr_ul=10000000, gbr_dl=10000000, max_req_bw_ul=10000000, max_req_bw_dl=10000000, arp=QosArp(priority_level=1, pre_capability=1, pre_vulnerability=0)), flow_list=[FlowDescription(match=FlowMatch(ipv4_dst='0.0.0.0/0', tcp_dst=5001, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42/24', tcp_dst=5002, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5003, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5004, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5005, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.DENY), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42', tcp_src=5001, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='', tcp_src=5002, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.64/26', tcp_src=5003, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42/16', tcp_src=5004, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42', tcp_src=5005, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.DENY)])]) self._stub.CreateBearer(req)
Sends a CreateBearer Request to SPGW service
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
create_bearer
marosmars/magma
2
python
def create_bearer(self, imsi, lbi, qci_val=1): '\n \n ' print('Sending CreateBearer request to spgw service') req = CreateBearerRequest(sid=SIDUtils.to_pb(imsi), link_bearer_id=lbi, policy_rules=[PolicyRule(qos=FlowQos(qci=qci_val, gbr_ul=10000000, gbr_dl=10000000, max_req_bw_ul=10000000, max_req_bw_dl=10000000, arp=QosArp(priority_level=1, pre_capability=1, pre_vulnerability=0)), flow_list=[FlowDescription(match=FlowMatch(ipv4_dst='0.0.0.0/0', tcp_dst=5001, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42/24', tcp_dst=5002, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5003, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5004, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5005, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.DENY), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42', tcp_src=5001, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='', tcp_src=5002, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.64/26', tcp_src=5003, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42/16', tcp_src=5004, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42', tcp_src=5005, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.DENY)])]) self._stub.CreateBearer(req)
def create_bearer(self, imsi, lbi, qci_val=1): '\n \n ' print('Sending CreateBearer request to spgw service') req = CreateBearerRequest(sid=SIDUtils.to_pb(imsi), link_bearer_id=lbi, policy_rules=[PolicyRule(qos=FlowQos(qci=qci_val, gbr_ul=10000000, gbr_dl=10000000, max_req_bw_ul=10000000, max_req_bw_dl=10000000, arp=QosArp(priority_level=1, pre_capability=1, pre_vulnerability=0)), flow_list=[FlowDescription(match=FlowMatch(ipv4_dst='0.0.0.0/0', tcp_dst=5001, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42/24', tcp_dst=5002, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5003, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5004, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_dst='192.168.129.42', tcp_dst=5005, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.UPLINK), action=FlowDescription.DENY), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42', tcp_src=5001, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='', tcp_src=5002, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.64/26', tcp_src=5003, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42/16', tcp_src=5004, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.PERMIT), FlowDescription(match=FlowMatch(ipv4_src='192.168.129.42', tcp_src=5005, ip_proto=FlowMatch.IPPROTO_TCP, direction=FlowMatch.DOWNLINK), action=FlowDescription.DENY)])]) self._stub.CreateBearer(req)<|docstring|>Sends a CreateBearer Request to SPGW service<|endoftext|>
7c959440b9abfd7c1aef305f4e025f4997a3c8957f32b73bc45240ee3e589a72
def delete_bearer(self, imsi, lbi, ebi): '\n Sends a DeleteBearer Request to SPGW service\n ' print('Sending DeleteBearer request to spgw service') req = DeleteBearerRequest(sid=SIDUtils.to_pb(imsi), link_bearer_id=lbi, eps_bearer_ids=[ebi]) self._stub.DeleteBearer(req)
Sends a DeleteBearer Request to SPGW service
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
delete_bearer
marosmars/magma
2
python
def delete_bearer(self, imsi, lbi, ebi): '\n \n ' print('Sending DeleteBearer request to spgw service') req = DeleteBearerRequest(sid=SIDUtils.to_pb(imsi), link_bearer_id=lbi, eps_bearer_ids=[ebi]) self._stub.DeleteBearer(req)
def delete_bearer(self, imsi, lbi, ebi): '\n \n ' print('Sending DeleteBearer request to spgw service') req = DeleteBearerRequest(sid=SIDUtils.to_pb(imsi), link_bearer_id=lbi, eps_bearer_ids=[ebi]) self._stub.DeleteBearer(req)<|docstring|>Sends a DeleteBearer Request to SPGW service<|endoftext|>
cac3b0d20942b2be0642ff219cf284397e4461e8198dfc3610f9d0bd8a4dfa96
def __init__(self): '\n Initialize sessionManager util.\n ' self._session_stub = SessionProxyResponderStub(get_rpc_channel('sessiond')) self._directorydstub = GatewayDirectoryServiceStub(get_rpc_channel('directoryd'))
Initialize sessionManager util.
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
__init__
marosmars/magma
2
python
def __init__(self): '\n \n ' self._session_stub = SessionProxyResponderStub(get_rpc_channel('sessiond')) self._directorydstub = GatewayDirectoryServiceStub(get_rpc_channel('directoryd'))
def __init__(self): '\n \n ' self._session_stub = SessionProxyResponderStub(get_rpc_channel('sessiond')) self._directorydstub = GatewayDirectoryServiceStub(get_rpc_channel('directoryd'))<|docstring|>Initialize sessionManager util.<|endoftext|>
48f5de6989d4e67e65108bf3955ae68fb2dd4a81dd896a6c930d2679f7da86de
def get_flow_match(self, flow_list, flow_match_list): '\n Populates flow match list\n ' for flow in flow_list: flow_direction = (FlowMatch.UPLINK if (flow['direction'] == 'UL') else FlowMatch.DOWNLINK) ip_protocol = flow['ip_proto'] if (ip_protocol == 'TCP'): ip_protocol = FlowMatch.IPPROTO_TCP udp_src_port = 0 udp_dst_port = 0 tcp_src_port = (int(flow['tcp_src_port']) if ('tcp_src_port' in flow) else 0) tcp_dst_port = (int(flow['tcp_dst_port']) if ('tcp_dst_port' in flow) else 0) elif (ip_protocol == 'UDP'): ip_protocol = FlowMatch.IPPROTO_UDP tcp_src_port = 0 tcp_dst_port = 0 udp_src_port = (int(flow['udp_src_port']) if ('udp_src_port' in flow) else 0) udp_dst_port = (int(flow['udp_dst_port']) if ('udp_dst_port' in flow) else 0) else: udp_src_port = 0 udp_dst_port = 0 tcp_src_port = 0 tcp_dst_port = 0 ipv4_src_addr = flow.get('ipv4_src', None) ipv4_dst_addr = flow.get('ipv4_dst', None) flow_match_list.append(FlowDescription(match=FlowMatch(ipv4_dst=ipv4_dst_addr, ipv4_src=ipv4_src_addr, tcp_src=tcp_src_port, tcp_dst=tcp_dst_port, udp_src=udp_src_port, udp_dst=udp_dst_port, ip_proto=ip_protocol, direction=flow_direction), action=FlowDescription.PERMIT))
Populates flow match list
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
get_flow_match
marosmars/magma
2
python
def get_flow_match(self, flow_list, flow_match_list): '\n \n ' for flow in flow_list: flow_direction = (FlowMatch.UPLINK if (flow['direction'] == 'UL') else FlowMatch.DOWNLINK) ip_protocol = flow['ip_proto'] if (ip_protocol == 'TCP'): ip_protocol = FlowMatch.IPPROTO_TCP udp_src_port = 0 udp_dst_port = 0 tcp_src_port = (int(flow['tcp_src_port']) if ('tcp_src_port' in flow) else 0) tcp_dst_port = (int(flow['tcp_dst_port']) if ('tcp_dst_port' in flow) else 0) elif (ip_protocol == 'UDP'): ip_protocol = FlowMatch.IPPROTO_UDP tcp_src_port = 0 tcp_dst_port = 0 udp_src_port = (int(flow['udp_src_port']) if ('udp_src_port' in flow) else 0) udp_dst_port = (int(flow['udp_dst_port']) if ('udp_dst_port' in flow) else 0) else: udp_src_port = 0 udp_dst_port = 0 tcp_src_port = 0 tcp_dst_port = 0 ipv4_src_addr = flow.get('ipv4_src', None) ipv4_dst_addr = flow.get('ipv4_dst', None) flow_match_list.append(FlowDescription(match=FlowMatch(ipv4_dst=ipv4_dst_addr, ipv4_src=ipv4_src_addr, tcp_src=tcp_src_port, tcp_dst=tcp_dst_port, udp_src=udp_src_port, udp_dst=udp_dst_port, ip_proto=ip_protocol, direction=flow_direction), action=FlowDescription.PERMIT))
def get_flow_match(self, flow_list, flow_match_list): '\n \n ' for flow in flow_list: flow_direction = (FlowMatch.UPLINK if (flow['direction'] == 'UL') else FlowMatch.DOWNLINK) ip_protocol = flow['ip_proto'] if (ip_protocol == 'TCP'): ip_protocol = FlowMatch.IPPROTO_TCP udp_src_port = 0 udp_dst_port = 0 tcp_src_port = (int(flow['tcp_src_port']) if ('tcp_src_port' in flow) else 0) tcp_dst_port = (int(flow['tcp_dst_port']) if ('tcp_dst_port' in flow) else 0) elif (ip_protocol == 'UDP'): ip_protocol = FlowMatch.IPPROTO_UDP tcp_src_port = 0 tcp_dst_port = 0 udp_src_port = (int(flow['udp_src_port']) if ('udp_src_port' in flow) else 0) udp_dst_port = (int(flow['udp_dst_port']) if ('udp_dst_port' in flow) else 0) else: udp_src_port = 0 udp_dst_port = 0 tcp_src_port = 0 tcp_dst_port = 0 ipv4_src_addr = flow.get('ipv4_src', None) ipv4_dst_addr = flow.get('ipv4_dst', None) flow_match_list.append(FlowDescription(match=FlowMatch(ipv4_dst=ipv4_dst_addr, ipv4_src=ipv4_src_addr, tcp_src=tcp_src_port, tcp_dst=tcp_dst_port, udp_src=udp_src_port, udp_dst=udp_dst_port, ip_proto=ip_protocol, direction=flow_direction), action=FlowDescription.PERMIT))<|docstring|>Populates flow match list<|endoftext|>
505ea4458e9cab625f0070314348c5ae7be27d46715fe17660e1b9f98418e411
def create_ReAuthRequest(self, imsi, policy_id, flow_list, qos): '\n Sends Policy RAR message to session manager\n ' print('Sending Policy RAR message to session manager') flow_match_list = [] res = None self.get_flow_match(flow_list, flow_match_list) policy_qos = FlowQos(qci=qos['qci'], max_req_bw_ul=qos['max_req_bw_ul'], max_req_bw_dl=qos['max_req_bw_dl'], gbr_ul=qos['gbr_ul'], gbr_dl=qos['gbr_dl'], arp=QosArp(priority_level=qos['arp_prio'], pre_capability=qos['pre_cap'], pre_vulnerability=qos['pre_vul'])) policy_rule = PolicyRule(id=policy_id, priority=qos['priority'], flow_list=flow_match_list, tracking_type=PolicyRule.NO_TRACKING, rating_group=1, monitoring_key=None, qos=policy_qos) qos = QoSInformation(qci=qos['qci']) req = GetDirectoryFieldRequest(id=imsi, field_key='session_id') try: res = self._directorydstub.GetDirectoryField(req, DEFAULT_GRPC_TIMEOUT) except grpc.RpcError as err: logging.error('GetDirectoryFieldRequest error for id: %s! [%s] %s', imsi, err.code(), err.details()) self._session_stub.PolicyReAuth(PolicyReAuthRequest(session_id=res.value, imsi=imsi, rules_to_remove=[], rules_to_install=[], dynamic_rules_to_install=[DynamicRuleInstall(policy_rule=policy_rule)], event_triggers=[], revalidation_time=None, usage_monitoring_credits=[], qos_info=qos))
Sends Policy RAR message to session manager
lte/gateway/python/integ_tests/s1aptests/s1ap_utils.py
create_ReAuthRequest
marosmars/magma
2
python
def create_ReAuthRequest(self, imsi, policy_id, flow_list, qos): '\n \n ' print('Sending Policy RAR message to session manager') flow_match_list = [] res = None self.get_flow_match(flow_list, flow_match_list) policy_qos = FlowQos(qci=qos['qci'], max_req_bw_ul=qos['max_req_bw_ul'], max_req_bw_dl=qos['max_req_bw_dl'], gbr_ul=qos['gbr_ul'], gbr_dl=qos['gbr_dl'], arp=QosArp(priority_level=qos['arp_prio'], pre_capability=qos['pre_cap'], pre_vulnerability=qos['pre_vul'])) policy_rule = PolicyRule(id=policy_id, priority=qos['priority'], flow_list=flow_match_list, tracking_type=PolicyRule.NO_TRACKING, rating_group=1, monitoring_key=None, qos=policy_qos) qos = QoSInformation(qci=qos['qci']) req = GetDirectoryFieldRequest(id=imsi, field_key='session_id') try: res = self._directorydstub.GetDirectoryField(req, DEFAULT_GRPC_TIMEOUT) except grpc.RpcError as err: logging.error('GetDirectoryFieldRequest error for id: %s! [%s] %s', imsi, err.code(), err.details()) self._session_stub.PolicyReAuth(PolicyReAuthRequest(session_id=res.value, imsi=imsi, rules_to_remove=[], rules_to_install=[], dynamic_rules_to_install=[DynamicRuleInstall(policy_rule=policy_rule)], event_triggers=[], revalidation_time=None, usage_monitoring_credits=[], qos_info=qos))
def create_ReAuthRequest(self, imsi, policy_id, flow_list, qos): '\n \n ' print('Sending Policy RAR message to session manager') flow_match_list = [] res = None self.get_flow_match(flow_list, flow_match_list) policy_qos = FlowQos(qci=qos['qci'], max_req_bw_ul=qos['max_req_bw_ul'], max_req_bw_dl=qos['max_req_bw_dl'], gbr_ul=qos['gbr_ul'], gbr_dl=qos['gbr_dl'], arp=QosArp(priority_level=qos['arp_prio'], pre_capability=qos['pre_cap'], pre_vulnerability=qos['pre_vul'])) policy_rule = PolicyRule(id=policy_id, priority=qos['priority'], flow_list=flow_match_list, tracking_type=PolicyRule.NO_TRACKING, rating_group=1, monitoring_key=None, qos=policy_qos) qos = QoSInformation(qci=qos['qci']) req = GetDirectoryFieldRequest(id=imsi, field_key='session_id') try: res = self._directorydstub.GetDirectoryField(req, DEFAULT_GRPC_TIMEOUT) except grpc.RpcError as err: logging.error('GetDirectoryFieldRequest error for id: %s! [%s] %s', imsi, err.code(), err.details()) self._session_stub.PolicyReAuth(PolicyReAuthRequest(session_id=res.value, imsi=imsi, rules_to_remove=[], rules_to_install=[], dynamic_rules_to_install=[DynamicRuleInstall(policy_rule=policy_rule)], event_triggers=[], revalidation_time=None, usage_monitoring_credits=[], qos_info=qos))<|docstring|>Sends Policy RAR message to session manager<|endoftext|>
4f3452e103ac90a8335a216ac883f2c7687f9269ad3b5cf1eb89c001d4ce5697
def __init__(self, env, noop_max=30): 'Sample initial states by taking random number of no-ops on reset.\n No-op is assumed to be action 0.\n ' gym.Wrapper.__init__(self, env) self.noop_max = noop_max self.override_num_noops = None self.noop_action = 0 assert (env.unwrapped.get_action_meanings()[0] == 'NOOP')
Sample initial states by taking random number of no-ops on reset. No-op is assumed to be action 0.
reward/utils/wrapper/gym/atari_wrappers.py
__init__
lgvaz/torchrl
5
python
def __init__(self, env, noop_max=30): 'Sample initial states by taking random number of no-ops on reset.\n No-op is assumed to be action 0.\n ' gym.Wrapper.__init__(self, env) self.noop_max = noop_max self.override_num_noops = None self.noop_action = 0 assert (env.unwrapped.get_action_meanings()[0] == 'NOOP')
def __init__(self, env, noop_max=30): 'Sample initial states by taking random number of no-ops on reset.\n No-op is assumed to be action 0.\n ' gym.Wrapper.__init__(self, env) self.noop_max = noop_max self.override_num_noops = None self.noop_action = 0 assert (env.unwrapped.get_action_meanings()[0] == 'NOOP')<|docstring|>Sample initial states by taking random number of no-ops on reset. No-op is assumed to be action 0.<|endoftext|>
c0bbd486931d17d929dc86a4b6d3f726e96f5f4308b97d937a7ee6ae27ec8533
def reset(self, **kwargs): ' Do no-op action for a number of steps in [1, noop_max].' self.env.reset(**kwargs) if (self.override_num_noops is not None): noops = self.override_num_noops else: noops = self.unwrapped.np_random.randint(1, (self.noop_max + 1)) assert (noops > 0) obs = None for _ in range(noops): (obs, _, done, _) = self.env.step(self.noop_action) if done: obs = self.env.reset(**kwargs) return obs
Do no-op action for a number of steps in [1, noop_max].
reward/utils/wrapper/gym/atari_wrappers.py
reset
lgvaz/torchrl
5
python
def reset(self, **kwargs): ' ' self.env.reset(**kwargs) if (self.override_num_noops is not None): noops = self.override_num_noops else: noops = self.unwrapped.np_random.randint(1, (self.noop_max + 1)) assert (noops > 0) obs = None for _ in range(noops): (obs, _, done, _) = self.env.step(self.noop_action) if done: obs = self.env.reset(**kwargs) return obs
def reset(self, **kwargs): ' ' self.env.reset(**kwargs) if (self.override_num_noops is not None): noops = self.override_num_noops else: noops = self.unwrapped.np_random.randint(1, (self.noop_max + 1)) assert (noops > 0) obs = None for _ in range(noops): (obs, _, done, _) = self.env.step(self.noop_action) if done: obs = self.env.reset(**kwargs) return obs<|docstring|>Do no-op action for a number of steps in [1, noop_max].<|endoftext|>
56d0be54500241e0b54dabb5c19919baf097f50408fcc735d4c079b790f18e5b
def __init__(self, env): 'Take action on reset for environments that are fixed until firing.' gym.Wrapper.__init__(self, env) assert (env.unwrapped.get_action_meanings()[1] == 'FIRE') assert (len(env.unwrapped.get_action_meanings()) >= 3)
Take action on reset for environments that are fixed until firing.
reward/utils/wrapper/gym/atari_wrappers.py
__init__
lgvaz/torchrl
5
python
def __init__(self, env): gym.Wrapper.__init__(self, env) assert (env.unwrapped.get_action_meanings()[1] == 'FIRE') assert (len(env.unwrapped.get_action_meanings()) >= 3)
def __init__(self, env): gym.Wrapper.__init__(self, env) assert (env.unwrapped.get_action_meanings()[1] == 'FIRE') assert (len(env.unwrapped.get_action_meanings()) >= 3)<|docstring|>Take action on reset for environments that are fixed until firing.<|endoftext|>
948958a8908ae8a7d19992f33ba825f5a1c9080d0e88e3a395e866de125f0f10
def __init__(self, env): 'Make end-of-life == end-of-episode, but only reset on true game over.\n Done by DeepMind for the DQN and co. since it helps value estimation.\n ' gym.Wrapper.__init__(self, env) self.lives = 0 self.was_real_done = True
Make end-of-life == end-of-episode, but only reset on true game over. Done by DeepMind for the DQN and co. since it helps value estimation.
reward/utils/wrapper/gym/atari_wrappers.py
__init__
lgvaz/torchrl
5
python
def __init__(self, env): 'Make end-of-life == end-of-episode, but only reset on true game over.\n Done by DeepMind for the DQN and co. since it helps value estimation.\n ' gym.Wrapper.__init__(self, env) self.lives = 0 self.was_real_done = True
def __init__(self, env): 'Make end-of-life == end-of-episode, but only reset on true game over.\n Done by DeepMind for the DQN and co. since it helps value estimation.\n ' gym.Wrapper.__init__(self, env) self.lives = 0 self.was_real_done = True<|docstring|>Make end-of-life == end-of-episode, but only reset on true game over. Done by DeepMind for the DQN and co. since it helps value estimation.<|endoftext|>
acd3d4c5542291608b53c9e17ceb0573bca23e15c0eeed3292a4f011b75a2a5c
def reset(self, **kwargs): 'Reset only when lives are exhausted.\n This way all states are still reachable even though lives are episodic,\n and the learner need not know about any of this behind-the-scenes.\n ' if self.was_real_done: obs = self.env.reset(**kwargs) else: (obs, _, _, _) = self.env.step(0) self.lives = self.env.unwrapped.ale.lives() return obs
Reset only when lives are exhausted. This way all states are still reachable even though lives are episodic, and the learner need not know about any of this behind-the-scenes.
reward/utils/wrapper/gym/atari_wrappers.py
reset
lgvaz/torchrl
5
python
def reset(self, **kwargs): 'Reset only when lives are exhausted.\n This way all states are still reachable even though lives are episodic,\n and the learner need not know about any of this behind-the-scenes.\n ' if self.was_real_done: obs = self.env.reset(**kwargs) else: (obs, _, _, _) = self.env.step(0) self.lives = self.env.unwrapped.ale.lives() return obs
def reset(self, **kwargs): 'Reset only when lives are exhausted.\n This way all states are still reachable even though lives are episodic,\n and the learner need not know about any of this behind-the-scenes.\n ' if self.was_real_done: obs = self.env.reset(**kwargs) else: (obs, _, _, _) = self.env.step(0) self.lives = self.env.unwrapped.ale.lives() return obs<|docstring|>Reset only when lives are exhausted. This way all states are still reachable even though lives are episodic, and the learner need not know about any of this behind-the-scenes.<|endoftext|>
912288f283c9a31361c105444a6d148cac7b3f707a67778dd380bf00affc5f4d
def __init__(self, env, skip=4): 'Return only every `skip`-th frame' gym.Wrapper.__init__(self, env) self._obs_buffer = np.zeros(((2,) + env.observation_space.shape), dtype=np.uint8) self._skip = skip
Return only every `skip`-th frame
reward/utils/wrapper/gym/atari_wrappers.py
__init__
lgvaz/torchrl
5
python
def __init__(self, env, skip=4): gym.Wrapper.__init__(self, env) self._obs_buffer = np.zeros(((2,) + env.observation_space.shape), dtype=np.uint8) self._skip = skip
def __init__(self, env, skip=4): gym.Wrapper.__init__(self, env) self._obs_buffer = np.zeros(((2,) + env.observation_space.shape), dtype=np.uint8) self._skip = skip<|docstring|>Return only every `skip`-th frame<|endoftext|>
1a35b438664f869bf4a4c16fa213bf9fc7f3fe09dc72a523f77bf05f7466e5af
def step(self, action): 'Repeat action, sum reward, and max over last observations.' total_reward = 0.0 done = None for i in range(self._skip): (obs, reward, done, info) = self.env.step(action) if (i == (self._skip - 2)): self._obs_buffer[0] = obs if (i == (self._skip - 1)): self._obs_buffer[1] = obs total_reward += reward if done: break max_frame = self._obs_buffer.max(axis=0) return (max_frame, total_reward, done, info)
Repeat action, sum reward, and max over last observations.
reward/utils/wrapper/gym/atari_wrappers.py
step
lgvaz/torchrl
5
python
def step(self, action): total_reward = 0.0 done = None for i in range(self._skip): (obs, reward, done, info) = self.env.step(action) if (i == (self._skip - 2)): self._obs_buffer[0] = obs if (i == (self._skip - 1)): self._obs_buffer[1] = obs total_reward += reward if done: break max_frame = self._obs_buffer.max(axis=0) return (max_frame, total_reward, done, info)
def step(self, action): total_reward = 0.0 done = None for i in range(self._skip): (obs, reward, done, info) = self.env.step(action) if (i == (self._skip - 2)): self._obs_buffer[0] = obs if (i == (self._skip - 1)): self._obs_buffer[1] = obs total_reward += reward if done: break max_frame = self._obs_buffer.max(axis=0) return (max_frame, total_reward, done, info)<|docstring|>Repeat action, sum reward, and max over last observations.<|endoftext|>
17ac5fb9599f5d83a2a66250a3367ab66fc5d4cf48de6ede4225a04e7091c603
def reward(self, reward): 'Bin reward to {+1, 0, -1} by its sign.' return np.sign(reward)
Bin reward to {+1, 0, -1} by its sign.
reward/utils/wrapper/gym/atari_wrappers.py
reward
lgvaz/torchrl
5
python
def reward(self, reward): return np.sign(reward)
def reward(self, reward): return np.sign(reward)<|docstring|>Bin reward to {+1, 0, -1} by its sign.<|endoftext|>
37fb6aacab3acb21a34b1f715a1d23a5a27c97a266ebd0e20fbdab047acf38ba
def SupportedFiletypes(self): ' Just csharp ' return ['cs']
Just csharp
vim/patches/YouCompleteMe_linux/cs_completer.py
SupportedFiletypes
yjpark/dotfiles
7
python
def SupportedFiletypes(self): ' ' return ['cs']
def SupportedFiletypes(self): ' ' return ['cs']<|docstring|>Just csharp<|endoftext|>
2736c731d20a27f586bed683b8e32d0ee82eabb6ae00b9f475044a090159acf0
def _StartServer(self, request_data): ' Start the OmniSharp server ' self._logger.info('startup') path_to_solutionfile = solutiondetection.FindSolutionPath(request_data['filepath']) if (not path_to_solutionfile): raise RuntimeError('Autodetection of solution file failed.\n') self._logger.info('Loading solution file {0}'.format(path_to_solutionfile)) self._omnisharp_port = utils.GetUnusedLocalhostPort() command = ' '.join([PATH_TO_OMNISHARP_BINARY, '-p', str(self._omnisharp_port), '-s', path_to_solutionfile]) if ((not utils.OnWindows()) and (not utils.OnCygwin())): command = ('/opt/monodevelop/bin/mono ' + command) if utils.OnCygwin(): command = (command + ' --client-path-mode Cygwin') filename_format = os.path.join(utils.PathToTempDir(), 'omnisharp_{port}_{sln}_{std}.log') solutionfile = os.path.basename(path_to_solutionfile) self._filename_stdout = filename_format.format(port=self._omnisharp_port, sln=solutionfile, std='stdout') self._filename_stderr = filename_format.format(port=self._omnisharp_port, sln=solutionfile, std='stderr') with open(self._filename_stderr, 'w') as fstderr: with open(self._filename_stdout, 'w') as fstdout: utils.SafePopen(command, stdout=fstdout, stderr=fstderr, shell=True) self._solution_path = path_to_solutionfile self._logger.info('Starting OmniSharp server')
Start the OmniSharp server
vim/patches/YouCompleteMe_linux/cs_completer.py
_StartServer
yjpark/dotfiles
7
python
def _StartServer(self, request_data): ' ' self._logger.info('startup') path_to_solutionfile = solutiondetection.FindSolutionPath(request_data['filepath']) if (not path_to_solutionfile): raise RuntimeError('Autodetection of solution file failed.\n') self._logger.info('Loading solution file {0}'.format(path_to_solutionfile)) self._omnisharp_port = utils.GetUnusedLocalhostPort() command = ' '.join([PATH_TO_OMNISHARP_BINARY, '-p', str(self._omnisharp_port), '-s', path_to_solutionfile]) if ((not utils.OnWindows()) and (not utils.OnCygwin())): command = ('/opt/monodevelop/bin/mono ' + command) if utils.OnCygwin(): command = (command + ' --client-path-mode Cygwin') filename_format = os.path.join(utils.PathToTempDir(), 'omnisharp_{port}_{sln}_{std}.log') solutionfile = os.path.basename(path_to_solutionfile) self._filename_stdout = filename_format.format(port=self._omnisharp_port, sln=solutionfile, std='stdout') self._filename_stderr = filename_format.format(port=self._omnisharp_port, sln=solutionfile, std='stderr') with open(self._filename_stderr, 'w') as fstderr: with open(self._filename_stdout, 'w') as fstdout: utils.SafePopen(command, stdout=fstdout, stderr=fstderr, shell=True) self._solution_path = path_to_solutionfile self._logger.info('Starting OmniSharp server')
def _StartServer(self, request_data): ' ' self._logger.info('startup') path_to_solutionfile = solutiondetection.FindSolutionPath(request_data['filepath']) if (not path_to_solutionfile): raise RuntimeError('Autodetection of solution file failed.\n') self._logger.info('Loading solution file {0}'.format(path_to_solutionfile)) self._omnisharp_port = utils.GetUnusedLocalhostPort() command = ' '.join([PATH_TO_OMNISHARP_BINARY, '-p', str(self._omnisharp_port), '-s', path_to_solutionfile]) if ((not utils.OnWindows()) and (not utils.OnCygwin())): command = ('/opt/monodevelop/bin/mono ' + command) if utils.OnCygwin(): command = (command + ' --client-path-mode Cygwin') filename_format = os.path.join(utils.PathToTempDir(), 'omnisharp_{port}_{sln}_{std}.log') solutionfile = os.path.basename(path_to_solutionfile) self._filename_stdout = filename_format.format(port=self._omnisharp_port, sln=solutionfile, std='stdout') self._filename_stderr = filename_format.format(port=self._omnisharp_port, sln=solutionfile, std='stderr') with open(self._filename_stderr, 'w') as fstderr: with open(self._filename_stdout, 'w') as fstdout: utils.SafePopen(command, stdout=fstdout, stderr=fstderr, shell=True) self._solution_path = path_to_solutionfile self._logger.info('Starting OmniSharp server')<|docstring|>Start the OmniSharp server<|endoftext|>
b26970ed3be0bae65052b0aeeec97adea4729a3ca86a55b883cd2a248eeb6f14
def _StopServer(self): ' Stop the OmniSharp server ' self._GetResponse('/stopserver') self._omnisharp_port = None self._logger.info('Stopping OmniSharp server')
Stop the OmniSharp server
vim/patches/YouCompleteMe_linux/cs_completer.py
_StopServer
yjpark/dotfiles
7
python
def _StopServer(self): ' ' self._GetResponse('/stopserver') self._omnisharp_port = None self._logger.info('Stopping OmniSharp server')
def _StopServer(self): ' ' self._GetResponse('/stopserver') self._omnisharp_port = None self._logger.info('Stopping OmniSharp server')<|docstring|>Stop the OmniSharp server<|endoftext|>
027113869539578b5c1b7273d5b9a317f7ef2314c4871652256fe619b5d99fd4
def _RestartServer(self, request_data): ' Restarts the OmniSharp server ' if self.ServerIsRunning(): self._StopServer() return self._StartServer(request_data)
Restarts the OmniSharp server
vim/patches/YouCompleteMe_linux/cs_completer.py
_RestartServer
yjpark/dotfiles
7
python
def _RestartServer(self, request_data): ' ' if self.ServerIsRunning(): self._StopServer() return self._StartServer(request_data)
def _RestartServer(self, request_data): ' ' if self.ServerIsRunning(): self._StopServer() return self._StartServer(request_data)<|docstring|>Restarts the OmniSharp server<|endoftext|>
3efee70e70102a49aa5cdb39e9e439682cac43471b0716359459ada155014b79
def _ReloadSolution(self): ' Reloads the solutions in the OmniSharp server ' self._logger.info('Reloading Solution in OmniSharp server') return self._GetResponse('/reloadsolution')
Reloads the solutions in the OmniSharp server
vim/patches/YouCompleteMe_linux/cs_completer.py
_ReloadSolution
yjpark/dotfiles
7
python
def _ReloadSolution(self): ' ' self._logger.info('Reloading Solution in OmniSharp server') return self._GetResponse('/reloadsolution')
def _ReloadSolution(self): ' ' self._logger.info('Reloading Solution in OmniSharp server') return self._GetResponse('/reloadsolution')<|docstring|>Reloads the solutions in the OmniSharp server<|endoftext|>
61cfcfcb769b91a581ca7d751c921e5f15fdce922ee67dd43a2e056bba5f7c77
def _GetCompletions(self, request_data): ' Ask server for completions ' completions = self._GetResponse('/autocomplete', self._DefaultParameters(request_data)) return (completions if (completions != None) else [])
Ask server for completions
vim/patches/YouCompleteMe_linux/cs_completer.py
_GetCompletions
yjpark/dotfiles
7
python
def _GetCompletions(self, request_data): ' ' completions = self._GetResponse('/autocomplete', self._DefaultParameters(request_data)) return (completions if (completions != None) else [])
def _GetCompletions(self, request_data): ' ' completions = self._GetResponse('/autocomplete', self._DefaultParameters(request_data)) return (completions if (completions != None) else [])<|docstring|>Ask server for completions<|endoftext|>
39f419969b6847c9e63cb93eeaa9c8d2be7534dc1aa0a3f2095b5b76ddbfdbad
def _GoToDefinition(self, request_data): ' Jump to definition of identifier under cursor ' definition = self._GetResponse('/gotodefinition', self._DefaultParameters(request_data)) if (definition['FileName'] != None): return responses.BuildGoToResponse(definition['FileName'], definition['Line'], definition['Column']) else: raise RuntimeError("Can't jump to definition")
Jump to definition of identifier under cursor
vim/patches/YouCompleteMe_linux/cs_completer.py
_GoToDefinition
yjpark/dotfiles
7
python
def _GoToDefinition(self, request_data): ' ' definition = self._GetResponse('/gotodefinition', self._DefaultParameters(request_data)) if (definition['FileName'] != None): return responses.BuildGoToResponse(definition['FileName'], definition['Line'], definition['Column']) else: raise RuntimeError("Can't jump to definition")
def _GoToDefinition(self, request_data): ' ' definition = self._GetResponse('/gotodefinition', self._DefaultParameters(request_data)) if (definition['FileName'] != None): return responses.BuildGoToResponse(definition['FileName'], definition['Line'], definition['Column']) else: raise RuntimeError("Can't jump to definition")<|docstring|>Jump to definition of identifier under cursor<|endoftext|>
a316b508a488ab12e58f9bec902847cedfa3c552732f95dca9f07dabb1a28bf2
def _GoToImplementation(self, request_data, fallback_to_declaration): ' Jump to implementation of identifier under cursor ' implementation = self._GetResponse('/findimplementations', self._DefaultParameters(request_data)) if implementation['QuickFixes']: if (len(implementation['QuickFixes']) == 1): return responses.BuildGoToResponse(implementation['QuickFixes'][0]['FileName'], implementation['QuickFixes'][0]['Line'], implementation['QuickFixes'][0]['Column']) else: return [responses.BuildGoToResponse(x['FileName'], x['Line'], x['Column']) for x in implementation['QuickFixes']] elif fallback_to_declaration: return self._GoToDefinition(request_data) elif (implementation['QuickFixes'] == None): raise RuntimeError("Can't jump to implementation") else: raise RuntimeError('No implementations found')
Jump to implementation of identifier under cursor
vim/patches/YouCompleteMe_linux/cs_completer.py
_GoToImplementation
yjpark/dotfiles
7
python
def _GoToImplementation(self, request_data, fallback_to_declaration): ' ' implementation = self._GetResponse('/findimplementations', self._DefaultParameters(request_data)) if implementation['QuickFixes']: if (len(implementation['QuickFixes']) == 1): return responses.BuildGoToResponse(implementation['QuickFixes'][0]['FileName'], implementation['QuickFixes'][0]['Line'], implementation['QuickFixes'][0]['Column']) else: return [responses.BuildGoToResponse(x['FileName'], x['Line'], x['Column']) for x in implementation['QuickFixes']] elif fallback_to_declaration: return self._GoToDefinition(request_data) elif (implementation['QuickFixes'] == None): raise RuntimeError("Can't jump to implementation") else: raise RuntimeError('No implementations found')
def _GoToImplementation(self, request_data, fallback_to_declaration): ' ' implementation = self._GetResponse('/findimplementations', self._DefaultParameters(request_data)) if implementation['QuickFixes']: if (len(implementation['QuickFixes']) == 1): return responses.BuildGoToResponse(implementation['QuickFixes'][0]['FileName'], implementation['QuickFixes'][0]['Line'], implementation['QuickFixes'][0]['Column']) else: return [responses.BuildGoToResponse(x['FileName'], x['Line'], x['Column']) for x in implementation['QuickFixes']] elif fallback_to_declaration: return self._GoToDefinition(request_data) elif (implementation['QuickFixes'] == None): raise RuntimeError("Can't jump to implementation") else: raise RuntimeError('No implementations found')<|docstring|>Jump to implementation of identifier under cursor<|endoftext|>
5f6857e203ffed083328db370c3c73b80976f3daba4f7161a19d5854bc1210a2
def _DefaultParameters(self, request_data): ' Some very common request parameters ' parameters = {} parameters['line'] = request_data['line_num'] parameters['column'] = request_data['column_num'] filepath = request_data['filepath'] parameters['buffer'] = request_data['file_data'][filepath]['contents'] parameters['filename'] = filepath return parameters
Some very common request parameters
vim/patches/YouCompleteMe_linux/cs_completer.py
_DefaultParameters
yjpark/dotfiles
7
python
def _DefaultParameters(self, request_data): ' ' parameters = {} parameters['line'] = request_data['line_num'] parameters['column'] = request_data['column_num'] filepath = request_data['filepath'] parameters['buffer'] = request_data['file_data'][filepath]['contents'] parameters['filename'] = filepath return parameters
def _DefaultParameters(self, request_data): ' ' parameters = {} parameters['line'] = request_data['line_num'] parameters['column'] = request_data['column_num'] filepath = request_data['filepath'] parameters['buffer'] = request_data['file_data'][filepath]['contents'] parameters['filename'] = filepath return parameters<|docstring|>Some very common request parameters<|endoftext|>
ca75358378bba2178e2853bbff80753eb51efb332489921832a4445fa39edd42
def ServerIsRunning(self): ' Check if our OmniSharp server is running (up and serving).' try: return bool((self._omnisharp_port and self._GetResponse('/checkalivestatus', silent=True))) except: return False
Check if our OmniSharp server is running (up and serving).
vim/patches/YouCompleteMe_linux/cs_completer.py
ServerIsRunning
yjpark/dotfiles
7
python
def ServerIsRunning(self): ' ' try: return bool((self._omnisharp_port and self._GetResponse('/checkalivestatus', silent=True))) except: return False
def ServerIsRunning(self): ' ' try: return bool((self._omnisharp_port and self._GetResponse('/checkalivestatus', silent=True))) except: return False<|docstring|>Check if our OmniSharp server is running (up and serving).<|endoftext|>
54520ea7b9be22cd1f4d34e7ed5f35db07f9ff6689c4f757d626f2515a7378d4
def ServerIsReady(self): ' Check if our OmniSharp server is ready (loaded solution file).' try: return bool((self._omnisharp_port and self._GetResponse('/checkreadystatus', silent=True))) except: return False
Check if our OmniSharp server is ready (loaded solution file).
vim/patches/YouCompleteMe_linux/cs_completer.py
ServerIsReady
yjpark/dotfiles
7
python
def ServerIsReady(self): ' ' try: return bool((self._omnisharp_port and self._GetResponse('/checkreadystatus', silent=True))) except: return False
def ServerIsReady(self): ' ' try: return bool((self._omnisharp_port and self._GetResponse('/checkreadystatus', silent=True))) except: return False<|docstring|>Check if our OmniSharp server is ready (loaded solution file).<|endoftext|>
e387831b2f19458f92edcd7905b52a089abc7137ebe43db8f41eba2025b73814
def _SolutionFile(self): ' Find out which solution file server was started with ' return self._solution_path
Find out which solution file server was started with
vim/patches/YouCompleteMe_linux/cs_completer.py
_SolutionFile
yjpark/dotfiles
7
python
def _SolutionFile(self): ' ' return self._solution_path
def _SolutionFile(self): ' ' return self._solution_path<|docstring|>Find out which solution file server was started with<|endoftext|>
b1b67b370f0a74e27c169cdca3252dcdf4b374c2c3ac845316ad31fedd23ea33
def _GetResponse(self, handler, parameters={}, silent=False): ' Handle communication with server ' target = urlparse.urljoin(self._ServerLocation(), handler) parameters = urllib.urlencode(parameters) response = urllib2.urlopen(target, parameters) return json.loads(response.read())
Handle communication with server
vim/patches/YouCompleteMe_linux/cs_completer.py
_GetResponse
yjpark/dotfiles
7
python
def _GetResponse(self, handler, parameters={}, silent=False): ' ' target = urlparse.urljoin(self._ServerLocation(), handler) parameters = urllib.urlencode(parameters) response = urllib2.urlopen(target, parameters) return json.loads(response.read())
def _GetResponse(self, handler, parameters={}, silent=False): ' ' target = urlparse.urljoin(self._ServerLocation(), handler) parameters = urllib.urlencode(parameters) response = urllib2.urlopen(target, parameters) return json.loads(response.read())<|docstring|>Handle communication with server<|endoftext|>
546f888001f80a487a9b7edf548a9960573d5c6a9a599ea7b8403c0575d25124
def __init__(self, action_space: gym.spaces.Discrete, observation_space: gym.spaces.Dict, rgb_uuid: str, subtask_uuid: str, rel_position_change_uuid: str, ordered_object_types: Sequence[str], hidden_size=512, prev_action_embedding_dim: int=32, task_type_embedding_dim: int=32, object_type_embedding_dim: int=128, object_visible_embedding_dim: int=16, position_dim: int=32, num_rnn_layers=1, rnn_type='GRU'): '\n # Parameters\n action_space : The action space of the agent.\n Should equal `gym.spaces.Discrete(# actions available to the agent)`.\n observation_space : The observation space available to the agent.\n rgb_uuid : The unique id of the RGB image sensor (see `RGBSensor`).\n unshuffled_rgb_uuid : The unique id of the `UnshuffledRGBRearrangeSensor` available to the agent.\n hidden_size : The size of the hidden layer of the RNN.\n num_rnn_layers: The number of hidden layers in the RNN.\n rnn_type : The RNN type, should be "GRU" or "LSTM".\n ' super().__init__(action_space=action_space, observation_space=observation_space) self._hidden_size = hidden_size self.rgb_uuid = rgb_uuid self.subtask_uuid = subtask_uuid self.rel_position_change_uuid = rel_position_change_uuid self.prev_action_embedder = nn.Embedding((action_space.n + 1), embedding_dim=prev_action_embedding_dim) self.task_type_embedder = nn.Embedding(num_embeddings=8, embedding_dim=task_type_embedding_dim) self.object_type_embedder = nn.Embedding(num_embeddings=(len(ordered_object_types) + 1), embedding_dim=object_type_embedding_dim) self.object_visible_embedder = nn.Embedding(num_embeddings=3, embedding_dim=object_visible_embedding_dim) self.position_encoder = nn.Linear(11, position_dim) self.visual_encoder = self._create_visual_encoder() self.state_encoder = RNNStateEncoder((((((prev_action_embedding_dim + task_type_embedding_dim) + (object_type_embedding_dim * 2)) + (object_visible_embedding_dim * 2)) + position_dim) + self.recurrent_hidden_state_size), self._hidden_size, num_layers=num_rnn_layers, rnn_type=rnn_type) self.actor = LinearActorHead(self._hidden_size, action_space.n) self.critic = LinearCriticHead(self._hidden_size) self.train()
# Parameters action_space : The action space of the agent. Should equal `gym.spaces.Discrete(# actions available to the agent)`. observation_space : The observation space available to the agent. rgb_uuid : The unique id of the RGB image sensor (see `RGBSensor`). unshuffled_rgb_uuid : The unique id of the `UnshuffledRGBRearrangeSensor` available to the agent. hidden_size : The size of the hidden layer of the RNN. num_rnn_layers: The number of hidden layers in the RNN. rnn_type : The RNN type, should be "GRU" or "LSTM".
env/baseline_models.py
__init__
SAMMiCA/robot_home_service
0
python
def __init__(self, action_space: gym.spaces.Discrete, observation_space: gym.spaces.Dict, rgb_uuid: str, subtask_uuid: str, rel_position_change_uuid: str, ordered_object_types: Sequence[str], hidden_size=512, prev_action_embedding_dim: int=32, task_type_embedding_dim: int=32, object_type_embedding_dim: int=128, object_visible_embedding_dim: int=16, position_dim: int=32, num_rnn_layers=1, rnn_type='GRU'): '\n # Parameters\n action_space : The action space of the agent.\n Should equal `gym.spaces.Discrete(# actions available to the agent)`.\n observation_space : The observation space available to the agent.\n rgb_uuid : The unique id of the RGB image sensor (see `RGBSensor`).\n unshuffled_rgb_uuid : The unique id of the `UnshuffledRGBRearrangeSensor` available to the agent.\n hidden_size : The size of the hidden layer of the RNN.\n num_rnn_layers: The number of hidden layers in the RNN.\n rnn_type : The RNN type, should be "GRU" or "LSTM".\n ' super().__init__(action_space=action_space, observation_space=observation_space) self._hidden_size = hidden_size self.rgb_uuid = rgb_uuid self.subtask_uuid = subtask_uuid self.rel_position_change_uuid = rel_position_change_uuid self.prev_action_embedder = nn.Embedding((action_space.n + 1), embedding_dim=prev_action_embedding_dim) self.task_type_embedder = nn.Embedding(num_embeddings=8, embedding_dim=task_type_embedding_dim) self.object_type_embedder = nn.Embedding(num_embeddings=(len(ordered_object_types) + 1), embedding_dim=object_type_embedding_dim) self.object_visible_embedder = nn.Embedding(num_embeddings=3, embedding_dim=object_visible_embedding_dim) self.position_encoder = nn.Linear(11, position_dim) self.visual_encoder = self._create_visual_encoder() self.state_encoder = RNNStateEncoder((((((prev_action_embedding_dim + task_type_embedding_dim) + (object_type_embedding_dim * 2)) + (object_visible_embedding_dim * 2)) + position_dim) + self.recurrent_hidden_state_size), self._hidden_size, num_layers=num_rnn_layers, rnn_type=rnn_type) self.actor = LinearActorHead(self._hidden_size, action_space.n) self.critic = LinearCriticHead(self._hidden_size) self.train()
def __init__(self, action_space: gym.spaces.Discrete, observation_space: gym.spaces.Dict, rgb_uuid: str, subtask_uuid: str, rel_position_change_uuid: str, ordered_object_types: Sequence[str], hidden_size=512, prev_action_embedding_dim: int=32, task_type_embedding_dim: int=32, object_type_embedding_dim: int=128, object_visible_embedding_dim: int=16, position_dim: int=32, num_rnn_layers=1, rnn_type='GRU'): '\n # Parameters\n action_space : The action space of the agent.\n Should equal `gym.spaces.Discrete(# actions available to the agent)`.\n observation_space : The observation space available to the agent.\n rgb_uuid : The unique id of the RGB image sensor (see `RGBSensor`).\n unshuffled_rgb_uuid : The unique id of the `UnshuffledRGBRearrangeSensor` available to the agent.\n hidden_size : The size of the hidden layer of the RNN.\n num_rnn_layers: The number of hidden layers in the RNN.\n rnn_type : The RNN type, should be "GRU" or "LSTM".\n ' super().__init__(action_space=action_space, observation_space=observation_space) self._hidden_size = hidden_size self.rgb_uuid = rgb_uuid self.subtask_uuid = subtask_uuid self.rel_position_change_uuid = rel_position_change_uuid self.prev_action_embedder = nn.Embedding((action_space.n + 1), embedding_dim=prev_action_embedding_dim) self.task_type_embedder = nn.Embedding(num_embeddings=8, embedding_dim=task_type_embedding_dim) self.object_type_embedder = nn.Embedding(num_embeddings=(len(ordered_object_types) + 1), embedding_dim=object_type_embedding_dim) self.object_visible_embedder = nn.Embedding(num_embeddings=3, embedding_dim=object_visible_embedding_dim) self.position_encoder = nn.Linear(11, position_dim) self.visual_encoder = self._create_visual_encoder() self.state_encoder = RNNStateEncoder((((((prev_action_embedding_dim + task_type_embedding_dim) + (object_type_embedding_dim * 2)) + (object_visible_embedding_dim * 2)) + position_dim) + self.recurrent_hidden_state_size), self._hidden_size, num_layers=num_rnn_layers, rnn_type=rnn_type) self.actor = LinearActorHead(self._hidden_size, action_space.n) self.critic = LinearCriticHead(self._hidden_size) self.train()<|docstring|># Parameters action_space : The action space of the agent. Should equal `gym.spaces.Discrete(# actions available to the agent)`. observation_space : The observation space available to the agent. rgb_uuid : The unique id of the RGB image sensor (see `RGBSensor`). unshuffled_rgb_uuid : The unique id of the `UnshuffledRGBRearrangeSensor` available to the agent. hidden_size : The size of the hidden layer of the RNN. num_rnn_layers: The number of hidden layers in the RNN. rnn_type : The RNN type, should be "GRU" or "LSTM".<|endoftext|>
fd667ecf5b51c98a762f4370f04624bf07590f568ec6123ca4beddd8058a511a
def _create_visual_encoder(self) -> nn.Module: 'Create the visual encoder for the model.' return SimpleCNN(observation_space=gym.spaces.Dict({self.rgb_uuid: self.observation_space[self.rgb_uuid]}), output_size=self._hidden_size, rgb_uuid=self.rgb_uuid, depth_uuid=None)
Create the visual encoder for the model.
env/baseline_models.py
_create_visual_encoder
SAMMiCA/robot_home_service
0
python
def _create_visual_encoder(self) -> nn.Module: return SimpleCNN(observation_space=gym.spaces.Dict({self.rgb_uuid: self.observation_space[self.rgb_uuid]}), output_size=self._hidden_size, rgb_uuid=self.rgb_uuid, depth_uuid=None)
def _create_visual_encoder(self) -> nn.Module: return SimpleCNN(observation_space=gym.spaces.Dict({self.rgb_uuid: self.observation_space[self.rgb_uuid]}), output_size=self._hidden_size, rgb_uuid=self.rgb_uuid, depth_uuid=None)<|docstring|>Create the visual encoder for the model.<|endoftext|>
233bccc350d4cdc4aa88ecfeafce9c02af9db5d529a815c13e558708892a4260
def __init__(self, action_space: gym.spaces.Discrete, observation_space: gym.spaces.Dict, rgb_uuid: str, subtask_uuid: str, rel_position_change_uuid: str, ordered_object_types: Sequence[str], hidden_size=512, task_type_embedding_dim: int=32, object_type_embedding_dim: int=128, object_visible_embedding_dim: int=16, position_dim: int=32, num_rnn_layers=1, rnn_type='GRU'): "A CNN->RNN rearrangement model that expects ResNet features instead\n of RGB images.\n\n Nearly identical to `RearrangeActorCriticSimpleConvRNN` but\n `rgb_uuid` should now be the unique id of the ResNetPreprocessor\n used to featurize RGB images using a pretrained ResNet before\n they're passed to this model.\n " self.visual_attention: Optional[nn.Module] = None super().__init__(**prepare_locals_for_super(locals()))
A CNN->RNN rearrangement model that expects ResNet features instead of RGB images. Nearly identical to `RearrangeActorCriticSimpleConvRNN` but `rgb_uuid` should now be the unique id of the ResNetPreprocessor used to featurize RGB images using a pretrained ResNet before they're passed to this model.
env/baseline_models.py
__init__
SAMMiCA/robot_home_service
0
python
def __init__(self, action_space: gym.spaces.Discrete, observation_space: gym.spaces.Dict, rgb_uuid: str, subtask_uuid: str, rel_position_change_uuid: str, ordered_object_types: Sequence[str], hidden_size=512, task_type_embedding_dim: int=32, object_type_embedding_dim: int=128, object_visible_embedding_dim: int=16, position_dim: int=32, num_rnn_layers=1, rnn_type='GRU'): "A CNN->RNN rearrangement model that expects ResNet features instead\n of RGB images.\n\n Nearly identical to `RearrangeActorCriticSimpleConvRNN` but\n `rgb_uuid` should now be the unique id of the ResNetPreprocessor\n used to featurize RGB images using a pretrained ResNet before\n they're passed to this model.\n " self.visual_attention: Optional[nn.Module] = None super().__init__(**prepare_locals_for_super(locals()))
def __init__(self, action_space: gym.spaces.Discrete, observation_space: gym.spaces.Dict, rgb_uuid: str, subtask_uuid: str, rel_position_change_uuid: str, ordered_object_types: Sequence[str], hidden_size=512, task_type_embedding_dim: int=32, object_type_embedding_dim: int=128, object_visible_embedding_dim: int=16, position_dim: int=32, num_rnn_layers=1, rnn_type='GRU'): "A CNN->RNN rearrangement model that expects ResNet features instead\n of RGB images.\n\n Nearly identical to `RearrangeActorCriticSimpleConvRNN` but\n `rgb_uuid` should now be the unique id of the ResNetPreprocessor\n used to featurize RGB images using a pretrained ResNet before\n they're passed to this model.\n " self.visual_attention: Optional[nn.Module] = None super().__init__(**prepare_locals_for_super(locals()))<|docstring|>A CNN->RNN rearrangement model that expects ResNet features instead of RGB images. Nearly identical to `RearrangeActorCriticSimpleConvRNN` but `rgb_uuid` should now be the unique id of the ResNetPreprocessor used to featurize RGB images using a pretrained ResNet before they're passed to this model.<|endoftext|>
9daa34a9b060ccccfe65158d2eb132f53655b8267cbc9691be3f63fa303c8fb4
def get_test_sets(): "Get test sentence and dict from rmrb dataset\n Return:\n test_set: a list of tokenized sentences. Each of it is a list of words.\n For example, [['今天', '是', '星期二', '。'], ...]\n dicts: a list of all words that appear in the dataset\n " with open('data/rmrb.txt', 'r', encoding='utf-8') as f: lines = f.readlines() test_set = [] dicts = [] for line in lines: line = pre_process(line, use_re=0) idata = line.strip().split() if (len(idata) == 0): continue idata = [x.split('/')[0] for x in idata] test_set.append(idata) dicts.extend(idata) dicts = list(set(dicts)) return (test_set, dicts)
Get test sentence and dict from rmrb dataset Return: test_set: a list of tokenized sentences. Each of it is a list of words. For example, [['今天', '是', '星期二', '。'], ...] dicts: a list of all words that appear in the dataset
test_exp1.py
get_test_sets
volgachen/Chinese-Tokenization
0
python
def get_test_sets(): "Get test sentence and dict from rmrb dataset\n Return:\n test_set: a list of tokenized sentences. Each of it is a list of words.\n For example, [['今天', '是', '星期二', '。'], ...]\n dicts: a list of all words that appear in the dataset\n " with open('data/rmrb.txt', 'r', encoding='utf-8') as f: lines = f.readlines() test_set = [] dicts = [] for line in lines: line = pre_process(line, use_re=0) idata = line.strip().split() if (len(idata) == 0): continue idata = [x.split('/')[0] for x in idata] test_set.append(idata) dicts.extend(idata) dicts = list(set(dicts)) return (test_set, dicts)
def get_test_sets(): "Get test sentence and dict from rmrb dataset\n Return:\n test_set: a list of tokenized sentences. Each of it is a list of words.\n For example, [['今天', '是', '星期二', '。'], ...]\n dicts: a list of all words that appear in the dataset\n " with open('data/rmrb.txt', 'r', encoding='utf-8') as f: lines = f.readlines() test_set = [] dicts = [] for line in lines: line = pre_process(line, use_re=0) idata = line.strip().split() if (len(idata) == 0): continue idata = [x.split('/')[0] for x in idata] test_set.append(idata) dicts.extend(idata) dicts = list(set(dicts)) return (test_set, dicts)<|docstring|>Get test sentence and dict from rmrb dataset Return: test_set: a list of tokenized sentences. Each of it is a list of words. For example, [['今天', '是', '星期二', '。'], ...] dicts: a list of all words that appear in the dataset<|endoftext|>
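Note on the record above: `pre_process` and the `data/rmrb.txt` corpus are not reproduced here, but the heart of the parsing is the word/tag split. A minimal stand-alone sketch of just that step, with an invented sample line in the rmrb word/POS style, would be:

# The corpus stores tokens as word/POS pairs separated by whitespace; the
# record above keeps only the word part via x.split('/')[0].
line = '今天/t 是/v 星期二/t 。/w'
tokens = [x.split('/')[0] for x in line.strip().split()]
print(tokens)  # -> ['今天', '是', '星期二', '。']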
ad7663df2e5c48e07c29d3084c1a78a7481bb89425d80dd2224cdaacefef66e8
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type query: List[List[str]]\n :rtype: List[float]\n ' def dfs(x, y): if (y in graph[x]): return graph[x][y] for nei in graph[x]: if (nei not in visited): visited.add(nei) ret = dfs(nei, y) if (ret is not None): return (graph[x][nei] * ret) return None graph = collections.defaultdict(dict) for (i, (a, b)) in enumerate(equations): graph[a][a] = 1 graph[b][b] = 1 graph[a][b] = values[i] if values[i]: graph[b][a] = (1 / values[i]) ans = [] for (x, y) in queries: if ((x not in graph) or (y not in graph)): ans.append((- 1.0)) else: visited = set() v = dfs(x, y) ans.append((v if (v is not None) else (- 1.0))) return ans
:type equations: List[List[str]] :type values: List[float] :type query: List[List[str]] :rtype: List[float]
Python/evaluate-division.py
calcEquation
RideGreg/LeetCode
1
python
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type query: List[List[str]]\n :rtype: List[float]\n ' def dfs(x, y): if (y in graph[x]): return graph[x][y] for nei in graph[x]: if (nei not in visited): visited.add(nei) ret = dfs(nei, y) if (ret is not None): return (graph[x][nei] * ret) return None graph = collections.defaultdict(dict) for (i, (a, b)) in enumerate(equations): graph[a][a] = 1 graph[b][b] = 1 graph[a][b] = values[i] if values[i]: graph[b][a] = (1 / values[i]) ans = [] for (x, y) in queries: if ((x not in graph) or (y not in graph)): ans.append((- 1.0)) else: visited = set() v = dfs(x, y) ans.append((v if (v is not None) else (- 1.0))) return ans
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type query: List[List[str]]\n :rtype: List[float]\n ' def dfs(x, y): if (y in graph[x]): return graph[x][y] for nei in graph[x]: if (nei not in visited): visited.add(nei) ret = dfs(nei, y) if (ret is not None): return (graph[x][nei] * ret) return None graph = collections.defaultdict(dict) for (i, (a, b)) in enumerate(equations): graph[a][a] = 1 graph[b][b] = 1 graph[a][b] = values[i] if values[i]: graph[b][a] = (1 / values[i]) ans = [] for (x, y) in queries: if ((x not in graph) or (y not in graph)): ans.append((- 1.0)) else: visited = set() v = dfs(x, y) ans.append((v if (v is not None) else (- 1.0))) return ans<|docstring|>:type equations: List[List[str]] :type values: List[float] :type query: List[List[str]] :rtype: List[float]<|endoftext|>
5a4cdcf1e19c65bac9f4f2cd40e449fbe001cd61af693329db3f2cbb5bcc55b0
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' union_find = UnionFind() for ((a, b), k) in zip(equations, values): union_find.union_set(a, b, k) return [union_find.query_set(a, b) for (a, b) in queries]
:type equations: List[List[str]] :type values: List[float] :type queries: List[List[str]] :rtype: List[float]
Python/evaluate-division.py
calcEquation
RideGreg/LeetCode
1
python
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' union_find = UnionFind() for ((a, b), k) in zip(equations, values): union_find.union_set(a, b, k) return [union_find.query_set(a, b) for (a, b) in queries]
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' union_find = UnionFind() for ((a, b), k) in zip(equations, values): union_find.union_set(a, b, k) return [union_find.query_set(a, b) for (a, b) in queries]<|docstring|>:type equations: List[List[str]] :type values: List[float] :type queries: List[List[str]] :rtype: List[float]<|endoftext|>
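The union-find variant above depends on a `UnionFind` helper with `union_set(a, b, k)` and `query_set(a, b)` methods that is not included in the record. A minimal weighted union-find that would satisfy that interface — an assumption about the intended behaviour, not the repository's actual class — could look like this:

class UnionFind(object):
    # weight[x] stores the ratio value(x) / value(root(x)).
    def __init__(self):
        self.parent = {}
        self.weight = {}

    def find_set(self, x):
        if x not in self.parent:  # first time we see x: make a singleton set
            self.parent[x], self.weight[x] = x, 1.0
            return x
        if self.parent[x] != x:
            root = self.find_set(self.parent[x])
            self.weight[x] *= self.weight[self.parent[x]]  # path compression, accumulating ratios
            self.parent[x] = root
        return self.parent[x]

    def union_set(self, a, b, k):
        # record the fact a / b == k
        ra, rb = self.find_set(a), self.find_set(b)
        if ra != rb:
            self.parent[ra] = rb
            self.weight[ra] = k * self.weight[b] / self.weight[a]

    def query_set(self, a, b):
        # return a / b, or -1.0 if either variable is unknown or disconnected
        if a not in self.parent or b not in self.parent:
            return -1.0
        if self.find_set(a) != self.find_set(b):
            return -1.0
        return self.weight[a] / self.weight[b]

uf = UnionFind()
uf.union_set('a', 'b', 2.0)  # a / b = 2.0
uf.union_set('b', 'c', 3.0)  # b / c = 3.0
print(uf.query_set('a', 'c'), uf.query_set('c', 'a'), uf.query_set('a', 'x'))  # -> 6.0 0.166... -1.0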
b2a0e361808667073b856ca1666feaacbe38e8ff57605f16bbb3d48abf557f29
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' adj = collections.defaultdict(dict) for ((a, b), k) in zip(equations, values): adj[a][b] = k adj[b][a] = (1.0 / k) def bfs(adj, a, b, lookup): if ((a not in adj) or (b not in adj)): return (- 1.0) if ((a, b) in lookup): return lookup[(a, b)] visited = {a} q = collections.deque([(a, 1.0)]) while q: (u, val) = q.popleft() if (u == b): lookup[(a, b)] = val return val for (v, k) in adj[u].items(): if (v not in visited): visited.add(v) q.append((v, (val * k))) lookup[(a, b)] = (- 1.0) return (- 1.0) lookup = {} return [bfs(adj, a, b, lookup) for (a, b) in queries]
:type equations: List[List[str]] :type values: List[float] :type queries: List[List[str]] :rtype: List[float]
Python/evaluate-division.py
calcEquation
RideGreg/LeetCode
1
python
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' adj = collections.defaultdict(dict) for ((a, b), k) in zip(equations, values): adj[a][b] = k adj[b][a] = (1.0 / k) def bfs(adj, a, b, lookup): if ((a not in adj) or (b not in adj)): return (- 1.0) if ((a, b) in lookup): return lookup[(a, b)] visited = {a} q = collections.deque([(a, 1.0)]) while q: (u, val) = q.popleft() if (u == b): lookup[(a, b)] = val return val for (v, k) in adj[u].items(): if (v not in visited): visited.add(v) q.append((v, (val * k))) lookup[(a, b)] = (- 1.0) return (- 1.0) lookup = {} return [bfs(adj, a, b, lookup) for (a, b) in queries]
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' adj = collections.defaultdict(dict) for ((a, b), k) in zip(equations, values): adj[a][b] = k adj[b][a] = (1.0 / k) def bfs(adj, a, b, lookup): if ((a not in adj) or (b not in adj)): return (- 1.0) if ((a, b) in lookup): return lookup[(a, b)] visited = {a} q = collections.deque([(a, 1.0)]) while q: (u, val) = q.popleft() if (u == b): lookup[(a, b)] = val return val for (v, k) in adj[u].items(): if (v not in visited): visited.add(v) q.append((v, (val * k))) lookup[(a, b)] = (- 1.0) return (- 1.0) lookup = {} return [bfs(adj, a, b, lookup) for (a, b) in queries]<|docstring|>:type equations: List[List[str]] :type values: List[float] :type queries: List[List[str]] :rtype: List[float]<|endoftext|>
1b3fad24c414765f69895d9759a61e356c7b1ba1bcec5ff6bc7a0d11820cc97e
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' adj = collections.defaultdict(dict) for ((a, b), k) in itertools.izip(equations, values): adj[a][a] = adj[b][b] = 1.0 adj[a][b] = k adj[b][a] = (1.0 / k) for k in adj: for i in adj[k]: for j in adj[k]: adj[i][j] = (adj[i][k] * adj[k][j]) return [adj[a].get(b, (- 1.0)) for (a, b) in queries]
:type equations: List[List[str]] :type values: List[float] :type queries: List[List[str]] :rtype: List[float]
Python/evaluate-division.py
calcEquation
RideGreg/LeetCode
1
python
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' adj = collections.defaultdict(dict) for ((a, b), k) in itertools.izip(equations, values): adj[a][a] = adj[b][b] = 1.0 adj[a][b] = k adj[b][a] = (1.0 / k) for k in adj: for i in adj[k]: for j in adj[k]: adj[i][j] = (adj[i][k] * adj[k][j]) return [adj[a].get(b, (- 1.0)) for (a, b) in queries]
def calcEquation(self, equations, values, queries): '\n :type equations: List[List[str]]\n :type values: List[float]\n :type queries: List[List[str]]\n :rtype: List[float]\n ' adj = collections.defaultdict(dict) for ((a, b), k) in itertools.izip(equations, values): adj[a][a] = adj[b][b] = 1.0 adj[a][b] = k adj[b][a] = (1.0 / k) for k in adj: for i in adj[k]: for j in adj[k]: adj[i][j] = (adj[i][k] * adj[k][j]) return [adj[a].get(b, (- 1.0)) for (a, b) in queries]<|docstring|>:type equations: List[List[str]] :type values: List[float] :type queries: List[List[str]] :rtype: List[float]<|endoftext|>
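The Floyd-Warshall-style variant above uses Python 2's `itertools.izip`, so it does not run as-is under Python 3. A Python 3 sketch of the same idea, checked against the classic example inputs (a rewrite for illustration, not the repository's code):

import collections

def calc_equation_fw(equations, values, queries):
    # Build a weighted adjacency map (adj[i][j] == i / j), then close it
    # transitively so every connected pair gets a ratio, Floyd-Warshall style.
    adj = collections.defaultdict(dict)
    for (a, b), k in zip(equations, values):
        adj[a][a] = adj[b][b] = 1.0
        adj[a][b] = k
        adj[b][a] = 1.0 / k
    for k in adj:
        for i in adj[k]:
            for j in adj[k]:
                adj[i][j] = adj[i][k] * adj[k][j]
    return [adj[a].get(b, -1.0) if a in adj else -1.0 for a, b in queries]

# a / b = 2.0 and b / c = 3.0, so a / c = 6.0, b / a = 0.5, unknowns give -1.0
print(calc_equation_fw([['a', 'b'], ['b', 'c']], [2.0, 3.0],
                       [['a', 'c'], ['b', 'a'], ['a', 'e'], ['x', 'x']]))
# -> [6.0, 0.5, -1.0, -1.0]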
f11c4127d29f86f1ce557c8366c1bc12af5c81854d15282e00e551785ec7ea53
def search_api(query, index, limit=10): '\n Sends a query to the search API (supports simple search\n queries only)\n ' api_gateway = get_from_config('apiGatewayEndpoint') api_gateway_host = urlparse(api_gateway).hostname match = re.match('.*\\.([a-z]{2}-[a-z]+-\\d)\\.amazonaws\\.com$', api_gateway_host) region = match.groups()[0] auth = search_credentials(api_gateway_host, region, 'execute-api') encoded_params = urlencode(dict(index=index, action='search', query=query), quote_via=quote) response = requests.get(f'{api_gateway}/search?{encoded_params}', auth=auth) if (not response.ok): raise QuiltException(response.text) return response.json()
Sends a query to the search API (supports simple search queries only)
api/python/quilt3/search_util.py
search_api
BearerPipelineTest/quilt
1,115
python
def search_api(query, index, limit=10): '\n Sends a query to the search API (supports simple search\n queries only)\n ' api_gateway = get_from_config('apiGatewayEndpoint') api_gateway_host = urlparse(api_gateway).hostname match = re.match('.*\\.([a-z]{2}-[a-z]+-\\d)\\.amazonaws\\.com$', api_gateway_host) region = match.groups()[0] auth = search_credentials(api_gateway_host, region, 'execute-api') encoded_params = urlencode(dict(index=index, action='search', query=query), quote_via=quote) response = requests.get(f'{api_gateway}/search?{encoded_params}', auth=auth) if (not response.ok): raise QuiltException(response.text) return response.json()
def search_api(query, index, limit=10): '\n Sends a query to the search API (supports simple search\n queries only)\n ' api_gateway = get_from_config('apiGatewayEndpoint') api_gateway_host = urlparse(api_gateway).hostname match = re.match('.*\\.([a-z]{2}-[a-z]+-\\d)\\.amazonaws\\.com$', api_gateway_host) region = match.groups()[0] auth = search_credentials(api_gateway_host, region, 'execute-api') encoded_params = urlencode(dict(index=index, action='search', query=query), quote_via=quote) response = requests.get(f'{api_gateway}/search?{encoded_params}', auth=auth) if (not response.ok): raise QuiltException(response.text) return response.json()<|docstring|>Sends a query to the search API (supports simple search queries only)<|endoftext|>
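A usage sketch for the record above; it assumes quilt3 has already been configured/logged in so that 'apiGatewayEndpoint' exists in the local config, and the bucket name and query string are placeholders. Note that the `limit` argument is accepted but never forwarded to the request in this version.

# Hypothetical call against a configured Quilt catalog; the exact shape of the
# returned JSON depends on the deployment, so it is just printed here.
results = search_api(query='extension:csv', index='my-bucket', limit=10)
print(results)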
5bbd7a165aae94a523bda275aaaf0d7ab90230d6fd1c1f79746cdf5abc37765e
def get_key_by_value(dc: dict, value): ' Returns key from dict by value ' for (k, v) in dc.items(): if (v == value): return k
Returns key from dict by value
app/eparser/ecommons.py
get_key_by_value
fs-basis/DemonEditor
66
python
def get_key_by_value(dc: dict, value): ' ' for (k, v) in dc.items(): if (v == value): return k
def get_key_by_value(dc: dict, value): ' ' for (k, v) in dc.items(): if (v == value): return k<|docstring|>Returns key from dict by value<|endoftext|>
1da963574bf3e0b6bd925003d4986990df072406f1fb32120db5edf2dfa6b947
def get_value_by_name(en, name): ' Returns value by name from enums ' for n in en: if (n.name == name): return n.value
Returns value by name from enums
app/eparser/ecommons.py
get_value_by_name
fs-basis/DemonEditor
66
python
def get_value_by_name(en, name): ' ' for n in en: if (n.name == name): return n.value
def get_value_by_name(en, name): ' ' for n in en: if (n.name == name): return n.value<|docstring|>Returns value by name from enums<|endoftext|>
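A small usage sketch for the two DemonEditor helpers above; the enum and the dict are invented for illustration, not taken from the project.

from enum import Enum

class Flag(Enum):
    # illustrative enum, not one of DemonEditor's real enums
    HIDE = 1
    LOCK = 2

SERVICE_TYPE = {'1': 'TV', '2': 'Radio'}

print(get_key_by_value(SERVICE_TYPE, 'Radio'))  # reverse dict lookup -> '2'
print(get_value_by_name(Flag, 'LOCK'))          # enum value by member name -> 2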
cae67b8601586638357fe795d13b328720c9bfa6f91cc2ea8b90888b3c39013e
def is_transponder_valid(tr: Transponder): ' Checks transponder validity ' try: int(tr.frequency) int(tr.symbol_rate) ((tr.pls_mode is None) or int(tr.pls_mode)) ((tr.pls_code is None) or int(tr.pls_code)) ((tr.is_id is None) or int(tr.is_id)) except TypeError: return False if (tr.polarization not in POLARIZATION.values()): return False if (tr.fec_inner not in FEC.values()): return False if (tr.system not in SYSTEM.values()): return False if (tr.modulation not in MODULATION.values()): return False return True
Checks transponder validity
app/eparser/ecommons.py
is_transponder_valid
fs-basis/DemonEditor
66
python
def is_transponder_valid(tr: Transponder): ' ' try: int(tr.frequency) int(tr.symbol_rate) ((tr.pls_mode is None) or int(tr.pls_mode)) ((tr.pls_code is None) or int(tr.pls_code)) ((tr.is_id is None) or int(tr.is_id)) except TypeError: return False if (tr.polarization not in POLARIZATION.values()): return False if (tr.fec_inner not in FEC.values()): return False if (tr.system not in SYSTEM.values()): return False if (tr.modulation not in MODULATION.values()): return False return True
def is_transponder_valid(tr: Transponder): ' ' try: int(tr.frequency) int(tr.symbol_rate) ((tr.pls_mode is None) or int(tr.pls_mode)) ((tr.pls_code is None) or int(tr.pls_code)) ((tr.is_id is None) or int(tr.is_id)) except TypeError: return False if (tr.polarization not in POLARIZATION.values()): return False if (tr.fec_inner not in FEC.values()): return False if (tr.system not in SYSTEM.values()): return False if (tr.modulation not in MODULATION.values()): return False return True<|docstring|>Checks transponder validity<|endoftext|>
55a82cdd5b1e1ad5d29f5b7e81a9c433cc161f997bf2b5d2627135fcd846814c
def test_status_iter(log_fh, cache_filepath=None): " Iterator generator, which accepts a log file handle\n (which contains an output of a CI job) and yields\n (test, conf, status) tuples.\n\n Caches result in the 'cache_filepath' file when its name\n is provided. Reuses the existing cache on next\n invocations.\n " if cache_filepath: if os.path.isfile(cache_filepath): with open(cache_filepath, 'r') as cache_fh: data = json.load(cache_fh) for test_status in data: (yield tuple(test_status)) return cache = [] hang_detected = False for line in log_fh: m = TEST_STATUS_LINE_RE.match(line) if m: status = (m.group('status') or 'fail') res = (m['test'], m['conf'], status) if cache_filepath: cache.append(res) (yield res) continue m = TEST_HANG_RE.match(line) if m: hang_detected = True continue if hang_detected: hang_detected = False m = HANG_RESULT_RE.match(line) if m: result = m['result'] test = (result.split('.', 1)[0] + '.test.lua') res = (test, None, 'hang') if cache_filepath: cache.append(res) (yield res) continue if cache_filepath: with open(cache_filepath, 'w') as cache_fh: json.dump(cache, cache_fh, indent=2)
Iterator generator, which accepts a log file handle (which contains an output of a CI job) and yields (test, conf, status) tuples. Caches result in the 'cache_filepath' file when its name is provided. Reuses the existing cache on next invocations.
multivac/sensors/test_status.py
test_status_iter
Totktonada/multivac
1
python
def test_status_iter(log_fh, cache_filepath=None): " Iterator generator, which accepts a log file handle\n (which contains an output of a CI job) and yields\n (test, conf, status) tuples.\n\n Caches result in the 'cache_filepath' file when its name\n is provided. Reuses the existing cache on next\n invocations.\n " if cache_filepath: if os.path.isfile(cache_filepath): with open(cache_filepath, 'r') as cache_fh: data = json.load(cache_fh) for test_status in data: (yield tuple(test_status)) return cache = [] hang_detected = False for line in log_fh: m = TEST_STATUS_LINE_RE.match(line) if m: status = (m.group('status') or 'fail') res = (m['test'], m['conf'], status) if cache_filepath: cache.append(res) (yield res) continue m = TEST_HANG_RE.match(line) if m: hang_detected = True continue if hang_detected: hang_detected = False m = HANG_RESULT_RE.match(line) if m: result = m['result'] test = (result.split('.', 1)[0] + '.test.lua') res = (test, None, 'hang') if cache_filepath: cache.append(res) (yield res) continue if cache_filepath: with open(cache_filepath, 'w') as cache_fh: json.dump(cache, cache_fh, indent=2)
def test_status_iter(log_fh, cache_filepath=None): " Iterator generator, which accepts a log file handle\n (which contains an output of a CI job) and yields\n (test, conf, status) tuples.\n\n Caches result in the 'cache_filepath' file when its name\n is provided. Reuses the existing cache on next\n invocations.\n " if cache_filepath: if os.path.isfile(cache_filepath): with open(cache_filepath, 'r') as cache_fh: data = json.load(cache_fh) for test_status in data: (yield tuple(test_status)) return cache = [] hang_detected = False for line in log_fh: m = TEST_STATUS_LINE_RE.match(line) if m: status = (m.group('status') or 'fail') res = (m['test'], m['conf'], status) if cache_filepath: cache.append(res) (yield res) continue m = TEST_HANG_RE.match(line) if m: hang_detected = True continue if hang_detected: hang_detected = False m = HANG_RESULT_RE.match(line) if m: result = m['result'] test = (result.split('.', 1)[0] + '.test.lua') res = (test, None, 'hang') if cache_filepath: cache.append(res) (yield res) continue if cache_filepath: with open(cache_filepath, 'w') as cache_fh: json.dump(cache, cache_fh, indent=2)<|docstring|>Iterator generator, which accepts a log file handle (which contains an output of a CI job) and yields (test, conf, status) tuples. Caches result in the 'cache_filepath' file when its name is provided. Reuses the existing cache on next invocations.<|endoftext|>
bedd0fcb49a7b6e8b28ee24c330bc02a9c69ba3026dd63ee97aaeb857c8ed9f6
def test_smart_status_iter(log_fh, cache_filepath=None): " Iterator generator that yields (test, conf, status)\n tuples.\n\n The difference from `test_status_iter()` is that this\n iterator squashes duplicates and reports 'transient fail'\n status for a test, which fails, run again and succeeds.\n " tmp = OrderedDict() for (test, conf, status) in test_status_iter(log_fh, cache_filepath): key = (test, conf) if ((status == 'pass') and (tmp.get(key) == 'fail')): status = 'transient fail' tmp[key] = status for (key, status) in tmp.items(): (test, conf) = key (yield (test, conf, status))
Iterator generator that yields (test, conf, status) tuples. The difference from `test_status_iter()` is that this iterator squashes duplicates and reports 'transient fail' status for a test, which fails, run again and succeeds.
multivac/sensors/test_status.py
test_smart_status_iter
Totktonada/multivac
1
python
def test_smart_status_iter(log_fh, cache_filepath=None): " Iterator generator that yields (test, conf, status)\n tuples.\n\n The difference from `test_status_iter()` is that this\n iterator squashes duplicates and reports 'transient fail'\n status for a test, which fails, run again and succeeds.\n " tmp = OrderedDict() for (test, conf, status) in test_status_iter(log_fh, cache_filepath): key = (test, conf) if ((status == 'pass') and (tmp.get(key) == 'fail')): status = 'transient fail' tmp[key] = status for (key, status) in tmp.items(): (test, conf) = key (yield (test, conf, status))
def test_smart_status_iter(log_fh, cache_filepath=None): " Iterator generator that yields (test, conf, status)\n tuples.\n\n The difference from `test_status_iter()` is that this\n iterator squashes duplicates and reports 'transient fail'\n status for a test, which fails, run again and succeeds.\n " tmp = OrderedDict() for (test, conf, status) in test_status_iter(log_fh, cache_filepath): key = (test, conf) if ((status == 'pass') and (tmp.get(key) == 'fail')): status = 'transient fail' tmp[key] = status for (key, status) in tmp.items(): (test, conf) = key (yield (test, conf, status))<|docstring|>Iterator generator that yields (test, conf, status) tuples. The difference from `test_status_iter()` is that this iterator squashes duplicates and reports 'transient fail' status for a test, which fails, run again and succeeds.<|endoftext|>
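The regular expressions and CI log files used by these iterators are not part of the records, but the 'transient fail' squashing rule can be shown on its own. A stand-alone sketch of that rule, fed with hand-written (test, conf, status) tuples instead of a parsed log:

from collections import OrderedDict

def squash_statuses(statuses):
    # A later 'pass' for a (test, conf) key that already failed is downgraded
    # to 'transient fail', mirroring test_smart_status_iter() above.
    tmp = OrderedDict()
    for test, conf, status in statuses:
        key = (test, conf)
        if status == 'pass' and tmp.get(key) == 'fail':
            status = 'transient fail'
        tmp[key] = status
    return [(test, conf, status) for (test, conf), status in tmp.items()]

print(squash_statuses([
    ('box/tx.test.lua', 'memtx', 'fail'),
    ('box/tx.test.lua', 'memtx', 'pass'),   # re-run succeeded
    ('vinyl/ddl.test.lua', None, 'pass'),
]))
# -> [('box/tx.test.lua', 'memtx', 'transient fail'), ('vinyl/ddl.test.lua', None, 'pass')]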
fdf3533080f2f04000314ac680718fcc2bde3109569d829722bef8aa49d48c91
def execute(log_filepath): " External API for the smart test status iterator.\n\n The result format is designed to provide some level of\n unification between different sensors. The event\n dictionary for the 'test status' event contains `test`,\n `conf` and `status` fields (except common `event` field).\n " cache_filepath = get_cache_filepath(log_filepath) with open(log_filepath, 'r') as log_fh: for (test, conf, status) in test_smart_status_iter(log_fh, cache_filepath): (yield {'event': 'test status', 'test': test, 'conf': conf, 'status': status})
External API for the smart test status iterator. The result format is designed to provide some level of unification between different sensors. The event dictionary for the 'test status' event contains `test`, `conf` and `status` fields (except common `event` field).
multivac/sensors/test_status.py
execute
Totktonada/multivac
1
python
def execute(log_filepath): " External API for the smart test status iterator.\n\n The result format is designed to provide some level of\n unification between different sensors. The event\n dictionary for the 'test status' event contains `test`,\n `conf` and `status` fields (except common `event` field).\n " cache_filepath = get_cache_filepath(log_filepath) with open(log_filepath, 'r') as log_fh: for (test, conf, status) in test_smart_status_iter(log_fh, cache_filepath): (yield {'event': 'test status', 'test': test, 'conf': conf, 'status': status})
def execute(log_filepath): " External API for the smart test status iterator.\n\n The result format is designed to provide some level of\n unification between different sensors. The event\n dictionary for the 'test status' event contains `test`,\n `conf` and `status` fields (except common `event` field).\n " cache_filepath = get_cache_filepath(log_filepath) with open(log_filepath, 'r') as log_fh: for (test, conf, status) in test_smart_status_iter(log_fh, cache_filepath): (yield {'event': 'test status', 'test': test, 'conf': conf, 'status': status})<|docstring|>External API for the smart test status iterator. The result format is designed to provide some level of unification between different sensors. The event dictionary for the 'test status' event contains `test`, `conf` and `status` fields (except common `event` field).<|endoftext|>
a83516990815966b68cfef948af661972d2faa5f29a7171941d0648c3a54c461
def run_task(snapshot_config, *_): 'Run task.\n\n Args:\n snapshot_config (garage.experiment.SnapshotConfig): The snapshot\n configuration used by LocalRunner to create the snapshotter.\n *_ (object): Ignored by this function.\n\n ' with LocalTFRunner(snapshot_config=snapshot_config) as runner: env = TfEnv(gym.make('FetchReach-v1')) action_noise = OUStrategy(env.spec, sigma=0.2) policy = ContinuousMLPPolicy(env_spec=env.spec, name='Policy', hidden_sizes=[256, 256, 256], hidden_nonlinearity=tf.nn.relu, output_nonlinearity=tf.nn.tanh) qf = ContinuousMLPQFunction(env_spec=env.spec, name='QFunction', hidden_sizes=[256, 256, 256], hidden_nonlinearity=tf.nn.relu) replay_buffer = HerReplayBuffer(env_spec=env.spec, size_in_transitions=int(1000000.0), time_horizon=100, replay_k=0.4, reward_fun=env.compute_reward) ddpg = DDPG(env_spec=env.spec, policy=policy, policy_lr=0.001, qf_lr=0.001, qf=qf, replay_buffer=replay_buffer, target_update_tau=0.05, steps_per_epoch=20, max_path_length=100, n_train_steps=40, discount=0.9, exploration_strategy=action_noise, policy_optimizer=tf.compat.v1.train.AdamOptimizer, qf_optimizer=tf.compat.v1.train.AdamOptimizer, buffer_batch_size=256) runner.setup(algo=ddpg, env=env) runner.train(n_epochs=50, batch_size=100)
Run task. Args: snapshot_config (garage.experiment.SnapshotConfig): The snapshot configuration used by LocalRunner to create the snapshotter. *_ (object): Ignored by this function.
examples/tf/her_ddpg_fetchreach.py
run_task
maliesa96/garage
0
python
def run_task(snapshot_config, *_): 'Run task.\n\n Args:\n snapshot_config (garage.experiment.SnapshotConfig): The snapshot\n configuration used by LocalRunner to create the snapshotter.\n *_ (object): Ignored by this function.\n\n ' with LocalTFRunner(snapshot_config=snapshot_config) as runner: env = TfEnv(gym.make('FetchReach-v1')) action_noise = OUStrategy(env.spec, sigma=0.2) policy = ContinuousMLPPolicy(env_spec=env.spec, name='Policy', hidden_sizes=[256, 256, 256], hidden_nonlinearity=tf.nn.relu, output_nonlinearity=tf.nn.tanh) qf = ContinuousMLPQFunction(env_spec=env.spec, name='QFunction', hidden_sizes=[256, 256, 256], hidden_nonlinearity=tf.nn.relu) replay_buffer = HerReplayBuffer(env_spec=env.spec, size_in_transitions=int(1000000.0), time_horizon=100, replay_k=0.4, reward_fun=env.compute_reward) ddpg = DDPG(env_spec=env.spec, policy=policy, policy_lr=0.001, qf_lr=0.001, qf=qf, replay_buffer=replay_buffer, target_update_tau=0.05, steps_per_epoch=20, max_path_length=100, n_train_steps=40, discount=0.9, exploration_strategy=action_noise, policy_optimizer=tf.compat.v1.train.AdamOptimizer, qf_optimizer=tf.compat.v1.train.AdamOptimizer, buffer_batch_size=256) runner.setup(algo=ddpg, env=env) runner.train(n_epochs=50, batch_size=100)
def run_task(snapshot_config, *_): 'Run task.\n\n Args:\n snapshot_config (garage.experiment.SnapshotConfig): The snapshot\n configuration used by LocalRunner to create the snapshotter.\n *_ (object): Ignored by this function.\n\n ' with LocalTFRunner(snapshot_config=snapshot_config) as runner: env = TfEnv(gym.make('FetchReach-v1')) action_noise = OUStrategy(env.spec, sigma=0.2) policy = ContinuousMLPPolicy(env_spec=env.spec, name='Policy', hidden_sizes=[256, 256, 256], hidden_nonlinearity=tf.nn.relu, output_nonlinearity=tf.nn.tanh) qf = ContinuousMLPQFunction(env_spec=env.spec, name='QFunction', hidden_sizes=[256, 256, 256], hidden_nonlinearity=tf.nn.relu) replay_buffer = HerReplayBuffer(env_spec=env.spec, size_in_transitions=int(1000000.0), time_horizon=100, replay_k=0.4, reward_fun=env.compute_reward) ddpg = DDPG(env_spec=env.spec, policy=policy, policy_lr=0.001, qf_lr=0.001, qf=qf, replay_buffer=replay_buffer, target_update_tau=0.05, steps_per_epoch=20, max_path_length=100, n_train_steps=40, discount=0.9, exploration_strategy=action_noise, policy_optimizer=tf.compat.v1.train.AdamOptimizer, qf_optimizer=tf.compat.v1.train.AdamOptimizer, buffer_batch_size=256) runner.setup(algo=ddpg, env=env) runner.train(n_epochs=50, batch_size=100)<|docstring|>Run task. Args: snapshot_config (garage.experiment.SnapshotConfig): The snapshot configuration used by LocalRunner to create the snapshotter. *_ (object): Ignored by this function.<|endoftext|>
29ff2a4a01f829dbecce48252fd382f3c1b1f3f379c76e2d3e814e3fb26eff13
def get_tagger(): '\n Return a tagging function given some app settings.\n `Settings` is the settings module of an app.\n The returned value is a function that receives a unicode string and returns\n a list of `Word` instances.\n ' from quepy.nltktagger import run_nltktagger tagger_function = (lambda x: run_nltktagger(x, settings.NLTK_DATA_PATH)) def wrapper(string): assert_valid_encoding(string) words = tagger_function(string) for word in words: if (word.pos not in PENN_TAGSET): logger.warning('Tagger emmited a non-penn POS tag {!r}'.format(word.pos)) return words return wrapper
Return a tagging function given some app settings. `Settings` is the settings module of an app. The returned value is a function that receives a unicode string and returns a list of `Word` instances.
quepy/tagger.py
get_tagger
DrDub/quepy
951
python
def get_tagger(): '\n Return a tagging function given some app settings.\n `Settings` is the settings module of an app.\n The returned value is a function that receives a unicode string and returns\n a list of `Word` instances.\n ' from quepy.nltktagger import run_nltktagger tagger_function = (lambda x: run_nltktagger(x, settings.NLTK_DATA_PATH)) def wrapper(string): assert_valid_encoding(string) words = tagger_function(string) for word in words: if (word.pos not in PENN_TAGSET): logger.warning('Tagger emmited a non-penn POS tag {!r}'.format(word.pos)) return words return wrapper
def get_tagger(): '\n Return a tagging function given some app settings.\n `Settings` is the settings module of an app.\n The returned value is a function that receives a unicode string and returns\n a list of `Word` instances.\n ' from quepy.nltktagger import run_nltktagger tagger_function = (lambda x: run_nltktagger(x, settings.NLTK_DATA_PATH)) def wrapper(string): assert_valid_encoding(string) words = tagger_function(string) for word in words: if (word.pos not in PENN_TAGSET): logger.warning('Tagger emmited a non-penn POS tag {!r}'.format(word.pos)) return words return wrapper<|docstring|>Return a tagging function given some app settings. `Settings` is the settings module of an app. The returned value is a function that receives a unicode string and returns a list of `Word` instances.<|endoftext|>
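A usage sketch for the record above; it assumes quepy's settings.NLTK_DATA_PATH points at an nltk_data directory with the required tagger models, and that the input is passed as text (the wrapper asserts the encoding).

# Hypothetical call; Word objects come back from run_nltktagger, and the
# record only guarantees that they carry a .pos attribute.
tagger = get_tagger()
for word in tagger(u'Who directed Pulp Fiction?'):
    print(word, word.pos)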
26aed4b066cdabf8eb7534a47e98d28f739fce512ec4125bcc59670a9b504cd8
def wavelength_to_xy_config(lad, ind, band='yj'): '\n convert wavelength to X-Y position\n Input: wavelength in microns, index (1 less than\n desired configuration number)\n\n Returns X and Y position on the detector in mm for + and - sides of slit\n ' if (band.lower() == 'yj'): band_string = '' elif (band.lower() == 'hk'): band_string = '_HK' Xp = np.loadtxt('XY_eq{}/X_plus.dat') Xm = np.loadtxt('XY_eq{}/X_minus.dat') Yp = np.loadtxt('XY_eq{}/Y_plus.dat') Ym = np.loadtxt('XY_eq{}/Y_minus.dat') pxp = np.poly1d(Xp[ind]) pxm = np.poly1d(Xm[ind]) pyp = np.poly1d(Yp[ind]) pym = np.poly1d(Ym[ind]) Xla = [pxp(lad), pxm(lad)] Yla = [pyp(lad), pym(lad)] return (Xla, Yla)
convert wavelength to X-Y position Input: wavelength in microns, index (1 less than desired configuration number) Returns X and Y position on the detector in mm for + and - sides of slit
rimas.py
wavelength_to_xy_config
joedurbak/echelle_simulator_model_creation
0
python
def wavelength_to_xy_config(lad, ind, band='yj'): '\n convert wavelength to X-Y position\n Input: wavelength in microns, index (1 less than\n desired configuration number)\n\n Returns X and Y position on the detector in mm for + and - sides of slit\n ' if (band.lower() == 'yj'): band_string = elif (band.lower() == 'hk'): band_string = '_HK' Xp = np.loadtxt('XY_eq{}/X_plus.dat') Xm = np.loadtxt('XY_eq{}/X_minus.dat') Yp = np.loadtxt('XY_eq{}/Y_plus.dat') Ym = np.loadtxt('XY_eq{}/Y_minus.dat') pxp = np.poly1d(Xp[ind]) pxm = np.poly1d(Xm[ind]) pyp = np.poly1d(Yp[ind]) pym = np.poly1d(Ym[ind]) Xla = [pxp(lad), pxm(lad)] Yla = [pyp(lad), pym(lad)] return (Xla, Yla)
def wavelength_to_xy_config(lad, ind, band='yj'): '\n convert wavelength to X-Y position\n Input: wavelength in microns, index (1 less than\n desired configuration number)\n\n Returns X and Y position on the detector in mm for + and - sides of slit\n ' if (band.lower() == 'yj'): band_string = elif (band.lower() == 'hk'): band_string = '_HK' Xp = np.loadtxt('XY_eq{}/X_plus.dat') Xm = np.loadtxt('XY_eq{}/X_minus.dat') Yp = np.loadtxt('XY_eq{}/Y_plus.dat') Ym = np.loadtxt('XY_eq{}/Y_minus.dat') pxp = np.poly1d(Xp[ind]) pxm = np.poly1d(Xm[ind]) pyp = np.poly1d(Yp[ind]) pym = np.poly1d(Ym[ind]) Xla = [pxp(lad), pxm(lad)] Yla = [pyp(lad), pym(lad)] return (Xla, Yla)<|docstring|>convert wavelength to X-Y position Input: wavelength in microns, index (1 less than desired configuration number) Returns X and Y position on the detector in mm for + and - sides of slit<|endoftext|>
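In the record above, band_string is computed but the 'XY_eq{}/...' paths are never formatted with it, so the '{}' placeholder reaches np.loadtxt literally. If that is an oversight rather than intentional, the presumed intent was something like the helper below (an assumption, not the repository's code):

import numpy as np

def load_band_coefficients(band='yj'):
    # Pick the per-band coefficient directory ('' for YJ, '_HK' for HK) and
    # actually substitute it into the path before loading.
    band_string = '' if band.lower() == 'yj' else '_HK'
    names = ('X_plus', 'X_minus', 'Y_plus', 'Y_minus')
    return [np.loadtxt('XY_eq{}/{}.dat'.format(band_string, name)) for name in names]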
62921736c622dab8105cf3d46b281f173621bd2a1140fc320925e40df9926fa9
def do_affine_transformation_calculation(ccd=default_ccd, config_to_order_array=default_config_to_order_array, band='YJ', sw=80, sh=800): '\n Calculates Affine Matrices that describe spectrograph\n\n The spectrograph can be described by affine transformations from the input slit to the focal plane.\n an affine transofmration can be described by a 3x3 matrix.\n this function calculates the 3x3 matrix per wavelength and order that matches the input slit to the focal plane\n\n :param band:\n :type band:\n :param config_to_order_array:\n :type config_to_order_array:\n :param ccd:\n :type ccd: PyEchelle.CCD\n :param fw: fiber/slit width [microns]\n :param fh: fiber/slit height [microns]\n :return:\n ' from skimage import transform as tf ray_trace_csv = 'RIMAS_{}_affine_dependencies.csv'.format(band.upper()) df = pd.read_csv(ray_trace_csv, encoding='utf-16') df['config'] = df['config'].astype(np.int) df['order'] = config_to_order_array[(df['config'] - 1)] unique_orders = df['order'].unique() fields = df[['fieldy', 'fieldx']] unique_fields = fields.drop_duplicates() unique_fields_array = unique_fields.to_numpy() nfields = len(unique_fields_array.tolist()) norm_field = fields.loc[(0:(nfields - 1), :)] norm_field = norm_field.to_numpy() norm_field = norm_field.astype(np.int) norm_field_list = norm_field.tolist() fw = sw fh = sh sampling_input_x = fw sampling_input_y = fh res = {'MatricesPerOrder': np.int(unique_orders.shape[0]), 'norm_field': norm_field_list, 'sampling_input_x': np.int(sampling_input_x)} print(('Field width: ' + str(fw))) print(('Field height: ' + str(fh))) res['field_width'] = np.double(fw) res['field_height'] = np.double(fh) src = np.array(norm_field, dtype=float) src[(:, 0)] -= np.min(src[(:, 0)]) src[(:, 1)] -= np.min(src[(:, 1)]) src[(:, 0)] /= np.max(src[(:, 0)]) src[(:, 1)] /= np.max(src[(:, 1)]) dst_x = df['y'].to_numpy() dst_y = df['x'].to_numpy() orders = df['order'].to_numpy() wavelength = df['wavelength'].to_numpy() dst_x = np.array(dst_x) dst_y = np.array(dst_y) dst = np.vstack((dst_x, dst_y)) dst /= (ccd.pixelSize / 1000.0) dst += (ccd.Nx / 2) dst = dst.reshape(2, (len(dst[0]) / nfields), nfields).transpose((1, 2, 0)) orders = orders.reshape(((len(orders) / nfields), nfields)) wavelength = wavelength.reshape(((len(wavelength) / nfields), nfields)) affine_matrices = {} transformations = {} p_headers = ['p{:d}'.format(i) for i in range(nfields)] src_headers = ['src{:d}'.format(i) for i in range(nfields)] affine_tsv_headers = (((['order', 'wavelength'] + p_headers) + src_headers) + ['rotation', 'scale0', 'scale1', 'shear', 'translation0', 'translation1']) affine_save_lines = ['\t'.join(affine_tsv_headers)] for (order, wavel, p) in zip(orders, wavelength, dst): print('affine transformation inputs {} {}'.format(src, p)) p_list = [i for i in p] src_list = [i for i in src] inputs_list = (([order[0], wavel[0]] + p_list) + src_list) params = tf.estimate_transform('affine', src, p) params_list = [params.rotation, params.scale[0], params.scale[1], params.shear, params.translation[0], params.translation[1]] affine_save_line = (inputs_list + params_list) affine_save_lines.append('\t'.join([str(i) for i in affine_save_line])) if affine_matrices.has_key(order[0]): affine_matrices[order[0]].update({wavel[0]: np.array(params_list)}) else: affine_matrices[order[0]] = {wavel[0]: np.array(params_list)} with open(affine_tsv_filename(), 'w') as f: f.write('\n'.join(affine_save_lines)) res['matrices'] = affine_matrices return res
Calculates Affine Matrices that describe spectrograph The spectrograph can be described by affine transformations from the input slit to the focal plane. an affine transofmration can be described by a 3x3 matrix. this function calculates the 3x3 matrix per wavelength and order that matches the input slit to the focal plane :param band: :type band: :param config_to_order_array: :type config_to_order_array: :param ccd: :type ccd: PyEchelle.CCD :param fw: fiber/slit width [microns] :param fh: fiber/slit height [microns] :return:
rimas.py
do_affine_transformation_calculation
joedurbak/echelle_simulator_model_creation
0
python
def do_affine_transformation_calculation(ccd=default_ccd, config_to_order_array=default_config_to_order_array, band='YJ', sw=80, sh=800): '\n Calculates Affine Matrices that describe spectrograph\n\n The spectrograph can be described by affine transformations from the input slit to the focal plane.\n an affine transofmration can be described by a 3x3 matrix.\n this function calculates the 3x3 matrix per wavelength and order that matches the input slit to the focal plane\n\n :param band:\n :type band:\n :param config_to_order_array:\n :type config_to_order_array:\n :param ccd:\n :type ccd: PyEchelle.CCD\n :param fw: fiber/slit width [microns]\n :param fh: fiber/slit height [microns]\n :return:\n ' from skimage import transform as tf ray_trace_csv = 'RIMAS_{}_affine_dependencies.csv'.format(band.upper()) df = pd.read_csv(ray_trace_csv, encoding='utf-16') df['config'] = df['config'].astype(np.int) df['order'] = config_to_order_array[(df['config'] - 1)] unique_orders = df['order'].unique() fields = df[['fieldy', 'fieldx']] unique_fields = fields.drop_duplicates() unique_fields_array = unique_fields.to_numpy() nfields = len(unique_fields_array.tolist()) norm_field = fields.loc[(0:(nfields - 1), :)] norm_field = norm_field.to_numpy() norm_field = norm_field.astype(np.int) norm_field_list = norm_field.tolist() fw = sw fh = sh sampling_input_x = fw sampling_input_y = fh res = {'MatricesPerOrder': np.int(unique_orders.shape[0]), 'norm_field': norm_field_list, 'sampling_input_x': np.int(sampling_input_x)} print(('Field width: ' + str(fw))) print(('Field height: ' + str(fh))) res['field_width'] = np.double(fw) res['field_height'] = np.double(fh) src = np.array(norm_field, dtype=float) src[(:, 0)] -= np.min(src[(:, 0)]) src[(:, 1)] -= np.min(src[(:, 1)]) src[(:, 0)] /= np.max(src[(:, 0)]) src[(:, 1)] /= np.max(src[(:, 1)]) dst_x = df['y'].to_numpy() dst_y = df['x'].to_numpy() orders = df['order'].to_numpy() wavelength = df['wavelength'].to_numpy() dst_x = np.array(dst_x) dst_y = np.array(dst_y) dst = np.vstack((dst_x, dst_y)) dst /= (ccd.pixelSize / 1000.0) dst += (ccd.Nx / 2) dst = dst.reshape(2, (len(dst[0]) / nfields), nfields).transpose((1, 2, 0)) orders = orders.reshape(((len(orders) / nfields), nfields)) wavelength = wavelength.reshape(((len(wavelength) / nfields), nfields)) affine_matrices = {} transformations = {} p_headers = ['p{:d}'.format(i) for i in range(nfields)] src_headers = ['src{:d}'.format(i) for i in range(nfields)] affine_tsv_headers = (((['order', 'wavelength'] + p_headers) + src_headers) + ['rotation', 'scale0', 'scale1', 'shear', 'translation0', 'translation1']) affine_save_lines = ['\t'.join(affine_tsv_headers)] for (order, wavel, p) in zip(orders, wavelength, dst): print('affine transformation inputs {} {}'.format(src, p)) p_list = [i for i in p] src_list = [i for i in src] inputs_list = (([order[0], wavel[0]] + p_list) + src_list) params = tf.estimate_transform('affine', src, p) params_list = [params.rotation, params.scale[0], params.scale[1], params.shear, params.translation[0], params.translation[1]] affine_save_line = (inputs_list + params_list) affine_save_lines.append('\t'.join([str(i) for i in affine_save_line])) if affine_matrices.has_key(order[0]): affine_matrices[order[0]].update({wavel[0]: np.array(params_list)}) else: affine_matrices[order[0]] = {wavel[0]: np.array(params_list)} with open(affine_tsv_filename(), 'w') as f: f.write('\n'.join(affine_save_lines)) res['matrices'] = affine_matrices return res
def do_affine_transformation_calculation(ccd=default_ccd, config_to_order_array=default_config_to_order_array, band='YJ', sw=80, sh=800): '\n Calculates Affine Matrices that describe spectrograph\n\n The spectrograph can be described by affine transformations from the input slit to the focal plane.\n an affine transofmration can be described by a 3x3 matrix.\n this function calculates the 3x3 matrix per wavelength and order that matches the input slit to the focal plane\n\n :param band:\n :type band:\n :param config_to_order_array:\n :type config_to_order_array:\n :param ccd:\n :type ccd: PyEchelle.CCD\n :param fw: fiber/slit width [microns]\n :param fh: fiber/slit height [microns]\n :return:\n ' from skimage import transform as tf ray_trace_csv = 'RIMAS_{}_affine_dependencies.csv'.format(band.upper()) df = pd.read_csv(ray_trace_csv, encoding='utf-16') df['config'] = df['config'].astype(np.int) df['order'] = config_to_order_array[(df['config'] - 1)] unique_orders = df['order'].unique() fields = df[['fieldy', 'fieldx']] unique_fields = fields.drop_duplicates() unique_fields_array = unique_fields.to_numpy() nfields = len(unique_fields_array.tolist()) norm_field = fields.loc[(0:(nfields - 1), :)] norm_field = norm_field.to_numpy() norm_field = norm_field.astype(np.int) norm_field_list = norm_field.tolist() fw = sw fh = sh sampling_input_x = fw sampling_input_y = fh res = {'MatricesPerOrder': np.int(unique_orders.shape[0]), 'norm_field': norm_field_list, 'sampling_input_x': np.int(sampling_input_x)} print(('Field width: ' + str(fw))) print(('Field height: ' + str(fh))) res['field_width'] = np.double(fw) res['field_height'] = np.double(fh) src = np.array(norm_field, dtype=float) src[(:, 0)] -= np.min(src[(:, 0)]) src[(:, 1)] -= np.min(src[(:, 1)]) src[(:, 0)] /= np.max(src[(:, 0)]) src[(:, 1)] /= np.max(src[(:, 1)]) dst_x = df['y'].to_numpy() dst_y = df['x'].to_numpy() orders = df['order'].to_numpy() wavelength = df['wavelength'].to_numpy() dst_x = np.array(dst_x) dst_y = np.array(dst_y) dst = np.vstack((dst_x, dst_y)) dst /= (ccd.pixelSize / 1000.0) dst += (ccd.Nx / 2) dst = dst.reshape(2, (len(dst[0]) / nfields), nfields).transpose((1, 2, 0)) orders = orders.reshape(((len(orders) / nfields), nfields)) wavelength = wavelength.reshape(((len(wavelength) / nfields), nfields)) affine_matrices = {} transformations = {} p_headers = ['p{:d}'.format(i) for i in range(nfields)] src_headers = ['src{:d}'.format(i) for i in range(nfields)] affine_tsv_headers = (((['order', 'wavelength'] + p_headers) + src_headers) + ['rotation', 'scale0', 'scale1', 'shear', 'translation0', 'translation1']) affine_save_lines = ['\t'.join(affine_tsv_headers)] for (order, wavel, p) in zip(orders, wavelength, dst): print('affine transformation inputs {} {}'.format(src, p)) p_list = [i for i in p] src_list = [i for i in src] inputs_list = (([order[0], wavel[0]] + p_list) + src_list) params = tf.estimate_transform('affine', src, p) params_list = [params.rotation, params.scale[0], params.scale[1], params.shear, params.translation[0], params.translation[1]] affine_save_line = (inputs_list + params_list) affine_save_lines.append('\t'.join([str(i) for i in affine_save_line])) if affine_matrices.has_key(order[0]): affine_matrices[order[0]].update({wavel[0]: np.array(params_list)}) else: affine_matrices[order[0]] = {wavel[0]: np.array(params_list)} with open(affine_tsv_filename(), 'w') as f: f.write('\n'.join(affine_save_lines)) res['matrices'] = affine_matrices return res<|docstring|>Calculates Affine Matrices that describe spectrograph The spectrograph can be described by affine transformations from the input slit to the focal plane. an affine transofmration can be described by a 3x3 matrix. this function calculates the 3x3 matrix per wavelength and order that matches the input slit to the focal plane :param band: :type band: :param config_to_order_array: :type config_to_order_array: :param ccd: :type ccd: PyEchelle.CCD :param fw: fiber/slit width [microns] :param fh: fiber/slit height [microns] :return:<|endoftext|>
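The record above also relies on Python 2 behaviour: dict.has_key() and the `/` divisions feeding reshape (which must stay integers). Under Python 3 the bookkeeping step would need to look more like this sketch (illustrative values, not the repository's code):

import numpy as np

affine_matrices = {}

def store_affine_params(order, wavelength, params_list):
    # Python 3 replacement for the has_key() branch: group the six affine
    # parameters per diffraction order, keyed by wavelength.
    affine_matrices.setdefault(order, {})[wavelength] = np.array(params_list)

# reshape counts must use integer division in Python 3, e.g.
# orders.reshape((len(orders) // nfields, nfields))
store_affine_params(35, 1.05, [0.0, 1.0, 1.0, 0.0, 2048.0, 2048.0])
store_affine_params(35, 1.06, [0.0, 1.0, 1.0, 0.0, 2050.0, 2049.0])
print(sorted(affine_matrices[35]))  # -> [1.05, 1.06]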
f46b8f91dc5804315b9b8f3d9ea19240e631eee49d0af34e65b14ec46749ca2c
@force_fp32(apply_to=('bbox_preds',)) def refine_bboxes(self, rois, labels, bbox_preds, pos_is_gts, img_metas): "Refine bboxes during training.\n\n Args:\n rois (Tensor): Shape (n*bs, 5), where n is image number per GPU,\n and bs is the sampled RoIs per image. The first column is\n the image id and the next 4 columns are x1, y1, x2, y2.\n labels (Tensor): Shape (n*bs, ).\n bbox_preds (Tensor): Shape (n*bs, 4) or (n*bs, 4*#class).\n pos_is_gts (list[Tensor]): Flags indicating if each positive bbox\n is a gt bbox.\n img_metas (list[dict]): Meta info of each image.\n\n Returns:\n list[Tensor]: Refined bboxes of each image in a mini-batch.\n\n Example:\n >>> # xdoctest: +REQUIRES(module:kwarray)\n >>> import kwarray\n >>> import numpy as np\n >>> from mmdet.core.bbox.demodata import random_boxes\n >>> self = BBoxHead(reg_class_agnostic=True)\n >>> n_roi = 2\n >>> n_img = 4\n >>> scale = 512\n >>> rng = np.random.RandomState(0)\n >>> img_metas = [{'img_shape': (scale, scale)}\n ... for _ in range(n_img)]\n >>> # Create rois in the expected format\n >>> roi_boxes = random_boxes(n_roi, scale=scale, rng=rng)\n >>> img_ids = torch.randint(0, n_img, (n_roi,))\n >>> img_ids = img_ids.float()\n >>> rois = torch.cat([img_ids[:, None], roi_boxes], dim=1)\n >>> # Create other args\n >>> labels = torch.randint(0, 2, (n_roi,)).long()\n >>> bbox_preds = random_boxes(n_roi, scale=scale, rng=rng)\n >>> # For each image, pretend random positive boxes are gts\n >>> is_label_pos = (labels.numpy() > 0).astype(np.int)\n >>> lbl_per_img = kwarray.group_items(is_label_pos,\n ... img_ids.numpy())\n >>> pos_per_img = [sum(lbl_per_img.get(gid, []))\n ... for gid in range(n_img)]\n >>> pos_is_gts = [\n >>> torch.randint(0, 2, (npos,)).byte().sort(\n >>> descending=True)[0]\n >>> for npos in pos_per_img\n >>> ]\n >>> bboxes_list = self.refine_bboxes(rois, labels, bbox_preds,\n >>> pos_is_gts, img_metas)\n >>> print(bboxes_list)\n " img_ids = rois[(:, 0)].long().unique(sorted=True) assert (img_ids.numel() <= len(img_metas)) bboxes_list = [] for i in range(len(img_metas)): inds = torch.nonzero((rois[(:, 0)] == i), as_tuple=False).squeeze(dim=1) num_rois = inds.numel() bboxes_ = rois[(inds, 1:)] label_ = labels[inds] bbox_pred_ = bbox_preds[inds] img_meta_ = img_metas[i] pos_is_gts_ = pos_is_gts[i] bboxes = self.regress_by_class(bboxes_, label_, bbox_pred_, img_meta_) pos_keep = (1 - pos_is_gts_) keep_inds = pos_is_gts_.new_ones(num_rois) keep_inds[:len(pos_is_gts_)] = pos_keep bboxes_list.append(bboxes[keep_inds.type(torch.bool)]) return bboxes_list
Refine bboxes during training. Args: rois (Tensor): Shape (n*bs, 5), where n is image number per GPU, and bs is the sampled RoIs per image. The first column is the image id and the next 4 columns are x1, y1, x2, y2. labels (Tensor): Shape (n*bs, ). bbox_preds (Tensor): Shape (n*bs, 4) or (n*bs, 4*#class). pos_is_gts (list[Tensor]): Flags indicating if each positive bbox is a gt bbox. img_metas (list[dict]): Meta info of each image. Returns: list[Tensor]: Refined bboxes of each image in a mini-batch. Example: >>> # xdoctest: +REQUIRES(module:kwarray) >>> import kwarray >>> import numpy as np >>> from mmdet.core.bbox.demodata import random_boxes >>> self = BBoxHead(reg_class_agnostic=True) >>> n_roi = 2 >>> n_img = 4 >>> scale = 512 >>> rng = np.random.RandomState(0) >>> img_metas = [{'img_shape': (scale, scale)} ... for _ in range(n_img)] >>> # Create rois in the expected format >>> roi_boxes = random_boxes(n_roi, scale=scale, rng=rng) >>> img_ids = torch.randint(0, n_img, (n_roi,)) >>> img_ids = img_ids.float() >>> rois = torch.cat([img_ids[:, None], roi_boxes], dim=1) >>> # Create other args >>> labels = torch.randint(0, 2, (n_roi,)).long() >>> bbox_preds = random_boxes(n_roi, scale=scale, rng=rng) >>> # For each image, pretend random positive boxes are gts >>> is_label_pos = (labels.numpy() > 0).astype(np.int) >>> lbl_per_img = kwarray.group_items(is_label_pos, ... img_ids.numpy()) >>> pos_per_img = [sum(lbl_per_img.get(gid, [])) ... for gid in range(n_img)] >>> pos_is_gts = [ >>> torch.randint(0, 2, (npos,)).byte().sort( >>> descending=True)[0] >>> for npos in pos_per_img >>> ] >>> bboxes_list = self.refine_bboxes(rois, labels, bbox_preds, >>> pos_is_gts, img_metas) >>> print(bboxes_list)
mmdet/models/roi_heads/bbox_heads/bbox_head_cb.py
refine_bboxes
zhenyuw16/combatnoise
1
python
@force_fp32(apply_to=('bbox_preds',)) def refine_bboxes(self, rois, labels, bbox_preds, pos_is_gts, img_metas): "Refine bboxes during training.\n\n Args:\n rois (Tensor): Shape (n*bs, 5), where n is image number per GPU,\n and bs is the sampled RoIs per image. The first column is\n the image id and the next 4 columns are x1, y1, x2, y2.\n labels (Tensor): Shape (n*bs, ).\n bbox_preds (Tensor): Shape (n*bs, 4) or (n*bs, 4*#class).\n pos_is_gts (list[Tensor]): Flags indicating if each positive bbox\n is a gt bbox.\n img_metas (list[dict]): Meta info of each image.\n\n Returns:\n list[Tensor]: Refined bboxes of each image in a mini-batch.\n\n Example:\n >>> # xdoctest: +REQUIRES(module:kwarray)\n >>> import kwarray\n >>> import numpy as np\n >>> from mmdet.core.bbox.demodata import random_boxes\n >>> self = BBoxHead(reg_class_agnostic=True)\n >>> n_roi = 2\n >>> n_img = 4\n >>> scale = 512\n >>> rng = np.random.RandomState(0)\n >>> img_metas = [{'img_shape': (scale, scale)}\n ... for _ in range(n_img)]\n >>> # Create rois in the expected format\n >>> roi_boxes = random_boxes(n_roi, scale=scale, rng=rng)\n >>> img_ids = torch.randint(0, n_img, (n_roi,))\n >>> img_ids = img_ids.float()\n >>> rois = torch.cat([img_ids[:, None], roi_boxes], dim=1)\n >>> # Create other args\n >>> labels = torch.randint(0, 2, (n_roi,)).long()\n >>> bbox_preds = random_boxes(n_roi, scale=scale, rng=rng)\n >>> # For each image, pretend random positive boxes are gts\n >>> is_label_pos = (labels.numpy() > 0).astype(np.int)\n >>> lbl_per_img = kwarray.group_items(is_label_pos,\n ... img_ids.numpy())\n >>> pos_per_img = [sum(lbl_per_img.get(gid, []))\n ... for gid in range(n_img)]\n >>> pos_is_gts = [\n >>> torch.randint(0, 2, (npos,)).byte().sort(\n >>> descending=True)[0]\n >>> for npos in pos_per_img\n >>> ]\n >>> bboxes_list = self.refine_bboxes(rois, labels, bbox_preds,\n >>> pos_is_gts, img_metas)\n >>> print(bboxes_list)\n " img_ids = rois[(:, 0)].long().unique(sorted=True) assert (img_ids.numel() <= len(img_metas)) bboxes_list = [] for i in range(len(img_metas)): inds = torch.nonzero((rois[(:, 0)] == i), as_tuple=False).squeeze(dim=1) num_rois = inds.numel() bboxes_ = rois[(inds, 1:)] label_ = labels[inds] bbox_pred_ = bbox_preds[inds] img_meta_ = img_metas[i] pos_is_gts_ = pos_is_gts[i] bboxes = self.regress_by_class(bboxes_, label_, bbox_pred_, img_meta_) pos_keep = (1 - pos_is_gts_) keep_inds = pos_is_gts_.new_ones(num_rois) keep_inds[:len(pos_is_gts_)] = pos_keep bboxes_list.append(bboxes[keep_inds.type(torch.bool)]) return bboxes_list
@force_fp32(apply_to=('bbox_preds',)) def refine_bboxes(self, rois, labels, bbox_preds, pos_is_gts, img_metas): "Refine bboxes during training.\n\n Args:\n rois (Tensor): Shape (n*bs, 5), where n is image number per GPU,\n and bs is the sampled RoIs per image. The first column is\n the image id and the next 4 columns are x1, y1, x2, y2.\n labels (Tensor): Shape (n*bs, ).\n bbox_preds (Tensor): Shape (n*bs, 4) or (n*bs, 4*#class).\n pos_is_gts (list[Tensor]): Flags indicating if each positive bbox\n is a gt bbox.\n img_metas (list[dict]): Meta info of each image.\n\n Returns:\n list[Tensor]: Refined bboxes of each image in a mini-batch.\n\n Example:\n >>> # xdoctest: +REQUIRES(module:kwarray)\n >>> import kwarray\n >>> import numpy as np\n >>> from mmdet.core.bbox.demodata import random_boxes\n >>> self = BBoxHead(reg_class_agnostic=True)\n >>> n_roi = 2\n >>> n_img = 4\n >>> scale = 512\n >>> rng = np.random.RandomState(0)\n >>> img_metas = [{'img_shape': (scale, scale)}\n ... for _ in range(n_img)]\n >>> # Create rois in the expected format\n >>> roi_boxes = random_boxes(n_roi, scale=scale, rng=rng)\n >>> img_ids = torch.randint(0, n_img, (n_roi,))\n >>> img_ids = img_ids.float()\n >>> rois = torch.cat([img_ids[:, None], roi_boxes], dim=1)\n >>> # Create other args\n >>> labels = torch.randint(0, 2, (n_roi,)).long()\n >>> bbox_preds = random_boxes(n_roi, scale=scale, rng=rng)\n >>> # For each image, pretend random positive boxes are gts\n >>> is_label_pos = (labels.numpy() > 0).astype(np.int)\n >>> lbl_per_img = kwarray.group_items(is_label_pos,\n ... img_ids.numpy())\n >>> pos_per_img = [sum(lbl_per_img.get(gid, []))\n ... for gid in range(n_img)]\n >>> pos_is_gts = [\n >>> torch.randint(0, 2, (npos,)).byte().sort(\n >>> descending=True)[0]\n >>> for npos in pos_per_img\n >>> ]\n >>> bboxes_list = self.refine_bboxes(rois, labels, bbox_preds,\n >>> pos_is_gts, img_metas)\n >>> print(bboxes_list)\n " img_ids = rois[(:, 0)].long().unique(sorted=True) assert (img_ids.numel() <= len(img_metas)) bboxes_list = [] for i in range(len(img_metas)): inds = torch.nonzero((rois[(:, 0)] == i), as_tuple=False).squeeze(dim=1) num_rois = inds.numel() bboxes_ = rois[(inds, 1:)] label_ = labels[inds] bbox_pred_ = bbox_preds[inds] img_meta_ = img_metas[i] pos_is_gts_ = pos_is_gts[i] bboxes = self.regress_by_class(bboxes_, label_, bbox_pred_, img_meta_) pos_keep = (1 - pos_is_gts_) keep_inds = pos_is_gts_.new_ones(num_rois) keep_inds[:len(pos_is_gts_)] = pos_keep bboxes_list.append(bboxes[keep_inds.type(torch.bool)]) return bboxes_list<|docstring|>Refine bboxes during training. Args: rois (Tensor): Shape (n*bs, 5), where n is image number per GPU, and bs is the sampled RoIs per image. The first column is the image id and the next 4 columns are x1, y1, x2, y2. labels (Tensor): Shape (n*bs, ). bbox_preds (Tensor): Shape (n*bs, 4) or (n*bs, 4*#class). pos_is_gts (list[Tensor]): Flags indicating if each positive bbox is a gt bbox. img_metas (list[dict]): Meta info of each image. Returns: list[Tensor]: Refined bboxes of each image in a mini-batch. Example: >>> # xdoctest: +REQUIRES(module:kwarray) >>> import kwarray >>> import numpy as np >>> from mmdet.core.bbox.demodata import random_boxes >>> self = BBoxHead(reg_class_agnostic=True) >>> n_roi = 2 >>> n_img = 4 >>> scale = 512 >>> rng = np.random.RandomState(0) >>> img_metas = [{'img_shape': (scale, scale)} ... for _ in range(n_img)] >>> # Create rois in the expected format >>> roi_boxes = random_boxes(n_roi, scale=scale, rng=rng) >>> img_ids = torch.randint(0, n_img, (n_roi,)) >>> img_ids = img_ids.float() >>> rois = torch.cat([img_ids[:, None], roi_boxes], dim=1) >>> # Create other args >>> labels = torch.randint(0, 2, (n_roi,)).long() >>> bbox_preds = random_boxes(n_roi, scale=scale, rng=rng) >>> # For each image, pretend random positive boxes are gts >>> is_label_pos = (labels.numpy() > 0).astype(np.int) >>> lbl_per_img = kwarray.group_items(is_label_pos, ... img_ids.numpy()) >>> pos_per_img = [sum(lbl_per_img.get(gid, [])) ... for gid in range(n_img)] >>> pos_is_gts = [ >>> torch.randint(0, 2, (npos,)).byte().sort( >>> descending=True)[0] >>> for npos in pos_per_img >>> ] >>> bboxes_list = self.refine_bboxes(rois, labels, bbox_preds, >>> pos_is_gts, img_metas) >>> print(bboxes_list)<|endoftext|>
72af6f5947d4b3dae37d6bc79019108c8e7dce90c07eecd466487be88e4ec791
@force_fp32(apply_to=('bbox_pred',)) def regress_by_class(self, rois, label, bbox_pred, img_meta): 'Regress the bbox for the predicted class. Used in Cascade R-CNN.\n\n Args:\n rois (Tensor): shape (n, 4) or (n, 5)\n label (Tensor): shape (n, )\n bbox_pred (Tensor): shape (n, 4*(#class)) or (n, 4)\n img_meta (dict): Image meta info.\n\n Returns:\n Tensor: Regressed bboxes, the same shape as input rois.\n ' assert ((rois.size(1) == 4) or (rois.size(1) == 5)), repr(rois.shape) if (not self.reg_class_agnostic): label = (label * 4) inds = torch.stack((label, (label + 1), (label + 2), (label + 3)), 1) bbox_pred = torch.gather(bbox_pred, 1, inds) assert (bbox_pred.size(1) == 4) if (rois.size(1) == 4): new_rois = self.bbox_coder.decode(rois, bbox_pred, max_shape=img_meta['img_shape']) else: bboxes = self.bbox_coder.decode(rois[(:, 1:)], bbox_pred, max_shape=img_meta['img_shape']) new_rois = torch.cat((rois[(:, [0])], bboxes), dim=1) return new_rois
Regress the bbox for the predicted class. Used in Cascade R-CNN. Args: rois (Tensor): shape (n, 4) or (n, 5) label (Tensor): shape (n, ) bbox_pred (Tensor): shape (n, 4*(#class)) or (n, 4) img_meta (dict): Image meta info. Returns: Tensor: Regressed bboxes, the same shape as input rois.
mmdet/models/roi_heads/bbox_heads/bbox_head_cb.py
regress_by_class
zhenyuw16/combatnoise
1
python
@force_fp32(apply_to=('bbox_pred',)) def regress_by_class(self, rois, label, bbox_pred, img_meta): 'Regress the bbox for the predicted class. Used in Cascade R-CNN.\n\n Args:\n rois (Tensor): shape (n, 4) or (n, 5)\n label (Tensor): shape (n, )\n bbox_pred (Tensor): shape (n, 4*(#class)) or (n, 4)\n img_meta (dict): Image meta info.\n\n Returns:\n Tensor: Regressed bboxes, the same shape as input rois.\n ' assert ((rois.size(1) == 4) or (rois.size(1) == 5)), repr(rois.shape) if (not self.reg_class_agnostic): label = (label * 4) inds = torch.stack((label, (label + 1), (label + 2), (label + 3)), 1) bbox_pred = torch.gather(bbox_pred, 1, inds) assert (bbox_pred.size(1) == 4) if (rois.size(1) == 4): new_rois = self.bbox_coder.decode(rois, bbox_pred, max_shape=img_meta['img_shape']) else: bboxes = self.bbox_coder.decode(rois[(:, 1:)], bbox_pred, max_shape=img_meta['img_shape']) new_rois = torch.cat((rois[(:, [0])], bboxes), dim=1) return new_rois
@force_fp32(apply_to=('bbox_pred',)) def regress_by_class(self, rois, label, bbox_pred, img_meta): 'Regress the bbox for the predicted class. Used in Cascade R-CNN.\n\n Args:\n rois (Tensor): shape (n, 4) or (n, 5)\n label (Tensor): shape (n, )\n bbox_pred (Tensor): shape (n, 4*(#class)) or (n, 4)\n img_meta (dict): Image meta info.\n\n Returns:\n Tensor: Regressed bboxes, the same shape as input rois.\n ' assert ((rois.size(1) == 4) or (rois.size(1) == 5)), repr(rois.shape) if (not self.reg_class_agnostic): label = (label * 4) inds = torch.stack((label, (label + 1), (label + 2), (label + 3)), 1) bbox_pred = torch.gather(bbox_pred, 1, inds) assert (bbox_pred.size(1) == 4) if (rois.size(1) == 4): new_rois = self.bbox_coder.decode(rois, bbox_pred, max_shape=img_meta['img_shape']) else: bboxes = self.bbox_coder.decode(rois[(:, 1:)], bbox_pred, max_shape=img_meta['img_shape']) new_rois = torch.cat((rois[(:, [0])], bboxes), dim=1) return new_rois<|docstring|>Regress the bbox for the predicted class. Used in Cascade R-CNN. Args: rois (Tensor): shape (n, 4) or (n, 5) label (Tensor): shape (n, ) bbox_pred (Tensor): shape (n, 4*(#class)) or (n, 4) img_meta (dict): Image meta info. Returns: Tensor: Regressed bboxes, the same shape as input rois.<|endoftext|>
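A hedged aside on the regress_by_class record above: the short sketch below (shapes and values are invented for illustration) shows how the label * 4 indexing picks out the four regression deltas belonging to each RoI's predicted class when reg_class_agnostic is False.
import torch
num_classes, n_roi = 3, 2                                  # hypothetical sizes
bbox_pred = torch.arange(n_roi * 4 * num_classes, dtype=torch.float32).view(n_roi, 4 * num_classes)
label = torch.tensor([2, 0])                               # predicted class index per RoI
base = label * 4                                           # first of the four columns for that class
inds = torch.stack((base, base + 1, base + 2, base + 3), dim=1)
per_class_deltas = torch.gather(bbox_pred, 1, inds)        # shape (n_roi, 4), what the bbox coder decodes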
c5499e8b358d9f99b92a7ad888d633e01e38f397e4035e261992380b47f8612a
def parse(self): 'Generator of testsuites parsed from the given YAML file.' suite = None with open(self.filepath) as f: for (idx, raw_yaml) in enumerate(yaml.safe_load_all(f.read())): try: suite = self._merge(suite, raw_yaml) self._validate(suite) (yield dict(suite)) except SchemaError as e: if (idx == 0): raise e raise SchemaError(('failed to parse %s testsuite' % common.ordinal((idx + 1)))) from e
Generator of testsuites parsed from the given YAML file.
utils/parser.py
parse
miabbott/redhat-ci
0
python
def parse(self): suite = None with open(self.filepath) as f: for (idx, raw_yaml) in enumerate(yaml.safe_load_all(f.read())): try: suite = self._merge(suite, raw_yaml) self._validate(suite) (yield dict(suite)) except SchemaError as e: if (idx == 0): raise e raise SchemaError(('failed to parse %s testsuite' % common.ordinal((idx + 1)))) from e
def parse(self): suite = None with open(self.filepath) as f: for (idx, raw_yaml) in enumerate(yaml.safe_load_all(f.read())): try: suite = self._merge(suite, raw_yaml) self._validate(suite) (yield dict(suite)) except SchemaError as e: if (idx == 0): raise e raise SchemaError(('failed to parse %s testsuite' % common.ordinal((idx + 1)))) from e<|docstring|>Generator of testsuites parsed from the given YAML file.<|endoftext|>
8fe91e89a8d63621a1673538fa38d42abc71c58bc4e9f4803eb94caa30cd1cc4
def _merge(self, suite, new): 'Merge the next document into the current one.' if (type(new) is not dict): raise SyntaxError('top-level type should be a dict') if (suite is None): if ('context' not in new): new['context'] = 'Red Hat CI' if (('inherit' in new) and (type(new['inherit']) is not bool)): raise SyntaxError("expected 'bool' value for 'inherit' key") if ((suite is None) or (not new.get('inherit', False))): return self._normalize(new.copy()) assert (type(suite) is dict) envtypes = ['container', 'host', 'cluster'] if any([(i in new) for i in envtypes]): for i in envtypes: if (i in suite): del suite[i] del suite['context'] suite.update(new) return self._normalize(suite)
Merge the next document into the current one.
utils/parser.py
_merge
miabbott/redhat-ci
0
python
def _merge(self, suite, new): if (type(new) is not dict): raise SyntaxError('top-level type should be a dict') if (suite is None): if ('context' not in new): new['context'] = 'Red Hat CI' if (('inherit' in new) and (type(new['inherit']) is not bool)): raise SyntaxError("expected 'bool' value for 'inherit' key") if ((suite is None) or (not new.get('inherit', False))): return self._normalize(new.copy()) assert (type(suite) is dict) envtypes = ['container', 'host', 'cluster'] if any([(i in new) for i in envtypes]): for i in envtypes: if (i in suite): del suite[i] del suite['context'] suite.update(new) return self._normalize(suite)
def _merge(self, suite, new): if (type(new) is not dict): raise SyntaxError('top-level type should be a dict') if (suite is None): if ('context' not in new): new['context'] = 'Red Hat CI' if (('inherit' in new) and (type(new['inherit']) is not bool)): raise SyntaxError("expected 'bool' value for 'inherit' key") if ((suite is None) or (not new.get('inherit', False))): return self._normalize(new.copy()) assert (type(suite) is dict) envtypes = ['container', 'host', 'cluster'] if any([(i in new) for i in envtypes]): for i in envtypes: if (i in suite): del suite[i] del suite['context'] suite.update(new) return self._normalize(suite)<|docstring|>Merge the next document into the current one.<|endoftext|>
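As a hedged illustration of the parse and _merge records above, the snippet below runs a small multi-document YAML string through yaml.safe_load_all the same way parse does; every key other than inherit and context is invented and may not match the real testsuite schema.
import yaml
raw = "context: first suite\nhost:\n  distro: fedora\n---\ninherit: true\ncontext: second suite\n"
docs = list(yaml.safe_load_all(raw))
# docs[0] -> {'context': 'first suite', 'host': {'distro': 'fedora'}}
# docs[1] -> {'inherit': True, 'context': 'second suite'}
# Because the second document sets inherit: true and declares no new
# container/host/cluster key, _merge would carry the first document's 'host'
# block over into the second suite before normalization and validation.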
e784bc48d8bb23009bed0ed654e035f625def350007408dd1780728039c8f4ed
def _find_s3_output_path(self): 'Looks in SageMaker hyperparameters for the S3 output path.\n Uses SM module directory to extract the output path.\n Returns:\n tuple (bucket, prefix)\n ' module_dir_s3_path = self._required_environment_param('module_dir') if (not module_dir_s3_path.startswith('s3://')): raise ValueError('Unexpected format for module_dir_s3_path. Expected "s3://...') bucket_prefix = module_dir_s3_path.replace('s3://', '') (bucket, key) = bucket_prefix.split('/', 1) prefix = '/'.join(key.split('/')[:(- 2)]) if (prefix == ''): prefix = self._required_environment_param('job_name') return (bucket, prefix)
Looks in SageMaker hyperparameters for the S3 output path. Uses SM module directory to extract the output path. Returns: tuple (bucket, prefix)
reinforcement_learning/rl_tic_tac_toe_coach_customEnv/common/sagemaker_rl/sage_cluster_communicator.py
_find_s3_output_path
Amirosimani/amazon-sagemaker-examples
2,610
python
def _find_s3_output_path(self): 'Looks in SageMaker hyperparameters for the S3 output path.\n Uses SM module directory to extract the output path.\n Returns:\n tuple (bucket, prefix)\n ' module_dir_s3_path = self._required_environment_param('module_dir') if (not module_dir_s3_path.startswith('s3://')): raise ValueError('Unexpected format for module_dir_s3_path. Expected "s3://...') bucket_prefix = module_dir_s3_path.replace('s3://', '') (bucket, key) = bucket_prefix.split('/', 1) prefix = '/'.join(key.split('/')[:(- 2)]) if (prefix == ''): prefix = self._required_environment_param('job_name') return (bucket, prefix)
def _find_s3_output_path(self): 'Looks in SageMaker hyperparameters for the S3 output path.\n Uses SM module directory to extract the output path.\n Returns:\n tuple (bucket, prefix)\n ' module_dir_s3_path = self._required_environment_param('module_dir') if (not module_dir_s3_path.startswith('s3://')): raise ValueError('Unexpected format for module_dir_s3_path. Expected "s3://...') bucket_prefix = module_dir_s3_path.replace('s3://', '') (bucket, key) = bucket_prefix.split('/', 1) prefix = '/'.join(key.split('/')[:(- 2)]) if (prefix == ''): prefix = self._required_environment_param('job_name') return (bucket, prefix)<|docstring|>Looks in SageMaker hyperparameters for the S3 output path. Uses SM module directory to extract the output path. Returns: tuple (bucket, prefix)<|endoftext|>
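A hedged walk-through of the string handling in _find_s3_output_path, using a made-up module_dir value; only the s3:// prefix handling and the split/join logic come from the record above.
module_dir_s3_path = 's3://my-bucket/my-training-job/source/sourcedir.tar.gz'   # hypothetical value
bucket_prefix = module_dir_s3_path.replace('s3://', '')
bucket, key = bucket_prefix.split('/', 1)       # 'my-bucket', 'my-training-job/source/sourcedir.tar.gz'
prefix = '/'.join(key.split('/')[:-2])          # 'my-training-job'
# If prefix came out empty, the method falls back to the SageMaker job name.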
8be955027496b3b99e4d0fe28fe4af09686750ff0a7edbd43ff2b527f73a7544
def train_step(self, dataloader) -> dict: 'One-step training on the input dataloader.\n\n Parameters\n ----------\n dataloader : DataLoader\n the training dataloader\n\n Returns\n -------\n dict\n the output logs, including `loss` and `val_accuracy`, etc.\n ' loss_fn = self.loss model = self.model self.reset_metrics() model.train() att_reg = self.cfg.get('att_reg', 0.07) for (epoch, batch) in enumerate(dataloader): self.callbacks.on_train_batch_begin(epoch) (x, y, out_index) = self.unravel_batch(batch) x = self.to_device(x) y = self.to_device(y) if (not isinstance(x, tuple)): x = (x,) out = model(*x) if (out_index is not None): out = out[out_index] loss = (loss_fn(out, y) + (att_reg * torch.sum(model.attention.view((- 1)).square()))) loss.backward(loss) for metric in self.metrics: metric.update_state(y.cpu(), out.detach().cpu()) self.callbacks.on_train_batch_end(epoch) metrics = [metric.result() for metric in self.metrics] results = ([loss.cpu().item()] + metrics) return dict(zip(self.metrics_names, results))
One-step training on the input dataloader. Parameters ---------- dataloader : DataLoader the training dataloader Returns ------- dict the output logs, including `loss` and `val_accuracy`, etc.
graphgallery/gallery/nodeclas/pytorch/node2grids.py
train_step
houchengbin/GraphGallery
1
python
def train_step(self, dataloader) -> dict: 'One-step training on the input dataloader.\n\n Parameters\n ----------\n dataloader : DataLoader\n the training dataloader\n\n Returns\n -------\n dict\n the output logs, including `loss` and `val_accuracy`, etc.\n ' loss_fn = self.loss model = self.model self.reset_metrics() model.train() att_reg = self.cfg.get('att_reg', 0.07) for (epoch, batch) in enumerate(dataloader): self.callbacks.on_train_batch_begin(epoch) (x, y, out_index) = self.unravel_batch(batch) x = self.to_device(x) y = self.to_device(y) if (not isinstance(x, tuple)): x = (x,) out = model(*x) if (out_index is not None): out = out[out_index] loss = (loss_fn(out, y) + (att_reg * torch.sum(model.attention.view((- 1)).square()))) loss.backward(loss) for metric in self.metrics: metric.update_state(y.cpu(), out.detach().cpu()) self.callbacks.on_train_batch_end(epoch) metrics = [metric.result() for metric in self.metrics] results = ([loss.cpu().item()] + metrics) return dict(zip(self.metrics_names, results))
def train_step(self, dataloader) -> dict: 'One-step training on the input dataloader.\n\n Parameters\n ----------\n dataloader : DataLoader\n the training dataloader\n\n Returns\n -------\n dict\n the output logs, including `loss` and `val_accuracy`, etc.\n ' loss_fn = self.loss model = self.model self.reset_metrics() model.train() att_reg = self.cfg.get('att_reg', 0.07) for (epoch, batch) in enumerate(dataloader): self.callbacks.on_train_batch_begin(epoch) (x, y, out_index) = self.unravel_batch(batch) x = self.to_device(x) y = self.to_device(y) if (not isinstance(x, tuple)): x = (x,) out = model(*x) if (out_index is not None): out = out[out_index] loss = (loss_fn(out, y) + (att_reg * torch.sum(model.attention.view((- 1)).square()))) loss.backward(loss) for metric in self.metrics: metric.update_state(y.cpu(), out.detach().cpu()) self.callbacks.on_train_batch_end(epoch) metrics = [metric.result() for metric in self.metrics] results = ([loss.cpu().item()] + metrics) return dict(zip(self.metrics_names, results))<|docstring|>One-step training on the input dataloader. Parameters ---------- dataloader : DataLoader the training dataloader Returns ------- dict the output logs, including `loss` and `val_accuracy`, etc.<|endoftext|>
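A hedged, self-contained sketch of the extra term train_step adds to the task loss above: an L2 penalty on an attention parameter scaled by att_reg; the parameter name and shape are stand-ins for model.attention.
import torch
att_reg = 0.07                                   # the default read from cfg above
attention = torch.randn(8, requires_grad=True)   # stands in for model.attention
task_loss = torch.tensor(1.5, requires_grad=True)
loss = task_loss + att_reg * torch.sum(attention.view(-1).square())
loss.backward()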
042d55fbcd6233f9e94118dd37b04a43bba63312a710a3f4c9938882d2caef96
def topological_sort(graph): '\n Return list of vertices in (reverse) topologically sorted order\n ' result = [] explored = set() dfs_stack = [(v, 0) for v in graph.vertices] while dfs_stack: (v, i) = dfs_stack.pop() if ((v, i) in explored): continue explored.add((v, i)) if (i == 0): dfs_stack.extend(([(v, 1)] + [(u, 0) for u in graph.adjacencies(v)])) elif (i == 1): result.append(v) return result
Return list of vertices in (reverse) topologically sorted order
QueryScripts/QueryAscending.py
topological_sort
marciogameiro/three-node-hysteresis
0
python
def topological_sort(graph): '\n \n ' result = [] explored = set() dfs_stack = [(v, 0) for v in graph.vertices] while dfs_stack: (v, i) = dfs_stack.pop() if ((v, i) in explored): continue explored.add((v, i)) if (i == 0): dfs_stack.extend(([(v, 1)] + [(u, 0) for u in graph.adjacencies(v)])) elif (i == 1): result.append(v) return result
def topological_sort(graph): '\n \n ' result = [] explored = set() dfs_stack = [(v, 0) for v in graph.vertices] while dfs_stack: (v, i) = dfs_stack.pop() if ((v, i) in explored): continue explored.add((v, i)) if (i == 0): dfs_stack.extend(([(v, 1)] + [(u, 0) for u in graph.adjacencies(v)])) elif (i == 1): result.append(v) return result<|docstring|>Return list of vertices in (reverse) topologically sorted order<|endoftext|>
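A hedged usage sketch for the topological_sort record above: a tiny stand-in graph object exposing the vertices attribute and adjacencies method the function expects.
class TinyGraph:
    def __init__(self, adj):
        self.vertices = list(adj)
        self._adj = adj
    def adjacencies(self, v):
        return self._adj[v]
g = TinyGraph({'a': ['b', 'c'], 'b': ['c'], 'c': []})
print(topological_sort(g))   # ['c', 'b', 'a']: reverse topological order for this DAG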
f04737768f5c98549b685abde7b82e8768f374714e04e047323e020697d2a6e8
def count_paths(graph, source=None, target=None, allowed=None): '\n returns card{ (u,v) : source(u) & target(v) & there is an allowed path from u to v}\n ' if (source == None): source = (lambda v: True) if (target == None): target = (lambda v: True) if (allowed == None): allowed = (lambda x: True) ts = topological_sort(graph) unit_paths = {} paths = {} result_unit = 0 result = 0 for v in ts: if (not allowed(v)): continue unit_paths[v] = sum([1 for u in graph.adjacencies(v) if (target(u) and allowed(u))]) paths[v] = sum([(paths[u] + unit_paths[u]) for u in graph.adjacencies(v) if allowed(u)]) if source(v): result_unit += unit_paths[v] result += paths[v] return result
returns card{ (u,v) : source(u) & target(v) & there is an allowed path from u to v}
QueryScripts/QueryAscending.py
count_paths
marciogameiro/three-node-hysteresis
0
python
def count_paths(graph, source=None, target=None, allowed=None): '\n \n ' if (source == None): source = (lambda v: True) if (target == None): target = (lambda v: True) if (allowed == None): allowed = (lambda x: True) ts = topological_sort(graph) unit_paths = {} paths = {} result_unit = 0 result = 0 for v in ts: if (not allowed(v)): continue unit_paths[v] = sum([1 for u in graph.adjacencies(v) if (target(u) and allowed(u))]) paths[v] = sum([(paths[u] + unit_paths[u]) for u in graph.adjacencies(v) if allowed(u)]) if source(v): result_unit += unit_paths[v] result += paths[v] return result
def count_paths(graph, source=None, target=None, allowed=None): '\n \n ' if (source == None): source = (lambda v: True) if (target == None): target = (lambda v: True) if (allowed == None): allowed = (lambda x: True) ts = topological_sort(graph) unit_paths = {} paths = {} result_unit = 0 result = 0 for v in ts: if (not allowed(v)): continue unit_paths[v] = sum([1 for u in graph.adjacencies(v) if (target(u) and allowed(u))]) paths[v] = sum([(paths[u] + unit_paths[u]) for u in graph.adjacencies(v) if allowed(u)]) if source(v): result_unit += unit_paths[v] result += paths[v] return result<|docstring|>returns card{ (u,v) : source(u) & target(v) & there is an allowed path from u to v}<|endoftext|>
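Continuing the hedged TinyGraph sketch above: tracing count_paths on the same three-vertex DAG with the default predicates, exactly as the code above computes it.
# Reverse topological order is ['c', 'b', 'a'].
# unit_paths: c -> 0 direct successors, b -> 1 (just c), a -> 2 (b and c)
# paths: c -> 0, b -> 0, a -> (paths[b] + unit_paths[b]) + (paths[c] + unit_paths[c]) = 1
# The returned value sums paths[v] over source vertices, so the call below
# evaluates to 1 (the single a -> b -> c path through an intermediate vertex);
# the locally computed result_unit (3 direct edges) is not part of the return.
print(count_paths(g))   # 1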
b21c0c1a52c0c18b2f4740ac9bb1a6e2fc380b7e8d3fdebe9ff1011b57432796
def _get_import_parent_path(mod) -> str: 'Get the parent directory of the given module' if (not hasattr(mod, '__file__')): return _std_lib_dir file_path = mod.__file__ if (os.path.splitext(os.path.basename(mod.__file__))[0] == '__init__'): file_path = os.path.dirname(file_path) parent_path = os.path.dirname(file_path) return parent_path
Get the parent directory of the given module
import_tracker/__main__.py
_get_import_parent_path
IBM/import-tracker
0
python
def _get_import_parent_path(mod) -> str: if (not hasattr(mod, '__file__')): return _std_lib_dir file_path = mod.__file__ if (os.path.splitext(os.path.basename(mod.__file__))[0] == '__init__'): file_path = os.path.dirname(file_path) parent_path = os.path.dirname(file_path) return parent_path
def _get_import_parent_path(mod) -> str: if (not hasattr(mod, '__file__')): return _std_lib_dir file_path = mod.__file__ if (os.path.splitext(os.path.basename(mod.__file__))[0] == '__init__'): file_path = os.path.dirname(file_path) parent_path = os.path.dirname(file_path) return parent_path<|docstring|>Get the parent directory of the given module<|endoftext|>
78710ae09be4f4fbbdc532cb0f9896e246c975e5ab8c393378d2bc6117221e50
def _get_non_std_modules(mod_names: Union[(Set[str], Dict[(str, List[dict])])]) -> Set[str]: 'Take a snapshot of the non-standard modules currently imported' non_std_mods = {mod_name.split('.')[0] for (mod_name, mod) in sys.modules.items() if ((mod_name in mod_names) and (not mod_name.startswith('_')) and ('.' not in mod_name) and (_get_import_parent_path(mod) not in [_std_lib_dir, _std_dylib_dir]) and (mod_name.split('.')[0] != THIS_PACKAGE))} if isinstance(mod_names, set): return non_std_mods return {mod_name: mod_vals for (mod_name, mod_vals) in mod_names.items() if (mod_name in non_std_mods)}
Take a snapshot of the non-standard modules currently imported
import_tracker/__main__.py
_get_non_std_modules
IBM/import-tracker
0
python
def _get_non_std_modules(mod_names: Union[(Set[str], Dict[(str, List[dict])])]) -> Set[str]: non_std_mods = {mod_name.split('.')[0] for (mod_name, mod) in sys.modules.items() if ((mod_name in mod_names) and (not mod_name.startswith('_')) and ('.' not in mod_name) and (_get_import_parent_path(mod) not in [_std_lib_dir, _std_dylib_dir]) and (mod_name.split('.')[0] != THIS_PACKAGE))} if isinstance(mod_names, set): return non_std_mods return {mod_name: mod_vals for (mod_name, mod_vals) in mod_names.items() if (mod_name in non_std_mods)}
def _get_non_std_modules(mod_names: Union[(Set[str], Dict[(str, List[dict])])]) -> Set[str]: non_std_mods = {mod_name.split('.')[0] for (mod_name, mod) in sys.modules.items() if ((mod_name in mod_names) and (not mod_name.startswith('_')) and ('.' not in mod_name) and (_get_import_parent_path(mod) not in [_std_lib_dir, _std_dylib_dir]) and (mod_name.split('.')[0] != THIS_PACKAGE))} if isinstance(mod_names, set): return non_std_mods return {mod_name: mod_vals for (mod_name, mod_vals) in mod_names.items() if (mod_name in non_std_mods)}<|docstring|>Take a snapshot of the non-standard modules currently imported<|endoftext|>
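A hedged, standalone version of the filter in _get_non_std_modules above: keep only top-level, already-imported modules whose parent directory is not the standard-library location. Deriving the stdlib path via sysconfig is an assumption; the real _std_lib_dir and _std_dylib_dir may be computed differently, and the helper above also special-cases packages via _get_import_parent_path.
import os
import sys
import sysconfig
std_lib_dir = sysconfig.get_paths()['stdlib']
non_std = {
    name
    for name, mod in sys.modules.items()
    if '.' not in name
    and not name.startswith('_')
    and getattr(mod, '__file__', None)
    and os.path.dirname(mod.__file__) != std_lib_dir
}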
5acc011e50ac7a49434f0577908ec93bee4c8282c306a999b8d9e9c29df93c6f
def main(): 'Main entrypoint as a function' parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--name', '-n', help='Module name to import', required=True) parser.add_argument('--package', '-p', help='Package for relative imports', default=None) parser.add_argument('--indent', '-i', type=int, help='Indent for json printing', default=None) parser.add_argument('--log_level', '-l', help='Log level', default=os.environ.get('LOG_LEVEL', 'error')) parser.add_argument('--recursive', '-r', action='store_true', help='Recursively perform tracking on all nested modules', default=False) parser.add_argument('--submodules', '-u', nargs='*', default=None, help='List of sub-modules to recurse on (only used when --recursive set)') parser.add_argument('--num_jobs', '-j', type=int, help='Number of workers to spawn when recursing', default=0) parser.add_argument('--side_effect_modules', '-s', nargs='*', default=None, help='Modules with known import-time side effect which should always be allowed to import') parser.add_argument('--track_import_stack', '-t', action='store_true', default=False, help='Store the stack trace of imports belonging to the tracked module') args = parser.parse_args() if (args.submodules and (not args.recursive)): raise ValueError('Ignoring --submodules without --recursive') enable_tracking_mode() log_level = getattr(logging, args.log_level.upper(), None) if (log_level is None): log_level = int(args.log_level) logging.basicConfig(level=log_level) full_module_name = args.name if (args.package is not None): assert args.name.startswith('.'), "When providing --package, module name must be relative (start with '.')" full_module_name = f'{args.package}{args.name}' tracker_finder = ImportTrackerMetaFinder(tracked_module=full_module_name, side_effect_modules=args.side_effect_modules, track_import_stack=args.track_import_stack) sys.meta_path = ([tracker_finder] + sys.meta_path) log.debug('Importing %s', full_module_name) try: imported = importlib.import_module(full_module_name) except Exception as err: log.error('Error on top-level import [%s]: %s', full_module_name, err) raise downstream_mapping = {full_module_name: _get_non_std_modules(tracker_finder.get_all_new_modules())} if args.recursive: all_internals = [downstream for downstream in sys.modules.keys() if (downstream.startswith(full_module_name) and (downstream != full_module_name))] recursive_internals = all_internals if args.submodules: recursive_internals = [downstream for downstream in all_internals if (downstream in args.submodules)] log.debug('Recursing on: %s', recursive_internals) recursive_kwargs = dict(log_level=log_level, recursive=False, side_effect_modules=args.side_effect_modules, track_import_stack=args.track_import_stack) if (args.num_jobs > 0): pool = ThreadPoolExecutor(max_workers=args.num_jobs) futures = [] for internal_downstream in recursive_internals: futures.append(pool.submit(track_module, module_name=internal_downstream, **recursive_kwargs)) for future in futures: downstream_mapping.update(future.result()) else: for internal_downstream in recursive_internals: try: log.debug('Starting sub-module tracking for [%s]', internal_downstream) downstream_mapping.update(track_module(module_name=internal_downstream, **recursive_kwargs)) except Exception as err: log.error('Error while tracking submodule [%s]: %s', internal_downstream, err) raise log.debug('Downstream Mapping: %s', downstream_mapping) if args.track_import_stack: output_dict = {key: dict(sorted(val.items())) for (key, val) in downstream_mapping.items()} else: output_dict = {key: sorted(list(val)) for (key, val) in downstream_mapping.items()} print(json.dumps(output_dict, indent=args.indent))
Main entrypoint as a function
import_tracker/__main__.py
main
IBM/import-tracker
0
python
def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--name', '-n', help='Module name to import', required=True) parser.add_argument('--package', '-p', help='Package for relative imports', default=None) parser.add_argument('--indent', '-i', type=int, help='Indent for json printing', default=None) parser.add_argument('--log_level', '-l', help='Log level', default=os.environ.get('LOG_LEVEL', 'error')) parser.add_argument('--recursive', '-r', action='store_true', help='Recursively perform tracking on all nested modules', default=False) parser.add_argument('--submodules', '-u', nargs='*', default=None, help='List of sub-modules to recurse on (only used when --recursive set)') parser.add_argument('--num_jobs', '-j', type=int, help='Number of workers to spawn when recursing', default=0) parser.add_argument('--side_effect_modules', '-s', nargs='*', default=None, help='Modules with known import-time side effect which should always be allowed to import') parser.add_argument('--track_import_stack', '-t', action='store_true', default=False, help='Store the stack trace of imports belonging to the tracked module') args = parser.parse_args() if (args.submodules and (not args.recursive)): raise ValueError('Ignoring --submodules without --recursive') enable_tracking_mode() log_level = getattr(logging, args.log_level.upper(), None) if (log_level is None): log_level = int(args.log_level) logging.basicConfig(level=log_level) full_module_name = args.name if (args.package is not None): assert args.name.startswith('.'), "When providing --package, module name must be relative (start with '.')" full_module_name = f'{args.package}{args.name}' tracker_finder = ImportTrackerMetaFinder(tracked_module=full_module_name, side_effect_modules=args.side_effect_modules, track_import_stack=args.track_import_stack) sys.meta_path = ([tracker_finder] + sys.meta_path) log.debug('Importing %s', full_module_name) try: imported = importlib.import_module(full_module_name) except Exception as err: log.error('Error on top-level import [%s]: %s', full_module_name, err) raise downstream_mapping = {full_module_name: _get_non_std_modules(tracker_finder.get_all_new_modules())} if args.recursive: all_internals = [downstream for downstream in sys.modules.keys() if (downstream.startswith(full_module_name) and (downstream != full_module_name))] recursive_internals = all_internals if args.submodules: recursive_internals = [downstream for downstream in all_internals if (downstream in args.submodules)] log.debug('Recursing on: %s', recursive_internals) recursive_kwargs = dict(log_level=log_level, recursive=False, side_effect_modules=args.side_effect_modules, track_import_stack=args.track_import_stack) if (args.num_jobs > 0): pool = ThreadPoolExecutor(max_workers=args.num_jobs) futures = [] for internal_downstream in recursive_internals: futures.append(pool.submit(track_module, module_name=internal_downstream, **recursive_kwargs)) for future in futures: downstream_mapping.update(future.result()) else: for internal_downstream in recursive_internals: try: log.debug('Starting sub-module tracking for [%s]', internal_downstream) downstream_mapping.update(track_module(module_name=internal_downstream, **recursive_kwargs)) except Exception as err: log.error('Error while tracking submodule [%s]: %s', internal_downstream, err) raise log.debug('Downstream Mapping: %s', downstream_mapping) if args.track_import_stack: output_dict = {key: dict(sorted(val.items())) for (key, val) in downstream_mapping.items()} else: output_dict = {key: sorted(list(val)) for (key, val) in downstream_mapping.items()} print(json.dumps(output_dict, indent=args.indent))
def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--name', '-n', help='Module name to import', required=True) parser.add_argument('--package', '-p', help='Package for relative imports', default=None) parser.add_argument('--indent', '-i', type=int, help='Indent for json printing', default=None) parser.add_argument('--log_level', '-l', help='Log level', default=os.environ.get('LOG_LEVEL', 'error')) parser.add_argument('--recursive', '-r', action='store_true', help='Recursively perform tracking on all nested modules', default=False) parser.add_argument('--submodules', '-u', nargs='*', default=None, help='List of sub-modules to recurse on (only used when --recursive set)') parser.add_argument('--num_jobs', '-j', type=int, help='Number of workers to spawn when recursing', default=0) parser.add_argument('--side_effect_modules', '-s', nargs='*', default=None, help='Modules with known import-time side effect which should always be allowed to import') parser.add_argument('--track_import_stack', '-t', action='store_true', default=False, help='Store the stack trace of imports belonging to the tracked module') args = parser.parse_args() if (args.submodules and (not args.recursive)): raise ValueError('Ignoring --submodules without --recursive') enable_tracking_mode() log_level = getattr(logging, args.log_level.upper(), None) if (log_level is None): log_level = int(args.log_level) logging.basicConfig(level=log_level) full_module_name = args.name if (args.package is not None): assert args.name.startswith('.'), "When providing --package, module name must be relative (start with '.')" full_module_name = f'{args.package}{args.name}' tracker_finder = ImportTrackerMetaFinder(tracked_module=full_module_name, side_effect_modules=args.side_effect_modules, track_import_stack=args.track_import_stack) sys.meta_path = ([tracker_finder] + sys.meta_path) log.debug('Importing %s', full_module_name) try: imported = importlib.import_module(full_module_name) except Exception as err: log.error('Error on top-level import [%s]: %s', full_module_name, err) raise downstream_mapping = {full_module_name: _get_non_std_modules(tracker_finder.get_all_new_modules())} if args.recursive: all_internals = [downstream for downstream in sys.modules.keys() if (downstream.startswith(full_module_name) and (downstream != full_module_name))] recursive_internals = all_internals if args.submodules: recursive_internals = [downstream for downstream in all_internals if (downstream in args.submodules)] log.debug('Recursing on: %s', recursive_internals) recursive_kwargs = dict(log_level=log_level, recursive=False, side_effect_modules=args.side_effect_modules, track_import_stack=args.track_import_stack) if (args.num_jobs > 0): pool = ThreadPoolExecutor(max_workers=args.num_jobs) futures = [] for internal_downstream in recursive_internals: futures.append(pool.submit(track_module, module_name=internal_downstream, **recursive_kwargs)) for future in futures: downstream_mapping.update(future.result()) else: for internal_downstream in recursive_internals: try: log.debug('Starting sub-module tracking for [%s]', internal_downstream) downstream_mapping.update(track_module(module_name=internal_downstream, **recursive_kwargs)) except Exception as err: log.error('Error while tracking submodule [%s]: %s', internal_downstream, err) raise log.debug('Downstream Mapping: %s', downstream_mapping) if args.track_import_stack: output_dict = {key: dict(sorted(val.items())) for (key, val) in downstream_mapping.items()} else: output_dict = {key: sorted(list(val)) for (key, val) in downstream_mapping.items()} print(json.dumps(output_dict, indent=args.indent))<|docstring|>Main entrypoint as a function<|endoftext|>
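A hedged usage sketch for the entrypoint above, driven through subprocess; the package name is invented, and the flags are the ones main() registers.
import subprocess
subprocess.run(
    ['python', '-m', 'import_tracker', '--name', 'my_library', '--recursive', '--indent', '2'],
    check=True,
)
# Prints a JSON object mapping each tracked (sub)module to the sorted list of
# non-standard-library modules imported on its critical path.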
d197065e330940f94095a07a5b18568d1041a99611f97ef9b06dd4f6c230365a
def __init__(self, name: str): 'Hang onto the import args to use lazily' self.__name = name self.__wrapped_module = None
Hang onto the import args to use lazily
import_tracker/__main__.py
__init__
IBM/import-tracker
0
python
def __init__(self, name: str): self.__name = name self.__wrapped_module = None
def __init__(self, name: str): self.__name = name self.__wrapped_module = None<|docstring|>Hang onto the import args to use lazily<|endoftext|>
ee7866c69171433c1aed0afcaf3515bde0e771db7121fa9adeac3b6992002706
def __getattr__(self, name: str) -> any: 'When asked for an attribute, make sure the wrapped module is imported\n and then delegate\n ' if (self.__wrapped_module is None): if (name in ['__name__', '__loader__', '__package__', '__path__', '__file__', '__cached__']): log.debug4('Not triggering load of [%s] for getattr(%s)', self.name, name) return None log.debug1('Triggering lazy import for %s.%s', self.name, name) self.do_import() return getattr(self.__wrapped_module, name)
When asked for an attribute, make sure the wrapped module is imported and then delegate
import_tracker/__main__.py
__getattr__
IBM/import-tracker
0
python
def __getattr__(self, name: str) -> any: 'When asked for an attribute, make sure the wrapped module is imported\n and then delegate\n ' if (self.__wrapped_module is None): if (name in ['__name__', '__loader__', '__package__', '__path__', '__file__', '__cached__']): log.debug4('Not triggering load of [%s] for getattr(%s)', self.name, name) return None log.debug1('Triggering lazy import for %s.%s', self.name, name) self.do_import() return getattr(self.__wrapped_module, name)
def __getattr__(self, name: str) -> any: 'When asked for an attribute, make sure the wrapped module is imported\n and then delegate\n ' if (self.__wrapped_module is None): if (name in ['__name__', '__loader__', '__package__', '__path__', '__file__', '__cached__']): log.debug4('Not triggering load of [%s] for getattr(%s)', self.name, name) return None log.debug1('Triggering lazy import for %s.%s', self.name, name) self.do_import() return getattr(self.__wrapped_module, name)<|docstring|>When asked for an attribute, make sure the wrapped module is imported and then delegate<|endoftext|>
76b8c40abc260e9edc4d31c6b7cfa8d6fdd1e8cadaf6716bd4a06158c3b56a9f
@property def imported(self) -> bool: 'Return whether or not this module has actually imported' return (self.__wrapped_module is not None)
Return whether or not this module has actually imported
import_tracker/__main__.py
imported
IBM/import-tracker
0
python
@property def imported(self) -> bool: return (self.__wrapped_module is not None)
@property def imported(self) -> bool: return (self.__wrapped_module is not None)<|docstring|>Return whether or not this module has actually imported<|endoftext|>
b5a49a495c37aa4d576ca72082bca5e0ee5ec5814ac116ddcdb778b56ed6241d
@property def name(self) -> str: 'Expose the name of this module' return self.__name
Expose the name of this module
import_tracker/__main__.py
name
IBM/import-tracker
0
python
@property def name(self) -> str: return self.__name
@property def name(self) -> str: return self.__name<|docstring|>Expose the name of this module<|endoftext|>
b9e3d9699ec94771f60a577d4fa5f6739c11215f3f3dcd7cc0578eecc7125448
def do_import(self): 'Trigger the import' if (log.level <= logging.DEBUG4): for line in traceback.format_stack(): log.debug4(line.strip()) log.debug2('Clearing sys.modules of parents of [%s]', self.name) self_mod_name_parts = self.name.split('.') popped_mods = {} for i in range(1, (len(self_mod_name_parts) + 1)): pop_mod_name = '.'.join(self_mod_name_parts[:i]) sys_mod = sys.modules.get(pop_mod_name) if (isinstance(sys_mod, self.__class__) and (not sys_mod.imported)): log.debug2('Removing sys.modules[%s]', pop_mod_name) popped_mods[pop_mod_name] = sys.modules.pop(pop_mod_name) log.debug2('Performing deferred import of [%s]', self.name) self.__wrapped_module = importlib.import_module(self.name) log.debug2('Done with deferred import of [%s]', self.name) for (popped_mod_name, popped_mod) in popped_mods.items(): updated_mod = sys.modules.get(popped_mod_name) assert updated_mod, f'No re-imported version of [{popped_mod_name}] found' popped_mod.__dict__.update(updated_mod.__dict__)
Trigger the import
import_tracker/__main__.py
do_import
IBM/import-tracker
0
python
def do_import(self): if (log.level <= logging.DEBUG4): for line in traceback.format_stack(): log.debug4(line.strip()) log.debug2('Clearing sys.modules of parents of [%s]', self.name) self_mod_name_parts = self.name.split('.') popped_mods = {} for i in range(1, (len(self_mod_name_parts) + 1)): pop_mod_name = '.'.join(self_mod_name_parts[:i]) sys_mod = sys.modules.get(pop_mod_name) if (isinstance(sys_mod, self.__class__) and (not sys_mod.imported)): log.debug2('Removing sys.modules[%s]', pop_mod_name) popped_mods[pop_mod_name] = sys.modules.pop(pop_mod_name) log.debug2('Performing deferred import of [%s]', self.name) self.__wrapped_module = importlib.import_module(self.name) log.debug2('Done with deferred import of [%s]', self.name) for (popped_mod_name, popped_mod) in popped_mods.items(): updated_mod = sys.modules.get(popped_mod_name) assert updated_mod, f'No re-imported version of [{popped_mod_name}] found' popped_mod.__dict__.update(updated_mod.__dict__)
def do_import(self): if (log.level <= logging.DEBUG4): for line in traceback.format_stack(): log.debug4(line.strip()) log.debug2('Clearing sys.modules of parents of [%s]', self.name) self_mod_name_parts = self.name.split('.') popped_mods = {} for i in range(1, (len(self_mod_name_parts) + 1)): pop_mod_name = '.'.join(self_mod_name_parts[:i]) sys_mod = sys.modules.get(pop_mod_name) if (isinstance(sys_mod, self.__class__) and (not sys_mod.imported)): log.debug2('Removing sys.modules[%s]', pop_mod_name) popped_mods[pop_mod_name] = sys.modules.pop(pop_mod_name) log.debug2('Performing deferred import of [%s]', self.name) self.__wrapped_module = importlib.import_module(self.name) log.debug2('Done with deferred import of [%s]', self.name) for (popped_mod_name, popped_mod) in popped_mods.items(): updated_mod = sys.modules.get(popped_mod_name) assert updated_mod, f'No re-imported version of [{popped_mod_name}] found' popped_mod.__dict__.update(updated_mod.__dict__)<|docstring|>Trigger the import<|endoftext|>
6f97cd35e95fe40f2e572b6b855a77d385e5dadcdeff74cdb48d102420e8c2d6
def referenced_by(self, module_name: str) -> bool: 'Determine if this deferred module is referenced by the module with\n the given name\n ' assert (module_name in sys.modules), f'Programming error: Ref module not found {module_name}' ref_module = sys.modules[module_name] ref_module_pkg = module_name.split('.')[0] mods_to_check = [ref_module] checked_modules = [] while mods_to_check: next_mods_to_check = [] for mod in mods_to_check: for attr in vars(mod).values(): if (attr is self): return True next_mods_to_check.extend([attr for attr in vars(mod).values() if (isinstance(attr, ModuleType) and attr.__name__.startswith(ref_module_pkg) and (mod not in checked_modules))]) checked_modules.append(mod) mods_to_check = next_mods_to_check return False
Determine if this deferred module is referenced by the module with the given name
import_tracker/__main__.py
referenced_by
IBM/import-tracker
0
python
def referenced_by(self, module_name: str) -> bool: 'Determine if this deferred module is referenced by the module with\n the given name\n ' assert (module_name in sys.modules), f'Programming error: Ref module not found {module_name}' ref_module = sys.modules[module_name] ref_module_pkg = module_name.split('.')[0] mods_to_check = [ref_module] checked_modules = [] while mods_to_check: next_mods_to_check = [] for mod in mods_to_check: for attr in vars(mod).values(): if (attr is self): return True next_mods_to_check.extend([attr for attr in vars(mod).values() if (isinstance(attr, ModuleType) and attr.__name__.startswith(ref_module_pkg) and (mod not in checked_modules))]) checked_modules.append(mod) mods_to_check = next_mods_to_check return False
def referenced_by(self, module_name: str) -> bool: 'Determine if this deferred module is referenced by the module with\n the given name\n ' assert (module_name in sys.modules), f'Programming error: Ref module not found {module_name}' ref_module = sys.modules[module_name] ref_module_pkg = module_name.split('.')[0] mods_to_check = [ref_module] checked_modules = [] while mods_to_check: next_mods_to_check = [] for mod in mods_to_check: for attr in vars(mod).values(): if (attr is self): return True next_mods_to_check.extend([attr for attr in vars(mod).values() if (isinstance(attr, ModuleType) and attr.__name__.startswith(ref_module_pkg) and (mod not in checked_modules))]) checked_modules.append(mod) mods_to_check = next_mods_to_check return False<|docstring|>Determine if this deferred module is referenced by the module with the given name<|endoftext|>
62c72fc9e2ab97624021726e3e20eecb71566593d28317358bd21d4b7b7588c5
def exec_module(self, *_, **__): 'Nothing to do here because the errors will be thrown by the module\n created in create_module\n '
Nothing to do here because the errors will be thrown by the module created in create_module
import_tracker/__main__.py
exec_module
IBM/import-tracker
0
python
def exec_module(self, *_, **__): 'Nothing to do here because the errors will be thrown by the module\n created in create_module\n '
def exec_module(self, *_, **__): 'Nothing to do here because the errors will be thrown by the module\n created in create_module\n '<|docstring|>Nothing to do here because the errors will be thrown by the module created in create_module<|endoftext|>
6061094f5fcced942681091862ed06e2e881956ffdbe1a608a1bf1fb03aca085
def __init__(self, tracked_module: str, side_effect_modules: Optional[List[str]]=None, track_import_stack: bool=False): 'Initialize with the name of the package being tracked\n\n Args:\n tracked_module: str\n The name of the module (may be nested) being tracked\n side_effect_modules: Optional[List[str]]\n Some libraries rely on certain import-time side effects in order\n to perform required import tasks (e.g. global singleton\n registries). These modules will be allowed to import regardless\n of where they fall relative to the targeted module.\n track_import_stack: bool\n If true, when imports are allowed through, their stack trace is\n captured.\n NOTE: This will cause a stack trace to be computed for every\n import in the tracked set, so it will be very slow and\n should only be used as a debugging tool on targeted imports.\n ' self._tracked_module = tracked_module self._side_effect_modules = (side_effect_modules or []) self._tracked_module_parts = tracked_module.split('.') self._enabled = True self._starting_modules = set(sys.modules.keys()) log.debug2('Starting modules: %s', self._starting_modules) self._ending_modules = None self._deferred_modules = set() self._track_import_stack = track_import_stack self._import_stacks = {}
Initialize with the name of the package being tracked Args: tracked_module: str The name of the module (may be nested) being tracked side_effect_modules: Optional[List[str]] Some libraries rely on certain import-time side effects in order to perform required import tasks (e.g. global singleton registries). These modules will be allowed to import regardless of where they fall relative to the targeted module. track_import_stack: bool If true, when imports are allowed through, their stack trace is captured. NOTE: This will cause a stack trace to be computed for every import in the tracked set, so it will be very slow and should only be used as a debugging tool on targeted imports.
import_tracker/__main__.py
__init__
IBM/import-tracker
0
python
def __init__(self, tracked_module: str, side_effect_modules: Optional[List[str]]=None, track_import_stack: bool=False): 'Initialize with the name of the package being tracked\n\n Args:\n tracked_module: str\n The name of the module (may be nested) being tracked\n side_effect_modules: Optional[List[str]]\n Some libraries rely on certain import-time side effects in order\n to perform required import tasks (e.g. global singleton\n registries). These modules will be allowed to import regardless\n of where they fall relative to the targeted module.\n track_import_stack: bool\n If true, when imports are allowed through, their stack trace is\n captured.\n NOTE: This will cause a stack trace to be computed for every\n import in the tracked set, so it will be very slow and\n should only be used as a debugging tool on targeted imports.\n ' self._tracked_module = tracked_module self._side_effect_modules = (side_effect_modules or []) self._tracked_module_parts = tracked_module.split('.') self._enabled = True self._starting_modules = set(sys.modules.keys()) log.debug2('Starting modules: %s', self._starting_modules) self._ending_modules = None self._deferred_modules = set() self._track_import_stack = track_import_stack self._import_stacks = {}
def __init__(self, tracked_module: str, side_effect_modules: Optional[List[str]]=None, track_import_stack: bool=False): 'Initialize with the name of the package being tracked\n\n Args:\n tracked_module: str\n The name of the module (may be nested) being tracked\n side_effect_modules: Optional[List[str]]\n Some libraries rely on certain import-time side effects in order\n to perform required import tasks (e.g. global singleton\n registries). These modules will be allowed to import regardless\n of where they fall relative to the targeted module.\n track_import_stack: bool\n If true, when imports are allowed through, their stack trace is\n captured.\n NOTE: This will cause a stack trace to be computed for every\n import in the tracked set, so it will be very slow and\n should only be used as a debugging tool on targeted imports.\n ' self._tracked_module = tracked_module self._side_effect_modules = (side_effect_modules or []) self._tracked_module_parts = tracked_module.split('.') self._enabled = True self._starting_modules = set(sys.modules.keys()) log.debug2('Starting modules: %s', self._starting_modules) self._ending_modules = None self._deferred_modules = set() self._track_import_stack = track_import_stack self._import_stacks = {}<|docstring|>Initialize with the name of the package being tracked Args: tracked_module: str The name of the module (may be nested) being tracked side_effect_modules: Optional[List[str]] Some libraries rely on certain import-time side effects in order to perform required import tasks (e.g. global singleton registries). These modules will be allowed to import regardless of where they fall relative to the targeted module. track_import_stack: bool If true, when imports are allowed through, their stack trace is captured. NOTE: This will cause a stack trace to be computed for every import in the tracked set, so it will be very slow and should only be used as a debugging tool on targeted imports.<|endoftext|>
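A hedged sketch of how main() wires this finder up, with a hypothetical target module name: construct it, prepend it to sys.meta_path, import the target, then read back the modules that appeared during that import.
import importlib
import sys
finder = ImportTrackerMetaFinder(tracked_module='my_library.submodule')
sys.meta_path = [finder] + sys.meta_path
importlib.import_module('my_library.submodule')
new_modules = finder.get_all_new_modules()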
200291e778ae5dcf3e050f90a8979431b8a2b8163a423cc5cc52814d5cf10e0d
def find_spec(self, fullname: str, *args, **kwargs) -> Optional[importlib.machinery.ModuleSpec]: 'The find_spec implementation for this finder tracks the source of the\n import call for the given module and determines if it is on the critical\n path for the target module.\n\n https://docs.python.org/3/library/importlib.html#importlib.abc.MetaPathFinder.find_spec\n\n Args:\n fullname: str\n The fully qualified module name under import\n\n Returns:\n spec: Optional[importlib.machinery.ModuleSpec]\n If the desired import is not on the critical path for the target\n module, a spec with a _DeferredLoader will be returned. If the\n import is on the critical path, None will be returned to defer\n to the rest of the "real" finders.\n ' result = self._find_spec(fullname, *args, **kwargs) if (result is not None): log.debug2('Returning deferred module for [%s]', fullname) return result log.debug2('Stack tracking? %s, Ending modules set? %s', self._track_import_stack, (self._ending_modules is not None)) if (self._track_import_stack and (fullname != self._tracked_module) and (not self._enabled)): stack = inspect.stack() stack_info = [] for frame in stack: frame_module_name = frame.frame.f_globals['__name__'].split('.')[0] if (frame_module_name == self._tracked_module_parts[0]): stack_info.append({'filename': frame.filename, 'lineno': frame.lineno, 'code_context': [line.strip('\n') for line in frame.code_context]}) log.debug2('Found %d stack frames for [%s]', len(stack_info), fullname) self._import_stacks[fullname] = stack_info return None
The find_spec implementation for this finder tracks the source of the import call for the given module and determines if it is on the critical path for the target module. https://docs.python.org/3/library/importlib.html#importlib.abc.MetaPathFinder.find_spec Args: fullname: str The fully qualified module name under import Returns: spec: Optional[importlib.machinery.ModuleSpec] If the desired import is not on the critical path for the target module, a spec with a _DeferredLoader will be returned. If the import is on the critical path, None will be returned to defer to the rest of the "real" finders.
import_tracker/__main__.py
find_spec
IBM/import-tracker
0
python
def find_spec(self, fullname: str, *args, **kwargs) -> Optional[importlib.machinery.ModuleSpec]: 'The find_spec implementation for this finder tracks the source of the\n import call for the given module and determines if it is on the critical\n path for the target module.\n\n https://docs.python.org/3/library/importlib.html#importlib.abc.MetaPathFinder.find_spec\n\n Args:\n fullname: str\n The fully qualified module name under import\n\n Returns:\n spec: Optional[importlib.machinery.ModuleSpec]\n If the desired import is not on the critical path for the target\n module, a spec with a _DeferredLoader will be returned. If the\n import is on the critical path, None will be returned to defer\n to the rest of the "real" finders.\n ' result = self._find_spec(fullname, *args, **kwargs) if (result is not None): log.debug2('Returning deferred module for [%s]', fullname) return result log.debug2('Stack tracking? %s, Ending modules set? %s', self._track_import_stack, (self._ending_modules is not None)) if (self._track_import_stack and (fullname != self._tracked_module) and (not self._enabled)): stack = inspect.stack() stack_info = [] for frame in stack: frame_module_name = frame.frame.f_globals['__name__'].split('.')[0] if (frame_module_name == self._tracked_module_parts[0]): stack_info.append({'filename': frame.filename, 'lineno': frame.lineno, 'code_context': [line.strip('\n') for line in frame.code_context]}) log.debug2('Found %d stack frames for [%s]', len(stack_info), fullname) self._import_stacks[fullname] = stack_info return None
def find_spec(self, fullname: str, *args, **kwargs) -> Optional[importlib.machinery.ModuleSpec]: 'The find_spec implementation for this finder tracks the source of the\n import call for the given module and determines if it is on the critical\n path for the target module.\n\n https://docs.python.org/3/library/importlib.html#importlib.abc.MetaPathFinder.find_spec\n\n Args:\n fullname: str\n The fully qualified module name under import\n\n Returns:\n spec: Optional[importlib.machinery.ModuleSpec]\n If the desired import is not on the critical path for the target\n module, a spec with a _DeferredLoader will be returned. If the\n import is on the critical path, None will be returned to defer\n to the rest of the "real" finders.\n ' result = self._find_spec(fullname, *args, **kwargs) if (result is not None): log.debug2('Returning deferred module for [%s]', fullname) return result log.debug2('Stack tracking? %s, Ending modules set? %s', self._track_import_stack, (self._ending_modules is not None)) if (self._track_import_stack and (fullname != self._tracked_module) and (not self._enabled)): stack = inspect.stack() stack_info = [] for frame in stack: frame_module_name = frame.frame.f_globals['__name__'].split('.')[0] if (frame_module_name == self._tracked_module_parts[0]): stack_info.append({'filename': frame.filename, 'lineno': frame.lineno, 'code_context': [line.strip('\n') for line in frame.code_context]}) log.debug2('Found %d stack frames for [%s]', len(stack_info), fullname) self._import_stacks[fullname] = stack_info return None<|docstring|>The find_spec implementation for this finder tracks the source of the import call for the given module and determines if it is on the critical path for the target module. https://docs.python.org/3/library/importlib.html#importlib.abc.MetaPathFinder.find_spec Args: fullname: str The fully qualified module name under import Returns: spec: Optional[importlib.machinery.ModuleSpec] If the desired import is not on the critical path for the target module, a spec with a _DeferredLoader will be returned. If the import is on the critical path, None will be returned to defer to the rest of the "real" finders.<|endoftext|>
eabceb7a7cf77b0d217bf29fad76a5375793b9e76d580c44a083cadbd19e6067
def _find_spec(self, fullname: str, *args, **kwargs) -> Optional[importlib.machinery.ModuleSpec]: 'This implements the core logic of find_spec. It is wrapped by the\n public find_spec so that when an import is allowed, the stack can be\n optionally tracked.\n ' if (fullname in self._side_effect_modules): log.debug('Allowing import of side-effect module [%s]', fullname) return None if (self._enabled and (fullname != self._tracked_module) and (not self._is_parent_module(fullname)) and (fullname not in self._deferred_modules) and (fullname.split('.')[0] == self._tracked_module_parts[0])): log.debug3('Deferring import of [%s]', fullname) self._deferred_modules.add(fullname) loader = _LazyLoader() return importlib.util.spec_from_loader(fullname, loader) if (fullname == self._tracked_module): log.debug('Tracked module [%s] found. Tracking started', self._tracked_module) self._enabled = False lazy_modules = [mod_name for (mod_name, mod) in sys.modules.items() if isinstance(mod, _DeferredModule)] for mod_name in lazy_modules: log.debug2('Removing lazy module [%s]', mod_name) del sys.modules[mod_name] if ((self._ending_modules is None) and (not getattr(getattr(sys.modules.get(self._tracked_module, {}), '__spec__', {}), '_initializing', True))): log.debug('Tracked module [%s] finished importing', self._tracked_module) self._set_ending_modules(fullname) log.debug2('Ending modules: %s', self._ending_modules) log.debug3('Allowing import of [%s]', fullname) return None
This implements the core logic of find_spec. It is wrapped by the public find_spec so that when an import is allowed, the stack can be optionally tracked.
import_tracker/__main__.py
_find_spec
IBM/import-tracker
0
python
def _find_spec(self, fullname: str, *args, **kwargs) -> Optional[importlib.machinery.ModuleSpec]: 'This implements the core logic of find_spec. It is wrapped by the\n public find_spec so that when an import is allowed, the stack can be\n optionally tracked.\n ' if (fullname in self._side_effect_modules): log.debug('Allowing import of side-effect module [%s]', fullname) return None if (self._enabled and (fullname != self._tracked_module) and (not self._is_parent_module(fullname)) and (fullname not in self._deferred_modules) and (fullname.split('.')[0] == self._tracked_module_parts[0])): log.debug3('Deferring import of [%s]', fullname) self._deferred_modules.add(fullname) loader = _LazyLoader() return importlib.util.spec_from_loader(fullname, loader) if (fullname == self._tracked_module): log.debug('Tracked module [%s] found. Tracking started', self._tracked_module) self._enabled = False lazy_modules = [mod_name for (mod_name, mod) in sys.modules.items() if isinstance(mod, _DeferredModule)] for mod_name in lazy_modules: log.debug2('Removing lazy module [%s]', mod_name) del sys.modules[mod_name] if ((self._ending_modules is None) and (not getattr(getattr(sys.modules.get(self._tracked_module, {}), '__spec__', {}), '_initializing', True))): log.debug('Tracked module [%s] finished importing', self._tracked_module) self._set_ending_modules(fullname) log.debug2('Ending modules: %s', self._ending_modules) log.debug3('Allowing import of [%s]', fullname) return None
def _find_spec(self, fullname: str, *args, **kwargs) -> Optional[importlib.machinery.ModuleSpec]: 'This implements the core logic of find_spec. It is wrapped by the\n public find_spec so that when an import is allowed, the stack can be\n optionally tracked.\n ' if (fullname in self._side_effect_modules): log.debug('Allowing import of side-effect module [%s]', fullname) return None if (self._enabled and (fullname != self._tracked_module) and (not self._is_parent_module(fullname)) and (fullname not in self._deferred_modules) and (fullname.split('.')[0] == self._tracked_module_parts[0])): log.debug3('Deferring import of [%s]', fullname) self._deferred_modules.add(fullname) loader = _LazyLoader() return importlib.util.spec_from_loader(fullname, loader) if (fullname == self._tracked_module): log.debug('Tracked module [%s] found. Tracking started', self._tracked_module) self._enabled = False lazy_modules = [mod_name for (mod_name, mod) in sys.modules.items() if isinstance(mod, _DeferredModule)] for mod_name in lazy_modules: log.debug2('Removing lazy module [%s]', mod_name) del sys.modules[mod_name] if ((self._ending_modules is None) and (not getattr(getattr(sys.modules.get(self._tracked_module, {}), '__spec__', {}), '_initializing', True))): log.debug('Tracked module [%s] finished importing', self._tracked_module) self._set_ending_modules(fullname) log.debug2('Ending modules: %s', self._ending_modules) log.debug3('Allowing import of [%s]', fullname) return None<|docstring|>This implements the core logic of find_spec. It is wrapped by the public find_spec so that when an import is allowed, the stack can be optionally tracked.<|endoftext|>
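A hedged, stripped-down version of the deferral trick in _find_spec above: a meta path finder that answers any import under a given prefix with a spec built around the file's lazy loader instead of letting the real import run; reusing _LazyLoader with a no-argument constructor mirrors the call above but is otherwise an assumption.
import importlib.abc
import importlib.util
class DeferUnderPrefix(importlib.abc.MetaPathFinder):
    def __init__(self, prefix):
        self.prefix = prefix
    def find_spec(self, fullname, path=None, target=None):
        if fullname.startswith(self.prefix + '.'):
            return importlib.util.spec_from_loader(fullname, _LazyLoader())
        return None   # fall through to the regular finders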
f2fa5ed6a5d8a6ab72a6a4ebe4d299de24c21778db773b6f9dcb32715f221ac6
def get_all_new_modules(self) -> Set[str]: 'Get all of the imports that have happened since the start' assert (self._starting_modules is not None), f'Target module never imported!' if (self._ending_modules is None): self._set_ending_modules() mod_names = {mod for mod in (self._ending_modules - self._starting_modules) if (not self._is_parent_module(mod))} if self._track_import_stack: return {mod_name: self._import_stacks.get(mod_name, []) for mod_name in mod_names} return mod_names
Get all of the imports that have happened since the start
import_tracker/__main__.py
get_all_new_modules
IBM/import-tracker
0
python
def get_all_new_modules(self) -> Set[str]: assert (self._starting_modules is not None), f'Target module never imported!' if (self._ending_modules is None): self._set_ending_modules() mod_names = {mod for mod in (self._ending_modules - self._starting_modules) if (not self._is_parent_module(mod))} if self._track_import_stack: return {mod_name: self._import_stacks.get(mod_name, []) for mod_name in mod_names} return mod_names
def get_all_new_modules(self) -> Set[str]: assert (self._starting_modules is not None), f'Target module never imported!' if (self._ending_modules is None): self._set_ending_modules() mod_names = {mod for mod in (self._ending_modules - self._starting_modules) if (not self._is_parent_module(mod))} if self._track_import_stack: return {mod_name: self._import_stacks.get(mod_name, []) for mod_name in mod_names} return mod_names<|docstring|>Get all of the imports that have happened since the start<|endoftext|>
6ef7d2576911d1f8576007e8ae862191b60d63d35234ebdbf413614c14d24f12
def _is_parent_module(self, fullname: str) -> bool: 'Determine if the given module fullname is a direct parent of the\n tracked module\n ' parts = fullname.split('.') return (self._tracked_module_parts[:len(parts)] == parts)
Determine if the given module fullname is a direct parent of the tracked module
import_tracker/__main__.py
_is_parent_module
IBM/import-tracker
0
python
def _is_parent_module(self, fullname: str) -> bool: 'Determine if the given module fullname is a direct parent of the\n tracked module\n ' parts = fullname.split('.') return (self._tracked_module_parts[:len(parts)] == parts)
def _is_parent_module(self, fullname: str) -> bool: 'Determine if the given module fullname is a direct parent of the\n tracked module\n ' parts = fullname.split('.') return (self._tracked_module_parts[:len(parts)] == parts)<|docstring|>Determine if the given module fullname is a direct parent of the tracked module<|endoftext|>
5fbe5a6a76b3fdc597a999aff21d756055b49b78da431a36148cae8549311de7
def _set_ending_modules(self, trigger_module_name: Optional[str]=None): 'Set the ending module set for the target' self._ending_modules = {} deferred_attrs = [] while True: for (mod_name, mod) in list(sys.modules.items()): if mod_name.startswith(self._tracked_module.split('.')[0]): for (attr_name, attr) in vars(mod).items(): if (isinstance(attr, _DeferredModule) and (not attr.imported) and attr.referenced_by(self._tracked_module)): deferred_attrs.append((mod_name, attr_name, attr)) if (not deferred_attrs): break for (mod_name, attr_name, attr) in deferred_attrs: log.debug2('Finalizing deferred import for %s.%s', mod_name, attr_name) attr.do_import() log.debug2('Done finalizing deferred import for %s.%s', mod_name, attr_name) deferred_attrs = [] self._ending_modules = (set(sys.modules.keys()) - {trigger_module_name})
Set the ending module set for the target
import_tracker/__main__.py
_set_ending_modules
IBM/import-tracker
0
python
def _set_ending_modules(self, trigger_module_name: Optional[str]=None): self._ending_modules = {} deferred_attrs = [] while True: for (mod_name, mod) in list(sys.modules.items()): if mod_name.startswith(self._tracked_module.split('.')[0]): for (attr_name, attr) in vars(mod).items(): if (isinstance(attr, _DeferredModule) and (not attr.imported) and attr.referenced_by(self._tracked_module)): deferred_attrs.append((mod_name, attr_name, attr)) if (not deferred_attrs): break for (mod_name, attr_name, attr) in deferred_attrs: log.debug2('Finalizing deferred import for %s.%s', mod_name, attr_name) attr.do_import() log.debug2('Done finalizing deferred import for %s.%s', mod_name, attr_name) deferred_attrs = [] self._ending_modules = (set(sys.modules.keys()) - {trigger_module_name})
def _set_ending_modules(self, trigger_module_name: Optional[str]=None): self._ending_modules = {} deferred_attrs = [] while True: for (mod_name, mod) in list(sys.modules.items()): if mod_name.startswith(self._tracked_module.split('.')[0]): for (attr_name, attr) in vars(mod).items(): if (isinstance(attr, _DeferredModule) and (not attr.imported) and attr.referenced_by(self._tracked_module)): deferred_attrs.append((mod_name, attr_name, attr)) if (not deferred_attrs): break for (mod_name, attr_name, attr) in deferred_attrs: log.debug2('Finalizing deferred import for %s.%s', mod_name, attr_name) attr.do_import() log.debug2('Done finalizing deferred import for %s.%s', mod_name, attr_name) deferred_attrs = [] self._ending_modules = (set(sys.modules.keys()) - {trigger_module_name})<|docstring|>Set the ending module set for the target<|endoftext|>
f75dea44bc0f96cc0a5973fa3edc10e20d1e675cb577d83b91f1ed9be8fc0b38
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_acc_list = rsp_pb.s2c.accList acc_list = [{'acc_id': record.accID, 'trd_env': (TRADE.REV_TRD_ENV_MAP[record.trdEnv] if (record.trdEnv in TRADE.REV_TRD_ENV_MAP) else ''), 'trdMarket_list': [(TRADE.REV_TRD_MKT_MAP[trdMkt] if (trdMkt in TRADE.REV_TRD_MKT_MAP) else TrdMarket.NONE) for trdMkt in record.trdMarketAuthList]} for record in raw_acc_list] return (RET_OK, '', acc_list)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_acc_list = rsp_pb.s2c.accList acc_list = [{'acc_id': record.accID, 'trd_env': (TRADE.REV_TRD_ENV_MAP[record.trdEnv] if (record.trdEnv in TRADE.REV_TRD_ENV_MAP) else ''), 'trdMarket_list': [(TRADE.REV_TRD_MKT_MAP[trdMkt] if (trdMkt in TRADE.REV_TRD_MKT_MAP) else TrdMarket.NONE) for trdMkt in record.trdMarketAuthList]} for record in raw_acc_list] return (RET_OK, '', acc_list)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_acc_list = rsp_pb.s2c.accList acc_list = [{'acc_id': record.accID, 'trd_env': (TRADE.REV_TRD_ENV_MAP[record.trdEnv] if (record.trdEnv in TRADE.REV_TRD_ENV_MAP) else ''), 'trdMarket_list': [(TRADE.REV_TRD_MKT_MAP[trdMkt] if (trdMkt in TRADE.REV_TRD_MKT_MAP) else TrdMarket.NONE) for trdMkt in record.trdMarketAuthList]} for record in raw_acc_list] return (RET_OK, '', acc_list)<|docstring|>Convert from PLS response to user response<|endoftext|>
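All of the unpack_rsp helpers in these futuquant records share the (ret_code, msg, data) return convention. Below is a hedged sketch of how a caller consumes it; query_acc_list is a hypothetical stand-in rather than the real client call, and the RET_OK/RET_ERROR values of 0/-1 are assumed from the pattern rather than quoted from this file.

# Illustration of the (ret_code, msg, data) convention only; not the futuquant API.
RET_OK = 0       # assumed value
RET_ERROR = -1   # assumed value


def query_acc_list():
    # Stand-in for a pack_req/unpack_rsp round trip returning account records.
    return RET_OK, '', [{'acc_id': 123456, 'trd_env': 'SIMULATE', 'trdMarket_list': ['HK']}]


ret, msg, acc_list = query_acc_list()
if ret != RET_OK:
    raise RuntimeError(f'request failed: {msg}')
for acc in acc_list:
    print(acc['acc_id'], acc['trd_env'], acc['trdMarket_list'])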
a46e7f710f7f649e79acc67c5ff2ea55ae41fee820ebb50f3e8b40259a60707e
@classmethod def pack_req(cls, is_unlock, password_md5, conn_id): 'Convert from user request for trading days to PLS request' from futuquant.common.pb.Trd_UnlockTrade_pb2 import Request req = Request() req.c2s.unlock = is_unlock req.c2s.pwdMD5 = password_md5 return pack_pb_req(req, ProtoId.Trd_UnlockTrade, conn_id)
Convert from user request for trading days to PLS request
futuquant/trade/trade_query.py
pack_req
hxhxhx88/futuquant
5
python
@classmethod def pack_req(cls, is_unlock, password_md5, conn_id): from futuquant.common.pb.Trd_UnlockTrade_pb2 import Request req = Request() req.c2s.unlock = is_unlock req.c2s.pwdMD5 = password_md5 return pack_pb_req(req, ProtoId.Trd_UnlockTrade, conn_id)
@classmethod def pack_req(cls, is_unlock, password_md5, conn_id): from futuquant.common.pb.Trd_UnlockTrade_pb2 import Request req = Request() req.c2s.unlock = is_unlock req.c2s.pwdMD5 = password_md5 return pack_pb_req(req, ProtoId.Trd_UnlockTrade, conn_id)<|docstring|>Convert from user request for trading days to PLS request<|endoftext|>
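The unlock request above sends pwdMD5 rather than the plain trade password. A small helper follows, under the assumption that the field expects the lowercase hex MD5 of the password (the exact encoding is not visible in this record).

# Assumption: pwdMD5 carries the lowercase hex MD5 digest of the trade password.
import hashlib


def trade_password_md5(password: str) -> str:
    return hashlib.md5(password.encode('utf-8')).hexdigest()


# e.g. pack_req(is_unlock=True, password_md5=trade_password_md5('123456'), conn_id=conn_id)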
842c042f8a9b6fc698be0e7852101417238a23df6db4bea7ed4e7b2e213d38ba
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) if rsp_pb.HasField('retMsg'): return (RET_OK, rsp_pb.retMsg, None) return (RET_OK, '', None)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) if rsp_pb.HasField('retMsg'): return (RET_OK, rsp_pb.retMsg, None) return (RET_OK, '', None)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) if rsp_pb.HasField('retMsg'): return (RET_OK, rsp_pb.retMsg, None) return (RET_OK, '', None)<|docstring|>Convert from PLS response to user response<|endoftext|>
c3e324eb9f3f59625fef9d119fdf0d3d1b5bd37551df8ef8fee69a9446367439
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) return (RET_OK, '', None)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) return (RET_OK, '', None)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) return (RET_OK, '', None)<|docstring|>Convert from PLS response to user response<|endoftext|>
2b33d61a0a7f747c092f5b80a2f9af521a80abf47f8aaac44f63f51739820095
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_funds = rsp_pb.s2c.funds accinfo_list = [{'power': raw_funds.power, 'total_assets': raw_funds.totalAssets, 'cash': raw_funds.cash, 'market_val': raw_funds.marketVal, 'frozen_cash': raw_funds.frozenCash, 'avl_withdrawal_cash': raw_funds.avlWithdrawalCash}] return (RET_OK, '', accinfo_list)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_funds = rsp_pb.s2c.funds accinfo_list = [{'power': raw_funds.power, 'total_assets': raw_funds.totalAssets, 'cash': raw_funds.cash, 'market_val': raw_funds.marketVal, 'frozen_cash': raw_funds.frozenCash, 'avl_withdrawal_cash': raw_funds.avlWithdrawalCash}] return (RET_OK, '', accinfo_list)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_funds = rsp_pb.s2c.funds accinfo_list = [{'power': raw_funds.power, 'total_assets': raw_funds.totalAssets, 'cash': raw_funds.cash, 'market_val': raw_funds.marketVal, 'frozen_cash': raw_funds.frozenCash, 'avl_withdrawal_cash': raw_funds.avlWithdrawalCash}] return (RET_OK, '', accinfo_list)<|docstring|>Convert from PLS response to user response<|endoftext|>
eceaa908643cd86ebcb1e643af8d24d69f9fd80eb5d0e5d5a92f94b55e05675f
@classmethod def pack_req(cls, code, pl_ratio_min, pl_ratio_max, trd_env, acc_id, trd_mkt, conn_id): 'Convert from user request for trading days to PLS request' from futuquant.common.pb.Trd_GetPositionList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) if (pl_ratio_min is not None): req.c2s.filterPLRatioMin = (float(pl_ratio_min) / 100.0) if (pl_ratio_max is not None): req.c2s.filterPLRatioMax = (float(pl_ratio_max) / 100.0) return pack_pb_req(req, ProtoId.Trd_GetPositionList, conn_id)
Convert from user request for trading days to PLS request
futuquant/trade/trade_query.py
pack_req
hxhxhx88/futuquant
5
python
@classmethod def pack_req(cls, code, pl_ratio_min, pl_ratio_max, trd_env, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_GetPositionList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) if (pl_ratio_min is not None): req.c2s.filterPLRatioMin = (float(pl_ratio_min) / 100.0) if (pl_ratio_max is not None): req.c2s.filterPLRatioMax = (float(pl_ratio_max) / 100.0) return pack_pb_req(req, ProtoId.Trd_GetPositionList, conn_id)
@classmethod def pack_req(cls, code, pl_ratio_min, pl_ratio_max, trd_env, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_GetPositionList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) if (pl_ratio_min is not None): req.c2s.filterPLRatioMin = (float(pl_ratio_min) / 100.0) if (pl_ratio_max is not None): req.c2s.filterPLRatioMax = (float(pl_ratio_max) / 100.0) return pack_pb_req(req, ProtoId.Trd_GetPositionList, conn_id)<|docstring|>Convert from user request for trading days to PLS request<|endoftext|>
6747e65626184900f5f2b44db6831f8eb26ad2cae3b2363ffb175b66ecb71442
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_position_list = rsp_pb.s2c.positionList position_list = [{'code': merge_trd_mkt_stock_str(rsp_pb.s2c.header.trdMarket, position.code), 'stock_name': position.name, 'qty': position.qty, 'can_sell_qty': position.canSellQty, 'cost_price': (position.costPrice if position.HasField('costPrice') else 0), 'cost_price_valid': (1 if position.HasField('costPrice') else 0), 'market_val': position.val, 'nominal_price': position.price, 'pl_ratio': ((100 * position.plRatio) if position.HasField('plRatio') else 0), 'pl_ratio_valid': (1 if position.HasField('plRatio') else 0), 'pl_val': (position.plVal if position.HasField('plVal') else 0), 'pl_val_valid': (1 if position.HasField('plVal') else 0), 'today_buy_qty': (position.td_buyQty if position.HasField('td_buyQty') else 0), 'today_buy_val': (position.td_buyVal if position.HasField('td_buyVal') else 0), 'today_pl_val': (position.td_plVal if position.HasField('td_plVal') else 0), 'today_sell_qty': (position.td_sellQty if position.HasField('td_sellQty') else 0), 'today_sell_val': (position.td_sellVal if position.HasField('td_sellVal') else 0), 'position_side': (TRADE.REV_POSITION_SIDE_MAP[position.positionSide] if (position.positionSide in TRADE.REV_POSITION_SIDE_MAP) else PositionSide.NONE)} for position in raw_position_list] return (RET_OK, '', position_list)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_position_list = rsp_pb.s2c.positionList position_list = [{'code': merge_trd_mkt_stock_str(rsp_pb.s2c.header.trdMarket, position.code), 'stock_name': position.name, 'qty': position.qty, 'can_sell_qty': position.canSellQty, 'cost_price': (position.costPrice if position.HasField('costPrice') else 0), 'cost_price_valid': (1 if position.HasField('costPrice') else 0), 'market_val': position.val, 'nominal_price': position.price, 'pl_ratio': ((100 * position.plRatio) if position.HasField('plRatio') else 0), 'pl_ratio_valid': (1 if position.HasField('plRatio') else 0), 'pl_val': (position.plVal if position.HasField('plVal') else 0), 'pl_val_valid': (1 if position.HasField('plVal') else 0), 'today_buy_qty': (position.td_buyQty if position.HasField('td_buyQty') else 0), 'today_buy_val': (position.td_buyVal if position.HasField('td_buyVal') else 0), 'today_pl_val': (position.td_plVal if position.HasField('td_plVal') else 0), 'today_sell_qty': (position.td_sellQty if position.HasField('td_sellQty') else 0), 'today_sell_val': (position.td_sellVal if position.HasField('td_sellVal') else 0), 'position_side': (TRADE.REV_POSITION_SIDE_MAP[position.positionSide] if (position.positionSide in TRADE.REV_POSITION_SIDE_MAP) else PositionSide.NONE)} for position in raw_position_list] return (RET_OK, '', position_list)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_position_list = rsp_pb.s2c.positionList position_list = [{'code': merge_trd_mkt_stock_str(rsp_pb.s2c.header.trdMarket, position.code), 'stock_name': position.name, 'qty': position.qty, 'can_sell_qty': position.canSellQty, 'cost_price': (position.costPrice if position.HasField('costPrice') else 0), 'cost_price_valid': (1 if position.HasField('costPrice') else 0), 'market_val': position.val, 'nominal_price': position.price, 'pl_ratio': ((100 * position.plRatio) if position.HasField('plRatio') else 0), 'pl_ratio_valid': (1 if position.HasField('plRatio') else 0), 'pl_val': (position.plVal if position.HasField('plVal') else 0), 'pl_val_valid': (1 if position.HasField('plVal') else 0), 'today_buy_qty': (position.td_buyQty if position.HasField('td_buyQty') else 0), 'today_buy_val': (position.td_buyVal if position.HasField('td_buyVal') else 0), 'today_pl_val': (position.td_plVal if position.HasField('td_plVal') else 0), 'today_sell_qty': (position.td_sellQty if position.HasField('td_sellQty') else 0), 'today_sell_val': (position.td_sellVal if position.HasField('td_sellVal') else 0), 'position_side': (TRADE.REV_POSITION_SIDE_MAP[position.positionSide] if (position.positionSide in TRADE.REV_POSITION_SIDE_MAP) else PositionSide.NONE)} for position in raw_position_list] return (RET_OK, '', position_list)<|docstring|>Convert from PLS response to user response<|endoftext|>
1f1c0fc7410946189d57c55586e00b81838889cbb1c55a6937cbe8c9facd7943
@classmethod def pack_req(cls, order_id, status_filter_list, code, start, end, trd_env, acc_id, trd_mkt, conn_id): 'Convert from user request for trading days to PLS request' from futuquant.common.pb.Trd_GetOrderList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) if order_id: req.c2s.filterConditions.idList.append(int(order_id)) if start: req.c2s.filterConditions.beginTime = start if end: req.c2s.filterConditions.endTime = end if len(status_filter_list): for order_status in status_filter_list: req.c2s.filterStatusList.append(ORDER_STATUS_MAP[order_status]) return pack_pb_req(req, ProtoId.Trd_GetOrderList, conn_id)
Convert from user request for trading days to PLS request
futuquant/trade/trade_query.py
pack_req
hxhxhx88/futuquant
5
python
@classmethod def pack_req(cls, order_id, status_filter_list, code, start, end, trd_env, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_GetOrderList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) if order_id: req.c2s.filterConditions.idList.append(int(order_id)) if start: req.c2s.filterConditions.beginTime = start if end: req.c2s.filterConditions.endTime = end if len(status_filter_list): for order_status in status_filter_list: req.c2s.filterStatusList.append(ORDER_STATUS_MAP[order_status]) return pack_pb_req(req, ProtoId.Trd_GetOrderList, conn_id)
@classmethod def pack_req(cls, order_id, status_filter_list, code, start, end, trd_env, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_GetOrderList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) if order_id: req.c2s.filterConditions.idList.append(int(order_id)) if start: req.c2s.filterConditions.beginTime = start if end: req.c2s.filterConditions.endTime = end if len(status_filter_list): for order_status in status_filter_list: req.c2s.filterStatusList.append(ORDER_STATUS_MAP[order_status]) return pack_pb_req(req, ProtoId.Trd_GetOrderList, conn_id)<|docstring|>Convert from user request for trading days to PLS request<|endoftext|>
a6f0d5b4096c6b9b4f86df0137a350b08ce92cb748c3808b9a8b2f54abf5b356
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_order_list = rsp_pb.s2c.orderList order_list = [OrderListQuery.parse_order(rsp_pb, order) for order in raw_order_list] return (RET_OK, '', order_list)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_order_list = rsp_pb.s2c.orderList order_list = [OrderListQuery.parse_order(rsp_pb, order) for order in raw_order_list] return (RET_OK, '', order_list)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_order_list = rsp_pb.s2c.orderList order_list = [OrderListQuery.parse_order(rsp_pb, order) for order in raw_order_list] return (RET_OK, '', order_list)<|docstring|>Convert from PLS response to user response<|endoftext|>
db4477d2fd87d82e00886bab1f6f24c5cf6a9770b7e67b061c86b48573032e57
@classmethod def pack_req(cls, trd_side, order_type, price, qty, code, adjust_limit, trd_env, sec_mkt_str, acc_id, trd_mkt, conn_id): 'Convert from user request for place order to PLS request' from futuquant.common.pb.Trd_PlaceOrder_pb2 import Request req = Request() serial_no = get_unique_id32() req.c2s.packetID.serialNo = serial_no req.c2s.packetID.connID = conn_id req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] req.c2s.trdSide = TRD_SIDE_MAP[trd_side] req.c2s.orderType = ORDER_TYPE_MAP[order_type] req.c2s.code = code req.c2s.qty = qty req.c2s.price = price req.c2s.adjustPrice = (adjust_limit != 0) req.c2s.adjustSideAndLimit = adjust_limit proto_qot_mkt = MKT_MAP.get(sec_mkt_str, Qot_Common_pb2.QotMarket_Unknown) proto_trd_sec_mkt = QOT_MARKET_TO_TRD_SEC_MARKET_MAP.get(proto_qot_mkt, Trd_Common_pb2.TrdSecMarket_Unknown) req.c2s.secMarket = proto_trd_sec_mkt return pack_pb_req(req, ProtoId.Trd_PlaceOrder, conn_id, serial_no)
Convert from user request for place order to PLS request
futuquant/trade/trade_query.py
pack_req
hxhxhx88/futuquant
5
python
@classmethod def pack_req(cls, trd_side, order_type, price, qty, code, adjust_limit, trd_env, sec_mkt_str, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_PlaceOrder_pb2 import Request req = Request() serial_no = get_unique_id32() req.c2s.packetID.serialNo = serial_no req.c2s.packetID.connID = conn_id req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] req.c2s.trdSide = TRD_SIDE_MAP[trd_side] req.c2s.orderType = ORDER_TYPE_MAP[order_type] req.c2s.code = code req.c2s.qty = qty req.c2s.price = price req.c2s.adjustPrice = (adjust_limit != 0) req.c2s.adjustSideAndLimit = adjust_limit proto_qot_mkt = MKT_MAP.get(sec_mkt_str, Qot_Common_pb2.QotMarket_Unknown) proto_trd_sec_mkt = QOT_MARKET_TO_TRD_SEC_MARKET_MAP.get(proto_qot_mkt, Trd_Common_pb2.TrdSecMarket_Unknown) req.c2s.secMarket = proto_trd_sec_mkt return pack_pb_req(req, ProtoId.Trd_PlaceOrder, conn_id, serial_no)
@classmethod def pack_req(cls, trd_side, order_type, price, qty, code, adjust_limit, trd_env, sec_mkt_str, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_PlaceOrder_pb2 import Request req = Request() serial_no = get_unique_id32() req.c2s.packetID.serialNo = serial_no req.c2s.packetID.connID = conn_id req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] req.c2s.trdSide = TRD_SIDE_MAP[trd_side] req.c2s.orderType = ORDER_TYPE_MAP[order_type] req.c2s.code = code req.c2s.qty = qty req.c2s.price = price req.c2s.adjustPrice = (adjust_limit != 0) req.c2s.adjustSideAndLimit = adjust_limit proto_qot_mkt = MKT_MAP.get(sec_mkt_str, Qot_Common_pb2.QotMarket_Unknown) proto_trd_sec_mkt = QOT_MARKET_TO_TRD_SEC_MARKET_MAP.get(proto_qot_mkt, Trd_Common_pb2.TrdSecMarket_Unknown) req.c2s.secMarket = proto_trd_sec_mkt return pack_pb_req(req, ProtoId.Trd_PlaceOrder, conn_id, serial_no)<|docstring|>Convert from user request for place order to PLS request<|endoftext|>
681ec6881dea8a947df01d91e7bab0ee2a7d98c44412bb620ab8fe1818fd862f
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) order_id = str(rsp_pb.s2c.orderID) return (RET_OK, '', order_id)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) order_id = str(rsp_pb.s2c.orderID) return (RET_OK, '', order_id)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) order_id = str(rsp_pb.s2c.orderID) return (RET_OK, '', order_id)<|docstring|>Convert from PLS response to user response<|endoftext|>
ee43f1670d97730905ac4963abaf50637cdd4f9e0be271e43ab2fbf53e1d4dfd
@classmethod def pack_req(cls, modify_order_op, order_id, price, qty, adjust_limit, trd_env, acc_id, trd_mkt, conn_id): 'Convert from user request for place order to PLS request' from futuquant.common.pb.Trd_ModifyOrder_pb2 import Request req = Request() serial_no = get_unique_id32() req.c2s.packetID.serialNo = serial_no req.c2s.packetID.connID = conn_id req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] req.c2s.orderID = int(order_id) req.c2s.modifyOrderOp = MODIFY_ORDER_OP_MAP[modify_order_op] req.c2s.forAll = False if (modify_order_op == ModifyOrderOp.NORMAL): req.c2s.qty = qty req.c2s.price = price req.c2s.adjustPrice = (adjust_limit != 0) req.c2s.adjustSideAndLimit = adjust_limit return pack_pb_req(req, ProtoId.Trd_ModifyOrder, conn_id, serial_no)
Convert from user request for place order to PLS request
futuquant/trade/trade_query.py
pack_req
hxhxhx88/futuquant
5
python
@classmethod def pack_req(cls, modify_order_op, order_id, price, qty, adjust_limit, trd_env, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_ModifyOrder_pb2 import Request req = Request() serial_no = get_unique_id32() req.c2s.packetID.serialNo = serial_no req.c2s.packetID.connID = conn_id req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] req.c2s.orderID = int(order_id) req.c2s.modifyOrderOp = MODIFY_ORDER_OP_MAP[modify_order_op] req.c2s.forAll = False if (modify_order_op == ModifyOrderOp.NORMAL): req.c2s.qty = qty req.c2s.price = price req.c2s.adjustPrice = (adjust_limit != 0) req.c2s.adjustSideAndLimit = adjust_limit return pack_pb_req(req, ProtoId.Trd_ModifyOrder, conn_id, serial_no)
@classmethod def pack_req(cls, modify_order_op, order_id, price, qty, adjust_limit, trd_env, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_ModifyOrder_pb2 import Request req = Request() serial_no = get_unique_id32() req.c2s.packetID.serialNo = serial_no req.c2s.packetID.connID = conn_id req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] req.c2s.orderID = int(order_id) req.c2s.modifyOrderOp = MODIFY_ORDER_OP_MAP[modify_order_op] req.c2s.forAll = False if (modify_order_op == ModifyOrderOp.NORMAL): req.c2s.qty = qty req.c2s.price = price req.c2s.adjustPrice = (adjust_limit != 0) req.c2s.adjustSideAndLimit = adjust_limit return pack_pb_req(req, ProtoId.Trd_ModifyOrder, conn_id, serial_no)<|docstring|>Convert from user request for place order to PLS request<|endoftext|>
7d0b222d3fa2c017230ceb07336e3dc79f20c9610aa62cd719bf8145e18f4953
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) order_id = str(rsp_pb.s2c.orderID) modify_order_list = [{'trd_env': TRADE.REV_TRD_ENV_MAP[rsp_pb.s2c.header.trdEnv], 'order_id': order_id}] return (RET_OK, '', modify_order_list)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) order_id = str(rsp_pb.s2c.orderID) modify_order_list = [{'trd_env': TRADE.REV_TRD_ENV_MAP[rsp_pb.s2c.header.trdEnv], 'order_id': order_id}] return (RET_OK, '', modify_order_list)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) order_id = str(rsp_pb.s2c.orderID) modify_order_list = [{'trd_env': TRADE.REV_TRD_ENV_MAP[rsp_pb.s2c.header.trdEnv], 'order_id': order_id}] return (RET_OK, '', modify_order_list)<|docstring|>Convert from PLS response to user response<|endoftext|>
760fa64cfcdea9d69c21686f41f176cfe7fc2405c6c0022730eb968a415e64f3
@classmethod def pack_req(cls, code, trd_env, acc_id, trd_mkt, conn_id): 'Convert from user request for place order to PLS request' from futuquant.common.pb.Trd_GetOrderFillList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) return pack_pb_req(req, ProtoId.Trd_GetOrderFillList, conn_id)
Convert from user request for place order to PLS request
futuquant/trade/trade_query.py
pack_req
hxhxhx88/futuquant
5
python
@classmethod def pack_req(cls, code, trd_env, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_GetOrderFillList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) return pack_pb_req(req, ProtoId.Trd_GetOrderFillList, conn_id)
@classmethod def pack_req(cls, code, trd_env, acc_id, trd_mkt, conn_id): from futuquant.common.pb.Trd_GetOrderFillList_pb2 import Request req = Request() req.c2s.header.trdEnv = TRD_ENV_MAP[trd_env] req.c2s.header.accID = acc_id req.c2s.header.trdMarket = TRD_MKT_MAP[trd_mkt] if code: req.c2s.filterConditions.codeList.append(code) return pack_pb_req(req, ProtoId.Trd_GetOrderFillList, conn_id)<|docstring|>Convert from user request for place order to PLS request<|endoftext|>
4b8d55a5c95b4aa75ead86da44a2fcdb1e8200671bf2b17866bcac8559fc50ee
@classmethod def unpack_rsp(cls, rsp_pb): 'Convert from PLS response to user response' if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_deal_list = rsp_pb.s2c.orderFillList deal_list = [DealListQuery.parse_deal(rsp_pb, deal) for deal in raw_deal_list] return (RET_OK, '', deal_list)
Convert from PLS response to user response
futuquant/trade/trade_query.py
unpack_rsp
hxhxhx88/futuquant
5
python
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_deal_list = rsp_pb.s2c.orderFillList deal_list = [DealListQuery.parse_deal(rsp_pb, deal) for deal in raw_deal_list] return (RET_OK, '', deal_list)
@classmethod def unpack_rsp(cls, rsp_pb): if (rsp_pb.retType != RET_OK): return (RET_ERROR, rsp_pb.retMsg, None) raw_deal_list = rsp_pb.s2c.orderFillList deal_list = [DealListQuery.parse_deal(rsp_pb, deal) for deal in raw_deal_list] return (RET_OK, '', deal_list)<|docstring|>Convert from PLS response to user response<|endoftext|>
b640365f135cf0dcac9e80bf29d10e0503e3dc472c79be07ee1d7ea286959160
def Jn(r, n): '\n numerical spherical bessel functions of order n\n ' return (np.sqrt((np.pi / (2 * r))) * sp.jv((n + 0.5), r))
numerical spherical bessel functions of order n
nff/utils/functions.py
Jn
jkaraguesian/NeuralForceField
0
python
def Jn(r, n): '\n \n ' return (np.sqrt((np.pi / (2 * r))) * sp.jv((n + 0.5), r))
def Jn(r, n): '\n \n ' return (np.sqrt((np.pi / (2 * r))) * sp.jv((n + 0.5), r))<|docstring|>numerical spherical bessel functions of order n<|endoftext|>
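The Jn record evaluates j_n(r) = sqrt(pi / (2 r)) * J_{n+1/2}(r). A quick cross-check of that identity against SciPy's dedicated routine follows, assuming a SciPy version that ships scipy.special.spherical_jn.

# Cross-check of the half-integer-order identity used by Jn above.
import numpy as np
from scipy import special as sp

r = np.linspace(0.1, 10.0, 50)
for n in range(4):
    via_jv = np.sqrt(np.pi / (2 * r)) * sp.jv(n + 0.5, r)  # what Jn(r, n) returns
    direct = sp.spherical_jn(n, r)                         # SciPy's own j_n
    assert np.allclose(via_jv, direct)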
fb1a30817723d94ec7eaf1ff7b1ecdc11dfb61d68cf765a74d0374762315bd28
def Jn_zeros(n, k): '\n Compute the first k zeros of the spherical bessel functions up to order n (excluded)\n ' zerosj = np.zeros((n, k), dtype='float32') zerosj[0] = (np.arange(1, (k + 1)) * np.pi) points = (np.arange(1, (k + n)) * np.pi) racines = np.zeros(((k + n) - 1), dtype='float32') for i in range(1, n): for j in range((((k + n) - 1) - i)): foo = brentq(Jn, points[j], points[(j + 1)], (i,)) racines[j] = foo points = racines zerosj[i][:k] = racines[:k] return zerosj
Compute the first k zeros of the spherical bessel functions up to order n (excluded)
nff/utils/functions.py
Jn_zeros
jkaraguesian/NeuralForceField
0
python
def Jn_zeros(n, k): '\n \n ' zerosj = np.zeros((n, k), dtype='float32') zerosj[0] = (np.arange(1, (k + 1)) * np.pi) points = (np.arange(1, (k + n)) * np.pi) racines = np.zeros(((k + n) - 1), dtype='float32') for i in range(1, n): for j in range((((k + n) - 1) - i)): foo = brentq(Jn, points[j], points[(j + 1)], (i,)) racines[j] = foo points = racines zerosj[i][:k] = racines[:k] return zerosj
def Jn_zeros(n, k): '\n \n ' zerosj = np.zeros((n, k), dtype='float32') zerosj[0] = (np.arange(1, (k + 1)) * np.pi) points = (np.arange(1, (k + n)) * np.pi) racines = np.zeros(((k + n) - 1), dtype='float32') for i in range(1, n): for j in range((((k + n) - 1) - i)): foo = brentq(Jn, points[j], points[(j + 1)], (i,)) racines[j] = foo points = racines zerosj[i][:k] = racines[:k] return zerosj<|docstring|>Compute the first k zeros of the spherical bessel functions up to order n (excluded)<|endoftext|>
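Two sanity checks on Jn_zeros, assuming the NeuralForceField package from this record is installed so that nff.utils.functions exposes Jn and Jn_zeros (otherwise paste the two definitions above into the same module).

# Check the tabulated zeros: order-0 zeros sit at k*pi, and every entry is a root.
import numpy as np
from nff.utils.functions import Jn, Jn_zeros  # assumed import path

zeros = Jn_zeros(n=3, k=5)  # shape (3, 5): first 5 zeros of j_0, j_1, j_2
assert np.allclose(zeros[0], np.arange(1, 6) * np.pi)
for order in range(3):
    assert np.allclose(Jn(zeros[order], order), 0.0, atol=1e-5)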
66d125895cca9f85ce421738695c9f6c95842ace3f5ebb703563f743339cf3b6
def spherical_bessel_formulas(n): '\n Computes the sympy formulas for the spherical bessel functions up to order n (excluded)\n ' x = sym.symbols('x') f = [(sym.sin(x) / x)] a = (sym.sin(x) / x) for i in range(1, n): b = (sym.diff(a, x) / x) f += [sym.simplify((b * ((- x) ** i)))] a = sym.simplify(b) return f
Computes the sympy formulas for the spherical bessel functions up to order n (excluded)
nff/utils/functions.py
spherical_bessel_formulas
jkaraguesian/NeuralForceField
0
python
def spherical_bessel_formulas(n): '\n \n ' x = sym.symbols('x') f = [(sym.sin(x) / x)] a = (sym.sin(x) / x) for i in range(1, n): b = (sym.diff(a, x) / x) f += [sym.simplify((b * ((- x) ** i)))] a = sym.simplify(b) return f
def spherical_bessel_formulas(n): '\n \n ' x = sym.symbols('x') f = [(sym.sin(x) / x)] a = (sym.sin(x) / x) for i in range(1, n): b = (sym.diff(a, x) / x) f += [sym.simplify((b * ((- x) ** i)))] a = sym.simplify(b) return f<|docstring|>Computes the sympy formulas for the spherical bessel functions up to order n (excluded)<|endoftext|>
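Written out, the recursion in spherical_bessel_formulas is Rayleigh's formula for the spherical Bessel functions of the first kind; the first few symbolic outputs are:

% Rayleigh's formula, which the sympy loop above implements term by term:
j_n(x) \;=\; (-x)^{\,n}\left(\frac{1}{x}\frac{d}{dx}\right)^{\!n}\frac{\sin x}{x},
\qquad
j_0(x) = \frac{\sin x}{x},\quad
j_1(x) = \frac{\sin x}{x^{2}} - \frac{\cos x}{x},\quad
j_2(x) = \left(\frac{3}{x^{2}} - 1\right)\frac{\sin x}{x} - \frac{3\cos x}{x^{2}}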
6da74b15a919265b8bdb2c947a9a0b02ee2f0f4811d8f515d3f99fc4b4c1e9fa
def bessel_basis(n, k): '\n Compute the sympy formulas for the normalized and rescaled spherical bessel functions up to\n order n (excluded) and maximum frequency k (excluded).\n ' zeros = Jn_zeros(n, k) normalizer = [] for order in range(n): normalizer_tmp = [] for i in range(k): normalizer_tmp += [(0.5 * (Jn(zeros[(order, i)], (order + 1)) ** 2))] normalizer_tmp = (1 / (np.array(normalizer_tmp) ** 0.5)) normalizer += [normalizer_tmp] f = spherical_bessel_formulas(n) x = sym.symbols('x') bess_basis = [] for order in range(n): bess_basis_tmp = [] for i in range(k): bess_basis_tmp += [sym.simplify((normalizer[order][i] * f[order].subs(x, (zeros[(order, i)] * x))))] bess_basis += [bess_basis_tmp] return bess_basis
Compute the sympy formulas for the normalized and rescaled spherical bessel functions up to order n (excluded) and maximum frequency k (excluded).
nff/utils/functions.py
bessel_basis
jkaraguesian/NeuralForceField
0
python
def bessel_basis(n, k): '\n Compute the sympy formulas for the normalized and rescaled spherical bessel functions up to\n order n (excluded) and maximum frequency k (excluded).\n ' zeros = Jn_zeros(n, k) normalizer = [] for order in range(n): normalizer_tmp = [] for i in range(k): normalizer_tmp += [(0.5 * (Jn(zeros[(order, i)], (order + 1)) ** 2))] normalizer_tmp = (1 / (np.array(normalizer_tmp) ** 0.5)) normalizer += [normalizer_tmp] f = spherical_bessel_formulas(n) x = sym.symbols('x') bess_basis = [] for order in range(n): bess_basis_tmp = [] for i in range(k): bess_basis_tmp += [sym.simplify((normalizer[order][i] * f[order].subs(x, (zeros[(order, i)] * x))))] bess_basis += [bess_basis_tmp] return bess_basis
def bessel_basis(n, k): '\n Compute the sympy formulas for the normalized and rescaled spherical bessel functions up to\n order n (excluded) and maximum frequency k (excluded).\n ' zeros = Jn_zeros(n, k) normalizer = [] for order in range(n): normalizer_tmp = [] for i in range(k): normalizer_tmp += [(0.5 * (Jn(zeros[(order, i)], (order + 1)) ** 2))] normalizer_tmp = (1 / (np.array(normalizer_tmp) ** 0.5)) normalizer += [normalizer_tmp] f = spherical_bessel_formulas(n) x = sym.symbols('x') bess_basis = [] for order in range(n): bess_basis_tmp = [] for i in range(k): bess_basis_tmp += [sym.simplify((normalizer[order][i] * f[order].subs(x, (zeros[(order, i)] * x))))] bess_basis += [bess_basis_tmp] return bess_basis<|docstring|>Compute the sympy formulas for the normalized and rescaled spherical bessel functions up to order n (excluded) and maximum frequency k (excluded).<|endoftext|>
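In closed form, bessel_basis returns, for each order l and frequency index n, the zero-rescaled and normalized function below, where z_{ln} is the n-th positive zero of j_l taken from Jn_zeros. The normalization simply restates the 1/sqrt(0.5 * Jn(z, l+1)**2) factor in the code, and it is what makes the family orthonormal on [0, 1] with weight x^2:

\tilde{j}_{ln}(x) \;=\; \sqrt{\frac{2}{\,j_{l+1}^{2}(z_{ln})\,}}\; j_l\!\left(z_{ln}\,x\right),
\qquad x \in (0, 1],
\qquad
\int_{0}^{1} \tilde{j}_{ln}(x)\,\tilde{j}_{lm}(x)\,x^{2}\,dx = \delta_{nm}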
eb3adc8a5f6deeb528486b1337af820601ac1f3fcdd639ad8d23eb5f8aa0fd10
def sph_harm_prefactor(l, m): '\n Computes the constant pre-factor for the spherical harmonic of degree l and order m\n input:\n l: int, l>=0\n m: int, -l<=m<=l\n ' return (((((2 * l) + 1) * np.math.factorial((l - abs(m)))) / ((4 * np.pi) * np.math.factorial((l + abs(m))))) ** 0.5)
Computes the constant pre-factor for the spherical harmonic of degree l and order m input: l: int, l>=0 m: int, -l<=m<=l
nff/utils/functions.py
sph_harm_prefactor
jkaraguesian/NeuralForceField
0
python
def sph_harm_prefactor(l, m): '\n Computes the constant pre-factor for the spherical harmonic of degree l and order m\n input:\n l: int, l>=0\n m: int, -l<=m<=l\n ' return (((((2 * l) + 1) * np.math.factorial((l - abs(m)))) / ((4 * np.pi) * np.math.factorial((l + abs(m))))) ** 0.5)
def sph_harm_prefactor(l, m): '\n Computes the constant pre-factor for the spherical harmonic of degree l and order m\n input:\n l: int, l>=0\n m: int, -l<=m<=l\n ' return (((((2 * l) + 1) * np.math.factorial((l - abs(m)))) / ((4 * np.pi) * np.math.factorial((l + abs(m))))) ** 0.5)<|docstring|>Computes the constant pre-factor for the spherical harmonic of degree l and order m input: l: int, l>=0 m: int, -l<=m<=l<|endoftext|>
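The constant computed by sph_harm_prefactor is the standard spherical-harmonic normalization:

N_l^{m} \;=\; \sqrt{\frac{(2l+1)\,(l-|m|)!}{4\pi\,(l+|m|)!}}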
0655349d04b36caea5df78276a2c05dadbe8c562a37873ab27b0eec29a608d3b
def associated_legendre_polynomials(l, zero_m_only=True): '\n Computes sympy formulas of the associated legendre polynomials up to order l (excluded).\n ' z = sym.symbols('z') P_l_m = [([0] * (j + 1)) for j in range(l)] P_l_m[0][0] = 1 if (l > 0): P_l_m[1][0] = z for j in range(2, l): P_l_m[j][0] = sym.simplify(((((((2 * j) - 1) * z) * P_l_m[(j - 1)][0]) - ((j - 1) * P_l_m[(j - 2)][0])) / j)) if (not zero_m_only): for i in range(1, l): P_l_m[i][i] = sym.simplify(((1 - (2 * i)) * P_l_m[(i - 1)][(i - 1)])) if ((i + 1) < l): P_l_m[(i + 1)][i] = sym.simplify(((((2 * i) + 1) * z) * P_l_m[i][i])) for j in range((i + 2), l): P_l_m[j][i] = sym.simplify(((((((2 * j) - 1) * z) * P_l_m[(j - 1)][i]) - (((i + j) - 1) * P_l_m[(j - 2)][i])) / (j - i))) return P_l_m
Computes sympy formulas of the associated legendre polynomials up to order l (excluded).
nff/utils/functions.py
associated_legendre_polynomials
jkaraguesian/NeuralForceField
0
python
def associated_legendre_polynomials(l, zero_m_only=True): '\n \n ' z = sym.symbols('z') P_l_m = [([0] * (j + 1)) for j in range(l)] P_l_m[0][0] = 1 if (l > 0): P_l_m[1][0] = z for j in range(2, l): P_l_m[j][0] = sym.simplify(((((((2 * j) - 1) * z) * P_l_m[(j - 1)][0]) - ((j - 1) * P_l_m[(j - 2)][0])) / j)) if (not zero_m_only): for i in range(1, l): P_l_m[i][i] = sym.simplify(((1 - (2 * i)) * P_l_m[(i - 1)][(i - 1)])) if ((i + 1) < l): P_l_m[(i + 1)][i] = sym.simplify(((((2 * i) + 1) * z) * P_l_m[i][i])) for j in range((i + 2), l): P_l_m[j][i] = sym.simplify(((((((2 * j) - 1) * z) * P_l_m[(j - 1)][i]) - (((i + j) - 1) * P_l_m[(j - 2)][i])) / (j - i))) return P_l_m
def associated_legendre_polynomials(l, zero_m_only=True): '\n \n ' z = sym.symbols('z') P_l_m = [([0] * (j + 1)) for j in range(l)] P_l_m[0][0] = 1 if (l > 0): P_l_m[1][0] = z for j in range(2, l): P_l_m[j][0] = sym.simplify(((((((2 * j) - 1) * z) * P_l_m[(j - 1)][0]) - ((j - 1) * P_l_m[(j - 2)][0])) / j)) if (not zero_m_only): for i in range(1, l): P_l_m[i][i] = sym.simplify(((1 - (2 * i)) * P_l_m[(i - 1)][(i - 1)])) if ((i + 1) < l): P_l_m[(i + 1)][i] = sym.simplify(((((2 * i) + 1) * z) * P_l_m[i][i])) for j in range((i + 2), l): P_l_m[j][i] = sym.simplify(((((((2 * j) - 1) * z) * P_l_m[(j - 1)][i]) - (((i + j) - 1) * P_l_m[(j - 2)][i])) / (j - i))) return P_l_m<|docstring|>Computes sympy formulas of the associated legendre polynomials up to order l (excluded).<|endoftext|>
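The three recurrences coded in associated_legendre_polynomials are listed below in z = cos(theta); note that for m > 0 the usual (1 - z^2)^{m/2} factor is not included here, so it is presumably applied separately wherever these polynomials are combined into spherical harmonics.

% m = 0 column (Bonnet's recursion):
j\,P_j^{0}(z) = (2j-1)\,z\,P_{j-1}^{0}(z) - (j-1)\,P_{j-2}^{0}(z), \qquad P_0^{0}=1,\;\; P_1^{0}=z
% diagonal and first sub-diagonal entries for m > 0:
P_i^{i}(z) = (1-2i)\,P_{i-1}^{i-1}(z), \qquad P_{i+1}^{i}(z) = (2i+1)\,z\,P_i^{i}(z)
% general recursion for fixed m = i:
(j-i)\,P_j^{i}(z) = (2j-1)\,z\,P_{j-1}^{i}(z) - (i+j-1)\,P_{j-2}^{i}(z)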