ext (stringclasses, 9 values) | sha (stringlengths, 40) | content (stringlengths, 3 to 1.04M) |
---|---|---|
py | b40443d33951fd00cd76b27080cfecbc77a331a8 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
class FunctionalList:
''' Implements the functionality of the built-in list type and adds a few extra methods: head, tail, init, last, drop, take '''
def __init__(self, values=None):
if values is None:
self.values = []
else:
self.values = values
def __len__(self):
return len(self.values)
def __getitem__(self, key):
return self.values[key]
def __setitem__(self, key, value):
self.values[key] = value
def __delitem__(self, key):
del self.values[key]
def __iter__(self):
return iter(self.values)
def __reversed__(self):
return FunctionalList(reversed(self.values))
def append(self, value):
self.values.append(value)
def head(self):
# get the first element
return self.values[0]
def tail(self):
# get all elements after the first one
return self.values[1:]
def init(self):
# get all elements before the last one
return self.values[:-1]
def last(self):
# get the last element
return self.values[-1]
def drop(self, n):
# get all elements except the first N
return self.values[n:]
def take(self, n):
# get the first N elements
return self.values[:n]
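# Illustrative usage sketch (not part of the original class):
#   fl = FunctionalList([1, 2, 3, 4, 5])
#   fl.head()   -> 1
#   fl.tail()   -> [2, 3, 4, 5]
#   fl.init()   -> [1, 2, 3, 4]
#   fl.last()   -> 5
#   fl.drop(2)  -> [3, 4, 5]
#   fl.take(2)  -> [1, 2]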
|
py | b40444630d188c794e8a026369dc78ada4ffd2d5 |
def test_add_project(app, json_projects):
project = json_projects
old_projects = app.soap.get_projects()
flag = app.project.add_new_project(project)
new_projects = app.soap.get_projects()
if flag:
old_projects.append(json_projects)
assert sorted(old_projects, key=lambda project: project.name) == sorted(new_projects, key=lambda project: project.name)
|
py | b40444868b847a4d8438002780e42acb5e793f31 | #!/router/bin/python
from collections import OrderedDict, namedtuple
from scapy.utils import ltoa
from scapy.error import Scapy_Exception
import random
import base64
import string
import traceback
import copy
import imp
from ..common.trex_exceptions import *
from ..common.trex_types import verify_exclusive_arg, validate_type
from ..utils.text_opts import format_num
from .trex_stl_packet_builder_interface import CTrexPktBuilderInterface
from .trex_stl_packet_builder_scapy import *
# base class for TX mode
class STLTXMode(object):
""" mode rate speed """
def __init__ (self, pps = None, bps_L1 = None, bps_L2 = None, percentage = None):
"""
Speed can be given in packets per second (pps), L2/L1 bps, or port percentage.
Use only one unit.
For example, you can enter pps = 10000 or bps_L1 = 10.
:parameters:
pps : float
Packets per second
bps_L1 : float
Bits per second L1 (with IPG)
bps_L2 : float
Bits per second L2 (Ethernet-FCS)
percentage : float
Link interface percentage (0-100). Example: 10 means 10% of the port link rate
.. code-block:: python
# STLTXMode Example
mode = STLTXCont(pps = 10)
mode = STLTXCont(bps_L1 = 10000000) #10mbps L1
mode = STLTXCont(bps_L2 = 10000000) #10mbps L2
mode = STLTXCont(percentage = 10) #10%
"""
args = [pps, bps_L1, bps_L2, percentage]
# default
if all([x is None for x in args]):
pps = 1.0
else:
verify_exclusive_arg(args)
self.fields = {'rate': {}}
if pps is not None:
validate_type('pps', pps, [float, int])
self.fields['rate']['type'] = 'pps'
self.fields['rate']['value'] = pps
elif bps_L1 is not None:
validate_type('bps_L1', bps_L1, [float, int])
self.fields['rate']['type'] = 'bps_L1'
self.fields['rate']['value'] = bps_L1
elif bps_L2 is not None:
validate_type('bps_L2', bps_L2, [float, int])
self.fields['rate']['type'] = 'bps_L2'
self.fields['rate']['value'] = bps_L2
elif percentage is not None:
validate_type('percentage', percentage, [float, int])
if not (percentage > 0 and percentage <= 100):
raise TRexArgumentError('percentage', percentage)
self.fields['rate']['type'] = 'percentage'
self.fields['rate']['value'] = percentage
def to_json (self):
return dict(self.fields)
@staticmethod
def from_json (json_data):
try:
mode = json_data['mode']
rate = mode['rate']
# check the rate type
if rate['type'] not in ['pps', 'bps_L1', 'bps_L2', 'percentage']:
raise TRexError("from_json: invalid rate type '{0}'".format(rate['type']))
# construct the pair
kwargs = {rate['type'] : rate['value']}
if mode['type'] == 'single_burst':
return STLTXSingleBurst(total_pkts = mode['total_pkts'], **kwargs)
elif mode['type'] == 'multi_burst':
return STLTXMultiBurst(pkts_per_burst = mode['pkts_per_burst'],
ibg = mode['ibg'],
count = mode['count'],
**kwargs)
elif mode['type'] == 'continuous':
return STLTXCont(**kwargs)
else:
raise TRexError("from_json: unknown mode type '{0}'".format(mode['type']))
except KeyError as e:
raise TRexError("from_json: missing field {0} from JSON".format(e))
# continuous mode
class STLTXCont(STLTXMode):
""" Continuous mode """
def __init__ (self, **kwargs):
"""
Continuous mode
see :class:`trex.stl.trex_stl_streams.STLTXMode` for rate
.. code-block:: python
# STLTXCont Example
mode = STLTXCont(pps = 10)
"""
super(STLTXCont, self).__init__(**kwargs)
self.fields['type'] = 'continuous'
@staticmethod
def __str__ ():
return "Continuous"
# single burst mode
class STLTXSingleBurst(STLTXMode):
""" Single burst mode """
def __init__ (self, total_pkts = 1, **kwargs):
"""
Single burst mode
:parameters:
total_pkts : int
Number of packets for this burst
see :class:`trex.stl.trex_stl_streams.STLTXMode` for rate
.. code-block:: python
# STLTXSingleBurst Example
mode = STLTXSingleBurst( pps = 10, total_pkts = 1)
"""
if not isinstance(total_pkts, int):
raise TRexArgumentError('total_pkts', total_pkts)
super(STLTXSingleBurst, self).__init__(**kwargs)
self.fields['type'] = 'single_burst'
self.fields['total_pkts'] = total_pkts
@staticmethod
def __str__ ():
return "Single Burst"
# multi burst mode
class STLTXMultiBurst(STLTXMode):
""" Multi-burst mode """
def __init__ (self,
pkts_per_burst = 1,
ibg = 0.0, # usec not SEC
count = 1,
**kwargs):
"""
Multi-burst mode
:parameters:
pkts_per_burst: int
Number of packets per burst
ibg : float
Inter-burst gap in usec. 1,000,000.0 is 1 sec.
count : int
Number of bursts
see :class:`trex.stl.trex_stl_streams.STLTXMode` for rate
.. code-block:: python
# STLTXMultiBurst Example
mode = STLTXMultiBurst(pps = 10, pkts_per_burst = 1, count = 10, ibg = 10.0)
"""
if not isinstance(pkts_per_burst, int):
raise TRexArgumentError('pkts_per_burst', pkts_per_burst)
if not isinstance(ibg, (int, float)):
raise TRexArgumentError('ibg', ibg)
if not isinstance(count, int):
raise TRexArgumentError('count', count)
super(STLTXMultiBurst, self).__init__(**kwargs)
self.fields['type'] = 'multi_burst'
self.fields['pkts_per_burst'] = pkts_per_burst
self.fields['ibg'] = ibg
self.fields['count'] = count
@staticmethod
def __str__ ():
return "Multi Burst"
STLStreamDstMAC_CFG_FILE=0
STLStreamDstMAC_PKT =1
STLStreamDstMAC_ARP =2
class STLFlowStatsInterface(object):
def __init__ (self, pg_id, vxlan, multi_tag):
self.fields = {
'enabled': True,
'stream_id': pg_id,
'vxlan': vxlan,
'multi_tag': multi_tag,
}
def to_json (self):
""" Dump as json"""
return dict(self.fields)
@staticmethod
def from_json (json_data):
'''
create the object from JSON output
'''
try:
# no flow stats
if not json_data['enabled']:
return None
# flow stats
if json_data['rule_type'] == 'stats':
return STLFlowStats(pg_id = json_data['stream_id'])
# latency
elif json_data['rule_type'] == 'latency':
return STLFlowLatencyStats(pg_id = json_data['stream_id'], multi_tag = json_data['multi_tag'])
else:
raise TRexError("from_json: invalid flow stats type {0}".format(json_data['rule_type']))
except KeyError as e:
raise TRexError("from_json: missing field {0} from JSON".format(e))
@staticmethod
def defaults ():
return {'enabled' : False}
class STLFlowStats(STLFlowStatsInterface):
""" Define per stream basic stats
.. code-block:: python
# STLFlowStats Example
flow_stats = STLFlowStats(pg_id = 7)
"""
def __init__(self, pg_id, vxlan = False, multi_tag = False):
super(STLFlowStats, self).__init__(pg_id, vxlan, multi_tag)
self.fields['rule_type'] = 'stats'
class STLFlowLatencyStats(STLFlowStatsInterface):
""" Define per stream basic stats + latency, jitter, packet reorder/loss
.. code-block:: python
# STLFlowLatencyStats Example
flow_stats = STLFlowLatencyStats(pg_id = 7)
"""
def __init__(self, pg_id, vxlan = False, multi_tag = False):
super(STLFlowLatencyStats, self).__init__(pg_id, vxlan, multi_tag)
self.fields['rule_type'] = 'latency'
def get_multi_tag (self):
""" Get multi tag value from object """
return self.fields['multi_tag']
class STLStream(object):
""" One stream object. Includes mode, Field Engine mode packet template and Rx stats
.. code-block:: python
# STLStream Example
base_pkt = Ether()/IP(src="16.0.0.1",dst="48.0.0.1")/UDP(dport=12,sport=1025)
pad = max(0, size - len(base_pkt)) * 'x'
STLStream( isg = 10.0, # start-in delay
name ='S0',
packet = STLPktBuilder(pkt = base_pkt/pad),
mode = STLTXSingleBurst( pps = 10, total_pkts = 1),
next = 'S1') # point to the next stream
"""
def __init__ (self,
name = None,
packet = None,
mode = STLTXCont(pps = 1),
enabled = True,
self_start = True,
isg = 0.0,
flow_stats = None,
next = None,
stream_id = None,
action_count = 0,
random_seed =0,
mac_src_override_by_pkt = None,
mac_dst_override_mode = None, #see STLStreamDstMAC_xx
dummy_stream = False,
start_paused = False,
core_id = -1
):
"""
Stream object
:parameters:
name : string
Name of the stream. Required if this stream is dependent on another stream, and another stream needs to refer to this stream by name.
packet : STLPktBuilder see :class:`trex.stl.trex_stl_packet_builder_scapy.STLPktBuilder`
Template packet and field engine program. Example: packet = STLPktBuilder(pkt = base_pkt/pad)
mode : :class:`trex.stl.trex_stl_streams.STLTXCont` or :class:`trex.stl.trex_stl_streams.STLTXSingleBurst` or :class:`trex.stl.trex_stl_streams.STLTXMultiBurst`
enabled : bool
Indicates whether the stream is enabled.
self_start : bool
If False, another stream activates it.
isg : float
Inter-stream gap in usec. Time to wait until the stream sends the first packet.
flow_stats : :class:`trex.stl.trex_stl_streams.STLFlowStats`
Per stream statistic object. See: STLFlowStats
next : string
Name of the stream to activate.
stream_id :
For use by HLTAPI.
action_count : uint16_t
If there is a next stream, number of loops before stopping. Default: 0 (unlimited).
random_seed: uint32_t
If given, the seed for this stream will be this value. Useful if you need a deterministic random value.
mac_src_override_by_pkt : bool
Template packet sets src MAC.
mac_dst_override_mode : STLStreamDstMAC_xx
Template packet sets dst MAC.
dummy_stream : bool
For delay purposes, will not be sent.
start_paused : bool
Experimental flag, might be removed in future!
Stream will not be transmitted until un-paused.
core_id: int
Pins the stream to core_id in case core_id is specified and 0 <= core_id < number of cores.
Default value = -1.
Negative value (default) keeps the current behaviour.
"""
# type checking
validate_type('name', name, (type(None), int, basestring))
validate_type('next', next, (type(None), int, basestring))
validate_type('mode', mode, STLTXMode)
validate_type('packet', packet, (type(None), CTrexPktBuilderInterface))
validate_type('flow_stats', flow_stats, (type(None), STLFlowStatsInterface))
validate_type('enabled', enabled, bool)
validate_type('self_start', self_start, bool)
validate_type('isg', isg, (int, float))
validate_type('stream_id', stream_id, (type(None), int))
validate_type('random_seed',random_seed,int)
validate_type('dummy_stream', dummy_stream, bool)
validate_type('start_paused', start_paused, bool)
validate_type('core_id', core_id, int)
if (type(mode) == STLTXCont) and (next != None):
raise TRexError("Continuous stream cannot have a next stream ID")
if (type(flow_stats) == STLFlowLatencyStats and core_id >= 0):
raise TRexError("Core ID is not supported for latency streams.")
# tag for the stream and next - can be anything
self.name = name
self.next = next
self.id = stream_id
# set externally
self.fields = {}
if not packet:
packet = STLPktBuilder(pkt = Ether()/IP())
self.scapy_pkt_builder = packet
# packet builder
packet.compile()
int_mac_src_override_by_pkt = 0;
int_mac_dst_override_mode = 0;
if mac_src_override_by_pkt == None:
if not packet.is_default_src_mac():
int_mac_src_override_by_pkt = 1
else:
int_mac_src_override_by_pkt = int(mac_src_override_by_pkt);
if mac_dst_override_mode == None:
if not packet.is_default_dst_mac():
int_mac_dst_override_mode = STLStreamDstMAC_PKT
else:
int_mac_dst_override_mode = int(mac_dst_override_mode);
self.is_default_mac = not (int_mac_src_override_by_pkt or int_mac_dst_override_mode)
self.fields['flags'] = (int_mac_src_override_by_pkt&1) + ((int_mac_dst_override_mode&3)<<1) + (int(dummy_stream) << 3)
self.fields['action_count'] = action_count
# basic fields
self.fields['enabled'] = enabled
self.fields['self_start'] = self_start
self.fields['start_paused'] = start_paused
self.fields['isg'] = isg
self.fields['core_id'] = core_id
if random_seed !=0 :
self.fields['random_seed'] = random_seed # optional
# mode
self.fields['mode'] = mode.to_json()
self.mode_desc = str(mode)
# packet and VM
pkt_json = packet.to_json()
self.fields['packet'] = pkt_json['packet']
self.fields['vm'] = pkt_json['vm']
self.pkt = base64.b64decode(self.fields['packet']['binary'])
# this is heavy, calculate lazy
self.packet_desc = None
if not flow_stats:
self.fields['flow_stats'] = STLFlowStats.defaults()
else:
self.fields['flow_stats'] = flow_stats.to_json()
def __str__ (self):
s = "Stream Name: {0}\n".format(self.name)
s += "Stream Next: {0}\n".format(self.next)
s += "Stream JSON:\n{0}\n".format(json.dumps(self.fields, indent = 4, separators=(',', ': '), sort_keys = True))
return s
def get_id (self):
""" Get the stream id after resolution """
return self.id
def has_custom_mac_addr (self):
""" Return True if src or dst MAC were set as custom """
return not self.is_default_mac
def is_explicit_dst_mac(self):
return ((self.fields['flags'] >> 1) & 0x3) == STLStreamDstMAC_PKT
def get_name (self):
""" Get the stream name """
return self.name
def get_next (self):
""" Get next stream object """
return self.next
def has_flow_stats (self):
""" Return True if stream was configured with flow stats """
return self.fields['flow_stats']['enabled']
def get_pg_id (self):
""" Returns packet group ID if exists """
return self.fields['flow_stats'].get('stream_id')
def get_flow_stats_type (self):
""" Returns flow stats type if exists """
return self.fields['flow_stats'].get('rule_type')
def get_pkt (self):
""" Get packet as string """
return self.pkt
def get_pkt_len (self, count_crc = True):
""" Get packet number of bytes """
pkt_len = len(self.get_pkt())
if count_crc:
pkt_len += 4
return pkt_len
def is_dummy (self):
""" return true if stream is marked as dummy stream """
return ( (self.fields['flags'] & 0x8) == 0x8 )
def get_pkt_type (self):
""" Get packet description. Example: IP:UDP """
if self.is_dummy():
return '-'
elif self.packet_desc == None:
self.packet_desc = STLPktBuilder.pkt_layers_desc_from_buffer(self.get_pkt())
return self.packet_desc
def get_mode (self):
return 'delay' if self.is_dummy() else self.mode_desc
@staticmethod
def get_rate_from_field (rate_json):
""" Get rate from json """
t = rate_json['type']
v = rate_json['value']
if t == "pps":
return format_num(v, suffix = "pps")
elif t == "bps_L1":
return format_num(v, suffix = "bps (L1)")
elif t == "bps_L2":
return format_num(v, suffix = "bps (L2)")
elif t == "percentage":
return format_num(v, suffix = "%")
def get_rate (self):
return self.get_rate_from_field(self.fields['mode']['rate'])
def to_pkt_dump (self):
""" Print packet description from Scapy """
if self.name:
print("Stream Name: ",self.name)
scapy_b = self.scapy_pkt_builder;
if scapy_b and isinstance(scapy_b,STLPktBuilder):
scapy_b.to_pkt_dump()
else:
print("Nothing to dump")
# return True if FE variable is being written only to IP src or dst, to show its value as IP
@staticmethod
def __is_all_IP(vm_var_usage_list):
for offsets_tuple in vm_var_usage_list:
if type(offsets_tuple) is not tuple:
return False
if offsets_tuple[0] != 'IP' or offsets_tuple[2] not in ('src', 'dst'):
return False
return True
# replace offset number by user-friendly string 'IP.src' etc.
@staticmethod
def __fix_offset_by_name(pkt, inst, name):
if name in inst:
ret = pkt.get_field_by_offset(inst[name])
if ret:
if inst['type'] in ('fix_checksum_ipv4', 'fix_checksum_hw', 'fix_checksum_icmpv6'): # do not include field name
if ret[1] == 0: # layer index is redundant
inst[name] = "'%s'" % ret[0]
else:
inst[name] = "'%s:%s'" % ret[0:2]
else:
if ret[1] == 0:
inst[name] = "'%s.%s'" % (ret[0], ret[2])
else:
inst[name] = "'%s:%s.%s'" % ret[0:3]
# returns the Python code (text) to build this stream; inside the generated code the stream is assigned to the variable "stream"
def to_code(self):
""" Convert to Python code as profile """
layer = Ether(self.pkt)
pkt = CTRexScapyPktUtl(layer)
vm_var_usage = {}
for inst in self.fields['vm']['instructions']:
if inst['type'] == 'trim_pkt_size':
fv_name = inst['name']
if fv_name in vm_var_usage:
vm_var_usage[fv_name].append('trim')
else:
vm_var_usage[fv_name] = ['trim']
if 'pkt_offset' in inst:
fv_name = inst.get('fv_name', inst.get('name'))
if fv_name in vm_var_usage:
vm_var_usage[fv_name].append(pkt.get_field_by_offset(inst['pkt_offset']))
else:
vm_var_usage[fv_name] = [pkt.get_field_by_offset(inst['pkt_offset'])]
vm_list = ['vm = STLVM()']
for inst in self.fields['vm']['instructions']:
inst = dict(inst)
#print inst
self.__fix_offset_by_name(pkt, inst, 'pkt_offset')
if 'is_big_endian' in inst:
inst['byte_order'] = "'big'" if inst['is_big_endian'] else "'little'"
if inst['type'] == 'flow_var':
value_list = inst.get('value_list')
if inst['name'] in vm_var_usage and inst['size'] == 4 and self.__is_all_IP(vm_var_usage[inst['name']]):
if value_list is not None:
inst['value_list'] = ['%s' % ltoa(val) for val in value_list]
else:
inst['init_value'] = "'%s'" % ltoa(inst['init_value'])
inst['min_value'] = "'%s'" % ltoa(inst['min_value'])
inst['max_value'] = "'%s'" % ltoa(inst['max_value'])
if inst['next_var']:
inst['next_var'] = "'%s'" % inst['next_var']
common_start = "vm.var(name='{name}', op='{op}', step={step}, size={size}, split_to_cores={split_to_cores}, next_var={next_var}, "
if value_list is not None:
vm_list.append((common_start + "min_value=None, max_value=None, value_list={value_list})").format(**inst))
else:
vm_list.append((common_start + "min_value={min_value}, max_value={max_value}, init_value={init_value})").format(**inst))
elif inst['type'] == 'write_flow_var':
vm_list.append("vm.write(fv_name='{name}', pkt_offset={pkt_offset}, add_val={add_value}, byte_order={byte_order})".format(**inst))
elif inst['type'] == 'write_mask_flow_var':
inst['mask'] = hex(inst['mask'])
vm_list.append("vm.write_mask(fv_name='{name}', pkt_offset={pkt_offset}, pkt_cast_size={pkt_cast_size}, mask={mask}, shift={shift}, add_val={add_value}, byte_order={byte_order})".format(**inst))
elif inst['type'] == 'fix_checksum_ipv4':
vm_list.append("vm.fix_chksum(offset={pkt_offset})".format(**inst))
elif inst['type'] == 'fix_checksum_hw':
inst['l3_offset'] = inst['l2_len']
inst['l4_offset'] = inst['l2_len'] + inst['l3_len']
self.__fix_offset_by_name(pkt, inst, 'l3_offset')
self.__fix_offset_by_name(pkt, inst, 'l4_offset')
vm_list.append("vm.fix_chksum_hw(l3_offset={l3_offset}, l4_offset={l4_offset}, l4_type={l4_type})".format(**inst))
elif inst['type'] == 'fix_checksum_icmpv6':
inst['l3_offset'] = inst['l2_len']
inst['l4_offset'] = inst['l2_len'] + inst['l3_len']
self.__fix_offset_by_name(pkt, inst, 'l3_offset')
self.__fix_offset_by_name(pkt, inst, 'l4_offset')
vm_list.append("vm.fix_chksum_icmpv6(l3_offset={l3_offset}, l4_offset={l4_offset})".format(**inst))
elif inst['type'] == 'trim_pkt_size':
vm_list.append("vm.trim(fv_name='{name}')".format(**inst))
elif inst['type'] == 'tuple_flow_var':
inst['ip_min'] = ltoa(inst['ip_min'])
inst['ip_max'] = ltoa(inst['ip_max'])
vm_list.append("vm.tuple_var(name='{name}', ip_min='{ip_min}', ip_max='{ip_max}', port_min={port_min}, port_max={port_max}, limit_flows={limit_flows}, flags={flags})".format(**inst))
elif inst['type'] == 'flow_var_rand_limit':
if inst['next_var']:
inst['next_var'] = "'%s'" % inst['next_var']
vm_list.append("vm.repeatable_random_var(fv_name='{name}', size={size}, limit={limit}, seed={seed}, min_value={min_value}, max_value={max_value}, split_to_cores={split_to_cores}, next_var={next_var})".format(**inst))
else:
raise TRexError('Got unhandled FE instruction type: %s' % inst['type'])
if 'cache' in self.fields['vm']:
vm_list.append('vm.set_cached(%s)' % self.fields['vm']['cache'])
vm_code = '\n'.join(vm_list)
stream_params_list = []
stream_params_list.append('packet = STLPktBuilder(pkt = packet, vm = vm)')
if default_STLStream.name != self.name:
stream_params_list.append('name = %s' % STLStream.__add_quotes(self.name))
if default_STLStream.fields['enabled'] != self.fields['enabled']:
stream_params_list.append('enabled = %s' % self.fields['enabled'])
if default_STLStream.fields['self_start'] != self.fields['self_start']:
stream_params_list.append('self_start = %s' % self.fields['self_start'])
if default_STLStream.fields['start_paused'] != self.fields['start_paused']:
stream_params_list.append('start_paused = %s' % self.fields['start_paused'])
if default_STLStream.fields['isg'] != self.fields['isg']:
stream_params_list.append('isg = %s' % self.fields['isg'])
if default_STLStream.fields['flow_stats'] != self.fields['flow_stats']:
if 'rule_type' in self.fields['flow_stats']:
stream_params_list.append('flow_stats = %s(%s)' % ('STLFlowStats' if self.fields['flow_stats']['rule_type'] == 'stats' else 'STLFlowLatencyStats', self.fields['flow_stats']['stream_id']))
if default_STLStream.next != self.next:
stream_params_list.append('next = %s' % STLStream.__add_quotes(self.next))
if default_STLStream.id != self.id:
stream_params_list.append('stream_id = %s' % self.id)
if default_STLStream.fields['action_count'] != self.fields['action_count']:
stream_params_list.append('action_count = %s' % self.fields['action_count'])
if 'random_seed' in self.fields:
stream_params_list.append('random_seed = %s' % self.fields.get('random_seed', 0))
if default_STLStream.fields['core_id'] != self.fields['core_id']:
stream_params_list.append('core_id = %s' % self.fields['core_id'])
stream_params_list.append('mac_src_override_by_pkt = %s' % bool(self.fields['flags'] & 1))
stream_params_list.append('mac_dst_override_mode = %s' % (self.fields['flags'] >> 1 & 3))
if self.is_dummy():
stream_params_list.append('dummy_stream = True')
mode_args = ''
for key, value in self.fields['mode'].items():
if key not in ('rate', 'type'):
mode_args += '%s = %s, ' % (key, value)
mode_args += '%s = %s' % (self.fields['mode']['rate']['type'], self.fields['mode']['rate']['value'])
if self.mode_desc == STLTXCont.__str__():
stream_params_list.append('mode = STLTXCont(%s)' % mode_args)
elif self.mode_desc == STLTXSingleBurst().__str__():
stream_params_list.append('mode = STLTXSingleBurst(%s)' % mode_args)
elif self.mode_desc == STLTXMultiBurst().__str__():
stream_params_list.append('mode = STLTXMultiBurst(%s)' % mode_args)
else:
raise TRexError('Could not determine mode: %s' % self.mode_desc)
stream = "stream = STLStream(" + ',\n '.join(stream_params_list) + ')'
layer.hide_defaults() # remove fields with default values
imports_arr = []
layers_commands = []
# remove checksums, add imports if needed
while layer:
layer_class = layer.__class__.__name__
if layer_class not in vars(scapy.layers.all): # custom import
found_import = False
for module_path, module in sys.modules.items():
if not module_path.startswith(('scapy.layers', 'scapy.contrib')):
continue
import_string = 'from %s import %s' % (module_path, layer_class)
if import_string in imports_arr: # already present in extra imports
found_import = True
break
if hasattr(module, layer_class): # add as extra import
imports_arr.append(import_string)
found_import = True
break
if not found_import:
raise TRexError('Could not determine import of layer %s' % layer.name)
payload = layer.payload
layer.remove_payload()
if isinstance(layer, Raw):
payload_data = bytes(layer)
if payload_data == payload_data[0:1] * len(payload_data): # compact form Raw('x' * 100) etc.
layer_command = '%s * %s)' % (Raw(payload_data[0:1]).command().rstrip(')'), len(payload_data))
else:
layer_command = layer.command()
layers_commands.append(layer_command)
else:
layers_commands.append(layer.command())
layer = payload
imports = '\n'.join(imports_arr)
packet_code = 'packet = (' + (' / \n ').join(layers_commands) + ')'
if imports:
return '\n'.join([imports, packet_code, vm_code, stream])
return '\n'.join([packet_code, vm_code, stream])
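# Illustrative sketch (not part of the original module):
#   s = STLStream(packet = STLPktBuilder(pkt = Ether()/IP()/UDP()), mode = STLTXCont(pps = 100))
#   print(s.to_code())   # prints Python code that rebuilds this stream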
# add quoted for string, or leave as is if other type
@staticmethod
def __add_quotes(arg):
if type(arg) is str:
return "'%s'" % arg
return arg
# used to replace non-printable characters with hex
@staticmethod
def __replchars_to_hex(match):
return r'\x{0:02x}'.format(ord(match.group()))
def to_json (self):
""" convert stream object to JSON """
json_data = dict(self.fields)
# required fields for 'from_json' - send it to the server
if self.name:
json_data['name'] = self.name
if self.next:
json_data['next'] = self.next
return json_data
@staticmethod
def from_json (json_data):
# packet builder
builder = STLPktBuilder.from_json(json_data)
mode = STLTXMode.from_json(json_data)
# flow stats / latency
fs = STLFlowStatsInterface.from_json(json_data['flow_stats'])
try:
return STLStream(name = json_data.get('name'),
next = json_data.get('next'),
packet = builder,
mode = mode,
flow_stats = fs,
enabled = json_data['enabled'],
self_start = json_data['self_start'],
isg = json_data['isg'],
action_count = json_data['action_count'],
core_id = json_data.get('core_id', -1),
stream_id = json_data.get('stream_id'),
random_seed = json_data.get('random_seed', 0),
mac_src_override_by_pkt = (json_data['flags'] & 0x1) == 0x1,
mac_dst_override_mode = (json_data['flags'] >> 1 & 0x3),
dummy_stream = (json_data['flags'] & 0x8) == 0x8,
start_paused = json_data.get('start_paused', False))
except KeyError as e:
raise TRexError("from_json: missing field {0} from JSON".format(e))
def clone (self):
return STLStream.from_json(self.to_json())
# profile class
class STLProfile(object):
""" Describe a list of streams
.. code-block:: python
# STLProfile Example
profile = STLProfile( [ STLStream( isg = 10.0, # start-in delay
name ='S0',
packet = STLPktBuilder(pkt = base_pkt/pad),
mode = STLTXSingleBurst( pps = 10, total_pkts = self.burst_size),
next = 'S1'), # point to next stream
STLStream( self_start = False, # stream is disabled; enabled through S0
name ='S1',
packet = STLPktBuilder(pkt = base_pkt1/pad),
mode = STLTXSingleBurst( pps = 10, total_pkts = self.burst_size),
next = 'S2' ),
STLStream( self_start = False, # stream is disabled; enabled through S0
name ='S2',
packet = STLPktBuilder(pkt = base_pkt2/pad),
mode = STLTXSingleBurst( pps = 10, total_pkts = self.burst_size )
)
]).get_streams()
"""
def __init__ (self, streams = None):
"""
:parameters:
streams : list of :class:`trex.stl.trex_stl_streams.STLStream`
a list of stream objects
"""
if streams == None:
streams = []
if not type(streams) == list:
streams = [streams]
if not all([isinstance(stream, STLStream) for stream in streams]):
raise TRexArgumentError('streams', streams, valid_values = STLStream)
self.streams = streams
self.meta = None
def get_streams (self):
""" Get the list of streams"""
return self.streams
def __str__ (self):
return '\n'.join([str(stream) for stream in self.streams])
def is_pauseable (self):
return all([x.get_mode() == "Continuous" for x in self.get_streams()])
def has_custom_mac_addr (self):
return any([x.has_custom_mac_addr() for x in self.get_streams()])
def has_flow_stats (self):
return any([x.has_flow_stats() for x in self.get_streams()])
@staticmethod
def __flatten_json (stream_list):
# GUI provides a YAML/JSON from the RPC capture - flatten it to match
if not isinstance(stream_list, list):
return
for stream in stream_list:
if 'stream' in stream:
d = stream['stream']
del stream['stream']
stream.update(d)
@staticmethod
def __load_plain (plain_file, fmt):
"""
Load (from JSON / YAML file) a profile with a number of streams
'fmt' can be either 'json' or 'yaml'
"""
# check filename
if not os.path.isfile(plain_file):
raise TRexError("file '{0}' does not exists".format(plain_file))
# read the content
with open(plain_file) as f:
try:
data = json.load(f) if fmt == 'json' else yaml.load(f)
STLProfile.__flatten_json(data)
except (ValueError, yaml.parser.ParserError):
raise TRexError("file '{0}' is not a valid {1} formatted file".format(plain_file, 'JSON' if fmt == 'json' else 'YAML'))
return STLProfile.from_json(data)
@staticmethod
def load_yaml (yaml_file):
""" Load (from YAML file) a profile with a number of streams """
return STLProfile.__load_plain(yaml_file, fmt = 'yaml')
@staticmethod
def load_json (json_file):
""" Load (from JSON file) a profile with a number of streams """
return STLProfile.__load_plain(json_file, fmt = 'json')
@staticmethod
def get_module_tunables(module):
# remove self and variables
func = module.register().get_streams
argc = func.__code__.co_argcount
tunables = func.__code__.co_varnames[1:argc]
# fetch defaults
defaults = func.__defaults__
if defaults is None:
return {}
if len(defaults) != (argc - 1):
raise TRexError("Module should provide default values for all arguments on get_streams()")
output = {}
for t, d in zip(tunables, defaults):
output[t] = d
return output
@staticmethod
def load_py (python_file, direction = 0, port_id = 0, **kwargs):
""" Load from Python profile """
# in case load_py is not being called from stl_client, there is need to convert
# the tunables to the new format to support argparse
if "tunables" not in kwargs:
tunable_list = []
# converting from tunables dictionary to list
for tunable_key in kwargs:
tunable_list.extend(["--{}".format(tunable_key), str(kwargs[tunable_key])])
kwargs["tunables"] = tunable_list
# check filename
if not os.path.isfile(python_file):
raise TRexError("File '{0}' does not exist".format(python_file))
basedir = os.path.dirname(python_file)
sys.path.insert(0, basedir)
dont_write_bytecode = sys.dont_write_bytecode
try:
file = os.path.basename(python_file).split('.')[0]
sys.dont_write_bytecode = True
module = __import__(file, globals(), locals(), [], 0)
imp.reload(module) # reload the update
t = STLProfile.get_module_tunables(module)
#for arg in kwargs:
# if arg not in t:
# raise TRexError("Profile {0} does not support tunable '{1}' - supported tunables are: '{2}'".format(python_file, arg, t))
try:
streams = module.register().get_streams(direction = direction,
port_id = port_id,
**kwargs)
except SystemExit:
# called ".. -t --help", return None
return None
profile = STLProfile(streams)
profile.meta = {'type': 'python',
'tunables': t}
return profile
except Exception as e:
a, b, tb = sys.exc_info()
x =''.join(traceback.format_list(traceback.extract_tb(tb)[1:])) + a.__name__ + ": " + str(b) + "\n"
summary = "\nPython Traceback follows:\n\n" + x
raise TRexError(summary)
finally:
sys.dont_write_bytecode = dont_write_bytecode
sys.path.remove(basedir)
# loop_count = 0 means loop forever
@staticmethod
def load_pcap (pcap_file,
ipg_usec = None,
speedup = 1.0,
loop_count = 1,
vm = None,
packet_hook = None,
split_mode = None,
min_ipg_usec = None,
src_mac_pcap = False,
dst_mac_pcap = False):
""" Convert a pcap file with a number of packets to a list of connected streams.
packet1->packet2->packet3 etc
:parameters:
pcap_file : string
Name of the pcap file
ipg_usec : float
Inter packet gap in usec. If IPG is None, IPG is taken from pcap file
speedup : float
When reading the pcap file, divide the IPG by this "speedup" factor, i.e. the replay is sped up by this factor.
loop_count : uint16_t
Number of loops to repeat the pcap file
vm : list
List of Field engine instructions
packet_hook : Callable or function
will be applied to every packet
split_mode : str
should this PCAP be split to two profiles based on IPs / MACs
used for dual mode
can be 'MAC' or 'IP'
min_ipg_usec : float
Minimum inter-packet gap in usec. Used to guard against too-small IPGs.
src_mac_pcap : bool
Source MAC address will be taken from pcap file if True.
dst_mac_pcap : bool
Destination MAC address will be taken from pcap file if True.
:return: STLProfile
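.. code-block:: python
# STLProfile.load_pcap example (illustrative sketch; 'sample.pcap' is a placeholder file name)
profile = STLProfile.load_pcap('sample.pcap', ipg_usec = 10.0, loop_count = 0)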
"""
if speedup <= 0:
raise TRexError('Speedup should be positive.')
if min_ipg_usec and min_ipg_usec < 0:
raise TRexError('min_ipg_usec should not be negative.')
# make sure IPG is not less than 0.001 usec
if (ipg_usec is not None and (ipg_usec < 0.001 * speedup) and
(min_ipg_usec is None or min_ipg_usec < 0.001)):
raise TRexError("ipg_usec cannot be less than 0.001 usec: '{0}'".format(ipg_usec))
if loop_count < 0:
raise TRexError("'loop_count' cannot be negative")
try:
if split_mode is None:
pkts = PCAPReader(pcap_file).read_all(ipg_usec, min_ipg_usec, speedup)
if len(pkts) == 0:
raise TRexError("'{0}' does not contain any packets".format(pcap_file))
return STLProfile.__pkts_to_streams(pkts,
loop_count,
vm,
packet_hook,
src_mac_pcap = src_mac_pcap,
dst_mac_pcap = dst_mac_pcap)
else:
pkts_a, pkts_b = PCAPReader(pcap_file).read_all(ipg_usec, min_ipg_usec, speedup, split_mode = split_mode)
if not (pkts_a or pkts_b):
raise TRexError("'%s' does not contain any packets." % pcap_file)
elif not (pkts_a and pkts_b):
raise TRexError("'%s' contains only one direction." % pcap_file)
# swap if the ts of the first packet in b is earlier
start_time_a = pkts_a[0][1]
start_time_b = pkts_b[0][1]
if start_time_b < start_time_a:
pkts_a, pkts_b = pkts_b, pkts_a
# get last ts
end_time_a = pkts_a[-1][1]
end_time_b = pkts_b[-1][1]
start_delay_usec = 1000
if ipg_usec:
start_delay_usec = ipg_usec / speedup
if min_ipg_usec and min_ipg_usec > start_delay_usec:
start_delay_usec = min_ipg_usec
end_time = max(end_time_a, end_time_b)
profile_a = STLProfile.__pkts_to_streams(pkts_a,
loop_count,
vm,
packet_hook,
start_delay_usec,
end_delay_usec = end_time - end_time_a,
src_mac_pcap = src_mac_pcap,
dst_mac_pcap = dst_mac_pcap)
profile_b = STLProfile.__pkts_to_streams(pkts_b,
loop_count,
vm,
packet_hook,
start_delay_usec,
end_delay_usec = end_time - end_time_b,
src_mac_pcap = src_mac_pcap,
dst_mac_pcap = dst_mac_pcap)
return profile_a, profile_b
except Scapy_Exception as e:
raise TRexError("failed to open PCAP file {0}: '{1}'".format(pcap_file, str(e)))
@staticmethod
def __pkts_to_streams (pkts, loop_count, vm, packet_hook, start_delay_usec = 0, end_delay_usec = 0, src_mac_pcap = False, dst_mac_pcap = False):
streams = []
if packet_hook:
pkts = [(packet_hook(cap), meta) for (cap, meta) in pkts]
stream_dst_mac = STLStreamDstMAC_PKT if dst_mac_pcap else STLStreamDstMAC_CFG_FILE
last_ts = 0
for i, (cap, ts) in enumerate(pkts, start = 1):
isg = ts - last_ts
last_ts = ts
# handle last packet
if i == len(pkts):
if end_delay_usec:
next = 'delay_stream'
action_count = 0
streams.append(STLStream(name = 'delay_stream',
mode = STLTXSingleBurst(total_pkts = 1, percentage = 100),
self_start = False,
isg = end_delay_usec,
action_count = loop_count,
dummy_stream = True,
next = 1,
mac_src_override_by_pkt = src_mac_pcap,
mac_dst_override_mode = stream_dst_mac))
else:
next = 1
action_count = loop_count
else:
next = i + 1
action_count = 0
if i == 1:
streams.append(STLStream(name = 1,
packet = STLPktBuilder(pkt_buffer = cap, vm = vm),
mode = STLTXSingleBurst(total_pkts = 1, percentage = 100),
self_start = True,
isg = isg + start_delay_usec, # usec
action_count = action_count,
next = next,
mac_src_override_by_pkt = src_mac_pcap,
mac_dst_override_mode = stream_dst_mac))
else:
streams.append(STLStream(name = i,
packet = STLPktBuilder(pkt_buffer = cap, vm = vm),
mode = STLTXSingleBurst(total_pkts = 1, percentage = 100),
self_start = False,
isg = isg, # usec
action_count = action_count,
next = next,
mac_src_override_by_pkt = src_mac_pcap,
mac_dst_override_mode = stream_dst_mac))
profile = STLProfile(streams)
profile.meta = {'type': 'pcap'}
return profile
@staticmethod
def load (filename, direction = 0, port_id = 0, **kwargs):
""" Load a profile by its type. Supported types are:
* py
* json
* pcap / cap file that is converted to a profile automatically
:Parameters:
filename : string as filename
direction : profile's direction (if supported by the profile)
port_id : which port ID this profile is being loaded to
kwargs : forward those key-value pairs to the profile
"""
x = os.path.basename(filename).split('.')
suffix = x[1] if (len(x) == 2) else None
if suffix == 'py':
profile = STLProfile.load_py(filename, direction, port_id, **kwargs)
elif suffix == 'json':
profile = STLProfile.load_json(filename)
elif suffix == 'yaml':
profile = STLProfile.load_yaml(filename)
elif suffix in ['cap', 'pcap']:
profile = STLProfile.load_pcap(filename, speedup = 1, ipg_usec = 1e6)
else:
raise TRexError("unknown profile file type: '{0}'".format(suffix))
if profile is not None:
profile.meta['stream_count'] = len(profile.get_streams()) if isinstance(profile.get_streams(), list) else 1
return profile
@staticmethod
def get_info (filename):
profile = STLProfile.load(filename)
return profile.meta
def dump_as_pkt (self):
""" Dump the profile as Scapy packet. If the packet is raw, convert it to Scapy before dumping it."""
cnt=0;
for stream in self.streams:
print("=======================")
print("Stream %d" % cnt)
print("=======================")
cnt = cnt +1
stream.to_pkt_dump()
def to_json (self):
""" convert profile to JSON object """
return [s.to_json() for s in self.get_streams()]
@staticmethod
def from_json (json_data):
""" create profile object from JSON object """
if not isinstance(json_data, list):
raise TRexError("JSON should contain a list of streams")
streams = [STLStream.from_json(stream_json) for stream_json in json_data]
profile = STLProfile(streams)
profile.meta = {'type': 'json'}
return profile
def dump_to_code (self, profile_file = None):
""" Convert the profile to Python native profile. """
profile_dump = '''# !!! Auto-generated code !!!
from trex.stl.api import *
class STLS1(object):
def get_streams(self, direction = 0, **kwargs):
streams = []
'''
for stream in self.streams:
profile_dump += ' '*8 + stream.to_code().replace('\n', '\n' + ' '*8) + '\n'
profile_dump += ' '*8 + 'streams.append(stream)\n'
profile_dump += '''
return streams
def register():
return STLS1()
'''
# write to file if provided
if profile_file:
with open(profile_file, 'w') as f:
f.write(profile_dump)
return profile_dump
def __len__ (self):
return len(self.streams)
class PCAPReader(object):
def __init__(self, pcap_file):
if not os.path.isfile(pcap_file):
raise TRexError("File '{0}' does not exist.".format(pcap_file))
self.pcap_file = pcap_file
def read_all(self, ipg_usec, min_ipg_usec, speedup, split_mode = None):
# get the packets
if split_mode is None:
pkts = RawPcapReader(self.pcap_file).read_all()
else:
pkts = rdpcap(self.pcap_file)
if not pkts:
raise TRexError("'%s' does not contain any packets." % self.pcap_file)
self.pkts_arr = []
last_ts = 0
# fix times
for pkt in pkts:
if split_mode is None:
pkt_data, meta = pkt
ts_usec = meta[0] * 1e6 + meta[1]
else:
pkt_data = pkt
ts_usec = float(pkt.time) * 1e6
if ipg_usec is None:
if 'prev_time' in locals():
delta_usec = (ts_usec - prev_time) / float(speedup)
else:
delta_usec = 0
if min_ipg_usec and delta_usec < min_ipg_usec:
delta_usec = min_ipg_usec
prev_time = ts_usec
last_ts += delta_usec
else: # user specified ipg
if min_ipg_usec:
last_ts += min_ipg_usec
elif ipg_usec:
last_ts += ipg_usec / float(speedup)
else:
raise TRexError('Please specify a positive min_ipg_usec or ipg_usec.')
self.pkts_arr.append([pkt_data, last_ts])
if split_mode is None:
return self.pkts_arr
# we need to split
self.graph = Graph()
self.pkt_groups = [ [], [] ]
if split_mode == 'MAC':
self.generate_mac_groups()
elif split_mode == 'IP':
self.generate_ip_groups()
else:
raise TRexError('unknown split mode for PCAP')
return self.pkt_groups
# generate two groups based on MACs
def generate_mac_groups (self):
for i, (pkt, _) in enumerate(self.pkts_arr):
if not isinstance(pkt, (Ether, Dot3)):
raise TRexError("Packet #{0} has an unknown L2 format: {1}".format(i, type(pkt)))
self.graph.add(pkt.src, pkt.dst)
# split the graph to two groups
mac_groups = self.graph.split()
for pkt, ts in self.pkts_arr:
group = 1 if pkt.src in mac_groups[1] else 0
self.pkt_groups[group].append((bytes(pkt), ts))
# generate two groups based on IPs
def generate_ip_groups (self):
for i, (pkt, t) in enumerate(self.pkts_arr):
if not isinstance(pkt, (Ether, Dot3) ):
raise TRexError("Packet #{0} has an unknown L2 format: {1}".format(i, type(pkt)))
ip = pkt.getlayer('IP')
if not ip:
ip = pkt.getlayer('IPv6')
if not ip:
continue
self.graph.add(ip.src, ip.dst)
# split the graph to two groups
ip_groups = self.graph.split()
for pkt, ts in self.pkts_arr:
ip = pkt.getlayer('IP')
if not ip:
ip = pkt.getlayer('IPv6')
group = 0
if ip and ip.src in ip_groups[1]:
group = 1
self.pkt_groups[group].append((bytes(pkt), ts))
# a simple graph object - used to split to two groups
class Graph(object):
def __init__ (self):
self.db = OrderedDict()
self.debug = False
def log (self, msg):
if self.debug:
print(msg)
# add a connection v1 --> v2
def add (self, v1, v2):
# init value for v1
if v1 not in self.db:
self.db[v1] = set()
# init value for v2
if v2 not in self.db:
self.db[v2] = set()
# ignore self to self edges
if v1 == v2:
return
# undirected - add two ways
self.db[v1].add(v2)
self.db[v2].add(v1)
# create a 2-color of the graph if possible
def split (self):
color_a = set()
color_b = set()
# start with all
nodes = list(self.db.keys())
# process one by one
while len(nodes) > 0:
node = nodes.pop(0)
friends = self.db[node]
# node has never been seen - move to color_a
if node not in color_a and node not in color_b:
self.log("<NEW> {0} --> A".format(node))
color_a.add(node)
# node color
node_color, other_color = (color_a, color_b) if node in color_a else (color_b, color_a)
# check that the coloring is possible
bad_friends = friends.intersection(node_color)
if bad_friends:
raise TRexError("ERROR: failed to split PCAP file - {0} and {1} are in the same group".format(node, bad_friends))
# add all the friends to the other color
for friend in friends:
self.log("<FRIEND> {0} --> {1}".format(friend, 'A' if other_color is color_a else 'B'))
other_color.add(friend)
return color_a, color_b
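# Illustrative sketch (not part of the original module): split() 2-colors the
# connectivity graph built by add(), e.g.
#   g = Graph(); g.add('a', 'b'); g.add('c', 'b')
#   g.split()  ->  ({'a', 'c'}, {'b'})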
default_STLStream = STLStream()
|
py | b40445f77c173a585350e204f3a35e0c5a580713 | # coding: utf-8
from __future__ import unicode_literals
from mock import patch
import dns.resolver
from django.core.files.base import ContentFile
from django.core.urlresolvers import reverse
from modoboa.core import factories as core_factories
from modoboa.core.models import User
from modoboa.lib.tests import ModoTestCase
from . import utils
from .. import factories
from ..models import Domain, Alias, DomainAlias
class ImportTestCase(ModoTestCase):
@classmethod
def setUpTestData(cls):
"""Create test data."""
super(ImportTestCase, cls).setUpTestData()
cls.localconfig.parameters.set_value(
"enable_admin_limits", False, app="limits")
cls.localconfig.save()
factories.populate_database()
def test_domains_import(self):
response = self.client.get(reverse("admin:domain_import"))
self.assertEqual(response.status_code, 200)
self.assertIn("Provide a CSV", response.content.decode())
f = ContentFile(b"""domain; domain1.com; 1000; 100; True
domain; domain2.com; 1000; 200; False
domainalias; domalias1.com; domain1.com; True
""", name="domains.csv")
self.client.post(
reverse("admin:domain_import"), {
"sourcefile": f
}
)
admin = User.objects.get(username="admin")
dom = Domain.objects.get(name="domain1.com")
self.assertEqual(dom.quota, 1000)
self.assertEqual(dom.default_mailbox_quota, 100)
self.assertTrue(dom.enabled)
self.assertTrue(admin.is_owner(dom))
domalias = DomainAlias.objects.get(name="domalias1.com")
self.assertEqual(domalias.target, dom)
self.assertTrue(dom.enabled)
self.assertTrue(admin.is_owner(domalias))
dom = Domain.objects.get(name="domain2.com")
self.assertEqual(dom.default_mailbox_quota, 200)
self.assertFalse(dom.enabled)
self.assertTrue(admin.is_owner(dom))
def test_domain_import_bad_syntax(self):
"""Check errors handling."""
url = reverse("admin:domain_import")
f = ContentFile("domain; domain1.com; 100; True",
name="domains.csv")
response = self.client.post(url, {"sourcefile": f})
self.assertContains(response, "Invalid line")
f = ContentFile("domain; domain1.com; XX; 100; True",
name="domains.csv")
response = self.client.post(url, {"sourcefile": f})
self.assertContains(response, "Invalid quota value")
f = ContentFile("domain; domain1.com; 100; XX; True",
name="domains.csv")
response = self.client.post(url, {"sourcefile": f})
self.assertContains(response, "Invalid default mailbox quota")
f = ContentFile("domain; domain1.com; 10; 100; True",
name="domains.csv")
response = self.client.post(url, {"sourcefile": f})
self.assertContains(
response,
"Default mailbox quota cannot be greater than domain quota")
@patch.object(dns.resolver.Resolver, "query")
@patch("socket.gethostbyname")
def test_domain_import_with_mx_check(self, mock_gethostbyname, mock_query):
"""Check domain import when MX check is enabled."""
reseller = core_factories.UserFactory(
username="reseller", groups=("Resellers", ))
self.client.force_login(reseller)
self.set_global_parameter("valid_mxs", "1.2.3.4")
self.set_global_parameter("domains_must_have_authorized_mx", True)
mock_query.return_value = [utils.FakeDNSAnswer("mail.ok.com")]
mock_gethostbyname.return_value = "1.2.3.5"
f = ContentFile(
b"domain; domain1.com; 100; 1; True", name="domains.csv")
resp = self.client.post(
reverse("admin:domain_import"), {
"sourcefile": f
}
)
self.assertContains(resp, "No authorized MX record found for domain")
mock_gethostbyname.return_value = "1.2.3.4"
f.seek(0)
resp = self.client.post(
reverse("admin:domain_import"), {
"sourcefile": f
}
)
self.assertTrue(
Domain.objects.filter(name="domain1.com").exists())
def test_import_domains_with_conflict(self):
f = ContentFile(b"""domain;test.alias;100;10;True
domainalias;test.alias;test.com;True
""", name="domains.csv")
resp = self.client.post(
reverse("admin:domain_import"), {
"sourcefile": f
}
)
self.assertIn(
"Object already exists: domainalias", resp.content.decode())
def test_identities_import(self):
response = self.client.get(reverse("admin:identity_import"))
self.assertEqual(response.status_code, 200)
self.assertIn("Provide a CSV", response.content.decode())
f = ContentFile("""
account; [email protected]; toto; User; One; True; SimpleUsers; [email protected]; 0
account; [email protected]; toto; René; Truc; True; DomainAdmins; [email protected]; 5; test.com
alias; [email protected]; True; [email protected]
forward; [email protected]; True; [email protected]
forward; [email protected]; True; [email protected]
dlist; [email protected]; True; [email protected]; [email protected]
""", name="identities.csv")
self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True}
)
admin = User.objects.get(username="admin")
u1 = User.objects.get(username="[email protected]")
mb1 = u1.mailbox
self.assertTrue(admin.is_owner(u1))
self.assertEqual(u1.email, "[email protected]")
self.assertEqual(u1.first_name, "User")
self.assertEqual(u1.last_name, "One")
self.assertTrue(u1.is_active)
self.assertEqual(u1.role, "SimpleUsers")
self.assertTrue(mb1.use_domain_quota)
self.assertEqual(mb1.quota, 0)
self.assertTrue(admin.is_owner(mb1))
self.assertEqual(mb1.full_address, "[email protected]")
self.assertTrue(
self.client.login(username="[email protected]", password="toto")
)
da = User.objects.get(username="[email protected]")
damb = da.mailbox
self.assertEqual(da.first_name, u"René")
self.assertEqual(da.role, "DomainAdmins")
self.assertEqual(damb.quota, 5)
self.assertFalse(damb.use_domain_quota)
self.assertEqual(damb.full_address, "[email protected]")
dom = Domain.objects.get(name="test.com")
self.assertIn(da, dom.admins)
u = User.objects.get(username="[email protected]")
self.assertTrue(da.can_access(u))
al = Alias.objects.get(address="[email protected]")
self.assertTrue(
al.aliasrecipient_set
.filter(r_mailbox=u1.mailbox).exists()
)
self.assertTrue(admin.is_owner(al))
fwd = Alias.objects.get(address="[email protected]")
self.assertTrue(
fwd.aliasrecipient_set
.filter(
address="[email protected]", r_mailbox__isnull=True,
r_alias__isnull=True)
.exists()
)
self.assertTrue(admin.is_owner(fwd))
dlist = Alias.objects.get(address="[email protected]")
self.assertTrue(
dlist.aliasrecipient_set
.filter(r_mailbox=u1.mailbox).exists()
)
self.assertTrue(
dlist.aliasrecipient_set.filter(address="[email protected]")
.exists()
)
self.assertTrue(admin.is_owner(dlist))
def test_import_for_nonlocal_domain(self):
"""Try to import an account for nonlocal domain."""
f = ContentFile(b"""
account; [email protected]; toto; User; One; True; SimpleUsers; [email protected]; 0
""", name="identities.csv")
self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True}
)
self.assertFalse(
User.objects.filter(username="[email protected]").exists())
def test_import_invalid_quota(self):
f = ContentFile(b"""
account; [email protected]; toto; User; One; True; SimpleUsers; [email protected]; ; test.com
""", name="identities.csv")
resp = self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True}
)
self.assertIn('wrong quota value', resp.content.decode())
def test_import_domain_by_domainadmin(self):
"""Check if a domain admin is not allowed to import a domain."""
self.client.logout()
self.client.login(username="[email protected]", password="toto")
f = ContentFile(b"""
domain; domain2.com; 1000; 200; False
""", name="identities.csv")
resp = self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True}
)
self.assertIn("You are not allowed to import domains", resp.content.decode())
f = ContentFile(b"""
domainalias; domalias1.com; test.com; True
""", name="identities.csv")
resp = self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True}
)
self.assertIn(
"You are not allowed to import domain aliases", resp.content.decode())
def test_import_quota_too_big(self):
self.client.logout()
self.client.login(username="[email protected]", password="toto")
f = ContentFile(b"""
account; [email protected]; toto; User; One; True; SimpleUsers; [email protected]; 40
""", name="identities.csv")
resp = self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True}
)
self.assertIn("Domain quota exceeded", resp.content.decode())
def test_import_missing_quota(self):
f = ContentFile(b"""
account; [email protected]; toto; User; One; True; SimpleUsers; [email protected]
""", name="identities.csv")
self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True}
)
account = User.objects.get(username="[email protected]")
self.assertEqual(
account.mailbox.quota,
account.mailbox.domain.default_mailbox_quota
)
def test_import_duplicate(self):
f = ContentFile("""
account; [email protected]; toto; Admin; ; True; DomainAdmins; [email protected]; 0; test.com
account; [email protected]; toto; René; Truc; True; DomainAdmins; [email protected]; 0; test.com
""", name="identities.csv")
self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True,
"continue_if_exists": True}
)
admin = User.objects.get(username="admin")
u1 = User.objects.get(username="[email protected]")
self.assertTrue(admin.is_owner(u1))
def test_import_superadmin(self):
"""Check if a domain admin can import a superadmin
Expected result: no
"""
self.client.logout()
self.assertTrue(
self.client.login(username="[email protected]", password="toto")
)
f = ContentFile(b"""
account; [email protected]; toto; Super; Admin; True; SuperAdmins; [email protected]; 50
""", name="identities.csv")
self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True,
"continue_if_exists": True}
)
with self.assertRaises(User.DoesNotExist):
User.objects.get(username="[email protected]")
def test_import_alias_with_empty_values(self):
f = ContentFile(b"""
alias;[email protected];True;[email protected];;;;;;;;;;;;;;;;
""", name="identities.csv")
self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True,
"continue_if_exists": True}
)
alias = Alias.objects.get(address="[email protected]")
self.assertEqual(alias.type, "alias")
def test_import_account_alias_conflict(self):
"""Specific test for #1144."""
f = ContentFile(b"""
alias;[email protected];True;[email protected]
""", name="identities.csv")
self.client.post(
reverse("admin:identity_import"),
{"sourcefile": f, "crypt_password": True}
)
self.assertTrue(
Alias.objects.filter(
address="[email protected]", internal=False).exists())
|
py | b404460a557716b5fc74d0d53897a3a93326c3d7 | # Description: Carved isomesh representation of electron density.
# Source: placeHolder
"""
cmd.do('delete all;')
cmd.do('# Fetch the coordinates. Need internet connection.')
cmd.do('fetch ${1:4dgr}, async=0;')
cmd.do('# Fetch the electron density map.')
cmd.do('fetch ${1:4dgr}, type=2fofc,async=0;')
cmd.do('# create a selection out of the glycan')
cmd.do('select ${2:LongGlycan}, resi ${3:469:477};')
cmd.do('orient ${2:LongGlycan};')
cmd.do('remove not ${2:LongGlycan};')
cmd.do('remove name H*;')
cmd.do('isomesh 2fofcmap, ${1:4dgr}_2fofc, 1, ${2:LongGlycan}, carve = 1.8;')
cmd.do('color density, 2fofcmap; ')
cmd.do('show sticks;')
cmd.do('show spheres;')
cmd.do('set stick_radius, .07;')
cmd.do('set sphere_scale, .19;')
cmd.do('set sphere_scale, .13, elem H;')
cmd.do('set bg_rgb=[1, 1, 1];')
cmd.do('set stick_quality, 50;')
cmd.do('set sphere_quality, 4;')
cmd.do('color gray85, elem C;')
cmd.do('color red, elem O;')
cmd.do('color slate, elem N;')
cmd.do('color gray98, elem H;')
cmd.do('set stick_color, gray50;')
cmd.do('set ray_trace_mode, 1;')
cmd.do('set ray_texture, 2;')
cmd.do('set antialias, 3;')
cmd.do('set ambient, 0.5;')
cmd.do('set spec_count, 5;')
cmd.do('set shininess, 50;')
cmd.do('set specular, 1;')
cmd.do('set reflect, .1;')
cmd.do('set dash_gap, 0;')
cmd.do('set dash_color, black;')
cmd.do('set dash_gap, .15;')
cmd.do('set dash_length, .05;')
cmd.do('set dash_round_ends, 0;')
cmd.do('set dash_radius, .05;')
cmd.do('set_view (0.34,-0.72,0.61,0.8,0.56,0.22,-0.51,0.4,0.77,0.0,0.0,-81.31,44.64,-9.02,58.62,65.34,97.28,-20.0);')
cmd.do('preset.ball_and_stick("all",mode=1);')
cmd.do('draw;')
"""
cmd.do('delete all;')
cmd.do('# Fetch the coordinates. Need internet connection.')
cmd.do('fetch 4dgr, async=0;')
cmd.do('# Fetch the electron density map.')
cmd.do('fetch 4dgr, type=2fofc,async=0;')
cmd.do('# create a selection out of the glycan')
cmd.do('select LongGlycan, resi 469:477;')
cmd.do('orient LongGlycan;')
cmd.do('remove not LongGlycan;')
cmd.do('remove name H*;')
cmd.do('isomesh 2fofcmap, 4dgr_2fofc, 1, LongGlycan, carve = 1.8;')
cmd.do('color density, 2fofcmap; ')
cmd.do('show sticks;')
cmd.do('show spheres;')
cmd.do('set stick_radius, .07;')
cmd.do('set sphere_scale, .19;')
cmd.do('set sphere_scale, .13, elem H;')
cmd.do('set bg_rgb=[1, 1, 1];')
cmd.do('set stick_quality, 50;')
cmd.do('set sphere_quality, 4;')
cmd.do('color gray85, elem C;')
cmd.do('color red, elem O;')
cmd.do('color slate, elem N;')
cmd.do('color gray98, elem H;')
cmd.do('set stick_color, gray50;')
cmd.do('set ray_trace_mode, 1;')
cmd.do('set ray_texture, 2;')
cmd.do('set antialias, 3;')
cmd.do('set ambient, 0.5;')
cmd.do('set spec_count, 5;')
cmd.do('set shininess, 50;')
cmd.do('set specular, 1;')
cmd.do('set reflect, .1;')
cmd.do('set dash_gap, 0;')
cmd.do('set dash_color, black;')
cmd.do('set dash_gap, .15;')
cmd.do('set dash_length, .05;')
cmd.do('set dash_round_ends, 0;')
cmd.do('set dash_radius, .05;')
cmd.do('set_view (0.34,-0.72,0.61,0.8,0.56,0.22,-0.51,0.4,0.77,0.0,0.0,-81.31,44.64,-9.02,58.62,65.34,97.28,-20.0);')
cmd.do('preset.ball_and_stick("all",mode=1);')
cmd.do('draw;')
|
py | b404461e863beb1564572ad1274875ee376255a9 | a=input('digite algo : ')
print('o tipo primitivo deste valor é',type(a))
print('so tem espaços ?:',a.isspace())
print('è um numero ?',a.isnumeric())
print('é alfabético ?' ,a.isalpha())
print('é alfanumérico?',a.isalnum())
print('esta em maiusculo ?',a.isupper())
print(' esta em minusculo ?',a.islower())
print('esta captalizada ?' ,a.istitle()) |
py | b40446edb25aa94841c5c96603786018f33a0f1c | """
DeskPro API Client
"""
import re
import requests
from django.template import loader
from django.conf import settings
from peeringdb_server.models import DeskProTicket
from peeringdb_server.inet import RdapNotFoundError
def ticket_queue(subject, body, user):
""" queue a deskpro ticket for creation """
ticket = DeskProTicket.objects.create(subject=u"{}{}".format(
settings.EMAIL_SUBJECT_PREFIX, subject), body=body, user=user)
class APIError(IOError):
def __init__(self, msg, data):
super(APIError, self).__init__(msg)
self.data = data
def ticket_queue_asnauto_skipvq(user, org, net, rir_data):
"""
queue deskpro ticket creation for asn automation action: skip vq
"""
if isinstance(net, dict):
net_name = net.get("name")
else:
net_name = net.name
if isinstance(org, dict):
org_name = org.get("name")
else:
org_name = org.name
ticket_queue("[ASNAUTO] Network '%s' approved for existing Org '%s'" %
(net_name, org_name),
loader.get_template(
'email/notify-pdb-admin-asnauto-skipvq.txt').render({
"user": user,
"org": org,
"net": net,
"rir_data": rir_data
}), user)
def ticket_queue_asnauto_affil(user, org, net, rir_data):
"""
    queue deskpro ticket creation for asn automation action: affil
"""
ticket_queue(
"[ASNAUTO] Ownership claim granted to Org '%s' for user '%s'" %
(org.name, user.username),
loader.get_template('email/notify-pdb-admin-asnauto-affil.txt').render(
{
"user": user,
"org": org,
"net": net,
"rir_data": rir_data
}), user)
def ticket_queue_asnauto_create(user, org, net, rir_data, asn,
org_created=False, net_created=False):
"""
    queue deskpro ticket creation for asn automation action: create
"""
subject = []
if org_created:
subject.append("Organization '%s'" % org.name)
if net_created:
subject.append("Network '%s'" % net.name)
if not subject:
return
subject = ", ".join(subject)
ticket_queue(
"[ASNAUTO] %s created" % subject,
loader.get_template(
'email/notify-pdb-admin-asnauto-entity-creation.txt').render({
"user": user,
"org": org,
"net": net,
"asn": asn,
"org_created": org_created,
"net_created": net_created,
"rir_data": rir_data
}), user)
def ticket_queue_rdap_error(user, asn, error):
if isinstance(error, RdapNotFoundError):
return
error_message = "{}".format(error)
if re.match("(.+) returned 400", error_message):
return
subject = "[RDAP_ERR] {} - AS{}".format(user.username, asn)
ticket_queue(
subject,
loader.get_template('email/notify-pdb-admin-rdap-error.txt').render({
"user": user,
"asn": asn,
"error_details": error_message
}), user)
class APIClient(object):
def __init__(self, url, key):
self.key = key
self.url = url
@property
def auth_headers(self):
return {"Authorization": "key {}".format(self.key)}
def parse_response(self, response, many=False):
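        # Sketch of the assumed DeskPro envelope: responses carry {"status", "message", "data"};
        # raise APIError on an error status, otherwise unwrap "data" (first element unless many=True).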
r_json = response.json()
if "status" in r_json:
if r_json["status"] >= 400:
raise APIError(r_json["message"], r_json)
else:
response.raise_for_status()
data = r_json["data"]
if isinstance(data, list):
if many:
return r_json["data"]
elif data:
return data[0]
else:
return data
def get(self, endpoint, param):
response = requests.get("{}/{}".format(self.url, endpoint),
params=param, headers=self.auth_headers)
return self.parse_response(response)
def create(self, endpoint, param):
response = requests.post("{}/{}".format(self.url, endpoint),
json=param, headers=self.auth_headers)
return self.parse_response(response)
def require_person(self, user):
person = self.get("people", {"primary_email": user.email})
if not person:
person = self.create(
"people", {
"primary_email": user.email,
"first_name": user.first_name,
"last_name": user.last_name,
"name": user.full_name
})
return person
def create_ticket(self, ticket):
person = self.require_person(ticket.user)
ticket_response = self.create(
"tickets", {
"subject": ticket.subject,
"person": {
"id": person["id"]
},
"status": "awaiting_agent"
})
self.create(
"tickets/{}/messages".format(ticket_response["id"]), {
"message": ticket.body.replace("\n", "<br />\n"),
"person": person["id"],
"format": "html"
})
|
py | b4044707ce3ce7df6f983830f7b152ca9702bda8 | # Copyright (C) 2020 Aidil Aryanto.
# All rights reserved.
import asyncio
import glob
import os
import shutil
import time
import deezloader
from hachoir.metadata import extractMetadata
from hachoir.parser import createParser
from pylast import User
from telethon import events
from telethon.errors.rpcerrorlist import YouBlockedUserError
from telethon.tl.types import DocumentAttributeAudio, DocumentAttributeVideo
from userbot import CMD_HANDLER as cmd
from userbot import (
CMD_HELP,
DEEZER_ARL_TOKEN,
LASTFM_USERNAME,
TEMP_DOWNLOAD_DIRECTORY,
lastfm,
)
from userbot.utils import bash, chrome, edit_or_reply, ayiin_cmd, progress
from userbot.utils.FastTelethon import upload_file
async def getmusic(cat):
video_link = ""
search = cat
driver = await chrome()
driver.get("https://www.youtube.com/results?search_query=" + search)
user_data = driver.find_elements_by_xpath('//*[@id="video-title"]')
for i in user_data:
video_link = i.get_attribute("href")
break
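    # take the first search hit and download it as MP3 (-x extracts audio; metadata and thumbnail are embedded)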
command = f"yt-dlp -x --add-metadata --embed-thumbnail --no-progress --audio-format mp3 {video_link}"
await bash(command)
return video_link
async def getmusicvideo(cat):
video_link = ""
search = cat
driver = await chrome()
driver.get("https://www.youtube.com/results?search_query=" + search)
user_data = driver.find_elements_by_xpath('//*[@id="video-title"]')
for i in user_data:
video_link = i.get_attribute("href")
break
command = (
'yt-dlp -f "[filesize<50M]" --no-progress --merge-output-format mp4 '
+ video_link
)
await bash(command)
@ayiin_cmd(pattern="song (.*)")
async def _(event):
    reply = await event.get_reply_message()
    # create the status message up front so every branch below can edit it
    xx = await edit_or_reply(event, "`Processing..`")
    if event.pattern_match.group(1):
        query = event.pattern_match.group(1)
    elif reply and reply.message:
        query = reply.message
        await xx.edit("`Hold on..! Fetching your song..`")
    else:
        await xx.edit("`What am I supposed to find?`")
        return
await getmusic(str(query))
loa = glob.glob("*.mp3")[0]
await xx.edit("`Yeah.. Mengupload lagu Anda..`")
c_time = time.time()
with open(loa, "rb") as f:
result = await upload_file(
client=event.client,
file=f,
name=loa,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, event, c_time, "[UPLOAD]", loa)
),
)
await event.client.send_file(
event.chat_id,
result,
allow_cache=False,
)
await event.delete()
await bash("rm -rf *.mp3")
@ayiin_cmd(pattern="vsong(?: |$)(.*)")
async def _(event):
    reply = await event.get_reply_message()
    # create the status message up front so every branch below can edit it
    xx = await edit_or_reply(event, "`Processing..`")
    if event.pattern_match.group(1):
        query = event.pattern_match.group(1)
    elif reply:
        query = str(reply.message)
        await xx.edit("**Hold on..! Fetching your music video..**")
    else:
        await xx.edit("**What am I supposed to find?**")
        return
await getmusicvideo(query)
l = glob.glob(("*.mp4")) + glob.glob(("*.mkv")) + glob.glob(("*.webm"))
if l:
await xx.edit("**Ya..! aku menemukan sesuatu..**")
else:
await xx.edit(
f"**Maaf..! saya tidak dapat menemukan apa pun dengan** `{query}`"
)
return
try:
loa = l[0]
metadata = extractMetadata(createParser(loa))
duration = metadata.get("duration").seconds if metadata.has("duration") else 0
width = metadata.get("width") if metadata.has("width") else 0
height = metadata.get("height") if metadata.has("height") else 0
await bash("cp *mp4 thumb.mp4")
await bash("ffmpeg -i thumb.mp4 -vframes 1 -an -s 480x360 -ss 5 thumb.jpg")
thumb_image = "thumb.jpg"
c_time = time.time()
with open(loa, "rb") as f:
result = await upload_file(
client=event.client,
file=f,
name=loa,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, event, c_time, "[UPLOAD]", loa)
),
)
await event.client.send_file(
event.chat_id,
result,
force_document=False,
thumb=thumb_image,
allow_cache=False,
caption=query,
supports_streaming=True,
attributes=[
DocumentAttributeVideo(
duration=duration,
w=width,
h=height,
round_message=False,
supports_streaming=True,
)
],
)
await xx.edit(f"**{query} Berhasil Diupload..!**")
os.remove(thumb_image)
await bash("rm *.mkv *.mp4 *.webm")
except BaseException:
os.remove(thumb_image)
await bash("rm *.mkv *.mp4 *.webm")
return
@ayiin_cmd(pattern="smd (?:(now)|(.*) - (.*))")
async def _(event):
if event.fwd_from:
return
if event.pattern_match.group(1) == "now":
playing = User(LASTFM_USERNAME, lastfm).get_now_playing()
if playing is None:
return await event.edit(
"`Error: Tidak ada data scrobbling yang ditemukan.`"
)
artist = playing.get_artist()
song = playing.get_title()
else:
artist = event.pattern_match.group(2)
song = event.pattern_match.group(3)
track = str(artist) + " - " + str(song)
chat = "@SpotifyMusicDownloaderBot"
try:
await event.edit("`Getting Your Music...`")
async with event.client.conversation(chat) as conv:
await asyncio.sleep(2)
await event.edit("`Downloading...`")
try:
response = conv.wait_event(
events.NewMessage(incoming=True, from_users=752979930)
)
msg = await event.client.send_message(chat, track)
respond = await response
res = conv.wait_event(
events.NewMessage(incoming=True, from_users=752979930)
)
r = await res
await event.client.send_read_acknowledge(conv.chat_id)
except YouBlockedUserError:
await event.reply(
"`Unblock `@SpotifyMusicDownloaderBot` dan coba lagi`"
)
return
await event.client.forward_messages(event.chat_id, respond.message)
await event.client.delete_messages(conv.chat_id, [msg.id, r.id, respond.id])
await event.delete()
except TimeoutError:
return await event.edit(
"`Error: `@SpotifyMusicDownloaderBot` tidak merespons atau Lagu tidak ditemukan!.`"
)
@ayiin_cmd(pattern="net (?:(now)|(.*) - (.*))")
async def _(event):
if event.fwd_from:
return
if event.pattern_match.group(1) == "now":
playing = User(LASTFM_USERNAME, lastfm).get_now_playing()
if playing is None:
return await event.edit(
"`Error: Tidak ada scrobble saat ini yang ditemukan.`"
)
artist = playing.get_artist()
song = playing.get_title()
else:
artist = event.pattern_match.group(2)
song = event.pattern_match.group(3)
track = str(artist) + " - " + str(song)
chat = "@WooMaiBot"
link = f"/netease {track}"
await event.edit("`Searching...`")
try:
async with event.client.conversation(chat) as conv:
await asyncio.sleep(2)
await event.edit("`Processing...`")
try:
msg = await conv.send_message(link)
response = await conv.get_response()
respond = await conv.get_response()
await event.client.send_read_acknowledge(conv.chat_id)
except YouBlockedUserError:
await event.reply("`Please unblock @WooMaiBot and try again`")
return
await event.edit("`Sending Your Music...`")
await asyncio.sleep(3)
await event.client.send_file(event.chat_id, respond)
await event.client.delete_messages(
conv.chat_id, [msg.id, response.id, respond.id]
)
await event.delete()
except TimeoutError:
return await event.edit(
"`Error: `@WooMaiBot` tidak merespons atau Lagu tidak ditemukan!.`"
)
@ayiin_cmd(pattern="mhb(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
d_link = event.pattern_match.group(1)
if ".com" not in d_link:
await event.edit("`Masukkan link yang valid untuk mendownload`")
else:
await event.edit("`Processing...`")
chat = "@MusicsHunterBot"
try:
async with event.client.conversation(chat) as conv:
try:
msg_start = await conv.send_message("/start")
response = await conv.get_response()
msg = await conv.send_message(d_link)
details = await conv.get_response()
song = await conv.get_response()
await event.client.send_read_acknowledge(conv.chat_id)
except YouBlockedUserError:
await event.edit("`Unblock `@MusicsHunterBot` and retry`")
return
await event.client.send_file(event.chat_id, song, caption=details.text)
await event.client.delete_messages(
conv.chat_id, [msg_start.id, response.id, msg.id, details.id, song.id]
)
await event.delete()
except TimeoutError:
return await event.edit(
"`Error: `@MusicsHunterBot` tidak merespons atau Lagu tidak ditemukan!.`"
)
@ayiin_cmd(pattern="deez (.+?|) (FLAC|MP3\_320|MP3\_256|MP3\_128)")
async def _(event):
"""DeezLoader by @An0nimia. Ported for UniBorg by @SpEcHlDe"""
if event.fwd_from:
return
strings = {
"name": "DeezLoad",
"arl_token_cfg_doc": "Token ARL untuk Deezer",
"invalid_arl_token": "Harap setel variabel yang diperlukan untuk modul ini",
"wrong_cmd_syntax": "Bruh, sekarang saya pikir seberapa jauh kita harus melangkah. tolong hentikan Sesi saya ð¥º",
"server_error": "Mengalami kesalahan teknis.",
"processing": "`Sedang Mendownload....`",
"uploading": "`Mengunggah.....`",
}
ARL_TOKEN = DEEZER_ARL_TOKEN
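    # assumption: the ARL is Deezer's long-lived "arl" login cookie; deezloader.Login authenticates with it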
if ARL_TOKEN is None:
await event.edit(strings["invalid_arl_token"])
return
try:
loader = deezloader.Login(ARL_TOKEN)
except Exception as er:
await event.edit(str(er))
return
temp_dl_path = os.path.join(TEMP_DOWNLOAD_DIRECTORY, str(time.time()))
if not os.path.exists(temp_dl_path):
os.makedirs(temp_dl_path)
required_link = event.pattern_match.group(1)
required_qty = event.pattern_match.group(2)
await event.edit(strings["processing"])
if "spotify" in required_link:
if "track" in required_link:
required_track = loader.download_trackspo(
required_link,
output=temp_dl_path,
quality=required_qty,
recursive_quality=True,
recursive_download=True,
not_interface=True,
)
await event.edit(strings["uploading"])
await upload_track(required_track, event)
shutil.rmtree(temp_dl_path)
await event.delete()
elif "album" in required_link:
reqd_albums = loader.download_albumspo(
required_link,
output=temp_dl_path,
quality=required_qty,
recursive_quality=True,
recursive_download=True,
not_interface=True,
zips=False,
)
await event.edit(strings["uploading"])
for required_track in reqd_albums:
await upload_track(required_track, event)
shutil.rmtree(temp_dl_path)
await event.delete()
elif "deezer" in required_link:
if "track" in required_link:
required_track = loader.download_trackdee(
required_link,
output=temp_dl_path,
quality=required_qty,
recursive_quality=True,
recursive_download=True,
not_interface=True,
)
await event.edit(strings["uploading"])
await upload_track(required_track, event)
shutil.rmtree(temp_dl_path)
await event.delete()
elif "album" in required_link:
reqd_albums = loader.download_albumdee(
required_link,
output=temp_dl_path,
quality=required_qty,
recursive_quality=True,
recursive_download=True,
not_interface=True,
zips=False,
)
await event.edit(strings["uploading"])
for required_track in reqd_albums:
await upload_track(required_track, event)
shutil.rmtree(temp_dl_path)
await event.delete()
else:
await event.edit(strings["wrong_cmd_syntax"])
async def upload_track(track_location, message):
metadata = extractMetadata(createParser(track_location))
duration = metadata.get("duration").seconds if metadata.has("duration") else 0
title = metadata.get("title") if metadata.has("title") else ""
performer = metadata.get("artist") if metadata.has("artist") else ""
document_attributes = [
DocumentAttributeAudio(
duration=duration,
voice=False,
title=title,
performer=performer,
waveform=None,
)
]
supports_streaming = True
force_document = False
caption_rts = os.path.basename(track_location)
c_time = time.time()
with open(track_location, "rb") as f:
result = await upload_file(
client=message.client,
file=f,
name=track_location,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, message, c_time, "[UPLOAD]", track_location)
),
)
await message.client.send_file(
message.chat_id,
result,
caption=caption_rts,
force_document=force_document,
supports_streaming=supports_streaming,
allow_cache=False,
attributes=document_attributes,
)
os.remove(track_location)
CMD_HELP.update(
{
"getmusic": f"**Plugin : **`getmusic`\
\n\n • **Syntax :** `{cmd}smd` <nama lagu>\
\n • **Function : **Mendowload lagu dari bot @SpotifyMusicDownloaderBot\
\n\n • **Syntax :** `{cmd}smd now`\
\n • **Function : **Unduh penggunaan scrobble LastFM saat ini dari bot @SpotifyMusicDownloaderBot\
\n\n • **Syntax :** `{cmd}net` <nama lagu>\
\n • **Function : **Mendowload lagu dari bot @WooMaiBot\
\n\n • **Syntax :** `{cmd}net now`\
\n • **Function : **Unduh penggunaan scrobble LastFM saat ini dari bot @WooMaiBot\
\n\n • **Syntax :** `{cmd}mhb` <Link Spotify/Deezer>\
\n • **Function : **Mendowload lagu dari Spotify atau Deezer dari bot @MusicsHunterBot\
\n\n • **Syntax :** `{cmd}deez` <link spotify/deezer> FORMAT\
\n • **Function : **Mendowload lagu dari deezer atau spotify.\
\n • **Format : ** `FLAC`, `MP3_320`, `MP3_256`, `MP3_128`.\
"
}
)
|
py | b4044764886ec62e65ec68f70553b3246f77458e | from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env(
"DJANGO_SECRET_KEY",
default="5G4RyBKlvugN0ZQypS38zGYFDfMvI33e8gTcmfbgb5YBje7dp9oDXrkrBm2mil6H",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1"]
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "",
}
}
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env(
"DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend"
)
# WhiteNoise
# ------------------------------------------------------------------------------
# http://whitenoise.evans.io/en/latest/django.html#using-whitenoise-in-development
INSTALLED_APPS = ["whitenoise.runserver_nostatic"] + INSTALLED_APPS # noqa F405
# django-debug-toolbar
# ------------------------------------------------------------------------------
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
INSTALLED_APPS += ["debug_toolbar"] # noqa F405
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"] # noqa F405
# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
DEBUG_TOOLBAR_CONFIG = {
"DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"],
"SHOW_TEMPLATE_CONTEXT": True,
}
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
INTERNAL_IPS = ["127.0.0.1", "10.0.2.2"]
if env("USE_DOCKER") == "yes":
import socket
hostname, _, ips = socket.gethostbyname_ex(socket.gethostname())
INTERNAL_IPS += [".".join(ip.split(".")[:-1] + ["1"]) for ip in ips]
# django-extensions
# ------------------------------------------------------------------------------
# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
INSTALLED_APPS += ["django_extensions"] # noqa F405
# Your stuff...
# ------------------------------------------------------------------------------
|
py | b404488cb1c8905a33f1c6ef3abb17a7129eba9c | """
Copyright 2017-present, Airbnb Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from nose.tools import assert_equal
from stream_alert_cli.terraform import common, classifier
class TestTerraformGenerateClassifier(object):
"""CLI Terraform Generate, Classifier"""
# pylint: disable=no-self-use,attribute-defined-outside-init
def setup(self):
"""CLI Terraform Generate, Classifier - Setup"""
self.config = {
'global': {
'account': {
'aws_account_id': '123456789012',
'prefix': 'unit-test',
'region': 'us-east-1'
},
'infrastructure': {
'monitoring': {
'sns_topic_name': 'test_topic'
}
}
},
'clusters': {
'test': {
'modules': {
'stream_alert': {
'classifier_config': {
'inputs': {
'aws-sns': [
'arn:aws:sns:us-east-1:123456789012:foo_bar'
]
},
'log_level': 'info',
'log_retention_days': 14,
'memory': 128,
'metric_alarms': {
'errors': {
'enabled': True,
'evaluation_periods': 1,
'period_secs': 120,
'threshold': 0
},
'throttles': {
'enabled': True,
'evaluation_periods': 1,
'period_secs': 120,
'threshold': 0
}
},
'timeout': 60,
'vpc_config': {
'security_group_ids': [],
'subnet_ids': []
}
}
}
}
}
}
}
def test_generate_classifier(self):
"""CLI - Terraform Generate, Classifier"""
cluster_dict = common.infinitedict()
classifier.generate_classifier(
'test',
cluster_dict,
self.config
)
expected_result = {
'module': {
'classifier_test_iam': {
'source': 'modules/tf_classifier',
'account_id': '123456789012',
'region': 'us-east-1',
'function_role_id': '${module.classifier_test_lambda.role_id}',
'function_alias_arn': '${module.classifier_test_lambda.function_alias_arn}',
'function_name': '${module.classifier_test_lambda.function_name}',
'classifier_sqs_queue_arn': '${module.globals.classifier_sqs_queue_arn}',
'classifier_sqs_queue_url': '${module.globals.classifier_sqs_queue_url}',
'classifier_sqs_sse_kms_key_arn': (
'${module.globals.classifier_sqs_sse_kms_key_arn}'
),
'input_sns_topics': [
'arn:aws:sns:us-east-1:123456789012:foo_bar'
]
},
'classifier_test_lambda': {
'alarm_actions': ['arn:aws:sns:us-east-1:123456789012:test_topic'],
'description': 'Unit-Test Streamalert Classifier Test',
'environment_variables': {
'CLUSTER': 'test',
'SQS_QUEUE_URL': '${module.globals.classifier_sqs_queue_url}',
'LOGGER_LEVEL': 'info',
'ENABLE_METRICS': '0'
},
'errors_alarm_enabled': True,
'errors_alarm_evaluation_periods': 1,
'errors_alarm_period_secs': 120,
'errors_alarm_threshold': 0,
'filename': 'classifier.zip',
'function_name': 'unit-test_streamalert_classifier_test',
'handler': 'stream_alert.classifier.main.handler',
'log_retention_days': 14,
'memory_size_mb': 128,
'source': 'modules/tf_lambda',
'throttles_alarm_enabled': True,
'throttles_alarm_evaluation_periods': 1,
'throttles_alarm_period_secs': 120,
'throttles_alarm_threshold': 0,
'timeout_sec': 60,
'vpc_security_group_ids': [],
'vpc_subnet_ids': [],
'input_sns_topics': [
'arn:aws:sns:us-east-1:123456789012:foo_bar'
]
}
}
}
assert_equal(cluster_dict, expected_result)
|
py | b4044942906e26320b5210d5c3fcb8558c6c5bae | # Read an amount in reais and the dollar exchange rate. Then
# print the corresponding amount in dollars.
real = float(input("Enter an amount in reais: "))
cotacao = float(input("Enter the dollar exchange rate: "))
dolar = real / cotacao  # dollars = reais divided by the rate (reais per dollar)
print(f"The corresponding value in dollars is {round(dolar, 1)}") |
py | b4044943b5a6ddfad20f13d8d2a5f06ccb822cd7 | # Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import filecmp
import os
import re
import shutil
import stat
import sys
import urllib2
from color import Coloring
from git_command import GitCommand
from git_config import GitConfig, IsId
from error import GitError, ImportError, UploadError
from error import ManifestInvalidRevisionError
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB
def _lwrite(path, content):
lock = '%s.lock' % path
fd = open(lock, 'wb')
try:
fd.write(content)
finally:
fd.close()
try:
os.rename(lock, path)
except OSError:
os.remove(lock)
raise
def _error(fmt, *args):
msg = fmt % args
print >>sys.stderr, 'error: %s' % msg
def not_rev(r):
return '^' + r
def sq(r):
  # shell-quote r, turning each embedded ' into the POSIX '\'' idiom
  return "'" + r.replace("'", "'\\''") + "'"
hook_list = None
def repo_hooks():
global hook_list
if hook_list is None:
d = os.path.abspath(os.path.dirname(__file__))
d = os.path.join(d , 'hooks')
hook_list = map(lambda x: os.path.join(d, x), os.listdir(d))
return hook_list
def relpath(dst, src):
src = os.path.dirname(src)
top = os.path.commonprefix([dst, src])
if top.endswith('/'):
top = top[:-1]
else:
top = os.path.dirname(top)
tmp = src
rel = ''
while top != tmp:
rel += '../'
tmp = os.path.dirname(tmp)
return rel + dst[len(top) + 1:]
class DownloadedChange(object):
_commit_cache = None
def __init__(self, project, base, change_id, ps_id, commit):
self.project = project
self.base = base
self.change_id = change_id
self.ps_id = ps_id
self.commit = commit
@property
def commits(self):
if self._commit_cache is None:
self._commit_cache = self.project.bare_git.rev_list(
'--abbrev=8',
'--abbrev-commit',
'--pretty=oneline',
'--reverse',
'--date-order',
not_rev(self.base),
self.commit,
'--')
return self._commit_cache
class ReviewableBranch(object):
_commit_cache = None
def __init__(self, project, branch, base):
self.project = project
self.branch = branch
self.base = base
@property
def name(self):
return self.branch.name
@property
def commits(self):
if self._commit_cache is None:
self._commit_cache = self.project.bare_git.rev_list(
'--abbrev=8',
'--abbrev-commit',
'--pretty=oneline',
'--reverse',
'--date-order',
not_rev(self.base),
R_HEADS + self.name,
'--')
return self._commit_cache
@property
def unabbrev_commits(self):
r = dict()
for commit in self.project.bare_git.rev_list(
not_rev(self.base),
R_HEADS + self.name,
'--'):
r[commit[0:8]] = commit
return r
@property
def date(self):
return self.project.bare_git.log(
'--pretty=format:%cd',
'-n', '1',
R_HEADS + self.name,
'--')
def UploadForReview(self, people, auto_topic=False):
self.project.UploadForReview(self.name,
people,
auto_topic=auto_topic)
def GetPublishedRefs(self):
refs = {}
output = self.project.bare_git.ls_remote(
self.branch.remote.SshReviewUrl(self.project.UserEmail),
'refs/changes/*')
for line in output.split('\n'):
try:
(sha, ref) = line.split()
refs[sha] = ref
except ValueError:
pass
return refs
class StatusColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'status')
self.project = self.printer('header', attr = 'bold')
self.branch = self.printer('header', attr = 'bold')
self.nobranch = self.printer('nobranch', fg = 'red')
self.important = self.printer('important', fg = 'red')
self.added = self.printer('added', fg = 'green')
self.changed = self.printer('changed', fg = 'red')
self.untracked = self.printer('untracked', fg = 'red')
class DiffColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'diff')
self.project = self.printer('header', attr = 'bold')
class _CopyFile:
def __init__(self, src, dest, abssrc, absdest):
self.src = src
self.dest = dest
self.abs_src = abssrc
self.abs_dest = absdest
def _Copy(self):
src = self.abs_src
dest = self.abs_dest
# copy file if it does not exist or is out of date
if not os.path.exists(dest) or not filecmp.cmp(src, dest):
try:
# remove existing file first, since it might be read-only
if os.path.exists(dest):
os.remove(dest)
else:
dir = os.path.dirname(dest)
if not os.path.isdir(dir):
os.makedirs(dir)
shutil.copy(src, dest)
# make the file read-only
mode = os.stat(dest)[stat.ST_MODE]
mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
os.chmod(dest, mode)
except IOError:
_error('Cannot copy file %s to %s', src, dest)
class RemoteSpec(object):
def __init__(self,
name,
url = None,
review = None):
self.name = name
self.url = url
self.review = review
class Project(object):
def __init__(self,
manifest,
name,
remote,
gitdir,
worktree,
relpath,
revisionExpr,
revisionId):
self.manifest = manifest
self.name = name
self.remote = remote
self.gitdir = gitdir.replace('\\', '/')
if worktree:
self.worktree = worktree.replace('\\', '/')
else:
self.worktree = None
self.relpath = relpath
self.revisionExpr = revisionExpr
if revisionId is None \
and revisionExpr \
and IsId(revisionExpr):
self.revisionId = revisionExpr
else:
self.revisionId = revisionId
self.snapshots = {}
self.copyfiles = []
self.config = GitConfig.ForRepository(
gitdir = self.gitdir,
defaults = self.manifest.globalConfig)
if self.worktree:
self.work_git = self._GitGetByExec(self, bare=False)
else:
self.work_git = None
self.bare_git = self._GitGetByExec(self, bare=True)
self.bare_ref = GitRefs(gitdir)
@property
def Exists(self):
return os.path.isdir(self.gitdir)
@property
def CurrentBranch(self):
"""Obtain the name of the currently checked out branch.
The branch name omits the 'refs/heads/' prefix.
None is returned if the project is on a detached HEAD.
"""
b = self.work_git.GetHead()
if b.startswith(R_HEADS):
return b[len(R_HEADS):]
return None
def IsRebaseInProgress(self):
w = self.worktree
g = os.path.join(w, '.git')
return os.path.exists(os.path.join(g, 'rebase-apply')) \
or os.path.exists(os.path.join(g, 'rebase-merge')) \
or os.path.exists(os.path.join(w, '.dotest'))
def IsDirty(self, consider_untracked=True):
"""Is the working directory modified in some way?
"""
self.work_git.update_index('-q',
'--unmerged',
'--ignore-missing',
'--refresh')
if self.work_git.DiffZ('diff-index','-M','--cached',HEAD):
return True
if self.work_git.DiffZ('diff-files'):
return True
if consider_untracked and self.work_git.LsOthers():
return True
return False
_userident_name = None
_userident_email = None
@property
def UserName(self):
"""Obtain the user's personal name.
"""
if self._userident_name is None:
self._LoadUserIdentity()
return self._userident_name
@property
def UserEmail(self):
"""Obtain the user's email address. This is very likely
to be their Gerrit login.
"""
if self._userident_email is None:
self._LoadUserIdentity()
return self._userident_email
def _LoadUserIdentity(self):
u = self.bare_git.var('GIT_COMMITTER_IDENT')
m = re.compile("^(.*) <([^>]*)> ").match(u)
if m:
self._userident_name = m.group(1)
self._userident_email = m.group(2)
else:
self._userident_name = ''
self._userident_email = ''
def GetRemote(self, name):
"""Get the configuration for a single remote.
"""
return self.config.GetRemote(name)
def GetBranch(self, name):
"""Get the configuration for a single branch.
"""
return self.config.GetBranch(name)
def GetBranches(self):
"""Get all existing local branches.
"""
current = self.CurrentBranch
all = self._allrefs
heads = {}
pubd = {}
for name, id in all.iteritems():
if name.startswith(R_HEADS):
name = name[len(R_HEADS):]
b = self.GetBranch(name)
b.current = name == current
b.published = None
b.revision = id
heads[name] = b
for name, id in all.iteritems():
if name.startswith(R_PUB):
name = name[len(R_PUB):]
b = heads.get(name)
if b:
b.published = id
return heads
## Status Display ##
def HasChanges(self):
"""Returns true if there are uncommitted changes.
"""
self.work_git.update_index('-q',
'--unmerged',
'--ignore-missing',
'--refresh')
if self.IsRebaseInProgress():
return True
if self.work_git.DiffZ('diff-index', '--cached', HEAD):
return True
if self.work_git.DiffZ('diff-files'):
return True
if self.work_git.LsOthers():
return True
return False
def PrintWorkTreeStatus(self):
"""Prints the status of the repository to stdout.
"""
if not os.path.isdir(self.worktree):
print ''
print 'project %s/' % self.relpath
print ' missing (run "repo sync")'
return
self.work_git.update_index('-q',
'--unmerged',
'--ignore-missing',
'--refresh')
rb = self.IsRebaseInProgress()
di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD)
df = self.work_git.DiffZ('diff-files')
do = self.work_git.LsOthers()
if not rb and not di and not df and not do:
return 'CLEAN'
out = StatusColoring(self.config)
out.project('project %-40s', self.relpath + '/')
branch = self.CurrentBranch
if branch is None:
out.nobranch('(*** NO BRANCH ***)')
else:
out.branch('branch %s', branch)
out.nl()
if rb:
out.important('prior sync failed; rebase still in progress')
out.nl()
paths = list()
paths.extend(di.keys())
paths.extend(df.keys())
paths.extend(do)
paths = list(set(paths))
paths.sort()
for p in paths:
try: i = di[p]
except KeyError: i = None
try: f = df[p]
except KeyError: f = None
if i: i_status = i.status.upper()
else: i_status = '-'
if f: f_status = f.status.lower()
else: f_status = '-'
if i and i.src_path:
line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
i.src_path, p, i.level)
else:
line = ' %s%s\t%s' % (i_status, f_status, p)
if i and not f:
out.added('%s', line)
elif (i and f) or (not i and f):
out.changed('%s', line)
elif not i and not f:
out.untracked('%s', line)
else:
out.write('%s', line)
out.nl()
return 'DIRTY'
def PrintWorkTreeDiff(self):
"""Prints the status of the repository to stdout.
"""
out = DiffColoring(self.config)
cmd = ['diff']
if out.is_on:
cmd.append('--color')
cmd.append(HEAD)
cmd.append('--')
p = GitCommand(self,
cmd,
capture_stdout = True,
capture_stderr = True)
has_diff = False
for line in p.process.stdout:
if not has_diff:
out.nl()
out.project('project %s/' % self.relpath)
out.nl()
has_diff = True
print line[:-1]
p.Wait()
## Publish / Upload ##
def WasPublished(self, branch, all=None):
"""Was the branch published (uploaded) for code review?
If so, returns the SHA-1 hash of the last published
state for the branch.
"""
key = R_PUB + branch
if all is None:
try:
return self.bare_git.rev_parse(key)
except GitError:
return None
else:
try:
return all[key]
except KeyError:
return None
def CleanPublishedCache(self, all=None):
"""Prunes any stale published refs.
"""
if all is None:
all = self._allrefs
heads = set()
canrm = {}
for name, id in all.iteritems():
if name.startswith(R_HEADS):
heads.add(name)
elif name.startswith(R_PUB):
canrm[name] = id
for name, id in canrm.iteritems():
n = name[len(R_PUB):]
if R_HEADS + n not in heads:
self.bare_git.DeleteRef(name, id)
def GetUploadableBranches(self):
"""List any branches which can be uploaded for review.
"""
heads = {}
pubed = {}
for name, id in self._allrefs.iteritems():
if name.startswith(R_HEADS):
heads[name[len(R_HEADS):]] = id
elif name.startswith(R_PUB):
pubed[name[len(R_PUB):]] = id
ready = []
for branch, id in heads.iteritems():
if branch in pubed and pubed[branch] == id:
continue
rb = self.GetUploadableBranch(branch)
if rb:
ready.append(rb)
return ready
def GetUploadableBranch(self, branch_name):
"""Get a single uploadable branch, or None.
"""
branch = self.GetBranch(branch_name)
base = branch.LocalMerge
if branch.LocalMerge:
rb = ReviewableBranch(self, branch, base)
if rb.commits:
return rb
return None
def UploadForReview(self, branch=None,
people=([],[]),
auto_topic=False):
"""Uploads the named branch for code review.
"""
if branch is None:
branch = self.CurrentBranch
if branch is None:
raise GitError('not currently on a branch')
branch = self.GetBranch(branch)
if not branch.LocalMerge:
raise GitError('branch %s does not track a remote' % branch.name)
if not branch.remote.review:
raise GitError('remote %s has no review url' % branch.remote.name)
dest_branch = branch.merge
if not dest_branch.startswith(R_HEADS):
dest_branch = R_HEADS + dest_branch
if not branch.remote.projectname:
branch.remote.projectname = self.name
branch.remote.Save()
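    # Gerrit uploads push to the magic refs/for/<branch> ref on the review server;
    # reviewer and CC addresses are passed as flags to its receive-pack.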
if branch.remote.ReviewProtocol == 'ssh':
if dest_branch.startswith(R_HEADS):
dest_branch = dest_branch[len(R_HEADS):]
rp = ['gerrit receive-pack']
for e in people[0]:
rp.append('--reviewer=%s' % sq(e))
for e in people[1]:
rp.append('--cc=%s' % sq(e))
ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch)
if auto_topic:
ref_spec = ref_spec + '/' + branch.name
cmd = ['push']
cmd.append('--receive-pack=%s' % " ".join(rp))
cmd.append(branch.remote.SshReviewUrl(self.UserEmail))
cmd.append(ref_spec)
if GitCommand(self, cmd, bare = True).Wait() != 0:
raise UploadError('Upload failed')
else:
raise UploadError('Unsupported protocol %s' \
% branch.remote.review)
msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
self.bare_git.UpdateRef(R_PUB + branch.name,
R_HEADS + branch.name,
message = msg)
## Sync ##
def Sync_NetworkHalf(self, quiet=False):
"""Perform only the network IO portion of the sync process.
Local working directory/branch state is not affected.
"""
is_new = not self.Exists
if is_new:
if not quiet:
print >>sys.stderr
print >>sys.stderr, 'Initializing project %s ...' % self.name
self._InitGitDir()
self._InitRemote()
if not self._RemoteFetch(initial=is_new, quiet=quiet):
return False
#Check that the requested ref was found after fetch
#
try:
self.GetRevisionId()
except ManifestInvalidRevisionError:
# if the ref is a tag. We can try fetching
# the tag manually as a last resort
#
rev = self.revisionExpr
if rev.startswith(R_TAGS):
self._RemoteFetch(None, rev[len(R_TAGS):], quiet=quiet)
if self.worktree:
self.manifest.SetMRefs(self)
else:
self._InitMirrorHead()
try:
os.remove(os.path.join(self.gitdir, 'FETCH_HEAD'))
except OSError:
pass
return True
def PostRepoUpgrade(self):
self._InitHooks()
def _CopyFiles(self):
for file in self.copyfiles:
file._Copy()
def GetRevisionId(self, all=None):
if self.revisionId:
return self.revisionId
rem = self.GetRemote(self.remote.name)
rev = rem.ToLocal(self.revisionExpr)
if all is not None and rev in all:
return all[rev]
try:
return self.bare_git.rev_parse('--verify', '%s^0' % rev)
except GitError:
raise ManifestInvalidRevisionError(
'revision %s in %s not found' % (self.revisionExpr,
self.name))
def Sync_LocalHalf(self, syncbuf):
"""Perform only the local IO portion of the sync process.
Network access is not required.
"""
self._InitWorkTree()
all = self.bare_ref.all
self.CleanPublishedCache(all)
revid = self.GetRevisionId(all)
head = self.work_git.GetHead()
if head.startswith(R_HEADS):
branch = head[len(R_HEADS):]
try:
head = all[head]
except KeyError:
head = None
else:
branch = None
if branch is None or syncbuf.detach_head:
# Currently on a detached HEAD. The user is assumed to
# not have any local modifications worth worrying about.
#
if self.IsRebaseInProgress():
syncbuf.fail(self, _PriorSyncFailedError())
return
if head == revid:
# No changes; don't do anything further.
#
return
lost = self._revlist(not_rev(revid), HEAD)
if lost:
syncbuf.info(self, "discarding %d commits", len(lost))
try:
self._Checkout(revid, quiet=True)
except GitError, e:
syncbuf.fail(self, e)
return
self._CopyFiles()
return
if head == revid:
# No changes; don't do anything further.
#
return
branch = self.GetBranch(branch)
if not branch.LocalMerge:
# The current branch has no tracking configuration.
      # Jump off it to a detached HEAD.
#
syncbuf.info(self,
"leaving %s; does not track upstream",
branch.name)
try:
self._Checkout(revid, quiet=True)
except GitError, e:
syncbuf.fail(self, e)
return
self._CopyFiles()
return
upstream_gain = self._revlist(not_rev(HEAD), revid)
pub = self.WasPublished(branch.name, all)
if pub:
not_merged = self._revlist(not_rev(revid), pub)
if not_merged:
if upstream_gain:
# The user has published this branch and some of those
# commits are not yet merged upstream. We do not want
# to rewrite the published commits so we punt.
#
syncbuf.fail(self,
"branch %s is published (but not merged) and is now %d commits behind"
% (branch.name, len(upstream_gain)))
return
elif pub == head:
# All published commits are merged, and thus we are a
# strict subset. We can fast-forward safely.
#
def _doff():
self._FastForward(revid)
self._CopyFiles()
syncbuf.later1(self, _doff)
return
# Examine the local commits not in the remote. Find the
# last one attributed to this user, if any.
#
local_changes = self._revlist(not_rev(revid), HEAD, format='%H %ce')
last_mine = None
cnt_mine = 0
for commit in local_changes:
commit_id, committer_email = commit.split(' ', 1)
if committer_email == self.UserEmail:
last_mine = commit_id
cnt_mine += 1
if not upstream_gain and cnt_mine == len(local_changes):
return
if self.IsDirty(consider_untracked=False):
syncbuf.fail(self, _DirtyError())
return
# If the upstream switched on us, warn the user.
#
if branch.merge != self.revisionExpr:
if branch.merge and self.revisionExpr:
syncbuf.info(self,
'manifest switched %s...%s',
branch.merge,
self.revisionExpr)
elif branch.merge:
syncbuf.info(self,
'manifest no longer tracks %s',
branch.merge)
if cnt_mine < len(local_changes):
# Upstream rebased. Not everything in HEAD
# was created by this user.
#
syncbuf.info(self,
"discarding %d commits removed from upstream",
len(local_changes) - cnt_mine)
branch.remote = self.GetRemote(self.remote.name)
branch.merge = self.revisionExpr
branch.Save()
if cnt_mine > 0:
def _dorebase():
self._Rebase(upstream = '%s^1' % last_mine, onto = revid)
self._CopyFiles()
syncbuf.later2(self, _dorebase)
elif local_changes:
try:
self._ResetHard(revid)
self._CopyFiles()
except GitError, e:
syncbuf.fail(self, e)
return
else:
def _doff():
self._FastForward(revid)
self._CopyFiles()
syncbuf.later1(self, _doff)
def AddCopyFile(self, src, dest, absdest):
# dest should already be an absolute path, but src is project relative
# make src an absolute path
abssrc = os.path.join(self.worktree, src)
self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
def DownloadPatchSet(self, change_id, patch_id):
"""Download a single patch set of a single change to FETCH_HEAD.
"""
remote = self.GetRemote(self.remote.name)
cmd = ['fetch', remote.name]
cmd.append('refs/changes/%2.2d/%d/%d' \
% (change_id % 100, change_id, patch_id))
cmd.extend(map(lambda x: str(x), remote.fetch))
if GitCommand(self, cmd, bare=True).Wait() != 0:
return None
return DownloadedChange(self,
self.GetRevisionId(),
change_id,
patch_id,
self.bare_git.rev_parse('FETCH_HEAD'))
## Branch Management ##
def StartBranch(self, name):
"""Create a new branch off the manifest's revision.
"""
head = self.work_git.GetHead()
if head == (R_HEADS + name):
return True
all = self.bare_ref.all
if (R_HEADS + name) in all:
return GitCommand(self,
['checkout', name, '--'],
capture_stdout = True,
capture_stderr = True).Wait() == 0
branch = self.GetBranch(name)
branch.remote = self.GetRemote(self.remote.name)
branch.merge = self.revisionExpr
revid = self.GetRevisionId(all)
if head.startswith(R_HEADS):
try:
head = all[head]
except KeyError:
head = None
if revid and head and revid == head:
ref = os.path.join(self.gitdir, R_HEADS + name)
try:
os.makedirs(os.path.dirname(ref))
except OSError:
pass
_lwrite(ref, '%s\n' % revid)
_lwrite(os.path.join(self.worktree, '.git', HEAD),
'ref: %s%s\n' % (R_HEADS, name))
branch.Save()
return True
if GitCommand(self,
['checkout', '-b', branch.name, revid],
capture_stdout = True,
capture_stderr = True).Wait() == 0:
branch.Save()
return True
return False
def CheckoutBranch(self, name):
"""Checkout a local topic branch.
"""
rev = R_HEADS + name
head = self.work_git.GetHead()
if head == rev:
# Already on the branch
#
return True
all = self.bare_ref.all
try:
revid = all[rev]
except KeyError:
# Branch does not exist in this project
#
return False
if head.startswith(R_HEADS):
try:
head = all[head]
except KeyError:
head = None
if head == revid:
# Same revision; just update HEAD to point to the new
# target branch, but otherwise take no other action.
#
_lwrite(os.path.join(self.worktree, '.git', HEAD),
'ref: %s%s\n' % (R_HEADS, name))
return True
return GitCommand(self,
['checkout', name, '--'],
capture_stdout = True,
capture_stderr = True).Wait() == 0
def AbandonBranch(self, name):
"""Destroy a local topic branch.
"""
rev = R_HEADS + name
all = self.bare_ref.all
if rev not in all:
# Doesn't exist; assume already abandoned.
#
return True
head = self.work_git.GetHead()
if head == rev:
# We can't destroy the branch while we are sitting
# on it. Switch to a detached HEAD.
#
head = all[head]
revid = self.GetRevisionId(all)
if head == revid:
_lwrite(os.path.join(self.worktree, '.git', HEAD),
'%s\n' % revid)
else:
self._Checkout(revid, quiet=True)
return GitCommand(self,
['branch', '-D', name],
capture_stdout = True,
capture_stderr = True).Wait() == 0
def PruneHeads(self):
"""Prune any topic branches already merged into upstream.
"""
cb = self.CurrentBranch
kill = []
left = self._allrefs
for name in left.keys():
if name.startswith(R_HEADS):
name = name[len(R_HEADS):]
if cb is None or name != cb:
kill.append(name)
rev = self.GetRevisionId(left)
if cb is not None \
and not self._revlist(HEAD + '...' + rev) \
and not self.IsDirty(consider_untracked = False):
self.work_git.DetachHead(HEAD)
kill.append(cb)
if kill:
old = self.bare_git.GetHead()
if old is None:
old = 'refs/heads/please_never_use_this_as_a_branch_name'
try:
self.bare_git.DetachHead(rev)
b = ['branch', '-d']
b.extend(kill)
b = GitCommand(self, b, bare=True,
capture_stdout=True,
capture_stderr=True)
b.Wait()
finally:
self.bare_git.SetHead(old)
left = self._allrefs
for branch in kill:
if (R_HEADS + branch) not in left:
self.CleanPublishedCache()
break
if cb and cb not in kill:
kill.append(cb)
kill.sort()
kept = []
for branch in kill:
if (R_HEADS + branch) in left:
branch = self.GetBranch(branch)
base = branch.LocalMerge
if not base:
base = rev
kept.append(ReviewableBranch(self, branch, base))
return kept
## Direct Git Commands ##
def _RemoteFetch(self, name=None, tag=None,
initial=False,
quiet=False):
if not name:
name = self.remote.name
ssh_proxy = False
if self.GetRemote(name).PreConnectFetch():
ssh_proxy = True
if initial:
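      # First fetch: if this git dir borrows objects from a reference mirror via
      # objects/info/alternates, temporarily expose the mirror's refs in packed-refs
      # so the fetch can reuse those objects instead of downloading them again.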
alt = os.path.join(self.gitdir, 'objects/info/alternates')
try:
fd = open(alt, 'rb')
try:
ref_dir = fd.readline()
if ref_dir and ref_dir.endswith('\n'):
ref_dir = ref_dir[:-1]
finally:
fd.close()
except IOError, e:
ref_dir = None
if ref_dir and 'objects' == os.path.basename(ref_dir):
ref_dir = os.path.dirname(ref_dir)
packed_refs = os.path.join(self.gitdir, 'packed-refs')
remote = self.GetRemote(name)
all = self.bare_ref.all
ids = set(all.values())
tmp = set()
for r, id in GitRefs(ref_dir).all.iteritems():
if r not in all:
if r.startswith(R_TAGS) or remote.WritesTo(r):
all[r] = id
ids.add(id)
continue
if id in ids:
continue
r = 'refs/_alt/%s' % id
all[r] = id
ids.add(id)
tmp.add(r)
ref_names = list(all.keys())
ref_names.sort()
tmp_packed = ''
old_packed = ''
for r in ref_names:
line = '%s %s\n' % (all[r], r)
tmp_packed += line
if r not in tmp:
old_packed += line
_lwrite(packed_refs, tmp_packed)
else:
ref_dir = None
cmd = ['fetch']
if quiet:
cmd.append('--quiet')
if not self.worktree:
cmd.append('--update-head-ok')
cmd.append(name)
if tag is not None:
cmd.append('tag')
cmd.append(tag)
ok = GitCommand(self,
cmd,
bare = True,
ssh_proxy = ssh_proxy).Wait() == 0
if initial:
if ref_dir:
if old_packed != '':
_lwrite(packed_refs, old_packed)
else:
os.remove(packed_refs)
self.bare_git.pack_refs('--all', '--prune')
return ok
def _Checkout(self, rev, quiet=False):
cmd = ['checkout']
if quiet:
cmd.append('-q')
cmd.append(rev)
cmd.append('--')
if GitCommand(self, cmd).Wait() != 0:
if self._allrefs:
raise GitError('%s checkout %s ' % (self.name, rev))
def _ResetHard(self, rev, quiet=True):
cmd = ['reset', '--hard']
if quiet:
cmd.append('-q')
cmd.append(rev)
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s reset --hard %s ' % (self.name, rev))
def _Rebase(self, upstream, onto = None):
cmd = ['rebase']
if onto is not None:
cmd.extend(['--onto', onto])
cmd.append(upstream)
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s rebase %s ' % (self.name, upstream))
def _FastForward(self, head):
cmd = ['merge', head]
if GitCommand(self, cmd).Wait() != 0:
raise GitError('%s merge %s ' % (self.name, head))
def _InitGitDir(self):
if not os.path.exists(self.gitdir):
os.makedirs(self.gitdir)
self.bare_git.init()
mp = self.manifest.manifestProject
ref_dir = mp.config.GetString('repo.reference')
if ref_dir:
mirror_git = os.path.join(ref_dir, self.name + '.git')
repo_git = os.path.join(ref_dir, '.repo', 'projects',
self.relpath + '.git')
if os.path.exists(mirror_git):
ref_dir = mirror_git
elif os.path.exists(repo_git):
ref_dir = repo_git
else:
ref_dir = None
if ref_dir:
_lwrite(os.path.join(self.gitdir, 'objects/info/alternates'),
os.path.join(ref_dir, 'objects') + '\n')
if self.manifest.IsMirror:
self.config.SetString('core.bare', 'true')
else:
self.config.SetString('core.bare', None)
hooks = self._gitdir_path('hooks')
try:
to_rm = os.listdir(hooks)
except OSError:
to_rm = []
for old_hook in to_rm:
os.remove(os.path.join(hooks, old_hook))
self._InitHooks()
m = self.manifest.manifestProject.config
for key in ['user.name', 'user.email']:
if m.Has(key, include_defaults = False):
self.config.SetString(key, m.GetString(key))
def _InitHooks(self):
hooks = self._gitdir_path('hooks')
if not os.path.exists(hooks):
os.makedirs(hooks)
for stock_hook in repo_hooks():
name = os.path.basename(stock_hook)
      if name in ('commit-msg',) and not self.remote.review:
# Don't install a Gerrit Code Review hook if this
# project does not appear to use it for reviews.
#
continue
dst = os.path.join(hooks, name)
if os.path.islink(dst):
continue
if os.path.exists(dst):
if filecmp.cmp(stock_hook, dst, shallow=False):
os.remove(dst)
else:
_error("%s: Not replacing %s hook", self.relpath, name)
continue
try:
os.symlink(relpath(stock_hook, dst), dst)
except OSError, e:
if e.errno == errno.EPERM:
raise GitError('filesystem must support symlinks')
else:
raise
def _InitRemote(self):
if self.remote.url:
remote = self.GetRemote(self.remote.name)
remote.url = self.remote.url
remote.review = self.remote.review
remote.projectname = self.name
if self.worktree:
remote.ResetFetch(mirror=False)
else:
remote.ResetFetch(mirror=True)
remote.Save()
def _InitMirrorHead(self):
self._InitAnyMRef(HEAD)
def _InitAnyMRef(self, ref):
cur = self.bare_ref.symref(ref)
if self.revisionId:
if cur != '' or self.bare_ref.get(ref) != self.revisionId:
msg = 'manifest set to %s' % self.revisionId
dst = self.revisionId + '^0'
self.bare_git.UpdateRef(ref, dst, message = msg, detach = True)
else:
remote = self.GetRemote(self.remote.name)
dst = remote.ToLocal(self.revisionExpr)
if cur != dst:
msg = 'manifest set to %s' % self.revisionExpr
self.bare_git.symbolic_ref('-m', msg, ref, dst)
def _LinkWorkTree(self, relink=False):
dotgit = os.path.join(self.worktree, '.git')
if not relink:
os.makedirs(dotgit)
for name in ['config',
'description',
'hooks',
'info',
'logs',
'objects',
'packed-refs',
'refs',
'rr-cache',
'svn']:
try:
src = os.path.join(self.gitdir, name)
dst = os.path.join(dotgit, name)
if relink:
os.remove(dst)
if os.path.islink(dst) or not os.path.exists(dst):
os.symlink(relpath(src, dst), dst)
else:
raise GitError('cannot overwrite a local work tree')
except OSError, e:
if e.errno == errno.EPERM:
raise GitError('filesystem must support symlinks')
else:
raise
def _InitWorkTree(self):
dotgit = os.path.join(self.worktree, '.git')
if not os.path.exists(dotgit):
self._LinkWorkTree()
_lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())
cmd = ['read-tree', '--reset', '-u']
cmd.append('-v')
cmd.append(HEAD)
if GitCommand(self, cmd).Wait() != 0:
raise GitError("cannot initialize work tree")
self._CopyFiles()
def _gitdir_path(self, path):
return os.path.join(self.gitdir, path)
def _revlist(self, *args, **kw):
a = []
a.extend(args)
a.append('--')
return self.work_git.rev_list(*a, **kw)
@property
def _allrefs(self):
return self.bare_ref.all
class _GitGetByExec(object):
def __init__(self, project, bare):
self._project = project
self._bare = bare
def LsOthers(self):
p = GitCommand(self._project,
['ls-files',
'-z',
'--others',
'--exclude-standard'],
bare = False,
capture_stdout = True,
capture_stderr = True)
if p.Wait() == 0:
out = p.stdout
if out:
return out[:-1].split("\0")
return []
def DiffZ(self, name, *args):
cmd = [name]
cmd.append('-z')
cmd.extend(args)
p = GitCommand(self._project,
cmd,
bare = False,
capture_stdout = True,
capture_stderr = True)
try:
out = p.process.stdout.read()
r = {}
if out:
out = iter(out[:-1].split('\0'))
while out:
try:
info = out.next()
path = out.next()
except StopIteration:
break
class _Info(object):
def __init__(self, path, omode, nmode, oid, nid, state):
self.path = path
self.src_path = None
self.old_mode = omode
self.new_mode = nmode
self.old_id = oid
self.new_id = nid
if len(state) == 1:
self.status = state
self.level = None
else:
self.status = state[:1]
self.level = state[1:]
while self.level.startswith('0'):
self.level = self.level[1:]
info = info[1:].split(' ')
info =_Info(path, *info)
if info.status in ('R', 'C'):
info.src_path = info.path
info.path = out.next()
r[info.path] = info
return r
finally:
p.Wait()
def GetHead(self):
if self._bare:
path = os.path.join(self._project.gitdir, HEAD)
else:
path = os.path.join(self._project.worktree, '.git', HEAD)
fd = open(path, 'rb')
try:
line = fd.read()
finally:
fd.close()
if line.startswith('ref: '):
return line[5:-1]
return line[:-1]
def SetHead(self, ref, message=None):
cmdv = []
if message is not None:
cmdv.extend(['-m', message])
cmdv.append(HEAD)
cmdv.append(ref)
self.symbolic_ref(*cmdv)
def DetachHead(self, new, message=None):
cmdv = ['--no-deref']
if message is not None:
cmdv.extend(['-m', message])
cmdv.append(HEAD)
cmdv.append(new)
self.update_ref(*cmdv)
def UpdateRef(self, name, new, old=None,
message=None,
detach=False):
cmdv = []
if message is not None:
cmdv.extend(['-m', message])
if detach:
cmdv.append('--no-deref')
cmdv.append(name)
cmdv.append(new)
if old is not None:
cmdv.append(old)
self.update_ref(*cmdv)
def DeleteRef(self, name, old=None):
if not old:
old = self.rev_parse(name)
self.update_ref('-d', name, old)
self._project.bare_ref.deleted(name)
def rev_list(self, *args, **kw):
if 'format' in kw:
cmdv = ['log', '--pretty=format:%s' % kw['format']]
else:
cmdv = ['rev-list']
cmdv.extend(args)
p = GitCommand(self._project,
cmdv,
bare = self._bare,
capture_stdout = True,
capture_stderr = True)
r = []
for line in p.process.stdout:
if line[-1] == '\n':
line = line[:-1]
r.append(line)
if p.Wait() != 0:
raise GitError('%s rev-list %s: %s' % (
self._project.name,
str(args),
p.stderr))
return r
def __getattr__(self, name):
name = name.replace('_', '-')
def runner(*args):
cmdv = [name]
cmdv.extend(args)
p = GitCommand(self._project,
cmdv,
bare = self._bare,
capture_stdout = True,
capture_stderr = True)
if p.Wait() != 0:
raise GitError('%s %s: %s' % (
self._project.name,
name,
p.stderr))
r = p.stdout
if r.endswith('\n') and r.index('\n') == len(r) - 1:
return r[:-1]
return r
return runner
class _PriorSyncFailedError(Exception):
def __str__(self):
return 'prior sync failed; rebase still in progress'
class _DirtyError(Exception):
def __str__(self):
return 'contains uncommitted changes'
class _InfoMessage(object):
def __init__(self, project, text):
self.project = project
self.text = text
def Print(self, syncbuf):
syncbuf.out.info('%s/: %s', self.project.relpath, self.text)
syncbuf.out.nl()
class _Failure(object):
def __init__(self, project, why):
self.project = project
self.why = why
def Print(self, syncbuf):
syncbuf.out.fail('error: %s/: %s',
self.project.relpath,
str(self.why))
syncbuf.out.nl()
class _Later(object):
def __init__(self, project, action):
self.project = project
self.action = action
def Run(self, syncbuf):
out = syncbuf.out
out.project('project %s/', self.project.relpath)
out.nl()
try:
self.action()
out.nl()
return True
except GitError, e:
out.nl()
return False
class _SyncColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'reposync')
self.project = self.printer('header', attr = 'bold')
self.info = self.printer('info')
self.fail = self.printer('fail', fg='red')
class SyncBuffer(object):
def __init__(self, config, detach_head=False):
self._messages = []
self._failures = []
self._later_queue1 = []
self._later_queue2 = []
self.out = _SyncColoring(config)
self.out.redirect(sys.stderr)
self.detach_head = detach_head
self.clean = True
def info(self, project, fmt, *args):
self._messages.append(_InfoMessage(project, fmt % args))
def fail(self, project, err=None):
self._failures.append(_Failure(project, err))
self.clean = False
def later1(self, project, what):
self._later_queue1.append(_Later(project, what))
def later2(self, project, what):
self._later_queue2.append(_Later(project, what))
def Finish(self):
self._PrintMessages()
self._RunLater()
self._PrintMessages()
return self.clean
def _RunLater(self):
for q in ['_later_queue1', '_later_queue2']:
if not self._RunQueue(q):
return
def _RunQueue(self, queue):
for m in getattr(self, queue):
if not m.Run(self):
self.clean = False
return False
setattr(self, queue, [])
return True
def _PrintMessages(self):
for m in self._messages:
m.Print(self)
for m in self._failures:
m.Print(self)
self._messages = []
self._failures = []
class MetaProject(Project):
"""A special project housed under .repo.
"""
def __init__(self, manifest, name, gitdir, worktree, relpath=None):
repodir = manifest.repodir
if relpath is None:
relpath = '.repo/%s' % name
Project.__init__(self,
manifest = manifest,
name = name,
gitdir = gitdir,
worktree = worktree,
remote = RemoteSpec('origin'),
relpath = relpath,
revisionExpr = 'refs/heads/master',
revisionId = None)
def PreSync(self):
if self.Exists:
cb = self.CurrentBranch
if cb:
cb = self.GetBranch(cb)
if cb.merge:
self.revisionExpr = cb.merge
self.revisionId = None
if cb.remote and cb.remote.name:
self.remote.name = cb.remote.name
@property
def LastFetch(self):
try:
fh = os.path.join(self.gitdir, 'FETCH_HEAD')
return os.path.getmtime(fh)
except OSError:
return 0
@property
def HasChanges(self):
"""Has the remote received new commits not yet checked out?
"""
if not self.remote or not self.revisionExpr:
return False
all = self.bare_ref.all
revid = self.GetRevisionId(all)
head = self.work_git.GetHead()
if head.startswith(R_HEADS):
try:
head = all[head]
except KeyError:
head = None
if revid == head:
return False
elif self._revlist(not_rev(HEAD), revid):
return True
return False
|
py | b4044a0abf6e7602f4b5870d7b4b58c05f6858dd | """
Django settings for samples project.
Generated by 'django-admin startproject' using Django 2.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Used for a default title
APP_NAME = 'Home' # Add
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'g$iqqu&*mw4_sg3(#ld0sqaalxebel&168^yj%i&sgrw(fmn@w'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
# Extensions - installed with pip3 / requirements.txt
'django_extensions',
'crispy_forms',
'rest_framework',
'social_django',
"bootstrap_datepicker_plus",
'datetimepicker',
'home.apps.HomeConfig',
# Sample Applications - don't copy
'invests.apps.InvestsConfig',
'blogs.apps.BlogsConfig',
# 'unesco.apps.UnescoConfig',
]
# When we get to crispy forms :)
CRISPY_TEMPLATE_PACK = 'bootstrap3' # Add
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'social_django.middleware.SocialAuthExceptionMiddleware', # Add
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'home.context_processors.settings', # Add
'social_django.context_processors.backends', # Add
'social_django.context_processors.login_redirect', # Add
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
# Add the settings below
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
)
}
# Configure the social login
# try:
# from . import github_settings
# SOCIAL_AUTH_GITHUB_KEY = github_settings.SOCIAL_AUTH_GITHUB_KEY
# SOCIAL_AUTH_GITHUB_SECRET = github_settings.SOCIAL_AUTH_GITHUB_SECRET
# except:
# print('When you want to use social login, please see dj4e-samples/github_settings-dist.py')
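# A hypothetical github_settings.py (kept out of version control) only needs the
# two names referenced above, e.g.:
#
#   SOCIAL_AUTH_GITHUB_KEY = '<client id of your GitHub OAuth application>'
#   SOCIAL_AUTH_GITHUB_SECRET = '<client secret of your GitHub OAuth application>'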
# https://python-social-auth.readthedocs.io/en/latest/configuration/django.html#authentication-backends
# https://simpleisbetterthancomplex.com/tutorial/2016/10/24/how-to-add-social-login-to-django.html
AUTHENTICATION_BACKENDS = (
'social_core.backends.github.GithubOAuth2',
# 'social_core.backends.twitter.TwitterOAuth',
# 'social_core.backends.facebook.FacebookOAuth2',
'django.contrib.auth.backends.ModelBackend',
)
LOGOUT_REDIRECT_URL = '/'
LOGIN_REDIRECT_URL = '/'
# Don't set default LOGIN_URL - let django.contrib.auth set it when it is loaded
# LOGIN_URL = '/accounts/login'
# https://coderwall.com/p/uzhyca/quickly-setup-sql-query-logging-django
# https://stackoverflow.com/questions/12027545/determine-if-django-is-running-under-the-development-server
''' # Leave off for now
import sys
if (len(sys.argv) >= 2 and sys.argv[1] == 'runserver'):
print('Running locally')
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
}
},
'loggers': {
'django.db.backends': {
'handlers': ['console'],
'level': 'DEBUG',
},
}
}
'''
|
py | b4044a3875f5fb84ea56d1a9cbf3dce469556d53 | from viper.parser.parser import LLLnode
from .opcodes import opcodes
from viper.utils import MemoryPositions
def num_to_bytearray(x):
o = []
while x > 0:
o.insert(0, x % 256)
x //= 256
return o
PUSH_OFFSET = 0x5f
DUP_OFFSET = 0x7f
SWAP_OFFSET = 0x8f
next_symbol = [0]
def mksymbol():
next_symbol[0] += 1
return '_sym_' + str(next_symbol[0])
def is_symbol(i):
return isinstance(i, str) and i[:5] == '_sym_'
# Compiles LLL to assembly
def compile_to_assembly(code, withargs=None, break_dest=None, height=0):
if withargs is None:
withargs = {}
# Opcodes
if isinstance(code.value, str) and code.value.upper() in opcodes:
o = []
for i, c in enumerate(code.args[::-1]):
o.extend(compile_to_assembly(c, withargs, break_dest, height + i))
o.append(code.value.upper())
return o
# Numbers
elif isinstance(code.value, int):
if code.value <= -2**255:
raise Exception("Value too low: %d" % code.value)
elif code.value >= 2**256:
raise Exception("Value too high: %d" % code.value)
bytez = num_to_bytearray(code.value % 2**256) or [0]
return ['PUSH' + str(len(bytez))] + bytez
# Variables connected to with statements
elif isinstance(code.value, str) and code.value in withargs:
if height - withargs[code.value] > 16:
raise Exception("With statement too deep")
return ['DUP' + str(height - withargs[code.value])]
# Setting variables connected to with statements
elif code.value == "set":
if height - withargs[code.args[0].value] > 16:
raise Exception("With statement too deep")
if len(code.args) != 2 or code.args[0].value not in withargs:
raise Exception("Set expects two arguments, the first being a stack variable")
return compile_to_assembly(code.args[1], withargs, break_dest, height) + \
['SWAP' + str(height - withargs[code.args[0].value]), 'POP']
# Pass statements
elif code.value == 'pass':
return []
# Code length
elif code.value == '~codelen':
return ['_sym_codeend']
# Calldataload equivalent for code
elif code.value == 'codeload':
return compile_to_assembly(LLLnode.from_list(['seq', ['codecopy', MemoryPositions.FREE_VAR_SPACE, code.args[0], 32], ['mload', MemoryPositions.FREE_VAR_SPACE]]),
withargs, break_dest, height)
# If statements (2 arguments, ie. if x: y)
elif code.value == 'if' and len(code.args) == 2:
o = []
o.extend(compile_to_assembly(code.args[0], withargs, break_dest, height))
end_symbol = mksymbol()
o.extend(['ISZERO', end_symbol, 'JUMPI'])
o.extend(compile_to_assembly(code.args[1], withargs, break_dest, height))
o.extend([end_symbol, 'JUMPDEST'])
return o
# If statements (3 arguments, ie. if x: y, else: z)
elif code.value == 'if' and len(code.args) == 3:
o = []
o.extend(compile_to_assembly(code.args[0], withargs, break_dest, height))
mid_symbol = mksymbol()
end_symbol = mksymbol()
o.extend(['ISZERO', mid_symbol, 'JUMPI'])
o.extend(compile_to_assembly(code.args[1], withargs, break_dest, height))
o.extend([end_symbol, 'JUMP', mid_symbol, 'JUMPDEST'])
o.extend(compile_to_assembly(code.args[2], withargs, break_dest, height))
o.extend([end_symbol, 'JUMPDEST'])
return o
# Repeat statements (compiled from for loops)
# Repeat(memloc, start, rounds, body)
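    # e.g. (illustrative only) ['repeat', 320, 0, 10, body] keeps the loop
    # counter at memory slot 320, starts it at 0 and executes body 10 times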
elif code.value == 'repeat':
o = []
loops = num_to_bytearray(code.args[2].value)
if not loops:
raise Exception("Number of times repeated must be a constant nonzero positive integer: %r" % loops)
start, end = mksymbol(), mksymbol()
o.extend(compile_to_assembly(code.args[0], withargs, break_dest, height))
o.extend(compile_to_assembly(code.args[1], withargs, break_dest, height + 1))
o.extend(['PUSH' + str(len(loops))] + loops)
# stack: memloc, startvalue, rounds
o.extend(['DUP2', 'DUP4', 'MSTORE', 'ADD', start, 'JUMPDEST'])
# stack: memloc, exit_index
o.extend(compile_to_assembly(code.args[3], withargs, (end, height + 2), height + 2))
# stack: memloc, exit_index
o.extend(['DUP2', 'MLOAD', 'PUSH1', 1, 'ADD', 'DUP1', 'DUP4', 'MSTORE'])
        # stack: memloc, exit_index, new index
o.extend(['DUP2', 'EQ', 'ISZERO', start, 'JUMPI', end, 'JUMPDEST', 'POP', 'POP'])
return o
# Break from inside a for loop
elif code.value == 'break':
if not break_dest:
raise Exception("Invalid break")
dest, break_height = break_dest
return ['POP'] * (height - break_height) + [dest, 'JUMP']
# With statements
elif code.value == 'with':
o = []
o.extend(compile_to_assembly(code.args[1], withargs, break_dest, height))
old = withargs.get(code.args[0].value, None)
withargs[code.args[0].value] = height
o.extend(compile_to_assembly(code.args[2], withargs, break_dest, height + 1))
if code.args[2].valency:
o.extend(['SWAP1', 'POP'])
else:
o.extend(['POP'])
if old is not None:
withargs[code.args[0].value] = old
else:
del withargs[code.args[0].value]
return o
# LLL statement (used to contain code inside code)
elif code.value == 'lll':
o = []
begincode = mksymbol()
endcode = mksymbol()
o.extend([endcode, 'JUMP', begincode, 'BLANK'])
o.append(compile_to_assembly(code.args[0], {}, None, 0)) # Append is intentional
o.extend([endcode, 'JUMPDEST', begincode, endcode, 'SUB', begincode])
o.extend(compile_to_assembly(code.args[1], withargs, break_dest, height))
o.extend(['CODECOPY', begincode, endcode, 'SUB'])
return o
# Seq (used to piece together multiple statements)
elif code.value == 'seq':
o = []
for arg in code.args:
o.extend(compile_to_assembly(arg, withargs, break_dest, height))
if arg.valency == 1 and arg != code.args[-1]:
o.append('POP')
return o
# Assert (if false, exit)
elif code.value == 'assert':
o = compile_to_assembly(code.args[0], withargs, break_dest, height)
end_symbol = mksymbol()
o.extend([end_symbol, 'JUMPI'])
o.extend(['PUSH1', 0, 'DUP1', 'REVERT'])
o.extend([end_symbol, 'JUMPDEST'])
return o
# Unsigned/signed clamp, check less-than
elif code.value in ('uclamplt', 'uclample', 'clamplt', 'clample', 'uclampgt', 'uclampge', 'clampgt', 'clampge'):
if isinstance(code.args[0].value, int) and isinstance(code.args[1].value, int):
# Checks for clamp errors at compile time as opposed to run time
if code.value in ('uclamplt', 'clamplt') and 0 <= code.args[0].value < code.args[1].value or \
code.value in ('uclample', 'clample') and 0 <= code.args[0].value <= code.args[1].value or \
code.value in ('uclampgt', 'clampgt') and 0 <= code.args[0].value > code.args[1].value or \
code.value in ('uclampge', 'clampge') and 0 <= code.args[0].value >= code.args[1].value:
return compile_to_assembly(code.args[0], withargs, break_dest, height)
else:
raise Exception("Invalid %r with values %r and %r" % (code.value, code.args[0], code.args[1]))
o = compile_to_assembly(code.args[0], withargs, break_dest, height)
o.extend(compile_to_assembly(code.args[1], withargs, break_dest, height + 1))
o.extend(['DUP2'])
        # Stack: num bound num
if code.value == 'uclamplt':
o.extend(["LT", 'ISZERO'])
elif code.value == "clamplt":
o.extend(["SLT", 'ISZERO'])
elif code.value == "uclample":
o.extend(["GT"])
elif code.value == "clample":
o.extend(["SGT"])
elif code.value == 'uclampgt':
o.extend(["GT", 'ISZERO'])
elif code.value == "clampgt":
o.extend(["SGT", 'ISZERO'])
elif code.value == "uclampge":
o.extend(["LT"])
elif code.value == "clampge":
o.extend(["SLT"])
o.extend(['PC', 'JUMPI'])
return o
# Signed clamp, check against upper and lower bounds
elif code.value in ('clamp', 'uclamp'):
comp1 = 'SGT' if code.value == 'clamp' else 'GT'
comp2 = 'SLT' if code.value == 'clamp' else 'LT'
o = compile_to_assembly(code.args[0], withargs, break_dest, height)
o.extend(compile_to_assembly(code.args[1], withargs, break_dest, height + 1))
o.extend(['DUP1'])
o.extend(compile_to_assembly(code.args[2], withargs, break_dest, height + 3))
o.extend(['SWAP1', comp1, 'PC', 'JUMPI'])
o.extend(['DUP1', 'SWAP2', 'SWAP1', comp2, 'PC', 'JUMPI'])
return o
# Checks that a value is nonzero
elif code.value == 'clamp_nonzero':
o = compile_to_assembly(code.args[0], withargs, break_dest, height)
o.extend(['DUP1', 'ISZERO', 'PC', 'JUMPI'])
return o
# SHA3 a single value
elif code.value == 'sha3_32':
o = compile_to_assembly(code.args[0], withargs, break_dest, height)
o.extend(['PUSH1', MemoryPositions.FREE_VAR_SPACE, 'MSTORE', 'PUSH1', 32, 'PUSH1', MemoryPositions.FREE_VAR_SPACE, 'SHA3'])
return o
# <= operator
elif code.value == 'le':
return compile_to_assembly(LLLnode.from_list(['iszero', ['gt', code.args[0], code.args[1]]]), withargs, break_dest, height)
# >= operator
elif code.value == 'ge':
return compile_to_assembly(LLLnode.from_list(['iszero', ['lt', code.args[0], code.args[1]]]), withargs, break_dest, height)
    # signed <= operator
elif code.value == 'sle':
return compile_to_assembly(LLLnode.from_list(['iszero', ['sgt', code.args[0], code.args[1]]]), withargs, break_dest, height)
    # signed >= operator
elif code.value == 'sge':
return compile_to_assembly(LLLnode.from_list(['iszero', ['slt', code.args[0], code.args[1]]]), withargs, break_dest, height)
# != operator
elif code.value == 'ne':
return compile_to_assembly(LLLnode.from_list(['iszero', ['eq', code.args[0], code.args[1]]]), withargs, break_dest, height)
# eg. 95 -> 96, 96 -> 96, 97 -> 128
elif code.value == "ceil32":
return compile_to_assembly(LLLnode.from_list(['with', '_val', code.args[0],
['sub', ['add', '_val', 31],
['mod', ['sub', '_val', 1], 32]]]), withargs, break_dest, height)
else:
raise Exception("Weird code element: " + repr(code))
# Assembles assembly into EVM
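# Two passes: the first records in posmap the byte offset of every '_sym_*'
# label that sits in front of a JUMPDEST/BLANK (any other symbol reference
# costs 3 bytes, a PUSH2 plus the two offset bytes); the second emits the
# bytecode, replacing each label reference with PUSH2 <resolved offset>.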
def assembly_to_evm(assembly):
posmap = {}
sub_assemblies = []
codes = []
pos = 0
for i, item in enumerate(assembly):
if is_symbol(item):
if assembly[i + 1] == 'JUMPDEST' or assembly[i + 1] == 'BLANK':
posmap[item] = pos # Don't increment position as the symbol itself doesn't go into code
else:
pos += 3 # PUSH2 highbits lowbits
elif item == 'BLANK':
pos += 0
elif isinstance(item, list):
c = assembly_to_evm(item)
sub_assemblies.append(item)
codes.append(c)
pos += len(c)
else:
pos += 1
posmap['_sym_codeend'] = pos
o = b''
for i, item in enumerate(assembly):
if is_symbol(item):
if assembly[i + 1] != 'JUMPDEST' and assembly[i + 1] != 'BLANK':
o += bytes([PUSH_OFFSET + 2, posmap[item] // 256, posmap[item] % 256])
elif isinstance(item, int):
o += bytes([item])
elif isinstance(item, str) and item.upper() in opcodes:
o += bytes([opcodes[item.upper()][0]])
elif item[:4] == 'PUSH':
o += bytes([PUSH_OFFSET + int(item[4:])])
elif item[:3] == 'DUP':
o += bytes([DUP_OFFSET + int(item[3:])])
elif item[:4] == 'SWAP':
o += bytes([SWAP_OFFSET + int(item[4:])])
elif item == 'BLANK':
pass
elif isinstance(item, list):
for i in range(len(sub_assemblies)):
if sub_assemblies[i] == item:
o += codes[i]
break
else:
raise Exception("Weird symbol in assembly: " + str(item))
assert len(o) == pos
return o
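# A minimal end-to-end sketch (illustrative, assuming a hand-built LLL node):
#
#   node = LLLnode.from_list(['seq', ['mstore', 0, 1], ['return', 0, 32]])
#   asm = compile_to_assembly(node)
#   bytecode = assembly_to_evm(asm)
#
# compile_to_assembly yields opcode names, PUSH arguments and '_sym_*' labels;
# assembly_to_evm resolves those labels into byte offsets and returns raw bytes.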
|
gyp | b4044ad265eca070aca6275afb63908fe645cf45 | # GRPC GYP build file
# This file has been automatically generated from a template file.
# Please look at the templates directory instead.
# This file can be regenerated from the template by running
# tools/buildgen/generate_projects.sh
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
{
'variables': {
# The openssl and zlib dependencies must be passed in as variables
# defined in an included gypi file, usually common.gypi.
'openssl_gyp_target%': 'Please Define openssl_gyp_target variable',
'zlib_gyp_target%': 'Please Define zlib_gyp_target variable',
'grpc_gcov%': 'false',
'grpc_alpine%': 'false',
},
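  # Example (hypothetical) definitions from an included common.gypi:
  #   'variables': {
  #     'openssl_gyp_target': 'third_party/openssl/openssl.gyp:openssl',
  #     'zlib_gyp_target': 'third_party/zlib/zlib.gyp:zlib',
  #   },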
'target_defaults': {
'configurations': {
'Debug': {
'cflags': [
'-O0',
],
'defines': [
'_DEBUG',
'DEBUG',
],
},
'Release': {
'cflags': [
'-O2',
'-Wframe-larger-than=16384',
],
'defines': [
'NDEBUG',
],
},
},
'cflags': [
'-g',
'-Wall',
'-Wextra',
'-DOSATOMIC_USE_INLINED=1',
'-Ithird_party/abseil-cpp',
'-Ithird_party/re2',
'-Ithird_party/upb',
'-Isrc/core/ext/upb-generated',
'-Isrc/core/ext/upbdefs-generated',
'-Ithird_party/xxhash',
],
'ldflags': [
'-g',
],
'cflags_c': [
'-Werror',
'-std=c99',
],
'cflags_cc': [
'-Werror',
'-std=c++11',
],
'include_dirs': [
'.',
'../..',
'include',
],
'defines': [
'GRPC_ARES=0',
],
'dependencies': [
'<(openssl_gyp_target)',
'<(zlib_gyp_target)',
],
'conditions': [
['grpc_gcov=="true"', {
'cflags': [
'-O0',
'-fprofile-arcs',
'-ftest-coverage',
'-Wno-return-type',
],
'defines': [
'_DEBUG',
'DEBUG',
'GPR_GCOV',
],
'ldflags': [
'-fprofile-arcs',
'-ftest-coverage',
'-rdynamic',
'-lstdc++',
],
}],
['grpc_alpine=="true"', {
'defines': [
'GPR_MUSL_LIBC_COMPAT'
]
}],
['OS == "win"', {
'defines': [
'_WIN32_WINNT=0x0600',
'WIN32_LEAN_AND_MEAN',
'_HAS_EXCEPTIONS=0',
'UNICODE',
'_UNICODE',
'NOMINMAX',
],
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeLibrary': 1, # static debug
}
},
"libraries": [
"ws2_32"
]
}],
['OS == "mac"', {
'xcode_settings': {
'OTHER_CFLAGS': [
'-g',
'-Wall',
'-Wextra',
'-DOSATOMIC_USE_INLINED=1',
'-Ithird_party/abseil-cpp',
'-Ithird_party/re2',
'-Ithird_party/upb',
'-Isrc/core/ext/upb-generated',
'-Isrc/core/ext/upbdefs-generated',
'-Ithird_party/xxhash',
],
'OTHER_CPLUSPLUSFLAGS': [
'-g',
'-Wall',
'-Wextra',
'-DOSATOMIC_USE_INLINED=1',
'-Ithird_party/abseil-cpp',
'-Ithird_party/re2',
'-Ithird_party/upb',
'-Isrc/core/ext/upb-generated',
'-Isrc/core/ext/upbdefs-generated',
'-Ithird_party/xxhash',
'-stdlib=libc++',
'-std=c++11',
'-Wno-error=deprecated-declarations',
],
},
}]
]
},
'targets': [
{
'target_name': 'address_sorting',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/address_sorting/address_sorting.c',
'third_party/address_sorting/address_sorting_posix.c',
'third_party/address_sorting/address_sorting_windows.c',
],
},
{
'target_name': 'end2end_nosec_tests',
'type': 'static_library',
'dependencies': [
'grpc_test_util',
],
'sources': [
'test/core/end2end/cq_verifier.cc',
'test/core/end2end/data/client_certs.cc',
'test/core/end2end/data/server1_cert.cc',
'test/core/end2end/data/server1_key.cc',
'test/core/end2end/data/test_root_cert.cc',
'test/core/end2end/end2end_nosec_tests.cc',
'test/core/end2end/end2end_test_utils.cc',
'test/core/end2end/fixtures/http_proxy_fixture.cc',
'test/core/end2end/fixtures/local_util.cc',
'test/core/end2end/fixtures/proxy.cc',
'test/core/end2end/tests/authority_not_supported.cc',
'test/core/end2end/tests/bad_hostname.cc',
'test/core/end2end/tests/bad_ping.cc',
'test/core/end2end/tests/binary_metadata.cc',
'test/core/end2end/tests/call_host_override.cc',
'test/core/end2end/tests/cancel_after_accept.cc',
'test/core/end2end/tests/cancel_after_client_done.cc',
'test/core/end2end/tests/cancel_after_invoke.cc',
'test/core/end2end/tests/cancel_after_round_trip.cc',
'test/core/end2end/tests/cancel_before_invoke.cc',
'test/core/end2end/tests/cancel_in_a_vacuum.cc',
'test/core/end2end/tests/cancel_with_status.cc',
'test/core/end2end/tests/channelz.cc',
'test/core/end2end/tests/client_streaming.cc',
'test/core/end2end/tests/compressed_payload.cc',
'test/core/end2end/tests/connectivity.cc',
'test/core/end2end/tests/default_host.cc',
'test/core/end2end/tests/disappearing_server.cc',
'test/core/end2end/tests/empty_batch.cc',
'test/core/end2end/tests/filter_causes_close.cc',
'test/core/end2end/tests/filter_context.cc',
'test/core/end2end/tests/filter_init_fails.cc',
'test/core/end2end/tests/filter_latency.cc',
'test/core/end2end/tests/filter_status_code.cc',
'test/core/end2end/tests/graceful_server_shutdown.cc',
'test/core/end2end/tests/high_initial_seqno.cc',
'test/core/end2end/tests/hpack_size.cc',
'test/core/end2end/tests/idempotent_request.cc',
'test/core/end2end/tests/invoke_large_request.cc',
'test/core/end2end/tests/keepalive_timeout.cc',
'test/core/end2end/tests/large_metadata.cc',
'test/core/end2end/tests/max_concurrent_streams.cc',
'test/core/end2end/tests/max_connection_age.cc',
'test/core/end2end/tests/max_connection_idle.cc',
'test/core/end2end/tests/max_message_length.cc',
'test/core/end2end/tests/negative_deadline.cc',
'test/core/end2end/tests/no_error_on_hotpath.cc',
'test/core/end2end/tests/no_logging.cc',
'test/core/end2end/tests/no_op.cc',
'test/core/end2end/tests/payload.cc',
'test/core/end2end/tests/ping.cc',
'test/core/end2end/tests/ping_pong_streaming.cc',
'test/core/end2end/tests/proxy_auth.cc',
'test/core/end2end/tests/registered_call.cc',
'test/core/end2end/tests/request_with_flags.cc',
'test/core/end2end/tests/request_with_payload.cc',
'test/core/end2end/tests/resource_quota_server.cc',
'test/core/end2end/tests/retry.cc',
'test/core/end2end/tests/retry_cancel_during_delay.cc',
'test/core/end2end/tests/retry_cancel_with_multiple_send_batches.cc',
'test/core/end2end/tests/retry_cancellation.cc',
'test/core/end2end/tests/retry_disabled.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_delay.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_initial_batch.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_subsequent_batch.cc',
'test/core/end2end/tests/retry_lb_drop.cc',
'test/core/end2end/tests/retry_lb_fail.cc',
'test/core/end2end/tests/retry_non_retriable_status.cc',
'test/core/end2end/tests/retry_non_retriable_status_before_recv_trailing_metadata_started.cc',
'test/core/end2end/tests/retry_per_attempt_recv_timeout.cc',
'test/core/end2end/tests/retry_per_attempt_recv_timeout_on_last_attempt.cc',
'test/core/end2end/tests/retry_recv_initial_metadata.cc',
'test/core/end2end/tests/retry_recv_message.cc',
'test/core/end2end/tests/retry_recv_trailing_metadata_error.cc',
'test/core/end2end/tests/retry_send_op_fails.cc',
'test/core/end2end/tests/retry_server_pushback_delay.cc',
'test/core/end2end/tests/retry_server_pushback_disabled.cc',
'test/core/end2end/tests/retry_streaming.cc',
'test/core/end2end/tests/retry_streaming_after_commit.cc',
'test/core/end2end/tests/retry_streaming_succeeds_before_replay_finished.cc',
'test/core/end2end/tests/retry_throttled.cc',
'test/core/end2end/tests/retry_too_many_attempts.cc',
'test/core/end2end/tests/server_finishes_request.cc',
'test/core/end2end/tests/server_streaming.cc',
'test/core/end2end/tests/shutdown_finishes_calls.cc',
'test/core/end2end/tests/shutdown_finishes_tags.cc',
'test/core/end2end/tests/simple_cacheable_request.cc',
'test/core/end2end/tests/simple_delayed_request.cc',
'test/core/end2end/tests/simple_metadata.cc',
'test/core/end2end/tests/simple_request.cc',
'test/core/end2end/tests/stream_compression_compressed_payload.cc',
'test/core/end2end/tests/stream_compression_payload.cc',
'test/core/end2end/tests/stream_compression_ping_pong_streaming.cc',
'test/core/end2end/tests/streaming_error_response.cc',
'test/core/end2end/tests/trailing_metadata.cc',
'test/core/end2end/tests/workaround_cronet_compression.cc',
'test/core/end2end/tests/write_buffering.cc',
'test/core/end2end/tests/write_buffering_at_end.cc',
'test/core/util/test_lb_policies.cc',
],
},
{
'target_name': 'end2end_tests',
'type': 'static_library',
'dependencies': [
'grpc_test_util',
],
'sources': [
'test/core/end2end/cq_verifier.cc',
'test/core/end2end/data/client_certs.cc',
'test/core/end2end/data/server1_cert.cc',
'test/core/end2end/data/server1_key.cc',
'test/core/end2end/data/test_root_cert.cc',
'test/core/end2end/end2end_test_utils.cc',
'test/core/end2end/end2end_tests.cc',
'test/core/end2end/fixtures/http_proxy_fixture.cc',
'test/core/end2end/fixtures/local_util.cc',
'test/core/end2end/fixtures/proxy.cc',
'test/core/end2end/tests/authority_not_supported.cc',
'test/core/end2end/tests/bad_hostname.cc',
'test/core/end2end/tests/bad_ping.cc',
'test/core/end2end/tests/binary_metadata.cc',
'test/core/end2end/tests/call_creds.cc',
'test/core/end2end/tests/call_host_override.cc',
'test/core/end2end/tests/cancel_after_accept.cc',
'test/core/end2end/tests/cancel_after_client_done.cc',
'test/core/end2end/tests/cancel_after_invoke.cc',
'test/core/end2end/tests/cancel_after_round_trip.cc',
'test/core/end2end/tests/cancel_before_invoke.cc',
'test/core/end2end/tests/cancel_in_a_vacuum.cc',
'test/core/end2end/tests/cancel_with_status.cc',
'test/core/end2end/tests/channelz.cc',
'test/core/end2end/tests/client_streaming.cc',
'test/core/end2end/tests/compressed_payload.cc',
'test/core/end2end/tests/connectivity.cc',
'test/core/end2end/tests/default_host.cc',
'test/core/end2end/tests/disappearing_server.cc',
'test/core/end2end/tests/empty_batch.cc',
'test/core/end2end/tests/filter_causes_close.cc',
'test/core/end2end/tests/filter_context.cc',
'test/core/end2end/tests/filter_init_fails.cc',
'test/core/end2end/tests/filter_latency.cc',
'test/core/end2end/tests/filter_status_code.cc',
'test/core/end2end/tests/graceful_server_shutdown.cc',
'test/core/end2end/tests/high_initial_seqno.cc',
'test/core/end2end/tests/hpack_size.cc',
'test/core/end2end/tests/idempotent_request.cc',
'test/core/end2end/tests/invoke_large_request.cc',
'test/core/end2end/tests/keepalive_timeout.cc',
'test/core/end2end/tests/large_metadata.cc',
'test/core/end2end/tests/max_concurrent_streams.cc',
'test/core/end2end/tests/max_connection_age.cc',
'test/core/end2end/tests/max_connection_idle.cc',
'test/core/end2end/tests/max_message_length.cc',
'test/core/end2end/tests/negative_deadline.cc',
'test/core/end2end/tests/no_error_on_hotpath.cc',
'test/core/end2end/tests/no_logging.cc',
'test/core/end2end/tests/no_op.cc',
'test/core/end2end/tests/payload.cc',
'test/core/end2end/tests/ping.cc',
'test/core/end2end/tests/ping_pong_streaming.cc',
'test/core/end2end/tests/proxy_auth.cc',
'test/core/end2end/tests/registered_call.cc',
'test/core/end2end/tests/request_with_flags.cc',
'test/core/end2end/tests/request_with_payload.cc',
'test/core/end2end/tests/resource_quota_server.cc',
'test/core/end2end/tests/retry.cc',
'test/core/end2end/tests/retry_cancel_during_delay.cc',
'test/core/end2end/tests/retry_cancel_with_multiple_send_batches.cc',
'test/core/end2end/tests/retry_cancellation.cc',
'test/core/end2end/tests/retry_disabled.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_delay.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_initial_batch.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_subsequent_batch.cc',
'test/core/end2end/tests/retry_lb_drop.cc',
'test/core/end2end/tests/retry_lb_fail.cc',
'test/core/end2end/tests/retry_non_retriable_status.cc',
'test/core/end2end/tests/retry_non_retriable_status_before_recv_trailing_metadata_started.cc',
'test/core/end2end/tests/retry_per_attempt_recv_timeout.cc',
'test/core/end2end/tests/retry_per_attempt_recv_timeout_on_last_attempt.cc',
'test/core/end2end/tests/retry_recv_initial_metadata.cc',
'test/core/end2end/tests/retry_recv_message.cc',
'test/core/end2end/tests/retry_recv_trailing_metadata_error.cc',
'test/core/end2end/tests/retry_send_op_fails.cc',
'test/core/end2end/tests/retry_server_pushback_delay.cc',
'test/core/end2end/tests/retry_server_pushback_disabled.cc',
'test/core/end2end/tests/retry_streaming.cc',
'test/core/end2end/tests/retry_streaming_after_commit.cc',
'test/core/end2end/tests/retry_streaming_succeeds_before_replay_finished.cc',
'test/core/end2end/tests/retry_throttled.cc',
'test/core/end2end/tests/retry_too_many_attempts.cc',
'test/core/end2end/tests/server_finishes_request.cc',
'test/core/end2end/tests/server_streaming.cc',
'test/core/end2end/tests/shutdown_finishes_calls.cc',
'test/core/end2end/tests/shutdown_finishes_tags.cc',
'test/core/end2end/tests/simple_cacheable_request.cc',
'test/core/end2end/tests/simple_delayed_request.cc',
'test/core/end2end/tests/simple_metadata.cc',
'test/core/end2end/tests/simple_request.cc',
'test/core/end2end/tests/stream_compression_compressed_payload.cc',
'test/core/end2end/tests/stream_compression_payload.cc',
'test/core/end2end/tests/stream_compression_ping_pong_streaming.cc',
'test/core/end2end/tests/streaming_error_response.cc',
'test/core/end2end/tests/trailing_metadata.cc',
'test/core/end2end/tests/workaround_cronet_compression.cc',
'test/core/end2end/tests/write_buffering.cc',
'test/core/end2end/tests/write_buffering_at_end.cc',
'test/core/util/test_lb_policies.cc',
],
},
{
'target_name': 'gpr',
'type': 'static_library',
'dependencies': [
'absl/base:base',
'absl/memory:memory',
'absl/status:status',
'absl/strings:cord',
'absl/strings:str_format',
'absl/strings:strings',
'absl/synchronization:synchronization',
'absl/time:time',
'absl/types:optional',
'upb',
],
'sources': [
'src/core/ext/upb-generated/google/api/annotations.upb.c',
'src/core/ext/upb-generated/google/api/expr/v1alpha1/checked.upb.c',
'src/core/ext/upb-generated/google/api/expr/v1alpha1/syntax.upb.c',
'src/core/ext/upb-generated/google/api/http.upb.c',
'src/core/ext/upb-generated/google/protobuf/any.upb.c',
'src/core/ext/upb-generated/google/protobuf/duration.upb.c',
'src/core/ext/upb-generated/google/protobuf/empty.upb.c',
'src/core/ext/upb-generated/google/protobuf/struct.upb.c',
'src/core/ext/upb-generated/google/protobuf/timestamp.upb.c',
'src/core/ext/upb-generated/google/protobuf/wrappers.upb.c',
'src/core/ext/upb-generated/google/rpc/status.upb.c',
'src/core/lib/gpr/alloc.cc',
'src/core/lib/gpr/atm.cc',
'src/core/lib/gpr/cpu_iphone.cc',
'src/core/lib/gpr/cpu_linux.cc',
'src/core/lib/gpr/cpu_posix.cc',
'src/core/lib/gpr/cpu_windows.cc',
'src/core/lib/gpr/env_linux.cc',
'src/core/lib/gpr/env_posix.cc',
'src/core/lib/gpr/env_windows.cc',
'src/core/lib/gpr/log.cc',
'src/core/lib/gpr/log_android.cc',
'src/core/lib/gpr/log_linux.cc',
'src/core/lib/gpr/log_posix.cc',
'src/core/lib/gpr/log_windows.cc',
'src/core/lib/gpr/murmur_hash.cc',
'src/core/lib/gpr/string.cc',
'src/core/lib/gpr/string_posix.cc',
'src/core/lib/gpr/string_util_windows.cc',
'src/core/lib/gpr/string_windows.cc',
'src/core/lib/gpr/sync.cc',
'src/core/lib/gpr/sync_abseil.cc',
'src/core/lib/gpr/sync_posix.cc',
'src/core/lib/gpr/sync_windows.cc',
'src/core/lib/gpr/time.cc',
'src/core/lib/gpr/time_posix.cc',
'src/core/lib/gpr/time_precise.cc',
'src/core/lib/gpr/time_windows.cc',
'src/core/lib/gpr/tls_pthread.cc',
'src/core/lib/gpr/tmpfile_msys.cc',
'src/core/lib/gpr/tmpfile_posix.cc',
'src/core/lib/gpr/tmpfile_windows.cc',
'src/core/lib/gpr/wrap_memcpy.cc',
'src/core/lib/gprpp/arena.cc',
'src/core/lib/gprpp/examine_stack.cc',
'src/core/lib/gprpp/fork.cc',
'src/core/lib/gprpp/global_config_env.cc',
'src/core/lib/gprpp/host_port.cc',
'src/core/lib/gprpp/mpscq.cc',
'src/core/lib/gprpp/stat_posix.cc',
'src/core/lib/gprpp/stat_windows.cc',
'src/core/lib/gprpp/status_helper.cc',
'src/core/lib/gprpp/thd_posix.cc',
'src/core/lib/gprpp/thd_windows.cc',
'src/core/lib/gprpp/time_util.cc',
'src/core/lib/profiling/basic_timers.cc',
'src/core/lib/profiling/stap_timers.cc',
],
},
{
'target_name': 'grpc',
'type': 'static_library',
'dependencies': [
'absl/container:flat_hash_map',
'absl/container:inlined_vector',
'absl/functional:bind_front',
'absl/status:statusor',
'gpr',
'address_sorting',
],
'sources': [
'src/core/ext/filters/census/grpc_context.cc',
'src/core/ext/filters/client_channel/backend_metric.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_channelz.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/config_selector.cc',
'src/core/ext/filters/client_channel/dynamic_filters.cc',
'src/core/ext/filters/client_channel/global_subchannel_pool.cc',
'src/core/ext/filters/client_channel/health/health_check_client.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy/address_filtering.cc',
'src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/priority/priority.cc',
'src/core/ext/filters/client_channel/lb_policy/ring_hash/ring_hash.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/filters/client_channel/lb_policy/weighted_target/weighted_target.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/cds.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_impl.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_manager.cc',
'src/core/ext/filters/client_channel/lb_policy/xds/xds_cluster_resolver.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/local_subchannel_pool.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_event_engine.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_event_engine.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/client_channel/resolver/google_c2p/google_c2p_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/client_channel/resolver/xds/xds_resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/resolver_result_parsing.cc',
'src/core/ext/filters/client_channel/retry_filter.cc',
'src/core/ext/filters/client_channel/retry_service_config.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/server_address.cc',
'src/core/ext/filters/client_channel/service_config.cc',
'src/core/ext/filters/client_channel/service_config_channel_arg_filter.cc',
'src/core/ext/filters/client_channel/service_config_parser.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_pool_interface.cc',
'src/core/ext/filters/client_idle/client_idle_filter.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/filters/fault_injection/fault_injection_filter.cc',
'src/core/ext/filters/fault_injection/service_config_parser.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/client_authority_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/message_compress/message_decompress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/ext/filters/max_age/max_age_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc',
'src/core/ext/filters/workarounds/workaround_utils.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/transport/chttp2/client/authority.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc',
'src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc',
'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_slice_allocator.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/context_list.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/upb-generated/envoy/admin/v3/config_dump.upb.c',
'src/core/ext/upb-generated/envoy/annotations/deprecation.upb.c',
'src/core/ext/upb-generated/envoy/annotations/resource.upb.c',
'src/core/ext/upb-generated/envoy/config/accesslog/v3/accesslog.upb.c',
'src/core/ext/upb-generated/envoy/config/bootstrap/v3/bootstrap.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/circuit_breaker.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/cluster.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/filter.upb.c',
'src/core/ext/upb-generated/envoy/config/cluster/v3/outlier_detection.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/address.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/backoff.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/base.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/config_source.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/event_service_config.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/extension.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/grpc_service.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/health_check.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/http_uri.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/protocol.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/proxy_protocol.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/socket_option.upb.c',
'src/core/ext/upb-generated/envoy/config/core/v3/substitution_format_string.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/endpoint.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/endpoint_components.upb.c',
'src/core/ext/upb-generated/envoy/config/endpoint/v3/load_report.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/api_listener.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/listener.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/listener_components.upb.c',
'src/core/ext/upb-generated/envoy/config/listener/v3/udp_listener_config.upb.c',
'src/core/ext/upb-generated/envoy/config/metrics/v3/stats.upb.c',
'src/core/ext/upb-generated/envoy/config/overload/v3/overload.upb.c',
'src/core/ext/upb-generated/envoy/config/rbac/v3/rbac.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/route.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/route_components.upb.c',
'src/core/ext/upb-generated/envoy/config/route/v3/scoped_route.upb.c',
'src/core/ext/upb-generated/envoy/config/trace/v3/http_tracer.upb.c',
'src/core/ext/upb-generated/envoy/extensions/clusters/aggregate/v3/cluster.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/common/fault/v3/fault.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/http/fault/v3/fault.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/http/router/v3/router.upb.c',
'src/core/ext/upb-generated/envoy/extensions/filters/network/http_connection_manager/v3/http_connection_manager.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/cert.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/common.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/secret.upb.c',
'src/core/ext/upb-generated/envoy/extensions/transport_sockets/tls/v3/tls.upb.c',
'src/core/ext/upb-generated/envoy/service/cluster/v3/cds.upb.c',
'src/core/ext/upb-generated/envoy/service/discovery/v3/ads.upb.c',
'src/core/ext/upb-generated/envoy/service/discovery/v3/discovery.upb.c',
'src/core/ext/upb-generated/envoy/service/endpoint/v3/eds.upb.c',
'src/core/ext/upb-generated/envoy/service/listener/v3/lds.upb.c',
'src/core/ext/upb-generated/envoy/service/load_stats/v3/lrs.upb.c',
'src/core/ext/upb-generated/envoy/service/route/v3/rds.upb.c',
'src/core/ext/upb-generated/envoy/service/route/v3/srds.upb.c',
'src/core/ext/upb-generated/envoy/service/status/v3/csds.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/metadata.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/node.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/number.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/path.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/regex.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/string.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/struct.upb.c',
'src/core/ext/upb-generated/envoy/type/matcher/v3/value.upb.c',
'src/core/ext/upb-generated/envoy/type/metadata/v3/metadata.upb.c',
'src/core/ext/upb-generated/envoy/type/tracing/v3/custom_tag.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/http.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/percent.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/range.upb.c',
'src/core/ext/upb-generated/envoy/type/v3/semantic_version.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/altscontext.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/handshaker.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/gcp/transport_security_common.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c',
'src/core/ext/upb-generated/udpa/annotations/migrate.upb.c',
'src/core/ext/upb-generated/udpa/annotations/security.upb.c',
'src/core/ext/upb-generated/udpa/annotations/sensitive.upb.c',
'src/core/ext/upb-generated/udpa/annotations/status.upb.c',
'src/core/ext/upb-generated/udpa/annotations/versioning.upb.c',
'src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.c',
'src/core/ext/upb-generated/udpa/type/v1/typed_struct.upb.c',
'src/core/ext/upb-generated/validate/validate.upb.c',
'src/core/ext/upb-generated/xds/core/v3/authority.upb.c',
'src/core/ext/upb-generated/xds/core/v3/collection_entry.upb.c',
'src/core/ext/upb-generated/xds/core/v3/context_params.upb.c',
'src/core/ext/upb-generated/xds/core/v3/resource.upb.c',
'src/core/ext/upb-generated/xds/core/v3/resource_locator.upb.c',
'src/core/ext/upb-generated/xds/core/v3/resource_name.upb.c',
'src/core/ext/upbdefs-generated/envoy/admin/v3/config_dump.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/annotations/deprecation.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/annotations/resource.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/accesslog/v3/accesslog.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/bootstrap/v3/bootstrap.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/cluster/v3/circuit_breaker.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/cluster/v3/cluster.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/cluster/v3/filter.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/cluster/v3/outlier_detection.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/address.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/backoff.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/base.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/config_source.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/event_service_config.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/extension.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/grpc_service.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/health_check.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/http_uri.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/protocol.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/proxy_protocol.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/socket_option.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/core/v3/substitution_format_string.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/endpoint.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/endpoint_components.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/endpoint/v3/load_report.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/api_listener.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/listener.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/listener_components.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/listener/v3/udp_listener_config.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/metrics/v3/stats.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/overload/v3/overload.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/route/v3/route.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/route/v3/route_components.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/route/v3/scoped_route.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/config/trace/v3/http_tracer.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/clusters/aggregate/v3/cluster.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/common/fault/v3/fault.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/http/fault/v3/fault.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/http/router/v3/router.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/filters/network/http_connection_manager/v3/http_connection_manager.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/cert.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/common.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/secret.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/extensions/transport_sockets/tls/v3/tls.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/cluster/v3/cds.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/discovery/v3/ads.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/discovery/v3/discovery.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/endpoint/v3/eds.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/listener/v3/lds.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/load_stats/v3/lrs.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/route/v3/rds.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/route/v3/srds.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/service/status/v3/csds.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/metadata.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/node.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/number.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/path.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/regex.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/string.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/struct.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/matcher/v3/value.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/metadata/v3/metadata.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/tracing/v3/custom_tag.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/http.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/percent.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/range.upbdefs.c',
'src/core/ext/upbdefs-generated/envoy/type/v3/semantic_version.upbdefs.c',
'src/core/ext/upbdefs-generated/google/api/annotations.upbdefs.c',
'src/core/ext/upbdefs-generated/google/api/http.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/any.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/duration.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/empty.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/struct.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/timestamp.upbdefs.c',
'src/core/ext/upbdefs-generated/google/protobuf/wrappers.upbdefs.c',
'src/core/ext/upbdefs-generated/google/rpc/status.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/migrate.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/security.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/sensitive.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/status.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/annotations/versioning.upbdefs.c',
'src/core/ext/upbdefs-generated/udpa/type/v1/typed_struct.upbdefs.c',
'src/core/ext/upbdefs-generated/validate/validate.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/authority.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/collection_entry.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/context_params.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/resource.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/resource_locator.upbdefs.c',
'src/core/ext/upbdefs-generated/xds/core/v3/resource_name.upbdefs.c',
'src/core/ext/xds/certificate_provider_registry.cc',
'src/core/ext/xds/certificate_provider_store.cc',
'src/core/ext/xds/file_watcher_certificate_provider_factory.cc',
'src/core/ext/xds/xds_api.cc',
'src/core/ext/xds/xds_bootstrap.cc',
'src/core/ext/xds/xds_certificate_provider.cc',
'src/core/ext/xds/xds_client.cc',
'src/core/ext/xds/xds_client_stats.cc',
'src/core/ext/xds/xds_http_fault_filter.cc',
'src/core/ext/xds/xds_http_filters.cc',
'src/core/ext/xds/xds_server_config_fetcher.cc',
'src/core/lib/address_utils/parse_address.cc',
'src/core/lib/address_utils/sockaddr_utils.cc',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channelz.cc',
'src/core/lib/channel/channelz_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_args.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/debug/trace.cc',
'src/core/lib/event_engine/endpoint_config.cc',
'src/core/lib/event_engine/event_engine.cc',
'src/core/lib/event_engine/sockaddr.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/httpcli_security_connector.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/buffer_list.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/cfstream_handle.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/dualstack_socket_posix.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_cfstream.cc',
'src/core/lib/iomgr/endpoint_pair_event_engine.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/error_cfstream.cc',
'src/core/lib/iomgr/ev_apple.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/event_engine/closure.cc',
'src/core/lib/iomgr/event_engine/endpoint.cc',
'src/core/lib/iomgr/event_engine/iomgr.cc',
'src/core/lib/iomgr/event_engine/pollset.cc',
'src/core/lib/iomgr/event_engine/resolved_address_internal.cc',
'src/core/lib/iomgr/event_engine/resolver.cc',
'src/core/lib/iomgr/event_engine/tcp.cc',
'src/core/lib/iomgr/event_engine/timer.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/executor/mpmcqueue.cc',
'src/core/lib/iomgr/executor/threadpool.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_posix.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc',
'src/core/lib/iomgr/internal_errqueue.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_posix_cfstream.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_cfstream.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/iomgr/work_serializer.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_util.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/matchers/matchers.cc',
'src/core/lib/security/authorization/authorization_policy_provider_vtable.cc',
'src/core/lib/security/authorization/evaluate_args.cc',
'src/core/lib/security/context/security_context.cc',
'src/core/lib/security/credentials/alts/alts_credentials.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc',
'src/core/lib/security/credentials/composite/composite_credentials.cc',
'src/core/lib/security/credentials/credentials.cc',
'src/core/lib/security/credentials/credentials_metadata.cc',
'src/core/lib/security/credentials/external/aws_external_account_credentials.cc',
'src/core/lib/security/credentials/external/aws_request_signer.cc',
'src/core/lib/security/credentials/external/external_account_credentials.cc',
'src/core/lib/security/credentials/external/file_external_account_credentials.cc',
'src/core/lib/security/credentials/external/url_external_account_credentials.cc',
'src/core/lib/security/credentials/fake/fake_credentials.cc',
'src/core/lib/security/credentials/google_default/credentials_generic.cc',
'src/core/lib/security/credentials/google_default/google_default_credentials.cc',
'src/core/lib/security/credentials/iam/iam_credentials.cc',
'src/core/lib/security/credentials/insecure/insecure_credentials.cc',
'src/core/lib/security/credentials/jwt/json_token.cc',
'src/core/lib/security/credentials/jwt/jwt_credentials.cc',
'src/core/lib/security/credentials/jwt/jwt_verifier.cc',
'src/core/lib/security/credentials/local/local_credentials.cc',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc',
'src/core/lib/security/credentials/plugin/plugin_credentials.cc',
'src/core/lib/security/credentials/ssl/ssl_credentials.cc',
'src/core/lib/security/credentials/tls/grpc_tls_certificate_distributor.cc',
'src/core/lib/security/credentials/tls/grpc_tls_certificate_provider.cc',
'src/core/lib/security/credentials/tls/grpc_tls_credentials_options.cc',
'src/core/lib/security/credentials/tls/tls_credentials.cc',
'src/core/lib/security/credentials/tls/tls_utils.cc',
'src/core/lib/security/credentials/xds/xds_credentials.cc',
'src/core/lib/security/security_connector/alts/alts_security_connector.cc',
'src/core/lib/security/security_connector/fake/fake_security_connector.cc',
'src/core/lib/security/security_connector/insecure/insecure_security_connector.cc',
'src/core/lib/security/security_connector/load_system_roots_fallback.cc',
'src/core/lib/security/security_connector/load_system_roots_linux.cc',
'src/core/lib/security/security_connector/local/local_security_connector.cc',
'src/core/lib/security/security_connector/security_connector.cc',
'src/core/lib/security/security_connector/ssl/ssl_security_connector.cc',
'src/core/lib/security/security_connector/ssl_utils.cc',
'src/core/lib/security/security_connector/ssl_utils_config.cc',
'src/core/lib/security/security_connector/tls/tls_security_connector.cc',
'src/core/lib/security/transport/client_auth_filter.cc',
'src/core/lib/security/transport/secure_endpoint.cc',
'src/core/lib/security/transport/security_handshaker.cc',
'src/core/lib/security/transport/server_auth_filter.cc',
'src/core/lib/security/transport/tsi_error.cc',
'src/core/lib/security/util/json_util.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/init.cc',
'src/core/lib/surface/init_secure.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/authority_override.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/uri/uri_parser.cc',
'src/core/plugin_registry/grpc_plugin_registry.cc',
'src/core/tsi/alts/crypt/aes_gcm.cc',
'src/core/tsi/alts/crypt/gsec.cc',
'src/core/tsi/alts/frame_protector/alts_counter.cc',
'src/core/tsi/alts/frame_protector/alts_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_frame_protector.cc',
'src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc',
'src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/frame_handler.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_client.cc',
'src/core/tsi/alts/handshaker/alts_shared_resource.cc',
'src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc',
'src/core/tsi/alts/handshaker/alts_tsi_utils.cc',
'src/core/tsi/alts/handshaker/transport_security_common_api.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc',
'src/core/tsi/fake_transport_security.cc',
'src/core/tsi/local_transport_security.cc',
'src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc',
'src/core/tsi/ssl/session_cache/ssl_session_cache.cc',
'src/core/tsi/ssl/session_cache/ssl_session_openssl.cc',
'src/core/tsi/ssl_transport_security.cc',
'src/core/tsi/transport_security.cc',
'src/core/tsi/transport_security_grpc.cc',
],
},
{
'target_name': 'grpc_csharp_ext',
'type': 'static_library',
'dependencies': [
'grpc',
],
'sources': [
'src/csharp/ext/grpc_csharp_ext.c',
],
},
{
'target_name': 'grpc_test_util',
'type': 'static_library',
'dependencies': [
'absl/debugging:failure_signal_handler',
'absl/debugging:stacktrace',
'absl/debugging:symbolize',
'grpc',
],
'sources': [
'test/core/util/cmdline.cc',
'test/core/util/fuzzer_util.cc',
'test/core/util/grpc_profiler.cc',
'test/core/util/histogram.cc',
'test/core/util/memory_counters.cc',
'test/core/util/mock_endpoint.cc',
'test/core/util/parse_hexstring.cc',
'test/core/util/passthru_endpoint.cc',
'test/core/util/port.cc',
'test/core/util/port_isolated_runtime_environment.cc',
'test/core/util/port_server_client.cc',
'test/core/util/reconnect_server.cc',
'test/core/util/resolve_localhost_ip46.cc',
'test/core/util/slice_splitter.cc',
'test/core/util/stack_tracer.cc',
'test/core/util/subprocess_posix.cc',
'test/core/util/subprocess_windows.cc',
'test/core/util/test_config.cc',
'test/core/util/test_tcp_server.cc',
'test/core/util/tls_utils.cc',
'test/core/util/tracer_util.cc',
'test/core/util/trickle_endpoint.cc',
],
},
{
'target_name': 'grpc_test_util_unsecure',
'type': 'static_library',
'dependencies': [
'absl/debugging:failure_signal_handler',
'absl/debugging:stacktrace',
'absl/debugging:symbolize',
'grpc_unsecure',
],
'sources': [
'test/core/util/cmdline.cc',
'test/core/util/fuzzer_util.cc',
'test/core/util/grpc_profiler.cc',
'test/core/util/histogram.cc',
'test/core/util/memory_counters.cc',
'test/core/util/mock_endpoint.cc',
'test/core/util/parse_hexstring.cc',
'test/core/util/passthru_endpoint.cc',
'test/core/util/port.cc',
'test/core/util/port_isolated_runtime_environment.cc',
'test/core/util/port_server_client.cc',
'test/core/util/reconnect_server.cc',
'test/core/util/resolve_localhost_ip46.cc',
'test/core/util/slice_splitter.cc',
'test/core/util/stack_tracer.cc',
'test/core/util/subprocess_posix.cc',
'test/core/util/subprocess_windows.cc',
'test/core/util/test_config.cc',
'test/core/util/test_tcp_server.cc',
'test/core/util/tracer_util.cc',
'test/core/util/trickle_endpoint.cc',
],
},
{
'target_name': 'grpc_unsecure',
'type': 'static_library',
'dependencies': [
'absl/container:flat_hash_map',
'absl/container:inlined_vector',
'absl/functional:bind_front',
'absl/status:statusor',
'gpr',
'address_sorting',
],
'sources': [
'src/core/ext/filters/census/grpc_context.cc',
'src/core/ext/filters/client_channel/backend_metric.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_channelz.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/config_selector.cc',
'src/core/ext/filters/client_channel/dynamic_filters.cc',
'src/core/ext/filters/client_channel/global_subchannel_pool.cc',
'src/core/ext/filters/client_channel/health/health_check_client.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy/address_filtering.cc',
'src/core/ext/filters/client_channel/lb_policy/child_policy_handler.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_balancer_addresses.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/priority/priority.cc',
'src/core/ext/filters/client_channel/lb_policy/ring_hash/ring_hash.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/filters/client_channel/lb_policy/weighted_target/weighted_target.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/local_subchannel_pool.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_event_engine.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_event_engine.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_libuv.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_windows.cc',
'src/core/ext/filters/client_channel/resolver/dns/dns_resolver_selection.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/resolver_result_parsing.cc',
'src/core/ext/filters/client_channel/retry_filter.cc',
'src/core/ext/filters/client_channel/retry_service_config.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/server_address.cc',
'src/core/ext/filters/client_channel/service_config.cc',
'src/core/ext/filters/client_channel/service_config_channel_arg_filter.cc',
'src/core/ext/filters/client_channel/service_config_parser.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_pool_interface.cc',
'src/core/ext/filters/client_idle/client_idle_filter.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/filters/fault_injection/fault_injection_filter.cc',
'src/core/ext/filters/fault_injection/service_config_parser.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/client_authority_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/message_compress/message_decompress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/ext/filters/max_age/max_age_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc',
'src/core/ext/filters/workarounds/workaround_utils.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/transport/chttp2/client/authority.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_slice_allocator.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/context_list.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/upb-generated/src/proto/grpc/health/v1/health.upb.c',
'src/core/ext/upb-generated/src/proto/grpc/lb/v1/load_balancer.upb.c',
'src/core/ext/upb-generated/udpa/data/orca/v1/orca_load_report.upb.c',
'src/core/ext/upb-generated/validate/validate.upb.c',
'src/core/lib/address_utils/parse_address.cc',
'src/core/lib/address_utils/sockaddr_utils.cc',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channelz.cc',
'src/core/lib/channel/channelz_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_args.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/debug/trace.cc',
'src/core/lib/event_engine/endpoint_config.cc',
'src/core/lib/event_engine/event_engine.cc',
'src/core/lib/event_engine/sockaddr.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/buffer_list.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/cfstream_handle.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/dualstack_socket_posix.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_cfstream.cc',
'src/core/lib/iomgr/endpoint_pair_event_engine.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/error_cfstream.cc',
'src/core/lib/iomgr/ev_apple.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/event_engine/closure.cc',
'src/core/lib/iomgr/event_engine/endpoint.cc',
'src/core/lib/iomgr/event_engine/iomgr.cc',
'src/core/lib/iomgr/event_engine/pollset.cc',
'src/core/lib/iomgr/event_engine/resolved_address_internal.cc',
'src/core/lib/iomgr/event_engine/resolver.cc',
'src/core/lib/iomgr/event_engine/tcp.cc',
'src/core/lib/iomgr/event_engine/timer.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/executor/mpmcqueue.cc',
'src/core/lib/iomgr/executor/threadpool.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_posix.cc',
'src/core/lib/iomgr/grpc_if_nametoindex_unsupported.cc',
'src/core/lib/iomgr/internal_errqueue.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_posix_cfstream.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_cfstream.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/iomgr/work_serializer.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_util.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/security/authorization/authorization_policy_provider_null_vtable.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/init.cc',
'src/core/lib/surface/init_unsecure.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/authority_override.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/uri/uri_parser.cc',
'src/core/plugin_registry/grpc_unsecure_plugin_registry.cc',
],
},
{
'target_name': 'benchmark_helpers',
'type': 'static_library',
'dependencies': [
'benchmark',
'grpc++_unsecure',
'grpc_test_util_unsecure',
'grpc++_test_config',
],
'sources': [
'src/proto/grpc/testing/echo.proto',
'src/proto/grpc/testing/echo_messages.proto',
'src/proto/grpc/testing/simple_messages.proto',
'test/cpp/microbenchmarks/helpers.cc',
],
},
{
'target_name': 'grpc++',
'type': 'static_library',
'dependencies': [
'grpc',
],
'sources': [
'src/cpp/client/channel_cc.cc',
'src/cpp/client/client_callback.cc',
'src/cpp/client/client_context.cc',
'src/cpp/client/client_interceptor.cc',
'src/cpp/client/create_channel.cc',
'src/cpp/client/create_channel_internal.cc',
'src/cpp/client/create_channel_posix.cc',
'src/cpp/client/credentials_cc.cc',
'src/cpp/client/insecure_credentials.cc',
'src/cpp/client/secure_credentials.cc',
'src/cpp/client/xds_credentials.cc',
'src/cpp/codegen/codegen_init.cc',
'src/cpp/common/alarm.cc',
'src/cpp/common/auth_property_iterator.cc',
'src/cpp/common/channel_arguments.cc',
'src/cpp/common/channel_filter.cc',
'src/cpp/common/completion_queue_cc.cc',
'src/cpp/common/core_codegen.cc',
'src/cpp/common/resource_quota_cc.cc',
'src/cpp/common/rpc_method.cc',
'src/cpp/common/secure_auth_context.cc',
'src/cpp/common/secure_channel_arguments.cc',
'src/cpp/common/secure_create_auth_context.cc',
'src/cpp/common/tls_certificate_provider.cc',
'src/cpp/common/tls_credentials_options.cc',
'src/cpp/common/tls_credentials_options_util.cc',
'src/cpp/common/validate_service_config.cc',
'src/cpp/common/version_cc.cc',
'src/cpp/server/async_generic_service.cc',
'src/cpp/server/channel_argument_option.cc',
'src/cpp/server/create_default_thread_pool.cc',
'src/cpp/server/dynamic_thread_pool.cc',
'src/cpp/server/external_connection_acceptor_impl.cc',
'src/cpp/server/health/default_health_check_service.cc',
'src/cpp/server/health/health_check_service.cc',
'src/cpp/server/health/health_check_service_server_builder_option.cc',
'src/cpp/server/insecure_server_credentials.cc',
'src/cpp/server/secure_server_credentials.cc',
'src/cpp/server/server_builder.cc',
'src/cpp/server/server_callback.cc',
'src/cpp/server/server_cc.cc',
'src/cpp/server/server_context.cc',
'src/cpp/server/server_credentials.cc',
'src/cpp/server/server_posix.cc',
'src/cpp/server/xds_server_credentials.cc',
'src/cpp/thread_manager/thread_manager.cc',
'src/cpp/util/byte_buffer_cc.cc',
'src/cpp/util/status.cc',
'src/cpp/util/string_ref.cc',
'src/cpp/util/time_cc.cc',
],
},
{
'target_name': 'grpc++_alts',
'type': 'static_library',
'dependencies': [
'grpc++',
],
'sources': [
'src/cpp/common/alts_context.cc',
'src/cpp/common/alts_util.cc',
],
},
{
'target_name': 'grpc++_error_details',
'type': 'static_library',
'dependencies': [
'grpc++',
],
'sources': [
'src/cpp/util/error_details.cc',
],
},
{
'target_name': 'grpc++_reflection',
'type': 'static_library',
'dependencies': [
'grpc++',
],
'sources': [
'src/proto/grpc/reflection/v1alpha/reflection.proto',
'src/cpp/ext/proto_server_reflection.cc',
'src/cpp/ext/proto_server_reflection_plugin.cc',
],
},
{
'target_name': 'grpc++_test',
'type': 'static_library',
'dependencies': [
'grpc++',
],
'sources': [
'src/cpp/client/channel_test_peer.cc',
],
},
{
'target_name': 'grpc++_test_config',
'type': 'static_library',
'dependencies': [
'absl/flags:parse',
'gpr',
],
'sources': [
'test/cpp/util/test_config_cc.cc',
],
},
{
'target_name': 'grpc++_test_util',
'type': 'static_library',
'dependencies': [
'absl/flags:flag',
'grpc++',
'grpc_test_util',
],
'sources': [
'test/core/end2end/data/client_certs.cc',
'test/core/end2end/data/server1_cert.cc',
'test/core/end2end/data/server1_key.cc',
'test/core/end2end/data/test_root_cert.cc',
'test/cpp/util/byte_buffer_proto_helper.cc',
'test/cpp/util/create_test_channel.cc',
'test/cpp/util/string_ref_helper.cc',
'test/cpp/util/subprocess.cc',
'test/cpp/util/test_credentials_provider.cc',
],
},
{
'target_name': 'grpc++_unsecure',
'type': 'static_library',
'dependencies': [
'grpc_unsecure',
],
'sources': [
'src/cpp/client/channel_cc.cc',
'src/cpp/client/client_callback.cc',
'src/cpp/client/client_context.cc',
'src/cpp/client/client_interceptor.cc',
'src/cpp/client/create_channel.cc',
'src/cpp/client/create_channel_internal.cc',
'src/cpp/client/create_channel_posix.cc',
'src/cpp/client/credentials_cc.cc',
'src/cpp/client/insecure_credentials.cc',
'src/cpp/codegen/codegen_init.cc',
'src/cpp/common/alarm.cc',
'src/cpp/common/channel_arguments.cc',
'src/cpp/common/channel_filter.cc',
'src/cpp/common/completion_queue_cc.cc',
'src/cpp/common/core_codegen.cc',
'src/cpp/common/insecure_create_auth_context.cc',
'src/cpp/common/resource_quota_cc.cc',
'src/cpp/common/rpc_method.cc',
'src/cpp/common/validate_service_config.cc',
'src/cpp/common/version_cc.cc',
'src/cpp/server/async_generic_service.cc',
'src/cpp/server/channel_argument_option.cc',
'src/cpp/server/create_default_thread_pool.cc',
'src/cpp/server/dynamic_thread_pool.cc',
'src/cpp/server/external_connection_acceptor_impl.cc',
'src/cpp/server/health/default_health_check_service.cc',
'src/cpp/server/health/health_check_service.cc',
'src/cpp/server/health/health_check_service_server_builder_option.cc',
'src/cpp/server/insecure_server_credentials.cc',
'src/cpp/server/server_builder.cc',
'src/cpp/server/server_callback.cc',
'src/cpp/server/server_cc.cc',
'src/cpp/server/server_context.cc',
'src/cpp/server/server_credentials.cc',
'src/cpp/server/server_posix.cc',
'src/cpp/thread_manager/thread_manager.cc',
'src/cpp/util/byte_buffer_cc.cc',
'src/cpp/util/status.cc',
'src/cpp/util/string_ref.cc',
'src/cpp/util/time_cc.cc',
],
},
{
'target_name': 'grpc_plugin_support',
'type': 'static_library',
'dependencies': [
],
'sources': [
'src/compiler/cpp_generator.cc',
'src/compiler/csharp_generator.cc',
'src/compiler/node_generator.cc',
'src/compiler/objective_c_generator.cc',
'src/compiler/php_generator.cc',
'src/compiler/python_generator.cc',
'src/compiler/ruby_generator.cc',
],
},
{
'target_name': 'grpcpp_channelz',
'type': 'static_library',
'dependencies': [
'grpc++',
],
'sources': [
'src/proto/grpc/channelz/channelz.proto',
'src/cpp/server/channelz/channelz_service.cc',
'src/cpp/server/channelz/channelz_service_plugin.cc',
],
},
{
'target_name': 'boringssl',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/boringssl-with-bazel/err_data.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bitstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_bool.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_dup.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_enum.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_gentm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_mbstr.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_object.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_octet.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_print.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_strnid.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_time.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_type.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utctm.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/a_utf8.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_lib.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn1_par.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/asn_pack.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_enum.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_int.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/f_string.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_dec.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_enc.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_fre.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_new.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_typ.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/tasn_utl.c',
'third_party/boringssl-with-bazel/src/crypto/asn1/time_support.c',
'third_party/boringssl-with-bazel/src/crypto/base64/base64.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio.c',
'third_party/boringssl-with-bazel/src/crypto/bio/bio_mem.c',
'third_party/boringssl-with-bazel/src/crypto/bio/connect.c',
'third_party/boringssl-with-bazel/src/crypto/bio/fd.c',
'third_party/boringssl-with-bazel/src/crypto/bio/file.c',
'third_party/boringssl-with-bazel/src/crypto/bio/hexdump.c',
'third_party/boringssl-with-bazel/src/crypto/bio/pair.c',
'third_party/boringssl-with-bazel/src/crypto/bio/printf.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket.c',
'third_party/boringssl-with-bazel/src/crypto/bio/socket_helper.c',
'third_party/boringssl-with-bazel/src/crypto/blake2/blake2.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/bn_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/bn_extra/convert.c',
'third_party/boringssl-with-bazel/src/crypto/buf/buf.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/asn1_compat.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/ber.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbb.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/cbs.c',
'third_party/boringssl-with-bazel/src/crypto/bytestring/unicode.c',
'third_party/boringssl-with-bazel/src/crypto/chacha/chacha.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/cipher_extra.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/derive_key.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesccm.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesctrhmac.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_aesgcmsiv.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_chacha20poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_null.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc2.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_rc4.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/e_tls.c',
'third_party/boringssl-with-bazel/src/crypto/cipher_extra/tls_cbc.c',
'third_party/boringssl-with-bazel/src/crypto/cmac/cmac.c',
'third_party/boringssl-with-bazel/src/crypto/conf/conf.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-aarch64-win.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-arm-linux.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-arm.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-intel.c',
'third_party/boringssl-with-bazel/src/crypto/cpu-ppc64le.c',
'third_party/boringssl-with-bazel/src/crypto/crypto.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/curve25519.c',
'third_party/boringssl-with-bazel/src/crypto/curve25519/spake25519.c',
'third_party/boringssl-with-bazel/src/crypto/dh_extra/dh_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/dh_extra/params.c',
'third_party/boringssl-with-bazel/src/crypto/digest_extra/digest_extra.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa.c',
'third_party/boringssl-with-bazel/src/crypto/dsa/dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/ec_derive.c',
'third_party/boringssl-with-bazel/src/crypto/ec_extra/hash_to_curve.c',
'third_party/boringssl-with-bazel/src/crypto/ecdh_extra/ecdh_extra.c',
'third_party/boringssl-with-bazel/src/crypto/ecdsa_extra/ecdsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/engine/engine.c',
'third_party/boringssl-with-bazel/src/crypto/err/err.c',
'third_party/boringssl-with-bazel/src/crypto/evp/digestsign.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/evp_ctx.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ec_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_ed25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519.c',
'third_party/boringssl-with-bazel/src/crypto/evp/p_x25519_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/evp/pbkdf.c',
'third_party/boringssl-with-bazel/src/crypto/evp/print.c',
'third_party/boringssl-with-bazel/src/crypto/evp/scrypt.c',
'third_party/boringssl-with-bazel/src/crypto/evp/sign.c',
'third_party/boringssl-with-bazel/src/crypto/ex_data.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/bcm.c',
'third_party/boringssl-with-bazel/src/crypto/fipsmodule/fips_shared_support.c',
'third_party/boringssl-with-bazel/src/crypto/hkdf/hkdf.c',
'third_party/boringssl-with-bazel/src/crypto/hpke/hpke.c',
'third_party/boringssl-with-bazel/src/crypto/hrss/hrss.c',
'third_party/boringssl-with-bazel/src/crypto/lhash/lhash.c',
'third_party/boringssl-with-bazel/src/crypto/mem.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj.c',
'third_party/boringssl-with-bazel/src/crypto/obj/obj_xref.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_all.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_info.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_lib.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_oth.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pk8.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pem/pem_xaux.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs7/pkcs7_x509.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8.c',
'third_party/boringssl-with-bazel/src/crypto/pkcs8/pkcs8_x509.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl-with-bazel/src/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl-with-bazel/src/crypto/pool/pool.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/deterministic.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/forkunsafe.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/fuchsia.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/passive.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/rand_extra.c',
'third_party/boringssl-with-bazel/src/crypto/rand_extra/windows.c',
'third_party/boringssl-with-bazel/src/crypto/rc4/rc4.c',
'third_party/boringssl-with-bazel/src/crypto/refcount_c11.c',
'third_party/boringssl-with-bazel/src/crypto/refcount_lock.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_asn1.c',
'third_party/boringssl-with-bazel/src/crypto/rsa_extra/rsa_print.c',
'third_party/boringssl-with-bazel/src/crypto/siphash/siphash.c',
'third_party/boringssl-with-bazel/src/crypto/stack/stack.c',
'third_party/boringssl-with-bazel/src/crypto/thread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_none.c',
'third_party/boringssl-with-bazel/src/crypto/thread_pthread.c',
'third_party/boringssl-with-bazel/src/crypto/thread_win.c',
'third_party/boringssl-with-bazel/src/crypto/trust_token/pmbtoken.c',
'third_party/boringssl-with-bazel/src/crypto/trust_token/trust_token.c',
'third_party/boringssl-with-bazel/src/crypto/trust_token/voprf.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_digest.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_sign.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_strex.c',
'third_party/boringssl-with-bazel/src/crypto/x509/a_verify.c',
'third_party/boringssl-with-bazel/src/crypto/x509/algorithm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/asn1_gen.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_dir.c',
'third_party/boringssl-with-bazel/src/crypto/x509/by_file.c',
'third_party/boringssl-with-bazel/src/crypto/x509/i2d_pr.c',
'third_party/boringssl-with-bazel/src/crypto/x509/rsa_pss.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/t_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_att.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_cmp.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_d2.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_def.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_ext.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_lu.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_obj.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_set.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_trs.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_txt.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_v3.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vfy.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509_vpm.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509cset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509rset.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x509spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_algor.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_all.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_attrib.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_crl.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_exten.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_name.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_pubkey.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_req.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_sig.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_spki.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_val.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509.c',
'third_party/boringssl-with-bazel/src/crypto/x509/x_x509a.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_cache.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_data.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_map.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_node.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/pcy_tree.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_akeya.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_alt.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_bitst.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_conf.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_cpols.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_crld.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_enum.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_extku.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_genn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ia5.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_info.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_int.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_lib.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ncons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_ocsp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pci.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcia.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pcons.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_prn.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_purp.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_skey.c',
'third_party/boringssl-with-bazel/src/crypto/x509v3/v3_utl.c',
'third_party/boringssl-with-bazel/src/ssl/bio_ssl.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_both.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/d1_srtp.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/dtls_record.cc',
'third_party/boringssl-with-bazel/src/ssl/encrypted_client_hello.cc',
'third_party/boringssl-with-bazel/src/ssl/handoff.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_client.cc',
'third_party/boringssl-with-bazel/src/ssl/handshake_server.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_both.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/s3_pkt.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_aead_ctx.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_asn1.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_buffer.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cert.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_cipher.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_file.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_key_share.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_privkey.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_session.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_stat.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_transcript.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_versions.cc',
'third_party/boringssl-with-bazel/src/ssl/ssl_x509.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/t1_lib.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_both.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_client.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_enc.cc',
'third_party/boringssl-with-bazel/src/ssl/tls13_server.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_method.cc',
'third_party/boringssl-with-bazel/src/ssl/tls_record.cc',
],
},
{
'target_name': 'boringssl_test_util',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/boringssl-with-bazel/src/crypto/test/file_test.cc',
'third_party/boringssl-with-bazel/src/crypto/test/malloc.cc',
'third_party/boringssl-with-bazel/src/crypto/test/test_util.cc',
'third_party/boringssl-with-bazel/src/crypto/test/wycheproof_util.cc',
],
},
{
'target_name': 'benchmark',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/benchmark/src/benchmark.cc',
'third_party/benchmark/src/benchmark_api_internal.cc',
'third_party/benchmark/src/benchmark_main.cc',
'third_party/benchmark/src/benchmark_name.cc',
'third_party/benchmark/src/benchmark_register.cc',
'third_party/benchmark/src/benchmark_runner.cc',
'third_party/benchmark/src/colorprint.cc',
'third_party/benchmark/src/commandlineflags.cc',
'third_party/benchmark/src/complexity.cc',
'third_party/benchmark/src/console_reporter.cc',
'third_party/benchmark/src/counter.cc',
'third_party/benchmark/src/csv_reporter.cc',
'third_party/benchmark/src/json_reporter.cc',
'third_party/benchmark/src/reporter.cc',
'third_party/benchmark/src/sleep.cc',
'third_party/benchmark/src/statistics.cc',
'third_party/benchmark/src/string_util.cc',
'third_party/benchmark/src/sysinfo.cc',
'third_party/benchmark/src/timers.cc',
],
},
{
'target_name': 're2',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/re2/re2/bitstate.cc',
'third_party/re2/re2/compile.cc',
'third_party/re2/re2/dfa.cc',
'third_party/re2/re2/filtered_re2.cc',
'third_party/re2/re2/mimics_pcre.cc',
'third_party/re2/re2/nfa.cc',
'third_party/re2/re2/onepass.cc',
'third_party/re2/re2/parse.cc',
'third_party/re2/re2/perl_groups.cc',
'third_party/re2/re2/prefilter.cc',
'third_party/re2/re2/prefilter_tree.cc',
'third_party/re2/re2/prog.cc',
'third_party/re2/re2/re2.cc',
'third_party/re2/re2/regexp.cc',
'third_party/re2/re2/set.cc',
'third_party/re2/re2/simplify.cc',
'third_party/re2/re2/stringpiece.cc',
'third_party/re2/re2/tostring.cc',
'third_party/re2/re2/unicode_casefold.cc',
'third_party/re2/re2/unicode_groups.cc',
'third_party/re2/util/pcre.cc',
'third_party/re2/util/rune.cc',
'third_party/re2/util/strutil.cc',
],
},
{
'target_name': 'upb',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/upb/upb/decode_fast.c',
'third_party/upb/upb/decode.c',
'third_party/upb/upb/def.c',
'third_party/upb/upb/encode.c',
'third_party/upb/upb/msg.c',
'third_party/upb/upb/reflection.c',
'third_party/upb/upb/table.c',
'third_party/upb/upb/text_encode.c',
'third_party/upb/upb/upb.c',
'src/core/ext/upb-generated/google/protobuf/descriptor.upb.c',
'src/core/ext/upbdefs-generated/google/protobuf/descriptor.upbdefs.c',
],
},
{
'target_name': 'z',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
],
},
]
}
|
py | b4044b5aa80d03e439c21b4e5f59f89503f5e37e | # adapted from https://github.com/open-mmlab/mmcv or
# https://github.com/open-mmlab/mmdetection
import torch
from flexinfer.misc import registry
from ..iou_calculators import build_iou_calculator
from .assign_result import AssignResult
from .base_assigner import BaseAssigner
@registry.register_module('bbox_assigner')
class ATSSAssigner(BaseAssigner):
"""Assign a corresponding gt bbox or background to each bbox.
    Each proposal will be assigned `0` or a positive integer
indicating the ground truth index.
- 0: negative sample, no assigned gt
- positive integer: positive sample, index (1-based) of assigned gt
Args:
        topk (int): number of bboxes selected on each pyramid level
"""
def __init__(self,
topk,
iou_calculator=dict(type='BboxOverlaps2D'),
ignore_iof_thr=-1):
self.topk = topk
self.iou_calculator = build_iou_calculator(iou_calculator)
self.ignore_iof_thr = ignore_iof_thr
# https://github.com/sfzhang15/ATSS/blob/master/atss_core/modeling/rpn/atss/loss.py
def assign(self,
bboxes,
num_level_bboxes,
gt_bboxes,
gt_bboxes_ignore=None,
gt_labels=None):
"""Assign gt to bboxes.
        The assignment is done in the following steps:
1. compute iou between all bbox (bbox of all pyramid levels) and gt
2. compute center distance between all bbox and gt
        3. on each pyramid level, for each gt, select the k bboxes whose
           centers are closest to the gt center, so k*l bboxes in total are
           selected as candidates for each gt
        4. get the corresponding iou for these candidates, and compute the
mean and std, set mean + std as the iou threshold
        5. select the candidates whose iou is greater than or equal to
           the threshold as positive
        6. limit the positive samples' centers to lie inside the gt
Args:
            bboxes (Tensor): Bounding boxes to be assigned, shape (n, 4).
num_level_bboxes (List): num of bboxes in each level
gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
labelled as `ignored`, e.g., crowd boxes in COCO.
gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
Returns:
:obj:`AssignResult`: The assign result.
"""
INF = 100000000
bboxes = bboxes[:, :4]
num_gt, num_bboxes = gt_bboxes.size(0), bboxes.size(0)
# compute iou between all bbox and gt
overlaps = self.iou_calculator(bboxes, gt_bboxes)
# assign 0 by default
assigned_gt_inds = overlaps.new_full((num_bboxes, ),
0,
dtype=torch.long)
if num_gt == 0 or num_bboxes == 0:
# No ground truth or boxes, return empty assignment
max_overlaps = overlaps.new_zeros((num_bboxes, ))
if num_gt == 0:
# No truth, assign everything to background
assigned_gt_inds[:] = 0
if gt_labels is None:
assigned_labels = None
else:
assigned_labels = overlaps.new_full((num_bboxes, ),
-1,
dtype=torch.long)
return AssignResult(
num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels)
# compute center distance between all bbox and gt
gt_cx = (gt_bboxes[:, 0] + gt_bboxes[:, 2]) / 2.0
gt_cy = (gt_bboxes[:, 1] + gt_bboxes[:, 3]) / 2.0
gt_points = torch.stack((gt_cx, gt_cy), dim=1)
bboxes_cx = (bboxes[:, 0] + bboxes[:, 2]) / 2.0
bboxes_cy = (bboxes[:, 1] + bboxes[:, 3]) / 2.0
bboxes_points = torch.stack((bboxes_cx, bboxes_cy), dim=1)
distances = (bboxes_points[:, None, :] -
gt_points[None, :, :]).pow(2).sum(-1).sqrt()
if (self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None
and gt_bboxes_ignore.numel() > 0 and bboxes.numel() > 0):
ignore_overlaps = self.iou_calculator(
bboxes, gt_bboxes_ignore, mode='iof')
ignore_max_overlaps, _ = ignore_overlaps.max(dim=1)
ignore_idxs = ignore_max_overlaps > self.ignore_iof_thr
distances[ignore_idxs, :] = INF
assigned_gt_inds[ignore_idxs] = -1
# Selecting candidates based on the center distance
candidate_idxs = []
start_idx = 0
for level, bboxes_per_level in enumerate(num_level_bboxes):
# on each pyramid level, for each gt,
# select k bbox whose center are closest to the gt center
end_idx = start_idx + bboxes_per_level
distances_per_level = distances[start_idx:end_idx, :]
_, topk_idxs_per_level = distances_per_level.topk(
self.topk, dim=0, largest=False)
candidate_idxs.append(topk_idxs_per_level + start_idx)
start_idx = end_idx
candidate_idxs = torch.cat(candidate_idxs, dim=0)
        # get the corresponding iou for these candidates, and compute the
# mean and std, set mean + std as the iou threshold
candidate_overlaps = overlaps[candidate_idxs, torch.arange(num_gt)]
overlaps_mean_per_gt = candidate_overlaps.mean(0)
overlaps_std_per_gt = candidate_overlaps.std(0)
overlaps_thr_per_gt = overlaps_mean_per_gt + overlaps_std_per_gt
is_pos = candidate_overlaps >= overlaps_thr_per_gt[None, :]
        # limit the positive samples' centers to lie inside the gt
for gt_idx in range(num_gt):
candidate_idxs[:, gt_idx] += gt_idx * num_bboxes
ep_bboxes_cx = bboxes_cx.view(1, -1).expand(
num_gt, num_bboxes).contiguous().view(-1)
ep_bboxes_cy = bboxes_cy.view(1, -1).expand(
num_gt, num_bboxes).contiguous().view(-1)
candidate_idxs = candidate_idxs.view(-1)
# calculate the left, top, right, bottom distance between positive
# bbox center and gt side
l_ = ep_bboxes_cx[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 0]
t_ = ep_bboxes_cy[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 1]
r_ = gt_bboxes[:, 2] - ep_bboxes_cx[candidate_idxs].view(-1, num_gt)
b_ = gt_bboxes[:, 3] - ep_bboxes_cy[candidate_idxs].view(-1, num_gt)
is_in_gts = torch.stack([l_, t_, r_, b_], dim=1).min(dim=1)[0] > 0.01
is_pos = is_pos & is_in_gts
# if an anchor box is assigned to multiple gts,
# the one with the highest IoU will be selected.
overlaps_inf = torch.full_like(overlaps,
-INF).t().contiguous().view(-1)
index = candidate_idxs.view(-1)[is_pos.view(-1)]
overlaps_inf[index] = overlaps.t().contiguous().view(-1)[index]
overlaps_inf = overlaps_inf.view(num_gt, -1).t()
max_overlaps, argmax_overlaps = overlaps_inf.max(dim=1)
assigned_gt_inds[
max_overlaps != -INF] = argmax_overlaps[max_overlaps != -INF] + 1
if gt_labels is not None:
assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
pos_inds = torch.nonzero(
assigned_gt_inds > 0, as_tuple=False).squeeze()
if pos_inds.numel() > 0:
assigned_labels[pos_inds] = gt_labels[
assigned_gt_inds[pos_inds] - 1]
else:
assigned_labels = None
return AssignResult(
num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels)
|
py | b4044c9e7991ca1c2d347c691e6f21525331d8d2 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from op_test import OpTest, skip_check_grad_ci
import unittest
from paddle.fluid.op import Operator
from paddle.fluid import compiler, Program, program_guard
# cast output to complex for numpy.linalg.eig
def cast_to_complex(input, output):
if (input.dtype == np.float32):
output = output.astype(np.complex64)
elif (input.dtype == np.float64):
output = output.astype(np.complex128)
return output
# define eig backward function for a single square matrix
def eig_backward(w, v, grad_w, grad_v):
v_tran = np.transpose(v)
v_tran = np.conjugate(v_tran)
w_conj = np.conjugate(w)
w_conj_l = w_conj.reshape(1, w.size)
w_conj_r = w_conj.reshape(w.size, 1)
w_conj_2d = w_conj_l - w_conj_r
vhgv = np.matmul(v_tran, grad_v)
real_vhgv = np.real(vhgv)
diag_real = real_vhgv.diagonal()
diag_2d = diag_real.reshape(1, w.size)
rhs = v * diag_2d
mid = np.matmul(v_tran, rhs)
result = vhgv - mid
res = np.divide(result, w_conj_2d)
row, col = np.diag_indices_from(res)
res[row, col] = 1.0
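# Note (added comment): the diagonal is fixed to 1.0, which coincides with the all-ones
# grad_w built in TestEigOp.init_grad below; the grad_w argument itself is never read here.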
tmp = np.matmul(res, v_tran)
dx = np.linalg.solve(v_tran, tmp)
return dx
class TestEigOp(OpTest):
def setUp(self):
paddle.enable_static()
paddle.device.set_device("cpu")
self.op_type = "eig"
self.__class__.op_type = self.op_type
self.init_input()
self.inputs = {'X': OpTest.np_dtype_to_fluid_dtype(self.x)}
self.outputs = {'Eigenvalues': self.out[0], 'Eigenvectors': self.out[1]}
def init_input(self):
self.set_dtype()
self.set_dims()
self.x = np.random.random(self.shape).astype(self.dtype)
self.out = np.linalg.eig(self.x)
self.out = (cast_to_complex(self.x, self.out[0]),
cast_to_complex(self.x, self.out[1]))
# for the real input, a customized checker is needed
def checker(self, outs):
actual_out_w = outs[0].flatten()
expect_out_w = self.out[0].flatten()
actual_out_v = outs[1].flatten()
expect_out_v = self.out[1].flatten()
length_w = len(expect_out_w)
act_w_real = np.sort(
np.array([np.abs(actual_out_w[i].real) for i in range(length_w)]))
act_w_imag = np.sort(
np.array([np.abs(actual_out_w[i].imag) for i in range(length_w)]))
exp_w_real = np.sort(
np.array([np.abs(expect_out_w[i].real) for i in range(length_w)]))
exp_w_imag = np.sort(
np.array([np.abs(expect_out_w[i].imag) for i in range(length_w)]))
for i in range(length_w):
self.assertTrue(
np.allclose(act_w_real[i], exp_w_real[i], 1e-6, 1e-5),
"The eigenvalues real part have diff: \nExpected " +
str(act_w_real[i]) + "\n" + "But got: " + str(exp_w_real[i]))
self.assertTrue(
np.allclose(act_w_imag[i], exp_w_imag[i], 1e-6, 1e-5),
"The eigenvalues image part have diff: \nExpected " +
str(act_w_imag[i]) + "\n" + "But got: " + str(exp_w_imag[i]))
length_v = len(expect_out_v)
act_v_real = np.sort(
np.array([np.abs(actual_out_v[i].real) for i in range(length_v)]))
act_v_imag = np.sort(
np.array([np.abs(actual_out_v[i].imag) for i in range(length_v)]))
exp_v_real = np.sort(
np.array([np.abs(expect_out_v[i].real) for i in range(length_v)]))
exp_v_imag = np.sort(
np.array([np.abs(expect_out_v[i].imag) for i in range(length_v)]))
for i in range(length_v):
self.assertTrue(
np.allclose(act_v_real[i], exp_v_real[i], 1e-6, 1e-5),
"The eigenvectors real part have diff: \nExpected " +
str(act_v_real[i]) + "\n" + "But got: " + str(exp_v_real[i]))
self.assertTrue(
np.allclose(act_v_imag[i], exp_v_imag[i], 1e-6, 1e-5),
"The eigenvectors image part have diff: \nExpected " +
str(act_v_imag[i]) + "\n" + "But got: " + str(exp_v_imag[i]))
def set_dtype(self):
self.dtype = np.complex64
def set_dims(self):
self.shape = (10, 10)
def init_grad(self):
# grad_w, grad_v complex dtype
gtype = self.dtype
if self.dtype == np.float32:
gtype = np.complex64
elif self.dtype == np.float64:
gtype = np.complex128
self.grad_w = np.ones(self.out[0].shape, gtype)
self.grad_v = np.ones(self.out[1].shape, gtype)
self.grad_x = eig_backward(self.out[0], self.out[1], self.grad_w,
self.grad_v)
def test_check_output(self):
self.check_output_with_place_customized(checker=self.checker,
place=core.CPUPlace())
def test_check_grad(self):
self.init_grad()
self.check_grad(['X'], ['Eigenvalues', 'Eigenvectors'],
user_defined_grads=[self.grad_x],
user_defined_grad_outputs=[self.grad_w, self.grad_v])
class TestComplex128(TestEigOp):
def set_dtype(self):
self.dtype = np.complex128
@skip_check_grad_ci(
reason=
"For float dtype, numpy.linalg.eig forward outputs real or complex when input is real, therefore the grad computation may be not the same with paddle.linalg.eig"
)
class TestDouble(TestEigOp):
def set_dtype(self):
self.dtype = np.float64
def test_check_grad(self):
pass
@skip_check_grad_ci(
reason=
"For float dtype, numpy.linalg.eig forward outputs real or complex when input is real, therefore the grad computation may be not the same with paddle.linalg.eig"
)
class TestEigBatchMarices(TestEigOp):
def set_dtype(self):
self.dtype = np.float64
def set_dims(self):
self.shape = (3, 10, 10)
def test_check_grad(self):
pass
@skip_check_grad_ci(
reason=
"For float dtype, numpy.linalg.eig forward outputs real or complex when input is real, therefore the grad computation may be not the same with paddle.linalg.eig"
)
class TestFloat(TestEigOp):
def set_dtype(self):
self.dtype = np.float32
def test_check_grad(self):
pass
class TestEigStatic(TestEigOp):
def test_check_output_with_place(self):
paddle.enable_static()
place = core.CPUPlace()
input_np = np.random.random([3, 3]).astype('complex')
expect_val, expect_vec = np.linalg.eig(input_np)
with fluid.program_guard(fluid.Program(), fluid.Program()):
input = fluid.data(name="input", shape=[3, 3], dtype='complex')
act_val, act_vec = paddle.linalg.eig(input)
exe = fluid.Executor(place)
fetch_val, fetch_vec = exe.run(fluid.default_main_program(),
feed={"input": input_np},
fetch_list=[act_val, act_vec])
self.assertTrue(
np.allclose(expect_val, fetch_val, 1e-6,
1e-6), "The eigen values have diff: \nExpected " +
str(expect_val) + "\n" + "But got: " + str(fetch_val))
self.assertTrue(
np.allclose(np.abs(expect_vec), np.abs(fetch_vec), 1e-6,
1e-6), "The eigen vectors have diff: \nExpected " +
str(np.abs(expect_vec)) + "\n" + "But got: " +
str(np.abs(fetch_vec)))
class TestEigWrongDimsError(unittest.TestCase):
def test_error(self):
paddle.device.set_device("cpu")
paddle.disable_static()
a = np.random.random((3)).astype('float32')
x = paddle.to_tensor(a)
self.assertRaises(ValueError, paddle.linalg.eig, x)
class TestEigNotSquareError(unittest.TestCase):
def test_error(self):
paddle.device.set_device("cpu")
paddle.disable_static()
a = np.random.random((1, 2, 3)).astype('float32')
x = paddle.to_tensor(a)
self.assertRaises(ValueError, paddle.linalg.eig, x)
class TestEigUnsupportedDtypeError(unittest.TestCase):
def test_error(self):
paddle.device.set_device("cpu")
paddle.disable_static()
a = (np.random.random((3, 3)) * 10).astype('int64')
x = paddle.to_tensor(a)
self.assertRaises(ValueError, paddle.linalg.eig, x)
if __name__ == "__main__":
unittest.main()
|
py | b4044cfac19a8dc17aa42aa795fe4451cb3f1dea | """
"""
import pathlib
import sys
import os
if sys.version_info < (3, 5, 2):
print("ChemPred required Python >= 3.5.2")
sys.exit(1)
SCILK_ROOT = os.path.abspath(os.environ.get('SCILK_ROOT') or
os.path.expanduser('~/.scilk'))
os.makedirs(SCILK_ROOT, exist_ok=True)
if __name__ == '__main__':
raise RuntimeError
|
py | b4044d693bea7b7f1aac5174343b2b9786df47a9 | C = input()
print(len(C) - 1 - C.count('0'))
|
py | b4044e6da9ff75b685c3a8870d62b9a3bbe66c03 |
from time import sleep
print("-"*40)
print("Seja bem vindo ao nosso menu interativo! ")
print("-"*40)
n1 = float(input("Digite o primeiro valor: "))
n2 =float(input("Digite o segundo valor: "))
opcao = 0
while opcao != 10:
print("""
[1] somar,
[2] subitrair,
[3] multiplicar,
[4] dividir,
[5]resto da divisão inteira,
[6] divisão inteira,
[7] porcentagem,
[8] maior valor,
[9]novo número,
[10] sair do programa
""")
print("-"*30)
opcao = float(input("Qual é sua opção? "))
print("-"*30)
if opcao == 1:
soma = n1 + n2
print(f"a soma entre {n1:.1f} + {n2:.1f} = {soma:.1f}")
elif opcao ==2:
subtrair = n1 - n2
print(f'A subtração entre {n1} - {n2} = {subtrair}')
elif opcao == 3:
multiplica = n1 * n2
print(f"A multiplicação entre {n1} x {n2} = {multiplica}")
elif opcao == 4:
divisão = n1 / n2
print(f"A divisão entre {n1} / {n2} = {divisão:.2f}")
elif opcao == 5:
restoDivisão = n1 % n2
print(f'o resto da divisão entre {n1} é {n2} = {restoDivisão}')
elif opcao == 6:
divisãoInteira = n1 // n2
print(f'a divisão inteira entre {n1} e {n2} e igual a {divisãoInteira}')
elif opcao == 7:
porcentagem = n1 * n2 /100
print(f"A porcentagem de {n1} % {n2} e igual a{porcentagem}")
elif opcao == 8:
if n1 > n2:
maior =n1
else:
maior = n2
print(f"entre {n1} e {n2} o maior é {maior}")
elif opcao == 9:
print('Informe os números novamente: ')
n1 = float(input("Digite o primeiro valor: "))
n2 = float(input("Digite o segundo valor: "))
elif opcao == 10:
print("Saindo do programa, até logo!")
else:
print("Valor inválido, tente novamente!")
print("-"*30)
sleep(1)
print('Fim do programa, volte sempre!')
|
py | b4044f40ff24c337ef192bff5b9daa482c0efb78 | from pynetest.expectations import expect
from pynetest.lib.matchers.matches_list_matcher import MatchesListMatcher
from pynetest.matchers import about
def test__matches_list_matcher__can_match():
expect([1, 2, 3, "banana"]).to_be(MatchesListMatcher([1, 2, 3, "banana"]))
def test__matches_list_matcher__when_lists_have_different_lengths__does_not_match():
expect([1, 2, 3, 4]).not_to_be(MatchesListMatcher([1, 2, 3, 4, 4]))
expect([1, 2, 3, 4, 4]).not_to_be(MatchesListMatcher([1, 2, 3, 4]))
def test__matches_list_matcher__when_lists_contain_different_items__does_not_match():
expect([1, 2, "banana"]).not_to_be(MatchesListMatcher([1, 3, "banana"]))
def test__matches_list_matcher__when_list_is_the_same_instance__does_not_match():
some_list = [1, 2, 3, 4]
expect(some_list).not_to_be(MatchesListMatcher(some_list))
def test__matches_list_matcher__when_comparing_empty_tuples__matches():
expect(()).to_be(MatchesListMatcher(()))
def test__matches_list_matcher__when_list_is_the_same_instance__explains_why_not():
some_list = [1, 2, 3, 4]
matcher = MatchesListMatcher(some_list)
matcher.matches(some_list)
expect(matcher.reason()).to_contain("it was the exact same instance")
def test__matches_list_matcher__supports_matchers_in_the_list():
expect([1]).to_be(MatchesListMatcher([about(1)]))
|
py | b4044ff171abefea8497bc9f03d480cc44a59dad | """Convert trajectory files to gsd format."""
import io
from pathlib import Path
import gsd.hoomd
import hoomd
import mbuild as mb
import numpy as np
import parmed
import unyt as u
def cassandra2gsd(
h_path,
xyz_path,
gsd_path,
species_list,
gsd_length_unit="nm",
cassandra_length_unit="angstrom",
):
"""Convert Cassandra H and xyz files to a gsd trajectory file.
Inputs:
h_path: path-like object (such as string or pathlib.Path) containing
the path to the Cassandra .H file containing the box dimensions.
xyz_path: path-like object (such as string or pathlib.Path) containing
the path to the Cassandra .xyz file containing the trajectory atom coordinates.
gsd_path: path-like object (such as string or pathlib.Path) containing
the path to the gsd file to be written.
species_list: list of parameterized single-molecule parmed Structure objects
with one element per species. This should be the same as the species_list
supplied to the MoSDeF Cassandra System and MoveSet objects used to generate
the trajectory.
gsd_length_unit: string or unyt.Unit representing the unit of length to be
used in the .gsd file.
cassandra_length_unit: string or unyt.Unit representing the unit of length
used in the .H and .xyz files.
"""
gsd_length_unit = u.Unit(gsd_length_unit)
if not (gsd_length_unit.dimensions is u.dimensions.length):
raise ValueError(
"gsd_length_unit must be a unit of length, not "
+ str(gsd_length_unit)
)
cassandra_length_unit = u.Unit(cassandra_length_unit)
if not (cassandra_length_unit.dimensions is u.dimensions.length):
raise ValueError(
"cassandra_length_unit must be a unit of length, not "
+ str(cassandra_length_unit)
)
length_factor = (1.0 * cassandra_length_unit).to_value(gsd_length_unit)
h_path = Path(h_path)
xyz_path = Path(xyz_path)
gsd_path = Path(gsd_path)
nspecies = len(species_list)
nmols_old = np.zeros(nspecies, dtype=int)
with h_path.open() as h_file, xyz_path.open() as xyz_file, gsd.hoomd.open(
gsd_path, "wb"
) as gsd_file:
while h_file.readline():
with io.StringIO() as buff:
for i in range(3):
buff.write(h_file.readline())
buff.seek(0)
lmat = np.loadtxt(buff) * length_factor
h_file.readline()
nspecies_in_box = int(h_file.readline().strip())
nmols = np.zeros(nspecies, dtype=int)
for i in range(nspecies_in_box):
mol_line_split = h_file.readline().strip().split()
nmols[int(mol_line_split[0]) - 1] = int(mol_line_split[1])
natoms = int(xyz_file.readline().strip())
step = int(xyz_file.readline().strip()[-1])
with io.StringIO() as buff:
for i in range(natoms):
buff.write(xyz_file.readline())
buff.seek(0)
xyz = np.loadtxt(buff, usecols=(1, 2, 3)) * length_factor
if any(nmols != nmols_old):
typed_system = parmed.Structure()
for i, parspec in enumerate(species_list):
n = nmols[i]
if n > 0:
typed_system += parspec * n
bonds = [
(bond.atom1.idx, bond.atom2.idx) for bond in typed_system.bonds
]
all_types = [a.type for a in typed_system.atoms]
types = list(set(all_types))
s = gsd.hoomd.Snapshot()
s.configuration.step = step
s.particles.N = natoms
s.particles.position = xyz
s.particles.types = types
s.particles.typeid = [types.index(i) for i in all_types]
s.bonds.N = len(bonds)
s.bonds.group = bonds
# must be upper triangular
# todo: verify whether matrix needs to be transposed for non-ortho boxes
box = hoomd.Box.from_matrix(lmat)
s.configuration.box = [
box.Lx,
box.Ly,
box.Lz,
box.xy,
box.xz,
box.yz,
]
s.validate()
gsd_file.append(s)
nmols_old = nmols
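# Hypothetical usage sketch (added comment; the paths and the `methane_typed` structure are
# illustrative, not from the original). species_list must mirror the one given to the
# MoSDeF Cassandra System/MoveSet that produced the trajectory:
# cassandra2gsd("box1.out.H", "box1.out.xyz", "traj.gsd", [methane_typed],
# gsd_length_unit="nm", cassandra_length_unit="angstrom")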
|
py | b404505977d56ce3caada84be90b4a2896f54bbf | class Solution:
def subarraysDivByK(self, A: List[int], K: int) -> int:
sums = [0]*(len(A)+1)
for i in range(0, len(A)):
sums[i+1] = (sums[i] + A[i]) % K
count = collections.Counter(sums)
return sum(v*(v-1)//2 for v in count.values())
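# Worked check (added comment): for A = [4, 5, 0, -2, -3, 1] and K = 5 the prefix sums
# mod K are [0, 4, 4, 4, 2, 4, 0]; the remainders appear 2, 4 and 1 times, giving
# 2*1//2 + 4*3//2 + 1*0//2 = 1 + 6 + 0 = 7 subarrays whose sum is divisible by K.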
|
py | b40450dfa36c7e7e8251a1b51c0237527cb7c3c6 | # -*- coding: utf-8 -*-
'''
Created on Fri Nov 9 09:03:03 2018
@author:
Visa Suomi
Turku University Hospital
November 2018
@description:
This function is used for plotting the performance metrics from a trained
Keras model
'''
#%% import necessary libraries
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
#%% define function
def plot_softmax_classification_performance(model, losses, cm_training, cm_validation):
# training logloss
f1 = plt.figure(figsize = (18, 4))
plt.subplot(1, 3, 1)
plt.title('Training and validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
if model == 'keras':
plt.plot(losses.epoch, np.array(losses.history['loss']),
label = 'Training')
plt.plot(losses.epoch, np.array(losses.history['val_loss']),
label = 'Validation')
if model == 'xgboost':
plt.plot(np.array(list(losses['training'].values())[0]), label = 'Training')
plt.plot(np.array(list(losses['validation'].values())[0]), label = 'Validation')
plt.grid()
plt.legend()
# confusion matrix (training)
# plt.figure()
plt.subplot(1, 3, 2)
ax = sns.heatmap(cm_training, cmap = 'bone_r')
ax.set_aspect(1)
plt.title('Confusion matrix (training)')
plt.ylabel('True class')
plt.xlabel('Predicted class')
# confusion matrix (validation)
# plt.figure()
plt.subplot(1, 3, 3)
ax = sns.heatmap(cm_validation, cmap = 'bone_r')
ax.set_aspect(1)
plt.title('Confusion matrix (validation)')
plt.ylabel('True class')
plt.xlabel('Predicted class')
return f1
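# Illustrative call (added comment, names are hypothetical): with a Keras History object
# `hist` returned by model.fit(...) and confusion matrices `cm_tr`, `cm_val`:
# fig = plot_softmax_classification_performance('keras', hist, cm_tr, cm_val)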
|
py | b404524c22798db628e3eb54e8a242cd71b84a99 | #default settings
import nuke
# TIME
nuke.knobDefault("FrameRange.label", "[value knob.first_frame] - [value knob.last_frame]")
nuke.knobDefault("TimeBlur.shutteroffset", "centered")
nuke.knobDefault("Retime.before", "continue")
nuke.knobDefault("Retime.after", "continue")
nuke.knobDefault("Retime.filter", "nearest")
nuke.knobDefault("Retime.label", "speed: [value speed]")
# CHANNELS
nuke.knobDefault("Remove.operation", "keep")
nuke.knobDefault("Remove.channels", "rbga")
nuke.knobDefault("Remove.label", "[value channels]")
nuke.knobDefault("Shuffle.label", "[value in]")
# COLOR CORRECT
nuke.knobDefault("EXPTool.mode", "0")
nuke.knobDefault("Gamma.channels", "rgba")
nuke.knobDefault("Colorspace.label", "[value colorspace_in] - [value colorspace_out]")
nuke.knobDefault("Colorspace.colorspace_out", "AlexaV3LogC")
nuke.knobDefault("Multiply.label", "[value value]")
nuke.knobDefault("Saturation.label", "[value saturation]")
nuke.knobDefault("Saturation.saturation", "0")
# CONVOLUTIONS
nuke.knobDefault("Denoise2.useGPUIfAvailable", "1")
nuke.knobDefault("Blur.channels", "rgba")
nuke.knobDefault("Blur.label", "[value size] px")
nuke.knobDefault("Dilate.channels", "rgba")
nuke.knobDefault("Dilate.label", "[value size] px")
nuke.knobDefault("FilterErode.label", "[value size] px")
nuke.knobDefault("Erode.label", "[value size] px")
nuke.knobDefault("Median.label", "[value size] px")
nuke.knobDefault("Soften.channels", "rgba")
nuke.knobDefault("Soften.label", "[value size] px")
nuke.knobDefault("Sharpen.channels", "rgb")
nuke.knobDefault("Sharpen.label", "[value size] px")
nuke.knobDefault("GodRays.channels", "rgba")
nuke.knobDefault("Defocus.channels", "rgba")
nuke.knobDefault("Defocus.label", "[value defocus]")
nuke.knobDefault("ZDefocus2.channels", "rgba")
nuke.knobDefault("VectorBlur.channels", "rgba")
# MERGE
nuke.knobDefault("Switch.which", "1")
nuke.knobDefault("Switch.label", "[value which]")
nuke.knobDefault("Dissolve.which", "1")
nuke.knobDefault("Dissolve.label", "[value which]")
nuke.knobDefault("Keymix.bbox", "1")
nuke.knobDefault("Keymix.channels", "rgba")
nuke.knobDefault("Merge.bbox", "3")
# TRANSFORM
nuke.knobDefault("Transform.shutteroffset", "centered")
nuke.knobDefault("TransformMasked.shutteroffset", "centered")
nuke.knobDefault("CornerPin2D.shutteroffset", "centered")
nuke.knobDefault("Tracker4.shutteroffset", "centered")
nuke.knobDefault("Card3D.shutteroffset", "centered")
nuke.knobDefault("Reconcile3D.shutteroffset", "centered")
nuke.knobDefault("Mirror.Horizontal", "1")
nuke.knobDefault("Mirror2.flop", "1")
# 3D
nuke.knobDefault("ScanlineRender.antialiasing", "3")
nuke.knobDefault("ScanlineRender.label", "[value samples]")
nuke.knobDefault("ScanlineRender.shutteroffset", "centered")
# MISC
nuke.knobDefault("Expression.label", "[knob expr3]")
nuke.knobDefault("Viewer.freezeGuiWhenPlayBack", "1")
nuke.knobDefault("NoOp.hide_input", "1")
nuke.knobDefault("DeepReformat.pbb", "1")
nuke.knobDefault("DeepReformat.resize", "none")
nuke.knobDefault("STMap.channels", "rgba")
nuke.knobDefault("STMap.uv", "rgb")
nuke.knobDefault("AdjBBox.numpixels", "100")
nuke.knobDefault("AdjBBox.label", "[value numpixels]")
nuke.knobDefault("Constant.channels", "rgba")
nuke.knobDefault("VectorDistort.label", "REF: [value reference_frame]")
nuke.menu("Nuke").addCommand('Scripts/Align', 'align.aligner()', "shift+alt+a")
nuke.menu("Nuke").addCommand('Scripts/Reveal File', 'sb_revealInFileBrowser.sb_revealInFileBrowser()')
nuke.menu("Nuke").addCommand('Scripts/Convert Corner Pin', 'sb_convertCornerPin.main()')
nuke.menu("Nuke").addCommand('Scripts/Matrix Inverter', 'matrixInverter.main()')
nuke.menu("Nuke").addCommand('Scripts/Mirror Nodes', 'mirrorNodes.main()')
nuke.menu("Nuke").addCommand('Scripts/Delete Viewers', 'sb_deleteViewers.sb_deleteViewers()')
nuke.menu("Nuke").addCommand('Scripts/PlateLink', 'Links.plate_link()', "shift+alt+v")
nuke.menu("Nuke").addCommand('Scripts/DeepLink', 'Links.deep_link()', "shift+alt+d")
nuke.menu("Nuke").addCommand('Scripts/CameraLink', 'Links.camera_link()', "shift+alt+c")
nuke.menu("Nuke").addCommand("Edit/Node/Align/Left", 'W_smartAlign.alignNodes("left")', "Alt+left", shortcutContext=2)
nuke.menu("Nuke").addCommand("Edit/Node/Align/Right", 'W_smartAlign.alignNodes("right")', "Alt+right",
shortcutContext=2)
nuke.menu("Nuke").addCommand("Edit/Node/Align/Up", 'W_smartAlign.alignNodes("up")', "Alt+up", shortcutContext=2)
nuke.menu("Nuke").addCommand("Edit/Node/Align/Down", 'W_smartAlign.alignNodes("down")', "Alt+down", shortcutContext=2)
# nuke.pluginAddPath("Scripts")
# import plateLink
# nuke.menu("Nuke").addCommand('Scripts/PlateLink', 'plateLink.plateLink()', "shift+alt+v")
# import deepLink
# nuke.menu("Nuke").addCommand('Scripts/DeepLink', 'deepLink.deepLink()', "shift+alt+d")
# import cameraLink
# nuke.menu("Nuke").addCommand('Scripts/CameraLink', 'cameraLink.cameraLink()', "shift+alt+c") |
py | b404538ed91073c2bb747f5d09c4d7ab69d21f97 | """ Background model. """
from django.db import models
from common.models import OwnedModel
class Feature(OwnedModel):
"""
Model for representing features
Attributes:
name: the name for this feature. Should be unique.
description: text description of the feature, typically describing its uses
prerequisite_strength: required strength score value. Defaults to 0 (No requirement)
prerequisite_dexterity: required dexterity score value. Defaults to 0 (No requirement)
prerequisite_constitution: required constitution score value. Defaults to 0 (No requirement)
prerequisite_intelligence: required intelligence score value. Defaults to 0 (No requirement)
prerequisite_wisdom: required wisdom score value. Defaults to 0 (No requirement)
prerequisite_charisma: required charisma score value. Defaults to 0 (No requirement)
"""
name = models.CharField(unique=True, max_length=255, db_index=True)
description = models.TextField()
# Ability feature prerequisite fields
prerequisite_strength = models.IntegerField(default=0, db_index=True)
prerequisite_dexterity = models.IntegerField(default=0, db_index=True)
prerequisite_constitution = models.IntegerField(default=0, db_index=True)
prerequisite_intelligence = models.IntegerField(default=0, db_index=True)
prerequisite_wisdom = models.IntegerField(default=0, db_index=True)
prerequisite_charisma = models.IntegerField(default=0, db_index=True)
def __str__(self):
return f'Feature: {self.name}'
|
py | b404539d34e254e2b4622fd7e1eda71d77392f90 | from manim import *
import math
a = 3
b = 4
angle = round(math.degrees(math.atan(a/b)))
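# Added note: with a = 3 and b = 4, atan(a/b) is about 36.87 degrees, so angle rounds to 37.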
class main(Scene):
def construct(self):
triangle = Polygon([0,0,0],[0,a,0], [b, 0, 0])
right_angle = RightAngle(Line(DOWN, UP), Line(LEFT, RIGHT))
a_brace = BraceBetweenPoints([0,a,0], [0,0,0])
a_text = MathTex("a=",a).next_to(a_brace, LEFT)
a_label = VGroup(a_brace, a_text)
b_brace = BraceBetweenPoints([0,0,0], [b,0,0])
b_text = MathTex("b=",b).next_to(b_brace, DOWN)
b_label = VGroup(b_brace, b_text)
# sohcahtoa has been split to allow for colouring of toa only
sohcah = Text("SOHCAH")
toa = Text("TOA").next_to(sohcah, RIGHT, buff=0.05)
sohcahtoa = VGroup(sohcah, toa).move_to([4.2,3,0])
ac_angle = Arc(1, 3*PI/2, math.atan(b/a), arc_center=[0,a,0])
ac_theta = MathTex(r"\theta").move_to(ac_angle.get_arc_center() + DOWN*1.2 + 1.2*RIGHT/2)
ac_angle_number = MathTex(str(angle) + r"^\circ").move_to(ac_theta)
ac_angle_group_init = VGroup(ac_angle, ac_theta)
ac_angle_group_end = VGroup(ac_angle, ac_angle_number)
all_triangle = VGroup(triangle, a_label, b_label, ac_angle, ac_theta, ac_angle_number, right_angle)
all_triangle.shift([-b-0.5, -a/2, 0])
tan_text_1 = MathTex(r"\tan(\theta) = \frac{a}{b}").move_to([4, 0, 0])
tan_text_2 = MathTex(r"\tan(\theta) = \frac{" + str(a) + "}{" + str(b) + "}").move_to([4, 0, 0])
tan_text_3 = MathTex(r"\theta = \arctan\left(\frac{" + str(a) + "}{" + str(b) + r"}\right)").move_to([4, 0, 0])
tan_text_4 = MathTex(r"\theta = " + str(angle) + r"^\circ").move_to([4, 0, 0])
self.play(Create(triangle))
self.play(Create(right_angle))
self.play(Write(a_label))
self.play(Write(b_label))
self.wait(1)
self.play(Write(ac_angle_group_init))
self.wait(1)
self.play(Write(sohcahtoa))
self.wait(1)
self.play(ApplyMethod(right_angle.scale, 1.5), run_time = 0.5)
self.play(ApplyMethod(right_angle.scale, 2/3), run_time = 0.5)
self.wait(1)
self.play(ApplyMethod(toa.set_color, RED))
self.wait(1)
self.play(TransformFromCopy(toa, tan_text_1))
self.play(ApplyMethod(toa.set_color, WHITE))
self.wait(1)
self.play(ReplacementTransform(tan_text_1, tan_text_2))
self.wait(1)
self.play(ReplacementTransform(tan_text_2, tan_text_3))
self.wait(1)
self.play(ReplacementTransform(tan_text_3, tan_text_4))
self.wait(1)
self.play(ReplacementTransform(ac_theta, ac_angle_number), ReplacementTransform(tan_text_4, ac_angle_number))
self.wait(3)
self.play(FadeOut(sohcahtoa, all_triangle, ac_angle_group_end, tan_text_4, a_label, b_label))
self.wait(1)
banner=ManimBanner().scale(0.7).shift(DOWN*0.75)
made_with=Text("Made With").next_to(banner, UP, buff=0.5)
self.play(Write(made_with), banner.create())
self.play(banner.expand())
self.wait(1)
|
py | b40453cf2984d49f65626a1718bf20abd7116d6e | # Copyright 1996-2020 Soft_illusion.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import rclpy
from webots_ros2_core.webots_node import WebotsNode
from math import cos, sin , tan , pi
from std_msgs.msg import Float64
from rclpy.time import Time
from geometry_msgs.msg import Twist
from sensor_msgs.msg import LaserScan
from geometry_msgs.msg import TransformStamped
from tf2_ros import StaticTransformBroadcaster,TransformBroadcaster
from nav_msgs.msg import Odometry
from geometry_msgs.msg import Point, Pose, Quaternion, Twist, Vector3
from webots_ros2_core.math_utils import euler_to_quaternion
DEFAULT_WHEEL_RADIUS = 0.1
DEFAULT_WHEEL_DISTANCE = 0.1
class ServiceNodeVelocity(WebotsNode):
def __init__(self, args):
super().__init__('slave_node', args)
# Enable 3 sensors
self.service_node_vel_timestep = 32
# Sensor section
self.sensor_timer = self.create_timer(
0.001 * self.service_node_vel_timestep, self.sensor_callback)
self.right_sensor = self.robot.getDistanceSensor('distance_sensor_right')
self.right_sensor.enable(self.service_node_vel_timestep)
self.sensor_publisher_right = self.create_publisher(Float64, 'right_IR', 1)
self.mid_sensor = self.robot.getDistanceSensor('distance_sensor_mid')
self.mid_sensor.enable(self.service_node_vel_timestep)
self.sensor_publisher_mid = self.create_publisher(Float64, 'mid_IR', 1)
self.left_sensor = self.robot.getDistanceSensor('distance_sensor_left')
self.left_sensor.enable(self.service_node_vel_timestep)
self.sensor_publisher_left = self.create_publisher(
Float64, 'left_IR', 1)
# Front wheels
self.left_motor_front = self.robot.getMotor('left_front_wheel')
self.left_motor_front.setPosition(float('inf'))
self.left_motor_front.setVelocity(0)
self.right_motor_front = self.robot.getMotor('right_front_wheel')
self.right_motor_front.setPosition(float('inf'))
self.right_motor_front.setVelocity(0)
# Rear wheels
self.left_motor_rear = self.robot.getMotor('left_rear_wheel')
self.left_motor_rear.setPosition(float('inf'))
self.left_motor_rear.setVelocity(0)
self.right_motor_rear = self.robot.getMotor('right_rear_wheel')
self.right_motor_rear.setPosition(float('inf'))
self.right_motor_rear.setVelocity(0)
# position sensors
self.left_wheel_sensor = self.robot.getPositionSensor('left_rear_position')
self.right_wheel_sensor = self.robot.getPositionSensor('right_rear_position')
self.left_wheel_sensor.enable(self.timestep)
self.right_wheel_sensor.enable(self.timestep)
self.motor_max_speed = self.left_motor_rear.getMaxVelocity()
# Create Subscriber
self.cmd_vel_subscriber = self.create_subscription(
Twist, 'cmd_vel', self.cmdVel_callback, 1)
# Create Lidar subscriber
self.lidar_sensor = self.robot.getLidar('lidar_sensor')
self.lidar_sensor.enable(self.service_node_vel_timestep)
self.laser_publisher = self.create_publisher(LaserScan, '/scan', 1)
##########################
self.x = 0.0
self.y = 0.0
self.th = 0.0
self.vx = 0.0
self.vy = 0.0
self.vth = 0.0
self.time_step=0.032
self.left_omega = 0.0
self.right_omega = 0.0
self.odom_pub = self.create_publisher(Odometry,"odom",1)
self.odom_timer = self.create_timer(self.time_step, self.odom_callback)
#########################
self.get_logger().info('Sensor enabled')
self.prev_angle = 0.0
self.prev_left_wheel_ticks = 0.0
self.prev_right_wheel_ticks = 0.0
self.last_time = 0.0
self.wheel_gap = 0.12 # in meter
self.wheel_radius = 0.04 # in meter
self.front_back = 0.1 # in meter
####################################
def odom_callback(self):
self.publish_odom()
def publish_odom(self):
stamp = Time(seconds=self.robot.getTime()).to_msg()
self.odom_broadcaster = TransformBroadcaster(self)
time_diff_s = self.robot.getTime() - self.last_time
# time_diff_s = self.time_step
left_wheel_ticks = self.left_wheel_sensor.getValue()
right_wheel_ticks = self.right_wheel_sensor.getValue()
if time_diff_s == 0.0:
return
# Calculate velocities
v_left_rad = (left_wheel_ticks - self.prev_left_wheel_ticks) / time_diff_s
v_right_rad = (right_wheel_ticks - self.prev_right_wheel_ticks) / time_diff_s
v_left = v_left_rad * self.wheel_radius
v_right = v_right_rad * self.wheel_radius
v = (v_left + v_right) / 2
omega = (v_right - v_left) / self.wheel_gap # (Vright - Vleft) / wheel_gap (track width), matching cmdVel_callback
# ################################################################
# angle_v = self.th+omega
# vx=v*cos(omega)
# vy=v*sin(omega)
# # self.get_logger().info('th = %f , v = %f , omega = %f' % (self.th ,v , omega) )
# dx = (cos(angle_v)*vx - sin(angle_v)*vy)*time_diff_s
# dy = (sin(angle_v)*vx + cos(angle_v)*vy)*time_diff_s
# dth = tan(omega)*vx*time_diff_s / self.front_back
# self.x += dx
# self.y += dy
# self.th += omega
# # Calculate position & angle
# # Fourth order Runge - Kutta
# # Reference: https://www.cs.cmu.edu/~16311/s07/labs/NXTLabs/Lab%203.html
# k00 = v * cos(self.prev_angle)
# k01 = v * sin(self.prev_angle)
# k02 = omega
# k10 = v * cos(self.prev_angle + time_diff_s * k02 / 2)
# k11 = v * sin(self.prev_angle + time_diff_s * k02 / 2)
# k12 = omega
# k20 = v * cos(self.prev_angle + time_diff_s * k12 / 2)
# k21 = v * sin(self.prev_angle + time_diff_s * k12 / 2)
# k22 = omega
# k30 = v * cos(self.prev_angle + time_diff_s * k22 / 2)
# k31 = v * sin(self.prev_angle + time_diff_s * k22 / 2)
# k32 = omega
self.x += v * cos(self.prev_angle)*time_diff_s
self.y += v * sin(self.prev_angle)*time_diff_s
self.th += omega
################################################################
# Reset section
self.prev_angle = self.th
self.prev_left_wheel_ticks = left_wheel_ticks
self.prev_right_wheel_ticks = right_wheel_ticks
self.last_time = self.robot.getTime()
# since all odometry is 6DOF we'll need a quaternion created from yaw
odom_quat=euler_to_quaternion(0.0, 0.0, self.th)
# first, we'll publish the transform over tf
odom_transform = TransformStamped()
odom_transform.header.stamp = stamp
odom_transform.header.frame_id = 'odom'
odom_transform.child_frame_id = 'base_link'
odom_transform.transform.rotation = odom_quat
odom_transform.transform.translation.x = self.x
odom_transform.transform.translation.y = self.y
odom_transform.transform.translation.z = 0.0
self.odom_broadcaster.sendTransform(odom_transform)
odom = Odometry()
odom.header.stamp = stamp
odom.header.frame_id = "odom"
odom.child_frame_id = "base_link"
# set the position
odom.pose.pose.position.x= self.x
odom.pose.pose.position.y= self.y
odom.pose.pose.orientation = odom_quat
# set the velocity
odom.twist.twist.linear.x = self.vx
odom.twist.twist.angular.z=self.vth
# publish the message
self.odom_pub.publish(odom)
###################################
def cmdVel_callback(self, msg):
self.vx = msg.linear.x
self.vth = msg.angular.z
left_speed = ((2.0 * msg.linear.x - msg.angular.z *
self.wheel_gap) / (2.0 * self.wheel_radius))
right_speed = ((2.0 * msg.linear.x + msg.angular.z *
self.wheel_gap) / (2.0 * self.wheel_radius))
left_speed = min(self.motor_max_speed,
max(-self.motor_max_speed, left_speed))
right_speed = min(self.motor_max_speed,
max(-self.motor_max_speed, right_speed))
self.left_omega = left_speed / (self.wheel_radius)
self.right_omega = right_speed / (self.wheel_radius)
self.left_motor_front.setVelocity(left_speed)
self.right_motor_front.setVelocity(right_speed)
self.left_motor_rear.setVelocity(left_speed)
self.right_motor_rear.setVelocity(right_speed)
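# Worked check (added comment, values illustrative): with wheel_radius = 0.04 m and
# wheel_gap = 0.12 m, cmd_vel (0.2 m/s, 0 rad/s) gives left = right = 0.2 / 0.04 = 5 rad/s,
# while (0 m/s, 1 rad/s) gives left = -1.5 rad/s and right = +1.5 rad/s.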
def sensor_callback(self):
# Publish distance sensor value
msg_right = Float64()
msg_right.data = self.right_sensor.getValue()
self.sensor_publisher_right.publish(msg_right)
msg_mid = Float64()
msg_mid.data = self.mid_sensor.getValue()
self.sensor_publisher_mid.publish(msg_mid)
msg_left = Float64()
msg_left.data = self.left_sensor.getValue()
self.sensor_publisher_left.publish(msg_left)
self.laser_pub()
def laser_pub(self):
msg_lidar = LaserScan()
msg_lidar.header.frame_id = 'base_link'
stamp = Time(seconds=self.robot.getTime()).to_msg()
msg_lidar.header.stamp = stamp
msg_lidar.angle_min = 0.0
msg_lidar.angle_max = 2 * 22 / 7
msg_lidar.angle_increment = ( 0.25 * 22 ) / (180 * 7 )
msg_lidar.range_min = 0.12
msg_lidar.range_max = 2.0
msg_lidar.scan_time = 0.032
msg_lidar.ranges = self.lidar_sensor.getRangeImage()
self.laser_publisher.publish(msg_lidar)
def main(args=None):
rclpy.init(args=args)
client_vel = ServiceNodeVelocity(args=args)
rclpy.spin(client_vel)
client_vel.destroy_node()
rclpy.shutdown()
if __name__ == '__main__':
main()
|
py | b4045419c87e093ef737f0bbc9f1373824191bc5 | from math import ceil
import matplotlib.pyplot as plt
import matplotlib.artist as mplartist
from matplotlib.axes import Axes
class InteractiveLegend(object):
def __init__(self, graph, selection):
self._graph = graph
self._selection = selection
self._groups = [ ]
self._ax = Axes(self._graph.ax.get_figure(), self._graph.ax.get_position(original = True))
# TODO: accept groups as part of constructor?
def add_group(self, label, vertices, default_props = { }, selected_props = { }, position = None):
group = VertexGroup(self, label, vertices, default_props, selected_props)
if position is None:
self._groups.append(group)
else:
self._groups.insert(position, group)
def build(self, n_cols = 2, font_size = 8, pad = 4):
n_rows = int(ceil(len(self._groups) / float(n_cols)))
row_sz = 1.0 / (self._ax.figure.get_dpi() / (font_size + pad))
radius = row_sz * 0.8 / 2.0
for idx, group in enumerate(self._groups):
row, col = idx % n_rows, idx // n_rows
offset_y = 1.0 - (row_sz * row + row_sz / 2.0)
offset_x = (1.0 / n_cols) * col
group.build(self._ax, offset_x + row_sz / 2.0, offset_x + row_sz * 1.2, offset_y, radius, font_size)
self._ax.tick_params(left = False, bottom = False, labelleft = False, labelbottom = False)
self._ax.set_ylim(1.0 - row_sz * n_rows)
self._ax.set_aspect("equal")
self._ax.set_anchor("NW")
self._ax.figure.canvas.toolbar.update()
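# Sizing example (added comment, assuming a 100 dpi figure): font_size=8 and pad=4 give
# row_sz = 1.0 / (100 / 12) = 0.12 axis units per legend row and a swatch radius of 0.048.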
def update(self, action):
for group in self._groups:
if action == "hide":
if group.selected:
group.mark_hidden()
elif action == "restore":
if group.selected:
group.mark_visible()
elif action == "deselect":
if group.selected:
group.mark_unselected()
elif action == "reset":
group.mark_visible()
elif action == "remove":
group._vertices &= self._graph.vertices
self.ax.figure.canvas.draw()
@property
def ax(self):
return self._ax
class VertexGroup(object):
def __init__(self, legend, label, vertices, default_props, selected_props):
self._legend = legend
self._label = label
self._vertices = vertices
self._default_props = default_props
self._selected_props = selected_props
self._selected = False
self._visible = True
self._patch = None
self._text = None
def build(self, ax, circle_x, text_x, y, radius, font_size):
self._default_props["radius"] = radius
self._selected_props["radius"] = radius
self._patch = plt.Circle((circle_x, y), **self.default_props)
ax.add_patch(self._patch)
self._text = ax.text(text_x, y, self.label, va = "center", ha = "left", size = font_size)
self._connect()
@property
def label(self):
return self._label
@property
def vertices(self):
return self._vertices
@property
def selected(self):
return self._selected
@property
def visible(self):
return self._visible
@property
def default_props(self):
return self._default_props
@property
def selected_props(self):
return self._selected_props
def mark_selected(self):
mplartist.setp(self._patch, **self.selected_props)
self._selected = True
def mark_unselected(self):
mplartist.setp(self._patch, **self.default_props)
self._selected = False
def mark_visible(self):
self._text.set_color((0.0, 0.0, 0.0))
self._visible = True
def mark_hidden(self):
self._text.set_color((0.4, 0.4, 0.4))
self._visible = False
def _on_press(self, event):
if event.inaxes != self._patch.axes:
return
contains, attrd = self._patch.contains(event)
if not contains:
return
if self._selected:
self._legend._selection.remove_vertices(self._vertices)
self.mark_unselected()
else:
self._legend._selection.add_vertices(self._vertices)
self.mark_selected()
self._legend.ax.figure.canvas.draw()
def _connect(self):
self._cidpress = self._patch.figure.canvas.mpl_connect("button_press_event", self._on_press)
def _disconnect(self):
self._patch.figure.canvas.mpl_disconnect(self._cidpress)
|
py | b404569042cc029297a245b025f2ca93a87754b3 | import sys
try:
import pipenv.vendor.chardet as chardet
except ImportError:
import pipenv.vendor.charset_normalizer as chardet
import warnings
warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')
# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)
for package in ('urllib3', 'idna'):
locals()[package] = __import__(package)
# This traversal is apparently necessary such that the identities are
# preserved (requests.packages.urllib3.* is urllib3.*)
for mod in list(sys.modules):
if mod == package or mod.startswith(package + '.'):
sys.modules['requests.packages.' + mod] = sys.modules[mod]
target = chardet.__name__
for mod in list(sys.modules):
if mod == target or mod.startswith(target + '.'):
sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod]
# Kinda cool, though, right?
|
py | b40456a4b8b16e095b90a619a1e4701459a5c190 | """
yamlalchemy
"""
__version__ = "0.1.2"
__author__ = 'Ahmet Onol'
from yamlalchemy.parser import parse
|
py | b40456ae059546da34e9f5c33a116420d107eaf0 | '''
Next Prime Number - Have the program find prime numbers until the user chooses to stop asking for the next one.
'''
import HeaderOfFiles
def next_prime(number):
'''
Keep printing the next prime number (starting above ``number``) until the user types 'stop'.
'''
current = number
while True:
x = input("Give me the next prime number or type 'stop' to stop the program: ")
if x == 'stop':
break
current += 1
while True:
is_prime = current >= 2
i = 2
while i * i <= current:
if current % i == 0:
is_prime = False
break
i += 1
if is_prime:
break
current += 1
print(current)
return current
if __name__ == '__main__':
next_prime(1)
|
py | b40456e9d6b4b137acbf461af78e9186a8ee59c5 | # This file is part of the Reproducible and Reusable Data Analysis Workflow
# Server (flowServ).
#
# Copyright (C) 2019-2021 NYU.
#
# flowServ is free software; you can redistribute it and/or modify it under the
# terms of the MIT License; see LICENSE file for more details.
"""Unit tests for executing serial workflow steps in a Docker container
environment.
"""
import docker
import os
import pytest
from flowserv.controller.worker.docker import DockerWorker
from flowserv.model.workflow.step import ContainerStep
# Test files directory
DIR = os.path.dirname(os.path.realpath(__file__))
RUN_DIR = os.path.join(DIR, '../../.files')
# -- Patching for error condition testing -------------------------------------
class MockClient:
"""Mock Docker client."""
@property
def containers(self):
return self
def run(self, image, command, volumes, remove, environment, stdout):
"""Mock run for docker container."""
if command == 'error':
raise docker.errors.ContainerError(
exit_status=1,
image=image,
command=command,
container=None,
stderr='there was an error'.encode('utf-8')
)
return environment[command].encode('utf-8')
@pytest.fixture
def mock_docker(monkeypatch):
"""Raise error in subprocess.run()."""
def mock_client(*args, **kwargs):
return MockClient()
monkeypatch.setattr(docker, "from_env", mock_client)
# -- Unit tests ---------------------------------------------------------------
def test_run_steps_with_error(mock_docker):
"""Test execution of a workflow step where one of the commands raises an
error.
"""
commands = [
'TEST_ENV_1',
'error',
'TEST_ENV_2'
]
env = {'TEST_ENV_1': 'Hello', 'TEST_ENV_2': 'World'}
step = ContainerStep(image='test', commands=commands)
result = DockerWorker().run(step=step, env=env, rundir=RUN_DIR)
assert result.returncode == 1
assert result.exception is not None
assert result.stdout == ['Hello']
assert 'there was an error' in ''.join(result.stderr)
def test_run_successful_steps(mock_docker):
"""Test successful execution of a workflow step with two commands."""
commands = [
'TEST_ENV_1',
'TEST_ENV_2'
]
env = {'TEST_ENV_1': 'Hello', 'TEST_ENV_2': 'World'}
step = ContainerStep(image='test', commands=commands)
result = DockerWorker().run(step=step, env=env, rundir=RUN_DIR)
assert result.returncode == 0
assert result.exception is None
assert result.stdout == ['Hello', 'World']
assert result.stderr == []
|
py | b40457344e1a867e74d9b52677c40634af8135a5 | from socket import *
serverHost = ''
serverPort = 50007
sockobj = socket(AF_INET, SOCK_STREAM)
sockobj.connect((serverHost, serverPort))
while True:
msg = input("Você: ")
sockobj.send(msg.encode())
data = sockobj.recv(1024)
print('Ele: ', data.decode())
sockobj.close()
|
py | b40458359b5eddbdc07c277c53fc588ade3a00c6 | from datetime import datetime, timedelta
import pytz
from icalendar import Timezone, TimezoneStandard
class TimezoneBuilder:
__tzid: str
def __init__(self, tzid):
self.__tzid = tzid
@property
def tzid(self):
return self.__tzid
def to_icalendar(self):
timezone = Timezone()
standard = TimezoneStandard()
tzinfo = pytz.timezone(self.__tzid)
offset = self.__get_tz_offset(datetime.now(tzinfo))
standard.add("dtstart", datetime(2000, 1, 1))
standard.add("tzoffsetfrom", offset)
standard.add("tzoffsetto", offset)
timezone.add("tzid", self.tzid.upper())
timezone.add_component(standard)
return timezone
def __get_tz_offset(self, dt):
offset = dt.strftime("%z")
has_signal = offset[0] == "-" or offset[0] == "+"
multiplier = 1
start = 0
pos = 2
if has_signal:
start = 1
pos = 3
if offset[0] == "-":
multiplier = -1
hours = int(offset[start:pos])
minutes = int(offset[pos:])
return timedelta(minutes=minutes * multiplier, hours=hours * multiplier)
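# Illustrative behaviour (added comment): an offset string such as "-0300" parses to
# timedelta(hours=-3), so TimezoneBuilder("America/Sao_Paulo").to_icalendar() emits a
# VTIMEZONE whose STANDARD block carries that zone's current UTC offset.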
|
py | b404584826c566e21db3ef14bdeda131091aa5a1 | # Copyright (c) 2008 The Hewlett-Packard Development Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
from __future__ import print_function
from m5.defines import buildEnv
from m5.params import *
from BaseCPU import BaseCPU
from DummyChecker import DummyChecker
from BranchPredictor import *
class BaseSimpleCPU(BaseCPU):
type = 'BaseSimpleCPU'
abstract = True
cxx_header = "cpu/simple/base.hh"
def addCheckerCpu(self):
if buildEnv['TARGET_ISA'] in ['arm']:
from ArmTLB import ArmTLB
self.checker = DummyChecker(workload = self.workload)
self.checker.itb = ArmTLB(size = self.itb.size)
self.checker.dtb = ArmTLB(size = self.dtb.size)
else:
print("ERROR: Checker only supported under ARM ISA!")
exit(1)
branchPred = Param.BranchPredictor(NULL, "Branch Predictor")
|
py | b40458b35c903a428939bbf0045080819187ab3c | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import zip
import logging
import json
import re
from django.urls import reverse
from django.http import Http404
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_POST
from thrift.transport.TTransport import TTransportException
from desktop.auth.backend import is_admin
from desktop.context_processors import get_app_name
from desktop.lib.django_util import JsonResponse
from desktop.lib.exceptions import StructuredThriftTransportException
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import force_unicode
from desktop.lib.parameterization import substitute_variables
from metastore import parser
from notebook.models import escape_rows, MockedDjangoRequest, make_notebook
import beeswax.models
from beeswax.data_export import upload
from beeswax.design import HQLdesign
from beeswax.conf import USE_GET_LOG_API
from beeswax.forms import QueryForm
from beeswax.models import Session, QueryHistory
from beeswax.server import dbms
from beeswax.server.dbms import expand_exception, get_query_server_config, QueryServerException, QueryServerTimeoutException,\
SubQueryTable
from beeswax.views import authorized_get_design, authorized_get_query_history, make_parameterization_form,\
safe_get_design, save_design, massage_columns_for_json, _get_query_handle_and_state, parse_out_jobs
from metastore.conf import FORCE_HS2_METADATA
from metastore.views import _get_db, _get_servername
from useradmin.models import User
LOG = logging.getLogger(__name__)
def error_handler(view_fn):
def decorator(request, *args, **kwargs):
try:
return view_fn(request, *args, **kwargs)
except Http404 as e:
raise e
except Exception as e:
LOG.exception('error in %s' % view_fn)
if not hasattr(e, 'message') or not e.message:
message = str(e)
else:
message = force_unicode(e.message, strings_only=True, errors='replace')
if 'Invalid OperationHandle' in message and 'id' in kwargs:
# Expired state.
query_history = authorized_get_query_history(request, kwargs['id'], must_exist=False)
if query_history:
query_history.set_to_expired()
query_history.save()
response = {
'status': -1,
'message': message,
}
if re.search('database is locked|Invalid query handle|not JSON serializable', message, re.IGNORECASE):
response['status'] = 2 # Frontend will not display this type of error
LOG.warn('error_handler silencing the exception: %s' % e)
return JsonResponse(response)
return decorator
@error_handler
def autocomplete(request, database=None, table=None, column=None, nested=None):
cluster = request.POST.get('cluster')
app_name = None if FORCE_HS2_METADATA.get() else get_app_name(request)
do_as = request.user
if (is_admin(request.user) or request.user.has_hue_permission(action="impersonate", app="security")) and 'doas' in request.GET:
do_as = User.objects.get(username=request.GET.get('doas'))
db = _get_db(user=do_as, source_type=app_name, cluster=cluster)
response = _autocomplete(db, database, table, column, nested, cluster=cluster)
return JsonResponse(response)
def _autocomplete(db, database=None, table=None, column=None, nested=None, query=None, cluster=None):
response = {}
try:
if database is None:
response['databases'] = db.get_databases()
elif table is None:
tables_meta = db.get_tables_meta(database=database)
response['tables_meta'] = tables_meta
elif column is None:
if query is not None:
table = SubQueryTable(db, query)
else:
table = db.get_table(database, table)
response['hdfs_link'] = table.hdfs_link
response['comment'] = table.comment
cols_extended = massage_columns_for_json(table.cols)
if table.is_impala_only: # Expand Kudu table information
if db.client.query_server['server_name'] != 'impala':
query_server = get_query_server_config('impala', connector=cluster)
db = dbms.get(db.client.user, query_server, cluster=cluster)
col_options = db.get_table_describe(database, table.name) # Expand columns information
extra_col_options = dict([(col[0], dict(list(zip(col_options.cols(), col)))) for col in col_options.rows()])
for col_props in cols_extended:
col_props.update(extra_col_options.get(col_props['name'], {}))
primary_keys = [col['name'] for col in extra_col_options.values() if col.get('primary_key') == 'true'] # Until IMPALA-8291
else:
primary_keys = [pk.name for pk in table.primary_keys]
response['support_updates'] = table.is_impala_only
response['columns'] = [column.name for column in table.cols]
response['extended_columns'] = cols_extended
response['is_view'] = table.is_view
response['partition_keys'] = [{'name': part.name, 'type': part.type} for part in table.partition_keys]
response['primary_keys'] = [{'name': pk} for pk in primary_keys]
else:
col = db.get_column(database, table, column)
if col:
parse_tree = parser.parse_column(col.name, col.type, col.comment)
if nested:
parse_tree = _extract_nested_type(parse_tree, nested)
response = parse_tree
# If column or nested type is scalar/primitive, add sample of values
if parser.is_scalar_type(parse_tree['type']):
sample = _get_sample_data(db, database, table, column, cluster=cluster)
if 'rows' in sample:
response['sample'] = sample['rows']
else:
raise Exception('Could not find column `%s`.`%s`.`%s`' % (database, table, column))
except (QueryServerTimeoutException, TTransportException) as e:
response['code'] = 503
response['error'] = str(e)
except TypeError as e:
response['code'] = 500
response['error'] = str(e)
except Exception as e:
LOG.warn('Autocomplete data fetching error: %s' % e)
response['code'] = 500
response['error'] = str(e)
return response
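# Added note: depending on which of database/table/column is supplied, the response dict
# holds 'databases', 'tables_meta', table metadata ('columns', 'extended_columns', ...),
# or a parsed column type tree, with 'code'/'error' filled in on failure.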
@error_handler
def parameters(request, design_id=None):
response = {'status': -1, 'message': ''}
# Use POST request to not confine query length.
if request.method != 'POST':
response['message'] = _('A POST request is required.')
parameterization_form_cls = make_parameterization_form(request.POST.get('query-query', ''))
if parameterization_form_cls:
parameterization_form = parameterization_form_cls(prefix="parameterization")
response['parameters'] = [{'parameter': field.html_name, 'name': field.name} for field in parameterization_form]
response['status']= 0
else:
response['parameters'] = []
response['status']= 0
return JsonResponse(response)
@error_handler
def execute_directly(request, query, design, query_server, tablename=None, **kwargs):
if design is not None:
design = authorized_get_design(request, design.id)
parameters = kwargs.pop('parameters', None)
db = dbms.get(request.user, query_server)
database = query.query.get('database', 'default')
db.use(database)
history_obj = db.execute_query(query, design)
watch_url = reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': history_obj.id})
if parameters is not None:
history_obj.update_extra('parameters', parameters)
history_obj.save()
response = {
'status': 0,
'id': history_obj.id,
'watch_url': watch_url,
'statement': history_obj.get_current_statement(),
'is_redacted': history_obj.is_redacted
}
return JsonResponse(response)
@error_handler
def watch_query_refresh_json(request, id):
query_history = authorized_get_query_history(request, id, must_exist=True)
db = dbms.get(request.user, query_history.get_query_server_config())
if not request.POST.get('next'): # We need this as multi query would fail as current query is closed
handle, state = _get_query_handle_and_state(query_history)
query_history.save_state(state)
# Go to next statement if asked to continue or when a statement with no dataset finished.
try:
if request.POST.get('next') or (not query_history.is_finished() and query_history.is_success() and not query_history.has_results):
close_operation(request, id)
query_history = db.execute_next_statement(query_history, request.POST.get('query-query'))
handle, state = _get_query_handle_and_state(query_history)
except QueryServerException as ex:
raise ex
except Exception as ex:
LOG.exception(ex)
handle, state = _get_query_handle_and_state(query_history)
try:
start_over = request.POST.get('log-start-over') == 'true'
log = db.get_log(handle, start_over=start_over)
except Exception as ex:
log = str(ex)
jobs = parse_out_jobs(log)
job_urls = massage_job_urls_for_json(jobs)
result = {
'status': -1,
'log': log,
'jobs': jobs,
'jobUrls': job_urls,
'isSuccess': query_history.is_success(),
'isFailure': query_history.is_failure(),
'id': id,
'statement': query_history.get_current_statement(),
'watch_url': reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id}),
'oldLogsApi': USE_GET_LOG_API.get()
}
# Run time error
if query_history.is_failure():
res = db.get_operation_status(handle)
if query_history.is_canceled(res):
result['status'] = 0
elif hasattr(res, 'errorMessage') and res.errorMessage:
result['message'] = res.errorMessage
else:
result['message'] = _('Bad status for request %s:\n%s') % (id, res)
else:
result['status'] = 0
return JsonResponse(result)
def massage_job_urls_for_json(jobs):
massaged_jobs = []
for job in jobs:
massaged_jobs.append({
'name': job,
'url': reverse('jobbrowser.views.single_job', kwargs={'job': job})
})
return massaged_jobs
@error_handler
def close_operation(request, query_history_id):
response = {
'status': -1,
'message': ''
}
if request.method != 'POST':
response['message'] = _('A POST request is required.')
else:
query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
db = dbms.get(query_history.owner, query_history.get_query_server_config())
handle = query_history.get_handle()
db.close_operation(handle)
query_history.set_to_expired()
query_history.save()
response['status'] = 0
return JsonResponse(response)
@error_handler
def explain_directly(request, query_server, query):
explanation = dbms.get(request.user, query_server).explain(query)
response = {
'status': 0,
'explanation': explanation.textual,
'statement': query.get_query_statement(0),
}
return JsonResponse(response)
@error_handler
def execute(request, design_id=None):
response = {'status': -1, 'message': ''}
if request.method != 'POST':
response['message'] = _('A POST request is required.')
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
query_type = beeswax.models.SavedQuery.TYPES_MAPPING[app_name]
design = safe_get_design(request, query_type, design_id)
try:
query_form = get_query_form(request)
if query_form.is_valid():
query_str = query_form.query.cleaned_data["query"]
explain = request.GET.get('explain', 'false').lower() == 'true'
design = save_design(request, query_form, query_type, design, False)
if query_form.query.cleaned_data['is_parameterized']:
# Parameterized query
parameterization_form_cls = make_parameterization_form(query_str)
if parameterization_form_cls:
parameterization_form = parameterization_form_cls(request.POST, prefix="parameterization")
if parameterization_form.is_valid():
parameters = parameterization_form.cleaned_data
real_query = substitute_variables(query_str, parameters)
query = HQLdesign(query_form, query_type=query_type)
query._data_dict['query']['query'] = real_query
try:
if explain:
return explain_directly(request, query_server, query)
else:
return execute_directly(request, query, design, query_server, parameters=parameters)
except Exception as ex:
db = dbms.get(request.user, query_server)
error_message, log = expand_exception(ex, db)
response['message'] = error_message
return JsonResponse(response)
else:
response['errors'] = parameterization_form.errors
return JsonResponse(response)
# Non-parameterized query
query = HQLdesign(query_form, query_type=query_type)
if request.GET.get('explain', 'false').lower() == 'true':
return explain_directly(request, query_server, query)
else:
return execute_directly(request, query, design, query_server)
else:
response['message'] = _('There was an error with your query.')
response['errors'] = {
'query': [query_form.query.errors],
'settings': query_form.settings.errors,
'file_resources': query_form.file_resources.errors,
'functions': query_form.functions.errors,
}
except RuntimeError as e:
response['message'] = str(e)
return JsonResponse(response)
@error_handler
def save_query_design(request, design_id=None):
response = {'status': -1, 'message': ''}
if request.method != 'POST':
response['message'] = _('A POST request is required.')
app_name = get_app_name(request)
query_type = beeswax.models.SavedQuery.TYPES_MAPPING[app_name]
design = safe_get_design(request, query_type, design_id)
try:
query_form = get_query_form(request)
if query_form.is_valid():
design = save_design(request, query_form, query_type, design, True)
response['design_id'] = design.id
response['status'] = 0
else:
response['errors'] = {
'query': [query_form.query.errors],
'settings': query_form.settings.errors,
'file_resources': query_form.file_resources.errors,
'functions': query_form.functions.errors,
'saveform': query_form.saveform.errors,
}
except RuntimeError as e:
response['message'] = str(e)
return JsonResponse(response)
@error_handler
def fetch_saved_design(request, design_id):
response = {'status': 0, 'message': ''}
if request.method != 'GET':
response['message'] = _('A GET request is required.')
app_name = get_app_name(request)
query_type = beeswax.models.SavedQuery.TYPES_MAPPING[app_name]
design = safe_get_design(request, query_type, design_id)
response['design'] = design_to_dict(design)
return JsonResponse(response)
@error_handler
def fetch_query_history(request, query_history_id):
response = {'status': 0, 'message': ''}
if request.method != 'GET':
response['message'] = _('A GET request is required.')
query = authorized_get_query_history(request, query_history_id, must_exist=True)
response['query_history'] = query_history_to_dict(request, query)
return JsonResponse(response)
@error_handler
def cancel_query(request, query_history_id):
response = {'status': -1, 'message': ''}
if request.method != 'POST':
response['message'] = _('A POST request is required.')
else:
try:
query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
db = dbms.get(request.user, query_history.get_query_server_config())
db.cancel_operation(query_history.get_handle())
query_history.set_to_expired()
response['status'] = 0
except Exception as e:
response['message'] = str(e)
return JsonResponse(response)
@error_handler
def save_results_hdfs_directory(request, query_history_id):
"""
Save the results of a query to an HDFS directory.
Rerun the query.
"""
response = {'status': 0, 'message': ''}
query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
server_id, state = _get_query_handle_and_state(query_history)
query_history.save_state(state)
error_msg, log = None, None
if request.method != 'POST':
response['message'] = _('A POST request is required.')
else:
if not query_history.is_success():
response['message'] = _('This query is %(state)s. Results unavailable.') % {'state': state}
response['status'] = -1
return JsonResponse(response)
db = dbms.get(request.user, query_history.get_query_server_config())
form = beeswax.forms.SaveResultsDirectoryForm({
'target_dir': request.POST.get('path')
}, fs=request.fs)
if form.is_valid():
target_dir = request.POST.get('path')
try:
response['type'] = 'hdfs-dir'
response['id'] = query_history.id
response['query'] = query_history.query
response['path'] = target_dir
response['success_url'] = '/filebrowser/view=%s' % target_dir
query_history = db.insert_query_into_directory(query_history, target_dir)
response['watch_url'] = reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id})
except Exception as ex:
error_msg, log = expand_exception(ex, db)
response['message'] = _('The result could not be saved: %s.') % error_msg
response['status'] = -3
else:
response['status'] = 1
response['errors'] = form.errors
return JsonResponse(response)
@error_handler
def save_results_hdfs_file(request, query_history_id):
"""
Save the results of a query to an HDFS file.
Do not rerun the query.
"""
response = {'status': 0, 'message': ''}
query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
server_id, state = _get_query_handle_and_state(query_history)
query_history.save_state(state)
error_msg, log = None, None
if request.method != 'POST':
response['message'] = _('A POST request is required.')
else:
if not query_history.is_success():
response['message'] = _('This query is %(state)s. Results unavailable.') % {'state': state}
response['status'] = -1
return JsonResponse(response)
db = dbms.get(request.user, query_history.get_query_server_config())
form = beeswax.forms.SaveResultsFileForm({
'target_file': request.POST.get('path'),
'overwrite': request.POST.get('overwrite', False),
})
if form.is_valid():
target_file = form.cleaned_data['target_file']
overwrite = form.cleaned_data['overwrite']
try:
handle, state = _get_query_handle_and_state(query_history)
except Exception as ex:
response['message'] = _('Cannot find query handle and state: %s') % str(query_history)
response['status'] = -2
return JsonResponse(response)
try:
if overwrite and request.fs.exists(target_file):
if request.fs.isfile(target_file):
request.fs.do_as_user(request.user.username, request.fs.rmtree, target_file)
else:
raise PopupException(_("The target path is a directory"))
upload(target_file, handle, request.user, db, request.fs)
response['type'] = 'hdfs-file'
response['id'] = query_history.id
response['query'] = query_history.query
response['path'] = target_file
response['success_url'] = '/filebrowser/view=%s' % target_file
response['watch_url'] = reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id})
except Exception as ex:
error_msg, log = expand_exception(ex, db)
response['message'] = _('The result could not be saved: %s.') % error_msg
response['status'] = -3
else:
response['status'] = 1
response['errors'] = form.errors
return JsonResponse(response)
@error_handler
def save_results_hive_table(request, query_history_id):
"""
Save the results of a query to a hive table.
Rerun the query.
"""
response = {'status': 0, 'message': ''}
query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
server_id, state = _get_query_handle_and_state(query_history)
query_history.save_state(state)
error_msg, log = None, None
if request.method != 'POST':
response['message'] = _('A POST request is required.')
else:
if not query_history.is_success():
response['message'] = _('This query is %(state)s. Results unavailable.') % {'state': state}
response['status'] = -1
return JsonResponse(response)
db = dbms.get(request.user, query_history.get_query_server_config())
database = query_history.design.get_design().query.get('database', 'default')
form = beeswax.forms.SaveResultsTableForm({
'target_table': request.POST.get('table')
}, db=db, database=database)
if form.is_valid():
try:
handle, state = _get_query_handle_and_state(query_history)
result_meta = db.get_results_metadata(handle)
except Exception as ex:
response['message'] = _('Cannot find query handle and state: %s') % str(query_history)
response['status'] = -2
return JsonResponse(response)
try:
query_history = db.create_table_as_a_select(request, query_history, form.target_database, form.cleaned_data['target_table'], result_meta)
response['id'] = query_history.id
response['query'] = query_history.query
response['type'] = 'hive-table'
response['path'] = form.cleaned_data['target_table']
response['success_url'] = reverse('metastore:describe_table', kwargs={'database': form.target_database, 'table': form.cleaned_data['target_table']})
response['watch_url'] = reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id})
except Exception as ex:
error_msg, log = expand_exception(ex, db)
response['message'] = _('The result could not be saved: %s.') % error_msg
response['status'] = -3
else:
response['status'] = 1
response['message'] = '\n'.join(list(form.errors.values())[0])
return JsonResponse(response)
@error_handler
def clear_history(request):
response = {'status': -1, 'message': ''}
if request.method != 'POST':
response['message'] = _('A POST request is required.')
else:
response['count'] = QueryHistory.objects.filter(owner=request.user, is_cleared=False).update(is_cleared=True)
response['status'] = 0
return JsonResponse(response)
@error_handler
def get_sample_data(request, database, table, column=None):
app_name = get_app_name(request)
cluster = json.loads(request.POST.get('cluster', '{}'))
query_server = get_query_server_config(app_name, connector=cluster)
db = dbms.get(request.user, query_server)
response = _get_sample_data(db, database, table, column, cluster=cluster)
return JsonResponse(response)
def _get_sample_data(db, database, table, column, is_async=False, cluster=None, operation=None):
table_obj = db.get_table(database, table)
if table_obj.is_impala_only and db.client.query_server['server_name'] != 'impala':
query_server = get_query_server_config('impala', connector=cluster)
db = dbms.get(db.client.user, query_server, cluster=cluster)
sample_data = db.get_sample(database, table_obj, column, generate_sql_only=is_async, operation=operation)
response = {'status': -1}
if sample_data:
response['status'] = 0
if is_async:
notebook = make_notebook(
name=_('Table sample for `%(database)s`.`%(table)s`.`%(column)s`') % {'database': database, 'table': table, 'column': column},
editor_type=_get_servername(db),
statement=sample_data,
status='ready-execute',
skip_historify=True,
is_task=False,
compute=cluster if cluster else None
)
response['result'] = notebook.execute(request=MockedDjangoRequest(user=db.client.user), batch=False)
if table_obj.is_impala_only:
response['result']['type'] = 'impala'
else:
sample = escape_rows(sample_data.rows(), nulls_only=True)
if column:
sample = set([row[0] for row in sample])
sample = [[item] for item in sorted(list(sample))]
response['headers'] = sample_data.cols()
response['full_headers'] = sample_data.full_cols()
response['rows'] = sample
else:
response['message'] = _('Failed to get sample data.')
return response
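# _get_sample_data returns one of two shapes: with is_async=True the generated sample SQL
# is wrapped in a notebook task and 'result' carries its execution handle; otherwise
# 'headers', 'full_headers' and 'rows' hold the fetched sample directly (reduced to a
# distinct, sorted single column when 'column' was given).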
@error_handler
def get_indexes(request, database, table):
query_server = dbms.get_query_server_config(get_app_name(request))
db = dbms.get(request.user, query_server)
response = {'status': -1}
indexes = db.get_indexes(database, table)
if indexes:
response['status'] = 0
response['headers'] = indexes.cols()
response['rows'] = escape_rows(indexes.rows(), nulls_only=True)
else:
response['message'] = _('Failed to get indexes.')
return JsonResponse(response)
@error_handler
def get_settings(request):
query_server = dbms.get_query_server_config(get_app_name(request))
db = dbms.get(request.user, query_server)
response = {'status': -1}
settings = db.get_configuration()
if settings:
response['status'] = 0
response['settings'] = settings
else:
response['message'] = _('Failed to get settings.')
return JsonResponse(response)
@error_handler
def get_functions(request):
query_server = dbms.get_query_server_config(get_app_name(request))
db = dbms.get(request.user, query_server)
response = {'status': -1}
prefix = request.GET.get('prefix', None)
functions = db.get_functions(prefix)
if functions:
response['status'] = 0
rows = escape_rows(functions.rows(), nulls_only=True)
response['functions'] = [row[0] for row in rows]
else:
response['message'] = _('Failed to get functions.')
return JsonResponse(response)
@error_handler
def analyze_table(request, database, table, columns=None):
app_name = get_app_name(request)
cluster = json.loads(request.POST.get('cluster', '{}'))
query_server = get_query_server_config(app_name, connector=cluster)
db = dbms.get(request.user, query_server)
table_obj = db.get_table(database, table)
if table_obj.is_impala_only and app_name != 'impala':
query_server = get_query_server_config('impala')
db = dbms.get(request.user, query_server)
response = {'status': -1, 'message': '', 'redirect': ''}
if request.method == "POST":
if columns is None:
query_history = db.analyze_table(database, table)
else:
query_history = db.analyze_table_columns(database, table)
response['watch_url'] = reverse('beeswax:api_watch_query_refresh_json', kwargs={'id': query_history.id})
response['status'] = 0
else:
response['message'] = _('A POST request is required.')
return JsonResponse(response)
@error_handler
def get_table_stats(request, database, table, column=None):
app_name = get_app_name(request)
cluster = json.loads(request.POST.get('cluster', '{}'))
query_server = get_query_server_config(app_name, connector=cluster)
db = dbms.get(request.user, query_server)
response = {'status': -1, 'message': '', 'redirect': ''}
if column is not None:
stats = db.get_table_columns_stats(database, table, column)
else:
table = db.get_table(database, table)
stats = table.stats
response['columns'] = [column.name for column in table.cols]
response['stats'] = stats
response['status'] = 0
return JsonResponse(response)
@error_handler
def get_top_terms(request, database, table, column, prefix=None):
app_name = get_app_name(request)
cluster = json.loads(request.POST.get('cluster', '{}'))
query_server = get_query_server_config(app_name, connector=cluster)
db = dbms.get(request.user, query_server)
response = {'status': -1, 'message': '', 'redirect': ''}
terms = db.get_top_terms(database, table, column, prefix=prefix, limit=int(request.GET.get('limit', 30)))
response['terms'] = terms
response['status'] = 0
return JsonResponse(response)
@error_handler
def get_session(request, session_id=None):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
response = {'status': -1, 'message': ''}
if session_id:
session = Session.objects.get(id=session_id, owner=request.user, application=query_server['server_name'])
else: # get the latest session for given user and server type
session = Session.objects.get_session(request.user, query_server['server_name'])
if session is not None:
properties = json.loads(session.properties)
# Redact passwords
for key, value in list(properties.items()):
if 'password' in key.lower():
properties[key] = '*' * len(value)
response['status'] = 0
response['session'] = {'id': session.id, 'application': session.application, 'status': session.status_code}
response['properties'] = properties
else:
response['message'] = _('Could not find session or no open sessions found.')
return JsonResponse(response)
@require_POST
@error_handler
def close_session(request, session_id):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
response = {'status': -1, 'message': ''}
session = None # guard against a NameError when the lookup below raises Session.DoesNotExist
try:
filters = {'id': session_id, 'application': query_server['server_name']}
if not is_admin(request.user):
filters['owner'] = request.user
session = Session.objects.get(**filters)
except Session.DoesNotExist:
response['message'] = _('Session does not exist or you do not have permissions to close the session.')
if session:
session = dbms.get(request.user, query_server).close_session(session)
response['status'] = 0
response['message'] = _('Session successfully closed.')
response['session'] = {'id': session_id, 'application': session.application, 'status': session.status_code}
return JsonResponse(response)
# Proxy API for Metastore App
def describe_table(request, database, table):
try:
from metastore.views import describe_table as metastore_describe_table
return metastore_describe_table(request, database, table)
except Exception as e:
LOG.exception('Describe table failed')
raise PopupException(_('Problem accessing table metadata'), detail=e)
def design_to_dict(design):
hql_design = HQLdesign.loads(design.data)
return {
'id': design.id,
'query': hql_design.hql_query,
'name': design.name,
'desc': design.desc,
'database': hql_design.query.get('database', None),
'settings': hql_design.settings,
'file_resources': hql_design.file_resources,
'functions': hql_design.functions,
'is_parameterized': hql_design.query.get('is_parameterized', True),
'email_notify': hql_design.query.get('email_notify', True),
'is_redacted': design.is_redacted
}
def query_history_to_dict(request, query_history):
query_history_dict = {
'id': query_history.id,
'state': query_history.last_state,
'query': query_history.query,
'has_results': query_history.has_results,
'statement_number': query_history.statement_number,
'watch_url': reverse(get_app_name(request) + ':api_watch_query_refresh_json', kwargs={'id': query_history.id}),
'results_url': reverse(get_app_name(request) + ':view_results', kwargs={'id': query_history.id, 'first_row': 0})
}
if query_history.design:
query_history_dict['design'] = design_to_dict(query_history.design)
return query_history_dict
def get_query_form(request):
try:
try:
# Get database choices
query_server = dbms.get_query_server_config(get_app_name(request))
db = dbms.get(request.user, query_server)
databases = [(database, database) for database in db.get_databases()]
except StructuredThriftTransportException as e:
# If Thrift exception was due to failed authentication, raise corresponding message
if 'TSocket read 0 bytes' in str(e) or 'Error validating the login' in str(e):
raise PopupException(_('Failed to authenticate to query server, check authentication configurations.'), detail=e)
else:
raise e
except Exception as e:
raise PopupException(_('Unable to access databases, Query Server or Metastore may be down.'), detail=e)
if not databases:
raise RuntimeError(_("No databases are available. Permissions could be missing."))
query_form = QueryForm()
query_form.bind(request.POST)
query_form.query.fields['database'].choices = databases # Could not do it in the form
return query_form
"""
Utils
"""
def _extract_nested_type(parse_tree, nested_path):
nested_tokens = nested_path.strip('/').split('/')
subtree = parse_tree
for token in nested_tokens:
if token in subtree:
subtree = subtree[token]
elif 'fields' in subtree:
for field in subtree['fields']:
if field['name'] == token:
subtree = field
break
else:
raise Exception('Invalid nested type path: %s' % nested_path)
return subtree
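# Illustrative walk-through of the helper above (hypothetical parse tree, not part of the
# original module):
#   parse_tree = {'type': 'struct', 'fields': [
#       {'name': 'address', 'type': 'struct', 'fields': [{'name': 'zip', 'type': 'int'}]}]}
#   _extract_nested_type(parse_tree, '/address/zip')
# resolves each path token either as a direct key or by matching a field 'name' inside
# 'fields', and returns {'name': 'zip', 'type': 'int'}.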
|
py | b40459b10bf4ee478b944c0e8a07b934f74e6325 | from oppgavegen.models import Template
from selectable.base import ModelLookup
from selectable.registry import registry
class TemplateLookup(ModelLookup):
model = Template
search_fields = ('tags__name__icontains',) # trailing comma keeps this a tuple rather than a bare string
registry.register(TemplateLookup) |
py | b4045a683c7882fb3042037739666028b249561a | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/medicine/crafted/shared_medpack_damage_c.iff"
result.attribute_template_id = 7
result.stfName("medicine_name","medpack_damage_c")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result |
py | b4045a9ad1ea2b37cea1822a989177bef184b803 | from finch.processes.constants import ALL_24_MODELS, PCIC_12
from pathlib import Path
import shutil
from unittest import mock
import zipfile
import numpy as np
import pandas as pd
import pytest
from pywps import configuration
import xarray as xr
from finch.processes import ensemble_utils
from finch.processes.ensemble_utils import (
get_bccaqv2_opendap_datasets,
get_bccaqv2_local_files_datasets,
)
from finch.processes.utils import (
drs_filename,
is_opendap_url,
netcdf_file_list_to_csv,
zip_files,
)
test_data = Path(__file__).parent / "data"
@mock.patch("finch.processes.ensemble_utils.Path")
def test_get_local_datasets_bccaqv2(mock_path):
names = [
"/mock_path/tasmin_day_BCCAQv2+ANUSPLIN300_CNRM-CM5_historical+rcp85_r1i1p1_19500101-21001231.nc",
"/mock_path/tasmin_day_BCCAQv2+ANUSPLIN300_CNRM-CM5_historical+rcp45_r1i1p1_19500101-21001231.nc",
"/mock_path/tasmin_day_BCCAQv2+ANUSPLIN300_CanESM2_historical+rcp45_r1i1p1_19500101-21001231.nc",
"/mock_path/tasmax_day_BCCAQv2+ANUSPLIN300_CanESM2_historical+rcp45_r1i1p1_19500101-21001231.nc",
"/mock_path/tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-M_historical+rcp26_r1i1p1_19500101-21001231.nc",
"/mock_path/tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp85_r1i1p1_19500101-21001231.nc",
"/mock_path/tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp45_r1i1p1_19500101-21001231.nc",
]
catalog_url = "/mock_path"
variable = "tasmin"
rcp = "rcp45"
mock_path_instance = mock.MagicMock()
mock_path.return_value = mock_path_instance
mock_path_instance.glob.return_value = [Path(n) for n in names]
files = get_bccaqv2_local_files_datasets(catalog_url, [variable], rcp)
assert len(files) == 2
@mock.patch("finch.processes.ensemble_utils.TDSCatalog")
def test_get_opendap_datasets_bccaqv2(mock_tdscatalog):
names = [
"tasmin_day_BCCAQv2+ANUSPLIN300_CNRM-CM5_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmin_day_BCCAQv2+ANUSPLIN300_CNRM-CM5_historical+rcp45_r1i1p1_19500101-21001231.nc",
"tasmin_day_BCCAQv2+ANUSPLIN300_CanESM2_historical+rcp45_r1i1p1_19500101-21001231.nc",
"tasmax_day_BCCAQv2+ANUSPLIN300_CanESM2_historical+rcp45_r1i1p1_19500101-21001231.nc",
"tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-M_historical+rcp26_r1i1p1_19500101-21001231.nc",
"tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp45_r1i1p1_19500101-21001231.nc",
]
catalog_url = configuration.get_config_value("finch", "dataset_bccaqv2")
variable = "tasmin"
rcp = "rcp45"
mock_catalog = mock.MagicMock()
mock_tdscatalog.return_value = mock_catalog
def make_dataset(name):
dataset = mock.MagicMock()
dataset.access_urls = {"OPENDAP": "url"}
dataset.name = name
return dataset
mock_catalog.datasets = {name: make_dataset(name) for name in names}
urls = get_bccaqv2_opendap_datasets(catalog_url, [variable], rcp)
assert len(urls) == 2
def test_netcdf_file_list_to_csv_to_zip():
here = Path(__file__).parent
folder = here / "data" / "bccaqv2_single_cell"
output_folder = here / "tmp" / "tasmin_csvs"
shutil.rmtree(output_folder, ignore_errors=True)
netcdf_files = list(sorted(folder.glob("tasmin*.nc")))
# only take a small subset of files that have all the calendar types
netcdf_files = netcdf_files[:5] + netcdf_files[40:50]
csv_files, metadata = netcdf_file_list_to_csv(
netcdf_files, output_folder, "file_prefix"
)
output_zip = output_folder / "output.zip"
files = csv_files + [metadata]
zip_files(output_zip, files)
with zipfile.ZipFile(output_zip) as z:
n_calendar_types = 4
n_files = len(netcdf_files)
data_filenames = [n for n in z.namelist() if "metadata" not in n]
metadata_filenames = [n for n in z.namelist() if "metadata" in n]
assert len(z.namelist()) == n_files + n_calendar_types
assert len(metadata_filenames) == n_files
for filename in data_filenames:
csv_lines = z.read(filename).decode().split("\n")[1:-1]
n_lines = len(csv_lines)
n_columns = len(csv_lines[0].split(",")) - 3
if "proleptic_gregorian" in filename:
assert n_lines == 366
assert n_columns == 2
elif "365_day" in filename:
assert n_lines == 365
assert n_columns == 8
elif "360_day" in filename:
assert n_lines == 360
assert n_columns == 3
elif "standard" in filename:
assert n_lines == 366
assert n_columns == 2
else:
assert False, "Unknown calendar type"
def test_netcdf_file_list_to_csv_bad_hours():
here = Path(__file__).parent
folder = here / "data" / "bccaqv2_single_cell"
output_folder = here / "tmp" / "tasmin_csvs"
shutil.rmtree(output_folder, ignore_errors=True)
# these files contain an hour somewhere at 0 (midnight) it should be 12h
bad_hours = [
"pr_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp26_r1i1p1_19500101-21001231_sub.nc",
"pr_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp45_r1i1p1_19500101-21001231_sub.nc",
"pr_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp85_r1i1p1_19500101-21001231_sub.nc",
"tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp26_r1i1p1_19500101-21001231_sub.nc",
"tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp45_r1i1p1_19500101-21001231_sub.nc",
"tasmax_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp85_r1i1p1_19500101-21001231_sub.nc",
"tasmin_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp26_r1i1p1_19500101-21001231_sub.nc",
"tasmin_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp45_r1i1p1_19500101-21001231_sub.nc",
"tasmin_day_BCCAQv2+ANUSPLIN300_NorESM1-ME_historical+rcp85_r1i1p1_19500101-21001231_sub.nc",
]
netcdf_files = [folder / bad for bad in bad_hours]
csv_files, _ = netcdf_file_list_to_csv(netcdf_files, output_folder, "file_prefix")
for csv in csv_files:
df = pd.read_csv(csv, parse_dates=["time"])
assert np.all(df.time.dt.hour == 12)
@pytest.mark.online
def test_is_opendap_url():
# This test uses online requests, and the servers are not as stable as hoped.
# We should record these requests so that the tests don't break when the servers are down.
url = (
"https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/"
"birdhouse/nrcan/nrcan_canada_daily_v2/tasmin/nrcan_canada_daily_tasmin_2017.nc"
)
assert is_opendap_url(url)
url = url.replace("dodsC", "fileServer")
assert not is_opendap_url(url)
# no Content-Description header
# url = "http://test.opendap.org/opendap/netcdf/examples/tos_O1_2001-2002.nc"
# assert is_opendap_url(url)
url = "invalid_schema://something"
assert not is_opendap_url(url)
url = "https://www.example.com"
assert not is_opendap_url(url)
url = "/missing_schema"
assert not is_opendap_url(url)
def test_bccaqv2_make_file_groups():
folder = Path(__file__).parent / "data" / "bccaqv2_single_cell"
files_list = list(folder.glob("*.nc"))
groups = ensemble_utils.make_file_groups(files_list)
assert len(groups) == 85
assert all(len(g) == 3 for g in groups)
def test_drs_filename():
ds = xr.open_dataset(
test_data / "bccaqv2_subset_sample/tasmax_bcc-csm1-1_subset.nc"
)
filename = drs_filename(ds)
assert filename == "tasmax_bcc-csm1-1_historical+rcp85_r1i1p1_19500101-19500410.nc"
def test_drs_filename_unknown_project():
ds = xr.open_dataset(
test_data / "bccaqv2_subset_sample/tasmax_bcc-csm1-1_subset.nc"
)
ds.attrs["project_id"] = "unknown"
filename = drs_filename(ds)
assert filename == "tasmax_day_bcc-csm1-1_historical+rcp85_19500101-19500410.nc"
def test_drs_filename_no_spaces():
ds = xr.open_dataset(
test_data / "bccaqv2_subset_sample/tasmax_bcc-csm1-1_subset.nc"
)
ds.attrs["driving_model_id"] = "bcc csm1 1"
filename = drs_filename(ds)
assert filename == "tasmax_bcc-csm1-1_historical+rcp85_r1i1p1_19500101-19500410.nc"
def test_drs_filename_cordex():
ds = xr.open_dataset(test_data / "cordex_subset.nc")
filename = drs_filename(ds)
expected = "tasmin_NAM-44_MPI-M-MPI-ESM-MR_rcp85_r1i1p1_UQAM-CRCM5_v1_day_20960101-20960409.nc"
assert filename == expected
def test_bccaqv2file():
filename = "tasmin_day_BCCAQv2+ANUSPLIN300_inmcm4_historical+rcp85_r1i1p1_19500101-21001231.nc"
file = ensemble_utils.Bccaqv2File.from_filename(filename)
expected = ensemble_utils.Bccaqv2File(
variable="tasmin",
frequency="day",
driving_model_id="inmcm4",
driving_experiment_id="historical+rcp85",
driving_realization="1",
driving_initialization_method="1",
driving_physics_version="1",
date_start="19500101",
date_end="21001231",
)
assert expected == file
@pytest.mark.parametrize(
"filename,variable,rcp,models,expected",
[
(
"tasmin_day_BCCAQv2+ANUSPLIN300_canesm2_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmin",
"rcp85",
None,
True,
),
(
"tasmin_day_BCCAQv2+ANUSPLIN300_canesm2_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmin",
"rcp85",
[ALL_24_MODELS],
True,
),
(
"tasmin_day_BCCAQv2+ANUSPLIN300_canesm2_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmax",
"rcp85",
[ALL_24_MODELS],
False,
),
(
"tasmin_day_BCCAQv2+ANUSPLIN300_canesm2_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmin",
"rcp45",
[ALL_24_MODELS],
False,
),
(
"tasmin_day_BCCAQv2+ANUSPLIN300_canesm2_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmin",
"rcp85",
["HadGEM2-ES"],
False,
),
(
"tasmin_day_BCCAQv2+ANUSPLIN300_HadGEM2-ES_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmin",
"rcp85",
["HadGEM2-ES"],
True,
),
(
"tasmin_day_BCCAQv2+ANUSPLIN300_MPI-ESM-LR_historical+rcp85_r3i1p1_19500101-21001231.nc",
"tasmin",
"rcp85",
[PCIC_12],
True,
),
(
"tasmin_day_BCCAQv2+ANUSPLIN300_MPI-ESM-LR_historical+rcp85_r1i1p1_19500101-21001231.nc",
"tasmin",
"rcp85",
[PCIC_12],
False,
),
("tasmin_not_proper_filename.nc", "tasmin", None, None, False),
],
)
def test_bccaqv2_filter(filename, variable, rcp, models, expected):
method = ensemble_utils.ParsingMethod.filename
url = None
variables = [variable]
result = ensemble_utils._bccaqv2_filter(
method=method,
filename=filename,
url=url,
variables=variables,
rcp=rcp,
models=models,
)
assert result == expected
|
py | b4045a9c84030aa9533415b6bc774cf82c09c883 |
import os
import sys
from Context import PSPCLContext
from PSPCompiler import Compiler
# Move this to a class like JPS?
def PSPCompile(*args):
pspfilename = args[0]
fil, ext = os.path.splitext(os.path.basename(pspfilename))
classname = fil + '_' + ext
pythonfilename = classname + '.py'
context = PSPCLContext(pspfilename)
context.setClassName(classname)
context.setPythonFileName(pythonfilename)
context.setPythonFileEncoding('utf-8')
clc = Compiler(context)
clc.compile()
if __name__ == '__main__':
PSPCompile(sys.argv[1])
|
wsgi | b4045bb4e0c2438603fb8cde2b92846117972330 | #!/usr/bin/env python
###
# A simple script to handle any requests that will be received from the
# web front-end and return JSON packets to be drawn using a visualization
# package
import os
import sys
import json
import ConfigParser
from cgi import parse_qs, escape
from neo4jrestclient import GraphDatabase, request
__author__ = "Cesar Arze"
__copyright__ = "Institute for Genome Science - University of Maryland School of Medicine"
__license__ = "MIT"
__version__ = "1.0"
__maintainer__ = "Cesar Arze"
__email__ = "[email protected]"
# Enable cache-ing of requests to the same URL
request.CACHE = False
# Server URL
config = ConfigParser.RawConfigParser()
config.read(os.path.abspath( os.path.dirname(__file__) + "/../conf/disease_ontology.conf"))
NEO4J_DB_URL = config.get('NEO4J', 'SERVER_URL')
def query_neo4j_packet(gdb, id, query_index=False):
"""
Querries the Neo4j graph database to pull down a "packet" of data. A packet
of data can be defined as a sub-tree of the graph containing the queried
node and its immediate children.
By default this function is expecting a raw Neo4j ID but can accept a
DOID ID if the query_index parameter is set to true.
If the root node is passed in as the query node (denoted by the ID
'root') the root node is pulled down by making use of the root Neo4j
index
"""
node = None
neo4j_children = []
neo4j_parents = []
if (query_index):
index_category = "ontologyid"
if (id == 'root'):
index_category = "root"
id = '1'
node = query_neo4j_index(gdb, id, index_category)
else:
node = gdb.nodes.get(id)
# Deal with any children this node might have
relationships_in = node.relationships.incoming()
for relation in relationships_in:
node_color = "gray"
child = relation.start
# Check if our child has children itself and color accordingly.
#
# GREEN --> Has children
# GRAY --> Has no children
child_relations = child.relationships.incoming()
if child_relations:
node_color = "green"
neo4j_children.append( dict(id=child.id,
doid=child.get('id'),
name=child.get('name'),
direction="out",
color=node_color)
)
# Now deal with any parents the node may have
relationships_out = node.relationships.outgoing()
for relation in relationships_out:
node_color = "gray"
parent = relation.end
# Check if our parent node has at least one child and color accordingly
parent_relations = parent.relationships.incoming()
if len(parent_relations) >= 1:
node_color = "green"
neo4j_parents.append( dict(id=parent.id,
doid=parent.get('id'),
name=parent.get('name'),
direction="in",
color=node_color)
)
return dict(children=neo4j_children, parents=neo4j_parents)
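# Shape of the packet returned above (values illustrative): a dict such as
#   {'children': [{'id': 42, 'doid': 'DOID:1234', 'name': '...', 'direction': 'out', 'color': 'green'}, ...],
#    'parents': [{'id': 7, 'doid': 'DOID:4', 'name': '...', 'direction': 'in', 'color': 'green'}, ...]}
# where 'color' is green when the neighbouring node has children of its own, gray otherwise.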
def query_neo4j_index(gdb, id, category):
"""
Query a Neo4j index specified by the category passed into this
function.
"""
index = gdb.nodes.indexes.get(category)
nodes = index.get('id', id)
# TODO: When we load the logical defs ontology we will have to deal with
# multiple root nodes
return nodes[0]
def application(environ, start_response):
http_params = parse_qs(environ['QUERY_STRING'])
node_id = http_params.get('id')[0]
query_index = http_params.get('index', False)
gdb = GraphDatabase(NEO4J_DB_URL)
neo4j_json = query_neo4j_packet(gdb, node_id, query_index)
status = '200 OK'
response_headers = [('Content-type', 'application/json')]
start_response(status, response_headers)
return json.dumps(neo4j_json)
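# Example query strings this WSGI app understands (the mount point is deployment-specific):
#   ?id=root&index=true -> packet for the ontology root node, looked up via the 'root' index
#   ?id=1057 -> packet for the node with raw Neo4j id 1057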
|
py | b4045eccbeed424fb4feda59a6b7751875c4be93 | #! /usr/bin/python
import csv
import os
import requests
import json
from datetime import datetime
from output.streamPlot import streamDetectionPlot
from output.multiStreamPlot import multiStreamDetectionPlot
def getData():
# get the data.
filePath = os.path.join(os.getcwd(), "test/data/heartBeat.csv")
file = open(filePath)
allData = csv.reader(file)
# skip the first three lines.
allData.next()
allData.next()
allData.next()
inputData = [x for x in allData]
return inputData
def convertData(dataList):
transformedData = []
# convert the data into json type.
for line in dataList:
transformedData.append(
json.dumps(
{"timestamp": line[0], "actualValue": line[1]}
)
)
return transformedData
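# Each element built above is a JSON string of the form (values illustrative):
#   '{"timestamp": "2015-01-01 00:00:00", "actualValue": "71"}'
# which is exactly what requestsHTMApi() posts to the single-field anomaly endpoint.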
def convertMultiData(dataList):
transformedData = []
# convert the data into json type.
for line in dataList:
transformedData.append(
json.dumps(
{
"A": {"timestamp": line[0], "actualValue": line[1]},
"B": {"timestamp": line[0], "actualValue": line[2]},
"C": {"timestamp": line[0], "actualValue": line[3]}
}
)
)
return transformedData
def requestsHTMApi(streamData):
# requests HTM api.
inputData = {"streamData": streamData}
url = "http://127.0.0.1:5000/api/HTM/v1.0/anomalyDetection/1"
r = requests.post(url, inputData)
return r.json()
def requestMultiHTMApi(streamData):
inputData = {"streamData": streamData}
url = "http://127.0.0.1:5000/api/HTM/v1.0/multiAnomalyDetection/1"
r = requests.post(url, inputData)
return r.json()
def run():
# read the data from /output/data/heartBeat.csv, convert it into json type,
# and initial the graph.
data = convertData(getData())
graph = streamDetectionPlot()
graph.initPlot()
for line in data:
# requests the one field api.
requestsData = requestsHTMApi(line)
# print requestsData
# plot the data
graph.anomalyDetectionPlot(
requestsData["timestamp"],
requestsData["actualValue"],
requestsData["predictValue"],
requestsData["anomalyScore"]
)
graph.close()
def runMulti():
# read the data from /output/data/heartBeat.csv, convert it into json type,
# and initial the graph.
data = convertMultiData(getData())
graph2 = multiStreamDetectionPlot()
graph2.initPlot()
for line in data:
# requests the one field api.
requestsData = requestMultiHTMApi(line)
# print requestsData
# print requestsData["A"]["timestamp"]
# plot the data
graph2.anomalyDetectionPlot(requestsData)
graph2.close()
if __name__ == "__main__":
run()
|
py | b4045f45a8bf47903030bdba67938413992fac21 | """
Complex probability distribution model
==============================================
Here we define a bivariate probability model, with a dependence structure defined using a Gumbel copula. The goal of
inference is to learn the parameters of the Gaussian marginals and the copula parameter, i.e., the model has 5 unknown
parameters.
"""
#%% md
#
# Initially we have to import the necessary modules.
#%%
import matplotlib.pyplot as plt
from UQpy.inference import DistributionModel, MLE
from UQpy.distributions import Normal
from UQpy.inference import MinimizeOptimizer
from UQpy.distributions import JointIndependent, JointCopula, Gumbel
from UQpy.sampling import ImportanceSampling
#%% md
#
# First data is generated from a true model. A distribution with copulas does not possess a fit method, thus sampling is
# performed using importance sampling/resampling.
#%%
# dist_true exhibits dependence between the two dimensions, defined using a gumbel copula
dist_true = JointCopula(marginals=[Normal(), Normal()], copula=Gumbel(theta=2.))
# generate data using importance sampling: sample from a bivariate gaussian without copula, then weight samples
u = ImportanceSampling(proposal = JointIndependent(marginals=[Normal(), Normal()]),
log_pdf_target = dist_true.log_pdf,
nsamples=500)
print(u.samples.shape)
print(u.weights.shape)
# Resample to obtain 5,000 data points
u.resample(nsamples=5000)
data_2 = u.unweighted_samples
print('Shape of data: {}'.format(data_2.shape))
fig, ax = plt.subplots()
ax.scatter(data_2[:, 0], data_2[:, 1], alpha=0.2)
ax.set_title('Data points from true bivariate normal with gumbel dependency structure')
plt.show()
#%% md
#
# To define a model for inference, the user must create a custom file, here bivariate_normal_gumbel.py, to compute the
# log_pdf of the distribution, given a bivariate data matrix and a parameter vector of length 5. Note that for any
# probability model that is not one of the simple univariate pdfs supported by UQpy, such a custom file will be
# necessary.
#%%
d_guess = JointCopula(marginals=[Normal(loc=None, scale=None), Normal(loc=None, scale=None)],
copula=Gumbel(theta=None))
print(d_guess.get_parameters())
candidate_model = DistributionModel(n_parameters=5, distributions=d_guess)
print(candidate_model.list_params)
#%% md
#
# When calling MLEstimation, the function minimize from the scipy.optimize package is used by default. The user can
# define bounds for the optimization, a seed, the algorithm to be used, and set the algorithm to perform several
# optimization iterations, starting at a different random seed every time.
#%%
optimizer = MinimizeOptimizer(bounds=[[-5, 5], [0, 10], [-5, 5], [0, 10], [1.1, 4]], method="SLSQP")
ml_estimator = MLE(inference_model=candidate_model, data=data_2, optimizer=optimizer)
ml_estimator = MLE(inference_model=candidate_model, data=data_2, optimizer=optimizer,
initial_parameters=[1., 1., 1., 1., 4.])
print('ML estimates of the mean={0:.3f} and std. dev={1:.3f} of 1st marginal (true: 0.0, 1.0)'.
format(ml_estimator.mle[0], ml_estimator.mle[1]))
print('ML estimates of the mean={0:.3f} and std. dev={1:.3f} of 2nd marginal (true: 0.0, 1.0)'.
format(ml_estimator.mle[2], ml_estimator.mle[3]))
print('ML estimates of the copula parameter={0:.3f} (true: 2.0)'.format(ml_estimator.mle[4]))
#%% md
#
# Again, some known parameters can be fixed during learning.
#%%
d_guess = JointCopula(marginals=[Normal(loc=None, scale=None), Normal(loc=0., scale=1.)],
copula=Gumbel(theta=None))
candidate_model = DistributionModel(n_parameters=3, distributions=d_guess)
optimizer = MinimizeOptimizer(bounds=[[-5, 5], [0, 10], [1.1, 4]],
method="SLSQP")
ml_estimator = MLE(inference_model=candidate_model, data=data_2, optimizer=optimizer,
initial_parameters=[1., 1., 4.])
print('ML estimates of the mean={0:.3f} and std. dev={1:.3f} of 1st marginal (true: 0.0, 1.0)'.
format(ml_estimator.mle[0], ml_estimator.mle[1]))
print('ML estimates of the copula parameter={0:.3f} (true: 2.0)'.format(ml_estimator.mle[2])) |
py | b40460a4eb422efe6049a358d20f9546762377e3 | from django.shortcuts import render
from django.http import HttpResponse
from .models import *
# Create your views here.
def inventoryHome(request):
context = {}
return render(request, 'inventory/main.html', context)
def storageHome(request):
products = Product.objects.all()
context = {'products': products}
return render(request, 'inventory/storage.html', context)
def productInput(request):
pinputs = ProductInput.objects.all()
context = {'pinputs': pinputs}
return render(request, 'inventory/product_inputs.html', context)
|
py | b40462d3083241e864440e3d003c190864add33e | #
# Copyright 2013 Y12Studio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading,time
class BaseWorker():
def __init__(self):
self.stopFlag = False
self.e = threading.Event()
self.t = threading.Thread(target=self.worker)
self.t.start()
self.data = None
def worker(self):
e = self.e
while not self.stopFlag:
#print('wait_for_event_timeout starting')
event_is_set = e.wait(1)
# logging.debug('event set: %s', event_is_set)
if event_is_set:
self.handleEvent()
e.clear()
else:
# logging.debug('doing other work')
pass
if self.stopFlag:
break
def handleEvent(self):
#logging.debug('processing event %d', self.count)
print('processing event %d'% self.data)
pass
def stop(self):
self.stopFlag = True
def notify(self):
self.e.set()
def sendData(self, dvalue):
self.data = dvalue
self.notify()
#logging.debug('Send Data: %d', self.count)
# print('Send Data: %d'% self.count)
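# Producer/consumer contract of BaseWorker: sendData() stashes the payload and sets the
# threading.Event; the worker thread wakes from e.wait(1), calls handleEvent(), clears the
# event and waits again until stop() flips stopFlag.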
class MyBaseWorker(BaseWorker):
def __init__(self):
BaseWorker.__init__(self)
self.data = 100
def handleEvent(self):
self.data += 2
#logging.debug('MyBaseWorker processing event %d', self.data)
print('MyBaseWorker processing event %d'% self.data)
def sendData(self, d):
self.data = d
self.notify()
#logging.debug('sendData event: %d', d)
print('sendData event: %d'% d)
def test():
wcls = MyBaseWorker()
time.sleep(2)
wcls.sendData(12)
print('Event is Send 1')
time.sleep(5)
wcls.sendData(99)
print('Event is Send 2')
time.sleep(2)
wcls.stop() |
py | b404634ef4ae23535365a3e2ea33460545fbe4da | #!/bin/env python
# A script to send an email for any Carbon Black sensors that have changed
# the state of their network isolation since the last running of this script.
# Run this script via a job scheduler, such as cron, to be notified
# when a sensor's network isolation state has changed.
# The script will track isolated sensors between runs via isolated_sensors.txt
__author__ = 'BJSwope'
import sys
import optparse
import warnings
import smtplib
import cbapi
import json
import collections
import socket
from email.mime.text import MIMEText
def build_cli_parser():
parser = optparse.OptionParser(usage="%prog [options]", description="Dump sensor list")
# for each supported output type, add an option
#
parser.add_option("-c", "--cburl", action="store", default=None, dest="url",
help="CB server's URL. e.g., http://127.0.0.1 ")
parser.add_option("-a", "--apitoken", action="store", default=None, dest="token",
help="API Token for Carbon Black server")
parser.add_option("-n", "--no-ssl-verify", action="store", default=False, dest="ssl_verify",
help="Do not verify server SSL certificate.")
parser.add_option("-g", "--group", action="store", default=None, dest="groupid",
help="Limit sensor listing to just those specified by the sensor group id provided")
parser.add_option("-f", "--mailfrom", action="store", default=None, dest="mailfrom",
help="Email from address.")
parser.add_option("-t", "--rcptto", action="store", default="[email protected]", dest="rcptto",
help="Email recipient.")
parser.add_option("-m", "--mailserver", action="store", default="localhost", dest="mailserver",
help="Mail server to route email.")
return parser
def send_mail(sensor,opts):
mail = {}
if sensor['network_isolation_enabled'] == True:
if sensor['is_isolating'] == True:
# Isolation Enabled and Active email
msg="Network Isolation enabled and active!\r\n Host: %s\r\nCarbon Black Console: %s\r\n Last Check-In Time: %s\r\n" \
% (sensor['computer_name'], sensor['url'], sensor['last_checkin_time'])
msg = MIMEText(msg)
msg['Subject'] = 'Host Isolation Activated By Carbon Black'
else:
# Isolation Enabled but Not Active email
msg="Network Isolation enabled and will activate at next sensor check in.\r\n Host: %s\r\nCarbon Black Console: %s\r\n Last Check-In Time: %s\r\nNext Check-In Time: %s" \
% (sensor['computer_name'], sensor['url'], sensor['last_checkin_time'], sensor['next_checkin_time'])
msg = MIMEText(msg)
msg['Subject'] = 'Host Isolation Enabled By Carbon Black'
elif sensor['network_isolation_enabled'] == False:
# Isolation Disabled email
msg="Network Isolation disabled and will deactivate at next sensor check in.\r\n Host: %s\r\nCarbon Black Console: %s\r\n Last Check-In Time: %s\r\nNext Check-In Time: %s" \
% (sensor['computer_name'], sensor['url'], sensor['last_checkin_time'], sensor['next_checkin_time'])
msg = MIMEText(msg)
msg['Subject'] = 'Host Isolation Disabled By Carbon Black'
else:
return
if opts.mailfrom == None:
hostname = socket.getfqdn()
opts.mailfrom = 'sensor_isolation@%s' % (hostname)
msg['From'] = opts.mailfrom
msg['To'] = opts.rcptto
s = smtplib.SMTP(opts.mailserver)
s.sendmail(msg['From'], msg['To'], msg.as_string())
s.quit()
def main(argv):
parser = build_cli_parser()
opts, args = parser.parse_args(argv)
if not opts.url or not opts.token:
print "Missing required param; run with --help for usage"
sys.exit(-1)
cb = cbapi.CbApi(opts.url, token=opts.token, ssl_verify=opts.ssl_verify)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
sensors = cb.sensors()
f = open("isolated_sensors.txt", "w+")
fis = f.read()
f.close()
try:
former_iso_sensors = json.loads(fis)
except ValueError:
former_iso_sensors = collections.defaultdict(dict)
current_iso_sensors = collections.defaultdict(dict)
for sensor in sensors:
if sensor['network_isolation_enabled'] == True:
#sensor should be isolating, add sensor to list of currently iso enabled sensors
sid = str(sensor['id'])
sensor['url'] = opts.url + "/#/host/" + sid
current_iso_sensors[sid]['network_isolation_enabled'] = sensor['network_isolation_enabled']
current_iso_sensors[sid]['is_isolating'] = sensor['is_isolating']
try:
if not sensor['is_isolating'] == former_iso_sensors[sid]['is_isolating']:
#state change, send email
send_mail(sensor,opts)
except KeyError as e:
#sid is not present in former_iso_sensors, new sensor isolation, send email
send_mail(sensor,opts)
f = open("isolated_sensors.txt", "w")
f.write(json.dumps(current_iso_sensors))
f.close()
#remove current isolations from from former isolations leaving the list of sensors removed from
# isolation since the last running of this script
iso_removed = [item for item in former_iso_sensors if item not in current_iso_sensors]
for fixed in iso_removed:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
sensor = cb.sensor(fixed)
sid = str(sensor['id'])
sensor['url'] = opts.url + "/#/host/" + sid
#send notification of isolation removal
send_mail(sensor,opts)
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
""" List of fields that can be included in the emails as of CB Version 5.1.1 p1:
boot_id
build_id
build_version_string
clock_delta
computer_dns_name
computer_name
computer_sid
cookie
display
emet_dump_flags
emet_exploit_action
emet_is_gpo
emet_process_count
emet_report_setting
emet_telemetry_path
emet_version
event_log_flush_time
group_id
id
is_isolating
last_checkin_time
last_update
license_expiration
network_adapters
network_isolation_enabled
next_checkin_time
node_id
notes
num_eventlog_bytes
num_storefiles_bytes
os_environment_display_string
os_environment_id
os_type
parity_host_id
physical_memory_size
power_state
registration_time
restart_queued
sensor_health_message
sensor_health_status
sensor_uptime
shard_id
status
supports_2nd_gen_modloads
supports_cblr
supports_isolation
systemvolume_free_size
systemvolume_total_size
uninstall
uninstalled
uptime
"""
|
py | b40463cdfc83063eda582d41f897b23bfccc1dd4 | #!/usr/bin/python
"""
Copyright (c) 2015, BROCADE COMMUNICATIONS SYSTEMS, INC
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.
@authors: Sergei Garbuzov
@status: Development
@version: 1.1.0
"""
import sys
import getopt
from pysdn.controller.controller import Controller
from pysdn.common.status import STATUS
from pysdn.common.utils import load_dict_from_file
def usage(myname):
print(' Usage: %s -i <identifier> -v <version>' % myname)
sys.exit()
if __name__ == "__main__":
f = "cfg.yml"
d = {}
if(load_dict_from_file(f, d) is False):
print("Config file '%s' read error: " % f)
exit()
try:
ctrlIpAddr = d['ctrlIpAddr']
ctrlPortNum = d['ctrlPortNum']
ctrlUname = d['ctrlUname']
ctrlPswd = d['ctrlPswd']
except:
print ("Failed to get Controller device attributes")
exit(0)
model_identifier = None
model_version = None
if(len(sys.argv) == 1):
print(" Error: missing arguments")
usage(sys.argv[0])
argv = sys.argv[1:]
try:
opts, args = getopt.getopt(argv,
"i:v:h",
["identifier=", "version=", "help"])
except getopt.GetoptError, e:
print(" Error: %s" % e.msg)
usage(sys.argv[0])
for opt, arg in opts:
if opt in ("-h", "--help"):
usage(sys.argv[0])
elif opt in ("-i", "--identifier"):
model_identifier = arg
elif opt in ("-v", "--version"):
model_version = arg
else:
print("Error: failed to parse option %s" % opt)
usage(sys.argv[0])
if(model_identifier is None) or (model_version is None):
print("Error: incomplete command")
usage(sys.argv[0])
ctrl = Controller(ctrlIpAddr, ctrlPortNum, ctrlUname, ctrlPswd)
print ("<<< 'Controller': %s" % (ctrlIpAddr))
result = ctrl.get_schema("controller-config",
model_identifier, model_version)
status = result.get_status()
if(status.eq(STATUS.OK)):
print "YANG model definition:"
schema = result.get_data()
print schema.encode('utf-8', 'replace')
else:
print ("\n")
print ("!!!Failed, reason: %s" % status.brief().lower())
print ("%s" % status.detailed())
exit(0)
print ("\n")
|
py | b404652945f5154ca35cb76f54b0289f7ed96f8b | """Tests for `acs service` command."""
from acs.AgentPool import AgentPool
import pytest
import time
import urllib.request
class TestService():
max_deploy_time = 25 # minutes
slow = pytest.mark.skipif(
not pytest.config.getoption("--runslow"),
reason="need --runslow option to run"
)
@slow
def test_create(self, service):
if service.exists():
service.log.debug("The test ACS cluster already exists, deleting")
service.delete(True)
exists = service.exists()
assert not exists
starttime = time.time()
service.create()
assert service.exists()
endtime = time.time()
service.log.info("Total deployment time: " + str((endtime - starttime)/60) + " minutes")
dns_up = False
start_time = time.time()
duration = 0
while not dns_up and duration < (2 * 60):
dns_up = service.exists()
duration = time.time() - start_time
assert dns_up, "DNS for the masters did not seem to come up"
def test_exists(self, service):
exists = service.exists()
assert exists
def test_show(self, service):
result = service.show()
assert "rgDcosTest" in result
assert "azure.com" in result
def test_connect(self, service):
results = service.connect()
isConnected = False
req = urllib.request.Request("http://localhost")
with urllib.request.urlopen(req) as response:
html = response.read()
isConnected = True
assert(isConnected)
service.disconnect()
def test_disconnect(self, service):
results = service.connect()
isConnected = True
results = service.disconnect()
req = urllib.request.Request("http://localhost")
try:
with urllib.request.urlopen(req) as response:
html = response.read()
isConnected = True
except urllib.error.URLError as e:
isConnected = False
assert(not isConnected)
@slow
def test_scale(self, service):
initial_agents = service.config.getint('ACS', 'agentCount')
service.args = {'--agents': initial_agents + 1}
result = service.scale()
assert "Scaled to " + str(initial_agents + 1) == result
|
py | b4046559cf0767ceba0984dc6994166f3ff0bb52 |
# Copyright 1997 - 2018 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class NetTopologyRing(Base):
"""The NetTopologyRing class encapsulates a user managed netTopologyRing node in the ixnetwork hierarchy.
An instance of the class can be obtained by accessing the NetTopologyRing property from a parent instance.
The internal properties list will be empty when the property is accessed and is populated from the server using the find method.
The internal properties list can be managed by the user by using the add and remove methods.
"""
_SDM_NAME = 'netTopologyRing'
def __init__(self, parent):
super(NetTopologyRing, self).__init__(parent)
@property
def IncludeEntryPoint(self):
"""if true, entry node belongs to ring topology, otherwise it is outside of ring
Returns:
bool
"""
return self._get_attribute('includeEntryPoint')
@IncludeEntryPoint.setter
def IncludeEntryPoint(self, value):
self._set_attribute('includeEntryPoint', value)
@property
def LinkMultiplier(self):
"""number of links between two nodes
Returns:
number
"""
return self._get_attribute('linkMultiplier')
@LinkMultiplier.setter
def LinkMultiplier(self, value):
self._set_attribute('linkMultiplier', value)
@property
def Nodes(self):
"""number of nodes
Returns:
number
"""
return self._get_attribute('nodes')
@Nodes.setter
def Nodes(self, value):
self._set_attribute('nodes', value)
def add(self, IncludeEntryPoint=None, LinkMultiplier=None, Nodes=None):
"""Adds a new netTopologyRing node on the server and retrieves it in this instance.
Args:
IncludeEntryPoint (bool): if true, entry node belongs to ring topology, otherwise it is outside of ring
LinkMultiplier (number): number of links between two nodes
Nodes (number): number of nodes
Returns:
self: This instance with all currently retrieved netTopologyRing data using find and the newly added netTopologyRing data available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._create(locals())
def remove(self):
"""Deletes all the netTopologyRing data in this instance from server.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, IncludeEntryPoint=None, LinkMultiplier=None, Nodes=None):
"""Finds and retrieves netTopologyRing data from the server.
All named parameters support regex and can be used to selectively retrieve netTopologyRing data from the server.
By default the find method takes no parameters and will retrieve all netTopologyRing data from the server.
Args:
IncludeEntryPoint (bool): if true, entry node belongs to ring topology, otherwise it is outside of ring
LinkMultiplier (number): number of links between two nodes
Nodes (number): number of nodes
Returns:
self: This instance with matching netTopologyRing data retrieved from the server available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._select(locals())
def read(self, href):
"""Retrieves a single instance of netTopologyRing data from the server.
Args:
href (str): An href to the instance to be retrieved
Returns:
self: This instance with the netTopologyRing data from the server available through an iterator or index
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def FetchAndUpdateConfigFromCloud(self, Mode):
"""Executes the fetchAndUpdateConfigFromCloud operation on the server.
Args:
Arg1 (str(None|/api/v1/sessions/1/ixnetwork/globals?deepchild=*|/api/v1/sessions/1/ixnetwork/topology?deepchild=*)): The method internally sets Arg1 to the current href for this instance
Mode (str):
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
Arg1 = self.href
return self._execute('FetchAndUpdateConfigFromCloud', payload=locals(), response_object=None)
|
py | b40467639d729026125c5f3ce79c377d7fe78a56 | from ModelBuilder import ModelBuilder
from ModelEvaluator import ModelEvaluator
from DataTransformer import multi_csv_to_dataset
from ModelLoader import ModelLoader
dataset = multi_csv_to_dataset([
'test_data/SHOP_daily.csv',
# 'test_data/TD_daily.csv',
# 'test_data/ENB_daily.csv',
# 'test_data/BA_daily.csv',
# 'test_data/TSLA_daily.csv'
])
model_loader = ModelLoader()
#test_data = ModelBuilder().build_model(dataset, 150)
#model_loader.save_model(test_data.model, 'multistock-2020-04-09')
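# Reuse the previously saved model instead of retraining: split the dataset (ratio 0.7) and attach the loaded model for evaluation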
test_data = ModelBuilder().split_test_data(dataset, 0.7)
test_data.model = model_loader.load_model('multistock-2020-04-09.h5')
evaluator = ModelEvaluator()
evaluator.analyze(test_data)
evaluator.plot(test_data) |
py | b40467beaaf34e98094a3b78247a331672bf9c95 | """
motifscan.region.parsers
------------------------
Parsers to read the genomic region files.
"""
import logging
from motifscan.exceptions import RegionFileFormatError
logger = logging.getLogger(__name__)
def is_track_header(line):
"""Returns if the line is a header line used in genome tracks/browsers."""
line = line.strip()
if line.startswith('#') or line.startswith('track') or line.startswith(
'browser'):
return True
else:
return False
def is_comment_header(line):
"""Returns if the line is a comment header line."""
line = line.strip()
if line.startswith('#'):
return True
else:
return False
def is_macs_header(line):
"""Returns if the line is a header line used in MACS/MACS2 xls."""
line = line.strip()
if line.startswith('#') or line.split('\t')[0] == 'chr':
return True
else:
return False
def is_manorm_header(line):
"""Returns if the line is a header line used in MAnorm xls."""
line = line.strip()
if line.startswith('#') or line.split('\t')[0] == 'chr':
return True
else:
return False
class RegionParser:
"""Base class for region file parsers."""
def __init__(self, format):
self.format = format
@staticmethod
def _is_header(line):
"""Abstract method to check if a line is a header line."""
raise NotImplementedError
@staticmethod
def _parse_line(line):
"""Abstract method to parse a line."""
raise NotImplementedError
def parse(self, path):
"""Read genomic regions from the given file."""
with open(path, 'r') as fin:
line_num = 0
expect_header = True
for line in fin:
line_num += 1
line = line.strip()
if not line: # skip empty lines
continue
if expect_header:
if self._is_header(line):
logger.debug(
f"Detected header at line {line_num}: {line!r}")
continue
else:
expect_header = False
try:
yield self._parse_line(line)
except (IndexError, ValueError, TypeError):
raise RegionFileFormatError(format=self.format,
line_num=line_num, line=line)
class BedRegionParser(RegionParser):
"""Region parser for the BED format."""
def __init__(self):
super().__init__('BED')
@staticmethod
def _is_header(line):
return is_track_header(line)
@staticmethod
def _parse_line(line):
fields = line.strip().split('\t')
chrom = fields[0]
start = int(fields[1])
end = int(fields[2])
try:
score = float(fields[4])
except (TypeError, ValueError, IndexError):
score = None
summit = None
return chrom, start, end, summit, score
class Bed3SummitRegionParser(RegionParser):
"""Region parser for the BED3-summit format."""
def __init__(self):
super().__init__(format='BED3-summit')
@staticmethod
def _is_header(line):
return is_comment_header(line)
@staticmethod
def _parse_line(line):
fields = line.strip().split('\t')
chrom = fields[0]
start = int(fields[1])
end = int(fields[2])
summit = int(fields[3])
score = None
return chrom, start, end, summit, score
class MacsRegionParser(RegionParser):
"""Region parser for the MACS-xls format."""
def __init__(self):
super().__init__(format='MACS-xls')
@staticmethod
def _is_header(line):
return is_macs_header(line)
@staticmethod
def _parse_line(line):
fields = line.strip().split('\t')
chrom = fields[0]
start = int(fields[1]) - 1 # coordinates are 1-based in MACS xls
end = int(fields[2])
summit = int(fields[4]) + start # relative summit pos for MACS1
score = float(fields[6])
return chrom, start, end, summit, score
class Macs2RegionParser(RegionParser):
"""Region parser for the MACS2-xls format."""
def __init__(self):
super().__init__(format='MACS2-xls')
@staticmethod
def _is_header(line):
return is_macs_header(line)
@staticmethod
def _parse_line(line):
fields = line.strip().split('\t')
chrom = fields[0]
start = int(fields[1]) - 1 # coordinates are 1-based in MACS2 xls
end = int(fields[2])
summit = int(fields[4]) - 1 # absolute summit pos for MACS2
score = float(fields[6])
return chrom, start, end, summit, score
class NarrowPeakRegionParser(RegionParser):
"""Region parser for the NarrowPeak format."""
def __init__(self):
super().__init__(format='NarrowPeak')
@staticmethod
def _is_header(line):
return is_track_header(line)
@staticmethod
def _parse_line(line):
fields = line.strip().split('\t')
chrom = fields[0]
start = int(fields[1])
end = int(fields[2])
score = float(fields[4])
# https://genome.ucsc.edu/FAQ/FAQformat.html#format12
summit = int(fields[9])
if summit == -1:
summit = None
else:
summit = start + summit
return chrom, start, end, summit, score
class BroadPeakRegionParser(RegionParser):
"""Region parser for the BroadPeak format."""
def __init__(self):
super().__init__(format='BroadPeak')
@staticmethod
def _is_header(line):
return is_track_header(line)
@staticmethod
def _parse_line(line):
fields = line.strip().split('\t')
chrom = fields[0]
start = int(fields[1])
end = int(fields[2])
score = float(fields[4])
# https://genome.ucsc.edu/FAQ/FAQformat.html#format13
summit = None
return chrom, start, end, summit, score
class ManormRegionParser(RegionParser):
def __init__(self):
super().__init__(format='MAnorm-xls')
@staticmethod
def _is_header(line):
return is_manorm_header(line)
@staticmethod
def _parse_line(line):
fields = line.strip().split('\t')
chrom = fields[0]
start = int(fields[1]) - 1
end = int(fields[2])
summit = int(fields[3]) - 1
score = float(fields[4]) # M-value here
return chrom, start, end, summit, score
def get_region_parser(format):
"""Get proper region parser for the given format.
Parameters
----------
format : str
File format (case-insensitive).
Returns
-------
Corresponding region parser.
"""
format = format.lower()
if format == 'bed':
return BedRegionParser
elif format == 'bed3-summit':
return Bed3SummitRegionParser
elif format == 'macs':
return MacsRegionParser
elif format == 'macs2':
return Macs2RegionParser
elif format == 'narrowpeak':
return NarrowPeakRegionParser
elif format == 'broadpeak':
return BroadPeakRegionParser
elif format == 'manorm':
return ManormRegionParser
else:
raise ValueError(f"unknown region file format: {format!r}")
|
py | b40468b5823c52ca920f99c05fc85728d3cdd9e9 | """File for non sklearn metrics that are to be used for reference for tests."""
from typing import Optional, Union
import numpy as np
from sklearn.metrics._regression import _check_reg_targets
from sklearn.utils import assert_all_finite, check_consistent_length, column_or_1d
def _symmetric_mean_absolute_percentage_error(
y_true: np.ndarray,
y_pred: np.ndarray,
sample_weight: Optional[np.ndarray] = None,
multioutput: str = "uniform_average",
):
r"""Symmetric mean absolute percentage error regression loss (SMAPE_):
    .. math:: \text{SMAPE} = \frac{2}{n}\sum_{i=1}^n\frac{| y_i - \hat{y_i} |}{\max(| y_i | + | \hat{y_i} |, \epsilon)}
Where :math:`y` is a tensor of target values, and :math:`\hat{y}` is a tensor of predictions.
Args:
y_true: array-like of shape (n_samples,) or (n_samples, n_outputs)
Ground truth (correct) target values.
y_pred: array-like of shape (n_samples,) or (n_samples, n_outputs)
Estimated target values.
sample_weight: array-like of shape (n_samples,), default=None
Sample weights.
multioutput: {'raw_values', 'uniform_average'} or array-like
Defines aggregating of multiple output values.
Array-like value defines weights used to average errors.
If input is list then the shape must be (n_outputs,).
- 'raw_values': Returns a full set of errors in case of multioutput input.
- 'uniform_average': Errors of all outputs are averaged with uniform weight.
Returns:
loss: float or ndarray of floats in the range [0, 1]
If multioutput is 'raw_values', then symmetric mean absolute percentage error
is returned for each output separately.
If multioutput is 'uniform_average' or an ndarray of weights, then the
weighted average of all output errors is returned.
MAPE output is non-negative floating point. The best value is 0.0.
But note the fact that bad predictions can lead to arbitarily large
MAPE values, especially if some y_true values are very close to zero.
Note that we return a large value instead of `inf` when y_true is zero.
"""
_, y_true, y_pred, multioutput = _check_reg_targets(y_true, y_pred, multioutput)
check_consistent_length(y_true, y_pred, sample_weight)
epsilon = np.finfo(np.float64).eps
smape = 2 * np.abs(y_pred - y_true) / np.maximum(np.abs(y_true) + np.abs(y_pred), epsilon)
output_errors = np.average(smape, weights=sample_weight, axis=0)
if isinstance(multioutput, str):
if multioutput == "raw_values":
return output_errors
# pass None as weights to np.average: uniform mean
multioutput = None
return np.average(output_errors, weights=multioutput)
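# Worked example (illustrative): for y_true = [1.0, 2.0] and y_pred = [1.0, 3.0] the
# per-sample terms are 0 and 2*|3-2|/(|2|+|3|) = 0.4, so the returned SMAPE is 0.2.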
# sklearn reference function from
# https://github.com/samronsin/scikit-learn/blob/calibration-loss/sklearn/metrics/_classification.py.
# TODO: when the PR into sklearn is accepted, update this to use the official function.
def _calibration_error(
y_true: np.ndarray,
y_prob: np.ndarray,
sample_weight: Optional[np.ndarray] = None,
norm: str = "l2",
n_bins: int = 10,
strategy: str = "uniform",
pos_label: Optional[Union[int, str]] = None,
reduce_bias: bool = True,
) -> float:
"""Compute calibration error of a binary classifier. Across all items in a set of N predictions, the
calibration error measures the aggregated difference between (1) the average predicted probabilities assigned
to the positive class, and (2) the frequencies of the positive class in the actual outcome. The calibration
error is only appropriate for binary categorical outcomes. Which label is considered to be the positive label
is controlled via the parameter pos_label, which defaults to 1.
Args:
y_true: array-like of shape (n_samples,)
True targets of a binary classification task.
y_prob: array-like of (n_samples,)
Probabilities of the positive class.
sample_weight: array-like of shape (n_samples,)
norm: {'l1', 'l2', 'max'}
Norm method. The l1-norm is the Expected Calibration Error (ECE),
and the max-norm corresponds to Maximum Calibration Error (MCE).
n_bins: int, default=10
The number of bins to compute error on.
strategy: {'uniform', 'quantile'}
Strategy used to define the widths of the bins.
uniform
All bins have identical widths.
quantile
All bins have the same number of points.
pos_label: int or str, default=None
Label of the positive class. If None, the maximum label is used as positive class.
reduce_bias: bool, default=True
Add debiasing term as in Verified Uncertainty Calibration, A. Kumar.
Only effective for the l2-norm.
Returns:
score: float with calibration error
"""
y_true = column_or_1d(y_true)
y_prob = column_or_1d(y_prob)
assert_all_finite(y_true)
assert_all_finite(y_prob)
check_consistent_length(y_true, y_prob, sample_weight)
if any(y_prob < 0) or any(y_prob > 1):
raise ValueError("y_prob has values outside of [0, 1] range")
labels = np.unique(y_true)
if len(labels) > 2:
raise ValueError(f"Only binary classification is supported. Provided labels {labels}.")
if pos_label is None:
pos_label = y_true.max()
if pos_label not in labels:
raise ValueError(f"pos_label={pos_label} is not a valid label: {labels}")
y_true = np.array(y_true == pos_label, int)
norm_options = ("l1", "l2", "max")
if norm not in norm_options:
raise ValueError(f"norm has to be one of {norm_options}, got: {norm}.")
remapping = np.argsort(y_prob)
y_true = y_true[remapping]
y_prob = y_prob[remapping]
if sample_weight is not None:
sample_weight = sample_weight[remapping]
else:
sample_weight = np.ones(y_true.shape[0])
n_bins = int(n_bins)
if strategy == "quantile":
quantiles = np.percentile(y_prob, np.arange(0, 1, 1.0 / n_bins) * 100)
elif strategy == "uniform":
quantiles = np.arange(0, 1, 1.0 / n_bins)
else:
raise ValueError(
f"Invalid entry to 'strategy' input. \
The strategy must be either quantile' or 'uniform'. Got {strategy} instead."
)
threshold_indices = np.searchsorted(y_prob, quantiles).tolist()
threshold_indices.append(y_true.shape[0])
avg_pred_true = np.zeros(n_bins)
bin_centroid = np.zeros(n_bins)
delta_count = np.zeros(n_bins)
debias = np.zeros(n_bins)
loss = 0.0
count = float(sample_weight.sum())
for i, i_start in enumerate(threshold_indices[:-1]):
i_end = threshold_indices[i + 1]
# ignore empty bins
if i_end == i_start:
continue
delta_count[i] = float(sample_weight[i_start:i_end].sum())
avg_pred_true[i] = np.dot(y_true[i_start:i_end], sample_weight[i_start:i_end]) / delta_count[i]
bin_centroid[i] = np.dot(y_prob[i_start:i_end], sample_weight[i_start:i_end]) / delta_count[i]
if norm == "l2" and reduce_bias:
# NOTE: I think there's a mistake in the original implementation.
# delta_debias = (
# avg_pred_true[i] * (avg_pred_true[i] - 1) * delta_count[i]
# )
# delta_debias /= (count * delta_count[i] - 1)
delta_debias = avg_pred_true[i] * (avg_pred_true[i] - 1) * delta_count[i]
delta_debias /= count * (delta_count[i] - 1)
debias[i] = delta_debias
if norm == "max":
loss = np.max(np.abs(avg_pred_true - bin_centroid))
elif norm == "l1":
delta_loss = np.abs(avg_pred_true - bin_centroid) * delta_count
loss = np.sum(delta_loss) / count
elif norm == "l2":
delta_loss = (avg_pred_true - bin_centroid) ** 2 * delta_count
loss = np.sum(delta_loss) / count
if reduce_bias:
# convert nans to zero
loss += np.sum(np.nan_to_num(debias))
loss = np.sqrt(max(loss, 0.0))
return loss
|
py | b40468b90505eee30946af0dea6ac4c3f569f315 | import os
USERNAME = os.environ['WEBSITE_USERNAME'] if 'WEBSITE_USERNAME' in os.environ else ''
PASSWORD = os.environ['WEBSITE_PASSWORD'] if 'WEBSITE_PASSWORD' in os.environ else ''
slack_hook = os.environ['SLACK_HOOK'] if 'SLACK_HOOK' in os.environ else ''
base_url = 'https://www.algolia.com/api/1/docsearch'
def confirm(message="Confirm"):
from builtins import input
prompt = message + ' [y/n]:\n'
while True:
ans = input(prompt)
if ans not in ['y', 'Y', 'n', 'N']:
print('please enter y or n.')
continue
if ans == 'y' or ans == 'Y':
return True
if ans == 'n' or ans == 'N':
return False
def get_user_value(message):
from builtins import input
prompt = message
return input(prompt)
def make_custom_get_request(url):
import requests
return requests.get(url)
def make_request(endpoint, type=None, data=None, username=None, password=None, json_request=False):
import requests
url = base_url + endpoint if "://" not in endpoint else endpoint
success_codes = [200, 201, 204]
username = username if username else USERNAME
password = password if password else PASSWORD
if data and not isinstance( data, dict ):
        raise ValueError(str(data) + " must be a dict")
if type == 'POST':
if json_request:
r = requests.post(url,
auth=(username, password),
json=data)
else:
r = requests.post(url,
auth=(username, password),
data=data)
        if r.status_code // 100 != 2:
print("ISSUE for POST request : " + url + " with params: " + str(data))
print (r.text)
return r
if type == 'DELETE':
r = requests.delete(url,
auth=(username, password))
if r.status_code not in success_codes:
print("ISSUE for DELETE request : " + url + " with params: " + str(data))
return r
if type == 'PUT':
r = requests.put(url,
auth=(username, password),
data=data)
print(r.status_code)
        if r.status_code // 100 != 2:
print("ISSUE for PUT request : " + url + " with params: " + str(data))
return r
if data != None:
r = requests.get(url,
auth=(username, password),
params=data)
else:
r = requests.get(url,
auth=(username, password))
    if r.status_code // 100 != 2:
        print("ISSUE for GET request : " + url + " with params: " + str(data))
if json_request:
r.json()
return r.text
def send_slack_notif(reports):
if slack_hook == '':
raise ValueError("NO SLACK_HOOK")
from slacker import Slacker
slack = Slacker(None, slack_hook)
slack.incomingwebhook.post({
"text": "",
"channel": "#notif-docsearch",
"username": "Deployer",
"icon_emoji": ":rocket:",
"attachments": reports
}) |
py | b4046912db7cf5de8b724a4c30e4678000fbb591 | ########################################################################################################################
# Module: abc/scenarios/gk.py
# Description: G and K likelihood.
#
# Web: https://github.com/SamDuffield/mocat
########################################################################################################################
from abc import ABC as AbsBaseClass
from typing import Union
from jax import numpy as jnp, random, vmap
from jax.scipy.stats import norm
from mocat.src.abc.abc import ABCScenario
class _GK(ABCScenario):
name: str = 'GK_fewN'
n_unsummarised_data: int = None
dim: int = 4
c: float = 0.8
prior_mins: Union[float, jnp.ndarray] = 0
prior_maxs: Union[float, jnp.ndarray] = 10
buffer: float = 1e-5
def full_data_sample(self,
x: jnp.ndarray,
random_key: jnp.ndarray) -> jnp.ndarray:
raise NotImplementedError(f'{self.name} full_data_sample not implemented')
def summarise_data(self,
data: jnp.ndarray) -> jnp.ndarray:
raise NotImplementedError(f'{self.name} summarise_data not implemented')
def likelihood_sample(self,
x: jnp.ndarray,
random_key: jnp.ndarray) -> jnp.ndarray:
return self.summarise_data(self.full_data_sample(x, random_key))
class GKUniformPrior(_GK, AbsBaseClass):
def full_data_sample(self,
x: jnp.ndarray,
random_key: jnp.ndarray) -> jnp.ndarray:
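        # g-and-k quantile transform: draw u ~ U(buffer, 1 - buffer), map it to a standard
        # normal z, then return A + B*(1 + c*tanh(g*z/2)) * z * (1 + z**2)**k with
        # (A, B, g, k) = x and the constant c fixed at 0.8.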
u = random.uniform(random_key, shape=(self.n_unsummarised_data,), minval=self.buffer, maxval=1 - self.buffer)
z = norm.ppf(u)
expmingz = jnp.exp(-x[2] * z)
return x[0] \
+ x[1] * (1 + self.c * (1 - expmingz) / (1 + expmingz)) \
* z * (1 + z ** 2) ** x[3]
def prior_potential(self,
x: jnp.ndarray,
random_key: jnp.ndarray = None) -> Union[float, jnp.ndarray]:
out = jnp.where(jnp.all(x > self.prior_mins), 1., jnp.inf)
out = jnp.where(jnp.all(x < self.prior_maxs), out, jnp.inf)
return out
def prior_sample(self,
random_key: jnp.ndarray) -> Union[float, jnp.ndarray]:
return self.prior_mins + random.uniform(random_key, (self.dim,)) * (self.prior_maxs - self.prior_mins)
class GKTransformedUniformPrior(_GK, AbsBaseClass):
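    # Works in an unconstrained parameter space: the prior is standard normal and samples
    # are mapped into the uniform box [prior_mins, prior_maxs] via the normal CDF in
    # constrain() before simulating data.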
def constrain(self,
unconstrained_x: jnp.ndarray):
return self.prior_mins + norm.cdf(unconstrained_x) * (self.prior_maxs - self.prior_mins)
def unconstrain(self,
constrained_x: jnp.ndarray):
return norm.ppf((constrained_x - self.prior_mins) / (self.prior_maxs - self.prior_mins))
def full_data_sample(self,
x: jnp.ndarray,
random_key: jnp.ndarray) -> jnp.ndarray:
transformed_x = self.constrain(x)
u = random.uniform(random_key, shape=(self.n_unsummarised_data,), minval=self.buffer, maxval=1 - self.buffer)
z = norm.ppf(u)
expmingz = jnp.exp(-transformed_x[2] * z)
return transformed_x[0] \
+ transformed_x[1] * (1 + self.c * (1 - expmingz) / (1 + expmingz)) \
* z * (1 + z ** 2) ** transformed_x[3]
def prior_potential(self,
x: jnp.ndarray,
random_key: jnp.ndarray = None) -> Union[float, jnp.ndarray]:
return 0.5 * (x ** 2).sum()
def prior_sample(self,
random_key: jnp.ndarray) -> Union[float, jnp.ndarray]:
return random.normal(random_key, (self.dim,))
class GKOnlyAUniformPrior(_GK, AbsBaseClass):
name: str = 'GK_fewN only A'
dim: int = 1
B: float = 1.
g: float = 2.
k: float = 0.5
def full_data_sample(self,
x: jnp.ndarray,
random_key: jnp.ndarray) -> jnp.ndarray:
u = random.uniform(random_key, shape=(self.n_unsummarised_data,), minval=self.buffer, maxval=1 - self.buffer)
z = norm.ppf(u)
expmingz = jnp.exp(-self.g * z)
return x[0] \
+ self.B * (1 + self.c * (1 - expmingz) / (1 + expmingz)) \
               * z * (1 + z ** 2) ** self.k
def prior_potential(self,
x: jnp.ndarray,
random_key: jnp.ndarray = None) -> Union[float, jnp.ndarray]:
out = jnp.where(jnp.all(x > self.prior_mins), 1., jnp.inf)
out = jnp.where(jnp.all(x < self.prior_maxs), out, jnp.inf)
return out
def prior_sample(self,
random_key: jnp.ndarray) -> Union[float, jnp.ndarray]:
return self.prior_mins + random.uniform(random_key, (self.dim,)) * (self.prior_maxs - self.prior_mins)
class GKOnlyATransformedUniformPrior(_GK, AbsBaseClass):
name: str = 'GK_fewN only A'
dim: int = 1
B: float = 1.
g: float = 2.
k: float = 0.5
def constrain(self,
unconstrained_x: jnp.ndarray):
return self.prior_mins + norm.cdf(unconstrained_x) * (self.prior_maxs - self.prior_mins)
def unconstrain(self,
constrained_x: jnp.ndarray):
return norm.ppf((constrained_x - self.prior_mins) / (self.prior_maxs - self.prior_mins))
def full_data_sample(self,
x: jnp.ndarray,
random_key: jnp.ndarray) -> jnp.ndarray:
transformed_x = self.constrain(x)
u = random.uniform(random_key, shape=(self.n_unsummarised_data,), minval=self.buffer, maxval=1 - self.buffer)
z = norm.ppf(u)
expmingz = jnp.exp(-self.g * z)
return transformed_x[0] \
+ self.B * (1 + self.c * (1 - expmingz) / (1 + expmingz)) \
               * z * (1 + z ** 2) ** self.k
def prior_potential(self,
x: jnp.ndarray,
random_key: jnp.ndarray = None) -> Union[float, jnp.ndarray]:
return 0.5 * (x ** 2).sum()
def prior_sample(self,
random_key: jnp.ndarray) -> Union[float, jnp.ndarray]:
return random.normal(random_key, (self.dim,))
|
py | b40469b78cfc2d541926bb9ae7ee2ed664a2699c | #!/usr/bin/env python
import npyscreen
import time
import curses
class TimeoutApplication(npyscreen.NPSAppManaged):
def onStart(self):
self.mainForm = self.addForm('MAIN', TimeoutForm)
class TimeoutForm(npyscreen.Form):
def create(self):
self.keypress_timeout = 10
self.timeWidget = self.add(npyscreen.TitleText, name="Time:", value=None, editable = None)
def afterEditing(self):
self.parentApp.NEXT_ACTIVE_FORM = None
def while_waiting(self):
self.timeWidget.value = time.asctime()
self.timeWidget.display()
if __name__ == "__main__":
app = TimeoutApplication()
app.run() |
py | b4046a02a1d751401b241a090f3d4431eba89568 | #!/usr/bin/python3
from PIL import Image, ImageOps
from PIL import ImageFont
from PIL import ImageDraw
import currency
import os
import sys
import logging
import RPi.GPIO as GPIO
from waveshare_epd import epd2in7
import time
import requests
import urllib, json
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import yaml
import socket
picdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'images')
fontdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fonts')
configfile = os.path.join(os.path.dirname(os.path.realpath(__file__)),'config.yaml')
fonthiddenprice = ImageFont.truetype(os.path.join(fontdir,'googlefonts/Roboto-Medium.ttf'), 30)
font = ImageFont.truetype(os.path.join(fontdir,'googlefonts/Roboto-Medium.ttf'), 40)
fontHorizontal = ImageFont.truetype(os.path.join(fontdir,'googlefonts/Roboto-Medium.ttf'), 50)
font_date = ImageFont.truetype(os.path.join(fontdir,'PixelSplitter-Bold.ttf'),11)
def internet(host="8.8.8.8", port=53, timeout=3):
"""
Host: 8.8.8.8 (google-public-dns-a.google.com)
OpenPort: 53/tcp
Service: domain (DNS/TCP)
"""
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except socket.error as ex:
logging.info("No internet")
return False
def human_format(num):
num = float('{:.3g}'.format(num))
magnitude = 0
while abs(num) >= 1000:
magnitude += 1
num /= 1000.0
return '{}{}'.format('{:f}'.format(num).rstrip('0').rstrip('.'), ['', 'K', 'M', 'B', 'T'][magnitude])
def getData(config,whichcoin,fiat,other):
"""
The function to update the ePaper display. There are two versions of the layout. One for portrait aspect ratio, one for landscape.
"""
logging.info("Getting Data")
days_ago=int(config['ticker']['sparklinedays'])
endtime = int(time.time())
starttime = endtime - 60*60*24*days_ago
starttimeseconds = starttime
endtimeseconds = endtime
# Get the price
if config['ticker']['exchange']=='default' or fiat!='usd':
geckourl = "https://api.coingecko.com/api/v3/coins/markets?vs_currency="+fiat+"&ids="+whichcoin
logging.info(geckourl)
rawlivecoin = requests.get(geckourl).json()
logging.info(rawlivecoin[0])
liveprice = rawlivecoin[0]
pricenow= float(liveprice['current_price'])
alltimehigh = float(liveprice['ath'])
other['volume'] = float(liveprice['total_volume'])
else:
geckourl= "https://api.coingecko.com/api/v3/exchanges/"+config['ticker']['exchange']+"/tickers?coin_ids="+whichcoin+"&include_exchange_logo=false"
logging.info(geckourl)
rawlivecoin = requests.get(geckourl).json()
liveprice= rawlivecoin['tickers'][0]
if liveprice['target']!='USD':
logging.info("The exhange is not listing in USD, misconfigured - shutting down script")
message="Misconfiguration Problem"
beanaproblem(message)
sys.exit()
pricenow= float(liveprice['last'])
other['volume'] = float(liveprice['converted_volume']['usd'])
alltimehigh = 1000000.0 # For non-default the ATH does not show in the API, so show it when price reaches *pinky in mouth* ONE MILLION DOLLARS
logging.info("Got Live Data From CoinGecko")
geckourlhistorical = "https://api.coingecko.com/api/v3/coins/"+whichcoin+"/market_chart/range?vs_currency="+fiat+"&from="+str(starttimeseconds)+"&to="+str(endtimeseconds)
logging.info(geckourlhistorical)
rawtimeseries = requests.get(geckourlhistorical).json()
logging.info("Got price for the last "+str(days_ago)+" days from CoinGecko")
timeseriesarray = rawtimeseries['prices']
timeseriesstack = []
length=len (timeseriesarray)
i=0
while i < length:
timeseriesstack.append(float (timeseriesarray[i][1]))
i+=1
timeseriesstack.append(pricenow)
if pricenow>alltimehigh:
other['ATH']=True
else:
other['ATH']=False
return timeseriesstack, other
def beanaproblem(message):
# A visual cue that the wheels have fallen off
thebean = Image.open(os.path.join(picdir,'thebean.bmp'))
epd = epd2in7.EPD()
epd.Init_4Gray()
image = Image.new('L', (epd.height, epd.width), 255) # 255: clear the image with white
draw = ImageDraw.Draw(image)
image.paste(thebean, (60,15))
draw.text((15,150),message, font=font_date,fill = 0)
image = ImageOps.mirror(image)
epd.display_4Gray(epd.getbuffer_4Gray(image))
logging.info("epd2in7 BTC Frame")
# Reload last good config.yaml
with open(configfile) as f:
config = yaml.load(f, Loader=yaml.FullLoader)
def makeSpark(pricestack):
# Draw and save the sparkline that represents historical data
# Subtract the mean from the sparkline to make the mean appear on the plot (it's really the x axis)
x = pricestack-np.mean(pricestack)
fig, ax = plt.subplots(1,1,figsize=(10,3))
plt.plot(x, color='k', linewidth=6)
plt.plot(len(x)-1, x[-1], color='r', marker='o')
# Remove the Y axis
for k,v in ax.spines.items():
v.set_visible(False)
ax.set_xticks([])
ax.set_yticks([])
ax.axhline(c='k', linewidth=4, linestyle=(0, (5, 2, 1, 2)))
# Save the resulting bmp file to the images directory
plt.savefig(os.path.join(picdir,'spark.png'), dpi=17)
imgspk = Image.open(os.path.join(picdir,'spark.png'))
file_out = os.path.join(picdir,'spark.bmp')
imgspk.save(file_out)
plt.clf() # Close plot to prevent memory error
def updateDisplay(config,pricestack,whichcoin,fiat,other):
"""
Takes the price data, the desired coin/fiat combo along with the config info for formatting
if config is re-written following adustment we could avoid passing the last two arguments as
they will just be the first two items of their string in config
"""
days_ago=int(config['ticker']['sparklinedays'])
symbolstring=currency.symbol(fiat.upper())
if fiat=="jpy" or fiat=="cny":
symbolstring="¥"
pricenow = pricestack[-1]
currencythumbnail= 'currency/'+whichcoin+'.bmp'
tokenfilename = os.path.join(picdir,currencythumbnail)
sparkbitmap = Image.open(os.path.join(picdir,'spark.bmp'))
ATHbitmap= Image.open(os.path.join(picdir,'ATH.bmp'))
# Check for token image, if there isn't one, get on off coingecko, resize it and pop it on a white background
if os.path.isfile(tokenfilename):
logging.info("Getting token Image from Image directory")
tokenimage = Image.open(tokenfilename)
else:
logging.info("Getting token Image from Coingecko")
tokenimageurl = "https://api.coingecko.com/api/v3/coins/"+whichcoin+"?tickers=false&market_data=false&community_data=false&developer_data=false&sparkline=false"
rawimage = requests.get(tokenimageurl).json()
tokenimage = Image.open(requests.get(rawimage['image']['large'], stream=True).raw)
resize = 100,100
tokenimage.thumbnail(resize, Image.ANTIALIAS)
new_image = Image.new("RGBA", (120,120), "WHITE") # Create a white rgba background with a 10 pixel border
new_image.paste(tokenimage, (10, 10), tokenimage)
tokenimage=new_image
tokenimage.thumbnail((100,100),Image.ANTIALIAS)
tokenimage.save(tokenfilename)
pricechange = str("%+d" % round((pricestack[-1]-pricestack[0])/pricestack[-1]*100,2))+"%"
if pricenow > 1000:
pricenowstring =format(int(pricenow),",")
else:
pricenowstring =str(float('%.5g' % pricenow))
if config['display']['orientation'] == 0 or config['display']['orientation'] == 180 :
epd = epd2in7.EPD()
epd.Init_4Gray()
image = Image.new('L', (epd.width, epd.height), 255) # 255: clear the image with white
draw = ImageDraw.Draw(image)
draw.text((110,80),str(days_ago)+"day :",font =font_date,fill = 0)
draw.text((110,95),pricechange,font =font_date,fill = 0)
# Print price to 5 significant figures
draw.text((15,200),symbolstring+pricenowstring,font =font,fill = 0)
draw.text((10,10),str(time.strftime("%H:%M %a %d %b %Y")),font =font_date,fill = 0)
image.paste(tokenimage, (10,25))
image.paste(sparkbitmap,(10,125))
if config['display']['orientation'] == 180 :
image=image.rotate(180, expand=True)
if config['display']['orientation'] == 90 or config['display']['orientation'] == 270 :
epd = epd2in7.EPD()
epd.Init_4Gray()
image = Image.new('L', (epd.height, epd.width), 255) # 255: clear the image with white
draw = ImageDraw.Draw(image)
draw.text((110,90),str(days_ago)+" day : "+pricechange,font =font_date,fill = 0)
#. uncomment the line below to show volume
# draw.text((110,105),"24h vol : " + human_format(other['volume']),font =font_date,fill = 0)
draw.text((10,120),symbolstring+pricenowstring,font =fontHorizontal,fill = 0)
image.paste(sparkbitmap,(80,40))
image.paste(tokenimage, (0,10))
if other['ATH']==True:
image.paste(ATHbitmap,(190,65))
draw.text((95,15),str(time.strftime("%H:%M %a %d %b %Y")),font =font_date,fill = 0)
if config['display']['orientation'] == 270 :
image=image.rotate(180, expand=True)
# This is a hack to deal with the mirroring that goes on in 4Gray Horizontal
image = ImageOps.mirror(image)
# If the display is inverted, invert the image usinng ImageOps
if config['display']['inverted'] == True:
image = ImageOps.invert(image)
# Send the image to the screen
epd.display_4Gray(epd.getbuffer_4Gray(image))
epd.sleep()
def currencystringtolist(currstring):
# Takes the string for currencies in the config.yaml file and turns it into a list
curr_list = currstring.split(",")
curr_list = [x.strip(' ') for x in curr_list]
return curr_list
def currencycycle(curr_list):
# Rotate the array of currencies from config.... [a b c] becomes [b c a]
curr_list = curr_list[1:]+curr_list[:1]
return curr_list
def main():
def fullupdate():
"""
The steps required for a full update of the display
Earlier versions of the code didn't grab new data for some operations
but the e-Paper is too slow to bother the coingecko API
"""
other={}
try:
            pricestack, other = getData(config,CURRENCY,FIAT, other)
# generate sparkline
makeSpark(pricestack)
# update display
updateDisplay(config, pricestack, CURRENCY,FIAT, other)
lastgrab=time.time()
time.sleep(.2)
except Exception as e:
message="Data pull/print problem"
beanaproblem(str(e))
time.sleep(10)
lastgrab=lastcoinfetch
return lastgrab
def configwrite():
"""
Write the config file following an adjustment made using the buttons
This is so that the unit returns to its last state after it has been
powered off
"""
config['ticker']['currency']=",".join(crypto_list)
config['ticker']['fiatcurrency']=",".join(fiat_list)
with open(configfile, 'w') as f:
data = yaml.dump(config, f)
logging.basicConfig(level=logging.DEBUG)
try:
logging.info("epd2in7 BTC Frame")
# Get the configuration from config.yaml
with open(configfile) as f:
config = yaml.load(f, Loader=yaml.FullLoader)
logging.info(config)
config['display']['orientation']=int(config['display']['orientation'])
crypto_list = currencystringtolist(config['ticker']['currency'])
logging.info(crypto_list)
fiat_list=currencystringtolist(config['ticker']['fiatcurrency'])
logging.info(fiat_list)
CURRENCY=crypto_list[0]
FIAT=fiat_list[0]
logging.info(CURRENCY)
logging.info(FIAT)
GPIO.setmode(GPIO.BCM)
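        # BCM pin numbers of the four push buttons on the Waveshare 2.7inch e-Paper HAT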
key1 = 5
key2 = 6
key3 = 13
key4 = 19
GPIO.setup(key1, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(key2, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(key3, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(key4, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Note that there has been no data pull yet
datapulled=False
# Time of start
lastcoinfetch = time.time()
while True:
key1state = GPIO.input(key1)
key2state = GPIO.input(key2)
key3state = GPIO.input(key3)
key4state = GPIO.input(key4)
if internet():
if key1state == False:
logging.info('Cycle currencies')
crypto_list = currencycycle(crypto_list)
CURRENCY=crypto_list[0]
logging.info(CURRENCY)
lastcoinfetch=fullupdate()
if key2state == False:
logging.info('Rotate - 90')
config['display']['orientation'] = (config['display']['orientation']+90) % 360
lastcoinfetch=fullupdate()
if key3state == False:
logging.info('Invert Display')
config['display']['inverted']= not config['display']['inverted']
lastcoinfetch=fullupdate()
if key4state == False:
logging.info('Cycle fiat')
fiat_list = currencycycle(fiat_list)
FIAT=fiat_list[0]
logging.info(FIAT)
lastcoinfetch=fullupdate()
if (time.time() - lastcoinfetch > float(config['ticker']['updatefrequency'])) or (datapulled==False):
if config['display']['cycle']==True:
crypto_list = currencycycle(crypto_list)
CURRENCY=crypto_list[0]
lastcoinfetch=fullupdate()
datapulled = True
# Moved due to suspicion that button pressing was corrupting config file
configwrite()
except IOError as e:
logging.info(e)
except KeyboardInterrupt:
logging.info("ctrl + c:")
epd2in7.epdconfig.module_exit()
exit()
if __name__ == '__main__':
main()
|
py | b4046b591bd9fb3e0760ef14d3d6a3b722b62916 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""String interpolation routines, i.e. the splitting up a given text into some
parts that are literal strings, and others that are Python expressions.
"""
from itertools import chain
import re
from tokenize import PseudoToken
from genshi.core import TEXT
from genshi.template.base import TemplateSyntaxError, EXPR
from genshi.template.eval import Expression
__all__ = ['interpolate']
__docformat__ = 'restructuredtext en'
NAMESTART = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_'
NAMECHARS = NAMESTART + '.0123456789'
PREFIX = '$'
token_re = re.compile('%s|%s(?s)' % (
r'[uU]?[rR]?("""|\'\'\')((?<!\\)\\\1|.)*?\1',
PseudoToken
))
def interpolate(text, filepath=None, lineno=-1, offset=0, lookup='strict'):
"""Parse the given string and extract expressions.
This function is a generator that yields `TEXT` events for literal strings,
and `EXPR` events for expressions, depending on the results of parsing the
string.
>>> for kind, data, pos in interpolate("hey ${foo}bar"):
... print('%s %r' % (kind, data))
TEXT 'hey '
EXPR Expression('foo')
TEXT 'bar'
:param text: the text to parse
:param filepath: absolute path to the file in which the text was found
(optional)
:param lineno: the line number at which the text was found (optional)
:param offset: the column number at which the text starts in the source
(optional)
:param lookup: the variable lookup mechanism; either "lenient" (the
default), "strict", or a custom lookup class
:return: a list of `TEXT` and `EXPR` events
:raise TemplateSyntaxError: when a syntax error in an expression is
encountered
"""
pos = [filepath, lineno, offset]
textbuf = []
textpos = None
for is_expr, chunk in chain(lex(text, pos, filepath), [(True, '')]):
if is_expr:
if textbuf:
yield TEXT, ''.join(textbuf), textpos
del textbuf[:]
textpos = None
if chunk:
try:
expr = Expression(chunk.strip(), pos[0], pos[1],
lookup=lookup)
yield EXPR, expr, tuple(pos)
except SyntaxError as err:
raise TemplateSyntaxError(err, filepath, pos[1],
pos[2] + (err.offset or 0))
else:
textbuf.append(chunk)
if textpos is None:
textpos = tuple(pos)
if '\n' in chunk:
lines = chunk.splitlines()
pos[1] += len(lines) - 1
pos[2] += len(lines[-1])
else:
pos[2] += len(chunk)
def lex(text, textpos, filepath):
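    # Split *text* into (is_expr, chunk) pairs: literal text chunks and the bodies of
    # $name / ${...} expressions; a doubled "$$" escapes to a literal "$".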
offset = pos = 0
end = len(text)
escaped = False
while 1:
if escaped:
offset = text.find(PREFIX, offset + 2)
escaped = False
else:
offset = text.find(PREFIX, pos)
if offset < 0 or offset == end - 1:
break
next = text[offset + 1]
if next == '{':
if offset > pos:
yield False, text[pos:offset]
pos = offset + 2
level = 1
while level:
match = token_re.match(text, pos)
if match is None or not match.group():
# if there isn't a match or the match is the empty
# string, we're not going to match up braces ever
raise TemplateSyntaxError('invalid syntax', filepath,
*textpos[1:])
pos = match.end()
tstart, tend = match.regs[3]
token = text[tstart:tend]
if token == '{':
level += 1
elif token == '}':
level -= 1
yield True, text[offset + 2:pos - 1]
elif next in NAMESTART:
if offset > pos:
yield False, text[pos:offset]
pos = offset
pos += 1
while pos < end:
char = text[pos]
if char not in NAMECHARS:
break
pos += 1
yield True, text[offset + 1:pos].strip()
elif not escaped and next == PREFIX:
if offset > pos:
yield False, text[pos:offset]
escaped = True
pos = offset + 1
else:
yield False, text[pos:offset + 1]
pos = offset + 1
if pos < end:
yield False, text[pos:]
|
py | b4046ba47139821539abc27940dc8710be7ef87e | from utils import login
import json
import requests
import os
try:
os.mkdir("settings")
os.mkdir("settings/devices")
except FileExistsError:
pass
client_id = 2882303761517308695
client_secret = "OrwZHJ/drEXakH1LsfwwqQ=="
data = login.get_token(client_id, client_secret)
settings = {"clientId": client_id, "accessToken": data["access_token"], "region": "de"}
with open("settings/settings.json", "w") as write_file:
json.dump(settings, write_file)
payload = {
"clientId": client_id,
"accessToken": data["access_token"],
}
url = "https://de.openapp.io.mi.com/openapp/user/device_list"
r = requests.post(
url,
headers={"Content-Type": "application/x-www-form-urlencoded"},
data=payload,
)
list_id = []
devices_json = json.loads(r.text)
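# Write one JSON file per returned device (did, name, model, MAC) into settings/devices/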
for x in devices_json["result"]["list"]:
lamps = {"did": x["did"], "name": x["name"], "model": x["model"], "mac": x["mac"]}
with open(f'settings/devices/{x["name"]}.json', "w") as write_file:
json.dump(lamps, write_file)
|
py | b4046c733b5e77cd774f69e1740593f6bc6f9223 | """Adjust some old Python 2 idioms to their modern counterparts.
* Change some type comparisons to isinstance() calls:
type(x) == T -> isinstance(x, T)
type(x) is T -> isinstance(x, T)
type(x) != T -> not isinstance(x, T)
type(x) is not T -> not isinstance(x, T)
* Change "while 1:" into "while True:".
* Change both
v = list(EXPR)
v.sort()
foo(v)
and the more general
v = EXPR
v.sort()
foo(v)
into
v = sorted(EXPR)
foo(v)
"""
# Author: Jacques Frechet, Collin Winter
# Local imports
from .. import fixer_base
from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms
CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
TYPE = "power< 'type' trailer< '(' x=any ')' > >"
class FixIdioms(fixer_base.BaseFix):
explicit = True # The user must ask for this fixer
PATTERN = r"""
isinstance=comparison< %s %s T=any >
|
isinstance=comparison< T=any %s %s >
|
while_stmt< 'while' while='1' ':' any+ >
|
sorted=any<
any*
simple_stmt<
expr_stmt< id1=any '='
power< list='list' trailer< '(' (not arglist<any+>) any ')' > >
>
'\n'
>
sort=
simple_stmt<
power< id2=any
trailer< '.' 'sort' > trailer< '(' ')' >
>
'\n'
>
next=any*
>
|
sorted=any<
any*
simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' >
sort=
simple_stmt<
power< id2=any
trailer< '.' 'sort' > trailer< '(' ')' >
>
'\n'
>
next=any*
>
""" % (TYPE, CMP, CMP, TYPE)
def match(self, node):
r = super(FixIdioms, self).match(node)
# If we've matched one of the sort/sorted subpatterns above, we
# want to reject matches where the initial assignment and the
# subsequent .sort() call involve different identifiers.
if r and "sorted" in r:
if r["id1"] == r["id2"]:
return r
return None
return r
def transform(self, node, results):
if "isinstance" in results:
return self.transform_isinstance(node, results)
elif "while" in results:
return self.transform_while(node, results)
elif "sorted" in results:
return self.transform_sort(node, results)
else:
raise RuntimeError("Invalid match")
def transform_isinstance(self, node, results):
x = results["x"].clone() # The thing inside of type()
T = results["T"].clone() # The type being compared against
x.prefix = ""
T.prefix = " "
test = Call(Name("isinstance"), [x, Comma(), T])
if "n" in results:
test.prefix = " "
test = Node(syms.not_test, [Name("not"), test])
test.prefix = node.prefix
return test
def transform_while(self, node, results):
one = results["while"]
one.replace(Name("True", prefix=one.prefix))
def transform_sort(self, node, results):
sort_stmt = results["sort"]
next_stmt = results["next"]
list_call = results.get("list")
simple_expr = results.get("expr")
if list_call:
list_call.replace(Name("sorted", prefix=list_call.prefix))
elif simple_expr:
new = simple_expr.clone()
new.prefix = ""
simple_expr.replace(Call(Name("sorted"), [new],
prefix=simple_expr.prefix))
else:
raise RuntimeError("should not have reached here")
sort_stmt.remove()
btwn = sort_stmt.prefix
# Keep any prefix lines between the sort_stmt and the list_call and
# shove them right after the sorted() call.
if "\n" in btwn:
if next_stmt:
# The new prefix should be everything from the sort_stmt's
# prefix up to the last newline, then the old prefix after a new
# line.
prefix_lines = (btwn.rpartition("\n")[0], next_stmt[0].prefix)
next_stmt[0].prefix = "\n".join(prefix_lines)
else:
assert list_call.parent
assert list_call.next_sibling is None
# Put a blank line after list_call and set its prefix.
end_line = BlankLine()
list_call.parent.append_child(end_line)
assert list_call.next_sibling is end_line
# The new prefix should be everything up to the first new line
# of sort_stmt's prefix.
end_line.prefix = btwn.rpartition("\n")[0]
|
py | b4046cba6dc7ca67d342cb0efa0d40de115c4614 | from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
import torch
from tests.utils import jitVsGlow
class TestMul(unittest.TestCase):
def test_mul_basic(self):
"""Basic test of the PyTorch mul Node on Glow."""
def test_f(a, b):
c = a.mul(b)
return c.mul(c)
x = torch.randn(4)
y = torch.randn(4)
jitVsGlow(test_f, x, y, expected_fused_ops={"aten::mul"})
def test_mul_broadcast_1(self):
"""Test of the PyTorch mul Node on Glow with broadcasting."""
def test_f(a, b):
c = a.mul(b)
return c.mul(c)
x = torch.randn(8, 3, 4, 2)
y = torch.randn(4, 2)
jitVsGlow(test_f, x, y, expected_fused_ops={"aten::mul"})
def test_mul_broadcast_2(self):
"""Test of the PyTorch mul Node on Glow with broadcasting."""
def test_f(a, b):
c = a.mul(b)
return c.mul(c)
x = torch.randn(8, 3, 4, 2)
y = torch.randn(1, 2)
jitVsGlow(test_f, x, y, expected_fused_ops={"aten::mul"})
def test_mul_broadcast_3(self):
"""Test of the PyTorch mul Node on Glow with broadcasting."""
def test_f(a, b):
c = a.mul(b)
return c.mul(c)
x = torch.randn(4, 2)
y = torch.randn(8, 3, 4, 2)
jitVsGlow(test_f, x, y, expected_fused_ops={"aten::mul"})
def test_mul_float(self):
"""Test of the PyTorch aten::mul Node with a float argument"""
def test_f(a):
return (a + a).mul(3.9)
x = torch.randn(4)
jitVsGlow(test_f, x, expected_fused_ops={"aten::mul"})
def test_mul_int(self):
"""Test of the PyTorch aten::mul Node with an int argument"""
def test_f(a):
return (a + a).mul(20)
x = torch.randn(4)
jitVsGlow(test_f, x, expected_fused_ops={"aten::mul"})
|
py | b4046db40ac07951e6ee096e067a891f1f1f62fd | from .TS3Bot import Bot
from .audit_service import AuditService
from .commander_service import CommanderService
from .guild_service import GuildService
from .reset_roster_service import ResetRosterService
from .user_service import UserService
__all__ = ['Bot', 'UserService', 'CommanderService', 'ResetRosterService', 'AuditService', 'GuildService']
|
py | b4046f800dbc95647e7b748c7b11a73e5ec32111 | import unittest
from translate.bing import BingTransResult
class BingTransTest(unittest.TestCase):
def test_result(self):
html = '''<div><span id="ht_logo"></span><h4>capsule</h4><span class="ht_attr" lang="en">['kæp.sjul]
</span><ul><li><span class="ht_pos">n.</span><span
class="ht_trs">太空舱;荚;航天舱;(装药物的)胶囊</span></li></ul><ul><li><span class="ht_pos">adj.</span><span
class="ht_trs">简略的;小而结实的</span></li></ul><ul><li><span class="ht_pos">v.</span><span
class="ht_trs">节略;以瓶帽密封</span></li></ul></div> '''
res = BingTransResult.from_html(html)
self.assertEqual('capsule', res.get_original())
self.assertEqual("['kæp.sjul]", res.get_pronunciation())
self.assertEqual(3, len(res.get_translation()))
self.assertEqual('n.', res.get_translation()[0].get('pos'))
self.assertEqual('太空舱;荚;航天舱;(装药物的)胶囊', res.get_translation()[0].get('trans'))
self.assertEqual('adj.', res.get_translation()[1].get('pos'))
self.assertEqual('简略的;小而结实的', res.get_translation()[1].get('trans'))
self.assertEqual('v.', res.get_translation()[2].get('pos'))
self.assertEqual('节略;以瓶帽密封', res.get_translation()[2].get('trans'))
|
py | b4047018d4fc7b34f8cf2c46f5055a0f9bec5b5d | #!/usr/bin/python
'''
This program is used to implement the Naive Bayes Algorithm for classification.
To run the program, type the following command:
python NaiveBayes.py <training_file> <test_file>
'''
import sys
import csv
import math
label = "IsBadBuy"
num_attr_list=["WarrantyCost","MMRAcquisitionAuctionCleanPrice","MMRCurrentRetailAveragePrice","MMRAcquisitionRetailAveragePrice","VehOdo","VehicleAge","MMRCurrentRetailCleanPrice","MMRAcquisitionAuctionAveragePrice","MMRAcquisitonRetailCleanPrice","MMRCurrentAuctionAveragePrice","VehBCost","MMRCurrentAuctionCleanPrice","VehOdo_by_VehAge","r2_minus_r1_avg","r2_minus_r1_clean","r2_minus_a1_avg","r2_minus_a1_clean","r2_avg_minus_vehbc","r2_clean_minus_vehbc","vehbc_minus_a1_avg","vehbc_minus_a1_clean","warranty_by_vehbc"]
'''This function mentions the correct usage for running the program.
'''
def usage(program_name):
return "Wrong Usage!\nCorrect Usage is:\t<python "+ program_name + "> <train_file> <test_file> <prediction_file>"
'''This function is used to find all the distinct values that each nominal attribute can take.
This is stored in a dictionary with keys as the attribute names and the value for a
key as a list of distinct values that the attribute can take.
'''
def find_distinct_values_feature(training_data,testing_data,all_features):
values_in_features = {}
total_data = training_data + testing_data
for feature in all_features:
distinct_values = set()
for example in total_data:
distinct_values.add(example[feature])
values_in_features[feature] = distinct_values
return values_in_features
'''This function is used to calculate the prior probabilities of the class labels.
'''
def find_prior_probability(label_value,training_data):
global label
count = 0
for example in training_data:
if example[label] == label_value:
count += 1
return float(count)/float(len(training_data))
'''This function is basically the model that is learned in the Naive Bayes Classifier.
It stores the conditional probability values of each attribute_name -> value -> label
combination. These values are stored in a dictionary and looked up using a
string lookup.
'''
def store_all_feature_value_label_cond_probabilities(training_data,values_in_features):
global label
value_cond_prob = {}
labels = ['0','1']
for feature in values_in_features:
distinct_values = values_in_features[feature]
total_values_feature = len(distinct_values)
for value in distinct_values:
for label_val in labels:
string_lookup = str(feature) + ':' + str(value) + ':' + label_val
counter = 0
total_counter = 0
for example in training_data:
if example[label] == label_val:
total_counter += 1
if example[feature] == value:
counter += 1
if counter == 0:
counter = 1 #Laplacian Correction.
total_counter += total_values_feature
probability = float(counter)/float(total_counter)
value_cond_prob[string_lookup] = probability
return value_cond_prob
def store_mean_std_dev_numeric_attributes(training_data,numeric_features,value_cond_prob):
positive_examples = []
negative_examples = []
for example in training_data:
if example[label] == "1":
positive_examples.append(example)
else:
negative_examples.append(example)
value_cond_prob = get_mean_std_dev_labelled_examples(positive_examples,numeric_features,value_cond_prob,"1")
value_cond_prob = get_mean_std_dev_labelled_examples(negative_examples,numeric_features,value_cond_prob,"0")
return value_cond_prob
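'''This function computes, for one class label, the mean and (population) standard
    deviation of every numeric attribute and stores them in the model dictionary
    under the key "<feature>_<label_value>".
'''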
def get_mean_std_dev_labelled_examples(labelled_data,numeric_features,value_cond_prob,lab_val):
total = len(labelled_data)
for feature in numeric_features:
summation = 0.0
mean = 0.0
std_dev = 0.0
all_vals = []
for example in labelled_data:
summation = float(summation) + float(example[feature])
all_vals.append(float(example[feature]))
mean = float(summation)/float(total)
summation = 0.0
for value in all_vals:
dev = float(value) - float(mean)
numerator = float(dev)**2
summation = float(summation) + float(numerator)
std_dev = float(summation)/float(total)
std_dev = float(std_dev)**(0.5)
value_cond_prob[feature + "_" + lab_val] = [mean,std_dev]
return value_cond_prob
'''This function is used for training the Naive Bayes classifier and returning the corresponding
conditional probability values which is the model that is learned.
'''
def train_naive_bayes_get_classifier(training_data,values_in_features,numeric_features):
prior_positive = find_prior_probability("1",training_data)
prior_negative = find_prior_probability("0",training_data)
#print "Done finding prior probabilities for class labels."
value_cond_prob = store_all_feature_value_label_cond_probabilities(training_data,values_in_features)
value_cond_prob = store_mean_std_dev_numeric_attributes(training_data,numeric_features,value_cond_prob)
value_cond_prob['prior_positive'] = prior_positive
value_cond_prob['prior_negative'] = prior_negative
#print "Done storing conditional probabilities for attribute values."
return value_cond_prob #Return the model for the Naive Bayes classifier.
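'''This function returns the Gaussian (normal) probability density of a value:
    exp(-(value - mean)^2 / (2 * std_dev^2)) / sqrt(2 * pi * std_dev^2).
'''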
def calc_gaussian_prob(value,mean,std_dev):
diff = float(value) - float(mean)
diff_sq = float(diff)**2
variance = float(std_dev)**2
diff_sq = (-1) * float(diff_sq)/float(2.0*variance)
exp_term = float(math.exp(diff_sq))
denom = float(2.0 * math.pi * variance)
denom = float(denom**(0.5))
return float(exp_term)/float(denom)
'''This function is used to return the predictions of the classifier on testing data.
'''
def get_predictions_from_model(value_cond_prob,testing_data,nominal_features,numeric_features):
predictions = []
for example in testing_data:
predicted_label = "0"
features_prob_product_positive = 1.0
features_prob_product_negative = 1.0
for feature in nominal_features:
string_lookup = str(feature) + ':' + str(example[feature]) + ':1'
features_prob_product_positive = float(features_prob_product_positive) * float(value_cond_prob[string_lookup])
string_lookup = str(feature) + ':' + str(example[feature]) + ':0'
features_prob_product_negative = float(features_prob_product_negative) * float(value_cond_prob[string_lookup])
for num_feature in numeric_features:
string_lookup = str(num_feature) + "_" + "1"
mean,std_dev = value_cond_prob[string_lookup]
features_prob_product_positive = float(features_prob_product_positive) * float(calc_gaussian_prob(example[num_feature],mean,std_dev))
string_lookup = str(num_feature) + "_" + "0"
mean,std_dev = value_cond_prob[string_lookup]
features_prob_product_negative = float(features_prob_product_negative) * float(calc_gaussian_prob(example[num_feature],mean,std_dev))
if (float(features_prob_product_positive * value_cond_prob['prior_positive']) >= float(features_prob_product_negative * value_cond_prob['prior_negative'])):
predicted_label = "1"
predictions.append(predicted_label)
return predictions
'''This function is used to evaluate the accuracy/quality of the classifier on the test data
and for printing the metrics like the true positives, negatives, etc.
'''
def print_metrics(testing_data,predictions):
global label
true_positives = 0
false_negatives = 0
false_positives = 0
true_negatives = 0
num_examples = len(testing_data)
for example_num in range(0,num_examples):
predicted_label = predictions[example_num]
if testing_data[example_num][label] == "1":
if predicted_label == "1":
true_positives += 1
elif predicted_label == "0":
false_negatives += 1
elif testing_data[example_num][label] == "0":
if predicted_label == "1":
false_positives += 1
elif predicted_label == "0":
true_negatives += 1
print true_positives,"\t",false_negatives,"\t",false_positives,"\t",true_negatives
def read_csv(fhandle):
data = []
reader = csv.DictReader(fhandle)
data = [row for row in reader]
return data
def csv_process(train_file,test_file):
global label
global num_attr_list
training_data = read_csv(train_file)
testing_data = read_csv(test_file)
all_features = training_data[0].keys()
all_features.remove(label)
max_index = len(all_features)
numeric_features = num_attr_list
for feature in numeric_features:
all_features.remove(feature)
values_in_features = find_distinct_values_feature(training_data,testing_data,all_features)
return training_data,testing_data,values_in_features,max_index,numeric_features
if __name__ == "__main__":
if(len(sys.argv)) != 4:
print usage("GaussianNaiveBayes.py")
sys.exit(1)
else:
train_file_name = sys.argv[1]
test_file_name = sys.argv[2]
pred_file_name = sys.argv[3]
train_file = open(train_file_name,"r")
test_file = open(test_file_name,"r")
training_data,testing_data,values_in_features,max_index,numeric_features = csv_process(train_file,test_file)
train_file.close()
test_file.close()
value_cond_prob = train_naive_bayes_get_classifier(training_data,values_in_features,numeric_features)
nominal_features = values_in_features.keys()
predictions = get_predictions_from_model(value_cond_prob,training_data,nominal_features,numeric_features)
print_metrics(training_data,predictions)
predictions = get_predictions_from_model(value_cond_prob,testing_data,nominal_features,numeric_features)
pred_file = open(pred_file_name,"w")
for pred in predictions:
pred_file.write(str(pred) + "\n")
pred_file.close()
|
py | b40470f74f0ebd55e10e738041f29f6ab678ea6b | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from decimal import Decimal
import re
from google.appengine.ext import ndb
from google.appengine.ext.ndb.model import IntegerProperty, _MAX_STRING_LENGTH
class BoundaryError(Exception):
pass
class BadEmailError(Exception):
pass
# this class is used only to distinguish from StringProperty
class Email(ndb.StringProperty):
def _validate(self, value):
if value and not re.match(r'[^@]+@[^@]+\.[^@]+', value):
            raise BadEmailError('Badly formatted email "%s"' % value)
class StringBounded(ndb.StringProperty):
def __init__(self, name=None, compressed=False, max_len=_MAX_STRING_LENGTH, exactly_len=None, min_len=None, **kwds):
super(StringBounded, self).__init__(name=name, compressed=compressed, **kwds)
self.max_len = max_len
self.exactly_len = exactly_len
self.min_len = min_len
def _validate(self, value):
if self.max_len is not None and len(value) > self.max_len:
            raise BoundaryError('%s should have at most %s characters' % (value, self.max_len))
if self.min_len is not None and len(value) < self.min_len:
            raise BoundaryError('%s should have at least %s characters' % (value, self.min_len))
if self.exactly_len is not None and len(value) != self.exactly_len:
raise BoundaryError('%s should have exactly %s characters' % (value, self.exactly_len))
class IntegerBounded(ndb.IntegerProperty):
'''
    Property to define a bounded integer based on lower and upper values.
    The default of each bound is None, in which case no validation is executed.
'''
def __init__(self, lower=None, upper=None, **kwargs):
self.upper = upper
self.lower = lower
super(IntegerBounded, self).__init__(**kwargs)
def _validate(self, value):
if self.lower is not None and value < self.lower:
            raise BoundaryError('%s is less than %s' % (value, self.lower))
if self.upper is not None and value > self.upper:
            raise BoundaryError('%s is greater than %s' % (value, self.upper))
class FloatBounded(ndb.FloatProperty):
'''
    Property to define a bounded Float based on lower and upper values.
    The default of each bound is None, in which case no validation is executed.
'''
def __init__(self, lower=None, upper=None, **kwargs):
self.upper = upper
self.lower = lower
super(FloatBounded, self).__init__(**kwargs)
def _validate(self, value):
if self.lower is not None and value < self.lower:
            raise BoundaryError('%s is less than %s' % (value, self.lower))
if self.upper is not None and value > self.upper:
            raise BoundaryError('%s is greater than %s' % (value, self.upper))
class SimpleDecimal(IntegerProperty):
'''
    Class representing a Decimal. It must be used when the number of decimal places will never change.
decimal_places controls decimal places and its default is 2.
Ex: decimal_places=2 -> 1.00, decimal_places=3 -> 1.000
    Its representation in the db is an Integer constructed from its value.
Ex: decimal_places=2 -> 100, decimal_places=3 -> 1000
This is useful so queries keep numeric meaning for comparisons like > or <=
'''
def __init__(self, decimal_places=2, lower=None, upper=None, **kwargs):
self.decimal_places = decimal_places
self.__multipler = (10 ** self.decimal_places)
self.lower = lower and self._from_base_type(self._to_base_type(lower))
self.upper = upper and self._from_base_type(self._to_base_type(upper))
super(SimpleDecimal, self).__init__(**kwargs)
def _validate(self, value):
value = self._from_base_type(self._to_base_type(value))
if self.lower is not None and value < self.lower:
            raise BoundaryError('%s is less than %s' % (value, self.lower))
if self.upper is not None and value > self.upper:
            raise BoundaryError('%s is greater than %s' % (value, self.upper))
return value
def _to_base_type(self, value):
return int(round(Decimal(value) * self.__multipler))
def _from_base_type(self, value):
return Decimal(value) / self.__multipler
class SimpleCurrency(SimpleDecimal):
def __init__(self, decimal_places=2, lower=0, **kwargs):
super(SimpleCurrency, self).__init__(decimal_places=decimal_places,
lower=lower,
**kwargs)
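# Usage sketch (illustrative only): a hypothetical model showing how the properties above
# are meant to be declared; the model and field names are placeholders, and a working
# App Engine NDB environment is assumed.
class ProductExample(ndb.Model):
    name = StringBounded(min_len=3, max_len=40)  # BoundaryError outside 3..40 characters
    contact = Email()                            # BadEmailError for a malformed address
    stock = IntegerBounded(lower=0)              # negative values are rejected
    price = SimpleCurrency()                     # Decimal('1.99') is stored as the integer 199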
|
py | b404717a9d34e8a9a52fc5b5990de42453a4efb8 | # Program to check whether a given array is monotonic
# Check if given array is monotonic (entirely non-increasing or entirely non-decreasing)
def isMonotonic(A):
return (all(A[i] <= A[i + 1] for i in range(len(A) - 1)) or
all(A[i] >= A[i + 1] for i in range(len(A) - 1)))
# Driver program
A = [6, 5, 4, 4]
# Print required result
print(isMonotonic(A))
# This code is contributed by DURVESH KUMAR PAL
|
py | b40471e89182753627ccb9346374a40eab3d88ca | from regex import Regex
class Rules:
def __init__(self, alpha, path):
self.alpha = alpha
self.rules = []
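        # Each non-empty line of the rules file is expected to look like "<regex pattern> => <tag>"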
with open(path, 'r') as f:
for l in f.readlines():
l = l.strip()
if len(l) == 0:
continue
l = l.split('=>')
rx = Regex(l[0].strip(), self.alpha)
tag = l[1].strip()
self.rules.append([rx, tag])
|
py | b40473686e31c2122691962109892af241e23b53 | """
Module containing a preprocessor that removes cells if they match
one or more regular expression.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import re
from traitlets import List, Unicode
from .base import Preprocessor
class RegexRemovePreprocessor(Preprocessor):
"""
Removes cells from a notebook that match one or more regular expression.
For each cell, the preprocessor checks whether its contents match
the regular expressions in the `patterns` traitlet which is a list
of unicode strings. If the contents match any of the patterns, the cell
is removed from the notebook.
By default, `patterns = [r'\Z']` which matches the empty string such that
strictly empty cells are removed. To modify the list of matched patterns,
modify the patterns traitlet. For example, execute the following command
to convert a notebook to html and remove cells containing only whitespace:
> jupyter nbconvert --RegexRemovePreprocessor.enabled=True \
--RegexRemovePreprocessor.patterns="['\\s*\\Z']" mynotebook.ipynb
The first command line argument enables the preprocessor and the second
sets the list of patterns to '\\s*\\Z' which matches an arbitrary number
of whitespace characters followed by the end of the string.
See https://regex101.com/ for an interactive guide to regular expressions
(make sure to select the python flavor). See
https://docs.python.org/library/re.html for the official regular expression
documentation in python.
"""
patterns = List(Unicode, default_value=[r'\Z']).tag(config=True)
def check_conditions(self, cell):
"""
Checks that a cell matches the pattern and that (if a code cell)
it does not have any outputs.
Returns: Boolean.
True means cell should *not* be removed.
"""
# Compile all the patterns into one: each pattern is first wrapped
# by a non-capturing group to ensure the correct order of precedence
# and the patterns are joined with a logical or
pattern = re.compile('|'.join('(?:%s)' % pattern
for pattern in self.patterns))
# Filter out cells that meet the pattern and have no outputs
return cell.get('outputs') or not pattern.match(cell.source)
def preprocess(self, nb, resources):
"""
Preprocessing to apply to each notebook. See base.py for details.
"""
# Skip preprocessing if the list of patterns is empty
if not self.patterns:
return nb, resources
# Filter out cells that meet the conditions
nb.cells = [cell for cell in nb.cells if self.check_conditions(cell)]
return nb, resources
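# Usage sketch (illustrative only): the preprocessor is normally enabled through nbconvert's
# configuration system, but it can also be applied directly to a loaded notebook;
# 'mynotebook.ipynb' is a placeholder path.
#
#     import nbformat
#     nb = nbformat.read('mynotebook.ipynb', as_version=4)
#     nb, resources = RegexRemovePreprocessor(patterns=[r'\s*\Z']).preprocess(nb, {})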
|
py | b4047450e02ae216be3c3ba01d42dae23190b202 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mining RPCs
- getmininginfo
- getblocktemplate proposal mode
- submitblock"""
import copy
from binascii import b2a_hex
from decimal import Decimal
from test_framework.blocktools import create_coinbase
from test_framework.mininode import CBlock
from test_framework.test_framework import ElonCoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
def b2x(b):
return b2a_hex(b).decode('ascii')
def assert_template(node, block, expect, rehash=True):
if rehash:
block.hashMerkleRoot = block.calc_merkle_root()
rsp = node.getblocktemplate({'data': b2x(block.serialize()), 'mode': 'proposal'})
assert_equal(rsp, expect)
class MiningTest(ElonCoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = False
def run_test(self):
node = self.nodes[0]
self.log.info('getmininginfo')
mining_info = node.getmininginfo()
assert_equal(mining_info['blocks'], 200)
assert_equal(mining_info['chain'], 'regtest')
assert_equal(mining_info['currentblocktx'], 0)
assert_equal(mining_info['difficulty'], Decimal('0.000244140625'))
assert_equal(mining_info['networkhashps'], Decimal('17476'))
assert_equal(mining_info['pooledtx'], 0)
# Mine a block to leave initial block download
node.generate(1)
tmpl = node.getblocktemplate()
self.log.info("getblocktemplate: Test capability advertised")
assert 'proposal' in tmpl['capabilities']
assert 'coinbasetxn' not in tmpl
coinbase_tx = create_coinbase(height=int(tmpl["height"]) + 1)
# sequence numbers must not be max for nLockTime to have effect
coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
coinbase_tx.rehash()
block = CBlock()
block.nVersion = tmpl["version"]
block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
block.nTime = tmpl["curtime"]
block.nBits = int(tmpl["bits"], 16)
block.nNonce = 0
block.vtx = [coinbase_tx]
self.log.info("getblocktemplate: Test valid block")
assert_template(node, block, None)
self.log.info("submitblock: Test block decode failure")
assert_raises_rpc_error(-22, "Block decode failed", node.submitblock, b2x(block.serialize()[:-15]))
self.log.info("getblocktemplate: Test bad input hash for coinbase transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx[0].vin[0].prevout.hash += 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-cb-missing')
self.log.info("submitblock: Test invalid coinbase transaction")
assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, b2x(bad_block.serialize()))
self.log.info("getblocktemplate: Test truncated final transaction")
assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal'})
self.log.info("getblocktemplate: Test duplicate transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx.append(bad_block.vtx[0])
assert_template(node, bad_block, 'bad-txns-duplicate')
self.log.info("getblocktemplate: Test invalid transaction")
bad_block = copy.deepcopy(block)
bad_tx = copy.deepcopy(bad_block.vtx[0])
bad_tx.vin[0].prevout.hash = 255
bad_tx.rehash()
bad_block.vtx.append(bad_tx)
assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')
self.log.info("getblocktemplate: Test nonfinal transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx[0].nLockTime = 2 ** 32 - 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-txns-nonfinal')
self.log.info("getblocktemplate: Test bad tx count")
# The tx count is immediately after the block header
TX_COUNT_OFFSET = 112
bad_block_sn = bytearray(block.serialize())
assert_equal(bad_block_sn[TX_COUNT_OFFSET], 1)
bad_block_sn[TX_COUNT_OFFSET] += 1
assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(bad_block_sn), 'mode': 'proposal'})
self.log.info("getblocktemplate: Test bad bits")
bad_block = copy.deepcopy(block)
bad_block.nBits = 469762303 # impossible in the real world
assert_template(node, bad_block, 'bad-diffbits')
self.log.info("getblocktemplate: Test bad merkle root")
bad_block = copy.deepcopy(block)
bad_block.hashMerkleRoot += 1
assert_template(node, bad_block, 'bad-txnmrklroot', False)
self.log.info("getblocktemplate: Test bad timestamps")
bad_block = copy.deepcopy(block)
bad_block.nTime = 2 ** 31 - 1
assert_template(node, bad_block, 'time-too-new')
bad_block.nTime = 0
assert_template(node, bad_block, 'time-too-old')
self.log.info("getblocktemplate: Test not best block")
bad_block = copy.deepcopy(block)
bad_block.hashPrevBlock = 123
assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
if __name__ == '__main__':
MiningTest().main()
|
py | b40474a16b3fdc6feab3f337ce6bab8dd1e96a0e | import random
from pyecharts import options as opts
from pyecharts.charts import HeatMap, Page
from pyecharts.faker import Collector, Faker
C = Collector()
@C.funcs
def heatmap_base() -> HeatMap:
value = [[i, j, random.randint(0, 50)] for i in range(24) for j in range(7)]
c = (
HeatMap()
.add_xaxis(Faker.clock)
.add_yaxis("series0", Faker.week, value)
.set_global_opts(
title_opts=opts.TitleOpts(title="HeatMap-基本示例"),
visualmap_opts=opts.VisualMapOpts(),
)
)
return c
@C.funcs
def heatmap_with_label_show() -> HeatMap:
value = [[i, j, random.randint(0, 50)] for i in range(24) for j in range(7)]
c = (
HeatMap()
.add_xaxis(Faker.clock)
.add_yaxis(
"series0",
Faker.week,
value,
label_opts=opts.LabelOpts(is_show=True, position="inside"),
)
.set_global_opts(
title_opts=opts.TitleOpts(title="HeatMap-Label 显示"),
visualmap_opts=opts.VisualMapOpts(),
)
)
return c
Page().add(*[fn() for fn, _ in C.charts]).render()
|
py | b404755081e239c89a35c3a120e2224ed6808058 | '''Jackson, a math student, is developing an application on prime numbers. For the given two integers shown in the application, the user has to identify all the prime numbers within the given range (including the given values); the application will then sum all those prime numbers. Write an algorithm to find the sum of all the prime numbers in the given range.
Input Format
two integers RL and RR, each given on a separate line.
Constraints
no
Output Format
sum of the prime numbers between RL and RR.
Sample Input 0
2
10
Sample Output 0
17
Sample Input 1
45
89
Sample Output 1
682
Sample Input 2
3
12
Sample Output 2
26'''
#solution
n1 = int(input())
n2 = int(input())
count = 0
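# Trial division: n is prime when no j in [2, n-1] divides it; each prime found is added to the running total in count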
for n in range(n1,n2+1):
for j in range(2,n):
if n%j == 0:
break
else:
count+=n
print(count) |
py | b404756936be37b6a2f494b4e962d6f5688ae695 | from .tersify_mlir import *
from . import utils
|
py | b4047644746a92134f3bb8f4f324ca5e29877f5d | # Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import unittest
from copy import deepcopy
from openpaisdk.utils import OrganizedList as ol
from openpaisdk.utils import Nested
from openpaisdk.utils import randstr
from openpaisdk.io_utils import __flags__, from_file, to_screen
from openpaisdk import get_defaults, update_default, LayeredSettings
from basic_test import separated
class TestIOUtils(unittest.TestCase):
@separated
def test_reading_failures(self):
with self.assertRaises(Exception): # non existing file
from_file(randstr(8) + '.yaml')
with self.assertRaises(AssertionError): # unsupported file extension
from_file(randstr(10))
with self.assertRaises(Exception):
fname = randstr(10) + '.json'
os.system(f"touch {fname}")
from_file(fname)
@separated
def test_returning_default(self):
for dval in [[], ['a', 'b'], {}, {'a': 'b'}]:
ass_fn = self.assertListEqual if isinstance(dval, list) else self.assertDictEqual
with self.assertRaises(AssertionError): # unsupported file extension
from_file(randstr(10))
fname = randstr(8) + '.yaml'
ass_fn(from_file(fname, dval), dval) # non existing
os.system(f"echo '' > {fname}")
ass_fn(from_file(fname, dval), dval)
os.system(f"echo 'abcd' > {fname}")
ass_fn(from_file(fname, dval), dval)
class TestDefaults(unittest.TestCase):
global_default_file = __flags__.get_default_file(is_global=True)
local_default_file = __flags__.get_default_file(is_global=False)
def get_random_var_name(self):
import random
from openpaisdk import LayeredSettings
lst = [x for x in LayeredSettings.keys() if not LayeredSettings.act_append(x)]
ret = lst[random.randint(0, len(lst) - 1)]
to_screen(f"random select {ret} in {lst}")
return ret
@separated
def test_update_defaults(self):
# ! not test global defaults updating, test it in integration tests
test_key, test_value = self.get_random_var_name(), randstr(10)
# add a default key
update_default(test_key, test_value, is_global=False, to_delete=False)
self.assertEqual(get_defaults()[test_key], test_value,
msg=f"failed to check {test_key} in {LayeredSettings.as_dict()}")
# should appear in local
self.assertEqual(from_file(self.local_default_file)[test_key], test_value)
# delete
update_default(test_key, test_value, is_global=False, to_delete=True)
with self.assertRaises(KeyError):
os.system(f"cat {self.local_default_file}")
from_file(self.local_default_file, {})[test_key]
# add not allowed
test_key = randstr(10)
update_default(test_key, test_value, is_global=False, to_delete=False)
with self.assertRaises(KeyError):
from_file(self.local_default_file, {})[test_key]
@separated
def test_layered_settings(self):
from openpaisdk import LayeredSettings, __flags__
__flags__.custom_predefined = [
{
'name': 'test-key-1',
},
{
'name': 'test-key-2',
'action': 'append',
'default': []
}
]
LayeredSettings.reset()
# ? add / update append key
for test_key in ['test-key-1', 'test-key-2']:
for i, layer in enumerate(LayeredSettings.layers):
LayeredSettings.update(layer.name, test_key, i)
if layer.act_append(test_key):
self.assertTrue(isinstance(layer.values[test_key], list), msg=f"{layer.values}")
self.assertEqual(0, LayeredSettings.get('test-key-1'))
self.assertListEqual([0, 1, 2, 3], LayeredSettings.get('test-key-2'))
# ? delete
for test_key in ['test-key-1', 'test-key-2']:
for i, layer in enumerate(LayeredSettings.layers):
LayeredSettings.update(layer.name, test_key, None, delete=True)
# ? reset the predefined
__flags__.custom_predefined = []
LayeredSettings.reset()
@separated
def test_unknown_variable_defined(self):
from openpaisdk import LayeredSettings, __flags__
test_key, test_value = 'test-key-long-existing', randstr(10)
__flags__.custom_predefined = [
{
'name': test_key,
},
]
LayeredSettings.reset()
# ? add / update append key
LayeredSettings.update('local_default', test_key, test_value)
# ? reset the predefined
__flags__.custom_predefined = []
LayeredSettings.reset()
self.assertEqual(test_value, LayeredSettings.get(test_key))
# cannot delete or change the unknown variable
LayeredSettings.update('local_default', test_key, randstr(10))
LayeredSettings.reset()
self.assertEqual(test_value, LayeredSettings.get(test_key))
LayeredSettings.update('local_default', test_key, delete=True)
LayeredSettings.reset()
self.assertEqual(test_value, LayeredSettings.get(test_key))
class TestOrganizedList(unittest.TestCase):
class foo:
def __init__(self, a=None, b=None, c=None, d=None):
self.a, self.b, self.c, self.d = a, b, c, d
@property
def as_dict(self):
return {k: v for k, v in vars(self).items() if v is not None}
def update(self, other):
for key, value in other.as_dict.items():
setattr(self, key, value)
lst_objs = [foo("x", 0), foo("x", 1), foo("y", 2), foo("y", c=1), foo("z", 4)]
lst = [obj.as_dict for obj in lst_objs]
def ol_test_run(self, lst, getter):
def to_dict(obj):
return obj if isinstance(obj, dict) else obj.as_dict
dut = ol(lst[:3], "a", getter)
# find
self.assertEqual(2, dut.first_index("y"))
self.assertDictEqual(to_dict(lst[2]), to_dict(dut.first("y")))
# filter
self.assertListEqual([0, 1], dut.filter_index("x"))
self.assertListEqual(lst[:2], dut.filter("x").as_list)
# as_dict
self.assertDictEqual(dict(x=lst[1], y=lst[2]), dut.as_dict)
# add (update)
elem = lst[-2]
dut.add(elem)
self.assertEqual(2, getter(lst[2], "b"))
self.assertEqual(1, getter(lst[2], "c"))
# add (replace)
elem = lst[-2]
dut.add(elem, replace=True)
self.assertEqual(None, getter(dut[2], "b"))
# add (append)
elem = lst[-1]
dut.add(elem)
self.assertEqual(4, getter(dut[-1], "b"))
# delete
dut.remove("z")
self.assertEqual(3, len(dut))
dut.remove("z")
self.assertEqual(3, len(dut))
def test_dict(self):
self.ol_test_run(deepcopy(self.lst), dict.get)
def test_obj(self):
self.ol_test_run(deepcopy(self.lst_objs), getattr)
class TestNested(unittest.TestCase):
def test_set(self):
nested_obj = {
"a": [
{
"aa0": {
"aaa": "val_aaa"
},
},
{
"aa1": {
"aaa1": "val_aaa1"
}
}
],
"b": "haha"
}
n = Nested(nested_obj, sep="->")
self.assertEqual(n.get("a->0->aa0->aaa"), "val_aaa")
with self.assertRaises(KeyError):
nested_obj["a"][1]["aa2"]["aaa"]
n.set("a->1->aa2->aaa", "val_aaa2")
self.assertEqual(nested_obj["a"][1]["aa2"]["aaa"], "val_aaa2")
|
py | b404768996a8db245d11ff94863ff1f454ea4c5a | import xml.etree.ElementTree as et
import base64
import struct
from Spectrum import *
class MZXMLReader:
def __init__(self, fileName):
self.fileName = fileName
self.baseName = fileName[:fileName.index('.')].split('/')[-1]
def getSpectraList(self, mass, param):
fileName = self.fileName
baseName = self.baseName
basepeakInt = param['basepeakint']
dynamicRange = param['dynamicrange']
xmlObj = et.parse(fileName)
root = xmlObj.getroot()
children = root.getchildren()
children = children[0].getchildren()
spectra = []
for i in range(0, len(children)):
if children[i].tag[-4:] != 'scan':
continue
scanNum = children[i].attrib['num']
retentionTime = int(float(children[i].attrib['retentionTime'][2:-1]))
info = children[i].getchildren()
for j in range(0, len(info)):
if info[j].tag[-11:] == 'precursorMz':
ch = int(info[j].attrib['precursorCharge'])
precursorMZ = float(info[j].text)
elif info[j].tag[-5:] == 'peaks':
base64Peaklist = info[j].text
data = base64.b64decode(base64Peaklist)
if len(data) % 8 != 0:
print 'MZXMLReader: incorrect format of peak content'
numPeaks = len(data) / 8
mz = []
it = []
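                    # Peaks are packed as consecutive 8-byte pairs of big-endian 32-bit floats (m/z, intensity);
                    # bytes are reversed below so the native-order unpack reads them correctly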
for k in range(0, numPeaks):
val = data[(k * 8 + 0) : (k * 8 + 4)]
val = val[::-1]
mz.append(struct.unpack('f', val)[0])
val = data[(k * 8 + 4) : (k * 8 + 8)]
val = val[::-1]
it.append(struct.unpack('f', val)[0])
maxInt = max(it)
peaks = zip(mz, it)
peaks = filter(lambda x:x[1] >= dynamicRange * maxInt, peaks)
peaks = zip(*peaks)
mz = list(peaks[0]);
it = list(peaks[1]);
it = map(lambda x : x * basepeakInt / (maxInt), it)
title = baseName + '.' + scanNum + '.' + str(ch)
spectra.append(Spectrum(title, scanNum, precursorMZ, ch, mz, it, retentionTime, mass))
return spectra
|
py | b404778675f1792e257ae2982e279a4c15c3112b | # Special Training Graduate
medal = 1142244
if sm.canHold(medal):
sm.chatScript("You obtained the <Special Training Graduate> medal.")
sm.giveItem(medal)
sm.startQuest(parentID)
sm.completeQuest(parentID) |
py | b40477a10b940b21dd6a2a00b8c1cde5a4289555 | #!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test digidinar-cli"""
from test_framework.test_framework import DigidinarTestFramework
from test_framework.util import assert_equal, assert_raises_process_error, get_auth_cookie
import time
class TestBitcoinCli(DigidinarTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
"""Main test logic"""
self.log.info("Sleeping 30 seconds...")
time.sleep(30)
self.log.info("Compare responses from gewalletinfo RPC and `digidinar-cli getwalletinfo`")
cli_response = self.nodes[0].cli.getwalletinfo()
rpc_response = self.nodes[0].getwalletinfo()
assert_equal(cli_response, rpc_response)
self.log.info("Compare responses from getblockchaininfo RPC and `digidinar-cli getblockchaininfo`")
cli_response = self.nodes[0].cli.getblockchaininfo()
rpc_response = self.nodes[0].getblockchaininfo()
assert_equal(cli_response, rpc_response)
user, password = get_auth_cookie(self.nodes[0].datadir)
self.log.info("Compare responses from `digidinar-cli -getinfo` and the RPCs data is retrieved from.")
cli_get_info = self.nodes[0].cli('getinfo').send_cli()
wallet_info = self.nodes[0].getwalletinfo()
network_info = self.nodes[0].getnetworkinfo()
blockchain_info = self.nodes[0].getblockchaininfo()
assert_equal(cli_get_info['version'], network_info['version'])
assert_equal(cli_get_info['protocolversion'], network_info['protocolversion'])
assert_equal(cli_get_info['walletversion'], wallet_info['walletversion'])
assert_equal(cli_get_info['balance'], wallet_info['balance'])
assert_equal(cli_get_info['blocks'], blockchain_info['blocks'])
assert_equal(cli_get_info['timeoffset'], network_info['timeoffset'])
assert_equal(cli_get_info['connections'], network_info['connections'])
assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy'])
assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty'])
assert_equal(cli_get_info['testnet'], blockchain_info['chain'] == "test")
assert_equal(cli_get_info['balance'], wallet_info['balance'])
assert_equal(cli_get_info['keypoololdest'], wallet_info['keypoololdest'])
assert_equal(cli_get_info['keypoolsize'], wallet_info['keypoolsize'])
assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee'])
assert_equal(cli_get_info['relayfee'], network_info['relayfee'])
# unlocked_until is not tested because the wallet is not encrypted
if __name__ == '__main__':
TestBitcoinCli().main()
|
py | b40477b0b0f245f1418fa34ab7f0b2ba437cd865 | import pathlib
import requests
import pprint
"""
This is a simple Weather script that will retrieve weather details of a specific place
using the OpenWeathermap API.
link -> https://openweathermap.org/current
"""
def get_api_token():
"""Read the API token from the file contaiing the token"""
token_file = pathlib.Path(__file__).parent / "api.token"
with open(token_file) as f:
token = f.read().strip()
return token
def get_current_weather(TOKEN, query, unit):
"""
    Retrieve data from the API for the given query and unit system, and return a
    simplified dictionary if the response was successful (None otherwise)
"""
URL = f"https://api.openweathermap.org/data/2.5/weather"
parameters = {"q" : query, "units" : unit, "appid": TOKEN}
request = requests.get(URL, params = parameters)
if request.status_code != 200:
return None
response = request.json()
main = response["main"]
description = response["weather"][0]["description"]
wind = response["wind"]
return {
"name" : query.title(),
"Main" : main,
"details": description,
"wind" : wind
}
def display_current_weather(TOKEN, query, degrees, unit):
"""
Retrieving the data out of the simplified dictionary,
and formatting it in the formatted_string
"""
# Getting the data from the api
data = get_current_weather(TOKEN, query, unit[0])
# Retrieving the values from the parsed API data
wind = data["wind"]
main = data["Main"]
name = data["name"]
details = data["details"]
temp = main["temp"]
feels_like = main["feels_like"]
high, low = main["temp_max"], main["temp_min"]
humidity = main["humidity"]
wind_speed = wind["speed"]
wind_direction = wind["deg"]
# Entering the values in a nice formatted string
formatted_text = f"""
Place: {name}
{details.title()}
Currently {temp}{degrees}, feels like {feels_like}{degrees} with a high of {high}{degrees} and a low of {low}{degrees}.
    Humidity is at {humidity}%
Wind is at a speed of {wind_speed} {unit[1]}
Direction: {wind_direction}⁰
"""
print(formatted_text)
def main():
TOKEN = get_api_token()
    # Setting an imperial and metric measurement dict for the convenience of the user.
units = {"f": ("imperial", "Mph"), "c":("metric", "Km/h")}
choice = "y"
while choice == "y":
unit_query = input("\nEnter a unit of tempreature [C]elcius, [F]arenheit: ").lower()
        # Retrieving the unit of measurement with .get() so no error is thrown
unit = units.get(unit_query, None)
if not unit:
print("Im sorry but that unit of measurment isnt available")
continue
city_query = input("\nEnter a city: ")
print("----------------"+ "-" * len(city_query))
display_current_weather(TOKEN, city_query, ("⁰" + unit_query), unit)
choice = input("Do you want to continue getting other weather reports? [Y]es or any other character to quit: ").lower()
print("Done!")
if __name__ == "__main__":
main() |
py | b40478923adcf6e1ba9f9275e9ac546af9e83380 | import json
import os
import time
from enum import Enum
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.common.exceptions import (
ElementNotInteractableException, NoSuchElementException, TimeoutException, WebDriverException
)
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from dynamicforms.dynamicforms_migration_mixin import add_filter, add_page_load, add_relation
from dynamicforms.settings import DYNAMICFORMS
MAX_WAIT = 10
MAX_WAIT_ALERT = 5
class Browsers(Enum):
FIREFOX = 'FIREFOX'
CHROME = 'CHROME'
OPERA = 'OPERA'
EDGE = 'EDGE'
SAFARI = 'SAFARI'
IE = 'INTERNETEXPLORER'
# noinspection PyMethodMayBeStatic
class WaitingStaticLiveServerTestCase(StaticLiveServerTestCase):
# host = '0.0.0.0'
binary_location = ''
github_actions = False
def get_browser(self, opts=None):
if self.selected_browser == Browsers.FIREFOX:
return webdriver.Firefox(options=opts)
elif self.selected_browser == Browsers.CHROME:
return webdriver.Chrome(options=opts)
elif self.selected_browser == Browsers.OPERA:
return webdriver.Opera(options=opts)
elif self.selected_browser == Browsers.EDGE:
return webdriver.Edge()
elif self.selected_browser == Browsers.SAFARI:
return webdriver.Safari()
elif self.selected_browser == Browsers.IE:
return webdriver.Ie(options=opts)
self.selected_browser = Browsers.FIREFOX
return webdriver.Firefox(options=opts)
def get_browser_options(self, opts):
if not opts:
return None
if self.selected_browser == Browsers.FIREFOX:
from selenium.webdriver.firefox.options import Options
elif self.selected_browser == Browsers.CHROME:
from selenium.webdriver.chrome.options import Options
elif self.selected_browser == Browsers.OPERA:
from selenium.webdriver.opera.options import Options
elif self.selected_browser == Browsers.EDGE:
from selenium.webdriver.edge.options import Options
elif self.selected_browser == Browsers.IE:
from selenium.webdriver.ie.options import Options
else: # Safari doesn't have Options
return None
options = Options()
opts = json.loads(opts)
for key, val in opts.items():
setattr(options, key, val)
return options
def setUp(self):
# When running tests through github actions sometimes tables are empty, even though they are filled up in
# migrations initialisation
from examples.models import Filter, PageLoad, Relation
if Filter.objects.count() == 0:
add_filter(None, None)
if PageLoad.objects.count() == 0:
add_page_load(None, None)
if Relation.objects.count() == 0:
add_relation(None, None)
self.github_actions = os.environ.get('GITHUB_ACTIONS', False)
# first parameter: remote server
# second parameter: "my" server
# remote_selenium = 'MAC-SERVER:4444,myserver,SAFARI'
# remote_selenium = 'WIN-SERVER:4444,myserver,FIREFOX|{"binary_location": "C:\\\\Program Files\\\\Mozilla
# Firefox\\\\firefox.exe"{comma} "headless": true}'
remote_selenium = os.environ.get('REMOTE_SELENIUM', ',')
# first parameter: selected browser
# second parameter (optional): browser options in JSON format.
browser_selenium = os.environ.get('BROWSER_SELENIUM', ';')
# browser_selenium = 'CHROME;{"no-sandbox": true, "window-size": "1420,1080", "headless": true, ' \
# '"disable-gpu": true}'
# browser_selenium = 'FIREFOX;{"headless": true, ' \
# '"binary_location": "C:\\\\Program Files\\\\Mozilla Firefox\\\\firefox.exe"}'
browser_options = browser_selenium.split(';', 1)
browser = browser_options[0]
if browser:
self.selected_browser = Browsers(browser)
else:
self.selected_browser = Browsers.FIREFOX
        # Below is an attempt to load the drivers for EDGE... The installation itself succeeded, but I still get
        # selenium.common.exceptions.WebDriverException: Message: Unknown error
        #
        # I will wait until issue https://github.com/actions/virtual-environments/issues/99 is resolved first
#
# if self.github_actions and self.selected_browser == Browsers.EDGE:
# import sys
# driver_file = sys.exec_prefix + "\\Scripts\\msedgedriver.exe"
# if not os.path.isfile(driver_file):
# win_temp = os.environ.get('TEMP', '') + '\\'
# import urllib.request
# urllib.request.urlretrieve("https://msedgedriver.azureedge.net/81.0.394.0/edgedriver_win64.zip",
# win_temp + "edgedriver_win64.zip")
# import zipfile
# with zipfile.ZipFile(win_temp + "edgedriver_win64.zip", 'r') as zip_ref:
# zip_ref.extractall(win_temp)
# from shutil import copyfile
# copyfile(win_temp + "msedgedriver.exe", sys.exec_prefix + "\\Scripts\\msedgedriver.exe")
#
# urllib.request.urlretrieve("https://download.microsoft.com/download/F/8/A/"
# "F8AF50AB-3C3A-4BC4-8773-DC27B32988DD/MicrosoftWebDriver.exe",
# win_temp + "MicrosoftWebDriver.exe")
# copyfile(win_temp + "MicrosoftWebDriver.exe", sys.exec_prefix + "\\Scripts\\MicrosoftWebDriver.exe")
opts = None
try:
opts = self.get_browser_options(browser_options[1])
except:
pass
remote, this_server = remote_selenium.split(',')
if remote:
self.browser = webdriver.Remote(
command_executor='http://{remote}/wd/hub'.format(remote=remote),
desired_capabilities=dict(javascriptEnabled=True, **getattr(webdriver.DesiredCapabilities, browser)),
options=opts
)
olsu = self.live_server_url
self.live_server_url = 'http://{this_server}:{port}'.format(this_server=this_server,
port=self.live_server_url.split(':')[2])
print('Listen: ', olsu, ' --> Remotely accessible on: ', self.live_server_url)
else:
self.browser = self.get_browser(opts)
def tearDown(self):
self.browser.refresh()
self.browser.quit()
def wait_for_new_element(self, element_id):
start_time = time.time()
while True:
try:
time.sleep(0.01)
element = self.browser.find_element_by_id(element_id)
self.assertIsNotNone(element)
return
except (AssertionError, WebDriverException) as e:
if time.time() - start_time > MAX_WAIT:
raise e
def wait_for_modal_dialog(self, old_id=None):
start_time = time.time()
while True:
try:
time.sleep(0.01)
element = None
for el in self.browser.find_elements_by_class_name('modal'):
if el.is_displayed():
element = el
break
self.assertIsNotNone(element)
element_id = element.get_attribute('id')
if old_id and element_id == "dialog-{old_id}".format(**locals()):
# The new dialog must not have same id as the old one
# if it does, this means that we're still looking at the old dialog - let's wait for it to go away
continue
self.assertTrue(element_id.startswith('dialog-'))
element_id = element_id.split('-', 1)[1]
# this is a dialog - let's wait for its animations to stop
try:
WebDriverWait(driver=self.browser, timeout=10, poll_frequency=0.2).until(EC.element_to_be_clickable(
(By.CLASS_NAME, 'ui-button' if DYNAMICFORMS.jquery_ui else 'btn'))
)
except TimeoutException as e:
# dialog not ready yet or we found a bad dialog with no buttons
if time.time() - start_time > MAX_WAIT:
raise e
continue
return element, element_id
except (AssertionError, WebDriverException) as e:
if time.time() - start_time > MAX_WAIT:
raise e
def wait_for_modal_dialog_disapear(self, dialog_id):
start_time = time.time()
while True:
try:
time.sleep(0.01)
if self.browser.find_element_by_id('dialog-{dialog_id}'.format(**locals())) is None:
break
self.assertFalse(time.time() - start_time > MAX_WAIT)
except WebDriverException:
break
def get_alert(self, wait_time=None):
if not wait_time:
wait_time = MAX_WAIT_ALERT
WebDriverWait(self.browser, wait_time).until(EC.alert_is_present(), 'No alert dialog.')
alert = self.browser.switch_to.alert
return alert
# noinspection PyMethodMayBeStatic
def check_error_text(self, dialog):
error_text = None
try:
error = dialog.find_element_by_class_name('text-danger')
if error is not None:
error_text = error.get_attribute('innerHTML')
except WebDriverException:
pass
return error_text
def initial_check(self, field, fld_text, fld_name, fld_type):
self.assertEqual(self.get_element_text(field), fld_text)
self.assertEqual(field.get_attribute('name'), fld_name)
field_type = field.get_attribute('type')
if isinstance(fld_type, tuple):
self.assertIn(field_type, fld_type)
else:
self.assertEqual(field_type, fld_type)
return field_type
def get_table_body(self, whole_table=False, expected_rows: int = None):
start_time = time.time()
body = None
while True:
for cls in ['card-body', 'panel-body', 'ui-accordion-content']:
try:
body = self.browser.find_element_by_class_name(cls)
if body:
break
except NoSuchElementException:
self.assertFalse(time.time() - start_time > MAX_WAIT, 'Wait time exceeded for table to appear')
time.sleep(0.01)
if body:
break
table = body.find_element_by_tag_name('table')
if whole_table:
return table
while True:
tbody = table.find_element_by_tag_name('tbody')
rows = tbody.find_elements_by_tag_name('tr')
if expected_rows is not None and len(rows) != expected_rows:
self.assertFalse(time.time() - start_time > MAX_WAIT, 'Wait time exceeded for table rows to appear')
time.sleep(0.01)
continue
else:
break
return rows
def select_option_for_select2(self, driver, element_id, text=None):
element = driver.find_element_by_xpath("//*[@id='{element_id}']/following-sibling::*[1]".format(**locals()))
element.click()
if text:
element = element.parent.switch_to.active_element
element.send_keys(text)
try:
element.send_keys(Keys.ENTER)
except ElementNotInteractableException:
actions = ActionChains(driver)
a = actions.move_to_element_with_offset(element, 50, 30)
a.send_keys(Keys.ENTER)
a.perform()
def check_row(self, row, cell_cnt, cell_values):
cells = row.find_elements_by_tag_name('td')
self.assertEqual(len(cells), cell_cnt)
for i in range(len(cell_values)):
if cell_values[i] is not None:
self.assertEqual(self.get_element_text(cells[i]), cell_values[i])
return cells
def get_current_url(self):
time.sleep(0.05)
return self.browser.current_url
def update_edge_field(self, field_id, value):
self.browser.execute_script('''
$('#%s').val('%s');
dynamicforms.fieldChange('%s', 'final');
''' % (field_id, value, field_id))
@staticmethod
def get_field_id_by_name(dialog, name):
return dialog.find_element_by_name(name).get_attribute('id')
@staticmethod
def get_tag_name(el):
return el.tag_name.lower()
@staticmethod
def get_element_text(el):
return el.text.strip()
|
py | b40478b87ada8e5975f60be608a48313d7cf4a6a | import os
from ddb.__main__ import main
import zipfile
class TestDjp:
def test_djp(self, project_loader):
project_loader("djp")
with zipfile.ZipFile('template.zip', 'r') as zip_ref:
zip_ref.extractall('expected')
main(["download"])
assert os.path.exists('.docker/djp/Dockerfile.jinja')
assert os.path.exists('.docker/djp/djp.libjsonnet')
with open('.docker/djp/djp.libjsonnet') as djp_libjsonnet:
assert not '// Edited\n' in djp_libjsonnet.readlines()
def test_djp_patch(self, project_loader):
project_loader("djp_patch")
main(["download"])
assert os.path.exists('.docker/djp/Dockerfile.jinja')
assert os.path.exists('.docker/djp/djp.libjsonnet')
with open('.docker/djp/Dockerfile.jinja') as djp_libjsonnet:
assert 'replaced' not in djp_libjsonnet.read()
with open('.docker/djp/djp.libjsonnet') as djp_libjsonnet:
assert '// Edited\n' in djp_libjsonnet.readlines()
|
py | b404794ffa44f2db4f0fa8b35c8cfe44dfffb87d | from __future__ import absolute_import
from willow.image import (
Image,
JPEGImageFile,
PNGImageFile,
GIFImageFile,
BMPImageFile,
TIFFImageFile,
RGBImageBuffer,
RGBAImageBuffer,
)
def _PIL_Image():
import PIL.Image
return PIL.Image
class PillowImage(Image):
def __init__(self, image):
self.image = image
@classmethod
def check(cls):
_PIL_Image()
@Image.operation
def get_size(self):
return self.image.size
@Image.operation
def has_alpha(self):
img = self.image
return img.mode in ('RGBA', 'LA') or (img.mode == 'P' and 'transparency' in img.info)
@Image.operation
def has_animation(self):
# Animation is not supported by PIL
return False
@Image.operation
def resize(self, size):
# Convert 1 and P images to RGB to improve resize quality
# (palleted images don't get antialiased or filtered when minified)
if self.image.mode in ['1', 'P']:
if self.has_alpha():
image = self.image.convert('RGBA')
else:
image = self.image.convert('RGB')
else:
image = self.image
return PillowImage(image.resize(size, _PIL_Image().ANTIALIAS))
@Image.operation
def crop(self, rect):
return PillowImage(self.image.crop(rect))
@Image.operation
def set_background_color_rgb(self, color):
if not self.has_alpha():
# Don't change image that doesn't have an alpha channel
return self
# Check type of color
if not isinstance(color, (tuple, list)) or not len(color) == 3:
raise TypeError("the 'color' argument must be a 3-element tuple or list")
# Convert non-RGB colour formats to RGB
# As we only allow the background color to be passed in as RGB, we
# convert the format of the original image to match.
image = self.image.convert('RGBA')
# Generate a new image with background colour and draw existing image on top of it
# The new image must temporarily be RGBA in order for alpha_composite to work
new_image = _PIL_Image().new('RGBA', self.image.size, (color[0], color[1], color[2], 255))
if hasattr(new_image, 'alpha_composite'):
new_image.alpha_composite(image)
else:
# Pillow < 4.2.0 fallback
# This method may be slower as the operation generates a new image
new_image = _PIL_Image().alpha_composite(new_image, image)
return PillowImage(new_image.convert('RGB'))
@Image.operation
def save_as_jpeg(self, f, quality=85, optimize=False, progressive=False):
if self.image.mode in ['1', 'P']:
image = self.image.convert('RGB')
else:
image = self.image
# Pillow only checks presence of optimize kwarg, not its value
kwargs = {}
if optimize:
kwargs['optimize'] = True
if progressive:
kwargs['progressive'] = True
image.save(f, 'JPEG', quality=quality, **kwargs)
return JPEGImageFile(f)
@Image.operation
def save_as_png(self, f, optimize=False):
# Pillow only checks presence of optimize kwarg, not its value
kwargs = {}
if optimize:
kwargs['optimize'] = True
self.image.save(f, 'PNG', **kwargs)
return PNGImageFile(f)
@Image.operation
def save_as_gif(self, f):
image = self.image
# All gif files use either the L or P mode but we sometimes convert them
# to RGB/RGBA to improve the quality of resizing. We must make sure that
# they are converted back before saving.
if image.mode not in ['L', 'P']:
image = image.convert('P', palette=_PIL_Image().ADAPTIVE)
if 'transparency' in image.info:
image.save(f, 'GIF', transparency=image.info['transparency'])
else:
image.save(f, 'GIF')
return GIFImageFile(f)
@Image.operation
def auto_orient(self):
# JPEG files can be orientated using an EXIF tag.
# Make sure this orientation is applied to the data
image = self.image
if hasattr(image, '_getexif'):
try:
exif = image._getexif()
except Exception:
# Blanket cover all the ways _getexif can fail in.
exif = None
if exif is not None:
# 0x0112 = Orientation
orientation = exif.get(0x0112, 1)
if 1 <= orientation <= 8:
Image = _PIL_Image()
ORIENTATION_TO_TRANSPOSE = {
1: (),
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.ROTATE_180, Image.FLIP_LEFT_RIGHT),
5: (Image.ROTATE_270, Image.FLIP_LEFT_RIGHT),
6: (Image.ROTATE_270,),
7: (Image.ROTATE_90, Image.FLIP_LEFT_RIGHT),
8: (Image.ROTATE_90,),
}
for transpose in ORIENTATION_TO_TRANSPOSE[orientation]:
image = image.transpose(transpose)
return PillowImage(image)
@Image.operation
def get_pillow_image(self):
return self.image
@classmethod
@Image.converter_from(JPEGImageFile)
@Image.converter_from(PNGImageFile)
@Image.converter_from(GIFImageFile, cost=200)
@Image.converter_from(BMPImageFile)
@Image.converter_from(TIFFImageFile)
def open(cls, image_file):
image_file.f.seek(0)
image = _PIL_Image().open(image_file.f)
image.load()
return cls(image)
@Image.converter_to(RGBImageBuffer)
def to_buffer_rgb(self):
image = self.image
if image.mode != 'RGB':
image = image.convert('RGB')
return RGBImageBuffer(image.size, image.tobytes())
@Image.converter_to(RGBAImageBuffer)
def to_buffer_rgba(self):
image = self.image
if image.mode != 'RGBA':
image = image.convert('RGBA')
return RGBAImageBuffer(image.size, image.tobytes())
willow_image_classes = [PillowImage]
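# Usage sketch (illustrative only): the file paths are placeholders and Pillow must be installed.
#
#     with open('photo.jpg', 'rb') as f:
#         img = PillowImage.open(JPEGImageFile(f))
#     img = img.auto_orient().resize((320, 240))
#     with open('thumb.png', 'wb') as out:
#         img.save_as_png(out)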
|
py | b4047a6effdba457f664969c507871e50f3553d8 | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# Modified by Gustav Holm ([email protected]) & Francis J. Gomez ([email protected])
# Modified date: 05/06/2017
from CIM100.Element import Element
class IEC61970CIMVersion(Element):
"""This is the IEC 61970 CIM version number assigned to this UML model file.This is the IEC 61970 CIM version number assigned to this UML model file.
"""
def __init__(self, date='', version='', *args, **kw_args):
"""Initialises a new 'IEC61970CIMVersion' instance.
@param date: Form is YYYY-MM-DD for example for January 5, 2009 it is 2009-01-05.
        @param version: Form is IEC61970CIMXXvYY where XX is the major CIM package version and the YY is the minor version. For example IEC61970CIM13v18.
"""
#: Form is YYYY-MM-DD for example for January 5, 2009 it is 2009-01-05.
self.date = date
        #: Form is IEC61970CIMXXvYY where XX is the major CIM package version and the YY is the minor version. For example IEC61970CIM13v18.
self.version = version
super(IEC61970CIMVersion, self).__init__(*args, **kw_args)
_attrs = ["date", "version"]
_attr_types = {"date": str, "version": str}
_defaults = {"date": '', "version": ''}
_enums = {}
_refs = []
_many_refs = []
|
py | b4047ab58d48f6bcb9e18d72964cf358f23c95ac | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.experimental.core.task."""
import tensorflow as tf
from tfx.orchestration.experimental.core import task as task_lib
from tfx.orchestration.experimental.core import test_utils
from tfx.orchestration.portable import test_utils as tu
from tfx.proto.orchestration import pipeline_pb2
class TaskTest(tu.TfxTest):
def test_node_uid_from_pipeline_node(self):
pipeline = pipeline_pb2.Pipeline()
pipeline.pipeline_info.id = 'pipeline'
pipeline.runtime_spec.pipeline_run_id.field_value.string_value = 'run0'
node = pipeline_pb2.PipelineNode()
node.node_info.id = 'Trainer'
self.assertEqual(
task_lib.NodeUid(
pipeline_uid=task_lib.PipelineUid(
pipeline_id='pipeline', pipeline_run_id='run0'),
node_id='Trainer'),
task_lib.NodeUid.from_pipeline_node(pipeline, node))
def test_task_type_ids(self):
self.assertEqual('ExecNodeTask', task_lib.ExecNodeTask.task_type_id())
self.assertEqual('CancelNodeTask', task_lib.CancelNodeTask.task_type_id())
def test_task_ids(self):
pipeline_uid = task_lib.PipelineUid(
pipeline_id='pipeline', pipeline_run_id='run0')
node_uid = task_lib.NodeUid(pipeline_uid=pipeline_uid, node_id='Trainer')
exec_node_task = test_utils.create_exec_node_task(node_uid)
self.assertEqual(('ExecNodeTask', node_uid), exec_node_task.task_id)
cancel_node_task = task_lib.CancelNodeTask(node_uid=node_uid)
self.assertEqual(('CancelNodeTask', node_uid), cancel_node_task.task_id)
if __name__ == '__main__':
tf.test.main()
|
py | b4047b3d219021a0a06da07bd4d858eea6b85f1e | import csv
import random
# open csv file
with open('iris.csv', mode='r') as infile:
reader = csv.reader(infile)
csvlist = list(reader)
# create sample: [[answer, [feature 1, ...]]]
samples = list()
maxFeature = [0, 0, 0, 0];
for rows in csvlist:
features = [rows[0], rows[1] , rows[2], rows[3]]
maxFeature[0] = max([maxFeature[0], rows[0]]);
maxFeature[1] = max([maxFeature[1], rows[1]]);
maxFeature[2] = max([maxFeature[2], rows[2]]);
maxFeature[3] = max([maxFeature[3], rows[3]]);
if rows[4] == "setosa":
samples.append([0.333333, features])
elif rows[4] == "versicolor":
samples.append([0.666666, features])
else:
samples.append([1.000000, features])
# normalization
i = 0
for rows in samples:
j = 0
for feature in samples[i][1]:
samples[i][1][j] = float(feature) / float(maxFeature[j])
j += 1
i += 1
# random candidate
predictList = list()
i = 0
while i < 10:
    r = random.randint(0, len(samples) - 1)  # randint is inclusive at both ends; avoid an IndexError at len(samples)
    predictList.append((samples[r][0], samples[r][1]))
    del samples[r]
    i += 1
print 'Sample Length = ' + str(len(samples))
print 'Predict Length = ' + str(len(predictList))
from pyspark.mllib.regression import LabeledPoint
from pyspark.mllib.classification import NaiveBayes
from pyspark import SparkContext
sc = SparkContext("local", "Simple App")
# create labels
labels = list()
i = 0
for sample in samples:
labels.append(LabeledPoint(sample[0], sample[1]))
i += 1
# training model
data = sc.parallelize(labels)
model = NaiveBayes.train(data, 1.0)
# go go go
correct=0
i = 1
for predict in predictList:
answer = model.predict(predict[1])
print str(i) + ' -> ' + str(predict[0]) + ' = ' + str(answer)
if answer == predict[0]:
correct += 1
i += 1
print 'Accuracy = ' + str(float(correct) / float(len(predictList)) * 100) + '%'
|
py | b4047ba06ce4197ace9a77b53f3ef640a3fdb4cf | #! /usr/bin/env python
import tensorflow as tf
def train():
pass
if __name__ == "__main__":
pass
|
py | b4047c708d45756f4626720d04fcfe2f40804931 | from tkinter import Tk, Canvas
import time
import math
import config_values
TEAM_SIZE = config_values.TEAM_SIZE
lines_color = "#fff"
field_color = "#0b0"
goal_color = "#dd0"
red_color = "#f00"
blue_color = "#0cf"
ball_color = "#000"
lines_width = 2
x_margin = 30
y_margin = 30
# measurements in yards, scaled to pixels by factor 10, might adjust later for variable factor
# IMPORTANT: width is x-axis on screen, height is y-axis
field_width = 1200
field_height = 750
center_radius = 100
corner_radius = 10
penalty_area_width = 180
penalty_area_height = 440
penalty_arc_radius = 120
penalty_arc_center = 120
goal_area_width = 60
goal_area_height = 120
goal_height = 80
goal_width = min(x_margin-10, 20)
player_radius = 10
ball_radius = 6
# player positions
red_pos = [(i, i) for i in range(10, 120, 10)]
blue_pos = [(i, i) for i in range(130, 240, 10)]
ball_pos = (x_margin+field_width//2, y_margin+field_height//2)
# player canvas objects
red_players = [None]*TEAM_SIZE
blue_players = [None]*TEAM_SIZE
ball = None
def windowDims():
w = field_width + x_margin*2
h = field_height + y_margin*2
return "{}x{}".format(w, h)
def drawField(canvas):
# draw the background and boundary
canvas.create_rectangle(x_margin, y_margin, x_margin+field_width, y_margin+field_height, outline=lines_color, fill=field_color, width=lines_width)
# draw the half line
canvas.create_line(x_margin+field_width//2, y_margin, x_margin+field_width//2, y_margin+field_height, fill=lines_color, width=lines_width)
# draw the centre circle
canvas.create_oval(x_margin+field_width//2-center_radius, y_margin+field_height//2-center_radius, x_margin+field_width//2+center_radius, y_margin+field_height//2+center_radius, outline = lines_color, width = lines_width)
# draw the corner arcs, top left, top right, bottom left, bottom right
canvas.create_arc(x_margin-corner_radius, y_margin-corner_radius, x_margin+corner_radius, y_margin+corner_radius, start=270, extent=90, outline=lines_color, width=lines_width, style='arc')
canvas.create_arc(x_margin+field_width-corner_radius, y_margin-corner_radius, x_margin+field_width+corner_radius, y_margin+corner_radius, start=180, extent=90, outline=lines_color, width=lines_width, style='arc')
canvas.create_arc(x_margin-corner_radius, y_margin+field_height-corner_radius, x_margin+corner_radius, y_margin+field_height+corner_radius, start=0, extent=90, outline=lines_color, width=lines_width, style='arc')
canvas.create_arc(x_margin+field_width-corner_radius, y_margin+field_height-corner_radius, x_margin+field_width+corner_radius, y_margin+field_height+corner_radius, start=90, extent=90, outline=lines_color, width=lines_width, style='arc')
# draw the penalty arcs, left side, right side
canvas.create_arc(x_margin+penalty_arc_center-penalty_arc_radius, y_margin+field_height//2-penalty_arc_radius, x_margin+penalty_arc_center+penalty_arc_radius, y_margin+field_height//2+penalty_arc_radius, start=270, extent=180, outline=lines_color, width=lines_width, style='arc')
canvas.create_arc(x_margin+field_width-penalty_arc_center-penalty_arc_radius, y_margin+field_height//2-penalty_arc_radius, x_margin+field_width-penalty_arc_center+penalty_arc_radius, y_margin+field_height//2+penalty_arc_radius, start=90, extent=180, outline=lines_color, width=lines_width, style='arc')
# draw the penalty areas, left side, right side
canvas.create_rectangle(x_margin, y_margin+field_height//2-penalty_area_height//2, x_margin+penalty_area_width, y_margin+field_height//2+penalty_area_height//2, fill=field_color, outline=lines_color, width=lines_width)
canvas.create_rectangle(x_margin+field_width-penalty_area_width, y_margin+field_height//2-penalty_area_height//2, x_margin+field_width, y_margin+field_height//2+penalty_area_height//2, fill=field_color, outline=lines_color, width=lines_width)
#draw the goal areas, left side, right side
canvas.create_rectangle(x_margin, y_margin+field_height//2-goal_area_height//2, x_margin+goal_area_width, y_margin+field_height//2+goal_area_height//2, outline=lines_color, width=lines_width)
canvas.create_rectangle(x_margin+field_width-goal_area_width, y_margin+field_height//2-goal_area_height//2, x_margin+field_width, y_margin+field_height//2+goal_area_height//2, outline=lines_color, width=lines_width)
#draw the goals, left side, right side
canvas.create_rectangle(x_margin-goal_width, y_margin+field_height//2-goal_height//2, x_margin, y_margin+field_height//2+goal_height//2, fill=goal_color, outline=lines_color, width=lines_width)
canvas.create_rectangle(x_margin+field_width, y_margin+field_height//2-goal_height//2, x_margin+field_width+goal_width, y_margin+field_height//2+goal_height//2, fill=goal_color, outline=lines_color, width=lines_width)
canvas.pack(fill="both", expand=True)
def initialize_players(canvas):
global red_players, blue_players, ball
for i in range(TEAM_SIZE):
px, py = red_pos[i]
red_players[i] = canvas.create_oval(x_margin+px-player_radius, y_margin+py-player_radius, x_margin+px+player_radius, y_margin+py+player_radius, fill=red_color)
for i in range(TEAM_SIZE):
px, py = blue_pos[i]
blue_players[i] = canvas.create_oval(x_margin+px-player_radius, y_margin+py-player_radius, x_margin+px+player_radius, y_margin+py+player_radius, fill=blue_color)
ball = canvas.create_oval(ball_pos[0]-ball_radius, ball_pos[1]-ball_radius, ball_pos[0]+ball_radius, ball_pos[1]+ball_radius, fill=ball_color)
def update_positions(team_red, team_blue, new_ball_pos, canvas): # team_red/team_blue: lists of (x, y) positions; new_ball_pos: (x, y) in field coordinates
global red_players, red_pos, blue_players, blue_pos, ball, ball_pos
for i in range(TEAM_SIZE):
old_x, old_y = red_pos[i]
new_x, new_y = team_red[i]
diff_x, diff_y = new_x-old_x, new_y-old_y
canvas.move(red_players[i], diff_x, diff_y)
red_pos[i] = (new_x, new_y)
for i in range(TEAM_SIZE):
old_x, old_y = blue_pos[i]
new_x, new_y = team_blue[i]
diff_x, diff_y = new_x-old_x, new_y-old_y
canvas.move(blue_players[i], diff_x, diff_y)
blue_pos[i] = (new_x, new_y)
old_x, old_y = ball_pos
new_x, new_y = new_ball_pos
new_x, new_y = new_x+x_margin, new_y+y_margin
diff_x, diff_y = new_x-old_x, new_y-old_y
mod = math.sqrt(diff_x*diff_x + diff_y*diff_y) # straight-line distance the ball has to travel
c_x, c_y = old_x, old_y
speed = 10
while ((c_x != new_x) or (c_y != new_y)):
unit_vec = (diff_x / mod, diff_y / mod)
step_x = unit_vec[0] * speed
step_y = unit_vec[1] * speed
if c_x == new_x:
step_x = 0
if c_y == new_y:
step_y = 0
if ((c_x>new_x) and (c_x+step_x < new_x)) or ((c_x < new_x) and (c_x + step_x) > new_x):
step_x = new_x - c_x
c_x = new_x
else:
c_x = c_x + step_x
if ((c_y>new_y) and (c_y+step_y < new_y)) or ((c_y < new_y) and (c_y + step_y) > new_y):
step_y = new_y - c_y
c_y = new_y
else:
c_y = c_y + step_y
ball_pos = (c_x, c_y)
canvas.move(ball, step_x, step_y)
canvas.update()
time.sleep(0.01)
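# Hedged usage sketch (added for illustration; not part of the original file).
# It assumes this module already imports tkinter (e.g. `import tkinter as tk`)
# and defines TEAM_SIZE plus the colour constants (field_color, lines_color,
# goal_color, red_color, blue_color, ball_color) further up the file.
#
# if __name__ == "__main__":
#     root = tk.Tk()
#     root.geometry(windowDims())
#     canvas = tk.Canvas(root, width=field_width + 2*x_margin, height=field_height + 2*y_margin)
#     drawField(canvas)
#     initialize_players(canvas)
#     # slide the ball to a new field position (margins are added inside update_positions)
#     update_positions(red_pos, blue_pos, (field_width//2 + 100, field_height//2), canvas)
#     root.mainloop()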
|
py | b4047d7a8365a908f8ec116983c93d855b1ac7f9 | # -*- coding: utf-8 -*-
"""Rendering utility"""
import decimal
import locale
DEFAULT_PRECISION = 4
# create a new context for this task
ctx = decimal.Context()
# Lowering precision from the system default (28) can allow additional control over display
ctx.prec = DEFAULT_PRECISION
def num_to_str(f, precision=DEFAULT_PRECISION, use_locale=False, no_scientific=False):
"""Convert the given float to a string, centralizing standards for precision and decisions about scientific
notation. Adds an approximately equal sign in the event precision loss (e.g. rounding) has occurred.
There's a good discussion of related issues here:
https://stackoverflow.com/questions/38847690/convert-float-to-string-in-positional-format-without-scientific-notation-and-fa
Args:
f: the number to format
precision: the number of digits of precision to display
use_locale: if True, use locale-specific formatting (e.g. adding thousands separators)
no_scientific: if True, print all available digits of precision without scientific notation. This may insert
leading zeros before very small numbers, causing the resulting string to be longer than `precision`
characters
Returns:
A string representation of the float, according to the desired parameters
"""
assert not (use_locale and no_scientific)
if precision != DEFAULT_PRECISION:
local_context = decimal.Context()
local_context.prec = precision
else:
local_context = ctx
# We cast to string; we want to avoid precision issues, but format everything as though it were a float.
# So, if it's not already a float, we will append a decimal point to the string representation
s = repr(f)
if not isinstance(f, float):
s += locale.localeconv().get("decimal_point") + "0"
d = local_context.create_decimal(s)
if no_scientific:
result = format(d, "f")
elif use_locale:
result = format(d, "n")
else:
result = format(d, "g")
if f != locale.atof(result):
# result = '≈' + result
# ≈ # \u2248
result = "≈" + result
if "e" not in result and "E" not in result:
result = result.rstrip("0").rstrip(locale.localeconv().get("decimal_point"))
return result
SUFFIXES = {1: "st", 2: "nd", 3: "rd"}
def ordinal(num):
"""Convert a number to ordinal"""
# Taken from https://codereview.stackexchange.com/questions/41298/producing-ordinal-numbers/41301
# Consider a library like num2word when internationalization comes
if 10 <= num % 100 <= 20:
suffix = "th"
else:
# the second parameter is a default.
suffix = SUFFIXES.get(num % 10, "th")
return str(num) + suffix
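# Illustrative expected values (added sketch, not part of the original module).
# They assume the default "C"/en_US locale, where the decimal point is ".":
#
#   num_to_str(3.14159)  -> "≈3.142"  (rounded to 4 significant digits, hence the ≈)
#   num_to_str(100.0)    -> "100"     (exact; trailing zeros and the decimal point are stripped)
#   ordinal(1)   -> "1st"
#   ordinal(12)  -> "12th"   (the 10-20 range always maps to "th")
#   ordinal(23)  -> "23rd"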
|
py | b4047d92f20272ea2ae34b311eac1f2bde0587f4 | """ Functions that are specific to our dataset
"""
import pandas, collections, os, numpy as np
import keras.utils
from collections import namedtuple
from utils import utils, io
import config, tfidf
Dataset = namedtuple('Dataset', [
'info', 'labels', 'genres', 'book_sentiment_words_list', 'label_dataset',
'sentiment_dataset'
])
SubDataset = namedtuple(
'SubDataset', ['dict_index_to_label', 'dict_label_to_index', 'word_list'])
# ['train', 'test', 'labels', 'dict_index_to_label', 'dict_label_to_index'])
print(""" Dataset :: namedtuple(
'info': pandas.df
'labels': pandas.df('filename.txt': 'genre')
'genres': ['genre'] # unique genres
'label_dataset': SubDataset
'sentiment_dataset': SubDataset
'book_sentiment_words_list': ['filename']
SubDataset :: namedtuple(
'dict_index_to_label' = dict to convert label_index -> label_name
'dict_label_to_index'= dict to convert label_name -> label_index
'word_list' = raw word list
""")
def init_dataset():
# alt: use utils.Dataset
# labels = pandas.read_csv(config.dataset_dir + 'labels.csv')
# train = os.listdir(config.dataset_dir + 'train/')
# test = os.listdir(config.dataset_dir + 'test/')
info = pandas.read_csv(config.info_file)
labels = pandas.read_csv(config.dataset_dir + 'labels.csv')
genre_list = list(info['genre'])
genre_list.append(config.default_genre_value)
genres = set(genre_list)
label_dataset = init_sub_dataset(genres)
# lists of files
book_sentiment_words_list = os.listdir(config.sentiment_words_dir)
# feature selection
# 1. tfidf on sentiment words (most important sentiment words that define genres)
# sentiment_words = io.read_sw_per_genre(amt=1000, dirname='top200_per_genre/')
# dict_ = {'sentiment_words': list(sentiment_words)}
# io.save_dict_to_csv(config.dataset_dir, 'top_sentiment_words', dict_)
df = pandas.read_csv(config.dataset_dir + 'top_sentiment_words.csv')
sentiment_words = list(df['sentiment_words'])
sentiment_dataset = init_sub_dataset(sentiment_words)
# return data as a namedtuple
return Dataset(info, labels, genres, book_sentiment_words_list,
label_dataset, sentiment_dataset)
def init_sub_dataset(word_list):
# create a label dicts to convert labels to numerical data and vice versa
# the order is arbitrary, as long as we can convert them back to the original classnames
# unique_labels = set(labels['breed'])
word_list = sorted(word_list)
dict_index_to_label_ = dict_index_to_label(word_list)
dict_label_to_index_ = dict_label_to_index(word_list)
return SubDataset(dict_index_to_label_, dict_label_to_index_, word_list)
def extract_genres(info, book_list):
labels = {} # {bookname: [genre]}, with max 1 genre
for filename in book_list[:]:
# name = filename.split('.')[0]
book = info.loc[info['filename'] == filename]
if book.empty:
labels[str(filename)] = [config.default_genre_value] # keep the same [genre] list shape as labelled books
else:
genre = book.genre.item()
labels[str(filename)] = [genre]
return labels
def extract_all(dataset, names):
# Collect test data (+labels)
# TODO use actual dir
dirname = config.sentiment_words_dir
x = []
y = []
for name in names:
tokenized = io.read_book(dirname, name)
x.append((name, tokenized))
y.append(get_label(name, dataset.labels))
return x, y
def labels_to_vectors(sub_dataset, train_labels, test_labels):
# dataset contains dicts to convert
# TODO make sure that every label is present in both y_test and y_test
train = textlabels_to_numerical(sub_dataset, train_labels)
test = textlabels_to_numerical(sub_dataset, test_labels)
y_train = keras.utils.to_categorical(train)
y_test = keras.utils.to_categorical(test)
return y_train, y_test
def decode_y(dataset, vector=[], n_best=1):
dict_ = y_to_label_dict(dataset, vector)
# best value
if n_best == 1:
i = vector.argmax()
label = dataset.label_dataset.dict_index_to_label[i]
return dict_, [label]
else:
# return n best label predicitions
ls = list(dict_.items())
ls.sort(key=lambda x: x[1], reverse=True)
selected = ls[:n_best]
return dict_, [label for label, s_ in selected]
def y_to_label_dict(dataset, vector=[]):
n = vector.shape[0]
result = {} # :: {label: score}
for i in range(n):
label = dataset.label_dataset.dict_index_to_label[i]
result[label] = vector[i]
return result
def tokenlist_to_vector(tokens, sub_dataset):
# TODO depending on len(tokens)
selected_words = sub_dataset.word_list
n = len(selected_words)
if n < 1:
return None
counter = collections.Counter(tokens)
vector = np.zeros([n])
for i, word in enumerate(selected_words):
try:
x = counter[word]
vector[i] = (x / float(n))**0.5
except: # KeyError
continue
return vector
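# Worked example (added note, not in the original file). With
# selected_words = ["bad", "good", "happy", "sad"] (so n = 4) and
# tokens = ["good", "good", "bad"], the counter is {"good": 2, "bad": 1} and
# the returned vector is
#   [sqrt(1/4), sqrt(2/4), 0.0, 0.0] ≈ [0.5, 0.7071, 0.0, 0.0]
# i.e. each entry is sqrt(count(word) / len(selected_words)); note that the
# normalisation uses the vocabulary size, not the document length.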
def polarization_scores_to_vector(dataset, name='706.txt'):
row = dataset.info.loc[dataset.info['filename'] == name]
keys = ['pos score', 'neg score', 'neu score', 'comp score']
v = []
for key in keys:
if key in row and not row.empty:
v.append(row[key].item())
else:
if config.debug_: print('pol key not found', row)
v.append(0)
return np.array(v)
def textlabels_to_numerical(dataset, labels):
# transform ['label'] => [index]
# (list of text => list of indices)
return [dataset.dict_label_to_index[label] for label in labels]
def dict_index_to_label(labels):
# labels :: list or set()
# return { int: label }
unique_labels = sorted(set(labels))
return collections.OrderedDict({k: v for k, v in enumerate(unique_labels)})
def dict_label_to_index(labels):
# labels :: list or set()
# return { label: int }
unique_labels = sorted(set(labels))
return collections.OrderedDict({k: v for v, k in enumerate(unique_labels)})
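# Worked example (added note, not in the original file): both helpers sort the
# unique labels first, so they are exact inverses of each other.
#
#   dict_index_to_label(['horror', 'fantasy', 'horror'])
#       -> OrderedDict([(0, 'fantasy'), (1, 'horror')])
#   dict_label_to_index(['horror', 'fantasy', 'horror'])
#       -> OrderedDict([('fantasy', 0), ('horror', 1)])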
def get_label(name='123.txt', labels=[]):
# labels :: pandas.df :: { id: breed }
try:
label = labels[name][0]
except:
print('[WARNING] unknown genre !!!')
label = config.default_genre_value
return label
def normalize_genre(g='horror'):
# remove keywords such as 'fiction'
unused_words = ['fiction', 'novel', 'literature', 'literatur']
known_genres = [
'children', 'christian', 'fantasi', 'histor', 'horror', 'philosophi',
'polit', 'western', 'thriller', 'scienc', 'detective', 'apocalypt',
'romanc'
]
# synonyms and typos (first member is correct)
synonyms = [('satire', 'satirical'), ('histor', 'histori'), ('young adult',
'youngadult'),
('fairy tale', 'fairytale'), ('science fiction', 'scienc',
'science'), ('apocalypt',
'postapocalypt'),
('philosophi', 'philosoph'), ('romance', 'romanc', 'romant')]
# do not confuse 'science' and 'science fiction'
g = g.lower()
if g == 'science':
return g
g = utils.normalize_string(g)
g = utils.rmv_words(g, unused_words)
# remove sub-genres such as 'horror fiction'
g = utils.stem_conditionally(g, known_genres)
# remove unclassifiable words such as 'fiction'
g = utils.rmv_words(g, unused_words)
# remove synonyms and typos
for tuple_ in synonyms:
if g in tuple_:
g = tuple_[0]
return g
def reduce_genres(genres=['']):
if type(genres) is str:
return normalize_genre(genres)
# g_ = set([utils.normalize_string(g) for g in genres])
return set([normalize_genre(g) for g in genres])
###### Data analysis
# def analyse_ml_result(dataset, y_test, results, n_best=1):
# correct = 0
# incorrect = 0
# for i, label in enumerate(y_test):
# all_, best = decode_y(dataset, results[i], n_best=n_best)
# _, label = decode_y(dataset, label, n_best=1)
# if label[0] in best:
# correct += 1
# else:
# incorrect += 1
# return correct, incorrect
def analyse_ml_result(dataset, y_test, results, n_best=1):
correct = 0
incorrect = 0
correct_labels = []
incorrect_labels = []
for i, label in enumerate(y_test):
_, best = decode_y(dataset, results[i], n_best=n_best)
_, labels = decode_y(dataset, label, n_best=1)
label = labels[0]
if label in best:
correct += 1
correct_labels.append(label)
else:
incorrect += 1
incorrect_labels.append(label)
return correct, incorrect, correct_labels, incorrect_labels
|
py | b4047ec0e510bb56692f6747322b006a67e82e46 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'tela_acervo.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Tela_Acervo(object):
def setupUi(self, Tela_Acervo):
Tela_Acervo.setObjectName("Tela_Acervo")
Tela_Acervo.resize(926, 547)
self.centralwidget = QtWidgets.QWidget(Tela_Acervo)
self.centralwidget.setObjectName("centralwidget")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(130, 80, 241, 41))
font = QtGui.QFont()
font.setFamily("Lucida Sans Unicode")
font.setPointSize(14)
self.label.setFont(font)
self.label.setText("")
self.label.setObjectName("label")
self.textBrowser = QtWidgets.QTextBrowser(self.centralwidget)
self.textBrowser.setGeometry(QtCore.QRect(-10, -20, 941, 571))
self.textBrowser.setObjectName("textBrowser")
self.sair = QtWidgets.QPushButton(self.centralwidget)
self.sair.setGeometry(QtCore.QRect(690, 90, 93, 28))
font = QtGui.QFont()
font.setFamily("Lucida Sans Unicode")
font.setPointSize(9)
self.sair.setFont(font)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("images/exit.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.sair.setIcon(icon)
self.sair.setObjectName("sair")
self.campoBusca = QtWidgets.QLineEdit(self.centralwidget)
self.campoBusca.setGeometry(QtCore.QRect(160, 90, 401, 28))
self.campoBusca.setObjectName("campoBusca")
self.botaoBusca = QtWidgets.QPushButton(self.centralwidget)
self.botaoBusca.setGeometry(QtCore.QRect(580, 90, 92, 28))
self.botaoBusca.setObjectName("botaoBusca")
self.tabelaLivros = QtWidgets.QTableView(self.centralwidget)
self.tabelaLivros.setGeometry(QtCore.QRect(160, 150, 621, 291))
self.tabelaLivros.setObjectName("tabelaLivros")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(420, 30, 211, 31))
font = QtGui.QFont()
font.setPointSize(20)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.textBrowser.raise_()
self.label.raise_()
self.sair.raise_()
self.campoBusca.raise_()
self.botaoBusca.raise_()
self.tabelaLivros.raise_()
self.label_2.raise_()
Tela_Acervo.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(Tela_Acervo)
self.menubar.setGeometry(QtCore.QRect(0, 0, 926, 22))
self.menubar.setObjectName("menubar")
Tela_Acervo.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(Tela_Acervo)
self.statusbar.setObjectName("statusbar")
Tela_Acervo.setStatusBar(self.statusbar)
self.retranslateUi(Tela_Acervo)
QtCore.QMetaObject.connectSlotsByName(Tela_Acervo)
def retranslateUi(self, Tela_Acervo):
_translate = QtCore.QCoreApplication.translate
Tela_Acervo.setWindowTitle(_translate("Tela_Acervo", "MainWindow"))
self.sair.setText(_translate("Tela_Acervo", "SAIR"))
self.botaoBusca.setText(_translate("Tela_Acervo", "Buscar"))
self.label_2.setText(_translate("Tela_Acervo", "Acervo"))
|
py | b4047f507999fb67f536b21fe639d657610714a6 | #!/usr/bin/env python
# Merge2cbf.py
# Copyright (C) 2013 Diamond Light Source, Richard Gildea
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
#
# A wrapper to handle the merge2cbf program that is distributed as part of
# the XDS package.
#
from __future__ import absolute_import, division
import os
import sys
import libtbx
from xia2.Driver.DriverFactory import DriverFactory
# interfaces that this inherits from ...
from xia2.Schema.Interfaces.FrameProcessor import FrameProcessor
# generic helper stuff
from xia2.Wrappers.XDS.XDS import xds_check_version_supported
def Merge2cbf(DriverType=None, params=None):
DriverInstance = DriverFactory.Driver(DriverType)
class Merge2cbfWrapper(DriverInstance.__class__,
FrameProcessor):
'''A wrapper for wrapping merge2cbf.'''
def __init__(self, params=None):
super(Merge2cbfWrapper, self).__init__()
# phil parameters
if not params:
from xia2.Handlers.Phil import master_phil
params = master_phil.extract().xds.merge2cbf
self._params = params
# now set myself up...
# I don't think there is a parallel version
self.set_executable('merge2cbf')
self._input_data_files = { }
self._output_data_files = { }
self._input_data_files_list = []
self._output_data_files_list = []
@property
def data_range(self):
return self._params.data_range
@data_range.setter
def data_range(self, value):
start, end = value
self._params.data_range = (start, end)
@property
def moving_average(self):
return self._params.moving_average
@moving_average.setter
def moving_average(self, value):
self._params.moving_average = value
@property
def merge_n_images(self):
return self._params.merge_n_images
@merge_n_images.setter
def merge_n_images(self, n):
self._params.merge_n_images = n
def run_core(self, data_range, moving_average=False):
'''Actually run merge2cbf itself.'''
# merge2cbf only requires minimal information in the input file
image_header = self.get_header()
xds_inp = open(os.path.join(self.get_working_directory(),
'MERGE2CBF.INP'), 'w')
name_template = os.path.join(self.get_directory(),
self.get_template().replace('#', '?'))
self._output_template = os.path.join('merge2cbf_averaged_????.cbf')
xds_inp.write(
'NAME_TEMPLATE_OF_DATA_FRAMES=%s\n' %name_template)
xds_inp.write(
'NAME_TEMPLATE_OF_OUTPUT_FRAMES=%s\n' %self._output_template)
xds_inp.write(
'NUMBER_OF_DATA_FRAMES_COVERED_BY_EACH_OUTPUT_FRAME=%s\n' %
self.merge_n_images)
xds_inp.write('DATA_RANGE=%d %d\n' % tuple(data_range))
xds_inp.close()
self.start()
self.close_wait()
xds_check_version_supported(self.get_all_output())
def run(self):
'''Run merge2cbf.'''
if self.moving_average:
i_first, i_last = self.data_range
n_output_images = (i_last - i_first) - self.merge_n_images + 1
for i in range(i_first, i_first+n_output_images):
data_range = (i, i+self.merge_n_images)
self.run_core(data_range, moving_average=False)
self.update_minicbf_headers(moving_average=True)
return
self.run_core(self.data_range, moving_average=False)
self.update_minicbf_headers(moving_average=False)
def update_minicbf_headers(self, moving_average=False):
i_first, i_last = self.data_range
if moving_average:
n_output_images = (i_last - i_first) - self.merge_n_images + 1
else:
n_output_images = (i_last - i_first + 1) // self.merge_n_images
import fileinput
for i in range(n_output_images):
minicbf_header_content = self.get_minicbf_header_contents(
i, moving_average=moving_average)
filename = os.path.join(
self.get_working_directory(),
self._output_template.replace('????', '%04i') %(i+1))
assert os.path.isfile(filename)
f = fileinput.input(filename,
mode='rb', inplace=1)
processing_array_header_contents = False
printed_array_header_contents = False
for line in f:
if processing_array_header_contents and line.startswith('_'):
# we have reached the next data item
processing_array_header_contents = False
elif line.startswith('_array_data.header_contents'):
processing_array_header_contents = True
elif processing_array_header_contents:
if not printed_array_header_contents:
print """;\n%s\n;\n""" %minicbf_header_content
printed_array_header_contents = True
continue
print line,
f.close()
def get_minicbf_header_contents(self, i_output_image, moving_average=False):
from xia2.Wrappers.XDS.XDS import beam_centre_mosflm_to_xds
header_contents = []
image_header = self.get_header()
header_contents.append(
'# Detector: %s' %image_header['detector_class'].upper())
import time
timestamp = time.strftime('%Y-%m-%dT%H:%M:%S.000',
time.gmtime(image_header['epoch']))
header_contents.append(
'# %s' %timestamp)
pixel_size_mm = image_header['pixel']
pixel_size_microns = tuple([mm * 1000 for mm in pixel_size_mm])
header_contents.append(
'# Pixel_size %.0fe-6 m x %.0fe-6 m' %pixel_size_microns)
if 'pilatus' in image_header['detector_class']:
header_contents.append("# Silicon sensor, thickness 0.000320 m")
header_contents.append(
'# Exposure_period %.7f s' %image_header['exposure_time'])
# XXX xia2 doesn't keep track of the overload count cutoff value?
header_contents.append(
'# Count_cutoff %i counts' %1e7)
header_contents.append(
'# Wavelength %.5f A' %image_header['wavelength'])
# mm to m
header_contents.append(
'# Detector_distance %.5f m' %(image_header['distance']/1000))
beam_x, beam_y = image_header['beam']
header_contents.append(
'# Beam_xy (%.2f, %.2f) pixels' %beam_centre_mosflm_to_xds(
beam_x, beam_y, image_header))
input_phi_width = image_header['phi_width']
if moving_average:
output_phi_width = input_phi_width
else:
output_phi_width = input_phi_width * self.merge_n_images
header_contents.append(
'# Start_angle %.4f deg.' %
(image_header['phi_start'] + output_phi_width * i_output_image))
header_contents.append(
'# Angle_increment %.4f deg.' %output_phi_width)
header_contents.append(
'# Detector_2theta %.4f deg.' %image_header['two_theta'])
return "\n".join(header_contents)
return Merge2cbfWrapper(params=params)
if __name__ == '__main__':
import sys
from libtbx.phil import command_line
args = sys.argv[1:]
cmd_line = command_line.argument_interpreter(master_params=master_params)
working_phil, image_files = cmd_line.process_and_fetch(
args=args, custom_processor="collect_remaining")
working_phil.show()
assert len(image_files) > 0
first_image = image_files[0]
params = working_phil.extract()
m2c = Merge2cbf(params=params)
m2c.setup_from_image(first_image)
m2c.run()
|
py | b4047f890956bec47ab2ee59b0d390a368de5f94 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
import numpy as np
from numpy.testing import assert_allclose
import astropy.units as u
from astropy.coordinates import SkyCoord
from astropy.io import fits
from gammapy.data import GTI
from gammapy.estimators import FluxMaps
from gammapy.maps import MapAxis, Maps, RegionGeom, WcsNDMap
from gammapy.modeling.models import (
LogParabolaSpectralModel,
PointSpatialModel,
PowerLawSpectralModel,
SkyModel,
)
from gammapy.utils.testing import mpl_plot_check, requires_dependency
@pytest.fixture(scope="session")
def reference_model():
return SkyModel(
spatial_model=PointSpatialModel(), spectral_model=PowerLawSpectralModel(index=2)
)
@pytest.fixture(scope="session")
def logpar_reference_model():
logpar = LogParabolaSpectralModel(
amplitude="2e-12 cm-2s-1TeV-1", alpha=1.5, beta=0.5
)
return SkyModel(spatial_model=PointSpatialModel(), spectral_model=logpar)
@pytest.fixture(scope="session")
def wcs_flux_map():
energy_axis = MapAxis.from_energy_bounds(0.1, 10, 2, unit="TeV")
map_dict = {}
map_dict["norm"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["norm"].data += 1.0
map_dict["norm_err"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["norm_err"].data += 0.1
map_dict["norm_errp"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["norm_errp"].data += 0.2
map_dict["norm_errn"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["norm_errn"].data += 0.2
map_dict["norm_ul"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["norm_ul"].data += 2.0
# Add another map
map_dict["sqrt_ts"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["sqrt_ts"].data += 1.0
# Add another map
map_dict["ts"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["ts"].data[1] += 3.0
# Add another map
map_dict["success"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit="", dtype=np.dtype(bool)
)
map_dict["success"].data = True
map_dict["success"].data[0, 0, 1] = False
return map_dict
@pytest.fixture(scope="session")
def partial_wcs_flux_map():
energy_axis = MapAxis.from_energy_bounds(0.1, 10, 2, unit="TeV")
map_dict = {}
map_dict["norm"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["norm"].data += 1.0
map_dict["norm_err"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["norm_err"].data += 0.1
# Add another map
map_dict["sqrt_ts"] = WcsNDMap.create(
npix=10, frame="galactic", axes=[energy_axis], unit=""
)
map_dict["sqrt_ts"].data += 1.0
return map_dict
@pytest.fixture(scope="session")
def region_map_flux_estimate():
axis = MapAxis.from_energy_edges((0.1, 1.0, 10.0), unit="TeV")
geom = RegionGeom.create("galactic;circle(0, 0, 0.1)", axes=[axis])
maps = Maps.from_geom(
geom=geom, names=["norm", "norm_err", "norm_errn", "norm_errp", "norm_ul"]
)
maps["norm"].data = np.array([1.0, 1.0])
maps["norm_err"].data = np.array([0.1, 0.1])
maps["norm_errn"].data = np.array([0.2, 0.2])
maps["norm_errp"].data = np.array([0.15, 0.15])
maps["norm_ul"].data = np.array([2.0, 2.0])
return maps
@pytest.fixture(scope="session")
def map_flux_estimate():
axis = MapAxis.from_energy_edges((0.1, 1.0, 10.0), unit="TeV")
nmap = WcsNDMap.create(npix=5, axes=[axis])
cols = dict()
cols["norm"] = nmap.copy(data=1.0)
cols["norm_err"] = nmap.copy(data=0.1)
cols["norm_errn"] = nmap.copy(data=0.2)
cols["norm_errp"] = nmap.copy(data=0.15)
cols["norm_ul"] = nmap.copy(data=2.0)
return cols
def test_table_properties(region_map_flux_estimate):
model = SkyModel(PowerLawSpectralModel(amplitude="1e-10 cm-2s-1TeV-1", index=2))
fe = FluxMaps(data=region_map_flux_estimate, reference_model=model)
assert fe.dnde.unit == u.Unit("cm-2s-1TeV-1")
assert_allclose(fe.dnde.data.flat, [1e-9, 1e-11])
assert_allclose(fe.dnde_err.data.flat, [1e-10, 1e-12])
assert_allclose(fe.dnde_errn.data.flat, [2e-10, 2e-12])
assert_allclose(fe.dnde_errp.data.flat, [1.5e-10, 1.5e-12])
assert_allclose(fe.dnde_ul.data.flat, [2e-9, 2e-11])
assert fe.e2dnde.unit == u.Unit("TeV cm-2s-1")
assert_allclose(fe.e2dnde.data.flat, [1e-10, 1e-10])
assert fe.flux.unit == u.Unit("cm-2s-1")
assert_allclose(fe.flux.data.flat, [9e-10, 9e-11])
assert fe.eflux.unit == u.Unit("TeV cm-2s-1")
assert_allclose(fe.eflux.data.flat, [2.302585e-10, 2.302585e-10])
def test_missing_column(region_map_flux_estimate):
del region_map_flux_estimate["norm_errn"]
model = SkyModel(PowerLawSpectralModel(amplitude="1e-10 cm-2s-1TeV-1", index=2))
fe = FluxMaps(data=region_map_flux_estimate, reference_model=model)
with pytest.raises(AttributeError):
fe.dnde_errn
def test_map_properties(map_flux_estimate):
model = SkyModel(PowerLawSpectralModel(amplitude="1e-10 cm-2s-1TeV-1", index=2))
fe = FluxMaps(data=map_flux_estimate, reference_model=model)
assert fe.dnde.unit == u.Unit("cm-2s-1TeV-1")
assert_allclose(fe.dnde.quantity.value[:, 2, 2], [1e-9, 1e-11])
assert_allclose(fe.dnde_err.quantity.value[:, 2, 2], [1e-10, 1e-12])
assert_allclose(fe.dnde_errn.quantity.value[:, 2, 2], [2e-10, 2e-12])
assert_allclose(fe.dnde_errp.quantity.value[:, 2, 2], [1.5e-10, 1.5e-12])
assert_allclose(fe.dnde_ul.quantity.value[:, 2, 2], [2e-9, 2e-11])
assert fe.e2dnde.unit == u.Unit("TeV cm-2s-1")
assert_allclose(fe.e2dnde.quantity.value[:, 2, 2], [1e-10, 1e-10])
assert_allclose(fe.e2dnde_err.quantity.value[:, 2, 2], [1e-11, 1e-11])
assert_allclose(fe.e2dnde_errn.quantity.value[:, 2, 2], [2e-11, 2e-11])
assert_allclose(fe.e2dnde_errp.quantity.value[:, 2, 2], [1.5e-11, 1.5e-11])
assert_allclose(fe.e2dnde_ul.quantity.value[:, 2, 2], [2e-10, 2e-10])
assert fe.flux.unit == u.Unit("cm-2s-1")
assert_allclose(fe.flux.quantity.value[:, 2, 2], [9e-10, 9e-11])
assert_allclose(fe.flux_err.quantity.value[:, 2, 2], [9e-11, 9e-12])
assert_allclose(fe.flux_errn.quantity.value[:, 2, 2], [1.8e-10, 1.8e-11])
assert_allclose(fe.flux_errp.quantity.value[:, 2, 2], [1.35e-10, 1.35e-11])
assert_allclose(fe.flux_ul.quantity.value[:, 2, 2], [1.8e-9, 1.8e-10])
assert fe.eflux.unit == u.Unit("TeV cm-2s-1")
assert_allclose(fe.eflux.quantity.value[:, 2, 2], [2.302585e-10, 2.302585e-10])
assert_allclose(fe.eflux_err.quantity.value[:, 2, 2], [2.302585e-11, 2.302585e-11])
assert_allclose(fe.eflux_errn.quantity.value[:, 2, 2], [4.60517e-11, 4.60517e-11])
assert_allclose(
fe.eflux_errp.quantity.value[:, 2, 2], [3.4538775e-11, 3.4538775e-11]
)
assert_allclose(fe.eflux_ul.quantity.value[:, 2, 2], [4.60517e-10, 4.60517e-10])
def test_flux_map_properties(wcs_flux_map, reference_model):
fluxmap = FluxMaps(wcs_flux_map, reference_model)
assert_allclose(fluxmap.dnde.data[:, 0, 0], [1e-11, 1e-13])
assert_allclose(fluxmap.dnde_err.data[:, 0, 0], [1e-12, 1e-14])
assert_allclose(fluxmap.dnde_err.data[:, 0, 0], [1e-12, 1e-14])
assert_allclose(fluxmap.dnde_errn.data[:, 0, 0], [2e-12, 2e-14])
assert_allclose(fluxmap.dnde_errp.data[:, 0, 0], [2e-12, 2e-14])
assert_allclose(fluxmap.dnde_ul.data[:, 0, 0], [2e-11, 2e-13])
assert_allclose(fluxmap.flux.data[:, 0, 0], [9e-12, 9e-13])
assert_allclose(fluxmap.flux_err.data[:, 0, 0], [9e-13, 9e-14])
assert_allclose(fluxmap.flux_errn.data[:, 0, 0], [18e-13, 18e-14])
assert_allclose(fluxmap.flux_errp.data[:, 0, 0], [18e-13, 18e-14])
assert_allclose(fluxmap.flux_ul.data[:, 0, 0], [18e-12, 18e-13])
assert_allclose(fluxmap.eflux.data[:, 0, 0], [2.302585e-12, 2.302585e-12])
assert_allclose(fluxmap.eflux_err.data[:, 0, 0], [2.302585e-13, 2.302585e-13])
assert_allclose(fluxmap.eflux_errp.data[:, 0, 0], [4.60517e-13, 4.60517e-13])
assert_allclose(fluxmap.eflux_errn.data[:, 0, 0], [4.60517e-13, 4.60517e-13])
assert_allclose(fluxmap.eflux_ul.data[:, 0, 0], [4.60517e-12, 4.60517e-12])
assert_allclose(fluxmap.e2dnde.data[:, 0, 0], [1e-12, 1e-12])
assert_allclose(fluxmap.e2dnde_err.data[:, 0, 0], [1e-13, 1e-13])
assert_allclose(fluxmap.e2dnde_errn.data[:, 0, 0], [2e-13, 2e-13])
assert_allclose(fluxmap.e2dnde_errp.data[:, 0, 0], [2e-13, 2e-13])
assert_allclose(fluxmap.e2dnde_ul.data[:, 0, 0], [2e-12, 2e-12])
assert_allclose(fluxmap.sqrt_ts.data, 1)
assert_allclose(fluxmap.ts.data[:, 0, 0], [0, 3])
assert_allclose(fluxmap.success.data[:, 0, 1], [False, True])
assert_allclose(fluxmap.flux.data[:, 0, 1], [np.nan, 9e-13])
assert_allclose(fluxmap.flux_err.data[:, 0, 1], [np.nan, 9e-14])
assert_allclose(fluxmap.eflux.data[:, 0, 1], [np.nan, 2.30258509e-12])
assert_allclose(fluxmap.e2dnde_err.data[:, 0, 1], [np.nan, 1e-13])
def test_flux_map_failed_properties(wcs_flux_map, reference_model):
fluxmap = FluxMaps(wcs_flux_map, reference_model)
fluxmap.filter_success_nan = False
assert_allclose(fluxmap.success.data[:, 0, 1], [False, True])
assert_allclose(fluxmap.flux.data[:, 0, 1], [9.0e-12, 9e-13])
assert not fluxmap.filter_success_nan
def test_flux_map_str(wcs_flux_map, reference_model):
fluxmap = FluxMaps(wcs_flux_map, reference_model)
fm_str = fluxmap.__str__()
assert "WcsGeom" in fm_str
assert "errn" in fm_str
assert "sqrt_ts" in fm_str
assert "pl" in fm_str
assert "n_sigma" in fm_str
assert "n_sigma_ul" in fm_str
assert "sqrt_ts_threshold" in fm_str
@pytest.mark.parametrize("sed_type", ["likelihood", "dnde", "flux", "eflux", "e2dnde"])
def test_flux_map_read_write(tmp_path, wcs_flux_map, logpar_reference_model, sed_type):
fluxmap = FluxMaps(wcs_flux_map, logpar_reference_model)
fluxmap.write(tmp_path / "tmp.fits", sed_type=sed_type, overwrite=True)
new_fluxmap = FluxMaps.read(tmp_path / "tmp.fits")
assert_allclose(new_fluxmap.norm.data[:, 0, 0], [1, 1])
assert_allclose(new_fluxmap.norm_err.data[:, 0, 0], [0.1, 0.1])
assert_allclose(new_fluxmap.norm_errn.data[:, 0, 0], [0.2, 0.2])
assert_allclose(new_fluxmap.norm_ul.data[:, 0, 0], [2, 2])
# check model
assert (
new_fluxmap.reference_model.spectral_model.tag[0] == "LogParabolaSpectralModel"
)
assert new_fluxmap.reference_model.spectral_model.alpha.value == 1.5
assert new_fluxmap.reference_model.spectral_model.beta.value == 0.5
assert new_fluxmap.reference_model.spectral_model.amplitude.value == 2e-12
# check existence and content of additional map
assert_allclose(new_fluxmap.sqrt_ts.data, 1.0)
assert_allclose(new_fluxmap.success.data[:, 0, 1], [False, True])
assert_allclose(new_fluxmap.is_ul.data, True)
@pytest.mark.parametrize("sed_type", ["likelihood", "dnde", "flux", "eflux", "e2dnde"])
def test_partial_flux_map_read_write(
tmp_path, partial_wcs_flux_map, reference_model, sed_type
):
fluxmap = FluxMaps(partial_wcs_flux_map, reference_model)
fluxmap.write(tmp_path / "tmp.fits", sed_type=sed_type, overwrite=True)
new_fluxmap = FluxMaps.read(tmp_path / "tmp.fits")
assert_allclose(new_fluxmap.norm.data[:, 0, 0], [1, 1])
assert_allclose(new_fluxmap.norm_err.data[:, 0, 0], [0.1, 0.1])
# check model
assert new_fluxmap.reference_model.spectral_model.tag[0] == "PowerLawSpectralModel"
assert new_fluxmap.reference_model.spectral_model.index.value == 2
# check existence and content of additional map
assert_allclose(new_fluxmap._data["sqrt_ts"].data, 1.0)
# the TS map shouldn't exist
with pytest.raises(AttributeError):
new_fluxmap.ts
def test_flux_map_read_write_gti(tmp_path, partial_wcs_flux_map, reference_model):
start = u.Quantity([1, 2], "min")
stop = u.Quantity([1.5, 2.5], "min")
gti = GTI.create(start, stop)
fluxmap = FluxMaps(partial_wcs_flux_map, reference_model, gti=gti)
fluxmap.write(tmp_path / "tmp.fits", sed_type="dnde")
new_fluxmap = FluxMaps.read(tmp_path / "tmp.fits")
assert len(new_fluxmap.gti.table) == 2
assert_allclose(gti.table["START"], start.to_value("s"))
@pytest.mark.xfail
def test_flux_map_read_write_no_reference_model(tmp_path, wcs_flux_map, caplog):
fluxmap = FluxMaps(wcs_flux_map)
fluxmap.write(tmp_path / "tmp.fits")
new_fluxmap = FluxMaps.read(tmp_path / "tmp.fits")
assert new_fluxmap.reference_model.spectral_model.tag[0] == "PowerLawSpectralModel"
assert "WARNING" in [_.levelname for _ in caplog.records]
assert "No reference model set for FluxMaps." in [_.message for _ in caplog.records]
def test_flux_map_read_write_missing_reference_model(
tmp_path, wcs_flux_map, reference_model
):
fluxmap = FluxMaps(wcs_flux_map, reference_model)
fluxmap.write(tmp_path / "tmp.fits")
hdulist = fits.open(tmp_path / "tmp.fits")
hdulist[0].header["MODEL"] = "non_existent"
with pytest.raises(FileNotFoundError):
_ = FluxMaps.from_hdulist(hdulist)
@pytest.mark.xfail
def test_flux_map_init_no_reference_model(wcs_flux_map, caplog):
fluxmap = FluxMaps(data=wcs_flux_map)
assert fluxmap.reference_model.spectral_model.tag[0] == "PowerLawSpectralModel"
assert fluxmap.reference_model.spatial_model.tag[0] == "PointSpatialModel"
assert fluxmap.reference_model.spectral_model.index.value == 2
assert "WARNING" in [_.levelname for _ in caplog.records]
assert "No reference model set for FluxMaps." in [_.message for _ in caplog.records]
@requires_dependency("matplotlib")
def test_get_flux_point(wcs_flux_map, reference_model):
fluxmap = FluxMaps(wcs_flux_map, reference_model)
coord = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
fp = fluxmap.get_flux_points(coord)
table = fp.to_table()
assert_allclose(table["e_min"], [0.1, 1.0])
assert_allclose(table["norm"], [1, 1])
assert_allclose(table["norm_err"], [0.1, 0.1])
assert_allclose(table["norm_errn"], [0.2, 0.2])
assert_allclose(table["norm_errp"], [0.2, 0.2])
assert_allclose(table["norm_ul"], [2, 2])
assert_allclose(table["sqrt_ts"], [1, 1])
assert_allclose(table["ts"], [0, 3], atol=1e-15)
assert_allclose(fp.dnde.data.flat, [1e-11, 1e-13])
assert fp.dnde.unit == "cm-2s-1TeV-1"
with mpl_plot_check():
fp.plot()
def test_get_flux_point_missing_map(wcs_flux_map, reference_model):
other_data = wcs_flux_map.copy()
other_data.pop("norm_errn")
other_data.pop("norm_errp")
fluxmap = FluxMaps(other_data, reference_model)
coord = SkyCoord(0.0, 0.0, unit="deg", frame="galactic")
table = fluxmap.get_flux_points(coord).to_table()
assert_allclose(table["e_min"], [0.1, 1.0])
assert_allclose(table["norm"], [1, 1])
assert_allclose(table["norm_err"], [0.1, 0.1])
assert_allclose(table["norm_ul"], [2, 2])
assert "norm_errn" not in table.columns
assert table["success"].data.dtype == np.dtype(np.bool)
def test_flux_map_from_dict_inconsistent_units(wcs_flux_map, reference_model):
ref_map = FluxMaps(wcs_flux_map, reference_model)
map_dict = dict()
map_dict["eflux"] = ref_map.eflux
map_dict["eflux"].quantity = map_dict["eflux"].quantity.to("keV/m2/s")
map_dict["eflux_err"] = ref_map.eflux_err
map_dict["eflux_err"].quantity = map_dict["eflux_err"].quantity.to("keV/m2/s")
flux_map = FluxMaps.from_maps(map_dict, "eflux", reference_model)
assert_allclose(flux_map.norm.data[:, 0, 0], 1.0)
assert flux_map.norm.unit == ""
assert_allclose(flux_map.norm_err.data[:, 0, 0], 0.1)
assert flux_map.norm_err.unit == ""
|
py | b4047fc808aedac772631c38780e309e95a13455 | #
# Copyright 2020 Logical Clocks AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import humps
from hsfs import util
class Statistics:
def __init__(
self,
commit_time,
feature_group_commit_id,
content,
href=None,
expand=None,
items=None,
count=None,
type=None,
):
self._commit_time = commit_time
self._feature_group_commit_id = feature_group_commit_id
self._content = json.loads(content)
@classmethod
def from_response_json(cls, json_dict):
json_decamelized = humps.decamelize(json_dict)
# Currently getting multiple commits at the same time is not allowed
if json_decamelized["count"] == 0:
return None
elif len(json_decamelized["items"]) == 1:
return cls(**json_decamelized["items"][0])
def to_dict(self):
return {
"commitTime": self._commit_time,
"featureGroupCommitId": self._feature_group_commit_id,
"content": json.dumps(self._content),
}
def json(self):
return json.dumps(self, cls=util.FeatureStoreEncoder)
@property
def commit_time(self):
return self._commit_time
@property
def feature_group_commit_id(self):
return self._feature_group_commit_id
@property
def content(self):
return self._content
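# Hedged usage sketch (added for illustration; not part of the original module):
# building a Statistics object from a hypothetical REST response. humps.decamelize
# turns the camelCase keys into the snake_case constructor arguments above.
#
# response = {
#     "count": 1,
#     "items": [
#         {
#             "commitTime": 1600000000000,
#             "featureGroupCommitId": 42,
#             "content": '{"columns": []}',
#         }
#     ],
# }
# stats = Statistics.from_response_json(response)
# assert stats.feature_group_commit_id == 42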
|
py | b404801023ebbaf6726fb9ddab546d6058dd98a6 | import numpy as np
import torch
import torch.nn.functional as F
import gym
import time
from spinup.algos.psn_ddpg import core
from spinup.utils.logx_torch import EpochLogger
class ReplayBuffer:
"""
A simple FIFO experience replay buffer for DDPG agents.
"""
def __init__(self, obs_dim, act_dim, size):
self.obs1_buf = np.zeros([size, obs_dim], dtype=np.float32)
self.obs2_buf = np.zeros([size, obs_dim], dtype=np.float32)
self.acts_buf = np.zeros([size, act_dim], dtype=np.float32)
self.rews_buf = np.zeros(size, dtype=np.float32)
self.done_buf = np.zeros(size, dtype=np.float32)
self.ptr, self.size, self.max_size = 0, 0, size
def store(self, obs, act, rew, next_obs, done):
self.obs1_buf[self.ptr] = obs
self.obs2_buf[self.ptr] = next_obs
self.acts_buf[self.ptr] = act
self.rews_buf[self.ptr] = rew
self.done_buf[self.ptr] = done
self.ptr = (self.ptr + 1) % self.max_size
self.size = min(self.size + 1, self.max_size)
def sample_batch(self, batch_size=32):
idxs = np.random.randint(0, self.size, size=batch_size)
return dict(
obs1=self.obs1_buf[idxs],
obs2=self.obs2_buf[idxs],
acts=self.acts_buf[idxs],
rews=self.rews_buf[idxs],
done=self.done_buf[idxs])
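# Usage sketch (added for illustration, not in the original file): the buffer
# stores flat numpy transitions and returns dict minibatches keyed by
# obs1/obs2/acts/rews/done, e.g.
#
#   buf = ReplayBuffer(obs_dim=3, act_dim=1, size=1000)
#   buf.store(np.zeros(3), np.zeros(1), 1.0, np.ones(3), False)
#   batch = buf.sample_batch(batch_size=32)   # each value has batch_size rows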
"""
Deep Deterministic Policy Gradient (DDPG)
"""
def ddpg(env_fn, actor_critic=core.ActorCritic, ac_kwargs=dict(), seed=0,
steps_per_epoch=5000, epochs=100, replay_size=int(1e6), gamma=0.99,
polyak=0.995, pi_lr=1e-3, q_lr=1e-3, batch_size=100, start_steps=10000,
max_ep_len=1000, logger_kwargs=dict(), save_freq=1, visualize=False,
param_noise_stddev=0.1, desired_action_stddev=0.1, adaption_coefficient=1.01):
"""
Args:
env_fn : A function which creates a copy of the environment.
The environment must satisfy the OpenAI Gym API.
actor_critic: The agent's main model, which takes states ``x``
and actions ``a`` and returns a tuple of:
=========== ================ ======================================
Symbol Shape Description
=========== ================ ======================================
``pi`` (batch, act_dim) | Deterministically computes actions
| from policy given states.
``q`` (batch,) | Gives the current estimate of Q* for
| states ``x`` and actions in
| ``a``.
``q_pi`` (batch,) | Gives the composition of ``q`` and
| ``pi`` for states in ``x``:
| q(x, pi(x)).
=========== ================ ======================================
ac_kwargs (dict): Any kwargs appropriate for the actor_critic
class you provided to DDPG.
seed (int): Seed for random number generators.
steps_per_epoch (int): Number of steps of interaction (state-action pairs)
for the agent and the environment in each epoch.
epochs (int): Number of epochs to run and train agent.
replay_size (int): Maximum length of replay buffer.
gamma (float): Discount factor. (Always between 0 and 1.)
polyak (float): Interpolation factor in polyak averaging for target
networks. Target networks are updated towards main networks
according to:
.. math:: \\theta_{\\text{targ}} \\leftarrow
\\rho \\theta_{\\text{targ}} + (1-\\rho) \\theta
where :math:`\\rho` is polyak. (Always between 0 and 1, usually
close to 1.)
pi_lr (float): Learning rate for policy.
q_lr (float): Learning rate for Q-networks.
batch_size (int): Minibatch size for SGD.
start_steps (int): Number of steps for uniform-random action selection,
before running real policy. Helps exploration.
max_ep_len (int): Maximum length of trajectory / episode / rollout.
logger_kwargs (dict): Keyword args for EpochLogger.
save_freq (int): How often (in terms of gap between epochs) to save
the current policy and value function.
visualize (bool): Whether to visualize during training
param_noise_stddev (float): standard deviation of parameter space noise
desired_action_stddev (float): desired standard deviation of action
adaption_coefficient (float): coefficient of adaption
"""
logger = EpochLogger(**logger_kwargs)
logger.save_config(locals())
device = "cuda" if torch.cuda.is_available() else "cpu"
torch.manual_seed(seed)
np.random.seed(seed)
env, test_env = env_fn(), env_fn()
obs_dim = env.observation_space.shape[0]
act_dim = env.action_space.shape[0]
# Action limit for clamping: critically, assumes all dimensions share the same bound!
act_limit = env.action_space.high[0]
# Share information about action space with policy architecture
ac_kwargs['action_space'] = env.action_space
# Main outputs from computation graph
main = actor_critic(in_features=obs_dim, **ac_kwargs).to(device)
# Target networks
target = actor_critic(in_features=obs_dim, **ac_kwargs).to(device)
target.eval()
# Experience buffer
replay_buffer = ReplayBuffer(obs_dim=obs_dim, act_dim=act_dim, size=replay_size)
# Count variables
var_counts = tuple(sum([np.prod(param.size()) for param in network.parameters()])
for network in [main.policy, main.q, main])
print(
'\nNumber of parameters: \t pi: %d, \t q: %d, \t total: %d\n' % (var_counts[0], var_counts[1], sum(var_counts)))
# Separate train ops for pi, q
pi_optimizer = torch.optim.Adam(params=main.policy.parameters(), lr=pi_lr)
q_optimizer = torch.optim.Adam(params=main.q.parameters(), lr=q_lr)
# Initialize target network parameters
target.load_state_dict(main.state_dict())
parameter_noise = core.ParameterNoise(main.policy,
param_noise_stddev=param_noise_stddev,
desired_action_stddev=desired_action_stddev,
adaption_coefficient=adaption_coefficient)
def get_action(o, actor):
# with torch.no_grad():
# a = main.pi(torch.from_numpy(o[None, :]).to(device))[0].detach().cpu().numpy()
# a += noise_scale * np.random.randn(act_dim)
with torch.no_grad():
pi = actor(torch.Tensor(o[None, :]).to(device)) # keep the observation on the same device as the actor
a = pi.cpu().numpy()[0]
return np.clip(a, -act_limit, act_limit)
def test_agent(n=10):
for _ in range(n):
o, r, d, ep_ret, ep_len = test_env.reset(), 0, False, 0, 0
while not (d or (ep_len == max_ep_len)):
# Take deterministic actions at test time (noise_scale=0)
o, r, d, _ = test_env.step(get_action(o, main.policy))
ep_ret += r
ep_len += 1
logger.store(TestEpRet=ep_ret, TestEpLen=ep_len)
start_time = time.time()
o, r, d, ep_ret, ep_len = env.reset(), 0, False, 0, 0
total_steps = steps_per_epoch * epochs
# Main loop: collect experience in env and update/log each epoch
for t in range(total_steps):
main.eval()
"""
Until start_steps have elapsed, randomly sample actions
from a uniform distribution for better exploration. Afterwards,
use the learned policy (with some noise, via perturbed_actor).
"""
if t > start_steps:
a = get_action(o, parameter_noise.perturbed_actor)
else:
a = env.action_space.sample()
# Step the env
o2, r, d, _ = env.step(a)
ep_ret += r
ep_len += 1
if visualize:
env.render()
# Ignore the "done" signal if it comes from hitting the time
# horizon (that is, when it's an artificial terminal signal
# that isn't based on the agent's state)
d = False if ep_len == max_ep_len else d
# Store experience to replay buffer
replay_buffer.store(o, a, r, o2, d)
# Super critical, easy to overlook step: make sure to update
# most recent observation!
o = o2
if d or (ep_len == max_ep_len):
main.train()
"""
Perform all DDPG updates at the end of the trajectory,
in accordance with tuning done by TD3 paper authors.
"""
for _ in range(ep_len):
batch = replay_buffer.sample_batch(batch_size)
obs1 = torch.Tensor(batch['obs1']).to(device)
obs2 = torch.Tensor(batch['obs2']).to(device)
acts = torch.Tensor(batch['acts']).to(device)
rews = torch.Tensor(batch['rews']).to(device)
done = torch.Tensor(batch['done']).to(device)
_, q, q_pi = main(obs1, acts)
_, _, q_pi_targ = target(obs2, acts)
# Bellman backup for Q function
with torch.no_grad():
backup = (rews + gamma * (1 - done) * q_pi_targ)
# DDPG losses
pi_loss = -q_pi.mean()
q_loss = F.mse_loss(q, backup)
# Q update
q_optimizer.zero_grad()
q_loss.backward()
q_optimizer.step()
logger.store(LossQ=q_loss.item(), QVals=q.detach().cpu().numpy())
# Policy update
pi_optimizer.zero_grad()
pi_loss.backward()
pi_optimizer.step()
logger.store(LossPi=pi_loss.item())
# Polyak averaging for target parameters
for p_main, p_target in zip(main.parameters(),
target.parameters()):
p_target.data.copy_(polyak * p_target.data +
(1 - polyak) * p_main.data)
logger.store(ParamNoiseStd=parameter_noise.param_noise_stddev)
logger.store(EpRet=ep_ret, EpLen=ep_len)
o, r, d, ep_ret, ep_len = env.reset(), 0, False, 0, 0
# Update parameter noise
for _ in range(10):
batch = replay_buffer.sample_batch(batch_size)
obs1 = torch.Tensor(batch['obs1']).to(device)
parameter_noise.adapt_param_noise(obs1)
# End of epoch wrap-up
if t > 0 and t % steps_per_epoch == 0:
epoch = t // steps_per_epoch
# Save model
if (epoch % save_freq == 0) or (epoch == epochs - 1):
logger.save_state({'env': env}, main.cpu(), None)
main.to(device) # save_state moved the network to CPU in place; restore it for further training
# Test the performance of the deterministic version of the agent.
test_agent()
# Log info about epoch
logger.log_tabular('Epoch', epoch)
logger.log_tabular('EpRet', with_min_and_max=True)
logger.log_tabular('TestEpRet', with_min_and_max=True)
logger.log_tabular('EpLen', average_only=True)
logger.log_tabular('TestEpLen', average_only=True)
logger.log_tabular('TotalEnvInteracts', t)
logger.log_tabular('QVals', with_min_and_max=True)
logger.log_tabular('LossPi', average_only=True)
logger.log_tabular('LossQ', average_only=True)
logger.log_tabular('Time', time.time() - start_time)
logger.log_tabular('ParamNoiseStd', with_min_and_max=True)
logger.dump_tabular()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--env', type=str, default='LunarLanderContinuous-v2')
parser.add_argument('--hid', type=int, default=300)
parser.add_argument('--l', type=int, default=1)
parser.add_argument('--gamma', type=float, default=0.99)
parser.add_argument('--seed', '-s', type=int, default=0)
parser.add_argument('--epochs', type=int, default=50)
parser.add_argument('--exp_name', type=str, default='ddpg')
parser.add_argument('--visualize', type=bool, default=False)
args = parser.parse_args()
from spinup.utils.run_utils import setup_logger_kwargs
logger_kwargs = setup_logger_kwargs(args.exp_name, args.seed)
ddpg(
lambda: gym.make(args.env),
actor_critic=core.ActorCritic,
ac_kwargs=dict(hidden_sizes=[args.hid] * args.l),
gamma=args.gamma,
seed=args.seed,
epochs=args.epochs,
visualize=args.visualize,
logger_kwargs=logger_kwargs)
|
py | b4048020ffc59847cc87c3066b1d44e0380e329e | import numpy as np
def rel_ent_key_list(topic_model, n_top_keywords, relevant_topics):
"""Returns a list of the top n keywords based on relative entropy score
Arguments:
topic_model (TopicModel): a topic by vocabulary word matrix where each entry
is the total word count for that word in that topic
n_top_words (int): the number of keywords the method will return
relevant_topics (iterable of int)
Returns:
keyword_list (iterable of str): list of the top n keywords, sorted
"""
topic_word_matrix = topic_model.components_()
# Log of probabilities of vocab words
vocab_logs = np.log(topic_word_matrix.sum(
axis=0) / topic_word_matrix.sum())
# Log of probabilities of vocab words given they were in each relevant topic
#this is being built to calculate p(w)*log[p(w)/q(w)]
topic_logs = np.log(topic_word_matrix[relevant_topics, :].sum(
axis=0) / topic_word_matrix[relevant_topics, :].sum())
# relative entropy proportions, unsorted
#log rules: log[p(w)/q(w)] = log(p(w)) - log(q(w))
unsorted_props = np.asarray(topic_word_matrix.sum(axis=0) /
topic_word_matrix.sum()) * np.asarray(topic_logs - vocab_logs)
unsorted_props = np.matrix.flatten(unsorted_props)
sorted_props_and_voc = sorted([(unsorted_props[i], topic_model.vocabulary[i]) for i in list(
np.argpartition(unsorted_props, topic_model.n_voc_words - n_top_keywords))[-n_top_keywords:]], reverse=True)
ordered_vocab = []
for (_, voc) in sorted_props_and_voc:
ordered_vocab.append(voc)
return ordered_vocab
# TODO (faunam|6/19/19): implement tfidf and logtf
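# Added note (not in the original file): the score computed above is, for each
# vocabulary word w,
#     score(w) = P_corpus(w) * (log P_relevant(w) - log P_corpus(w))
# where P_corpus(w) is w's share of all word counts over every topic and
# P_relevant(w) is its share within the topics listed in relevant_topics.
# The n_top_keywords words with the largest score are returned, highest first.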
|
py | b4048052429137ae6744e359c50b081576acb8aa | from codewatch.assertion import assertion
from codewatch.loader import ModuleLoader
from codewatch.run import (
Analyzer,
AssertionChecker,
Runner,
)
from codewatch.node_visitor import (
count_calling_files,
count_import_usages,
inference,
NodeVisitor,
visit,
)
from codewatch.file_walker import FileWalker
from codewatch.stats import Stats
__all__ = [
'assertion',
'count_calling_files',
'count_import_usages',
'ModuleLoader',
'Analyzer',
'AssertionChecker',
'Runner',
'NodeVisitor',
'visit',
'FileWalker',
'Stats',
'inference',
]
|
py | b4048064ace3e4626e6376c88bc5364a9959c394 | # Copyright (c) 2009-2014 Upi Tamminen <[email protected]>
# See the COPYRIGHT file for more information
"""
This module contains code to run a command
"""
import os
import re
import shlex
import stat
import time
from typing import Callable, Optional
from twisted.internet import error
from twisted.python import failure, log
from cowrie.core.config import CowrieConfig
from cowrie.shell import fs
class HoneyPotCommand:
"""
This is the super class for all commands in cowrie/commands
"""
safeoutfile: str = ""
def __init__(self, protocol, *args):
self.protocol = protocol
self.args = list(args)
self.environ = self.protocol.cmdstack[0].environ
self.fs = self.protocol.fs
self.data: Optional[bytes] = None # output data
self.input_data: Optional[
bytes
] = None # used to store STDIN data passed via PIPE
self.writefn: Callable[[bytes], None] = self.protocol.pp.outReceived
self.errorWritefn: Callable[[bytes], None] = self.protocol.pp.errReceived
# MS-DOS style redirect handling, inside the command
# TODO: handle >>, 2>, etc
if ">" in self.args or ">>" in self.args:
if self.args[-1] in [">", ">>"]:
self.errorWrite("-bash: parse error near '\\n' \n")
return
self.writtenBytes = 0
self.writefn = self.write_to_file
if ">>" in self.args:
index = self.args.index(">>")
b_append = True
else:
index = self.args.index(">")
b_append = False
self.outfile = self.fs.resolve_path(
str(self.args[(index + 1)]), self.protocol.cwd
)
del self.args[index:]
p = self.fs.getfile(self.outfile)
if (
not p
or not p[fs.A_REALFILE]
or p[fs.A_REALFILE].startswith("honeyfs")
or not b_append
):
tmp_fname = "{}-{}-{}-redir_{}".format(
time.strftime("%Y%m%d-%H%M%S"),
self.protocol.getProtoTransport().transportId,
self.protocol.terminal.transport.session.id,
re.sub("[^A-Za-z0-9]", "_", self.outfile),
)
self.safeoutfile = os.path.join(
CowrieConfig.get("honeypot", "download_path"), tmp_fname
)
perm = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
try:
self.fs.mkfile(self.outfile, 0, 0, 0, stat.S_IFREG | perm)
except fs.FileNotFound:
# The outfile locates at a non-existing directory.
self.errorWrite(
f"-bash: {self.outfile}: No such file or directory\n"
)
self.writefn = self.write_to_failed
self.outfile = None
self.safeoutfile = ""
except fs.PermissionDenied:
# The outfile locates in a file-system that doesn't allow file creation
self.errorWrite(f"-bash: {self.outfile}: Permission denied\n")
self.writefn = self.write_to_failed
self.outfile = None
self.safeoutfile = ""
else:
with open(self.safeoutfile, "ab"):
self.fs.update_realfile(
self.fs.getfile(self.outfile), self.safeoutfile
)
else:
self.safeoutfile = p[fs.A_REALFILE]
def write(self, data: str) -> None:
"""
Write a string to the user on stdout
"""
self.writefn(data.encode("utf8"))
def writeBytes(self, data: bytes) -> None:
"""
Like write() but input is bytes
"""
self.writefn(data)
def errorWrite(self, data: str) -> None:
"""
Write errors to the user on stderr
"""
self.errorWritefn(data.encode("utf8"))
def check_arguments(self, application, args):
files = []
for arg in args:
path = self.fs.resolve_path(arg, self.protocol.cwd)
if self.fs.isdir(path):
self.errorWrite(
f"{application}: error reading `{arg}': Is a directory\n"
)
continue
files.append(path)
return files
def set_input_data(self, data: bytes) -> None:
self.input_data = data
def write_to_file(self, data: bytes) -> None:
with open(self.safeoutfile, "ab") as f:
f.write(data)
self.writtenBytes += len(data)
self.fs.update_size(self.outfile, self.writtenBytes)
def write_to_failed(self, data: bytes) -> None:
pass
def start(self) -> None:
if self.writefn != self.write_to_failed:
self.call()
self.exit()
def call(self) -> None:
self.write(f"Hello World! [{repr(self.args)}]\n")
def exit(self) -> None:
"""
Sometimes client is disconnected and command exits after. So cmdstack is gone
"""
if (
self.protocol
and self.protocol.terminal
and hasattr(self, "safeoutfile")
and self.safeoutfile
):
if hasattr(self, "outfile") and self.outfile:
self.protocol.terminal.redirFiles.add((self.safeoutfile, self.outfile))
else:
self.protocol.terminal.redirFiles.add((self.safeoutfile, ""))
if len(self.protocol.cmdstack):
self.protocol.cmdstack.pop()
if len(self.protocol.cmdstack):
self.protocol.cmdstack[-1].resume()
else:
ret = failure.Failure(error.ProcessDone(status=""))
# The session could be disconnected already; when this happens .transport is gone
try:
self.protocol.terminal.transport.processEnded(ret)
except AttributeError:
pass
def handle_CTRL_C(self) -> None:
log.msg("Received CTRL-C, exiting..")
self.write("^C\n")
self.exit()
def lineReceived(self, line: str) -> None:
log.msg(f"QUEUED INPUT: {line}")
# FIXME: naive command parsing, see lineReceived below
# line = "".join(line)
self.protocol.cmdstack[0].cmdpending.append(shlex.split(line, posix=True))
def resume(self) -> None:
pass
def handle_TAB(self) -> None:
pass
def handle_CTRL_D(self) -> None:
pass
def __repr__(self) -> str:
return str(self.__class__.__name__)
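# Hedged sketch (not part of the original module): the smallest useful command
# built on HoneyPotCommand overrides call() and writes to the fake terminal.
# Real command modules also register their class in a module-level `commands`
# dict keyed by the emulated binary path; that registration step is assumed
# here and not shown.
#
# class Command_hello(HoneyPotCommand):
#     def call(self) -> None:
#         self.write("hello from the honeypot\n")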
|
py | b40480980ba121e2b9e067d6e075d3ef62368155 | from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, DateTime, create_engine
from sqlalchemy.orm import sessionmaker
import datetime
from ._configuration import interception_config
__all__ = [
'new_session',
'Interception',
'InterceptionVars'
]
Base = declarative_base()
class Interception(Base):
__tablename__ = 'INTERCEPTION'
id = Column(Integer, primary_key=True, name="id")
ps = Column(String, name="ps")
x = Column(String, name="x")
trace = Column(String, name="trace")
name = Column(String, name="name")
created_at = Column(DateTime, name="created_at")
def __repr__(self):
return f'<Interception(id={self.id}, ps={self.ps}, x={self.x}, trace={self.trace}, name={self.name})>'
@staticmethod
def of(ps, x, trace, name):
interception = Interception()
interception.ps = ps
interception.name = name
interception.trace = trace
interception.x = x
interception.created_at = datetime.datetime.now()
return interception
class InterceptionVars(Base):
__tablename__ = 'INTERCEPTION_VARS'
id = Column(Integer, primary_key=True, name="id")
active = Column(Integer, name="active")
def new_session():
engine = create_engine(interception_config.connection_string)
session_cls = sessionmaker(bind=engine)
return session_cls()
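# Usage sketch (illustrative, not part of the original module): persist a single
# interception record. This assumes the tables have already been created in the
# configured database; the argument values are placeholders.
if __name__ == "__main__":
    session = new_session()
    session.add(Interception.of(ps="ps output", x="x value", trace="stack trace", name="example"))
    session.commit()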
|
py | b404815960a838dfb5f80d58119875d432bfbbee | from os import makedirs
from os.path import exists
from subprocess import run
from invoke import task
from faasmcli.util.env import (
PROJ_ROOT,
FAASM_BUILD_DIR,
FAASM_INSTALL_DIR,
FAASM_SGX_MODE_DISABLED,
)
DEV_TARGETS = [
"codegen_func",
"codegen_shared_obj",
"func_runner",
"func_sym",
"pool_runner",
"upload",
"tests",
]
SANITISER_NONE = "None"
@task
def cmake(
ctx,
clean=False,
build="Debug",
perf=False,
prof=False,
sanitiser=SANITISER_NONE,
sgx=FAASM_SGX_MODE_DISABLED,
cpu=None,
):
"""
Configures the CMake build
"""
if clean and exists(FAASM_BUILD_DIR):
run("rm -rf {}/*".format(FAASM_BUILD_DIR), shell=True, check=True)
if not exists(FAASM_BUILD_DIR):
makedirs(FAASM_BUILD_DIR)
if not exists(FAASM_INSTALL_DIR):
makedirs(FAASM_INSTALL_DIR)
cmd = [
"cmake",
"-GNinja",
"-DCMAKE_BUILD_TYPE={}".format(build),
"-DCMAKE_CXX_COMPILER=/usr/bin/clang++-13",
"-DCMAKE_C_COMPILER=/usr/bin/clang-13",
"-DCMAKE_INSTALL_PREFIX={}".format(FAASM_INSTALL_DIR),
"-DFAASM_PERF_PROFILING=ON" if perf else "",
"-DFAASM_SELF_TRACING=ON" if prof else "",
"-DFAABRIC_SELF_TRACING=ON" if prof else "",
"-DFAASM_USE_SANITISER={}".format(sanitiser),
"-DFAABRIC_USE_SANITISER={}".format(sanitiser),
"-DFAASM_SGX_MODE={}".format(sgx),
"-DFAASM_TARGET_CPU={}".format(cpu) if cpu else "",
PROJ_ROOT,
]
cmd_str = " ".join(cmd)
print(cmd_str)
run(cmd_str, shell=True, check=True, cwd=FAASM_BUILD_DIR)
@task
def tools(
ctx,
clean=False,
build="Debug",
parallel=0,
sanitiser=SANITISER_NONE,
sgx=FAASM_SGX_MODE_DISABLED,
):
"""
Builds all the targets commonly used for development
"""
if sgx != FAASM_SGX_MODE_DISABLED and sanitiser != SANITISER_NONE:
raise RuntimeError("SGX and sanitised builds are incompatible!")
cmake(ctx, clean=clean, build=build, sanitiser=sanitiser, sgx=sgx)
targets = " ".join(DEV_TARGETS)
cmake_cmd = "cmake --build . --target {}".format(targets)
if parallel > 0:
cmake_cmd += " --parallel {}".format(parallel)
print(cmake_cmd)
run(
cmake_cmd,
cwd=FAASM_BUILD_DIR,
shell=True,
check=True,
)
@task
def cc(ctx, target, clean=False, parallel=0):
"""
Compiles the given CMake target
"""
if clean:
cmake(ctx, clean=True)
if target == "all":
target = ""
else:
target = "--target {}".format(target)
cmake_cmd = "cmake --build . {}".format(target)
if parallel > 0:
cmake_cmd += " --parallel {}".format(parallel)
run(
cmake_cmd,
cwd=FAASM_BUILD_DIR,
shell=True,
check=True,
)
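# Example invocations (an assumption: these tasks are exposed through faasmcli's
# invoke "dev" namespace; adjust the namespace and flags to the actual CLI wiring):
#
#   inv dev.cmake --clean --build Release
#   inv dev.tools --parallel 8
#   inv dev.cc upload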
|
py | b4048272fae6298c316ad3f45b0efbba2285b856 | import magma as m
from mantle import Counter
from mantle.util.lfsr import DefineLFSR
from loam.boards.icestick import IceStick
icestick = IceStick()
icestick.Clock.on()
for i in range(8):
icestick.J3[i].output().on()
LFSR = DefineLFSR(8, has_ce=True)
main = icestick.main()
clock = Counter(22)
lfsr = LFSR()
m.wire( lfsr( ce=clock.COUT ), main.J3 )
m.EndCircuit()
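# Note (added explanation, not in the original script): the 22-bit Counter divides the
# IceStick's on-board clock (nominally 12 MHz) so that its carry-out pulses a few times
# per second; that pulse drives the LFSR's clock enable, and the 8-bit LFSR state is
# wired to the J3 header pins, making the pseudo-random pattern visible on attached LEDs.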
|
py | b404830471597063a694b44021643c55d958730b | """
WSGI config for mezan project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mezan.settings')
application = get_wsgi_application()
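# For example (assuming a WSGI server such as gunicorn is installed), this module can
# be served with: gunicorn mezan.wsgi:application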
|
py | b40483e4f956f0eb4fd45014ddb46143a8b5553d | from termcolor import colored
from os import system, name
checklist = list()
# CREATE
def create(item):
checklist.append(item)
# READ
def read(index):
return checklist[index]
# UPDATE
def update(index, item):
checklist[index] = item
# DELETE
def delete(index):
checklist.pop(index)
def list_all_items():
index = 0
for list_item in checklist:
print('{} {}'.format(index, list_item))
index += 1
def mark_completed(index):
checklist[index] = colored(checklist[index], 'green')
def select(function_code):
if function_code == 'C' or function_code == 'c':
input_item = user_input('Input item: ')
clear_terminal()
create(input_item)
elif function_code == 'R' or function_code == 'r':
item_index = valid_index(user_input('Index Number: '))
clear_terminal()
        if item_index is not False:
print(read(item_index))
elif function_code == 'U' or function_code == 'u':
item_index = valid_index(user_input('Update Checklist Index Number: '))
        if item_index is not False:
update(item_index, user_input('New Item: '))
elif function_code == 'M' or function_code == 'm':
item_index = valid_index(user_input('Check Index Number: '))
clear_terminal()
        if item_index is not False:
mark_completed(item_index)
elif function_code == 'D' or function_code == 'd':
item_index = valid_index(user_input('Delete Checklist Index Number: '))
clear_terminal()
        if item_index is not False:
delete(item_index)
elif function_code == 'P' or function_code == 'p':
clear_terminal()
list_all_items()
elif function_code == 'Q' or function_code == 'q':
return False
else:
        input('Unknown Option')
clear_terminal()
return True
def user_input(prompt):
user_input = input(prompt)
return user_input
def valid_index(index):
    if index.isnumeric() and len(checklist) != 0 and int(index) < len(checklist):
return int(index)
else:
        input('Invalid: index is either not an int or out of range')
return False
def clear_terminal():
system('clear')
running = True
clear_terminal()
while running:
selection = user_input('Press C to add list, R to read from list, U to update item in list, M to mark item as complete, D to delete item in list, P to display all items, and Q to quit ')
running = select(selection)
def test ():
create('purple sox')
create('red cloaks')
print(read(0))
print(read(1))
update(0, 'purple socks')
delete(1)
print(read(0))
mark_completed(0)
list_all_items()
select('C')
list_all_items()
select('R')
    list_all_items()
|