code (string, length 4–4.48k) | docstring (string, length 1–6.45k) | _id (string, length 24) |
---|---|---|
class Piecewise(SympyFunction): <NEW_LINE> <INDENT> sympy_name = 'Piecewise' <NEW_LINE> def prepare_sympy(self, leaves): <NEW_LINE> <INDENT> if len(leaves) == 1: <NEW_LINE> <INDENT> return leaves[0] <NEW_LINE> <DEDENT> if len(leaves) == 2: <NEW_LINE> <INDENT> return leaves[0].leaves + [ Expression('List', leaves[1], Symbol('True'))] <NEW_LINE> <DEDENT> <DEDENT> def from_sympy(self, sympy_name, args): <NEW_LINE> <INDENT> if str(args[-1].leaves[1]).startswith('System`_True__Dummy_'): <NEW_LINE> <INDENT> args[-1].leaves[1] = Symbol('True') <NEW_LINE> <DEDENT> return Expression(self.get_name(), args) | <dl>
<dt>'Piecewise[{{expr1, cond1}, ...}]'
<dd>represents a piecewise function.
<dt>'Piecewise[{{expr1, cond1}, ...}, expr]'
<dd>represents a piecewise function with default 'expr'.
</dl>
Heaviside function
>> Piecewise[{{0, x <= 0}}, 1]
= Piecewise[{{0, x <= 0}}, 1] | 62599030cad5886f8bdc58c3 |
@skipIf(not HAS_CERTS, 'Cannot find CA cert bundle') <NEW_LINE> @skipIf(NO_MOCK, NO_MOCK_REASON) <NEW_LINE> @patch('salt.cloud.clouds.gce.__virtual__', MagicMock(return_value='gce')) <NEW_LINE> @patch('libcloud.common.google.GoogleInstalledAppAuthConnection.get_new_token', MagicMock(return_value=DUMMY_TOKEN)) <NEW_LINE> @patch('libcloud.compute.drivers.gce.GCENodeDriver.ex_list_zones', MagicMock(return_value=[])) <NEW_LINE> @patch('libcloud.compute.drivers.gce.GCENodeDriver.ex_list_regions', MagicMock(return_value=[])) <NEW_LINE> class GCETestCase(ExtendedTestCase): <NEW_LINE> <INDENT> def test_destroy_call(self): <NEW_LINE> <INDENT> self.assertRaises( SaltCloudSystemExit, gce.destroy, vm_name=VM_NAME, call='function' ) <NEW_LINE> <DEDENT> @patch('libcloud.compute.drivers.gce.GCENodeDriver.list_sizes', MagicMock(return_value=[])) <NEW_LINE> def test_avail_sizes(self): <NEW_LINE> <INDENT> sizes = gce.avail_sizes() <NEW_LINE> self.assertEqual( sizes, [] ) <NEW_LINE> <DEDENT> @patch('libcloud.compute.drivers.gce.GCENodeDriver.list_nodes', MagicMock(return_value=[])) <NEW_LINE> def test_list_nodes(self): <NEW_LINE> <INDENT> nodes = gce.list_nodes() <NEW_LINE> self.assertEqual( nodes, {} ) <NEW_LINE> <DEDENT> @patch('libcloud.compute.drivers.gce.GCENodeDriver.list_locations', MagicMock(return_value=[])) <NEW_LINE> def test_list_locations(self): <NEW_LINE> <INDENT> locations = gce.avail_locations() <NEW_LINE> self.assertEqual( locations, {} ) | Unit TestCase for salt.cloud.clouds.gce module. | 625990303eb6a72ae038b6f6 |
class BaseControllerTest(test_base.ArmadaTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(BaseControllerTest, self).setUp() <NEW_LINE> current_dir = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> sample_conf_dir = os.path.join(current_dir, os.pardir, os.pardir, os.pardir, os.pardir, 'etc', 'armada') <NEW_LINE> sample_conf_files = ['api-paste.ini', 'armada.conf.sample'] <NEW_LINE> with mock.patch.object( armada.conf, '_get_config_files') as mock_get_config_files: <NEW_LINE> <INDENT> mock_get_config_files.return_value = [ os.path.join(sample_conf_dir, x) for x in sample_conf_files ] <NEW_LINE> self.app = falcon_testing.TestClient( server.create(enable_middleware=False)) <NEW_LINE> <DEDENT> self.policy = self.useFixture(fixtures.RealPolicyFixture()) | Base class for unit testing falcon controllers. | 625990301d351010ab8f4ba8 |
class AddPostTests(TestCase): <NEW_LINE> <INDENT> def test_true_forms(self): <NEW_LINE> <INDENT> ad_user = User.objects.create(username="Bob") <NEW_LINE> day = datetime.datetime.now() <NEW_LINE> my_data = copy.deepcopy(form_data) <NEW_LINE> my_data.update({'ad_user': ad_user, 'day': day}) <NEW_LINE> print(my_data) <NEW_LINE> form = PostForm(data=my_data) <NEW_LINE> form.is_valid() <NEW_LINE> print(form.errors) <NEW_LINE> self.assertTrue(form.is_valid()) <NEW_LINE> <DEDENT> def test_forms_min_price(self): <NEW_LINE> <INDENT> ad_user = User.objects.create(username="Bob") <NEW_LINE> day = datetime.datetime.now() <NEW_LINE> my_data = copy.deepcopy(form_data) <NEW_LINE> my_data.update({'ad_user': ad_user, 'day': day, 'price': -1}) <NEW_LINE> form = PostForm(data=my_data) <NEW_LINE> form.is_valid() <NEW_LINE> self.assertFalse(form.is_valid()) <NEW_LINE> <DEDENT> def test_forms_old_year(self): <NEW_LINE> <INDENT> ad_user = User.objects.create(username="Bob") <NEW_LINE> day = datetime.datetime.now() <NEW_LINE> my_data = copy.deepcopy(form_data) <NEW_LINE> my_data.update({'ad_user': ad_user, 'day': day, 'year': 1899}) <NEW_LINE> form = PostForm(data=my_data) <NEW_LINE> form.is_valid() <NEW_LINE> self.assertFalse(form.is_valid()) <NEW_LINE> <DEDENT> def test_forms_future_year(self): <NEW_LINE> <INDENT> ad_user = User.objects.create(username="Bob") <NEW_LINE> day = datetime.datetime.now() <NEW_LINE> my_data = copy.deepcopy(form_data) <NEW_LINE> my_data.update({'ad_user': ad_user, 'day': day, 'year': day.year + 1}) <NEW_LINE> form = PostForm(data=my_data) <NEW_LINE> form.is_valid() <NEW_LINE> self.assertFalse(form.is_valid()) | Test of the PostForm form. | 62599030be8e80087fbc010d
class InterfaceDuplexModeEvent_registerIDL_args(object): <NEW_LINE> <INDENT> thrift_spec = (None, (1, TType.I32, 'sessionHandle', None, None), (2, TType.I64, 'xosHandle', None, None), (3, TType.I32, 'interfaceType', None, None)) <NEW_LINE> def __init__(self, sessionHandle = None, xosHandle = None, interfaceType = None): <NEW_LINE> <INDENT> self.sessionHandle = sessionHandle <NEW_LINE> self.xosHandle = xosHandle <NEW_LINE> self.interfaceType = interfaceType <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid,) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.sessionHandle = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I64: <NEW_LINE> <INDENT> self.xosHandle = iprot.readI64() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 3: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.interfaceType = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('InterfaceDuplexModeEvent_registerIDL_args') <NEW_LINE> if self.sessionHandle != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('sessionHandle', TType.I32, 1) <NEW_LINE> oprot.writeI32(self.sessionHandle) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.xosHandle != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('xosHandle', TType.I64, 2) <NEW_LINE> oprot.writeI64(self.xosHandle) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.interfaceType != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('interfaceType', TType.I32, 3) <NEW_LINE> oprot.writeI32(self.interfaceType) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> def validate(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = [ '%s=%r' % (key, value) for (key, value,) in self.__dict__.iteritems() ] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | Attributes:
- sessionHandle
- xosHandle
- interfaceType | 62599030ec188e330fdf9924 |
class Seller(scrapy.Item): <NEW_LINE> <INDENT> platform_code = scrapy.Field(primary_key=True, foreign_key=True) <NEW_LINE> seller_id = scrapy.Field(primary_key=True) <NEW_LINE> market_id = scrapy.Field() <NEW_LINE> seller_name = scrapy.Field() <NEW_LINE> seller_contact_wangwang = scrapy.Field() <NEW_LINE> seller_contact_phone = scrapy.Field() <NEW_LINE> seller_contact_weixin = scrapy.Field() <NEW_LINE> seller_contact_qq = scrapy.Field() <NEW_LINE> seller_contact_addr = scrapy.Field() <NEW_LINE> status = scrapy.Field(status=True) <NEW_LINE> created_time = scrapy.Field(create=True) <NEW_LINE> deleted_time = scrapy.Field(delete=True) | Seller information on the goods-source website | 62599030ac7a0e7691f73579
class CompareFaceRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ImageA = None <NEW_LINE> self.ImageB = None <NEW_LINE> self.UrlA = None <NEW_LINE> self.UrlB = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ImageA = params.get("ImageA") <NEW_LINE> self.ImageB = params.get("ImageB") <NEW_LINE> self.UrlA = params.get("UrlA") <NEW_LINE> self.UrlB = params.get("UrlB") | CompareFace request parameter structure
| 6259903015baa7234946302a |
class RubiksAction(object): <NEW_LINE> <INDENT> RIGHTC = 0 <NEW_LINE> RIGHTAC = 1 <NEW_LINE> LEFTC = 2 <NEW_LINE> LEFTAC = 3 <NEW_LINE> UPC = 4 <NEW_LINE> UPAC = 5 <NEW_LINE> DOWNC = 6 <NEW_LINE> DOWNAC = 7 <NEW_LINE> FRONTC = 8 <NEW_LINE> FRONTAC = 9 <NEW_LINE> BACKC = 10 <NEW_LINE> BACKAC = 11 | Defines all possible actions the agent can take in the environment. | 62599030a8ecb033258722af |
class CDataModel: <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, CDataModel, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, CDataModel, name) <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> __swig_destroy__ = _ndlml.delete_CDataModel <NEW_LINE> __del__ = lambda self : None; <NEW_LINE> def getType(self): <NEW_LINE> <INDENT> return _ndlml.CDataModel_getType(self) <NEW_LINE> <DEDENT> def setType(self, *args): <NEW_LINE> <INDENT> return _ndlml.CDataModel_setType(self, *args) <NEW_LINE> <DEDENT> def getBaseType(self): <NEW_LINE> <INDENT> return _ndlml.CDataModel_getBaseType(self) <NEW_LINE> <DEDENT> def setBaseType(self, *args): <NEW_LINE> <INDENT> return _ndlml.CDataModel_setBaseType(self, *args) <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return _ndlml.CDataModel_getName(self) <NEW_LINE> <DEDENT> def setName(self, *args): <NEW_LINE> <INDENT> return _ndlml.CDataModel_setName(self, *args) <NEW_LINE> <DEDENT> def getOptNumParams(self): <NEW_LINE> <INDENT> return _ndlml.CDataModel_getOptNumParams(self) <NEW_LINE> <DEDENT> def display(self, *args): <NEW_LINE> <INDENT> return _ndlml.CDataModel_display(self, *args) <NEW_LINE> <DEDENT> def getNumData(self): <NEW_LINE> <INDENT> return _ndlml.CDataModel_getNumData(self) <NEW_LINE> <DEDENT> def setNumData(self, *args): <NEW_LINE> <INDENT> return _ndlml.CDataModel_setNumData(self, *args) | Proxy of C++ CDataModel class | 625990305166f23b2e244468 |
class DerivedParameters(parametertools.SubParameters): <NEW_LINE> <INDENT> _PARCLASSES = (arma_derived.Nmb, arma_derived.MaxQ, arma_derived.DiffQ, arma_derived.AR_Order, arma_derived.MA_Order, arma_derived.AR_Coefs, arma_derived.MA_Coefs) | Derived parameters of arma_v1, indirectly defined by the user. | 62599030d99f1b3c44d06735 |
class caMeta(object): <NEW_LINE> <INDENT> def __init__(self, dbf, units='', stamp=0.0, status=0, severity=0, precision=0, **kwargs): <NEW_LINE> <INDENT> self.dbf, self.units, self.stamp=dbf, units, stamp <NEW_LINE> self.status, self.severity = status, severity <NEW_LINE> self.precision=precision <NEW_LINE> l=dbf_default_limits(dbf) <NEW_LINE> for p in ['display', 'warning', 'alarm', 'control']: <NEW_LINE> <INDENT> setattr(self, p, kwargs.pop(p,l)) <NEW_LINE> <DEDENT> self.strs=kwargs.pop('strs',[]) <NEW_LINE> if len(kwargs)>0: <NEW_LINE> <INDENT> raise TypeError('Unexpected keyword arguments %s',str(kwargs.keys())) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def ro(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def meta(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('Meta dbf:%(dbf)d sts:%(status)d sev:%(severity)s '+ 'ts:%(stamp)f egu:%(units)s prec:%(precision)d '+ 'disp:%(display)s W:%(warning)s E:%(alarm)s C:%(control)s') % self.__dict__ <NEW_LINE> <DEDENT> def __eq__(self, o): <NEW_LINE> <INDENT> for a in ['dbf', 'units', 'stamp', 'status','severity','precision', 'display', 'warning', 'alarm', 'control']: <NEW_LINE> <INDENT> if getattr(self,a)!=getattr(o,a): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Represents the meta-information associated with a DBR value.
This includes alarm status/severity, as well as other types.
The parameters given to the constructor can also be accessed
as data members.
:param same_as_value dbf: The field type
:param str units: engineering units
:param float stamp: Time in seconds since the POSIX epoch
:param int status: Alarm status
:param int severity: Alarm severity
:param int precision: Display precision for fractional numbers
:param tuple display: Display limits (Low, High)
:param tuple warning: Display limits (Low, High)
:param tuple alarm: Display limits (Low, High)
:param tuple control: Display limits (Low, High) | 6259903050485f2cf55dc00f |
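The parameter list above maps directly onto constructor keywords; below is a brief, hypothetical construction sketch (the `dbf=6` field-type code and the limit tuples are illustrative values, not taken from the source, and the module-level `dbf_default_limits` helper is assumed to accept them):

```python
# Hypothetical construction of a caMeta record based on the parameter list above.
meta = caMeta(dbf=6,                    # illustrative field-type code
              units='V', stamp=0.0,
              status=0, severity=0, precision=3,
              display=(0.0, 10.0), warning=(1.0, 9.0),
              alarm=(0.5, 9.5), control=(0.0, 10.0))
print(meta)   # __str__ renders all of the fields on one line
```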
class RegisteredUsers(ndb.Model): <NEW_LINE> <INDENT> userName = ndb.StringProperty() <NEW_LINE> mainEmail = ndb.StringProperty() <NEW_LINE> userAddress = ndb.StringProperty() <NEW_LINE> orgKey = ndb.KeyProperty(kind=Organization) <NEW_LINE> isOwner = ndb.KeyProperty() <NEW_LINE> isPrincipal = ndb.KeyProperty() <NEW_LINE> isAdmin = ndb.KeyProperty() <NEW_LINE> isStudent = ndb.KeyProperty(kind="Student") | RegisteredUsers -- User profile object | 6259903026238365f5fadbe5 |
class scatterChart(NVD3Chart): <NEW_LINE> <INDENT> def __init__(self, height=450, width=None, **kwargs): <NEW_LINE> <INDENT> NVD3Chart.__init__(self, **kwargs) <NEW_LINE> self.create_x_axis('xAxis', format=".02f") <NEW_LINE> self.create_y_axis('yAxis', format=".02f") <NEW_LINE> if height: <NEW_LINE> <INDENT> self.set_graph_height(height) <NEW_LINE> <DEDENT> if width: <NEW_LINE> <INDENT> self.set_graph_width(width) <NEW_LINE> <DEDENT> <DEDENT> def buildjschart(self): <NEW_LINE> <INDENT> NVD3Chart.buildjschart(self) <NEW_LINE> scatter_jschart = '\n' + stab(3) + '.showDistX(true)\n' + stab(3) + '.showDistY(true)\n' + stab(3) + '.color(d3.scale.category10().range())' <NEW_LINE> start_index = self.jschart.find('.scatterChart()') <NEW_LINE> string_len = len('.scatterChart()') <NEW_LINE> replace_index = start_index + string_len <NEW_LINE> if start_index > 0: <NEW_LINE> <INDENT> self.jschart = self.jschart[:replace_index] + scatter_jschart + self.jschart[replace_index:] | A scatter plot or scattergraph is a type of mathematical diagram using Cartesian
coordinates to display values for two variables for a set of data.
The data is displayed as a collection of points, each having the value of one variable
determining the position on the horizontal axis and the value of the other variable
determining the position on the vertical axis.
.. image:: ../_static/screenshot/scatterChart.png
Python example::
from nvd3 import scatterChart
chart = scatterChart(name='scatterChart', height=400, width=400)
xdata = [3, 4, 0, -3, 5, 7]
ydata = [-1, 2, 3, 3, 15, 2]
ydata = [1, -2, 4, 7, -5, 3]
kwargs1 = {'shape': 'circle', 'size': '1'}
kwargs2 = {'shape': 'cross', 'size': '10'}
extra_serie = {"tooltip": {"y_start": "", "y_end": " call"}}
chart.add_serie(name="series 1", y=ydata, x=xdata, extra=extra_serie, **kwargs1)
extra_serie = {"tooltip": {"y_start": "", "y_end": " min"}}
chart.add_serie(name="series 2", y=ydata, x=xdata, extra=extra_serie, **kwargs2)
chart.buildhtml()
Javascript generated::
data = [{ key: "series 1",
values: [
{
"x": 2,
"y": 10,
"shape": "circle"
},
{
"x": -2,
"y" : 0,
"shape": "circle"
},
{
"x": 5,
"y" : -3,
"shape": "circle"
},
]
},
{ key: "series 2",
values: [
{
"x": 4,
"y": 10,
"shape": "cross"
},
{
"x": 4,
"y" : 0,
"shape": "cross"
},
{
"x": 3,
"y" : -3,
"shape": "cross"
},
]
}]
nv.addGraph(function() {
var chart = nv.models.scatterChart()
.showLabels(true);
chart.showDistX(true);
chart.showDistY(true);
chart.tooltipContent(function(key, y, e, graph) {
var x = String(graph.point.x);
var y = String(graph.point.y);
if(key == 'serie 1'){
var y = String(graph.point.y) + ' calls';
}
if(key == 'serie 2'){
var y = String(graph.point.y) + ' min';
}
tooltip_str = '<center><b>'+key+'</b></center>' + y + ' at ' + x;
return tooltip_str;
});
d3.select("#div_id")
.datum(data)
.transition()
.duration(1200)
.call(chart);
return chart;
}); | 625990303eb6a72ae038b6f8 |
class CentosClientBuilder(LinuxClientBuilder): <NEW_LINE> <INDENT> def CopyFiles(self): <NEW_LINE> <INDENT> shutil.move( os.path.join(self.package_dir, "debian"), os.path.join(self.package_dir, "rpmbuild")) <NEW_LINE> if self.fleetspeak_enabled: <NEW_LINE> <INDENT> rpm_spec_filename = "fleetspeak.grr.spec.in" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rpm_spec_filename = "grr.spec.in" <NEW_LINE> shutil.copy( config_lib.Resource().Filter( "install_data/centos/grr-client.initd.in"), os.path.join(self.package_dir, "rpmbuild/grr-client.initd.in")) <NEW_LINE> shutil.copy( config_lib.Resource().Filter( "install_data/systemd/client/grr-client.service"), os.path.join(self.package_dir, "rpmbuild/grr-client.service.in")) <NEW_LINE> <DEDENT> shutil.copy( config_lib.Resource().Filter("install_data/centos/" + rpm_spec_filename), os.path.join(self.package_dir, "rpmbuild/grr.spec.in")) <NEW_LINE> shutil.copy( config_lib.Resource().Filter( "install_data/centos/prelink_blacklist.conf.in"), os.path.join(self.package_dir, "rpmbuild/prelink_blacklist.conf.in")) | A builder class that produces a client for RPM based distros. | 625990308c3a8732951f75eb |
class TokenForm(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.token = None <NEW_LINE> replace_names = { "token": "token", } <NEW_LINE> if kwargs is not None: <NEW_LINE> <INDENT> for key in kwargs: <NEW_LINE> <INDENT> if key in replace_names: <NEW_LINE> <INDENT> setattr(self, replace_names[key], kwargs[key]) <NEW_LINE> <DEDENT> <DEDENT> if "token" in kwargs: <NEW_LINE> <INDENT> self.token = Token(**kwargs["token"]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def resolve_names(self): <NEW_LINE> <INDENT> replace_names = { "token": "token", } <NEW_LINE> retval = dict() <NEW_LINE> return APIHelper.resolve_names(self, replace_names, retval) | Implementation of the 'Token Form' model.
TODO: type model description here.
Attributes:
token (Token): TODO: type description here. | 625990304e696a045264e66b |
class CompressImage(ProcessBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ProcessBase.__init__(self) <NEW_LINE> self.extensions = ['jpg', 'jpeg', 'png'] <NEW_LINE> <DEDENT> def processfile(self, filename): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(filename, 'rb') as file: <NEW_LINE> <INDENT> img = Image.open(file) <NEW_LINE> format = str(img.format) <NEW_LINE> if format != 'PNG' and format != 'JPEG': <NEW_LINE> <INDENT> print ('Ignoring file "' + filename + '" with unsupported format ' + format) <NEW_LINE> return False <NEW_LINE> <DEDENT> ImageFile.MAXBLOCK = img.size[0] * img.size[1] <NEW_LINE> img.save(filename, quality=65, optimize=True) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> stderr.write('Failure whilst processing "' + filename + '": ' + str(e) + '\n') <NEW_LINE> return False | Processor which attempts to reduce image file size. | 62599030d6c5a102081e31b8 |
class State(Base): <NEW_LINE> <INDENT> __tablename__ = 'states' <NEW_LINE> id = Column(Integer, nullable=False, primary_key=True, autoincrement=True) <NEW_LINE> name = Column(String(128), nullable=False) <NEW_LINE> cities = relationship("City", backref="cities") | Class representing the `states` table.
Columns:
id (int): /NOT NULL/AUTO_INCREMENT/PRIMARY_KEY/
name (string): /VARCHAR(128)/NOT NULL/ | 625990308e05c05ec3f6f6a5 |
class TimeInWeek(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'day': {'key': 'day', 'type': 'str'}, 'hour_slots': {'key': 'hourSlots', 'type': '[int]'}, } <NEW_LINE> def __init__( self, *, day: Optional[Union[str, "WeekDay"]] = None, hour_slots: Optional[List[int]] = None, **kwargs ): <NEW_LINE> <INDENT> super(TimeInWeek, self).__init__(**kwargs) <NEW_LINE> self.day = day <NEW_LINE> self.hour_slots = hour_slots | Time in a week.
:ivar day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday",
"Wednesday", "Thursday", "Friday", "Saturday".
:vartype day: str or ~azure.mgmt.containerservice.v2021_07_01.models.WeekDay
:ivar hour_slots: Each integer hour represents a time range beginning at 0m after the hour
ending at the next hour (non-inclusive). 0 corresponds to 00:00 UTC, 23 corresponds to 23:00
UTC. Specifying [0, 1] means the 00:00 - 02:00 UTC time range.
:vartype hour_slots: list[int] | 62599030c432627299fa4089 |
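A one-line construction sketch matching the `hourSlots` semantics described above (values purely illustrative):

```python
# Hypothetical usage of the TimeInWeek row above: Saturday, 00:00-02:00 UTC.
window = TimeInWeek(day="Saturday", hour_slots=[0, 1])
```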
class UserProfileManager(models.Manager, Searchable): <NEW_LINE> <INDENT> def get_query_set(self): <NEW_LINE> <INDENT> return super(UserProfileManager, self).get_query_set().filter( user__is_active=True ) | A custom manager that adds table-level functionality to
:model:`user_profile.UserProfile`. | 62599030d10714528d69eed5
class PlansViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Plans.objects.all() <NEW_LINE> serializer_class = PlanSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticatedOrReadOnly,) <NEW_LINE> def list(self, request, *args, **kwargs): <NEW_LINE> <INDENT> orgid = self.kwargs['orgid'] <NEW_LINE> data = {"status": 0, "data": {}, "msg": ""} <NEW_LINE> self.queryset = Plans.objects.filter(orgid=orgid) <NEW_LINE> serializer = PlanSerializer(self.queryset, many=True) <NEW_LINE> data['data']['plans'] = serializer.data <NEW_LINE> return processdata(data, request) <NEW_LINE> <DEDENT> def retrieve(self, request, *args, **kwargs): <NEW_LINE> <INDENT> orgid = self.kwargs['orgid'] <NEW_LINE> pk = self.kwargs['pk'] <NEW_LINE> server = request.query_params.get('server',False) <NEW_LINE> data = {"status": 0, "data": {}, "msg": ""} <NEW_LINE> queryset = Plans.objects.filter(orgid=orgid, id=pk) <NEW_LINE> if server and queryset: <NEW_LINE> <INDENT> server_queryset = PlansServer.objects.filter(orgid=orgid, plans=queryset.first()) <NEW_LINE> server_serializer = ServerSerializer(server_queryset, many=True) <NEW_LINE> data['data']['server'] = server_serializer.data <NEW_LINE> <DEDENT> serializer = PlanSerializer(queryset, many=True) <NEW_LINE> data['data']['plans'] = serializer.data <NEW_LINE> return processdata(data, request) <NEW_LINE> <DEDENT> def create(self, request, *args, **kwargs): <NEW_LINE> <INDENT> orgid = self.kwargs['orgid'] <NEW_LINE> request.data.update({'orgid': orgid}) <NEW_LINE> if Plans.objects.filter(name=request.data['name'], orgid=orgid).count() == 0: <NEW_LINE> <INDENT> serializer = PlanSerializer(data=request.data) <NEW_LINE> serializer.is_valid(raise_exception=True) <NEW_LINE> self.perform_create(serializer) <NEW_LINE> return processdata({ "status": 0, "data": "", "msg": "创建套餐成功" }, request) <NEW_LINE> <DEDENT> return processdata({ "status": 1, "data": "", "msg": "创建套餐失败" }, request) <NEW_LINE> <DEDENT> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> partial = kwargs.pop('partial', False) <NEW_LINE> orgid = self.kwargs['orgid'] <NEW_LINE> pk = self.kwargs['pk'] <NEW_LINE> instance = get_object_or_404(self.queryset, name=pk, orgid=orgid) <NEW_LINE> serializer = self.get_serializer(instance, request.data, partial=partial) <NEW_LINE> serializer.is_valid(raise_exception=True) <NEW_LINE> self.perform_update(serializer) <NEW_LINE> return processdata({ "status": 0, "data": serializer.data, "msg": "" }, request) <NEW_LINE> <DEDENT> def destroy(self, request, *args, **kwargs): <NEW_LINE> <INDENT> partial = kwargs.pop('partial', False) <NEW_LINE> orgid = self.kwargs['orgid'] <NEW_LINE> pk = self.kwargs['pk'] <NEW_LINE> instance = get_object_or_404(self.queryset, id=pk, orgid=orgid) <NEW_LINE> self.perform_destroy(instance) <NEW_LINE> return processdata({ "status": 0, "data": "", "msg": "删除套餐成功" }, request) | API 套餐处理函数 | 625990305e10d32532ce414d |
class OpenSSLStreamCrypto(OpenSSLCryptoBase): <NEW_LINE> <INDENT> def __init__(self, cipher_name, key, iv, op): <NEW_LINE> <INDENT> super(OpenSSLStreamCrypto, self).__init__(cipher_name) <NEW_LINE> key_ptr = c_char_p(key) <NEW_LINE> iv_ptr = c_char_p(iv) <NEW_LINE> r = libcrypto.EVP_CipherInit_ex(self._ctx, self._cipher, None, key_ptr, iv_ptr, c_int(op)) <NEW_LINE> if not r: <NEW_LINE> <INDENT> self.clean() <NEW_LINE> raise Exception('can not initialize cipher context') <NEW_LINE> <DEDENT> self.encrypt = self.update <NEW_LINE> self.decrypt = self.update | Crypto for stream modes: cfb, ofb, ctr | 62599030cad5886f8bdc58c5 |
class ModelInput(BaseModel): <NEW_LINE> <INDENT> inputs: Union[List, Dict] | Pydantic Model to receive parameters for the /predict endpoint | 6259903026238365f5fadbe7 |
class EventRuleBase(Base): <NEW_LINE> <INDENT> event_rule: EventRule | CloudWatchEvents base class | 6259903023e79379d538d59f
class CommandError(Exception): <NEW_LINE> <INDENT> pass | Raised when a command encounters an error that can be resolved by the user. This is also the base
class for exceptions in this package. | 625990309b70327d1c57fe19 |
class AlbuDataGenerator(keras.utils.Sequence): <NEW_LINE> <INDENT> def __init__(self, images, masks, batch_size, nn_image_size, shuffle, mode, params, mean, use_ceil=False): <NEW_LINE> <INDENT> self.images = images <NEW_LINE> self.masks = masks <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.nn_image_size = nn_image_size <NEW_LINE> self.shuffle = shuffle <NEW_LINE> self.indexes = np.arange(len(self.images)) <NEW_LINE> self.on_epoch_end() <NEW_LINE> self.augmentation = common_aug(mode, params, mean) <NEW_LINE> self.channels = params.channels <NEW_LINE> self.use_ceil = use_ceil <NEW_LINE> assert len(self.images) >= self.batch_size, (len(self.images), self.batch_size) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> func = np.ceil if self.use_ceil else np.floor <NEW_LINE> return int(func(len(self.images) / self.batch_size)) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> indexes = self.indexes[index * self.batch_size:(index + 1) * self.batch_size] <NEW_LINE> X, y = self.__data_generation(indexes) <NEW_LINE> return X, y <NEW_LINE> <DEDENT> def on_epoch_end(self): <NEW_LINE> <INDENT> if self.shuffle == True: <NEW_LINE> <INDENT> np.random.shuffle(self.indexes) <NEW_LINE> <DEDENT> <DEDENT> def __data_generation(self, indexes): <NEW_LINE> <INDENT> X = np.empty((self.batch_size, self.nn_image_size, self.nn_image_size, self.channels), dtype=np.float32) <NEW_LINE> y = np.empty((self.batch_size, self.nn_image_size, self.nn_image_size, 1), dtype=np.float32) <NEW_LINE> for i, index in enumerate(indexes): <NEW_LINE> <INDENT> image = self.images[index] <NEW_LINE> if self.masks is None: <NEW_LINE> <INDENT> aug_res = self.augmentation(image=image) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mask = self.masks[index] <NEW_LINE> aug_res = self.augmentation(image=image, mask=mask) <NEW_LINE> mask = aug_res['mask'] <NEW_LINE> y[i, ...] = mask.reshape(mask.shape[0], mask.shape[1], 1) <NEW_LINE> <DEDENT> image = aug_res['image'] <NEW_LINE> X[i, ...] = image <NEW_LINE> <DEDENT> return X, y | Generates data for Keras | 6259903030c21e258be998a1 |
class BaseAgent(metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.solution = [] <NEW_LINE> self.fitness = np.inf <NEW_LINE> <DEDENT> def __eq__(self, value): <NEW_LINE> <INDENT> if isinstance(value, BaseAgent): <NEW_LINE> <INDENT> return self.fitness == value.fitness <NEW_LINE> <DEDENT> if isinstance(value, numbers.Number): <NEW_LINE> <INDENT> return self.fitness == value <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, value): <NEW_LINE> <INDENT> if isinstance(value, BaseAgent): <NEW_LINE> <INDENT> return self.fitness != value.fitness <NEW_LINE> <DEDENT> if isinstance(value, numbers.Number): <NEW_LINE> <INDENT> return self.fitness != value <NEW_LINE> <DEDENT> <DEDENT> def __gt__(self, value): <NEW_LINE> <INDENT> if isinstance(value, BaseAgent): <NEW_LINE> <INDENT> return self.fitness > value.fitness <NEW_LINE> <DEDENT> if isinstance(value, numbers.Number): <NEW_LINE> <INDENT> return self.fitness > value <NEW_LINE> <DEDENT> <DEDENT> def __lt__(self, value): <NEW_LINE> <INDENT> if isinstance(value, BaseAgent): <NEW_LINE> <INDENT> return self.fitness < value.fitness <NEW_LINE> <DEDENT> if isinstance(value, numbers.Number): <NEW_LINE> <INDENT> return self.fitness < value <NEW_LINE> <DEDENT> <DEDENT> def __ge__(self, value): <NEW_LINE> <INDENT> if isinstance(value, BaseAgent): <NEW_LINE> <INDENT> return self.fitness >= value.fitness <NEW_LINE> <DEDENT> if isinstance(value, numbers.Number): <NEW_LINE> <INDENT> return self.fitness >= value <NEW_LINE> <DEDENT> <DEDENT> def __le__(self, value): <NEW_LINE> <INDENT> if isinstance(value, BaseAgent): <NEW_LINE> <INDENT> return self.fitness <= value.fitness <NEW_LINE> <DEDENT> if isinstance(value, numbers.Number): <NEW_LINE> <INDENT> return self.fitness <= value <NEW_LINE> <DEDENT> <DEDENT> def custom_step(self, obj_fun, domain, step_length): <NEW_LINE> <INDENT> new_solution = self.solution[:] + step_length <NEW_LINE> is_lesser = new_solution < domain.lb <NEW_LINE> new_solution[is_lesser] = domain.lb[is_lesser] <NEW_LINE> is_greater = new_solution > domain.ub <NEW_LINE> new_solution[is_greater] = domain.ub[is_greater] <NEW_LINE> new_fitness = obj_fun(new_solution) <NEW_LINE> if new_fitness < self.fitness: <NEW_LINE> <INDENT> self.fitness = new_fitness <NEW_LINE> self.solution = new_solution[:] | Abstract base class for all swarm agents. | 62599030d4950a0f3b111688 |
class StringLabel(StringType): <NEW_LINE> <INDENT> pass | Type representing a label as a string containing a class name (e.g. the "hamster" class in CIFAR100). | 62599030a8ecb033258722b3
class AddStar(): <NEW_LINE> <INDENT> def __init__(self, selected_classes=None, prob=0.5, L_range=[0,100], var_range=[1,3], verbose=0): <NEW_LINE> <INDENT> self.selected_classes = selected_classes <NEW_LINE> self.prob = prob <NEW_LINE> self.mean_range = L_range <NEW_LINE> self.var_range = var_range <NEW_LINE> self.verbose = verbose <NEW_LINE> <DEDENT> def apply(self, X, Y, features): <NEW_LINE> <INDENT> if self.selected_classes is not None: <NEW_LINE> <INDENT> mask = np.in1d(Y, list(self.selected_classes)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mask = np.ones(len(Y)) <NEW_LINE> <DEDENT> add = np.random.random(len(Y)) < self.prob <NEW_LINE> Xtransformed = copy.deepcopy(X) <NEW_LINE> for i in range(len(Xtransformed)): <NEW_LINE> <INDENT> if add[i]: <NEW_LINE> <INDENT> Xtransformed[i,:] += self._add_star(Xtransformed[i]) <NEW_LINE> <DEDENT> <DEDENT> return Xtransformed, Y, features <NEW_LINE> <DEDENT> def _add_star(self, im): <NEW_LINE> <INDENT> shape = im.shape[1:] <NEW_LINE> x_c = np.random.randint(shape[0]) <NEW_LINE> y_c = np.random.randint(shape[1]) <NEW_LINE> mean, var = np.random.rand(2) <NEW_LINE> mean = self.mean_range[0] + mean*(self.mean_range[1]-self.mean_range[0]) <NEW_LINE> var = self.var_range[0] + var*(self.var_range[1]-self.var_range[0]) <NEW_LINE> delta_im = np.zeros((2*shape[0]+1, 2*shape[1]+1)) <NEW_LINE> delta_im[shape[0], shape[1]] = 1 <NEW_LINE> to_add = gaussian_filter(delta_im, var)[shape[0]-x_c:2*shape[0]-x_c, shape[1]-y_c:2*shape[1]-y_c] <NEW_LINE> to_add = to_add*mean <NEW_LINE> return to_add.astype(np.float32) | With a given probability, add a foreground star to the input image.
Parameters
----------
selected_classes : list or None, default None
Labels of classes to which this augmentation should
be applied. Should be None for regression.
prob : float, default 0.5
Probability that a foreground star is added.
Value between 0 and 1.
L_range : list, default [0,100]
Range of values from which the brightness of the
added star is selected.
var_range : list, default [1,3]
Range of values from which the width of the added
star is selected.
verbose : int, default 0
Verbosity level. | 62599030d10714528d69eed6 |
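A short, hypothetical usage sketch of the augmentation described above; the array shapes are assumptions (channels-first images):

```python
import numpy as np

# Hypothetical usage of the AddStar row above; shapes and values are illustrative.
X = np.zeros((4, 1, 32, 32), dtype=np.float32)    # (n_samples, channels, height, width)
Y = np.zeros(4, dtype=int)                        # class labels
aug = AddStar(prob=1.0, L_range=[50, 100], var_range=[1, 2])
X_aug, Y_aug, _ = aug.apply(X, Y, features=None)  # every image receives one synthetic star
```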
class alter_table_args: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'dbname', None, None, ), (2, TType.STRING, 'tbl_name', None, None, ), (3, TType.STRUCT, 'new_tbl', (Table, Table.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, dbname=None, tbl_name=None, new_tbl=None,): <NEW_LINE> <INDENT> self.dbname = dbname <NEW_LINE> self.tbl_name = tbl_name <NEW_LINE> self.new_tbl = new_tbl <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.dbname = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.tbl_name = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 3: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.new_tbl = Table() <NEW_LINE> self.new_tbl.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('alter_table_args') <NEW_LINE> if self.dbname is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('dbname', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.dbname) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.tbl_name is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('tbl_name', TType.STRING, 2) <NEW_LINE> oprot.writeString(self.tbl_name) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.new_tbl is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('new_tbl', TType.STRUCT, 3) <NEW_LINE> self.new_tbl.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- dbname
- tbl_name
- new_tbl | 625990305e10d32532ce414e |
class Solution2: <NEW_LINE> <INDENT> def majority_element(self, nums): <NEW_LINE> <INDENT> return sorted(nums)[len(nums) // 2] | Solution 2: sort the array and take the element at index ⌊n/2⌋
Time complexity: O(n log n)
Space complexity: O(n) or O(1), depending on whether the array can be sorted in place | 62599030cad5886f8bdc58c6
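The sorted-middle trick works because a majority element occupies more than ⌊n/2⌋ positions, so after sorting it must cover index ⌊n/2⌋; a quick illustrative check:

```python
# Quick check of the sorted-middle idea from the row above.
nums = [2, 2, 1, 1, 1, 2, 2]
print(sorted(nums)[len(nums) // 2])   # -> 2, the majority element
```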
class Cache(fixtures.Fixture): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(Cache, self).setUp() <NEW_LINE> if cache.CACHE_REGION.is_configured: <NEW_LINE> <INDENT> del cache.CACHE_REGION.backend <NEW_LINE> <DEDENT> cache.configure_cache() | A fixture for setting up and tearing down the cache between test cases.
| 62599030287bf620b6272c7e |
class ImageHandler(BaseHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> image_id = self.request.query <NEW_LINE> image = getImage(image_id) <NEW_LINE> self.set_header("Content-Type", 'image/jpeg') <NEW_LINE> self.write(image) | Render the Image | 625990306fece00bbaccca46 |
class Blog(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=64) <NEW_LINE> theme = models.CharField(max_length=32) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = '博客' <NEW_LINE> verbose_name_plural = verbose_name | Blog information table | 625990304e696a045264e66d
class DotDict(dict): <NEW_LINE> <INDENT> def __init__(self, value=None): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> for key in value: <NEW_LINE> <INDENT> self.__setitem__(key, value[key]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('Expected dict') <NEW_LINE> <DEDENT> <DEDENT> def _ensure_dot_dict(self, target, restOfKey, myKey): <NEW_LINE> <INDENT> if not isinstance(target, DotDict): <NEW_LINE> <INDENT> raise KeyError('Cannot set "%s" in "%s" (%s)' % (restOfKey, myKey, repr(target))) <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if '.' in key: <NEW_LINE> <INDENT> myKey, restOfKey = key.split('.', 1) <NEW_LINE> target = self.setdefault(myKey, DotDict()) <NEW_LINE> self._ensure_dot_dict(target, restOfKey, myKey) <NEW_LINE> target[restOfKey] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(value, dict) and not isinstance(value, DotDict): <NEW_LINE> <INDENT> value = DotDict(value) <NEW_LINE> <DEDENT> dict.__setitem__(self, key, value) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if '.' not in key: <NEW_LINE> <INDENT> return dict.__getitem__(self, key) <NEW_LINE> <DEDENT> myKey, restOfKey = key.split('.', 1) <NEW_LINE> target = dict.__getitem__(self, myKey) <NEW_LINE> self._ensure_dot_dict(target, restOfKey, myKey) <NEW_LINE> return target[restOfKey] <NEW_LINE> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> if '.' not in key: <NEW_LINE> <INDENT> return dict.get(self, key, default) <NEW_LINE> <DEDENT> myKey, restOfKey = key.split('.', 1) <NEW_LINE> if myKey not in self: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> target = dict.__getitem__(self, myKey) <NEW_LINE> self._ensure_dot_dict(target, restOfKey, myKey) <NEW_LINE> return target.get(restOfKey, default) <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> if '.' not in key: <NEW_LINE> <INDENT> return dict.__contains__(self, key) <NEW_LINE> <DEDENT> myKey, restOfKey = key.split('.', 1) <NEW_LINE> target = dict.get(self, myKey) <NEW_LINE> if not isinstance(target, DotDict): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return restOfKey in target <NEW_LINE> <DEDENT> def setdefault(self, key, default): <NEW_LINE> <INDENT> if key not in self: <NEW_LINE> <INDENT> self[key] = default <NEW_LINE> <DEDENT> return self[key] <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> def __setstate__(self, d): <NEW_LINE> <INDENT> self.__dict__ = d <NEW_LINE> <DEDENT> def __deepcopy__(self, memo): <NEW_LINE> <INDENT> return DotDict(deepcopy(dict(self), memo)) <NEW_LINE> <DEDENT> __setattr__ = __setitem__ <NEW_LINE> __getattr__ = __getitem__ | Dictionary class supporting keys like "a.b.c" for nested dictionaries
From http://stackoverflow.com/questions/3797957
- RM added get()
- MVM added __deepcopy__()
Supports get() with a dotted key and a default, e.g.
config.get('fruit.apple.type', 'delicious')
as well as creating dotted keys when no keys in the path exist yet, e.g.
config = DotDict({})
config.fruit.apple.type = 'macoun' | 62599030d6c5a102081e31bc |
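A minimal usage sketch of the dotted-key behaviour documented above, assuming the `DotDict` class from the code column is in scope:

```python
# Hypothetical usage of the DotDict row above.
config = DotDict({'fruit': {'apple': {'type': 'macoun'}}})

print(config['fruit.apple.type'])                # -> 'macoun'
print(config.get('fruit.pear.type', 'unknown'))  # missing path falls back to the default
config['veg.carrot.count'] = 3                   # intermediate DotDicts are created on demand
print('veg.carrot' in config)                    # -> True
```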
class Square: <NEW_LINE> <INDENT> pass | Empty class
defining a square | 625990301f5feb6acb163c88
class UnresolvableName(InferenceError): <NEW_LINE> <INDENT> pass | raised when we are unable to resolve a name | 62599030cad5886f8bdc58c7 |
class dwiextract(BaseMtrixCLI): <NEW_LINE> <INDENT> exe = 'dwiextract' <NEW_LINE> class Flags(BaseMtrixCLI.Flags): <NEW_LINE> <INDENT> bzero = '-bzero' | The dwiextract command from the mtrix package. | 6259903026238365f5fadbeb |
class FoodConfig(Config): <NEW_LINE> <INDENT> NAME = "Foods" <NEW_LINE> GPU_COUNT = 1 <NEW_LINE> IMAGES_PER_GPU = 8 <NEW_LINE> NUM_CLASSES = 1 + 40 <NEW_LINE> STEPS_PER_EPOCH = 50 <NEW_LINE> BACKBONE = 'resnet50' <NEW_LINE> RPN_ANCHOR_SCALES = (16, 32, 64, 128, 256) | Configuration for training on the cigarette butts dataset.
Derives from the base Config class and overrides values specific
to the cigarette butts dataset. | 6259903066673b3332c31489 |
class SQLAlchemyHandler(logging.Handler): <NEW_LINE> <INDENT> def emit(self, record): <NEW_LINE> <INDENT> trace = None <NEW_LINE> exc = record.__dict__['exc_info'] <NEW_LINE> if exc: <NEW_LINE> <INDENT> trace = traceback.format_exc() <NEW_LINE> <DEDENT> log = Log( logger=record.__dict__['name'], level=record.__dict__['levelname'], trace=trace, msg=record.__dict__['msg'], ) <NEW_LINE> db.session.add(log) <NEW_LINE> db.session.commit() | Logging handler for SQLAlchemy | 625990301d351010ab8f4bb0 |
class Stemmer: <NEW_LINE> <INDENT> def __init__(self, stemmer_type='snowball'): <NEW_LINE> <INDENT> self.stemmer_type = stemmer_type <NEW_LINE> if self.stemmer_type == 'porter': <NEW_LINE> <INDENT> self.stemmer = nltk.stem.PorterStemmer() <NEW_LINE> <DEDENT> elif self.stemmer_type == 'snowball': <NEW_LINE> <INDENT> self.stemmer = nltk.stem.SnowballStemmer('english') <NEW_LINE> <DEDENT> <DEDENT> def transform(self, text): <NEW_LINE> <INDENT> tokens = [self.stemmer.stem(token) for token in text.split(" ")] <NEW_LINE> return ' '.join(tokens) | Convert uppercase to lowercase
Delete common endings of words
Does not remove whitespace or newline characters.
Does not perform lemmatization (e.g. working => work).
In general, snowball is better than porter, so the snowball method is the default.
| 6259903021bff66bcd723cfe |
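A brief, hypothetical usage sketch of the Stemmer wrapper above (requires nltk to be installed):

```python
# Hypothetical usage of the Stemmer row above.
stemmer = Stemmer(stemmer_type='snowball')
print(stemmer.transform("working cats"))   # each whitespace-separated token is stemmed, e.g. "work cat"
```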
class Log(object): <NEW_LINE> <INDENT> ON=True <NEW_LINE> FLUSH=True <NEW_LINE> def __init__(self,dir=None,name=None): <NEW_LINE> <INDENT> self.dir=dir <NEW_LINE> self.name=name <NEW_LINE> self.fout=None <NEW_LINE> <DEDENT> def reset(self,name): <NEW_LINE> <INDENT> self.close() <NEW_LINE> self.name=name <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> if self.fout is None: <NEW_LINE> <INDENT> if self.name is None: <NEW_LINE> <INDENT> self.fout=sys.stdout <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fout=open('%s/%s'%(self.dir,self.name),'a+') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self.fout not in (None,sys.stdout): <NEW_LINE> <INDENT> self.fout.close() <NEW_LINE> <DEDENT> self.fout=None <NEW_LINE> <DEDENT> def __lshift__(self,info): <NEW_LINE> <INDENT> if Log.ON: <NEW_LINE> <INDENT> self.open() <NEW_LINE> self.fout.write(str(info)) <NEW_LINE> if Log.FLUSH: self.fout.flush() <NEW_LINE> <DEDENT> return self | The log for code executing.
Attributes
----------
dir : str
The directory where to store the log file.
name : str
The name of the log file.
fout : file
The log file.
Notes
-----
When the log file is stdout, the attribute `name` is set to None. | 62599030a4f1c619b294f68f
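An illustrative sketch of the `<<` logging idiom implemented above (the directory and file name are hypothetical):

```python
# Hypothetical usage of the Log row above.
log = Log(dir='/tmp', name='run.log')             # appends to /tmp/run.log
log << "step 1 finished\n" << "step 2 finished\n" # __lshift__ returns self, so writes chain
log.close()

console = Log()                                   # no name: output goes to sys.stdout
console << "hello\n"
```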
class check_command(special_both): <NEW_LINE> <INDENT> def __init__(self, special_base): <NEW_LINE> <INDENT> super(check_command, self).__init__(special_base, 'check_command') <NEW_LINE> <DEDENT> def lookup(self, bindings, pat_context, patterns): <NEW_LINE> <INDENT> if len(patterns) < 1: return knowledge_base.Gen_empty <NEW_LINE> retcode, out, err = run_cmd(pat_context, patterns[0], patterns[1] if len(patterns) > 1 else None, patterns[2] if len(patterns) > 2 else None) <NEW_LINE> if retcode: return knowledge_base.Gen_empty <NEW_LINE> return knowledge_base.Gen_once | >>> from pyke import pattern, contexts
>>> class stub(object):
... def add_fn(self, fn): pass
>>> cc = check_command(stub())
>>> ctxt = contexts.simple_context()
>>> mgr = cc.lookup(ctxt, ctxt, (pattern.pattern_literal(('true',)),))
>>> gen = iter(mgr.__enter__())
>>> next(gen)
>>> ctxt.dump()
>>> next(gen)
Traceback (most recent call last):
...
StopIteration
>>> ctxt.dump()
>>> mgr.__exit__(None, None, None)
>>> mgr = cc.lookup(ctxt, ctxt, (pattern.pattern_literal(('false',)),))
>>> gen = iter(mgr.__enter__())
>>> next(gen)
Traceback (most recent call last):
...
StopIteration
>>> ctxt.dump()
>>> mgr.__exit__(None, None, None)
>>> mgr = cc.prove(ctxt, ctxt, (pattern.pattern_literal(('true',)),))
>>> gen = iter(mgr.__enter__())
>>> next(gen)
>>> ctxt.dump()
>>> next(gen)
Traceback (most recent call last):
...
StopIteration
>>> ctxt.dump()
>>> mgr.__exit__(None, None, None) | 62599030ac7a0e7691f73581 |
class ResourceIdInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ClusterId = None <NEW_LINE> self.InstanceIdSet = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ClusterId = params.get("ClusterId") <NEW_LINE> self.InstanceIdSet = params.get("InstanceIdSet") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | Resource ID information, including ClusterID and InstanceID
| 6259903015baa72349463032 |
class PostAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ('title','created','publish') <NEW_LINE> list_filter = ('created',) <NEW_LINE> search_fields = ('title',) <NEW_LINE> prepopulated_fields = {'slug': ('title',)} | Admin View for Post | 625990306e29344779b016e9 |
class TextSessionStateExporter(SessionStateExporterBase): <NEW_LINE> <INDENT> def __init__(self, option_list=None, color=None, exporter_unit=None): <NEW_LINE> <INDENT> super().__init__(option_list, exporter_unit=exporter_unit) <NEW_LINE> self.C = Colorizer(color) <NEW_LINE> <DEDENT> def get_session_data_subset(self, session_manager): <NEW_LINE> <INDENT> return session_manager.state <NEW_LINE> <DEDENT> def dump(self, session, stream): <NEW_LINE> <INDENT> for job in session.run_list: <NEW_LINE> <INDENT> state = session.job_state_map[job.id] <NEW_LINE> if state.result.is_hollow: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if self.C.is_enabled: <NEW_LINE> <INDENT> stream.write( " {}: {}\n".format( self.C.custom( outcome_meta(state.result.outcome).unicode_sigil, outcome_meta(state.result.outcome).color_ansi ), state.job.tr_summary(), ).encode("UTF-8")) <NEW_LINE> if len(state.result_history) > 1: <NEW_LINE> <INDENT> stream.write(_(" history: {0}\n").format( ', '.join( self.C.custom( result.outcome_meta().tr_outcome, result.outcome_meta().color_ansi) for result in state.result_history) ).encode("UTF-8")) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> stream.write( "{:^15}: {}\n".format( state.result.tr_outcome(), state.job.tr_summary(), ).encode("UTF-8")) <NEW_LINE> if state.result_history: <NEW_LINE> <INDENT> print(_("History:"), ', '.join( self.C.custom( result.outcome_meta().unicode_sigil, result.outcome_meta().color_ansi) for result in state.result_history).encode("UTF-8")) | Human-readable session state exporter. | 6259903091af0d3eaad3aec6 |
class Softmax(function_node.FunctionNode): <NEW_LINE> <INDENT> def __init__(self, axis=1): <NEW_LINE> <INDENT> self.axis = axis <NEW_LINE> <DEDENT> def check_type_forward(self, in_types): <NEW_LINE> <INDENT> type_check._argname(in_types, ('x',)) <NEW_LINE> x_type, = in_types <NEW_LINE> type_check.expect( x_type.dtype.kind == 'f', -x_type.ndim <= self.axis < x_type.ndim, ) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> xp = backend.get_array_module(*x) <NEW_LINE> if xp is cuda.cupy and chainer.should_use_cudnn('>=auto'): <NEW_LINE> <INDENT> y = cudnn.softmax_forward(x[0], self.axis, _algorithm) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> y = x[0] - x[0].max(axis=self.axis, keepdims=True) <NEW_LINE> xp.exp(y, out=y) <NEW_LINE> y /= y.sum(axis=self.axis, keepdims=True) <NEW_LINE> <DEDENT> self.retain_outputs((0,)) <NEW_LINE> return y, <NEW_LINE> <DEDENT> def backward(self, indexes, grad_outputs): <NEW_LINE> <INDENT> y = self.get_retained_outputs()[0] <NEW_LINE> gy, = grad_outputs <NEW_LINE> return _SoftmaxGrad(self.axis).apply((y, gy)) | Softmax activation function. | 6259903073bcbd0ca4bcb32b |
class EDTestCasePluginExecuteExecAimlessLog2Csvv1_0(EDTestCasePluginExecute): <NEW_LINE> <INDENT> def __init__(self, _strTestName=None): <NEW_LINE> <INDENT> EDTestCasePluginExecute.__init__(self, "EDPluginExecAimlessLog2Csvv1_0") <NEW_LINE> self.setDataInputFile(os.path.join(self.getPluginTestsDataHome(), "XSDataInputAimlessLog2Csv_reference.xml")) <NEW_LINE> <DEDENT> def testExecute(self): <NEW_LINE> <INDENT> self.run() <NEW_LINE> edPlugin = self.getPlugin() <NEW_LINE> csvPath = edPlugin.dataOutput.csvPath.path.value <NEW_LINE> EDAssert.equal(True, os.path.exists(csvPath), "Path to csv file exists") <NEW_LINE> referenceCsvFile = os.path.join(self.getPluginTestsDataHome(), "aimless.csv") <NEW_LINE> with open(referenceCsvFile) as f1: <NEW_LINE> <INDENT> referenceOutput = f1.read() <NEW_LINE> <DEDENT> with open(csvPath) as f2: <NEW_LINE> <INDENT> output = f2.read() <NEW_LINE> <DEDENT> EDAssert.equal(referenceOutput, output, "Content is equal to reference") <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> self.addTestMethod(self.testExecute) | Those are all execution tests for the EDNA Exec plugin dimple | 625990306fece00bbaccca49 |
class TestNodeStateSmartfail(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testNodeStateSmartfail(self): <NEW_LINE> <INDENT> pass | NodeStateSmartfail unit test stubs | 6259903030c21e258be998a7 |
class Unit: <NEW_LINE> <INDENT> def __init__(self, unit_name, unit): <NEW_LINE> <INDENT> self.unit_name = unit_name <NEW_LINE> self.unit = unit <NEW_LINE> <DEDENT> @property <NEW_LINE> def agent_state(self): <NEW_LINE> <INDENT> return self.unit.get('AgentState', 'unknown') <NEW_LINE> <DEDENT> @property <NEW_LINE> def workload_state(self): <NEW_LINE> <INDENT> return self.unit.get('Workload', {}).get('Status', '') <NEW_LINE> <DEDENT> @property <NEW_LINE> def extended_agent_state(self): <NEW_LINE> <INDENT> return self.unit.get('UnitAgent', {}).get('Status', '') <NEW_LINE> <DEDENT> @property <NEW_LINE> def workload_info(self): <NEW_LINE> <INDENT> return self.unit.get('Workload', {}).get('Info', '') <NEW_LINE> <DEDENT> @property <NEW_LINE> def machine_id(self): <NEW_LINE> <INDENT> return self.unit.get('Machine', '-1') <NEW_LINE> <DEDENT> @property <NEW_LINE> def public_address(self): <NEW_LINE> <INDENT> return self.unit.get('PublicAddress', None) <NEW_LINE> <DEDENT> @property <NEW_LINE> def agent_state_info(self): <NEW_LINE> <INDENT> return self.unit.get('AgentStateInfo', None) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_compute(self): <NEW_LINE> <INDENT> return 'nova-compute' in self.unit_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_horizon(self): <NEW_LINE> <INDENT> return 'openstack-dashboard' in self.unit_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_jujugui(self): <NEW_LINE> <INDENT> return 'juju-gui' in self.unit_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_cloud_controller(self): <NEW_LINE> <INDENT> return 'nova-cloud-controller' in self.unit_name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Unit: {name}, Machine: {machine}, State: {state}>".format( name=self.unit_name, machine=self.machine_id, state=self.agent_state) | Unit class | 62599030d4950a0f3b11168b |
class WebSocket(object): <NEW_LINE> <INDENT> def __init__(self, handler): <NEW_LINE> <INDENT> self.handler = handler <NEW_LINE> self.tornado_handler = None <NEW_LINE> <DEDENT> async def __call__(self, environ): <NEW_LINE> <INDENT> self.tornado_handler = environ['tornado.handler'] <NEW_LINE> self.environ = environ <NEW_LINE> await self.handler(self) <NEW_LINE> <DEDENT> async def close(self): <NEW_LINE> <INDENT> self.tornado_handler.close() <NEW_LINE> <DEDENT> async def send(self, message): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.tornado_handler.write_message( message, binary=isinstance(message, bytes)) <NEW_LINE> <DEDENT> except tornado.websocket.WebSocketClosedError: <NEW_LINE> <INDENT> raise exceptions.EngineIOError() <NEW_LINE> <DEDENT> <DEDENT> async def wait(self): <NEW_LINE> <INDENT> msg = await self.tornado_handler.get_next_message() <NEW_LINE> if not isinstance(msg, six.binary_type) and not isinstance(msg, six.text_type): <NEW_LINE> <INDENT> raise IOError() <NEW_LINE> <DEDENT> return msg | This wrapper class provides a tornado WebSocket interface that is
somewhat compatible with eventlet's implementation. | 625990304e696a045264e66f |
class PneumoniaClassify(Dataset): <NEW_LINE> <INDENT> def __init__(self, csv_file, root_dir, transform=None): <NEW_LINE> <INDENT> self.label = pd.read_csv(csv_file) <NEW_LINE> self.root_dir = root_dir <NEW_LINE> self.transform = transform <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.label) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> pID = self.label.loc[idx, 'patientId'] <NEW_LINE> dicom = pydicom.dcmread(self.root_dir+pID+'.dcm') <NEW_LINE> img = dicom.pixel_array <NEW_LINE> color_img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB) <NEW_LINE> PIL_img = Image.fromarray(color_img) <NEW_LINE> label = self.label.loc[idx, 'Target'] <NEW_LINE> sample = {'image': PIL_img, 'label': label} <NEW_LINE> if self.transform: <NEW_LINE> <INDENT> sample = self.transform(sample) <NEW_LINE> <DEDENT> return sample | Pneumonia Detection & Localization dataset. | 62599030ec188e330fdf992f |
class CustomScheduler(object): <NEW_LINE> <INDENT> def __init__(self, max_computes=1): <NEW_LINE> <INDENT> self.max_computes = max_computes <NEW_LINE> self.total_computes = 0 <NEW_LINE> <DEDENT> def __call__(self, dsk, keys, **kwargs): <NEW_LINE> <INDENT> import dask <NEW_LINE> self.total_computes += 1 <NEW_LINE> if self.total_computes > self.max_computes: <NEW_LINE> <INDENT> raise RuntimeError("Too many dask computations were scheduled: {}".format(self.total_computes)) <NEW_LINE> <DEDENT> return dask.get(dsk, keys, **kwargs) | Custom dask scheduler that raises an exception if dask is computed too many times. | 6259903015baa72349463034 |
class LLR_slow(ParametrizedTransformation): <NEW_LINE> <INDENT> def _get_par(self, datao): <NEW_LINE> <INDENT> gaussiane = [ estimate_gaussian_per_class(datao, at, common_if_extreme=True) for at in range(len(datao.domain.attributes)) ] <NEW_LINE> normalizec = [] <NEW_LINE> for i,g in zip(range(len(datao.domain.attributes)), gaussiane): <NEW_LINE> <INDENT> r = [ _llrlogratio(ex[i].value, *g) for ex in datao ] <NEW_LINE> normalizec.append((mean(r), std(r))) <NEW_LINE> <DEDENT> return gaussiane, normalizec <NEW_LINE> <DEDENT> def _use_par(self, arr, constructt): <NEW_LINE> <INDENT> gaussiane, normalizec = constructt <NEW_LINE> arr = [ arr[i].value for i in range(len(arr.domain.attributes)) ] <NEW_LINE> return sum ( (_llrlogratio(v, *g)-m)/s for v,g,n in zip(arr, gaussiane, normalizec)) | Slow and rough implementation of LLR (testing correctness). | 62599030d164cc617582200e |
class SomeModel(model.Model): <NEW_LINE> <INDENT> string = basestring, {'repeated': True} <NEW_LINE> integer = int | Some sample model | 62599030d99f1b3c44d0673f |
class KadetTestObjExtendedNewKwargs(KadetTestObj): <NEW_LINE> <INDENT> def new(self): <NEW_LINE> <INDENT> super().new_with(name="test-with-new", size=12) | KadetTestObjExtendedNewKwargs. | 6259903050485f2cf55dc019 |
class Config: <NEW_LINE> <INDENT> config_file = None <NEW_LINE> pattern = {} <NEW_LINE> class Container: <NEW_LINE> <INDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if self.__dict__.has_key(key): <NEW_LINE> <INDENT> return self.__dict__[key] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, config_file, pattern): <NEW_LINE> <INDENT> if not os.path.isfile(config_file): <NEW_LINE> <INDENT> raise os.error("Config %s does not exist!" % config_file) <NEW_LINE> <DEDENT> self.config_file = config_file <NEW_LINE> self.pattern = pattern <NEW_LINE> self.__read_config() <NEW_LINE> <DEDENT> def __read_config(self): <NEW_LINE> <INDENT> conf_parse = SafeConfigParser() <NEW_LINE> for section, options in self.pattern.iteritems(): <NEW_LINE> <INDENT> conf_parse.add_section(section) <NEW_LINE> for option, value in options.iteritems(): <NEW_LINE> <INDENT> conf_parse.set(section, option, value) <NEW_LINE> <DEDENT> <DEDENT> conf_parse.read(self.config_file) <NEW_LINE> for k, v in self.pattern.items(): <NEW_LINE> <INDENT> sect = k.lower() <NEW_LINE> cont = self.Container() <NEW_LINE> for option in v: <NEW_LINE> <INDENT> cont.__dict__[option] = conf_parse.get(k, option) <NEW_LINE> <DEDENT> self.__dict__[sect] = cont | Config class for EleTools CLI Utilities
This class essentially parses the passed configuration file into section
and option values. For every section within the config file, all options
in that section are turned into object attributes. So for example, a file
with this content:
[Local]
db_host
db_port
Would be turned into these values:
* config.local.db_host
* config.local.db_port
It also defines dictionary methods so the attributes are convenient
in text substitution. The same config file would allow this usage:
print "Host: %(db_host)" % config.local | 6259903026238365f5fadbef |
class UCCLogin(BasePage): <NEW_LINE> <INDENT> def match_page_tilte(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.driver.title == "Aviatrix Cloud Controller" <NEW_LINE> <DEDENT> except NoSuchElementException: <NEW_LINE> <INDENT> self.logger.exception("Could not connect to UCC Web Console") <NEW_LINE> <DEDENT> <DEDENT> def is_login_form_present(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> WebDriverWait(self.driver, 10).until( EC.presence_of_element_located( (UCCSignInLocators.LOGIN_FORM)) ) <NEW_LINE> login_form = self.driver.find_element(*UCCSignInLocators.LOGIN_FORM) <NEW_LINE> return 'password?' in login_form.text <NEW_LINE> <DEDENT> except NoSuchElementException: <NEW_LINE> <INDENT> self.logger.exception("Could not find login form") <NEW_LINE> <DEDENT> <DEDENT> def login(self,uemail,passwd): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> WebDriverWait(self.driver, 20).until( EC.presence_of_element_located( (UCCSignInLocators.USERNAME)) ) <NEW_LINE> username = self.driver.find_element(*UCCSignInLocators.USERNAME) <NEW_LINE> password = self.driver.find_element(*UCCSignInLocators.PASSWORD) <NEW_LINE> sign_in_button = self.driver.find_element(*UCCSignInLocators.SIGN_IN_BUTTON) <NEW_LINE> self.logger.debug("User name is %s",uemail) <NEW_LINE> username.send_keys(uemail) <NEW_LINE> self.logger.debug("Password is %s",passwd) <NEW_LINE> password.send_keys(passwd) <NEW_LINE> sign_in_button.submit() <NEW_LINE> <DEDENT> except (WebDriverException,NoSuchElementException): <NEW_LINE> <INDENT> self.logger.exception("Could not sign in UCC successfully") <NEW_LINE> <DEDENT> <DEDENT> def check_for_controller(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> page_url = self.driver.current_url <NEW_LINE> if "/dashboard" not in page_url: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> except NoSuchElementException: <NEW_LINE> <INDENT> self.logger.exception("Login failed") <NEW_LINE> return False | Class method provided by Sam | 6259903023e79379d538d5a6 |
class TextFile(models.Model): <NEW_LINE> <INDENT> base_file = models.ForeignKey(basefile_model.BaseFile, null=True, on_delete=models.CASCADE, blank=True) <NEW_LINE> date_created = models.DateTimeField(auto_now_add=True, null=True) <NEW_LINE> tesseract_text = models.TextField(blank=True, null=True) <NEW_LINE> google_vision_text = models.TextField(blank=True, null=True) <NEW_LINE> date_updated = models.DateTimeField(auto_now=True, null=True) <NEW_LINE> tesseract_response = JSONField(null=True) <NEW_LINE> google_vision_response = JSONField(null=True) <NEW_LINE> google_vision_document_response = JSONField(null=True) <NEW_LINE> google_translate_text = models.TextField(blank=True, null=True) <NEW_LINE> google_translate_text_response = JSONField(null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '%s' % (self.id) | The Text File class is a model that defines which data is available in the Text File table so a database can be created from it.
Method List:
-none
Argument List:
-models.Model: This argument allows the class to access field types.
Author: Zoe LeBlanc | 625990308a43f66fc4bf3223 |
class ArchiveUrlFetchHandler(BaseFetchHandler): <NEW_LINE> <INDENT> def can_handle(self, source): <NEW_LINE> <INDENT> url_parts = self.parse_url(source) <NEW_LINE> if url_parts.scheme not in ('http', 'https', 'ftp', 'file'): <NEW_LINE> <INDENT> return "Wrong source type" <NEW_LINE> <DEDENT> if get_archive_handler(self.base_url(source)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def download(self, source, dest): <NEW_LINE> <INDENT> proto, netloc, path, params, query, fragment = urlparse(source) <NEW_LINE> if proto in ('http', 'https'): <NEW_LINE> <INDENT> auth, barehost = splituser(netloc) <NEW_LINE> if auth is not None: <NEW_LINE> <INDENT> source = urlunparse((proto, barehost, path, params, query, fragment)) <NEW_LINE> username, password = splitpasswd(auth) <NEW_LINE> passman = HTTPPasswordMgrWithDefaultRealm() <NEW_LINE> passman.add_password(None, source, username, password) <NEW_LINE> authhandler = HTTPBasicAuthHandler(passman) <NEW_LINE> opener = build_opener(authhandler) <NEW_LINE> install_opener(opener) <NEW_LINE> <DEDENT> <DEDENT> response = urlopen(source) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(dest, 'wb') as dest_file: <NEW_LINE> <INDENT> dest_file.write(response.read()) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> if os.path.isfile(dest): <NEW_LINE> <INDENT> os.unlink(dest) <NEW_LINE> <DEDENT> raise e <NEW_LINE> <DEDENT> <DEDENT> def download_and_validate(self, url, hashsum, validate="sha1"): <NEW_LINE> <INDENT> tempfile, headers = urlretrieve(url) <NEW_LINE> check_hash(tempfile, hashsum, validate) <NEW_LINE> return tempfile <NEW_LINE> <DEDENT> def install(self, source, dest=None, checksum=None, hash_type='sha1'): <NEW_LINE> <INDENT> url_parts = self.parse_url(source) <NEW_LINE> dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched') <NEW_LINE> if not os.path.exists(dest_dir): <NEW_LINE> <INDENT> mkdir(dest_dir, perms=0o755) <NEW_LINE> <DEDENT> dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path)) <NEW_LINE> try: <NEW_LINE> <INDENT> self.download(source, dld_file) <NEW_LINE> <DEDENT> except URLError as e: <NEW_LINE> <INDENT> raise UnhandledSource(e.reason) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> raise UnhandledSource(e.strerror) <NEW_LINE> <DEDENT> options = parse_qs(url_parts.fragment) <NEW_LINE> for key, value in options.items(): <NEW_LINE> <INDENT> algorithms = hashlib.algorithms_available <NEW_LINE> if key in algorithms: <NEW_LINE> <INDENT> if len(value) != 1: <NEW_LINE> <INDENT> raise TypeError( "Expected 1 hash value, not %d" % len(value)) <NEW_LINE> <DEDENT> expected = value[0] <NEW_LINE> check_hash(dld_file, expected, key) <NEW_LINE> <DEDENT> <DEDENT> if checksum: <NEW_LINE> <INDENT> check_hash(dld_file, checksum, hash_type) <NEW_LINE> <DEDENT> return extract(dld_file, dest) | Handler to download archive files from arbitrary URLs.
Can fetch from http, https, ftp, and file URLs.
Can install either tarballs (.tar, .tgz, .tbz2, etc) or zip files.
Installs the contents of the archive in $CHARM_DIR/fetched/. | 625990300a366e3fb87dda84 |
class TestCaseBase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestCaseBase, self).setUp() <NEW_LINE> self.client = LocalizingClient() <NEW_LINE> ke_flag, created = Flag.objects.get_or_create(name='kumaediting') <NEW_LINE> ke_flag.everyone = True <NEW_LINE> ke_flag.save() <NEW_LINE> self.kumaediting_flag = ke_flag <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.kumaediting_flag.delete() | Base TestCase for the wiki app test cases. | 62599030d4950a0f3b11168c |
class SidewalkAccountInfo(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "AppServerPrivateKey": (str, True), } | `SidewalkAccountInfo <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotwireless-partneraccount-sidewalkaccountinfo.html>`__ | 6259903073bcbd0ca4bcb330 |
class SiteScanData(models.Model): <NEW_LINE> <INDENT> sitescan = models.OneToOneField(SiteScan, null=True) <NEW_LINE> num_rules = models.IntegerField() <NEW_LINE> num_properties = models.IntegerField() <NEW_LINE> scanned_pages = models.IntegerField() <NEW_LINE> css_issues = models.IntegerField() <NEW_LINE> ua_issues = models.BooleanField(default=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u"'{0}' has ({1}) css issues and ({2}) ua issues".format( self.sitescan, self.css_issues, self.ua_issues) | Aggregate data model for site scans | 6259903096565a6dacd2d7dd |
class Star: <NEW_LINE> <INDENT> def __init__(self, x=0.0, y=0.0, name="untitled", flux=-1.0, props={}, fwhm=-1.0, elon=-1.0): <NEW_LINE> <INDENT> self.x = float(x) <NEW_LINE> self.y = float(y) <NEW_LINE> self.name = str(name) <NEW_LINE> self.flux = float(flux) <NEW_LINE> self.props = props <NEW_LINE> self.fwhm = float(fwhm) <NEW_LINE> self.elon = float(elon) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return copy.deepcopy(self) <NEW_LINE> <DEDENT> def __getitem__(self, key) : <NEW_LINE> <INDENT> if key == 'flux': <NEW_LINE> <INDENT> return self.flux <NEW_LINE> <DEDENT> if key == 'fwhm': <NEW_LINE> <INDENT> return self.fwhm <NEW_LINE> <DEDENT> if key == 'elon': <NEW_LINE> <INDENT> return self.elon <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%10s : (%8.2f,%8.2f) | %12.2f | %5.2f %5.2f" % (self.name, self.x, self.y, self.flux, self.fwhm, self.elon) <NEW_LINE> <DEDENT> def coords(self, full=False): <NEW_LINE> <INDENT> if full: <NEW_LINE> <INDENT> return np.array([self.x, self.y, self.flux, self.fwhm, self.elon]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return np.array([self.x, self.y]) <NEW_LINE> <DEDENT> <DEDENT> def distance(self, otherstar): <NEW_LINE> <INDENT> return math.sqrt(np.sum((self.coords() - otherstar.coords())**2)) <NEW_LINE> <DEDENT> def trigangle(self, otherstar): <NEW_LINE> <INDENT> return math.atan2(otherstar.y - self.y, otherstar.x - self.x) * (180.0/math.pi) % 360.0 <NEW_LINE> <DEDENT> def distanceandsort(self, otherstarlist): <NEW_LINE> <INDENT> import operator <NEW_LINE> returnlist=[] <NEW_LINE> for i, star in enumerate(otherstarlist): <NEW_LINE> <INDENT> dist = self.distance(star) <NEW_LINE> returnlist.append({'star':star, 'dist':dist, 'origpos':i}) <NEW_LINE> <DEDENT> returnlist = sorted(returnlist, key=operator.itemgetter('dist')) <NEW_LINE> return returnlist | Simple class to represent a single source (usually stars, but not necessarily).
In this module we often manipulate lists of such Star objects. | 625990308e05c05ec3f6f6aa |
class RegularizedRegression(LinearRegression): <NEW_LINE> <INDENT> def __init__(self, learning_rate=1e-3, n_epoch=2000, alpha=0.05, r=0.5): <NEW_LINE> <INDENT> self.alpha = alpha <NEW_LINE> self.r = r <NEW_LINE> super(RegularizedRegression, self).__init__(learning_rate, n_epoch) <NEW_LINE> <DEDENT> def _training_method(self, X, y): <NEW_LINE> <INDENT> for epoch in range(self.n_epoch): <NEW_LINE> <INDENT> gradient_vector = 2 / X.shape[0] * X.T.dot(X.dot(self.theta) - y) + self.alpha * self._regularized_item() <NEW_LINE> self.theta = self.theta - self.learning_rate * gradient_vector <NEW_LINE> print('epoch: {}. loss (mse): {}'.format(epoch, self._loss(X.dot(self.theta), y))) <NEW_LINE> <DEDENT> <DEDENT> def _regularized_item(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _loss(pred, y): <NEW_LINE> <INDENT> return np.sum((pred - y) ** 2) / len(y) | Ridge(l2-norm) Lasso(l1-norm) Regularization | 6259903026238365f5fadbf1 |
class RecordCache(MutableMapping): <NEW_LINE> <INDENT> def __init__(self, record): <NEW_LINE> <INDENT> assert len(record) == 1, "Unexpected RecordCache(%s)" % record <NEW_LINE> self._record = record <NEW_LINE> <DEDENT> def __contains__(self, name): <NEW_LINE> <INDENT> field = self._record._fields[name] <NEW_LINE> return self._record.env.cache.contains(self._record, field) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> field = self._record._fields[name] <NEW_LINE> return self._record.env.cache.get(self._record, field) <NEW_LINE> <DEDENT> def __setitem__(self, name, value): <NEW_LINE> <INDENT> field = self._record._fields[name] <NEW_LINE> self._record.env.cache.set(self._record, field, value) <NEW_LINE> <DEDENT> def __delitem__(self, name): <NEW_LINE> <INDENT> field = self._record._fields[name] <NEW_LINE> self._record.env.cache.remove(self._record, field) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for field in self._record.env.cache.get_fields(self._record): <NEW_LINE> <INDENT> yield field.name <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return sum(1 for name in self) <NEW_LINE> <DEDENT> def has_value(self, name): <NEW_LINE> <INDENT> field = self._record._fields[name] <NEW_LINE> return self._record.env.cache.contains_value(self._record, field) <NEW_LINE> <DEDENT> def get_value(self, name, default=None): <NEW_LINE> <INDENT> field = self._record._fields[name] <NEW_LINE> return self._record.env.cache.get_value(self._record, field, default) <NEW_LINE> <DEDENT> def set_special(self, name, getter): <NEW_LINE> <INDENT> field = self._record._fields[name] <NEW_LINE> self._record.env.cache.set_special(self._record, field, getter) <NEW_LINE> <DEDENT> def set_failed(self, names, exception): <NEW_LINE> <INDENT> fields = [self._record._fields[name] for name in names] <NEW_LINE> self._record.env.cache.set_failed(self._record, fields, exception) | A mapping from field names to values, to read and update the cache of a record. | 625990306fece00bbaccca4e |
class AmcrestSensor(Entity): <NEW_LINE> <INDENT> def __init__(self, name, camera, sensor_type): <NEW_LINE> <INDENT> self._attrs = {} <NEW_LINE> self._camera = camera <NEW_LINE> self._sensor_type = sensor_type <NEW_LINE> self._name = '{0}_{1}'.format(name, SENSORS.get(self._sensor_type)[0]) <NEW_LINE> self._icon = 'mdi:{}'.format(SENSORS.get(self._sensor_type)[2]) <NEW_LINE> self._state = STATE_UNKNOWN <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> return self._attrs <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return self._icon <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return SENSORS.get(self._sensor_type)[1] <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> _LOGGER.debug("Pulling data from %s sensor.", self._name) <NEW_LINE> try: <NEW_LINE> <INDENT> version, build_date = self._camera.software_information <NEW_LINE> self._attrs['Build Date'] = build_date.split('=')[-1] <NEW_LINE> self._attrs['Version'] = version.split('=')[-1] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self._attrs['Build Date'] = 'Not Available' <NEW_LINE> self._attrs['Version'] = 'Not Available' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._attrs['Serial Number'] = self._camera.serial_number <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self._attrs['Serial Number'] = 'Not Available' <NEW_LINE> <DEDENT> if self._sensor_type == 'motion_detector': <NEW_LINE> <INDENT> self._state = self._camera.is_motion_detected <NEW_LINE> self._attrs['Record Mode'] = self._camera.record_mode <NEW_LINE> <DEDENT> elif self._sensor_type == 'ptz_preset': <NEW_LINE> <INDENT> self._state = self._camera.ptz_presets_count <NEW_LINE> <DEDENT> elif self._sensor_type == 'sdcard': <NEW_LINE> <INDENT> sd_used = self._camera.storage_used <NEW_LINE> sd_total = self._camera.storage_total <NEW_LINE> self._attrs['Total'] = '{0} {1}'.format(*sd_total) <NEW_LINE> self._attrs['Used'] = '{0} {1}'.format(*sd_used) <NEW_LINE> self._state = self._camera.storage_used_percent | A sensor implementation for Amcrest IP camera. | 625990301d351010ab8f4bb6 |
class Toy_Extension(ExtensionEasyBlock): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> super(Toy_Extension, self).run(unpack_src=True) <NEW_LINE> EB_toy.configure_step(self.master, name=self.name) <NEW_LINE> EB_toy.build_step(self.master, name=self.name) <NEW_LINE> EB_toy.install_step(self.master, name=self.name) | Support for building/installing toy. | 625990300a366e3fb87dda86 |
class BatchPredict(Resource): <NEW_LINE> <INDENT> def get(self, username, model_name): <NEW_LINE> <INDENT> parser = reqparse.RequestParser() <NEW_LINE> parser.add_argument('file', type=str, help='Filepath to data') <NEW_LINE> parser.add_argument('skipheader', type=bool, help='Should header be skipped?') <NEW_LINE> parser.add_argument('delimiter', type=str, help='What delimiter is used in data?') <NEW_LINE> args = parser.parse_args() <NEW_LINE> file = args['file'] <NEW_LINE> skipheader = args['skipheader'] <NEW_LINE> delimiter = args['delimiter'] <NEW_LINE> model = session.query(Model).join(User). filter(User.username==username).first() <NEW_LINE> model = pickle.loads(model.model) <NEW_LINE> try: <NEW_LINE> <INDENT> data = read_data(file, skipheader, delimiter) <NEW_LINE> <DEDENT> except OSError as ose: <NEW_LINE> <INDENT> abort(404, {'message': str(ose)}) <NEW_LINE> <DEDENT> predictions = model.predict(data).tolist() <NEW_LINE> predictions = json.dumps(predictions) <NEW_LINE> return {'model': model.get_params(), 'file': file, 'predictions': predictions} | Resource for batch prediction | 6259903021bff66bcd723d04 |
class TargetDetailIs(ClientSideCriterion): <NEW_LINE> <INDENT> def __init__(self, dataFlavor, value): <NEW_LINE> <INDENT> self._propertyName = dataFlavor <NEW_LINE> self._value = value <NEW_LINE> <DEDENT> def paintContent(self, target): <NEW_LINE> <INDENT> super(TargetDetailIs, self).paintContent(target) <NEW_LINE> target.addAttribute('p', self._propertyName) <NEW_LINE> if isinstance(self._value, bool): <NEW_LINE> <INDENT> target.addAttribute('v', self._value.booleanValue()) <NEW_LINE> target.addAttribute('t', 'b') <NEW_LINE> <DEDENT> elif isinstance(self._value, str): <NEW_LINE> <INDENT> target.addAttribute('v', self._value) <NEW_LINE> <DEDENT> <DEDENT> def accept(self, dragEvent): <NEW_LINE> <INDENT> data = dragEvent.getTargetDetails().getData(self._propertyName) <NEW_LINE> return self._value == data <NEW_LINE> <DEDENT> def getIdentifier(self): <NEW_LINE> <INDENT> return 'com.vaadin.event.dd.acceptcriteria.TargetDetailIs' | Criterion for checking if drop target details contains the specific
property with the specific value. Currently only String values are
supported.
TODO: add support for other basic data types that we support in UIDL. | 6259903056b00c62f0fb3960 |
class AverageSentenceLengthExtractor(BaseEstimator, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def average_sentence_length(self, text): <NEW_LINE> <INDENT> return len(text.split(' '))/len(text.split('.')) <NEW_LINE> <DEDENT> def transform(self, X, y=None): <NEW_LINE> <INDENT> return [ [self.average_sentence_length(x)] for x in X] <NEW_LINE> <DEDENT> def fit(self, df, y=None): <NEW_LINE> <INDENT> return self | Takes in an iterable of texts and outputs the average sentence length (words per sentence) for each | 62599030e76e3b2f99fd9aab
class IConstraint(EvaluatedElement): <NEW_LINE> <INDENT> def __init__(self, name, expr=None, scale=1.0, func=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(name, expr=expr, scale=scale, func=func) <NEW_LINE> self._value = None <NEW_LINE> self.value = math.inf <NEW_LINE> self._lb = -math.inf <NEW_LINE> self.lb = -math.inf <NEW_LINE> self._ub = 0.0 <NEW_LINE> self.ub = 0.0 <NEW_LINE> for key in kwargs: <NEW_LINE> <INDENT> if key.lower() == 'lb': <NEW_LINE> <INDENT> self.lb = kwargs[key] <NEW_LINE> <DEDENT> elif key.lower() == 'ub': <NEW_LINE> <INDENT> self.ub = kwargs[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Unknown keyword argument!") <NEW_LINE> <DEDENT> <DEDENT> if len(kwargs) > 1: <NEW_LINE> <INDENT> info = "Constraint '{}': 'lb' is ignored since 'ub' is specified!" .format(self.name) <NEW_LINE> logger.warning(info) <NEW_LINE> opt_logger.warning(info) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def ub(self): <NEW_LINE> <INDENT> return self._ub <NEW_LINE> <DEDENT> @ub.setter <NEW_LINE> def ub(self, value): <NEW_LINE> <INDENT> self._ub = value <NEW_LINE> self._lb = -math.inf <NEW_LINE> self._value = math.inf <NEW_LINE> <DEDENT> @property <NEW_LINE> def lb(self): <NEW_LINE> <INDENT> return self._lb <NEW_LINE> <DEDENT> @lb.setter <NEW_LINE> def lb(self, value): <NEW_LINE> <INDENT> self._lb = value <NEW_LINE> self._ub = math.inf <NEW_LINE> self._value = -math.inf <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> if self.ub == math.inf: <NEW_LINE> <INDENT> return self._lb - self._value <NEW_LINE> <DEDENT> if self.lb == -math.inf: <NEW_LINE> <INDENT> return self._value - self._ub <NEW_LINE> <DEDENT> raise ValueError("Wrong boundary values in IConstraint!") <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, v): <NEW_LINE> <INDENT> self._value = v <NEW_LINE> <DEDENT> def list_item(self): <NEW_LINE> <INDENT> return '{:12} {:^12.4e} {:^12.4e} {:^12.4e}\n'.format( self.name[:12], self._value, self.lb, self.ub) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{:12} {:^12} {:^12} {:^12}\n'.format( 'Name', 'Value', 'Lower Bound', 'Upper Bound') + self.list_item() | Inequality constraint class. Inherited from EvaluatedElement. | 62599030ac7a0e7691f73587 |
class OperationDisplay(Model): <NEW_LINE> <INDENT> _validation = { 'provider': {'readonly': True}, 'resource': {'readonly': True}, 'operation': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, } <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.provider = None <NEW_LINE> self.resource = None <NEW_LINE> self.operation = None | The object that represents the operation.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar provider: Service provider: Relay.
:vartype provider: str
:ivar resource: Resource on which the operation is performed: Invoice,
etc.
:vartype resource: str
:ivar operation: Operation type: Read, write, delete, etc.
:vartype operation: str | 6259903071ff763f4b5e8837 |
class NullType(object): <NEW_LINE> <INDENT> def __init__(self, name=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self.name is not None: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return repr(self) <NEW_LINE> <DEDENT> <DEDENT> if six.PY3: <NEW_LINE> <INDENT> def bool(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return False | A 'null' type different from, but parallel to, None. Core function
is representing emptiness in a way that doesn't overload None.
Instantiate to create desired Null values. | 62599030d99f1b3c44d06743 |
class Selector(Evaluator): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def evaluate(self, votes, n_seats, *args, **kwargs) -> List[Candidate]: <NEW_LINE> <INDENT> raise NotImplementedError | Elect a given number of candidates.
Requires a number of seats to determine the number of candidates to elect. | 6259903030c21e258be998ac |
class Solution: <NEW_LINE> <INDENT> def preorderTraversal(self, root): <NEW_LINE> <INDENT> result, node = [], root <NEW_LINE> while node: <NEW_LINE> <INDENT> if node.left: <NEW_LINE> <INDENT> pre = node.left <NEW_LINE> while pre.right and pre.right != node: <NEW_LINE> <INDENT> pre = pre.right <NEW_LINE> <DEDENT> if pre.right: <NEW_LINE> <INDENT> node, pre.right = node.right, None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append(node.val) <NEW_LINE> node, pre.right = node.left, node <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> result.append(node.val) <NEW_LINE> node = node.right <NEW_LINE> <DEDENT> <DEDENT> return result | @param root: The root of binary tree.
@return: Preorder in ArrayList which contains node values. | 62599030cad5886f8bdc58cb |
class ShowFramerate29970Action(gaupol.RadioAction): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> gaupol.RadioAction.__init__(self, "show_framerate_29_970") <NEW_LINE> framerate = gaupol.conf.editor.framerate <NEW_LINE> self.props.active = (framerate == aeidon.framerates.FPS_29_970) <NEW_LINE> self.props.label = _("2_9.970 fps") <NEW_LINE> self.props.tooltip = _("Calculate nonnative units " "with a framerate of 29.970 fps") <NEW_LINE> self.props.value = aeidon.framerates.FPS_29_970 <NEW_LINE> self.action_group = "main-unsafe" <NEW_LINE> self.framerate = aeidon.framerates.FPS_29_970 <NEW_LINE> self.group = "ShowFramerate23976Action" <NEW_LINE> <DEDENT> def _affirm_doable(self, application, page): <NEW_LINE> <INDENT> aeidon.util.affirm(page is not None) <NEW_LINE> aeidon.util.affirm(page.project.main_file is not None) | Calculate nonnative units with a framerate of 29.970 fps. | 62599030287bf620b6272c87 |
class UserProfile(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, on_delete=models.CASCADE) <NEW_LINE> default_phone_number = models.CharField(max_length=20, null=True, blank=True) <NEW_LINE> default_street_address1 = models.CharField(max_length=80, null=True, blank=True) <NEW_LINE> default_street_address2 = models.CharField(max_length=80, null=True, blank=True) <NEW_LINE> default_postcode = models.CharField(max_length=20, null=True, blank=True) <NEW_LINE> default_town_or_city = models.CharField(max_length=40, null=True, blank=True) <NEW_LINE> default_county = models.CharField(max_length=80, null=True, blank=True) <NEW_LINE> default_country = CountryField(blank_label='Country', null=True, blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.user.username | A user profile model for maintaining default
delivery information and order history | 625990306fece00bbaccca50 |
class UserMentionSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> username = serializers.ReadOnlyField(source='slug') <NEW_LINE> image = serializers.ReadOnlyField(source='get_avatar_url') <NEW_LINE> url = serializers.ReadOnlyField(source='get_absolute_url') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ( 'username', 'image', 'url', 'id', ) | Lightweight serializer matching the user schema expected by mention.js | 62599030711fe17d825e14eb
class Counters: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.c = { 'RDF': 0, 'BNB': 0, 'Dewey': 0, 'VIAFPers': 0, 'VIAFOrg': 0, 'ISNIPers': 0, 'ISNIOrg': 0, 'WikiDataPers': 0, 'WikiDataOrg': 0, 'LCSHTopic': 0, 'LCSHPlace': 0, } | Class for counting the number of lines within a file having particular properties. | 6259903056b00c62f0fb3962 |
class UnbanChatMemberRequest(KickChatMemberRequest): <NEW_LINE> <INDENT> pass | Unban chat member request model.
.. seealso:: https://core.telegram.org/bots/api#unbanchatmember | 625990306fece00bbaccca51 |
class _Select(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.read_fds = set() <NEW_LINE> self.write_fds = set() <NEW_LINE> self.error_fds = set() <NEW_LINE> self.fd_sets = (self.read_fds, self.write_fds, self.error_fds) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def register(self, fd, events): <NEW_LINE> <INDENT> if fd in self.read_fds or fd in self.write_fds or fd in self.error_fds: <NEW_LINE> <INDENT> raise IOError("fd %d already registered" % fd) <NEW_LINE> <DEDENT> if events & IOLoop.READ: <NEW_LINE> <INDENT> self.read_fds.add(fd) <NEW_LINE> <DEDENT> if events & IOLoop.WRITE: <NEW_LINE> <INDENT> self.write_fds.add(fd) <NEW_LINE> <DEDENT> if events & IOLoop.ERROR: <NEW_LINE> <INDENT> self.error_fds.add(fd) <NEW_LINE> self.read_fds.add(fd) <NEW_LINE> <DEDENT> <DEDENT> def modify(self, fd, events): <NEW_LINE> <INDENT> self.unregister(fd) <NEW_LINE> self.register(fd, events) <NEW_LINE> <DEDENT> def unregister(self, fd): <NEW_LINE> <INDENT> self.read_fds.discard(fd) <NEW_LINE> self.write_fds.discard(fd) <NEW_LINE> self.error_fds.discard(fd) <NEW_LINE> <DEDENT> def poll(self, timeout): <NEW_LINE> <INDENT> readable, writeable, errors = select.select( self.read_fds, self.write_fds, self.error_fds, timeout) <NEW_LINE> events = {} <NEW_LINE> for fd in readable: <NEW_LINE> <INDENT> events[fd] = events.get(fd, 0) | IOLoop.READ <NEW_LINE> <DEDENT> for fd in writeable: <NEW_LINE> <INDENT> events[fd] = events.get(fd, 0) | IOLoop.WRITE <NEW_LINE> <DEDENT> for fd in errors: <NEW_LINE> <INDENT> events[fd] = events.get(fd, 0) | IOLoop.ERROR <NEW_LINE> <DEDENT> return events.items() | A simple, select()-based IOLoop implementation for non-Linux systems | 62599030287bf620b6272c89 |
class Devlink(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self._path = path <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._path <NEW_LINE> <DEDENT> @property <NEW_LINE> def path(self): <NEW_LINE> <INDENT> return self._path <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_category(self): <NEW_LINE> <INDENT> base = os.path.dirname(self._path) <NEW_LINE> if not os.path.basename(base).startswith("by-"): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return os.path.dirname(os.path.dirname(base)) == "/dev" <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> if not self.has_category: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return os.path.basename(self._path) <NEW_LINE> <DEDENT> @property <NEW_LINE> def category(self): <NEW_LINE> <INDENT> if not self.has_category: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return os.path.basename(os.path.dirname(self._path)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.path == other.path | Represents a device link.
These are a bit anomalous, since they are recognized by their structure
as paths. | 62599031796e427e5384f81f |
class QDataDepPreview(QtWidgets.QFrame): <NEW_LINE> <INDENT> _SRC_JUMP_TEXT = "Jump to source node?" <NEW_LINE> _DST_JUMP_TEXT = "Jump to destination node?" <NEW_LINE> def __init__(self, parent: QtWidgets.QWidget): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.preview_graph = QDataDepPreviewGraph(self) <NEW_LINE> self._caption = QtWidgets.QLabel(self) <NEW_LINE> self.setStyleSheet(self.styleSheet() + "border: 2px solid black;") <NEW_LINE> self._layout_manager = QtWidgets.QVBoxLayout(self) <NEW_LINE> self._init_widgets() <NEW_LINE> <DEDENT> def set_caption(self, show_src_caption: bool): <NEW_LINE> <INDENT> if show_src_caption: <NEW_LINE> <INDENT> self._caption.setText(self._SRC_JUMP_TEXT) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._caption.setText(self._DST_JUMP_TEXT) <NEW_LINE> <DEDENT> <DEDENT> def _init_widgets(self): <NEW_LINE> <INDENT> parent_background_color = self.parent().palette().color(QtGui.QPalette.Background) <NEW_LINE> self.setStyleSheet(f"background-color: {parent_background_color.name()};") <NEW_LINE> self.setFrameStyle(QtWidgets.QFrame.Raised | QtWidgets.QFrame.Panel) <NEW_LINE> self._layout_manager.addWidget(self.preview_graph, 0, QtCore.Qt.AlignCenter) <NEW_LINE> self._layout_manager.addWidget(self._caption, 1, QtCore.Qt.AlignCenter) | Frame for holding a preview scene | 625990313eb6a72ae038b709 |
class ExposeInternals(Transform): <NEW_LINE> <INDENT> default_priority = 840 <NEW_LINE> def apply(self): <NEW_LINE> <INDENT> if self.document.settings.expose_internals: <NEW_LINE> <INDENT> for node in self.document.traverse(): <NEW_LINE> <INDENT> for att in self.document.settings.expose_internals: <NEW_LINE> <INDENT> value = getattr(node, att, None) <NEW_LINE> if value is not None: <NEW_LINE> <INDENT> node['internal:' + att] = value | Expose internal attributes if ``expose_internals`` setting is set. | 625990318e05c05ec3f6f6ad |
class EndpointMaterialsUnitTest(EntityEndpointsUnitTest): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(EndpointMaterialsUnitTest, self).__init__(*args, **kwargs) <NEW_LINE> self.endpoint_name = "materials" <NEW_LINE> self.endpoints = MaterialEndpoints(self.host, self.port, self.account_id, self.auth_token) <NEW_LINE> <DEDENT> @mock.patch('requests.sessions.Session.request') <NEW_LINE> def test_list(self, mock_request): <NEW_LINE> <INDENT> self.list(mock_request) <NEW_LINE> <DEDENT> @mock.patch('requests.sessions.Session.request') <NEW_LINE> def test_get(self, mock_request): <NEW_LINE> <INDENT> self.get(mock_request) <NEW_LINE> <DEDENT> @mock.patch('requests.sessions.Session.request') <NEW_LINE> def test_create(self, mock_request): <NEW_LINE> <INDENT> self.create(mock_request) <NEW_LINE> <DEDENT> @mock.patch('requests.sessions.Session.request') <NEW_LINE> def test_delete(self, mock_request): <NEW_LINE> <INDENT> self.create(mock_request) | Class for testing materials endpoint. | 6259903150485f2cf55dc021 |
class NextURLMixin(object): <NEW_LINE> <INDENT> next_param = 'next' <NEW_LINE> default_redirect_param = '/' <NEW_LINE> allow_logged_in = False <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if request.user.is_authenticated() and not self.allow_logged_in: <NEW_LINE> <INDENT> return HttpResponseRedirect(self.get_next_url()) <NEW_LINE> <DEDENT> return super(NextURLMixin, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_next_url(self): <NEW_LINE> <INDENT> return self.request.GET.get(self.next_param, self.default_redirect_param) <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> return self.get_next_url() | Mixin for Django FormView, with a wrapper to get the 'next' URL param after, for example, logging in | 625990318c3a8732951f75fc
class Platform(db.Model): <NEW_LINE> <INDENT> __tablename__ = "platforms" <NEW_LINE> platform_id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(64), nullable=False) <NEW_LINE> games = db.relationship("Game", secondary="game_platforms", backref="platforms") <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> p = "<Platform platform_id=%s name=%s>" <NEW_LINE> return p % (self.platform_id, self.name) | Game platform table | 6259903126238365f5fadbf7 |
class MockVerifyToken: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def verify_token_valid(mock_obj, token=None): <NEW_LINE> <INDENT> return 'foo' <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def verify_token_expired(mock_obj, token=None): <NEW_LINE> <INDENT> raise SignatureExpired('bar') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def verify_token_error(mock_obj, token=None): <NEW_LINE> <INDENT> raise EnhancedCrypto.EnhancedCryptoException('bar') | This class just provides necessary mock methods. | 625990318c3a8732951f75fd |
class GeometryApiField(ApiField): <NEW_LINE> <INDENT> dehydrated_type = 'geometry' <NEW_LINE> help_text = 'Geometry data.' <NEW_LINE> def hydrate(self, bundle): <NEW_LINE> <INDENT> value = super(GeometryApiField, self).hydrate(bundle) <NEW_LINE> if value is None: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> elif isinstance(value, GEOSGeometry): <NEW_LINE> <INDENT> return value.json <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return simplejson.dumps(value) <NEW_LINE> <DEDENT> <DEDENT> def dehydrate(self, obj): <NEW_LINE> <INDENT> return self.convert(super(GeometryApiField, self).dehydrate(obj)) <NEW_LINE> <DEDENT> def convert(self, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(value, dict): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> return simplejson.loads(value.geojson) | Custom ApiField for dealing with data from GeometryFields (by serializing them as GeoJSON). | 62599031a4f1c619b294f69b |
class DeleteImportedKeyMaterialResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId") | DeleteImportedKeyMaterial response structure.
| 6259903121bff66bcd723d0a |
class Actor(nn.Module): <NEW_LINE> <INDENT> def __init__(self, state_size, action_size, seed, hidden_size=256, init_w=3e-3, log_std_min=-20, log_std_max=2): <NEW_LINE> <INDENT> super(Actor, self).__init__() <NEW_LINE> self.seed = torch.manual_seed(seed) <NEW_LINE> self.log_std_min = log_std_min <NEW_LINE> self.log_std_max = log_std_max <NEW_LINE> self.log_std_linear = nn.Linear(hidden_size, action_size) <NEW_LINE> self.cnn_layers = nn.Sequential( nn.Conv2d(3, 4, kernel_size=3, stride=1, padding=1), nn.BatchNorm2d(4), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2, stride=2), nn.Conv2d(4, 4, kernel_size=3, stride=1, padding=1), nn.BatchNorm2d(4), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2, stride=2), nn.Flatten() ) <NEW_LINE> self.linear_layers1 = nn.Sequential( nn.Linear(16 * 64 * 1, 128), nn.Tanh(), nn.Linear(128, 128), nn.Tanh(), nn.Linear(128, 2) ) <NEW_LINE> self.linear_layers2 = nn.Sequential( nn.Linear(in_features = 16*16*4, out_features = 128), nn.Tanh(), nn.Linear(128, 128), nn.Tanh(), nn.Linear(128, 2) ) <NEW_LINE> <DEDENT> def reset_parameters(self): <NEW_LINE> <INDENT> self.fc1.weight.data.uniform_(*hidden_init(self.fc1)) <NEW_LINE> self.fc2.weight.data.uniform_(*hidden_init(self.fc2)) <NEW_LINE> self.mu.weight.data.uniform_(-init_w, init_w) <NEW_LINE> self.log_std_linear.weight.data.uniform_(-init_w, init_w) <NEW_LINE> <DEDENT> def forward(self, state): <NEW_LINE> <INDENT> x = self.cnn_layers(state) <NEW_LINE> mu = self.linear_layers1(x) <NEW_LINE> log_std = self.linear_layers2(x) <NEW_LINE> log_std = torch.clamp(log_std, self.log_std_min, self.log_std_max) <NEW_LINE> return mu, log_std <NEW_LINE> <DEDENT> def evaluate(self, state, epsilon=1e-6): <NEW_LINE> <INDENT> mu, log_std = self.forward(state) <NEW_LINE> std = log_std.exp() <NEW_LINE> dist = Normal(0, 1) <NEW_LINE> e = dist.sample().to(device) <NEW_LINE> action = torch.tanh(mu + e * std) <NEW_LINE> log_prob = Normal(mu, std).log_prob(mu + e * std) - torch.log(1 - action.pow(2) + epsilon) <NEW_LINE> return action, log_prob <NEW_LINE> <DEDENT> def get_action(self, state): <NEW_LINE> <INDENT> mu, log_std = self.forward(state.unsqueeze(0)) <NEW_LINE> std = log_std.exp() <NEW_LINE> dist = Normal(0, 1) <NEW_LINE> e = dist.sample().to(device) <NEW_LINE> action = torch.tanh(mu + e * std).cpu() <NEW_LINE> return action[0] | Actor (Policy) Model. | 6259903171ff763f4b5e883d |
class ProxyConnection(object): <NEW_LINE> <INDENT> __slots__ = ('server', 'is_closed', 'streams', 'loop', 'authority', 'connection_num') <NEW_LINE> def __init__(self, server: 'proxy.ProxyServer.ProxyServer', stream_operators: Tuple[Tuple[asyncio.StreamReader, asyncio.StreamReader], Tuple[asyncio.StreamWriter, asyncio.StreamWriter]], authority: Tuple[str, int], connection_num: Optional[int]=None): <NEW_LINE> <INDENT> self.server = server <NEW_LINE> self.is_closed = False <NEW_LINE> self.streams = stream_operators <NEW_LINE> self.loop = server.loop <NEW_LINE> self.authority = authority <NEW_LINE> self.connection_num = connection_num <NEW_LINE> self.pipe_streams() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.is_closed = True <NEW_LINE> for writer in self.streams[1]: <NEW_LINE> <INDENT> writer.close() <NEW_LINE> <DEDENT> <DEDENT> def pipe_streams(self): <NEW_LINE> <INDENT> self.loop.create_task(self._pipe(self.streams[0][1], self.streams[1][0], self.server.handle_request)) <NEW_LINE> self.loop.create_task(self._pipe(self.streams[0][0], self.streams[1][1], self.server.handle_response)) <NEW_LINE> <DEDENT> async def _pipe(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter, handler: Callable[[bytes], Awaitable]): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> packet = await self.server.read_packet(reader) <NEW_LINE> if packet == b'': <NEW_LINE> <INDENT> self.close() <NEW_LINE> return None <NEW_LINE> <DEDENT> await self.send(packet, handler, writer) <NEW_LINE> <DEDENT> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> if not self.is_closed: <NEW_LINE> <INDENT> print(f"[x] {writer.get_extra_info('perrname')} error: {traceback.format_exc()}", file=sys.stderr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> traceback.print_exc() <NEW_LINE> <DEDENT> <DEDENT> self.close() <NEW_LINE> <DEDENT> async def send(self, packet: bytes, handler: Awaitable, writer: asyncio.StreamWriter): <NEW_LINE> <INDENT> packet = await handler(self, packet) <NEW_LINE> writer.write(packet) <NEW_LINE> await writer.drain() <NEW_LINE> <DEDENT> async def send_client(packet: bytes): <NEW_LINE> <INDENT> await self.send(packet, self.server.handle_response, self.streams[1][1]) <NEW_LINE> <DEDENT> async def send_server(packet: bytes): <NEW_LINE> <INDENT> await self.send(packet, self.server.handle_request, self.streams[1][0]) | Class to describe a connections between the client and the server.
Exposes the `asyncio.StreamReader`s and `asyncio.StreamWriter`s of both the client and the server. | 6259903130c21e258be998b2 |
class UserBasedExceptionMiddleware(MiddlewareMixin): <NEW_LINE> <INDENT> def process_exception(self, request, exception): <NEW_LINE> <INDENT> if request.user.is_superuser or request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS: <NEW_LINE> <INDENT> return technical_500_response(request, *sys.exc_info()) | Let superuser see debug page when exception happens | 625990318a43f66fc4bf322d |
class PyungoError(Exception): <NEW_LINE> <INDENT> pass | pyungo custom exception | 625990311d351010ab8f4bbe |
class InvalidParameterValue(OWSException): <NEW_LINE> <INDENT> pass | Operation request contains an invalid parameter value | 62599031711fe17d825e14ee |
class CartItem(models.Model): <NEW_LINE> <INDENT> cart = models.ForeignKey(Cart, related_name="items") <NEW_LINE> quantity = models.IntegerField() <NEW_LINE> product = models.ForeignKey(Product) <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> app_label = 'shop' <NEW_LINE> verbose_name = _('Cart item') <NEW_LINE> verbose_name_plural = _('Cart items') <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(CartItem, self).__init__(*args,**kwargs) <NEW_LINE> self.extra_price_fields = [] <NEW_LINE> self.line_subtotal = Decimal('0.0') <NEW_LINE> self.line_total = Decimal('0.0') <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.line_subtotal = self.product.get_price() * self.quantity <NEW_LINE> self.line_total = self.line_subtotal <NEW_LINE> for modifier in cart_modifiers_pool.get_modifiers_list(): <NEW_LINE> <INDENT> modifier.process_cart_item(self) <NEW_LINE> <DEDENT> for label, value in self.extra_price_fields: <NEW_LINE> <INDENT> self.line_total = self.line_total + value <NEW_LINE> <DEDENT> return self.line_total | This is a holder for the quantity of items in the cart and, obviously, a
pointer to the actual Product being purchased :) | 6259903150485f2cf55dc024 |
class EnergyMeter: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def compute_energy(self, cpu, network, disk): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_data(self, readings): <NEW_LINE> <INDENT> cpu = readings["cpu"] <NEW_LINE> disk = readings["disk"] <NEW_LINE> network = readings["network"] <NEW_LINE> readings["energy"] = self.compute_energy(cpu=cpu, network=network, disk=disk) <NEW_LINE> return readings | Base class for defining energy meter. | 6259903121bff66bcd723d0c |
class Service(object): <NEW_LINE> <INDENT> def __init__(self, executable_path, port=0, service_args=None, log_path=None): <NEW_LINE> <INDENT> self.port = port <NEW_LINE> self.path = executable_path <NEW_LINE> self.service_args= service_args <NEW_LINE> if self.port == 0: <NEW_LINE> <INDENT> self.port = utils.free_port() <NEW_LINE> <DEDENT> if self.service_args is None: <NEW_LINE> <INDENT> self.service_args = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.service_args=service_args[:] <NEW_LINE> <DEDENT> self.service_args.insert(0, self.path) <NEW_LINE> self.service_args.append("--webdriver=%d" % self.port) <NEW_LINE> self.process = None <NEW_LINE> if not log_path: <NEW_LINE> <INDENT> log_path = "ghostdriver.log" <NEW_LINE> <DEDENT> self._log = open(log_path, 'w') <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.stop() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.process = subprocess.Popen(self.service_args, stdin=subprocess.PIPE, close_fds=platform.system() != 'Windows', stdout=self._log, stderr=self._log) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise WebDriverException("Unable to start phantomjs with ghostdriver.", e) <NEW_LINE> <DEDENT> count = 0 <NEW_LINE> while not utils.is_connectable(self.port): <NEW_LINE> <INDENT> count += 1 <NEW_LINE> time.sleep(1) <NEW_LINE> if count == 30: <NEW_LINE> <INDENT> raise WebDriverException("Can not connect to GhostDriver") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def service_url(self): <NEW_LINE> <INDENT> return "http://localhost:%d/wd/hub" % self.port <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if self._log: <NEW_LINE> <INDENT> self._log.close() <NEW_LINE> self._log = None <NEW_LINE> <DEDENT> if self.process is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if self.process: <NEW_LINE> <INDENT> self.process.stdin.close() <NEW_LINE> self.process.kill() <NEW_LINE> self.process.wait() <NEW_LINE> <DEDENT> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass | Object that manages the starting and stopping of PhantomJS / Ghostdriver | 6259903171ff763f4b5e883f |
class InspiralCoherentJob(InspiralAnalysisJob): <NEW_LINE> <INDENT> def __init__(self,cp): <NEW_LINE> <INDENT> exec_name = 'inspiral' <NEW_LINE> sections = ['data'] <NEW_LINE> extension = 'xml' <NEW_LINE> InspiralAnalysisJob.__init__(self,cp,sections,exec_name,extension) <NEW_LINE> self.add_condor_cmd('environment',"KMP_LIBRARY=serial;MKL_SERIAL=yes") | A lalapps_inspiral job used by the inspiral pipeline. The static options
are read from the sections [data] and [inspiral] in the ini file. The
stdout and stderr from the job are directed to the logs directory. The job
runs in the universe specified in the ini file. The path to the executable
is determined from the ini file. | 625990310a366e3fb87dda8f |
class File(object): <NEW_LINE> <INDENT> def __init__(self, transfer_id, filename): <NEW_LINE> <INDENT> self.transfer_id = transfer_id <NEW_LINE> self.filename = filename <NEW_LINE> self.index = 0 <NEW_LINE> self.error = False <NEW_LINE> self.finished = False <NEW_LINE> self._part_filepath = os.path.join(FULL_EXTRACT_DIR, self.transfer_id, self.filename + ".part") <NEW_LINE> if os.path.exists(self._part_filepath): <NEW_LINE> <INDENT> logger.warning("File '%s' exists already!", self._part_filepath) <NEW_LINE> <DEDENT> if not os.path.exists(os.path.join(FULL_EXTRACT_DIR, self.transfer_id)): <NEW_LINE> <INDENT> os.makedirs(os.path.join(FULL_EXTRACT_DIR, self.transfer_id)) <NEW_LINE> <DEDENT> self.handle = open(self._part_filepath, "wb") <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.handle.close() <NEW_LINE> if self.error is False: <NEW_LINE> <INDENT> final_name = os.path.join(FULL_EXTRACT_DIR, self.filename) <NEW_LINE> if not os.path.exists(final_name): <NEW_LINE> <INDENT> os.rename(self._part_filepath, final_name) <NEW_LINE> try: <NEW_LINE> <INDENT> os.rmdir(os.path.dirname(self._part_filepath)) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> os.rename(self._part_filepath, os.path.join(os.path.dirname(self._part_filepath), self.filename)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.filename | File data | 62599031287bf620b6272c8f |