code (stringlengths 4-4.48k) | docstring (stringlengths 1-6.45k) | _id (stringlengths 24) |
---|---|---|
class Solution(object): <NEW_LINE> <INDENT> def getIntersectionNode(self, headA, headB): <NEW_LINE> <INDENT> p1, p2 = headA, headB <NEW_LINE> while p1 != p2: <NEW_LINE> <INDENT> p1 = p1.next if p1 else headB <NEW_LINE> p2 = p2.next if p2 else headA <NEW_LINE> <DEDENT> return p1 | Given two linked lists, find the first node at which they intersect.
Input: intersectVal = 8, listA = [4,1,8,4,5], listB = [5,0,1,8,4,5], skipA = 2, skipB = 3
Output: Reference of the node with value = 8
Input explanation: The value of the intersection node is 8 (note that it must not be 0 if the two lists intersect).
Counting from their respective heads, list A is [4,1,8,4,5] and list B is [5,0,1,8,4,5].
In A, there are 2 nodes before the intersection node; in B, there are 3 nodes before it.
Input: intersectVal = 2, listA = [0,9,1,2,4], listB = [3,2,4], skipA = 3, skipB = 1
Output: Reference of the node with value = 2
Input explanation: The value of the intersection node is 2 (note that it must not be 0 if the two lists intersect).
Counting from their respective heads, list A is [0,9,1,2,4] and list B is [3,2,4].
In A, there are 3 nodes before the intersection node; in B, there is 1 node before it.
Input: intersectVal = 0, listA = [2,6,4], listB = [1,5], skipA = 3, skipB = 2
Output: null
Input explanation: Counting from their respective heads, list A is [2,6,4] and list B is [1,5].
Because the two lists do not intersect, intersectVal must be 0, while skipA and skipB can be arbitrary values.
Explanation: The two lists do not intersect, so null is returned.
If the two linked lists have no intersection, return null. After the result is returned, both lists must keep their original structure.
You may assume there are no cycles anywhere in the linked structure. The program should, as far as possible, run in O(n) time and use only O(1) memory.
This problem is the same as problem 160 on the main site: https://leetcode-cn.com/problems/intersection-of-two-linked-lists/
Link: https://leetcode-cn.com/problems/liang-ge-lian-biao-de-di-yi-ge-gong-gong-jie-dian-lcof
| 62599039c432627299fa41b3 |
class OutOfResourceException(Exception): <NEW_LINE> <INDENT> pass | The implementation has run out of operating system resources,
such as buffers, main memory, or disk space. | 62599039d164cc617582212f |
class CalEvent(DeclarativeBase): <NEW_LINE> <INDENT> __tablename__ = 'CalEvent' <NEW_LINE> uid = Column(Integer, primary_key=True, nullable=False, autoincrement=True) <NEW_LINE> employee_uid = Column(Integer, ForeignKey('Employee.uid', ondelete='CASCADE', onupdate='CASCADE'), nullable=True, index=True) <NEW_LINE> order_phase_uid = Column(Integer, ForeignKey('OrderPhase.uid', ondelete='CASCADE', onupdate='CASCADE'), nullable=True, index=True) <NEW_LINE> event_start = Column(DateTime, nullable=False, index=True) <NEW_LINE> event_end = Column(DateTime, nullable=False, index=True) <NEW_LINE> comment = Column(String(length=200), nullable=True) <NEW_LINE> employee = relationship('Employee', backref=backref('cal_event_list', order_by='CalEvent.event_start', cascade='all,delete-orphan')) <NEW_LINE> order_phase = relationship('OrderPhase', backref=backref('cal_event_list', order_by='CalEvent.event_start', cascade='all,delete-orphan')) <NEW_LINE> def __init__(self, event_start, event_end, comment): <NEW_LINE> <INDENT> self.event_start = event_start <NEW_LINE> self.event_end = event_end <NEW_LINE> self.comment = comment <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> repr_fmt = ("{self.__class__.__name__}(" "employee.uid={self.employee_uid!r}, " "order_phase.uid={self.order_phase_uid!r}, " "event_start={self.event_start!r}, " "event_end={self.event_end!r}, " "comment={self.comment!r})") <NEW_LINE> return repr_fmt.format(self=self) <NEW_LINE> <DEDENT> def event_obj(self): <NEW_LINE> <INDENT> dict_ = dict() <NEW_LINE> dict_['id'] = ('cal_event_{uid}'.format(uid=self.uid)) <NEW_LINE> dict_['title'] = u"{ref}\u00a0: {label}".format(ref=self.order_phase.order.order_ref, label=self.order_phase.label) <NEW_LINE> dict_['allDay'] = False <NEW_LINE> dict_['start'] = self.event_start.strftime('%Y-%m-%dT%H:%M:%SZ') <NEW_LINE> dict_['end'] = self.event_end.strftime('%Y-%m-%dT%H:%M:%SZ') <NEW_LINE> dict_['className'] = self.order_phase.order.project_cat <NEW_LINE> dict_['employee_name'] = self.employee.employee_name <NEW_LINE> dict_['order_ref'] = self.order_phase.order.order_ref <NEW_LINE> dict_['order_phase_label'] = self.order_phase.label <NEW_LINE> dict_['comment'] = self.comment <NEW_LINE> return dict_ | Calendar event.
:see: http://arshaw.com/fullcalendar/docs/event_data/Event_Object/ | 62599039711fe17d825e1579 |
class TestSequence(unittest.TestCase): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> test_string = ("{RACK,66}{IO_BLADE,57}") <NEW_LINE> bigbuf=oh_big_textbuffer() <NEW_LINE> ep=SaHpiEntityPathT() <NEW_LINE> err = oh_encode_entitypath(test_string, ep) <NEW_LINE> self.assertEqual (err!=None,True) <NEW_LINE> self.assertEqual (ep.Entry[0].EntityType != SAHPI_ENT_IO_BLADE,False) <NEW_LINE> self.assertEqual (ep.Entry[0].EntityLocation != 57,False) <NEW_LINE> self.assertEqual (ep.Entry[1].EntityType != SAHPI_ENT_RACK,False) <NEW_LINE> self.assertEqual (ep.Entry[1].EntityLocation != 66,False) <NEW_LINE> oh_init_bigtext(bigbuf) <NEW_LINE> err = oh_decode_entitypath(ep, bigbuf) <NEW_LINE> self.assertEqual (err!=None,True) <NEW_LINE> self.assertEqual (bigbuf.Data!= test_string,False) | main:
epathstr -> epath test
Test if an entity path string is converted properly into an entity path. | 625990390a366e3fb87ddba0 |
class JSONWebSocket: <NEW_LINE> <INDENT> def __init__(self, ws): <NEW_LINE> <INDENT> self.ws = ws <NEW_LINE> self.rlock = threading.RLock() <NEW_LINE> self.wlock = threading.RLock() <NEW_LINE> <DEDENT> def _recv_raw(self): <NEW_LINE> <INDENT> with self.rlock: <NEW_LINE> <INDENT> while 1: <NEW_LINE> <INDENT> message = self.ws.read_frame() <NEW_LINE> if message is None: raise ConnectionClosedError() <NEW_LINE> if message.msgtype == 1: return message.content <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def recv(self): <NEW_LINE> <INDENT> return json.loads(self._recv_raw()) <NEW_LINE> <DEDENT> def _send_raw(self, data): <NEW_LINE> <INDENT> with self.wlock: <NEW_LINE> <INDENT> self.ws.write_text_frame(unicode(data)) <NEW_LINE> <DEDENT> <DEDENT> def send(self, obj): <NEW_LINE> <INDENT> self._send_raw(json.dumps(obj)) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.ws.close() | JSONWebSocketWrapper(ws) -> new instance
JSON-reading/writing WebSocket wrapper.
Provides recv()/send() methods that transparently encode/decode JSON.
Reads and writes are serialized with independent locks; the reading
lock is to be acquired "outside" the write lock. | 6259903907d97122c4217e58 |
class ConfigTypeError(ConfigError, TypeError): <NEW_LINE> <INDENT> pass | :exc:`TypeError` specific for configuration. | 625990393c8af77a43b68819 |
class VanillaLstm_LHUC(LstmBase_LHUC): <NEW_LINE> <INDENT> def __init__(self, rng, x, n_in, n_h, p=0.0, training=0): <NEW_LINE> <INDENT> LstmBase_LHUC.__init__(self, rng, x, n_in, n_h, p, training) <NEW_LINE> self.params = [self.W_xi, self.W_hi, self.w_ci, self.W_xf, self.W_hf, self.w_cf, self.W_xo, self.W_ho, self.w_co, self.W_xc, self.W_hc, self.b_i, self.b_f, self.b_o, self.b_c, self.C] <NEW_LINE> <DEDENT> def lstm_as_activation_function(self, Wix, Wfx, Wcx, Wox, h_tm1, c_tm1): <NEW_LINE> <INDENT> i_t = T.nnet.sigmoid(Wix + T.dot(h_tm1, self.W_hi) + self.w_ci * c_tm1 + self.b_i) <NEW_LINE> f_t = T.nnet.sigmoid(Wfx + T.dot(h_tm1, self.W_hf) + self.w_cf * c_tm1 + self.b_f) <NEW_LINE> c_t = f_t * c_tm1 + i_t * T.tanh(Wcx + T.dot(h_tm1, self.W_hc) + self.b_c) <NEW_LINE> o_t = T.nnet.sigmoid(Wox + T.dot(h_tm1, self.W_ho) + self.w_co * c_t + self.b_o) <NEW_LINE> h_t = o_t * T.tanh(c_t) <NEW_LINE> return h_t, c_t | This class implements the standard LSTM block, inheriting the generic class :class:`layers.gating.LstmBase`.
| 62599039be383301e02549d2 |
class MetaClass(meta): <NEW_LINE> <INDENT> def __new__(cls, name, this_bases, attribs): <NEW_LINE> <INDENT> return meta(name, bases, attribs) | Indirection for the provided metaclass. | 6259903915baa72349463156 |
class CopyDummyTravelDataWithModWPA(CopyDummyTravelData): <NEW_LINE> <INDENT> def run(self, config, year): <NEW_LINE> <INDENT> logger.start_block("Starting CopyDummyTravelData.run(...)") <NEW_LINE> self.config = config <NEW_LINE> self.travel_model_configuration = config['travel_model_configuration'] <NEW_LINE> self.base_year = self.travel_model_configuration['base_year'] <NEW_LINE> if year == self.base_year+1: <NEW_LINE> <INDENT> logger.log_status("Prepare copying pre-calculated MATSim travel data to OPUS_HOME tmp directory.") <NEW_LINE> self.copy_dummy_travel_data() <NEW_LINE> self.modify_workplace_accessibility() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.log_status("Travel data is already copied in the first iteration.") <NEW_LINE> <DEDENT> logger.end_block() <NEW_LINE> <DEDENT> def modify_workplace_accessibility(self): <NEW_LINE> <INDENT> study_zone = 908 <NEW_LINE> min_wpa = '3.0' <NEW_LINE> max_wpa = '15.0' <NEW_LINE> logger.log_status("Zone ID study zone = %s" %study_zone) <NEW_LINE> in_file = open(self.workplace_accessibility_destination, 'r') <NEW_LINE> str_list = [] <NEW_LINE> line = in_file.readline() <NEW_LINE> get_indices = GetIndices(line) <NEW_LINE> index_zone_id = get_indices.get_zone_id_index() <NEW_LINE> index_wpa = get_indices.get_workplace_asseccibility_index() <NEW_LINE> number_of_colums = get_indices.get_number_of_colums() <NEW_LINE> row = line.split(',') <NEW_LINE> str_list.append( row[index_zone_id].strip('\r\n') +','+ row[index_wpa].strip('\r\n') +'\r\n') <NEW_LINE> line = in_file.readline() <NEW_LINE> while line: <NEW_LINE> <INDENT> row = line.split(',') <NEW_LINE> if len(row) != number_of_colums: <NEW_LINE> <INDENT> raise StandardError('Error in number of colums: %s' %row) <NEW_LINE> <DEDENT> zone_id = int(row[index_zone_id].strip('\r\n')) <NEW_LINE> if zone_id == study_zone: <NEW_LINE> <INDENT> row[index_wpa] = max_wpa <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> row[index_wpa] = min_wpa <NEW_LINE> <DEDENT> str_list.append( row[index_zone_id].strip('\r\n') +','+ row[index_wpa].strip('\r\n') +'\r\n') <NEW_LINE> line = in_file.readline() <NEW_LINE> <DEDENT> in_file.close() <NEW_LINE> out_file = open(self.workplace_accessibility_destination, 'w') <NEW_LINE> logger.log_status("Copying modified travel data onto disc.") <NEW_LINE> for row in str_list: <NEW_LINE> <INDENT> out_file.write(row) <NEW_LINE> <DEDENT> out_file.close(); <NEW_LINE> logger.log_status("Finished copy process.") | Copies dummy travel data for testing reasons into opus home tmp directory to replace the
travel data in urbansim data set. | 6259903971ff763f4b5e8956 |
class ChunkParser: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._buf = b'' <NEW_LINE> <DEDENT> def get_chunks(self, new_data_bytes): <NEW_LINE> <INDENT> self._buf += new_data_bytes <NEW_LINE> while True: <NEW_LINE> <INDENT> buf_decoded = _best_effort_decode(self._buf) <NEW_LINE> buf_utf16 = buf_decoded.encode('utf-16')[2:] <NEW_LINE> length_str_match = LEN_REGEX.match(buf_decoded) <NEW_LINE> if length_str_match is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> length_str = length_str_match.group(1) <NEW_LINE> length = int(length_str) * 2 <NEW_LINE> length_length = len((length_str + '\n').encode('utf-16')[2:]) <NEW_LINE> if len(buf_utf16) - length_length < length: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> submission = buf_utf16[length_length:length_length + length] <NEW_LINE> yield submission.decode('utf-16') <NEW_LINE> drop_length = (len((length_str + '\n').encode()) + len(submission.decode('utf-16').encode())) <NEW_LINE> self._buf = self._buf[drop_length:] | Parse data from the backward channel into chunks.
Responses from the backward channel consist of a sequence of chunks which
are streamed to the client. Each chunk is prefixed with its length,
followed by a newline. The length allows the client to identify when the
entire chunk has been received. | 6259903916aa5153ce4016a9 |
class WindowsChunkedWriter: <NEW_LINE> <INDENT> def __init__(self, wrapped): <NEW_LINE> <INDENT> self.__wrapped = wrapped <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self.__wrapped, name) <NEW_LINE> <DEDENT> def write(self, text): <NEW_LINE> <INDENT> total_to_write = len(text) <NEW_LINE> written = 0 <NEW_LINE> while written < total_to_write: <NEW_LINE> <INDENT> to_write = min(total_to_write - written, MAX_BYTES_WRITTEN) <NEW_LINE> self.__wrapped.write(text[written : written + to_write]) <NEW_LINE> written += to_write | Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()' which we wrap to write in
limited chunks due to a Windows limitation on binary console streams. | 6259903923e79379d538d6bc |
class TagAnnotation(mm.Schema): <NEW_LINE> <INDENT> tag = mm.fields.Str(required=True, description="tag to attach to annoation") | a simple tagged annotation | 625990398a349b6b43687400 |
class BasePolicy(tf.keras.Model): <NEW_LINE> <INDENT> def __init__(self, state_dim, action_dim, action_spec, hidden_dims = (256, 256), eps = 1e-6): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> relu_gain = tf.math.sqrt(2.0) <NEW_LINE> relu_orthogonal = tf.keras.initializers.Orthogonal(relu_gain) <NEW_LINE> near_zero_orthogonal = tf.keras.initializers.Orthogonal(1e-2) <NEW_LINE> layers = [] <NEW_LINE> for hidden_dim in hidden_dims: <NEW_LINE> <INDENT> layers.append( tf.keras.layers.Dense( hidden_dim, activation=tf.nn.relu, kernel_initializer=relu_orthogonal)) <NEW_LINE> <DEDENT> inputs = tf.keras.Input(shape=(state_dim,)) <NEW_LINE> outputs = tf.keras.Sequential( layers + [tf.keras.layers.Dense( action_dim, kernel_initializer=near_zero_orthogonal)] )(inputs) <NEW_LINE> self.trunk = tf.keras.Model(inputs=inputs, outputs=outputs) <NEW_LINE> self.action_spec = action_spec <NEW_LINE> self.action_mean = tf.constant( (action_spec.maximum + action_spec.minimum) / 2.0, dtype=tf.float32) <NEW_LINE> self.action_scale = tf.constant( (action_spec.maximum - action_spec.minimum) / 2.0, dtype=tf.float32) <NEW_LINE> self.eps = eps | Base class for policies. | 625990398a43f66fc4bf334a |
class LensCorrection(NonLinearCoordinateTransform): <NEW_LINE> <INDENT> className = 'lenscorrection.NonLinearTransform' | a placeholder for the lenscorrection transform, same as NonLinearTransform
for now | 6259903923e79379d538d6bd |
class ModifyAccessPeriodResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId") | ModifyAccessPeriod response structure
| 625990393c8af77a43b6881a |
class Meta: <NEW_LINE> <INDENT> model = JobConnectionResult <NEW_LINE> fields = ['results'] <NEW_LINE> qpc_allow_empty_fields = ['results'] | Metadata for serializer. | 625990391d351010ab8f4cd8 |
class ActivityCounter(object): <NEW_LINE> <INDENT> def __init__(self, activity: str, n_total: int = None, report_every: int = 1000, loglevel: int = logging.DEBUG) -> None: <NEW_LINE> <INDENT> self.activity = activity <NEW_LINE> self.count = 0 <NEW_LINE> self.n_total = n_total <NEW_LINE> self.report_every = report_every <NEW_LINE> self.loglevel = loglevel <NEW_LINE> <DEDENT> def tick(self) -> None: <NEW_LINE> <INDENT> self.count += 1 <NEW_LINE> c = self.count <NEW_LINE> n = self.n_total <NEW_LINE> if c == 1 or c % self.report_every == 0 or c == n: <NEW_LINE> <INDENT> if self.n_total is not None: <NEW_LINE> <INDENT> of_n = f" of {n}" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> of_n = "" <NEW_LINE> <DEDENT> log.log(self.loglevel, f"{self.activity} {c}{of_n}") | Simple class to report progress in a repetitive activity. | 62599039a8ecb033258723dc |
class ExtendTool(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "curve.extend_tool" <NEW_LINE> bl_label = "Extend" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> ob = context.active_object <NEW_LINE> return ((ob is not None) and (ob.type == 'CURVE')) <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> main(context) <NEW_LINE> return {'FINISHED'} | Curve Extend Tool | 6259903907d97122c4217e5b |
class WebDriver(_messages.Message): <NEW_LINE> <INDENT> androidDevice = _messages.MessageField('AndroidDevice', 1) <NEW_LINE> browserId = _messages.StringField(2) <NEW_LINE> endpoint = _messages.StringField(3) <NEW_LINE> id = _messages.StringField(4) <NEW_LINE> linuxMachine = _messages.MessageField('LinuxMachine', 5) <NEW_LINE> projectId = _messages.StringField(6) <NEW_LINE> sshPublicKey = _messages.StringField(7) <NEW_LINE> vmDetails = _messages.MessageField('VMDetails', 8) <NEW_LINE> windowsMachine = _messages.MessageField('WindowsMachine', 9) | A WebDriver environment.
Fields:
androidDevice: An Android device.
browserId: The id of the browser to be used. Use the
EnvironmentDiscoveryService to get supported values. Required
endpoint: The endpoint in host:port format where the target running the
specified browser accepts WebDriver protocol commands. @OutputOnly
id: Unique id set by the system. @OutputOnly
linuxMachine: A Linux virtual machine.
projectId: The GCE project for this WebDriver test environment.
@OutputOnly
sshPublicKey: The public key to be set on the VM in order to SSH into it.
vmDetails: The state details of the target device/machine. @OutputOnly
windowsMachine: A Windows virtual machine. | 6259903921bff66bcd723e26 |
class Experiments(Model): <NEW_LINE> <INDENT> _attribute_map = { 'ramp_up_rules': {'key': 'rampUpRules', 'type': '[RampUpRule]'}, } <NEW_LINE> def __init__(self, ramp_up_rules=None): <NEW_LINE> <INDENT> self.ramp_up_rules = ramp_up_rules | Routing rules in production experiments.
:param ramp_up_rules: List of ramp-up rules.
:type ramp_up_rules: list[~azure.mgmt.web.models.RampUpRule] | 6259903923e79379d538d6be |
class TriggerHandle(object): <NEW_LINE> <INDENT> def __init__(self, tid, callback, state): <NEW_LINE> <INDENT> self.tid = tid <NEW_LINE> self.callback = callback <NEW_LINE> self.state = state <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return self.callback(tid=self.tid) <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> if not self.tid: <NEW_LINE> <INDENT> raise Exception('Trigger has already been destroyed.') <NEW_LINE> <DEDENT> self.state.unset_trigger(self.tid) <NEW_LINE> self.callback = None <NEW_LINE> self.tid = None <NEW_LINE> <DEDENT> def wait(self, timeout=10, rate=0.5): <NEW_LINE> <INDENT> t0 = time.time() <NEW_LINE> while not timeout or (time.time() - t0 <= timeout): <NEW_LINE> <INDENT> if self(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> time.sleep(rate) <NEW_LINE> <DEDENT> raise TriggerTimeout | Wraps management routines for inspecting a state trigger. | 6259903916aa5153ce4016ab |
class DbCommands(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @args('version', nargs='?', default=None, help='Database version') <NEW_LINE> def sync(self, version=None): <NEW_LINE> <INDENT> return db_migration.db_sync(version) <NEW_LINE> <DEDENT> def version(self): <NEW_LINE> <INDENT> print(migration.db_version(db_api.get_engine(), db_migration.MIGRATE_REPO_PATH, db_migration.INIT_VERSION)) <NEW_LINE> <DEDENT> @args('age_in_days', type=int, help='Purge deleted rows older than age in days') <NEW_LINE> def purge(self, age_in_days): <NEW_LINE> <INDENT> age_in_days = int(age_in_days) <NEW_LINE> if age_in_days <= 0: <NEW_LINE> <INDENT> print(_("Must supply a positive, non-zero value for age")) <NEW_LINE> exit(1) <NEW_LINE> <DEDENT> ctxt = context.get_admin_context() <NEW_LINE> db.purge_deleted_rows(ctxt, age_in_days) | Class for managing the database. | 6259903a66673b3332c315b4 |
class TestModule(ExtTestCase): <NEW_LINE> <INDENT> def test_check(self): <NEW_LINE> <INDENT> check() <NEW_LINE> _setup_hook() | Test style. | 6259903ab57a9660fecd2c39 |
class CreateTicketNewRequester(Choreography): <NEW_LINE> <INDENT> def __init__(self, temboo_session): <NEW_LINE> <INDENT> Choreography.__init__(self, temboo_session, '/Library/Zendesk/Tickets/CreateTicketNewRequester') <NEW_LINE> <DEDENT> def new_input_set(self): <NEW_LINE> <INDENT> return CreateTicketNewRequesterInputSet() <NEW_LINE> <DEDENT> def _make_result_set(self, result, path): <NEW_LINE> <INDENT> return CreateTicketNewRequesterResultSet(result, path) <NEW_LINE> <DEDENT> def _make_execution(self, session, exec_id, path): <NEW_LINE> <INDENT> return CreateTicketNewRequesterChoreographyExecution(session, exec_id, path) | Create a new instance of the CreateTicketNewRequester Choreography. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied. | 6259903a8e05c05ec3f6f73a |
class TestSetup(object): <NEW_LINE> <INDENT> def __init__(self, test, args): <NEW_LINE> <INDENT> self.test = test <NEW_LINE> self.args = args <NEW_LINE> self.logger = logging.getLogger('RUNNER.TestSetup') <NEW_LINE> self.test_kind = args.kind <NEW_LINE> self.test_version = test.get('version', None) <NEW_LINE> <DEDENT> def validate_env(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.repo_path = os.environ['REPO_PATH'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.logger.error('KeyError: REPO_PATH') <NEW_LINE> self.logger.error("Please run '. ./bin/setenv.sh' to setup test environment") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> def create_dir(self): <NEW_LINE> <INDENT> if not os.path.exists(self.test['output']): <NEW_LINE> <INDENT> os.makedirs(self.test['output']) <NEW_LINE> self.logger.info('Output directory created: %s' % self.test['output']) <NEW_LINE> <DEDENT> <DEDENT> def copy_test_repo(self): <NEW_LINE> <INDENT> self.validate_env() <NEW_LINE> shutil.rmtree(self.test['test_path'], ignore_errors=True) <NEW_LINE> if self.repo_path in self.test['test_path']: <NEW_LINE> <INDENT> self.logger.error("Cannot copy repository into itself. Please choose output directory outside repository path") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> shutil.copytree(self.repo_path, self.test['test_path'], symlinks=True) <NEW_LINE> self.logger.info('Test repo copied to: %s' % self.test['test_path']) <NEW_LINE> <DEDENT> def checkout_version(self): <NEW_LINE> <INDENT> if self.test_version: <NEW_LINE> <INDENT> path = os.getcwd() <NEW_LINE> os.chdir(self.test['test_path']) <NEW_LINE> subprocess.call("git checkout %s" % self.test_version, shell=True) <NEW_LINE> os.chdir(path) <NEW_LINE> <DEDENT> <DEDENT> def create_uuid_file(self): <NEW_LINE> <INDENT> with open('%s/uuid' % self.test['test_path'], 'w') as f: <NEW_LINE> <INDENT> f.write(self.test['uuid']) | Create directories required, then copy files needed to these directories. | 6259903aa4f1c619b294f766 |
class ConstrainedValue: <NEW_LINE> <INDENT> def __init__(self, constraint_set_class): <NEW_LINE> <INDENT> self._constraint_set_class = constraint_set_class <NEW_LINE> <DEDENT> def __set_name__(self, owner, name): <NEW_LINE> <INDENT> self.public_name = name <NEW_LINE> self.private_name = f"_{name}" <NEW_LINE> try: <NEW_LINE> <INDENT> constraint_sets = owner._constraint_sets <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> constraint_sets = [] <NEW_LINE> owner._constraint_sets = constraint_sets <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> owner._constraint_sets.append(self.public_name) <NEW_LINE> <DEDENT> <DEDENT> def __get__(self, instance, typ=None): <NEW_LINE> <INDENT> constraint_set = getattr(instance, self.private_name, None) <NEW_LINE> if constraint_set is None: <NEW_LINE> <INDENT> constraint_set = self._constraint_set_class(name=f"{instance.name}.{self.public_name}") <NEW_LINE> setattr(instance, self.private_name, constraint_set) <NEW_LINE> <DEDENT> return constraint_set <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> constraint_set = self.__get__(instance, None) <NEW_LINE> constraint_set.reset_constraints() <NEW_LINE> if isinstance(value, ConstraintSet): <NEW_LINE> <INDENT> constraint_set.constrain_with(LinkedValueConstraint(value)) <NEW_LINE> return <NEW_LINE> <DEDENT> constraint_set.constrain_with(FixedValueConstraint(value)) | An object which can be passed around to represent a value. | 6259903a30c21e258be999cc |
class Comment(models.Model): <NEW_LINE> <INDENT> blog = models.ForeignKey(Blog,verbose_name='博客') <NEW_LINE> name = models.CharField('称呼',max_length=16) <NEW_LINE> email = models.EmailField('邮箱') <NEW_LINE> content = models.CharField('内容',max_length=240) <NEW_LINE> created = models.DateTimeField('发布时间',auto_now_add=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.content | Comment | 6259903a50485f2cf55dc13f |
class NABackboneTorsionReport(_AdvancedBaseReport): <NEW_LINE> <INDENT> def __init__(self, report: dict = None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._update({ 'Model ID': '', 'Chain ID': '', 'Residue Num': 0, 'Residue Name': '', "O3'-P-O5'-C5": 0, "P-O5'-C5'-C4'": 0, "O5'-C5'-C4'-C3'": 0, "C5'-C4'-C3'-O3'": 0, "C4'-C3'-O3'-P": 0, "C3'-O3'-P-O5'": 0, "O4'-C1'-N1-9-C2-4": 0 }) <NEW_LINE> if report: <NEW_LINE> <INDENT> self._update(_assign_numbers(report)) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def report_type() -> str: <NEW_LINE> <INDENT> return 'nabt' <NEW_LINE> <DEDENT> @property <NEW_LINE> def model_id(self) -> str: <NEW_LINE> <INDENT> return self._report['Model ID'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def chain_id(self) -> str: <NEW_LINE> <INDENT> return self._report['Chain ID'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def residue_number(self) -> int: <NEW_LINE> <INDENT> return self._report['Residue Num'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def residue_name(self) -> str: <NEW_LINE> <INDENT> return self._report['Residue Name'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def o3_p_o5_c5(self) -> float: <NEW_LINE> <INDENT> return self._report["O3'-P-O5'-C5'"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def p_o5_c5_c4(self) -> float: <NEW_LINE> <INDENT> return self._report["P-O5'-C5'-C4'"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def o5_c5_c4_c3(self) -> float: <NEW_LINE> <INDENT> return self._report["O5'-C5'-C4'-C3'"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def c5_c4_c3_o3(self) -> float: <NEW_LINE> <INDENT> return self._report["C5'-C4'-C3'-O3'"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def c4_c3_o3_p(self) -> float: <NEW_LINE> <INDENT> return self._report["C4'-C3'-O3'-P"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def c3_o3_p_o5(self) -> float: <NEW_LINE> <INDENT> return self._report["C3'-O3'-P-O5'"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def o4_c1_n1_9_c2_4(self) -> float: <NEW_LINE> <INDENT> return self._report["O4'-C1'-N1-9-C2-4"] <NEW_LINE> <DEDENT> def download(self, download_type: DownloadType = DownloadType.Pdb, save: bool = False, target_dir: str = '') -> str: <NEW_LINE> <INDENT> pass | Class for refinement data search report extending _AdvancedBaseReport | 6259903ad6c5a102081e32e5 |
class TestModel_VersionResponse(): <NEW_LINE> <INDENT> def test_version_response_serialization(self): <NEW_LINE> <INDENT> version_response_model_json = {} <NEW_LINE> version_response_model_json['builddate'] = 'testString' <NEW_LINE> version_response_model_json['buildno'] = 'testString' <NEW_LINE> version_response_model_json['commitsha'] = 'testString' <NEW_LINE> version_response_model_json['helm_provider_version'] = 'testString' <NEW_LINE> version_response_model_json['helm_version'] = 'testString' <NEW_LINE> version_response_model_json['supported_template_types'] = { 'foo': 'bar' } <NEW_LINE> version_response_model_json['terraform_provider_version'] = 'testString' <NEW_LINE> version_response_model_json['terraform_version'] = 'testString' <NEW_LINE> version_response_model = VersionResponse.from_dict(version_response_model_json) <NEW_LINE> assert version_response_model != False <NEW_LINE> version_response_model_dict = VersionResponse.from_dict(version_response_model_json).__dict__ <NEW_LINE> version_response_model2 = VersionResponse(**version_response_model_dict) <NEW_LINE> assert version_response_model == version_response_model2 <NEW_LINE> version_response_model_json2 = version_response_model.to_dict() <NEW_LINE> assert version_response_model_json2 == version_response_model_json | Test Class for VersionResponse | 6259903abaa26c4b54d50467 |
class WireModule(ConnectionModule): <NEW_LINE> <INDENT> def __init__(self, id, name, voltage, linked_module): <NEW_LINE> <INDENT> super(WireModule, self).__init__(id, name, voltage, linked_module) <NEW_LINE> self.index = 0 <NEW_LINE> <DEDENT> def set_table_section(self, table): <NEW_LINE> <INDENT> super(WireModule, self).set_table_section(table) <NEW_LINE> self.index = 0 <NEW_LINE> self.linked_module.index = 0 | Transformer module, is linked with another transformer module | 6259903a4e696a045264e701 |
class _StateManagerImpl(StateManager): <NEW_LINE> <INDENT> def __init__(self, layer, trainable): <NEW_LINE> <INDENT> self._trainable = trainable <NEW_LINE> self._layer = layer <NEW_LINE> self._cols_to_vars_map = collections.defaultdict(lambda: {}) <NEW_LINE> <DEDENT> def create_variable(self, feature_column, name, shape, dtype=None, trainable=True, use_resource=True, initializer=None): <NEW_LINE> <INDENT> if name in self._cols_to_vars_map[feature_column]: <NEW_LINE> <INDENT> raise ValueError('Variable already exists.') <NEW_LINE> <DEDENT> var = self._layer.add_variable( name=name, shape=shape, dtype=dtype, initializer=initializer, trainable=self._trainable and trainable, use_resource=use_resource, getter=variable_scope.get_variable) <NEW_LINE> self._cols_to_vars_map[feature_column][name] = var <NEW_LINE> return var <NEW_LINE> <DEDENT> def get_variable(self, feature_column, name): <NEW_LINE> <INDENT> if name in self._cols_to_vars_map[feature_column]: <NEW_LINE> <INDENT> return self._cols_to_vars_map[feature_column][name] <NEW_LINE> <DEDENT> raise ValueError('Variable does not exist.') | Manages the state of DenseFeatures and LinearLayer. | 6259903a3eb6a72ae038b828 |
class ProvenanceEntity(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_type = "ProvenanceEntity" <NEW_LINE> def __init__(self, jsondict=None, strict=True): <NEW_LINE> <INDENT> self.agent = None <NEW_LINE> self.role = None <NEW_LINE> self.what = None <NEW_LINE> super(ProvenanceEntity, self).__init__(jsondict=jsondict, strict=strict) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(ProvenanceEntity, self).elementProperties() <NEW_LINE> js.extend([ ("agent", "agent", ProvenanceAgent, True, None, False), ("role", "role", str, False, None, True), ("what", "what", fhirreference.FHIRReference, False, None, True), ]) <NEW_LINE> return js | An entity used in this activity.
| 6259903a21bff66bcd723e28 |
class Home(BaseHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> svnCatalog = None <NEW_LINE> try: <NEW_LINE> <INDENT> conf = load_config('config.json') <NEW_LINE> repo = conf['repo_location'] <NEW_LINE> svnCatalog = comm.to_HTML(git.git_get(repo)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error(e) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if svnCatalog: <NEW_LINE> <INDENT> self.render('catalog.html', main_title = title, tbl = svnCatalog ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.render('error.html', main_title = title, err = "Go to Settings and enter a valid GIT repository." ) | index page | 6259903a23e79379d538d6c0 |
class GroupKFold(_BaseKFold): <NEW_LINE> <INDENT> def __init__(self, n_splits=5): <NEW_LINE> <INDENT> super().__init__(n_splits, shuffle=False, random_state=None) <NEW_LINE> <DEDENT> def _iter_test_indices(self, X, y, groups): <NEW_LINE> <INDENT> if groups is None: <NEW_LINE> <INDENT> raise ValueError("The 'groups' parameter should not be None.") <NEW_LINE> <DEDENT> groups = check_array(groups, input_name="groups", ensure_2d=False, dtype=None) <NEW_LINE> unique_groups, groups = np.unique(groups, return_inverse=True) <NEW_LINE> n_groups = len(unique_groups) <NEW_LINE> if self.n_splits > n_groups: <NEW_LINE> <INDENT> raise ValueError( "Cannot have number of splits n_splits=%d greater" " than the number of groups: %d." % (self.n_splits, n_groups) ) <NEW_LINE> <DEDENT> n_samples_per_group = np.bincount(groups) <NEW_LINE> indices = np.argsort(n_samples_per_group)[::-1] <NEW_LINE> n_samples_per_group = n_samples_per_group[indices] <NEW_LINE> n_samples_per_fold = np.zeros(self.n_splits) <NEW_LINE> group_to_fold = np.zeros(len(unique_groups)) <NEW_LINE> for group_index, weight in enumerate(n_samples_per_group): <NEW_LINE> <INDENT> lightest_fold = np.argmin(n_samples_per_fold) <NEW_LINE> n_samples_per_fold[lightest_fold] += weight <NEW_LINE> group_to_fold[indices[group_index]] = lightest_fold <NEW_LINE> <DEDENT> indices = group_to_fold[groups] <NEW_LINE> for f in range(self.n_splits): <NEW_LINE> <INDENT> yield np.where(indices == f)[0] <NEW_LINE> <DEDENT> <DEDENT> def split(self, X, y=None, groups=None): <NEW_LINE> <INDENT> return super().split(X, y, groups) | K-fold iterator variant with non-overlapping groups.
The same group will not appear in two different folds (the number of
distinct groups has to be at least equal to the number of folds).
The folds are approximately balanced in the sense that the number of
distinct groups is approximately the same in each fold.
Read more in the :ref:`User Guide <group_k_fold>`.
Parameters
----------
n_splits : int, default=5
Number of folds. Must be at least 2.
.. versionchanged:: 0.22
``n_splits`` default value changed from 3 to 5.
Examples
--------
>>> import numpy as np
>>> from sklearn.model_selection import GroupKFold
>>> X = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
>>> y = np.array([1, 2, 3, 4])
>>> groups = np.array([0, 0, 2, 2])
>>> group_kfold = GroupKFold(n_splits=2)
>>> group_kfold.get_n_splits(X, y, groups)
2
>>> print(group_kfold)
GroupKFold(n_splits=2)
>>> for train_index, test_index in group_kfold.split(X, y, groups):
... print("TRAIN:", train_index, "TEST:", test_index)
... X_train, X_test = X[train_index], X[test_index]
... y_train, y_test = y[train_index], y[test_index]
... print(X_train, X_test, y_train, y_test)
...
TRAIN: [0 1] TEST: [2 3]
[[1 2]
[3 4]] [[5 6]
[7 8]] [1 2] [3 4]
TRAIN: [2 3] TEST: [0 1]
[[5 6]
[7 8]] [[1 2]
[3 4]] [3 4] [1 2]
See Also
--------
LeaveOneGroupOut : For splitting the data according to explicit
domain-specific stratification of the dataset. | 6259903a94891a1f408b9fd7 |
class UserProfilePictureOneOffJob(jobs.BaseMapReduceJobManager): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def entity_classes_to_map_over(cls): <NEW_LINE> <INDENT> return [user_models.UserSettingsModel] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def map(item): <NEW_LINE> <INDENT> if item.deleted or item.profile_picture_data_url is not None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> user_services.generate_initial_profile_picture(item.id) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def reduce(key, stringified_values): <NEW_LINE> <INDENT> pass | One-off job that updates profile pictures for users which do not
currently have them. Users who already have profile pictures are
unaffected. | 6259903a30dc7b76659a09f3 |
class Mechanism(Interface): <NEW_LINE> <INDENT> path = 'mechanism' <NEW_LINE> @classmethod <NEW_LINE> def to_python(cls, data): <NEW_LINE> <INDENT> data = upgrade_legacy_mechanism(data) <NEW_LINE> is_valid, errors = validate_and_default_interface(data, cls.path) <NEW_LINE> if not is_valid: <NEW_LINE> <INDENT> raise InterfaceValidationError("Invalid mechanism") <NEW_LINE> <DEDENT> if not data.get('type'): <NEW_LINE> <INDENT> raise InterfaceValidationError("No 'type' present") <NEW_LINE> <DEDENT> meta = data.get('meta', {}) <NEW_LINE> mach_exception = meta.get('mach_exception') <NEW_LINE> if mach_exception is not None: <NEW_LINE> <INDENT> mach_exception = prune_empty_keys({ 'exception': mach_exception['exception'], 'code': mach_exception['code'], 'subcode': mach_exception['subcode'], 'name': mach_exception.get('name'), }) <NEW_LINE> <DEDENT> signal = meta.get('signal') <NEW_LINE> if signal is not None: <NEW_LINE> <INDENT> signal = prune_empty_keys({ 'number': signal['number'], 'code': signal.get('code'), 'name': signal.get('name'), 'code_name': signal.get('code_name'), }) <NEW_LINE> <DEDENT> errno = meta.get('errno') <NEW_LINE> if errno is not None: <NEW_LINE> <INDENT> errno = prune_empty_keys({ 'number': errno['number'], 'name': errno.get('name'), }) <NEW_LINE> <DEDENT> kwargs = { 'type': trim(data['type'], 128), 'description': trim(data.get('description'), 1024), 'help_link': trim(data.get('help_link'), 1024), 'handled': data.get('handled'), 'data': trim(data.get('data'), 4096), 'meta': { 'errno': errno, 'mach_exception': mach_exception, 'signal': signal, }, } <NEW_LINE> return cls(**kwargs) <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return prune_empty_keys({ 'type': self.type, 'description': self.description, 'help_link': self.help_link, 'handled': self.handled, 'data': self.data, 'meta': prune_empty_keys(self.meta), }) <NEW_LINE> <DEDENT> def get_path(self): <NEW_LINE> <INDENT> return self.path <NEW_LINE> <DEDENT> def iter_tags(self): <NEW_LINE> <INDENT> yield (self.path, self.type) <NEW_LINE> if self.handled is not None: <NEW_LINE> <INDENT> yield ('handled', self.handled and 'yes' or 'no') | an optional field residing in the exception interface. It carries additional
information about the way the exception was created on the target system.
This includes general exception values obtained from operating system or
runtime APIs, as well as mechanism-specific values.
>>> {
>>> "type": "mach",
>>> "description": "EXC_BAD_ACCESS",
>>> "data": {
>>> "relevant_address": "0x1"
>>> },
>>> "handled": false,
>>> "help_link": "https://developer.apple.com/library/content/qa/qa1367/_index.html",
>>> "meta": {
>>> "mach_exception": {
>>> "exception": 1,
>>> "subcode": 8,
>>> "code": 1
>>> },
>>> "signal": {
>>> "number": 11
>>> }
>>> }
>>> } | 6259903a50485f2cf55dc142 |
class WaitUntilHealthy(ELBBaseActor): <NEW_LINE> <INDENT> def _get_expected_count(self, count, total_count): <NEW_LINE> <INDENT> if '%' in str(count): <NEW_LINE> <INDENT> expected_count = math.ceil(total_count * p2f(count)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> expected_count = int(count) <NEW_LINE> <DEDENT> return expected_count <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def _is_healthy(self, elb, count): <NEW_LINE> <INDENT> name = elb.name <NEW_LINE> self.log.debug('Counting ELB InService instances for : %s' % name) <NEW_LINE> instance_list = yield self.thread(elb.get_instance_health) <NEW_LINE> total_count = len(instance_list) <NEW_LINE> self.log.debug('All instances: %s' % instance_list) <NEW_LINE> in_service_count = [ i.state for i in instance_list].count('InService') <NEW_LINE> expected_count = self._get_expected_count(count, total_count) <NEW_LINE> healthy = (in_service_count >= expected_count) <NEW_LINE> self.log.debug('ELB "%s" healthy state: %s' % (elb.name, healthy)) <NEW_LINE> raise gen.Return(healthy) <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def _execute(self): <NEW_LINE> <INDENT> elb = yield self._find_elb(name=self.option('name')) <NEW_LINE> repeating_log = utils.create_repeating_log( self.log.info, 'Still waiting for %s to become healthy' % self.option('name'), seconds=30) <NEW_LINE> while True: <NEW_LINE> <INDENT> healthy = yield self._is_healthy(elb, count=self.option('count')) <NEW_LINE> if healthy is True: <NEW_LINE> <INDENT> self.log.info('ELB is healthy.') <NEW_LINE> break <NEW_LINE> <DEDENT> if self._dry: <NEW_LINE> <INDENT> self.log.info('Pretending that ELB is healthy.') <NEW_LINE> break <NEW_LINE> <DEDENT> self.log.debug('Retrying in 3 seconds.') <NEW_LINE> yield utils.tornado_sleep(3) <NEW_LINE> <DEDENT> utils.clear_repeating_log(repeating_log) <NEW_LINE> raise gen.Return() | Wait indefinitely until a specified ELB is considered "healthy".
This actor will loop infinitely until a healthy threshold of the ELB is
met. The threshold can be reached when the ``count`` as specified in the
options is less than or equal to the number of InService instances in the
ELB.
Another situation is for ``count`` to be a string specifying a percentage
(see examples). In this case the percent of InService instances has to be
greater than the ``count`` percentage.
**Options**
:name:
The name of the ELB to operate on
:count:
Number, or percentage of InService instance to consider this ELB healthy
:region:
AWS region (or zone) name, such as us-east-1 or us-west-2
**Examples**
.. code-block:: json
{ "actor": "aws.elb.WaitUntilHealthy",
"desc": "Wait until production-frontend has 16 hosts",
"options": {
"name": "production-frontend",
"count": 16,
"region": "us-west-2"
}
}
.. code-block:: json
{ "actor": "aws.elb.WaitUntilHealthy",
"desc": "Wait until production-frontend has 85% of hosts in-service",
"options": {
"name": "production-frontend",
"count": "85%",
"region": "us-west-2"
}
}
**Dry Mode**
This actor performs the finding of the ELB as well as calculating its
health at all times. The only difference in dry mode is that it will not
re-count the instances if the ELB is not healthy. A log message will be
printed indicating that the run is dry, and the actor will exit with
success. | 6259903a30c21e258be999cf |
class SyncStepDetailInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.StepNo = None <NEW_LINE> self.StepName = None <NEW_LINE> self.CanStop = None <NEW_LINE> self.StepId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.StepNo = params.get("StepNo") <NEW_LINE> self.StepName = params.get("StepName") <NEW_LINE> self.CanStop = params.get("CanStop") <NEW_LINE> self.StepId = params.get("StepId") | Synchronization task progress
| 6259903a6e29344779b01813 |
class ListaMaterialEntregue(models.Model): <NEW_LINE> <INDENT> entregue = models.BooleanField(default=False) <NEW_LINE> ordem_de_servico = models.ForeignKey('programacao.OrdemDeServico', blank=True, null=True) <NEW_LINE> entregue_por = models.ForeignKey("rh.Funcionario", blank=False, null=False, related_name="entregue_por_set") <NEW_LINE> entregue_para = models.ForeignKey("rh.Funcionario", blank=False, null=False, related_name="entregue_para_set") <NEW_LINE> criado = models.DateTimeField(blank=True, auto_now_add=True, verbose_name="Criado") <NEW_LINE> atualizado = models.DateTimeField(blank=True, auto_now=True, verbose_name="Atualizado") | Consolidated list of delivered materials
| 6259903aac7a0e7691f736aa |
class ImagFormatter(RealFormatter): <NEW_LINE> <INDENT> def __call__(self, x, pos=None): <NEW_LINE> <INDENT> if x < -self._axes._near_inf: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> elif x > self._axes._near_inf: <NEW_LINE> <INDENT> return self._axes._get_key("symbol.infinity") <NEW_LINE> <DEDENT> elif abs(x) < EPSILON: <NEW_LINE> <INDENT> return "0" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ("%f" % x).rstrip('0').rstrip('.') + "j" | Formatter for the imaginary axis of a SmithAxes. Prints the numbers
as float and removes trailing zeros and commas. Special returns:
- '' for minus infinity
- 'symbol.infinity' from scParams for plus infinity
- '0' for value near zero (prevents -0)
Keyword arguments:
*axes*:
Parent axes
Accepts: SmithAxes instance | 6259903ab57a9660fecd2c3d |
class LiveDBTestRunner(DiscoverRunner): <NEW_LINE> <INDENT> def setup_databases(self, *args, **kwargs): <NEW_LINE> <INDENT> print('WARNING: using LIVE database = {}' .format(DATABASES['archive']['HOST'])) <NEW_LINE> pass <NEW_LINE> <DEDENT> def teardown_databases(self, *args, **kwargs): <NEW_LINE> <INDENT> pass | THIS IS DANGEROUS.
Do NOT create new database. Use live DB (for metadata DB).
Done as a stop-gap until we have built a clone of the metadata DB on the test server.
It should contain a small subset of full metadata content. | 6259903ad99f1b3c44d06868 |
class Account: <NEW_LINE> <INDENT> prefix = 'GIRO' <NEW_LINE> def __init__(self, newname, balance=0): <NEW_LINE> <INDENT> self.name = newname <NEW_LINE> self.balance = balance <NEW_LINE> <DEDENT> def deposit(self, amt): <NEW_LINE> <INDENT> self.balance += amt <NEW_LINE> <DEDENT> def withdraw(self, amt): <NEW_LINE> <INDENT> self.balance -= amt <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{} | {:10s}:{:10.2f}".format(self.prefix, self.name, self.balance) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def info_text(): <NEW_LINE> <INDENT> return """This is a bank account. It keeps your money safe.""" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def prefix_text(cls): <NEW_LINE> <INDENT> return """Bank account has the prefix: {}.""".format(cls.prefix) | Account for a bank client. | 6259903a8a43f66fc4bf3350 |
class UIGraphicsItem(GraphicsObject): <NEW_LINE> <INDENT> def __init__(self, bounds=None, parent=None): <NEW_LINE> <INDENT> GraphicsObject.__init__(self, parent) <NEW_LINE> self.setFlag(self.ItemSendsScenePositionChanges) <NEW_LINE> if bounds is None: <NEW_LINE> <INDENT> self._bounds = QtCore.QRectF(0, 0, 1, 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._bounds = bounds <NEW_LINE> <DEDENT> self._boundingRect = None <NEW_LINE> self._updateView() <NEW_LINE> <DEDENT> def paint(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def itemChange(self, change, value): <NEW_LINE> <INDENT> ret = GraphicsObject.itemChange(self, change, value) <NEW_LINE> if not USE_PYSIDE and change == self.ItemParentChange and isinstance(ret, QtGui.QGraphicsItem): <NEW_LINE> <INDENT> ret = sip.cast(ret, QtGui.QGraphicsItem) <NEW_LINE> <DEDENT> if change == self.ItemScenePositionHasChanged: <NEW_LINE> <INDENT> self.setNewBounds() <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> def boundingRect(self): <NEW_LINE> <INDENT> if self._boundingRect is None: <NEW_LINE> <INDENT> br = self.viewRect() <NEW_LINE> if br is None: <NEW_LINE> <INDENT> return QtCore.QRectF() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._boundingRect = br <NEW_LINE> <DEDENT> <DEDENT> return QtCore.QRectF(self._boundingRect) <NEW_LINE> <DEDENT> def dataBounds(self, axis, frac=1.0, orthoRange=None): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def viewRangeChanged(self): <NEW_LINE> <INDENT> self.setNewBounds() <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def setNewBounds(self): <NEW_LINE> <INDENT> self._boundingRect = None <NEW_LINE> self.prepareGeometryChange() <NEW_LINE> <DEDENT> def setPos(self, *args): <NEW_LINE> <INDENT> GraphicsObject.setPos(self, *args) <NEW_LINE> self.setNewBounds() <NEW_LINE> <DEDENT> def mouseShape(self): <NEW_LINE> <INDENT> shape = self.shape() <NEW_LINE> ds = self.mapToDevice(shape) <NEW_LINE> stroker = QtGui.QPainterPathStroker() <NEW_LINE> stroker.setWidh(2) <NEW_LINE> ds2 = stroker.createStroke(ds).united(ds) <NEW_LINE> return self.mapFromDevice(ds2) | Base class for graphics items with boundaries relative to a GraphicsView or ViewBox.
The purpose of this class is to allow the creation of GraphicsItems which live inside
a scalable view, but whose boundaries will always stay fixed relative to the view's boundaries.
For example: GridItem, InfiniteLine
The view can be specified on initialization or it can be automatically detected when the item is painted.
NOTE: Only the item's boundingRect is affected; the item is not transformed in any way. Use viewRangeChanged
to respond to changes in the view. | 6259903a287bf620b6272dad |
class BootstrapGalaxyApplication(object): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> if not self.config.database_connection: <NEW_LINE> <INDENT> self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % str(config.database) <NEW_LINE> <DEDENT> self.model = mapping.init(self.config.file_path, self.config.database_connection, engine_options={}, create_tables=False) <NEW_LINE> self.security = security.SecurityHelper(id_secret=self.config.id_secret) <NEW_LINE> <DEDENT> @property <NEW_LINE> def sa_session(self): <NEW_LINE> <INDENT> return self.model.context.current <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> pass | Creates a basic Tool Shed application in order to discover the database
connection and use SQL to create a user and API key. | 6259903a8da39b475be043b1 |
class InquirerLexKeyGroup(KeyGroup): <NEW_LINE> <INDENT> def __init__(self, lexicon): <NEW_LINE> <INDENT> self.lexicon = lexicon <NEW_LINE> description = "Inquirer features" <NEW_LINE> super(InquirerLexKeyGroup, self).__init__(description, self.mk_fields()) <NEW_LINE> <DEDENT> def mk_field(self, entry): <NEW_LINE> <INDENT> name = '_'.join([self.key_prefix(), entry]) <NEW_LINE> return Key.discrete(name, "Inquirer " + entry) <NEW_LINE> <DEDENT> def mk_fields(self): <NEW_LINE> <INDENT> return [self.mk_field(x) for x in self.lexicon] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def key_prefix(cls): <NEW_LINE> <INDENT> return "inq" <NEW_LINE> <DEDENT> def help_text(self): <NEW_LINE> <INDENT> header_name = (self.key_prefix() + "_...").ljust(KeyGroup.NAME_WIDTH) <NEW_LINE> header_help = "if has token in the given class" <NEW_LINE> header = "[D] %s %s" % (header_name, header_help) <NEW_LINE> lines = [header] <NEW_LINE> for entry in self.lexicon: <NEW_LINE> <INDENT> lines.append(" %s" % entry.ljust(KeyGroup.NAME_WIDTH)) <NEW_LINE> <DEDENT> return "\n".join(lines) <NEW_LINE> <DEDENT> def fill(self, current, edu, target=None): <NEW_LINE> <INDENT> vec = self if target is None else target <NEW_LINE> ctx = current.contexts[edu] <NEW_LINE> tokens = frozenset(t.word.lower() for t in ctx.tokens) <NEW_LINE> for entry in self.lexicon: <NEW_LINE> <INDENT> field = self.mk_field(entry) <NEW_LINE> matching = tokens.intersection(self.lexicon[entry]) <NEW_LINE> vec[field.name] = bool(matching) | One feature per Inquirer lexicon class | 6259903a711fe17d825e157d |
class TCPClient(CustomClient): <NEW_LINE> <INDENT> __serverName = None <NEW_LINE> __serverPort = None <NEW_LINE> __csocket = None <NEW_LINE> __serverResponse = [] <NEW_LINE> def __init__(self, serverName, serverPort): <NEW_LINE> <INDENT> self.__serverName = serverName <NEW_LINE> self.__serverPort = serverPort <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> self.__csocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> self.__csocket.connect((self.__serverName, self.__serverPort)) <NEW_LINE> self.__csocket.send("Hi server\n") <NEW_LINE> self.__csocket.recv(1024) <NEW_LINE> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> self.__csocket.close() <NEW_LINE> <DEDENT> def waitForCommand(self): <NEW_LINE> <INDENT> t = Thread(target = self.__run) <NEW_LINE> t.start() <NEW_LINE> t.join() <NEW_LINE> return self.__serverResponse.pop() <NEW_LINE> <DEDENT> def __run(self): <NEW_LINE> <INDENT> self.__serverResponse.append(self.__csocket.recv(1024)) <NEW_LINE> <DEDENT> def sendResponse(self, data): <NEW_LINE> <INDENT> self.__csocket.send(data) | A TCP client.
The client class is able to send and
receive data without closing the connection.
Attributes:
__serverName: The C&C's IP or domain
__serverPort: The C&C's port
__csocket: Server connection socket.
__serverResponse: The last response coming from the server | 6259903a1d351010ab8f4cde |
class ComparisonFrame(awx.Frame): <NEW_LINE> <INDENT> def __init__(self, parent, dirpaths=None, filepaths=None, wildcard=None, **kwargs): <NEW_LINE> <INDENT> super(ComparisonFrame, self).__init__(parent, -1, **kwargs) <NEW_LINE> main_sizer = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> hsizer = wx.BoxSizer(wx.HORIZONTAL) <NEW_LINE> st1 = wx.StaticText(self, -1, "Quantity:", wx.DefaultPosition, wx.DefaultSize, 0) <NEW_LINE> st1.Wrap(-1) <NEW_LINE> hsizer.Add(st1, 0, wx.ALIGN_CENTER_VERTICAL | wx.TOP | wx.BOTTOM | wx.LEFT, 5) <NEW_LINE> plotter_choices = ["ebands", "edos", "mdf", "sigres"] <NEW_LINE> self.plotter_cbox = wx.ComboBox(self, -1, "ebands", wx.DefaultPosition, wx.DefaultSize, plotter_choices, 0) <NEW_LINE> hsizer.Add(self.plotter_cbox, 0, wx.ALL, 5) <NEW_LINE> compare_button = wx.Button(self, -1, "Compare", wx.DefaultPosition, wx.DefaultSize, 0) <NEW_LINE> compare_button.Bind(wx.EVT_BUTTON, self.OnCompareButton) <NEW_LINE> hsizer.Add(compare_button, 0, wx.ALL | wx.ALIGN_CENTER_VERTICAL, 5) <NEW_LINE> main_sizer.Add(hsizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 5) <NEW_LINE> self.panel = FileCheckBoxPanel(self, filepaths) <NEW_LINE> main_sizer.Add(self.panel, 1, wx.EXPAND, 5) <NEW_LINE> self.SetSizerAndFit(main_sizer) <NEW_LINE> <DEDENT> def OnCompareButton(self, event): <NEW_LINE> <INDENT> selected_files = self.panel.GetSelectedFilepaths() <NEW_LINE> choice = self.plotter_cbox.GetValue() <NEW_LINE> try: <NEW_LINE> <INDENT> if choice == "ebands": <NEW_LINE> <INDENT> plotter = ElectronBandsPlotter() <NEW_LINE> for filepath in selected_files: <NEW_LINE> <INDENT> plotter.add_ebands_from_file(filepath) <NEW_LINE> <DEDENT> plotter.plot() <NEW_LINE> <DEDENT> elif choice == "edos": <NEW_LINE> <INDENT> dos_dialog = ElectronDosDialog(None) <NEW_LINE> if dos_dialog.ShowModal() == wx.ID_OK: <NEW_LINE> <INDENT> p = dos_dialog.GetParams() <NEW_LINE> plotter = ElectronDosPlotter() <NEW_LINE> for filepath in selected_files: <NEW_LINE> <INDENT> plotter.add_edos_from_file(filepath, **p) <NEW_LINE> <DEDENT> plotter.plot() <NEW_LINE> <DEDENT> dos_dialog.Destroy() <NEW_LINE> <DEDENT> elif choice == "mdf": <NEW_LINE> <INDENT> plotter = MdfPlotter() <NEW_LINE> for filepath in selected_files: <NEW_LINE> <INDENT> plotter.add_mdf_from_file(filepath, mdf_type="exc") <NEW_LINE> <DEDENT> plotter.plot() <NEW_LINE> <DEDENT> elif choice == "sigres": <NEW_LINE> <INDENT> plotter = SigresPlotter() <NEW_LINE> plotter.add_files(selected_files) <NEW_LINE> plotter.plot_qpgaps() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> awx.showErrorMessage(self, message="No function registered for choice %s" % choice) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> awx.showErrorMessage(self) | This frame allows the user to select/deselect a list of files and to produce plots
for all the files selected. Useful for convergence studies. | 6259903ab5575c28eb7135ab |
class TeeBytesIO(BytesIO): <NEW_LINE> <INDENT> def __init__(self, tee_fh): <NEW_LINE> <INDENT> self.tee_fh = tee_fh <NEW_LINE> super(TeeBytesIO, self).__init__() <NEW_LINE> <DEDENT> def write(self, s): <NEW_LINE> <INDENT> self.tee_fh.write(s) <NEW_LINE> BytesIO.write(self, s) | duplicate each write command to an additional file object | 6259903a71ff763f4b5e895e |
class LocalSMTPServer(smtp.ESMTP, LEAPInitMixin): <NEW_LINE> <INDENT> def __init__(self, outgoings, soledads, encrypted_only=False): <NEW_LINE> <INDENT> LEAPInitMixin.__init__(self, outgoings, soledads, encrypted_only) <NEW_LINE> smtp.ESMTP.__init__(self) | The Production ESMTP Server: Authentication Needed.
Authenticates against SMTP Token stored in Local Soledad instance.
The Realm will produce a Delivery Object that handles encryption/signing. | 6259903a0fa83653e46f609e |
class SatelliteViewSet(DefaultsMixin, viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Satellite.objects.order_by('norad_number') <NEW_LINE> serializer_class = SatelliteSerializer <NEW_LINE> filter_class = SatelliteFilter <NEW_LINE> search_fields = ('isactive' ) <NEW_LINE> ordering_fields = ('norad_number',) | API endpoint for listing and creating Satellites. | 6259903ae76e3b2f99fd9bd0 |
class BikeSubtype(MobikeDbModel): <NEW_LINE> <INDENT> __tablename__ = "bike_subtypes" <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(Text) <NEW_LINE> type_id = Column(Integer, ForeignKey('bike_types.id')) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<Mobike subtype (name = {:s})>".format(self.name) | Mobike subtypes: Categorical table
Subtype is used directly to express bike type. | 6259903a6fece00bbacccb71 |
class RelayData: <NEW_LINE> <INDENT> def __init__(self, given_ip, provided_socket, derived_key, ec_privkey, given_rsa_key, given_port): <NEW_LINE> <INDENT> self.ip_addr = given_ip <NEW_LINE> self.sock = provided_socket <NEW_LINE> self.key = derived_key <NEW_LINE> self.ec_key_ = ec_privkey <NEW_LINE> self.rsa_key = given_rsa_key <NEW_LINE> self.port = given_port | Relay data class | 6259903a76d4e153a661db55 |
class AcceptForm(Form): <NEW_LINE> <INDENT> user_origin = HiddenField('Origin User ID', validators=[InputRequired(message='Error: user may not exist'), accept_form_validate]) <NEW_LINE> group_id = HiddenField('Group ID', validators=[InputRequired(message='Group does not exist')]) | Creates an accept request form when called | 6259903a50485f2cf55dc146 |
class GwasGeneList(tls.Unicode, TypeMeta): <NEW_LINE> <INDENT> info_text = "KBaseGwasData.GwasGeneList" <NEW_LINE> class v1_0(tls.Unicode, TypeMeta): <NEW_LINE> <INDENT> info_text = "KBaseGwasData.GwasGeneList-1.0" | GwasGeneList type | 6259903a8da39b475be043b3 |
class Din9021Washer (Washer): <NEW_LINE> <INDENT> def __init__(self, metric, axis_h, pos_h, tol = 0, pos = V0, model_type = 0, name = ''): <NEW_LINE> <INDENT> self.metric = metric <NEW_LINE> default_name = 'din9021_washer_m' + str(self.metric) <NEW_LINE> self.set_name (name, default_name, change = 0) <NEW_LINE> washer_dict = kcomp.D9021[metric] <NEW_LINE> Washer.__init__(self, r_out = washer_dict['do']/2., r_in = washer_dict['di']/2., h = washer_dict['t'], axis_h = axis_h, pos_h = pos_h, tol = tol, pos = pos, model_type = model_type) | Din 9021 Washer, this is the larger washer
Parameters
-----------
metric : int (maybe float: 2.5)
axis_h : FreeCAD.Vector
Vector along the cylinder height
pos_h : int
Location of pos along axis_h (0,1)
* 0: the cylinder pos is at its base
* 1: the cylinder pos is centered along its height
tol : float
Tolerance for the inner and outer radius.
It is the tolerance for the diameter, so half of this tolerance will be
added to or subtracted from the radius
* tol will be added to the inner radius (so it will be larger)
* tol will be subtracted from the outer radius (so it will be smaller)
model_type : int
Type of model:
* 0: exact
* 1: outline
pos : FreeCAD.Vector
Position of the cylinder, taking into account where the center is
Note
----
All the parameters and attributes of father class CylHole
Attributes
-----------
metric : int or float (in case of M2.5) or even str for inches ?
Metric of the washer
model_type : int | 6259903a8a43f66fc4bf3352 |
class ForwardCheckingInference(object): <NEW_LINE> <INDENT> def __init__(self):pass <NEW_LINE> def doInference(self,inferenceInfo,csp,variable,value): <NEW_LINE> <INDENT> assignment = assignment.Assignment() <NEW_LINE> assignment.addVariableToAssignment(variable, value) <NEW_LINE> for con in csp.getConstraints(variable): <NEW_LINE> <INDENT> otherVariables = csp.getNeighbour(variable,con) <NEW_LINE> for ov in otherVariables: <NEW_LINE> <INDENT> someValues = [] <NEW_LINE> changed = False <NEW_LINE> domVals = inferenceInfo.getDomainsOfAffectedVariables(ov) <NEW_LINE> if domVals is None: <NEW_LINE> <INDENT> domVals = csp.getDomainValues(ov) <NEW_LINE> <DEDENT> for domVal in domVals: <NEW_LINE> <INDENT> assignment.addVariableToAssignment(ov, domVal) <NEW_LINE> if not con.isConsistentWith(assignment): <NEW_LINE> <INDENT> changed = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> someValues.append(domVal) <NEW_LINE> <DEDENT> <DEDENT> if changed: <NEW_LINE> <INDENT> inferenceInfo.addToAffectedVariables(ov,someValues) <NEW_LINE> <DEDENT> assignment.removeVariableFromAssignment(ov) <NEW_LINE> <DEDENT> <DEDENT> return [] | classdocs | 6259903abe383301e02549db |
class StepError(logme.LoggingException): <NEW_LINE> <INDENT> pass | Failed to build the command. | 6259903a3c8af77a43b6881e |
class AverageNoneZeroTripletsMetric(Metric): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.values = [] <NEW_LINE> <DEDENT> def __call__(self, outputs, target, loss): <NEW_LINE> <INDENT> self.values.append(loss[1]) <NEW_LINE> return self.value() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.values = [] <NEW_LINE> self.total_triplets = [] <NEW_LINE> <DEDENT> def value(self): <NEW_LINE> <INDENT> return np.mean(self.values) <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return 'nonzeros' | Counts average number of nonzero triplets found in minibatches | 6259903aa8ecb033258723e4 |
class Neural(Agent): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def train(self, mini_batch, discount): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def clear_model(model): <NEW_LINE> <INDENT> session = K.get_session() <NEW_LINE> for layer in model.layers: <NEW_LINE> <INDENT> if isinstance(layer, Network): <NEW_LINE> <INDENT> Neural.clear_model(layer) <NEW_LINE> continue <NEW_LINE> <DEDENT> for v in layer.__dict__: <NEW_LINE> <INDENT> v_arg = getattr(layer, v) <NEW_LINE> if hasattr(v_arg, 'initializer'): <NEW_LINE> <INDENT> initializer_method = getattr(v_arg, 'initializer') <NEW_LINE> initializer_method.run(session=session) | An abstract class to represent an instance of a deep network architecture
used to represent the Q-value function. | 6259903ad10714528d69ef6e
class HostTestPluginCopyMethod_Silabs(HostTestPluginBase): <NEW_LINE> <INDENT> name = "HostTestPluginCopyMethod_Silabs" <NEW_LINE> type = "CopyMethod" <NEW_LINE> capabilities = ["eACommander", "eACommander-usb"] <NEW_LINE> required_parameters = ["image_path", "destination_disk"] <NEW_LINE> stable = True <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> HostTestPluginBase.__init__(self) <NEW_LINE> <DEDENT> def setup(self, *args, **kwargs): <NEW_LINE> <INDENT> self.EACOMMANDER_CMD = "eACommander.exe" <NEW_LINE> return True <NEW_LINE> <DEDENT> def execute(self, capability, *args, **kwargs): <NEW_LINE> <INDENT> result = False <NEW_LINE> if self.check_parameters(capability, *args, **kwargs) is True: <NEW_LINE> <INDENT> image_path = os.path.normpath(kwargs["image_path"]) <NEW_LINE> destination_disk = os.path.normpath(kwargs["destination_disk"]) <NEW_LINE> if capability == "eACommander": <NEW_LINE> <INDENT> cmd = [ self.EACOMMANDER_CMD, "--serialno", destination_disk, "--flash", image_path, "--resettype", "2", "--reset", ] <NEW_LINE> result = self.run_command(cmd) <NEW_LINE> <DEDENT> elif capability == "eACommander-usb": <NEW_LINE> <INDENT> cmd = [ self.EACOMMANDER_CMD, "--usb", destination_disk, "--flash", image_path, ] <NEW_LINE> result = self.run_command(cmd) <NEW_LINE> <DEDENT> <DEDENT> return result | Plugin interface adapter for eACommander.exe. | 6259903ae76e3b2f99fd9bd2 |
class VoIPPlan(Model): <NEW_LINE> <INDENT> name = models.CharField(unique=True, max_length=255, verbose_name=_('name'), help_text=_("enter plan name")) <NEW_LINE> pubname = models.CharField(max_length=255, verbose_name=_('publish name'), help_text=_("enter publish name")) <NEW_LINE> lcrtype = models.IntegerField(choices=list(LCR_TYPE), verbose_name=_('LCR type'), help_text=_("select LCR type")) <NEW_LINE> created_date = models.DateTimeField(auto_now_add=True, verbose_name=_('date')) <NEW_LINE> updated_date = models.DateTimeField(auto_now=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = u'voip_plan' <NEW_LINE> verbose_name = _("VoIP plan") <NEW_LINE> verbose_name_plural = _("VoIP plans") <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return '[%s] %s' % (self.id, self.name) | VoIPPlan
VoIPPlans are associated with your clients; this defines the rate at which
the VoIP calls are sold to your clients.
A VoIPPlan is a collection of VoIPRetailPlans; you can have 1 or more
VoIPRetailPlans associated to the VoIPPlan
A client has a single VoIPPlan,
VoIPPlan has many VoIPRetailPlans.
VoIPRetailPlan has VoIPRetailRates
The LCR system will route the VoIP via the lowest cost carrier. | 6259903a07d97122c4217e63 |
class FastaWriter(SequentialSequenceWriter): <NEW_LINE> <INDENT> def __init__(self, handle, wrap=60, record2title=None): <NEW_LINE> <INDENT> SequentialSequenceWriter.__init__(self, handle) <NEW_LINE> self.wrap = None <NEW_LINE> if wrap: <NEW_LINE> <INDENT> if wrap < 1: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> self.wrap = wrap <NEW_LINE> self.record2title = record2title <NEW_LINE> <DEDENT> def write_record(self, record): <NEW_LINE> <INDENT> assert self._header_written <NEW_LINE> assert not self._footer_written <NEW_LINE> self._record_written = True <NEW_LINE> if self.record2title: <NEW_LINE> <INDENT> title = self.clean(self.record2title(record)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> id = self.clean(record.id) <NEW_LINE> description = self.clean(record.description) <NEW_LINE> if description and description.split(None, 1)[0] == id: <NEW_LINE> <INDENT> title = description <NEW_LINE> <DEDENT> elif description: <NEW_LINE> <INDENT> title = "%s %s" % (id, description) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> title = id <NEW_LINE> <DEDENT> <DEDENT> assert "\n" not in title <NEW_LINE> assert "\r" not in title <NEW_LINE> self.handle.write(">%s\n" % title) <NEW_LINE> data = self._get_seq_string(record) <NEW_LINE> assert "\n" not in data <NEW_LINE> assert "\r" not in data <NEW_LINE> if self.wrap: <NEW_LINE> <INDENT> for i in range(0, len(data), self.wrap): <NEW_LINE> <INDENT> self.handle.write(data[i:i + self.wrap] + "\n") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.handle.write(data + "\n") | Class to write Fasta format files. | 6259903ad99f1b3c44d0686c |
class ChatRoom(models.Model): <NEW_LINE> <INDENT> _name = "chat.room" <NEW_LINE> _description = "Chat Room" <NEW_LINE> def _default_name(self, objname='room'): <NEW_LINE> <INDENT> return "odoo-%s-%s" % (objname, str(uuid4())[:8]) <NEW_LINE> <DEDENT> name = fields.Char( "Room Name", required=True, copy=False, default=lambda self: self._default_name()) <NEW_LINE> is_full = fields.Boolean("Full", compute="_compute_is_full") <NEW_LINE> jitsi_server_domain = fields.Char( 'Jitsi Server Domain', compute='_compute_jitsi_server_domain', help='The Jitsi server domain can be customized through the settings to use a different server than the default "meet.jit.si"') <NEW_LINE> lang_id = fields.Many2one( "res.lang", "Language", default=lambda self: self.env["res.lang"].search([("code", "=", self.env.user.lang)], limit=1)) <NEW_LINE> max_capacity = fields.Selection( [("4", "4"), ("8", "8"), ("12", "12"), ("16", "16"), ("20", "20"), ("no_limit", "No limit")], string="Max capacity", default="8", required=True) <NEW_LINE> participant_count = fields.Integer("Participant count", default=0, copy=False) <NEW_LINE> last_activity = fields.Datetime( "Last Activity", copy=False, readonly=True, default=lambda self: fields.Datetime.now()) <NEW_LINE> max_participant_reached = fields.Integer( "Max participant reached", copy=False, readonly=True, help="Maximum number of participant reached in the room at the same time") <NEW_LINE> @api.depends("max_capacity", "participant_count") <NEW_LINE> def _compute_is_full(self): <NEW_LINE> <INDENT> for room in self: <NEW_LINE> <INDENT> if room.max_capacity == "no_limit": <NEW_LINE> <INDENT> room.is_full = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> room.is_full = room.participant_count >= int(room.max_capacity) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _compute_jitsi_server_domain(self): <NEW_LINE> <INDENT> jitsi_server_domain = self.env['ir.config_parameter'].sudo().get_param( 'website_jitsi.jitsi_server_domain', 'meet.jit.si') <NEW_LINE> for room in self: <NEW_LINE> <INDENT> room.jitsi_server_domain = jitsi_server_domain | Store all useful information to manage chat room (currently limited
to Jitsi). This model embeds all information about the chat room. We do not
store them in the related mixin (see chat.room.mixin) to avoid adding too
many fields on the models that want to use the chat room mixin, as the
behavior can be optional in those models.
The participant count is automatically updated thanks to the chat room widget
to avoid having a costly computed field with a members model. | 6259903a8a349b6b4368740a |
class AnityaException(Exception): <NEW_LINE> <INDENT> pass | Generic class covering all the exceptions generated by anitya. | 6259903a07d97122c4217e64 |
class SearchRegion(BasePage): <NEW_LINE> <INDENT> _search_box_locator = 'q' <NEW_LINE> def __int__(self, driver): <NEW_LINE> <INDENT> super(SearchRegion, self).__init__(driver) <NEW_LINE> <DEDENT> def searchFor(self, term): <NEW_LINE> <INDENT> self.search_field = self.driver.find_element_by_name( self._search_box_locator ) <NEW_LINE> self.search_field.clear() <NEW_LINE> self.search_field.send_keys(term) <NEW_LINE> self.search_field.submit() <NEW_LINE> return SearchResults(self.driver) | SearchRegion
Handles the application search feature. | 6259903a23e79379d538d6c7
class LossPanel(QWidget): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> pass | a panel to add neutral losses | 6259903a15baa72349463162 |
class Mol2_Reader(BaseReader): <NEW_LINE> <INDENT> def __init__(self,fnm): <NEW_LINE> <INDENT> super(Mol2_Reader,self).__init__(fnm) <NEW_LINE> self.pdict = mol2_pdict <NEW_LINE> self.atom = [] <NEW_LINE> self.atomnames = [] <NEW_LINE> self.section = None <NEW_LINE> self.mol = None <NEW_LINE> <DEDENT> def feed(self, line): <NEW_LINE> <INDENT> s = line.split() <NEW_LINE> self.ln += 1 <NEW_LINE> if line.strip().lower() == '@<tripos>atom': <NEW_LINE> <INDENT> self.itype = 'COUL' <NEW_LINE> self.section = 'Atom' <NEW_LINE> <DEDENT> elif line.strip().lower() == '@<tripos>bond': <NEW_LINE> <INDENT> self.itype = 'None' <NEW_LINE> self.section = 'Bond' <NEW_LINE> <DEDENT> elif line.strip().lower() == '@<tripos>substructure': <NEW_LINE> <INDENT> self.itype = 'None' <NEW_LINE> self.section = 'Substructure' <NEW_LINE> <DEDENT> elif line.strip().lower() == '@<tripos>molecule': <NEW_LINE> <INDENT> self.itype = 'None' <NEW_LINE> self.section = 'Molecule' <NEW_LINE> <DEDENT> elif self.section == 'Molecule' and self.mol == None: <NEW_LINE> <INDENT> self.mol = '_'.join(s) <NEW_LINE> <DEDENT> elif not is_mol2_atom(line): <NEW_LINE> <INDENT> self.itype = 'None' <NEW_LINE> <DEDENT> if is_mol2_atom(line) and self.itype == 'COUL': <NEW_LINE> <INDENT> self.atomnames.append(s[0]) <NEW_LINE> self.adict.setdefault(self.mol,[]).append(s[0]) <NEW_LINE> <DEDENT> if self.itype in self.pdict: <NEW_LINE> <INDENT> if 'Atom' in self.pdict[self.itype] and match(' *[0-9]', line): <NEW_LINE> <INDENT> self.atom = [s[0]] <NEW_LINE> self.suffix = ':' + '-'.join([self.mol,''.join(self.atom)]) <NEW_LINE> self.molatom = (self.mol, self.atom if type(self.atom) is list else [self.atom]) | Finite state machine for parsing Mol2 force field file. (just for parameterizing the charges) | 6259903abe383301e02549de |
class DBRunnerTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_parse_dbrunner_args(self): <NEW_LINE> <INDENT> options = DBRunner.parse_args([ '-m', 'foo.sqlite', '-s', '20180101', '5']) <NEW_LINE> self.assertEqual( '5', options.process_id) <NEW_LINE> self.assertEqual( 'foo.sqlite', options.mission) <NEW_LINE> self.assertEqual( datetime.datetime(2018, 1, 1), options.startDate) <NEW_LINE> self.assertTrue( datetime.datetime.now() - options.endDate < datetime.timedelta(seconds=10)) <NEW_LINE> self.assertFalse(options.echo) <NEW_LINE> self.assertEqual(1, options.numproc) <NEW_LINE> self.assertFalse(options.update) <NEW_LINE> self.assertFalse(options.ingest) <NEW_LINE> self.assertTrue(options.force is None) <NEW_LINE> <DEDENT> def test_parse_dbrunner_args_other(self): <NEW_LINE> <INDENT> options = DBRunner.parse_args([ '-m', 'foo.sqlite', '-s', '20180101', '5', '--force', '0']) <NEW_LINE> self.assertEqual(0, options.force) <NEW_LINE> options = DBRunner.parse_args([ '-m', 'foo.sqlite', '-s', '20180101', '5', '-u']) <NEW_LINE> self.assertTrue(options.update) <NEW_LINE> options = DBRunner.parse_args([ '-m', 'foo.sqlite', '-s', '20180101', '5', '-u', '-i']) <NEW_LINE> self.assertTrue(options.update) <NEW_LINE> self.assertTrue(options.ingest) <NEW_LINE> options = DBRunner.parse_args([ '-m', 'foo.sqlite', '-s', '20180101', '5', '--force', '1', '-i']) <NEW_LINE> self.assertFalse(options.update) <NEW_LINE> self.assertEqual(1, options.force) <NEW_LINE> self.assertTrue(options.ingest) <NEW_LINE> options = DBRunner.parse_args([ '-m', 'foo.sqlite', '-s', '20180101', 'foo']) <NEW_LINE> self.assertEqual( 'foo' , options.process_id) <NEW_LINE> <DEDENT> def test_parse_dbrunner_args_bad(self): <NEW_LINE> <INDENT> arglist = [ ['--force', '6'], ['--force', '0', '-u'], ['-i'], ] <NEW_LINE> msgs = [ 'argument --force: invalid choice: 6 (choose from 0, 1, 2)', 'argument -u/--update: not allowed with argument --force', 'argument -i/--ingest: requires --force or --update', ] <NEW_LINE> oldstderr = sys.stderr <NEW_LINE> for args, msg in zip(arglist, msgs): <NEW_LINE> <INDENT> sys.stderr = (io.BytesIO if str is bytes else io.StringIO)() <NEW_LINE> try: <NEW_LINE> <INDENT> with self.assertRaises(SystemExit) as cm: <NEW_LINE> <INDENT> DBRunner.parse_args( ['-m', 'foo.sqlite', '-s', '20180101', '5'] + args) <NEW_LINE> <DEDENT> err = sys.stderr.getvalue().split('\n')[-2] <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> sys.stderr.close() <NEW_LINE> sys.stderr = oldstderr <NEW_LINE> <DEDENT> self.assertEqual( '{}: error: {}'.format(os.path.basename(sys.argv[0]), msg), err) | DBRunner tests | 6259903a1d351010ab8f4ce2 |
class Component(Resource): <NEW_LINE> <INDENT> def __init__(self, type: str, value: str): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.type = Resource.init_param(type) <NEW_LINE> self.value = Resource.init_param(value) | Represent the components configuration for the ImageBuilder. | 6259903a3c8af77a43b6881f |
class GetHandlers(Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return server.get_handlers() | Class responsible for sending the different handlers. | 6259903a63f4b57ef0086657
class Bitfinex(models.Exchange): <NEW_LINE> <INDENT> _markets_map = {'btc_usd': 'btcusd', 'ltc_usd': 'ltcusd', 'ltc_btc': 'ltcbtc'} <NEW_LINE> def __init__(self, market="btc_usd"): <NEW_LINE> <INDENT> self.market = market <NEW_LINE> <DEDENT> def depth(self): <NEW_LINE> <INDENT> url = "/".join([base_url, 'book', self._symbol]) <NEW_LINE> resp = self._request('GET', url, verify=False).json() <NEW_LINE> asks = [] <NEW_LINE> for o in resp['asks']: <NEW_LINE> <INDENT> asks.append(models.Order(price=self._create_decimal(o['price']), amount=self._create_decimal(o['amount']))) <NEW_LINE> <DEDENT> bids = [] <NEW_LINE> for o in resp['bids']: <NEW_LINE> <INDENT> bids.append(models.Order(price=self._create_decimal(o['price']), amount=self._create_decimal(o['amount']))) <NEW_LINE> <DEDENT> return asks, bids <NEW_LINE> <DEDENT> def ticker(self): <NEW_LINE> <INDENT> url = "/".join([base_url, 'ticker', self._symbol]) <NEW_LINE> resp = self._request('GET', url, verify=False).json() <NEW_LINE> return models.Ticker(avg=self._create_decimal(resp['mid']), buy=self._create_decimal(resp['bid']), last=self._create_decimal(resp['last_price']), sell=self._create_decimal(resp['ask']), ) <NEW_LINE> <DEDENT> def trades(self): <NEW_LINE> <INDENT> url = "/".join([base_url, 'trades', self._symbol]) <NEW_LINE> resp = self._request('GET', url, verify=False).json() <NEW_LINE> trades = [] <NEW_LINE> for t in resp: <NEW_LINE> <INDENT> date = datetime.fromtimestamp(t['timestamp']) <NEW_LINE> amount = self._create_decimal(t['amount']) <NEW_LINE> price = self._create_decimal(t['price']) <NEW_LINE> tid = None <NEW_LINE> trades.append(models.Trade(date=date, amount=amount, price=price, tid=tid)) <NEW_LINE> <DEDENT> return trades | Docstring for Bitstamp | 6259903a1f5feb6acb163dba |
class ManageUserView(generics.RetrieveUpdateAPIView): <NEW_LINE> <INDENT> serializer_class = UserSerializer <NEW_LINE> authentication_classes = [authentication.TokenAuthentication, ] <NEW_LINE> permission_classes = [permissions.IsAuthenticated, ] <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> return self.request.user | Manage authenticated users. | 6259903a16aa5153ce4016b5 |
class FileRecorder(): <NEW_LINE> <INDENT> def __init__(self, record_dir, max_record_pre_file = 1000): <NEW_LINE> <INDENT> if not os.path.exists(record_dir): <NEW_LINE> <INDENT> os.makedirs(record_dir) <NEW_LINE> print('Successfully creating directory {0}'.format(record_dir)) <NEW_LINE> <DEDENT> self.record_dir = record_dir <NEW_LINE> self.max_record = max_record_pre_file <NEW_LINE> self.curr_id = 0 <NEW_LINE> self.latest_file = self._get_latest_file() <NEW_LINE> <DEDENT> def _get_latest_file(self): <NEW_LINE> <INDENT> files = sorted(os.listdir(self.record_dir)) <NEW_LINE> if len(files) > 0: <NEW_LINE> <INDENT> curr_latest = self.record_dir + files[-1] <NEW_LINE> with open(curr_latest, 'r') as rf: <NEW_LINE> <INDENT> lines_count = sum(1 for line in rf) <NEW_LINE> <DEDENT> if lines_count < self.max_record: <NEW_LINE> <INDENT> self.curr_id = lines_count <NEW_LINE> return curr_latest <NEW_LINE> <DEDENT> <DEDENT> new_csv = self.record_dir + '{0}.csv'.format(datetime.now().strftime("%Y%m%d%H%M%S")) <NEW_LINE> with open(new_csv, 'w') as wf: <NEW_LINE> <INDENT> wf.write('id,pixels,label\n') <NEW_LINE> <DEDENT> return new_csv <NEW_LINE> <DEDENT> def write_record(self, img, label): <NEW_LINE> <INDENT> self.curr_id += 1 <NEW_LINE> params = {'id' : self.curr_id, 'pixels' : img, 'label' : label} <NEW_LINE> with open(self.latest_file, 'a') as wf: <NEW_LINE> <INDENT> wf.write('{id:05d},{pixels},{label}\n'.format(**params)) <NEW_LINE> <DEDENT> if self.curr_id == self.max_record: <NEW_LINE> <INDENT> new_csv = self.record_dir + '{0}.csv'.format(datetime.now().strftime("%Y%m%d%H%M%S")) <NEW_LINE> with open(new_csv, 'w') as wf: <NEW_LINE> <INDENT> wf.write('id,pixels,label\n') <NEW_LINE> <DEDENT> self.latest_file = new_csv <NEW_LINE> self.curr_id = 0 | write records in a file on disk
Attributes:
curr_id (int): id of the current record to write on disk
latest_file (str): path of the record file
max_record (int): maximum records allowed to store in a file
record_dir (str): directory containing the record files | 6259903aec188e330fdf9a60 |
class ImportOSM(bpy.types.Operator, ImportHelper): <NEW_LINE> <INDENT> bl_idname = "import_osm.xml" <NEW_LINE> bl_label = "Import OSM XML" <NEW_LINE> filepath = bpy.props.StringProperty(name="File Path", default= "") <NEW_LINE> filename_ext = ".osm" <NEW_LINE> filter_glob = bpy.props.StringProperty(default="*.osm", options={'HIDDEN'}) <NEW_LINE> create_tag_list = bpy.props.BoolProperty(name="Create Tag list",description="Creates an internal tags.txt containing listing all tags found in the OSM-xml.",default=False) <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> return import_osm.load(self, context, self.properties.filepath) <NEW_LINE> <DEDENT> def draw(self,context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(self,'create_tag_list') | Load a OSM XML file | 6259903ad4950a0f3b111724 |
class MyView(View): <NEW_LINE> <INDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return TemplateResponse(request, self.get_template_name(), self.get_context_data()) <NEW_LINE> <DEDENT> def get_template_name(self): <NEW_LINE> <INDENT> return "blog/blogpost_detail.html" <NEW_LINE> <DEDENT> def get_context_data(self): <NEW_LINE> <INDENT> return { "blogpost": self.get_object(), } <NEW_LINE> <DEDENT> def get_object(self): <NEW_LINE> <INDENT> return get_object_or_404(BlogPost, pk=self.kwargs.get("pk")) | Displays the details of a BlogPost | 6259903a15baa72349463164 |
class NewSetupMotor: <NEW_LINE> <INDENT> def __init__( self, filename, which_motor, phase_offset=0, optical_element='Polarizer'): <NEW_LINE> <INDENT> self.experiment_start_datetime = None <NEW_LINE> self.filename = filename <NEW_LINE> self.phase_offset = phase_offset <NEW_LINE> self.optical_element = optical_element <NEW_LINE> self.load_motor_file( filename, which_motor ) <NEW_LINE> <DEDENT> def load_motor_file( self, filename, which_motor ): <NEW_LINE> <INDENT> md = np.loadtxt( filename, delimiter='\t', skiprows=1, converters={0: lambda s: deal_with_date_time_string(self, s), 3: lambda s: s=='open'} ) <NEW_LINE> timestamps = md[:,0] <NEW_LINE> emisangles = md[:,1] <NEW_LINE> exciangles = md[:,2] <NEW_LINE> shutter = md[:,3] <NEW_LINE> self.timestamps = timestamps <NEW_LINE> if which_motor=='excitation': <NEW_LINE> <INDENT> self.angles = exciangles * np.pi/180.0 <NEW_LINE> <DEDENT> elif which_motor=='emission': <NEW_LINE> <INDENT> self.angles = emisangles * np.pi/180.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Input argument which_motor to class NewSetupMotor must take values 'excitation' or 'emission'. Got: %s" % (which_motor)) <NEW_LINE> <DEDENT> self.shutter = shutter <NEW_LINE> if self.optical_element=='L/2 Plate': <NEW_LINE> <INDENT> self.angles *= 2 <NEW_LINE> <DEDENT> <DEDENT> def angle(self, time, exposuretime=.1, respectShutter=True, raw=False ): <NEW_LINE> <INDENT> if respectShutter: <NEW_LINE> <INDENT> mintime = time - exposuretime <NEW_LINE> maxtime = time + exposuretime <NEW_LINE> tt = self.timestamps - mintime <NEW_LINE> first = tt[tt<0].size <NEW_LINE> tt = self.timestamps - maxtime <NEW_LINE> last = tt[tt<0].size -1 <NEW_LINE> if np.any( self.shutter[first:last]==0 ): <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> phi = np.interp( time, self.timestamps, self.angles ) + self.phase_offset <NEW_LINE> if raw: <NEW_LINE> <INDENT> return phi <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return phi % np.pi <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> phi = np.interp( time, self.timestamps, self.angles ) + self.phase_offset <NEW_LINE> if raw: <NEW_LINE> <INDENT> return phi <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return phi % np.pi | This class represents either of the two motors in the new
setup. For both, emission and excitation motor, this class should
be used. There are only two reasons we have separate motor classes
for the old setup: i) because the data generated by labview is a
separate, differently formatted file for the excitation motor, and
ii) the polarization phase offset is implemented for the
excitation motor.
This new class is _very_ similar to the emission motor class
below. In fact, it is worth considering consolidating the three
different classes into one, or at least using this generic class
for the emission motor in the old setup, and having only one extra
class specialized for the old excitation motor.
(Alternatively, we could create separate branches for the old and
new setup; however, I think this is unnecessary, since the
differences in the analysis software are limited to motor data
import --- so far.)
Class constructor arguments are the filename to be read in, a flag
denoting which_motor should be read in (either 'emission' or
'excitation'), as well as a phase offset, which, by convention,
will always be zero for the emission motor, but can take non-zero
values for the excitation motor. | 6259903abe383301e02549e0 |
class Assignment(base.Assignment): <NEW_LINE> <INDENT> implements(ILatestNITFPortlet) <NEW_LINE> limit = 10 <NEW_LINE> pretty_date = True <NEW_LINE> anonymous_only = True <NEW_LINE> def __init__(self, limit = 10, pretty_date=True, anonymous_only=True): <NEW_LINE> <INDENT> self.limit = limit <NEW_LINE> self.pretty_date = pretty_date <NEW_LINE> self.anonymous_only = anonymous_only <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> return _(u"Latest NITF") | Portlet assignment.
This is what is actually managed through the portlets UI and associated
with columns. | 6259903a3c8af77a43b68820 |
class SentenceSelectionInstance(TextInstance): <NEW_LINE> <INDENT> def __init__(self, question_text: str, sentences: List[str], label: int, index: int=None): <NEW_LINE> <INDENT> super(SentenceSelectionInstance, self).__init__(label, index) <NEW_LINE> self.question_text = question_text <NEW_LINE> self.sentences = sentences <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('SentenceSelectionInstance(' + self.question_text + ', ' + self.sentences + ', ' + str(self.label) + ')') <NEW_LINE> <DEDENT> @overrides <NEW_LINE> def words(self) -> Dict[str, List[str]]: <NEW_LINE> <INDENT> words = self._words_from_text(self.question_text) <NEW_LINE> sentences_words = [self._words_from_text(sentence) for sentence in self.sentences] <NEW_LINE> for namespace in words: <NEW_LINE> <INDENT> for sentence_words in sentences_words: <NEW_LINE> <INDENT> words[namespace].extend(sentence_words[namespace]) <NEW_LINE> <DEDENT> <DEDENT> return words <NEW_LINE> <DEDENT> @overrides <NEW_LINE> def to_indexed_instance(self, data_indexer: DataIndexer): <NEW_LINE> <INDENT> question_indices = self._index_text(self.question_text, data_indexer) <NEW_LINE> sentences_indices = [self._index_text(sentence, data_indexer) for sentence in self.sentences] <NEW_LINE> return IndexedSentenceSelectionInstance(question_indices, sentences_indices, self.label, self.index) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def read_from_line(cls, line: str, default_label: bool=None): <NEW_LINE> <INDENT> fields = line.split("\t") <NEW_LINE> if len(fields) == 4: <NEW_LINE> <INDENT> index_string, question, sentences, label_string = fields <NEW_LINE> index = int(index_string) <NEW_LINE> <DEDENT> elif len(fields) == 3: <NEW_LINE> <INDENT> question, sentences, label_string = fields <NEW_LINE> index = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError("Unrecognized line format: " + line) <NEW_LINE> <DEDENT> sentences_split = sentences.split("###") <NEW_LINE> label = int(label_string) <NEW_LINE> return cls(question, sentences_split, label, index) | A SentenceSelectionInstance is an instance for the sentence selection
task. A SentenceSelectionInstance stores a question as a string, and a set of sentences
as a list of strings. The label is a single int, indicating the index of
the sentence that contains the answer to the question. | 6259903a23849d37ff852281 |
class CategoricalEncoder(BaseEstimator, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, variables=None): <NEW_LINE> <INDENT> if not isinstance(variables, list): <NEW_LINE> <INDENT> self.variables = [variables] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.variables = variables <NEW_LINE> <DEDENT> <DEDENT> def fit(self, X, y): <NEW_LINE> <INDENT> temp = pd.concat([X, y], axis=1) <NEW_LINE> temp.columns = list(X.columns) + ['target'] <NEW_LINE> self.encoder_dict_ = {} <NEW_LINE> for var in self.variables: <NEW_LINE> <INDENT> t = temp.groupby([var])['target'].mean().sort_values( ascending=True).index <NEW_LINE> self.encoder_dict_[var] = {k: i for i, k in enumerate(t, 0)} <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def transform(self, X): <NEW_LINE> <INDENT> X = X.copy() <NEW_LINE> for feature in self.variables: <NEW_LINE> <INDENT> X[feature] = X[feature].map(self.encoder_dict_[feature]) <NEW_LINE> <DEDENT> if X[self.variables].isnull().any().any(): <NEW_LINE> <INDENT> null_counts = X[self.variables].isnull().any() <NEW_LINE> vars_ = {key: value for (key, value) in null_counts.items() if value} <NEW_LINE> raise InvalidModelInputError( f'Categorical encoder has introduced NaN when ' f'transforming categorical variables: {vars_.keys()}') <NEW_LINE> <DEDENT> return X | String to numbers categorical encoder | 6259903ab5575c28eb7135ae |
class TextArea(_ATextInput): <NEW_LINE> <INDENT> def __init__(self, parent, *args, **kwargs): <NEW_LINE> <INDENT> wrap_style = _wx.TE_DONTWRAP if not kwargs.pop('do_wrap', True) else 0 <NEW_LINE> super(TextArea, self).__init__(parent, *args, style=wrap_style, **kwargs) | A multi-line text edit widget. See the documentation for _ATextInput
for a list of the events this component offers. | 6259903a287bf620b6272db4 |
class PredatorPreyMapData(object): <NEW_LINE> <INDENT> field = [] <NEW_LINE> def __init__(self, map_path): <NEW_LINE> <INDENT> tiled_data = pygame_renderer.TiledData(map_path) <NEW_LINE> tiled_data.load() <NEW_LINE> tile_pos = tiled_data.get_tile_positions() <NEW_LINE> self.field = tile_pos['ground']['FIELD'] | The map data as the geographical info.
| 6259903ad10714528d69ef70 |
class EventObjectFilter(interface.FilterObject): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(EventObjectFilter, self).__init__() <NEW_LINE> self._event_filter = None <NEW_LINE> self._filter_expression = None <NEW_LINE> <DEDENT> def CompileFilter(self, filter_expression): <NEW_LINE> <INDENT> parser = expression_parser.EventFilterExpressionParser() <NEW_LINE> expression = parser.Parse(filter_expression) <NEW_LINE> self._event_filter = expression.Compile() <NEW_LINE> self._filter_expression = filter_expression <NEW_LINE> <DEDENT> def Match(self, event, event_data, event_data_stream, event_tag): <NEW_LINE> <INDENT> if not self._event_filter: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self._event_filter.Matches( event, event_data, event_data_stream, event_tag) | Event filter. | 6259903a6e29344779b0181c |
class TransactionCreateView(generic.CreateView): <NEW_LINE> <INDENT> model = Transaction <NEW_LINE> template_name = 'financial/create_transaction.html' <NEW_LINE> fields = ['date', 'description', 'acc_from', 'acc_to', 'value'] <NEW_LINE> success_url = 'create' | This class is used to create entries in the database. | 6259903a16aa5153ce4016b7
class CityAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> raw_id_fields = ["subregion", "region"] <NEW_LINE> list_display = ( 'name', 'subregion', 'region', 'country', 'geoname_id', 'timezone' ) <NEW_LINE> search_fields = ( 'search_names', 'geoname_id', 'timezone' ) <NEW_LINE> list_filter = ( 'country__continent', 'country', 'timezone' ) <NEW_LINE> form = forms.CityForm <NEW_LINE> def get_changelist(self, request, **kwargs): <NEW_LINE> <INDENT> return CityChangeList | ModelAdmin for City. | 6259903a07d97122c4217e67 |
class LruDictR(LruDict): <NEW_LINE> <INDENT> def __init__(self, cap): <NEW_LINE> <INDENT> super(LruDictR, self).__init__(cap) <NEW_LINE> self._lock = threading.Lock() <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> return super(LruDictR, self).__setitem__(key, value) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> return super(LruDictR, self).__getitem__(key) <NEW_LINE> <DEDENT> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> return super(LruDictR, self).__delitem__(key) | Thread safe version | 6259903abaa26c4b54d50472 |
class LibcxxPrettyPrinter(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> super(LibcxxPrettyPrinter, self).__init__() <NEW_LINE> self.name = name <NEW_LINE> self.enabled = True <NEW_LINE> self.lookup = { "basic_string": StdStringPrinter, "string": StdStringPrinter, "string_view": StdStringViewPrinter, "tuple": StdTuplePrinter, "unique_ptr": StdUniquePtrPrinter, "shared_ptr": StdSharedPointerPrinter, "weak_ptr": StdSharedPointerPrinter, "bitset": StdBitsetPrinter, "deque": StdDequePrinter, "list": StdListPrinter, "queue": StdQueueOrStackPrinter, "stack": StdQueueOrStackPrinter, "priority_queue": StdPriorityQueuePrinter, "map": StdMapPrinter, "multimap": StdMapPrinter, "set": StdSetPrinter, "multiset": StdSetPrinter, "vector": StdVectorPrinter, "__map_iterator": MapIteratorPrinter, "__map_const_iterator": MapIteratorPrinter, "__tree_iterator": SetIteratorPrinter, "__tree_const_iterator": SetIteratorPrinter, "fpos": StdFposPrinter, "unordered_set": StdUnorderedSetPrinter, "unordered_multiset": StdUnorderedSetPrinter, "unordered_map": StdUnorderedMapPrinter, "unordered_multimap": StdUnorderedMapPrinter, "__hash_map_iterator": StdUnorderedMapIteratorPrinter, "__hash_map_const_iterator": StdUnorderedMapIteratorPrinter, "__hash_iterator": StdUnorderedSetIteratorPrinter, "__hash_const_iterator": StdUnorderedSetIteratorPrinter, } <NEW_LINE> self.subprinters = [] <NEW_LINE> for name, subprinter in self.lookup.items(): <NEW_LINE> <INDENT> if subprinter not in self.subprinters: <NEW_LINE> <INDENT> subprinter.name = name <NEW_LINE> self.subprinters.append(subprinter) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __call__(self, val): <NEW_LINE> <INDENT> if val.type.strip_typedefs().code != gdb.TYPE_CODE_STRUCT: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> typename = val.type.name or val.type.tag or str(val.type) <NEW_LINE> match = re.match("^std::(__.*?)::", typename) <NEW_LINE> if match is not None and match.group(1) in ["__cxx1998", "__debug", "__7", "__g"]: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> typename = _prettify_typename(val.type) <NEW_LINE> if not typename: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> without_generics = _remove_generics(typename) <NEW_LINE> lookup_name = _remove_std_prefix(without_generics) <NEW_LINE> if lookup_name in self.lookup: <NEW_LINE> <INDENT> return self.lookup[lookup_name](val) <NEW_LINE> <DEDENT> return None | PrettyPrinter object so gdb-commands like 'info pretty-printers' work. | 6259903a76d4e153a661db58 |
class Square: <NEW_LINE> <INDENT> def __init__(self, size=0, position=(0, 0)): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self.position = position <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return(self.__size) <NEW_LINE> <DEDENT> @property <NEW_LINE> def position(self): <NEW_LINE> <INDENT> return(self.__position) <NEW_LINE> <DEDENT> @size.setter <NEW_LINE> def size(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError("size must be an integer") <NEW_LINE> <DEDENT> elif value < 0: <NEW_LINE> <INDENT> raise ValueError("size must be >= 0") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__size = value <NEW_LINE> <DEDENT> <DEDENT> @position.setter <NEW_LINE> def position(self, value): <NEW_LINE> <INDENT> if type(value) is not tuple or len(value) is not 2 or type(value[0]) is not int or value[0] < 0 or type(value[1]) is not int or value[1] < 0: <NEW_LINE> <INDENT> raise TypeError("position must be a tuple of 2 positive integers") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__position = value <NEW_LINE> <DEDENT> <DEDENT> def area(self): <NEW_LINE> <INDENT> return(self.__size**2) <NEW_LINE> <DEDENT> def my_print(self): <NEW_LINE> <INDENT> if self.__size is 0: <NEW_LINE> <INDENT> print() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("\n" * self.__position[1], end="") <NEW_LINE> for i in range(self.__size): <NEW_LINE> <INDENT> print(" " * self.__position[0], end="") <NEW_LINE> print("#" * self.__size) | Class Square that defines a square
Attributes:
__size (int): size of a side of the square
__position (tuple): position of the square in 2D | 6259903ad53ae8145f919630 |
class EdxNotesPage(CoursePage): <NEW_LINE> <INDENT> url_path = "edxnotes/" <NEW_LINE> MAPPING = { "recent": RecentActivityView, "structure": CourseStructureView, "search": SearchResultsView, } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(EdxNotesPage, self).__init__(*args, **kwargs) <NEW_LINE> self.current_view = self.MAPPING["recent"](self.browser) <NEW_LINE> <DEDENT> def is_browser_on_page(self): <NEW_LINE> <INDENT> return self.q(css=".wrapper-student-notes").present <NEW_LINE> <DEDENT> def switch_to_tab(self, tab_name): <NEW_LINE> <INDENT> self.current_view = self.MAPPING[tab_name](self.browser) <NEW_LINE> self.current_view.visit() <NEW_LINE> <DEDENT> def close_tab(self, tab_name): <NEW_LINE> <INDENT> self.current_view.close() <NEW_LINE> self.current_view = self.MAPPING["recent"](self.browser) <NEW_LINE> <DEDENT> def search(self, text): <NEW_LINE> <INDENT> self.q(css="#search-notes-form #search-notes-input").first.fill(text) <NEW_LINE> self.q(css='#search-notes-form .search-notes-submit').first.click() <NEW_LINE> self.current_view = self.MAPPING["search"](self.browser) <NEW_LINE> if text.strip(): <NEW_LINE> <INDENT> self.current_view.wait_for_page() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def tabs(self): <NEW_LINE> <INDENT> tabs = self.q(css=".tabs .tab-label") <NEW_LINE> if tabs: <NEW_LINE> <INDENT> return map(lambda x: x.replace("Current tab\n", ""), tabs.text) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_error_visible(self): <NEW_LINE> <INDENT> return self.q(css=".inline-error").visible <NEW_LINE> <DEDENT> @property <NEW_LINE> def error_text(self): <NEW_LINE> <INDENT> element = self.q(css=".inline-error").first <NEW_LINE> if element and self.is_error_visible: <NEW_LINE> <INDENT> return element.text[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def notes(self): <NEW_LINE> <INDENT> children = self.q(css='.note') <NEW_LINE> return [EdxNotesPageItem(self.browser, child.get_attribute("id")) for child in children] <NEW_LINE> <DEDENT> @property <NEW_LINE> def groups(self): <NEW_LINE> <INDENT> children = self.q(css='.note-group') <NEW_LINE> return [EdxNotesPageGroup(self.browser, child.get_attribute("id")) for child in children] <NEW_LINE> <DEDENT> @property <NEW_LINE> def sections(self): <NEW_LINE> <INDENT> children = self.q(css='.note-section') <NEW_LINE> return [EdxNotesPageSection(self.browser, child.get_attribute("id")) for child in children] <NEW_LINE> <DEDENT> @property <NEW_LINE> def no_content_text(self): <NEW_LINE> <INDENT> element = self.q(css=".is-empty").first <NEW_LINE> if element: <NEW_LINE> <INDENT> return element.text[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None | EdxNotes page. | 6259903a82261d6c527307aa |
class JRPCAutomatedBookingTest(test.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.__verbose_testing = False <NEW_LINE> if not self.__verbose_testing: <NEW_LINE> <INDENT> logging.getLogger('common').setLevel(level=logging.CRITICAL) <NEW_LINE> logging.getLogger('configuration').setLevel(level=logging.CRITICAL) <NEW_LINE> logging.getLogger('scheduling').setLevel(level=logging.CRITICAL) <NEW_LINE> <DEDENT> operational.OperationalSlot.objects.get_simulator().set_debug() <NEW_LINE> operational.OperationalSlot.objects.set_debug() <NEW_LINE> self.__sc_1_id = 'xatcobeo-sc' <NEW_LINE> self.__sc_1_tle_id = 'HUMSAT-D' <NEW_LINE> self.__sc_1_ch_1_id = 'xatcobeo-fm' <NEW_LINE> self.__sc_1_ch_1_cfg = { channel_serializers.FREQUENCY_K: '437000000', channel_serializers.MODULATION_K: 'FM', channel_serializers.POLARIZATION_K: 'LHCP', channel_serializers.BITRATE_K: '300', channel_serializers.BANDWIDTH_K: '12.500000000' } <NEW_LINE> self.__gs_1_id = 'gs-la' <NEW_LINE> self.__gs_1_ch_1_id = 'gs-la-fm' <NEW_LINE> self.__gs_1_ch_1_cfg = { channel_serializers.BAND_K: 'UHF / U / 435000000.000000 / 438000000.000000', channel_serializers.AUTOMATED_K: True, channel_serializers.MODULATIONS_K: ['FM'], channel_serializers.POLARIZATIONS_K: ['LHCP'], channel_serializers.BITRATES_K: [300, 600, 900], channel_serializers.BANDWIDTHS_K: [12.500000000, 25.000000000] } <NEW_LINE> self.__band = db_tools.create_band() <NEW_LINE> self.__user_profile = db_tools.create_user_profile() <NEW_LINE> self.__sc_1 = db_tools.create_sc( user_profile=self.__user_profile, identifier=self.__sc_1_id, tle_id=self.__sc_1_tle_id, ) <NEW_LINE> self.__gs_1 = db_tools.create_gs( user_profile=self.__user_profile, identifier=self.__gs_1_id, ) <NEW_LINE> <DEDENT> def _book_automated(self): <NEW_LINE> <INDENT> pass | JRPC Automated Booking test.
The tests included in this class validate the booking process that involves
the usage of fully automated GroundStation channels. | 6259903a4e696a045264e707 |
class TripleSign(_SingularSign): <NEW_LINE> <INDENT> def __init__(self, pos: int): <NEW_LINE> <INDENT> _SingularSign.__init__(self, -1, 3, pos) | Sign representing pitch composed from one prime ** 3.
'pos' argument declares which of the three present primes
(d, l or b) shall be used. | 6259903a30c21e258be999d9 |
class Signature(Definition): <NEW_LINE> <INDENT> def __init__(self, inference_state, signature): <NEW_LINE> <INDENT> super(Signature, self).__init__(inference_state, signature.name) <NEW_LINE> self._signature = signature <NEW_LINE> <DEDENT> @property <NEW_LINE> def params(self): <NEW_LINE> <INDENT> return [ParamDefinition(self._inference_state, n) for n in self._signature.get_param_names(resolve_stars=True)] <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> return self._signature.to_string() | `Signature` objects is the return value of `Script.function_definition`.
It knows what functions you are currently in. e.g. `isinstance(` would
return the `isinstance` function. Without `(` it would return nothing. | 6259903a1f5feb6acb163dbe
class EpisodeNameNotFound(DataRetrievalError): <NEW_LINE> <INDENT> pass | Raised when the name of the episode cannot be found
| 6259903acad5886f8bdc5962 |
class SerialServerError(Exception): <NEW_LINE> <INDENT> pass | Exception class for serial server. | 6259903a30c21e258be999da |
class GoddardKneller(Ref): <NEW_LINE> <INDENT> author = "Goddard, T.D. and Kneller, D.G." <NEW_LINE> author2 = [["Tom", "Goddard", "T.", "D."], ["Donald", "Kneller", "D.", "G."]] <NEW_LINE> journal = "University of California, San Francisco." <NEW_LINE> title = "Sparky 3." <NEW_LINE> status = "unpublished" <NEW_LINE> type = "internet" | Bibliography container. | 6259903a73bcbd0ca4bcb455 |
class MinimaxAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> def maxValue(state,depth): <NEW_LINE> <INDENT> if depth == self.depth or state.isWin() or state.isLose(): <NEW_LINE> <INDENT> return self.evaluationFunction(state) <NEW_LINE> <DEDENT> v=-999999 <NEW_LINE> listValue = state.getLegalActions(0) <NEW_LINE> if Directions.STOP in listValue: <NEW_LINE> <INDENT> listValue.remove(Directions.STOP) <NEW_LINE> <DEDENT> scores = [minValue(state.generateSuccessor(0, action), depth) for action in listValue] <NEW_LINE> bestScore = max(scores) <NEW_LINE> v = max( v,bestScore) <NEW_LINE> return v <NEW_LINE> <DEDENT> def minValue(state,depth): <NEW_LINE> <INDENT> v=99999 <NEW_LINE> if depth == self.depth or state.isWin() or state.isLose(): <NEW_LINE> <INDENT> return self.evaluationFunction(state) <NEW_LINE> <DEDENT> depth += 1 <NEW_LINE> if state.getNumAgents()<2: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> listValue = state.getLegalActions(1) <NEW_LINE> stateofghost = [state.generateSuccessor(1, action) for action in listValue ] <NEW_LINE> for i in range(2,state.getNumAgents()): <NEW_LINE> <INDENT> ghosts=[] <NEW_LINE> legalact=state.getLegalActions(i) <NEW_LINE> for g in stateofghost: <NEW_LINE> <INDENT> if g.isLose(): <NEW_LINE> <INDENT> return self.evaluationFunction(g) <NEW_LINE> <DEDENT> newghosts = [g.generateSuccessor(i, action) for action in state.getLegalActions(i)] <NEW_LINE> ghosts=newghosts+ghosts <NEW_LINE> <DEDENT> stateofghost=ghosts <NEW_LINE> <DEDENT> scores = [maxValue(g,depth) for g in stateofghost] <NEW_LINE> v = min(v,min(scores)) <NEW_LINE> return v <NEW_LINE> <DEDENT> legalMoves = gameState.getLegalActions(0) <NEW_LINE> if Directions.STOP in legalMoves: <NEW_LINE> <INDENT> legalMoves.remove(Directions.STOP) <NEW_LINE> <DEDENT> scores = [minValue(gameState.generateSuccessor(0, action), 0) for action in legalMoves] <NEW_LINE> bestScore = max(scores) <NEW_LINE> bestIndices = [index for index in range(len(scores)) if scores[index] == bestScore] <NEW_LINE> chosenIndex = random.choice(bestIndices) <NEW_LINE> move= legalMoves[chosenIndex] <NEW_LINE> return move <NEW_LINE> util.raiseNotDefined() | Your minimax agent (question 2) | 6259903ad53ae8145f919632 |
class ElectricCar(Car): <NEW_LINE> <INDENT> def __init__(self, make, model, year): <NEW_LINE> <INDENT> super().__init__(make, model, year) <NEW_LINE> self.battery = Battery() | The unique aspects of an electric car | 6259903a4e696a045264e708
@tag(TestType='FVT', FeatureID='IOTOS-1156') <NEW_LINE> class solettaPWMApiTest(TestCaseInterface): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> sys.stdout.write('\nDownloading the repository of soletta...') <NEW_LINE> sys.stdout.flush() <NEW_LINE> soletta_url = 'https://github.com/solettaproject/soletta.git' <NEW_LINE> get_test_module_repo(soletta_url, 'soletta') <NEW_LINE> sys.stdout.write('\nCopying necessary files to target device...') <NEW_LINE> sys.stdout.flush() <NEW_LINE> binding = "pwm" <NEW_LINE> copy_test_files(self, "pwm") <NEW_LINE> sys.stdout.write( '\nCopy all files related to testing to target device done!' ) <NEW_LINE> sys.stdout.flush() <NEW_LINE> self.target.run('python %s/update_suite_js.py' % self.target_path ) <NEW_LINE> <DEDENT> @tag(CasesNumber=1) <NEW_LINE> def test_sol_pwm_api(self): <NEW_LINE> <INDENT> sys.stdout.write( '\nExecuting soletta pwm upstream test cases...' ) <NEW_LINE> sys.stdout.flush() <NEW_LINE> run_grunt_cmd = ''.join([ 'cd ', self.target_path, '; node tests/suite.js' ]) <NEW_LINE> (status, output) = self.target.run(run_grunt_cmd) <NEW_LINE> sys.stdout.write('\r' + ' ' * 78 + '\r') <NEW_LINE> sys.stdout.flush() <NEW_LINE> result_log = self.target_path + "tests/results.json" <NEW_LINE> local_path = "/tmp" <NEW_LINE> json_path = local_path + "/results.json" <NEW_LINE> self.target.copy_from(result_log, local_path) <NEW_LINE> format_results(self, json_path) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> sys.stdout.write("\nClean test files on host") <NEW_LINE> sys.stdout.flush() <NEW_LINE> os.system('rm -rf %s/soletta-tests*' % self.repo_test_dir) <NEW_LINE> sys.stdout.write("\nClean test files on device") <NEW_LINE> sys.stdout.flush() <NEW_LINE> self.target.run('rm -rf /usr/lib/node_modules/soletta/soletta-tests*') <NEW_LINE> sys.stdout.write('\nClean all files related to testing done!!\n') <NEW_LINE> sys.stdout.flush() | @class solettaPWMApiTest
Update suite.js for testing | 6259903a3eb6a72ae038b836 |
class OrgFocus(db.Model): <NEW_LINE> <INDENT> __tablename__ = "orgfocus" <NEW_LINE> id=db.Column(db.Integer,db.ForeignKey("organisation.id"),primary_key=True) <NEW_LINE> focus_id=db.Column(db.Integer,db.ForeignKey("focus.id"),primary_key=True) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<OrgFocus:%r:%r>" % (self.id,self.focus_id) | 'OrgFocus' relates organisations to their interests | 6259903a23849d37ff852285
class InteractiveReplaceInput(app.editor.InteractiveFindInput): <NEW_LINE> <INDENT> def __init__(self, view): <NEW_LINE> <INDENT> app.editor.InteractiveFindInput.__init__(self, view) <NEW_LINE> <DEDENT> def focus(self): <NEW_LINE> <INDENT> self.view.parent.parent.textBuffer.set_message( "Press ctrl+g to replace and find next; ctrl+r to replace and find" " prior." ) <NEW_LINE> <DEDENT> def set_text_buffer(self, textBuffer): <NEW_LINE> <INDENT> app.editor.InteractiveFindInput.set_text_buffer(self, textBuffer) <NEW_LINE> commandSet = init_command_set(self, textBuffer) <NEW_LINE> commandSet.update( { KEY_BTAB: self.prior_focusable_window, KEY_ESCAPE: self.change_to_host_window, KEY_F1: self.info, KEY_F3: self.save_event_change_to_host_window, KEY_SHIFT_F3: self.save_event_change_to_host_window, CTRL_E: self.extend_find_window, CTRL_F: self.find_next, CTRL_G: self.replace_and_next, CTRL_I: self.next_focusable_window, CTRL_J: self.change_to_host_window, CTRL_N: self.save_event_change_to_host_window, CTRL_O: self.change_to_file_manager_window, CTRL_P: self.change_to_prediction, CTRL_R: self.replace_and_prior, } ) <NEW_LINE> self.commandSet = commandSet <NEW_LINE> self.commandDefault = self.textBuffer.insert_printable <NEW_LINE> <DEDENT> def extend_find_window(self): <NEW_LINE> <INDENT> self.view.host.toggleExtendedFindWindow() | Find text within the current document. | 6259903a71ff763f4b5e8968 |
class _Component: <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> for key in kwargs: <NEW_LINE> <INDENT> setattr(self, key, kwargs[key]) <NEW_LINE> <DEDENT> components.append(self) <NEW_LINE> <DEDENT> def get_predicate(self): <NEW_LINE> <INDENT> assert self.predicate and not isinstance(self.invalid, str) <NEW_LINE> if self.predefault: <NEW_LINE> <INDENT> predicate = f"gdbarch->{self.name} != {self.predefault}" <NEW_LINE> <DEDENT> elif isinstance(c, Value): <NEW_LINE> <INDENT> predicate = f"gdbarch->{self.name} != 0" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> predicate = f"gdbarch->{self.name} != NULL" <NEW_LINE> <DEDENT> return predicate | Base class for all components. | 6259903ad10714528d69ef72 |