Schema: language — stringclasses (2 values: python, java) · func_code_string — stringlengths (63 to 466k characters)
python
def find_matching_endpoints(self, swagger_ns):
    """
    Compute current matching endpoints.

    Evaluated as a property to defer evaluation.

    """
    def match_func(operation, ns, rule):
        # only expose endpoints that have the correct path prefix and operation
        return (
            rule.rule.startswith(self.graph.build_route_path(swagger_ns.path, swagger_ns.prefix))
            and operation in self.matching_operations
        )

    return list(iter_endpoints(self.graph, match_func))
python
def parse_temperature_response(
        temperature_string: str) -> Mapping[str, Optional[float]]:
    '''
    Example input: "T:none C:25"
    '''
    err_msg = 'Unexpected argument to parse_temperature_response: {}'.format(
        temperature_string)
    if not temperature_string or \
            not isinstance(temperature_string, str):
        raise ParseError(err_msg)
    parsed_values = temperature_string.strip().split(' ')
    if len(parsed_values) < 2:
        log.error(err_msg)
        raise ParseError(err_msg)

    data = {
        parse_key_from_substring(s): parse_number_from_substring(s)
        for s in parsed_values[:2]
    }
    if 'C' not in data or 'T' not in data:
        raise ParseError(err_msg)
    data = {
        'current': data['C'],
        'target': data['T']
    }
    return data
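A usage sketch grounded in the docstring's example input; the exact parsed values ("none" -> None, "25" -> 25.0) are an assumption about the substring helpers, which are not shown in this row.

# Hypothetical round-trip, assuming the helpers parse
# "T:none" to ('T', None) and "C:25" to ('C', 25.0):
result = parse_temperature_response("T:none C:25")
assert result == {'current': 25.0, 'target': None}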
python
def dump_registers_peek(registers, data, separator=' ', width=16):
    """
    Dump data pointed to by the given registers, if any.

    @type  registers: dict( str S{->} int )
    @param registers: Dictionary mapping register names to their values.
        This value is returned by L{Thread.get_context}.

    @type  data: dict( str S{->} str )
    @param data: Dictionary mapping register names to the data they point to.
        This value is returned by L{Thread.peek_pointers_in_registers}.

    @rtype:  str
    @return: Text suitable for logging.
    """
    if None in (registers, data):
        return ''
    names = compat.keys(data)
    names.sort()
    result = ''
    for reg_name in names:
        tag = reg_name.lower()
        dumped = HexDump.hexline(data[reg_name], separator, width)
        result += '%s -> %s\n' % (tag, dumped)
    return result
python
def _set_port_channel_redundancy_group(self, v, load=False):
  """
  Setter method for port_channel_redundancy_group, mapped from YANG variable
  /port_channel_redundancy_group (list)

  If this variable is read-only (config: false) in the source YANG file, then
  _set_port_channel_redundancy_group is considered as a private method.
  Backends looking to populate this variable should do so via calling
  thisObj._set_port_channel_redundancy_group() directly.

  YANG Description: The list of port-channel redundancy groups in a managed
  device. Each entry represents a port-channel-redundancy-group.
  """
  if hasattr(v, "_utype"):
    v = v._utype(v)
  try:
    t = YANGDynClass(v,base=YANGListType("group_id",port_channel_redundancy_group.port_channel_redundancy_group, yang_name="port-channel-redundancy-group", rest_name="port-channel-redundancy-group", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper, yang_keys='group-id', extensions={u'tailf-common': {u'info': u'The list of port-channel redundancy groups.', u'cli-no-key-completion': None, u'sort-priority': u'92', u'cli-suppress-show-path': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-full-command': None, u'callpoint': u'po_redundant_grp_cp'}}), is_container='list', yang_name="port-channel-redundancy-group", rest_name="port-channel-redundancy-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The list of port-channel redundancy groups.', u'cli-no-key-completion': None, u'sort-priority': u'92', u'cli-suppress-show-path': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-full-command': None, u'callpoint': u'po_redundant_grp_cp'}}, namespace='urn:brocade.com:mgmt:brocade-lag', defining_module='brocade-lag', yang_type='list', is_config=True)
  except (TypeError, ValueError):
    raise ValueError({
        'error-string': """port_channel_redundancy_group must be of a type compatible with list""",
        'defined-type': "list",
        'generated-type': """YANGDynClass(base=YANGListType("group_id",port_channel_redundancy_group.port_channel_redundancy_group, yang_name="port-channel-redundancy-group", rest_name="port-channel-redundancy-group", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper, yang_keys='group-id', extensions={u'tailf-common': {u'info': u'The list of port-channel redundancy groups.', u'cli-no-key-completion': None, u'sort-priority': u'92', u'cli-suppress-show-path': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-full-command': None, u'callpoint': u'po_redundant_grp_cp'}}), is_container='list', yang_name="port-channel-redundancy-group", rest_name="port-channel-redundancy-group", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'The list of port-channel redundancy groups.', u'cli-no-key-completion': None, u'sort-priority': u'92', u'cli-suppress-show-path': None, u'cli-suppress-list-no': None, u'cli-suppress-key-abbreviation': None, u'cli-no-match-completion': None, u'cli-full-command': None, u'callpoint': u'po_redundant_grp_cp'}}, namespace='urn:brocade.com:mgmt:brocade-lag', defining_module='brocade-lag', yang_type='list', is_config=True)""",
      })

  self.__port_channel_redundancy_group = t
  if hasattr(self, '_set'):
    self._set()
java
@Override public JsonElement serialize(Duration src, Type typeOfSrc, JsonSerializationContext context) { return new JsonPrimitive(src.toString()); }
python
def error_leader(self, infile=None, lineno=None):
    "Emit a C-compiler-like, Emacs-friendly error-message leader."
    if infile is None:
        infile = self.infile
    if lineno is None:
        lineno = self.lineno
    return "\"%s\", line %d: " % (infile, lineno)
java
private void nextFromPrologBang(boolean isProlog) throws XMLStreamException {
    int i = getNext();
    if (i < 0) {
        throwUnexpectedEOF(SUFFIX_IN_PROLOG);
    }
    if (i == 'D') { // Doctype declaration?
        String keyw = checkKeyword('D', "DOCTYPE");
        if (keyw != null) {
            throwParseError("Unrecognized XML directive '<!"+keyw+"' (misspelled DOCTYPE?).");
        }
        if (!isProlog) {
            // Still possibly ok in multidoc mode...
            if (mConfig.inputParsingModeDocuments()) {
                if (!mStDoctypeFound) {
                    mCurrToken = handleMultiDocStart(DTD);
                    return;
                }
            } else {
                throwParseError(ErrorConsts.ERR_DTD_IN_EPILOG);
            }
        }
        if (mStDoctypeFound) {
            throwParseError(ErrorConsts.ERR_DTD_DUP);
        }
        mStDoctypeFound = true;
        // Ok; let's read main input (all but internal subset)
        mCurrToken = DTD;
        startDTD();
        return;
    } else if (i == '-') { // comment
        char c = getNextChar(isProlog ? SUFFIX_IN_PROLOG : SUFFIX_IN_EPILOG);
        if (c != '-') {
            throwUnexpectedChar(i, " (malformed comment?)");
        }
        // Likewise, let's delay actual parsing/skipping.
        mTokenState = TOKEN_STARTED;
        mCurrToken = COMMENT;
        return;
    } else if (i == '[') { // erroneous CDATA?
        i = peekNext();
        // Let's just add bit of heuristics, to get better error msg
        if (i == 'C') {
            throwUnexpectedChar(i, ErrorConsts.ERR_CDATA_IN_EPILOG);
        }
    }
    throwUnexpectedChar(i, " after '<!' (malformed comment?)");
}
java
private void forward() { try { CmsDbSettingsPanel panel = m_panel[0]; panel.saveToSetupBean(); boolean createDb = panel.getCreateDb(); boolean dropDb = panel.getDropDb(); boolean createTables = panel.getCreateTables(); setupDb(createDb, createTables, dropDb); } catch (DBException e) { CmsSetupErrorDialog.showErrorDialog(e.getMessage(), e.getDetails()); } catch (Exception e) { CmsSetupErrorDialog.showErrorDialog(e); } }
python
def load(cls, filename=None):
    """Load a test report configuration."""
    if filename is None:
        LOGGER.debug("Loading default configuration.")
        with open_text(templates, "test_config.yml",
                       encoding="utf-8") as file_handle:
            # safe_load: yaml.load without an explicit Loader is deprecated
            # and unsafe; a plain config file needs no arbitrary objects.
            content = yaml.safe_load(file_handle)
    else:
        LOGGER.debug("Loading custom configuration '%s'.", filename)
        try:
            with open(filename, encoding="utf-8") as file_handle:
                content = yaml.safe_load(file_handle)
        except IOError as err:
            LOGGER.error(
                "Failed to load the custom configuration '%s'. Skipping.",
                filename)
            LOGGER.debug(str(err))
            content = dict()
    return cls(content)
python
def compute_topk_scores_and_seq(sequences,
                                scores,
                                scores_to_gather,
                                flags,
                                beam_size,
                                batch_size,
                                prefix="default",
                                states_to_gather=None,
                                use_tpu=False,
                                use_top_k_with_unique=True):
  """Given sequences and scores, will gather the top k=beam size sequences.

  This function is used to grow the alive and finished sets. It takes
  sequences, scores, and flags, and returns the top k from sequences,
  scores_to_gather, and flags based on the values in scores.

  This method permits easy introspection using tfdbg. It adds three named ops
  that are prefixed by `prefix`:
    - _topk_seq: the tensor for topk_seq returned by this method.
    - _topk_flags: the tensor for topk_finished_flags returned by this method.
    - _topk_scores: the tensor for topk_gathered_scores returned by this
      method.

  Args:
    sequences: Tensor of sequences that we need to gather from.
      [batch_size, beam_size, seq_length]
    scores: Tensor of scores for each sequence in sequences.
      [batch_size, beam_size]. We will use these to compute the topk.
    scores_to_gather: Tensor of scores for each sequence in sequences.
      [batch_size, beam_size]. We will return the gathered scores from here.
      Scores to gather is different from scores because for grow_alive, we will
      need to return log_probs, while for grow_finished, we will need to return
      the length penalized scores.
    flags: Tensor of bools for sequences that say whether a sequence has
      reached EOS or not
    beam_size: int
    batch_size: int
    prefix: string that will prefix unique names for the ops run.
    states_to_gather: dict (possibly nested) of decoding states.
    use_tpu: A bool, whether to compute topk scores and sequences on TPU.
    use_top_k_with_unique: bool, whether to use a fast (but decreased
      precision) top_k during TPU beam search.

  Returns:
    Tuple of
    (topk_seq [batch_size, beam_size, decode_length],
     topk_gathered_scores [batch_size, beam_size],
     topk_finished_flags [batch_size, beam_size])
  """
  if not use_tpu:
    _, topk_indexes = tf.nn.top_k(scores, k=beam_size)
    # The next three steps are to create coordinates for tf.gather_nd to pull
    # out the topk sequences from sequences based on scores.
    # batch pos is a tensor like [[0,0,0,0,],[1,1,1,1],..]. It says which
    # batch the beam item is in. This will create the i of the i,j coordinate
    # needed for the gather
    batch_pos = compute_batch_indices(batch_size, beam_size)

    # top coordinates will give us the actual coordinates to do the gather.
    # stacking will create a tensor of dimension batch * beam * 2, where the
    # last dimension contains the i,j gathering coordinates.
    top_coordinates = tf.stack([batch_pos, topk_indexes], axis=2)

    # Gather up the highest scoring sequences. For each operation added, give
    # it a concrete name to simplify observing these operations with tfdbg.
    # Clients can capture these tensors by watching these node names.
    def gather(tensor, name):
      return tf.gather_nd(tensor, top_coordinates, name=(prefix + name))
    topk_seq = gather(sequences, "_topk_seq")
    topk_flags = gather(flags, "_topk_flags")
    topk_gathered_scores = gather(scores_to_gather, "_topk_scores")
    if states_to_gather:
      topk_gathered_states = nest.map_structure(
          lambda state: gather(state, "_topk_states"), states_to_gather)
    else:
      topk_gathered_states = states_to_gather
  else:
    if use_top_k_with_unique:
      _, topk_indexes = top_k_with_unique(scores, k=beam_size)
    else:
      _, topk_indexes = tf.nn.top_k(scores, k=beam_size)
    # Gather up the highest scoring sequences. For each operation added, give
    # it a concrete name to simplify observing these operations with tfdbg.
    # Clients can capture these tensors by watching these node names.
    topk_seq = fast_tpu_gather(sequences, topk_indexes, prefix + "_topk_seq")
    topk_flags = fast_tpu_gather(flags, topk_indexes, prefix + "_topk_flags")
    topk_gathered_scores = fast_tpu_gather(scores_to_gather, topk_indexes,
                                           prefix + "_topk_scores")
    if states_to_gather:
      topk_gathered_states = nest.map_structure(
          # pylint: disable=g-long-lambda
          lambda state: fast_tpu_gather(state, topk_indexes,
                                        prefix + "_topk_states"),
          states_to_gather)
    else:
      topk_gathered_states = states_to_gather

  return topk_seq, topk_gathered_scores, topk_flags, topk_gathered_states
python
def register(self, callback, name):
    'Register a callback on server and on connected clients.'
    server.CALLBACKS[name] = callback
    self.run('''
        window.skink.%s = function(args=[]) {
            window.skink.call("%s", args);
        }''' % (name, name))
java
public final boolean isTransactional() {
    // Take a snapshot of the value with first use (or reuse from pool) of the managed connection.
    // This value will be cleared when the managed connection is returned to the pool.
    if (transactional == null) {
        transactional = mcf.dsConfig.get().transactional;
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(this, tc, "transactional=", transactional);
    }
    return transactional;
}
python
def sendToReplica(self, msg, frm):
    """
    Send the message to the intended replica.

    :param msg: the message to send
    :param frm: the name of the node which sent this `msg`
    """
    # TODO: discard or stash messages here instead of doing
    # this in msgHas* methods!!!
    if self.msgHasAcceptableInstId(msg, frm):
        self.replicas.pass_message((msg, frm), msg.instId)
java
public static void bufferedToGray(DataBufferUShort buffer, WritableRaster src, GrayI16 dst) {
    short[] srcData = buffer.getData();

    int numBands = src.getNumBands();

    int size = dst.getWidth() * dst.getHeight();

    int srcStride = stride(src);
    int srcOffset = getOffset(src);
    int srcStrideDiff = srcStride - src.getNumDataElements() * dst.width;

    if (numBands == 1) {
        if (dst.startIndex == 0 && dst.width == dst.stride && srcStrideDiff == 0 && srcOffset == 0)
            System.arraycopy(srcData, 0, dst.data, 0, size);
        else {
            //CONCURRENT_BELOW BoofConcurrency.loopFor(0, dst.height, y -> {
            for (int y = 0; y < dst.height; y++) {
                int indexDst = dst.startIndex + dst.stride * y;
                int indexSrc = srcOffset + srcStride * y;

                System.arraycopy(srcData, indexSrc, dst.data, indexDst, dst.width);
            }
            //CONCURRENT_ABOVE });
        }
    } else {
        throw new RuntimeException("Only single band images are currently supported for 16bit");
    }
}
java
public int size() { int size = cleared ? 0 : snapshot.size(); for ( Map.Entry<RowKey,AssociationOperation> op : currentState.entrySet() ) { switch ( op.getValue().getType() ) { case PUT: if ( cleared || !snapshot.containsKey( op.getKey() ) ) { size++; } break; case REMOVE: if ( !cleared && snapshot.containsKey( op.getKey() ) ) { size--; } break; } } return size; }
java
public UpdateEventConfigurationsRequest withEventConfigurations(java.util.Map<String, Configuration> eventConfigurations) { setEventConfigurations(eventConfigurations); return this; }
java
String readNonEscaped() throws IOException { StringBuilder s = new StringBuilder(); do { int c = reader.read(); if (c < 0) { return s.toString().trim(); } if (c == SEPARATOR || c == '\n') { reader.unread(c); return s.toString().trim(); } else { s.append((char) c); } } while (true); }
java
public static byte[] toByteArray(InputStream input) throws IOException { ByteArrayOutputStream os = new ByteArrayOutputStream(); byte[] buf = new byte[1024]; for (int n = input.read(buf); n != -1; n = input.read(buf)) { os.write(buf, 0, n); } return os.toByteArray(); }
java
@SuppressWarnings("unchecked") private static <T> T newInstance(Class<?> type) { try { return (T) type.newInstance(); } catch (InstantiationException | IllegalAccessException e) { throw new ServiceException("Cannot instantiate service class " + type, e); } }
python
async def container_size(
    self, container_len=None, container_type=None, params=None
):
    """
    Container size
    :param container_len:
    :param container_type:
    :param params:
    :return:
    """
    if hasattr(container_type, "serialize_archive"):
        raise ValueError("not supported")

    if self.writing:
        return await self._dump_container_size(
            self.iobj, container_len, container_type, params
        )
    else:
        raise ValueError("Not supported")
java
public static base_responses add(nitro_service client, lbmonitor resources[]) throws Exception { base_responses result = null; if (resources != null && resources.length > 0) { lbmonitor addresources[] = new lbmonitor[resources.length]; for (int i=0;i<resources.length;i++){ addresources[i] = new lbmonitor(); addresources[i].monitorname = resources[i].monitorname; addresources[i].type = resources[i].type; addresources[i].action = resources[i].action; addresources[i].respcode = resources[i].respcode; addresources[i].httprequest = resources[i].httprequest; addresources[i].rtsprequest = resources[i].rtsprequest; addresources[i].customheaders = resources[i].customheaders; addresources[i].maxforwards = resources[i].maxforwards; addresources[i].sipmethod = resources[i].sipmethod; addresources[i].sipuri = resources[i].sipuri; addresources[i].sipreguri = resources[i].sipreguri; addresources[i].send = resources[i].send; addresources[i].recv = resources[i].recv; addresources[i].query = resources[i].query; addresources[i].querytype = resources[i].querytype; addresources[i].scriptname = resources[i].scriptname; addresources[i].scriptargs = resources[i].scriptargs; addresources[i].dispatcherip = resources[i].dispatcherip; addresources[i].dispatcherport = resources[i].dispatcherport; addresources[i].username = resources[i].username; addresources[i].password = resources[i].password; addresources[i].secondarypassword = resources[i].secondarypassword; addresources[i].logonpointname = resources[i].logonpointname; addresources[i].lasversion = resources[i].lasversion; addresources[i].radkey = resources[i].radkey; addresources[i].radnasid = resources[i].radnasid; addresources[i].radnasip = resources[i].radnasip; addresources[i].radaccounttype = resources[i].radaccounttype; addresources[i].radframedip = resources[i].radframedip; addresources[i].radapn = resources[i].radapn; addresources[i].radmsisdn = resources[i].radmsisdn; addresources[i].radaccountsession = resources[i].radaccountsession; addresources[i].lrtm = resources[i].lrtm; addresources[i].deviation = resources[i].deviation; addresources[i].units1 = resources[i].units1; addresources[i].interval = resources[i].interval; addresources[i].units3 = resources[i].units3; addresources[i].resptimeout = resources[i].resptimeout; addresources[i].units4 = resources[i].units4; addresources[i].resptimeoutthresh = resources[i].resptimeoutthresh; addresources[i].retries = resources[i].retries; addresources[i].failureretries = resources[i].failureretries; addresources[i].alertretries = resources[i].alertretries; addresources[i].successretries = resources[i].successretries; addresources[i].downtime = resources[i].downtime; addresources[i].units2 = resources[i].units2; addresources[i].destip = resources[i].destip; addresources[i].destport = resources[i].destport; addresources[i].state = resources[i].state; addresources[i].reverse = resources[i].reverse; addresources[i].transparent = resources[i].transparent; addresources[i].iptunnel = resources[i].iptunnel; addresources[i].tos = resources[i].tos; addresources[i].tosid = resources[i].tosid; addresources[i].secure = resources[i].secure; addresources[i].validatecred = resources[i].validatecred; addresources[i].domain = resources[i].domain; addresources[i].ipaddress = resources[i].ipaddress; addresources[i].group = resources[i].group; addresources[i].filename = resources[i].filename; addresources[i].basedn = resources[i].basedn; addresources[i].binddn = resources[i].binddn; addresources[i].filter = resources[i].filter; 
addresources[i].attribute = resources[i].attribute; addresources[i].database = resources[i].database; addresources[i].sqlquery = resources[i].sqlquery; addresources[i].evalrule = resources[i].evalrule; addresources[i].mssqlprotocolversion = resources[i].mssqlprotocolversion; addresources[i].Snmpoid = resources[i].Snmpoid; addresources[i].snmpcommunity = resources[i].snmpcommunity; addresources[i].snmpthreshold = resources[i].snmpthreshold; addresources[i].snmpversion = resources[i].snmpversion; addresources[i].metrictable = resources[i].metrictable; addresources[i].application = resources[i].application; addresources[i].sitepath = resources[i].sitepath; addresources[i].storename = resources[i].storename; addresources[i].storefrontacctservice = resources[i].storefrontacctservice; addresources[i].hostname = resources[i].hostname; addresources[i].netprofile = resources[i].netprofile; addresources[i].originhost = resources[i].originhost; addresources[i].originrealm = resources[i].originrealm; addresources[i].hostipaddress = resources[i].hostipaddress; addresources[i].vendorid = resources[i].vendorid; addresources[i].productname = resources[i].productname; addresources[i].firmwarerevision = resources[i].firmwarerevision; addresources[i].authapplicationid = resources[i].authapplicationid; addresources[i].acctapplicationid = resources[i].acctapplicationid; addresources[i].inbandsecurityid = resources[i].inbandsecurityid; addresources[i].supportedvendorids = resources[i].supportedvendorids; addresources[i].vendorspecificvendorid = resources[i].vendorspecificvendorid; addresources[i].vendorspecificauthapplicationids = resources[i].vendorspecificauthapplicationids; addresources[i].vendorspecificacctapplicationids = resources[i].vendorspecificacctapplicationids; addresources[i].kcdaccount = resources[i].kcdaccount; addresources[i].storedb = resources[i].storedb; } result = add_bulk_request(client, addresources); } return result; }
java
private int getInitialPartitionCount() throws IllegalAccessException { AppointerState currentState = m_state.get(); if (currentState != AppointerState.INIT && currentState != AppointerState.CLUSTER_START) { throw new IllegalAccessException("Getting cached partition count after cluster " + "startup"); } return m_initialPartitionCount; }
python
def _netbsd_interfaces_ifconfig(out):
    '''
    Uses ifconfig to return a dictionary of interfaces with various information
    about each (up/down state, ip address, netmask, and hwaddr)
    '''
    ret = dict()

    piface = re.compile(r'^([^\s:]+)')
    pmac = re.compile('.*?address: ([0-9a-f:]+)')

    pip = re.compile(r'.*?inet [^\d]*(.*?)/([\d]*)\s')
    pip6 = re.compile(r'.*?inet6 ([0-9a-f:]+)%([a-zA-Z0-9]*)/([\d]*)\s')

    pupdown = re.compile('UP')
    pbcast = re.compile(r'.*?broadcast ([\d\.]+)')

    groups = re.compile('\r?\n(?=\\S)').split(out)
    for group in groups:
        data = dict()
        iface = ''
        updown = False
        for line in group.splitlines():
            miface = piface.match(line)
            mmac = pmac.match(line)
            mip = pip.match(line)
            mip6 = pip6.match(line)
            mupdown = pupdown.search(line)
            if miface:
                iface = miface.group(1)
            if mmac:
                data['hwaddr'] = mmac.group(1)
            if mip:
                if 'inet' not in data:
                    data['inet'] = list()
                addr_obj = dict()
                addr_obj['address'] = mip.group(1)
                mmask = mip.group(2)
                if mip.group(2):
                    addr_obj['netmask'] = cidr_to_ipv4_netmask(mip.group(2))
                mbcast = pbcast.match(line)
                if mbcast:
                    addr_obj['broadcast'] = mbcast.group(1)
                data['inet'].append(addr_obj)
            if mupdown:
                updown = True
            if mip6:
                if 'inet6' not in data:
                    data['inet6'] = list()
                addr_obj = dict()
                addr_obj['address'] = mip6.group(1)
                mmask6 = mip6.group(3)
                addr_obj['scope'] = mip6.group(2)
                addr_obj['prefixlen'] = mip6.group(3)
                data['inet6'].append(addr_obj)
        data['up'] = updown
        ret[iface] = data
        del data
    return ret
java
public int getDays() { switch (_units) { case Days: return _amount; case Weeks: return _amount * 7; case Months: if (_amount <= 1) return _amount * 30; else if (_amount < 12) return (_amount * 61) / 2; else return (_amount * 365) / 12; default: throw new UnsupportedOperationException(); } }
java
public boolean add(ByteBuf buf) {
    if (count == IOV_MAX) {
        // No more room!
        return false;
    } else if (buf.nioBufferCount() == 1) {
        final int len = buf.readableBytes();
        if (len == 0) {
            return true;
        }
        if (buf.hasMemoryAddress()) {
            return add(buf.memoryAddress(), buf.readerIndex(), len);
        } else {
            ByteBuffer nioBuffer = buf.internalNioBuffer(buf.readerIndex(), len);
            return add(Buffer.memoryAddress(nioBuffer), nioBuffer.position(), len);
        }
    } else {
        ByteBuffer[] buffers = buf.nioBuffers();
        for (ByteBuffer nioBuffer : buffers) {
            final int len = nioBuffer.remaining();
            if (len != 0 &&
                (!add(Buffer.memoryAddress(nioBuffer), nioBuffer.position(), len) || count == IOV_MAX)) {
                return false;
            }
        }
        return true;
    }
}
python
def get_smart_task(self, task_id):
    """
    Return the specified smart task.

    Returns a Command.
    """
    def process_result(result):
        return SmartTask(self, result)

    return Command('get', [ROOT_SMART_TASKS, task_id],
                   process_result=process_result)
python
def fmt_second(time_total):
    """
    >>> fmt_second(100)
    '00:01:40'
    """
    def _ck(t):
        return t < 10 and "0%s" % t or t

    times = int(time_total)
    # Floor division keeps the components integers under Python 3;
    # plain "/" would produce floats and break the doctest above.
    h = times // 3600
    m = times % 3600 // 60
    s = times % 3600 % 60
    return "%s:%s:%s" % (_ck(h), _ck(m), _ck(s))
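A quick check of the corrected arithmetic with a hypothetical input:

# 3661 s = 1 h, 1 min, 1 s
print(fmt_second(3661))   # -> '01:01:01'
print(fmt_second(100))    # -> '00:01:40', matching the doctest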
java
@SuppressWarnings("unchecked") @Override public EList<IfcCovering> getRelatedCoverings() { return (EList<IfcCovering>) eGet(Ifc4Package.Literals.IFC_REL_COVERS_SPACES__RELATED_COVERINGS, true); }
java
public static void badConversion(Field destinationField, Class<?> destinationClass, Field sourceField, Class<?> sourceClass,String plus){ throw new UndefinedMappingException(MSG.INSTANCE.message(undefinedMappingException,destinationField.getName(),destinationClass.getSimpleName(),sourceField.getName(),sourceClass.getSimpleName()) + ". More information: "+plus); }
java
@Nullable public static Resource mergeResources(List<Resource> resources) { Resource currentResource = null; for (Resource resource : resources) { currentResource = merge(currentResource, resource); } return currentResource; }
python
def with_name(self, name):
    """Return a new path with the file name changed."""
    if not self.name:
        raise ValueError("%r has an empty name" % (self,))
    return self._from_parsed_parts(self._drv, self._root,
                                   self._parts[:-1] + [name])
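This mirrors pathlib's PurePath.with_name; the stdlib analogue below shows the expected behavior (a sketch using the standard library, not the class above):

from pathlib import PurePosixPath

p = PurePosixPath('/downloads/report.txt')
print(p.with_name('summary.csv'))   # -> /downloads/summary.csv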
java
@SuppressWarnings("unchecked") public EList<IfcClassificationNotationSelect> getRelatedClassifications() { return (EList<IfcClassificationNotationSelect>) eGet( Ifc2x3tc1Package.Literals.IFC_CONSTRAINT_CLASSIFICATION_RELATIONSHIP__RELATED_CLASSIFICATIONS, true); }
java
public static SameDiff fromFlatBuffers(ByteBuffer bbIn) throws IOException { FlatGraph fg = FlatGraph.getRootAsFlatGraph(bbIn); int numOps = fg.nodesLength(); int numVars = fg.variablesLength(); List<FlatNode> ops = new ArrayList<>(numOps); for( int i=0; i<numOps; i++ ){ ops.add(fg.nodes(i)); } List<FlatVariable> vars = new ArrayList<>(numVars); for( int i = 0; i < numVars; i++) { vars.add(fg.variables(i)); } FlatConfiguration conf = fg.configuration(); /* Reconstruct the graph We'll do the reconstruction manually here, rather than using sd.var(...), so that we have more control over the final result. */ SameDiff sd = SameDiff.create(); //Reconstruct placeholders int numPlaceholders = fg.placeholdersLength(); Set<String> ph = new LinkedHashSet<>(); for(int i=0; i<numPlaceholders; i++ ){ ph.add(fg.placeholders(i)); } //Reconstruct variables: Map<Integer,SDVariable> varNodeIds = new HashMap<>(); Map<Pair<Integer,Integer>, SDVariable> variablesByNodeAndOutNum = new HashMap<>(); Map<String,List<SDVariable>> variablesByName = new HashMap<>(); for(FlatVariable v : vars){ int shapeLength = v.shapeLength(); long[] shape = new long[shapeLength]; for( int i = 0; i < shapeLength; i++) { shape[i] = v.shape(i); } String n = v.name(); byte dtypeByte = v.dtype(); org.nd4j.linalg.api.buffer.DataType dtype = FlatBuffersMapper.getDataTypeFromByte(dtypeByte); //TODO Infer this properly! Could be constant, etc. VariableType vt = VariableType.values()[v.variabletype()]; SDVariable var = new SDVariable(n, vt, sd, shape, dtype, null); sd.variables.put(n, Variable.builder().name(n).variable(var).build()); sd.variableNameToShape.put(n, shape); FlatArray fa = v.ndarray(); if(fa != null && vt != VariableType.ARRAY){ INDArray arr; try(MemoryWorkspace ws = Nd4j.getWorkspaceManager().scopeOutOfWorkspaces()) { arr = Nd4j.createFromFlatArray(fa); } sd.setArrayForVariable(n, arr); } IntPair id = v.id(); //First value: node (op) id. 
Second: output number variablesByNodeAndOutNum.put(new Pair<>(id.first(), id.second()), var); if(!variablesByName.containsKey(n)){ variablesByName.put(n, new ArrayList<SDVariable>()); } List<SDVariable> list = variablesByName.get(n); list.add(var); } //Reconstruct ops: for(FlatNode fn : ops){ DifferentialFunction df = FlatBuffersMapper.fromFlatNode(fn); String name = fn.name(); df.setSameDiff(sd); df.setOwnName(name); if(sd.ops.containsKey(name)){ sd.ops.get(name).setOp(df); } else { sd.ops.put(name, SameDiffOp.builder().name(name).op(df).build()); } int outLength = fn.outputLength(); int[] outs = new int[outLength]; for( int i=0; i<outLength; i++ ){ outs[i] = fn.output(i); } int opId = fn.id(); //Work out inputs and outputs: int[] output = new int[fn.outputLength()]; for (int i = 0; i < output.length; i++) { output[i] = fn.output(i); } int[] input = new int[fn.inputLength()]; for (int i = 0; i < input.length; i++) { input[i] = fn.input(i); } IntPair[] inputPaired = new IntPair[fn.inputPairedLength()]; List<Pair<Integer,Integer>> intPairList = new ArrayList<>(); for (int i = 0; i < inputPaired.length; i++) { inputPaired[i] = fn.inputPaired(i); intPairList.add(new Pair<>(inputPaired[i].first(), inputPaired[i].second())); } String[] inputNames = new String[inputPaired.length]; for(int i=0; i<inputPaired.length; i++ ){ int nodeId = inputPaired[i].first(); int nodeOutNum = inputPaired[i].second(); SDVariable varIn = variablesByNodeAndOutNum.get(new Pair<>(nodeId, nodeOutNum)); if(varIn == null){ //The variable corresponding to this op was not } inputNames[i] = varIn.getVarName(); } sd.ops.get(df.getOwnName()).setInputsToOp(Arrays.asList(inputNames)); //Record that input variables are input to this op for(String inName : inputNames) { Variable v = sd.getVariables().get(inName); if(v.getInputsForOp() == null){ v.setInputsForOp(new ArrayList<String>()); } if(!v.getInputsForOp().contains(df.getOwnName())){ v.getInputsForOp().add(df.getOwnName()); } } List<SDVariable> varsForOp = variablesByName.get(name); //Can't assume that variables for the op have all been defined. For example, if we export before execution in SameDiff //In theory, we can reconstruct the output variables (minus names) if we know the number of op outputs //And we can calculate the op outputs - in most cases - after the op has been created and parameters set int numOutputs = df.getNumOutputs(); if(numOutputs <= 0){ numOutputs = fn.outputLength(); } String[] varNames = null; if(varsForOp != null && varsForOp.size() == numOutputs){ varNames = new String[varsForOp.size()]; for( int i=0; i<varNames.length; i++ ){ varNames[i] = varsForOp.get(i).getVarName(); sd.getVariables().get(varNames[i]).setOutputOfOp(df.getOwnName()); } sd.ops.get(df.getOwnName()).setOutputsOfOp(Arrays.asList(varNames)); } else { //We're missing some variables... int outputNamesLength = fn.outputNamesLength(); varNames = new String[outputNamesLength]; for( int i=0; i<outputNamesLength; i++ ){ String n = fn.outputNames(i); varNames[i] = n; if(!sd.variables.containsKey(n)){ //Need to create the variable - perhaps it wasn't exported. 
Note output of node -> can only be VARIABLE type SDVariable var = new SDVariable(n, VariableType.VARIABLE, sd, null, null, null); sd.variables.put(n, Variable.builder().name(n).variable(var).build()); variablesByNodeAndOutNum.put(new Pair<>(opId, i), var); } sd.getVariables().get(varNames[i]).setOutputOfOp(df.getOwnName()); } sd.ops.get(df.getOwnName()).setOutputsOfOp(Arrays.asList(varNames)); } //Check the op mapping int he variablesByNodeAndOutputNum //For multi-output ops, variables will have their own index, not related to the op index for( int i=0; i<varNames.length; i++ ){ Pair<Integer,Integer> p = new Pair<>(opId, i); if(!variablesByNodeAndOutNum.containsKey(p)){ variablesByNodeAndOutNum.put(p, sd.getVariable(varNames[i])); } } } //Reconstruct loss variables if(fg.lossVariablesLength() > 0){ for(int i=0; i<fg.lossVariablesLength(); i++ ){ sd.addLossVariable(fg.lossVariables(i)); } } return sd; }
python
def _path(self, s):
    """Parse a path."""
    if s.startswith(b'"'):
        if not s.endswith(b'"'):
            self.abort(errors.BadFormat, '?', '?', s)
        else:
            return _unquote_c_string(s[1:-1])
    return s
java
public static Workbook createBook(boolean isXlsx) { Workbook workbook; if (isXlsx) { workbook = new XSSFWorkbook(); } else { workbook = new org.apache.poi.hssf.usermodel.HSSFWorkbook(); } return workbook; }
java
@SuppressWarnings("PMD.UseStringBufferForStringAppends") public static String getContextMarkers(final AdminContext context) { String res = ""; if (isForAdmin(context)) { if (isForMain(context)) { res += "M"; } res += "A"; } return res; }
java
public static Object newInstance(Class type) {
    Constructor _constructor = null;
    Object[] _constructorArgs = new Object[0];
    try {
        _constructor = type.getConstructor(new Class[] {});// first try the default no-arg constructor
    } catch (NoSuchMethodException e) {
        // ignore
    }
    if (_constructor == null) {// no default constructor; fall back to a parameterized one
        Constructor[] constructors = type.getConstructors();
        if (constructors.length == 0) {
            throw new UnsupportedOperationException("Class[" + type.getName() + "] has no public constructors");
        }
        _constructor = constructors[0];// take the first constructor by default
        Class[] params = _constructor.getParameterTypes();
        _constructorArgs = new Object[params.length];
        for (int i = 0; i < params.length; i++) {
            _constructorArgs[i] = getDefaultValue(params[i]);
        }
    }
    return ReflectUtils.newInstance(_constructor, _constructorArgs);
}
java
boolean shouldCollect(@NonNull Context context, @NonNull CoreConfiguration config, @NonNull ReportField collect, @NonNull ReportBuilder reportBuilder) { return config.reportContent().contains(collect); }
python
def tagify(suffix='', prefix='', base=SALT):
    '''
    convenience function to build a namespaced event tag string
    from joining with the TAGPARTER character the base, prefix and suffix

    If string prefix is a valid key in TAGS Then use the value of key prefix
    Else use prefix string

    If suffix is a list Then join all string elements of suffix individually
    Else use string suffix
    '''
    parts = [base, TAGS.get(prefix, prefix)]
    if hasattr(suffix, 'append'):  # list so extend parts
        parts.extend(suffix)
    else:  # string so append
        parts.append(suffix)
    for index, _ in enumerate(parts):
        try:
            parts[index] = salt.utils.stringutils.to_str(parts[index])
        except TypeError:
            parts[index] = str(parts[index])
    return TAGPARTER.join([part for part in parts if part])
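A usage sketch; it assumes Salt's conventions that TAGPARTER is '/' and base defaults to 'salt', so the parts join into a namespaced tag:

# Hypothetical call, assuming TAGPARTER == '/' and 'job' in TAGS maps to 'job':
tagify(['20230101000000000000', 'ret'], 'job')
# -> 'salt/job/20230101000000000000/ret'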
python
def applicant(self, column=None, value=None, **kwargs):
    """
    Find the applicant information for a grant.

    >>> GICS().applicant('zip_code', 94105)
    """
    return self._resolve_call('GIC_APPLICANT', column, value, **kwargs)
java
public String addLock(Session session, String path) throws LockException {
    String repoPath = session.getRepository().hashCode() + "/" + session.getWorkspace().getName() + "/" + path;

    if (!nullResourceLocks.containsKey(repoPath)) {
        String newLockToken = IdGenerator.generate();
        session.addLockToken(newLockToken);
        nullResourceLocks.put(repoPath, newLockToken);
        return newLockToken;
    }

    // check if lock owned by this session
    String currentToken = nullResourceLocks.get(repoPath);
    for (String t : session.getLockTokens()) {
        if (t.equals(currentToken))
            return t;
    }

    throw new LockException("Resource already locked " + repoPath);
}
java
@Pure public Iterable<RoadSegment> roadSegments() { return new Iterable<RoadSegment>() { @Override public Iterator<RoadSegment> iterator() { return roadSegmentsIterator(); } }; }
python
def build_model(self, n_features, n_classes):
    """Create the computational graph.

    :param n_features: number of features
    :param n_classes: number of classes
    :return: self
    """
    self._create_placeholders(n_features, n_classes)
    self._create_variables(n_features, n_classes)

    self.mod_y = tf.nn.softmax(
        tf.add(tf.matmul(self.input_data, self.W_), self.b_))

    self.cost = self.loss.compile(self.mod_y, self.input_labels)
    self.train_step = tf.train.GradientDescentOptimizer(
        self.learning_rate).minimize(self.cost)
    self.accuracy = Evaluation.accuracy(self.mod_y, self.input_labels)
java
private String computeEventDescriptor(Method method) {
    StringBuilder sb = new StringBuilder();

    // Add event class and method name
    sb.append(method.getDeclaringClass().getName());
    sb.append(".");
    sb.append(method.getName());

    // Add event arguments
    Class[] parms = method.getParameterTypes();
    sb.append("(");
    for (int i = 0; i < parms.length; i++)
        appendTypeDescriptor(sb, parms[i]);
    sb.append(")");

    // Add event return type
    appendTypeDescriptor(sb, method.getReturnType());

    return sb.toString();
}
java
@Override
public HttpResponse serve(ServiceRequestContext ctx, HttpRequest req) throws Exception {
    try {
        switch (req.method()) {
            case OPTIONS:
                return doOptions(ctx, req);
            case GET:
                return doGet(ctx, req);
            case HEAD:
                return doHead(ctx, req);
            case POST:
                return doPost(ctx, req);
            case PUT:
                return doPut(ctx, req);
            case PATCH:
                return doPatch(ctx, req);
            case DELETE:
                return doDelete(ctx, req);
            case TRACE:
                return doTrace(ctx, req);
            default:
                return HttpResponse.of(HttpStatus.METHOD_NOT_ALLOWED);
        }
    } finally {
        final RequestLogBuilder logBuilder = ctx.logBuilder();
        if (!logBuilder.isRequestContentDeferred()) {
            // Set the requestContent to null by default.
            // An implementation can override this behavior by setting the requestContent
            // in the do*() implementation or by calling deferRequestContent().
            logBuilder.requestContent(null, null);
        }
        // do*() methods are expected to set the serialization format before returning.
        logBuilder.serializationFormat(SerializationFormat.NONE);
    }
}
java
public Map<String, Object> evaluateVerifyCredentialsResponse(String responseBody) {
    String endpoint = TwitterConstants.TWITTER_ENDPOINT_VERIFY_CREDENTIALS;
    Map<String, Object> responseValues = null;
    try {
        responseValues = populateJsonResponse(responseBody);
    } catch (JoseException e) {
        return createErrorResponse("TWITTER_RESPONSE_NOT_JSON", new Object[] { endpoint, e.getLocalizedMessage(), responseBody });
    }

    Map<String, Object> result = checkForEmptyResponse(endpoint, responseBody, responseValues);
    if (result != null) {
        return result;
    }

    // Ensure response contains email
    result = checkForRequiredParameters(endpoint, responseValues, TwitterConstants.RESPONSE_EMAIL);
    if (result != null) {
        return result;
    }

    responseValues.put(TwitterConstants.RESULT_RESPONSE_STATUS, TwitterConstants.RESULT_SUCCESS);
    return responseValues;
}
java
public void makeSpace(int sizeNeeded) { int needed = count + sizeNeeded - buf.length; if (needed > 0) { bump(needed); } }
java
public static PactDslJsonRootValue stringType(String example) { PactDslJsonRootValue value = new PactDslJsonRootValue(); value.setValue(example); value.setMatcher(TypeMatcher.INSTANCE); return value; }
python
def predict(self, Xnew, full_cov=False, Y_metadata=None, kern=None,
            likelihood=None, include_likelihood=True):
    """
    Predict the function(s) at the new point(s) Xnew. This includes the
    likelihood variance added to the predicted underlying function
    (usually referred to as f).

    In order to predict without adding in the likelihood give
    `include_likelihood=False`, or refer to self.predict_noiseless().

    :param Xnew: The points at which to make a prediction
    :type Xnew: np.ndarray (Nnew x self.input_dim)
    :param full_cov: whether to return the full covariance matrix, or just
                     the diagonal
    :type full_cov: bool
    :param Y_metadata: metadata about the predicting point to pass to the
                       likelihood
    :param kern: The kernel to use for prediction (defaults to the model
                 kern). this is useful for examining e.g. subprocesses.
    :param include_likelihood: Whether or not to add likelihood noise to
                               the predicted underlying latent function f.
    :type include_likelihood: bool

    :returns: (mean, var):
        mean: posterior mean, a Numpy array, Nnew x self.input_dim
        var: posterior variance, a Numpy array, Nnew x 1 if full_cov=False,
             Nnew x Nnew otherwise

        If full_cov and self.input_dim > 1, the return shape of var is
        Nnew x Nnew x self.input_dim. If self.input_dim == 1, the return
        shape is Nnew x Nnew. This is to allow for different normalizations
        of the output dimensions.

    Note: If you want the predictive quantiles (e.g. 95% confidence
    interval) use :py:func:`~GPy.core.gp.GP.predict_quantiles`.
    """
    # Predict the latent function values
    mean, var = self._raw_predict(Xnew, full_cov=full_cov, kern=kern)

    if include_likelihood:
        # now push through likelihood
        if likelihood is None:
            likelihood = self.likelihood
        mean, var = likelihood.predictive_values(mean, var, full_cov,
                                                 Y_metadata=Y_metadata)

    if self.normalizer is not None:
        mean = self.normalizer.inverse_mean(mean)

        # We need to create 3d array for the full covariance matrix with
        # multiple outputs.
        if full_cov & (mean.shape[1] > 1):
            var = self.normalizer.inverse_covariance(var)
        else:
            var = self.normalizer.inverse_variance(var)

    return mean, var
python
def encode_date_optional_time(obj):
    """
    ISO encode timezone-aware datetimes
    """
    if isinstance(obj, datetime.datetime):
        return timezone("UTC").normalize(
            obj.astimezone(timezone("UTC"))).strftime('%Y-%m-%dT%H:%M:%SZ')

    raise TypeError("{0} is not JSON serializable".format(repr(obj)))
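The raise-TypeError-for-everything-else shape is exactly what json.dumps expects of a default hook; a usage sketch, assuming pytz's timezone (as the normalize call suggests):

import datetime
import json
from pytz import timezone

ts = timezone("US/Pacific").localize(datetime.datetime(2020, 1, 1, 12, 0))
print(json.dumps({"ts": ts}, default=encode_date_optional_time))
# -> {"ts": "2020-01-01T20:00:00Z"}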
python
def discard_until(fd, s, deadline):
    """
    Read chunks from `fd` until one is encountered that ends with `s`. This
    is used to skip output produced by ``/etc/profile``, ``/etc/motd`` and
    mandatory SSH banners while waiting for :attr:`Stream.EC0_MARKER` to
    appear, indicating the first stage is ready to receive the compressed
    :mod:`mitogen.core` source.

    :param int fd:
        File descriptor to read from.
    :param bytes s:
        Marker string to discard until encountered.
    :param float deadline:
        Absolute UNIX timestamp after which timeout should occur.

    :raises mitogen.core.TimeoutError:
        Attempt to read beyond deadline.
    :raises mitogen.parent.EofError:
        All streams indicated EOF, suggesting the child process has exited.
    :raises mitogen.core.StreamError:
        Attempt to read past end of file.
    """
    it = iter_read([fd], deadline)
    try:
        for buf in it:
            if IOLOG.level == logging.DEBUG:
                for line in buf.splitlines():
                    IOLOG.debug('discard_until: discarding %r', line)
            if buf.endswith(s):
                return
    finally:
        it.close()
python
def show_system_monitor_output_switch_status_component_status_component_name(self, **kwargs):
    """Auto Generated Code
    """
    config = ET.Element("config")
    show_system_monitor = ET.Element("show_system_monitor")
    config = show_system_monitor
    output = ET.SubElement(show_system_monitor, "output")
    switch_status = ET.SubElement(output, "switch-status")
    component_status = ET.SubElement(switch_status, "component-status")
    component_name = ET.SubElement(component_status, "component-name")
    component_name.text = kwargs.pop('component_name')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
java
@Override public String toImplementationClassName(final String className) { final String implementationClassName = interfaceToImplementationMap.get(className); if (implementationClassName != null) { return implementationClassName; } final int index = className.lastIndexOf('.'); if (index < 0) { return getImplementationPackageName() + "." + className + implementationSuffix; } return className.substring(0, index) + "." + getImplementationPackageName() + "." + className.substring(index + 1) + implementationSuffix; }
java
@POST @Path("{id}/email/confirm") @PermitAll public Response confirmChangeEmail(@PathParam("id") Long userId, EmailRequest request) { checkNotNull(userId); checkNotNull(request.getToken()); boolean isSuccess = userService.confirmEmailAddressChangeUsingToken(userId, request.getToken()); return isSuccess ? Response.noContent().build() : Response.status(Response.Status.BAD_REQUEST).build(); }
python
def set_warning_handler(codec, handler, data=None):
    """Wraps openjp2 library function opj_set_warning_handler.

    Set the warning handler used by openjpeg.

    Parameters
    ----------
    codec : CODEC_TYPE
        Codec initialized by create_compress function.
    handler : python function
        The callback function to be used.
    data : anything
        User/client data.

    Raises
    ------
    RuntimeError
        If the OpenJPEG library routine opj_set_warning_handler fails.
    """
    OPENJP2.opj_set_warning_handler.argtypes = [CODEC_TYPE,
                                                ctypes.c_void_p,
                                                ctypes.c_void_p]
    OPENJP2.opj_set_warning_handler.restype = check_error
    OPENJP2.opj_set_warning_handler(codec, handler, data)
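The handler passed through must be a ctypes-callable; a hedged sketch of wiring one up, where the CFUNCTYPE signature is an assumption based on OpenJPEG's opj_msg_callback, `void (*)(const char *msg, void *client_data)`:

import ctypes

# Assumed callback signature: void (*)(const char *msg, void *client_data)
MSG_CALLBACK = ctypes.CFUNCTYPE(None, ctypes.c_char_p, ctypes.c_void_p)

@MSG_CALLBACK
def warn(msg, client_data):
    print('OpenJPEG warning:', msg.decode('utf-8').strip())

# codec would come from elsewhere, e.g. create_compress(...):
# set_warning_handler(codec, warn)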
java
public List executeQuery(String query, EntityMetadata m, KunderaQuery kunderaQuery) {
    DataFrame dataFrame = getDataFrame(query, m, kunderaQuery);
    // dataFrame.show();
    return dataHandler.loadDataAndPopulateResults(dataFrame, m, kunderaQuery);
}
python
def IntegerLike(msg=None):
    '''
    Checks whether a value is:

        - int, or
        - long, or
        - float without a fractional part, or
        - str or unicode composed only of digits
    '''
    def fn(value):
        if not any([
            isinstance(value, numbers.Integral),
            (isinstance(value, float) and value.is_integer()),
            (isinstance(value, basestring) and value.isdigit())
        ]):
            raise Invalid(msg or (
                'Invalid input <{0}>; expected an integer'.format(value))
            )
        else:
            return value
    return fn
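The factory shape (returning fn) matches voluptuous-style validators; a usage sketch assuming Invalid comes from voluptuous and the code runs under Python 2 (it references basestring):

# Hypothetical usage inside a voluptuous schema:
from voluptuous import Schema

schema = Schema({'count': IntegerLike()})
schema({'count': '42'})   # passes: digit string
schema({'count': 3.0})    # passes: float with no fractional part
schema({'count': 3.5})    # raises Invalid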
java
public static ZonedDateTime from(TemporalAccessor temporal) { if (temporal instanceof ZonedDateTime) { return (ZonedDateTime) temporal; } try { ZoneId zone = ZoneId.from(temporal); if (temporal.isSupported(INSTANT_SECONDS)) { long epochSecond = temporal.getLong(INSTANT_SECONDS); int nanoOfSecond = temporal.get(NANO_OF_SECOND); return create(epochSecond, nanoOfSecond, zone); } else { LocalDate date = LocalDate.from(temporal); LocalTime time = LocalTime.from(temporal); return of(date, time, zone); } } catch (DateTimeException ex) { throw new DateTimeException("Unable to obtain ZonedDateTime from TemporalAccessor: " + temporal + " of type " + temporal.getClass().getName(), ex); } }
java
int ingestFSEdits() throws IOException { FSDirectory fsDir = fsNamesys.dir; int numEdits = 0; long recentOpcodeOffsets[] = new long[2]; Arrays.fill(recentOpcodeOffsets, -1); EnumMap<FSEditLogOpCodes, Holder<Integer>> opCounts = new EnumMap<FSEditLogOpCodes, Holder<Integer>>(FSEditLogOpCodes.class); boolean error = false; boolean reopen = false; boolean quitAfterScan = false; long sharedLogTxId = FSEditLogLoader.TXID_IGNORE; long localLogTxId = FSEditLogLoader.TXID_IGNORE; FSEditLogOp op = null; FSEditLog localEditLog = fsDir.fsImage.getEditLog(); while (running && !quitAfterScan) { // if the application requested that we make a final pass over // the transaction log, then we remember it here. We close and // reopen the file to ensure that we can see all the data in the // file, one reason being that NFS has open-to-close cache // coherancy and the edit log could be stored in NFS. // if (reopen || lastScan) { inputEditStream.close(); inputEditStream = standby.setupIngestStreamWithRetries(startTxId); if (lastScan) { // QUIESCE requested by Standby thread LOG.info("Ingest: Starting last scan of transaction log: " + this.toString()); quitAfterScan = true; } // discard older buffers and start a fresh one. inputEditStream.refresh(currentPosition, localEditLog.getLastWrittenTxId()); setCatchingUp(); reopen = false; } // // Process all existing transactions till end of file // while (running) { if (lastScan && !quitAfterScan) { // Standby thread informed the ingest to quiesce // we should refresh the input stream as soon as possible // then quitAfterScan will be true break; } // record the current file offset. currentPosition = inputEditStream.getPosition(); InjectionHandler.processEvent(InjectionEvent.INGEST_BEFORE_LOAD_EDIT); fsNamesys.writeLock(); try { error = false; op = ingestFSEdit(inputEditStream); /* * In the case of segments recovered on primary namenode startup, we * have segments that are finalized (by name), but not containing the * ending transaction. Without this check, we will keep looping until * the next checkpoint to discover this situation. */ if (!inputEditStream.isInProgress() && standby.getLastCorrectTxId() == inputEditStream.getLastTxId()) { // this is a correct segment with no end segment transaction LOG.info("Ingest: Reached finalized log segment end with no end marker. " + this.toString()); tearDown(localEditLog, false, true); break; } if (op == null) { FSNamesystem.LOG.debug("Ingest: Invalid opcode, reached end of log " + "Number of transactions found " + numEdits); break; // No more transactions. } sharedLogTxId = op.txid; // Verify transaction ids match. 
localLogTxId = localEditLog.getLastWrittenTxId() + 1; // Fatal error only when the log contains transactions from the future // we allow to process a transaction with smaller txid than local // we will simply skip it later after reading from the ingest edits if (localLogTxId < sharedLogTxId || InjectionHandler .falseCondition(InjectionEvent.INGEST_TXID_CHECK)) { String message = "The transaction id in the edit log : " + sharedLogTxId + " does not match the transaction id inferred" + " from FSIMAGE : " + localLogTxId; LOG.fatal(message); throw new RuntimeException(message); } // skip previously loaded transactions if (!canApplyTransaction(sharedLogTxId, localLogTxId, op)) continue; // for recovery, we do not want to re-load transactions, // but we want to populate local log with them if (shouldLoad(sharedLogTxId)) { FSEditLogLoader.loadEditRecord( logVersion, inputEditStream, recentOpcodeOffsets, opCounts, fsNamesys, fsDir, numEdits, op); } LOG.info("Ingest: " + this.toString() + ", size: " + inputEditStream.length() + ", processing transaction at offset: " + currentPosition + ", txid: " + op.txid + ", opcode: " + op.opCode); if (op.opCode == FSEditLogOpCodes.OP_START_LOG_SEGMENT) { LOG.info("Ingest: Opening log segment: " + this.toString()); localEditLog.open(); } else if (op.opCode == FSEditLogOpCodes.OP_END_LOG_SEGMENT) { InjectionHandler .processEventIO(InjectionEvent.INGEST_CLEAR_STANDBY_STATE); LOG.info("Ingest: Closing log segment: " + this.toString()); tearDown(localEditLog, true, true); numEdits++; LOG.info("Ingest: Reached log segment end. " + this.toString()); break; } else { localEditLog.logEdit(op); if (inputEditStream.getReadChecksum() != FSEditLog .getChecksumForWrite().getValue()) { throw new IOException( "Ingest: mismatched r/w checksums for transaction #" + numEdits); } } numEdits++; standby.setLastCorrectTxId(op.txid); } catch (ChecksumException cex) { LOG.info("Checksum error reading the transaction #" + numEdits + " reopening the file"); reopen = true; break; } catch (IOException e) { LOG.info("Encountered error reading transaction", e); error = true; // if we haven't reached eof, then error. break; } finally { if (localEditLog.isOpen()) { localEditLog.logSyncIfNeeded(); } fsNamesys.writeUnlock(); } } // end inner while(running) -- all breaks come here // if we failed to read the entire transaction from disk, // then roll back to the offset where there was a last good // read, sleep for sometime for new transaction to // appear in the file and then continue; if (error || running) { // discard older buffers and start a fresh one. inputEditStream.refresh(currentPosition, localEditLog.getLastWrittenTxId()); setCatchingUp(); if (error) { LOG.info("Ingest: Incomplete transaction record at offset " + inputEditStream.getPosition() + " but the file is of size " + inputEditStream.length() + ". Continuing...."); } if (running && !lastScan) { try { Thread.sleep(100); // sleep for a second } catch (InterruptedException e) { // break out of waiting if we receive an interrupt. } } } } //end outer while(running) ///////////////////// FINAL ACTIONS ///////////////////// // This was the last scan of the file but we could not read a full // transaction from disk. If we proceed this will corrupt the image if (error) { String errorMessage = FSEditLogLoader.getErrorMessage(recentOpcodeOffsets, currentPosition); LOG.error(errorMessage); throw new IOException("Failed to read the edits log. 
" + "Incomplete transaction at " + currentPosition); } // If the last Scan was completed, then stop the Ingest thread. if (lastScan && quitAfterScan) { LOG.info("Ingest: lastScan completed. " + this.toString()); running = false; if(localEditLog.isOpen()) { // quiesced non-finalized segment LOG.info("Ingest: Reached non-finalized log segment end. "+ this.toString()); tearDown(localEditLog, false, localLogTxId != startTxId); } } FSEditLogLoader.dumpOpCounts(opCounts); return numEdits; // total transactions consumed }
java
protected void redirectToReferrer(String defaultReference) { String referrer = context.requestHeader("Referer"); referrer = referrer == null? defaultReference: referrer; redirect(referrer); }
java
@Override public AttributeDefinition[] getAttributeDefinitions(int filter) { List<AttributeDefinition> adList; if (filter == ObjectClassDefinition.ALL) { adList = new ArrayList<AttributeDefinition>(requiredAttributeDefinitions.size() + optionalAttributeDefinitions.size()); adList.addAll(requiredAttributeDefinitions); adList.addAll(optionalAttributeDefinitions); } else if (filter == ObjectClassDefinition.OPTIONAL) { adList = optionalAttributeDefinitions; } else if (filter == ObjectClassDefinition.REQUIRED) { adList = requiredAttributeDefinitions; } else { throw new IllegalArgumentException("Unexpected filter value: " + filter); } AttributeDefinition[] ads = new AttributeDefinition[adList.size()]; return adList.toArray(ads); }
java
public void addData(byte[] data) throws InternalLogException { if (tc.isEntryEnabled()) Tr.entry(tc, "addData",new java.lang.Object[] {RLSUtils.toHexString(data,RLSUtils.MAX_DISPLAY_BYTES), this}); // If the parent recovery log instance has experienced a serious internal error then prevent // this operation from executing. if (_recLog.failed()) { if (tc.isEntryEnabled()) Tr.exit(tc, "addData",this); throw new InternalLogException(null); } if (_singleData) { // This recoverable unit section can hold only a single data item. if (tc.isDebugEnabled()) Tr.debug(tc, "Section can hold only a single data item."); if (_writtenData.size() > 0) { // It already contains written data. This must be replaced. if (tc.isDebugEnabled()) Tr.debug(tc, "There is existing WRITTEN data."); // Clear out the written data array and move this into the unwritten data array _writtenData.clear(); _unwrittenData.add(0, data); _singleDataUpdated = true; // Ensure that the last data reference is accurate. _lastDataItem = data; } else if (_unwrittenData.size() > 0) { // It already contains unwritten data. This must be replaced. if (tc.isDebugEnabled()) Tr.debug(tc, "There is existing UNWRITTEN data."); _unwrittenData.clear(); _unwrittenData.add(0, data); // Ensure that the last data reference is accurate. _lastDataItem = data; } else { // It contains no existing data. Create a new DataItem wrapper if (tc.isDebugEnabled()) Tr.debug(tc, "There is no existing data."); _unwrittenData.add(0, data); _lastDataItem = data; } } else { // This recoverable unit section can hold multiple data item. if (tc.isDebugEnabled()) Tr.debug(tc, "Section holds multiple data items"); _unwrittenData.add(data); _lastDataItem = data; } if (tc.isEntryEnabled()) Tr.exit(tc, "addData"); }
java
public void listAll(final String orderBy, final SuccessCallback<List<T>> successCallback, final ErrorCallback errorCallback) { checkNotNull(successCallback); checkNotNull(errorCallback); final Callable<List<T>> call = new Callable<List<T>>() { @Override public List<T> call() throws Exception { return SugarRecord.listAll(getSugarClass(), orderBy); } }; final Future<List<T>> future = doInBackground(call); List<T> objects; try { objects = future.get(); if (null == objects || objects.isEmpty()) { errorCallback.onError(new Exception("There are no objects in the database")); } else { successCallback.onSuccess(objects); } } catch (Exception e) { errorCallback.onError(e); } }
python
def generate_VJ_junction_transfer_matrices(self):
    """Compute the transfer matrices for the VJ junction.

    Sets the attributes Tvj, Svj, Dvj, lTvj, and lDvj.
    """
    nt2num = {'A': 0, 'C': 1, 'G': 2, 'T': 3}

    #Compute Tvj
    Tvj = {}
    for aa in self.codons_dict.keys():
        current_Tvj = np.zeros((4, 4))
        for init_nt in 'ACGT':
            for codon in self.codons_dict[aa]:
                current_Tvj[nt2num[codon[2]], nt2num[init_nt]] += self.Rvj[nt2num[codon[2]], nt2num[codon[1]]]*self.Rvj[nt2num[codon[1]], nt2num[codon[0]]] * self.Rvj[nt2num[codon[0]], nt2num[init_nt]]
        Tvj[aa] = current_Tvj

    #Compute Svj
    Svj = {}
    for aa in self.codons_dict.keys():
        current_Svj = np.zeros((4, 4))
        for ins_nt in 'ACGT':
            if any([codon.startswith(ins_nt) for codon in self.codons_dict[aa]]):
                current_Svj[nt2num[ins_nt], :] = self.Rvj[nt2num[ins_nt], :]
        Svj[aa] = current_Svj

    #Compute Dvj
    Dvj = {}
    for aa in self.codons_dict.keys():
        current_Dvj = np.zeros((4, 4))
        for init_nt in 'ACGT':
            for codon in self.codons_dict[aa]:
                current_Dvj[nt2num[codon[2]], nt2num[init_nt]] += self.Rvj[nt2num[codon[1]], nt2num[codon[0]]] * self.Rvj[nt2num[codon[0]], nt2num[init_nt]]
        Dvj[aa] = current_Dvj

    #Compute lTvj
    lTvj = {}
    for aa in self.codons_dict.keys():
        current_lTvj = np.zeros((4, 4))
        for codon in self.codons_dict[aa]:
            current_lTvj[nt2num[codon[2]], nt2num[codon[0]]] += self.Rvj[nt2num[codon[2]], nt2num[codon[1]]]*self.first_nt_bias_insVJ[nt2num[codon[1]]]
        lTvj[aa] = current_lTvj

    #Compute lDvj
    lDvj = {}
    for aa in self.codons_dict.keys():
        current_lDvj = np.zeros((4, 4))
        for codon in self.codons_dict[aa]:
            current_lDvj[nt2num[codon[2]], nt2num[codon[0]]] += self.first_nt_bias_insVJ[nt2num[codon[1]]]
        lDvj[aa] = current_lDvj

    #Set the attributes
    self.Tvj = Tvj
    self.Svj = Svj
    self.Dvj = Dvj
    self.lTvj = lTvj
    self.lDvj = lDvj
java
public Transliterator safeClone() { UnicodeFilter filter = getFilter(); if (filter != null && filter instanceof UnicodeSet) { filter = new UnicodeSet((UnicodeSet)filter); } return new AnyTransliterator(getID(), filter, target, targetScript, widthFix, cache); }
java
public static boolean skipUntil(InputStream in, byte separator) throws IOException { int r; while((r = in.read()) >= 0) { if(((byte) r) == separator) { return true; } } return false; }
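A rough Python analogue of the same skip-until-separator loop (not the library's API), useful for checking the boundary behavior: the separator byte itself is consumed, and the return value reports whether it was found before EOF.

python
import io

def skip_until(stream, separator: int) -> bool:
    """Consume bytes until `separator` is read; True if found, False at EOF."""
    while True:
        b = stream.read(1)
        if not b:                # EOF reached without the separator
            return False
        if b[0] == separator:    # separator found and consumed
            return True

buf = io.BytesIO(b'header:payload')
print(skip_until(buf, ord(':')))  # True
print(buf.read())                 # b'payload' -- the ':' was consumed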
python
def _swap_rows(self, i, j):
    """Swap rows i and j.

    As a side effect, the determinant flips sign.

    """
    L = np.eye(3, dtype='intc')
    L[i, i] = 0
    L[j, j] = 0
    L[i, j] = 1
    L[j, i] = 1
    self._L.append(L.copy())
    self._A = np.dot(L, self._A)
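The determinant flip is easy to verify with a standalone version of the same elementary matrix; this sketch uses numpy directly rather than the class state above.

python
import numpy as np

def swap_matrix(i, j):
    # elementary matrix that swaps rows i and j when left-multiplied
    L = np.eye(3, dtype='intc')
    L[i, i] = L[j, j] = 0
    L[i, j] = L[j, i] = 1
    return L

A = np.array([[2, 0, 0], [0, 3, 0], [0, 0, 5]])
L = swap_matrix(0, 2)
print(np.linalg.det(A))      # ~ 30.0
print(np.linalg.det(L @ A))  # ~ -30.0 -- a row swap negates the determinant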
python
def _handle_switch(self, node, scope, ctxt, stream):
    """Handle a switch node.

    :node: the switch AST node
    :scope: the current scope
    :ctxt: the current context
    :stream: the input data stream
    :returns: None
    """
    def exec_case(idx, cases):
        # keep executing cases until a break is found,
        # or they've all been executed
        for case in cases[idx:]:
            stmts = case.stmts
            try:
                for stmt in stmts:
                    self._handle_node(stmt, scope, ctxt, stream)
            except errors.InterpBreak as e:
                break

    def get_stmts(stmts, res=None):
        if res is None:
            res = []
        stmts = self._flatten_list(stmts)
        for stmt in stmts:
            if isinstance(stmt, tuple):
                stmt = stmt[1]
            res.append(stmt)
            if stmt.__class__ in [AST.Case, AST.Default]:
                get_stmts(stmt.stmts, res)
        return res

    def get_cases(nodes):
        cases = []
        stmts = get_stmts(nodes)
        for stmt in stmts:
            if stmt.__class__ in [AST.Case, AST.Default]:
                cases.append(stmt)
                stmt.stmts = []
            else:
                cases[-1].stmts.append(stmt)
        return cases

    cond = self._handle_node(node.cond, scope, ctxt, stream)

    default_idx = None
    found_match = False

    cases = getattr(node, "pfp_cases", None)
    if cases is None:
        cases = get_cases(node.stmt.children())
        node.pfp_cases = cases

    for idx, child in enumerate(cases):
        if child.__class__ == AST.Default:
            default_idx = idx
            continue
        elif child.__class__ == AST.Case:
            expr = self._handle_node(child.expr, scope, ctxt, stream)
            if expr == cond:
                found_match = True
                exec_case(idx, cases)
                break

    if default_idx is not None and not found_match:
        exec_case(default_idx, cases)
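The fall-through behavior of exec_case can be illustrated in isolation; this sketch mimics it with plain callables and a Break exception instead of the interpreter's AST machinery, so all the names here are illustrative rather than the library's.

python
class Break(Exception):
    pass

def case1(log): log.append('case 1')                   # no break: falls through
def case2(log): log.append('case 2'); raise Break()    # break: stops execution
def case3(log): log.append('case 3')                   # never reached

def exec_cases(idx, cases, log):
    # run cases from the matched index onward until a break
    for body in cases[idx:]:
        try:
            body(log)
        except Break:
            return

log = []
exec_cases(0, [case1, case2, case3], log)
print(log)  # ['case 1', 'case 2']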
java
private String buildReportChapter(final BuildData buildData) {
    log.info("\tBuilding Report Chapter");

    final ContentSpec contentSpec = buildData.getContentSpec();
    final String locale = buildData.getBuildLocale();
    final ZanataDetails zanataDetails = buildData.getZanataDetails();
    String reportChapter = "";

    final List<TopicErrorData> noContentTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.NO_CONTENT);
    final List<TopicErrorData> invalidInjectionTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.INVALID_INJECTION);
    final List<TopicErrorData> invalidContentTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.INVALID_CONTENT);
    final List<TopicErrorData> invalidImageTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.INVALID_IMAGES);
    final List<TopicErrorData> untranslatedTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.UNTRANSLATED);
    final List<TopicErrorData> incompleteTranslatedTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.INCOMPLETE_TRANSLATION);
    final List<TopicErrorData> fuzzyTranslatedTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.FUZZY_TRANSLATION);
    final List<TopicErrorData> notPushedTranslatedTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.NOT_PUSHED_FOR_TRANSLATION);
    final List<TopicErrorData> oldTranslatedTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.OLD_TRANSLATION);
    final List<TopicErrorData> oldUntranslatedTopics = buildData.getErrorDatabase().getErrorsOfType(locale, ErrorType.OLD_UNTRANSLATED);

    final List<String> list = new LinkedList<String>();
    list.add(DocBookUtilities.buildListItem("Total Number of Errors: " + getNumErrors()));
    list.add(DocBookUtilities.buildListItem("Total Number of Warnings: " + getNumWarnings()));
    list.add(DocBookUtilities.buildListItem("Number of Topics with No Content: " + noContentTopics.size()));
    list.add(DocBookUtilities.buildListItem("Number of Topics with Invalid Injection points: " + invalidInjectionTopics.size()));
    list.add(DocBookUtilities.buildListItem("Number of Topics with Invalid Content: " + invalidContentTopics.size()));
    list.add(DocBookUtilities.buildListItem("Number of Topics with Invalid Image references: " + invalidImageTopics.size()));

    if (buildData.isTranslationBuild()) {
        list.add(DocBookUtilities.buildListItem(
                "Number of Topics that haven't been pushed for Translation: " + notPushedTranslatedTopics.size()));
        list.add(DocBookUtilities.buildListItem("Number of Topics that haven't been Translated: " + untranslatedTopics.size()));
        list.add(DocBookUtilities.buildListItem(
                "Number of Topics that have incomplete Translations: " + incompleteTranslatedTopics.size()));
        list.add(DocBookUtilities.buildListItem("Number of Topics that have fuzzy Translations: " + fuzzyTranslatedTopics.size()));
        list.add(DocBookUtilities.buildListItem(
                "Number of Topics that haven't been Translated but are using previous revisions: " + oldUntranslatedTopics.size()));
        list.add(DocBookUtilities.buildListItem(
                "Number of Topics that have been Translated using a previous revision: " + oldTranslatedTopics.size()));
    }

    reportChapter += DocBookUtilities.wrapListItems(list, "Build Statistics");

    // Add a link to show the zanata statistics
    if (buildData.isTranslationBuild()) {
        reportChapter += generateAllTopicZanataUrl(buildData);
    }

    final boolean showEditorLinks = buildData.getBuildOptions().getInsertEditorLinks();

    // Create the Report Tables
    reportChapter += ReportUtilities.buildReportTable(noContentTopics, "Topics that have no Content", showEditorLinks, zanataDetails);
    reportChapter += ReportUtilities.buildReportTable(invalidContentTopics, "Topics that have Invalid XML Content", showEditorLinks, zanataDetails);
    reportChapter += ReportUtilities.buildReportTable(invalidInjectionTopics, "Topics that have Invalid Injection points in the XML", showEditorLinks, zanataDetails);
    reportChapter += ReportUtilities.buildReportTable(invalidImageTopics, "Topics that have Invalid Image references in the XML", showEditorLinks, zanataDetails);

    if (buildData.isTranslationBuild()) {
        reportChapter += ReportUtilities.buildReportTable(notPushedTranslatedTopics, "Topics that haven't been pushed for Translation", showEditorLinks, zanataDetails);
        reportChapter += ReportUtilities.buildReportTable(untranslatedTopics, "Topics that haven't been Translated", showEditorLinks, zanataDetails);
        reportChapter += ReportUtilities.buildReportTable(incompleteTranslatedTopics, "Topics that have Incomplete Translations", showEditorLinks, zanataDetails);
        reportChapter += ReportUtilities.buildReportTable(fuzzyTranslatedTopics, "Topics that have fuzzy Translations", showEditorLinks, zanataDetails);
        reportChapter += ReportUtilities.buildReportTable(oldUntranslatedTopics, "Topics that haven't been Translated but are using previous revisions", showEditorLinks, zanataDetails);
        reportChapter += ReportUtilities.buildReportTable(oldTranslatedTopics, "Topics that have been Translated using a previous revision", showEditorLinks, zanataDetails);
    }

    if (contentSpec.getBookType() == BookType.ARTICLE || contentSpec.getBookType() == BookType.ARTICLE_DRAFT) {
        return DocBookBuildUtilities.addDocBookPreamble(buildData.getDocBookVersion(),
                DocBookUtilities.buildSection(reportChapter, "Status Report"), "section", buildData.getEntityFileName());
    } else {
        return DocBookBuildUtilities.addDocBookPreamble(buildData.getDocBookVersion(),
                DocBookUtilities.buildChapter(reportChapter, "Status Report"), "chapter", buildData.getEntityFileName());
    }
}
java
public void reset(AccessibilityNodeInfoRef newNode) { reset(newNode.get()); mOwned = newNode.mOwned; newNode.mOwned = false; }
python
def format_entry(record, show_level=False, colorize=False): """ Format a log entry according to its level and context """ if show_level: log_str = u'{}: {}'.format(record.levelname, record.getMessage()) else: log_str = record.getMessage() if colorize and record.levelname in LOG_COLORS: log_str = u'<span color="{}">'.format(LOG_COLORS[record.levelname]) + log_str + u'</span>' return log_str
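A quick usage sketch for the formatter above, assuming format_entry and a LOG_COLORS mapping like this one are in scope; the LogRecord is built by hand here purely for the demo.

python
import logging

LOG_COLORS = {'ERROR': '#ff0000'}  # assumed mapping; the real one lives in the module

record = logging.LogRecord(
    name='demo', level=logging.ERROR, pathname=__file__, lineno=1,
    msg='disk %s is full', args=('sda1',), exc_info=None)

print(format_entry(record, show_level=True))
# ERROR: disk sda1 is full
print(format_entry(record, colorize=True))
# <span color="#ff0000">disk sda1 is full</span>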
java
public void createEnterpriseCustomFieldMap(Props props, Class<?> c) { byte[] fieldMapData = null; for (Integer key : ENTERPRISE_CUSTOM_KEYS) { fieldMapData = props.getByteArray(key); if (fieldMapData != null) { break; } } if (fieldMapData != null) { int index = 4; while (index < fieldMapData.length) { //Looks like the custom fields have varying types, it may be that the last byte of the four represents the type? //System.out.println(ByteArrayHelper.hexdump(fieldMapData, index, 4, false)); int typeValue = MPPUtility.getInt(fieldMapData, index); FieldType type = getFieldType(typeValue); if (type != null && type.getClass() == c && type.toString().startsWith("Enterprise Custom Field")) { int varDataKey = (typeValue & 0xFFFF); FieldItem item = new FieldItem(type, FieldLocation.VAR_DATA, 0, 0, varDataKey, 0, 0); m_map.put(type, item); //System.out.println(item); } //System.out.println((type == null ? "?" : type.getClass().getSimpleName() + "." + type) + " " + Integer.toHexString(typeValue)); index += 4; } } }
python
def nice_repr(name, param_kvs, line_width=30, line_offset=5, decimals=3,
              args=None, flatten_attrs=True):
    """
    tool to do a nice repr of a class.

    Parameters
    ----------
    name : str
        class name
    param_kvs : dict
        dict containing class parameters names as keys,
        and the corresponding values as values
    line_width : int
        desired maximum line width.
        default: 30
    line_offset : int
        desired offset for new lines
        default: 5
    decimals : int
        number of decimal places to keep for float values
        default: 3
    args : list or None, optional
        positional arguments to include before the keyword parameters
        default: None
    flatten_attrs : bool, optional
        whether to flatten sub-term properties (except ``terms``)
        default: True

    Returns
    -------
    out : str
        nicely formatted repr of class instance
    """
    if not param_kvs and not args:
        # if the object has no params it's easy
        return '{}()'.format(name)

    # sort keys and values
    ks = list(param_kvs.keys())
    vs = list(param_kvs.values())
    idxs = np.argsort(ks)
    param_kvs = [(ks[i], vs[i]) for i in idxs]

    if args is not None:
        param_kvs = [(None, arg) for arg in args] + param_kvs

    param_kvs = param_kvs[::-1]
    out = ''
    current_line = name + '('
    while len(param_kvs) > 0:
        # flatten sub-term properties, but not `terms`
        k, v = param_kvs.pop()
        if flatten_attrs and k != 'terms':
            v = flatten(v)

        # round the floats first
        if issubclass(v.__class__, (float, np.ndarray)):
            v = round_to_n_decimal_places(v, n=decimals)
            v = str(v)
        else:
            v = repr(v)

        # handle args
        if k is None:
            param = '{},'.format(v)
        else:
            param = '{}={},'.format(k, v)

        # print
        if len(current_line + param) <= line_width:
            current_line += param
        else:
            out += current_line + '\n'
            current_line = ' ' * line_offset + param

        if len(current_line) < line_width and len(param_kvs) > 0:
            current_line += ' '

    out += current_line[:-1]  # remove trailing comma
    out += ')'
    return out
java
public void getDescriptorsInRegion(int pixelX0 , int pixelY0 , int pixelX1 , int pixelY1 , List<TupleDesc_F64> output ) { int gridX0 = (int)Math.ceil(pixelX0/(double) pixelsPerCell); int gridY0 = (int)Math.ceil(pixelY0/(double) pixelsPerCell); int gridX1 = pixelX1/ pixelsPerCell - cellsPerBlockX; int gridY1 = pixelY1/ pixelsPerCell - cellsPerBlockY; for (int y = gridY0; y <= gridY1; y++) { int index = y*cellCols + gridX0; for (int x = gridX0; x <= gridX1; x++ ) { output.add( descriptions.get(index++) ); } } }
python
def add_equipamento_remove(self, id, id_ip, ids_ips_vips):
    '''Adds equipment to the list of equipment for the "remove virtual group" operation.

    :param id: Identifier of the equipment.
    :param id_ip: Identifier of the equipment's IP.
    :param ids_ips_vips: List of the identifiers of the IPs created for each VIP and associated with the equipment.

    :return: None
    '''
    equipament_map = dict()
    equipament_map['id'] = id
    equipament_map['id_ip'] = id_ip
    equipament_map['vips'] = {'id_ip_vip': ids_ips_vips}

    self.lista_equipamentos_remove.append(equipament_map)
java
@Nonnull
public static <T> PredicateBuilder<T> predicate(Consumer<Predicate<T>> consumer) {
    return new PredicateBuilder<>(consumer);
}
java
public com.squareup.okhttp.Call getCorporationsCorporationIdContractsAsync(Integer corporationId, String datasource, String ifNoneMatch, Integer page, String token, final ApiCallback<List<CorporationContractsResponse>> callback) throws ApiException { com.squareup.okhttp.Call call = getCorporationsCorporationIdContractsValidateBeforeCall(corporationId, datasource, ifNoneMatch, page, token, callback); Type localVarReturnType = new TypeToken<List<CorporationContractsResponse>>() { }.getType(); apiClient.executeAsync(call, localVarReturnType, callback); return call; }
java
public Response leave(String groupId, boolean deletePhotos) throws JinxException { JinxUtils.validateParams(groupId); Map<String, String> params = new TreeMap<>(); params.put("method", "flickr.groups.leave"); params.put("group_id", groupId); if (deletePhotos) { params.put("delete_photos", "true"); } return jinx.flickrPost(params, Response.class); }
java
List<Type> erasedSupertypes(Type t) { ListBuffer<Type> buf = new ListBuffer<>(); for (Type sup : closure(t)) { if (sup.hasTag(TYPEVAR)) { buf.append(sup); } else { buf.append(erasure(sup)); } } return buf.toList(); }
java
protected static void appendEntityNode(String alias, EntityKeyMetadata entityKeyMetadata, StringBuilder queryBuilder) { appendEntityNode( alias, entityKeyMetadata, queryBuilder, 0 ); }
python
def _get_param_names(cls):
    """Get parameter names for the estimator"""
    # fetch the constructor or the original constructor before
    # deprecation wrapping if any
    init = getattr(cls.__init__, 'deprecated_original', cls.__init__)
    if init is object.__init__:
        # No explicit constructor to introspect
        return []

    # introspect the constructor arguments to find the model parameters
    # to represent
    init_signature = signature(init)

    # Consider the constructor parameters excluding 'self' and **kwargs
    parameters = [p for p in init_signature.parameters.values()
                  if p.name != 'self' and p.kind != p.VAR_KEYWORD]

    # Collect the unique parameter names
    parameters = set([p.name for p in parameters])

    # recurse through the superclasses
    for superclass in cls.__bases__:
        try:
            parameters.update(superclass._get_param_names())
        except AttributeError:
            # object and pygsp.graphs.Graph don't have this method
            pass

    return parameters
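The same signature-based introspection can be seen on a toy class; this standalone sketch mirrors the filtering logic without the superclass recursion, and the class name is made up for the demo.

python
from inspect import signature

class ToyEstimator:
    def __init__(self, alpha=1.0, max_iter=100, **kwargs):
        pass

params = [
    p.name
    for p in signature(ToyEstimator.__init__).parameters.values()
    if p.name != 'self' and p.kind != p.VAR_KEYWORD  # drop self and **kwargs
]
print(sorted(params))  # ['alpha', 'max_iter']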
java
public static void main(String[] args) { Scanner scan = new Scanner(System.in); while (scan.hasNextLine()) { String line = scan.nextLine(); LogicExpression<String> expr = LogicExpressionParsers.trivial.parse(line); System.out.println("string: " + expr.toString()); System.out.println("value: " + expr.apply(null)); System.out.println(); } scan.close(); }
python
def LateBind(self, target=None): """Late binding callback. This method is called on this field descriptor when the target RDFValue class is finally defined. It gives the field descriptor an opportunity to initialize after the point of definition. Args: target: The target nested class. Raises: TypeError: If the target class is not of the expected type. """ if not issubclass(target, RDFProtoStruct): raise TypeError("Field %s expects a protobuf, but target is %s" % (self, target)) self.late_bound = False # The target type is now resolved. self.type = target # Register us in our owner. self.owner.AddDescriptor(self)
python
def get_ilo_firmware_version_as_major_minor(self):
    """Gets the iLO firmware version for server capabilities.

    :returns: String with the format "<major>.<minor>" or None.
    """
    try:
        manager, reset_uri = self._get_ilo_details()
        ilo_fw_ver_str = (
            manager['Oem']['Hp']['Firmware']['Current']['VersionString'])
        return common.get_major_minor(ilo_fw_ver_str)
    except Exception:
        return None
java
public BatchGetNamedQueryResult withNamedQueries(NamedQuery... namedQueries) { if (this.namedQueries == null) { setNamedQueries(new java.util.ArrayList<NamedQuery>(namedQueries.length)); } for (NamedQuery ele : namedQueries) { this.namedQueries.add(ele); } return this; }
java
public static Bitmap colorToBitmap(final int width, final int height, @ColorInt final int color) { Condition.INSTANCE.ensureAtLeast(width, 1, "The width must be at least 1"); Condition.INSTANCE.ensureAtLeast(height, 1, "The height must be at least 1"); Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(bitmap); Paint paint = new Paint(); paint.setColor(color); canvas.drawRect(0, 0, width, height, paint); return bitmap; }
python
def dimensions(self):
    """Get width and height of a PDF, in points."""
    size = self.pdf.getPage(0).mediaBox
    # mediaBox is [llx, lly, urx, ury]; use the differences so pages whose
    # lower-left corner is not at the origin are measured correctly too
    return {'w': float(size[2]) - float(size[0]),
            'h': float(size[3]) - float(size[1])}
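The mediaBox coordinate convention is the only subtlety here; a minimal arithmetic sketch with hypothetical US Letter values shows why the width and height are differences of corners rather than raw array entries.

python
# mediaBox is a rectangle [llx, lly, urx, ury] in PDF points (1 pt = 1/72 in).
# Most files have llx = lly = 0, but the robust width/height are the differences.
llx, lly, urx, ury = 0.0, 0.0, 612.0, 792.0  # US Letter, hypothetical values

dimensions = {'w': urx - llx, 'h': ury - lly}
print(dimensions)  # {'w': 612.0, 'h': 792.0}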
python
def subsample(self, down_to=1, new_path=None): """Pick a number of sequences from the file pseudo-randomly.""" # Auto path # if new_path is None: subsampled = self.__class__(new_temp_path()) elif isinstance(new_path, FASTA): subsampled = new_path else: subsampled = self.__class__(new_path) # Check size # if down_to > len(self): message = "Can't subsample %s down to %i. Only down to %i." print Color.ylw + message % (self, down_to, len(self)) + Color.end self.copy(new_path) return # Do it # subsampled.create() for seq in isubsample(self, down_to): subsampled.add_seq(seq) subsampled.close() # Did it work # assert len(subsampled) == down_to return subsampled
java
public static void listHandlers( List<IMonitoringHandler> handlers, Logger logger ) { if( handlers.isEmpty()) { logger.info( "No monitoring handler was found." ); } else { StringBuilder sb = new StringBuilder( "Available monitoring handlers: " ); for( Iterator<IMonitoringHandler> it = handlers.iterator(); it.hasNext(); ) { sb.append( it.next().getName()); if( it.hasNext()) sb.append( ", " ); } sb.append( "." ); logger.info( sb.toString()); } }
java
private void checkReferenceForeignkeys(ModelDef modelDef, ReferenceDescriptorDef refDef) throws ConstraintException { String foreignkey = refDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY); if ((foreignkey == null) || (foreignkey.length() == 0)) { throw new ConstraintException("The reference "+refDef.getName()+" in class "+refDef.getOwner().getName()+" has no foreignkeys"); } // we know that the class is present because the reference constraints have been checked already ClassDescriptorDef ownerClass = (ClassDescriptorDef)refDef.getOwner(); ArrayList keyFields; FieldDescriptorDef keyField; try { keyFields = ownerClass.getFields(foreignkey); } catch (NoSuchFieldException ex) { throw new ConstraintException("The reference "+refDef.getName()+" in class "+refDef.getOwner().getName()+" specifies a foreignkey "+ex.getMessage()+" that is not a persistent field in its owner class "+ownerClass.getName()); } for (int idx = 0; idx < keyFields.size(); idx++) { keyField = (FieldDescriptorDef)keyFields.get(idx); if (keyField.getBooleanProperty(PropertyHelper.OJB_PROPERTY_IGNORE, false)) { throw new ConstraintException("The reference "+refDef.getName()+" in class "+ownerClass.getName()+" uses the field "+keyField.getName()+" as foreignkey although this field is ignored in this class"); } } // for the referenced class and any subtype that is instantiable (i.e. not an interface or abstract class) // there must be the same number of primary keys and the jdbc types of the primary keys must // match the jdbc types of the foreignkeys (in the correct order) String targetClassName = refDef.getProperty(PropertyHelper.OJB_PROPERTY_CLASS_REF); ArrayList queue = new ArrayList(); ClassDescriptorDef referencedClass; ArrayList primFields; FieldDescriptorDef primField; String primType; String keyType; queue.add(modelDef.getClass(targetClassName)); while (!queue.isEmpty()) { referencedClass = (ClassDescriptorDef)queue.get(0); queue.remove(0); for (Iterator it = referencedClass.getExtentClasses(); it.hasNext();) { queue.add(it.next()); } if (!referencedClass.getBooleanProperty(PropertyHelper.OJB_PROPERTY_GENERATE_REPOSITORY_INFO, true)) { continue; } primFields = referencedClass.getPrimaryKeys(); if (primFields.size() != keyFields.size()) { throw new ConstraintException("The number of foreignkeys ("+keyFields.size()+") of the reference "+refDef.getName()+" in class "+refDef.getOwner().getName()+" doesn't match the number of primarykeys ("+primFields.size()+") of the referenced class (or its subclass) "+referencedClass.getName()); } for (int idx = 0; idx < primFields.size(); idx++) { keyField = (FieldDescriptorDef)keyFields.get(idx); primField = (FieldDescriptorDef)primFields.get(idx); primType = primField.getProperty(PropertyHelper.OJB_PROPERTY_JDBC_TYPE); keyType = keyField.getProperty(PropertyHelper.OJB_PROPERTY_JDBC_TYPE); if (!primType.equals(keyType)) { throw new ConstraintException("The jdbc-type of foreignkey "+keyField.getName()+" of the reference "+refDef.getName()+" in class "+refDef.getOwner().getName()+" doesn't match the jdbc-type of the corresponding primarykey "+primField.getName()+" of the referenced class (or its subclass) "+referencedClass.getName()); } } } }
java
protected void callChildVisitors(XSLTVisitor visitor, boolean callAttrs) { if(callAttrs) { if(null != m_name_avt) m_name_avt.callVisitors(visitor); if(null != m_namespace_avt) m_namespace_avt.callVisitors(visitor); } super.callChildVisitors(visitor, callAttrs); }
python
def highest_minor(python_versions): '''Return highest minor of a list of stable (semantic) versions. Example: >>> python_versions = [ ... '2.6.9', '2.7.14', '3.3.7', '3.4.8', '3.5.5', '3.6.4'] >>> highest_minor(python_versions) '3.6' ''' highest = python_versions[-1] major, minor, patch = highest.split('.', 2) return flo('{major}.{minor}')
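flo() interpolates names from the calling frame, much like an f-string; a sketch of the same function written with a plain f-string (assuming Python 3.6+) behaves identically.

python
def highest_minor(python_versions):
    '''Return highest minor of a sorted list of stable (semantic) versions.'''
    major, minor, _patch = python_versions[-1].split('.', 2)
    return f'{major}.{minor}'

print(highest_minor(['2.6.9', '2.7.14', '3.3.7', '3.6.4']))  # 3.6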
python
def cable_from_row(row): """\ Returns a cable from the provided row (a tuple/list). Format of the row: <identifier>, <creation-date>, <reference-id>, <origin>, <classification-level>, <references-to-other-cables>, <header>, <body> Note: The `<identifier>` and `<references-to-other-cables>` columns are ignored. `row` A tuple or list with 8 items. """ def format_creation_date(created): date, time = created.split() month, day, year, hour, minute = [x.zfill(2) for x in chain(date.split(u'/'), time.split(u':'))] return u'%s-%s-%s %s:%s' % (year, month, day, hour, minute) _, created, reference_id, origin, classification, _, header, body = row cable = Cable(reference_id) cable.created = format_creation_date(created) cable.origin = origin cable.classification = classification.upper() cable.header = header cable.content = body return cable
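The inner date normalization is the trickiest part of the function; run standalone, it zero-pads each component and reorders the US-style date (the sample value below is made up).

python
from itertools import chain

def format_creation_date(created):
    # '6/25/2010 13:45' -> '2010-06-25 13:45'
    date, time = created.split()
    month, day, year, hour, minute = [
        x.zfill(2) for x in chain(date.split('/'), time.split(':'))]
    return '%s-%s-%s %s:%s' % (year, month, day, hour, minute)

print(format_creation_date('6/25/2010 13:45'))  # 2010-06-25 13:45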
java
public void queryEvents(@NonNull final String conversationId, @NonNull final Long from, @NonNull final Integer limit, @Nullable Callback<ComapiResult<EventsQueryResponse>> callback) { adapter.adapt(queryEvents(conversationId, from, limit), callback); }
python
def to_jacobian(self): """ Converts this point to a Jacobian representation. Returns: JacobianPoint: The Jacobian representation. """ if not self: return JacobianPoint(X=0, Y=0, Z=0) return JacobianPoint(X=self.X, Y=self.Y, Z=1)
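Jacobian coordinates represent an affine point (x, y) as (X, Y, Z) with x = X/Z² and y = Y/Z³, so setting Z = 1 makes the conversion trivial; a minimal sketch with a namedtuple standing in for the library's classes.

python
from collections import namedtuple

JacobianPoint = namedtuple('JacobianPoint', 'X Y Z')

def to_jacobian(x, y):
    # Affine (x, y) maps to (X, Y, Z) = (x, y, 1): x = X/Z**2, y = Y/Z**3
    return JacobianPoint(X=x, Y=y, Z=1)

p = to_jacobian(5, 12)
assert (p.X / p.Z**2, p.Y / p.Z**3) == (5, 12)  # round-trips with Z = 1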
java
public void setConstraints(Component component, CellConstraints constraints) { checkNotNull(component, "The component must not be null."); checkNotNull(constraints, "The constraints must not be null."); constraints.ensureValidGridBounds(getColumnCount(), getRowCount()); constraintMap.put(component, (CellConstraints) constraints.clone()); }
python
def missing_output_files(self): """Make and return a dictionary of the missing output files. This returns a dictionary mapping filepath to list of links that produce the file as output. """ missing = self.check_output_files(return_found=False) ret_dict = {} for miss_file in missing: ret_dict[miss_file] = [self.linkname] return ret_dict
python
async def run_action(self, action_name, **params): """Run an action on this unit. :param str action_name: Name of action to run :param **params: Action parameters :returns: A :class:`juju.action.Action` instance. Note that this only enqueues the action. You will need to call ``action.wait()`` on the resulting `Action` instance if you wish to block until the action is complete. """ action_facade = client.ActionFacade.from_connection(self.connection) log.debug('Starting action `%s` on %s', action_name, self.name) res = await action_facade.Enqueue([client.Action( name=action_name, parameters=params, receiver=self.tag, )]) action = res.results[0].action error = res.results[0].error if error and error.code == 'not found': raise ValueError('Action `%s` not found on %s' % (action_name, self.name)) elif error: raise Exception('Unknown action error: %s' % error.serialize()) action_id = action.tag[len('action-'):] log.debug('Action started as %s', action_id) # we mustn't use wait_for_action because that blocks until the # action is complete, rather than just being in the model return await self.model._wait_for_new('action', action_id)
python
def add_song(fpath, g_songs, g_artists, g_albums):
    """
    parse music file metadata with EasyMP3/EasyMP4 and return a song model.
    """
    try:
        if fpath.endswith('mp3') or fpath.endswith('ogg') or fpath.endswith('wma'):
            metadata = EasyMP3(fpath)
        elif fpath.endswith('m4a'):
            metadata = EasyMP4(fpath)
        else:
            return None
    except MutagenError as e:
        logger.exception('Mutagen parse metadata failed, ignore.')
        return None

    metadata_dict = dict(metadata)
    for key in metadata.keys():
        metadata_dict[key] = metadata_dict[key][0]
    if 'title' not in metadata_dict:
        title = fpath.rsplit('/')[-1].split('.')[0]
        metadata_dict['title'] = title
    metadata_dict.update(dict(
        url=fpath,
        duration=metadata.info.length * 1000  # millisecond
    ))
    schema = EasyMP3MetadataSongSchema(strict=True)
    try:
        data, _ = schema.load(metadata_dict)
    except ValidationError:
        logger.exception('parsing metadata of music file ({}) failed'.format(fpath))
        return

    # NOTE: use {title}-{artists_name}-{album_name} as song identifier
    title = data['title']
    album_name = data['album_name']
    artist_name_list = [
        name.strip() for name in re.split(r'[,&]', data['artists_name'])]
    artists_name = ','.join(artist_name_list)
    duration = data['duration']
    album_artist_name = data['album_artist_name']

    # create the song model
    # the id string should carry as little redundant information as possible,
    # which may reduce the chance of id collisions
    song_id_str = ''.join([title, artists_name, album_name, str(int(duration))])
    song_id = gen_id(song_id_str)
    if song_id not in g_songs:
        # the album and lyric fields are left uninitialized here
        song = LSongModel(identifier=song_id,
                          artists=[],
                          title=title,
                          url=fpath,
                          duration=duration,
                          comments=[],
                          # the fields below are not exposed externally
                          genre=data['genre'],
                          cover=data['cover'],
                          date=data['date'],
                          desc=data['desc'],
                          disc=data['disc'],
                          track=data['track'])
        g_songs[song_id] = song
    else:
        song = g_songs[song_id]
        logger.debug('Duplicate song: %s %s', song.url, fpath)
        return

    # create the album artist model
    album_artist_id = gen_id(album_artist_name)
    if album_artist_id not in g_artists:
        album_artist = create_artist(album_artist_id, album_artist_name)
        g_artists[album_artist_id] = album_artist
    else:
        album_artist = g_artists[album_artist_id]

    # create the album model
    album_id_str = album_name + album_artist_name
    album_id = gen_id(album_id_str)
    if album_id not in g_albums:
        album = create_album(album_id, album_name)
        g_albums[album_id] = album
    else:
        album = g_albums[album_id]

    # maintain the album's artist/song lists and the album artist's album list
    if album not in album_artist.albums:
        album_artist.albums.append(album)
    if album_artist not in album.artists:
        album.artists.append(album_artist)
    if song not in album.songs:
        album.songs.append(song)

    # maintain the song's artist/album info and each artist's song list
    song.album = album
    for artist_name in artist_name_list:
        artist_id = gen_id(artist_name)
        if artist_id in g_artists:
            artist = g_artists[artist_id]
        else:
            artist = create_artist(identifier=artist_id, name=artist_name)
            g_artists[artist_id] = artist
        if artist not in song.artists:
            song.artists.append(artist)
        if song not in artist.songs:
            artist.songs.append(song)
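The identifier scheme deduplicates songs by hashing title, artists, album, and duration into a stable id; a minimal gen_id sketch under the assumption that any stable digest works (the library's real implementation may differ).

python
import hashlib

def gen_id(s: str) -> str:
    # hypothetical stand-in: stable hex digest of the identifying string
    return hashlib.md5(s.encode('utf-8')).hexdigest()

song_id = gen_id(''.join(['Yellow', 'Coldplay', 'Parachutes', str(266000)]))
print(song_id)  # same inputs always yield the same id, enabling dedup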
python
def do_reload(module: types.ModuleType, newer_than: int) -> bool: """ Executes the reload of the specified module if the source file that it was loaded from was updated more recently than the specified time :param module: A module object to be reloaded :param newer_than: The time in seconds since epoch that should be used to determine if the module needs to be reloaded. If the module source was modified more recently than this time, the module will be refreshed. :return: Whether or not the module was reloaded """ path = getattr(module, '__file__') directory = getattr(module, '__path__', [None])[0] if path is None and directory: path = os.path.join(directory, '__init__.py') last_modified = os.path.getmtime(path) if last_modified < newer_than: return False try: importlib.reload(module) return True except ImportError: return False
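Typical usage, assuming do_reload above is in scope: reload a module only if its source file changed within the last minute, computing the threshold from the current time.

python
import json
import time

# reload `json` only if its source file changed in the last 60 seconds
reloaded = do_reload(json, newer_than=time.time() - 60)
print('reloaded' if reloaded else 'unchanged')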