language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
private URI createFileSystemURI(URI configKey) throws URISyntaxException, IOException { // Validate the scheme String configKeyScheme = configKey.getScheme(); if (!configKeyScheme.startsWith(getSchemePrefix())) { throw new IllegalArgumentException( String.format("Scheme for configKey \"%s\" must begin with \"%s\"!", configKey, getSchemePrefix())); } if (Strings.isNullOrEmpty(configKey.getAuthority())) { return new URI(getPhysicalScheme(), getDefaultStoreFsLazy().getUri().getAuthority(), "", "", ""); } String uriPhysicalScheme = configKeyScheme.substring(getSchemePrefix().length(), configKeyScheme.length()); return new URI(uriPhysicalScheme, configKey.getAuthority(), "", "", ""); }
python
def abort(http_status_code, **kwargs):
    """Raise an :class:`HTTPException` for the given ``http_status_code``.

    Any keyword arguments are attached to the raised exception as
    ``e.data`` for later processing (e.g. building custom error payloads).

    :param http_status_code: HTTP status code to abort with.
    :raises HTTPException: always.
    """
    # noinspection PyUnresolvedReferences
    try:
        original_flask_abort(http_status_code)
    except HTTPException as e:
        if kwargs:  # idiomatic truthiness check instead of len(kwargs)
            e.data = kwargs
        raise
python
def sendWakeOnLan(self, macAddress, lanInterfaceId=1, timeout=1):
    """Send a wake up package to a device specified by its MAC address.

    :param str macAddress: MAC address in the form ``38:C9:86:26:7E:38``;
        be aware that the MAC address might be case sensitive, depending
        on the router
    :param int lanInterfaceId: the id of the LAN interface
    :param float timeout: the timeout to wait for the action to be executed
    :return: None
        (NOTE(review): the previous docstring claimed "the amount of known
        hosts" was returned, but this method discards the result of
        ``self.execute`` and returns nothing.)

    .. seealso:: :meth:`~simpletr64.actions.Lan.getHostDetailsByMACAddress`
    """
    # Build the per-interface namespace and resolve its control URL.
    namespace = Fritz.getServiceType("sendWakeOnLan") + str(lanInterfaceId)
    uri = self.getControlURL(namespace)
    self.execute(uri, namespace, "X_AVM-DE_WakeOnLANByMACAddress", timeout=timeout,
                 NewMACAddress=macAddress)
java
/**
 * Returns the first character of the named configuration value, or the
 * supplied default when the value is missing or empty.
 *
 * @param name the configuration key to look up
 * @param defaultValue the value returned when no usable string is found
 * @return the first character of the trimmed value, or {@code defaultValue}
 */
public char getChar(String name, char defaultValue) {
    final String raw = getString(name, null);
    // Guard clause: fall back immediately when nothing usable was configured.
    if (StringUtils.isNullOrEmpty(raw)) {
        return defaultValue;
    }
    return raw.trim().charAt(0);
}
python
def _clear_done_waiters(self): """Remove waiters that are done (should only happen if they are cancelled)""" while self._waiters and self._waiters[0].done(): self._waiters.popleft() if not self._waiters: self._stop_listening()
java
/**
 * Reads a single character by delegating to {@code read(char[], int, int)}
 * under the stream lock, so the single-char path shares the bulk read's
 * decoding logic.
 *
 * @return the character read (as an int), or -1 at end of stream
 * @throws IOException if the reader has been closed or the read fails
 */
@Override
public int read() throws IOException {
    synchronized (lock) {
        if (!isOpen()) {
            throw new IOException("InputStreamReader is closed.");
        }
        char[] buf = new char[1];  // idiomatic Java array declaration (was C-style `char buf[]`)
        return read(buf, 0, 1) != -1 ? buf[0] : -1;
    }
}
python
def save(self, filename):
    """Persist this validator object to ``filename`` using pickle.

    :param filename: path of the file to write (opened in binary mode).
    """
    with open(filename, 'wb') as fh:
        pickle.dump(self, fh, protocol=pickle.HIGHEST_PROTOCOL)
python
def assign_version_default_trigger(plpy, td):
    """Trigger to fill in legacy data fields.

    A compatibility trigger to fill in legacy data fields that are not
    populated when inserting publications from cnx-publishing. If this is
    not a legacy publication the ``version`` will be set based on the
    ``major_version`` value.

    Returns "MODIFY" when the row was changed, "OK" otherwise.
    """
    row = td['new']
    state = "OK"
    # Collections default minor_version to 1; None is the correct default
    # for modules, so those are left untouched.
    if row['minor_version'] is None and \
            row['portal_type'] in ('Collection', 'SubCollection'):
        row['minor_version'] = 1
        state = "MODIFY"
    # Derive the legacy version string from the major version when absent.
    if row['version'] is None:
        row['version'] = "1.{}".format(row['major_version'])
        state = "MODIFY"
    return state
java
/**
 * Sets the linear velocity of this rigid body by delegating to the native
 * physics implementation.
 *
 * @param x velocity component along the x axis
 * @param y velocity component along the y axis
 * @param z velocity component along the z axis
 */
public void setLinearVelocity(float x, float y, float z) {
    Native3DRigidBody.setLinearVelocity(getNative(), x, y, z);
}
java
/**
 * Configures this connection-pool builder from the given properties.
 *
 * <p>For most settings two keys are honored: the current
 * {@code ConfigurationProperties.*} key takes precedence, with a legacy
 * commons-pool style key (e.g. {@code "maxActive"}) supplying the default
 * when the current key is absent. The remaining settings only have
 * legacy-style keys.</p>
 *
 * @param properties the properties to read pool settings from
 * @return this builder, for chaining
 */
public ConnectionPoolConfigurationBuilder withPoolProperties(Properties properties) {
    TypedProperties typed = TypedProperties.toTypedProperties(properties);
    // Legacy "whenExhaustedAction" stores the enum's ordinal as an int.
    exhaustedAction(typed.getEnumProperty(ConfigurationProperties.CONNECTION_POOL_EXHAUSTED_ACTION, ExhaustedAction.class,
        ExhaustedAction.values()[typed.getIntProperty("whenExhaustedAction", exhaustedAction.ordinal(), true)],
        true));
    maxActive(typed.getIntProperty(ConfigurationProperties.CONNECTION_POOL_MAX_ACTIVE,
        typed.getIntProperty("maxActive", maxActive, true), true));
    maxWait(typed.getLongProperty(ConfigurationProperties.CONNECTION_POOL_MAX_WAIT,
        typed.getLongProperty("maxWait", maxWait, true), true));
    minIdle(typed.getIntProperty(ConfigurationProperties.CONNECTION_POOL_MIN_IDLE,
        typed.getIntProperty("minIdle", minIdle, true), true));
    minEvictableIdleTime(typed.getLongProperty(ConfigurationProperties.CONNECTION_POOL_MIN_EVICTABLE_IDLE_TIME,
        typed.getLongProperty("minEvictableIdleTimeMillis", minEvictableIdleTime, true), true));
    maxPendingRequests(typed.getIntProperty(ConfigurationProperties.CONNECTION_POOL_MAX_PENDING_REQUESTS,
        typed.getIntProperty("maxPendingRequests", maxPendingRequests, true), true));
    // Legacy-only settings below.
    lifo(typed.getBooleanProperty("lifo", lifo, true));
    maxTotal(typed.getIntProperty("maxTotal", maxTotal, true));
    maxIdle(typed.getIntProperty("maxIdle", maxIdle, true));
    numTestsPerEvictionRun(typed.getIntProperty("numTestsPerEvictionRun", numTestsPerEvictionRun, true));
    timeBetweenEvictionRuns(typed.getLongProperty("timeBetweenEvictionRunsMillis", timeBetweenEvictionRuns, true));
    testOnBorrow(typed.getBooleanProperty("testOnBorrow", testOnBorrow, true));
    testOnReturn(typed.getBooleanProperty("testOnReturn", testOnReturn, true));
    testWhileIdle(typed.getBooleanProperty("testWhileIdle", testWhileIdle, true));
    return this;
}
java
static String equalsParameterType(Map<ObjectMethod, ExecutableElement> methodsToGenerate) { ExecutableElement equals = methodsToGenerate.get(ObjectMethod.EQUALS); if (equals == null) { return ""; // this will not be referenced because no equals method will be generated } TypeMirror parameterType = equals.getParameters().get(0).asType(); return TypeEncoder.encodeWithAnnotations(parameterType); }
java
/**
 * Walks the given expression tree and infers a JVM local class for every
 * anonymous class found, naming them sequentially.
 *
 * @param context generation context holding the running local-type index
 * @param feature the feature whose declaring type anchors the synthesized names
 * @param expression the expression tree to scan; may be null (no-op)
 */
protected void initializeLocalTypes(GenerationContext context, JvmFeature feature, XExpression expression) {
    if (expression != null) {
        int localTypeIndex = context.getLocalTypeIndex();
        final TreeIterator<EObject> iterator = EcoreUtil2.getAllNonDerivedContents(expression, true);
        // Synthesized names follow the pattern "__<DeclaringType>_<index>".
        final String nameStub = "__" + feature.getDeclaringType().getSimpleName() + "_"; //$NON-NLS-1$ //$NON-NLS-2$
        while (iterator.hasNext()) {
            final EObject next = iterator.next();
            if (next.eClass() == XtendPackage.Literals.ANONYMOUS_CLASS) {
                inferLocalClass((AnonymousClass) next, nameStub + localTypeIndex, feature);
                // Skip this subtree; presumably nested anonymous classes are
                // handled by inferLocalClass itself — TODO confirm.
                iterator.prune();
                ++localTypeIndex;
            }
        }
        // Persist the index so later calls continue the numbering.
        context.setLocalTypeIndex(localTypeIndex);
    }
}
python
def flush_buffer(self):
    '''
    Flush the buffer of the tail.

    Joins the buffered fragments, clears the buffer, and sends the
    combined message, recording the flush time.
    '''
    if len(self.buffer) > 0:
        return_value = ''.join(self.buffer)
        self.buffer.clear()
        self.send_message(return_value)
        # NOTE(review): statement grouping reconstructed from a flattened
        # source line — confirm whether last_flush_date should also be
        # updated when the buffer is empty.
        self.last_flush_date = datetime.datetime.now()
python
def generate(self, api):
    """Generate one Python module per namespace in ``api``.

    Each emitted module takes the name of its namespace.
    """
    for namespace in api.namespaces.values():
        module_name = '{}.py'.format(namespace.name)
        with self.output_to_relative_path(module_name):
            self._generate_namespace_module(namespace)
java
/**
 * Returns a lazy map that, given a site path (String key), reads the
 * resource at that path and returns that resource's locale-resource map.
 *
 * <p>Non-String keys and read failures both yield an empty map; read
 * failures are additionally logged at warn level.</p>
 */
public Map<String, Map<String, CmsJspResourceWrapper>> getLocaleResource() {
    return CmsCollectionsGenericWrapper.createLazyMap(new Transformer() {

        @SuppressWarnings("synthetic-access")
        public Object transform(Object arg) {
            if (!(arg instanceof String)) {
                // Only String paths are supported as keys.
                return new HashMap<String, CmsJspResourceWrapper>();
            }
            String path = (String)arg;
            try {
                CmsResource res = m_cms.readResource(path);
                CmsJspResourceWrapper wrapper = CmsJspResourceWrapper.wrap(m_cms, res);
                return wrapper.getLocaleResource();
            } catch (Exception e) {
                // Best effort: log and fall back to an empty map.
                LOG.warn(e.getLocalizedMessage(), e);
                return new HashMap<String, CmsJspResourceWrapper>();
            }
        }
    });
}
python
def heappushpop(heap, item):
    """Push ``item`` on ``heap`` then pop and return the smallest element.

    Equivalent to a heappush() followed by a heappop(), but avoids the
    second sift when ``item`` is already <= the current root (or the heap
    is empty), in which case ``item`` is returned directly.
    """
    if heap and heap[0] < item:
        # The current root is smaller: return it and sift the new item down.
        item, heap[0] = heap[0], item
        _siftup(heap, 0)
    return item
java
/**
 * Sets the target progress of the wheel, where the input is a fraction in
 * [0, 1] that is mapped to degrees (0..360).
 *
 * @param progress the progress fraction; values above 1 are wrapped
 *         (not clamped) by subtracting 1, negatives are clamped to 0
 */
public void setProgress(float progress) {
    if (isSpinning) {
        // Leaving indeterminate (spinning) mode: reset progress and notify.
        mProgress = 0.0f;
        isSpinning = false;
        runCallback();
    }
    // NOTE: deliberate wrap-around for > 1 rather than a clamp.
    if (progress > 1.0f) {
        progress -= 1.0f;
    } else if (progress < 0) {
        progress = 0;
    }
    if (progress == mTargetProgress) {
        return;
    }
    // If we are currently in the right position
    // we set again the last time animated so the
    // animation starts smooth from here
    if (mProgress == mTargetProgress) {
        lastTimeAnimated = SystemClock.uptimeMillis();
    }
    mTargetProgress = Math.min(progress * 360.0f, 360.0f);
    invalidate();
}
python
def event_date(self, event_date):
    """Update the event date of this object.

    Args:
        event_date: date value; converted to %Y-%m-%dT%H:%M:%SZ format
            before being stored and sent.

    Returns:
        The result of the update request.
    """
    if not self.can_update():
        self._tcex.handle_error(910, [self.type])
    formatted = self._utils.format_datetime(event_date, date_format='%Y-%m-%dT%H:%M:%SZ')
    self._data['eventDate'] = formatted
    return self.tc_requests.update(
        self.api_type, self.api_sub_type, self.unique_id, {'eventDate': formatted}
    )
python
def _get_instance_params(cls, instance):
    """Parse instance configuration and perform minimal verification.

    Returns a tuple of (url, username, password, ssl_params,
    custom_queries, tags). ``ssl_params`` is None unless the url uses the
    ldaps scheme.
    """
    url = instance.get("url")
    if url is None:
        raise ConfigurationError("You must specify a url for your instance in `conf.yaml`")

    username = instance.get("username")
    password = instance.get("password")

    # TLS material is only relevant for ldaps:// URLs.
    if url.startswith("ldaps"):
        ssl_params = {
            "key": instance.get("ssl_key"),
            "cert": instance.get("ssl_cert"),
            "ca_certs": instance.get("ssl_ca_certs"),
            "verify": is_affirmative(instance.get("ssl_verify", True)),
        }
    else:
        ssl_params = None

    custom_queries = instance.get("custom_queries", [])
    # Copy the configured tags so the caller-owned list is not mutated.
    tags = list(instance.get("tags", []))
    tags.append("url:{}".format(url))
    return url, username, password, ssl_params, custom_queries, tags
java
/**
 * Returns whether the given option is enabled.
 *
 * @param opt the option name; must have been registered beforehand
 * @return true if the option is set
 * @throws InternalError if the option was never registered (i.e. this was
 *         called before allowedOptions or with a bad option name)
 */
boolean hasOption(String opt) {
    Boolean has = options.get(opt);
    if (has != null) {
        return has;
    }
    // A null entry means the option was never registered — programmer error.
    throw new InternalError("hasOption called before allowedOptions or on bad option");
}
java
/**
 * Registers a component bindings provider service, indexing its service
 * reference under each resource type the service declares via the
 * RESOURCE_TYPE_PROP property.
 *
 * @param reference the OSGi service reference to register
 */
public void registerComponentBindingsProvider(ServiceReference reference) {
    log.info("registerComponentBindingsProvider");
    log.info("Registering Component Bindings Provider {} - {}",
        new Object[] { reference.getProperty(Constants.SERVICE_ID), reference.getProperty(Constants.SERVICE_PID) });
    // A service with no declared resource types results in no registrations.
    String[] resourceTypes = OsgiUtil.toStringArray(reference
        .getProperty(ComponentBindingsProvider.RESOURCE_TYPE_PROP), new String[0]);
    for (String resourceType : resourceTypes) {
        // Lazily create one bucket of references per resource type.
        if (!this.containsKey(resourceType)) {
            put(resourceType, new HashSet<ServiceReference>());
        }
        log.debug("Adding to resource type {}", resourceType);
        get(resourceType).add(reference);
    }
}
java
/**
 * Replaces the current thread's context class loader, preserving the
 * original "push" token across repeated calls.
 *
 * @param origLoader the token from a previous push, or UNCHANGED for the
 *         first push on this thread
 * @param loader the class loader to install
 * @return the token to later restore the original loader with
 */
public Object repushContextClassLoaderForUnprivileged(Object origLoader, ClassLoader loader) {
    // First push on this thread: delegate so the original loader is captured.
    if (origLoader == UNCHANGED) {
        return pushContextClassLoaderForUnprivileged(loader);
    }
    // Already pushed before: just swap the loader and keep the original token.
    setContextClassLoaderForUnprivileged(Thread.currentThread(), loader);
    return origLoader;
}
java
/**
 * Finds a child element by element name and attribute name, where a null
 * attribute name maps to the "default" attribute bucket.
 *
 * @param elementName the child element's name
 * @param attributeName the attribute name, or null for "default"
 * @return the matching child, as returned by {@code findChild(String)}
 */
public VoltXMLElement findChild(String elementName, String attributeName) {
    final String suffix = (attributeName == null) ? "default" : attributeName;
    return findChild(elementName + suffix);
}
java
/**
 * Computes projection and view matrices for an eye looking at a rectangle
 * defined by corner {@code p} and edge vectors {@code x} and {@code y}.
 *
 * @param eye the eye/camera position
 * @param p a corner of the rectangle
 * @param x the rectangle's first edge vector
 * @param y the rectangle's second edge vector (also used as the view "up")
 * @param nearFarDist distance between near and far plane; negative values
 *        move the near plane towards the eye, +/- infinity select an
 *        infinite far or near plane respectively
 * @param zeroToOne whether the NDC z range is [0..1] instead of [-1..+1]
 * @param projDest receives the projection (frustum) matrix
 * @param viewDest receives the view (look-at) matrix
 */
public static void projViewFromRectangle(
        Vector3d eye, Vector3d p, Vector3d x, Vector3d y,
        double nearFarDist, boolean zeroToOne,
        Matrix4d projDest, Matrix4d viewDest) {
    // z = y cross x: the rectangle's plane normal.
    double zx = y.y * x.z - y.z * x.y, zy = y.z * x.x - y.x * x.z, zz = y.x * x.y - y.y * x.x;
    // zd = z dot (p - eye); flip z so it points from the eye towards the plane.
    double zd = zx * (p.x - eye.x) + zy * (p.y - eye.y) + zz * (p.z - eye.z);
    double zs = zd >= 0 ? 1 : -1;
    zx *= zs; zy *= zs; zz *= zs; zd *= zs;
    viewDest.setLookAt(eye.x, eye.y, eye.z, eye.x + zx, eye.y + zy, eye.z + zz, y.x, y.y, y.z);
    // Transform p and the rectangle extents into view space to obtain the
    // frustum's left/bottom corner (px, py) and width/height (tx, ty).
    double px = viewDest.m00 * p.x + viewDest.m10 * p.y + viewDest.m20 * p.z + viewDest.m30;
    double py = viewDest.m01 * p.x + viewDest.m11 * p.y + viewDest.m21 * p.z + viewDest.m31;
    double tx = viewDest.m00 * x.x + viewDest.m10 * x.y + viewDest.m20 * x.z;
    double ty = viewDest.m01 * y.x + viewDest.m11 * y.y + viewDest.m21 * y.z;
    // near = perpendicular distance from the eye to the rectangle plane.
    double len = Math.sqrt(zx * zx + zy * zy + zz * zz);
    double near = zd / len, far;
    if (Double.isInfinite(nearFarDist) && nearFarDist < 0.0) {
        // -infinity: swap roles — far at the plane, near at infinity.
        far = near;
        near = Double.POSITIVE_INFINITY;
    } else if (Double.isInfinite(nearFarDist) && nearFarDist > 0.0) {
        far = Double.POSITIVE_INFINITY;
    } else if (nearFarDist < 0.0) {
        // Negative distance: move the near plane towards the eye.
        far = near;
        near = near + nearFarDist;
    } else {
        far = near + nearFarDist;
    }
    projDest.setFrustum(px, px + tx, py, py + ty, near, far, zeroToOne);
}
java
/**
 * Updates the named column of the current row through the underlying
 * ResultSet and marks this wrapper as having pending updates.
 *
 * @param columnName the column to update
 * @param newValue the new value to set
 * @throws MissingPropertyException wrapping any SQLException from the update
 */
public void setProperty(String columnName, Object newValue) {
    try {
        getResultSet().updateObject(columnName, newValue);
        updated = true;  // remember that the row has uncommitted changes
    } catch (SQLException e) {
        throw new MissingPropertyException(columnName, GroovyResultSetProxy.class, e);
    }
}
java
/**
 * Sets the entries of this object.
 *
 * @param entries the entries to set; must not be null (individual array
 *         elements are not validated here)
 */
public final void setEntries(@NonNull final CharSequence[] entries) {
    Condition.INSTANCE.ensureNotNull(entries, "The entries may not be null");
    this.entries = entries;
}
java
/**
 * Formats a number into the string builder using this rule set, guarding
 * against unbounded recursion from badly formed rule sets.
 *
 * @param number the number to format
 * @param toInsertInto the builder receiving the formatted text
 * @param pos the insertion position
 * @param recursionCount the current recursion depth
 * @throws IllegalStateException if the recursion limit is exceeded
 */
public void format(long number, StringBuilder toInsertInto, int pos, int recursionCount) {
    if (recursionCount >= RECURSION_LIMIT) {
        // Fail fast instead of recursing forever on a badly-formed rule set.
        throw new IllegalStateException("Recursion limit exceeded when applying ruleSet " + name);
    }
    NFRule applicableRule = findNormalRule(number);
    // Pass the incremented depth down to the rule's formatting logic.
    applicableRule.doFormat(number, toInsertInto, pos, ++recursionCount);
}
java
/**
 * Rotates a 2D vector in place, mapping (x, y) to (y, -x), and returns it.
 *
 * @param v1 the 2-element vector to rotate in place
 * @return the same array, rotated
 */
public static double[] rotate90Equals(final double[] v1) {
    assert v1.length == 2 : "rotate90Equals is only valid for 2d vectors.";
    final double oldX = v1[0];
    v1[0] = v1[1];
    v1[1] = -oldX;
    return v1;
}
python
def set_simulate(self, status):
    """Set the simulation status.

    :param status: Value to set the simulation
    :type status: bool
    :returns: None
    :raises: InvalidInput
    """
    # isinstance is the idiomatic type check; behavior is identical to the
    # old `type(status) != bool` since bool cannot be subclassed.
    if not isinstance(status, bool):
        raise InvalidInput("Status value must be bool")
    self._simulate = bool2int(status)
python
def _patch_for_tf1_13(tf):
    """Monkey patch tf 1.13 so tfds can use it."""
    # Alias GFile under the tf.io.gfile namespace used by later TF versions.
    if not hasattr(tf.io.gfile, "GFile"):
        tf.io.gfile.GFile = tf.gfile.GFile
    # Expose tf.nest from its contrib location.
    if not hasattr(tf, "nest"):
        tf.nest = tf.contrib.framework.nest
    # Synthesize a minimal tf.compat.v2 namespace exposing DatasetV2.
    if not hasattr(tf.compat, "v2"):
        tf.compat.v2 = types.ModuleType("tf.compat.v2")
        tf.compat.v2.data = types.ModuleType("tf.compat.v2.data")
        from tensorflow.python.data.ops import dataset_ops
        tf.compat.v2.data.Dataset = dataset_ops.DatasetV2
    # Restore legacy output_shapes/output_types properties when missing.
    if not hasattr(tf.compat.v2.data.Dataset, "output_shapes"):
        from tensorflow.python.data.ops import dataset_ops
        if hasattr(dataset_ops, "get_legacy_output_shapes"):
            tf.compat.v2.data.Dataset.output_shapes = property(
                dataset_ops.get_legacy_output_shapes)
            tf.compat.v2.data.Dataset.output_types = property(
                dataset_ops.get_legacy_output_types)
python
def end_time(self):
    """Return the end time of the current valid segment of data."""
    valid_samples = len(self.strain) - self.total_corruption
    return float(self.strain.start_time + valid_samples / self.sample_rate)
python
def GetMessages(self, formatter_mediator, event):
    """Determines the formatted message strings for an event object.

    Args:
      formatter_mediator (FormatterMediator): mediates the interactions
          between formatters and other components, such as storage and
          Windows EventLog resources.
      event (EventObject): event.

    Returns:
      tuple(str, str): formatted message string and short message string.

    Raises:
      WrongFormatter: if the event object cannot be formatted by the
          formatter.
    """
    if self.DATA_TYPE != event.data_type:
        raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
            event.data_type))

    event_values = event.CopyToDict()

    # Replace the raw document type with its human readable name.
    document_type = event_values.get('document_type', None)
    if document_type:
        event_values['document_type'] = self._DOC_TYPES.get(
            document_type, 'UNKNOWN')

    # Map the boolean visibility flag onto a readable string.
    event_values['shared'] = (
        'Shared' if event_values.get('shared', False) else 'Private')

    return self._ConditionalFormatMessages(event_values)
java
/**
 * Sets the connect timeout on the underlying HttpClient's connection
 * manager parameters.
 *
 * @param timeout the timeout value (presumably milliseconds — confirm
 *         against the HttpClient version in use); must be non-negative
 * @throws IllegalArgumentException if {@code timeout} is negative
 */
public void setConnectTimeout(int timeout) {
    Assert.isTrue(timeout >= 0, "Timeout must be a non-negative value");
    this.httpClient.getHttpConnectionManager().getParams().setConnectionTimeout(timeout);
}
python
def build_url (self):
    """
    Calls super.build_url() and adds a trailing slash to directories.
    """
    self.build_base_url()
    if self.parent_url is not None:
        # URL joining with the parent URL only works if the query
        # of the base URL are removed first.
        # Otherwise the join function thinks the query is part of
        # the file name.
        from .urlbase import url_norm
        # norm base url - can raise UnicodeError from url.idna_encode()
        base_url, is_idn = url_norm(self.base_url, self.encoding)
        urlparts = list(urlparse.urlsplit(base_url))
        # ignore query part for filesystem urls
        urlparts[3] = ''
        self.base_url = urlutil.urlunsplit(urlparts)
    super(FileUrl, self).build_url()
    # ignore query and fragment url parts for filesystem urls
    self.urlparts[3] = self.urlparts[4] = ''
    # Directories must end with a slash; warn and fix if missing.
    if self.is_directory() and not self.urlparts[2].endswith('/'):
        self.add_warning(_("Added trailing slash to directory."), tag=WARN_FILE_MISSING_SLASH)
        self.urlparts[2] += '/'
    # Rebuild the final url from the (possibly adjusted) parts.
    self.url = urlutil.urlunsplit(self.urlparts)
python
def save_voxel_grid(voxel_grid, file_name):
    """ Saves binary voxel grid as a binary file.

    The binary file is structured in little-endian unsigned int format.

    :param voxel_grid: binary voxel grid
    :type voxel_grid: list, tuple
    :param file_name: file name to save
    :type file_name: str
    :raises IOError: if the file cannot be written (reported on stdout first)
    """
    try:
        with open(file_name, 'wb') as fp:
            for voxel in voxel_grid:
                # "<I" = little-endian unsigned 32-bit integer.
                fp.write(struct.pack("<I", voxel))
    except IOError as e:
        print("An error occurred: {}".format(e.args[-1]))
        # Bare raise preserves the original traceback (was `raise e`); the
        # redundant `except Exception: raise` clause was removed as a no-op.
        raise
java
/**
 * Returns a Mode in which the given boolean key is set to {@code true}.
 *
 * @param key the boolean key to enable
 * @return the resulting Mode
 */
@NotNull
public Mode with(@NotNull Key<Boolean> key) {
    return new Mode(Map(this.defs, key, true));
}
java
/**
 * Adds the given gateways to this result (varargs builder variant).
 * Creates the backing list on first use and appends on every call, so
 * repeated calls accumulate rather than replace.
 *
 * @param directConnectGateways the gateways to add
 * @return this result object, for method chaining
 */
public DescribeDirectConnectGatewaysResult withDirectConnectGateways(DirectConnectGateway... directConnectGateways) {
    if (this.directConnectGateways == null) {
        // Presize the list to the incoming array's length.
        setDirectConnectGateways(new com.amazonaws.internal.SdkInternalList<DirectConnectGateway>(directConnectGateways.length));
    }
    for (DirectConnectGateway ele : directConnectGateways) {
        this.directConnectGateways.add(ele);
    }
    return this;
}
python
def send_report(self, report_parts):
    """ Publish by sending the report by e-mail

    Builds a multipart MIME message from the report parts, optionally
    attaches gzipped raw logs, optionally GPG-encrypts/signs the whole
    message, then mails it via the configured SMTP server.
    """
    logger.info('Creating an email message')
    # Keep only parts whose format is configured, ordered by that config.
    report_parts = sorted(
        filter(lambda x: x.fmt in self.formats, report_parts),
        key=lambda x: self.formats.index(x.fmt)
    )
    # Attachment naming: include an index only when there are several parts.
    fmtname = '{0}-{1}.{2}' if len(report_parts) > 1 else '{0}.{2}'
    root_part = MIMEMultipart('mixed')
    root_part.preamble = 'This is a multi-part message in MIME format.'

    logger.debug('Creating the text/"text_type" parts')
    for i, text_part in enumerate(report_parts):
        attachment_name = fmtname.format(socket.gethostname(), i, text_part.ext)
        attach_part = MIMEText(text_part.text, text_part.ext, 'utf-8')
        attach_part.add_header('Content-Disposition', 'attachment',
                               filename=attachment_name)
        root_part.attach(attach_part)

    # Optionally attach the gzipped raw logs, if under the size limit.
    if self.rawlogs:
        out = BytesIO()
        do_chunked_gzip(self.rawfh, out, filename=u'raw.log.gz')
        out.seek(0, os.SEEK_END)
        size = out.tell()
        if size > self.rawlogs_limit:
            logger.warning('%d is over the defined max of %r', size, self.rawlogs_limit)
            logger.warning('Not attaching the raw logs')
        else:
            logger.debug('Creating the application/x-gzip part')
            attach_part = MIMEBase('application', 'x-gzip')
            attach_part.set_payload(out.getvalue())
            from email.encoders import encode_base64
            logger.debug('Encoding the gzipped raw logs with base64')
            encode_base64(attach_part)
            attach_part.add_header('Content-Disposition', 'attachment',
                                   filename='raw.log.gz')
            root_part.attach(attach_part)

    # Optionally wrap the message in an OpenPGP/MIME encrypted envelope.
    if self.gpg_encrypt:
        import gpgme
        # NOTE(review): the `except ImportError` below appears unreachable
        # because `import gpgme` precedes the try block — confirm intent.
        try:
            if self.gpg_keyringdir and os.path.exists(self.gpg_keyringdir):
                logger.debug('Setting keyring dir to %r', self.gpg_keyringdir)
                os.environ['GNUPGHOME'] = self.gpg_keyringdir
            cleartext = BytesIO(root_part.as_string().encode())
            ciphertext = BytesIO()
            ctx = gpgme.Context()
            ctx.armor = True
            if self.gpg_recipients:
                recipients = [ctx.get_key(recipient)
                              for recipient in self.gpg_recipients]
            else:
                # No explicit recipients: encrypt to every key in the
                # keyring that has an encryption-capable subkey.
                recipients = []
                for key in ctx.keylist():
                    for subkey in key.subkeys:
                        if subkey.can_encrypt:
                            logger.debug('Found can_encrypt key = %d', subkey.keyid)
                            recipients.append(key)
                            break
            signers = [ctx.get_key(signer) for signer in self.gpg_signers]
            if signers:
                logger.info('Encrypting and signing the report')
                ctx.signers = signers
                ctx.encrypt_sign(recipients, gpgme.ENCRYPT_ALWAYS_TRUST,
                                 cleartext, ciphertext)
            else:
                logger.info('Encrypting the report')
                ctx.encrypt(recipients, gpgme.ENCRYPT_ALWAYS_TRUST,
                            cleartext, ciphertext)
            logger.debug('Creating the MIME envelope for PGP')
            gpg_envelope_part = MIMEMultipart('encrypted')
            gpg_envelope_part.set_param('protocol', 'application/pgp-encrypted',
                                        header='Content-Type')
            gpg_envelope_part.preamble = 'This is an OpenPGP/MIME encrypted message (RFC 2440 and 3156)'
            gpg_mime_version_part = MIMEBase('application', 'pgp-encrypted')
            gpg_mime_version_part.add_header('Content-Disposition',
                                             'PGP/MIME version identification')
            gpg_mime_version_part.set_payload('Version: 1')
            gpg_payload_part = MIMEBase('application', 'octet-stream',
                                        name='encrypted.asc')
            gpg_payload_part.add_header('Content-Disposition',
                                        'OpenPGP encrypted message')
            gpg_payload_part.add_header('Content-Disposition', 'inline',
                                        filename='encrypted.asc')
            gpg_payload_part.set_payload(ciphertext.getvalue())
            gpg_envelope_part.attach(gpg_mime_version_part)
            gpg_envelope_part.attach(gpg_payload_part)
            # envelope becomes the new root part
            root_part = gpg_envelope_part
        except ImportError:
            logger.error('Need crypto libraries for gpg_encrypt.')
            logger.error('Install pygpgme for GPG encryption support.')
            logger.error('Not mailing the report out of caution.')
            return

    # Define headers
    root_part['Date'] = formatdate()
    root_part['From'] = self.email_address
    root_part['To'] = ', '.join(self.mailto)
    root_part['Subject'] = '{0} system events: {1}'.format(
        socket.gethostname(), time.strftime('%c', time.localtime())
    )
    root_part['Message-Id'] = make_msgid()
    root_part['X-Mailer'] = u'{0}-{1}'.format(package_name, __version__)
    mail_message(self.smtp_server, root_part.as_string(),
                 self.email_address, self.mailto)
    print('Mailed the report to: {0}'.format(','.join(self.mailto)))
python
def userinfo(access_token, scope_request=None, claims_request=None):
    """
    Returns data required for an OpenID Connect UserInfo response, according to:
    http://openid.net/specs/openid-connect-basic-1_0.html#UserInfoResponse

    Supports scope and claims request parameter as described in:
    - http://openid.net/specs/openid-connect-core-1_0.html#ScopeClaims
    - http://openid.net/specs/openid-connect-core-1_0.html#ClaimsParameter

    Arguments:
      access_token (:class:`AccessToken`): Associated access token.
      scope_request (list): Optional list of requested scopes. Only scopes
          authorized in the `access_token` will be considered.
      claims_request (dict): Optional dictionary with a claims request
          parameter.

    As a convenience, if neither `scope_request` nor a userinfo claim is
    specified in the `claims_request`, it will return the claims for all
    the scopes in the `access_token`.

    Returns an :class:`IDToken` instance with the scopes from the
    `scope_request` and the corresponding claims. Claims in the
    `claims_request` parameter userinfo section will be included
    *in addition* to the ones corresponding to `scope_request`.
    """
    handlers = HANDLERS['userinfo']

    # Select only the relevant section of the claims request.
    claims_request_section = claims_request.get('userinfo', {}) if claims_request else {}

    # If nothing is requested, return the claims for the scopes in the
    # access token. (The no-op `else: scope_request = scope_request`
    # branch of the original was removed.)
    if not scope_request and not claims_request_section:
        scope_request = provider.scope.to_names(access_token.scope)

    scopes, claims = collect(
        handlers,
        access_token,
        scope_request=scope_request,
        claims_request=claims_request_section,
    )

    return IDToken(access_token, scopes, claims)
java
/**
 * Computes the distance between two dot-separated node paths: the number
 * of path fragments that are NOT part of their common prefix.
 *
 * @param nodePath1 the first dot-separated path
 * @param nodePath2 the second dot-separated path
 * @return the combined count of non-shared fragments in both paths
 */
public int calculateNodePathDistance(String nodePath1, String nodePath2) {
    final String[] parts1 = nodePath1.split("\\.");
    final String[] parts2 = nodePath2.split("\\.");
    // Count the length of the shared leading fragment sequence.
    final int limit = Math.min(parts1.length, parts2.length);
    int common = 0;
    while (common < limit && parts1[common].equals(parts2[common])) {
        common++;
    }
    return parts1.length + parts2.length - 2 * common;
}
java
/**
 * Dispatches an incoming event to the handler matching its concrete type;
 * unknown event types are logged as errors and otherwise ignored.
 *
 * @param event the event to dispatch
 */
public void processEvent(EventObject event) {
    if (event instanceof RequestEvent) {
        processRequest((RequestEvent) event);
        return;
    }
    if (event instanceof ResponseEvent) {
        processResponse((ResponseEvent) event);
        return;
    }
    if (event instanceof TimeoutEvent) {
        processTimeout((TimeoutEvent) event);
        return;
    }
    LOG.error("invalid event type received: {}: {}", event.getClass().getName(), event.toString());
}
java
/**
 * Adds the package summary index contents to the given body, using the
 * localized member-table summary text.
 *
 * @param body the content tree to add the index to
 */
protected void addIndex(Content body) {
    addIndexContents(packages, "doclet.Package_Summary",
        configuration.getText("doclet.Member_Table_Summary",
            configuration.getText("doclet.Package_Summary"),
            configuration.getText("doclet.packages")), body);
}
python
def AuxPlane(s1, d1, r1):
    """
    Get Strike and dip of second plane.

    Adapted from MATLAB script
    `bb.m <http://www.ceri.memphis.edu/people/olboyd/Software/Software.html>`_
    written by Andy Michael and Oliver Boyd.

    :param s1: strike of the first plane (degrees)
    :param d1: dip of the first plane (degrees)
    :param r1: rake of the first plane (degrees)
    :returns: (strike, dip, rake) of the auxiliary plane, in degrees
    """
    r2d = 180 / np.pi
    # Convert strike (shifted by 90), dip and rake to radians.
    z = (s1 + 90) / r2d
    z2 = d1 / r2d
    z3 = r1 / r2d
    # slick vector in plane 1
    sl1 = -np.cos(z3) * np.cos(z) - np.sin(z3) * np.sin(z) * np.cos(z2)
    sl2 = np.cos(z3) * np.sin(z) - np.sin(z3) * np.cos(z) * np.cos(z2)
    sl3 = np.sin(z3) * np.sin(z2)
    (strike, dip) = StrikeDip(sl2, sl1, sl3)
    n1 = np.sin(z) * np.sin(z2)  # normal vector to plane 1
    n2 = np.cos(z) * np.sin(z2)
    h1 = -sl2  # strike vector of plane 2
    h2 = sl1
    # note h3=0 always so we leave it out
    # n3 = np.cos(z2)
    # Rake of plane 2: angle between its strike vector and plane 1's normal.
    z = h1 * n1 + h2 * n2
    z = z / np.sqrt(h1 * h1 + h2 * h2)
    z = np.arccos(z)
    rake = 0
    # The sign of the rake follows the sign of the slip's vertical component.
    if sl3 > 0:
        rake = z * r2d
    if sl3 <= 0:
        rake = -z * r2d
    return (strike, dip, rake)
python
def parse_zone(zonefile=None, zone=None):
    '''
    Parses a zone file. Can be passed raw zone data on the API level.

    CLI Example:

    .. code-block:: bash

        salt ns1 dnsutil.parse_zone /var/lib/named/example.com.zone
    '''
    if zonefile:
        # Best effort: on any read failure fall through to the error below.
        try:
            with salt.utils.files.fopen(zonefile, 'r') as fp_:
                zone = salt.utils.stringutils.to_unicode(fp_.read())
        except Exception:
            pass

    if not zone:
        return 'Error: Zone data was not found'

    zonedict = {}
    mode = 'single'
    for line in zone.splitlines():
        # Strip comments (everything after ';') and blank lines.
        comps = line.split(';')
        line = comps[0].strip()
        if not line:
            continue
        comps = line.split()
        # Directives such as $ORIGIN and $TTL.
        if line.startswith('$'):
            zonedict[comps[0].replace('$', '')] = comps[1]
            continue
        # Accumulate parenthesized multi-line records into one logical line.
        if '(' in line and ')' not in line:
            mode = 'multi'
            multi = ''
        if mode == 'multi':
            multi += ' {0}'.format(line)
            if ')' in line:
                mode = 'single'
                line = multi.replace('(', '').replace(')', '')
            else:
                continue
        # Expand '@' with the zone origin when known.
        if 'ORIGIN' in zonedict:
            comps = line.replace('@', zonedict['ORIGIN']).split()
        else:
            comps = line.split()
        if 'SOA' in line:
            if comps[1] != 'IN':
                comps.pop(1)
            zonedict['ORIGIN'] = comps[0]
            zonedict['NETWORK'] = comps[1]
            zonedict['SOURCE'] = comps[3]
            zonedict['CONTACT'] = comps[4].replace('.', '@', 1)
            zonedict['SERIAL'] = comps[5]
            zonedict['REFRESH'] = _to_seconds(comps[6])
            zonedict['RETRY'] = _to_seconds(comps[7])
            zonedict['EXPIRE'] = _to_seconds(comps[8])
            zonedict['MINTTL'] = _to_seconds(comps[9])
            continue
        # Records starting with the class implicitly use the origin name.
        if comps[0] == 'IN':
            comps.insert(0, zonedict['ORIGIN'])
        if not comps[0].endswith('.') and 'NS' not in line:
            comps[0] = '{0}.{1}'.format(comps[0], zonedict['ORIGIN'])
        if comps[2] == 'NS':
            zonedict.setdefault('NS', []).append(comps[3])
        elif comps[2] == 'MX':
            # BUG FIX: this append was previously guarded by
            # `if 'MX' not in zonedict:`, which silently dropped every MX
            # record after the first one.
            zonedict.setdefault('MX', []).append({'priority': comps[3],
                                                  'host': comps[4]})
        elif comps[3] in ('A', 'AAAA'):
            zonedict.setdefault(comps[3], {})[comps[0]] = {
                'TARGET': comps[4],
                'TTL': comps[1],
            }
        else:
            zonedict.setdefault(comps[2], {})[comps[0]] = comps[3]
    return zonedict
python
def __initialize_menu_bar(self):
    """
    Initializes Component menu_bar.

    Builds the "File", "Edit", "Source", "Navigate", "Search", "Command"
    and "View" menus, registering every action with the engine's actions
    manager so shortcuts and slots are wired centrally.
    """

    # "File" menu: file lifecycle (new / load / save / close) plus projects
    # and the recent-files list.
    self.__file_menu = QMenu("&File", parent=self.__menu_bar)
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|&New",
        shortcut=QKeySequence.New,
        slot=self.__new_file_action__triggered))
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|&Load ...",
        shortcut=QKeySequence.Open,
        slot=self.__load_file_action__triggered))
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|Source ...",
        slot=self.__source_file_action__triggered))
    self.__file_menu.addSeparator()
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|Add Project ...",
        slot=self.__add_project_action__triggered))
    self.__file_menu.addSeparator()
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|&Save",
        shortcut=QKeySequence.Save,
        slot=self.__save_file_action__triggered))
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|Save As ...",
        shortcut=QKeySequence.SaveAs,
        slot=self.__save_file_as_action__triggered))
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|Save All",
        slot=self.__save_all_files_action__triggered))
    self.__file_menu.addSeparator()
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|Revert",
        slot=self.__revert_file_action__triggered))
    self.__file_menu.addSeparator()
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|Close ...",
        shortcut=QKeySequence.Close,
        slot=self.__close_file_action__triggered))
    self.__file_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&File|Close All ...",
        shortcut=Qt.SHIFT + Qt.ControlModifier + Qt.Key_W,
        slot=self.__close_all_files_action__triggered))
    self.__file_menu.addSeparator()
    # Recent files entries are pre-created actions; populate them afterwards.
    for action in self.__recent_files_actions:
        self.__file_menu.addAction(action)
    self.__set_recent_files_actions()
    self.__menu_bar.addMenu(self.__file_menu)

    # "Edit" menu: standard clipboard / undo operations.
    self.__edit_menu = QMenu("&Edit", parent=self.__menu_bar)
    self.__edit_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Edit|&Undo",
        shortcut=QKeySequence.Undo,
        slot=self.__undo_action__triggered))
    self.__edit_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Edit|&Redo",
        shortcut=QKeySequence.Redo,
        slot=self.__redo_action__triggered))
    self.__edit_menu.addSeparator()
    self.__edit_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Edit|Cu&t",
        shortcut=QKeySequence.Cut,
        slot=self.__cut_action__triggered))
    self.__edit_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Edit|&Copy",
        shortcut=QKeySequence.Copy,
        slot=self.__copy_action__triggered))
    self.__edit_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Edit|&Paste",
        shortcut=QKeySequence.Paste,
        slot=self.__paste_action__triggered))
    self.__edit_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Edit|Delete",
        slot=self.__delete_action__triggered))
    self.__edit_menu.addSeparator()
    self.__edit_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Edit|Select All",
        shortcut=QKeySequence.SelectAll,
        slot=self.__select_all_action__triggered))
    self.__menu_bar.addMenu(self.__edit_menu)

    # "Source" menu: line manipulation, indentation and comments.
    self.__source_menu = QMenu("&Source", parent=self.__menu_bar)
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Delete Line(s)",
        shortcut=Qt.ControlModifier + Qt.Key_D,
        slot=self.__delete_lines_action__triggered))
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Duplicate Line(s)",
        shortcut=Qt.SHIFT + Qt.ControlModifier + Qt.Key_D,
        slot=self.__duplicate_lines_action__triggered))
    self.__source_menu.addSeparator()
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Move Up",
        shortcut=Qt.SHIFT + Qt.ControlModifier + Qt.ALT + Qt.Key_Up,
        slot=self.__move_up_action__triggered))
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Move Down",
        shortcut=Qt.SHIFT + Qt.ControlModifier + Qt.ALT + Qt.Key_Down,
        slot=self.__move_down_action__triggered))
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Indent Selection",
        shortcut=Qt.Key_Tab,
        slot=self.__indent_selection_action__triggered))
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Unindent Selection",
        shortcut=Qt.Key_Backtab,
        slot=self.__unindent_selection_action__triggered))
    self.__source_menu.addSeparator()
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Convert Indentation To Tabs",
        slot=self.__convert_indentation_to_tabs_action__triggered))
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Convert Indentation To Spaces",
        slot=self.__convert_indentation_to_spaces_action__triggered))
    self.__source_menu.addSeparator()
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Remove Trailing WhiteSpaces",
        slot=self.__remove_trailing_white_spaces_action__triggered))
    self.__source_menu.addSeparator()
    self.__source_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Source|Toggle Comments",
        shortcut=Qt.ControlModifier + Qt.Key_Slash,
        slot=self.__toggle_comments_action__triggered))
    self.__menu_bar.addMenu(self.__source_menu)

    # "Navigate" menu.
    self.__navigate_menu = QMenu("&Navigate", parent=self.__menu_bar)
    self.__navigate_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Navigate|Goto Line ...",
        shortcut=Qt.ControlModifier + Qt.Key_L,
        slot=self.__go_to_line_action__triggered))
    self.__navigate_menu.addSeparator()
    self.__menu_bar.addMenu(self.__navigate_menu)

    # "Search" menu: in-editor and cross-file search.
    self.__search_menu = QMenu("&Search", parent=self.__menu_bar)
    self.__search_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Search|Search And Replace ...",
        shortcut=Qt.ControlModifier + Qt.Key_F,
        slot=self.__search_and_replace_action__triggered))
    self.__search_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Search|Search In Files ...",
        shortcut=Qt.ALT + Qt.ControlModifier + Qt.Key_F,
        slot=self.__search_in_files_action__triggered))
    self.__search_menu.addSeparator()
    self.__search_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Search|Search Next",
        shortcut=Qt.ControlModifier + Qt.Key_K,
        slot=self.__search_next_action__triggered))
    self.__search_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Search|Search Previous",
        shortcut=Qt.SHIFT + Qt.ControlModifier + Qt.Key_K,
        slot=self.__search_previous_action__triggered))
    self.__menu_bar.addMenu(self.__search_menu)

    # "Command" menu: evaluate the selection or the whole script.
    self.__command_menu = QMenu("&Command", parent=self.__menu_bar)
    self.__command_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Command|&Evaluate Selection",
        shortcut=Qt.ControlModifier + Qt.Key_Return,
        slot=self.__evaluate_selection_action__triggered))
    self.__command_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&Command|Evaluate &Script",
        shortcut=Qt.SHIFT + Qt.CTRL + Qt.Key_Return,
        slot=self.__evaluate_script_action__triggered))
    self.__menu_bar.addMenu(self.__command_menu)

    # "View" menu: font size, wrapping and editor cycling.
    self.__view_menu = QMenu("&View", parent=self.__menu_bar)
    self.__view_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&View|Increase Font Size",
        shortcut=Qt.ControlModifier + Qt.Key_Plus,
        slot=self.__increase_font_size_action__triggered))
    self.__view_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&View|Decrease Font Size",
        shortcut=Qt.ControlModifier + Qt.Key_Minus,
        slot=self.__decrease_font_size_action__triggered))
    self.__view_menu.addSeparator()
    self.__view_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&View|Toggle Word Wrap",
        slot=self.__toggle_word_wrap_action__triggered))
    self.__view_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&View|Toggle White Spaces",
        slot=self.__toggle_white_spaces_action__triggered))
    self.__view_menu.addSeparator()
    self.__view_menu.addAction(self.__engine.actions_manager.register_action(
        "Actions|Umbra|Components|factory.script_editor|&View|Loop Through Editors",
        shortcut=Qt.AltModifier + Qt.SHIFT + Qt.Key_Tab,
        slot=self.__loop_through_editors_action__triggered))
    self.__menu_bar.addMenu(self.__view_menu)
python
def _read_syncmap_file(self, path, extension, text=False):
    """
    Read labels from a SyncMap file.

    :param path: path of the sync map file to read
    :param extension: sync map format/extension to parse
    :param bool text: when ``True`` the third tuple element is the joined
        fragment text, otherwise the fragment identifier
    :return: list of ``(begin, end, label)`` tuples, one per fragment
    """
    sync_map = SyncMap(logger=self.logger)
    sync_map.read(extension, path, parameters=None)
    labels = []
    for fragment in sync_map.fragments:
        if text:
            label = u" ".join(fragment.text_fragment.lines)
        else:
            label = fragment.text_fragment.identifier
        labels.append((fragment.begin, fragment.end, label))
    return labels
java
/**
 * Checks whether {@code target} starts (at the given offset) with any of the
 * supplied prefixes.
 *
 * @param target the string to inspect; a {@code null} target never matches
 * @param toffset offset within {@code target} where matching begins
 * @param startWith candidate prefixes
 * @return {@code true} if any prefix matches, {@code false} otherwise
 */
public static boolean startAny(String target, Integer toffset, List<String> startWith) {
    // Null target short-circuits to false; otherwise delegate to the matcher.
    return !isNull(target) && matcher(target).starts(toffset, startWith);
}
java
private void removeNodeFromList(RepairQueueNode el) { // the head case // if (null == el.prev) { if (null != el.next) { this.head = el.next; this.head.prev = null; el=null; } else { // can't happen? yep. if there is only one element exists... this.head = null; } } // the tail case // else if (null == el.next) { if (null != el.prev) { el.prev.next = null; } else { // can't happen? throw new RuntimeException("Unrecognized situation here..."); } } // all others // else { el.prev.next = el.next; el.next.prev = el.prev; } }
python
def convert_coco_stuff_mat(data_dir, out_dir):
    """Convert to png and save json with path. This currently only contains
    the segmentation labels for objects+stuff in cocostuff - if we need to
    combine with other labels from original COCO that will be a TODO.

    :param data_dir: directory containing ``train.txt`` / ``val.txt`` and an
        ``annotations/`` folder of per-image ``.mat`` files
    :param out_dir: directory where the ``coco_stuff_<set>.json`` files are written
    """
    sets = ['train', 'val']
    categories = []
    json_name = 'coco_stuff_%s.json'
    ann_dict = {}
    for data_set in sets:
        file_list = os.path.join(data_dir, '%s.txt')
        images = []
        with open(file_list % data_set) as f:
            for img_id, img_name in enumerate(f):
                img_name = img_name.replace('coco', 'COCO').strip('\n')
                image = {}
                mat_file = os.path.join(
                    data_dir, 'annotations/%s.mat' % img_name)
                data = h5py.File(mat_file, 'r')
                labelMap = data.get('S')
                # Build the category list once, from the 'names' table of the
                # first annotation file encountered.
                if len(categories) == 0:
                    labelNames = data.get('names')
                    for idx, n in enumerate(labelNames):
                        categories.append(
                            {"id": idx,
                             "name": ''.join(chr(i) for i in data[n[0]])})
                    ann_dict['categories'] = categories
                scipy.misc.imsave(
                    os.path.join(data_dir, img_name + '.png'), labelMap)
                # NOTE(review): .mat arrays read through h5py come out
                # transposed relative to MATLAB's layout, so shape[0]/shape[1]
                # here may indeed be width/height -- confirm against the data
                # before swapping.
                image['width'] = labelMap.shape[0]
                image['height'] = labelMap.shape[1]
                image['file_name'] = img_name
                image['seg_file_name'] = img_name
                image['id'] = img_id
                images.append(image)
        ann_dict['images'] = images
        print("Num images: %s" % len(images))
        # Bug fix: json.dumps() returns str, so the output file must be opened
        # in text mode; mode 'wb' raises TypeError on Python 3.
        with open(os.path.join(out_dir, json_name % data_set), 'w') as outfile:
            outfile.write(json.dumps(ann_dict))
java
/**
 * GWT entry point: registers the rasterizing toolbar tools (legend image and
 * map image) with the global {@code ToolbarRegistry} so that maps can
 * instantiate them by id.
 */
public void onModuleLoad() {
    // Tool that fetches the legend image for the current map.
    ToolbarRegistry.put(RasterizingToolId.GET_LEGEND_IMAGE, new ToolCreator() {

        public ToolbarBaseAction createTool(MapWidget mapWidget) {
            return new GetLegendImageAction(mapWidget);
        }
    });
    // Tool that fetches a rasterized image of the current map view.
    ToolbarRegistry.put(RasterizingToolId.GET_MAP_IMAGE, new ToolCreator() {

        public ToolbarBaseAction createTool(MapWidget mapWidget) {
            return new GetMapImageAction(mapWidget);
        }
    });
}
python
def send_file(self, local_path, remote_path, unix_mode=None):
    """Send a file to the remote host.

    :param local_path: the local path of the file
    :type local_path: str
    :param remote_path: the remote path of the file
    :type remote_path: str
    :param unix_mode: optional permission bits to apply to the remote file
        (e.g. ``0o755``)
    :type unix_mode: int
    :return: the file attributes
    :rtype: paramiko.sftp_attr.SFTPAttributes
    """
    self._check_started()
    sftp = paramiko.SFTPClient.from_transport(self._transport)
    try:
        # Bug fix: put() returns the SFTPAttributes of the uploaded file;
        # the docstring promised them but the previous code returned None.
        attributes = sftp.put(local_path, remote_path)
        if unix_mode:
            sftp.chmod(remote_path, unix_mode)
    finally:
        # Bug fix: always release the SFTP channel, even if the transfer
        # or chmod fails (previously it was leaked).
        sftp.close()
    return attributes
java
/**
 * Reacts to a properties reload: detects which placeholders changed, resolves
 * the affected dynamic properties, notifies the owning beans before and after
 * reconfiguration, and pushes the re-parsed values into the beans.
 *
 * @param event the reload event (not inspected directly; the merged
 *              properties are re-read instead)
 */
public void propertiesReloaded(PropertiesReloadedEvent event) {
    Properties oldProperties = lastMergedProperties;
    try {
        //
        Properties newProperties = mergeProperties();
        //
        // Determine which dynamic properties are affected by the reload.
        //
        Set<String> placeholders = placeholderToDynamics.keySet();
        Set<DynamicProperty> allDynamics = new HashSet<DynamicProperty>();
        for (String placeholder : placeholders) {
            String newValue = newProperties.getProperty(placeholder);
            String oldValue = oldProperties.getProperty(placeholder);
            // Changed value, newly added value, or removed value all count.
            if (newValue != null && !newValue.equals(oldValue) || newValue == null && oldValue != null) {
                if (logger.isInfoEnabled()) {
                    logger.info("Property changed detected: " + placeholder
                            + (newValue != null ? "=" + newValue : " removed"));
                }
                List<DynamicProperty> affectedDynamics = placeholderToDynamics.get(placeholder);
                allDynamics.addAll(affectedDynamics);
            }
        }
        //
        // Collect the beans affected by those dynamic properties.
        //
        Map<String, List<DynamicProperty>> dynamicsByBeanName = new HashMap<String, List<DynamicProperty>>();
        Map<String, Object> beanByBeanName = new HashMap<String, Object>();
        for (DynamicProperty dynamic : allDynamics) {
            String beanName = dynamic.getBeanName();
            List<DynamicProperty> l = dynamicsByBeanName.get(beanName);
            if (l == null) {
                dynamicsByBeanName.put(beanName, (l = new ArrayList<DynamicProperty>()));
                Object bean = null;
                try {
                    bean = applicationContext.getBean(beanName);
                    beanByBeanName.put(beanName, bean);
                } catch (BeansException e) {
                    // keep dynamicsByBeanName list, warn only once.
                    logger.error("Error obtaining bean " + beanName, e);
                }
                //
                // Notify the bean that reconfiguration is about to start.
                //
                try {
                    if (bean instanceof IReconfigurationAware) {
                        ((IReconfigurationAware) bean).beforeReconfiguration(); // hello!
                    }
                } catch (Exception e) {
                    logger.error("Error calling beforeReconfiguration on " + beanName, e);
                }
            }
            l.add(dynamic);
        }
        //
        // Process the affected beans: re-parse and assign each property.
        //
        Collection<String> beanNames = dynamicsByBeanName.keySet();
        for (String beanName : beanNames) {
            Object bean = beanByBeanName.get(beanName);
            if (bean == null) // problems obtaining bean, earlier
            {
                continue;
            }
            BeanWrapper beanWrapper = new BeanWrapperImpl(bean);
            // for all affected ...
            List<DynamicProperty> dynamics = dynamicsByBeanName.get(beanName);
            for (DynamicProperty dynamic : dynamics) {
                String propertyName = dynamic.getPropertyName();
                String unparsedValue = dynamic.getUnparsedValue();
                // obtain an updated value, including dependencies
                String newValue;
                // The dynamic is re-registered during parsing, so drop the old
                // registration first; current bean/property are tracked so the
                // parser can associate new placeholders with this property.
                removeDynamic(dynamic);
                currentBeanName = beanName;
                currentPropertyName = propertyName;
                try {
                    newValue = parseStringValue(unparsedValue, newProperties, new HashSet());
                } finally {
                    currentBeanName = null;
                    currentPropertyName = null;
                }
                if (logger.isInfoEnabled()) {
                    logger.info("Updating property " + beanName + "." + propertyName + " to " + newValue);
                }
                // assign it to the bean
                try {
                    beanWrapper.setPropertyValue(propertyName, newValue);
                } catch (BeansException e) {
                    logger.error("Error setting property " + beanName + "." + propertyName + " to " + newValue,
                            e);
                }
            }
        }
        //
        // Notify each bean that reconfiguration has finished.
        //
        for (String beanName : beanNames) {
            Object bean = beanByBeanName.get(beanName);
            try {
                if (bean instanceof IReconfigurationAware) {
                    ((IReconfigurationAware) bean).afterReconfiguration();
                }
            } catch (Exception e) {
                logger.error("Error calling afterReconfiguration on " + beanName, e);
            }
        }
    } catch (IOException e) {
        logger.error("Error trying to reload net.unicon.iamlabs.spring.properties.example.net.unicon.iamlabs"
                + ".spring" + ".properties: " + e.getMessage(), e);
    }
}
java
/**
 * Assigns asym ids to all chains, restarting the sequence at "A" for each
 * model index. Within a model, polymer chains are labelled first, then
 * non-polymer chains, then waters.
 *
 * @param polys    per-model polymer chains
 * @param nonPolys per-model non-polymer chains
 * @param waters   per-model water chains
 */
private void assignAsymIds(List<List<Chain>> polys, List<List<Chain>> nonPolys, List<List<Chain>> waters) {

    for (int modelIdx = 0; modelIdx < polys.size(); modelIdx++) {
        String nextId = "A";

        for (Chain chain : polys.get(modelIdx)) {
            chain.setId(nextId);
            nextId = getNextAsymId(nextId);
        }
        for (Chain chain : nonPolys.get(modelIdx)) {
            chain.setId(nextId);
            nextId = getNextAsymId(nextId);
        }
        for (Chain chain : waters.get(modelIdx)) {
            chain.setId(nextId);
            nextId = getNextAsymId(nextId);
        }
    }
}
java
/**
 * Returns the message resources gateway registered with the holder.
 *
 * @return the gateway, never {@code null}
 * @throws IllegalStateException if no gateway has been registered
 */
@Override
public MessageResourcesGateway getMessageResourceGateway() {
    final MessageResourcesGateway gateway = messageResourcesHolder.getGateway();
    if (gateway != null) {
        return gateway;
    }
    // Fail fast with context about which holder was consulted.
    String msg = "Not found the gateway for message resource: holder=" + messageResourcesHolder;
    throw new IllegalStateException(msg);
}
python
def get_families(data_dir=None):
    '''Return a list of all basis set families

    :param data_dir: optional data directory; normalized via fix_data_dir
    :return: sorted list of unique family names found in the metadata
    '''
    data_dir = fix_data_dir(data_dir)
    metadata = get_metadata(data_dir)
    # Collect unique family names via a set comprehension, then sort.
    return sorted({entry['family'] for entry in metadata.values()})
java
/**
 * Bayes decision criterion: picks the option whose probability-weighted
 * expected payoff (over all events) is maximal.
 *
 * @param payoffMatrix       rectangular event-by-option payoff table
 * @param eventProbabilities probability of each event
 * @return the (option, expected payoff) entry with the maximum expected payoff
 * @throws IllegalArgumentException if the payoff matrix is not rectangular
 */
public static Map.Entry<Object, Object> bayes(DataTable2D payoffMatrix, AssociativeArray eventProbabilities) {
    if (!payoffMatrix.isValid()) {
        throw new IllegalArgumentException("The payoff matrix does not have a rectangular format.");
    }

    // Accumulate, per option, the sum of payoff * P(event).
    AssociativeArray expectedPayoffs = new AssociativeArray();
    for (Map.Entry<Object, AssociativeArray> eventEntry : payoffMatrix.entrySet()) {
        Object event = eventEntry.getKey();
        for (Map.Entry<Object, Object> optionEntry : eventEntry.getValue().entrySet()) {
            Object option = optionEntry.getKey();
            Double payoff = TypeInference.toDouble(optionEntry.getValue());

            Double runningTotal = expectedPayoffs.getDouble(option);
            if (runningTotal == null) {
                runningTotal = 0.0;
            }
            expectedPayoffs.put(option, runningTotal + payoff * eventProbabilities.getDouble(event));
        }
    }

    // The winner is the option with the highest expected payoff.
    return MapMethods.selectMaxKeyValue(expectedPayoffs);
}
java
/**
 * Decides whether an element should be shown in the current template context.
 *
 * @param elementData the element to check
 * @return {@code true} if no context is active, or the element's type and
 *         settings both allow the current context
 */
public boolean shouldShowInContext(CmsContainerElementData elementData) {

    CmsTemplateContextInfo contextInfo = getData().getTemplateContextInfo();
    String currentContext = contextInfo.getCurrentContext();
    // No active context means no restriction at all.
    if (currentContext == null) {
        return true;
    }
    // Type-level restriction: an explicit allow-list that excludes the
    // current context hides the element.
    CmsDefaultSet<String> allowedContexts = contextInfo.getAllowedContexts().get(elementData.getResourceType());
    if ((allowedContexts != null) && !allowedContexts.contains(currentContext)) {
        return false;
    }
    // Element-level restriction via the context setting (absent = visible).
    String settingValue = elementData.getSettings().get(CmsTemplateContextInfo.SETTING);
    return (settingValue == null) || settingValue.contains(currentContext);
}
java
/**
 * Clears stored preferences after the Branch key changed, while preserving
 * the deep-link related values so an in-flight deep link keeps working.
 */
private void clearPrefOnBranchKeyChange() {
    // If stored key isn't the same as the current key, we need to clean up
    // Note: Link Click Identifier is not cleared because of the potential for that to mess up a deep link
    // Snapshot the values that must survive the wipe.
    String linkClickID = getLinkClickID();
    String linkClickIdentifier = getLinkClickIdentifier();
    String appLink = getAppLink();
    String pushIdentifier = getPushIdentifier();
    prefsEditor_.clear();
    // Restore the preserved deep-link values after clearing everything else.
    setLinkClickID(linkClickID);
    setLinkClickIdentifier(linkClickIdentifier);
    setAppLink(appLink);
    setPushIdentifier(pushIdentifier);
    prefHelper_.prefsEditor_.apply();
}
python
def get_logger(name='', log_stream=None, log_file=None, quiet=False, verbose=False):
    """Convenience function for getting a logger.

    :param name: logger name
    :param log_stream: stream to log to; defaults to ``sys.stdout``
    :param log_file: optional file to log to
    :param quiet: log at WARNING level (takes precedence over ``verbose``)
    :param verbose: log at DEBUG level
    :return: the configured logger
    """
    # Map verbosity flags onto a level; quiet wins over verbose.
    if quiet:
        log_level = logging.WARNING
    elif verbose:
        log_level = logging.DEBUG
    else:
        log_level = logging.INFO

    stream = sys.stdout if log_stream is None else log_stream
    return configure_logger(name, log_stream=stream, log_file=log_file,
                            log_level=log_level)
python
def add_overlay_to_slice_file(
    self,
    filename,
    overlay,
    i_overlay,
    filename_out=None
):
    """Add an overlay to an existing DICOM file.

    :param filename: path of the input DICOM file (``~`` is expanded)
    :param overlay: overlay data to encode into the slice
    :param i_overlay: overlay index to write
    :param filename_out: output path; defaults to overwriting ``filename``
    """
    # Overwrite the source file when no explicit output path is given.
    if filename_out is None:
        filename_out = filename
    filename = op.expanduser(filename)
    dataset = dicom.read_file(filename)
    dataset = self.encode_overlay_slice(dataset, overlay, i_overlay)
    dataset.save_as(filename_out)
java
/**
 * Returns the Bugsnag clients registered with the default uncaught exception
 * handler, or an empty set when no Bugsnag handler is installed.
 *
 * @return an unmodifiable set of clients (possibly empty), never {@code null}
 */
public static Set<Bugsnag> uncaughtExceptionClients() {
    UncaughtExceptionHandler handler = Thread.getDefaultUncaughtExceptionHandler();
    // Only our own handler type carries registered clients.
    if (!(handler instanceof ExceptionHandler)) {
        return Collections.emptySet();
    }
    ExceptionHandler bugsnagHandler = (ExceptionHandler) handler;
    return Collections.unmodifiableSet(bugsnagHandler.uncaughtExceptionClients());
}
python
def policy_create(request, **kwargs):
    """Create a QoS Policy.

    :param request: request context
    :param name: name of the policy
    :param description: description of policy
    :param shared: boolean (true or false)
    :return: QoSPolicy object
    """
    # Neutron expects the policy attributes wrapped under a 'policy' key.
    response = neutronclient(request).create_qos_policy(body={'policy': kwargs})
    return QoSPolicy(response.get('policy'))
python
def closure(self):
    """
    Returns a new `Independencies()`-object that additionally contains those `IndependenceAssertions`
    that are implied by the the current independencies (using with the `semi-graphoid axioms
    <https://en.wikipedia.org/w/index.php?title=Conditional_independence&oldid=708760689#Rules_of_conditional_independence>`_;
    see (Pearl, 1989, `Conditional Independence and its representations
    <http://www.cs.technion.ac.il/~dang/journal_papers/pearl1989conditional.pdf>`_)).

    Might be very slow if more than six variables are involved.

    Examples
    --------
    >>> from pgmpy.independencies import Independencies
    >>> ind1 = Independencies(('A', ['B', 'C'], 'D'))
    >>> ind1.closure()
    (A _|_ B | D, C)
    (A _|_ B, C | D)
    (A _|_ B | D)
    (A _|_ C | D, B)
    (A _|_ C | D)

    >>> ind2 = Independencies(('W', ['X', 'Y', 'Z']))
    >>> ind2.closure()
    (W _|_ Y)
    (W _|_ Y | X)
    (W _|_ Z | Y)
    (W _|_ Z, X, Y)
    (W _|_ Z)
    (W _|_ Z, X)
    (W _|_ X, Y)
    (W _|_ Z | X)
    (W _|_ Z, Y | X)
    [..]
    """

    def single_var(var):
        "Checks if var represents a single variable"
        if not hasattr(var, '__iter__'):
            return True
        else:
            return len(var) == 1

    def sg0(ind):
        "Symmetry rule: 'X βŸ‚ Y | Z' -> 'Y βŸ‚ X | Z'"
        return IndependenceAssertion(ind.event2, ind.event1, ind.event3)

    # since XβŸ‚Y|Z == YβŸ‚X|Z in pgmpy, sg0 (symmetry) is not used as an axiom/rule.
    # instead we use a decorator for the other axioms to apply them on both sides
    def apply_left_and_right(func):
        # Wraps an axiom so it is applied to the assertion(s) and their
        # symmetric counterparts; results of all applications are concatenated.
        def symmetric_func(*args):
            if len(args) == 1:
                return func(args[0]) + func(sg0(args[0]))
            if len(args) == 2:
                return (func(*args) +
                        func(args[0], sg0(args[1])) +
                        func(sg0(args[0]), args[1]) +
                        func(sg0(args[0]), sg0(args[1])))
        return symmetric_func

    @apply_left_and_right
    def sg1(ind):
        "Decomposition rule: 'X βŸ‚ Y,W | Z' -> 'X βŸ‚ Y | Z', 'X βŸ‚ W | Z'"
        if single_var(ind.event2):
            return []
        else:
            return [IndependenceAssertion(ind.event1, ind.event2 - {elem}, ind.event3)
                    for elem in ind.event2]

    @apply_left_and_right
    def sg2(ind):
        "Weak Union rule: 'X βŸ‚ Y,W | Z' -> 'X βŸ‚ Y | W,Z', 'X βŸ‚ W | Y,Z' "
        if single_var(ind.event2):
            return []
        else:
            return [IndependenceAssertion(ind.event1, ind.event2 - {elem}, {elem} | ind.event3)
                    for elem in ind.event2]

    @apply_left_and_right
    def sg3(ind1, ind2):
        "Contraction rule: 'X βŸ‚ W | Y,Z' & 'X βŸ‚ Y | Z' -> 'X βŸ‚ W,Y | Z'"
        if ind1.event1 != ind2.event1:
            return []
        Y = ind2.event2
        Z = ind2.event3
        Y_Z = ind1.event3
        # Contraction applies only when ind1's conditioning set is exactly
        # Y union Z with Y and Z disjoint (checked via strict subset tests).
        if Y < Y_Z and Z < Y_Z and Y.isdisjoint(Z):
            return [IndependenceAssertion(ind1.event1, ind1.event2 | Y, Z)]
        else:
            return []

    # apply semi-graphoid axioms as long as new independencies are found.
    all_independencies = set()
    new_inds = set(self.independencies)

    while new_inds:
        # Contraction (sg3) is binary: pair each new assertion with every
        # other new one and with all previously found assertions.
        new_pairs = (set(itertools.permutations(new_inds, 2)) |
                     set(itertools.product(new_inds, all_independencies)) |
                     set(itertools.product(all_independencies, new_inds)))

        all_independencies |= new_inds
        new_inds = set(sum([sg1(ind) for ind in new_inds] +
                           [sg2(ind) for ind in new_inds] +
                           [sg3(*inds) for inds in new_pairs], []))
        # Only keep assertions not already known, so the loop terminates.
        new_inds -= all_independencies

    return Independencies(*list(all_independencies))
python
def eth_sendTransaction(self, from_, to=None, gas=None, gas_price=None,
                        value=None, data=None, nonce=None):
    """https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_sendtransaction

    :param from_: From account address
    :type from_: str
    :param to: To account address (optional)
    :type to: str
    :param gas: Gas amount for current transaction (optional)
    :type gas: int
    :param gas_price: Gas price for current transaction (optional)
    :type gas_price: int
    :param value: Amount of ether to send (optional)
    :type value: int
    :param data: Additional data for transaction (optional)
    :type data: hex
    :param nonce: Unique nonce for transaction (optional)
    :type nonce: int

    :return: txhash
    :rtype: str
    """
    # Build the JSON-RPC transaction object, hex-encoding numeric fields
    # and skipping anything the caller did not supply.
    transaction = {'from': from_}
    if to is not None:
        transaction['to'] = to
    if gas is not None:
        transaction['gas'] = hex(gas)
    if gas_price is not None:
        transaction['gasPrice'] = hex(gas_price)
    if value is not None:
        # The RPC interface expects the value in wei.
        transaction['value'] = hex(ether_to_wei(value))
    if data is not None:
        transaction['data'] = data
    if nonce is not None:
        transaction['nonce'] = hex(nonce)
    return (yield from self.rpc_call('eth_sendTransaction', [transaction]))
java
/**
 * Returns the class name with all {@code '$'} characters stripped (Scala
 * inserts them into synthesized class names, e.g. for companion objects).
 *
 * <p>Uses {@link String#replace(CharSequence, CharSequence)} instead of
 * {@code replaceAll}: the latter compiles its argument as a regular
 * expression on every call, which is slower and required escaping the
 * {@code '$'} metacharacter.</p>
 *
 * @param c the PSI class whose name to clean up
 * @return the name without any {@code '$'} characters
 */
private String getScalaClassName(PsiClass c) {
    String baseName = c.getName();
    return baseName.replace("$", "");
}
java
/**
 * Launches the system package installer for {@code mFile} via an implicit
 * ACTION_INSTALL_PACKAGE intent.
 */
final void install() {
    Intent intent = new Intent(Intent.ACTION_INSTALL_PACKAGE);
    // Started from a non-Activity context, so a new task is required.
    intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
    // Grant the installer read access to the (possibly FileProvider-backed) URI.
    intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
    Uri uri = AndPermission.getFileUri(mSource.getContext(), mFile);
    intent.setDataAndType(uri, "application/vnd.android.package-archive");
    mSource.startActivity(intent);
}
python
def _parse_non_negative_int(self, istr, name): """Parse integer from string (istr). The (name) parameter is used just for IIIFRequestError message generation to indicate what the error is in. """ if (istr == ''): return(None) try: i = int(istr) except ValueError: raise ValueError("Failed to extract integer value for %s" % (name)) if (i < 0): raise ValueError("Illegal negative value for %s" % (name)) return(i)
python
def delete(ctx, slot, force):
    """
    Deletes the configuration of a slot.
    """
    controller = ctx.obj['controller']
    # Refuse outright when the slot is already empty, unless --force is given
    # (slot is 1-based, slot_status is 0-based).
    if not force and not controller.slot_status[slot - 1]:
        ctx.fail('Not possible to delete an empty slot.')
    # Without --force, ask for interactive confirmation; click aborts the
    # command if the user answers no.
    force or click.confirm(
        'Do you really want to delete'
        ' the configuration of slot {}?'.format(slot),
        abort=True, err=True)
    click.echo('Deleting the configuration of slot {}...'.format(slot))
    try:
        controller.zap_slot(slot)
    except YkpersError as e:
        # Device refused the write: report a friendly failure message.
        _failed_to_write_msg(ctx, e)
python
def get_dataframe_row(self, dataset_cases, predicted_data, pdb_data, record_id, additional_prediction_data_columns):
    '''Create a dataframe row for a prediction.

    Combines the dataset record (mutations, DSSP/SCOP annotations), the
    prediction output and PDB metadata into a single flat dict suitable for
    a pandas dataframe; returns None for skipped (derived) records.
    '''
    # Ignore derived mutations if appropriate
    record = dataset_cases[record_id]
    if self.is_this_record_a_derived_mutation(record) and not self.include_derived_mutations:
        return None

    amino_acid_details, CAA, PAA, HAA = self.amino_acid_details, self.CAA, self.PAA, self.HAA
    burial_cutoff = self.burial_cutoff

    # Initialize variables. For ambiguous cases where the set of distinct values has multiple values, we default to None
    residue_charge, residue_charges = None, set()
    exposure, exposures = None, set()
    volume_change, volume_changes = None, set()
    record_wtaa, wtaas = None, set()
    record_mutaa, mutaas = None, set()
    DSSPSimpleSSType, DSSPSimpleSSTypes = None, set()
    DSSPType, DSSPTypes = None, set()
    DSSPExposure, DSSPExposures = None, set()
    scops = set()
    mutation_string = []

    num_derivative_errors = predicted_data.get('Errors', {}).get('Derivative error count', 0)
    run_time = predicted_data.get('RunTime', None)
    max_memory = predicted_data.get('MaxMemory', None)

    # Accumulate per-mutation annotations into the sets above.
    mutations = self.get_record_mutations(record)
    for m in mutations:
        wtaa = m['WildTypeAA']
        mutaa = m['MutantAA']
        mutation_string.append('{0} {1}{2}{3}'.format(m['Chain'], m['WildTypeAA'], m['ResidueID'], m['MutantAA']))

        # Residue types and chain
        wtaas.add(wtaa)
        mutaas.add(mutaa)
        if m.get('SCOP class'):
            scops.add(m['SCOP class'])
        DSSPSimpleSSTypes.add(m['DSSPSimpleSSType'])
        DSSPTypes.add(m['DSSPType'])
        DSSPExposures.add(m['DSSPExposure'])

        # Burial
        if m['DSSPExposure'] != None:
            if m['DSSPExposure'] > burial_cutoff:
                exposures.add('E')
            else:
                exposures.add('B')
        else:
            exposures.add(None)

        # Volume: SL = small->large, LS = large->small, XX = unchanged.
        if amino_acid_details[wtaa]['van der Waals volume'] < amino_acid_details[mutaa]['van der Waals volume']:
            volume_changes.add('SL')
        elif amino_acid_details[wtaa]['van der Waals volume'] > amino_acid_details[mutaa]['van der Waals volume']:
            volume_changes.add('LS')
        elif amino_acid_details[wtaa]['van der Waals volume'] == amino_acid_details[mutaa]['van der Waals volume']:
            volume_changes.add('XX')

        # Charge
        if ((wtaa in CAA or wtaa in PAA) and (mutaa in HAA)) or ((mutaa in CAA or mutaa in PAA) and (wtaa in HAA)):
            residue_charges.add('Change')
        elif (wtaa in CAA or wtaa in PAA) and (mutaa in CAA or mutaa in PAA):
            residue_charges.add('Polar/Charged')
        elif (wtaa in HAA) and (mutaa in HAA):
            residue_charges.add('Hydrophobic/Non-polar')
        else:
            raise colortext.Exception('Should not reach here.')

    # Create a string representing the mutations (useful for labeling rather than analysis)
    mutation_string = '; '.join(mutation_string)

    # Taking unique values, determine the residue charges of the wildtype and mutant residues, the wildtype residue exposure, and the relative change in van der Waals volume
    if len(residue_charges) == 1:
        residue_charge = residue_charges.pop()
    if len(exposures) == 1:
        exposure = exposures.pop()
    if len(volume_changes) == 1:
        volume_change = volume_changes.pop()

    # Taking unique values, determine the wildtype and mutant residue types
    all_residues = wtaas.union(mutaas)
    if len(wtaas) == 1:
        record_wtaa = wtaas.pop()
    if len(mutaas) == 1:
        record_mutaa = mutaas.pop()

    # Taking unique values, determine the secondary structure and residue exposures from the DSSP data in the dataset
    if len(DSSPSimpleSSTypes) == 1:
        DSSPSimpleSSType = DSSPSimpleSSTypes.pop()
    if len(DSSPTypes) == 1:
        DSSPType = DSSPTypes.pop()
    if len(DSSPExposures) == 1:
        DSSPExposure = DSSPExposures.pop()

    # Determine the SCOP classification from the SCOPe data in the dataset
    full_scop_classification, scop_class, scop_fold = None, None, None
    if len(scops) > 1:
        self.log('Warning: There is more than one SCOPe class for record {0}.'.format(record_id), colortext.warning)
    elif len(scops) == 1:
        full_scop_classification = scops.pop()
        scop_tokens = full_scop_classification.split('.')
        scop_class = scop_tokens[0]
        if len(scop_tokens) > 1:
            scop_fold = '.'.join(scop_tokens[0:2])

    # Partition the data by PDB resolution with bins: N/A, <1.5, 1.5-<2.0, 2.0-<2.5, >=2.5
    pdb_record = pdb_data.get(self.get_record_pdb_file_id(record).upper())
    pdb_resolution_bin = None
    pdb_resolution = pdb_record.get('Resolution')
    if pdb_resolution != None:
        if pdb_resolution < 1.5:
            pdb_resolution_bin = '<1.5'
        elif pdb_resolution < 2.0:
            pdb_resolution_bin = '1.5-2.0'
        elif pdb_resolution < 2.5:
            pdb_resolution_bin = '2.0-2.5'
        else:
            pdb_resolution_bin = '>=2.5'
    pdb_resolution_bin = pdb_resolution_bin or 'N/A'

    # Mark mutations involving glycine or proline
    has_gp_mutation = 'G' in all_residues or 'P' in all_residues

    # Create the data matrix
    dataframe_record = dict(
        DatasetID = record_id,
        PDBFileID = self.get_record_pdb_file_id(record),
        Mutations = mutation_string,
        NumberOfMutations = len(mutations),
        Predicted = predicted_data[self.ddg_analysis_type],
        ResidueCharges = residue_charge,
        VolumeChange = volume_change,
        HasGPMutation = int(has_gp_mutation),
        WildTypeDSSPType = DSSPType,
        WildTypeDSSPSimpleSSType = DSSPSimpleSSType,
        WildTypeDSSPExposure = DSSPExposure,
        WildTypeSCOPClass = scop_class,
        WildTypeSCOPFold = scop_fold,
        WildTypeSCOPClassification = full_scop_classification,
        WildTypeExposure = exposure,
        WildTypeAA = record_wtaa,
        MutantAA = record_mutaa,
        PDBResolution = pdb_record.get('Resolution'),
        PDBResolutionBin = pdb_resolution_bin,
        NumberOfResidues = self.count_residues(record, pdb_record) or None,
        NumberOfDerivativeErrors = num_derivative_errors,
        RunTime = run_time,
        MaxMemory = max_memory,
        )
    # Copy through any extra prediction columns requested by the caller.
    for c in additional_prediction_data_columns:
        dataframe_record[c] = predicted_data.get(c)

    if self.contains_experimental_data:
        # These fields are particular to dataframes containing experimental values e.g. for benchmarking runs or for
        # datasets where we have associated experimental values
        self.get_experimental_ddg_values(record, dataframe_record)
        self.compute_stability_classification(predicted_data, record, dataframe_record)
        self.compute_absolute_error(predicted_data, record, dataframe_record)

    return dataframe_record
python
def split(string, separator_regexp=None, maxsplit=0):
    """Split a string to a list

    >>> split('fred, was, here')
    ['fred', ' was', ' here']
    """
    if not string:
        return []
    if separator_regexp is None:
        # Project-level default pattern (defined elsewhere in this module).
        separator_regexp = _default_separator()
    if not separator_regexp:
        # Empty separator: fall back to whitespace splitting.
        return string.split()
    # Pass maxsplit by keyword: positional maxsplit to re.split() is
    # deprecated as of Python 3.13.
    return re.split(separator_regexp, string, maxsplit=maxsplit)
python
def update(self, instance, validated_data):
    """Change the user's password and mark the reset request as consumed."""
    user = instance.user
    user.set_password(validated_data["password1"])
    user.full_clean()
    user.save()
    # The reset request has now been fulfilled; persist that state.
    instance.reset = True
    instance.full_clean()
    instance.save()
    return instance
python
def abs_timedelta(delta):
    """Returns an "absolute" value for a timedelta, always representing a
    time distance.

    The original implementation computed ``now - (now + delta)``, which is
    algebraically exactly ``-delta`` (timedelta arithmetic is exact), so the
    clock read is unnecessary and is removed here.
    """
    # timedelta normalizes so that a negative delta always has days < 0.
    if delta.days < 0:
        return -delta
    return delta
java
/**
 * Grows the per-row storage of a sparse matrix until the current row can hold
 * {@code cols} more entries starting at {@code currentCol}, spilling into new
 * rows once a row reaches {@code SPARSE_MATRIX_DIM}.
 *
 * <p>Fix: the original zeroed {@code currentCol} <em>before</em> subtracting
 * the remaining capacity {@code (length - currentCol)}, so the subtraction
 * always removed the full row length instead of only the space actually left.
 */
private static void enlargeTables(float[][] data, int[][] rowIndex, int cols, int currentRow, int currentCol) {
    while (data[currentRow].length < currentCol + cols) {
        if (data[currentRow].length == SPARSE_MATRIX_DIM) {
            // Row is at maximum size: account for the space still free in
            // this row, then continue filling a freshly allocated next row.
            cols -= (data[currentRow].length - currentCol);
            currentCol = 0;
            currentRow++;
            data[currentRow] = malloc4f(ALLOCATED_ARRAY_LEN);
            rowIndex[currentRow] = malloc4(ALLOCATED_ARRAY_LEN);
        } else {
            // Double the row capacity, capped at SPARSE_MATRIX_DIM.
            int newLen = (int) Math.min((long) data[currentRow].length << 1L, (long) SPARSE_MATRIX_DIM);
            data[currentRow] = Arrays.copyOf(data[currentRow], newLen);
            rowIndex[currentRow] = Arrays.copyOf(rowIndex[currentRow], newLen);
        }
    }
}
java
/**
 * Creates the given directory if it does not already exist.
 *
 * <p>{@link File#mkdir()} returns {@code false} both on real failure and when
 * the directory already exists (e.g. created concurrently between the
 * {@code exists()} check and the {@code mkdir()} call), so existence is
 * re-checked before reporting an error.
 *
 * @param dir directory to create (parent must already exist)
 * @throws IOException if the directory could not be created
 */
public static void mkdir(File dir) throws IOException {
    if (!dir.exists() && !dir.mkdir() && !dir.isDirectory())
        throw new IOException("couldn't create directory: " + dir);
}
python
def remove_tag(self, task, params=None, **options):
    """Removes a tag from the task. Returns an empty data block.

    Parameters
    ----------
    task : {Id} The task to remove a tag from.
    [data] : {Object} Data for the request
      - tag : {Id} The tag to remove from the task.
    """
    # Default to None instead of a mutable {} default: a shared dict default
    # would leak state between calls if any caller (or callee) mutated it.
    if params is None:
        params = {}
    path = "/tasks/%s/removeTag" % (task)
    return self.client.post(path, params, **options)
python
def diff(compiled_requirements, installed_dists):
    """
    Calculate which packages should be installed or uninstalled, given a
    set of compiled requirements and a list of currently installed modules.
    """
    # Index requirements by link when present, canonical key otherwise.
    requirements_lut = {}
    for req in compiled_requirements:
        requirements_lut[req.link or key_from_req(req.req)] = req

    satisfied = set()     # keys of installed dists already matching a requirement
    to_install = set()    # InstallRequirement objects still needed
    to_uninstall = set()  # keys of installed dists that should go

    pkgs_to_ignore = get_dists_to_ignore(installed_dists)

    for dist in installed_dists:
        key = key_from_req(dist)
        requirement = requirements_lut.get(key)
        if requirement is None or not requirement.match_markers():
            to_uninstall.add(key)
        elif requirement.specifier.contains(dist.version):
            satisfied.add(key)

    for key, requirement in requirements_lut.items():
        if key not in satisfied and requirement.match_markers():
            to_install.add(requirement)

    # Make sure to not uninstall any packages that should be ignored
    to_uninstall -= set(pkgs_to_ignore)

    return (to_install, to_uninstall)
java
public InputStreamReader getDecompressionStream(String path, String encoding) throws IOException { File fileToUncompress = new File(path); BufferedInputStream fileStream = new BufferedInputStream(new FileInputStream(fileToUncompress)); // read bzip2 prefix: BZ fileStream.read(); fileStream.read(); BufferedInputStream bufferedStream = new BufferedInputStream(fileStream); CBZip2InputStream input = new CBZip2InputStream(bufferedStream); return new InputStreamReader(input, encoding); }
java
/**
 * Creates a JmsQueue from the given name string, rejecting null/empty names,
 * the bare "queue://" prefix, and names that carry the topic prefix.
 */
@Override
public JmsQueue createQueue(String name) throws JMSException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "createQueue", name);
    JmsQueue queue = null;

    // if name string is null, empty or just "queue://", throw exception
    if ((name == null) || ("".equals(name)) || (JmsQueueImpl.QUEUE_PREFIX.equals(name))) {
        throw (InvalidDestinationException) JmsErrorUtils.newThrowable(
            InvalidDestinationException.class,
            "INVALID_VALUE_CWSIA0003",
            new Object[] { "Queue name", name },
            tc
        );
    }

    // if name is "topic://" throw exception — a topic URI must not be used
    // to build a queue destination.
    if (name.startsWith(JmsTopicImpl.TOPIC_PREFIX)) {
        throw (InvalidDestinationException) JmsErrorUtils.newThrowable(
            InvalidDestinationException.class,
            "MALFORMED_DESCRIPTOR_CWSIA0047",
            new Object[] { "Queue", name },
            tc
        );
    }

    // Trim only after the prefix checks so that " queue://" style input is
    // still handled by the destination creator below.
    name = name.trim();
    queue = (JmsQueue) destCreator.createDestinationFromString(name, URIDestinationCreator.DestType.QUEUE);

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(this, tc, "createQueue", queue);
    return queue;
}
java
/**
 * Batch search-and-replace entry point.
 * Usage: BatchReplaceMain dir patternfile [encoding]
 *
 * <p>The pattern file groups replacements under profile headers: a line
 * without '=' starts a new profile; a line with '=' defines an
 * old=new replacement pair (with "\=" escaping a literal '=' and
 * "\n"/"\t" escaping newline/tab).
 */
public static void main(String[] args) throws Exception {
    if (args.length < 2) {
        logger.info("using BatchReplaceMain dir patternfile encoding");
        return;
    }
    String dir = args[0];
    if (!new File(dir).exists()) {
        logger.error("{} not a valid file or directory", dir);
        return;
    }
    String properties = args[1];
    if (!new File(properties).exists()) {
        // NOTE(review): logs but does not return here, so a missing pattern
        // file will still be passed to Files.readLines below — confirm
        // whether a return was intended.
        logger.info("{} not valid file or directory", properties);
    }
    // Optional third argument selects the charset used when rewriting files.
    Charset charset = null;
    if (args.length >= 3) {
        charset = Charset.forName(args[2]);
    }
    List<String> lines = Files.readLines(new File(properties));
    Map<String, List<Replacer>> profiles = CollectUtils.newHashMap();
    // NOTE(review): if the first non-empty line contains '=', replacers is
    // still null and replacers.add() below will NPE — the file format
    // presumably requires a profile header first.
    List<Replacer> replacers = null;
    for (String line : lines) {
        if (Strings.isEmpty(line)) {
            continue;
        }
        if (-1 == line.indexOf('=')) {
            // Profile header line: start a new replacement group.
            replacers = CollectUtils.newArrayList();
            profiles.put(line, replacers);
        } else {
            // Temporarily mask escaped '=' so the split is unambiguous,
            // then restore it on both sides.
            line = Strings.replace(line, "\\=", "~~~~");
            String older = Strings.replace(Strings.substringBefore(line, "="), "~~~~", "=");
            String newer = Strings.replace(Strings.substringAfter(line, "="), "~~~~", "=");
            // Expand escaped newline/tab sequences in both patterns.
            older = Strings.replace(older, "\\n", "\n");
            older = Strings.replace(older, "\\t", "\t");
            newer = Strings.replace(newer, "\\n", "\n");
            newer = Strings.replace(newer, "\\t", "\t");
            Replacer pair = new Replacer(older, newer);
            replacers.add(pair);
        }
    }
    replaceFile(dir, profiles, charset);
}
java
/**
 * Appends the given ARNs to the assessment-run ARN list, creating the
 * backing list on first use.
 *
 * @param assessmentRunArns ARNs to append
 * @return this exception, for call chaining
 */
public AssessmentRunInProgressException withAssessmentRunArns(String... assessmentRunArns) {
    // Lazily create the backing list, presized to the varargs length.
    if (this.assessmentRunArns == null) {
        setAssessmentRunArns(new java.util.ArrayList<String>(assessmentRunArns.length));
    }
    for (int i = 0; i < assessmentRunArns.length; i++) {
        this.assessmentRunArns.add(assessmentRunArns[i]);
    }
    return this;
}
java
/**
 * Initializes this Mac object with the given key (no algorithm parameters).
 *
 * @param key the key to initialize with
 * @throws InvalidKeyException if the key is unsuitable for this MAC
 */
public final void init(Key key) throws InvalidKeyException {
    try {
        if (spi != null && (key == null || lock == null)) {
            // An SPI is already bound (or provider re-selection is not
            // applicable): initialize it directly with null parameters.
            spi.engineInit(key, null);
        } else {
            // Select (or re-select) a provider based on the supplied key.
            chooseProvider(key, null);
        }
    } catch (InvalidAlgorithmParameterException e) {
        // We passed no parameters, so a parameter complaint here really
        // indicates a key problem; rethrow with the cause preserved.
        throw new InvalidKeyException("init() failed", e);
    }
    initialized = true;

    /* Android-removed: this debugging mechanism is not used in Android.
    if (!skipDebug && pdebug != null) {
        pdebug.println("Mac." + algorithm + " algorithm from: " + this.provider.getName());
    }
    */
}
java
/**
 * Fetches the commerce order note matching the company id and external
 * reference code, or {@code null} if none matches; delegates straight to
 * the persistence layer.
 *
 * @param companyId             the company id
 * @param externalReferenceCode the external reference code
 * @param retrieveFromCache     whether the finder cache may be consulted
 * @return the matching commerce order note, or {@code null}
 */
public static CommerceOrderNote fetchByC_ERC(long companyId,
    String externalReferenceCode, boolean retrieveFromCache) {
    return getPersistence()
               .fetchByC_ERC(companyId, externalReferenceCode, retrieveFromCache);
}
java
/**
 * Returns the converted data of the screen field at the given sequence,
 * or {@code null} when the sequence is -1 or no field is found.
 *
 * @param iSFieldSeq screen field sequence, or -1 for "none"
 * @return converted field data, or {@code null}
 */
public Object getFieldData(int iSFieldSeq) {
    // No field sequence supplied -> nothing to fetch.
    if (iSFieldSeq == -1)
        return null;
    ScreenField sField = m_gridScreen.getSField(this.getRelativeSField(iSFieldSeq));
    if (sField == null)
        return null;
    return sField.getConverter().getData();
}
java
// NOTE(review): ANTLR-generated tree-walker rule (see the grammar comments
// referencing DSLMapWalker.g). Generated code is left byte-identical;
// regenerate from the grammar rather than editing by hand.
public final void key_sentence() throws RecognitionException {
    CommonTree vtl = null;
    try {
        // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:88:5: ( variable_definition |vtl= VT_LITERAL | VT_SPACE )
        int alt10 = 3;
        // Dispatch on the next token type to pick the alternative.
        switch (input.LA(1)) {
            case VT_VAR_DEF: {
                alt10 = 1;
            }
                break;
            case VT_LITERAL: {
                alt10 = 2;
            }
                break;
            case VT_SPACE: {
                alt10 = 3;
            }
                break;
            default:
                NoViableAltException nvae = new NoViableAltException("", 10, 0, input);
                throw nvae;
        }
        switch (alt10) {
            case 1:
                // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:88:7: variable_definition
            {
                pushFollow(FOLLOW_variable_definition_in_key_sentence277);
                variable_definition();
                state._fsp--;
            }
                break;
            case 2:
                // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:89:7: vtl= VT_LITERAL
            {
                vtl = (CommonTree) match(input, VT_LITERAL, FOLLOW_VT_LITERAL_in_key_sentence287);
                // Append the literal text to both key buffers of the current entry.
                entry_stack.peek().keybuffer.append((vtl != null ? vtl.getText() : null));
                entry_stack.peek().sentenceKeyBuffer.append((vtl != null ? vtl.getText() : null));
            }
                break;
            case 3:
                // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:94:7: VT_SPACE
            {
                match(input, VT_SPACE, FOLLOW_VT_SPACE_in_key_sentence301);
                // Whitespace becomes a regex \s+ in the key pattern but a
                // single literal space in the sentence form.
                entry_stack.peek().keybuffer.append("\\s+");
                entry_stack.peek().sentenceKeyBuffer.append(" ");
            }
                break;
        }
    } catch (RecognitionException re) {
        reportError(re);
        recover(input, re);
    } finally {
        // do for sure before leaving
    }
}
python
def get_json_val(source, path, *, ignore_bad_path=False):
    """Get the nested value identified by the json path, rooted at source."""
    try:
        return JP.find(source, path)
    except JP.JSONPathError:
        # Bad paths either yield None (when tolerated) or propagate as-is.
        if not ignore_bad_path:
            raise
        return None
java
/**
 * Post-commit callback for a message remove: clears the committed
 * transaction from the in-doubt transaction set and, once the set drains,
 * resumes the suspended consumer.
 */
protected void eventPostCommitRemove(SIMPMessage msg, TransactionCommon transaction) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(tc, "eventPostCommitRemove", new Object[] { msg, transaction });

    // NB: ideally this would go in afterCompletion but we
    // dont have the transaction reference to register for the callback

    // orderLock serializes this with consumer attachment so that the set
    // and the consumer's suspend state stay consistent.
    synchronized (orderLock) {
        // Remove the transaction from the transactionSet if applicable
        // Use the orderLock to synch with attachConsumer
        if (transactionSet != null) {
            transactionSet.remove(transaction.getPersistentTranId());
            if (transactionSet.isEmpty()) {
                // Last in-doubt transaction resolved: drop the set and
                // resume the (single) suspended consumer point.
                transactionSet = null;

                // Get the consumer and resume it
                consumerPoints.get(0).
                                getConsumerPoint().
                                resumeConsumer(DispatchableConsumerPoint.SUSPEND_FLAG_INITIAL_INDOUBTS);
            }
        }
    }

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(tc, "eventPostCommitRemove");
}
python
def pkg_config(pkg_libraries):
    """Use pkg-config to query for the location of libraries, library
    directories, and header directories

    Arguments:
        pkg_libraries(list): A list of packages as strings

    Returns:
        libraries(list), library_dirs(list), include_dirs(list)
    """
    libraries = []
    library_dirs = []
    include_dirs = []

    # Check that we have the packages (replaces the `== 0: pass / else`
    # anti-pattern with a direct failure test).
    for pkg in pkg_libraries:
        if os.system('pkg-config --exists %s 2>/dev/null' % pkg) != 0:
            print("Could not find library {0}".format(pkg))
            sys.exit(1)

    # Get the pkg-config flags
    if len(pkg_libraries) > 0:
        # PKG_CONFIG_ALLOW_SYSTEM_CFLAGS explicitly lists system paths.
        # On system-wide LAL installs, this is needed for swig to find lalswig.i
        for token in getoutput("PKG_CONFIG_ALLOW_SYSTEM_CFLAGS=1 pkg-config --libs --cflags %s" % ' '.join(pkg_libraries)).split():
            if token.startswith("-l"):
                libraries.append(token[2:])
            elif token.startswith("-L"):
                library_dirs.append(token[2:])
            elif token.startswith("-I"):
                include_dirs.append(token[2:])

    return libraries, library_dirs, include_dirs
java
/**
 * Appends the given bytes to the internal buffer and dispatches every
 * complete packet currently held to the consumer.
 *
 * @param b   source byte array
 * @param off offset of the new data within {@code b}
 * @param len number of new bytes
 * @throws PacketException on a malformed packet
 */
@Override
public void write(byte[] b, int off, int len) throws PacketException {
    baq.add(b, off, len);
    // packetSize caches the size of a partially-received packet across
    // calls; 0 means "ask the parser for the next packet's size".
    while (packetSize > 0 || (packetSize = parser.getSize(baq.array(), baq.offset(), baq.length())) > 0) {
        if (baq.length() < packetSize) {
            // Packet still incomplete: keep the cached size and wait for
            // more data on the next write().
            return;
        }
        // Full packet available: hand it off, then drop it from the buffer.
        consumer.accept(baq.array(), baq.offset(), packetSize);
        baq.remove(packetSize);
        packetSize = 0;
    }
}
python
def _add(self, lines): '''Add can also handle https, and compressed files. Parameters ========== line: the line from the recipe file to parse for ADD ''' lines = self._setup('ADD', lines) for line in lines: values = line.split(" ") frompath = values.pop(0) # Custom parsing for frompath # If it's a web address, add to install routine to get if frompath.startswith('http'): for topath in values: self._parse_http(frompath, topath) # Add the file, and decompress in install elif re.search("[.](gz|gzip|bz2|xz)$", frompath.strip()): for topath in values: self._parse_archive(frompath, topath) # Just add the files else: for topath in values: self._add_files(frompath, topath)
python
def VerifyScripts(verifiable):
    """
    Verify the scripts of the provided `verifiable` object.

    Args:
        verifiable (neo.IO.Mixins.VerifiableMixin):

    Returns:
        bool: True if verification is successful. False otherwise.
    """
    try:
        hashes = verifiable.GetScriptHashesForVerifying()
    except Exception as e:
        logger.debug("couldn't get script hashes %s " % e)
        return False

    # Each script hash must pair 1:1 with a witness script.
    if len(hashes) != len(verifiable.Scripts):
        logger.debug(f"hash - verification script length mismatch ({len(hashes)}/{len(verifiable.Scripts)})")
        return False

    blockchain = GetBlockchain()

    for i in range(0, len(hashes)):
        verification = verifiable.Scripts[i].VerificationScript

        if len(verification) == 0:
            # Empty verification script: synthesize an AppCall to the
            # contract identified by the expected hash.
            sb = ScriptBuilder()
            sb.EmitAppCall(hashes[i].Data)
            verification = sb.ms.getvalue()
        else:
            # Supplied script must hash to the expected script hash.
            verification_hash = Crypto.ToScriptHash(verification, unhex=False)
            if hashes[i] != verification_hash:
                logger.debug(f"hash {hashes[i]} does not match verification hash {verification_hash}")
                return False

        # Run invocation + verification in the application engine under the
        # Verification trigger; the single boolean left on the result stack
        # decides the outcome.
        state_reader = GetStateReader()
        script_table = CachedScriptTable(DBCollection(blockchain._db, DBPrefix.ST_Contract, ContractState))
        engine = ApplicationEngine(TriggerType.Verification, verifiable, script_table, state_reader, Fixed8.Zero())
        engine.LoadScript(verification)
        invocation = verifiable.Scripts[i].InvocationScript
        engine.LoadScript(invocation)

        try:
            success = engine.Execute()
            state_reader.ExecutionCompleted(engine, success)
        except Exception as e:
            state_reader.ExecutionCompleted(engine, False, e)

        # Verification passes only if exactly one truthy value remains.
        if engine.ResultStack.Count != 1 or not engine.ResultStack.Pop().GetBoolean():
            Helper.EmitServiceEvents(state_reader)
            if engine.ResultStack.Count > 0:
                logger.debug(f"Result stack failure! Count: {engine.ResultStack.Count} bool value: {engine.ResultStack.Pop().GetBoolean()}")
            else:
                logger.debug(f"Result stack failure! Count: {engine.ResultStack.Count}")
            return False

        Helper.EmitServiceEvents(state_reader)

    return True
java
/**
 * Lexicographic comparison over the filled portion of both char arrays;
 * on a common-prefix tie the shorter array orders first.
 */
@Override
public int compareTo(ValueArray<CharValue> o) {
    CharValueArray other = (CharValueArray) o;
    int sharedLength = Math.min(position, other.position);
    int i = 0;
    while (i < sharedLength) {
        int cmp = Character.compare(data[i], other.data[i]);
        if (cmp != 0) {
            return cmp;
        }
        i++;
    }
    // Equal prefixes: fall back to comparing filled lengths.
    return Integer.compare(position, other.position);
}
java
/**
 * Removes the autoscale policy from every server in the list, launching one
 * removal job per server and returning a future that completes when all
 * jobs do.
 *
 * @param serverList servers to strip the autoscale policy from
 * @return future over the parallel removal jobs
 */
public OperationFuture<List<Server>> removeAutoscalePolicyOnServer(List<Server> serverList) {
    // Delegate per-server to the single-server overload; collect its jobs.
    List<JobFuture> jobs = serverList
        .stream()
        .map(server -> removeAutoscalePolicyOnServer(server).jobFuture())
        .collect(toList());

    return new OperationFuture<>(
        serverList,
        new ParallelJobsFuture(jobs)
    );
}
java
/**
 * Validates that both clusters contain exactly the same node ids.
 *
 * @throws VoldemortException if the node id sets differ
 */
public static void validateClusterNodeCounts(final Cluster lhs, final Cluster rhs) {
    if (lhs.getNodeIds().equals(rhs.getNodeIds())) {
        return;
    }
    throw new VoldemortException("Node ids are not the same [ lhs cluster node ids ("
                                 + lhs.getNodeIds()
                                 + ") not equal to rhs cluster node ids ("
                                 + rhs.getNodeIds() + ") ]");
}
python
def from_json(cls, data):
    """Create a Data Collection from a dictionary.

    Args:
        {
            "header": A Ladybug Header,
            "values": An array of values,
            "datetimes": An array of datetimes,
            "validated_a_period": Boolean for whether header analysis_period is valid
        }
    """
    assert 'header' in data, 'Required keyword "header" is missing!'
    assert 'values' in data, 'Required keyword "values" is missing!'
    assert 'datetimes' in data, 'Required keyword "datetimes" is missing!'

    header = Header.from_json(data['header'])
    datetimes = [DateTime.from_json(dat) for dat in data['datetimes']]
    collection = cls(header, data['values'], datetimes)

    # Optional flag: carried over verbatim when present.
    if 'validated_a_period' in data:
        collection._validated_a_period = data['validated_a_period']
    return collection
java
/**
 * Connects to the database at {@code url} and scans its schema with
 * SchemaCrawler at the requested info level.
 *
 * @param url               JDBC url
 * @param user              database user
 * @param password          database password
 * @param infoLevelName     SchemaCrawler info level name (case-insensitive)
 * @param bundledDriverName optional bundled-driver profile; null for defaults
 * @param properties        per-option overrides keyed by option property name
 * @return the scanned catalog
 * @throws IOException if the scan or connection fails
 */
protected Catalog getCatalog(String url, String user, String password, String infoLevelName, String bundledDriverName,
                             Properties properties) throws IOException {
    // Determine info level
    InfoLevel level = InfoLevel.valueOf(infoLevelName.toLowerCase());
    SchemaInfoLevel schemaInfoLevel = level.getSchemaInfoLevel();
    // Set options: any property matching an InfoLevelOption overrides the
    // level's default for that option.
    for (InfoLevelOption option : InfoLevelOption.values()) {
        String value = properties.getProperty(option.getPropertyName());
        if (value != null) {
            LOGGER.info("Setting option " + option.name() + "=" + value);
            option.set(schemaInfoLevel, Boolean.valueOf(value.toLowerCase()));
        }
    }
    // A bundled driver profile supplies pre-tuned options; otherwise start
    // from SchemaCrawler defaults.
    SchemaCrawlerOptions options;
    if (bundledDriverName != null) {
        options = getOptions(bundledDriverName, level);
    } else {
        options = new SchemaCrawlerOptions();
    }
    options.setSchemaInfoLevel(schemaInfoLevel);
    LOGGER.debug("Scanning database schemas on '" + url + "' (user='" + user + "', info level='" + level.name() + "')");
    Catalog catalog;
    try (Connection connection = DriverManager.getConnection(url, user, password)) {
        catalog = SchemaCrawlerUtility.getCatalog(connection, options);
    } catch (SQLException | SchemaCrawlerException e) {
        throw new IOException(String.format("Cannot scan schema (url='%s', user='%s'", url, user), e);
    }
    return catalog;
}
java
/**
 * Returns the data field wrapped in an Optional; empty when unset.
 */
@javax.annotation.Nonnull
public java.util.Optional<net.morimekta.util.Binary> optionalData() {
    if (mData == null) {
        return java.util.Optional.empty();
    }
    return java.util.Optional.of(mData);
}
python
def set_config(**kwargs):
    """Set up the configure of profiler (only accepts keyword arguments).

    Parameters
    ----------
    filename : string,
        output file for profile data
    profile_all : boolean,
        all profile types enabled
    profile_symbolic : boolean,
        whether to profile symbolic operators
    profile_imperative : boolean,
        whether to profile imperative operators
    profile_memory : boolean,
        whether to profile memory usage
    profile_api : boolean,
        whether to profile the C API
    contiguous_dump : boolean,
        whether to periodically dump profiling data to file
    dump_period : float,
        seconds between profile data dumps
    aggregate_stats : boolean,
        whether to maintain aggregate stats in memory for console
        dump. Has some negative performance impact.
    profile_process : string
        whether to profile kvstore `server` or `worker`.
        server can only be profiled when kvstore is of type dist.
        if this is not passed, defaults to `worker`
    """
    # Keys are passed through as-is; values are stringified for the C API.
    names = list(kwargs.keys())
    values = [str(val) for val in kwargs.values()]
    check_call(_LIB.MXSetProcessProfilerConfig(len(kwargs),
                                               c_str_array(names),
                                               c_str_array(values),
                                               profiler_kvstore_handle))
python
def ind_lm(index, lmax):
    """Convert single index to corresponding (ell, m) pair"""
    import numpy as np
    # NOTE(review): the output buffer is float64 even though (ell, m) are
    # integer degree/order values -- presumably the compiled _ind_lm helper
    # requires a float buffer; confirm whether an integer dtype was intended.
    lm = np.empty(2, dtype=np.float64)
    # _ind_lm fills lm in place with the (ell, m) pair for this index.
    _ind_lm(index, lmax, lm)
    return lm
java
/**
 * Builds a {@code left == constant} predicate by wrapping the constant in a
 * {@link ConstantImpl} expression and delegating to {@code eq}.
 *
 * @param left     left-hand expression
 * @param constant right-hand constant value
 * @return the equality predicate
 */
public static <D> Predicate eqConst(Expression<D> left, D constant) {
    return eq(left, ConstantImpl.create(constant));
}
java
/**
 * Builds the rotation matrix R_v via Rodrigues' rotation formula
 * R = I + sin(theta)*K + (1 - cos(theta))*K^2, where K_x is the
 * (normalized) skew-symmetric cross-product matrix of the rotation axis.
 */
void compute_Rv() {
    // t = |(v1, v2)|; with s = sqrt(t^2 + 1), cosT = 1/s and
    // sinT = sqrt(1 - 1/s^2) give the rotation angle's cosine/sine.
    double t = Math.sqrt(v1*v1 + v2*v2);
    double s = Math.sqrt(t*t + 1);
    double cosT = 1.0/s;
    double sinT = Math.sqrt(1-1.0/(s*s));

    // Skew-symmetric cross-product matrix for axis (-v2, v1, 0),
    // normalized below by dividing through by t.
    K_x.data[0] = 0;   K_x.data[1] = 0;   K_x.data[2] = v1;
    K_x.data[3] = 0;   K_x.data[4] = 0;   K_x.data[5] = v2;
    K_x.data[6] = -v1; K_x.data[7] = -v2; K_x.data[8] = 0;
    CommonOps_DDRM.divide(K_x,t);

    // R_v = I + sinT*K + (1 - cosT)*K*K  (Rodrigues' formula)
    CommonOps_DDRM.setIdentity(R_v);
    CommonOps_DDRM.addEquals(R_v,sinT,K_x);
    CommonOps_DDRM.multAdd(1.0-cosT,K_x,K_x,R_v);
}