language: stringclasses (2 values)
func_code_string: stringlengths (63 to 466k)
python
def _setbin_safe(self, binstring):
    """Reset the bitstring to the value given in binstring."""
    binstring = tidy_input_string(binstring)
    # remove any 0b if present
    binstring = binstring.replace('0b', '')
    self._setbin_unsafe(binstring)
python
def retryable(a_func, retry_options, **kwargs):
    """Creates a function equivalent to a_func, but that retries on certain
    exceptions.

    Args:
        a_func (callable): A callable.
        retry_options (RetryOptions): Configures the exceptions upon which the
            callable should retry, and the parameters to the exponential
            backoff retry algorithm.
        kwargs: Additional arguments passed through to the callable.

    Returns:
        Callable: A function that will retry on exception.
    """
    delay_mult = retry_options.backoff_settings.retry_delay_multiplier
    max_delay_millis = retry_options.backoff_settings.max_retry_delay_millis
    has_timeout_settings = _has_timeout_settings(retry_options.backoff_settings)

    if has_timeout_settings:
        timeout_mult = retry_options.backoff_settings.rpc_timeout_multiplier
        max_timeout = (retry_options.backoff_settings.max_rpc_timeout_millis /
                       _MILLIS_PER_SECOND)
        total_timeout = (retry_options.backoff_settings.total_timeout_millis /
                         _MILLIS_PER_SECOND)

    def inner(*args):
        """Equivalent to ``a_func``, but retries upon transient failure.

        Retrying is done through an exponential backoff algorithm configured
        by the options in ``retry``.
        """
        delay = retry_options.backoff_settings.initial_retry_delay_millis
        exc = errors.RetryError('Retry total timeout exceeded before any '
                                'response was received')
        if has_timeout_settings:
            timeout = (
                retry_options.backoff_settings.initial_rpc_timeout_millis /
                _MILLIS_PER_SECOND)
            now = time.time()
            deadline = now + total_timeout
        else:
            timeout = None
            deadline = None

        while deadline is None or now < deadline:
            try:
                to_call = add_timeout_arg(a_func, timeout, **kwargs)
                return to_call(*args)
            except Exception as exception:  # pylint: disable=broad-except
                code = config.exc_to_code(exception)
                if code not in retry_options.retry_codes:
                    raise errors.RetryError(
                        'Exception occurred in retry method that was not'
                        ' classified as transient', exception)

                exc = errors.RetryError(
                    'Retry total timeout exceeded with exception', exception)

                # Sleep a random number which will, on average, equal the
                # expected delay.
                to_sleep = random.uniform(0, delay * 2)
                time.sleep(to_sleep / _MILLIS_PER_SECOND)
                delay = min(delay * delay_mult, max_delay_millis)

                if has_timeout_settings:
                    now = time.time()
                    timeout = min(
                        timeout * timeout_mult, max_timeout, deadline - now)

        raise exc

    return inner
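A minimal standalone sketch of the jittered delay schedule this wrapper produces; the multiplier, initial delay, and cap below are hypothetical values, and the jitter draw mirrors the random.uniform(0, delay * 2) call above:

import random

delay_millis = 100.0  # hypothetical initial_retry_delay_millis
for attempt in range(5):
    to_sleep = random.uniform(0, delay_millis * 2)  # jittered; averages to delay_millis
    print('attempt %d: sleeping ~%.0f ms' % (attempt, to_sleep))
    delay_millis = min(delay_millis * 1.3, 1000.0)  # exponential growth, capped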
python
def write_to_cache(self, data, filename=''):
    ''' Writes data to file as JSON. Returns True. '''
    if not filename:
        filename = self.cache_path_cache
    json_data = json.dumps(data)
    with open(filename, 'w') as cache:
        cache.write(json_data)
    return True
java
public synchronized void start() {
    // stop any running thread first
    stop();

    this.thread = new Thread("Flicker Update-Thread") {
        public void run() {
            // We start with the first half-byte.
            halfbyteid = 0;

            // The clock, which keeps toggling back and forth. We start at 1.
            // Otherwise the very first character would only be transmitted once,
            // which would mean the whole code would have to run through completely
            // once more before a full one had been sent.
            clock = 1;

            try {
                // The endless transmission loop
                while (true) {
                    int[] bits = bitarray.get(halfbyteid);
                    bits[0] = clock;
                    paint(bits[0] == 1, bits[1] == 1, bits[2] == 1, bits[3] == 1, bits[4] == 1);
                    clock--;
                    if (clock < 0) {
                        clock = 1;
                        // Every character must be transmitted twice: once with
                        // clock 0 and once with clock 1.
                        halfbyteid++;
                        if (halfbyteid >= bitarray.size()) {
                            halfbyteid = 0;
                            // We have iterated through the complete code once
                            iterations++;
                            done(iterations);
                        }
                    }

                    // Wait. We recompute the wait time on every pass, so the
                    // frequency can be changed while the code is flickering.
                    long sleep = 1000L / freq;
                    sleep(sleep);
                }
            } catch (InterruptedException e) {
                // end of display
            }
        }
    };
    thread.start();
}
python
def train(self, data, label, batch_size):
    """
    Description : training for LipNet
    """
    # pylint: disable=no-member
    sum_losses = 0
    len_losses = 0
    with autograd.record():
        losses = [self.loss_fn(self.net(X), Y) for X, Y in zip(data, label)]

    for loss in losses:
        sum_losses += mx.nd.array(loss).sum().asscalar()
        len_losses += len(loss)
        loss.backward()

    self.trainer.step(batch_size)
    return sum_losses, len_losses
java
public static XslTransformer getTransformer(File xslFile) throws TransformerConfigurationException {
    if (xslFile == null) {
        throw new IllegalArgumentException("xslFile is null");
    }
    return getTransformer(new StreamSource(xslFile));
}
java
public void addRegexRoute(String urlPattern, Class<? extends Actor> actorClass) throws RouteAlreadyMappedException {
    addRoute(new Route(urlPattern, true), actorClass);
}
java
public int deleteByMetadata(long fileId) throws SQLException {
    DeleteBuilder<MetadataReference, Void> db = deleteBuilder();
    db.where().eq(MetadataReference.COLUMN_FILE_ID, fileId);
    int deleted = db.delete();
    return deleted;
}
java
public static List<String> readLines(File targetFile, String charsetName) {
    return readLines(targetFile.toPath(), charsetName);
}
python
def get_by_id(self, id):
    """Find user by his id and return user model."""
    user = super(ExtendedUsersService, self).get_by_id(id)
    user.first_name = 'John' + str(id)
    user.last_name = 'Smith' + str(id)
    user.gender = 'male'
    return user
java
public static Version from(int major, int minor, int patch, String build) {
    return new Version(major, minor, patch, build);
}
python
def geocode(self, query, lang='en', exactly_one=True, timeout=DEFAULT_SENTINEL):
    """
    Return a location point for a `3 words` query. If the `3 words` address
    doesn't exist, a :class:`geopy.exc.GeocoderQueryError` exception will be
    thrown.

    :param str query: The 3-word address you wish to geocode.

    :param str lang: two character language codes as supported by
        the API (https://docs.what3words.com/api/v2/#lang).

    :param bool exactly_one: Return one result or a list of results, if
        available. Due to the address scheme there is always exactly one
        result for each `3 words` address, so this parameter is rather
        useless for this geocoder.

        .. versionchanged:: 1.14.0
           ``exactly_one=False`` now returns a list of a single location.
           This option wasn't respected before.

    :param int timeout: Time, in seconds, to wait for the geocoding service
        to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
        exception. Set this only if you wish to override, on this call
        only, the value set during the geocoder's initialization.

    :rtype: :class:`geopy.location.Location` or a list of them, if
        ``exactly_one=False``.
    """
    if not self._check_query(query):
        raise exc.GeocoderQueryError(
            "Search string must be 'word.word.word'"
        )

    params = {
        'addr': self.format_string % query,
        'lang': lang.lower(),
        'key': self.api_key,
    }

    url = "?".join((self.geocode_api, urlencode(params)))

    logger.debug("%s.geocode: %s", self.__class__.__name__, url)
    return self._parse_json(
        self._call_geocoder(url, timeout=timeout),
        exactly_one=exactly_one
    )
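For context, a hedged sketch of how an end user might invoke this geocoder through geopy; the API key and the 3-word address are placeholders:

from geopy.geocoders import What3Words

geocoder = What3Words('MY-API-KEY')  # placeholder key, not a real credential
location = geocoder.geocode('index.home.raft', lang='en')
print(location.latitude, location.longitude)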
python
def vspec_magic(data):
    """
    Takes average vector of replicate measurements
    """
    vdata, Dirdata, step_meth = [], [], ""
    if len(data) == 0:
        return vdata
    treat_init = ["treatment_temp", "treatment_temp_decay_rate", "treatment_temp_dc_on",
                  "treatment_temp_dc_off", "treatment_ac_field", "treatment_ac_field_decay_rate",
                  "treatment_ac_field_dc_on", "treatment_ac_field_dc_off", "treatment_dc_field",
                  "treatment_dc_field_decay_rate", "treatment_dc_field_ac_on",
                  "treatment_dc_field_ac_off", "treatment_dc_field_phi", "treatment_dc_field_theta"]
    treats = []
    #
    # find keys that are used
    #
    for key in treat_init:
        if key in list(data[0].keys()):
            treats.append(key)  # get a list of keys
    stop = {}
    stop["er_specimen_name"] = "stop"
    for key in treats:
        stop[key] = ""  # tells program when to quit and go home
    data.append(stop)
    #
    # set initial states
    #
    DataState0, newstate = {}, 0
    for key in treats:
        DataState0[key] = data[0][key]  # set beginning treatment
    k, R = 1, 0
    for i in range(k, len(data)):
        FDirdata, Dirdata, DataStateCurr, newstate = [], [], {}, 0
        for key in treats:  # check if anything changed
            DataStateCurr[key] = data[i][key]
            if DataStateCurr[key].strip() != DataState0[key].strip():
                newstate = 1  # something changed
        if newstate == 1:
            if i == k:  # sample is unique
                vdata.append(data[i - 1])
            else:  # measurement is not unique
                # print "averaging: records ", k, i
                for l in range(k - 1, i):
                    if 'orientation' in data[l]['measurement_description']:
                        data[l]['measurement_description'] = ""
                    Dirdata.append([float(data[l]['measurement_dec']),
                                    float(data[l]['measurement_inc']),
                                    float(data[l]['measurement_magn_moment'])])
                    FDirdata.append([float(data[l]['measurement_dec']),
                                     float(data[l]['measurement_inc'])])
                dir, R = vector_mean(Dirdata)
                Fpars = fisher_mean(FDirdata)
                vrec = data[i - 1]
                vrec['measurement_dec'] = '%7.1f' % (dir[0])
                vrec['measurement_inc'] = '%7.1f' % (dir[1])
                vrec['measurement_magn_moment'] = '%8.3e' % (old_div(R, (i - k + 1)))
                vrec['measurement_csd'] = '%7.1f' % (Fpars['csd'])
                vrec['measurement_positions'] = '%7.1f' % (Fpars['n'])
                vrec['measurement_description'] = 'average of multiple measurements'
                if "magic_method_codes" in list(vrec.keys()):
                    meths = vrec["magic_method_codes"].strip().split(":")
                    if "DE-VM" not in meths:
                        meths.append("DE-VM")
                    methods = ""
                    for meth in meths:
                        methods = methods + meth + ":"
                    vrec["magic_method_codes"] = methods[:-1]
                else:
                    vrec["magic_method_codes"] = "DE-VM"
                vdata.append(vrec)
            # reset state to new one
            for key in treats:
                DataState0[key] = data[i][key]  # set beginning treatment
            k = i + 1
            if data[i]["er_specimen_name"] == "stop":
                del data[-1]  # get rid of dummy stop sign
                return vdata, treats
java
static Interval removeIntervalFromEnd(Interval largeInterval, Interval smallInterval) {
    Preconditions.checkArgument(
        largeInterval.getEnd().equals(smallInterval.getEnd()),
        "end should be same. largeInterval[%s] smallInterval[%s]",
        largeInterval,
        smallInterval
    );

    return new Interval(largeInterval.getStart(), smallInterval.getStart());
}
java
public void setNotificationProject(String notificationProject) {
    m_notificationProject = notificationProject;
    if (CmsLog.INIT.isInfoEnabled()) {
        CmsLog.INIT.info(
            Messages.get().getBundle().key(Messages.INIT_NOTIFICATION_PROJECT_1, m_notificationProject));
    }
}
python
def __parse_direct_mention(self, message_text):
    """
    Finds a direct mention (a mention that is at the beginning) in message text
    and returns the user ID which was mentioned. If there is no direct mention,
    returns None
    """
    matches = re.search(MENTION_REGEX, message_text)
    # the first group contains the username, the second group contains the remaining message
    return (matches.group(1), listify(matches.group(2).strip())) if matches else (None, None)
python
def _gather_topk_beams(nested, score_or_log_prob, batch_size, beam_size):
    """Gather top beams from nested structure."""
    _, topk_indexes = tf.nn.top_k(score_or_log_prob, k=beam_size)
    return _gather_beams(nested, topk_indexes, batch_size, beam_size)
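A small standalone illustration of the tf.nn.top_k call used above (TensorFlow 2.x eager mode; the beam scores are made up):

import tensorflow as tf

scores = tf.constant([[0.1, 0.9, 0.4, 0.7]])  # [batch=1, beams=4], made-up values
values, topk_indexes = tf.nn.top_k(scores, k=2)
print(topk_indexes.numpy())  # [[1 3]] -> indices of the two best beams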
python
def _handle_message_flow(self, app_message):
    """
    Handle protocol flow for incoming and outgoing messages, depending on
    service level and according to MQTT spec. paragraph 4.3-Quality of
    Service levels and protocol flows

    :param app_message: PublishMessage to handle
    :return: nothing.
    """
    if app_message.qos == QOS_0:
        yield from self._handle_qos0_message_flow(app_message)
    elif app_message.qos == QOS_1:
        yield from self._handle_qos1_message_flow(app_message)
    elif app_message.qos == QOS_2:
        yield from self._handle_qos2_message_flow(app_message)
    else:
        raise HBMQTTException("Unexpected QOS value '%d'" % app_message.qos)
java
public EClass getGCFLT() {
    if (gcfltEClass == null) {
        gcfltEClass = (EClass)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(450);
    }
    return gcfltEClass;
}
python
def graph(data):
    """Draws graph of rating vs episode number"""
    title = data['name'] + ' (' + data['rating'] + ') '
    plt.title(title)
    plt.xlabel('Episode Number')
    plt.ylabel('Ratings')
    rf, ef = graphdata(data)
    col = ['red', 'green', 'orange']
    for i in range(len(rf)):
        x, y = ef[i], rf[i]
        plt.plot(x, y, color=col[i % 3])
    x1, x2, y1, y2 = plt.axis()
    y2 = 10
    if y1 > 7:
        y1 = 7
    plt.axis([x1, x2, y1, y2])
    plt.show()
java
public void setInstanceStates(java.util.Collection<String> instanceStates) {
    if (instanceStates == null) {
        this.instanceStates = null;
        return;
    }

    this.instanceStates = new com.amazonaws.internal.SdkInternalList<String>(instanceStates);
}
java
public void end () {
    Texture tex = (Texture)tile();
    Image image = canvas.image;
    // if our texture is already the right size, just update it
    if (tex != null && tex.pixelWidth == image.pixelWidth() && tex.pixelHeight == image.pixelHeight())
        tex.update(image);
    // otherwise we need to create a new texture (setTexture will unreference the old texture which
    // will cause it to be destroyed)
    else
        super.setTile(canvas.image.createTexture(Texture.Config.DEFAULT));
}
java
public OvhContact contact_POST(String city, String country, String email, String firstname, String lastname, String phone, String province, String street, String title, String zip) throws IOException {
    String qPath = "/store/contact";
    StringBuilder sb = path(qPath);
    HashMap<String, Object> o = new HashMap<String, Object>();
    addBody(o, "city", city);
    addBody(o, "country", country);
    addBody(o, "email", email);
    addBody(o, "firstname", firstname);
    addBody(o, "lastname", lastname);
    addBody(o, "phone", phone);
    addBody(o, "province", province);
    addBody(o, "street", street);
    addBody(o, "title", title);
    addBody(o, "zip", zip);
    String resp = exec(qPath, "POST", sb.toString(), o);
    return convertTo(resp, OvhContact.class);
}
java
public Attribute getTypeAttribute() {
    final Attribute ret;
    if (this.typeAttributeName == null && getParentType() != null) {
        ret = getParentType().getTypeAttribute();
    } else {
        ret = this.attributes.get(this.typeAttributeName);
    }
    return ret;
}
java
public static byte[] serialize(Object value) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    serialize(value, out);
    return out.toByteArray();
}
java
public Pair<List<Group>> getSurfaceResidues(double minAsaForSurface) {
    List<Group> surf1 = new ArrayList<Group>();
    List<Group> surf2 = new ArrayList<Group>();

    for (GroupAsa groupAsa : groupAsas1.values()) {
        if (groupAsa.getAsaU() > minAsaForSurface) {
            surf1.add(groupAsa.getGroup());
        }
    }
    for (GroupAsa groupAsa : groupAsas2.values()) {
        if (groupAsa.getAsaU() > minAsaForSurface) {
            surf2.add(groupAsa.getGroup());
        }
    }

    return new Pair<List<Group>>(surf1, surf2);
}
java
public void disposeAndValidate() throws ObjectNotFoundException, ComposedException {
    List<NlsObject> errorList = this.duplicateIdErrors;
    this.duplicateIdErrors = null;
    for (Resolver resolver : this.id2callableMap.values()) {
        if (!resolver.resolved) {
            errorList.add(this.bundle.errorObjectNotFound(resolver.type, resolver.id));
        }
    }
    this.id2valueMap = null;
    this.id2callableMap = null;
    int errorCount = errorList.size();
    if (errorCount > 0) {
        NlsObject[] errors = errorList.toArray(new NlsObject[errorCount]);
        throw new ComposedException(errors);
    }
}
java
private final byte getFlags() {
    if (!gotFlags) {
        flags = ((Byte)jmo.getField(ControlAccess.FLAGS)).byteValue();
        gotFlags = true;
    }
    return flags;
}
python
def sub_list(self, from_index, to_index):
    """
    Returns a sublist from this list, whose range is specified with
    from_index (inclusive) and to_index (exclusive). The returned list is
    backed by this list, so non-structural changes in the returned list are
    reflected in this list, and vice-versa.

    :param from_index: (int), the start point (inclusive) of the sub_list.
    :param to_index: (int), the end point (exclusive) of the sub_list.
    :return: (Sequence), a view of the specified range within this list.
    """
    return self._encode_invoke(list_sub_codec, from_=from_index, to=to_index)
java
public void marshall(SnsAction snsAction, ProtocolMarshaller protocolMarshaller) {
    if (snsAction == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(snsAction.getTargetArn(), TARGETARN_BINDING);
        protocolMarshaller.marshall(snsAction.getRoleArn(), ROLEARN_BINDING);
        protocolMarshaller.marshall(snsAction.getMessageFormat(), MESSAGEFORMAT_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def k_modes(X, n_clusters, max_iter, dissim, init, n_init, verbose, random_state, n_jobs):
    """k-modes algorithm"""
    random_state = check_random_state(random_state)
    if sparse.issparse(X):
        raise TypeError("k-modes does not support sparse data.")

    X = check_array(X, dtype=None)

    # Convert the categorical values in X to integers for speed.
    # Based on the unique values in X, we can make a mapping to achieve this.
    X, enc_map = encode_features(X)

    n_points, n_attrs = X.shape
    assert n_clusters <= n_points, "Cannot have more clusters ({}) " \
                                   "than data points ({}).".format(n_clusters, n_points)

    # Are there more n_clusters than unique rows? Then set the unique
    # rows as initial values and skip iteration.
    unique = get_unique_rows(X)
    n_unique = unique.shape[0]
    if n_unique <= n_clusters:
        max_iter = 0
        n_init = 1
        n_clusters = n_unique
        init = unique

    results = []
    seeds = random_state.randint(np.iinfo(np.int32).max, size=n_init)
    if n_jobs == 1:
        for init_no in range(n_init):
            results.append(k_modes_single(X, n_clusters, n_points, n_attrs,
                                          max_iter, dissim, init, init_no,
                                          verbose, seeds[init_no]))
    else:
        results = Parallel(n_jobs=n_jobs, verbose=0)(
            delayed(k_modes_single)(X, n_clusters, n_points, n_attrs,
                                    max_iter, dissim, init, init_no,
                                    verbose, seed)
            for init_no, seed in enumerate(seeds))
    all_centroids, all_labels, all_costs, all_n_iters = zip(*results)

    best = np.argmin(all_costs)
    if n_init > 1 and verbose:
        print("Best run was number {}".format(best + 1))

    return all_centroids[best], enc_map, all_labels[best], \
        all_costs[best], all_n_iters[best]
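For context, this function backs the scikit-learn-style estimator in the kmodes package; a hedged sketch of a typical end-user call, on invented toy categorical data:

import numpy as np
from kmodes.kmodes import KModes

X = np.array([['a', 'x'], ['a', 'y'], ['b', 'x'], ['b', 'y']])  # toy data
km = KModes(n_clusters=2, init='Huang', n_init=5, verbose=0)
labels = km.fit_predict(X)
print(labels, km.cost_)  # cluster assignment per row and final clustering cost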
python
def get_snapshot(nexus_url, repository, group_id, artifact_id, packaging, version,
                 snapshot_version=None, target_dir='/tmp', target_file=None,
                 classifier=None, username=None, password=None):
    '''
    Gets snapshot of the desired version of the artifact

    nexus_url
        URL of nexus instance
    repository
        Snapshot repository in nexus to retrieve artifact from, for example: libs-snapshots
    group_id
        Group Id of the artifact
    artifact_id
        Artifact Id of the artifact
    packaging
        Packaging type (jar,war,ear,etc)
    version
        Version of the artifact
    target_dir
        Target directory to download artifact to (default: /tmp)
    target_file
        Target file to download artifact to (by default it is target_dir/artifact_id-snapshot_version.packaging)
    classifier
        Artifact classifier name (ex: sources,javadoc,etc). Optional parameter.
    username
        nexus username. Optional parameter.
    password
        nexus password. Optional parameter.
    '''
    log.debug('======================== MODULE FUNCTION: nexus.get_snapshot(nexus_url=%s, repository=%s, group_id=%s, artifact_id=%s, packaging=%s, version=%s, target_dir=%s, classifier=%s)',
              nexus_url, repository, group_id, artifact_id, packaging, version, target_dir, classifier)
    headers = {}
    if username and password:
        headers['Authorization'] = 'Basic {0}'.format(base64.encodestring('{0}:{1}'.format(username, password)).replace('\n', ''))
    snapshot_url, file_name = _get_snapshot_url(nexus_url=nexus_url,
                                                repository=repository,
                                                group_id=group_id,
                                                artifact_id=artifact_id,
                                                version=version,
                                                packaging=packaging,
                                                snapshot_version=snapshot_version,
                                                classifier=classifier,
                                                headers=headers)
    target_file = __resolve_target_file(file_name, target_dir, target_file)

    return __save_artifact(snapshot_url, target_file, headers)
java
final synchronized public boolean[] readCoils(int serverAddress, int startAddress, int quantity)
        throws ModbusProtocolException, ModbusNumberException, ModbusIOException {
    ModbusRequest request = ModbusRequestBuilder.getInstance().buildReadCoils(serverAddress, startAddress, quantity);
    ReadCoilsResponse response = (ReadCoilsResponse) processRequest(request);
    return response.getCoils();
}
python
def new(self, tag_ident, tag_serial=0):
    # type: (int, int) -> None
    '''
    A method to create a new UDF Descriptor Tag.

    Parameters:
     tag_ident - The tag identifier number for this tag.
     tag_serial - The tag serial number for this tag.
    Returns:
     Nothing
    '''
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('UDF Tag already initialized')

    self.tag_ident = tag_ident
    self.desc_version = 2
    self.tag_serial_number = tag_serial
    self.tag_location = 0  # This will be set later.

    self._initialized = True
java
String getIndexRoots() {
    String roots = StringUtil.getList(getIndexRootsArray(), " ", "");
    StringBuffer s = new StringBuffer(roots);
    /*
    s.append(' ');
    s.append(identitySequence.peek());
    */
    return s.toString();
}
java
protected CmsContainerElementBean getParentElement(CmsContainerElementBean element) {
    if (m_elementInstances == null) {
        initPageData();
    }
    CmsContainerElementBean parent = null;
    CmsContainerBean cont = m_parentContainers.get(element.getInstanceId());
    if ((cont != null) && cont.isNestedContainer()) {
        parent = m_elementInstances.get(cont.getParentInstanceId());
    }
    return parent;
}
python
def determine_result(self, returncode, returnsignal, output, isTimeout):
    """
    Parse the output of the tool and extract the verification result.
    This method always needs to be overridden.
    If the tool gave a result, this method needs to return one of the
    benchexec.result.RESULT_* strings.
    Otherwise an arbitrary string can be returned that will be shown to the
    user and should give some indication of the failure reason
    (e.g., "CRASH", "OUT_OF_MEMORY", etc.).
    """
    for line in reversed(output):
        if line.startswith('ERROR:'):
            if 'timeout' in line.lower():
                return "TIMEOUT"
            else:
                return "ERROR ({0})".format(returncode)
        elif line.startswith('TBF') and 'FALSE' in line:
            return result.RESULT_FALSE_REACH
        elif line.startswith('TBF') and 'TRUE' in line:
            return result.RESULT_TRUE_PROP
        elif line.startswith('TBF') and 'DONE' in line:
            return result.RESULT_DONE
    return result.RESULT_UNKNOWN
python
def start_client(self, host, port=5001, protocol='TCP', timeout=5,
                 parallel=None, bandwidth=None):
    """iperf -D -c host -t 60 """
    cmd = ['iperf', '-c', host, '-p', str(port), '-t', str(timeout)]
    # '-u' switches iperf to UDP mode; the original tuple test
    # `not (protocol, 'UDP')` was always false, so the flag was never added
    if protocol.upper() == 'UDP':
        cmd.append('-u')
    if parallel:
        cmd.extend(['-P', str(parallel)])
    if bandwidth:
        cmd.extend(['-b', '%sM' % bandwidth])
    stdcode, stdout, stderr = utils.execute_wait(cmd)
    if (not stdcode) or (not stderr):
        out_dict = stdout.split('\n')
        if not out_dict[-1]:
            out_dict.pop()
        out_data = out_dict[-1].split()
        data = dict()
        data['Bandwidth'] = out_data[-2] + ' ' + out_data[-1]
        data['Transfer'] = out_data[-4] + ' ' + out_data[-3]
        data['Interval'] = out_data[-6]
        return data
    raise Exception('Start iperf failed, please check on the node.')
python
def _chart_support(self, name, data, caller, **kwargs):
    "template chart support function"
    id = 'chart-%s' % next(self.id)
    name = self._chart_class_name(name)
    options = dict(self.environment.options)
    options.update(name=name, id=id)

    # jinja2 prepends 'l_' or 'l_{{ n }}_' (ver >= 2.9) to keys;
    # compare versions numerically -- a plain string comparison would
    # wrongly classify e.g. '2.10' as older than '2.9'
    if tuple(int(p) for p in jinja2.__version__.split('.')[:2]) >= (2, 9):
        kwargs = dict((k[4:], v) for (k, v) in kwargs.items())
    else:
        kwargs = dict((k[2:], v) for (k, v) in kwargs.items())

    if self._library is None:
        self._library = self.load_library()
    id = kwargs.get('id', '')
    library = self._library.get(id, {})

    # apply options from a tag
    library.update(kwargs.get('library', {}))
    # apply options from chartkick.json
    kwargs.update(library=library)

    options.update(kwargs)
    return CHART_HTML.format(data=data, options=json.dumps(kwargs), **options)
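Version strings are easy to compare incorrectly; where the packaging library is available, a more robust variant of the check above would be:

import jinja2
from packaging.version import Version

if Version(jinja2.__version__) >= Version('2.9'):
    prefix_len = 4  # keys look like 'l_0_<name>'
else:
    prefix_len = 2  # keys look like 'l_<name>'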
python
def go_to(self, url_or_text):
    """Go to page *address*"""
    if is_text_string(url_or_text):
        url = QUrl(url_or_text)
    else:
        url = url_or_text
    self.webview.load(url)
python
def compose_matrix(scale=None, shear=None, angles=None, translate=None, perspective=None):
    """Return transformation matrix from sequence of transformations.

    This is the inverse of the decompose_matrix function.

    Sequence of transformations:
        scale : vector of 3 scaling factors
        shear : list of shear factors for x-y, x-z, y-z axes
        angles : list of Euler angles about static x, y, z axes
        translate : translation vector along x, y, z axes
        perspective : perspective partition of matrix

    >>> scale = np.random.random(3) - 0.5
    >>> shear = np.random.random(3) - 0.5
    >>> angles = (np.random.random(3) - 0.5) * (2*math.pi)
    >>> trans = np.random.random(3) - 0.5
    >>> persp = np.random.random(4) - 0.5
    >>> M0 = compose_matrix(scale, shear, angles, trans, persp)
    >>> result = decompose_matrix(M0)
    >>> M1 = compose_matrix(*result)
    >>> is_same_transform(M0, M1)
    True

    """
    M = np.identity(4)
    if perspective is not None:
        P = np.identity(4)
        P[3, :] = perspective[:4]
        M = np.dot(M, P)
    if translate is not None:
        T = np.identity(4)
        T[:3, 3] = translate[:3]
        M = np.dot(M, T)
    if angles is not None:
        R = euler_matrix(angles[0], angles[1], angles[2], 'sxyz')
        M = np.dot(M, R)
    if shear is not None:
        Z = np.identity(4)
        Z[1, 2] = shear[2]
        Z[0, 2] = shear[1]
        Z[0, 1] = shear[0]
        M = np.dot(M, Z)
    if scale is not None:
        S = np.identity(4)
        S[0, 0] = scale[0]
        S[1, 1] = scale[1]
        S[2, 2] = scale[2]
        M = np.dot(M, S)
    M /= M[3, 3]
    return M
java
void checkPermissions(MultiplePermissionsListener listener, Collection<String> permissions, Thread thread) {
    checkMultiplePermissions(listener, permissions, thread);
}
python
def copy(self):
    "Return a clone of this hash object."
    other = _ChainedHashAlgorithm(self._algorithms)
    other._hobj = deepcopy(self._hobj)
    other._fobj = deepcopy(self._fobj)
    return other
python
def loadNelderMeadData(name):
    '''
    Reads the progress of a parallel Nelder-Mead search from a text file,
    as created by saveNelderMeadData().

    Parameters
    ----------
    name : string
        Name of the txt file from which to read search progress.

    Returns
    -------
    simplex : np.array
        The current state of the simplex of parameter guesses.
    fvals : np.array
        The objective function value at each row of simplex.
    iters : int
        The number of completed Nelder-Mead iterations.
    evals : int
        The cumulative number of function evaluations in the search process.
    '''
    f = open(name + '.txt', 'rb')
    my_reader = csv.reader(f, delimiter=' ')
    my_shape_txt = next(my_reader)
    shape0 = int(my_shape_txt[0])
    shape1 = int(my_shape_txt[1])
    my_nums_txt = next(my_reader)
    iters = int(my_nums_txt[0])
    evals = int(my_nums_txt[1])
    simplex_flat = np.array(next(my_reader), dtype=float)
    simplex = np.reshape(simplex_flat, (shape0, shape1))
    fvals = np.array(next(my_reader), dtype=float)
    f.close()
    return simplex, fvals, iters, evals
java
public boolean verifyDrawable(Drawable who) {
    for (int i = 0; i < mHolders.size(); ++i) {
        if (who == get(i).getTopLevelDrawable()) {
            return true;
        }
    }
    return false;
}
java
private void onPublicKeysGroupAdded(int uid, ApiEncryptionKeyGroup keyGroup) {
    UserKeys userKeys = getCachedUserKeys(uid);
    if (userKeys == null) {
        return;
    }
    UserKeysGroup validatedKeysGroup = validateUserKeysGroup(uid, keyGroup);
    if (validatedKeysGroup != null) {
        UserKeys updatedUserKeys = userKeys.addUserKeyGroup(validatedKeysGroup);
        cacheUserKeys(updatedUserKeys);
        context().getEncryption().getEncryptedChatManager(uid)
                .send(new EncryptedPeerActor.KeyGroupUpdated(userKeys));
    }
}
python
def prev_window(self, widget, data=None):
    """
    Function returns to Main Window
    """
    self.path_window.hide()
    self.parent.open_window(widget, self.data)
python
def read_wv_master_file(wv_master_file, lines='brightest', debugplot=0):
    """read arc line wavelengths from external file.

    Parameters
    ----------
    wv_master_file : string
        File name of txt file containing the wavelength database.
    lines : string
        Indicates which lines to read. For files with a single column
        or two columns this parameter is irrelevant. For files with
        three columns, lines='brightest' indicates that only the
        brightest lines are read, whereas lines='all' means that all
        the lines are considered.
    debugplot : int
        Determines whether intermediate computations and/or plots
        are displayed. The valid codes are defined in
        numina.array.display.pause_debugplot.

    Returns
    -------
    wv_master : 1d numpy array
        Array with arc line wavelengths.
    """
    # protection
    if lines not in ['brightest', 'all']:
        raise ValueError('Unexpected lines=' + str(lines))

    # read table from txt file
    master_table = np.genfromtxt(wv_master_file)

    wv_master = read_wv_master_from_array(master_table, lines)

    if abs(debugplot) >= 10:
        print("Reading master table: " + wv_master_file)
        print("wv_master:\n", wv_master)

    return wv_master
java
@Override
public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
    WSubordinateControl subordinate = (WSubordinateControl) component;
    XmlStringBuilder xml = renderContext.getWriter();

    if (!subordinate.getRules().isEmpty()) {
        int seq = 0;
        for (Rule rule : subordinate.getRules()) {
            xml.appendTagOpen("ui:subordinate");
            xml.appendAttribute("id", subordinate.getId() + "-c" + seq++);
            xml.appendClose();
            paintRule(rule, xml);
            xml.appendEndTag("ui:subordinate");
        }
    }
}
java
public String getPoolName() {
    if (CmsStringUtil.isEmpty(m_poolName)) {
        // use default pool as pool name
        m_poolName = OpenCms.getSqlManager().getDefaultDbPoolName();
    }
    return m_poolName;
}
java
public EEnum getGCBIMGFORMAT() {
    if (gcbimgformatEEnum == null) {
        gcbimgformatEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(136);
    }
    return gcbimgformatEEnum;
}
java
@SuppressWarnings("unused") public void setMaxDate(Calendar calendar) { mDefaultLimiter.setMaxDate(calendar); if (mDayPickerView != null) { mDayPickerView.onChange(); } }
python
def json_pretty_print(s):
    '''pretty print JSON'''
    s = json.loads(s)
    return json.dumps(s, sort_keys=True, indent=4, separators=(',', ': '))
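A quick standard-library-only check of the output format this helper produces, on a made-up input string:

import json

s = '{"b": 1, "a": [1, 2]}'
print(json.dumps(json.loads(s), sort_keys=True, indent=4, separators=(',', ': ')))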
python
def resize(im, short, max_size):
    """
    only resize input image to target size and return scale
    :param im: BGR image input by opencv
    :param short: one dimensional size (the short side)
    :param max_size: one dimensional max size (the long side)
    :return: resized image (NDArray) and scale (float)
    """
    im_shape = im.shape
    im_size_min = np.min(im_shape[0:2])
    im_size_max = np.max(im_shape[0:2])
    im_scale = float(short) / float(im_size_min)
    # prevent bigger axis from being more than max_size:
    if np.round(im_scale * im_size_max) > max_size:
        im_scale = float(max_size) / float(im_size_max)
    im = cv2.resize(im, None, None, fx=im_scale, fy=im_scale, interpolation=cv2.INTER_LINEAR)
    return im, im_scale
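The scale logic alone can be checked without OpenCV; for a hypothetical 480x900 image with short=600 and max_size=1000, the long side hits the cap first:

im_shape = (480, 900)  # hypothetical H x W
short, max_size = 600, 1000
im_scale = float(short) / min(im_shape)  # 1.25 -> long side would become 1125
if round(im_scale * max(im_shape)) > max_size:
    im_scale = float(max_size) / max(im_shape)
print(im_scale)  # ~1.111: capped so the long side stays at 1000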
python
def confirm(self, msg, _timeout=-1):
    '''
    Send a confirm prompt to the GUI

    Arguments:
        msg (string): The message to display to the user.
        _timeout (int): The optional amount of time for which the prompt
            should be displayed to the user before a timeout occurs.
            Defaults to -1 which indicates there is no timeout limit.
    '''
    return self.msgBox('confirm', _timeout=_timeout, msg=msg)
python
def set_def_xml_acl(self, acl_str, key_name='', headers=None):
    """sets or changes a bucket's default object"""
    return self.set_xml_acl(acl_str, key_name, headers, query_args=DEF_OBJ_ACL)
python
def run_info(template):
    """ Print information about a specific template. """
    template.project_name = 'TowelStuff'  # fake project name, always the same
    name = template_name_from_class_name(template.__class__.__name__)
    term = TerminalView()
    term.print_info("Content of template {} with an example project "
                    "named 'TowelStuff':".format(term.text_in_color(name, TERM_GREEN)))
    dir_name = None
    for file_info in sorted(template.files(), key=lambda info: info[0]):
        directory = file_name = template_name = ''
        if file_info[0]:
            directory = file_info[0]
        if file_info[1]:
            file_name = file_info[1]
        if file_info[2]:
            template_name = '\t\t - ' + file_info[2]
        if directory != dir_name:
            term.print_info('\n\t' + term.text_in_color(directory + '/', TERM_PINK))
            dir_name = directory
        term.print_info('\t\t' + term.text_in_color(file_name, TERM_YELLOW) + template_name)

    # print substitutions
    try:
        # sorted() rather than list.sort(), so this also works on Python 3,
        # where dict.keys() returns a view without a sort() method
        subs = sorted(template.substitutes().keys())
        if len(subs) > 0:
            term.print_info("\nSubstitutions of this template are: ")
            max_len = 0
            for key in subs:
                if max_len < len(key):
                    max_len = len(key)
            for key in subs:
                term.print_info(u"\t{0:{1}} -> {2}".format(key, max_len,
                                                           template.substitutes()[key]))
    except AttributeError:
        pass
python
def build_index_from_labels(df, indices, remove_prefix=None, types=None, axis=1):
    """
    Build a MultiIndex from a list of labels and matching regex

    Supply with a dictionary of Hierarchy levels and matching regex to
    extract this level from the sample label

    :param df:
    :param indices: Tuples of indices ('label','regex') matches
    :param remove_prefix: Strip these strings from labels before matching (e.g. headers)
    :param types: Dictionary mapping level names to type conversions applied to matched values
    :param axis: Axis (1 = columns, 0 = rows)
    :return:
    """
    df = df.copy()

    if remove_prefix is None:
        remove_prefix = []

    if types is None:
        types = {}

    idx = [df.index, df.columns][axis]

    indexes = []

    for l in idx.get_level_values(0):
        for s in remove_prefix:
            l = l.replace(s + " ", '')

        ixr = []
        for n, m in indices:
            m = re.search(m, l)
            if m:
                r = m.group(1)

                if n in types:
                    # Map this value to a new type
                    r = types[n](r)
            else:
                r = None

            ixr.append(r)

        indexes.append(tuple(ixr))

    if axis == 0:
        df.index = pd.MultiIndex.from_tuples(indexes, names=[n for n, _ in indices])
    else:
        df.columns = pd.MultiIndex.from_tuples(indexes, names=[n for n, _ in indices])

    return df
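A toy run of the same idea: pulling hierarchy levels out of column labels with regex groups and building a pandas MultiIndex (the labels and patterns below are invented):

import re
import pandas as pd

labels = ['WT rep1', 'WT rep2', 'KO rep1']
indices = [('genotype', r'^(\w+)'), ('replicate', r'rep(\d+)')]
tuples = [tuple(re.search(rx, l).group(1) for _, rx in indices) for l in labels]
idx = pd.MultiIndex.from_tuples(tuples, names=[n for n, _ in indices])
print(idx)  # two levels: genotype in {WT, KO}, replicate in {1, 2}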
python
def partial_results(self):
    '''The results that the RPC has received *so far*

    This may also be the complete results if :attr:`complete` is ``True``.
    '''
    results = []
    for r in self._results:
        if isinstance(r, Exception):
            results.append(type(r)(*deepcopy(r.args)))
        elif hasattr(r, "__iter__") and not hasattr(r, "__len__"):
            # pass generators straight through
            results.append(r)
        else:
            results.append(deepcopy(r))
    return results
java
public static void createWordNetCaches(String componentKey, Properties properties) throws SMatchException {
    properties = getComponentProperties(makeComponentPrefix(componentKey, InMemoryWordNetBinaryArray.class.getSimpleName()), properties);
    if (properties.containsKey(JWNL_PROPERTIES_PATH_KEY)) {
        // initialize JWNL (this must be done before JWNL library can be used)
        try {
            final String configPath = properties.getProperty(JWNL_PROPERTIES_PATH_KEY);
            log.info("Initializing JWNL from " + configPath);
            JWNL.initialize(new FileInputStream(configPath));
        } catch (JWNLException e) {
            final String errMessage = e.getClass().getSimpleName() + ": " + e.getMessage();
            log.error(errMessage, e);
            throw new SMatchException(errMessage, e);
        } catch (FileNotFoundException e) {
            final String errMessage = e.getClass().getSimpleName() + ": " + e.getMessage();
            log.error(errMessage, e);
            throw new SMatchException(errMessage, e);
        }
    } else {
        final String errMessage = "Cannot find configuration key " + JWNL_PROPERTIES_PATH_KEY;
        log.error(errMessage);
        throw new SMatchException(errMessage);
    }

    log.info("Creating WordNet caches...");
    writeNominalizations(properties);
    writeSynonymsAdj(properties);
    writeOppAdverbs(properties);
    writeOppAdjectives(properties);
    writeOppNouns(properties);
    writeNounMG(properties);
    writeVerbMG(properties);
    log.info("Done");
}
java
protected void removeNormalization( TrifocalTensor solution ) {
    DMatrixRMaj N2_inv = N2.matrixInv();
    DMatrixRMaj N3_inv = N3.matrixInv();
    DMatrixRMaj N1 = this.N1.matrix();

    for( int i = 0; i < 3; i++ ) {
        DMatrixRMaj T = solution.getT(i);

        for( int j = 0; j < 3; j++ ) {
            for( int k = 0; k < 3; k++ ) {
                double sum = 0;

                for( int r = 0; r < 3; r++ ) {
                    double n1 = N1.get(r,i);
                    DMatrixRMaj TN = solutionN.getT(r);

                    for( int s = 0; s < 3; s++ ) {
                        double n2 = N2_inv.get(j,s);
                        for( int t = 0; t < 3; t++ ) {
                            sum += n1*n2*N3_inv.get(k,t)*TN.get(s,t);
                        }
                    }
                }

                T.set(j,k,sum);
            }
        }
    }
}
python
def pipe_substr(context=None, _INPUT=None, conf=None, **kwargs):
    """A string module that returns a substring. Loopable.

    Parameters
    ----------
    context : pipe2py.Context object
    _INPUT : iterable of items or strings
    conf : {
        'from': {'type': 'number', 'value': <starting position>},
        'length': {'type': 'number', 'value': <count of characters to return>}
    }

    Returns
    -------
    _OUTPUT : generator of substrings
    """
    conf['start'] = conf.pop('from', dict.get(conf, 'start'))
    splits = get_splits(_INPUT, conf, **cdicts(opts, kwargs))
    parsed = utils.dispatch(splits, *get_dispatch_funcs())
    _OUTPUT = starmap(parse_result, parsed)
    return _OUTPUT
python
def start_with(self, request):
    """Start the crawler using the given request.

    Args:
        request (:class:`nyawc.http.Request`): The startpoint for the crawler.
    """
    HTTPRequestHelper.patch_with_options(request, self.__options)
    self.queue.add_request(request)

    self.__crawler_start()
java
public synchronized void close() throws IOException {
    //
    // Kill running tasks. Do this in a 2nd vector, called 'tasksToClose',
    // because calling jobHasFinished() may result in an edit to 'tasks'.
    //
    TreeMap<TaskAttemptID, TaskInProgress> tasksToClose =
        new TreeMap<TaskAttemptID, TaskInProgress>();
    tasksToClose.putAll(tasks);
    for (TaskInProgress tip : tasksToClose.values()) {
        tip.jobHasFinished(false);
    }

    this.running = false;

    if (pulseChecker != null) {
        pulseChecker.shutdown();
    }

    if (versionBeanName != null) {
        MBeanUtil.unregisterMBean(versionBeanName);
    }

    // Clear local storage
    if (asyncDiskService != null) {
        // Clear local storage
        asyncDiskService.cleanupAllVolumes();

        // Shutdown all async deletion threads with up to 10 seconds of delay
        asyncDiskService.shutdown();
        try {
            if (!asyncDiskService.awaitTermination(10000)) {
                asyncDiskService.shutdownNow();
                asyncDiskService = null;
            }
        } catch (InterruptedException e) {
            asyncDiskService.shutdownNow();
            asyncDiskService = null;
        }
    }

    // Shutdown the fetcher thread
    if (this.mapEventsFetcher != null) {
        this.mapEventsFetcher.interrupt();
    }

    // Stop the launchers
    this.mapLauncher.interrupt();
    this.reduceLauncher.interrupt();

    if (this.heartbeatMonitor != null) {
        this.heartbeatMonitor.interrupt();
    }

    // Stop memory manager thread
    if (this.taskMemoryManager != null) {
        this.taskMemoryManager.shutdown();
    }

    // Stop cgroup memory watcher
    this.cgroupMemoryWatcher.shutdown();

    // All tasks are killed. So, they are removed from TaskLog monitoring also.
    // Interrupt the monitor.
    getTaskLogsMonitor().interrupt();

    jvmManager.stop();

    // shutdown RPC connections
    RPC.stopProxy(jobClient);

    // wait for the fetcher thread to exit
    for (boolean done = false; !done; ) {
        try {
            if (this.mapEventsFetcher != null) {
                this.mapEventsFetcher.join();
            }
            done = true;
        } catch (InterruptedException e) {
        }
    }

    if (taskReportServer != null) {
        taskReportServer.stop();
        taskReportServer = null;
    }

    if (healthChecker != null) {
        // stop node health checker service
        healthChecker.stop();
        healthChecker = null;
    }

    if (this.server != null) {
        try {
            LOG.info("Shutting down StatusHttpServer");
            this.server.stop();
            LOG.info("Shutting down Netty MapOutput Server");
            if (this.nettyMapOutputServer != null) {
                this.nettyMapOutputServer.stop();
            }
        } catch (Exception e) {
            LOG.warn("Exception shutting down TaskTracker", e);
        }
    }
}
java
void readInnerClasses(ClassSymbol c) {
    int n = nextChar();
    for (int i = 0; i < n; i++) {
        nextChar(); // skip inner class symbol
        ClassSymbol outer = readClassSymbol(nextChar());
        Name name = readName(nextChar());
        if (name == null)
            name = names.empty;
        long flags = adjustClassFlags(nextChar());
        if (outer != null) { // we have a member class
            if (name == names.empty)
                name = names.one;
            ClassSymbol member = enterClass(name, outer);
            if ((flags & STATIC) == 0) {
                ((ClassType)member.type).setEnclosingType(outer.type);
                if (member.erasure_field != null)
                    ((ClassType)member.erasure_field).setEnclosingType(types.erasure(outer.type));
            }
            if (c == outer) {
                member.flags_field = flags;
                enterMember(c, member);
            }
        }
    }
}
java
public synchronized void releasePreservingBuffers() {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "releasePreservingBuffers");

    released = true;
    valid = false;
    if (receivedData != null) {
        receivedData.release();
        receivedData = null;
    }
    // Simply null out the received buffer
    receivedBuffer = null;
    dataList.clear();

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(this, tc, "releasePreservingBuffers");
}
java
public static <V> CompletionHandler<V, Void> createAsyncHandler(final Consumer<V> success, final Consumer<Throwable> fail) {
    return new CompletionHandler<V, Void>() {
        @Override
        public void completed(V result, Void attachment) {
            if (success != null)
                success.accept(result);
        }

        @Override
        public void failed(Throwable exc, Void attachment) {
            if (fail != null)
                fail.accept(exc);
        }
    };
}
python
def _termination_callback(self, process_name, returncode):
    """
    Called when the process has stopped.

    :param process_name: name of the process that stopped
    :param returncode: Process returncode
    """
    self._terminate_process_iou()
    if returncode != 0:
        if returncode == -11:
            message = 'IOU VM "{}" process has stopped with return code: {} (segfault). This could be an issue with the IOU image, using a different image may fix this.\n{}'.format(self.name, returncode, self.read_iou_stdout())
        else:
            message = 'IOU VM "{}" process has stopped with return code: {}\n{}'.format(self.name, returncode, self.read_iou_stdout())
        log.warning(message)
        self.project.emit("log.error", {"message": message})
    if self._telnet_server:
        self._telnet_server.close()
        self._telnet_server = None
python
def create(container, portal_type, *args, **kwargs):
    """Creates an object in Bika LIMS

    This code uses most of the parts from the TypesTool
    see: `Products.CMFCore.TypesTool._constructInstance`

    :param container: container
    :type container: ATContentType/DexterityContentType/CatalogBrain
    :param portal_type: The portal type to create, e.g. "Client"
    :type portal_type: string
    :param title: The title for the new content object
    :type title: string
    :returns: The new created object
    """
    from bika.lims.utils import tmpID
    if kwargs.get("title") is None:
        kwargs["title"] = "New {}".format(portal_type)

    # generate a temporary ID
    tmp_id = tmpID()

    # get the fti
    types_tool = get_tool("portal_types")
    fti = types_tool.getTypeInfo(portal_type)

    if fti.product:
        obj = _createObjectByType(portal_type, container, tmp_id)
    else:
        # newstyle factory
        factory = getUtility(IFactory, fti.factory)
        obj = factory(tmp_id, *args, **kwargs)
        if hasattr(obj, '_setPortalTypeName'):
            obj._setPortalTypeName(fti.getId())
        notify(ObjectCreatedEvent(obj))
        # notifies ObjectWillBeAddedEvent, ObjectAddedEvent and
        # ContainerModifiedEvent
        container._setObject(tmp_id, obj)
        # we get the object here with the current object id, as it might be
        # renamed already by an event handler
        obj = container._getOb(obj.getId())

    # handle AT Content
    if is_at_content(obj):
        obj.processForm()

    # Edit after processForm; processForm does AT unmarkCreationFlag.
    obj.edit(**kwargs)

    # explicit notification
    modified(obj)
    return obj
java
public void setMaxPayloadSize(int max) {
    addField(ConfigureNodeFields.max_payload_size, FormField.Type.text_single);
    setAnswer(ConfigureNodeFields.max_payload_size.getFieldName(), max);
}
python
def destination_uri_file_counts(self):
    """Return file counts from job statistics, if present.

    See:
    https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.extract.destinationUriFileCounts

    Returns:
        a list of integer counts, each representing the number of files
        per destination URI or URI pattern specified in the extract
        configuration. These values will be in the same order as the URIs
        specified in the 'destinationUris' field. Returns None if job is
        not yet complete.
    """
    counts = self._job_statistics().get("destinationUriFileCounts")
    if counts is not None:
        return [int(count) for count in counts]
    return None
java
@Override
public LoginConfiguration getLoginConfig() {
    LoginConfiguration loginConfig = securityMetadata != null ? securityMetadata.getLoginConfiguration() : null;
    return loginConfig;
}
java
protected Validator createValidator(FaceletContext ctx) {
    if (this.validatorId == null) {
        throw new TagException(
                this.tag,
                "Default behavior invoked of requiring a validator-id passed in the "
                        + "constructor, must override ValidateHandler(ValidatorConfig)");
    }
    return ctx.getFacesContext().getApplication().createValidator(this.validatorId);
}
java
public static TaxNumberMapSharedConstants create() {
    if (taxNumberMapConstants == null) { // NOPMD it's thread safe!
        synchronized (TaxNumberMapConstantsImpl.class) {
            if (taxNumberMapConstants == null) {
                taxNumberMapConstants = new TaxNumberMapConstantsImpl(
                        readMapFromProperties("TaxNumberMapConstants", "taxNumbers"));
            }
        }
    }
    return taxNumberMapConstants;
}
python
def plot_rh(self, rh, plot_range=None):
    """
    Required input:
        RH: Relative humidity (%)
    Optional Input:
        plot_range: Data range for making figure (list of (min,max,step))
    """
    # PLOT RELATIVE HUMIDITY
    if not plot_range:
        plot_range = [0, 100, 4]
    self.ax3 = fig.add_subplot(4, 1, 3, sharex=self.ax1)
    self.ax3.plot(self.dates, rh, 'g-', label='Relative Humidity')
    self.ax3.legend(loc='upper center', bbox_to_anchor=(0.5, 1.22), prop={'size': 12})
    self.ax3.grid(b=True, which='major', axis='y', color='k', linestyle='--', linewidth=0.5)
    self.ax3.set_ylim(plot_range[0], plot_range[1], plot_range[2])
    self.ax3.fill_between(self.dates, rh, self.ax3.get_ylim()[0], color='g')
    self.ax3.set_ylabel('Relative Humidity\n(%)', multialignment='center')
    self.ax3.xaxis.set_major_formatter(mpl.dates.DateFormatter('%d/%H UTC'))
    axtwin = self.ax3.twinx()
    axtwin.set_ylim(plot_range[0], plot_range[1], plot_range[2])
java
public String updateIndiceMapping(String action, String indexMapping) throws ElasticSearchException {
    try {
        return this.client.executeHttp(action, indexMapping, ClientUtil.HTTP_POST);
    } catch (ElasticSearchException e) {
        return ResultUtil.hand404HttpRuntimeException(e, String.class, ResultUtil.OPERTYPE_updateIndiceMapping);
    }
}
python
def remove_son(self, son):
    """ Remove the son node. Do nothing if the node is not a son

    Args:
        son: the son node to remove
    """
    self._sons = [x for x in self._sons if x.node_id != son.node_id]
java
public void delete(long oid) throws SQLException {
    FastpathArg[] args = new FastpathArg[1];
    args[0] = Fastpath.createOIDArg(oid);
    fp.fastpath("lo_unlink", args);
}
java
public Duration newDurationDayTime(
        final boolean isPositive,
        final int day,
        final int hour,
        final int minute,
        final int second) {
    return newDuration(isPositive,
            DatatypeConstants.FIELD_UNDEFINED,
            DatatypeConstants.FIELD_UNDEFINED,
            day, hour, minute, second);
}
java
public void setPlacementGroups(java.util.Collection<PlacementGroup> placementGroups) {
    if (placementGroups == null) {
        this.placementGroups = null;
        return;
    }

    this.placementGroups = new com.amazonaws.internal.SdkInternalList<PlacementGroup>(placementGroups);
}
python
def differences(self, n):
    """
    Returns a TimeSeriesRDD where each time series is differenced with the
    given order. The new RDD will be missing the first n date-times.

    Parameters
    ----------
    n : int
        The order of differencing to perform.
    """
    return TimeSeriesRDD(None, None, self._jtsrdd.differences(n), self.ctx)
java
public void initServer() {
    if (m_initialized) {
        return;
    }
    try {
        m_registry = LocateRegistry.createRegistry(m_port);
        m_provider = new CmsRemoteShellProvider(m_port);
        I_CmsRemoteShellProvider providerStub = (I_CmsRemoteShellProvider)(UnicastRemoteObject.exportObject(
            m_provider,
            m_port));
        m_registry.bind(CmsRemoteShellConstants.PROVIDER, providerStub);
        m_initialized = true;
    } catch (Exception e) {
        LOG.error(e.getLocalizedMessage(), e);
    }
}
java
@Override
public CommerceTierPriceEntry removeByC_ERC(long companyId, String externalReferenceCode)
        throws NoSuchTierPriceEntryException {
    CommerceTierPriceEntry commerceTierPriceEntry = findByC_ERC(companyId, externalReferenceCode);

    return remove(commerceTierPriceEntry);
}
java
@Override
public void encodeTo(FLEncoder enc) {
    final Encoder encoder = new Encoder(enc);
    internalArray.encodeTo(encoder);
    encoder.release();
}
java
public void createPool(String poolId, String virtualMachineSize, CloudServiceConfiguration cloudServiceConfiguration, int targetDedicatedNodes)
        throws BatchErrorException, IOException {
    createPool(poolId, virtualMachineSize, cloudServiceConfiguration, targetDedicatedNodes, 0, null);
}
python
def unmapped(sam, mates):
    """
    get unmapped reads
    """
    for read in sam:
        if read.startswith('@'):
            continue
        read = read.strip().split()
        if read[2] == '*' and read[6] == '*':
            yield read
        elif mates is True:
            if read[2] == '*' or read[6] == '*':
                yield read
        for i in read:
            if i == 'YT:Z:UP':
                yield read
python
async def get_response(self, message=None, *, timeout=None):
    """
    Returns a coroutine that will resolve once a response arrives.

    Args:
        message (`Message <telethon.tl.custom.message.Message>` | `int`, optional):
            The message (or the message ID) for which a response
            is expected.

            By default this is the last sent message.

        timeout (`int` | `float`, optional):
            If present, this `timeout` (in seconds) will override the
            per-action timeout defined for the conversation.
    """
    return await self._get_message(
        message, self._response_indices, self._pending_responses,
        timeout, lambda x, y: True)
python
def x(self, d):
    """
    Allows to configure the X of the grid with one method call.
    Keys for the dictionary: property, min, max, step, base, expression
    Types: property=str, min=float, max=float, step=float, base=float, expression=str

    :param d: the dictionary with the parameters
    :type d: dict
    """
    if "property" in d:
        javabridge.call(self.jobject, "setXProperty", "(Ljava/lang/String;)V", d["property"])
    if "min" in d:
        javabridge.call(self.jobject, "setXMin", "(D)V", d["min"])
    if "max" in d:
        javabridge.call(self.jobject, "setXMax", "(D)V", d["max"])
    if "step" in d:
        javabridge.call(self.jobject, "setXStep", "(D)V", d["step"])
    if "base" in d:
        javabridge.call(self.jobject, "setXBase", "(D)V", d["base"])
    if "expression" in d:
        javabridge.call(self.jobject, "setXExpression", "(Ljava/lang/String;)V", d["expression"])
java
private CompletableFuture<SegmentProperties> mergeInStorage(SegmentMetadata transactionMetadata, MergeSegmentOperation mergeOp, TimeoutTimer timer) {
    return this.storage
            .getStreamSegmentInfo(transactionMetadata.getName(), timer.getRemaining())
            .thenAcceptAsync(transProperties -> {
                // One last verification before the actual merger:
                // Check that the Storage agrees with our metadata (if not, we have a problem ...)
                if (transProperties.getLength() != transactionMetadata.getStorageLength()) {
                    throw new CompletionException(new DataCorruptionException(String.format(
                            "Transaction Segment '%s' cannot be merged into parent '%s' because its metadata disagrees with the Storage. Metadata.StorageLength=%d, Storage.StorageLength=%d",
                            transactionMetadata.getName(),
                            this.metadata.getName(),
                            transactionMetadata.getStorageLength(),
                            transProperties.getLength())));
                }

                if (transProperties.getLength() != mergeOp.getLength()) {
                    throw new CompletionException(new DataCorruptionException(String.format(
                            "Transaction Segment '%s' cannot be merged into parent '%s' because the declared length in the operation disagrees with the Storage. Operation.Length=%d, Storage.StorageLength=%d",
                            transactionMetadata.getName(),
                            this.metadata.getName(),
                            mergeOp.getLength(),
                            transProperties.getLength())));
                }
            }, this.executor)
            .thenComposeAsync(v -> createSegmentIfNecessary(
                    () -> storage.concat(this.handle.get(), mergeOp.getStreamSegmentOffset(),
                            transactionMetadata.getName(), timer.getRemaining()),
                    timer.getRemaining()), this.executor)
            .exceptionally(ex -> {
                ex = Exceptions.unwrap(ex);
                if (transactionMetadata.getLength() == 0
                        && ex instanceof StreamSegmentNotExistsException
                        && ((StreamSegmentNotExistsException) ex).getStreamSegmentName().equals(transactionMetadata.getName())) {
                    log.warn("{}: Not applying '{}' because source segment is missing (storage) and had no data.",
                            this.traceObjectId, mergeOp);
                    return null;
                } else {
                    throw new CompletionException(ex);
                }
            })
            .thenComposeAsync(v -> storage.getStreamSegmentInfo(this.metadata.getName(), timer.getRemaining()), this.executor);
}
python
def knot_insertion(degree, knotvector, ctrlpts, u, **kwargs):
    """ Computes the control points of the rational/non-rational spline after knot insertion.

    Part of Algorithm A5.1 of The NURBS Book by Piegl & Tiller, 2nd Edition.

    Keyword Arguments:
        * ``num``: number of knot insertions. *Default: 1*
        * ``s``: multiplicity of the knot. *Default: computed via :func:`.find_multiplicity`*
        * ``span``: knot span. *Default: computed via :func:`.find_span_linear`*

    :param degree: degree
    :type degree: int
    :param knotvector: knot vector
    :type knotvector: list, tuple
    :param ctrlpts: control points
    :type ctrlpts: list
    :param u: knot to be inserted
    :type u: float
    :return: updated control points
    :rtype: list
    """
    # Get keyword arguments
    num = kwargs.get('num', 1)  # number of knot insertions
    s = kwargs.get('s', find_multiplicity(u, knotvector))  # multiplicity
    k = kwargs.get('span', find_span_linear(degree, knotvector, len(ctrlpts), u))  # knot span

    # Initialize variables
    np = len(ctrlpts)
    nq = np + num

    # Initialize new control points array (control points may be weighted or not)
    ctrlpts_new = [[] for _ in range(nq)]

    # Initialize a local array of length p + 1
    temp = [[] for _ in range(degree + 1)]

    # Save unaltered control points
    for i in range(0, k - degree + 1):
        ctrlpts_new[i] = ctrlpts[i]
    for i in range(k - s, np):
        ctrlpts_new[i + num] = ctrlpts[i]

    # Start filling the temporary local array which will be used to update control points during knot insertion
    for i in range(0, degree - s + 1):
        temp[i] = deepcopy(ctrlpts[k - degree + i])

    # Insert knot "num" times
    for j in range(1, num + 1):
        L = k - degree + j
        for i in range(0, degree - j - s + 1):
            alpha = knot_insertion_alpha(u, tuple(knotvector), k, i, L)
            if isinstance(temp[i][0], float):
                temp[i][:] = [alpha * elem2 + (1.0 - alpha) * elem1
                              for elem1, elem2 in zip(temp[i], temp[i + 1])]
            else:
                for idx in range(len(temp[i])):
                    temp[i][idx][:] = [alpha * elem2 + (1.0 - alpha) * elem1
                                       for elem1, elem2 in zip(temp[i][idx], temp[i + 1][idx])]
        ctrlpts_new[L] = deepcopy(temp[0])
        ctrlpts_new[k + num - j - s] = deepcopy(temp[degree - j - s])

    # Load remaining control points
    L = k - degree + num
    for i in range(L + 1, k - s):
        ctrlpts_new[i] = deepcopy(temp[i - L])

    # Return control points after knot insertion
    return ctrlpts_new
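The core update in the insertion loop is the convex combination weighted by knot_insertion_alpha; a hand-worked instance of that blending step, with an arbitrarily chosen alpha rather than one computed from a knot vector:

alpha = 0.4  # hypothetical blending factor
p_a, p_b = [0.0, 0.0], [2.0, 2.0]  # two neighbouring control points
new_pt = [alpha * b + (1.0 - alpha) * a for a, b in zip(p_a, p_b)]
print(new_pt)  # [0.8, 0.8] -- the point slides 40% of the way from p_a to p_b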
python
async def disconnect(self):
    """
    Disconnect from target.
    """
    if not self.connected:
        return

    self.writer.close()
    self.reader = None
    self.writer = None
java
@Override
public BSONObject create(boolean array, List<String> pathParts) {
    if (rootClass == null) {
        return array ? new BasicDBList() : new BasicDBObject();
    }

    if (pathParts == null) {
        try {
            return (DBObject) rootClass.newInstance();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    String path = buildPath(pathParts);
    Class returnClass;

    if ((returnClass = getClassFromCache(path)) == null) {
        returnClass = deriveClass(path, pathParts.get(pathParts.size() - 1), array);
    }

    // At this point, we know what class to construct and the class cache is properly set
    if (DBObject.class.isAssignableFrom(returnClass)) {
        try {
            return (DBObject) returnClass.newInstance();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    } else if (Map.class.isAssignableFrom(returnClass)) {
        if (Modifier.isAbstract(returnClass.getModifiers()) || Modifier.isInterface(returnClass.getModifiers())) {
            return new DirtyableDBObjectMap();
        } else {
            try {
                return new DirtyableDBObjectMap((Map) returnClass.newInstance());
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    } else if (List.class.isAssignableFrom(returnClass)) {
        if (Modifier.isAbstract(returnClass.getModifiers()) || Modifier.isInterface(returnClass.getModifiers())) {
            return new DirtyableDBObjectList();
        } else {
            try {
                return new DirtyableDBObjectList((List) returnClass.newInstance(), false);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    } else if (Set.class.isAssignableFrom(returnClass)) {
        DirtyableDBObjectSet dirtyableDBObjectSet;
        if (Modifier.isAbstract(returnClass.getModifiers()) || Modifier.isInterface(returnClass.getModifiers())) {
            dirtyableDBObjectSet = new DirtyableDBObjectSet();
        } else {
            try {
                dirtyableDBObjectSet = new DirtyableDBObjectSet((Set) returnClass.newInstance(), false);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        // The MongoDB Java Driver adds objects to the container before populating them. To maintain
        // set semantics (which may require using the objects' values), we run a decoding hook to
        // properly configure the set.
        BSON.addDecodingHook(DirtyableDBObjectSet.class, dirtyableDBObjectSet.getDecodingTransformer());

        return dirtyableDBObjectSet;
    } else {
        return new BasicDBObject();
    }
}
python
async def stop(self):
    """
    Irreversibly stop the receiver.
    """
    if self.__started:
        self.__transport._unregister_rtp_receiver(self)
        self.__stop_decoder()
        self.__rtcp_task.cancel()
        await self.__rtcp_exited.wait()
python
def subscribe(self, topic, callback, qos):
    """Subscribe to an MQTT topic."""
    if topic in self.topics:
        return

    def _message_callback(mqttc, userdata, msg):
        """Callback added to callback list for received message."""
        callback(msg.topic, msg.payload.decode('utf-8'), msg.qos)

    self._mqttc.subscribe(topic, qos)
    self._mqttc.message_callback_add(topic, _message_callback)
    self.topics[topic] = callback
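The _mqttc attribute suggests a paho-mqtt client underneath; a hedged sketch of the same subscribe/callback pairing directly against paho-mqtt 1.x (broker and topic are placeholders):

import paho.mqtt.client as mqtt

def _message_callback(mqttc, userdata, msg):
    print(msg.topic, msg.payload.decode('utf-8'), msg.qos)

client = mqtt.Client()
client.connect('test.mosquitto.org')  # placeholder public broker
client.subscribe('home/sensors/temp', 1)
client.message_callback_add('home/sensors/temp', _message_callback)
client.loop_start()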
python
def _log_control(self, s):
    """Write control characters to the appropriate log files"""
    if self.encoding is not None:
        s = s.decode(self.encoding, 'replace')
    self._log(s, 'send')
python
def synteny_scan(points, xdist, ydist, N):
    """
    This is the core single linkage algorithm which behaves in O(n):
    iterate through the pairs, foreach pair we look back on the
    adjacent pairs to find links
    """
    clusters = Grouper()
    n = len(points)
    points.sort()
    for i in range(n):
        for j in range(i - 1, -1, -1):
            # x-axis distance
            del_x = points[i][0] - points[j][0]
            if del_x > xdist:
                break
            # y-axis distance
            del_y = points[i][1] - points[j][1]
            if abs(del_y) > ydist:
                continue
            # otherwise join
            clusters.join(points[i], points[j])

    # select clusters that are at least >= N
    clusters = [sorted(cluster) for cluster in list(clusters)
                if _score(cluster) >= N]
    return clusters
java
public static InetAddress findLocalAddressViaNetworkInterface() {
    Enumeration<NetworkInterface> networkInterfaces;
    try {
        networkInterfaces = NetworkInterface.getNetworkInterfaces();
    } catch (SocketException e) {
        return null;
    }

    while (networkInterfaces.hasMoreElements()) {
        NetworkInterface nif = networkInterfaces.nextElement();
        for (Enumeration<InetAddress> addrEnum = nif.getInetAddresses(); addrEnum.hasMoreElements(); ) {
            InetAddress interfaceAddress = addrEnum.nextElement();
            if (useInetAddress(nif, interfaceAddress)) {
                return interfaceAddress;
            }
        }
    }
    return null;
}
python
def open_session(self):
    """
    Open a new session to modify this server.

    You can either call this fnc directly, or turn on autosession which will
    open/commit sessions for you transparently.
    """
    if self.session is not None:
        msg = "session already open; commit it or rollback before opening another one in %s" % self
        logger.error(msg)
        raise RuntimeError(msg)

    logger.info("opening a new session")
    logger.info("removing %s" % self.loc_session)
    try:
        shutil.rmtree(self.loc_session)
    except:
        logger.info("failed to delete %s" % self.loc_session)
    logger.info("cloning server from %s to %s" % (self.loc_stable, self.loc_session))
    shutil.copytree(self.loc_stable, self.loc_session)
    self.session = SimServer(self.loc_session, use_locks=self.use_locks)
    self.lock_update.acquire()
java
public void marshall(SignalExternalWorkflowExecutionFailedEventAttributes signalExternalWorkflowExecutionFailedEventAttributes, ProtocolMarshaller protocolMarshaller) {
    if (signalExternalWorkflowExecutionFailedEventAttributes == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        protocolMarshaller.marshall(signalExternalWorkflowExecutionFailedEventAttributes.getWorkflowId(), WORKFLOWID_BINDING);
        protocolMarshaller.marshall(signalExternalWorkflowExecutionFailedEventAttributes.getRunId(), RUNID_BINDING);
        protocolMarshaller.marshall(signalExternalWorkflowExecutionFailedEventAttributes.getCause(), CAUSE_BINDING);
        protocolMarshaller.marshall(signalExternalWorkflowExecutionFailedEventAttributes.getInitiatedEventId(), INITIATEDEVENTID_BINDING);
        protocolMarshaller.marshall(signalExternalWorkflowExecutionFailedEventAttributes.getDecisionTaskCompletedEventId(), DECISIONTASKCOMPLETEDEVENTID_BINDING);
        protocolMarshaller.marshall(signalExternalWorkflowExecutionFailedEventAttributes.getControl(), CONTROL_BINDING);
    } catch (Exception e) {
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
private void setContentTypeHeader(final HttpResponse response, final URLConnection connection) {
    response.headers().set(CONTENT_TYPE, mimeTypesMap.getContentType(connection.getURL().getPath()));
}