Dataset columns: language — string (2 classes: python, java); func_code_string — string (lengths 63 to 466k characters).
python
def set_style(network_id, ndex_cred=None, template_id=None):
    """Set the style of the network to a given template network's style

    Parameters
    ----------
    network_id : str
        The UUID of the NDEx network whose style is to be changed.
    ndex_cred : dict
        A dictionary of NDEx credentials.
    template_id : Optional[str]
        The UUID of the NDEx network whose style is used on the
        network specified in the first argument.
    """
    if not template_id:
        template_id = "ea4ea3b7-6903-11e7-961c-0ac135e8bacf"
    server = 'http://public.ndexbio.org'
    username, password = get_default_ndex_cred(ndex_cred)
    source_network = ndex2.create_nice_cx_from_server(username=username,
                                                      password=password,
                                                      uuid=network_id,
                                                      server=server)
    source_network.apply_template(server, template_id)
    source_network.update_to(network_id, server=server,
                             username=username, password=password)
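A minimal usage sketch for the function above. The network UUID and the credential dictionary keys ('user', 'password') are hypothetical placeholders, since the exact shape expected by get_default_ndex_cred() is not shown here.

python
# Hypothetical values; the credential key names are an assumption.
ndex_cred = {'user': 'alice', 'password': 'secret'}
set_style('11111111-2222-3333-4444-555555555555', ndex_cred=ndex_cred)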
python
def format_metadata_to_key(key_metadata):
  """
  <Purpose>
    Construct a key dictionary (e.g., securesystemslib.formats.RSAKEY_SCHEMA)
    according to the keytype of 'key_metadata'.  The dict returned by this
    function has the exact format as the dict returned by one of the key
    generation functions, like generate_ed25519_key().  The dict returned
    has the form:

    {'keytype': keytype,
     'scheme': scheme,
     'keyid': 'f30a0870d026980100c0573bd557394f8c1bbd6...',
     'keyval': {'public': '...',
                'private': '...'}}

    For example, RSA key dictionaries in RSAKEY_SCHEMA format should be used
    by modules storing a collection of keys, such as with keydb.py.  RSA keys
    as stored in metadata files use a different format, so this function
    should be called if an RSA key is extracted from one of these metadata
    files and needs converting.  The key generation functions create an
    entirely new key and return it in the format appropriate for 'keydb.py'.

    >>> ed25519_key = generate_ed25519_key()
    >>> key_val = ed25519_key['keyval']
    >>> keytype = ed25519_key['keytype']
    >>> scheme = ed25519_key['scheme']
    >>> ed25519_metadata = \
    format_keyval_to_metadata(keytype, scheme, key_val, private=True)
    >>> ed25519_key_2, junk = format_metadata_to_key(ed25519_metadata)
    >>> securesystemslib.formats.ED25519KEY_SCHEMA.matches(ed25519_key_2)
    True
    >>> ed25519_key == ed25519_key_2
    True

  <Arguments>
    key_metadata:
      The key dictionary as stored in Metadata files, conforming to
      'securesystemslib.formats.KEY_SCHEMA'.  It has the form:

      {'keytype': '...',
       'scheme': scheme,
       'keyval': {'public': '...',
                  'private': '...'}}

  <Exceptions>
    securesystemslib.exceptions.FormatError, if 'key_metadata' does not
    conform to 'securesystemslib.formats.KEY_SCHEMA'.

  <Side Effects>
    None.

  <Returns>
    In the case of an RSA key, a dictionary conformant to
    'securesystemslib.formats.RSAKEY_SCHEMA'.
  """

  # Does 'key_metadata' have the correct format?  This check will ensure
  # 'key_metadata' has the appropriate number of objects and object types,
  # and that all dict keys are properly named.  Raise
  # 'securesystemslib.exceptions.FormatError' if the check fails.
  securesystemslib.formats.KEY_SCHEMA.check_match(key_metadata)

  # Construct the dictionary to be returned.
  key_dict = {}
  keytype = key_metadata['keytype']
  scheme = key_metadata['scheme']
  key_value = key_metadata['keyval']

  # Convert 'key_value' to 'securesystemslib.formats.KEY_SCHEMA' and generate
  # its hash.  The hash is in hexdigest form.
  default_keyid = _get_keyid(keytype, scheme, key_value)
  keyids = set()
  keyids.add(default_keyid)

  for hash_algorithm in securesystemslib.settings.HASH_ALGORITHMS:
    keyid = _get_keyid(keytype, scheme, key_value, hash_algorithm)
    keyids.add(keyid)

  # All the required key values gathered.  Build 'key_dict'.
  # 'keyid_hash_algorithms'
  key_dict['keytype'] = keytype
  key_dict['scheme'] = scheme
  key_dict['keyid'] = default_keyid
  key_dict['keyid_hash_algorithms'] = securesystemslib.settings.HASH_ALGORITHMS
  key_dict['keyval'] = key_value

  return key_dict, keyids
java
private MediaType findContentType(final String url) {
    if (url == null) {
        return null;
    }
    if (url.startsWith("file")) {
        return APPLICATION_OCTET_STREAM_TYPE;
    } else if (url.startsWith("http")) {
        try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
            final HttpHead httpHead = new HttpHead(url);
            try (CloseableHttpResponse response = httpClient.execute(httpHead)) {
                if (response.getStatusLine().getStatusCode() == SC_OK) {
                    final Header contentType = response.getFirstHeader(CONTENT_TYPE);
                    if (contentType != null) {
                        return MediaType.valueOf(contentType.getValue());
                    }
                }
            }
        } catch (final IOException e) {
            LOGGER.warn("Unable to retrieve external content from {} due to {}", url, e.getMessage());
        } catch (final Exception e) {
            throw new RepositoryRuntimeException(e);
        }
    }
    LOGGER.debug("Defaulting to octet stream for media type");
    return APPLICATION_OCTET_STREAM_TYPE;
}
java
@Override
public DescriptorValue calculate(IAtomContainer container) {
    IAtomContainer local = AtomContainerManipulator.removeHydrogens(container);
    int tradius = PathTools.getMolecularGraphRadius(local);
    int tdiameter = PathTools.getMolecularGraphDiameter(local);

    DoubleArrayResult retval = new DoubleArrayResult();
    retval.add((double) (tdiameter - tradius) / (double) tradius);

    // get the 3D distance matrix
    if (GeometryUtil.has3DCoordinates(container)) {
        int natom = container.getAtomCount();
        double[][] distanceMatrix = new double[natom][natom];
        for (int i = 0; i < natom; i++) {
            for (int j = 0; j < natom; j++) {
                if (i == j) {
                    distanceMatrix[i][j] = 0.0;
                    continue;
                }
                Point3d a = container.getAtom(i).getPoint3d();
                Point3d b = container.getAtom(j).getPoint3d();
                distanceMatrix[i][j] = Math.sqrt((a.x - b.x) * (a.x - b.x)
                        + (a.y - b.y) * (a.y - b.y)
                        + (a.z - b.z) * (a.z - b.z));
            }
        }
        double gradius = 999999;
        double gdiameter = -999999;
        double[] geta = new double[natom];
        for (int i = 0; i < natom; i++) {
            double max = -99999;
            for (int j = 0; j < natom; j++) {
                if (distanceMatrix[i][j] > max) max = distanceMatrix[i][j];
            }
            geta[i] = max;
        }
        for (int i = 0; i < natom; i++) {
            if (geta[i] < gradius) gradius = geta[i];
            if (geta[i] > gdiameter) gdiameter = geta[i];
        }
        retval.add((gdiameter - gradius) / gradius);
    } else {
        retval.add(Double.NaN);
    }
    return new DescriptorValue(getSpecification(), getParameterNames(), getParameters(), retval,
            getDescriptorNames());
}
java
public static double ioa(double[] prediction, double[] validation, double pow) {
    double ioa;
    int td_size = prediction.length;
    int vd_size = validation.length;
    if (td_size != vd_size) {
        throw new IllegalArgumentException("Data sets in ioa do not match!");
    }
    int steps = td_size;
    double sum_td = 0;
    double sum_vd = 0;

    /* summing up both data sets */
    for (int i = 0; i < steps; i++) {
        sum_td = sum_td + prediction[i];
        sum_vd = sum_vd + validation[i];
    }

    /* calculating mean values for both data sets */
    double mean_td = sum_td / steps;
    double mean_vd = sum_vd / steps;

    /* summing the pow-th power of absolute deviations */
    double td_vd = 0;
    double vd_mean = 0;
    for (int i = 0; i < steps; i++) {
        td_vd = td_vd + Math.pow(Math.abs(validation[i] - prediction[i]), pow);
        vd_mean = vd_mean + Math.pow(Math.abs(validation[i] - mean_vd), pow);
    }

    /* calculating the powered sum of absolute deviations from the validation mean */
    double abs_sqDevi = 0;
    for (int i = 0; i < steps; i++) {
        abs_sqDevi = abs_sqDevi + Math.pow(Math.abs(prediction[i] - mean_vd)
                + Math.abs(validation[i] - mean_vd), pow);
    }

    /* calculating ioa */
    ioa = 1.0 - (td_vd / abs_sqDevi);
    return ioa;
}
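For clarity, here is a minimal NumPy sketch of the same index-of-agreement formula the Java method computes (numerator: powered absolute prediction errors; denominator: powered potential error around the validation mean). It is a re-derivation for illustration, not part of the original library.

python
import numpy as np

def ioa_sketch(prediction, validation, j=2.0):
    # index of agreement: 1 - sum|O-P|^j / sum(|P-mean(O)| + |O-mean(O)|)^j
    p = np.asarray(prediction, dtype=float)
    o = np.asarray(validation, dtype=float)
    if p.shape != o.shape:
        raise ValueError("Data sets do not match")
    num = np.sum(np.abs(o - p) ** j)
    den = np.sum((np.abs(p - o.mean()) + np.abs(o - o.mean())) ** j)
    return 1.0 - num / den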
java
public void delete(Vertex vtx) {
    if (vtx.prev == null) {
        head = vtx.next;
    } else {
        vtx.prev.next = vtx.next;
    }
    if (vtx.next == null) {
        tail = vtx.prev;
    } else {
        vtx.next.prev = vtx.prev;
    }
}
python
def deleteoutputfile(project, filename, credentials=None):
    """Delete an output file"""
    user, oauth_access_token = parsecredentials(credentials)  # pylint: disable=unused-variable
    if filename:
        filename = filename.replace("..", "")  # Simple security
    if not filename or len(filename) == 0:
        # Deleting all output files and resetting
        Project.reset(project, user)
        msg = "Deleted"
        return withheaders(flask.make_response(msg), 'text/plain',
                           {'Content-Length': len(msg),
                            'allow_origin': settings.ALLOW_ORIGIN})  # 200
    elif os.path.isdir(Project.path(project, user) + filename):
        # Deleting specified directory
        shutil.rmtree(Project.path(project, user) + filename)
        msg = "Deleted"
        return withheaders(flask.make_response(msg), 'text/plain',
                           {'Content-Length': len(msg),
                            'allow_origin': settings.ALLOW_ORIGIN})  # 200
    else:
        try:
            file = clam.common.data.CLAMOutputFile(Project.path(project, user), filename)
        except Exception:
            raise flask.abort(404)
        success = file.delete()
        if not success:
            raise flask.abort(404)
        else:
            msg = "Deleted"
            return withheaders(flask.make_response(msg), 'text/plain',
                               {'Content-Length': len(msg),
                                'allow_origin': settings.ALLOW_ORIGIN})
python
def short_description(self):
    """
    Ensure that the admin ``list_display`` renders the correct verbose name for translated fields.

    The :func:`~django.contrib.admin.utils.label_for_field` function uses
    :func:`~django.db.models.Options.get_field_by_name` to find the field
    and its ``verbose_name``. However, for translated fields, this option
    does not exist, hence it falls back to reading the attribute and
    trying ``short_description``. Ideally, translated fields should also
    appear in this list, to be treated like regular fields.
    """
    translations_model = self.field.meta.model
    if translations_model is None:
        # This only happens with abstract models. The code is accessing the
        # descriptor at the base model directly, not the upgraded descriptor
        # version that contribute_translations() installed.
        # Fall back to what the admin label_for_field() would have done otherwise.
        return pretty_name(self.field.name)

    field = translations_model._meta.get_field(self.field.name)
    return field.verbose_name
python
def simplex_find_tree(self):
    '''
    API:
        simplex_find_tree(self)
    Description:
        Assumes a feasible flow solution stored in the 'flow' attributes
        of arcs and converts this solution to a feasible spanning tree
        solution.
    Pre:
        (1) 'flow' attributes represent a feasible flow solution.
    Post:
        (1) 'flow' attributes may change when eliminating cycles.
    Return:
        Returns a Graph instance that is a spanning tree solution.
    '''
    # find a cycle
    solution_g = self.get_simplex_solution_graph()
    cycle = solution_g.simplex_find_cycle()
    while cycle is not None:
        # find amount to augment and direction
        amount = self.simplex_augment_cycle(cycle)
        # augment along the cycle
        self.augment_cycle(amount, cycle)
        # find a new cycle
        solution_g = self.get_simplex_solution_graph()
        cycle = solution_g.simplex_find_cycle()
    # check if the solution is connected
    while self.simplex_connect(solution_g):
        pass
    # add attributes
    for e in self.edge_attr:
        flow = self.edge_attr[e]['flow']
        capacity = self.edge_attr[e]['capacity']
        cost = self.edge_attr[e]['cost']
        self.edge_attr[e]['label'] = "%d/%d/%d" % (flow, capacity, cost)
        if e in solution_g.edge_attr:
            solution_g.edge_attr[e]['flow'] = flow
            solution_g.edge_attr[e]['capacity'] = capacity
            solution_g.edge_attr[e]['cost'] = cost
            solution_g.edge_attr[e]['label'] = "%d/%d/%d" % (flow, capacity, cost)
    return solution_g
java
public void printScreen(PrintWriter out, ResourceBundle reg) throws DBException {
    this.getScreenFieldView().printScreen(out, reg);
}
java
public void sendToUser(String topicURI, Object event, String eligibleUser) {
    sendToUsers(topicURI, event, Collections.singleton(eligibleUser));
}
java
private int getHydrogenCount(IAtomContainer ac, IAtom atom) {
    List<IAtom> neighbours = ac.getConnectedAtomsList(atom);
    int hcounter = 0;
    for (IAtom neighbour : neighbours) {
        if (neighbour.getSymbol().equals("H")) {
            hcounter += 1;
        }
    }
    return hcounter;
}
java
public final String toJsonStringAndClose(final Reader source) throws Exception {
    StringBuffer sb = new StringBuffer();
    int chi;
    boolean isStartSpaces = false;
    while ((chi = source.read()) != -1) {
        char ch = (char) chi;
        isStartSpaces = addJsonChar(ch, sb, isStartSpaces);
    }
    source.close();
    return sb.toString();
}
python
def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
    '''
    .. versionadded:: 0.17.0

    Create a block device, character device, or fifo pipe.
    Identical to the gnu mknod.

    CLI Examples:

    .. code-block:: bash

        salt '*' file.mknod /dev/chr c 180 31
        salt '*' file.mknod /dev/blk b 8 999
        salt '*' file.mknod /dev/fifo p
    '''
    ret = False
    makedirs_(name, user, group)
    if ntype == 'c':
        ret = mknod_chrdev(name, major, minor, user, group, mode)
    elif ntype == 'b':
        ret = mknod_blkdev(name, major, minor, user, group, mode)
    elif ntype == 'p':
        ret = mknod_fifo(name, user, group, mode)
    else:
        raise SaltInvocationError(
            'Node type unavailable: \'{0}\'. Available node types are '
            'character (\'c\'), block (\'b\'), and pipe (\'p\').'.format(ntype)
        )
    return ret
python
def addTrail(self, offset=None, maxlength=None, n=25, c=None, alpha=None, lw=1):
    """Add a trailing line to actor.

    :param offset: set an offset vector from the object center.
    :param maxlength: length of trailing line in absolute units
    :param n: number of segments to control precision
    :param lw: line width of the trail

    .. hint:: |trail| |trail.py|_
    """
    if maxlength is None:
        maxlength = self.diagonalSize() * 20
        if maxlength == 0:
            maxlength = 1

    if self.trail is None:
        pos = self.GetPosition()
        self.trailPoints = [None] * n
        self.trailSegmentSize = maxlength / n
        self.trailOffset = offset

        ppoints = vtk.vtkPoints()  # Generate the polyline
        poly = vtk.vtkPolyData()
        ppoints.SetData(numpy_to_vtk([pos] * n))
        poly.SetPoints(ppoints)
        lines = vtk.vtkCellArray()
        lines.InsertNextCell(n)
        for i in range(n):
            lines.InsertCellPoint(i)
        poly.SetPoints(ppoints)
        poly.SetLines(lines)
        mapper = vtk.vtkPolyDataMapper()

        if c is None:
            if hasattr(self, "GetProperty"):
                col = self.GetProperty().GetColor()
            else:
                col = (0.1, 0.1, 0.1)
        else:
            col = colors.getColor(c)

        if alpha is None:
            alpha = 1
            if hasattr(self, "GetProperty"):
                alpha = self.GetProperty().GetOpacity()

        mapper.SetInputData(poly)
        tline = Actor()
        tline.SetMapper(mapper)
        tline.GetProperty().SetColor(col)
        tline.GetProperty().SetOpacity(alpha)
        tline.GetProperty().SetLineWidth(lw)
        self.trail = tline  # holds the vtkActor
    return self
java
public Long getLong(String nameSpace, String cellName) {
    return getValue(nameSpace, cellName, Long.class);
}
java
public void setRuleGroups(java.util.Collection<SubscribedRuleGroupSummary> ruleGroups) {
    if (ruleGroups == null) {
        this.ruleGroups = null;
        return;
    }
    this.ruleGroups = new java.util.ArrayList<SubscribedRuleGroupSummary>(ruleGroups);
}
java
@SuppressWarnings("unchecked") public Map toMap() { HashMap map = new HashMap(); map.put("code", code); map.put("message", message); if (data != null) map.put("data", data); return map; }
java
public SDVariable lstmCell(String baseName, LSTMCellConfiguration configuration) {
    return new LSTMCell(sd, configuration).outputVariables(baseName)[0];
}
python
def local_get_state(self, device, id_override=None, type_override=None):
    """
    Get device state via local API, and fall back to online API.

    Args:
        device (WinkDevice): The device the change is being requested for.
        id_override (String, optional): A device ID used to override the
            passed in device's ID. Used to make changes on sub-devices.
            i.e. Outlet in a Powerstrip. The Parent device's ID.
        type_override (String, optional): Used to override the device type
            when a device inherits from a device other than WinkDevice.

    Returns:
        response_json (Dict): The API's response in dictionary format
    """
    if ALLOW_LOCAL_CONTROL:
        if device.local_id() is not None:
            hub = HUBS.get(device.hub_id())
            if hub is not None and hub["token"] is not None:
                ip = hub["ip"]
                access_token = hub["token"]
            else:
                return self.get_device_state(device, id_override, type_override)
        else:
            return self.get_device_state(device, id_override, type_override)
        _LOGGER.info("Getting local state")
        local_id = id_override or device.local_id()
        object_type = type_override or device.object_type()
        LOCAL_API_HEADERS['Authorization'] = "Bearer " + access_token
        url_string = "https://{}:8888/{}s/{}".format(ip, object_type, local_id)
        try:
            arequest = requests.get(url_string,
                                    headers=LOCAL_API_HEADERS,
                                    verify=False, timeout=3)
        except requests.exceptions.RequestException:
            _LOGGER.error("Error sending local control request. "
                          "Sending request online")
            return self.get_device_state(device, id_override, type_override)
        response_json = arequest.json()
        _LOGGER.debug('%s', response_json)
        temp_state = device.json_state
        for key, value in response_json["data"]["last_reading"].items():
            temp_state["last_reading"][key] = value
        return temp_state
    else:
        return self.get_device_state(device, id_override, type_override)
python
def flash(self, flash):
    """
    Turn on or off flashing of the device's LED for physical
    identification purposes.
    """
    self.m_objPCANBasic.SetValue(self.m_PcanHandle, PCAN_CHANNEL_IDENTIFYING, bool(flash))
python
def indicators(self, indicator_data):
    """Generator for indicator values.

    Some indicators such as Files (hashes) and Custom Indicators can have
    multiple indicator values (e.g. md5, sha1, sha256). This method
    provides a generator to iterate over all indicator values. Both the
    **summary** field and the individual indicator fields (e.g. **md5**,
    **sha1**, **sha256**) are supported.

    For indicators that have only one value such as **ip** or **hostName**
    the generator will only return the one result.

    .. code-block:: python
        :linenos:
        :lineno-start: 1

        # the individual indicator JSON from the API
        for i in resource.indicators(indicator_data):
            print(i.get('type'))   # md5, sha1, sha256, etc
            print(i.get('value'))  # hash or custom indicator value

    .. Warning:: This method could break for custom indicators that have
                 " : " in the value of the indicator while using the
                 summary field.

    .. Note:: For ``/v2/indicators`` and ``/v2/indicators/bulk/json`` API
              endpoints only one hash is returned for a file Indicator
              even if there are multiple in the platform. If all hashes
              are required the ``/v2/indicators/files`` or
              ``/v2/indicators/files/<hash>`` endpoints will provide all
              hashes.

    Args:
        indicator_data (dict): The indicator dictionary.

    Returns:
        (dictionary): A dict containing the indicator type and value.
    """
    for indicator_field in self.value_fields:
        if indicator_field == 'summary':
            indicators = self.tcex.expand_indicators(indicator_data.get('summary'))
            if indicator_data.get('type') == 'File':
                hash_patterns = {
                    'md5': re.compile(r'^([a-fA-F\d]{32})$'),
                    'sha1': re.compile(r'^([a-fA-F\d]{40})$'),
                    'sha256': re.compile(r'^([a-fA-F\d]{64})$'),
                }
                for i in indicators:
                    if not i:
                        continue
                    i = i.strip()  # clean up badly formatted summary string
                    i_type = None
                    if hash_patterns['md5'].match(i):
                        i_type = 'md5'
                    elif hash_patterns['sha1'].match(i):
                        i_type = 'sha1'
                    elif hash_patterns['sha256'].match(i):
                        i_type = 'sha256'
                    else:
                        msg = u'Cannot determine hash type: "{}"'.format(
                            indicator_data.get('summary'))
                        self.tcex.log.warning(msg)
                    data = {'type': i_type, 'value': i}
                    yield data
            else:
                resource = getattr(
                    self.tcex.resources,
                    self.tcex.safe_rt(indicator_data.get('type')))(self.tcex)
                values = resource.value_fields
                index = 0
                for i in indicators:
                    if i is None:
                        continue
                    i = i.strip()  # clean up badly formatted summary string
                    # TODO: remove workaround for bug in indicatorTypes API endpoint
                    if len(values) - 1 < index:
                        break
                    data = {'type': values[index], 'value': i}
                    index += 1
                    yield data
        else:
            if indicator_data.get(indicator_field) is not None:
                yield {
                    'type': indicator_field,
                    'value': indicator_data.get(indicator_field),
                }
python
def delete_volume(self, volume_name: str):
    """Removes/stops a docker volume.

    Only the manager nodes can delete a volume

    Args:
        volume_name (string): Name of the volume
    """
    # Raise an exception if we are not a manager
    if not self._manager:
        raise RuntimeError('Volumes can only be deleted '
                           'on swarm manager nodes')
    # Remove volume
    self._api_client.remove_volume(volume_name)
python
def removeReader(self, selectable):
    """Remove a FileDescriptor for notification of data available to read."""
    try:
        if selectable.disconnected:
            self._reads[selectable].kill(block=False)
            del self._reads[selectable]
        else:
            self._reads[selectable].pause()
    except KeyError:
        pass
java
@Override
public final void visit(final FamilyDocumentMongo document) {
    gedObject = new Family(parent, new ObjectId(document.getString()));
}
python
def calibrate(self, data, calibration):
    """Calibrate the data."""
    tic = datetime.now()
    channel_name = self.channel_name

    if calibration == 'counts':
        res = data
    elif calibration in ['radiance', 'reflectance', 'brightness_temperature']:
        # Choose calibration coefficients
        # a) Internal: Nominal or GSICS?
        band_idx = self.mda['spectral_channel_id'] - 1
        if self.calib_mode != 'GSICS' or self.channel_name in VIS_CHANNELS:
            # you can't apply GSICS values to the VIS channels
            coefs = self.prologue["RadiometricProcessing"]["Level15ImageCalibration"]
            int_gain = coefs['CalSlope'][band_idx]
            int_offset = coefs['CalOffset'][band_idx]
        else:
            coefs = self.prologue["RadiometricProcessing"]['MPEFCalFeedback']
            int_gain = coefs['GSICSCalCoeff'][band_idx]
            int_offset = coefs['GSICSOffsetCount'][band_idx]

        # b) Internal or external? External takes precedence.
        gain = self.ext_calib_coefs.get(self.channel_name, {}).get('gain', int_gain)
        offset = self.ext_calib_coefs.get(self.channel_name, {}).get('offset', int_offset)

        # Convert to radiance
        data = data.where(data > 0)
        res = self._convert_to_radiance(data.astype(np.float32), gain, offset)
        line_mask = self.mda['image_segment_line_quality']['line_validity'] >= 2
        line_mask &= self.mda['image_segment_line_quality']['line_validity'] <= 3
        line_mask &= self.mda['image_segment_line_quality']['line_radiometric_quality'] == 4
        line_mask &= self.mda['image_segment_line_quality']['line_geometric_quality'] == 4
        res *= np.choose(line_mask, [1, np.nan])[:, np.newaxis].astype(np.float32)

    if calibration == 'reflectance':
        solar_irradiance = CALIB[self.platform_id][channel_name]["F"]
        res = self._vis_calibrate(res, solar_irradiance)
    elif calibration == 'brightness_temperature':
        cal_type = self.prologue['ImageDescription'][
            'Level15ImageProduction']['PlannedChanProcessing'][self.mda['spectral_channel_id']]
        res = self._ir_calibrate(res, channel_name, cal_type)

    logger.debug("Calibration time " + str(datetime.now() - tic))
    return res
java
public void importAccessControlEntries(
    CmsRequestContext context,
    CmsResource resource,
    List<CmsAccessControlEntry> acEntries)
throws CmsException, CmsSecurityException {

    CmsDbContext dbc = m_dbContextFactory.getDbContext(context);
    try {
        checkOfflineProject(dbc);
        checkPermissions(dbc, resource, CmsPermissionSet.ACCESS_CONTROL, true, CmsResourceFilter.ALL);
        m_driverManager.importAccessControlEntries(dbc, resource, acEntries);
    } catch (Exception e) {
        dbc.report(
            null,
            Messages.get().container(Messages.ERR_IMPORT_ACL_ENTRIES_1, context.getSitePath(resource)),
            e);
    } finally {
        dbc.clear();
    }
}
python
def default_capability(self):
    """Set capability name in md.

    Every ResourceSync document should have the top-level capability
    attributes.
    """
    if ('capability' not in self.md and self.capability_name is not None):
        self.md['capability'] = self.capability_name
python
def get_create_options(self):
    """Returns valid options for ordering a dedicated host."""
    package = self._get_package()

    # Locations
    locations = []
    for region in package['regions']:
        locations.append({
            'name': region['location']['location']['longName'],
            'key': region['location']['location']['name'],
        })

    # Flavors
    dedicated_host = []
    for item in package['items']:
        if item['itemCategory']['categoryCode'] == 'dedicated_virtual_hosts':
            dedicated_host.append({
                'name': item['description'],
                'key': item['keyName'],
            })

    return {'locations': locations, 'dedicated_host': dedicated_host}
python
def load_stylesheet():
    """
    Loads the stylesheet for use in a pyqt5 application.

    :return the stylesheet string
    """
    # Smart import of the rc file
    f = QtCore.QFile(':qdarkgraystyle/style.qss')
    if not f.exists():
        _logger().error('Unable to load stylesheet, file not found in '
                        'resources')
        return ''
    else:
        f.open(QtCore.QFile.ReadOnly | QtCore.QFile.Text)
        ts = QtCore.QTextStream(f)
        stylesheet = ts.readAll()
        if platform.system().lower() == 'darwin':  # see issue #12 on github
            mac_fix = '''
            QDockWidget::title
            {
                background-color: #31363b;
                text-align: center;
                height: 12px;
            }
            '''
            stylesheet += mac_fix
        return stylesheet
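A short usage sketch, assuming the qdarkgraystyle resource file has been compiled and registered so that the ':qdarkgraystyle/style.qss' path resolves:

python
# Minimal PyQt5 wiring; QApplication.setStyleSheet applies the QSS globally.
from PyQt5 import QtWidgets

app = QtWidgets.QApplication([])
app.setStyleSheet(load_stylesheet())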
python
def replace_baseline_repr(self, linenum, update):
    """Replace individual baseline representation.

    :param int linenum: location of baseline representation
    :param str update: new baseline representation text (with delimiters)
    """
    # use property to access lines to read them from file if necessary
    lines = self.lines

    count = 0
    delimiter = None
    for index in range(linenum - 1, -1, -1):
        line = lines[index]
        if delimiter is None:
            single_quote_index = line.rfind("'''")
            double_quote_index = line.rfind('"""')
            if double_quote_index >= 0:
                if double_quote_index > single_quote_index:
                    delimiter = '"""'
                else:
                    delimiter = "'''"
            elif single_quote_index >= 0:
                delimiter = "'''"
            else:
                continue
        count += lines[index].count(delimiter)
        if count >= 2:
            linenum = index
            break
    else:
        docstr_not_found = (
            '{}:{}: could not find baseline docstring'
            ''.format(self.showpath(self.path), linenum))
        raise RuntimeError(docstr_not_found)

    old_content = '\n'.join(lines[linenum:])
    match = self.REGEX.match(old_content)
    if match is None:
        docstr_not_found = (
            '{}:{}: could not find docstring'.format(self.path, linenum))
        raise RuntimeError(docstr_not_found)

    new_content = match.group('prefix') + update + match.group('suffix')
    lines[linenum:] = new_content.split('\n')
java
public static void solveBlock(final int blockLength,
                              final boolean upper, final DSubmatrixD1 T,
                              final DSubmatrixD1 B,
                              final boolean transT, final boolean transB) {
    int Trows = T.row1 - T.row0;
    if (Trows > blockLength)
        throw new IllegalArgumentException("T can be at most the size of a block");

    // number of rows in a block. The submatrix can be smaller than a block
    final int blockT_rows = Math.min(blockLength, T.original.numRows - T.row0);
    final int blockT_cols = Math.min(blockLength, T.original.numCols - T.col0);

    int offsetT = T.row0 * T.original.numCols + blockT_rows * T.col0;

    final double dataT[] = T.original.data;
    final double dataB[] = B.original.data;

    if (transB) {
        if (upper) {
            if (transT) {
                throw new IllegalArgumentException("Operation not yet supported");
            } else {
                throw new IllegalArgumentException("Operation not yet supported");
            }
        } else {
            if (transT) {
                throw new IllegalArgumentException("Operation not yet supported");
            } else {
                for (int i = B.row0; i < B.row1; i += blockLength) {
                    int N = Math.min(B.row1, i + blockLength) - i;
                    int offsetB = i * B.original.numCols + N * B.col0;
                    InnerTriangularSolver_DDRB.solveLTransB(dataT, dataB, blockT_rows, N, blockT_rows, offsetT, offsetB);
                }
            }
        }
    } else {
        if (Trows != B.row1 - B.row0)
            throw new IllegalArgumentException("T and B must have the same number of rows.");

        if (upper) {
            if (transT) {
                for (int i = B.col0; i < B.col1; i += blockLength) {
                    int offsetB = B.row0 * B.original.numCols + Trows * i;
                    int N = Math.min(B.col1, i + blockLength) - i;
                    InnerTriangularSolver_DDRB.solveTransU(dataT, dataB, Trows, N, Trows, offsetT, offsetB);
                }
            } else {
                for (int i = B.col0; i < B.col1; i += blockLength) {
                    int offsetB = B.row0 * B.original.numCols + Trows * i;
                    int N = Math.min(B.col1, i + blockLength) - i;
                    InnerTriangularSolver_DDRB.solveU(dataT, dataB, Trows, N, Trows, offsetT, offsetB);
                }
            }
        } else {
            if (transT) {
                for (int i = B.col0; i < B.col1; i += blockLength) {
                    int offsetB = B.row0 * B.original.numCols + Trows * i;
                    int N = Math.min(B.col1, i + blockLength) - i;
                    InnerTriangularSolver_DDRB.solveTransL(dataT, dataB, Trows, N, blockT_cols, offsetT, offsetB);
                }
            } else {
                for (int i = B.col0; i < B.col1; i += blockLength) {
                    int offsetB = B.row0 * B.original.numCols + Trows * i;
                    int N = Math.min(B.col1, i + blockLength) - i;
                    InnerTriangularSolver_DDRB.solveL(dataT, dataB, Trows, N, blockT_cols, offsetT, offsetB);
                }
            }
        }
    }
}
java
public Set<DOCUMENT> query(Iterable<KEY> keys) {
    return query(keys, d -> true);
}
java
@Override
public void writeTo(Object object, Class<?> type, Type genericType,
        Annotation[] annotations, MediaType mediaType,
        MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
        throws IOException, WebApplicationException {
    OutputStreamWriter outputStreamWriter = null;
    try {
        outputStreamWriter = new OutputStreamWriter(entityStream, CHARSET);
        Type jsonType = getAppropriateType(type, genericType);
        String json = getGson().toJson(object, jsonType);
        if (logger.isLoggable(Level.FINE))
            logger.fine("Outgoing JSON Entity: " + json);
        getGson().toJson(object, jsonType, outputStreamWriter);
    } finally {
        if (outputStreamWriter != null)
            outputStreamWriter.close();
    }
}
java
public void registerHanders(String packageString) {
    List<String> list = AnnotationDetector.scanAsList(ExceptionHandler.class, packageString);
    for (String handler : list) {
        // System.out.println(handler);
        JKExceptionHandler<? extends Throwable> newInstance = JKObjectUtil.newInstance(handler);
        Class<? extends Throwable> clas = JKObjectUtil.getGenericParamter(handler);
        setHandler(clas, newInstance);
    }
}
java
public long getAndDecrement(T obj) {
    long prev, next;
    do {
        prev = get(obj);
        next = prev - 1;
    } while (!compareAndSet(obj, prev, next));
    return prev;
}
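This is the classic lock-free read-modify-write loop: read the current value, compute the successor, and retry until compare-and-set succeeds. A language-neutral sketch in Python, where `cell` is assumed to expose get() and compare_and_set() like the Java field updater:

python
def get_and_decrement(cell):
    # Retry until no other thread raced between our read and our CAS.
    while True:
        prev = cell.get()
        if cell.compare_and_set(prev, prev - 1):
            return prev  # return the value *before* the decrement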
java
public static void updateFilter(FullFrameRect rect, int newFilter) {
    Texture2dProgram.ProgramType programType;
    float[] kernel = null;
    float colorAdj = 0.0f;

    if (VERBOSE) Log.d(TAG, "Updating filter to " + newFilter);
    switch (newFilter) {
        case FILTER_NONE:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT;
            break;
        case FILTER_BLACK_WHITE:
            // (In a previous version the TEXTURE_EXT_BW variant was enabled by a flag called
            // ROSE_COLORED_GLASSES, because the shader set the red channel to the B&W color
            // and green/blue to zero.)
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_BW;
            break;
        case FILTER_NIGHT:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_NIGHT;
            break;
        case FILTER_CHROMA_KEY:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_CHROMA_KEY;
            break;
        case FILTER_SQUEEZE:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_SQUEEZE;
            break;
        case FILTER_TWIRL:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_TWIRL;
            break;
        case FILTER_TUNNEL:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_TUNNEL;
            break;
        case FILTER_BULGE:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_BULGE;
            break;
        case FILTER_DENT:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_DENT;
            break;
        case FILTER_FISHEYE:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FISHEYE;
            break;
        case FILTER_STRETCH:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_STRETCH;
            break;
        case FILTER_MIRROR:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_MIRROR;
            break;
        case FILTER_BLUR:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
            kernel = new float[] {
                    1f/16f, 2f/16f, 1f/16f,
                    2f/16f, 4f/16f, 2f/16f,
                    1f/16f, 2f/16f, 1f/16f };
            break;
        case FILTER_SHARPEN:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
            kernel = new float[] {
                    0f, -1f, 0f,
                    -1f, 5f, -1f,
                    0f, -1f, 0f };
            break;
        case FILTER_EDGE_DETECT:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
            kernel = new float[] {
                    -1f, -1f, -1f,
                    -1f, 8f, -1f,
                    -1f, -1f, -1f };
            break;
        case FILTER_EMBOSS:
            programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
            kernel = new float[] {
                    2f, 0f, 0f,
                    0f, -1f, 0f,
                    0f, 0f, -1f };
            colorAdj = 0.5f;
            break;
        default:
            throw new RuntimeException("Unknown filter mode " + newFilter);
    }

    // Do we need a whole new program?  (We want to avoid doing this if we
    // don't have to -- compiling a program could be expensive.)
    if (programType != rect.getProgram().getProgramType()) {
        rect.changeProgram(new Texture2dProgram(programType));
    }

    // Update the filter kernel (if any).
    if (kernel != null) {
        rect.getProgram().setKernel(kernel, colorAdj);
    }
}
python
def get_link_text_from_selector(selector):
    """ A basic method to get the link text from a link text selector. """
    if selector.startswith('link='):
        return selector.split('link=')[1]
    elif selector.startswith('link_text='):
        return selector.split('link_text=')[1]
    return selector
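Expected behavior of the selector parsing, shown as quick doctest-style checks (illustrative only):

python
assert get_link_text_from_selector('link=Sign in') == 'Sign in'
assert get_link_text_from_selector('link_text=Sign in') == 'Sign in'
assert get_link_text_from_selector('Sign in') == 'Sign in'  # passthrough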
java
public void setChannel(PrivateChannel channel) {
    if (this.channel != channel) {
        if (this.channel != null) {
            ((Cleanupable) this.channel).cleanup();
        }
        this.channel = channel;
    }
}
python
def configure_error_handlers(app):
    """ Configure application error handlers """

    def render_error(error):
        return (render_template('errors/%s.html' % error.code,
                                title=error_messages[error.code],
                                code=error.code), error.code)

    for (errcode, title) in error_messages.iteritems():
        app.errorhandler(errcode)(render_error)
python
def publish_message(self, exchange, routing_key, properties, body,
                    no_serialization=False, no_encoding=False,
                    channel=None, connection=None):
    """Publish a message to RabbitMQ on the same channel the original
    message was received on.

    By default, if you pass a non-string object to the body and the
    properties have a supported ``content_type`` set, the body will be
    auto-serialized in the specified ``content_type``.

    If the properties do not have a timestamp set, it will be set to the
    current time.

    If you specify a ``content_encoding`` in the properties and the
    encoding is supported, the body will be auto-encoded.

    Both of these behaviors can be disabled by setting
    ``no_serialization`` or ``no_encoding`` to ``True``.

    If you pass an unsupported content-type or content-encoding when using
    the auto-serialization and auto-encoding features, a
    :exc:`ValueError` will be raised.

    .. versionchanged:: 4.0.0
        The method returns a :py:class:`~tornado.concurrent.Future` if
        `publisher confirmations <https://www.rabbitmq.com/confirms.html>`_
        are enabled for the connection. In addition, the ``channel``
        parameter is deprecated and will be removed in a future release.

    :param str exchange: The exchange to publish to
    :param str routing_key: The routing key to publish with
    :param dict properties: The message properties
    :param mixed body: The message body to publish
    :param bool no_serialization: Turn off auto-serialization of the body
    :param bool no_encoding: Turn off auto-encoding of the body
    :param str channel: **Deprecated in 4.0.0** Specify the connection
        parameter instead.
    :param str connection: The connection to use. If it is not specified,
        the channel that the message was delivered on is used.
    :rtype: tornado.concurrent.Future or None
    :raises: ValueError

    """
    # Auto-serialize the content if needed
    is_string = (isinstance(body, str) or
                 isinstance(body, bytes) or
                 isinstance(body, unicode))
    if not no_serialization and not is_string and \
            properties.get('content_type'):
        body = self._serialize(
            body, headers.parse_content_type(properties['content_type']))

    # Auto-encode the message body if needed
    if not no_encoding and \
            properties.get('content_encoding') in self._CODEC_MAP.keys():
        body = self._compress(
            body, self._CODEC_MAP[properties['content_encoding']])

    return super(SmartConsumer, self).publish_message(
        exchange, routing_key, properties, body, channel or connection)
java
protected boolean checkvalue(String colorvalue) {
    boolean valid = validateColorValue(colorvalue);
    if (valid) {
        if (colorvalue.length() == 4) {
            char[] chr = colorvalue.toCharArray();
            for (int i = 1; i < 4; i++) {
                String foo = String.valueOf(chr[i]);
                colorvalue = colorvalue.replaceFirst(foo, foo + foo);
            }
        }
        m_textboxColorValue.setValue(colorvalue, true);
        m_colorField.getElement().getStyle().setBackgroundColor(colorvalue);
        m_colorValue = colorvalue;
    }
    return valid;
}
java
private Manifest createManifest( File jar, Map<String, String> manifestentries )
    throws MojoExecutionException {
    JarFile jarFile = null;
    try {
        jarFile = new JarFile( jar );

        // read manifest from jar
        Manifest manifest = jarFile.getManifest();

        if ( manifest == null || manifest.getMainAttributes().isEmpty() ) {
            manifest = new Manifest();
            manifest.getMainAttributes().putValue( Attributes.Name.MANIFEST_VERSION.toString(), "1.0" );
        }

        // add or overwrite entries
        Set<Map.Entry<String, String>> entrySet = manifestentries.entrySet();
        for ( Map.Entry<String, String> entry : entrySet ) {
            manifest.getMainAttributes().putValue( entry.getKey(), entry.getValue() );
        }

        return manifest;
    } catch ( IOException e ) {
        throw new MojoExecutionException( "Error while reading manifest from " + jar.getAbsolutePath(), e );
    } finally {
        ioUtil.close( jarFile );
    }
}
java
public DateFormat getDateFormat(int dateStyle, int timeStyle) {
    if (dateStyle == DF_NONE && timeStyle == DF_NONE
            || dateStyle < 0 || dateStyle >= DF_LIMIT
            || timeStyle < 0 || timeStyle >= DF_LIMIT) {
        throw new IllegalArgumentException("Illegal date format style arguments");
    }
    DateFormat result = null;
    if (dateFormats != null) {
        result = dateFormats[dateStyle][timeStyle];
    }
    if (result != null) {
        result = (DateFormat) result.clone(); // clone for safety
        // Not sure overriding configuration is what we really want...
        result.setTimeZone(getTimeZone());
    } else {
        result = guessDateFormat(dateStyle, timeStyle);
    }
    return result;
}
java
public static <T> GenericResponseBuilder<T> notAcceptable(List<Variant> variants) {
    return GenericResponses.<T>status(Response.Status.NOT_ACCEPTABLE).variants(variants);
}
java
public void put(String classname, ClassDescriptor cld) {
    cld.setRepository(this); // BRJ
    synchronized (descriptorTable) {
        descriptorTable.put(classname, cld);
        List extentClasses = cld.getExtentClasses();
        for (int i = 0; i < extentClasses.size(); ++i) {
            addExtent(((Class) extentClasses.get(i)).getName(), cld);
        }
        changeDescriptorEvent();
    }
}
java
public static String toHex(final byte[] data, final String sep) {
    final StringBuilder sb = new StringBuilder();
    for (int i = 0; i < data.length; ++i) {
        final int no = data[i] & 0xff;
        if (no < 0x10)
            sb.append('0');
        sb.append(Integer.toHexString(no));
        if (sep != null && i < data.length - 1)
            sb.append(sep);
    }
    return sb.toString();
}
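A one-to-one Python sketch of the same formatting (lowercase hex, zero-padded, separator only between bytes), for comparison:

python
def to_hex(data, sep=None):
    # '{:02x}' zero-pads to two lowercase hex digits, like the Java branch.
    return (sep or '').join('{:02x}'.format(b) for b in data)

assert to_hex(b'\x0f\xa0', ':') == '0f:a0'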
java
protected boolean isEquivalentInTheSet(Node node, boolean direction, Set<Node> set) {
    for (Node eq : direction == UPWARD ? node.getUpperEquivalent() : node.getLowerEquivalent()) {
        if (set.contains(eq))
            return true;
        boolean isIn = isEquivalentInTheSet(eq, direction, set);
        if (isIn)
            return true;
    }
    return false;
}
python
def send_data_on_udp(ip_address, port, data):
    """Helper function to send a string over UDP to a specific IP/port."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.sendto(data.encode('utf-8'), (ip_address, port))
    except Exception:
        LOGGER.exception('Failed to send trace to X-Ray Daemon')
    finally:
        sock.close()
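A hedged usage sketch; the address and payload here are hypothetical placeholders, though the AWS X-Ray daemon does listen on UDP port 2000 by default:

python
# Assumes a daemon reachable on localhost; the payload is a placeholder.
send_data_on_udp('127.0.0.1', 2000, '{"format": "json", "version": 1}')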
java
public static KFMsgRecord msgrecordGetrecord(String access_token, int endtime,
        int pageindex, int pagesize, int starttime) {
    String jsonPostData = String.format(
            "{\"endtime\":%1d,\"pageindex\":%2d,\"pagesize\":%3d,\"starttime\":%4d}",
            endtime, pageindex, pagesize, starttime);
    HttpUriRequest httpUriRequest = RequestBuilder.post()
            .setHeader(jsonHeader)
            .setUri(BASE_URI + "/customservice/msgrecord/getrecord")
            .addParameter(PARAM_ACCESS_TOKEN, API.accessToken(access_token))
            .setEntity(new StringEntity(jsonPostData, Charset.forName("utf-8")))
            .build();
    return LocalHttpClient.executeJsonResult(httpUriRequest, KFMsgRecord.class);
}
java
public EEnum getRenderingIntentIOCARI() {
    if (renderingIntentIOCARIEEnum == null) {
        renderingIntentIOCARIEEnum = (EEnum) EPackage.Registry.INSTANCE
                .getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(181);
    }
    return renderingIntentIOCARIEEnum;
}
java
@Override
public List<CPAttachmentFileEntry> findByC_C_LtD_S(long classNameId,
        long classPK, Date displayDate, int status) {
    return findByC_C_LtD_S(classNameId, classPK, displayDate, status,
            QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
}
python
def pyxb_is_v1(pyxb_obj):
    """
    Args:
        pyxb_obj : PyXB object
            PyXB object holding an unknown type.

    Returns:
        bool: **True** if ``pyxb_obj`` holds an API v1 type.
    """
    # TODO: Will not detect v1.2 as v1.
    return (
        pyxb_obj._element().name().namespace()
        == d1_common.types.dataoneTypes_v1.Namespace
    )
python
async def send_photo(self, path, entity):
    """Sends the file located at path to the desired entity as a photo"""
    await self.send_file(
        entity, path,
        progress_callback=self.upload_progress_callback
    )
    print('Photo sent!')
java
private void grantRecursiveRawAclEntry(String entityPath, String rawAclEntry)
        throws NotFoundException {
    Entity[] children = storageDao.getEntityChildren(entityPath);
    if (children.length == 0) {
        return;
    }
    for (Entity child : children) {
        grantNodeRawAclEntry(child.getPath(), rawAclEntry, false);
        grantRecursiveRawAclEntry(child.getPath(), rawAclEntry);
    }
}
java
private int groupIndexForPoint(int pointIndex) {
    if (this.pointCoordinates == null || pointIndex < 0
            || pointIndex >= this.pointCoordinates.length) {
        throw new IndexOutOfBoundsException();
    }
    if (this.partIndexes == null) {
        return 0;
    }
    for (int i = 0; i < this.partIndexes.length; ++i) {
        if (pointIndex < this.partIndexes[i]) {
            return i;
        }
    }
    return this.partIndexes.length;
}
java
@Override
public InformationMessage createINF(int cic) {
    InformationMessage msg = createINF();
    CircuitIdentificationCode code = this.parameterFactory.createCircuitIdentificationCode();
    code.setCIC(cic);
    msg.setCircuitIdentificationCode(code);
    return msg;
}
python
def imageinfo(self, files):
    """
    Returns imageinfo query string
    """
    files = '|'.join([safequote(x) for x in files])
    self.set_status('imageinfo', files)
    return self.IMAGEINFO.substitute(
        WIKI=self.uri, ENDPOINT=self.endpoint, FILES=files)
python
def unpack(self, buff, offset=0):
    """Unpack a binary message into this object's attributes.

    Unpack the binary value *buff* and update this object attributes based
    on the results. It is an inplace method and it receives the binary
    data of the message **without the header**.

    This class' unpack method is like the :meth:`.GenericMessage.unpack`
    one, except for the ``body`` attribute which has its type determined
    by the ``multipart_type`` attribute.

    Args:
        buff (bytes): Binary data package to be unpacked, without the
            header.
    """
    super().unpack(buff[offset:])
    self._unpack_body()
python
def get_index_range(working_dir):
    """
    Get the bitcoin block index range.
    Mask connection failures with timeouts.
    Always try to reconnect.

    The last block will be the last block to search for names.
    This will be NUM_CONFIRMATIONS behind the actual last-block the
    cryptocurrency node knows about.
    """
    bitcoind_session = get_bitcoind(new=True)
    assert bitcoind_session is not None

    first_block = None
    last_block = None
    wait = 1.0
    while last_block is None and is_running():
        first_block, last_block = virtualchain.get_index_range(
            'bitcoin', bitcoind_session, virtualchain_hooks, working_dir)

        if first_block is None or last_block is None:
            # try to reconnect, backing off exponentially with jitter
            log.error("Reconnect to bitcoind in {} seconds".format(wait))
            time.sleep(wait)
            wait = min(wait * 2.0 + random.random() * wait, 60)
            bitcoind_session = get_bitcoind(new=True)
            continue
        else:
            return first_block, last_block - NUM_CONFIRMATIONS

    return None, None
java
public void register(Context context) {
    if (mRegisteredContext != null) {
        throw new IllegalStateException("Already registered");
    }
    mRegisteredContext = context;
    context.registerReceiver(this, mPackageFilter);
}
python
def export(self, validate=True):
    """
    Method to output the xml as string. It will finalize the batches and
    then calculate the checksums (amount sum and transaction count),
    fill these into the group header and output the XML.
    """
    self._finalize_batch()

    ctrl_sum_total = 0
    nb_of_txs_total = 0

    for ctrl_sum in self._xml.iter('CtrlSum'):
        if ctrl_sum.text is None:
            continue
        ctrl_sum_total += decimal_str_to_int(ctrl_sum.text)

    for nb_of_txs in self._xml.iter('NbOfTxs'):
        if nb_of_txs.text is None:
            continue
        nb_of_txs_total += int(nb_of_txs.text)

    n = self._xml.find(self.root_el)
    GrpHdr_node = n.find('GrpHdr')
    CtrlSum_node = GrpHdr_node.find('CtrlSum')
    NbOfTxs_node = GrpHdr_node.find('NbOfTxs')
    CtrlSum_node.text = int_to_decimal_str(ctrl_sum_total)
    NbOfTxs_node.text = str(nb_of_txs_total)

    # Prepending the XML version is hacky, but cElementTree only offers
    # this automatically if you write to a file, which we don't
    # necessarily want.
    out = b"<?xml version=\"1.0\" encoding=\"UTF-8\"?>" + ET.tostring(
        self._xml, "utf-8")
    if validate and not is_valid_xml(out, self.schema):
        raise ValidationError(
            "The output SEPA file contains validation errors. This is "
            "likely due to an illegal value in one of your input fields."
        )
    return out
java
public OperationFuture<List<Server>> reboot(Server... serverRefs) {
    return powerOperationResponse(
        Arrays.asList(serverRefs),
        "Reboot",
        client.reboot(ids(serverRefs))
    );
}
java
public static CProduct fetchByUuid_C_Last(String uuid, long companyId,
        OrderByComparator<CProduct> orderByComparator) {
    return getPersistence().fetchByUuid_C_Last(uuid, companyId, orderByComparator);
}
java
public int compareSpecificness(final Class<?>[] a, boolean a_varArgs,
                               final Class<?>[] b, boolean b_varArgs) {
    final int a_fixLen = a.length - (a_varArgs ? 1 : 0);
    final int b_fixLen = b.length - (b_varArgs ? 1 : 0);
    final int fixLen = Math.min(a_fixLen, b_fixLen);

    int c = 0; // result of comparison

    // compare fix args
    for (int i = 0; i < fixLen; i++) {
        int newC = compareParameter(a[i], b[i]);
        if (newC != 0 && newC != c) {
            if (c == 0) c = newC;
            else return 0; // ambiguous
        }
    }

    // compare varargs of the shorter signature against fixargs of other
    Class<?> a_varType = a_varArgs ? a[a_fixLen] : null;
    Class<?> b_varType = b_varArgs ? b[b_fixLen] : null;
    if (a_varArgs && a_fixLen < b_fixLen) {
        // compare remaining fixArgs of b
        for (int i = fixLen; i < b_fixLen; i++) {
            int newC = compareParameter(a_varType, b[i]);
            if (newC != 0 && newC != c) {
                if (c == 0) c = newC;
                else return 0; // ambiguous
            }
        }
    }
    if (b_varArgs && b_fixLen < a_fixLen) {
        // compare remaining fixArgs of a
        for (int i = fixLen; i < a_fixLen; i++) {
            int newC = compareParameter(a[i], b_varType);
            if (newC != 0 && newC != c) {
                if (c == 0) c = newC;
                else return 0; // ambiguous
            }
        }
    }

    boolean a_varArgsUsed = a_varArgs && argTypes.length > a_fixLen;
    boolean b_varArgsUsed = b_varArgs && argTypes.length > b_fixLen;
    if (a_varArgsUsed && b_varArgsUsed) {
        // if var args are needed, compare them
        int newC = compareParameter(a_varType, b_varType);
        if (newC != 0 && newC != c) {
            if (c == 0) c = newC;
            else return 0; // ambiguous
        }
    }

    if (c == 0) {
        // if still equal, more fix args wins
        if (a_fixLen > b_fixLen) {
            return -1;
        } else if (b_fixLen > a_fixLen) {
            return 1;
        }
    }
    return c;
}
python
def smooth_image(image, sigma, sigma_in_physical_coordinates=True, FWHM=False,
                 max_kernel_width=32):
    """
    Smooth an image

    ANTsR function: `smoothImage`

    Arguments
    ---------
    image
        Image to smooth

    sigma
        Smoothing factor. Can be scalar, in which case the same sigma is
        applied to each dimension, or a vector of length dim(inimage) to
        specify a unique smoothness for each dimension.

    sigma_in_physical_coordinates : boolean
        If true, the smoothing factor is in millimeters; if false, it is
        in pixels.

    FWHM : boolean
        If true, sigma is interpreted as the full-width-half-max (FWHM)
        of the filter, not the sigma of a Gaussian kernel.

    max_kernel_width : scalar
        Maximum kernel width

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> image = ants.image_read(ants.get_ants_data('r16'))
    >>> simage = ants.smooth_image(image, (1.2, 1.5))
    """
    if image.components == 1:
        return _smooth_image_helper(image, sigma,
                                    sigma_in_physical_coordinates,
                                    FWHM, max_kernel_width)
    else:
        imagelist = utils.split_channels(image)
        newimages = []
        for image in imagelist:
            newimage = _smooth_image_helper(image, sigma,
                                            sigma_in_physical_coordinates,
                                            FWHM, max_kernel_width)
            newimages.append(newimage)
        return utils.merge_channels(newimages)
java
Response delete(URI uri) {
    HttpConnection connection = Http.DELETE(uri);
    return executeToResponse(connection);
}
python
def update_user_display_name(user, **kwargs):
    """
    Update a user's display name
    """
    # check_perm(kwargs.get('user_id'), 'edit_user')
    try:
        user_i = db.DBSession.query(User).filter(User.id == user.id).one()
        user_i.display_name = user.display_name
        return user_i
    except NoResultFound:
        raise ResourceNotFoundError("User (id=%s) not found" % (user.id))
python
def html(self, text=TEXT):
    """ Generate an HTML file from the report data. """
    self.logger.debug("Generating the HTML report{}..."
                      .format(["", " (text only)"][text]))
    html = []
    for piece in self._pieces:
        if isinstance(piece, string_types):
            html.append(markdown2.markdown(piece, extras=["tables"]))
        elif isinstance(piece, Element):
            html.append(piece.html())
    return "\n\n".join(html)
java
@Override
public boolean removeByValue(int value) {
    int index = binarySearch(value);
    if (index < 0)
        return false;
    removeAtIndex(index);
    return true;
}
python
def find_amplitude(chunk):
    """
    Calculate the 0-1 amplitude of an ndarray chunk of audio samples.

    Samples in the ndarray chunk are signed int16 values oscillating
    anywhere between -32768 and 32767. Find the amplitude between 0 and 1
    by summing the absolute values of the minimum and maximum, and
    dividing by 32767.

    Args:
        chunk (numpy.ndarray): An array of int16 audio samples

    Returns:
        float: The amplitude of the sample between 0 and 1.
            Note that this is not a decibel representation of
            the amplitude.
    """
    return abs(int(chunk.max() - chunk.min())) / config.SAMPLE_RANGE
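A quick sanity check, assuming config.SAMPLE_RANGE is 32767 as the docstring implies; a half-scale sine wave should report an amplitude near 1.0:

python
import numpy as np

t = np.linspace(0, 1, 44100, endpoint=False)
chunk = (0.5 * 32767 * np.sin(2 * np.pi * 100 * t)).astype(np.int16)
print(find_amplitude(chunk))  # ~1.0 (peak-to-peak over SAMPLE_RANGE)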
java
@Override
public int advance(int target) throws IOException {
    reset();
    if (docId == NO_MORE_DOCS) {
        return docId;
    } else if (target < docId) {
        // should not happen
        docId = NO_MORE_DOCS;
        return docId;
    } else {
        // advance 1
        int spans1DocId = spans1.spans.docID();
        int newTarget = target;
        if (spans1DocId < newTarget) {
            spans1DocId = spans1.spans.advance(target);
            if (spans1DocId == NO_MORE_DOCS) {
                docId = NO_MORE_DOCS;
                return docId;
            }
            newTarget = Math.max(newTarget, spans1DocId);
        }
        int spans2DocId = spans2.spans.docID();
        // advance 2
        if (spans2DocId < newTarget) {
            spans2DocId = spans2.spans.advance(newTarget);
            if (spans2DocId == NO_MORE_DOCS) {
                docId = NO_MORE_DOCS;
                return docId;
            }
        }
        // check equal docId, otherwise next
        if (spans1DocId == spans2DocId) {
            docId = spans1DocId;
            // check match
            if (goToNextStartPosition()) {
                return docId;
            } else {
                return nextDoc();
            }
        } else {
            return nextDoc();
        }
    }
}
python
def isLocked(self):
    '''
    Checks if the device screen is locked.

    @return True if the device screen is locked
    '''
    self.__checkTransport()
    lockScreenRE = re.compile('mShowingLockscreen=(true|false)')
    dwp = self.shell('dumpsys window policy')
    m = lockScreenRE.search(dwp)
    if m:
        return m.group(1) == 'true'
    dreamingLockscreenRE = re.compile('mDreamingLockscreen=(true|false)')
    m = dreamingLockscreenRE.search(dwp)
    if m:
        return m.group(1) == 'true'
    raise RuntimeError("Couldn't determine screen lock state")
java
public ServerBuilder http2MaxStreamsPerConnection(long http2MaxStreamsPerConnection) {
    checkArgument(http2MaxStreamsPerConnection > 0 &&
                  http2MaxStreamsPerConnection <= 0xFFFFFFFFL,
                  "http2MaxStreamsPerConnection: %s (expected: a positive 32-bit unsigned integer)",
                  http2MaxStreamsPerConnection);
    this.http2MaxStreamsPerConnection = http2MaxStreamsPerConnection;
    return this;
}
python
def main(command_line=True, **kwargs): """ NAME cit_magic.py DESCRIPTION converts CIT and .sam format files to magic_measurements format files SYNTAX cit_magic.py [command line options] OPTIONS -h: prints the help message and quits. -usr USER: identify user, default is "" -f FILE: specify .sam format input file, required -WD Working Directory: output directory where files are going to go -fsi SITEFILE : specify file with site names and locations [tab delimited magic file] -F FILE: specify output measurements file, default is magic_measurements.txt -Fsp FILE: specify output er_specimens.txt file, default is er_specimens.txt -Fsi FILE: specify output er_sites.txt file, default is er_sites.txt -Fsa FILE: specify output er_samples.txt file, default is er_samples.txt # LORI -n [gm,kg,cc,m3]: specify normalization -A: don't average replicate measurements -spc NUM : specify number of characters to designate a specimen, default = 0 -ncn NCON: specify naming convention -loc LOCNAME : specify location/study name, must have either LOCNAME or SITEFILE or be a synthetic -mcd [FS-FD:SO-MAG,.....] colon delimited list for method codes applied to all specimens in .sam file -dc B PHI THETA: dc lab field (in microTesla), phi,and theta must be input as a tuple "(DC,PHI,THETA)". If not input user will be asked for values, this is advantagious if there are differing dc fields between steps or specimens. Note: this currently only works with the decimal IZZI naming convetion (XXX.0,1,2 where XXX is the treatment temperature and 0 is a zero field step, 1 is in field, and 2 is a pTRM check). All other steps are hardcoded dc_field = 0. INPUT Best to put separate experiments in separate files (all AF, thermal, thellier, trm aquisition, Shaw, etc.) NOTES: Sample naming convention: [1] XXXXY: where XXXX is an arbitrary length site designation and Y is the single character sample designation. e.g., TG001a is the first sample from site TG001. [default] [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitary length) [3: default] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitary length) [4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX [5] site name = sample name [6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED [7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY NB: all others you will have to either customize your self or e-mail [email protected] or [email protected] for help. """ # # NB: use PHI, THETA = -1 -1 to signal that it changes, i.e. in anisotropy experiment # -ac B : peak AF field (in mT) for ARM acquisition, default is none # #initialize variables norm='cc' samp_con,Z='3',1 meas_file='magic_measurements.txt' spec_file='er_specimens.txt' samp_file='er_samples.txt' site_file='er_sites.txt' ErSpecs,ErSamps,ErSites,ErLocs,ErCits=[],[],[],[],[] MeasRecs=[] specnum,units,locname=0,"1","unknown" citation="This study" dir_path='.' 
args=sys.argv if command_line: if '-WD' in args: ind=args.index("-WD") dir_path=args[ind+1] if "-h" in args: print(main.__doc__) return False if "-usr" in args: ind=args.index("-usr") user=args[ind+1] if '-F' in args: ind=args.index("-F") meas_file=args[ind+1] if '-Fsp' in args: ind=args.index("-Fsp") spec_file=args[ind+1] if '-Fsa' in args: ind=args.index("-Fsa") samp_file=args[ind+1] if '-Fsi' in args: # LORI addition ind=args.index("-Fsi") site_file=args[ind+1] if '-loc' in args: ind=args.index("-loc") locname=args[ind+1] if '-mcd' in args: ind=args.index("-mcd") methods=args[ind+1] else: methods='SO-MAG' if '-spc' in args: ind=args.index("-spc") specnum=-int(args[ind+1]) if '-n' in args: ind=args.index("-n") norm=args[ind+1] if "-A" in args: avg=1 else: avg=0 if '-dc' in args: ind=args.index('-dc') DC_FIELD,DC_PHI,DC_THETA=list(map(float,args[ind+1].strip('( ) [ ]').split(','))) DC_FIELD *= 1e-6 yn='' GET_DC_PARAMS=False else: GET_DC_PARAMS,DC_FIELD,DC_PHI,DC_THETA,yn=True,0,0,-90,'' if "-ncn" in args: ind=args.index("-ncn") samp_con=sys.argv[ind+1] if "4" in samp_con: if "-" not in samp_con: print("option [4] must be in form 4-Z where Z is an integer") return False, "naming convention option [4] must be in form 4-Z where Z is an integer" else: Z=samp_con.split("-")[1] samp_con="4" elif "7" in samp_con: if "-" not in samp_con: print("option [7] must be in form 7-Z where Z is an integer") return False, "naming convention option [7] must be in form 7-Z where Z is an integer" else: Z=samp_con.split("-")[1] samp_con="7" if '-f' in args: ind=args.index("-f") magfile=args[ind+1] if '-ID' in args: ind = args.index('-ID') input_dir_path = args[ind+1] else: input_dir_path = os.path.split(magfile)[0] output_dir_path = dir_path # LJ # if you are running as a module: elif not command_line: dir_path = kwargs.get('dir_path', '.') user = kwargs.get('user', '') meas_file = kwargs.get('meas_file', 'magic_measurements.txt') # outfile spec_file = kwargs.get('spec_file', 'er_specimens.txt') # specimen outfile samp_file = kwargs.get('samp_file', 'er_samples.txt') # sample outfile site_file = kwargs.get('site_file', 'er_sites.txt') # site outfile locname = kwargs.get('locname', '') methods = kwargs.get('methods', ['SO-MAG']) specnum = -int(kwargs.get('specnum', 0)) norm = kwargs.get('norm', 'cc') avg = kwargs.get('avg', 0) # 0 means do average, 1 means don't samp_con = kwargs.get('samp_con', '3') magfile = kwargs.get('magfile', '') input_dir_path = kwargs.get('input_dir_path', os.path.split(magfile)[0]) output_dir_path = dir_path DC_FIELD,DC_PHI,DC_THETA = list(map(float, kwargs.get('dc_params', (0,0,-90)))) DC_FIELD *= 1e-6 yn = '' if DC_FIELD==0 and DC_PHI==0 and DC_THETA==-90: GET_DC_PARAMS=True else: GET_DC_PARAMS=False # done with module-specific stuff # formatting and checking variables if "4" in samp_con: if "-" not in samp_con: print("option [4] must be in form 4-Z where Z is an integer") return False, "naming convention option [4] must be in form 4-Z where Z is an integer" else: Z=samp_con.split("-")[1] samp_con="4" elif "7" in samp_con: if "-" not in samp_con: print("option [7] must be in form 7-Z where Z is an integer") return False, "naming convention option [7] must be in form 7-Z where Z is an integer" else: Z=samp_con.split("-")[1] samp_con="7" magfile = os.path.join(input_dir_path, magfile) spec_file = os.path.join(output_dir_path, spec_file) samp_file = os.path.join(output_dir_path, samp_file) site_file = os.path.join(output_dir_path, site_file) meas_file= os.path.join(output_dir_path, 
    FIRST_GET_DC=True
    try:
        with open(magfile,'r') as file_input:
            File = file_input.readlines()
    except Exception as ex:
        print("bad sam file name: ", magfile)
        return False, "bad sam file name"
    if len(File) == 1:
        File = File[0].split('\r')
        File = [x+"\r\n" for x in File]
    sids,ln,format=[],0,'CIT'
    formats=['CIT','2G','APP','JRA']
    if File[ln].strip()=='CIT':
        ln+=1
    ErLocRec={}
    ErLocRec["er_location_name"]=locname
    ErLocRec["er_citation_names"]=citation
    comment=File[ln]
    if comment=='CIT':
        format=comment
        ln+=1
        comment=File[ln]
    print(comment)
    ln+=1
    specimens,samples,sites=[],[],[]
    if format=='CIT':
        line=File[ln].split()
        site_lat=line[0]
        site_lon=line[1]
        ErLocRec["location_begin_lat"]=site_lat
        ErLocRec["location_begin_lon"]=site_lon
        ErLocRec["location_end_lat"]=site_lat
        ErLocRec["location_end_lon"]=site_lon
        ErLocs.append(ErLocRec)
        try:
            Cdec=float(line[2])
        except ValueError:
            pdb.set_trace()
        for k in range(ln+1,len(File)):
            line=File[k]
            rec=line.split()
            if rec == []:
                continue
            specimen=rec[0]
            specimens.append(specimen)
    for specimen in specimens:
        ErSpecRec,ErSampRec,ErSiteRec={},{},{}
        if specnum!=0:
            sample=specimen[:specnum]
        else:
            sample=specimen
        site=pmag.parse_site(sample,samp_con,Z)
        ErSpecRec['er_specimen_name']=specimen
        ErSpecRec['er_sample_name']=sample
        ErSpecRec['er_site_name']=site
        ErSpecRec['er_location_name']=locname
        ErSpecRec['er_citation_names']=citation
        ErSampRec['er_sample_name']=sample
        ErSampRec['er_site_name']=site
        ErSampRec['er_location_name']=locname
        ErSampRec['er_citation_names']=citation
        ErSampRec['magic_method_codes']=methods
        ErSampRec['sample_declination_correction']='%7.1f'%(Cdec)
        ErSiteRec['er_site_name']=site
        ErSiteRec['er_location_name']=locname
        ErSiteRec['er_citation_names']=citation
        ErSiteRec['site_lat']=site_lat
        ErSiteRec['site_lon']=site_lon
        with open(os.path.join(input_dir_path,specimen),'r') as finput:
            Lines = list(finput.readlines())
        comment = ""
        line=Lines[0].split()
        if len(line)>2:
            comment=line[2]
        info=Lines[1].split()
        vol=float(info[-1])
        if vol!=1.0:
            if norm=='cc': units="1"
            if norm=='m3': units="2"
            ErSpecRec['specimen_weight']=""
            if units=="1" or units=="":  # cc (or unspecified): convert cm^3 to m^3
                ErSpecRec['specimen_volume']='%10.3e'%(vol*1e-6)
            else:
                ErSpecRec['specimen_volume']='%10.3e'%(vol)
        else:
            if norm=='cc': units="1"
            if norm=='m3': units="2"
            ErSpecRec['specimen_volume']=""
            if units=="1" or units=="":  # g (or unspecified): convert g to kg
                ErSpecRec['specimen_weight']='%10.3e'%(vol*1e-3)
            else:
                ErSpecRec['specimen_weight']='%10.3e'%(vol)
        dip=float(info[-2])
        dip_direction=float(info[-3])+Cdec+90.
        sample_dip=-float(info[-4])
        sample_azimuth=float(info[-5])+Cdec-90.
        if len(info)>5:
            ErSampRec['sample_height']=info[-6]
        else:
            ErSampRec['sample_height']='0'
        ErSampRec['sample_azimuth']='%7.1f'%(sample_azimuth)
        ErSampRec['sample_dip']='%7.1f'%(sample_dip)
        ErSampRec['sample_bed_dip']='%7.1f'%(dip)
        ErSampRec['sample_bed_dip_direction']='%7.1f'%(dip_direction)
        ErSampRec['sample_class']=''
        ErSampRec['sample_type']=''
        ErSampRec['sample_lithology']=''
        if Cdec!=0 and Cdec!="":  # a declination correction was actually applied
            ErSampRec['magic_method_codes']='SO-CMD-NORTH'
        else:
            ErSampRec['magic_method_codes']='SO-MAG'
        for line in Lines[2:len(Lines)]:
            if line == '\n':
                continue
            MeasRec=ErSpecRec.copy()
            # Remove specimen_volume and specimen_weight as they do not exist in the magic_measurement table
            del MeasRec["specimen_volume"]
            del MeasRec["specimen_weight"]
            treat_type=line[0:3]
            if treat_type[1] == '.':
                treat_type = 'NRM'
            treat=line[2:6]
            try:
                float(treat)
            except ValueError:
                treat = line[3:6]
            if treat_type.startswith('NRM'):
                MeasRec['magic_method_codes']='LT-NO'
                MeasRec['measurement_temp']='273'
                MeasRec['treatment_temp']='273'
                MeasRec['treatment_dc_field']='0'
                MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
                MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
                MeasRec['treatment_ac_field']='0'
            elif treat_type.startswith('AF'):
                MeasRec['magic_method_codes']='LT-AF-Z'
                MeasRec['measurement_temp']='273'
                MeasRec['treatment_temp']='273'
                MeasRec['treatment_dc_field']='0'
                MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
                MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
                if treat.strip() == '':
                    MeasRec['treatment_ac_field']='0'
                else:
                    MeasRec['treatment_ac_field']='%10.3e'%(float(treat)*1e-3)
            elif treat_type.startswith('ARM'):
                MeasRec['magic_method_codes']="LP-ARM"
                MeasRec['measurement_temp']='273'
                MeasRec['treatment_temp']='273'
                MeasRec['treatment_dc_field']='0'
                MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
                MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
                if treat.strip() == '':
                    MeasRec['treatment_ac_field']='0'
                else:
                    MeasRec['magic_method_codes']="LP-ARM-AFD"
                    MeasRec['treatment_ac_field']='%10.3e'%(float(treat)*1e-3)
            elif treat_type.startswith('TT'):
                MeasRec['magic_method_codes']='LT-T-Z'
                MeasRec['measurement_temp']='273'
                if treat.strip() == '':
                    MeasRec['treatment_temp']='273'
                else:
                    MeasRec['treatment_temp']='%7.1f'%(float(treat)+273)
                MeasRec['treatment_dc_field']='0'
                MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
                MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
                MeasRec['treatment_ac_field']='0'
            elif treat_type.startswith('LT') or treat_type.startswith('LN2'):
                MeasRec['magic_method_codes']='LT-LT-Z'
                MeasRec['measurement_temp']='273'
                MeasRec['treatment_temp']='77'
                MeasRec['treatment_dc_field']='0'
                MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
                MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
                MeasRec['treatment_ac_field']='0'
            elif line[4] == '0':  # assume decimal IZZI format, zero field step, so the dc field can be hardcoded
                MeasRec['magic_method_codes']='LT-T-Z'
                MeasRec['measurement_temp']='273'
                MeasRec['treatment_temp']=str(int(treat_type) + 273)
                MeasRec['treatment_dc_field']='0'
                MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
                MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
                MeasRec['treatment_ac_field']='0'
            elif line[4] == '1':  # assume decimal IZZI format, in-field step
                if GET_DC_PARAMS:
                    GET_DC_PARAMS, FIRST_GET_DC, yn, DC_FIELD, DC_PHI, DC_THETA = get_dc_params(FIRST_GET_DC,specimen,treat_type,yn)
                MeasRec['magic_method_codes']='LT-T-I'
                MeasRec['measurement_temp']='273'
                MeasRec['treatment_temp']=str(int(treat_type) + 273)
                MeasRec['treatment_dc_field']='%1.2e'%DC_FIELD
                MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
                MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
                MeasRec['treatment_ac_field']='0'
            elif line[4] == '2':  # assume decimal IZZI format, pTRM check step
                if GET_DC_PARAMS:
                    GET_DC_PARAMS, FIRST_GET_DC, yn, DC_FIELD, DC_PHI, DC_THETA = get_dc_params(FIRST_GET_DC,specimen,treat_type,yn)
                MeasRec['magic_method_codes']='LT-PTRM-I'
                MeasRec['measurement_temp']='273'
                MeasRec['treatment_temp']=str(int(treat_type) + 273)
                MeasRec['treatment_dc_field']='%1.2e'%DC_FIELD
                MeasRec['treatment_dc_field_phi'] = '%1.2f'%DC_PHI
                MeasRec['treatment_dc_field_theta'] = '%1.2f'%DC_THETA
                MeasRec['treatment_ac_field']='0'
            else:
                print("trouble with your treatment steps")
            MeasRec['measurement_dec']=line[46:51]
            MeasRec['measurement_inc']=line[52:58]
            M='%8.2e'%(float(line[31:39])*vol*1e-3)  # convert to Am^2
            MeasRec['measurement_magn_moment']=M
            MeasRec['measurement_csd']='%7.1f'%(float(line[41:46]))  # circular standard deviation
            MeasRec["measurement_positions"]='1'
            MeasRec['measurement_standard']='u'
            if len(line)>60:
                MeasRec['magic_instrument_codes']=line[85:]
                MeasRec['measurement_sd_x']='%8.2e'%(float(line[58:67])*1e-8)  # convert e-5 emu to Am^2
                MeasRec['measurement_sd_y']='%8.2e'%(float(line[67:76])*1e-8)
                MeasRec['measurement_sd_z']='%8.2e'%(float(line[76:85])*1e-8)
            MeasRecs.append(MeasRec)
        ErSpecs.append(ErSpecRec)
        if sample not in samples:
            samples.append(sample)
            ErSamps.append(ErSampRec)
        site=pmag.parse_site(sample,samp_con,Z)
        if site not in sites:
            sites.append(site)
            ErSites.append(ErSiteRec)
    pmag.magic_write(spec_file,ErSpecs,'er_specimens')
    print('specimens stored in ',spec_file)
    pmag.magic_write(samp_file,ErSamps,'er_samples')
    print('samples stored in ',samp_file)
    pmag.magic_write(site_file,ErSites,'er_sites')
    print('sites stored in ', site_file)
    Fixed=pmag.measurements_methods(MeasRecs,avg)
    pmag.magic_write(meas_file,Fixed,'magic_measurements')
    print('data stored in ',meas_file)
    return True, meas_file
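The decimal IZZI convention handled above packs the step kind into the digit after the decimal point. A minimal standalone sketch of that decoding (the function name and labels are illustrative, not part of cit_magic):

def decode_izzi_step(code):
    """Decode a decimal IZZI treatment code such as '500.1' into a
    (treatment_temp_kelvin, step_kind) pair, per the XXX.0/1/2 convention."""
    temp_c, step = code.split('.')
    kinds = {'0': 'zero field (LT-T-Z)',
             '1': 'in field (LT-T-I)',
             '2': 'pTRM check (LT-PTRM-I)'}
    return int(temp_c) + 273, kinds[step]

print(decode_izzi_step('500.1'))  # (773, 'in field (LT-T-I)')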
python
def from_ranges(ranges, name, data_key, start_key='offset', length_key='length'): """ Creates a list of commands from a list of ranges. Each range is converted to two commands: a start_* and a stop_*. """ commands = [] for r in ranges: data = r[data_key] start = r[start_key] stop = start + r[length_key] commands.extend(Command.start_stop(name, start, stop, data)) return commands
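A quick illustration of the start/stop arithmetic above, with the Command dependency stripped out (the range dicts are made-up sample data):

ranges = [{'offset': 0, 'length': 4, 'pid': 7},
          {'offset': 10, 'length': 2, 'pid': 8}]

# Each range yields a (start, stop) pair where stop = offset + length.
pairs = [(r['offset'], r['offset'] + r['length']) for r in ranges]
print(pairs)  # [(0, 4), (10, 12)]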
python
def calculate_error(self): """Estimate the numerical error based on the fluxes calculated by the current and the last method. >>> from hydpy.models.test_v1 import * >>> parameterstep() >>> model.numvars.idx_method = 2 >>> results = numpy.asarray(fluxes.fastaccess._q_results) >>> results[:4] = 0., 3., 4., 0. >>> model.calculate_error() >>> from hydpy import round_ >>> round_(model.numvars.error) 1.0 """ self.numvars.error = 0. fluxes = self.sequences.fluxes for flux in fluxes.numerics: results = getattr(fluxes.fastaccess, '_%s_results' % flux.name) diff = (results[self.numvars.idx_method] - results[self.numvars.idx_method-1]) self.numvars.error = max(self.numvars.error, numpy.max(numpy.abs(diff)))
python
def epcrparsethreads(self): """ Parse the ePCR results, and run BLAST on the parsed results """ from Bio import SeqIO # Create the threads for the BLAST analysis for sample in self.metadata: if sample.general.bestassemblyfile != 'NA': threads = Thread(target=self.epcrparse, args=()) threads.setDaemon(True) threads.start() for sample in self.metadata: if sample.general.bestassemblyfile != 'NA': if sample[self.analysistype].primers != 'NA': # Initialise a dictionary to store the SeqIO records of each assembly record = dict() # Initialise dictionaries to store results in the object sample[self.analysistype].blastresults = dict() sample[self.analysistype].rawblastresults = dict() # Load the records from the assembly into the dictionary for rec in SeqIO.parse(sample.general.bestassemblyfile, 'fasta'): record[rec.id] = str(rec.seq) # Iterate through the ePCR results for line in sample[self.analysistype].epcrresults: # The data of interest is in the lines that do not start with a # # TLH 2016-SEQ-0359_4_length_321195_cov_28.6354_ID_3773 + 227879 228086 0 0 208/1000-1000 if not line.startswith('#'): # Add the variables to the queue self.epcrparsequeue.put((sample, record, line)) self.epcrparsequeue.join()
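A self-contained sketch of the daemon-thread/queue pattern used here (worker and the items are stand-ins; the real code targets self.epcrparse and joins self.epcrparsequeue):

from queue import Queue
from threading import Thread

work_queue = Queue()

def worker():
    while True:
        item = work_queue.get()
        # ... parse one ePCR result line here ...
        work_queue.task_done()

for _ in range(4):  # daemon threads die with the main program
    Thread(target=worker, daemon=True).start()

for item in ('line1', 'line2', 'line3'):
    work_queue.put(item)
work_queue.join()  # blocks until every queued item is processed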
java
public void addEdge(DiEdge e) { if (edges.add(e)) { int s = e.get1(); int t = e.get2(); addNode(s); addNode(t); predecessors.get(t).add(s); successors.get(s).add(t); } }
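The same predecessor/successor bookkeeping in a few lines of Python, for comparison (a sketch, not the class above):

from collections import defaultdict

edges = set()
predecessors = defaultdict(set)
successors = defaultdict(set)

def add_edge(s, t):
    if (s, t) not in edges:  # only index an edge the first time it is added
        edges.add((s, t))
        predecessors[t].add(s)
        successors[s].add(t)

add_edge(1, 2)
add_edge(1, 3)
print(dict(successors))  # {1: {2, 3}}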
python
def min_temperature(self, unit='kelvin'):
    """Returns a tuple containing the min value in the temperature
    series preceded by its timestamp

    :param unit: the unit of measure for the temperature values. May be among:
        '*kelvin*' (default), '*celsius*' or '*fahrenheit*'
    :type unit: str
    :returns: a tuple
    :raises: ValueError when invalid values are provided for the unit of
        measure or the measurement series is empty
    """
    if unit not in ('kelvin', 'celsius', 'fahrenheit'):
        raise ValueError("Invalid value for parameter 'unit'")
    minimum = min(self._purge_none_samples(self.temperature_series()),
                  key=itemgetter(1))
    if unit == 'kelvin':
        result = minimum
    elif unit == 'celsius':
        result = (minimum[0], temputils.kelvin_to_celsius(minimum[1]))
    else:  # fahrenheit
        result = (minimum[0], temputils.kelvin_to_fahrenheit(minimum[1]))
    return result
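The heavy lifting is min() with an itemgetter(1) key over (timestamp, kelvin) tuples; a standalone sketch with made-up samples:

from operator import itemgetter

series = [(1531780000, 293.15), (1531783600, 288.65), (1531787200, 291.0)]
ts, kelvin = min(series, key=itemgetter(1))  # smallest temperature wins
print(ts, round(kelvin - 273.15, 2))  # 1531783600 15.5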
java
@Nonnull public AS2ClientResponse sendSynchronous () throws AS2ClientBuilderException { // Perform SMP client lookup performSMPClientLookup (); // Set derivable values setDefaultDerivedValues (); // Verify the whole data set verifyContent (); // Build message // 1. read business document into memory - this may be a bottleneck! Element aBusinessDocumentXML = null; if (m_aBusinessDocumentRes != null) { final Document aXMLDocument = DOMReader.readXMLDOM (m_aBusinessDocumentRes); if (aXMLDocument == null) throw new AS2ClientBuilderException ("Failed to read business document '" + m_aBusinessDocumentRes.getPath () + "' as XML"); aBusinessDocumentXML = aXMLDocument.getDocumentElement (); LOGGER.info ("Successfully parsed the business document"); } else { aBusinessDocumentXML = m_aBusinessDocumentElement; } if (aBusinessDocumentXML == null) throw new AS2ClientBuilderException ("No XML business content present!"); // 2. validate the business document if (m_aVESID != null) validateOutgoingBusinessDocument (aBusinessDocumentXML); // 3. build PEPPOL SBDH data final PeppolSBDHDocument aSBDHDoc = PeppolSBDHDocument.create (aBusinessDocumentXML, PeppolIdentifierFactory.INSTANCE); aSBDHDoc.setSenderWithDefaultScheme (m_aPeppolSenderID.getValue ()); aSBDHDoc.setReceiver (m_aPeppolReceiverID.getScheme (), m_aPeppolReceiverID.getValue ()); aSBDHDoc.setDocumentType (m_aPeppolDocumentTypeID.getScheme (), m_aPeppolDocumentTypeID.getValue ()); aSBDHDoc.setProcess (m_aPeppolProcessID.getScheme (), m_aPeppolProcessID.getValue ()); // 4. set client properties // Start building the AS2 client settings final AS2ClientSettings aAS2ClientSettings = new AS2ClientSettings (); // Key store aAS2ClientSettings.setKeyStore (m_aKeyStoreType, m_aKeyStoreFile, m_sKeyStorePassword); aAS2ClientSettings.setSaveKeyStoreChangesToFile (m_bSaveKeyStoreChangesToFile); // Fixed sender aAS2ClientSettings.setSenderData (m_sSenderAS2ID, m_sSenderAS2Email, m_sSenderAS2KeyAlias); // Dynamic receiver aAS2ClientSettings.setReceiverData (m_sReceiverAS2ID, m_sReceiverAS2KeyAlias, m_sReceiverAS2Url); aAS2ClientSettings.setReceiverCertificate (m_aReceiverCert); // AS2 stuff - no need to change anything in this block aAS2ClientSettings.setPartnershipName (aAS2ClientSettings.getSenderAS2ID () + "-" + aAS2ClientSettings.getReceiverAS2ID ()); aAS2ClientSettings.setMDNOptions (new DispositionOptions ().setMICAlg (m_eSigningAlgo) .setMICAlgImportance (DispositionOptions.IMPORTANCE_REQUIRED) .setProtocol (DispositionOptions.PROTOCOL_PKCS7_SIGNATURE) .setProtocolImportance (DispositionOptions.IMPORTANCE_REQUIRED)); aAS2ClientSettings.setEncryptAndSign (null, m_eSigningAlgo); aAS2ClientSettings.setMessageIDFormat (m_sMessageIDFormat); aAS2ClientSettings.setConnectTimeoutMS (m_nConnectTimeoutMS); aAS2ClientSettings.setReadTimeoutMS (m_nReadTimeoutMS); // Add a custom header to request an MDN for IBM implementation aAS2ClientSettings.customHeaders ().addHeader (CHttpHeader.DISPOSITION_NOTIFICATION_TO, "dummy"); final AS2ClientRequest aRequest = new AS2ClientRequest (m_sAS2Subject); // 5. 
// assemble and send

// Version with huge memory consumption
final StandardBusinessDocument aSBD = new PeppolSBDHDocumentWriter ().createStandardBusinessDocument (aSBDHDoc);
try (final NonBlockingByteArrayOutputStream aBAOS = new NonBlockingByteArrayOutputStream ())
{
  final SBDMarshaller aSBDMarshaller = new SBDMarshaller ();

  // Set custom namespace context (work around an OpusCapita problem)
  if (m_aNamespaceContext != null)
    aSBDMarshaller.setNamespaceContext (m_aNamespaceContext);
  else
  {
    // Ensure default marshaller without a prefix is used!
    aSBDMarshaller.setNamespaceContext (new MapBasedNamespaceContext ().setDefaultNamespaceURI (CSBDH.SBDH_NS));
  }

  // Write to BAOS
  if (aSBDMarshaller.write (aSBD, aBAOS).isFailure ())
    throw new AS2ClientBuilderException ("Failed to serialize SBD!");

  if (false)
  {
    // Use data to force
    aRequest.setData (aBAOS.toByteArray ());
  }
  else
  {
    // Using a String is better when having a
    // com.sun.xml.ws.encoding.XmlDataContentHandler installed!
    aRequest.setData (aBAOS.getAsString (StandardCharsets.UTF_8), StandardCharsets.UTF_8);
  }

  // Explicitly add application/xml even though the "setData" may have
  // suggested something else (like text/plain)
  aRequest.setContentType (CMimeType.APPLICATION_XML.getAsString ());

  // Set the custom content transfer encoding
  aRequest.setContentTransferEncoding (m_eCTE);
}

final AS2Client aAS2Client = m_aAS2ClientFactory.get ();
if (false)
{
  // Local Fiddler proxy
  aAS2Client.setHttpProxy (new Proxy (Proxy.Type.HTTP, new InetSocketAddress ("127.0.0.1", 8888)));
}
final AS2ClientResponse aResponse = aAS2Client.sendSynchronous (aAS2ClientSettings, aRequest);
return aResponse;
}
java
@Override public void resetValue() { super.resetValue(); this.setSubmittedValue(null); getStateHelper().remove(PropertyKeys.localValueSet); getStateHelper().remove(PropertyKeys.valid); }
java
public List<Map.Entry<String, Float>> analogy(String A, String B, String C, int size) { Vector a = storage.get(A); Vector b = storage.get(B); Vector c = storage.get(C); if (a == null || b == null || c == null) { return Collections.emptyList(); } List<Map.Entry<String, Float>> resultList = nearest(a.minus(b).add(c), size + 3); ListIterator<Map.Entry<String, Float>> listIterator = resultList.listIterator(); while (listIterator.hasNext()) { String key = listIterator.next().getKey(); if (key.equals(A) || key.equals(B) || key.equals(C)) { listIterator.remove(); } } if (resultList.size() > size) { resultList = resultList.subList(0, size); } return resultList; }
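The underlying vector arithmetic is the classic a - b + c analogy query followed by a nearest-neighbour search. A NumPy sketch with a made-up two-dimensional vocabulary:

import numpy as np

vocab = {'king':  np.array([0.9, 0.1]),
         'man':   np.array([0.5, 0.0]),
         'woman': np.array([0.5, 0.4]),
         'queen': np.array([0.9, 0.5])}

target = vocab['king'] - vocab['man'] + vocab['woman']

def cosine(u, v):
    return float(u @ v) / (np.linalg.norm(u) * np.linalg.norm(v))

# Rank everything except the three query words, exactly as the Java filters A, B, C.
candidates = {w: v for w, v in vocab.items() if w not in ('king', 'man', 'woman')}
best = max(candidates, key=lambda w: cosine(candidates[w], target))
print(best)  # queen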
java
@Override public synchronized void run() { final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); if (isTraceOn && tc.isEntryEnabled()) Tr.entry(tc, "run: size=" + ivAllRemoteAsyncResults.size()); if (ivIsCanceled) { //if this instance has been canceled, we do no more processing. // this also guarantees that a future alarm is not created. if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "run: cancelled"); return; } ivFuture = null; int numRemoved = 0; // Re-write the alarm to use the LinkedHashSet, iterating thru the oldest entries removing // the server-side Future objects that have timed out until an object that hasn't reached // timeout or the list is empty. This will avoid iterating over every entry in the list. d623593 if (!ivAllRemoteAsyncResults.isEmpty()) { long currentTime = System.currentTimeMillis(); for (Iterator<RemoteAsyncResultImpl> iterator = ivAllRemoteAsyncResults.iterator(); iterator.hasNext();) { RemoteAsyncResultImpl asyncResult = iterator.next(); long staleDuration = currentTime - asyncResult.getTimeoutStartTime(); if (staleDuration >= ivFutureObjectTimeoutMillis) { releaseResources(asyncResult); // d690014.3 iterator.remove(); numRemoved++; } else { // d690014 - Schedule an alarm to be fired at the time that the // next future should time out. If that is soon, then use a // minimum interval to avoid scheduling too many alarms. long alarmTime = Math.max(ivFutureObjectTimeoutMillis - staleDuration, MINIMUM_ALARM_INTERVAL_MILLIS); ivFuture = ivScheduledExecutor.schedule(this, alarmTime, TimeUnit.MILLISECONDS); if (isTraceOn && tc.isDebugEnabled()) Tr.debug(tc, "next " + asyncResult + "; alarm=" + alarmTime); break; } } // d690014.1 - If we previously issued a warning because there were // too many results, check if this sweep reduced the number of results // enough that we should warn again if the number of we get too many // results again in the future. if (ivWarnedNearMax) { int size = ivAllRemoteAsyncResults.size(); ivWarnedNearMax &= size >= ivSafeResultsThreshold; ivWarnedExceededMax &= size >= ivNearMaxResultsThreshold; } } if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "run: size=" + ivAllRemoteAsyncResults.size() + ", removed=" + numRemoved); }
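A Python sketch of the sweep strategy: because the collection is insertion-ordered, expired entries cluster at the front, so evict until the first survivor and stop (timestamps are illustrative):

import time
from collections import OrderedDict

TIMEOUT = 60.0
results = OrderedDict()           # key -> timeout start time, oldest first
results['a'] = time.time() - 120  # already expired
results['b'] = time.time() - 30   # still fresh

now = time.time()
for key, started in list(results.items()):
    if now - started >= TIMEOUT:
        del results[key]  # release the expired server-side result
    else:
        break             # every later entry is younger; stop sweeping
print(list(results))  # ['b']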
java
public Page<Dataset> listDatasets() { // [START bigquery_list_datasets] // List datasets in the default project Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100)); for (Dataset dataset : datasets.iterateAll()) { // do something with the dataset } // [END bigquery_list_datasets] return datasets; }
python
def dynamic_content_item_show(self, id, **kwargs): "https://developer.zendesk.com/rest_api/docs/core/dynamic_content#show-item" api_path = "/api/v2/dynamic_content/items/{id}.json" api_path = api_path.format(id=id) return self.call(api_path, **kwargs)
python
def transform_sqlvm_group_output(result):
    '''
    Transforms a SQL virtual machine group result to eliminate unnecessary parameters.
    '''
    from collections import OrderedDict
    from msrestazure.tools import parse_resource_id
    try:
        resource_group = getattr(result, 'resource_group', None) or parse_resource_id(result.id)['resource_group']
        wsfc_object = format_wsfc_domain_profile(result.wsfc_domain_profile)
        # Create a dictionary with the relevant parameters
        output = OrderedDict([('id', result.id),
                              ('location', result.location),
                              ('name', result.name),
                              ('provisioningState', result.provisioning_state),
                              ('sqlImageOffer', result.sql_image_offer),
                              ('sqlImageSku', result.sql_image_sku),
                              ('resourceGroup', resource_group),
                              ('wsfcDomainProfile', wsfc_object),
                              ('tags', result.tags)])
        return output
    except AttributeError:
        # Return the response object if the formatting fails
        return result
java
@Override public void write(TextWriterStream out, String label, Object object) { StringBuilder buf = new StringBuilder(100); if(label != null) { buf.append(label).append('='); } if(object != null) { buf.append(object.toString()); } out.commentPrintLn(buf); }
python
def record_stage_state(self, phase, stage): """Record the completion times of phases and stages""" key = '{}-{}'.format(phase, stage if stage else 1) self.buildstate.state[key] = time()
python
def remote(self): """ Return the remote for this partition :return: """ from ambry.exc import NotFoundError ds = self.dataset if 'remote_name' not in ds.data: raise NotFoundError('Could not determine remote for partition: {}'.format(self.identity.fqname)) return self._bundle.library.remote(ds.data['remote_name'])
java
public static void splitAssociated( List<AssociatedTriple> pairs , List<Point2D_F64> view1 , List<Point2D_F64> view2 , List<Point2D_F64> view3 ) { for( AssociatedTriple p : pairs ) { view1.add(p.p1); view2.add(p.p2); view3.add(p.p3); } }
java
public static Builder in (TimeZone zone, Locale locale) { return with(Calendar.getInstance(zone, locale)); }
python
def _transpose_dict_list(dict_list): """Transpose a nested dict[list] into a list[nested dict].""" # 1. Unstack numpy arrays into list dict_list = utils.map_nested(np_to_list, dict_list, dict_only=True) # 2. Extract the sequence length (and ensure the length is constant for all # elements) length = {'value': None} # dict because `nonlocal` is Python3 only def update_length(elem): if length['value'] is None: length['value'] = len(elem) elif length['value'] != len(elem): raise ValueError( 'The length of all elements of one sequence should be the same. ' 'Got {} != {}'.format(length['value'], len(elem))) return elem utils.map_nested(update_length, dict_list, dict_only=True) # 3. Extract each individual elements return [ utils.map_nested(lambda elem: elem[i], dict_list, dict_only=True) # pylint: disable=cell-var-from-loop for i in range(length['value']) ]
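For a flat (non-nested) dict the same transposition fits in a few lines; a sketch without the map_nested machinery:

def transpose(dict_of_lists):
    lengths = {len(v) for v in dict_of_lists.values()}
    if len(lengths) != 1:
        raise ValueError('All sequences must share one length, got %s' % lengths)
    return [{k: v[i] for k, v in dict_of_lists.items()}
            for i in range(lengths.pop())]

print(transpose({'x': [1, 2], 'y': ['a', 'b']}))
# [{'x': 1, 'y': 'a'}, {'x': 2, 'y': 'b'}]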
java
public IJsonMarshaller getJsonMarshaller(Annotation[] annotations) throws JsonMarshallerException { if (annotations != null) { for (Annotation annotation : annotations) { if (JsonUnmarshaller.class.isAssignableFrom(annotation.annotationType())) { return getJsonMarshallerFromAnnotation((JsonUnmarshaller) annotation); } } } return null; }
python
def xrange(self, stream, start='-', stop='+', count=None):
    """Retrieve a range of messages from a stream.

    By default the special IDs '-' and '+' span the entire stream;
    ``count`` caps the number of messages returned.
    """
    if count is not None:
        extra = ['COUNT', count]
    else:
        extra = []
    fut = self.execute(b'XRANGE', stream, start, stop, *extra)
    return wait_convert(fut, parse_messages)
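The only command-assembly subtlety is the optional COUNT pair; a standalone sketch of how the argument list is built:

def build_xrange_args(stream, start='-', stop='+', count=None):
    args = [b'XRANGE', stream, start, stop]
    if count is not None:
        args += ['COUNT', count]  # COUNT is only sent when a cap is requested
    return args

print(build_xrange_args('mystream', count=10))
# [b'XRANGE', 'mystream', '-', '+', 'COUNT', 10]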
python
def tarbell_spreadsheet(command, args): """ Open context spreadsheet """ with ensure_settings(command, args) as settings, ensure_project(command, args) as site: try: # First, try to get the Google Spreadsheet URL spreadsheet_url = _google_spreadsheet_url(site.project.SPREADSHEET_KEY) except AttributeError: # The project doesn't seem to be using a Google Spreadsheet. # Try the URL or path specified in the CONTEXT_SOURCE_FILE setting try: spreadsheet_url = _context_source_file_url( site.project.CONTEXT_SOURCE_FILE) print(spreadsheet_url) except AttributeError: puts(colored.red("No Google spreadsheet or context source file " "has been configured.\n")) return # Use the webbrowser package to try to open the file whether it's a # remote URL on the web, or a local file. On some platforms it will # successfully open local files in the default application. # This seems preferable to trying to do os detection and calling # the system-specific command for opening files in default # applications. # See # http://stackoverflow.com/questions/434597/open-document-with-default-application-in-python webbrowser.open(spreadsheet_url)
java
private void start() { System.out.print("Starting " + DISPLAY_NAME + "..."); System.out.flush(); // Consume configuration from Grakn config file into Cassandra config file initialiseConfig(); Future<Executor.Result> result = daemonExecutor.executeAsync(storageCommand(), graknHome.toFile()); LocalDateTime timeout = LocalDateTime.now().plusSeconds(STORAGE_STARTUP_TIMEOUT_SECOND); while (LocalDateTime.now().isBefore(timeout) && !result.isDone()) { System.out.print("."); System.out.flush(); if (storageStatus().equals("running")) { System.out.println("SUCCESS"); return; } try { Thread.sleep(WAIT_INTERVAL_SECOND * 1000); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } System.out.println("FAILED!"); System.err.println("Unable to start " + DISPLAY_NAME + "."); try { String errorMessage = "Process exited with code '" + result.get().exitCode() + "': '" + result.get().stderr() + "'"; System.err.println(errorMessage); throw new GraknDaemonException(errorMessage); } catch (InterruptedException | ExecutionException e) { throw new GraknDaemonException(e.getMessage(), e); } }
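A Python sketch of the poll-until-running-or-deadline loop (status_check stands in for storageStatus()):

import time
from datetime import datetime, timedelta

def wait_until_running(status_check, timeout_s=30, interval_s=2):
    deadline = datetime.now() + timedelta(seconds=timeout_s)
    while datetime.now() < deadline:
        if status_check() == 'running':
            return True   # SUCCESS path
        time.sleep(interval_s)
    return False          # FAILED path

print(wait_until_running(lambda: 'running', timeout_s=5, interval_s=1))  # True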
java
public static <T, U extends Comparable<? super U>> Collector<T, ?, Seq<T>> maxAllBy(Function<? super T, ? extends U> function) { return maxAllBy(function, naturalOrder()); }
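Unlike a plain max, this collector keeps every element tied for the maximum key; the equivalent idea in Python:

def max_all_by(items, key):
    best = max(map(key, items))                   # the maximum key value
    return [x for x in items if key(x) == best]   # all elements that attain it

print(max_all_by(['ox', 'cat', 'dog', 'bee'], key=len))  # ['cat', 'dog', 'bee']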
python
def get_params_from_func(func: Callable, signature: Signature=None) -> Params:
    """Gets all parameters from a function signature.

    :param func: The function to inspect.
    :param signature: An inspect.Signature instance.
    :returns: A named tuple containing information about all, optional,
        required and logic function parameters.
    """
    if signature is None:
        # Check if the function already parsed the signature
        signature = getattr(func, '_doctor_signature', None)
        # Otherwise parse the signature
        if signature is None:
            signature = inspect.signature(func)

    # Check if a `req_obj_type` was provided for the function.  If so we
    # should derive the parameters from that defined type instead of the
    # signature.
    if getattr(func, '_doctor_req_obj_type', None):
        annotation = func._doctor_req_obj_type
        all_params = list(annotation.properties.keys())
        required = annotation.required
        optional = list(set(all_params) - set(required))
    else:
        # Required is a positional argument with no default value and its
        # annotation must subclass SuperType.  This is so we don't try to
        # require parameters passed to a logic function by a decorator that
        # are not part of a request.
        required = [key for key, p in signature.parameters.items()
                    if p.default == p.empty and issubclass(p.annotation, SuperType)]
        optional = [key for key, p in signature.parameters.items()
                    if p.default != p.empty]
        all_params = [key for key in signature.parameters.keys()]

    # Logic params are all parameters that are part of the logic signature.
    logic_params = copy(all_params)
    return Params(all_params, required, optional, logic_params)
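The signature-splitting rule (required = no default, optional = has a default) in standalone form, minus the SuperType annotation check:

import inspect

def logic(a, b, c=1, d=None):
    pass

sig = inspect.signature(logic)
required = [n for n, p in sig.parameters.items() if p.default is p.empty]
optional = [n for n, p in sig.parameters.items() if p.default is not p.empty]
print(required, optional)  # ['a', 'b'] ['c', 'd']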
java
public final int childItemPosition(int childAdapterPosition) {
    int itemCount = 0;
    int parentCount = parentItemCount();
    for (int i = 0; i < parentCount; i++) {
        itemCount += 1; // the parent row itself
        if (isExpanded(i)) {
            int childCount = childItemCount(i);
            itemCount += childCount;
            if (childAdapterPosition < itemCount) {
                return childCount - (itemCount - childAdapterPosition);
            }
        }
    }
    throw new IllegalStateException("The adapter position is invalid: " + childAdapterPosition);
}
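A Python sketch of the same flat-position walk with a toy parent model (the data is illustrative):

def child_item_position(adapter_pos, parents):
    """parents: list of (expanded, child_count) tuples. Returns the child
    index within its parent for a flat adapter position."""
    count = 0
    for expanded, child_count in parents:
        count += 1  # the parent row itself
        if expanded:
            count += child_count
            if adapter_pos < count:
                return child_count - (count - adapter_pos)
    raise IndexError('invalid adapter position: %d' % adapter_pos)

# Parent0 expanded with 2 children gives the flat layout [P0, C0, C1, P1].
print(child_item_position(2, [(True, 2), (False, 3)]))  # 1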