Columns: language (stringclasses, 2 values) · func_code_string (stringlengths, 63 to 466k)
java
public final void setInstructionObserverThreshold(int threshold) { if (sealed) onSealedMutation(); if (threshold < 0) throw new IllegalArgumentException(); instructionThreshold = threshold; setGenerateObserverCount(threshold > 0); }
java
@SuppressWarnings("resource") public static Table getTableMeta(DataSource ds, String tableName) { final Table table = Table.create(tableName); Connection conn = null; ResultSet rs = null; try { conn = ds.getConnection(); final DatabaseMetaData metaData = conn.getMetaData(); // 获得主键 rs = metaData.getPrimaryKeys(conn.getCatalog(), null, tableName); while (rs.next()) { table.addPk(rs.getString("COLUMN_NAME")); } // 获得列 rs = metaData.getColumns(conn.getCatalog(), null, tableName, null); while (rs.next()) { table.setColumn(Column.create(tableName, rs)); } } catch (SQLException e) { throw new DbRuntimeException("Get columns error!", e); } finally { DbUtil.close(rs, conn); } return table; }
java
private SherdogBaseObject getEvent(Element td) { Element link = td.select("a").get(0); SherdogBaseObject event = new SherdogBaseObject(); event.setName(link.html().replaceAll("<span itemprop=\"award\">|</span>", "")); event.setSherdogUrl(link.attr("abs:href")); return event; }
java
protected static LazyNode readFromBuffer(byte[] raw) { ByteBuffer buf = ByteBuffer.wrap(raw); return readFromBuffer(buf); }
java
public static void addDatatablesResourceIfNecessary(String defaultFilename, String type) {
    boolean loadDatatables = shouldLibraryBeLoaded(P_GET_DATATABLE_FROM_CDN, true);
    // Do we have to add datatables.min.{css|js}, or are the resources already there?
    FacesContext context = FacesContext.getCurrentInstance();
    UIViewRoot root = context.getViewRoot();
    String[] positions = { "head", "body", "form" };
    for (String position : positions) {
        if (loadDatatables) {
            List<UIComponent> availableResources = root.getComponentResources(context, position);
            for (UIComponent ava : availableResources) {
                if (ava.isRendered()) {
                    String name = (String) ava.getAttributes().get("name");
                    if (null != name) {
                        name = name.toLowerCase();
                        if (name.contains("datatables") && name.endsWith("." + type)) {
                            loadDatatables = false;
                            break;
                        }
                    }
                }
            }
        }
    }
    if (loadDatatables) {
        addResourceIfNecessary(defaultFilename);
    }
}
java
public static Map<String, Object> parseModelConfig(String modelJson, String modelYaml) throws IOException, InvalidKerasConfigurationException { Map<String, Object> modelConfig; if (modelJson != null) modelConfig = parseJsonString(modelJson); else if (modelYaml != null) modelConfig = parseYamlString(modelYaml); else throw new InvalidKerasConfigurationException("Requires model configuration as either JSON or YAML string."); return modelConfig; }
python
def setup(self): """Initialize filter just before it will be used.""" super(CleanCSSFilter, self).setup() self.root = current_app.config.get('COLLECT_STATIC_ROOT')
java
public static nsmode get(nitro_service service) throws Exception{ nsmode obj = new nsmode(); nsmode[] response = (nsmode[])obj.get_resources(service); return response[0]; }
python
def extend(self, expire=None): """Extends expiration time of the lock. :param expire: New expiration time. If ``None`` - `expire` provided during lock initialization will be taken. """ if expire is None: if self._expire is not None: expire = self._expire else: raise TypeError( "To extend a lock 'expire' must be provided as an " "argument to extend() method or at initialization time." ) error = _eval_script(self._client, EXTEND, self._name, args=(expire, self._id)) if error == 1: raise NotAcquired("Lock %s is not acquired or it already expired." % self._name) elif error == 2: raise NotExpirable("Lock %s has no assigned expiration time" % self._name) elif error: raise RuntimeError("Unsupported error code %s from EXTEND script" % error)
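A minimal usage sketch for `extend` above, assuming the surrounding class is a Redis lock (the `Lock` constructor, `redis_client`, and the task functions are illustrative, not from the source):

    # Hypothetical: take a lock with a 10 s expiry, extend it mid-task.
    lock = Lock(redis_client, "jobs:nightly", expire=10)
    if lock.acquire():
        try:
            first_half()          # assumed long-running step
            lock.extend(30)       # push expiry out; omitting the argument reuses expire=10
            second_half()
        finally:
            lock.release()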
java
public void writeTo(WritableByteChannel channel) throws IOException { for (ByteBuffer buffer : toDirectByteBuffers()) { channel.write(buffer); } }
java
public FluentSelect deselectByValue(final String value) { executeAndWrapReThrowIfNeeded(new DeselectByValue(value), Context.singular(context, "deselectByValue", null, value), true); return new FluentSelect(super.delegate, currentElement.getFound(), this.context, monitor, booleanInsteadOfNotFoundException); }
python
def cpu_load_send(self, sensLoad, ctrlLoad, batVolt, force_mavlink1=False): ''' Sensor and DSC control loads. sensLoad : Sensor DSC Load (uint8_t) ctrlLoad : Control DSC Load (uint8_t) batVolt : Battery Voltage in millivolts (uint16_t) ''' return self.send(self.cpu_load_encode(sensLoad, ctrlLoad, batVolt), force_mavlink1=force_mavlink1)
python
def upload_image(request):
    """
    View that saves an uploaded file.
    Request structure:
        FILES
            images[]: image files
        POST DATA
            profile: profile name (used to pick the save settings) (optional)
            label: suffix appended to the file name on save (optional)
    Response structure:
        Response type: JSON
        {
            'uploaded': [
                {
                    'url': 'full url of the main file',
                    'rel_url': 'url of the main file, relative to MEDIA',
                    'img_id': 'identifier for storing in the DB',  // 'profilename:abcdef_abcd_label.png',
                    'variants': {
                        'variant label': {
                            'url': 'full url of the variant',
                            'rel_url': 'url of the variant, relative to MEDIA'
                        },
                        ...
                    }
                },
                ...
            ],
            'errors': ['error message', ...]
        }
    """
    if request.method != 'POST':
        return HttpResponseNotAllowed(('POST',))
    result = {'uploaded': [], 'errors': []}
    files = request.FILES.getlist('images[]')
    if not files:
        result['errors'].append(unicode(ERROR_MESSAGES['no_uploaded_files']))
        return send_json(result)
    try:
        profile = request.POST.get('profile', 'default')
        conf = get_profile_configs(profile)
    except ValueError, e:
        result['errors'].append(unicode(e))
        return send_json(result)
    for i in xrange(min(len(files), dju_settings.DJU_IMG_UPLOAD_MAX_FILES)):
        f = files[i]
        if not is_image(f, types=conf['TYPES']):
            result['errors'].append(
                unicode(ERROR_MESSAGES['wrong_file_format']) %
                {'name': f.name, 'formats': ', '.join(map(lambda t: t.upper(), conf['TYPES']))}
            )
            continue
        adjust_image(f, max_size=conf['MAX_SIZE'], new_format=conf['FORMAT'],
                     jpeg_quality=conf['JPEG_QUALITY'], fill=conf['FILL'], stretch=conf['STRETCH'])
        img_id = generate_img_id(profile, ext=image_get_format(f),
                                 label=request.POST.get('label'), tmp=True)
        relative_path = get_relative_path_from_img_id(img_id)
        full_path = media_path(relative_path)
        save_file(f, full_path)
        data = {
            'url': settings.MEDIA_URL + relative_path,
            'rel_url': relative_path,
            'img_id': img_id,
            'variants': {},
        }
        for v_conf in conf['VARIANTS']:
            label = v_conf['LABEL']
            if not label:
                label = get_variant_label(v_conf)
            v_f = adjust_image(f, max_size=v_conf['MAX_SIZE'], new_format=v_conf['FORMAT'],
                               jpeg_quality=v_conf['JPEG_QUALITY'], fill=v_conf['FILL'],
                               stretch=v_conf['STRETCH'], return_new_image=True)
            v_relative_path = get_relative_path_from_img_id(img_id, variant_label=label,
                                                            ext=image_get_format(v_f))
            v_full_path = media_path(v_relative_path)
            save_file(v_f, v_full_path)
            data['variants'][label] = {
                'url': settings.MEDIA_URL + v_relative_path,
                'rel_url': v_relative_path,
            }
        result['uploaded'].append(data)
    return send_json(result)
java
public CloseableReference<Bitmap> createBitmap( DisplayMetrics display, int width, int height, Bitmap.Config config) { return createBitmap(display, width, height, config, null); }
python
async def upload_file(self, Filename, Bucket, Key, ExtraArgs=None, Callback=None, Config=None): """Upload a file to an S3 object. Usage:: import boto3 s3 = boto3.resource('s3') s3.meta.client.upload_file('/tmp/hello.txt', 'mybucket', 'hello.txt') Similar behavior as S3Transfer's upload_file() method, except that parameters are capitalized. """ with open(Filename, 'rb') as open_file: await upload_fileobj(self, open_file, Bucket, Key, ExtraArgs=ExtraArgs, Callback=Callback, Config=Config)
java
public static String getAdiVarna(String str) {
    if (str.length() == 0)
        return null;
    String adiVarna = String.valueOf(str.charAt(0));
    if (str.length() > 1 && str.charAt(1) == '3') // for pluta
    {
        adiVarna += String.valueOf(str.charAt(1));
    }
    return adiVarna;
}
java
private void deleteFile(File file, String vfsName) {
    try {
        if (file.exists() && file.canWrite()) {
            file.delete();
            // write log message
            if (LOG.isInfoEnabled()) {
                LOG.info(Messages.get().getBundle().key(Messages.LOG_FILE_DELETED_1, getRfsName(file, vfsName)));
            }
            // delete the parent folder if it is empty (don't do this recursive)
            File parent = new File(file.getParent());
            if (parent.listFiles().length == 0) {
                if (parent.canWrite()) {
                    parent.delete();
                    if (LOG.isInfoEnabled()) {
                        LOG.info(
                            Messages.get().getBundle().key(Messages.LOG_FILE_DELETED_1, getRfsName(file, vfsName)));
                    }
                }
            }
        }
    } catch (Throwable t) {
        // ignore, nothing to do about this
        if (LOG.isWarnEnabled()) {
            LOG.warn(
                Messages.get().getBundle().key(Messages.LOG_FILE_DELETION_FAILED_1, getRfsName(file, vfsName)),
                t);
        }
    }
}
java
@Override public final Class<? extends T> enhanceClass(Class<T> baseClass) { logger.info("Enhancing {}", baseClass); CtClass original = null; try { original = pool.get(baseClass.getName()); TemplateHelper templateHelper = new TemplateHelper(pool); VelocityContext velocityContext = new VelocityContext(); velocityContext.put("_", templateHelper); velocityContext.put("base", original); velocityContext.put("getters", findGetters(original)); velocityContext.put("abstractMethods", findAbstractMethods(original)); Map<String, Object> contextItems = getAdditionalContextItems(); for (Map.Entry<String, Object> contextItem : contextItems.entrySet()) { velocityContext.put(contextItem.getKey(), contextItem.getValue()); } StringWriter writer = new StringWriter(); engine.getTemplate(getTemplateLocation()).merge(velocityContext, writer); logger.debug("Enhanced {} to form new class {} with source:\n{}", baseClass.getSimpleName(), templateHelper.clsName(), writer); return ClassLoadingUtil.toClass(templateHelper.compile()); } catch (Exception e) { logger.error("An error occurred while enhancing {}", baseClass); throw ExceptionUtil.propagate(e); } finally { if (original != null) { original.detach(); } } }
java
public void setDefault(boolean newDefault) { boolean oldDefault = default_; default_ = newDefault; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TypesPackage.JVM_OPERATION__DEFAULT, oldDefault, default_)); }
python
def potential_radiation(dates, lon, lat, timezone, terrain_slope=0, terrain_slope_azimuth=0, cloud_fraction=0, split=False):
    """
    Calculate potential shortwave radiation for a specific location and time.

    This routine calculates global radiation as described in:
    Liston, G. E. and Elder, K. (2006): A Meteorological Distribution System
    for High-Resolution Terrestrial Modeling (MicroMet), J. Hydrometeorol., 7, 217–234.

    Corrections for eccentricity are carried out following:
    Paltridge, G.W., Platt, C.M.R., 1976. Radiative processes in Meteorology and
    Climatology. Elsevier Scientific Publishing Company, Amsterdam, Oxford, New York.

    Parameters
    ----------
    dates : DatetimeIndex or array-like
        The dates for which potential radiation shall be calculated
    lon : float
        Longitude (degrees)
    lat : float
        Latitude (degrees)
    timezone : float
        Time zone
    terrain_slope : float, default 0
        Terrain slope as defined in Liston & Elder (2006) (eq. 12)
    terrain_slope_azimuth : float, default 0
        Terrain slope azimuth as defined in Liston & Elder (2006) (eq. 13)
    cloud_fraction : float, default 0
        Cloud fraction between 0 and 1
    split : boolean, default False
        If True, return a DataFrame containing direct and diffuse radiation,
        otherwise return a Series containing total radiation
    """
    solar_constant = 1367.
    days_per_year = 365.25
    tropic_of_cancer = np.deg2rad(23.43697)
    solstice = 173.0

    dates = pd.DatetimeIndex(dates)
    dates_hour = np.array(dates.hour)
    dates_minute = np.array(dates.minute)
    day_of_year = np.array(dates.dayofyear)

    # compute solar declination in rad
    solar_decline = tropic_of_cancer * np.cos(2.0 * np.pi * (day_of_year - solstice) / days_per_year)

    # compute the sun hour angle in rad
    standard_meridian = timezone * 15.
    delta_lat_time = (lon - standard_meridian) * 24. / 360.
    hour_angle = np.pi * (((dates_hour + dates_minute / 60. + delta_lat_time) / 12.) - 1.)

    # get solar zenith angle
    cos_solar_zenith = (np.sin(solar_decline) * np.sin(np.deg2rad(lat)) +
                        np.cos(solar_decline) * np.cos(np.deg2rad(lat)) * np.cos(hour_angle))
    cos_solar_zenith = cos_solar_zenith.clip(min=0)
    solar_zenith_angle = np.arccos(cos_solar_zenith)

    # compute transmissivities for direct and diffuse radiation using cloud fraction
    transmissivity_direct = (0.6 + 0.2 * cos_solar_zenith) * (1.0 - cloud_fraction)
    transmissivity_diffuse = (0.3 + 0.1 * cos_solar_zenith) * cloud_fraction

    # modify solar constant for eccentricity
    beta = 2. * np.pi * (day_of_year / days_per_year)
    radius_ratio = (1.00011 + 0.034221 * np.cos(beta) + 0.00128 * np.sin(beta) +
                    0.000719 * np.cos(2. * beta) + 0.000077 * np.sin(2 * beta))
    solar_constant_times_radius_ratio = solar_constant * radius_ratio

    mu = np.arcsin(np.cos(solar_decline) * np.sin(hour_angle) / np.sin(solar_zenith_angle))
    cosi = (np.cos(terrain_slope) * cos_solar_zenith +
            np.sin(terrain_slope) * np.sin(solar_zenith_angle) * np.cos(mu - terrain_slope_azimuth))

    # get total shortwave radiation
    direct_radiation = solar_constant_times_radius_ratio * transmissivity_direct * cosi
    diffuse_radiation = solar_constant_times_radius_ratio * transmissivity_diffuse * cos_solar_zenith
    direct_radiation = direct_radiation.clip(min=0)

    df = pd.DataFrame(index=dates, data=dict(direct=direct_radiation, diffuse=diffuse_radiation))

    if split:
        return df
    else:
        return df.direct + df.diffuse
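A quick call sketch for the function above; it assumes `pandas` is importable and that the module already has `np`/`pd` in scope, as the body implies (the coordinates are illustrative):

    import pandas as pd

    # Hourly clear-sky potential radiation for one midsummer day.
    dates = pd.date_range('2019-06-21', periods=24, freq='H')
    rad = potential_radiation(dates, lon=11.4, lat=47.3, timezone=1)
    print(rad.max())  # peaks near local solar noon

    # split=True returns direct and diffuse components as a DataFrame instead.
    parts = potential_radiation(dates, lon=11.4, lat=47.3, timezone=1,
                                cloud_fraction=0.5, split=True)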
java
@JmxGetter(name = "avgSlopUpdateNetworkTimeMs", description = "average time spent on network, for streaming operations") public double getAvgSlopUpdateNetworkTimeMs() { return networkTimeCounterMap.get(Operation.SLOP_UPDATE).getAvgEventValue() / Time.NS_PER_MS; }
python
def write_generator_cost_data(self, file): """ Writes generator cost data to file. """ file.write("\n%%%% generator cost data\n") file.write("%%\t1\tstartup\tshutdown\tn\tx1\ty1\t...\txn\tyn\n") file.write("%%\t2\tstartup\tshutdown\tn\tc(n-1)\t...\tc0\n") file.write("%sgencost = [\n" % self._prefix) for generator in self.case.generators: n = len(generator.p_cost) template = '\t%d\t%g\t%g\t%d' for _ in range(n): template = '%s\t%%g' % template template = '%s;\n' % template if generator.pcost_model == PW_LINEAR: t = 2 # cp = [p for p, q in generator.p_cost] # cq = [q for p, q in generator.p_cost] # c = zip(cp, cq) c = [v for pc in generator.p_cost for v in pc] elif generator.pcost_model == POLYNOMIAL: t = 1 c = list(generator.p_cost) else: raise vals = [t, generator.c_startup, generator.c_shutdown, n] + c file.write(template % tuple(vals)) file.write("];\n")
java
public long copy(InputStream in, Node dest) throws IOException { long result; try (OutputStream out = dest.newOutputStream()) { result = copy(in, out); } return result; }
python
def _get_galaxy_data_table(name, dt_config_file): """Parse data table config file for details on tool *.loc location and columns. """ out = {} if os.path.exists(dt_config_file): tdtc = ElementTree.parse(dt_config_file) for t in tdtc.getiterator("table"): if t.attrib.get("name", "") in [name, "%s_indexes" % name]: out["column"] = [x.strip() for x in t.find("columns").text.split(",")] out["file"] = t.find("file").attrib.get("path", "") return out
python
def _try_to_get_extension(obj): """ Try to get file extension from given path or file object. :param obj: a file, file-like object or something :return: File extension or None >>> _try_to_get_extension("a.py") 'py' """ if is_path(obj): path = obj elif is_path_obj(obj): return obj.suffix[1:] elif is_file_stream(obj): try: path = get_path_from_stream(obj) except ValueError: return None elif is_ioinfo(obj): path = obj.path else: return None if path: return get_file_extension(path) return None
java
public static SecapiPayProfitsharingResult secapiPayProfitsharing(SecapiPayProfitsharing secapiPayProfitsharing,String key){ Map<String,String> map = MapUtil.objectToMap(secapiPayProfitsharing, "receivers"); if(secapiPayProfitsharing.getReceivers() != null){ map.put("receivers", JsonUtil.toJSONString(secapiPayProfitsharing.getReceivers())); } String sign = SignatureUtil.generateSign(map,secapiPayProfitsharing.getSign_type() == null? "HMAC-SHA256": secapiPayProfitsharing.getSign_type(),key); secapiPayProfitsharing.setSign(sign); String xml = XMLConverUtil.convertToXML(secapiPayProfitsharing); HttpUriRequest httpUriRequest = RequestBuilder.post() .setHeader(xmlHeader) .setUri(baseURI() + "/secapi/pay/profitsharing") .setEntity(new StringEntity(xml,Charset.forName("utf-8"))) .build(); return LocalHttpClient.keyStoreExecuteXmlResult(secapiPayProfitsharing.getMch_id(), httpUriRequest,SecapiPayProfitsharingResult.class, secapiPayProfitsharing.getSign_type() == null? "HMAC-SHA256": secapiPayProfitsharing.getSign_type(),key); }
python
def html_to_rgb(html): """Convert the HTML color to (r, g, b). Parameters: :html: the HTML definition of the color (#RRGGBB or #RGB or a color name). Returns: The color as an (r, g, b) tuple in the range: r[0...1], g[0...1], b[0...1] Throws: :ValueError: If html is neither a known color name or a hexadecimal RGB representation. >>> '(%g, %g, %g)' % html_to_rgb('#ff8000') '(1, 0.501961, 0)' >>> '(%g, %g, %g)' % html_to_rgb('ff8000') '(1, 0.501961, 0)' >>> '(%g, %g, %g)' % html_to_rgb('#f60') '(1, 0.4, 0)' >>> '(%g, %g, %g)' % html_to_rgb('f60') '(1, 0.4, 0)' >>> '(%g, %g, %g)' % html_to_rgb('lemonchiffon') '(1, 0.980392, 0.803922)' """ html = html.strip().lower() if html[0]=='#': html = html[1:] elif html in NAMED_COLOR: html = NAMED_COLOR[html][1:] if len(html)==6: rgb = html[:2], html[2:4], html[4:] elif len(html)==3: rgb = ['%c%c' % (v,v) for v in html] else: raise ValueError("input #%s is not in #RRGGBB format" % html) return tuple(((int(n, 16) / 255.0) for n in rgb))
python
def residual_conv(x, repeat, k, hparams, name, reuse=None): """A stack of convolution blocks with residual connections.""" with tf.variable_scope(name, reuse=reuse): dilations_and_kernels = [((1, 1), k) for _ in range(3)] for i in range(repeat): with tf.variable_scope("repeat_%d" % i): y = common_layers.conv_block( common_layers.layer_norm(x, hparams.hidden_size, name="lnorm"), hparams.hidden_size, dilations_and_kernels, padding="SAME", name="residual_conv") y = tf.nn.dropout(y, 1.0 - hparams.dropout) x += y return x
java
public static Map<double[], int[]> getTransformMap(BioAssemblyInfo bioassemblyInfo, Map<String, Integer> chainIdToIndexMap) { Map<Matrix4d, List<Integer>> matMap = new LinkedHashMap<>(); List<BiologicalAssemblyTransformation> transforms = bioassemblyInfo.getTransforms(); for (BiologicalAssemblyTransformation transformation : transforms) { Matrix4d transMatrix = transformation.getTransformationMatrix(); String transChainId = transformation.getChainId(); if (!chainIdToIndexMap.containsKey(transChainId)){ continue; } int chainIndex = chainIdToIndexMap.get(transformation.getChainId()); if(matMap.containsKey(transMatrix)){ matMap.get(transMatrix).add(chainIndex); } else{ List<Integer> chainIdList = new ArrayList<>(); chainIdList.add(chainIndex); matMap.put(transMatrix, chainIdList); } } Map<double[], int[]> outMap = new LinkedHashMap<>(); for (Entry<Matrix4d, List<Integer>> entry : matMap.entrySet()) { outMap.put(convertToDoubleArray(entry.getKey()), CodecUtils.convertToIntArray(entry.getValue())); } return outMap; }
java
public void addFilePart(final String fieldName, final URL urlToUploadFile) throws IOException {
    //
    // Maybe try and extract a filename from the last part of the url?
    // Or have the user pass it in?
    // Or just leave it blank as I have already done?
    //
    addFilePart(fieldName, urlToUploadFile.openStream(), null,
            URLConnection.guessContentTypeFromName(urlToUploadFile.toString()));
}
java
private void handleBindStat(long elapsedTime) {
    String METHODNAME = "handleBindStat(long)";
    if (elapsedTime < LDAP_CONNECT_TIMEOUT_TRACE) {
        QUICK_LDAP_BIND.getAndIncrement();
    }
    long now = System.currentTimeMillis();
    /*
     * Print out at most every 30 minutes the latest number of "quick" binds
     */
    if (now - LDAP_STATS_TIMER.get() > 1800000) {
        // Update the last update time, then make certain no one beat us to it
        long lastUpdated = LDAP_STATS_TIMER.getAndSet(now);
        if (now - lastUpdated > 1800000) {
            if (tc.isDebugEnabled())
                Tr.debug(tc, METHODNAME + " **LDAPBindStat: " + QUICK_LDAP_BIND.get()
                        + " binds took less than " + LDAP_CONNECT_TIMEOUT_TRACE + " ms");
        }
    }
}
java
@Override public boolean containsKey(Object key) { return key != null ? getProperty(key.toString(), _scope) != null : false; }
java
private InputStream getDefault(String fileName) { InputStream result = null; try { result = new BufferedInputStream(new FileInputStream(fileName)); } catch (IOException ignore) { } return result; }
java
@Deprecated public static <K, V> Function<K, V> map(java.util.Map<K, V> m) { return m::get; }
java
public static KeyStore loadKeyStore(InputStream keyStore, String password) throws CertificateException, NoSuchAlgorithmException, IOException, KeyStoreException { try { KeyStore trustStore = KeyStore.getInstance("JKS"); trustStore.load(keyStore, password.toCharArray()); return trustStore; } finally { IOUtils.closeQuietly(keyStore); } }
python
def get(self, name): """Get the set of compatible packages given a resolvable name.""" resolvable, packages, parent, constraint_only = self._collapse().get( self.normalize(name), _ResolvedPackages.empty()) return packages
python
def to_dict(self): '''Return a dict of the attributes.''' return dict( raw=self.raw, scheme=self.scheme, authority=self.authority, netloc=self.authority, path=self.path, query=self.query, fragment=self.fragment, userinfo=self.userinfo, username=self.username, password=self.password, host=self.host, hostname=self.hostname, port=self.port, resource=self.resource, url=self.url, encoding=self.encoding, )
java
public <T extends Evaluation> T evaluate(JavaRDD<DataSet> data, List<String> labelsList, int evalBatchSize) { Evaluation e = new org.deeplearning4j.eval.Evaluation(); e = doEvaluation(data, e, evalBatchSize); if (labelsList != null) { e.setLabelsList(labelsList); } return (T)e; }
python
def scrypt(password, salt, N=SCRYPT_N, r=SCRYPT_r, p=SCRYPT_p, olen=64):
    """Returns a key derived using the scrypt key-derivation function

    N must be a power of two larger than 1 but no larger than 2 ** 63 (insane)
    r and p must be positive numbers such that r * p < 2 ** 30

    The default values are:
    N -- 2**14 (~16k)
    r -- 8
    p -- 1

    Memory usage is proportional to N*r. Defaults require about 16 MiB.
    Time taken is proportional to N*p. Defaults take <100ms on a recent x86.

    The last one differs from libscrypt defaults, but matches the 'interactive'
    work factor from the original paper. For long term storage where runtime of
    key derivation is not a problem, you could use 16 as in libscrypt or better
    yet increase N if memory is plentiful.
    """
    check_args(password, salt, N, r, p, olen)

    if _scrypt_ll:
        out = ctypes.create_string_buffer(olen)
        if _scrypt_ll(password, len(password), salt, len(salt), N, r, p, out, olen):
            raise ValueError
        return out.raw

    if len(salt) != _scrypt_salt or r != 8 or (p & (p - 1)) or (N*p <= 512):
        return scr_mod.scrypt(password, salt, N, r, p, olen)

    s = next(i for i in range(1, 64) if 2**i == N)
    t = next(i for i in range(0, 30) if 2**i == p)
    m = 2**(10 + s)
    o = 2**(5 + t + s)
    if s > 53 or t + s > 58:
        raise ValueError
    out = ctypes.create_string_buffer(olen)
    if _scrypt(out, olen, password, len(password), salt, o, m) != 0:
        raise ValueError
    return out.raw
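A usage sketch under the defaults above (the salt here is illustrative; scrypt salts should be random and stored alongside the derived key):

    import os

    salt = os.urandom(16)
    key = scrypt(b'correct horse battery staple', salt, olen=32)
    assert len(key) == 32
    # Same password, salt and parameters always derive the same key:
    assert scrypt(b'correct horse battery staple', salt, olen=32) == key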
java
public static FileInfo parseFileName(File file) {
    // Owned filenames have the form:
    //   dbname/CURRENT
    //   dbname/LOCK
    //   dbname/LOG
    //   dbname/LOG.old
    //   dbname/MANIFEST-[0-9]+
    //   dbname/[0-9]+.(log|sst|dbtmp)
    String fileName = file.getName();
    if ("CURRENT".equals(fileName)) {
        return new FileInfo(FileType.CURRENT);
    } else if ("LOCK".equals(fileName)) {
        return new FileInfo(FileType.DB_LOCK);
    } else if ("LOG".equals(fileName)) {
        return new FileInfo(FileType.INFO_LOG);
    } else if ("LOG.old".equals(fileName)) {
        return new FileInfo(FileType.INFO_LOG);
    } else if (fileName.startsWith("MANIFEST-")) {
        long fileNumber = Long.parseLong(removePrefix(fileName, "MANIFEST-"));
        return new FileInfo(FileType.DESCRIPTOR, fileNumber);
    } else if (fileName.endsWith(".log")) {
        long fileNumber = Long.parseLong(removeSuffix(fileName, ".log"));
        return new FileInfo(FileType.LOG, fileNumber);
    } else if (fileName.endsWith(".sst")) {
        long fileNumber = Long.parseLong(removeSuffix(fileName, ".sst"));
        return new FileInfo(FileType.TABLE, fileNumber);
    } else if (fileName.endsWith(".dbtmp")) {
        long fileNumber = Long.parseLong(removeSuffix(fileName, ".dbtmp"));
        return new FileInfo(FileType.TEMP, fileNumber);
    }
    return null;
}
java
private boolean delete(final File f) throws IOException {
    if (f.isDirectory()) {
        final File[] files = f.listFiles();
        if (files != null) {
            for (File file : files) {
                final boolean del = delete(file);
                if (!del) {
                    return false;
                }
            }
        }
    } else {
        return f.delete();
    }
    // Now directory is empty
    return f.delete();
}
python
def logical_not(f):  # function factory
    '''Logical not from functions.

    Parameters
    ----------
    f : function
        Function that takes array and returns true or false for each item in array.

    Returns
    -------
    Function.
    '''
    def not_f(value):  # distinct name: reusing `f` here would shadow the wrapped
        return np.logical_not(f(value))  # function and recurse forever
    not_f.__name__ = "not_" + f.__name__
    return not_f
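With the shadowing fix above, the factory behaves like this:

    import numpy as np

    def above_two(a):
        return a > 2

    below_or_two = logical_not(above_two)
    print(below_or_two(np.array([1, 2, 3, 4])))  # [ True  True False False]
    print(below_or_two.__name__)                 # not_above_two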
java
@Nonnull public static <T> ObjDoubleConsumerBuilder<T> objDblConsumer(Consumer<ObjDoubleConsumer<T>> consumer) { return new ObjDoubleConsumerBuilder(consumer); }
python
def closed(self, reason): """Callback performed when the transport is closed.""" self.server.remove_connection(self) self.protocol.connection_lost(reason) if not isinstance(reason, ConnectionClosed): logger.warn("connection closed, reason: %s" % str(reason)) else: logger.info("connection closed")
python
def _get_port_range(range_str): """ Given a string with a port or port range: '80', '80-120' Returns tuple with range start and end ports: (80, 80), (80, 120) """ if range_str == '*': return PortsRangeHelper.PortsRange(start=0, end=65535) s = range_str.split('-') if len(s) == 2: return PortsRangeHelper.PortsRange(start=int(s[0]), end=int(s[1])) return PortsRangeHelper.PortsRange(start=int(s[0]), end=int(s[0]))
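Expected behaviour, assuming `PortsRangeHelper.PortsRange` is a namedtuple with `start` and `end` fields as the code implies:

    _get_port_range('*')       # -> PortsRange(start=0, end=65535)
    _get_port_range('80')      # -> PortsRange(start=80, end=80)
    _get_port_range('80-120')  # -> PortsRange(start=80, end=120)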
java
public void stop() { releaseWakeLock(); if (null != recorder && prepared && recording) { try { recorder.stop(); recorder.reset(); recorder.release(); prepared = false; recording = false; recorder = null; if (!wannaCancel && null != mOnRecordPreparedListener) { mOnRecordPreparedListener.onRecordComplete(fullPath); } } catch (Exception e) { e.printStackTrace(); } } }
python
def putResult(self, result): """Register the *result* by putting it on all the output tubes.""" self._lock_prev_output.acquire() for tube in self._tubes_result_output: tube.put((result, 0)) self._lock_next_output.release()
python
def set_prbs(self, rx_ports=None, tx_ports=None):
    """ Set TX ports and RX streams for stream statistics.

    :param rx_ports: list of ports to set RX PRBS. If empty set for all ports.
    :type rx_ports: list[ixexplorer.ixe_port.IxePort]
    :param tx_ports: list of streams to set TX PRBS. If empty set for all streams.
    :type tx_ports: dict[ixexplorer.ixe_port.IxePort, list[ixexplorer.ixe_stream.IxeStream]]
    """

    if not rx_ports:
        rx_ports = self.ports.values()

    if not tx_ports:
        tx_ports = {}
        for port in self.ports.values():
            tx_ports[port] = port.streams.values()

    for port in rx_ports:
        port.set_receive_modes(IxeReceiveMode.widePacketGroup, IxeReceiveMode.sequenceChecking,
                               IxeReceiveMode.prbs)
        port.enableAutoDetectInstrumentation = True
        port.autoDetectInstrumentation.ix_set_default()
        port.write()

    for port, streams in tx_ports.items():
        for stream in streams:
            stream.autoDetectInstrumentation.enableTxAutomaticInstrumentation = True
            stream.autoDetectInstrumentation.enablePRBS = True
        port.write()
java
public List<Long> getIds(final int count) throws SnowizardClientException { for (final String host : hosts) { try { final SnowizardResponse snowizard = executeRequest(host, count); if (snowizard != null) { return snowizard.getIdList(); } } catch (final Exception ex) { LOGGER.warn("Unable to get ID from host ({})", host); } } throw new SnowizardClientException( "Unable to generate batch of IDs from Snowizard"); }
java
public static PathAddress transformAddress(final PathAddress original, final TransformationTarget target) { return TransformersImpl.transformAddress(original, target); }
python
def estimate_shift(signal, genome=None, windowsize=5000, thresh=None, nwindows=1000, maxlag=500, array_kwargs=None, verbose=False): """ Experimental: cross-correlation to estimate the shift width of ChIP-seq data This can be interpreted as the binding site footprint. For ChIP-seq, the plus and minus strand reads tend to be shifted in the 5' direction away from each other. Various ChIP-seq peak-callers estimate this distance; this function provides a quick, tunable way to do so using cross-correlation. The resulting shift can then be incorporated into subsequent calls to `array` by adding the shift_width kwarg. :param signal: genomic_signal object :param genome: String assembly for constructing windows :param nwindows: Number of windows to compute cross-correlation on :param windowsize: Size of each window to compute cross-correlation on. :param thresh: Threshold read coverage to run cross-correlation on. This is likely to be a function of the fragment size provided in `array_kwargs` `windowsize`. If `thresh` is small, then the cross correlation can be noisy. :param maxlag: Max shift to look for :param array_kwargs: Kwargs passed directly to genomic_signal.array, with the default of `bins=windowsize` for single-bp resolution, and `read_strand` will be overwritten. :param verbose: Be verbose. Returns lags and a `maxlag*2+1` x `nwindows` matrix of cross-correlations. You can then plot the average cross-correlation function with:: plt.plot(lags, shift.mean(axis=0)) and get the distance to shift with:: d = lags[np.argmax(shift.mean(axis=0))] and then plot that with:: plt.axvline(d, color='k', linestyle='--') The number of windows with at least `thresh` coverage is:: shift.shape[0] """ if thresh is None: thresh = 0 if genome is None: genome = signal.genome() if array_kwargs is None: array_kwargs = {} array_kwargs.pop('read_strand', None) if 'bins' not in array_kwargs: array_kwargs['bins'] = windowsize def add_strand(f, strand): fields = f.fields[:] while len(fields) < 5: fields.append('.') fields.append(strand) return pybedtools.create_interval_from_list(fields) windows = pybedtools.BedTool()\ .window_maker(genome=genome, w=windowsize) random_subset = pybedtools.BedTool(windows[:nwindows])\ .shuffle(genome=genome).saveas() if verbose: sys.stderr.write("Getting plus-strand signal for %s regions...\n" % nwindows) sys.stderr.flush() plus = signal.array( features=random_subset, read_strand="+", **array_kwargs).astype(float) if verbose: sys.stderr.write("Getting minus-strand signal for %s regions...\n" % nwindows) sys.stderr.flush() minus = signal.array( features=random_subset, read_strand="-", **array_kwargs).astype(float) # only do cross-correlation if you have enough reads to do so enough = ((plus.sum(axis=1) / windowsize) > thresh) \ & ((minus.sum(axis=1) / windowsize) > thresh) if verbose: sys.stderr.write( "Running cross-correlation on %s regions that passed " "threshold\n" % sum(enough)) results = np.zeros((sum(enough), 2 * maxlag + 1)) for i, xy in enumerate(izip(plus[enough], minus[enough])): x, y = xy results[i] = xcorr(x, y, maxlag) lags = np.arange(-maxlag, maxlag + 1) return lags, results
java
private Throwable getResult(FutureTask<Throwable> task, Thread thread) { try { if (timeout > 0) { return task.get(timeout, timeUnit); } else { return task.get(); } } catch (InterruptedException e) { return e; // caller will re-throw; no need to call Thread.interrupt() } catch (ExecutionException e) { // test failed; have caller re-throw the exception thrown by the test return e.getCause(); } catch (TimeoutException e) { return createTimeoutException(thread); } }
python
def run_file(self, path, all_errors_exit=True): """Execute a Python file.""" path = fixpath(path) with self.handling_errors(all_errors_exit): module_vars = run_file(path) self.vars.update(module_vars) self.store("from " + splitname(path)[1] + " import *")
python
def generateVariant(self, referenceName, position, randomNumberGenerator): """ Generate a random variant for the specified position using the specified random number generator. This generator should be seeded with a value that is unique to this position so that the same variant will always be produced regardless of the order it is generated in. """ variant = self._createGaVariant() variant.reference_name = referenceName variant.start = position variant.end = position + 1 # SNPs only for now bases = ["A", "C", "G", "T"] ref = randomNumberGenerator.choice(bases) variant.reference_bases = ref alt = randomNumberGenerator.choice( [base for base in bases if base != ref]) variant.alternate_bases.append(alt) randChoice = randomNumberGenerator.randint(0, 2) if randChoice == 0: variant.filters_applied = False elif randChoice == 1: variant.filters_applied = True variant.filters_passed = True else: variant.filters_applied = True variant.filters_passed = False variant.filters_failed.append('q10') for callSet in self.getCallSets(): call = variant.calls.add() call.call_set_id = callSet.getId() # for now, the genotype is either [0,1], [1,1] or [1,0] with equal # probability; probably will want to do something more # sophisticated later. randomChoice = randomNumberGenerator.choice( [[0, 1], [1, 0], [1, 1]]) call.genotype.extend(randomChoice) # TODO What is a reasonable model for generating these likelihoods? # Are these log-scaled? Spec does not say. call.genotype_likelihood.extend([-100, -100, -100]) variant.id = self.getVariantId(variant) return variant
python
def camel_2_snake(name): "Converts CamelCase to camel_case" s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name) return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
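For example:

    import re

    print(camel_2_snake('CamelCase'))     # camel_case
    print(camel_2_snake('HTTPResponse'))  # http_response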
java
@SuppressWarnings("unchecked") <K, E extends PoolableObject<V>> E flagOwner() { this.owner = Thread.currentThread(); return (E) this; }
python
def parser(rules=None, **kwargs): """Instantiate a parser with the default Splunk command rules.""" rules = RULES_SPLUNK if rules is None else dict(RULES_SPLUNK, **rules) return Parser(rules, **kwargs)
python
def conv_precip_frac(precip_largescale, precip_convective): """Fraction of total precip that is from convection parameterization. Parameters ---------- precip_largescale, precip_convective : xarray.DataArrays Precipitation from grid-scale condensation and from convective parameterization, respectively. Returns ------- xarray.DataArray """ total = total_precip(precip_largescale, precip_convective) # Mask using xarray's `where` method to prevent divide-by-zero. return precip_convective / total.where(total)
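A tiny sketch with synthetic data; it assumes `xarray` is available and that the `total_precip` helper referenced above simply sums the two inputs:

    import numpy as np
    import xarray as xr

    largescale = xr.DataArray(np.array([1.0, 0.0, 2.0]))
    convective = xr.DataArray(np.array([1.0, 0.0, 0.0]))
    conv_precip_frac(largescale, convective)
    # -> [0.5, nan, 0.0]; the zero-total cell becomes NaN instead of dividing by zero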
python
def modify_target_group(TargetGroupArn=None, HealthCheckProtocol=None, HealthCheckPort=None, HealthCheckPath=None, HealthCheckIntervalSeconds=None, HealthCheckTimeoutSeconds=None, HealthyThresholdCount=None, UnhealthyThresholdCount=None, Matcher=None): """ Modifies the health checks used when evaluating the health state of the targets in the specified target group. To monitor the health of the targets, use DescribeTargetHealth . See also: AWS API Documentation Examples This example changes the configuration of the health checks used to evaluate the health of the targets for the specified target group. Expected Output: :example: response = client.modify_target_group( TargetGroupArn='string', HealthCheckProtocol='HTTP'|'HTTPS', HealthCheckPort='string', HealthCheckPath='string', HealthCheckIntervalSeconds=123, HealthCheckTimeoutSeconds=123, HealthyThresholdCount=123, UnhealthyThresholdCount=123, Matcher={ 'HttpCode': 'string' } ) :type TargetGroupArn: string :param TargetGroupArn: [REQUIRED] The Amazon Resource Name (ARN) of the target group. :type HealthCheckProtocol: string :param HealthCheckProtocol: The protocol to use to connect with the target. :type HealthCheckPort: string :param HealthCheckPort: The port to use to connect with the target. :type HealthCheckPath: string :param HealthCheckPath: The ping path that is the destination for the health check request. :type HealthCheckIntervalSeconds: integer :param HealthCheckIntervalSeconds: The approximate amount of time, in seconds, between health checks of an individual target. :type HealthCheckTimeoutSeconds: integer :param HealthCheckTimeoutSeconds: The amount of time, in seconds, during which no response means a failed health check. :type HealthyThresholdCount: integer :param HealthyThresholdCount: The number of consecutive health checks successes required before considering an unhealthy target healthy. :type UnhealthyThresholdCount: integer :param UnhealthyThresholdCount: The number of consecutive health check failures required before considering the target unhealthy. :type Matcher: dict :param Matcher: The HTTP codes to use when checking for a successful response from a target. HttpCode (string) -- [REQUIRED]The HTTP codes. You can specify values between 200 and 499. The default value is 200. You can specify multiple values (for example, '200,202') or a range of values (for example, '200-299'). :rtype: dict :return: { 'TargetGroups': [ { 'TargetGroupArn': 'string', 'TargetGroupName': 'string', 'Protocol': 'HTTP'|'HTTPS', 'Port': 123, 'VpcId': 'string', 'HealthCheckProtocol': 'HTTP'|'HTTPS', 'HealthCheckPort': 'string', 'HealthCheckIntervalSeconds': 123, 'HealthCheckTimeoutSeconds': 123, 'HealthyThresholdCount': 123, 'UnhealthyThresholdCount': 123, 'HealthCheckPath': 'string', 'Matcher': { 'HttpCode': 'string' }, 'LoadBalancerArns': [ 'string', ] }, ] } :returns: (string) -- """ pass
python
def add_embedding(self, tag, embedding, labels=None, images=None, global_step=None): """Adds embedding projector data to the event file. It will also create a config file used by the embedding projector in TensorBoard. The folder containing the embedding data is named using the formula: If global_step is not None, the folder name is `tag + '_' + str(global_step).zfill(6)`; else, the folder name is `tag`. For example, tag = 'mnist', global_step = 12, the folder's name is 'mnist_000012'; when global_step = None, the folder's name is 'mnist'. See the following reference for the meanings of labels and images. Ref: https://www.tensorflow.org/versions/r1.2/get_started/embedding_viz Note: This function internally calls `asnumpy()` for MXNet `NDArray` inputs. Since `asnumpy()` is a blocking function call, this function would block the main thread till it returns. It may consequently affect the performance of async execution of the MXNet engine. Parameters ---------- tag : str Name for the `embedding`. embedding : MXNet `NDArray` or `numpy.ndarray` A matrix whose each row is the feature vector of a data point. labels : MXNet `NDArray` or `numpy.ndarray` or a list of elements convertible to str. Labels corresponding to the data points in the `embedding`. If the labels are 2D the first row is considered the column names. images : MXNet `NDArray` or `numpy.ndarray` Images of format NCHW corresponding to the data points in the `embedding`. global_step : int Global step value to record. If not set, default to zero. """ embedding_shape = embedding.shape if len(embedding_shape) != 2: raise ValueError('expected 2D NDArray as embedding data, while received an array with' ' ndim=%d' % len(embedding_shape)) data_dir = _get_embedding_dir(tag, global_step) save_path = os.path.join(self.get_logdir(), data_dir) try: os.makedirs(save_path) except OSError: logging.warning('embedding dir %s exists, files under this dir will be overwritten', save_path) if labels is not None: if (embedding_shape[0] != len(labels) and (not _is_2D_matrix(labels) or len(labels) != embedding_shape[0] + 1)): raise ValueError('expected equal values of embedding first dim and length of ' 'labels or embedding first dim + 1 for 2d labels ' ', while received %d and %d for each' % (embedding_shape[0], len(labels))) if self._logger is not None: self._logger.info('saved embedding labels to %s', save_path) _make_metadata_tsv(labels, save_path) if images is not None: img_labels_shape = images.shape if embedding_shape[0] != img_labels_shape[0]: raise ValueError('expected equal first dim size of embedding and images,' ' while received %d and %d for each' % (embedding_shape[0], img_labels_shape[0])) if self._logger is not None: self._logger.info('saved embedding images to %s', save_path) _make_sprite_image(images, save_path) if self._logger is not None: self._logger.info('saved embedding data to %s', save_path) _save_embedding_tsv(embedding, save_path) _add_embedding_config(self.get_logdir(), data_dir, labels is not None, images.shape if images is not None else None)
python
def get_feature_from_key(self, feature_key): """ Get feature for the provided feature key. Args: feature_key: Feature key for which feature is to be fetched. Returns: Feature corresponding to the provided feature key. """ feature = self.feature_key_map.get(feature_key) if feature: return feature self.logger.error('Feature "%s" is not in datafile.' % feature_key) return None
python
def get_supports(self): """Returns set of extension support strings referenced in this Registry :return: set of extension support strings """ out = set() for ext in self.extensions.values(): out.update(ext.get_supports()) return out
python
def mean_absolute_error(df, col_true, col_pred=None):
    """
    Compute mean absolute error of a predicted DataFrame.

    Note that this method will trigger the defined flow to execute.

    :param df: predicted data frame
    :type df: DataFrame
    :param col_true: column name of true value
    :type col_true: str
    :param col_pred: column name of predicted value, 'prediction_score' by default.
    :type col_pred: str

    :return: Mean absolute error
    :rtype: float
    """
    if not col_pred:
        col_pred = get_field_name_by_role(df, FieldRole.PREDICTED_VALUE)
    return _run_evaluation_node(df, col_true, col_pred)['mae']
java
public String radio (String name, String value, String defaultValue) { StringBuilder buf = new StringBuilder(); buf.append("<input type=\"radio\""); buf.append(" name=\"").append(name).append("\""); buf.append(" value=\"").append(value).append("\""); String selectedValue = getValue(name, defaultValue); if (value.equals(selectedValue)) { buf.append(_useXHTML ? " checked=\"checked\"" : " checked"); } buf.append(getCloseBrace()); return buf.toString(); }
python
def close(self): """Release libpci resources.""" if self._access is not None: _logger.debug("Cleaning up") pci_cleanup(self._access) self._access = None
python
def enqueue(self, func, *args, **kwargs): """Enqueue a function call or a :doc:`job <job>`. :param func: Function or a :doc:`job <job>` object. Must be serializable and available to :doc:`workers <worker>`. :type func: callable | :doc:`kq.Job <job>` :param args: Positional arguments for the function. Ignored if **func** is a :doc:`job <job>` object. :param kwargs: Keyword arguments for the function. Ignored if **func** is a :doc:`job <job>` object. :return: Enqueued job. :rtype: :doc:`kq.Job <job>` **Example:** .. testcode:: import requests from kafka import KafkaProducer from kq import Job, Queue # Set up a Kafka producer. producer = KafkaProducer(bootstrap_servers='127.0.0.1:9092') # Set up a queue. queue = Queue(topic='topic', producer=producer) # Enqueue a function call. queue.enqueue(requests.get, 'https://www.google.com/') # Enqueue a job object. job = Job(func=requests.get, args=['https://www.google.com/']) queue.enqueue(job) .. note:: The following rules apply when enqueueing a :doc:`job <job>`: * If ``Job.id`` is not set, a random one is generated. * If ``Job.timestamp`` is set, it is replaced with current time. * If ``Job.topic`` is set, it is replaced with current topic. * If ``Job.timeout`` is set, its value overrides others. * If ``Job.key`` is set, its value overrides others. * If ``Job.partition`` is set, its value overrides others. """ return self._default_enqueue_spec.enqueue(func, *args, **kwargs)
python
def setdoc(self,newdoc): """Set a different document. Usually no need to call this directly, invoked implicitly by :meth:`copy`""" self.doc = newdoc if self.doc and self.id: self.doc.index[self.id] = self for c in self: if isinstance(c, AbstractElement): c.setdoc(newdoc)
python
def create_ip_cert_links(ssl_dir, custom_hostname_link=None): """Create symlinks for SAN records :param ssl_dir: str Directory to create symlinks in :param custom_hostname_link: str Additional link to be created """ hostname = get_hostname(unit_get('private-address')) hostname_cert = os.path.join( ssl_dir, 'cert_{}'.format(hostname)) hostname_key = os.path.join( ssl_dir, 'key_{}'.format(hostname)) # Add links to hostname cert, used if os-hostname vars not set for net_type in [INTERNAL, ADMIN, PUBLIC]: try: addr = resolve_address(endpoint_type=net_type) cert = os.path.join(ssl_dir, 'cert_{}'.format(addr)) key = os.path.join(ssl_dir, 'key_{}'.format(addr)) if os.path.isfile(hostname_cert) and not os.path.isfile(cert): os.symlink(hostname_cert, cert) os.symlink(hostname_key, key) except NoNetworkBinding: log("Skipping creating cert symlink for ip in {} space, no " "local address found".format(net_type), WARNING) if custom_hostname_link: custom_cert = os.path.join( ssl_dir, 'cert_{}'.format(custom_hostname_link)) custom_key = os.path.join( ssl_dir, 'key_{}'.format(custom_hostname_link)) if os.path.isfile(hostname_cert) and not os.path.isfile(custom_cert): os.symlink(hostname_cert, custom_cert) os.symlink(hostname_key, custom_key)
java
private PExp getImpliesExists(ASetCompSetExp exp, ILexNameToken finmap, ILexNameToken findex) { AExistsExp exists = new AExistsExp(); AMapDomainUnaryExp domExp = new AMapDomainUnaryExp(); domExp.setType(new ABooleanBasicType()); domExp.setExp(getVarExp(finmap)); List<PMultipleBind> bindList = getMultipleSetBindList(domExp, findex); exists.setBindList(bindList); exists.setPredicate(getExistsPredicate(exp, finmap, findex)); return exists; }
python
def copy(self): """Create a copy of a BinaryQuadraticModel. Returns: :class:`.BinaryQuadraticModel` Examples: >>> bqm = dimod.BinaryQuadraticModel({1: 1, 2: 2}, {(1, 2): 0.5}, 0.5, dimod.SPIN) >>> bqm2 = bqm.copy() """ # new objects are constructed for each, so we just need to pass them in return BinaryQuadraticModel(self.linear, self.quadratic, self.offset, self.vartype, **self.info)
java
protected void activeStatusChanged(boolean newActiveState) throws PushStateException { active = newActiveState; log.debugf("changed mode %s", this); if (active && singletonConfiguration.pushStateWhenCoordinator()) doPushState(); }
python
def _open_ok(self, args): """ signal that the connection is ready This method signals to the client that the connection is ready for use. PARAMETERS: known_hosts: shortstr """ self.known_hosts = args.read_shortstr() AMQP_LOGGER.debug('Open OK! known_hosts [%s]' % self.known_hosts) return None
python
def scatter_plot(self, ax, topic_dims, t=None, ms_limits=True, **kwargs_plot): """ 2D or 3D scatter plot. :param axes ax: matplotlib axes (use Axes3D if 3D data) :param tuple topic_dims: list of (topic, dims) tuples, where topic is a string and dims is a list of dimensions to be plotted for that topic. :param int t: time indexes to be plotted :param dict kwargs_plot: argument to be passed to matplotlib's plot function, e.g. the style of the plotted points 'or' :param bool ms_limits: if set to True, automatically set axes boundaries to the sensorimotor boundaries (default: True) """ plot_specs = {'marker': 'o', 'linestyle': 'None'} plot_specs.update(kwargs_plot) # t_bound = float('inf') # if t is None: # for topic, _ in topic_dims: # t_bound = min(t_bound, self.counts[topic]) # t = range(t_bound) # data = self.pack(topic_dims, t) data = self.data_t(topic_dims, t) ax.plot(*(data.T), **plot_specs) if ms_limits: ax.axis(self.axes_limits(topic_dims))
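A hypothetical call, assuming `log` is an instance of the surrounding class that has recorded a 2-D 'motor' topic:

    import matplotlib.pyplot as plt

    fig, ax = plt.subplots()
    log.scatter_plot(ax, [('motor', [0, 1])], marker='x', color='k')
    plt.show()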
java
public static BigDecimal strToAmount(@CurrencyAmountStr final String amount) { if (amount == null) { return null; } final int dot = amount.indexOf('.'); final int scale; final String unscaledStr; if (dot == -1) { scale = 0; unscaledStr = amount; } else { scale = amount.length() - dot - 1; unscaledStr = amount.substring(0, dot) + amount.substring(dot + 1); } final BigInteger unscaled = new BigInteger(unscaledStr); return new BigDecimal(unscaled, scale); }
java
@Override protected boolean isContentType(String schemaId, String schemaVersion, String systemId, String publicId) { return (this.publicId != null && this.publicId.equalsIgnoreCase(publicId)) || (this.systemId != null && this.systemId.equalsIgnoreCase(systemId)); }
java
public static ntp_sync update(nitro_service client, ntp_sync resource) throws Exception { resource.validate("modify"); return ((ntp_sync[]) resource.update_resource(client))[0]; }
python
def _cs_disassemble_one(self, data, address): """Disassemble the data into an instruction in string form. """ asm, size = "", 0 disasm = list(self._disassembler.disasm_lite(bytes(data), address)) if len(disasm) > 0: address, size, mnemonic, op_str = disasm[0] asm = str(mnemonic + " " + op_str).strip() # Quick fix for Capstone 'bug'. if asm in ["repne", "rep", "lock", "data16"]: asm, size = "", 0 return asm, size
python
def log_in(self):
    """Perform the `log_in` task to setup the API session for future data requests."""
    if not self.password:
        # Password wasn't given, ask for it now
        self.password = getpass.getpass('Password: ')

    utils.pending_message('Performing login...')

    login_result = self.client.login(
        account=self.account,
        password=self.password
    )

    if 'error' in login_result:
        self.handle_failed_login(login_result)

    utils.info_message('Login successful')
python
def start_traffic(self, blocking=False, *ports): """ Start traffic on list of ports. :param blocking: True - start traffic and wait until traffic ends, False - start traffic and return. :param ports: list of ports to start traffic on. Default - all session ports. """ for chassis, chassis_ports in self._per_chassis_ports(*self._get_operation_ports(*ports)).items(): chassis.start_traffic(False, *chassis_ports) if blocking: for chassis, chassis_ports in self._per_chassis_ports(*self._get_operation_ports(*ports)).items(): chassis.wait_traffic(*chassis_ports)
java
public List<O> getItems(int count, int offset) { return getDatastore().find(clazz).offset(offset).limit(count).asList(); }
python
def then(self, success=None, failure=None): """ This method takes two optional arguments. The first argument is used if the "self promise" is fulfilled and the other is used if the "self promise" is rejected. In either case, this method returns another promise that effectively represents the result of either the first of the second argument (in the case that the "self promise" is fulfilled or rejected, respectively). Each argument can be either: * None - Meaning no action is taken * A function - which will be called with either the value of the "self promise" or the reason for rejection of the "self promise". The function may return: * A value - which will be used to fulfill the promise returned by this method. * A promise - which, when fulfilled or rejected, will cascade its value or reason to the promise returned by this method. * A value - which will be assigned as either the value or the reason for the promise returned by this method when the "self promise" is either fulfilled or rejected, respectively. """ ret = Promise() def callAndFulfill(v): """ A callback to be invoked if the "self promise" is fulfilled. """ try: # From 3.2.1, don't call non-functions values if _isFunction(success): newvalue = success(v) if _isPromise(newvalue): newvalue.then(lambda v: ret.fulfill(v), lambda r: ret.reject(r)) else: ret.fulfill(newvalue) elif success!=None: # From 3.2.6.4 ret.fulfill(v) else: pass except Exception as e: ret.reject(e) def callAndReject(r): """ A callback to be invoked if the "self promise" is rejected. """ try: if _isFunction(failure): newvalue = failure(r) if _isPromise(newvalue): newvalue.then(lambda v: ret.fulfill(v), lambda r: ret.reject(r)) else: ret.fulfill(newvalue) elif failure!=None: # From 3.2.6.5 ret.reject(r) else: pass except Exception as e: ret.reject(e) if self._state==self.PENDING: """ If this is still pending, then add callbacks to the existing promise that call either the success or rejected functions supplied and then fulfill the promise being returned by this method """ if success!=None: self._callbacks.append(callAndFulfill) if failure!=None: self._errbacks.append(callAndReject) elif self._state==self.FULFILLED: """ If this promise was already fulfilled, then we need to use the first argument to this method to determine the value to use in fulfilling the promise that we return from this method. """ try: if _isFunction(success): newvalue = success(self.value) if _isPromise(newvalue): newvalue.then(lambda v: ret.fulfill(v), lambda r: ret.reject(r)) else: ret.fulfill(newvalue) elif success!=None: # From 3.2.6.4 ret.fulfill(self.value) else: pass except Exception as e: ret.reject(e) elif self._state==self.REJECTED: """ If this promise was already rejected, then we need to use the second argument to this method to determine the value to use in fulfilling the promise that we return from this method. """ try: if _isFunction(failure): newvalue = failure(self.reason) if _isPromise(newvalue): newvalue.then(lambda v: ret.fulfill(v), lambda r: ret.reject(r)) else: ret.fulfill(newvalue) elif failure!=None: # From 3.2.6.5 ret.reject(self.reason) else: pass except Exception as e: ret.reject(e) return ret
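A chaining sketch for `then`; it assumes the surrounding `Promise` class exposes `fulfill`/`reject` and that `fulfill` runs the registered callbacks, as the body implies:

    p = Promise()
    q = p.then(lambda v: v + 1).then(lambda v: v * 10)
    p.fulfill(4)
    # q is fulfilled with (4 + 1) * 10 = 50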
python
def run(self, *args, **kwargs) -> Callable: """Return wrapped function. Haskell: runReader :: Reader r a -> r -> a This is the inverse of unit and returns the wrapped function. """ return self.fn(*args, **kwargs) if args or kwargs else self.fn
python
def _execute(self, stmt, *values): """ Gets a cursor, executes `stmt` and closes the cursor, fetching one row afterwards and returning its result. """ c = self._cursor() try: return c.execute(stmt, values).fetchone() finally: c.close()
java
public static void addGpsLogDataPoint( Connection connection, OmsGeopaparazziProject3To4Converter.GpsPoint point,
        long gpslogId ) throws Exception {
    Date timestamp = ETimeUtilities.INSTANCE.TIME_FORMATTER_LOCAL.parse(point.utctime);

    String insertSQL = "INSERT INTO " + TableDescriptions.TABLE_GPSLOG_DATA + "(" + //
            TableDescriptions.GpsLogsDataTableFields.COLUMN_ID.getFieldName() + ", " + //
            TableDescriptions.GpsLogsDataTableFields.COLUMN_LOGID.getFieldName() + ", " + //
            TableDescriptions.GpsLogsDataTableFields.COLUMN_DATA_LON.getFieldName() + ", " + //
            TableDescriptions.GpsLogsDataTableFields.COLUMN_DATA_LAT.getFieldName() + ", " + //
            TableDescriptions.GpsLogsDataTableFields.COLUMN_DATA_ALTIM.getFieldName() + ", " + //
            TableDescriptions.GpsLogsDataTableFields.COLUMN_DATA_TS.getFieldName() + //
            ") VALUES" + "(?,?,?,?,?,?)";

    try (PreparedStatement writeStatement = connection.prepareStatement(insertSQL)) {
        writeStatement.setLong(1, point.id);
        writeStatement.setLong(2, gpslogId);
        writeStatement.setDouble(3, point.lon);
        writeStatement.setDouble(4, point.lat);
        writeStatement.setDouble(5, point.altim);
        writeStatement.setLong(6, timestamp.getTime());

        writeStatement.executeUpdate();
    }
}
python
def scan(self, cursor=0, match=None, count=None): """ Incrementally return lists of key names. Also return a cursor indicating the scan position. ``match`` allows for filtering the keys by pattern ``count`` allows for hint the minimum number of returns """ f = Future() if self.keyspace is None: with self.pipe as pipe: res = pipe.scan(cursor=cursor, match=match, count=count) def cb(): f.set((res[0], [self.keyparse.decode(v) for v in res[1]])) pipe.on_execute(cb) return f if match is None: match = '*' match = "%s{%s}" % (self.keyspace, match) pattern = re.compile(r'^%s\{(.*)\}$' % self.keyspace) with self.pipe as pipe: res = pipe.scan(cursor=cursor, match=match, count=count) def cb(): keys = [] for k in res[1]: k = self.keyparse.decode(k) m = pattern.match(k) if m: keys.append(m.group(1)) f.set((res[0], keys)) pipe.on_execute(cb) return f
java
static boolean killProcess(final String processName, int id) { int pid; try { pid = processUtils.resolveProcessId(processName, id); if(pid > 0) { try { Runtime.getRuntime().exec(processUtils.getKillCommand(pid)); return true; } catch (Throwable t) { ProcessLogger.ROOT_LOGGER.debugf(t, "failed to kill process '%s' with pid '%s'", processName, pid); } } } catch (Throwable t) { ProcessLogger.ROOT_LOGGER.debugf(t, "failed to resolve pid of process '%s'", processName); } return false; }
python
import hashlib
import math
import os

import requests

# ProgressBar is assumed to be defined elsewhere in this module.


def download(url, file_name):
    r = requests.get(url, stream=True)
    file_size = int(r.headers['Content-length'])
    file_exists = False
    if os.path.isfile(file_name):
        local_file_size = os.path.getsize(file_name)
        if local_file_size == file_size:
            sha1_file = file_name + '.sha1'
            if os.path.isfile(sha1_file):
                print('sha1 found')
                with open(sha1_file) as f:
                    expected_sha1 = f.read().strip()
                BLOCKSIZE = 65536
                sha1 = hashlib.sha1()
                # Hash the file in binary mode; text mode would break on
                # arbitrary bytes and yield the wrong digest.
                with open(file_name, 'rb') as f:
                    buff = f.read(BLOCKSIZE)
                    while len(buff) > 0:
                        sha1.update(buff)
                        buff = f.read(BLOCKSIZE)
                # Compare hex digests; comparing against the hash object
                # itself would always be False.
                if expected_sha1 == sha1.hexdigest():
                    file_exists = True
                else:
                    print("File corrupt. Downloading again.")
                    os.remove(file_name)
            else:
                file_exists = True
        else:
            print("File corrupt. Downloading again.")
            os.remove(file_name)
    if not file_exists:
        factor = int(math.floor(math.log(file_size) / math.log(1024)))
        display_file_size = str(file_size // 1024 ** factor) + \
            ['B', 'KB', 'MB', 'GB', 'TB', 'PB'][factor]
        print("Source: " + url)
        print("Destination: " + file_name)
        print("Size: " + display_file_size)
        file_size_dl = 0
        block_sz = 8192
        with open(file_name, 'wb') as f:
            pbar = ProgressBar(file_size)
            for chunk in r.iter_content(chunk_size=block_sz):
                if not chunk:
                    continue
                chunk_size = len(chunk)
                file_size_dl += chunk_size
                f.write(chunk)
                pbar.update(chunk_size)
    else:
        print("File already exists - " + file_name)
    return True
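For illustration, calls under assumed inputs (URL and filename are placeholders):

# Downloads once, then short-circuits on the next call if the local copy's
# size (and optional .sha1 digest) matches the remote file.
download('https://example.com/models/weights.bin', 'weights.bin')
download('https://example.com/models/weights.bin', 'weights.bin')  # "File already exists"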
java
public static base_response update(nitro_service client, csparameter resource) throws Exception { csparameter updateresource = new csparameter(); updateresource.stateupdate = resource.stateupdate; return updateresource.update_resource(client); }
java
public Object evaluate(Task k, TaskRequest req, TaskResponse res) { ReturnValueImpl rslt = new ReturnValueImpl(); RuntimeRequestResponse tr = new RuntimeRequestResponse(); tr.enclose(req); tr.setAttribute(Attributes.RETURN_VALUE, rslt); run(k, tr, res); return rslt.getValue(); }
java
public void onServletInit (@Nonnull final Class <? extends GenericServlet> aServletClass) { if (LOGGER.isDebugEnabled ()) LOGGER.debug ("onServletInit: " + aServletClass); _updateStatus (aServletClass, EServletStatus.INITED); }
python
def get_level(self): """Gets the ``Grade`` corresponding to the assessment difficulty. return: (osid.grading.Grade) - the level raise: OperationFailed - unable to complete request *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for osid.resource.Resource.get_avatar_template if not bool(self._my_map['levelId']): raise errors.IllegalState('this Assessment has no level') mgr = self._get_provider_manager('GRADING') if not mgr.supports_grade_lookup(): raise errors.OperationFailed('Grading does not support Grade lookup') lookup_session = mgr.get_grade_lookup_session(proxy=getattr(self, "_proxy", None)) lookup_session.use_federated_gradebook_view() osid_object = lookup_session.get_grade(self.get_level_id()) return osid_object
python
def append_query_param(self, key, value): """ Append a query parameter :param string key: The query param key :param string value: The new value """ values = self.query_param(key, as_list=True, default=[]) values.append(value) return self.query_param(key, values)
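A small sketch of the intended behaviour, assuming a purl-style immutable URL class (the class name and constructor are assumptions):

url = URL('https://example.com/?tag=a')      # hypothetical builder object
url = url.append_query_param('tag', 'b')     # query becomes tag=a&tag=b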
python
def change_state_id(self, state_id=None):
        """
        Changes the id of the state to a new id. This function replaces the old state_id with the new state_id in all
        data flows and transitions.

        :param state_id: The new state id of the state
        """
        old_state_id = self.state_id
        super(ContainerState, self).change_state_id(state_id)
        # Use private variables to change ids to prevent validity checks
        # change id in all transitions
        for transition in self.transitions.values():
            if transition.from_state == old_state_id:
                transition._from_state = self.state_id
            if transition.to_state == old_state_id:
                transition._to_state = self.state_id

        # change id in all data_flows
        for data_flow in self.data_flows.values():
            if data_flow.from_state == old_state_id:
                data_flow._from_state = self.state_id
            if data_flow.to_state == old_state_id:
                data_flow._to_state = self.state_id
python
import hashlib
import hmac


def verify_signature(message, key, signature):
    """
    This function will verify the authenticity of a digital signature.
    For security purposes, Nylas includes a digital signature in the headers
    of every webhook notification, so that clients can verify that the
    webhook request came from Nylas and no one else. The signing key
    is your OAuth client secret, which only you and Nylas know.
    """
    digest = hmac.new(key, msg=message, digestmod=hashlib.sha256).hexdigest()
    # Use a constant-time comparison so timing differences don't leak
    # information about the expected digest.
    return hmac.compare_digest(digest, signature)
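A sketch of how a webhook handler might call this helper; the request object, header name, and CLIENT_SECRET constant are assumptions, not part of the original:

# Hypothetical handler; CLIENT_SECRET is your OAuth client secret.
def webhook_view(request):
    signature = request.headers.get('X-Nylas-Signature', '')
    if not verify_signature(request.data, CLIENT_SECRET.encode('utf8'), signature):
        return ('Invalid signature', 401)
    return ('', 200)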
java
public void setAlias(String alias) { m_alias = alias; String attributePath = (String)getAttribute(); boolean allPathsAliased = true; m_userAlias = new UserAlias(alias, attributePath, allPathsAliased); }
python
def feature (name, values, attributes = []):
    """ Declares a new feature with the given name, values, and attributes.
        name: the feature name
        values: a sequence of the allowable values - may be extended later with feature.extend
        attributes: a sequence of the feature's attributes (e.g. implicit, free, propagated, ...)
    """
    __validate_feature_attributes (name, attributes)

    feature = Feature(name, [], attributes)
    __all_features[name] = feature
    # Temporary measure while we have not fully moved from 'gristed strings'
    __all_features["<" + name + ">"] = feature

    name = add_grist(name)

    if 'subfeature' in attributes:
        __all_subfeatures.append(name)
    else:
        __all_top_features.append(feature)

    extend (name, values)

    # FIXME: why this is needed.
    if 'free' in attributes:
        __free_features.append (name)

    return feature
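For illustration, a declaration in the style the docstring describes; the feature name and values here are made up for the example:

# Declare a hypothetical propagated feature with three allowed values.
feature('optimization', ['off', 'speed', 'space'], ['propagated'])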
python
import numpy as np


def betweenness_bin(G):
    '''
    Node betweenness centrality is the fraction of all shortest paths in
    the network that contain a given node. Nodes with high values of
    betweenness centrality participate in a large number of shortest paths.

    Parameters
    ----------
    G : NxN np.ndarray
        binary directed/undirected connection matrix

    Returns
    -------
    BC : Nx1 np.ndarray
        node betweenness centrality vector

    Notes
    -----
    Betweenness centrality may be normalised to the range [0,1] as
    BC/[(N-1)(N-2)], where N is the number of nodes in the network.
    '''
    G = np.array(G, dtype=float)  # force G to have float type so it can be
    # compared to float np.inf

    n = len(G)  # number of nodes
    I = np.eye(n)  # identity matrix
    d = 1  # path length
    NPd = G.copy()  # number of paths of length |d|
    NSPd = G.copy()  # number of shortest paths of length |d|
    NSP = G.copy()  # number of shortest paths of any length
    L = G.copy()  # length of shortest paths

    NSP[np.where(I)] = 1
    L[np.where(I)] = 1

    # calculate NSP and L
    while np.any(NSPd):
        d += 1
        NPd = np.dot(NPd, G)
        NSPd = NPd * (L == 0)
        NSP += NSPd
        L = L + d * (NSPd != 0)

    L[L == 0] = np.inf  # L for disconnected vertices is inf
    L[np.where(I)] = 0
    NSP[NSP == 0] = 1  # NSP for disconnected vertices is 1

    DP = np.zeros((n, n))  # vertex on vertex dependency
    diam = d - 1

    # calculate DP
    for d in range(diam, 1, -1):
        DPd1 = np.dot(((L == d) * (1 + DP) / NSP), G.T) * \
            ((L == (d - 1)) * NSP)
        DP += DPd1

    return np.sum(DP, axis=0)
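A quick sanity check on a 3-node path graph: only the middle node lies on a shortest path between the two end nodes, so it should receive the highest score:

import numpy as np

# Path graph 0-1-2 as a binary undirected adjacency matrix.
G = np.array([[0, 1, 0],
              [1, 0, 1],
              [0, 1, 0]])
print(betweenness_bin(G))   # the middle entry is the largest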
java
@Override public String extractClassNameIfProvide(Node node, Node parent) { String namespace = extractClassNameIfGoog(node, parent, "goog.provide"); if (namespace == null) { namespace = extractClassNameIfGoog(node, parent, "goog.module"); } return namespace; }
python
def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): """ Sleep an incremental amount of time after each attempt, starting at wait_incrementing_start and incrementing by wait_incrementing_increment """ result = self._wait_incrementing_start + (self._wait_incrementing_increment * (previous_attempt_number - 1)) if result > self._wait_incrementing_max: result = self._wait_incrementing_max if result < 0: result = 0 return result
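A worked pass through the arithmetic above, under assumed settings (start=1000 ms, increment=2000 ms, max=10000 ms):

start, increment, maximum = 1000, 2000, 10000
for attempt in range(1, 7):
    wait = min(max(start + increment * (attempt - 1), 0), maximum)
    print(attempt, wait)   # 1000, 3000, 5000, 7000, 9000, then capped at 10000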
java
public static byte[] hex2bin(final String s) {
        String m = s;
        if (s == null) {
            // Allow empty input string.
            m = "";
        } else if (s.length() % 2 != 0) {
            // Assume leading zero for odd string length
            m = "0" + s;
        }
        final byte[] r = new byte[m.length() / 2];
        for (int i = 0, n = 0; i < m.length(); n++) {
            final char h = m.charAt(i++);
            final char l = m.charAt(i++);
            r[n] = (byte) (hex2bin(h) * 16 + hex2bin(l));
        }
        return r;
    }