Dataset columns:
language: string, 2 distinct values (java, python)
func_code_string: string, lengths 63 to 466k characters
java
private static int processVelocity(VERTICAL_DIRECTION direction, float sensorReading, float maxSensorReading) {
    switch (direction) {
        case UP:
            return (int) (-1 * (maxSensorReading + sensorReading));
        case DOWN:
            return (int) sensorReading;
        case NONE:
        default:
            return 0;
    }
}
python
def remove_delegate(self, callback):
    """
    Unregisters a registered delegate function or a method.

    Args:
        callback(function): method to trigger when push center receives events
    """
    if callback not in self._delegate_methods:
        return
    self._delegate_methods.remove(callback)
python
def get_head_form_html(req: "Request", forms: List[Form]) -> str:
    """
    Returns the extra HTML that needs to be injected into the ``<head>``
    section for a Deform form to work properly.
    """
    # https://docs.pylonsproject.org/projects/deform/en/latest/widget.html#widget-requirements
    js_resources = []  # type: List[str]
    css_resources = []  # type: List[str]
    for form in forms:
        resources = form.get_widget_resources()  # type: Dict[str, List[str]]
        # Add, ignoring duplicates:
        js_resources.extend(x for x in resources['js'] if x not in js_resources)
        css_resources.extend(x for x in resources['css'] if x not in css_resources)
    js_links = [req.static_url(r) for r in js_resources]
    css_links = [req.static_url(r) for r in css_resources]
    js_tags = ['<script type="text/javascript" src="%s"></script>' % link
               for link in js_links]
    css_tags = ['<link rel="stylesheet" href="%s"/>' % link
                for link in css_links]
    tags = js_tags + css_tags
    head_html = "\n".join(tags)
    return head_html
python
def merge_elisions(elided: List[str]) -> str:
    """
    Given a list of strings with different space swapping elisions applied,
    merge the elisions, taking the most without compounding the omissions.

    :param elided:
    :return:

    >>> merge_elisions([
    ... "ignavae agua multum hiatus", "ignav agua multum hiatus" ,"ignavae agua mult hiatus"])
    'ignav agua mult hiatus'
    """
    results = list(elided[0])
    for line in elided:
        for idx, car in enumerate(line):
            if car == " ":
                results[idx] = " "
    return "".join(results)
java
private void doResume(long id) {
    if (paused) {
        log.debug(String.format("%s - Resumed connection to %s", this, context.target()));
        paused = false;
        checkDrain();
    }
}
python
def get_user_last_submissions(self, limit=5, request=None):
    """ Get last submissions of a user """
    if request is None:
        request = {}

    request.update({"username": self._user_manager.session_username()})

    # Before, submissions were first sorted by submission date, then grouped
    # and then resorted by submission date before limiting. Actually, grouping
    # and pushing, keeping the max date, followed by result filtering is much more
    # efficient
    data = self._database.submissions.aggregate([
        {"$match": request},
        {"$group": {"_id": {"courseid": "$courseid", "taskid": "$taskid"},
                    "submitted_on": {"$max": "$submitted_on"},
                    "submissions": {"$push": {
                        "_id": "$_id",
                        "result": "$result",
                        "status": "$status",
                        "courseid": "$courseid",
                        "taskid": "$taskid",
                        "submitted_on": "$submitted_on"
                    }},
                    }},
        {"$project": {
            "submitted_on": 1,
            "submissions": {
                # This could be replaced by $filter if mongo v3.2 is set as dependency
                "$setDifference": [
                    {"$map": {
                        "input": "$submissions",
                        "as": "submission",
                        "in": {
                            "$cond": [{"$eq": ["$submitted_on", "$$submission.submitted_on"]},
                                      "$$submission",
                                      False]
                        }
                    }},
                    [False]
                ]
            }
        }},
        {"$sort": {"submitted_on": pymongo.DESCENDING}},
        {"$limit": limit}
    ])

    return [item["submissions"][0] for item in data]
java
@Nonnull
public static <R> LBoolFunction<R> boolFunctionFrom(Consumer<LBoolFunctionBuilder<R>> buildingFunction) {
    LBoolFunctionBuilder builder = new LBoolFunctionBuilder();
    buildingFunction.accept(builder);
    return builder.build();
}
java
@SuppressWarnings("unchecked") @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case AfplibPackage.BDD__UBASE: setUBASE((Integer)newValue); return; case AfplibPackage.BDD__RESERVED: setReserved((Integer)newValue); return; case AfplibPackage.BDD__XUPUB: setXUPUB((Integer)newValue); return; case AfplibPackage.BDD__YUPUB: setYUPUB((Integer)newValue); return; case AfplibPackage.BDD__XEXTENT: setXEXTENT((Integer)newValue); return; case AfplibPackage.BDD__YEXTENT: setYEXTENT((Integer)newValue); return; case AfplibPackage.BDD__RESERVED2: setReserved2((Integer)newValue); return; case AfplibPackage.BDD__TYPE: setTYPE((Integer)newValue); return; case AfplibPackage.BDD__MOD: setMOD((Integer)newValue); return; case AfplibPackage.BDD__LID: setLID((Integer)newValue); return; case AfplibPackage.BDD__COLOR: setCOLOR((Integer)newValue); return; case AfplibPackage.BDD__MODULEWIDTH: setMODULEWIDTH((Integer)newValue); return; case AfplibPackage.BDD__ELEMENTHEIGHT: setELEMENTHEIGHT((Integer)newValue); return; case AfplibPackage.BDD__MULT: setMULT((Integer)newValue); return; case AfplibPackage.BDD__WENE: setWENE((Integer)newValue); return; case AfplibPackage.BDD__TRIPLETS: getTriplets().clear(); getTriplets().addAll((Collection<? extends Triplet>)newValue); return; } super.eSet(featureID, newValue); }
python
def show(self, n=20, truncate=True, vertical=False):
    """Prints the first ``n`` rows to the console.

    :param n: Number of rows to show.
    :param truncate: If set to True, truncate strings longer than 20 chars by default.
        If set to a number greater than one, truncates long strings to length ``truncate``
        and align cells right.
    :param vertical: If set to True, print output rows vertically (one line
        per column value).

    >>> df
    DataFrame[age: int, name: string]
    >>> df.show()
    +---+-----+
    |age| name|
    +---+-----+
    |  2|Alice|
    |  5|  Bob|
    +---+-----+
    >>> df.show(truncate=3)
    +---+----+
    |age|name|
    +---+----+
    |  2| Ali|
    |  5| Bob|
    +---+----+
    >>> df.show(vertical=True)
    -RECORD 0-----
     age  | 2
     name | Alice
    -RECORD 1-----
     age  | 5
     name | Bob
    """
    if isinstance(truncate, bool) and truncate:
        print(self._jdf.showString(n, 20, vertical))
    else:
        print(self._jdf.showString(n, int(truncate), vertical))
java
protected TextService getServiceInstance(Map<String,String> headers) throws ServiceException {
    try {
        String requestPath = headers.get(Listener.METAINFO_REQUEST_PATH);
        String[] pathSegments = requestPath != null ? requestPath.split("/") : null;
        if (pathSegments == null)
            throw new ServiceException(ServiceException.INTERNAL_ERROR,
                    "Unable to find a service or handler for request path: " + requestPath);
        String contentType = headers.get(Listener.METAINFO_CONTENT_TYPE);
        String serviceClassName = MDW_REST_SERVICE_PROVIDER_PACKAGE + "." + pathSegments[0];
        try {
            // normal classloader -- built-in service
            Class<? extends TextService> serviceClass = Class.forName(serviceClassName).asSubclass(TextService.class);
            return serviceClass.newInstance();
        }
        catch (ClassNotFoundException ex) {
            // try dynamic based on annotations eg: api/Users/dxoakes
            Class<? extends RegisteredService> serviceType = Listener.CONTENT_TYPE_JSON.equals(contentType)
                    ? JsonService.class : XmlService.class;
            MdwServiceRegistry registry = MdwServiceRegistry.getInstance();
            String pkgName = null;
            for (int i = 0; i < pathSegments.length; i++) {
                String pathSegment = pathSegments[i];
                if (i == 0)
                    pkgName = pathSegment;
                else
                    pkgName += "." + pathSegment;
                Package pkg = PackageCache.getPackage(pkgName);
                if (pkg != null) {
                    // try without any subpath first (@Path="/")
                    TextService service = (TextService)registry.getDynamicServiceForPath(pkg, serviceType, "/");
                    if (service == null && i < pathSegments.length - 1) {
                        service = (TextService)registry.getDynamicServiceForPath(pkg, serviceType, "/" + pathSegments[i + 1]);
                    }
                    if (service != null)
                        return service;
                }
            }
            // lastly, try process invoker mapping
            AssetRequest processRequest = ProcessRequests.getRequest(headers.get(Listener.METAINFO_HTTP_METHOD), requestPath);
            if (processRequest != null) {
                return new ProcessInvoker(processRequest);
            }
            return null;
        }
    }
    catch (Exception ex) {
        throw new ServiceException(ServiceException.INTERNAL_ERROR, ex.getMessage(), ex);
    }
}
java
public void pushEvent(final String attributeName, final AttributeValue value, final EventType eventType)
        throws DevFailed {
    switch (eventType) {
        case CHANGE_EVENT:
        case ARCHIVE_EVENT:
        case USER_EVENT:
            // set attribute value
            final AttributeImpl attribute = AttributeGetterSetter.getAttribute(attributeName,
                    device.getAttributeList());
            attribute.lock();
            try {
                attribute.updateValue(value);
                // push the event
                EventManager.getInstance().pushAttributeValueEvent(name, attributeName, eventType);
            } catch (final DevFailed e) {
                EventManager.getInstance().pushAttributeErrorEvent(name, attributeName, e);
            } finally {
                attribute.unlock();
            }
            break;
        default:
            throw DevFailedUtils.newDevFailed("Only USER, ARCHIVE or CHANGE event can be sent");
    }
}
java
public AuditSink create(Config config, ValueAuditRuntimeMetadata auditRuntimeMetadata) {
    String sinkClassName = DEFAULT_AUDIT_SINK_CLASS;
    if (config.hasPath(AUDIT_SINK_CLASS_NAME_KEY)) {
        sinkClassName = config.getString(AUDIT_SINK_CLASS_NAME_KEY);
    }

    log.info("Using audit sink class name/alias " + sinkClassName);

    try {
        return (AuditSink)ConstructorUtils.invokeConstructor(
                Class.forName(this.aliasResolver.resolve(sinkClassName)), config, auditRuntimeMetadata);
    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException
            | InstantiationException | ClassNotFoundException e) {
        throw new RuntimeException(e);
    }
}
python
def showDataDirectoriesData(peInstance):
    """ Prints the DATA_DIRECTORY fields. """
    print "[+] Data directories:\n"
    dirs = peInstance.ntHeaders.optionalHeader.dataDirectory
    counter = 1
    for dir in dirs:
        print "[%d] --> Name: %s -- RVA: 0x%08x -- SIZE: 0x%08x" % (counter, dir.name.value, dir.rva.value, dir.size.value)
        counter += 1
java
private Map<String, CmsGalleryTypeInfo> readGalleryInfosByTypeNames(List<String> resourceTypes) {
    Map<String, CmsGalleryTypeInfo> galleryTypeInfos = new HashMap<String, CmsGalleryTypeInfo>();
    for (String typeName : resourceTypes) {
        try {
            addGalleriesForType(galleryTypeInfos, typeName);
        } catch (CmsLoaderException e1) {
            logError(e1);
        }
    }
    return galleryTypeInfos;
}
java
public ChronicleMapBuilder<K, V> valueReaderAndDataAccess(
        SizedReader<V> valueReader, @NotNull DataAccess<V> valueDataAccess) {
    valueBuilder.reader(valueReader);
    valueBuilder.dataAccess(valueDataAccess);
    return this;
}
python
def update(self, delta_seconds):
    """update tweeners. delta_seconds is time in seconds since last frame"""
    for obj in tuple(self.current_tweens):
        for tween in tuple(self.current_tweens[obj]):
            done = tween.update(delta_seconds)
            if done:
                self.current_tweens[obj].remove(tween)
                if tween.on_complete:
                    tween.on_complete(tween.target)
        if not self.current_tweens[obj]:
            del self.current_tweens[obj]

    return self.current_tweens
java
@Nullable
public T get(int deviceId) {
    try {
        locksMap.get(deviceId).readLock().lock();
        return backingMap.get(deviceId);
    } finally {
        locksMap.get(deviceId).readLock().unlock();
    }
}
java
@POST @Path("/retry") @ApiOperation("Retry the last failed task for each workflow from the list") public BulkResponse retry(List<String> workflowIds) { return workflowBulkService.retry(workflowIds); }
java
public void setInputMap(final URI map) {
    assert !map.isAbsolute();
    setProperty(INPUT_DITAMAP_URI, map.toString());
    // Deprecated since 2.2
    setProperty(INPUT_DITAMAP, toFile(map).getPath());
}
java
public void showProgress(boolean show) {
    Log.v(TAG, show ? "Disabling the button while showing progress"
                    : "Enabling the button and hiding progress");
    setEnabled(!show);
    progress.setVisibility(show ? VISIBLE : GONE);
    if (show) {
        icon.setVisibility(INVISIBLE);
        labeledLayout.setVisibility(INVISIBLE);
        return;
    }
    icon.setVisibility(shouldShowLabel ? GONE : VISIBLE);
    labeledLayout.setVisibility(!shouldShowLabel ? GONE : VISIBLE);
}
python
def _sysfs_attr(name, value=None, log_lvl=None, log_msg=None):
    '''
    Simple wrapper with logging around sysfs.attr
    '''
    if isinstance(name, six.string_types):
        name = [name]
    res = __salt__['sysfs.attr'](os.path.join(*name), value)
    if not res and log_lvl is not None and log_msg is not None:
        log.log(LOG[log_lvl], log_msg)
    return res
python
def reject_sv(m, s, y):
    """ Sample from N(m, s^2) times SV likelihood using rejection.

    SV likelihood (in x) corresponds to y ~ N(0, exp(x)).
    """
    mp = m + 0.5 * s**2 * (-1. + y**2 * np.exp(-m))
    ntries = 0
    while True:
        ntries += 1
        x = stats.norm.rvs(loc=mp, scale=s)
        u = stats.uniform.rvs()
        if np.log(u) < - 0.5 * y**2 * (np.exp(-x) - np.exp(-m) * (1. + m - x)):
            break
        if ntries > 1000:
            print('1000 failed attempts, m,s,y=%f, %f, %f' % (m, s, y))
            break
    return x
python
def _add_replace_pair(self, name, value, quote):
    """
    Adds a replace part to the map of replace pairs.

    :param name: The name of the replace pair.
    :param value: The value of the replace pair.
    """
    key = '@' + name + '@'
    key = key.lower()

    class_name = value.__class__.__name__

    if class_name in ['int', 'float']:
        value = str(value)
    elif class_name in ['bool']:
        value = '1' if value else '0'
    elif class_name in ['str']:
        if quote:
            value = "'" + value + "'"
    else:
        self._io.log_verbose("Ignoring constant {} which is an instance of {}".format(name, class_name))

    self._replace_pairs[key] = value
python
def set_automaster(
        name,
        device,
        fstype,
        opts='',
        config='/etc/auto_salt',
        test=False,
        **kwargs):
    '''
    Verify that this mount is represented in the auto_salt, change the mount
    to match the data passed, or add the mount if it is not present.

    CLI Example:

    .. code-block:: bash

        salt '*' mount.set_automaster /mnt/foo /dev/sdz1 ext4
    '''
    # Fix the opts type if it is a list
    if isinstance(opts, list):
        opts = ','.join(opts)
    lines = []
    change = False
    present = False
    automaster_file = "/etc/auto_master"

    if not os.path.isfile(config):
        __salt__['file.touch'](config)
        __salt__['file.append'](automaster_file, "/-\t\t\t{0}".format(config))

    name = "/..{0}".format(name)
    device_fmt = "{0}:{1}".format(fstype, device)
    type_opts = "-fstype={0},{1}".format(fstype, opts)

    if fstype == 'smbfs':
        device_fmt = device_fmt.replace(fstype, "")

    try:
        with salt.utils.files.fopen(config, 'r') as ifile:
            for line in ifile:
                line = salt.utils.stringutils.to_unicode(line)
                if line.startswith('#'):
                    # Commented
                    lines.append(line)
                    continue
                if not line.strip():
                    # Blank line
                    lines.append(line)
                    continue
                comps = line.split()
                if len(comps) != 3:
                    # Invalid entry
                    lines.append(line)
                    continue
                if comps[0] == name or comps[2] == device_fmt:
                    # check to see if there are changes
                    # and fix them if there are any
                    present = True
                    if comps[0] != name:
                        change = True
                        comps[0] = name
                    if comps[1] != type_opts:
                        change = True
                        comps[1] = type_opts
                    if comps[2] != device_fmt:
                        change = True
                        comps[2] = device_fmt
                    if change:
                        log.debug(
                            'auto_master entry for mount point %s needs to be '
                            'updated', name
                        )
                        newline = (
                            '{0}\t{1}\t{2}\n'.format(
                                name, type_opts, device_fmt)
                        )
                        lines.append(newline)
                else:
                    lines.append(line)
    except (IOError, OSError) as exc:
        msg = 'Couldn\'t read from {0}: {1}'
        raise CommandExecutionError(msg.format(config, exc))

    if change:
        if not salt.utils.args.test_mode(test=test, **kwargs):
            try:
                with salt.utils.files.fopen(config, 'wb') as ofile:
                    # The line was changed, commit it!
                    ofile.writelines(salt.utils.data.encode(lines))
            except (IOError, OSError):
                msg = 'File not writable {0}'
                raise CommandExecutionError(msg.format(config))

        return 'change'

    if not change:
        if present:
            # The right entry is already here
            return 'present'
        else:
            if not salt.utils.args.test_mode(test=test, **kwargs):
                # The entry is new, add it to the end of the fstab
                newline = (
                    '{0}\t{1}\t{2}\n'.format(
                        name, type_opts, device_fmt)
                )
                lines.append(newline)
                try:
                    with salt.utils.files.fopen(config, 'wb') as ofile:
                        # The line was changed, commit it!
                        ofile.writelines(salt.utils.data.encode(lines))
                except (IOError, OSError):
                    raise CommandExecutionError(
                        'File not writable {0}'.format(
                            config
                        )
                    )
        return 'new'
java
public int getSubscribersCount(EventPublisher source) {
    GenericEventDispatcher<?> dispatcherObject = dispatchers.get(source);
    if (dispatcherObject == null) {
        return 0;
    } else {
        return dispatcherObject.getListenersCount();
    }
}
java
public static StreamShardMetadata convertToStreamShardMetadata(KinesisStreamShard kinesisStreamShard) {
    StreamShardMetadata streamShardMetadata = new StreamShardMetadata();

    streamShardMetadata.setStreamName(kinesisStreamShard.getStreamName());
    streamShardMetadata.setShardId(kinesisStreamShard.getShard().getShardId());
    streamShardMetadata.setParentShardId(kinesisStreamShard.getShard().getParentShardId());
    streamShardMetadata.setAdjacentParentShardId(kinesisStreamShard.getShard().getAdjacentParentShardId());

    if (kinesisStreamShard.getShard().getHashKeyRange() != null) {
        streamShardMetadata.setStartingHashKey(kinesisStreamShard.getShard().getHashKeyRange().getStartingHashKey());
        streamShardMetadata.setEndingHashKey(kinesisStreamShard.getShard().getHashKeyRange().getEndingHashKey());
    }

    if (kinesisStreamShard.getShard().getSequenceNumberRange() != null) {
        streamShardMetadata.setStartingSequenceNumber(kinesisStreamShard.getShard().getSequenceNumberRange().getStartingSequenceNumber());
        streamShardMetadata.setEndingSequenceNumber(kinesisStreamShard.getShard().getSequenceNumberRange().getEndingSequenceNumber());
    }

    return streamShardMetadata;
}
python
def _rotate(n, x, y, rx, ry):
    """Rotate and flip a quadrant appropriately

    Based on the implementation here:
        https://en.wikipedia.org/w/index.php?title=Hilbert_curve&oldid=797332503
    """
    if ry == 0:
        if rx == 1:
            x = n - 1 - x
            y = n - 1 - y
        return y, x
    return x, y
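For reference, a minimal sketch of how a helper like _rotate is typically driven when converting a one-dimensional Hilbert index into (x, y) coordinates. The d2xy name and loop structure follow the Wikipedia reference implementation cited in the docstring; they are an assumption for illustration, not this library's actual API:

def d2xy(n, d):
    # Convert Hilbert index d into (x, y) on an n-by-n grid (n a power of 2).
    # Hypothetical driver, adapted from the Wikipedia C reference.
    x = y = 0
    t = d
    s = 1
    while s < n:
        rx = 1 & (t // 2)
        ry = 1 & (t ^ rx)
        x, y = _rotate(s, x, y, rx, ry)  # undo this quadrant's rotation/flip
        x += s * rx
        y += s * ry
        t //= 4
        s *= 2
    return x, y

For n = 2 this visits (0, 0), (0, 1), (1, 1), (1, 0) for d = 0..3, which is the first-order Hilbert ordering.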
python
def check_docstring_missing(self, definition, docstring):
    """D10{0,1,2,3}: Public definitions should have docstrings.

    All modules should normally have docstrings.  [...] all functions and
    classes exported by a module should also have docstrings. Public
    methods (including the __init__ constructor) should also have
    docstrings.

    Note: Public (exported) definitions are either those with names listed
          in __all__ variable (if present), or those that do not start
          with a single underscore.
    """
    if (not docstring and definition.is_public or
            docstring and is_blank(ast.literal_eval(docstring))):
        codes = {Module: violations.D100,
                 Class: violations.D101,
                 NestedClass: violations.D106,
                 Method: (lambda: violations.D105() if definition.is_magic
                          else (violations.D107() if definition.is_init
                                else violations.D102())),
                 Function: violations.D103,
                 NestedFunction: violations.D103,
                 Package: violations.D104}
        return codes[type(definition)]()
java
public void setRowMargin(int l, int t, int r, int b) {
    mRowMargin[0] = l;
    mRowMargin[1] = t;
    mRowMargin[2] = r;
    mRowMargin[3] = b;
}
python
def set_env(self, key, value):
    """Sets environment variables by prepending the app_name to `key`.

    Also registers the environment variable with the instance object preventing
    an otherwise-required call to `reload()`.
    """
    os.environ[make_env_key(self.appname, key)] = str(value)  # must coerce to string
    self._registered_env_keys.add(key)
    self._clear_memoization()
python
def get_methods(self):
    """
    Retrieves the list of tuples (command, method) for this command handler
    """
    return [
        ("levels", self.print_levels),
        ("make", self.make_report),
        ("clear", self.clear_report),
        ("show", self.show_report),
        ("write", self.write_report),
    ]
python
def merge_google_napoleon_docs(prnt_doc=None, child_doc=None):
    """ Merge two google-style docstrings into a single docstring, according to napoleon docstring sections.

    Given the google-style docstrings from a parent and child's attributes, merge the docstring
    sections such that the child's section is used, wherever present, otherwise the parent's
    section is used.

    Any whitespace that can be uniformly removed from a docstring's second line and onwards is
    removed. Sections will be separated by a single blank line.

    Aliased docstring sections are normalized. E.g. Args, Arguments -> Parameters

    Parameters
    ----------
    prnt_doc: Optional[str]
        The docstring from the parent.
    child_doc: Optional[str]
        The docstring from the child.

    Returns
    -------
    Union[str, None]
        The merged docstring.
    """
    style = "google"
    return merge_all_sections(parse_napoleon_doc(prnt_doc, style),
                              parse_napoleon_doc(child_doc, style),
                              style)
python
def leaves(self, name):
    """
    RETURN LEAVES OF GIVEN PATH NAME
    pull leaves, considering query_path and namespace
    pull all first-level properties
    pull leaves, including parent leaves
    pull the head of any tree by name

    :param name:
    :return:
    """
    return list(self.lookup_leaves.get(unnest_path(name), Null))
java
protected void initModule() {
    Object o;
    CmsModule module;

    if (CmsStringUtil.isEmpty(getParamAction()) || CmsDialog.DIALOG_INITIAL.equals(getParamAction())) {
        // this is the initial dialog call
        if (CmsStringUtil.isNotEmpty(m_paramModule)) {
            // edit an existing module, get it from manager
            o = OpenCms.getModuleManager().getModule(m_paramModule);
        } else {
            // create a new module
            o = null;
        }
    } else {
        // this is not the initial call, get module from session
        o = getDialogObject();
    }

    if (!(o instanceof CmsModule)) {
        // create a new module
        module = new CmsModule();
    } else {
        // reuse module stored in session
        module = (CmsModule)((CmsModule)o).clone();
    }

    List exportpoints = module.getExportPoints();
    m_exportpoint = new CmsExportPoint();
    if ((exportpoints != null) && (exportpoints.size() > 0)) {
        Iterator i = exportpoints.iterator();
        while (i.hasNext()) {
            CmsExportPoint exportpoint = (CmsExportPoint)i.next();
            if (exportpoint.getUri().equals(m_paramExportpoint)) {
                m_exportpoint = exportpoint;
            }
        }
    }
}
python
def add_service(self, zeroconf, srv_type, srv_name):
    """Method called when a new Zeroconf client is detected.

    Return True if the zeroconf client is a Glances server
    Note: the return code will never be used
    """
    if srv_type != zeroconf_type:
        return False
    logger.debug("Check new Zeroconf server: %s / %s" % (srv_type, srv_name))
    info = zeroconf.get_service_info(srv_type, srv_name)
    if info:
        new_server_ip = socket.inet_ntoa(info.address)
        new_server_port = info.port

        # Add server to the global dict
        self.servers.add_server(srv_name, new_server_ip, new_server_port)
        logger.info("New Glances server detected (%s from %s:%s)" % (srv_name, new_server_ip, new_server_port))
    else:
        logger.warning(
            "New Glances server detected, but Zeroconf info failed to be grabbed")
    return True
python
def delete_message(self, chat_id, message_id):
    """
    Use this method to delete message. Returns True on success.

    :param chat_id: in which chat to delete
    :param message_id: which message to delete
    :return: API reply.
    """
    return apihelper.delete_message(self.token, chat_id, message_id)
java
private void rejectHandshake(Conversation conversation, int requestNumber, String rejectedField) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "rejectHandshake",
                    new Object[] { conversation, requestNumber, rejectedField });

    SIConnectionLostException exception = new SIConnectionLostException(
        nls.getFormattedMessage("INVALID_PROP_SICO8012", null, null)
    );

    FFDCFilter.processException(exception,
                                CLASS_NAME + ".rejectHandshake",
                                CommsConstants.COMMONSERVERRECEIVELISTENER_HSREJCT_01,
                                this);

    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
        SibTr.debug(this, tc, "Invalid handshake type received - rejecting field:", rejectedField);

    StaticCATHelper.sendExceptionToClient(exception,
                                          CommsConstants.COMMONSERVERRECEIVELISTENER_HSREJCT_01,
                                          conversation, requestNumber);

    // At this point we really don't want anything more to do with this client - so close him
    closeConnection(conversation);

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(this, tc, "rejectHandshake");
}
java
protected void processCollectionRequest(HyperionContext hyperionContext) {
    EndpointRequest request = hyperionContext.getEndpointRequest();
    EndpointResponse response = hyperionContext.getEndpointResponse();
    ApiVersionPlugin<ApiObject<Serializable>,PersistentObject<Serializable>,Serializable> apiVersionPlugin =
            hyperionContext.getVersionPlugin();
    EntityPlugin plugin = hyperionContext.getEntityPlugin();

    List<ApiObject<Serializable>> clientObjects = null;
    try {
        clientObjects = marshaller.unmarshallCollection(request.getInputStream(), apiVersionPlugin.getApiClass());
    } catch (WriteLimitException e) {
        throw new BadRequestException(
                messageSource.getErrorMessage(ERROR_WRITE_LIMIT, hyperionContext.getLocale(), e.getWriteLimit()), e);
    } catch (MarshallingException e) {
        throw new BadRequestException(
                messageSource.getErrorMessage(ERROR_READING_REQUEST, hyperionContext.getLocale(), e.getMessage()), e);
    }

    PersistenceContext persistenceContext = buildPersistenceContext(hyperionContext);

    Set<String> fieldSet = persistenceContext.getRequestedFields();
    if (fieldSet != null)
        fieldSet.add("id");

    List<ApiObject> saved = plugin.getPersistenceOperations().createOrUpdateItems(clientObjects, persistenceContext);

    processChangeEvents(hyperionContext, persistenceContext);

    response.setResponseCode(200);

    EntityList<ApiObject> entityResponse = new EntityList<>();
    entityResponse.setEntries(saved);
    hyperionContext.setResult(entityResponse);
}
java
private void clearDeployments(final Handler<AsyncResult<Void>> doneHandler) {
    context.execute(new Action<Void>() {
        @Override
        public Void perform() {
            Collection<String> sdeploymentsInfo = deployments.get(group);
            for (String sdeploymentInfo : sdeploymentsInfo) {
                JsonObject deploymentInfo = new JsonObject(sdeploymentInfo);
                if (deploymentInfo.getString("address").equals(internal)) {
                    deployments.remove(group, sdeploymentInfo);
                }
            }
            return null;
        }
    }, doneHandler);
}
python
def first(iterable = None, *, name = None, metric = call_default):
    """Measure time elapsed to produce first item of an iterable

    :arg iterable: any iterable
    :arg function metric: f(name, 1, time)
    :arg str name: name for the metric
    """
    if iterable is None:
        return _first_decorator(name, metric)
    else:
        return _do_first(iterable, name, metric)
java
public int ENgetlinkindex( String id ) throws EpanetException {
    int[] index = new int[1];
    int error = epanet.ENgetlinkindex(id, index);
    checkError(error);
    return index[0];
}
java
@Override
public EObject create(EClass eClass) {
    switch (eClass.getClassifierID()) {
        case XtypePackage.XFUNCTION_TYPE_REF: return createXFunctionTypeRef();
        case XtypePackage.XCOMPUTED_TYPE_REFERENCE: return createXComputedTypeReference();
        case XtypePackage.XIMPORT_SECTION: return createXImportSection();
        case XtypePackage.XIMPORT_DECLARATION: return createXImportDeclaration();
        default:
            throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
    }
}
java
@SuppressWarnings("rawtypes") public static <E extends Exception> DataSet loadCSV(final File csvFile, final long offset, final long count, final Try.Predicate<String[], E> filter, final Map<String, ? extends Type> columnTypeMap) throws UncheckedIOException, E { InputStream csvInputStream = null; try { csvInputStream = new FileInputStream(csvFile); return loadCSV(csvInputStream, offset, count, filter, columnTypeMap); } catch (IOException e) { throw new UncheckedIOException(e); } finally { IOUtil.closeQuietly(csvInputStream); } }
python
def alias(self, *alias, **kwargs):
    """
    Returns this column aliased with a new name or names (in the case of expressions that
    return more than one column, such as explode).

    :param alias: strings of desired column names (collects all positional arguments passed)
    :param metadata: a dict of information to be stored in ``metadata`` attribute of the
        corresponding :class: `StructField` (optional, keyword only argument)

    .. versionchanged:: 2.2
       Added optional ``metadata`` argument.

    >>> df.select(df.age.alias("age2")).collect()
    [Row(age2=2), Row(age2=5)]
    >>> df.select(df.age.alias("age3", metadata={'max': 99})).schema['age3'].metadata['max']
    99
    """
    metadata = kwargs.pop('metadata', None)
    assert not kwargs, 'Unexpected kwargs were passed: %s' % kwargs

    sc = SparkContext._active_spark_context
    if len(alias) == 1:
        if metadata:
            jmeta = sc._jvm.org.apache.spark.sql.types.Metadata.fromJson(
                json.dumps(metadata))
            return Column(getattr(self._jc, "as")(alias[0], jmeta))
        else:
            return Column(getattr(self._jc, "as")(alias[0]))
    else:
        if metadata:
            raise ValueError('metadata can only be provided for a single column')
        return Column(getattr(self._jc, "as")(_to_seq(sc, list(alias))))
java
public List<Group> getGroups(GroupFilter filter) throws GitLabApiException {
    return (getGroups(filter, getDefaultPerPage()).all());
}
java
private Set<String> getEnvironments(Properties props) {
    Set<String> environments = new HashSet<>();
    for (Object k : props.keySet()) {
        String environment = k.toString().split("\\.")[0];
        environments.add(environment);
    }
    return new TreeSet<>(environments);
}
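A quick hedged usage sketch (hypothetical property names, assuming the prefix before the first dot names the environment):

Properties props = new Properties();
props.setProperty("dev.db.url", "jdbc:h2:mem:dev");    // illustrative keys only
props.setProperty("prod.db.url", "jdbc:h2:mem:prod");
Set<String> envs = getEnvironments(props);  // sorted TreeSet: [dev, prod]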
java
public static final boolean contains(char[] characters, char[] array) {
    for (int i = array.length; --i >= 0;) {
        for (int j = characters.length; --j >= 0;) {
            if (array[i] == characters[j]) {
                return true;
            }
        }
    }
    return false;
}
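A short usage sketch (hypothetical call site, assuming the method is in scope):

char[] vowels = {'a', 'e', 'i', 'o', 'u'};
boolean b1 = contains(vowels, "rhythm".toCharArray()); // false: no vowel in the array
boolean b2 = contains(vowels, "hello".toCharArray());  // true: 'e' and 'o' match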
java
private InputStream getMostSpecificStream(
        String key, String l, String c, String v) {
    String filePath = baseName.replace('.', '/') + '/' + key
            + ((l == null) ? "" : ("_" + l))
            + ((c == null) ? "" : ("_" + c))
            + ((v == null) ? "" : ("_" + v))
            + ".text";
    // System.err.println("Seeking " + filePath);
    InputStream is = loader.getResourceAsStream(filePath);
    // N.b. If were using Class.getRes... instead of ClassLoader.getRes...
    // we would need to prefix the path with "/".
    return (is == null && l != null)
            ? getMostSpecificStream(key, ((c == null) ? null : l),
                    ((v == null) ? null : c), null)
            : is;
}
python
def get_samples(self, sample_count):
    """
    Fetch a number of samples from self.wave_cache

    Args:
        sample_count (int): Number of samples to fetch

    Returns: ndarray
    """
    if self.amplitude.value <= 0:
        return None
    # Build samples by rolling the period cache through the buffer
    rolled_array = numpy.roll(self.wave_cache,
                              -1 * self.last_played_sample)
    # Append remaining partial period
    full_count, remainder = divmod(sample_count, self.cache_length)
    final_subarray = rolled_array[:int(remainder)]
    return_array = numpy.concatenate((numpy.tile(rolled_array, full_count),
                                      final_subarray))
    # Keep track of where we left off to prevent popping between chunks
    self.last_played_sample = int(((self.last_played_sample + remainder) %
                                   self.cache_length))
    # Multiply output by amplitude
    return return_array * (self.amplitude.value *
                           self.amplitude_multiplier)
java
public InputStream getBinaryStream(final int columnIndex) throws SQLException {
    if (protocol.supportsPBMS()) {
        try {
            return getValueObject(columnIndex).getPBMSStream(protocol);
        } catch (QueryException e) {
            throw SQLExceptionMapper.get(e);
        } catch (IOException e) {
            throw SQLExceptionMapper.getSQLException("Could not read back the data using http", e);
        }
    }
    return getValueObject(columnIndex).getBinaryInputStream();
}
python
def body_encode(self, string):
    """Body-encode a string by converting it first to bytes.

    The type of encoding (base64 or quoted-printable) will be based on
    self.body_encoding.  If body_encoding is None, we assume the
    output charset is a 7bit encoding, so re-encoding the decoded
    string using the ascii codec produces the correct string version
    of the content.
    """
    if not string:
        return string
    if self.body_encoding is BASE64:
        if isinstance(string, str):
            string = string.encode(self.output_charset)
        return email.base64mime.body_encode(string)
    elif self.body_encoding is QP:
        # quoprimime.body_encode takes a string, but operates on it as if
        # it were a list of byte codes.  For a (minimal) history on why
        # this is so, see changeset 0cf700464177.  To correctly encode a
        # character set, then, we must turn it into pseudo bytes via the
        # latin1 charset, which will encode any byte as a single code point
        # between 0 and 255, which is what body_encode is expecting.
        if isinstance(string, str):
            string = string.encode(self.output_charset)
        string = string.decode('latin1')
        return email.quoprimime.body_encode(string)
    else:
        if isinstance(string, str):
            string = string.encode(self.output_charset).decode('ascii')
        return string
python
def _update_dPrxy(self):
    """Update `dPrxy`."""
    if 'kappa' in self.freeparams:
        scipy.copyto(self.dPrxy['kappa'], self.Prxy / self.kappa,
                     where=CODON_TRANSITION)
        _fill_diagonals(self.dPrxy['kappa'], self._diag_indices)
    if 'omega' in self.freeparams:
        scipy.copyto(self.dPrxy['omega'], self.Frxy_no_omega * self.Qxy,
                     where=CODON_NONSYN)
        _fill_diagonals(self.dPrxy['omega'], self._diag_indices)
    if 'beta' in self.freeparams:
        self.dPrxy['beta'].fill(0)
        with scipy.errstate(divide='raise', under='raise', over='raise',
                            invalid='ignore'):
            scipy.copyto(self.dPrxy['beta'],
                         self.Prxy * (1 / self.beta +
                                      (self.piAx_piAy_beta *
                                       (self.ln_piAx_piAy_beta / self.beta) /
                                       (1 - self.piAx_piAy_beta))),
                         where=CODON_NONSYN)
        scipy.copyto(self.dPrxy['beta'],
                     self.Prxy / self.beta * (1 - self.piAx_piAy_beta),
                     where=scipy.logical_and(
                         CODON_NONSYN,
                         scipy.fabs(1 - self.piAx_piAy_beta) < ALMOST_ZERO))
        _fill_diagonals(self.dPrxy['beta'], self._diag_indices)
    if 'eta' in self.freeparams:
        for i in range(N_NT - 1):
            for w in range(i, N_NT):
                scipy.copyto(self.dPrxy['eta'][i],
                             self.Prxy / (self.eta[i] - int(i == w)),
                             where=CODON_NT_MUT[w])
            _fill_diagonals(self.dPrxy['eta'][i], self._diag_indices)
python
def _add_attr_values_from_insert_to_original(original_code, insert_code, insert_code_list, attribute_name, op_list):
    """
    This function appends values of the attribute `attribute_name` of the inserted code to the original values,
    and changes indexes inside inserted code. If some bytecode instruction in the inserted code used to call
    argument number i, after modification it calls argument n + i, where n - length of the values in the original
    code. So it helps to avoid variables mixing between two pieces of code.

    :param original_code: code to modify
    :param insert_code: code to insert
    :param insert_code_list: bytes sequence of inserted code, which should be modified too
    :param attribute_name: name of attribute to modify ('co_names', 'co_consts' or 'co_varnames')
    :param op_list: sequence of bytecodes whose arguments should be changed
    :return: modified bytes sequence of the code to insert and new values of the attribute `attribute_name` for
        original code
    """
    orig_value = getattr(original_code, attribute_name)
    insert_value = getattr(insert_code, attribute_name)
    orig_names_len = len(orig_value)
    code_with_new_values = list(insert_code_list)
    offset = 0
    while offset < len(code_with_new_values):
        op = code_with_new_values[offset]
        if op in op_list:
            new_val = code_with_new_values[offset + 1] + orig_names_len
            if new_val > MAX_BYTE:
                code_with_new_values[offset + 1] = new_val & MAX_BYTE
                code_with_new_values = code_with_new_values[:offset] + [EXTENDED_ARG, new_val >> 8] + \
                                       code_with_new_values[offset:]
                offset += 2
            else:
                code_with_new_values[offset + 1] = new_val
        offset += 2
    new_values = orig_value + insert_value
    return bytes(code_with_new_values), new_values
python
def tail(self, fname, encoding, window, position=None):
    """Read last N lines from file fname."""
    if window <= 0:
        raise ValueError('invalid window %r' % window)

    encodings = ENCODINGS
    if encoding:
        encodings = [encoding] + ENCODINGS

    for enc in encodings:
        try:
            f = self.open(encoding=enc)
            if f:
                return self.tail_read(f, window, position=position)
            return False
        except IOError, err:
            if err.errno == errno.ENOENT:
                return []
            raise
        except UnicodeDecodeError:
            pass
java
static DoubleDistributionSummary get(Registry registry, Id id) {
    DoubleDistributionSummary instance = INSTANCES.get(id);
    if (instance == null) {
        final Clock c = registry.clock();
        DoubleDistributionSummary tmp = new DoubleDistributionSummary(c, id, RESET_FREQ);
        instance = INSTANCES.putIfAbsent(id, tmp);
        if (instance == null) {
            instance = tmp;
            registry.register(tmp);
        }
    }
    return instance;
}
python
def atlas_peer_get_zonefile_inventory_range( my_hostport, peer_hostport, bit_offset, bit_count, timeout=None, peer_table=None ):
    """
    Get the zonefile inventory bit vector for a given peer.
    The returned range will be [bit_offset, bit_offset+count]

    Update peer health information as well.

    bit_offset and bit_count are in bits.

    Return the bit vector on success (padded to the nearest byte with 0's).
    Return None if we couldn't contact the peer.
    """
    if timeout is None:
        timeout = atlas_inv_timeout()

    zf_inv = {}
    zf_inv_list = None

    host, port = url_to_host_port( peer_hostport )
    RPC = get_rpc_client_class()
    rpc = RPC( host, port, timeout=timeout, src=my_hostport )

    assert not atlas_peer_table_is_locked_by_me()

    zf_inv = None

    log.debug("Get zonefile inventory range %s-%s from %s" % (bit_offset, bit_count, peer_hostport))

    try:
        zf_inv = blockstack_get_zonefile_inventory( peer_hostport, bit_offset, bit_count, timeout=timeout, my_hostport=my_hostport, proxy=rpc )
    except (socket.timeout, socket.gaierror, socket.herror, socket.error), se:
        atlas_log_socket_error( "get_zonefile_inventory(%s, %s, %s)" % (peer_hostport, bit_offset, bit_count), peer_hostport, se )
        log.error("Failed to ask %s for zonefile inventory over %s-%s (socket-related error)" % (peer_hostport, bit_offset, bit_count))

    except Exception, e:
        if os.environ.get("BLOCKSTACK_DEBUG") == "1":
            log.exception(e)

        log.error("Failed to ask %s for zonefile inventory over %s-%s" % (peer_hostport, bit_offset, bit_count))

    atlas_peer_update_health( peer_hostport, (zf_inv is not None and zf_inv.has_key('status') and zf_inv['status']), peer_table=peer_table )

    if zf_inv is None:
        log.error("No inventory given for %s-%s from %s" % (bit_offset, bit_count, peer_hostport))
        return None

    if 'error' in zf_inv:
        log.error("Failed to get inventory for %s-%s from %s: %s" % (bit_offset, bit_count, peer_hostport, zf_inv['error']))
        return None

    else:
        inv_str = atlas_inventory_to_string(zf_inv['inv'])
        if len(inv_str) > 40:
            inv_str = inv_str[:40] + "..."

        log.debug("Zonefile inventory for %s (%s-%s) is '%s'" % (peer_hostport, bit_offset, bit_count, inv_str))
        return zf_inv['inv']
python
def OnButtonApply(self, event):
    """Updates the preview_textctrl"""
    try:
        dialect, self.has_header = self.csvwidgets.get_dialect()
    except TypeError:
        event.Skip()
        return 0

    self.preview_textctrl.fill(data=self.data, dialect=dialect)

    event.Skip()
java
public static int primitiveToGL(
    final JCGLPrimitives p) {
    switch (p) {
        case PRIMITIVE_LINES:
            return GL11.GL_LINES;
        case PRIMITIVE_LINE_LOOP:
            return GL11.GL_LINE_LOOP;
        case PRIMITIVE_TRIANGLES:
            return GL11.GL_TRIANGLES;
        case PRIMITIVE_TRIANGLE_STRIP:
            return GL11.GL_TRIANGLE_STRIP;
        case PRIMITIVE_POINTS:
            return GL11.GL_POINTS;
    }
    throw new UnreachableCodeException();
}
python
def run_simulation(c1, c2):
    """ using character and planet, run the simulation """
    print('running simulation...')
    traits = character.CharacterCollection(character.fldr)
    c1 = traits.generate_random_character()
    c2 = traits.generate_random_character()
    print(c1)
    print(c2)
    rules = battle.BattleRules(battle.rules_file)
    b = battle.Battle(c1, c2, traits, rules, print_console='Yes')
    print(b.status)
python
def ReadClientFullInfo(self, client_id):
    """Reads full client information for a single client.

    Args:
        client_id: A GRR client id string, e.g. "C.ea3b2b71840d6fa7".

    Returns:
        A `ClientFullInfo` instance for given client.

    Raises:
        UnknownClientError: if no client with such id was found.
    """
    result = self.MultiReadClientFullInfo([client_id])
    try:
        return result[client_id]
    except KeyError:
        raise UnknownClientError(client_id)
java
public Element createElementNS(String namespaceURI, String qualifiedName)
        throws DOMException {
    error(XMLErrorResources.ER_FUNCTION_NOT_SUPPORTED);
    return null;
}
python
def parse_path(path):
    """Parse path string."""
    version, project = path[1:].split('/')
    return dict(version=int(version), project=project)
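A minimal usage sketch, assuming the path follows the "/<version>/<project>" shape the split expects:

>>> parse_path('/3/myproject')  # hypothetical input
{'version': 3, 'project': 'myproject'}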
java
public static TSSLTransportParameters getTSSLTransportParameters() {
    String SSLTrustStore = System.getProperty("ssl.truststore");
    if (SSLTrustStore == null)
        return null;

    String SSLTrustStorePassword = System.getProperty("ssl.truststore.password");
    String SSLProtocol = System.getProperty("ssl.protocol");
    String SSLStoreType = System.getProperty("ssl.store.type");
    String SSLCipherSuites = System.getProperty("ssl.cipher.suites");

    if (SSLProtocol == null)
        SSLProtocol = SSL_PROTOCOL;
    if (SSLStoreType == null)
        SSLStoreType = SSL_STORE_TYPE;

    String[] cipherSuites = null;
    if (SSLCipherSuites != null)
        cipherSuites = SSLCipherSuites.split(",");

    TSSLTransportParameters params =
            new TSSLTransportFactory.TSSLTransportParameters(SSLProtocol, cipherSuites);
    params.setTrustStore(SSLTrustStore, SSLTrustStorePassword, TRUST_MANAGER_TYPE, SSLStoreType);
    return params;
}
python
def get_port(self, id_or_uri, port_id_or_uri):
    """
    Gets an interconnect port.

    Args:
        id_or_uri: Can be either the interconnect id or uri.
        port_id_or_uri: The interconnect port id or uri.

    Returns:
        dict: The interconnect port.
    """
    uri = self._client.build_subresource_uri(id_or_uri, port_id_or_uri, "ports")
    return self._client.get(uri)
java
public static void populateModuleSpecWithAppImports(ModuleSpec.Builder moduleSpecBuilder,
        ClassLoader appClassLoader, Set<String> appPackages) {
    Objects.requireNonNull(moduleSpecBuilder, "moduleSpecBuilder");
    Objects.requireNonNull(appClassLoader, "classLoader");

    moduleSpecBuilder.addDependency(
            DependencySpec.createClassLoaderDependencySpec(appClassLoader, appPackages, false));
}
python
def consolidate(self, volume, source, dest, *args, **kwargs):
    """
    Consolidate will move a volume of liquid from a list of sources
    to a single target location. See :any:`Transfer` for details
    and a full list of optional arguments.

    Returns
    -------
    This instance of :class:`Pipette`.

    Examples
    --------
    ..
    >>> from opentrons import instruments, labware, robot # doctest: +SKIP
    >>> robot.reset() # doctest: +SKIP
    >>> plate = labware.load('96-flat', 'A3') # doctest: +SKIP
    >>> p300 = instruments.P300_Single(mount='left') # doctest: +SKIP
    >>> p300.consolidate(50, plate.cols[0], plate[1]) # doctest: +SKIP
    """
    kwargs['mode'] = 'consolidate'
    kwargs['mix_before'] = (0, 0)
    kwargs['air_gap'] = 0
    kwargs['disposal_vol'] = 0

    args = [volume, source, dest, *args]
    return self.transfer(*args, **kwargs)
java
public static synchronized OmemoManager getInstanceFor(XMPPConnection connection) {
    TreeMap<Integer, OmemoManager> managers = INSTANCES.get(connection);
    if (managers == null) {
        managers = new TreeMap<>();
        INSTANCES.put(connection, managers);
    }

    OmemoManager manager;
    if (managers.size() == 0) {
        manager = new OmemoManager(connection, UNKNOWN_DEVICE_ID);
        managers.put(UNKNOWN_DEVICE_ID, manager);
    } else {
        manager = managers.get(managers.firstKey());
    }

    return manager;
}
java
public static boolean writeDocumentToFile(Document doc, String localFile) {
    try {
        TransformerFactory transfact = TransformerFactory.newInstance();
        Transformer trans = transfact.newTransformer();
        trans.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, YES);
        trans.setOutputProperty(OutputKeys.INDENT, YES);
        trans.transform(new DOMSource(doc), new StreamResult(new File(localFile)));
        return true;
    } catch (TransformerConfigurationException ex) {
        LOG.warn(ERROR_WRITING, localFile, ex);
        return false;
    } catch (TransformerException ex) {
        LOG.warn(ERROR_WRITING, localFile, ex);
        return false;
    }
}
java
public void printPattern(int[] row, int[] column, int offset) {
    int size = row.length;
    if (size != column.length)
        throw new IllegalArgumentException(
                "All arrays must be of the same size");
    for (int i = 0; i < size; ++i)
        format(Locale.ENGLISH, "%10d %10d%n", row[i] + offset, column[i] + offset);
}
python
def from_name(cls, name):
    """Retrieve webacc id associated with a webacc name."""
    result = cls.list({'items_per_page': 500})
    webaccs = {}
    for webacc in result:
        webaccs[webacc['name']] = webacc['id']
    return webaccs.get(name)
python
def unscale_samples(params, bounds):
    """Rescale samples from arbitrary bounds back to [0,1] range

    Arguments
    ---------
    bounds : list
        list of lists of dimensions num_params-by-2
    params : numpy.ndarray
        numpy array of dimensions N-by-num_params, where N is the number of samples
    """
    # Check bounds are legal (upper bound is greater than lower bound)
    b = np.array(bounds)
    lower_bounds = b[:, 0]
    upper_bounds = b[:, 1]

    if np.any(lower_bounds >= upper_bounds):
        raise ValueError("Bounds are not legal")

    # This scales the samples in-place, by using the optional output
    # argument for the numpy ufunctions
    # The calculation is equivalent to:
    # (sample - lower_bound) / (upper_bound - lower_bound)
    np.divide(np.subtract(params, lower_bounds, out=params),
              np.subtract(upper_bounds, lower_bounds),
              out=params)
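A small usage sketch with illustrative values only; note that the out= arguments make the rescaling happen in place, so params must be a float array:

import numpy as np

params = np.array([[0.0, 5.0],
                   [10.0, 7.5]])      # two samples, two parameters
bounds = [[0, 10], [5, 10]]
unscale_samples(params, bounds)
# params is now [[0.0, 0.0],
#                [1.0, 0.5]]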
java
public static String getOutputPart(ProcessOutput processOutput, String prefix) {
    //get output
    String output = processOutput.getOutputText();

    if (output != null) {
        //set flag
        boolean validOutput = false;

        int index = output.indexOf(prefix);
        if (index != -1) {
            //get index
            index = index + prefix.length();

            if (output.length() > index) {
                //get output
                output = output.substring(index);
                output = output.trim();

                index = output.indexOf("\n");
                if (index != -1) {
                    output = output.substring(0, index);
                    output = output.trim();
                }

                if (output.length() > 0) {
                    //set flag
                    validOutput = true;
                }
            }
        }

        if (!validOutput) {
            output = null;
        }
    }

    return output;
}
java
public SqlResultSetMapping<OrmDescriptor> getOrCreateSqlResultSetMapping() {
    List<Node> nodeList = model.get("sql-result-set-mapping");
    if (nodeList != null && nodeList.size() > 0) {
        return new SqlResultSetMappingImpl<OrmDescriptor>(this, "sql-result-set-mapping", model, nodeList.get(0));
    }
    return createSqlResultSetMapping();
}
java
public static boolean runWithSleepUninterrupted(long milliseconds, Runnable runnable) {
    Assert.isTrue(milliseconds > 0, "Milliseconds [%d] must be greater than 0", milliseconds);
    runnable.run();
    return safeSleep(milliseconds);
}
python
def _netname(name: str) -> dict:
    '''resolve network name, required because some providers use shortnames
    and other use longnames.'''
    try:
        long = net_query(name).name
        short = net_query(name).shortname
    except AttributeError:
        raise UnsupportedNetwork('''This blockchain network is not supported by the pypeerassets,
                                 check networks.py for list of supported networks.''')

    return {'long': long, 'short': short}
python
def discoverable(self, boolean):
    """Pass through helper function for flag function."""
    if boolean:
        r = self.flag({"flag": "make_discoverable"})
    else:
        r = self.flag({"flag": "make_not_discoverable"})
    return r
python
def get_function_argspec(func, is_class_method=None):
    '''
    A small wrapper around getargspec that also supports callable classes

    :param is_class_method: Pass True if you are sure that the function being passed
                            is a class method. The reason for this is that on Python 3
                            ``inspect.ismethod`` only returns ``True`` for bound methods,
                            while on Python 2, it returns ``True`` for bound and unbound
                            methods. So, on Python 3, in case of a class method, you'd
                            need the class to which the function belongs to be instantiated
                            and this is not always wanted.
    '''
    if not callable(func):
        raise TypeError('{0} is not a callable'.format(func))

    if six.PY2:
        if is_class_method is True:
            aspec = inspect.getargspec(func)
            del aspec.args[0]  # self
        elif inspect.isfunction(func):
            aspec = inspect.getargspec(func)
        elif inspect.ismethod(func):
            aspec = inspect.getargspec(func)
            del aspec.args[0]  # self
        elif isinstance(func, object):
            aspec = inspect.getargspec(func.__call__)
            del aspec.args[0]  # self
        else:
            raise TypeError(
                'Cannot inspect argument list for \'{0}\''.format(func)
            )
    else:
        if is_class_method is True:
            aspec = _getargspec(func)
            del aspec.args[0]  # self
        elif inspect.isfunction(func):
            aspec = _getargspec(func)  # pylint: disable=redefined-variable-type
        elif inspect.ismethod(func):
            aspec = _getargspec(func)
            del aspec.args[0]  # self
        elif isinstance(func, object):
            aspec = _getargspec(func.__call__)
            del aspec.args[0]  # self
        else:
            raise TypeError(
                'Cannot inspect argument list for \'{0}\''.format(func)
            )

    return aspec
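A hedged usage sketch on Python 3, assuming _getargspec mirrors inspect.getargspec's (args, varargs, keywords, defaults) result:

def f(a, b, c=1):
    pass

spec = get_function_argspec(f)
# spec.args == ['a', 'b', 'c'], spec.defaults == (1,)

class Greeter(object):
    def __call__(self, name):
        return 'hi ' + name

spec = get_function_argspec(Greeter())
# the callable-class path inspects __call__ and strips 'self',
# leaving spec.args == ['name']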
java
@Override
public ConfirmPublicVirtualInterfaceResult confirmPublicVirtualInterface(ConfirmPublicVirtualInterfaceRequest request) {
    request = beforeClientExecution(request);
    return executeConfirmPublicVirtualInterface(request);
}
python
def _load_w2v(model_file=_f_model, binary=True):
    '''
    load word2vec model
    '''
    if not os.path.exists(model_file):
        print("os.path : ", os.path)
        raise Exception("Model file [%s] does not exist." % model_file)
    return KeyedVectors.load_word2vec_format(
        model_file, binary=binary, unicode_errors='ignore')
python
def get_ftr(self):
    """ Process footer and return the processed string """
    if not self.ftr:
        return self.ftr

    width = self.size()[0]
    return re.sub(
        "%time",
        "%s\n" % time.strftime("%H:%M:%S"),
        self.ftr).rjust(width)
python
def db_insert_record(self, table_name, columns):
    """Insert records into DB.

    Args:
        table_name (str): The name of the table.
        columns (list): List of columns for insert statement.
    """
    bindings = ('?,' * len(columns)).strip(',')
    values = [None] * len(columns)
    sql = 'INSERT INTO {} ({}) VALUES ({})'.format(table_name, ', '.join(columns), bindings)
    cur = self.db_conn.cursor()
    cur.execute(sql, values)
java
@Override
public synchronized CommsServerByteBuffer allocate() {
    if (tc.isEntryEnabled())
        SibTr.entry(this, tc, "allocate");

    CommsServerByteBuffer buff = (CommsServerByteBuffer) super.allocate();

    if (tc.isEntryEnabled())
        SibTr.exit(this, tc, "allocate", buff);
    return buff;
}
python
def query(self, q, data=None, union=True, limit=None):
    """
    Query your database with a raw string.

    Parameters
    ----------
    q: str
        Query string to execute
    data: list, dict
        Optional argument for handlebars-queries. Data will be passed to
        the template and rendered using handlebars.
    union: bool
        Whether or not "UNION ALL" handlebars templates. This will return
        any handlebars queries as a single data frame.
    limit: int
        Number of records to return

    Examples
    --------
    >>> from db import DemoDB
    >>> db = DemoDB()

    db.query("select * from Track").head(2)
       TrackId                                     Name  AlbumId  MediaTypeId  \\
    0        1  For Those About To Rock (We Salute You)        1            1
    1        2                        Balls to the Wall        2            2
    <BLANKLINE>
       GenreId                                   Composer  Milliseconds     Bytes  \\
    0        1  Angus Young, Malcolm Young, Brian Johnson        343719  11170334
    1        1                                       None        342562   5510424
    <BLANKLINE>
       UnitPrice
    0       0.99
    1       0.99

    db.query("select * from Track", limit=10)
       TrackId                                     Name  AlbumId  MediaTypeId  \
    0        1  For Those About To Rock (We Salute You)        1            1
    1        2                        Balls to the Wall        2            2
    2        3                          Fast As a Shark        3            2
    3        4                        Restless and Wild        3            2
    4        5                     Princess of the Dawn        3            2
    5        6                    Put The Finger On You        1            1
    6        7                          Let's Get It Up        1            1
    7        8                         Inject The Venom        1            1
    8        9                               Snowballed        1            1
    9       10                               Evil Walks        1            1
       GenreId                                           Composer  Milliseconds  \
    0        1          Angus Young, Malcolm Young, Brian Johnson        343719
    1        1                                               None        342562
    2        1  F. Baltes, S. Kaufman, U. Dirkscneider & W. Ho...        230619
    3        1  F. Baltes, R.A. Smith-Diesel, S. Kaufman, U. D...        252051
    4        1                         Deaffy & R.A. Smith-Diesel        375418
    5        1          Angus Young, Malcolm Young, Brian Johnson        205662
    6        1          Angus Young, Malcolm Young, Brian Johnson        233926
    7        1          Angus Young, Malcolm Young, Brian Johnson        210834
    8        1          Angus Young, Malcolm Young, Brian Johnson        203102
    9        1          Angus Young, Malcolm Young, Brian Johnson        263497
          Bytes  UnitPrice
    0  11170334       0.99
    1   5510424       0.99
    2   3990994       0.99
    3   4331779       0.99
    4   6290521       0.99
    5   6713451       0.99
    6   7636561       0.99
    7   6852860       0.99
    8   6599424       0.99
    9   8611245       0.99
    >>> q = '''
    ... SELECT
    ...     a.Title,
    ...     t.Name,
    ...     t.UnitPrice
    ... FROM
    ...     Album a
    ... INNER JOIN
    ...     Track t
    ...         on a.AlbumId = t.AlbumId;
    ... '''
    >>> len(db.query(q))
    3503
    db.query(q, limit=10)
                                       Title  \
    0  For Those About To Rock We Salute You
    1                      Balls to the Wall
    2                      Restless and Wild
    3                      Restless and Wild
    4                      Restless and Wild
    5  For Those About To Rock We Salute You
    6  For Those About To Rock We Salute You
    7  For Those About To Rock We Salute You
    8  For Those About To Rock We Salute You
    9  For Those About To Rock We Salute You
                                          Name  UnitPrice
    0  For Those About To Rock (We Salute You)       0.99
    1                        Balls to the Wall       0.99
    2                          Fast As a Shark       0.99
    3                        Restless and Wild       0.99
    4                     Princess of the Dawn       0.99
    5                    Put The Finger On You       0.99
    6                          Let's Get It Up       0.99
    7                         Inject The Venom       0.99
    8                               Snowballed       0.99
    9                               Evil Walks       0.99
    >>> template = '''
    ... SELECT
    ...     '{{ name }}' as table_name,
    ...     COUNT(*) as cnt
    ... FROM
    ...     {{ name }}
    ... GROUP BY
    ...     table_name
    ... '''
    >>> data = [
    ...     {"name": "Album"},
    ...     {"name": "Artist"},
    ...     {"name": "Track"}
    ... ]
    >>> db.query(q, data=data)
      table_name   cnt
    0      Album   347
    1     Artist   275
    2      Track  3503
    >>> q = '''
    ... SELECT
    ... {{#cols}}
    ...     {{#if @last}}
    ...         {{ . }}
    ...     {{else}}
    ...         {{ . }} ,
    ...     {{/if}}
    ... {{/cols}}
    ... FROM
    ...     Album;
    ... '''
    >>> data = {"cols": ["AlbumId", "Title", "ArtistId"]}
    >>> len(db.query(q, data=data, union=False))
    347
    db.query(q, data=data, union=False)
       AlbumId                                  Title  ArtistId
    0        1  For Those About To Rock We Salute You         1
    1        2                      Balls to the Wall         2
    2        3                      Restless and Wild         2
    3        4                      Let There Be Rock         1
    4        5                               Big Ones         3
    """
    if data:
        q = self._apply_handlebars(q, data, union)
    if limit:
        q = self._assign_limit(q, limit)
    return pd.read_sql(q, self.con)
java
public Observable<ServiceResponse<Page<VirtualNetworkGatewayConnectionListEntityInner>>> listConnectionsSinglePageAsync(final String resourceGroupName, final String virtualNetworkGatewayName) {
    if (resourceGroupName == null) {
        throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
    }
    if (virtualNetworkGatewayName == null) {
        throw new IllegalArgumentException("Parameter virtualNetworkGatewayName is required and cannot be null.");
    }
    if (this.client.subscriptionId() == null) {
        throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
    }
    final String apiVersion = "2018-08-01";
    return service.listConnections(resourceGroupName, virtualNetworkGatewayName, this.client.subscriptionId(), apiVersion, this.client.acceptLanguage(), this.client.userAgent())
        .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<VirtualNetworkGatewayConnectionListEntityInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<VirtualNetworkGatewayConnectionListEntityInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<VirtualNetworkGatewayConnectionListEntityInner>> result = listConnectionsDelegate(response);
                    return Observable.just(new ServiceResponse<Page<VirtualNetworkGatewayConnectionListEntityInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
}
python
def scp_push(self, src, dest, progress=False, preserve_times=True):
    """ Purpose: Makes an SCP push request for the specified file(s)/dir.

    @param src: string containing the source file or directory
    @type src: str
    @param dest: destination string of where to put the file(s)/dir
    @type dest: str
    @param progress: set to `True` to have the progress callback be
                   | printed as the operation is copying. Can also pass
                   | a function pointer to handoff the progress callback
                   | elsewhere.
    @type progress: bool or function pointer
    @param preserve_times: Set to false to have the times of the copied
                         | files set at the time of copy.
    @type preserve_times: bool

    @returns: `True` if the copy succeeds.
    @rtype: bool
    """
    # set up the progress callback if they want to see the process
    if progress is True:
        self._scp._progress = self._copy_status
    # redirect to another function
    elif hasattr(progress, '__call__'):
        self._scp._progress = progress
    else:  # no progress callback
        self._scp._progress = None
    # push the file(s)
    self._scp.put(src, dest, recursive=True, preserve_times=preserve_times)
    self._filename = None
    return True
java
public String findCmisName( String jcrName ) {
    for (Relation aList : list) {
        if (aList.jcrName.equals(jcrName)) {
            return aList.cmisName;
        }
    }
    return jcrName;
}
java
public void setMessage(String message) {
    requireArgument(message != null && !message.isEmpty(), "Message cannot be null or empty.");
    this.message = message;
}
python
def ApprovalFind(object_id, token=None):
    """Find approvals issued for a specific client."""
    user = getpass.getuser()
    object_id = rdfvalue.RDFURN(object_id)
    try:
        approved_token = security.Approval.GetApprovalForObject(
            object_id, token=token, username=user)
        print("Found token %s" % str(approved_token))
        return approved_token
    except access_control.UnauthorizedAccess:
        print("No token available for access to %s" % object_id)
python
def apply_backspaces_and_linefeeds(text):
    """
    Interpret backspaces and linefeeds in text like a terminal would.

    Interpret text like a terminal by removing backspace and linefeed
    characters and applying them line by line.

    If the final line ends with a carriage return it keeps it to be
    concatenable with the next output chunk.
    """
    orig_lines = text.split('\n')
    orig_lines_len = len(orig_lines)
    new_lines = []
    for orig_line_idx, orig_line in enumerate(orig_lines):
        chars, cursor = [], 0
        orig_line_len = len(orig_line)
        for orig_char_idx, orig_char in enumerate(orig_line):
            if orig_char == '\r' and (orig_char_idx != orig_line_len - 1 or
                                      orig_line_idx != orig_lines_len - 1):
                cursor = 0
            elif orig_char == '\b':
                cursor = max(0, cursor - 1)
            else:
                if (orig_char == '\r' and
                        orig_char_idx == orig_line_len - 1 and
                        orig_line_idx == orig_lines_len - 1):
                    cursor = len(chars)
                if cursor == len(chars):
                    chars.append(orig_char)
                else:
                    chars[cursor] = orig_char
                cursor += 1
        new_lines.append(''.join(chars))
    return '\n'.join(new_lines)
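A few illustrative calls (hypothetical inputs) showing the carriage-return and backspace handling:

>>> apply_backspaces_and_linefeeds('abc\rx')     # '\r' rewinds to column 0
'xbc'
>>> apply_backspaces_and_linefeeds('ab\bc')      # '\b' steps back one column
'ac'
>>> apply_backspaces_and_linefeeds('50%\r100%')  # progress-bar style overwrite
'100%'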
python
def _compute_H(self, t, index, t2, index2, update_derivatives=False, stationary=False): """Helper function for computing part of the ode1 covariance function. :param t: first time input. :type t: array :param index: Indices of first output. :type index: array of int :param t2: second time input. :type t2: array :param index2: Indices of second output. :type index2: array of int :param update_derivatives: whether to update derivatives (default is False) :return h: result of this subcomponent of the kernel for the given values. :rtype: ndarray """ if stationary: raise NotImplementedError("Error, stationary version of this covariance not yet implemented.") # Vector of decays and delays associated with each output. Decay = self.decay[index] Decay2 = self.decay[index2] t_mat = t[:, None] t2_mat = t2[None, :] if self.delay is not None: Delay = self.delay[index] Delay2 = self.delay[index2] t_mat -= Delay[:, None] t2_mat -= Delay2[None, :] diff_t = (t_mat - t2_mat) inv_sigma_diff_t = 1./self.sigma*diff_t half_sigma_decay_i = 0.5*self.sigma*Decay[:, None] ln_part_1, sign1 = ln_diff_erfs(half_sigma_decay_i + t2_mat/self.sigma, half_sigma_decay_i - inv_sigma_diff_t, return_sign=True) ln_part_2, sign2 = ln_diff_erfs(half_sigma_decay_i, half_sigma_decay_i - t_mat/self.sigma, return_sign=True) h = sign1*np.exp(half_sigma_decay_i*half_sigma_decay_i - Decay[:, None]*diff_t + ln_part_1 - np.log(Decay[:, None] + Decay2[None, :])) h -= sign2*np.exp(half_sigma_decay_i*half_sigma_decay_i - Decay[:, None]*t_mat - Decay2[None, :]*t2_mat + ln_part_2 - np.log(Decay[:, None] + Decay2[None, :])) if update_derivatives: sigma2 = self.sigma*self.sigma # Update ith decay gradient dh_ddecay = ((0.5*Decay[:, None]*sigma2*(Decay[:, None] + Decay2[None, :]) - 1)*h + (-diff_t*sign1*np.exp( half_sigma_decay_i*half_sigma_decay_i - Decay[:, None]*diff_t + ln_part_1) + t_mat*sign2*np.exp( half_sigma_decay_i*half_sigma_decay_i - Decay[:, None]*t_mat - Decay2[None, :]*t2_mat + ln_part_2)) + self.sigma/np.sqrt(np.pi)*( -np.exp(-diff_t*diff_t/sigma2) + np.exp(-t2_mat*t2_mat/sigma2 - Decay[:, None]*t_mat) + np.exp(-t_mat*t_mat/sigma2 - Decay2[None, :]*t2_mat) - np.exp(-(Decay[:, None]*t_mat + Decay2[None, :]*t2_mat)))) self._dh_ddecay = (dh_ddecay/(Decay[:, None] + Decay2[None, :])).real # Update jth decay gradient dh_ddecay2 = (t2_mat*sign2 *np.exp(half_sigma_decay_i*half_sigma_decay_i - (Decay[:, None]*t_mat + Decay2[None, :]*t2_mat) + ln_part_2) - h) # fixed: the original assigned dh_ddecay here, clobbering the jth # gradient with the ith one self._dh_ddecay2 = (dh_ddecay2/(Decay[:, None] + Decay2[None, :])).real # Update sigma gradient self._dh_dsigma = (half_sigma_decay_i*Decay[:, None]*h + 2/(np.sqrt(np.pi)*(Decay[:, None] + Decay2[None, :])) *((-diff_t/sigma2 - Decay[:, None]/2) *np.exp(-diff_t*diff_t/sigma2) + (-t2_mat/sigma2 + Decay[:, None]/2) *np.exp(-t2_mat*t2_mat/sigma2 - Decay[:, None]*t_mat) - (-t_mat/sigma2 - Decay[:, None]/2) *np.exp(-t_mat*t_mat/sigma2 - Decay2[None, :]*t2_mat) - Decay[:, None]/2 *np.exp(-(Decay[:, None]*t_mat + Decay2[None, :]*t2_mat)))) return h
python
def config(self): ''' Read config automatically if required ''' if self.__config is None: config_path = self.locate_config() if config_path: self.__config = self.read_file(config_path) self.__config_path = config_path return self.__config
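A hypothetical usage sketch of the lazy-load-and-cache behaviour shown above (the Settings class name stands in for whatever class this property belongs to; locate_config and read_file are its helpers):

settings = Settings()
cfg = settings.config          # first access: locates and reads the config file
assert settings.config is cfg  # later accesses return the cached object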
java
public StrBuilder deleteAll(final String str) { final int len = (str == null ? 0 : str.length()); if (len > 0) { int index = indexOf(str, 0); while (index >= 0) { deleteImpl(index, index + len, len); index = indexOf(str, index); } } return this; }
python
def auth_app_id(self, app_id, user_id, mount_point='app-id', use_token=True): """POST /auth/<mount point>/login :param app_id: the app ID to authenticate with :type app_id: str :param user_id: the user ID to authenticate with :type user_id: str :param mount_point: the path the app-id auth backend is mounted on :type mount_point: str :param use_token: if True, the client token from the login response is stored on this client object :type use_token: bool :return: the JSON response of the login request :rtype: dict """ params = { 'app_id': app_id, 'user_id': user_id, } return self.login('/v1/auth/{0}/login'.format(mount_point), json=params, use_token=use_token)
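A hypothetical call sketch, assuming an hvac-style client constructed elsewhere (the URL is a placeholder, and the response layout shown is the standard Vault login shape; note the app-id backend has long been deprecated in favour of AppRole):

client = Client(url='https://vault.example.com:8200')
result = client.auth_app_id('my-app-id', 'my-user-id')
print(result['auth']['client_token'])  # also stored on the client when use_token=True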
java
public Locale getLocaleOrDefault(RouteContext routeContext) { String language = getLanguageOrDefault(routeContext); return Locale.forLanguageTag(language); }
java
static Writer getWriter(OutputStream output, String encoding) throws UnsupportedEncodingException { for (int i = 0; i < _encodings.length; ++i) { if (_encodings[i].name.equalsIgnoreCase(encoding)) { try { String javaName = _encodings[i].javaName; OutputStreamWriter osw = new OutputStreamWriter(output,javaName); return osw; } catch (java.lang.IllegalArgumentException iae) // java 1.1.8 { // keep trying } catch (UnsupportedEncodingException usee) { // keep trying } } } try { return new OutputStreamWriter(output, encoding); } catch (java.lang.IllegalArgumentException iae) // java 1.1.8 { throw new UnsupportedEncodingException(encoding); } }
python
def _resolve_dut_count(self): """ Calculates total amount of resources required and their types. :return: Nothing, modifies _dut_count, _hardware_count and _process_count :raises: ValueError if total count does not match counts of types separately. """ self._dut_count = len(self._dut_requirements) self._resolve_process_count() self._resolve_hardware_count() if self._dut_count != self._hardware_count + self._process_count: raise ValueError("Missing or invalid type fields in dut configuration!")
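A standalone sketch of the invariant this method enforces, with hypothetical requirement dicts standing in for the private attributes the real method reads from self:

requirements = [{'type': 'hardware'}, {'type': 'process'}, {'type': 'hardware'}]
dut_count = len(requirements)
hardware_count = sum(1 for r in requirements if r.get('type') == 'hardware')
process_count = sum(1 for r in requirements if r.get('type') == 'process')
if dut_count != hardware_count + process_count:
    raise ValueError("Missing or invalid type fields in dut configuration!")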
java
@Override public void sawOpcode(int seen) { if (seen == Const.INVOKEINTERFACE) { String clsName = getClassConstantOperand(); String methodName = getNameConstantOperand(); if (queryClasses.contains(clsName) && queryMethods.contains(methodName)) { queryLocations.add(Integer.valueOf(getPC())); } } else if (OpcodeUtils.isBranch(seen)) { int branchTarget = getBranchTarget(); int pc = getPC(); if (branchTarget < pc) { loops.add(new LoopLocation(branchTarget, pc)); } } }
python
def nsarg_completions( completion_text: str, entity_types: list, bel_spec: BELSpec, namespace: str, species_id: str, bel_fmt: str, size: int, ): """Namespace completions Args: completion_text entity_types: used to filter namespace search results bel_spec: used to search default namespaces namespace: used to filter namespace search results species_id: used to filter namespace search results bel_fmt: used to select full name or abbrev for default namespaces size: how many completions to return Results: list of replacement text objects """ minimal_nsarg_completion_len = 1 species = [species_id] namespaces = [namespace] replace_list = [] if len(completion_text) >= minimal_nsarg_completion_len: # Use BEL.bio API module if running bel module in BEL.bio API, otherwise call BEL.bio API endpoint # is there a better way to handle this? url = f'{config["bel_api"]["servers"]["api_url"]}/terms/completions/{url_path_param_quoting(completion_text)}' params = { "size": size, "entity_types": entity_types, "namespaces": namespaces, "species": species, } r = get_url(url, params=params) if r.status_code == 200: ns_completions = r.json() else: log.error(f"Status code of {r.status_code} for {url}") ns_completions = {} for complete in ns_completions.get("completions", []): replace_list.append( { "replacement": complete["id"], "label": f"{complete['id']} ({complete['label']})", "highlight": complete["highlight"][-1], "type": "NSArg", } ) # Check default namespaces for entity_type in entity_types: default_namespace = bel_spec["namespaces"].get(entity_type, []) if default_namespace: for obj in default_namespace["info"]: replacement = None if bel_fmt == "long" and re.match( completion_text, obj["name"], re.IGNORECASE ): replacement = obj["name"] elif bel_fmt in ["short", "medium"] and re.match( completion_text, obj["abbreviation"], re.IGNORECASE ): replacement = obj["abbreviation"] if replacement: highlight = replacement.replace( completion_text, f"<em>{completion_text}</em>" ) replace_list.insert( 0, { "replacement": replacement, "label": replacement, "highlight": highlight, "type": "NSArg", }, ) return replace_list[:size]
python
def surface_2D(num_lat=90, num_lon=180, water_depth=10., lon=None, lat=None, **kwargs): """Creates a 2D slab ocean Domain in latitude and longitude with uniform water depth. Domain has a single heat capacity according to the specified water depth. **Function-call argument** \n :param int num_lat: number of latitude points [default: 90] :param int num_lon: number of longitude points [default: 180] :param float water_depth: depth of the slab ocean in meters [default: 10.] :param lat: specification for latitude axis (optional) :type lat: :class:`~climlab.domain.axis.Axis` or latitude array :param lon: specification for longitude axis (optional) :type lon: :class:`~climlab.domain.axis.Axis` or longitude array :raises: :exc:`ValueError` if `lat` is given but is neither an Axis nor a latitude array. :raises: :exc:`ValueError` if `lon` is given but is neither an Axis nor a longitude array. :returns: surface domain :rtype: :class:`SlabOcean` :Example: :: >>> from climlab import domain >>> sfc = domain.surface_2D(num_lat=36, num_lon=72) >>> print sfc climlab Domain object with domain_type=ocean and shape=(36, 72, 1) """ if lat is None: latax = Axis(axis_type='lat', num_points=num_lat) elif isinstance(lat, Axis): latax = lat else: try: latax = Axis(axis_type='lat', points=lat) except Exception: raise ValueError('lat must be Axis object or latitude array') if lon is None: lonax = Axis(axis_type='lon', num_points=num_lon) elif isinstance(lon, Axis): lonax = lon else: try: lonax = Axis(axis_type='lon', points=lon) except Exception: raise ValueError('lon must be Axis object or longitude array') depthax = Axis(axis_type='depth', bounds=[water_depth, 0.]) axes = {'lat': latax, 'lon': lonax, 'depth': depthax} slab = SlabOcean(axes=axes, **kwargs) return slab
java
public final Queue pauseQueue(QueueName name) { PauseQueueRequest request = PauseQueueRequest.newBuilder().setName(name == null ? null : name.toString()).build(); return pauseQueue(request); }