language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
/**
 * Verifies that the out-parameter at {@code parameterIndex} was registered as either
 * {@code type1} or {@code type2} before the corresponding typed getter is used.
 *
 * @param parameterIndex 1-based parameter index
 * @param type1 first acceptable {@code java.sql.Types} code
 * @param type2 second acceptable {@code java.sql.Types} code
 * @param getName name of the getter being invoked (used in the error message)
 * @throws SQLException when the registered type matches neither accepted type
 */
protected void checkIndex(int parameterIndex, int type1, int type2, String getName) throws SQLException {
    // Delegates basic range/registration validation first.
    checkIndex(parameterIndex);
    if (type1 != this.testReturn[parameterIndex - 1] && type2 != this.testReturn[parameterIndex - 1]) {
        // NOTE(review): the message reports only type1 as the requested sqltype
        // even though type2 was also acceptable — confirm this is intended.
        throw new PSQLException(
                GT.tr("Parameter of type {0} was registered, but call to get{1} (sqltype={2}) was made.",
                        "java.sql.Types=" + testReturn[parameterIndex - 1], getName,
                        "java.sql.Types=" + type1),
                PSQLState.MOST_SPECIFIC_TYPE_DOES_NOT_MATCH);
    }
}
python
def name(self):
    """Return the action's name.

    Prefers the action's ``__name__``; falls back to ``ascii(action)``.
    ``ascii(action)`` is not defined for all actions, so it is only
    evaluated when the ``__name__`` lookup fails.
    """
    label = getattr(self.action, "__name__", None)
    if label is None:
        return ascii(self.action)
    return label
python
def get_stats_display_height(self, curse_msg):
    r"""Return the height of the formatted curses message.

    The height is defined by the number of '\n' (new line): messages whose
    text is exactly ``'\n'`` count as line breaks, and the height is that
    count plus one. Returns 0 when the message structure cannot be read.
    """
    try:
        newline_total = sum(1 for entry in curse_msg['msgdict']
                            if entry['msg'] == '\n')
    except Exception as exc:
        logger.debug('ERROR: Can not compute plugin height ({})'.format(exc))
        return 0
    return newline_total + 1
java
/**
 * Creates a new ActedOnBehalfOf delegation record with the given identifier,
 * delegate, responsible party and activity.
 *
 * @param id identifier of the delegation record
 * @param delegate the agent acting on behalf of another
 * @param responsible the agent on whose behalf the delegate acts
 * @param activity the activity the delegation applies to
 * @return the populated ActedOnBehalfOf instance
 */
public ActedOnBehalfOf newActedOnBehalfOf(QualifiedName id, QualifiedName delegate, QualifiedName responsible, QualifiedName activity) {
    final ActedOnBehalfOf delegation = of.createActedOnBehalfOf();
    delegation.setId(id);
    delegation.setDelegate(delegate);
    delegation.setResponsible(responsible);
    delegation.setActivity(activity);
    return delegation;
}
python
def update_task(client, task_id, revision, title=None, assignee_id=None, completed=None,
                recurrence_type=None, recurrence_count=None, due_date=None, starred=None,
                remove=None):
    '''
    Updates the task with the given ID

    See https://developer.wunderlist.com/documentation/endpoints/task for
    detailed parameter information

    :param client: authenticated API client
    :param task_id: ID of the task to update
    :param revision: current revision of the task (optimistic locking)
    :raises ValueError: if only one of recurrence_type/recurrence_count is given
    :return: the updated task parsed from the JSON response
    '''
    if title is not None:
        _check_title_length(title, client.api)
    # recurrence_type and recurrence_count only make sense as a pair.
    # (Fix: the original error message repeated "are required" twice.)
    if (recurrence_type is None) != (recurrence_count is None):
        raise ValueError("recurrence_type and recurrence_count are required together")
    if due_date is not None:
        _check_date_format(due_date, client.api)
    data = {
        'revision': int(revision),
        'title': title,
        'assignee_id': int(assignee_id) if assignee_id else None,
        'completed': completed,
        'recurrence_type': recurrence_type,
        'recurrence_count': int(recurrence_count) if recurrence_count else None,
        'due_date': due_date,
        'starred': starred,
        'remove': remove,
    }
    # Drop unset fields so the PATCH only touches the provided attributes.
    data = {key: value for key, value in data.items() if value is not None}
    endpoint = '/'.join([client.api.Endpoints.TASKS, str(task_id)])
    response = client.authenticated_request(endpoint, 'PATCH', data=data)
    return response.json()
java
/**
 * Fetches the objects identified by {@code pIds} for the registered type
 * {@code pType}, reading them one at a time through the object reader.
 * Ids that resolve to null are silently skipped, so the result array may be
 * shorter than {@code pIds}.
 *
 * @param pType registered type name used to create readables and resolve the array type
 * @param pIds ids of the objects to fetch
 * @return array (of the type registered for {@code pType}) of the found objects
 * @throws SQLException propagated from the underlying reads
 */
public Object[] getObjects(String pType, Object[] pIds) throws SQLException {
    // Create Vector to hold the result
    Vector result = new Vector(pIds.length);
    // Loop through Id's and fetch one at a time (no good performance...)
    for (int i = 0; i < pIds.length; i++) {
        // Create DBObject, set id and read it
        DatabaseObject dbObject = (DatabaseObject) getDatabaseReadable(pType);
        dbObject.setId(pIds[i]);
        dbObject = (DatabaseObject) mObjectReader.readObject(dbObject);
        // Add to result if not null
        if (dbObject != null) {
            result.add(dbObject.getObject());
        }
    }
    // Create array of correct type, length equal to Vector
    Class cl = getType(pType);
    Object[] arr = (Object[]) Array.newInstance(cl, result.size());
    // Return the vector as an array
    return result.toArray(arr);
}
python
async def result(self) -> T:
    """\
    Wait for the task's termination; either the result is returned or a raised
    exception is reraised. If an event is sent before the task terminates, an
    `EventException` is raised with the event as argument.
    """
    try:
        # Termination is signalled by Success/Failure being thrown out of
        # recv_event(); a normal return means an ordinary event arrived first.
        event = await self.recv_event()
    except Component.Success as succ:
        # success was thrown; return the result
        result, = succ.args
        return cast(T, result)
    except Component.Failure as fail:
        # here we don't expect a wrapped result, so we unwrap the failure
        cause, = fail.args
        raise cause
    else:
        # there was a regular event; shouldn't happen/is exceptional
        raise Component.EventException(event)
java
/**
 * Retrieves the entity identified by {@code entityId} within {@code groupId},
 * using the default authenticated client.
 *
 * @param entityName name of the entity type to retrieve
 * @param groupId group scoping the lookup
 * @param entityId id of the entity instance
 * @return the entity attributes as a map
 * @throws MnoException propagated from the underlying client call
 */
public Map<String, Object> retrieve(String entityName, String groupId, String entityId) throws MnoException {
    // Delegates to the overload that takes an explicit client.
    return retrieve(entityName, groupId, entityId, getAuthenticatedClient());
}
java
/**
 * Registers callbacks to be invoked while the server is starting.
 * Varargs convenience overload; delegates to the {@code List}-based variant.
 *
 * @param consumers callbacks receiving the starting {@code Server}
 * @return this builder, for chaining
 */
@SafeVarargs
public final ServerListenerBuilder addStartingCallbacks(Consumer<? super Server>... consumers) {
    return addStartingCallbacks(Arrays.asList(consumers));
}
python
def patch_requests():
    """
    Customize the cacerts.pem file that requests uses.

    Automatically updates the cert file if the contents are different.
    Sets REQUESTS_CA_BUNDLE so requests (and anything honoring the env var)
    uses the managed copy.
    """
    config.create_config_directory()
    ca_certs_file = config.CERT_FILE
    # Pull the CA bundle straight out of the installed requests package.
    # NOTE(review): requests.__loader__.get_data relies on loader internals and
    # on requests shipping 'requests/cacert.pem' — verify against the pinned
    # requests version.
    ca_certs_contents = requests.__loader__.get_data('requests/cacert.pem')
    should_write_certs = True
    if os.path.isfile(ca_certs_file):
        with open(ca_certs_file, 'rb') as f:
            existing_certs = f.read()
            # Only rewrite when the on-disk copy differs from the bundled one.
            if existing_certs != ca_certs_contents:
                should_write_certs = True
                print("Updating local SSL certificates")
            else:
                should_write_certs = False
    if should_write_certs:
        with open(ca_certs_file, 'wb') as f:
            f.write(ca_certs_contents)
    os.environ['REQUESTS_CA_BUNDLE'] = ca_certs_file
java
/**
 * Encrypts {@code privateKey} with {@code password} (delegating to the
 * byte[]-returning overload) and writes the encrypted bytes to
 * {@code outputStream}.
 *
 * NOTE(review): the stream is closed here on the success path only; if
 * {@code write} throws, the stream is left open — callers should prefer
 * try-with-resources around the stream they pass in.
 *
 * @param privateKey the key to encrypt
 * @param outputStream destination for the encrypted bytes; closed on success
 * @param password password used to derive the encryption key
 * @throws IOException on write failure; the remaining exceptions propagate
 *         from the delegated cryptographic encryption call
 */
public static void encryptPrivateKeyWithPassword(final PrivateKey privateKey, final @NonNull OutputStream outputStream, final String password)
        throws NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException, InvalidKeyException,
        InvalidAlgorithmParameterException, IllegalBlockSizeException, BadPaddingException,
        InvalidParameterSpecException, IOException {
    final byte[] encryptedPrivateKeyWithPasswordBytes = encryptPrivateKeyWithPassword( privateKey, password);
    outputStream.write(encryptedPrivateKeyWithPasswordBytes);
    outputStream.close();
}
java
/**
 * Tests whether {@code binding} can coexist with the bindings already present
 * in {@code tuple}: compatible when the variable is unbound in the tuple, or
 * bound to the same value.
 */
private static boolean isBindingCompatible( Tuple tuple, VarBindingDef binding) {
    VarValueDef boundValue = tuple.getBinding( binding.getVarDef());
    if (boundValue == null) {
        // Variable not yet bound in this tuple — anything is compatible.
        return true;
    }
    return boundValue.equals( binding.getValueDef());
}
java
/**
 * Compacts the singly-linked block list: unlinks empty blocks, renumbers the
 * surviving blocks with consecutive ids starting at 0, and updates
 * {@code numBlocks}. {@code blocklistHead} becomes null when every block is empty.
 */
public void removeEmptyBlocks() {
    Block curr = blocklistHead;
    Block prev = null;      // last non-empty block kept so far
    int effId = 0;          // next id to assign to a kept block
    while (curr != null) {
        if (!curr.isEmpty()) {
            curr.id = effId++;
            if (prev != null) {
                // Splice over any empty blocks skipped since the last kept one.
                prev.nextBlock = curr;
            } else {
                // First kept block becomes the new list head.
                blocklistHead = curr;
            }
            prev = curr;
        }
        curr = curr.nextBlock;
    }
    if (prev != null) {
        // Terminate the compacted list after the last kept block.
        prev.nextBlock = null;
    } else {
        // No non-empty blocks remained.
        blocklistHead = null;
    }
    numBlocks = effId;
}
java
/**
 * Parses an eval expression, first attempting one form and then the other
 * (the second argument of the two-arg overload is flipped on the retry);
 * fails with the expected-token alternatives when neither form matches.
 *
 * @return the parsed eval node (never null — {@code fail} throws)
 * @throws ScanException on lexical errors
 * @throws ParseException when neither eval form can be parsed
 */
protected AstEval eval() throws ScanException, ParseException {
    AstEval e = eval(false, false);
    if (e == null) {
        e = eval(false, true);
        if (e == null) {
            fail(START_EVAL_DEFERRED + "|" + START_EVAL_DYNAMIC);
        }
    }
    return e;
}
java
/**
 * Resolves a {@link Format} from a description of the form {@code name} or
 * {@code name<START_FMT>args} via the registered format factories.
 *
 * @param desc format description string
 * @return the factory-created format, or null when no registry is configured
 *         or no factory is registered under the parsed name
 */
private Format getFormat(String desc) {
    if (registry == null) {
        return null;
    }
    String name = desc;
    String args = "";
    // Split only when the separator occurs past position 0 (same as original).
    int sep = desc.indexOf(START_FMT);
    if (sep > 0) {
        name = desc.substring(0, sep).trim();
        args = desc.substring(sep + 1).trim();
    }
    FormatFactory factory = registry.get(name);
    if (factory == null) {
        return null;
    }
    return factory.getFormat(name, args, getLocale());
}
python
def drawBackground(self, painter, rect):
    """
    Draws the background for this scene: alternating row rects, weekend
    rects, then the grid lines.

    :param      painter | <QPainter>
                rect    | <QRect>
    """
    # Rebuild cached geometry before painting if anything changed.
    if self._dirty:
        self.rebuild()

    gantt = self.ganttWidget()

    # draw the alternating rects
    painter.setPen(Qt.NoPen)
    painter.setBrush(gantt.alternateBrush())
    for rect in self._alternateRects:
        painter.drawRect(rect)

    # draw the weekends
    painter.setBrush(gantt.weekendBrush())
    for rect in self._weekendRects:
        painter.drawRect(rect)

    # draw the default background grid
    painter.setPen(gantt.gridPen())
    painter.drawLines(self._hlines + self._vlines)
python
def find_template_companion(template, extension='', check=True):
    """
    Returns the first found template companion file

    Yields every file sharing the template's base-name prefix (and the given
    extension), searching the template's directory first and then each parent
    directory up to the path shared with the current working directory.
    Yields '' immediately when ``check`` is set and ``template`` is not a
    real file.
    """
    if check and not os.path.isfile(template):
        yield ''
        return  # May be '<stdin>' (click)

    template = os.path.abspath(template)
    template_dirname = os.path.dirname(template)
    template_basename = os.path.basename(template).split('.')

    current_path = template_dirname
    # Stop climbing once past the prefix shared with the CWD.
    stop_path = os.path.commonprefix((os.getcwd(), current_path))
    stop_path = os.path.dirname(stop_path)

    token = template_basename[0] + '.'

    while True:
        for file in sorted(os.listdir(current_path)):
            if not file.startswith(token):
                continue
            if not file.endswith(extension):
                continue
            file_parts = file.split('.')
            # A companion matches the template name with one or more trailing
            # dotted parts stripped.
            for i in range(1, len(template_basename)):
                if template_basename[:-i] != file_parts[:-1]:
                    continue
                if current_path == template_dirname:
                    if file_parts == template_basename:
                        continue  # Do not accept template itself
                yield os.path.join(current_path, file)
        if current_path == stop_path:
            break
        # cd ..
        current_path = os.path.split(current_path)[0]
java
/**
 * Converts the literal {@code initialValue} into its {@link CMRFidelityRepCMREx}
 * enumerator (EMF factory string deserialization).
 *
 * @throws IllegalArgumentException when the literal is not a valid enumerator
 */
public CMRFidelityRepCMREx createCMRFidelityRepCMRExFromString(EDataType eDataType, String initialValue) {
    final CMRFidelityRepCMREx literal = CMRFidelityRepCMREx.get(initialValue);
    if (literal != null) {
        return literal;
    }
    throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
}
python
def emitDataChanged(self, regItem):
    """ Emits the dataChanged signal for the given registry item, spanning
        from the first to the last column of its row.
    """
    startIndex = self.indexFromItem(regItem, col=0)
    endIndex = self.indexFromItem(regItem, col=-1)
    self.dataChanged.emit(startIndex, endIndex)
python
def isxmap(xmethod, opt):
    """Return ``isxmap`` argument for ``.IterStatsConfig`` initialiser.

    The mapping depends on the solver method ('admm' vs. anything else);
    when the 'AccurateDFid' option is disabled, the functional-value map
    from ``evlmap(True)`` is merged in as well.
    """
    if xmethod == 'admm':
        mapping = {'XPrRsdl': 'PrimalRsdl',
                   'XDlRsdl': 'DualRsdl',
                   'XRho': 'Rho'}
    else:
        mapping = {'X_F_Btrack': 'F_Btrack',
                   'X_Q_Btrack': 'Q_Btrack',
                   'X_ItBt': 'IterBTrack',
                   'X_L': 'L',
                   'X_Rsdl': 'Rsdl'}
    if not opt['AccurateDFid']:
        mapping.update(evlmap(True))
    return mapping
java
/**
 * Resolves the handler registered for {@code eventType}, walking up the
 * event-type hierarchy; when none is registered, lazily builds an
 * adapter-backed handler from the first matching linker and caches it.
 *
 * @param eventType the event type to resolve a handler for
 * @return the handler, or null when nothing is registered and no linker matches
 * @throws CoreException propagated from handler construction
 */
@SuppressWarnings("unchecked")
protected final <E extends Event> EventHandler<E> getHandler(final EventType<E> eventType) throws CoreException {
    EventType<?> temp = eventType;
    EventHandler<E> handler = null;
    // Walk the super-type chain until a registered handler is found.
    while (temp != null && handler == null) {
        handler = (EventHandler<E>) this.eventHandlerMap.get(temp);
        temp = temp.getSuperType();
    }
    // // Check supertype (ANY)
    // if (handler == null) {
    // // handler = (EventHandler<E>) this.eventHandlerMap.get();
    // }
    // Check if the handler has been created or not
    if (handler == null) {
        // NOTE(review): the loop does not break on first match, so the LAST
        // matching linker wins — confirm this is intended.
        for (final EventAdapter.Linker linker : EventAdapter.Linker.values()) {
            if (isEventType(eventType, linker.eventType())) {
                handler = buildEventHandler(linker.adapterClass(), (Class<? extends EventHandler<E>>) linker.handlerClass());
            }
        }
        if (handler != null) {
            // store the handler
            this.eventHandlerMap.put(eventType, handler);
        }
    }
    return handler;
}
java
/**
 * Adds a new bound representation code holding {@code theValue} to this
 * element's representation list.
 *
 * @param theValue the representation to add
 * @return the newly created bound code, so the caller can configure it further
 */
public BoundCodeDt<PropertyRepresentationEnum> addRepresentation(PropertyRepresentationEnum theValue) {
    BoundCodeDt<PropertyRepresentationEnum> retVal = new BoundCodeDt<PropertyRepresentationEnum>(PropertyRepresentationEnum.VALUESET_BINDER, theValue);
    getRepresentation().add(retVal);
    return retVal;
}
python
def is_seq(obj):
    """
    Check if an object is a sequence: anything indexable or iterable that is
    neither a string nor a dict.
    """
    if is_str(obj) or is_dict(obj):
        return False
    return hasattr(obj, "__getitem__") or hasattr(obj, "__iter__")
java
/**
 * One-time initialization of this schema: numbers the type tree, collects
 * fields and variants, boxes variants found under repeated nodes, assigns
 * accessors and multi-choice counts, builds the serial form (if absent), and
 * derives the schema ids from it.
 *
 * @param context shared state map; this schema is registered in it up front
 *        to guard against recursion
 * @throws IllegalArgumentException when the feature name cannot be UTF-8 encoded
 */
private void initialize(Map context) {
    // Guard against recursion.
    context.put(jsTypeTree, this);
    // Create temporary accumulators for the contents of the fields and variants instance
    // variables.
    List tmpFields = new ArrayList();
    List tmpVariants = new ArrayList();
    // Perform tasks assigned to the recursive 'number' subroutine (see comment there)
    number(jsTypeTree, context, false, 0, tmpFields, tmpVariants);
    // Move information from temporaries into permanent instance variables
    fields = (JSField[])tmpFields.toArray(new JSField[0]);
    variants = (JSVariant[])tmpVariants.toArray(new JSVariant[0]);
    // Compute the contents of the 'boxed' instance variable, which is deriveable from the
    // contents of those JSVariants which have found their way into 'fields.'  These are
    // all intended to be boxed variants because they were detected to be underneath
    // JSRepeated nodes during the 'number' process.
    int boxLen = 0; // accumulate total length of boxed array
    for (int i = 0; i < fields.length; i++)
        if (fields[i] instanceof JSVariant)
            // On the first pass, we box the variant and accumulate its accessor count
            boxLen += ((JSVariant)fields[i]).box(context).getAccessorCount();
    boxed = new int[boxLen][];
    int boxIndex = 0;
    // On the second pass through the boxed variants, we get their accessor count a second
    // time and use it this time to fill in the contents of the 'boxed' instance variable
    // proper.
    for (int i = 0; i < fields.length; i++)
        if (fields[i] instanceof JSVariant)
            for (int j = 0; j < ((JSVariant)fields[i]).getBoxed().getAccessorCount(); j++)
                boxed[boxIndex++] = new int[] { i, j };
    // Set the 'accessor' property of every JSField in the tree (see comment on
    // setAccessors).
    setAccessors(0, this);
    // Set the multiChoiceCount for each node in the tree.  Note this must be done
    // after boxing any variants found underneath JSRepeated nodes, since boxing changes
    // the multiChoiceCount.  setMultiChoiceCount is a recursive method that will set
    // counts starting from the leaves working back to the root.
    jsTypeTree.setMultiChoiceCount();
    // Compute the serialForm if it isn't already there due to the deserializing
    // constructor
    if (serialForm == null) {
        // Store the name in a convenient non-null form
        name = jsTypeTree.getFeatureName();
        if (name == null)
            name = "";
        try {
            byte[] utfname = name.getBytes("UTF8");
            serialForm = new byte[2 + utfname.length + 2 + jsTypeTree.encodedTypeLength()];
            ArrayUtil.writeShort(serialForm, 0, (short)utfname.length);
            System.arraycopy(utfname, 0, serialForm, 2, utfname.length);
            int limits[] = new int[] { 2 + utfname.length, serialForm.length };
            JSType.setCount(serialForm, limits, version);
            jsTypeTree.encodeType(serialForm, limits);
        } catch (UnsupportedEncodingException e) {
            FFDCFilter.processException(e, "initialize", "604");
            IllegalArgumentException ex = new IllegalArgumentException();
            ex.initCause(e);
            throw ex;
        }
    }
    // Compute the schemaID, which is a 64-bit truncated SHA-1 hash over the serialForm.
    schemaID = CryptoHash.hash(serialForm);
    schemaLongID = Long.valueOf(schemaID);
}
java
protected String getAccessByField(String target, Field field, Class<?> cls) { if (field.getModifiers() == Modifier.PUBLIC) { return target + ClassHelper.PACKAGE_SEPARATOR + field.getName(); } // check if has getter method String getter; if ("boolean".equalsIgnoreCase(field.getType().getCanonicalName())) { getter = "is" + CodedConstant.capitalize(field.getName()); } else { getter = "get" + CodedConstant.capitalize(field.getName()); } // check method exist try { cls.getMethod(getter, new Class<?>[0]); return target + ClassHelper.PACKAGE_SEPARATOR + getter + "()"; } catch (Exception e) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(e.getMessage(), e); } } String type = field.getType().getCanonicalName(); if ("[B".equals(type) || "[Ljava.lang.Byte;".equals(type) || "java.lang.Byte[]".equals(type)) { type = "byte[]"; } // use reflection to get value String code = "(" + FieldUtils.toObjectType(type) + ") "; code += "FieldUtils.getField(" + target + ", \"" + field.getName() + "\")"; return code; }
python
def geocode(address):
    '''Query function to obtain a latitude and longitude from a location
    string such as `Houston, TX` or `Colombia`. This uses an online lookup,
    currently wrapping the `geopy` library, and providing an on-disk cache
    of queries.

    Parameters
    ----------
    address : str
        Search string to retrieve the location, [-]

    Returns
    -------
    latitude : float
        Latitude of address, [degrees]
    longitude : float
        Longitude of address, [degrees]

    Notes
    -----
    If a query has been retrieved before, this function will take under 1 ms;
    it takes several seconds otherwise.

    Examples
    --------
    >>> geocode('Fredericton, NB')
    (45.966425, -66.645813)
    '''
    loc_tuple = None
    try:
        cache = geopy_cache()
        loc_tuple = cache.cached_address(address)
    except:
        # Handle bugs in the cache, i.e. if there is no space on disk to create
        # the database, by ignoring them
        pass
    if loc_tuple is not None:
        return loc_tuple
    else:
        geocoder = geopy_geolocator()
        if geocoder is None:
            # geopy not installed: return the sentinel message instead of a tuple.
            return geopy_missing_msg
        location = geocoder.geocode(address)
        # NOTE(review): if geopy_cache() raised above, `cache` is unbound here;
        # the bare except masks the resulting NameError, so caching is simply
        # skipped — confirm this best-effort behavior is intended.
        try:
            cache.cache_address(address, location.latitude, location.longitude)
        except:
            pass
        return (location.latitude, location.longitude)
java
/**
 * Registers {@code object} as a FreeMarker shared variable under {@code name}.
 * The checked {@code TemplateException} is rethrown as an unchecked
 * {@code RuntimeException} with its cause preserved.
 *
 * @param name variable name visible to all templates
 * @param object value to expose
 */
public static void setSharedVariable(String name, Object object) {
    try {
        FreeMarkerRender.getConfiguration().setSharedVariable(name, object);
    } catch (TemplateException e) {
        throw new RuntimeException(e);
    }
}
python
def slice_reStructuredText(input, output):
    """
    Slices given reStructuredText file.

    Splits the file at ".. .<name>" slice markers, writing one output file per
    slice, skipping lines matching CONTENT_DELETION, applying
    STATEMENT_SUBSTITUTE rewrites, and converting "- `Title`_ (module.path)"
    links to Sphinx :ref: targets.

    NOTE(review): uses dict.iteritems — Python 2 only.

    :param input: ReStructuredText file to slice.
    :type input: unicode
    :param output: Directory to output sliced reStructuredText files.
    :type output: unicode
    :return: Definition success.
    :rtype: bool
    """
    LOGGER.info("{0} | Slicing '{1}' file!".format(slice_reStructuredText.__name__, input))
    file = File(input)
    file.cache()

    # Map: slice name -> line index where the slice's content starts.
    slices = OrderedDict()
    for i, line in enumerate(file.content):
        search = re.search(r"^\.\. \.(\w+)", line)
        if search:
            slices[search.groups()[0]] = i + SLICE_ATTRIBUTE_INDENT

    index = 0
    for slice, slice_start in slices.iteritems():
        slice_file = File(os.path.join(output, "{0}.{1}".format(slice, OUTPUT_FILES_EXTENSION)))
        LOGGER.info("{0} | Outputing '{1}' file!".format(slice_reStructuredText.__name__, slice_file.path))
        # A slice ends where the next slice's marker begins, or at end-of-file
        # for the last slice.
        slice_end = index < (len(slices.values()) - 1) and slices.values()[index + 1] - SLICE_ATTRIBUTE_INDENT or \
            len(file.content)
        for i in range(slice_start, slice_end):
            skip_line = False
            for item in CONTENT_DELETION:
                if re.search(item, file.content[i]):
                    LOGGER.info("{0} | Skipping Line '{1}' with '{2}' content!".format(slice_reStructuredText.__name__, i, item))
                    skip_line = True
                    break
            if skip_line:
                continue
            line = file.content[i]
            # Apply global statement substitutions.
            for pattern, value in STATEMENT_SUBSTITUTE.iteritems():
                line = re.sub(pattern, value, line)
            # Rewrite "- `Title`_ (module.path)" links as :ref: targets.
            search = re.search(r"- `[\w ]+`_ \(([\w\.]+)\)", line)
            if search:
                LOGGER.info("{0} | Updating Line '{1}' link: '{2}'!".format(slice_reStructuredText.__name__, i, search.groups()[0]))
                line = "- :ref:`{0}`\n".format(search.groups()[0])
            slice_file.content.append(line)
        slice_file.write()
        index += 1
    return True
java
/**
 * Prints {@code self}'s string representation (via InvokerHelper) to
 * {@code out}; when {@code out} is null, a PrintWriter over System.out is
 * substituted.
 *
 * NOTE(review): the substituted PrintWriter is never flushed here —
 * presumably flushed elsewhere; confirm before relying on immediate output.
 *
 * @param self object to print
 * @param out destination writer, or null for standard output
 */
public static void print(Object self, PrintWriter out) {
    if (out == null) {
        out = new PrintWriter(System.out);
    }
    out.print(InvokerHelper.toString(self));
}
java
/**
 * Returns the function body block, lazily creating it (with no trailing
 * newline) on first access; the same instance is returned on every call.
 */
@Nonnull
public JSBlock body () {
    if (m_aBody != null) {
        return m_aBody;
    }
    m_aBody = new JSBlock ().newlineAtEnd (false);
    return m_aBody;
}
python
def _value(obj): """ make sure to get a float """ # TODO: this is ugly and makes everything ugly # can we handle this with a clean decorator or just requiring that only floats be passed?? if hasattr(obj, 'value'): return obj.value elif isinstance(obj, np.ndarray): return np.array([o.value for o in obj]) elif hasattr(obj, '__iter__'): return [_value(o) for o in obj] return obj
python
def connect(self, host=None, port=None, connect=False, **kwargs):
    """ Explicitly creates the MongoClient; this method must be used in order
    to specify a non-default host or port to the MongoClient. Takes arguments
    identical to MongoClient.__init__"""
    try:
        # connect=False defers the actual socket connection until first use.
        self.__connection = MongoClient(host=host, port=port, connect=connect, **kwargs)
    except (AutoReconnect, ConnectionFailure, ServerSelectionTimeoutError):
        # Translate driver-level connectivity errors into the app's own error type.
        raise DatabaseIsDownError("No mongod process is running.")
java
private byte[] getUploadFileContent(RequestElements requestElements) throws FMSException { Attachable attachable = (Attachable) requestElements.getEntity(); InputStream docContent = requestElements.getUploadRequestElements().getDocContent(); // gets the mime value form the filename String mime = getMime(attachable.getFileName(), "."); // if null then gets the mime value from content-type of the file mime = (mime != null) ? mime : getMime(attachable.getContentType(), "/"); if (isImageType(mime)) { return getImageContent(docContent, mime); } else { return getContent(docContent); } }
java
/**
 * Reads the serialized class hierarchy described by {@code classDesc} into
 * {@code object}, matching stream classes against the object's local
 * superclass chain. Local classes absent from the stream receive
 * {@code readObjectNoData} treatment; with a null object the stream data is
 * consumed without being applied.
 *
 * @throws NotActiveException when object is null but resolution is mandatory
 */
private void readHierarchy(Object object, ObjectStreamClass classDesc) throws IOException, ClassNotFoundException, NotActiveException {
    if (object == null && mustResolve) {
        throw new NotActiveException();
    }
    List<ObjectStreamClass> streamClassList = classDesc.getHierarchy();
    if (object == null) {
        // No instance: consume the stream data for every class in the hierarchy.
        for (ObjectStreamClass objectStreamClass : streamClassList) {
            readObjectForClass(null, objectStreamClass);
        }
    } else {
        List<Class<?>> superclasses = cachedSuperclasses.get(object.getClass());
        if (superclasses == null) {
            superclasses = cacheSuperclassesFor(object.getClass());
        }
        int lastIndex = 0;
        for (int i = 0, end = superclasses.size(); i < end; ++i) {
            Class<?> superclass = superclasses.get(i);
            int index = findStreamSuperclass(superclass, streamClassList, lastIndex);
            if (index == -1) {
                // Local class has no counterpart in the stream.
                readObjectNoData(object, superclass, ObjectStreamClass.lookupStreamClass(superclass));
            } else {
                // Read data for each stream class up to and including the match.
                for (int j = lastIndex; j <= index; j++) {
                    readObjectForClass(object, streamClassList.get(j));
                }
                lastIndex = index + 1;
            }
        }
    }
}
java
/**
 * Wraps {@code target} in a {@link SeleniumFixture}.
 *
 * @param target must be a {@link CommandProcessor}
 * @throws IllegalArgumentException when target is not a CommandProcessor
 */
public Fixture fixtureFor(Object target) {
    if (target instanceof CommandProcessor) {
        return new SeleniumFixture((CommandProcessor) target);
    }
    throw new IllegalArgumentException("Can only get a SeleniumFixture for an instance of CommandProcessor.");
}
python
def generate_property_deprecation_message(to_be_removed_in_version, old_name, new_name, new_attribute, module_name='Client'):
    """Generate a message to be used when warning about the use of deprecated properties.

    :param to_be_removed_in_version: Version of this module the deprecated property will be removed in.
    :type to_be_removed_in_version: str
    :param old_name: Deprecated property name.
    :type old_name: str
    :param new_name: Name of the new property name to use.
    :type new_name: str
    :param new_attribute: The new attribute where the new property can be found.
    :type new_attribute: str
    :param module_name: Name of the module containing the new method to use.
    :type module_name: str
    :return: Full deprecation warning message for the indicated property.
    :rtype: str
    """
    head = "Call to deprecated property '{name}'. This property will be removed in version '{version}'".format(
        name=old_name,
        version=to_be_removed_in_version,
    )
    tail = " Please use the '{new_name}' property on the '{module_name}.{new_attribute}' attribute moving forward.".format(
        new_name=new_name,
        module_name=module_name,
        new_attribute=new_attribute,
    )
    return head + tail
java
/**
 * Parses a whitespace-separated argument string into a key/value map using
 * the supplied parameter keys; delegates to {@code parseArgsArray}.
 *
 * @param argString raw argument string, tokens separated by whitespace
 * @param paramKeys expected parameter names, in positional order
 * @return map of parameter name to parsed value
 */
public static Map<String, Object> parseCommandArgs(String argString, String[] paramKeys) {
    return parseArgsArray(argString.split("\\s+"), null, null, Arrays.asList(paramKeys));
}
java
/**
 * Logs {@code msg} together with {@code throwable} at WARNING level through
 * the backing java.util.logging logger.
 */
@Override
public void warn(String msg, Throwable throwable) {
    LOGGER.log(Level.WARNING, msg, throwable);
}
java
/**
 * Evicts the cache entry stored under {@code cacheKey}; null keys and
 * missing entries are ignored.
 */
@Override
public void remove(Object cacheKey) {
    if (cacheKey == null) {
        return;
    }
    CacheObject cached = getCachedObject(cacheKey);
    if (cached != null) {
        removeCachedObject(cached);
    }
}
java
/**
 * Builds a {@code ResourceBundleDefinition} from the custom-bundle properties
 * registered under {@code bundleName}: id/prefix, global flag and inclusion
 * order, postprocessor overrides, debug-only/never flags, IE conditional
 * expression, alternate production and debug URLs, composite children,
 * mappings and dependencies.
 *
 * @param bundleName name of the bundle in the configuration
 * @param isChildBundle true when building a child of a composite bundle
 *        (children may omit an explicit id)
 * @return the populated bundle definition
 * @throws IllegalArgumentException on inconsistent or incomplete configuration
 */
private ResourceBundleDefinition buildCustomBundleDefinition(String bundleName, boolean isChildBundle) {
    // Id for the bundle
    String bundleId = props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_ID);
    if (null == bundleId && !isChildBundle)
        throw new IllegalArgumentException(
                "No id defined for the bundle with name:" + bundleName + ". Please specify one in configuration. ");
    // Whether it's a composite or not
    boolean isComposite = Boolean
            .parseBoolean(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_COMPOSITE_FLAG, "false"));
    // Create definition and set its id
    ResourceBundleDefinition bundle = new ResourceBundleDefinition();
    bundle.setBundleId(bundleId);
    bundle.setBundleName(bundleName);
    bundle.setBundlePrefix(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_BUNDLE_PREFIX));
    // Whether it's global or not
    boolean isGlobal = Boolean
            .parseBoolean(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_GLOBAL_FLAG, "false"));
    bundle.setGlobal(isGlobal);
    // Set order if its a global bundle
    if (isGlobal) {
        int order = Integer.parseInt(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_ORDER, "0"));
        bundle.setInclusionOrder(order);
    }
    // Override bundle postprocessor
    if (null != props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_POSTPROCESSOR))
        bundle.setBundlePostProcessorKeys(
                props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_POSTPROCESSOR));
    // Override unitary postprocessor
    if (null != props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_FILE_POSTPROCESSOR))
        bundle.setUnitaryPostProcessorKeys(
                props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_FILE_POSTPROCESSOR));
    // Use only with debug mode on
    boolean isDebugOnly = Boolean
            .parseBoolean(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_DEBUGONLY, "false"));
    bundle.setDebugOnly(isDebugOnly);
    // Use only with debug mode off
    boolean isDebugNever = Boolean
            .parseBoolean(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_DEBUGNEVER, "false"));
    bundle.setDebugNever(isDebugNever);
    // Set conditional comment for IE, in case one is specified
    if (null != props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_IE_CONDITIONAL_EXPRESSION))
        bundle.setIeConditionalExpression(
                props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_IE_CONDITIONAL_EXPRESSION));
    // Sets the alternate URL for production mode.
    if (null != props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_PRODUCTION_ALT_URL))
        bundle.setAlternateProductionURL(
                props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_PRODUCTION_ALT_URL));
    boolean hasDebugURL = false;
    // Sets the debug URL for debug mode.
    if (null != props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_DEBUG_URL)) {
        bundle.setDebugURL(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_DEBUG_URL));
        hasDebugURL = true;
        // A static debug resource requires a production URL, and is
        // incompatible with composites and mappings.
        if (StringUtils.isEmpty(bundle.getAlternateProductionURL())) {
            throw new IllegalArgumentException("The bundle '" + bundleName
                    + "', which use a static external resource in debug mode, must use an external resource in Production mode.\n"
                    + "Please check your configuration. ");
        }
        if (isComposite) {
            throw new IllegalArgumentException("The bundle '" + bundleName
                    + "', which use a static external resource in debug mode, can't be part of a composite bundle.\n"
                    + "Please check your configuration. ");
        }
        if (StringUtils.isNotEmpty(props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_MAPPINGS))) {
            throw new IllegalArgumentException("The bundle '" + bundleName
                    + "', which use a static external resource in debug mode, can't have a bundle mapping.\n"
                    + "Please check your configuration. ");
        }
    }
    if (isComposite) {
        String childBundlesProperty = props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_COMPOSITE_NAMES);
        if (null == childBundlesProperty)
            throw new IllegalArgumentException(
                    "No child bundle names were defined for the composite bundle with name:" + bundleName
                            + ". Please specify at least one in configuration. ");
        bundle.setComposite(true);
        // add children
        List<ResourceBundleDefinition> children = new ArrayList<>();
        StringTokenizer tk = new StringTokenizer(childBundlesProperty, JawrConstant.COMMA_SEPARATOR);
        while (tk.hasMoreTokens()) {
            // Recursively build each child (children may omit an id).
            ResourceBundleDefinition childDef = buildCustomBundleDefinition(tk.nextToken().trim(), true);
            childDef.setBundleId(props.getCustomBundleProperty(childDef.getBundleName(), BUNDLE_FACTORY_CUSTOM_ID));
            if (StringUtils.isEmpty(childDef.getDebugURL())) {
                children.add(childDef);
            } else {
                throw new IllegalArgumentException("The external bundle '" + childDef.getBundleName()
                        + "' can't be a child of the composite bundle '" + bundleName
                        + "'. Please check your configuration. ");
            }
        }
        bundle.setChildren(children);
    } else {
        String mappingsProperty = props.getCustomBundleProperty(bundleName, BUNDLE_FACTORY_CUSTOM_MAPPINGS);
        if (!hasDebugURL) {
            if (null == mappingsProperty) {
                throw new IllegalArgumentException("No mappings were defined for the bundle with name:" + bundleName
                        + ". Please specify at least one in configuration. ");
            } else {
                // Add the mappings
                List<String> mappings = new ArrayList<>();
                StringTokenizer tk = new StringTokenizer(mappingsProperty, JawrConstant.COMMA_SEPARATOR);
                while (tk.hasMoreTokens()) {
                    String mapping = tk.nextToken().trim();
                    mappings.add(mapping);
                }
                bundle.setMappings(mappings);
            }
        }
    }
    // dependencies
    List<String> dependencies = props.getCustomBundlePropertyAsList(bundleName, BUNDLE_FACTORY_CUSTOM_DEPENDENCIES);
    bundle.setDependencies(dependencies);
    return bundle;
}
python
def strip_ansi(text, c1=False, osc=False):
    ''' Strip ANSI escape sequences from a portion of text.
        https://stackoverflow.com/a/38662876/450917

        Arguments:
            line: str
            osc: bool - include OSC commands in the strippage.
            c1: bool - include C1 commands in the strippage.

        Notes:
            Enabling c1 and osc stripping is less efficient and the two
            options can mildly conflict with one another. The less
            problematic order was chosen, so there may still be rare C1 OSC
            fragments left over.
    '''
    # Standard 7-bit CSI sequences are always stripped.
    text = ansi_csi0_finder.sub('', text)
    if osc:
        text = ansi_osc0_finder.sub('', text)
    if c1:
        text = ansi_csi1_finder.sub('', text)  # go first, less destructive
        if osc:
            # C1-style OSC only stripped when both options are enabled.
            text = ansi_osc1_finder.sub('', text)
    return text
java
/**
 * Converts a date string in the default {@code Date.toString()} layout
 * ("EEE MMM dd HH:mm:ss zzz yyyy") into the escaped form
 * {@code \yr<yyyy>\mo<MM>\dy<dd>\hr<HH>\min<mm>\sec<ss>}.
 *
 * @param date date string to convert
 * @return the converted string, or "" when the input cannot be parsed
 */
private String convertDate(String date) {
    // Fix: pin Locale.ENGLISH — the input's "EEE MMM" tokens come from
    // Date.toString(), which always emits English names, so parsing broke
    // under non-English default locales.
    SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", java.util.Locale.ENGLISH);
    try {
        Date creationDate = sdf.parse(date);
        sdf = new SimpleDateFormat("\\'yr'yyyy\\'mo'MM\\'dy'dd\\'hr'HH\\'min'mm\\'sec'ss", java.util.Locale.ENGLISH);
        return sdf.format(creationDate);
    } catch(ParseException pe) {
        // Preserve original best-effort behavior: log and return empty.
        pe.printStackTrace();
        return "";
    }
}
python
def extract_fields(document_data, prefix_path, expand_dots=False):
    """Do depth-first walk of tree, yielding field_path, value

    Keys are visited in sorted order. With ``expand_dots``, dotted keys are
    parsed as nested paths; otherwise each key is a single path segment.
    """
    if not document_data:
        # Falsy (e.g. empty) mappings are represented by the _EmptyDict sentinel.
        yield prefix_path, _EmptyDict
    else:
        for key, value in sorted(six.iteritems(document_data)):
            if expand_dots:
                sub_key = FieldPath.from_string(key)
            else:
                sub_key = FieldPath(key)
            field_path = FieldPath(*(prefix_path.parts + sub_key.parts))
            if isinstance(value, dict):
                # NOTE(review): expand_dots is not propagated to the recursive
                # call, so dotted keys in nested dicts are treated literally —
                # confirm this is intended.
                for s_path, s_value in extract_fields(value, field_path):
                    yield s_path, s_value
            else:
                yield field_path, value
java
/**
 * Navigates to {@code contentScene}: pops the view stack back to it when it is
 * already present, otherwise pushes it on top, then runs the hide/show
 * transition for the target scene.
 */
public void goTo(final ContentScene contentScene) {
    Log.d(TAG, "Go to %s", contentScene.getName());
    if (!goBackTo(contentScene)) {
        mContentSceneViewStack.push(contentScene);
    }
    executeHideShowCycle(contentScene);
}
python
def windowed_statistic(pos, values, statistic, size=None, start=None, stop=None, step=None, windows=None, fill=np.nan): """Calculate a statistic from items in windows over a single chromosome/contig. Parameters ---------- pos : array_like, int, shape (n_items,) The item positions in ascending order, using 1-based coordinates.. values : array_like, int, shape (n_items,) The values to summarise. May also be a tuple of values arrays, in which case each array will be sliced and passed through to the statistic function as separate arguments. statistic : function The statistic to compute. size : int, optional The window size (number of bases). start : int, optional The position at which to start (1-based). stop : int, optional The position at which to stop (1-based). step : int, optional The distance between start positions of windows. If not given, defaults to the window size, i.e., non-overlapping windows. windows : array_like, int, shape (n_windows, 2), optional Manually specify the windows to use as a sequence of (window_start, window_stop) positions, using 1-based coordinates. Overrides the size/start/stop/step parameters. fill : object, optional The value to use where a window is empty, i.e., contains no items. Returns ------- out : ndarray, shape (n_windows,) The value of the statistic for each window. windows : ndarray, int, shape (n_windows, 2) The windows used, as an array of (window_start, window_stop) positions, using 1-based coordinates. counts : ndarray, int, shape (n_windows,) The number of items in each window. Notes ----- The window stop positions are included within a window. The final window will be truncated to the specified stop position, and so may be smaller than the other windows. Examples -------- Count non-zero (i.e., True) items in non-overlapping windows:: >>> import allel >>> pos = [1, 7, 12, 15, 28] >>> values = [True, False, True, False, False] >>> nnz, windows, counts = allel.windowed_statistic( ... 
pos, values, statistic=np.count_nonzero, size=10 ... ) >>> nnz array([1, 1, 0]) >>> windows array([[ 1, 10], [11, 20], [21, 28]]) >>> counts array([2, 2, 1]) Compute a sum over items in half-overlapping windows:: >>> values = [3, 4, 2, 6, 9] >>> x, windows, counts = allel.windowed_statistic( ... pos, values, statistic=np.sum, size=10, step=5, fill=0 ... ) >>> x array([ 7, 12, 8, 0, 9]) >>> windows array([[ 1, 10], [ 6, 15], [11, 20], [16, 25], [21, 28]]) >>> counts array([2, 3, 2, 0, 1]) """ # assume sorted positions if not isinstance(pos, SortedIndex): pos = SortedIndex(pos, copy=False) # check lengths are equal if isinstance(values, tuple): # assume multiple values arrays check_equal_length(pos, *values) else: # assume a single values array check_equal_length(pos, values) # setup windows if windows is None: windows = position_windows(pos, size, start, stop, step) else: windows = asarray_ndim(windows, 2) # find window locations locs = window_locations(pos, windows) # setup outputs out = [] counts = [] # iterate over windows for start_idx, stop_idx in locs: # calculate number of values in window n = stop_idx - start_idx if n == 0: # window is empty s = fill else: if isinstance(values, tuple): # assume multiple values arrays wv = [v[start_idx:stop_idx] for v in values] s = statistic(*wv) else: # assume a single values array wv = values[start_idx:stop_idx] s = statistic(wv) # store outputs out.append(s) counts.append(n) # convert to arrays for output return np.asarray(out), windows, np.asarray(counts)
python
def flush(self):
    """Flush (apply) all pending mutations to the datastore.

    Flushes the four mutation pools in a fixed order: datastore puts,
    datastore deletes, NDB puts, NDB deletes.
    """
    for pool in (self.puts, self.deletes, self.ndb_puts, self.ndb_deletes):
        pool.flush()
java
@Override
public void removeByCommercePriceListId(long commercePriceListId) {
    // Remove every CommercePriceListAccountRel belonging to the given
    // commerce price list (ALL_POS/ALL_POS = fetch all rows, no pagination,
    // default ordering).
    for (CommercePriceListAccountRel commercePriceListAccountRel : findByCommercePriceListId(
            commercePriceListId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null)) {
        remove(commercePriceListAccountRel);
    }
}
java
public static final Function<Number,String> toCurrencyStr(Locale locale) {
    // Returns a Number -> String function that formats values as currency
    // for the supplied locale.
    return new ToString(NumberFormatType.CURRENCY, locale);
}
java
/**
 * Widens an {@code int} matrix to a {@code double} matrix.
 *
 * <p>Fixes of the original: it indexed {@code array[0].length} for every
 * row, which threw on an empty outer array and silently mis-sized jagged
 * (non-rectangular) inputs. Each row is now sized individually.
 *
 * @param array source matrix; rows may have differing lengths
 * @return a new matrix with the same shape, values widened to double
 */
public static double[][] toDouble(int[][] array) {
    double[][] n = new double[array.length][];
    for (int i = 0; i < array.length; i++) {
        n[i] = new double[array[i].length];
        for (int j = 0; j < array[i].length; j++) {
            n[i][j] = array[i][j]; // implicit widening, no cast needed
        }
    }
    return n;
}
python
def all(self, order_by=None, limit=0):
    """
    Fetch all items.

    :param limit: How many rows to fetch.
    :param order_by: column on which to order the results. \
    To change the sort, prepend with < or >.
    """
    with rconnect() as conn:
        try:
            # Build the query incrementally from the base selection.
            query = self._base()
            if order_by is not None:
                query = self._order_by(query, order_by)
            if limit > 0:
                query = self._limit(query, limit)

            log.debug(query)
            rv = query.run(conn)
        except Exception as e:
            # Log and re-raise so callers can handle the failure.
            log.warn(e)
            raise
        else:
            # Wrap each raw row in the model class before returning.
            data = [self._model(_) for _ in rv]
            return data
python
def parse_venue(data):
    """
    Parse a ``MeetupVenue`` from the given response data.

    Returns
    -------
    A `pythonkc_meetups.types.`MeetupVenue``.
    """
    # Every attribute is optional in the API response; missing keys map to
    # None instead of raising.
    return MeetupVenue(
        id=data.get('id', None),
        name=data.get('name', None),
        address_1=data.get('address_1', None),
        address_2=data.get('address_2', None),
        address_3=data.get('address_3', None),
        city=data.get('city', None),
        state=data.get('state', None),
        zip=data.get('zip', None),
        country=data.get('country', None),
        lat=data.get('lat', None),
        lon=data.get('lon', None)
    )
python
def single_node_env(args): """Sets up environment for a single-node TF session. Args: :args: command line arguments as either argparse args or argv list """ # setup ARGV for the TF process if isinstance(args, list): sys.argv = args elif args.argv: sys.argv = args.argv # setup ENV for Hadoop-compatibility and/or GPU allocation num_gpus = args.num_gpus if 'num_gpus' in args else 1 util.single_node_env(num_gpus)
python
def get_batches(self, batch_size, shuffle=True):
    """Get batch iterator

    Parameters
    ----------
    batch_size : int
        size of one batch
    shuffle : bool
        whether to shuffle batches. Don't set to True when evaluating on dev or test set.
    Returns
    -------
    tuple
        word_inputs, tag_inputs, arc_targets, rel_targets
    """
    batches = []
    for bkt_idx, bucket in enumerate(self._buckets):
        bucket_size = bucket.shape[1]
        n_tokens = bucket_size * self._bucket_lengths[bkt_idx]
        # At least one split; never more splits than sentences in the bucket.
        n_splits = min(max(n_tokens // batch_size, 1), bucket_size)
        range_func = np.random.permutation if shuffle else np.arange
        for bkt_batch in np.array_split(range_func(bucket_size), n_splits):
            batches.append((bkt_idx, bkt_batch))
    if shuffle:
        np.random.shuffle(batches)
    for bkt_idx, bkt_batch in batches:
        # Slice out the feature planes; layout is word_id x sent_id x feature.
        word_inputs = self._buckets[bkt_idx][:, bkt_batch, 0]  # word_id x sent_id
        tag_inputs = self._buckets[bkt_idx][:, bkt_batch, 1]
        arc_targets = self._buckets[bkt_idx][:, bkt_batch, 2]
        rel_targets = self._buckets[bkt_idx][:, bkt_batch, 3]
        yield word_inputs, tag_inputs, arc_targets, rel_targets
java
/**
 * Merges the supplied parameters into this config's parameter map,
 * lazily creating the backing map on first use.
 *
 * <p>Bug fix: the original only executed {@code putAll} when the internal
 * map was still {@code null}, so every call after the first silently
 * discarded the supplied parameters. Entries are now merged on every
 * invocation.
 *
 * @param parameters entries to merge (later calls overwrite equal keys)
 * @return this config, for fluent chaining
 */
public RegistryConfig setParameters(Map<String, String> parameters) {
    if (this.parameters == null) {
        this.parameters = new ConcurrentHashMap<String, String>();
    }
    this.parameters.putAll(parameters);
    return this;
}
python
def search_knn(self, point, k, dist=None):
    """ Return the k nearest neighbors of point and their distances

    point must be an actual point, not a node.

    k is the number of results to return. The actual results can be less
    (if there aren't more nodes to return) or more in case of equal
    distances.

    dist is a distance function, expecting two points and returning a
    distance value. Distance values can be any comparable type.

    The result is an ordered list of (node, distance) tuples.
    """
    if k < 1:
        raise ValueError("k must be greater than 0.")

    # Pick the distance functional: the node's own metric by default, or
    # the user-supplied one applied to the node payload.
    if dist is None:
        get_dist = lambda n: n.dist(point)
    else:
        get_dist = lambda n: dist(n.data, point)

    results = []

    # _search_node fills `results` with (-distance, tiebreaker, node)
    # entries (negated for max-heap emulation); the counter breaks ties
    # between equal distances.
    self._search_node(point, k, results, get_dist, itertools.count())

    # We sort the final result by the distance in the tuple
    # (<KdNode>, distance).
    # Distances were stored negated, so negate back on the way out.
    return [(node, -d) for d, _, node in sorted(results, reverse=True)]
java
public static void publish(MqttSettings settings, String subtopic, Object data) throws JsonProcessingException, MqttException {
    // Serialises the payload to JSON bytes and publishes at QoS 1,
    // non-retained, delegating to the byte[] overload.
    // NOTE(review): a new ObjectMapper per call is relatively expensive;
    // consider a shared static instance.
    ObjectMapper objectMapper = new ObjectMapper();
    publish(settings, subtopic, objectMapper.writeValueAsBytes(data), 1, false);
}
python
def errors_as_text(self):
    """
    Return all form errors as a single comma-separated string.

    Non-field errors come first, followed by one "<label> <messages>"
    entry per field with errors.

    only available to Django 1.7+
    """
    errors = []
    errors.append(self.non_field_errors().as_text())
    errors_data = self.errors.as_data()
    for key, value in errors_data.items():
        if key not in self.fields:
            # Non-field errors appear in as_data() under NON_FIELD_ERRORS
            # ('__all__'), which has no matching field and previously raised
            # KeyError here; they were already added above.
            continue
        field_label = self.fields[key].label
        # Join with a space so consecutive messages don't run together.
        err_descn = ' '.join([force_text(e.message) for e in value])
        error = "%s %s" % (field_label, err_descn)
        errors.append(error)
    return ','.join(errors)
python
def handle_branch(repo, **kwargs):
    """:return: Local.create()"""
    log.info('branch: %s %s' %(repo, kwargs))
    # NOTE(review): `unicode` exists only on Python 2; this type check breaks
    # on Python 3.
    if type(repo) in [unicode, str]:
        # repo given as a base path string: create a new bare repository
        # under it, named from kwargs (default 'Unnamed').
        path = os.path.join(repo, kwargs.get('name', 'Unnamed'))
        desc = kwargs.get('desc')
        branch = Repo.new(path=path, desc=desc, bare=True)
    else:
        # repo object given: branch off it, deriving the name from the path
        # when only a path was supplied.
        name = kwargs.get('name')
        path = kwargs.get('path')
        if path and not name:
            name = os.path.basename(path)
        desc = kwargs.get('desc')
        branch = repo.branch(name=name, desc=desc)
    return branch.serialize()
java
public static <K, V> Predicate<Map<K, V>> anyEntry(
    Predicate<? super Map.Entry<K, V>> p) {
  // A map matches when at least one of its entries satisfies p.
  return forEntries(Predicates.<Map.Entry<K, V>>any(p));
}
python
def _calc_degreeminutes(decimal_degree):
    ''' Calculate degree, minute second from decimal degree
    '''
    sign = compare(decimal_degree, 0) # Store whether the coordinate is negative or positive
    decimal_degree = abs(decimal_degree)
    degree = decimal_degree//1 # Truncate degree to be an integer
    decimal_minute = (decimal_degree - degree)*60. # Calculate the decimal minutes
    minute = decimal_minute//1 # Truncate minute to be an integer
    second = (decimal_minute - minute)*60. # Calculate the decimal seconds
    # Finally, re-impose the appropriate sign
    # NOTE(review): decimal_minute is returned WITHOUT the sign re-applied,
    # unlike degree/minute/second — confirm callers rely on this asymmetry.
    degree = degree*sign
    minute = minute*sign
    second = second*sign
    return (degree, minute, decimal_minute, second)
java
@Override
public void discard() // PQ57408 Implemented this function
{
    // Destroys this bean instance after a failure so it is never reused:
    // marks it discarded, transitions it to DESTROYED, releases resources,
    // updates PMI counters and frees a pool slot when instances are limited.
    final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
    if (isTraceOn && tc.isEntryEnabled())
        Tr.entry(tc, "discard : " + this);

    // If this is a BeanO for a home (i.e. home field will be null) then
    // the discard should be ignored. Homes work much like singletons in
    // that a system exception from a home method does not cause the
    // home to be discarded. And, since the BeanO is not transitioned
    // to the 'destroyed' state, normal postInvoke will run. d661866
    if (home == null)
    {
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "discard : Home beans are never discarded");
        return;
    }

    discarded = true;

    // Idempotent: nothing to do when the bean was already destroyed.
    if (state == DESTROYED)
    {
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "discard : Bean already destroyed");
        return;
    }

    setState(DESTROYED);

    destroyHandleList();

    // Release any JCDI creational contexts that may exist. F743-29174
    this.releaseManagedObjectContext();

    if (pmiBean != null)
    {
        pmiBean.discardCount(); // F743-27070
        pmiBean.beanDestroyed();
    }

    // If the number of allowed bean instances is limited, then the number
    // of created instances needs to be decremented when an instance is
    // discarded, and the next thread that may be waiting for an instance
    // must be notified. PK20648
    if (ivNumberOfBeansLimited)
    {
        synchronized (beanPool)
        {
            --home.ivNumberBeansCreated;
            if (isTraceOn && tc.isDebugEnabled())
                Tr.debug(tc, "discard: BeanPool(" + home.ivNumberBeansCreated + "/" +
                             home.beanMetaData.ivMaxCreation + ")");
            beanPool.notify();
        }
    }

    if (isTraceOn && tc.isEntryEnabled())
    {
        Tr.exit(tc, "discard");
    }
}
python
def get_seebeck_eff_mass(self, output='average', temp=300, doping_levels=False,
                         Lambda=0.5):
    """
    Seebeck effective mass calculated as explained in Ref.
    Gibbs, Z. M. et al., Effective mass and fermi surface complexity factor
    from ab initio band structure calculations.
    npj Computational Materials 3, 8 (2017).

    Args:
        output: 'average' returns the seebeck effective mass calculated using
                the average of the three diagonal components of the seebeck tensor.
                'tensor' returns the seebeck effective mass respect to the three
                diagonal components of the seebeck tensor.
        doping_levels: False means that the seebeck effective mass is calculated
                       for every value of the chemical potential
                       True means that the seebeck effective mass is calculated
                       for every value of the doping levels for both n and p types
        temp:   temperature of calculated seebeck.
        Lambda: fitting parameter used to model the scattering (0.5 means constant
                relaxation time).
    Returns:
        a list of values for the seebeck effective mass w.r.t the chemical potential,
        if doping_levels is set at False;
        a dict with n an p keys that contain a list of values for the seebeck effective
        mass w.r.t the doping levels, if doping_levels is set at True;
        if 'tensor' is selected, each element of the lists is a list containing
        the three components of the seebeck effective mass.
    """
    if doping_levels:
        sbk_mass = {}
        # Compute separately for n-type and p-type doping.
        for dt in ('n','p'):
            conc = self.doping[dt]
            seebeck = self.get_seebeck(output=output, doping_levels=True)[dt][temp]
            sbk_mass[dt] = []
            for i in range(len(conc)):
                if output == 'average':
                    sbk_mass[dt].append(
                        seebeck_eff_mass_from_seebeck_carr(abs(seebeck[i]),
                                                           conc[i], temp, Lambda))
                elif output == 'tensor':
                    # One effective mass per diagonal tensor component.
                    sbk_mass[dt].append([])
                    for j in range(3):
                        sbk_mass[dt][-1].append(
                            seebeck_eff_mass_from_seebeck_carr(abs(seebeck[i][j][j]),
                                                               conc[i], temp, Lambda))
    else:
        seebeck = self.get_seebeck(output=output, doping_levels=False)[temp]
        conc = self.get_carrier_concentration()[temp]
        sbk_mass = []
        for i in range(len(conc)):
            if output == 'average':
                sbk_mass.append(
                    seebeck_eff_mass_from_seebeck_carr(abs(seebeck[i]),
                                                       conc[i], temp, Lambda))
            elif output == 'tensor':
                sbk_mass.append([])
                for j in range(3):
                    sbk_mass[-1].append(
                        seebeck_eff_mass_from_seebeck_carr(abs(seebeck[i][j][j]),
                                                           conc[i], temp, Lambda))
    return sbk_mass
java
public static void keepMainThread(Runnable toContinue) {
    // Capture the current thread as the application's main thread, run the
    // continuation on a freshly started worker thread, then block this
    // thread in the main loop.
    mainThread = Thread.currentThread();
    Thread thread = new Thread(toContinue, "LC Core - Main");
    thread.start();
    mainThreadLoop();
}
java
@SuppressWarnings("deprecation")
public static @ColorInt int resolveColor(@ColorRes int color, Context context) {
    // API 23 deprecated Resources#getColor(int) in favour of the
    // theme-aware two-argument overload; dispatch on the runtime SDK level.
    return Build.VERSION.SDK_INT >= 23
            ? context.getResources().getColor(color, context.getTheme())
            : context.getResources().getColor(color);
}
java
public static void convert(DMatrixRMaj src , DMatrixRBlock dst )
{
    // Copies a row-major matrix into block-matrix storage: iterates over
    // destination blocks and copies one block row at a time with
    // System.arraycopy. Edge blocks are truncated to the matrix bounds.
    if( src.numRows != dst.numRows || src.numCols != dst.numCols )
        throw new IllegalArgumentException("Must be the same size.");

    for( int i = 0; i < dst.numRows; i += dst.blockLength ) {
        // height of the current block row (may be truncated at the edge)
        int blockHeight = Math.min( dst.blockLength , dst.numRows - i);

        for( int j = 0; j < dst.numCols; j += dst.blockLength ) {
            int blockWidth = Math.min( dst.blockLength , dst.numCols - j);

            // start of this block inside the block-storage array; blocks in
            // a block row are laid out contiguously, each blockHeight*width
            int indexDst = i*dst.numCols + blockHeight*j;
            // start of the corresponding row segment in the row-major source
            int indexSrcRow = i*dst.numCols + j;

            for( int k = 0; k < blockHeight; k++ ) {
                System.arraycopy(src.data,indexSrcRow,dst.data,indexDst,blockWidth);
                indexDst += blockWidth;
                indexSrcRow += dst.numCols;
            }
        }
    }
}
java
public static ActionForm lookupActionForm(HttpServletRequest request, String formName) {
    // Look the form up in request scope first, then fall back to session
    // scope — without creating a session if none exists yet.
    ActionForm form = (ActionForm) request.getAttribute(formName);
    if (form != null) {
        return form;
    }
    HttpSession session = request.getSession(false);
    return session == null ? null : (ActionForm) session.getAttribute(formName);
}
java
public static SecuritySettingsPlugin get() {
    // Retrieve the plugin instance stored in the current Wicket
    // application's metadata; fail fast when no application is bound to the
    // thread or the plugin was never installed.
    final Application app = Application.get();
    if (null == app) {
        throw new IllegalStateException(
                "No wicket application is bound to the current thread.");
    }
    final SecuritySettingsPlugin plugin = app.getMetaData(SECURITY_SETTINGS_PLUGIN_KEY);
    if (null == plugin) {
        final String pluginClassName = SecuritySettingsPlugin.class.getSimpleName();
        throw new IllegalStateException("A " + pluginClassName
                + " has not been installed in this Wicket application. You have to call "
                + pluginClassName + ".install() in " + "your application init().");
    }
    return plugin;
}
java
private JPanel getJPanel2() {
    // Lazily build the button panel holding the Accept and Decline buttons.
    if (jPanel2 == null) {
        JPanel panel = new JPanel();
        panel.add(getBtnAccept(), null);
        panel.add(getBtnDecline(), null);
        jPanel2 = panel;
    }
    return jPanel2;
}
python
def find_funcs_called_with_kwargs(sourcecode, target_kwargs_name='kwargs'):
    r"""
    Finds functions that are called with the keyword `kwargs` variable

    CommandLine:
        python3 -m utool.util_inspect find_funcs_called_with_kwargs

    Example:
        >>> # ENABLE_DOCTEST
        >>> import utool as ut
        >>> sourcecode = ut.codeblock(
                '''
                x, y = list(zip(*ut.ichunks(data, 2)))
                somecall(arg1, arg2, arg3=4, **kwargs)
                import sys
                sys.badcall(**kwargs)
                def foo():
                    bar(**kwargs)
                    ut.holymoly(**kwargs)
                    baz()
                    def biz(**kwargs):
                        foo2(**kwargs)
                ''')
        >>> child_funcnamess = ut.find_funcs_called_with_kwargs(sourcecode)
        >>> print('child_funcnamess = %r' % (child_funcnamess,))
        >>> assert 'foo2' not in child_funcnamess, 'foo2 should not be found'
        >>> assert 'bar' in child_funcnamess, 'bar should be found'
    """
    import ast
    sourcecode = 'from __future__ import print_function\n' + sourcecode
    pt = ast.parse(sourcecode)
    child_funcnamess = []
    debug = False or VERYVERB_INSPECT

    if debug:
        print('\nInput:')
        print('target_kwargs_name = %r' % (target_kwargs_name,))
        print('\nSource:')
        print(sourcecode)
        import astor
        print('\nParse:')
        print(astor.dump(pt))

    class KwargParseVisitor(ast.NodeVisitor):
        """
        TODO: understand ut.update_existing and dict update
        ie, know when kwargs is passed to these functions and
        then look assume the object that was updated is a dictionary
        and check wherever that is passed to kwargs as well.
        """
        def visit_FunctionDef(self, node):
            # Stop descending once a nested def rebinds the target **kwargs
            # name (it then shadows the outer variable).
            if debug:
                print('\nVISIT FunctionDef node = %r' % (node,))
                print('node.args.kwarg = %r' % (node.args.kwarg,))
            if six.PY2:
                kwarg_name = node.args.kwarg
            else:
                if node.args.kwarg is None:
                    kwarg_name = None
                else:
                    kwarg_name = node.args.kwarg.arg
            #import utool as ut
            #ut.embed()
            if kwarg_name != target_kwargs_name:
                # target kwargs is still in scope
                ast.NodeVisitor.generic_visit(self, node)

        def visit_Call(self, node):
            # Record the called function's (possibly dotted) name when the
            # call forwards **<target_kwargs_name>.
            if debug:
                print('\nVISIT Call node = %r' % (node,))
                #print(ut.repr4(node.__dict__,))
            if isinstance(node.func, ast.Attribute):
                try:
                    funcname = node.func.value.id + '.' + node.func.attr
                except AttributeError:
                    funcname = None
            elif isinstance(node.func, ast.Name):
                funcname = node.func.id
            else:
                raise NotImplementedError(
                    'do not know how to parse: node.func = %r' % (node.func,))
            if six.PY2:
                kwargs = node.kwargs
                kwargs_name = None if kwargs is None else kwargs.id
                if funcname is not None and kwargs_name == target_kwargs_name:
                    child_funcnamess.append(funcname)
                if debug:
                    print('funcname = %r' % (funcname,))
                    print('kwargs_name = %r' % (kwargs_name,))
            else:
                # On Py3, **kwargs shows up as a keyword with arg=None.
                if node.keywords:
                    for kwargs in node.keywords:
                        if kwargs.arg is None:
                            if hasattr(kwargs.value, 'id'):
                                kwargs_name = kwargs.value.id
                                if funcname is not None and kwargs_name == target_kwargs_name:
                                    child_funcnamess.append(funcname)
                                if debug:
                                    print('funcname = %r' % (funcname,))
                                    print('kwargs_name = %r' % (kwargs_name,))
            ast.NodeVisitor.generic_visit(self, node)

    try:
        KwargParseVisitor().visit(pt)
    except Exception:
        raise
        # NOTE(review): everything below the `raise` is unreachable.
        pass
        #import utool as ut
        #if ut.SUPER_STRICT:
        #    raise
    return child_funcnamess
python
def do_fit(self, event): """ Re-fit the window to the size of the content. """ #self.grid.ShowScrollbars(wx.SHOW_SB_NEVER, wx.SHOW_SB_NEVER) if event: event.Skip() self.main_sizer.Fit(self) disp_size = wx.GetDisplaySize() actual_size = self.GetSize() rows = self.grid.GetNumberRows() # if there isn't enough room to display new content # resize the frame if disp_size[1] - 75 < actual_size[1]: self.SetSize((actual_size[0], disp_size[1] * .95)) self.Centre()
python
async def get_request_token(self, loop=None, **params):
    """Get a request_token and request_token_secret from OAuth1 provider.

    Merges the client's default parameters with any per-call overrides,
    caches the returned token pair on the instance and returns it together
    with the raw response data.
    """
    merged = dict(self.params, **params)
    data = await self.request(
        'GET', self.request_token_url, params=merged, loop=loop)
    self.oauth_token = data.get('oauth_token')
    self.oauth_token_secret = data.get('oauth_token_secret')
    return self.oauth_token, self.oauth_token_secret, data
java
public NodeData[] getAggregatedNodeStates(NodeData nodeState) throws RepositoryException {
    // For nodes of the configured primary type, collect every node state
    // matched by the include rules. Returns null when the type differs or
    // when no include rule matched anything.
    if (nodeState.getPrimaryTypeName().equals(nodeTypeName)) {
        List<NodeData> nodeStates = new ArrayList<NodeData>();
        for (int i = 0; i < nodeIncludes.length; i++) {
            nodeStates.addAll(Arrays.asList(nodeIncludes[i].resolve(nodeState)));
        }
        if (nodeStates.size() > 0) {
            return (NodeData[])nodeStates.toArray(new NodeData[nodeStates.size()]);
        }
    }
    return null;
}
python
def extract_domain(host):
    """ Domain name extractor. Turns host names into domain names, ported
        from pwdhash javascript code"""
    # Strip any scheme prefix, then drop everything after the first '/'.
    host = re.sub('https?://', '', host)
    # NOTE(review): re.match returns None for an empty host, which would
    # raise AttributeError here — confirm callers never pass empty strings.
    host = re.match('([^/]+)', host).groups()[0]
    # Default to the last two labels (example.com)...
    domain = '.'.join(host.split('.')[-2:])
    # ...but keep three labels when the two-label suffix is a known
    # second-level registry (module-level `_domains` set, e.g. co.uk).
    if domain in _domains:
        domain = '.'.join(host.split('.')[-3:])
    return domain
python
async def identify(self):
    """Sends the IDENTIFY packet."""
    payload = {
        'op': self.IDENTIFY,
        'd': {
            'token': self.token,
            'properties': {
                '$os': sys.platform,
                '$browser': 'discord.py',
                '$device': 'discord.py',
                '$referrer': '',
                '$referring_domain': ''
            },
            'compress': True,
            'large_threshold': 250,
            'v': 3
        }
    }

    if not self._connection.is_bot:
        # User accounts declare the guilds they sync explicitly.
        payload['d']['synced_guilds'] = []

    # Include sharding info only when both id and count are configured.
    if self.shard_id is not None and self.shard_count is not None:
        payload['d']['shard'] = [self.shard_id, self.shard_count]

    state = self._connection
    if state._activity is not None or state._status is not None:
        # Attach initial presence only when one was configured up front.
        payload['d']['presence'] = {
            'status': state._status,
            'game': state._activity,
            'since': 0,
            'afk': False
        }

    await self.send_as_json(payload)
    log.info('Shard ID %s has sent the IDENTIFY payload.', self.shard_id)
java
public ArrayList<OvhStreamRule> serviceName_output_graylog_stream_streamId_rule_ruleId_GET(String serviceName, String streamId, String ruleId) throws IOException {
    // GET /dbaas/logs/{serviceName}/output/graylog/stream/{streamId}/rule/{ruleId}
    // Fetches a single Graylog stream rule and deserialises the JSON
    // response via the shared type reference t6.
    String qPath = "/dbaas/logs/{serviceName}/output/graylog/stream/{streamId}/rule/{ruleId}";
    StringBuilder sb = path(qPath, serviceName, streamId, ruleId);
    String resp = exec(qPath, "GET", sb.toString(), null);
    return convertTo(resp, t6);
}
python
def from_authorized_user_info(cls, info, scopes=None):
    """Creates a Credentials instance from parsed authorized user info.

    Args:
        info (Mapping[str, str]): The authorized user info in Google
            format.
        scopes (Sequence[str]): Optional list of scopes to include in the
            credentials.

    Returns:
        google.oauth2.credentials.Credentials: The constructed
            credentials.

    Raises:
        ValueError: If the info is not in the expected format.
    """
    # Validate the minimal field set needed to refresh an access token.
    keys_needed = set(('refresh_token', 'client_id', 'client_secret'))
    missing = keys_needed.difference(six.iterkeys(info))

    if missing:
        raise ValueError(
            'Authorized user info was not in the expected format, missing '
            'fields {}.'.format(', '.join(missing)))

    return Credentials(
        None,  # No access token, must be refreshed.
        refresh_token=info['refresh_token'],
        token_uri=_GOOGLE_OAUTH2_TOKEN_ENDPOINT,
        scopes=scopes,
        client_id=info['client_id'],
        client_secret=info['client_secret'])
java
public static HttpMockServer start(ConfigReader configReader, NetworkType networkType) {
    // Convenience alias for startMockApiServer.
    final HttpMockServer server =
            HttpMockServer.startMockApiServer(configReader, networkType);
    return server;
}
java
public static <T extends Number> double divide(final T dividend, final T divisor) {
    // Null-safe division: any null operand yields 0.
    if (JudgeUtils.hasNull(dividend, divisor)) {
        return 0d;
    }
    // NOTE(review): equal operands short-circuit to 1, and a ZERO divisor
    // also returns 1 instead of throwing or yielding infinity — confirm
    // this sentinel behaviour is what callers expect.
    if (JudgeUtils.equals(dividend, divisor) || divisor.doubleValue() == 0) {
        return 1d;
    }
    return dividend.doubleValue() / divisor.doubleValue();
}
java
@Override
public final void setHasName(final SeService pHasName) {
    // Store the owning service and keep the lazily-created I18N ID object
    // pointed at the same service.
    this.hasName = pHasName;
    if (this.itsId == null) {
        this.itsId = new IdI18nSeService();
    }
    this.itsId.setHasName(this.hasName);
}
python
def matrix_multiplication_blockwise(self, matrix, blocksize): """ http://en.wikipedia.org/wiki/Block_matrix#Block_matrix_multiplication """ #Create the blockwise version of self and matrix selfBlockwise = self.matrix_to_blockmatrix(blocksize) matrixBlockwise = matrix.matrix_to_blockmatrix(blocksize) return (selfBlockwise * matrixBlockwise).flatten()
python
def _makeResult(self):
    """Return a Result that doesn't print dots.

    Nose's ResultProxy will wrap it, and other plugins can still print
    stuff---but without smashing into our progress bar, care of
    ProgressivePlugin's stderr/out wrapping.
    """
    # Pass through the working dir, total test count and output stream so
    # the custom result can render its progress bar.
    return ProgressiveResult(self._cwd,
                             self._totalTests,
                             self.stream,
                             config=self.config)
java
@Override protected Artifact getArtifact(final ArtifactItem item) throws MojoExecutionException { assert item != null; Artifact artifact = null; if (item.getVersion() != null) { // if version is set in ArtifactItem, it will always override the one in project dependency artifact = createArtifact(item); } else { // Return the artifact from the project dependency if it is available and the mojo // should have requiresDependencyResolution=ResolutionScope.COMPILE_PLUS_RUNTIME set artifact = resolveFromProjectDependencies(item); if (artifact != null) { // in case it is not resolved yet if (!artifact.isResolved()) { item.setVersion(artifact.getVersion()); artifact = createArtifact(item); } } else if (resolveFromProjectDepMgmt(item) != null) { // if item has no version set, try to get it from the project dependencyManagement section // get version from dependencyManagement item.setVersion(resolveFromProjectDepMgmt(item).getVersion()); artifact = createArtifact(item); } else { throw new MojoExecutionException( "Unable to find artifact version of " + item.getGroupId() + ":" + item.getArtifactId() + " in either project dependencies or in project dependencyManagement."); } } return artifact; }
java
private void processRegexRule(final String parsed_value) {
    // Applies the rule's regex to the parsed value and, when the configured
    // capture group yields a non-empty string, uses it as the branch/leaf
    // name. Failures are reported via testMessage rather than thrown.
    if (rule.getCompiledRegex() == null) {
        throw new IllegalArgumentException("Regex was null for rule: " + rule);
    }

    final Matcher matcher = rule.getCompiledRegex().matcher(parsed_value);
    if (matcher.find()) {
        // The first group is always the full string, so we need to increment
        // by one to fetch the proper group
        if (matcher.groupCount() >= rule.getRegexGroupIdx() + 1) {
            final String extracted = matcher.group(rule.getRegexGroupIdx() + 1);
            if (extracted == null || extracted.isEmpty()) {
                // can't use empty values as a branch/leaf name
                testMessage("Extracted value for rule " + rule
                        + " was null or empty");
            } else {
                // found a branch or leaf!
                setCurrentName(parsed_value, extracted);
            }
        } else {
            // the group index was out of range
            testMessage("Regex group index [" + rule.getRegexGroupIdx()
                    + "] for rule " + rule + " was out of bounds ["
                    + matcher.groupCount() + "]");
        }
    }
}
python
def backward_word(self, e): # (M-b)
    u"""Move back to the start of the current or previous word. Words are
    composed of letters and digits."""
    # Delegate to the line buffer, honouring any numeric argument prefix,
    # then commit the cursor movement.
    self.l_buffer.backward_word(self.argument_reset)
    self.finalize()
python
def __read(usr_path, file_type):
    """
    Determine what path needs to be taken to read in file(s)

    :param str usr_path: Path (optional)
    :param str file_type: File type to read
    :return none:
    """
    # is there a file path specified ?
    if usr_path:
        # Is this a URL? Download the file and return the local path
        is_url = re.match(re_url, usr_path)
        if is_url:
            # The usr_path will now be a local path to a single file. It
            # will trigger the "elif" branch below.
            usr_path = download_from_url(usr_path, get_download_path())

        # Directory path
        if os.path.isdir(usr_path):
            __read_directory(usr_path, file_type)
        # File path
        elif os.path.isfile(usr_path):
            __read_file(usr_path, file_type)
        # Invalid path given
        else:
            print("Error: Path given is invalid")

    # no path specified. ask if they want to load dir or file
    else:
        choice = ""
        count = 3
        while not choice:
            try:
                print("Choose a read option:\n1. One file\n2. Multi-file select\n3. Directory")
                choice = input("Option: ")
                print("\n")
                # now use the given file type and prompt answer to call _read_file or _read_dir
                if choice in ["1", "2", "3"]:
                    # open directory picker
                    if choice == "3":
                        __read_directory(usr_path, file_type)
                    # open a file picker
                    else:
                        __read_file(usr_path, file_type)
                    break
                else:
                    # BUG FIX: reset choice so the loop re-prompts; previously
                    # any non-empty invalid answer (e.g. "4") terminated the
                    # while-loop without doing anything, making the 3-attempt
                    # counter dead code.
                    choice = ""
                    count -= 1
                    if count == 0:
                        print("Error: Too many failed attempts")
                        break
            except Exception as e:
                print("Error: Invalid input: {}".format(e))
    return
python
def _fill_cache(self):
    """Fill the cache from the `astropy.table.Table`"""
    # Build one file handle per table row, keyed by its path.
    for irow in range(len(self._table)):
        file_handle = self._make_file_handle(irow)
        self._cache[file_handle.path] = file_handle
python
def _put_metadata(self, fs_remote, ds):
    """Store metadata on a pyfs remote"""
    # NOTE(review): text_type is imported but never used in this method.
    from six import text_type
    from fs.errors import ResourceNotFoundError

    identity = ds.identity
    d = identity.dict
    d['summary'] = ds.config.metadata.about.summary
    d['title'] = ds.config.metadata.about.title

    meta_stack = self._meta_infos(ds)

    def do_metadata():
        # Write each (path, payload) pair to the remote filesystem.
        for path, ident in meta_stack:
            fs_remote.setcontents(path, ident)

    try:
        # Assume the directories already exist
        do_metadata()
    except ResourceNotFoundError:
        # Nope, make them and try again.
        parts = ['vid', 'id', 'vname', 'name']

        for p in parts:
            dirname = os.path.join('_meta', p)
            fs_remote.makedir(dirname, allow_recreate=True, recursive=True)

        do_metadata()
python
def execute_hook(self, event_name):
    """Execute shell commands related to current event_name"""
    hook = self.settings.hooks.get_string('{!s}'.format(event_name))
    if hook is not None and hook != "":
        # Split the configured command line into argv for Popen.
        hook = hook.split()
        try:
            subprocess.Popen(hook)
        except OSError as oserr:
            if oserr.errno == 8:
                # errno 8 == ENOEXEC: the target exists but cannot be run as
                # a program — usually a missing shebang line.
                log.error("Hook execution failed! Check shebang at first line of %s!", hook)
                log.debug(traceback.format_exc())
            else:
                log.error(str(oserr))
        except Exception as e:
            # Hooks must never crash the application; log and continue.
            log.error("hook execution failed! %s", e)
            log.debug(traceback.format_exc())
        else:
            log.debug("hook on event %s has been executed", event_name)
python
def convert_time(self, time):
    """
    A helper function to convert seconds into hh:mm:ss for better
    readability.

    time: A string representing time in seconds.
    """
    formatted = str(datetime.timedelta(seconds=int(time)))
    hours, _, remainder = formatted.partition(':')
    # Drop a bare leading zero-hour field ("0:12:34" -> "12:34").
    return remainder if hours == '0' else formatted
java
@Override
public <T> OperationFuture<Boolean> prepend(long cas, String key, T val, Transcoder<T> tc) {
    // Thin wrapper delegating to the shared asynchronous concatenation
    // implementation with the 'prepend' operation type.
    return asyncCat(ConcatenationType.prepend, cas, key, val, tc);
}
python
def _make_sync_method(name): """Helper to synthesize a synchronous method from an async method name. Used by the @add_sync_methods class decorator below. Args: name: The name of the synchronous method. Returns: A method (with first argument 'self') that retrieves and calls self.<name>, passing its own arguments, expects it to return a Future, and then waits for and returns that Future's result. """ def sync_wrapper(self, *args, **kwds): method = getattr(self, name) future = method(*args, **kwds) return future.get_result() return sync_wrapper
python
def create_parameter_map(self):
    """
    Creates a parameter map which takes a tuple of the exchange 'from' and exchange 'to'
    codes and returns the parameter name for that exchange
    """
    names = self.modelInstance.names
    db = self.modelInstance.database['items']
    parameter_map = {}

    def get_names_index(my_thing):
        # Position of `my_thing` in the model's name list (first match).
        return[i for i, x in enumerate(names) if x == my_thing][0]

    for k, this_item in db.items():
        if this_item['type'] == 'process':
            # Single production exchange and all technosphere inputs.
            production_id = [x['input'] for x in this_item['exchanges'] if x['type'] == 'production'][0]
            input_ids = [x['input'] for x in this_item['exchanges'] if x['type'] == 'technosphere']
            production_index = get_names_index(db[production_id]['name'])
            input_indexes = [get_names_index(db[x]['name']) for x in input_ids]
            # Parameter naming scheme: n_p_<input index>_<production index>.
            parameter_ids = ['n_p_{}_{}'.format(x, production_index) for x in input_indexes]
            parameter_map_items = {(input_ids[n], k): parameter_ids[n] for n, x in enumerate(input_ids)}

            #check = [self.modelInstance.params[x]['description'] for x in parameter_ids]
            #print(check)
            #print(parameter_map_items)

            parameter_map.update(parameter_map_items)

    self.parameter_map = parameter_map
python
def is_legal_sequence(self, packet: DataPacket) -> bool:
    """
    Check if the sequence number of the DataPacket is legal.
    For more information see page 17 of
    http://tsp.esta.org/tsp/documents/docs/E1-31-2016.pdf.

    :param packet: the packet to check
    :return: True if the sequence is legal; False if the sequence number is bad
    """
    try:
        # Difference to the last sequence seen for this universe; a value
        # in ]-20, 0] marks an old/out-of-order packet per E1.31.
        diff = packet.sequence - self.lastSequence[packet.universe]
        if 0 >= diff > -20:
            return False
    except (KeyError, AttributeError):
        # KeyError: first packet seen for this universe.
        # AttributeError: self.lastSequence not initialized yet.
        # Narrowed from a bare `except` so real programming errors
        # (e.g. a malformed packet) are no longer silently swallowed.
        pass
    # Sequence is good: remember it for the next comparison.
    self.lastSequence[packet.universe] = packet.sequence
    return True
python
def get_bounding_box(self, maxdist):
    """
    Return the bounding box that contains every point source in this
    collection, enlarged by the maximum distance `maxdist`.
    """
    locations = [source.location for source in self]
    return utils.get_bounding_box(locations, maxdist)
java
/**
 * Parses the {@code max_errors: <n>} directive from the given configuration
 * line and stores the value on the pipeline.
 *
 * @param current  the raw configuration line, e.g. {@code "max_errors: 5"}
 * @param pipeline the pipeline being configured
 * @throws ParseException if the value is not a valid integer; the
 *         underlying {@link NumberFormatException} is attached as the cause
 */
private static void parseMaxErrors(String current, Pipeline pipeline) throws ParseException {
  try {
    // Strip the directive prefix, leaving only the numeric value.
    String errorsStr = current.replaceFirst("max_errors: ", "").trim();
    int errors = parseInt(errorsStr);
    pipeline.setMaxErrors(errors);
    LOG.info("+-using {} max_errors", errors);
  } catch (NumberFormatException e) {
    // ParseException has no (message, cause) constructor; attach the cause
    // explicitly so the original failure is not lost.
    ParseException parseException = new ParseException(
        "could not parse number of max_errors from " + current, -1);
    parseException.initCause(e);
    throw parseException;
  }
}
java
/**
 * Handles a ZooKeeper event for a child node: re-reads the node's payload,
 * updates the cached copy for that path (or removes it if the node is
 * gone), publishes a new immutable snapshot, and notifies the callback.
 *
 * @param event the watched event naming the affected child path
 * @throws Exception on ZooKeeper or JSON decoding failures
 */
private void processChildEvent(WatchedEvent event) throws Exception {
  // Mutate a private copy so the published map can be swapped atomically.
  HashMap<String, JSONObject> cacheCopy = new HashMap<String, JSONObject>(m_publicCache.get());
  ByteArrayCallback cb = new ByteArrayCallback();
  // Fetch the node's current payload, re-arming the child watch at the same time.
  m_zk.getData(event.getPath(), m_childWatch, cb, null);
  try {
    byte payload[] = cb.get();
    JSONObject jsObj = new JSONObject(new String(payload, "UTF-8"));
    cacheCopy.put(cb.getPath(), jsObj);
  } catch (KeeperException.NoNodeException e) {
    // Node was deleted between the event firing and our read: drop it.
    cacheCopy.remove(event.getPath());
  }
  // Publish the updated snapshot as an immutable map.
  m_publicCache.set(ImmutableMap.copyOf(cacheCopy));
  if (m_cb != null) {
    m_cb.run(m_publicCache.get());
  }
}
python
def __str_cleanup(line): """ Remove the unnecessary characters in the line that we don't want :param str line: :return str: """ if '#' in line: line = line.replace("#", "") line = line.strip() if '-----------' in line: line = '' return line
python
def email(self):
    """ User email(s) """
    try:
        return self.parser.get("general", "email")
    except (NoSectionError, NoOptionError) as error:
        log.debug(error)
        # Pick the message matching the specific configparser failure.
        if isinstance(error, NoSectionError):
            message = "No general section found in the config file."
        else:
            message = "No email address defined in the config file."
        raise ConfigFileError(message)
java
/**
 * Recomputes the CRC of the partial checksum chunk that precedes offset
 * {@code blkoff} in the block. The chunk's data is re-read from the block
 * file and the previously stored CRC is read from the meta file; the two
 * must agree. On success the recomputed CRC is kept in {@code partialCrcInt}.
 *
 * @param blkoff offset into the block where the next write will begin
 * @param ckoff offset into the meta (checksum) file of the stored CRC
 * @param bytesPerChecksum number of data bytes covered by one checksum
 * @param checksum checksum descriptor (used here only for its checksum size)
 * @throws IOException if the block is unknown to the volume map or the
 *         recomputed CRC does not match the stored value
 */
private void computePartialChunkCrc(long blkoff, long ckoff, int bytesPerChecksum,
    DataChecksum checksum) throws IOException {

  // find offset of the beginning of partial chunk.
  //
  int sizePartialChunk = (int) (blkoff % bytesPerChecksum);
  int checksumSize = checksum.getChecksumSize();
  blkoff = blkoff - sizePartialChunk;
  LOG.info("computePartialChunkCrc sizePartialChunk " + sizePartialChunk
      + " block " + block + " offset in block " + blkoff
      + " offset in metafile " + ckoff);

  // create an input stream from the block file
  // and read in partial crc chunk into temporary buffer
  //
  byte[] buf = new byte[sizePartialChunk];
  byte[] crcbuf = new byte[checksumSize];
  FileInputStream dataIn = null, metaIn = null;
  try {
    DatanodeBlockInfo info = datanode.data.getDatanodeBlockInfo(namespaceId, block);
    if (info == null) {
      throw new IOException("Block " + block + " does not exist in volumeMap.");
    }
    File blockFile = info.getDataFileToRead();
    if (blockFile == null) {
      // No finalized data file yet: fall back to the temporary file of a
      // block that is still being written.
      blockFile = info.getBlockDataFile().getTmpFile(namespaceId, block);
    }
    RandomAccessFile blockInFile = new RandomAccessFile(blockFile, "r");
    if (blkoff > 0) {
      blockInFile.seek(blkoff);
    }
    File metaFile = getMetaFile(blockFile, block);
    RandomAccessFile metaInFile = new RandomAccessFile(metaFile, "r");
    if (ckoff > 0) {
      metaInFile.seek(ckoff);
    }
    // NOTE(review): the FileInputStreams share the RandomAccessFiles' file
    // descriptors, so closing them in the finally block below releases the
    // underlying descriptors as well.
    dataIn = new FileInputStream(blockInFile.getFD());
    metaIn = new FileInputStream(metaInFile.getFD());
    IOUtils.readFully(dataIn, buf, 0, sizePartialChunk);

    // open meta file and read in crc value computed earlier
    IOUtils.readFully(metaIn, crcbuf, 0, crcbuf.length);
  } finally {
    if (dataIn != null) {
      dataIn.close();
    }
    if (metaIn != null) {
      metaIn.close();
    }
  }

  // compute crc of partial chunk from data read in the block file.
  Checksum partialCrc = new NativeCrc32();
  partialCrc.update(buf, 0, sizePartialChunk);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // paranoia! verify that the pre-computed crc matches what we
  // recalculated just now
  if (partialCrc.getValue() != FSInputChecker.checksum2long(crcbuf)) {
    String msg = "Partial CRC " + partialCrc.getValue()
        + " does not match value computed the "
        + " last time file was closed "
        + FSInputChecker.checksum2long(crcbuf);
    throw new IOException(msg);
  }
  // LOG.debug("Partial CRC matches 0x" +
  // Long.toHexString(partialCrc.getValue()));
  partialCrcInt = (int) partialCrc.getValue();
}
java
/**
 * Creates a stage by applying the pre-execution hook to the request and
 * delegating to the generated execution method.
 *
 * @param request the create-stage request
 * @return the result of the create-stage call
 */
@Override
public CreateStageResult createStage(CreateStageRequest request) {
  // Allow handlers to modify/validate the request before execution.
  request = beforeClientExecution(request);
  return executeCreateStage(request);
}