language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
// Evaluates the 11 inequality constraints of this problem for the given solution
// and stores both the aggregated violation degree and the count of violated
// constraints as solution attributes. The constraint set matches the classic
// "speed reducer" engineering design benchmark -- NOTE(review): confirm against
// the problem definition this class implements.
private void evaluateConstraints(DoubleSolution solution) {
    double[] constraint = new double[this.getNumberOfConstraints()];
    double x1,x2,x3,x4,x5,x6,x7;
    x1 = solution.getVariableValue(0);
    x2 = solution.getVariableValue(1);
    x3 = solution.getVariableValue(2);
    x4 = solution.getVariableValue(3);
    x5 = solution.getVariableValue(4);
    x6 = solution.getVariableValue(5);
    x7 = solution.getVariableValue(6);
    // Constraints are negated so that a NEGATIVE entry means "violated".
    constraint[0] = -((1.0/(x1*x2*x2*x3)) -(1.0/27.0));
    constraint[1] = -((1.0/(x1*x2*x2*x3*x3))-(1.0/397.5));
    constraint[2] = -((x4*x4*x4)/(x2*x3*x3*x6*x6*x6*x6)-(1.0/1.93));
    constraint[3] = -((x5*x5*x5)/(x2*x3*x7*x7*x7*x7)- (1.0/1.93));
    constraint[4] = -(x2*x3 - 40.0);
    constraint[5] = -((x1/x2) - 12.0);
    constraint[6] = -(5.0 - (x1/x2));
    constraint[7] = -(1.9 - x4 + 1.5*x6);
    constraint[8] = -(1.9 - x5 + 1.1*x7);
    // Stress-style constraints built from intermediate terms.
    double aux = 745.0 * x4 / (x2 * x3);
    double f2 = java.lang.Math.sqrt((aux*aux)+1.69e7) / (0.1*x6*x6*x6);
    constraint[9] = -(f2 - 1300);
    double a = 745.0*x5/(x2*x3);
    double b = 1.575e8;
    constraint[10] = -(java.lang.Math.sqrt(a*a+b)/(0.1*x7*x7*x7)-1100.0);
    // Aggregate: sum of negative entries and how many there are.
    double overallConstraintViolation = 0.0;
    int violatedConstraints = 0;
    for (int i = 0; i < getNumberOfConstraints(); i++) {
        if (constraint[i]<0.0){
            overallConstraintViolation+=constraint[i];
            violatedConstraints++;
        }
    }
    overallConstraintViolationDegree.setAttribute(solution, overallConstraintViolation);
    numberOfViolatedConstraints.setAttribute(solution, violatedConstraints);
}
python
def add(self, rule, method, target, name=None, static=False):
    ''' Add a new route or replace the target for an existing route.

    :param rule: route rule string (may contain ``:wildcard`` segments)
    :param method: HTTP method; stored upper-cased
    :param target: handler to associate with (rule, method)
    :param name: optional name under which the route is registered
    :param static: deprecated flag (pre-0.9); escapes all ':' in the rule
    '''
    if static:
        # Deprecated since 0.9: callers should escape ':' themselves.
        depr("Use a backslash to escape ':' in routes.") # 0.9
        rule = rule.replace(':','\\:')
    if rule in self.routes:
        # Known rule: just (re)bind this method's target.
        self.routes[rule][method.upper()] = target
    else:
        # New rule: create its method map and remember insertion order.
        self.routes[rule] = {method.upper(): target}
        self.rules.append(rule)
    if self.static or self.dynamic: # Clear precompiler cache.
        self.static, self.dynamic = {}, {}
    if name:
        # Named routes map to (rule, None); the second slot is filled lazily.
        self.named[name] = (rule, None)
java
// Resolves the last path component of the requested URL to the first matching
// node, trying every property name configured for entity resolution. The full
// result list is stashed on the request (POSSIBLE_ENTRY_POINTS_KEY) so later
// stages can inspect the other candidates. Returns null when the name is
// empty or nothing matches.
private AbstractNode findFirstNodeByName(final SecurityContext securityContext, final HttpServletRequest request, final String path) throws FrameworkException {
    final String name = PathHelper.getName(path);
    if (!name.isEmpty()) {
        logger.debug("Requested name: {}", name);
        final Query query = StructrApp.getInstance(securityContext).nodeQuery();
        final ConfigurationProvider config = StructrApp.getConfiguration();
        if (!possiblePropertyNamesForEntityResolving.isEmpty()) {
            // Group the per-property predicates under a single AND conjunction.
            query.and();
            resolvePossiblePropertyNamesForObjectResolution(config, query, name);
            query.parent();
        }
        final List<AbstractNode> results = Iterables.toList(query.getResultStream());
        logger.debug("{} results", results.size());
        request.setAttribute(POSSIBLE_ENTRY_POINTS_KEY, results);
        return (results.size() > 0 ? (AbstractNode) results.get(0) : null);
    }
    return null;
}
java
// Initializes and starts a previously CONFIGURED workspace. Requires the
// MANAGE_REPOSITORY permission. The actual initialization runs inside a
// privileged action; checked causes are unwrapped back into
// RepositoryException / RuntimeException for the caller. Idempotent: an
// already-initialized workspace only logs a warning and returns.
public synchronized void createWorkspace(String workspaceName) throws RepositoryException {
    SecurityHelper.validateSecurityPermission(JCRRuntimePermissions.MANAGE_REPOSITORY_PERMISSION);
    final WorkspaceContainer wsContainer = repositoryContainer.getWorkspaceContainer(workspaceName);
    if (wsContainer == null) {
        // No container means the workspace was never configured.
        throw new RepositoryException("Workspace " + workspaceName
            + " is not configured. Use RepositoryImpl.configWorkspace() method");
    }
    final WorkspaceInitializer workspaceInitializer =
        repositoryContainer.getWorkspaceContainer(workspaceName).getWorkspaceInitializer();
    // Optional component: when present, validate overridden system parameters
    // against the workspace entry before initialization.
    SystemParametersPersistenceConfigurator sppc =
        (SystemParametersPersistenceConfigurator)repositoryContainer
            .getComponentInstanceOfType(SystemParametersPersistenceConfigurator.class);
    if (sppc != null) {
        WorkspaceEntry workspaceEntry = repositoryContainer.getWorkspaceEntry(workspaceName);
        repositoryContainer.setInitializerAndValidateOverriddenParameters(workspaceEntry, workspaceInitializer);
    }
    if (isWorkspaceInitialized(workspaceName)) {
        LOG.warn("Workspace '" + workspaceName + "' is presumably initialized. config canceled");
        return;
    }
    try {
        // Initialization and container start need elevated privileges.
        SecurityHelper.doPrivilegedExceptionAction(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                workspaceInitializer.initWorkspace();
                wsContainer.start();
                return null;
            }
        });
    } catch (PrivilegedActionException pae) {
        // Unwrap the real cause and rethrow in the narrowest useful form.
        Throwable cause = pae.getCause();
        if (cause instanceof RepositoryException) {
            throw (RepositoryException)cause;
        } else if (cause instanceof RuntimeException) {
            throw (RuntimeException)cause;
        } else {
            throw new RuntimeException(cause);
        }
    }
    LOG.info("Workspace " + workspaceName + "@" + this.name + " is initialized");
}
java
/**
 * Looks up the wrapper registered for the given managed connection,
 * holding the map's read lock for the duration of the lookup.
 *
 * @param mc the managed connection used as the lookup key
 * @return the mapped wrapper, or {@code null} if none is registered
 */
public Object getMCWFromMctoMCWMap(Object mc) {
    mcToMCWMapRead.lock();
    try {
        return mcToMCWMap.get(mc);
    } finally {
        mcToMCWMapRead.unlock();
    }
}
python
def resolve_one(self, correlation_id, key):
    """
    Resolves a single connection parameters by its key.

    :param correlation_id: (optional) transaction id to trace execution through call chain.
    :param key: a key to uniquely identify the connection.
    :return: the first registered connection whose key matches, or None.
    """
    for item in self._items:
        # `is not None` (not `!= None`) is the correct identity check here.
        if item.key == key and item.connection is not None:
            return item.connection  # first match wins, as before
    return None
java
/**
 * Reads the configured Akka framesize string and converts it to bytes by
 * letting the typesafe-config parser interpret the size suffix.
 *
 * @param configuration configuration holding {@code AkkaOptions.FRAMESIZE}
 * @return the maximum frame size in bytes
 */
public static long extractMaximumFramesize(Configuration configuration) {
    final String configuredFrameSize = configuration.getString(AkkaOptions.FRAMESIZE);
    final String renderedConfig = String.format(SIMPLE_AKKA_CONFIG_TEMPLATE, configuredFrameSize);
    return ConfigFactory.parseString(renderedConfig).getBytes(MAXIMUM_FRAME_SIZE_PATH);
}
python
def _load_all(self): ''' Load all of them ''' with self._lock: for name in self.file_mapping: if name in self.loaded_files or name in self.missing_modules: continue self._load_module(name) self.loaded = True
python
def print_format_output(dataframe):
    """Prints output of given dataframe to fit into terminal.

    Returns:
        table (pd.DataFrame): Final outputted dataframe.
        dropped_cols (list): Columns dropped due to terminal size.
        empty_cols (list): Empty columns (dropped on default).
    """
    print_df = pd.DataFrame()
    dropped_cols = []
    empty_cols = []
    # column display priority is based on the info_keys passed in
    for i, col in enumerate(dataframe):
        if dataframe[col].isnull().all():
            # Don't add col to print_df if is fully empty
            empty_cols += [col]
            continue

        print_df[col] = dataframe[col]
        # Re-render after each added column; the first header line's length
        # approximates the table width.
        test_table = tabulate(print_df, headers="keys", tablefmt="psql")
        if str(test_table).index("\n") > TERM_WIDTH:
            # Drop all columns beyond terminal width
            print_df.drop(col, axis=1, inplace=True)
            dropped_cols += list(dataframe.columns)[i:]
            break

    table = tabulate(
        print_df, headers="keys", tablefmt="psql", showindex="never")

    print(table)
    if dropped_cols:
        print("Dropped columns:", dropped_cols)
        print("Please increase your terminal size to view remaining columns.")
    if empty_cols:
        print("Empty columns:", empty_cols)

    return table, dropped_cols, empty_cols
java
/**
 * Builds the cluster-qualified database name, {@code <db>@<cluster>}, with
 * the database part lower-cased.
 *
 * @param clusterName cluster the database lives in
 * @param dbName database name (lower-cased in the result)
 * @return qualified name of the form {@code db@cluster}
 */
public static String getDBQualifiedName(String clusterName, String dbName) {
    // Locale.ROOT makes lower-casing locale-independent (e.g. avoids the
    // Turkish dotless-i problem for identifiers).
    return String.format("%s@%s", dbName.toLowerCase(java.util.Locale.ROOT), clusterName);
}
java
// Emits an XMI 1.1 document describing the javadoc model: fixed header and
// framing elements, with the class definitions and the logical-view tree
// generated from the specified packages. NOTE(review): the metamodel/model
// hrefs are hard-coded absolute developer paths -- likely placeholders.
public void process(RootDoc root) throws IOException {
    // out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
    out.println("<XMI xmi.version=\"1.1\" >");
    out.println(" <XMI.header>");
    out.println(" <XMI.metamodel href=\"/vol/acfiles7/egonw/SourceForge/CDK/cdk/reports/javadoc/test2.xmi\" version=\"1.1\" name=\"UML\" />");
    out.println(" <XMI.model href=\"/vol/acfiles7/egonw/SourceForge/CDK/cdk/reports/javadoc/test2.xmi\" version=\"1\" name=\"/vol/acfiles7/egonw/SourceForge/CDK/cdk/reports/javadoc/test2.xmi\" />");
    out.println(" </XMI.header>");
    out.println(" <XMI.content>");
    out.println(" <docsettings viewid=\"-1\" documentation=\"\" uniqueid=\"1\" />");
    out.println(" <umlobjects>");
    // One UML class element per documented class.
    generateUMLClass(root.specifiedPackages());
    out.println(" </umlobjects>");
    out.println(" <diagrams/>");
    out.println(" <listview>");
    out.println(" <listitem open=\"1\" type=\"800\" id=\"-1\" label=\"Views\" >");
    generateLogicalView(root.specifiedPackages());
    out.println(" <listitem open=\"1\" type=\"802\" id=\"-1\" label=\"Use Case View\" />");
    out.println(" </listitem>");
    out.println(" </listview>");
    out.println(" </XMI.content>");
    out.println("</XMI>");
}
python
def uniq(seq):
    """Return a copy of seq without duplicates.

    Uniqueness is judged by ``str(x)``; the first occurrence is kept and
    original order is preserved.
    """
    seen = set()
    result = []
    for x in seq:
        key = str(x)
        if key not in seen:
            seen.add(key)
            result.append(x)
    return result
java
/**
 * Caches the entity under its (class, id) identifier; entities without an
 * id are silently skipped.
 *
 * @param entity the model instance to cache
 */
static synchronized <T extends Model> void putEntity(T entity) {
    if (entity.id == null) {
        return;
    }
    sCache.put(getEntityIdentifier(entity.getClass(), entity.id), entity);
}
python
def find_obj(self, env, modname, classname, name, type_name, searchmode=0):
    """Find a Chapel object for "name", possibly with module or class/record
    name. Returns a list of (name, object entry) tuples.

    :arg int searchmode: If 1, search more specific names first. Otherwise,
                         search built-ins first and then get more specific.
    """
    # Strip a trailing call-parens suffix, e.g. "foo()" -> "foo".
    if name[-2:] == '()':
        name = name[:-2]

    if not name:
        return []

    objects = self.data['objects']
    matches = []

    newname = None
    if searchmode == 1:
        # Most-specific-first search, constrained to object types valid for
        # the requested role.
        if type_name is None:
            objtypes = list(self.object_types)
        else:
            objtypes = self.objtypes_for_role(type_name)
        if objtypes is not None:
            if modname and classname:
                fullname = modname + '.' + classname + '.' + name
                if (fullname in objects and
                        objects[fullname][1] in objtypes):
                    newname = fullname
            if not newname:
                if (modname and modname + '.' + name in objects and
                        objects[modname + '.' + name][1] in objtypes):
                    newname = modname + '.' + name
                elif name in objects and objects[name][1] in objtypes:
                    newname = name
                else:
                    # "Fuzzy" search mode.
                    searchname = '.' + name
                    matches = [(oname, objects[oname]) for oname in objects
                               if oname.endswith(searchname) and
                               objects[oname][1] in objtypes]
    else:
        # NOTE: Search for exact match, object type is not considered.
        if name in objects:
            newname = name
        elif type_name == 'mod':
            # Only exact matches allowed for modules.
            return []
        elif classname and classname + '.' + name in objects:
            newname = classname + '.' + name
        elif modname and modname + '.' + name in objects:
            newname = modname + '.' + name
        elif (modname and classname and
                modname + '.' + classname + '.' + name in objects):
            newname = modname + '.' + classname + '.' + name
    if newname is not None:
        matches.append((newname, objects[newname]))
    return matches
java
// Lazily resolves and caches the EClass for IfcBuilding from the globally
// registered Ifc2x3tc1 package. NOTE(review): relies on the classifier
// sitting at fixed index 54 in the generated package -- confirm against the
// generated Ifc2x3tc1Package classifier ordering before touching this.
public EClass getIfcBuilding() {
    if (ifcBuildingEClass == null) {
        ifcBuildingEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
            .getEClassifiers().get(54);
    }
    return ifcBuildingEClass;
}
java
// Type-checks a let-expression/statement: each local definition is added to a
// cumulative environment so it is visible to the definitions that follow it,
// then the body is checked in the final environment. Function definitions are
// entered into scope BEFORE being checked (their names are visible in their
// own bodies); plain definitions only enter scope afterwards.
protected PType typeCheckLet(INode node, LinkedList<PDefinition> localDefs, INode body, TypeCheckInfo question) throws AnalysisException {
    // Each local definition is in scope for later local definitions...
    Environment local = question.env;
    for (PDefinition d : localDefs) {
        if (d instanceof AExplicitFunctionDefinition) {
            // Functions' names are in scope in their bodies, whereas
            // simple variable declarations aren't
            local = new FlatCheckedEnvironment(question.assistantFactory, d, local, question.scope); // cumulative
            question.assistantFactory.createPDefinitionAssistant().implicitDefinitions(d, local);
            question.assistantFactory.createPDefinitionAssistant().typeResolve(d, THIS, new TypeCheckInfo(question.assistantFactory, local, question.scope, question.qualifiers));
            if (question.env.isVDMPP()) {
                // In VDM++, bind the definition to the enclosing class and
                // force static access.
                SClassDefinition cdef = question.env.findClassDefinition();
                // question.assistantFactory.createPDefinitionAssistant().setClassDefinition(d, cdef);
                d.setClassDefinition(cdef);
                d.setAccess(question.assistantFactory.createPAccessSpecifierAssistant().getStatic(d, true));
            }
            d.apply(THIS, new TypeCheckInfo(question.assistantFactory, local, question.scope, question.qualifiers));
        } else {
            // Non-function definition: check first, then extend the scope.
            question.assistantFactory.createPDefinitionAssistant().implicitDefinitions(d, local);
            question.assistantFactory.createPDefinitionAssistant().typeResolve(d, THIS, new TypeCheckInfo(question.assistantFactory, local, question.scope, question.qualifiers));
            d.apply(THIS, new TypeCheckInfo(question.assistantFactory, local, question.scope));
            local = new FlatCheckedEnvironment(question.assistantFactory, d, local, question.scope); // cumulative
        }
    }
    // The body sees all local definitions; any type constraint is threaded in.
    PType r = body.apply(THIS, new TypeCheckInfo(question.assistantFactory, local, question.scope, null, question.constraint, null));
    local.unusedCheck(question.env);
    return r;
}
java
/**
 * Parses a single comparison range such as {@code >=1.2.3}. A version with
 * no leading operator is treated as an exact-equality comparison.
 *
 * @return the comparison expression for the next token(s)
 */
private CompositeExpression parseComparisonRange() {
    final Token next = tokens.lookahead();
    final CompositeExpression result;
    switch (next.type) {
        case EQUAL:
            tokens.consume();
            result = eq(parseVersion());
            break;
        case NOT_EQUAL:
            tokens.consume();
            result = neq(parseVersion());
            break;
        case GREATER:
            tokens.consume();
            result = gt(parseVersion());
            break;
        case GREATER_EQUAL:
            tokens.consume();
            result = gte(parseVersion());
            break;
        case LESS:
            tokens.consume();
            result = lt(parseVersion());
            break;
        case LESS_EQUAL:
            tokens.consume();
            result = lte(parseVersion());
            break;
        default:
            // Bare version: implicit equality, operator token not consumed.
            result = eq(parseVersion());
    }
    return result;
}
java
// Recursive lookup in this hash-trie node: the node's own (hash, key) pair is
// checked first, then the remaining hash bits select a child bucket. Returns
// null when the bucket is not populated in the mask.
private V get(K key, int hash) {
    if (hash == this.hash && key.equals(this.key)) {
        return this.value;
    }
    int bucket = bucket(hash);
    int bucketMask = 1 << bucket;
    // `mask` records which children exist; index(bucketMask) maps the bucket
    // bit to the child's position in the compact array. shift(hash) consumes
    // the bits used at this level before recursing.
    return (mask & bucketMask) != 0 ? children[index(bucketMask)].get(key, shift(hash)) : null;
}
python
def user_filter(config, message, fasnick=None, *args, **kw):
    """ A particular user

    Use this rule to include messages that are associated with a
    specific user.
    """
    # A keyword argument takes precedence over the positional default.
    nick = kw.get('fasnick', fasnick)
    if not nick:
        return  # no user given -> implicit None, same as the original
    return nick in fmn.rules.utils.msg2usernames(message, **config)
python
def section(request): """ Determines the current site section from resolved view pattern and adds it to context['section']. Section defaults to the first specified section. """ # If SECTIONS setting is not specified, don't do anything. try: sections = settings.SECTIONS except AttributeError: return {} # Default return is first section. section = sections[0]['name'] try: pattern_name = resolve_to_name(request.path_info) except Resolver404: pattern_name = None if pattern_name: for option in settings.SECTIONS: if pattern_name in option['matching_pattern_names']: section = option['name'] return {'section': section}
python
def transpose(surf, **kwargs):
    """ Transposes the input surface(s) by swapping u and v parametric directions.

    Keyword Arguments:
        * ``inplace``: if False, operation applied to a copy of the object. *Default: False*

    :param surf: input surface(s)
    :type surf: abstract.Surface, multi.SurfaceContainer
    :return: transposed surface(s)
    """
    if surf.pdimension != 2:
        raise GeomdlException("Can only transpose surfaces")

    # Keyword arguments
    inplace = kwargs.get('inplace', False)

    if not inplace:
        geom = copy.deepcopy(surf)
    else:
        geom = surf

    # Iterating also covers surface containers (one pass per surface).
    for g in geom:
        # Get existing data
        degree_u_new = g.degree_v
        degree_v_new = g.degree_u
        kv_u_new = g.knotvector_v
        kv_v_new = g.knotvector_u
        ctrlpts2d_old = g.ctrlpts2d

        # Find new control points: swap the (u, v) indexing of the grid.
        ctrlpts2d_new = []
        for v in range(0, g.ctrlpts_size_v):
            ctrlpts_u = []
            for u in range(0, g.ctrlpts_size_u):
                temp = ctrlpts2d_old[u][v]
                ctrlpts_u.append(temp)
            ctrlpts2d_new.append(ctrlpts_u)

        # Write back swapped degrees, control net and knot vectors.
        g.degree_u = degree_u_new
        g.degree_v = degree_v_new
        g.ctrlpts2d = ctrlpts2d_new
        g.knotvector_u = kv_u_new
        g.knotvector_v = kv_v_new

    return geom
java
private static List<String> tokeniseOnLowercaseToUppercase( String name ) { List<String> splits = new ArrayList<>(); // the following stores data in pairs (start, finish, start, ...) ArrayList<Integer> candidateBoundaries = new ArrayList<>(); // now process the array looking for boundaries for ( Integer index = 0; index < name.length(); index++ ) { if ( index == 0 ) { // the first character is always a boundary candidateBoundaries.add( index ); } else { if ( Character.isUpperCase( name.codePointAt( index ) ) && Character.isLowerCase( name.codePointAt( index - 1 ) ) ) { candidateBoundaries.add( index - 1 ); candidateBoundaries.add( index ); } } // now check whether this is the terminal character. // and record it to give us the final boundary if ( index == name.length() - 1 ) { candidateBoundaries.add( index ); } } if ( candidateBoundaries.size() % 2 == 1 ) { LOGGER.warn( "Odd number of boundaries found for: \"{}\"", name ); } for ( int i = 0; i < candidateBoundaries.size(); i += 2 ) { splits.add( name.substring( candidateBoundaries.get( i ), candidateBoundaries.get( i + 1 ) + 1 ) ); } return splits; }
java
// Copies `len` bytes from b[off..] into block[offset..] and reports how many
// bytes were written (always `len`; the return value exists so callers can
// advance their write cursor uniformly).
private static int put(byte[] block, int offset, byte[] b, int off, int len) {
    System.arraycopy(b, off, block, offset, len);
    return len;
}
java
/**
 * Deletes a signed-URL key from the given backend service by building the
 * corresponding HTTP request and delegating to the request-based overload.
 *
 * @param backendService name of the backend service owning the key
 * @param keyName name of the signed-URL key to delete
 * @return the operation describing the deletion
 */
@BetaApi
public final Operation deleteSignedUrlKeyBackendService(String backendService, String keyName) {
    final DeleteSignedUrlKeyBackendServiceHttpRequest.Builder builder =
        DeleteSignedUrlKeyBackendServiceHttpRequest.newBuilder();
    builder.setBackendService(backendService);
    builder.setKeyName(keyName);
    return deleteSignedUrlKeyBackendService(builder.build());
}
java
/**
 * Loads the entities for the given row ids, delegating each id to the
 * single-row overload and skipping ids that resolve to nothing.
 *
 * @return the non-null entities found, in row-id order (empty when
 *         {@code rowIds} is null or empty)
 */
public List<Object> fromThriftRow(Class<?> clazz, EntityMetadata m, List<String> relationNames, boolean isWrapReq, ConsistencyLevel consistencyLevel, Object... rowIds) throws Exception {
    final List<Object> entities = new ArrayList<Object>();
    if (rowIds == null) {
        return entities;
    }
    for (Object rowKey : rowIds) {
        final Object entity = fromThriftRow(clazz, m, rowKey, relationNames, isWrapReq, consistencyLevel);
        if (entity != null) {
            entities.add(entity);
        }
    }
    return entities;
}
java
// Scans every entry of the non-dated table and delegates the per-entry
// overlap detection to the two-argument overload, accumulating the
// offending rows into a single list.
@Override
public List<Object> collectMilestoningOverlaps() {
    List<Object> duplicateData = FastList.newList();
    for (SemiUniqueEntry obj : nonDatedTable) {
        collectMilestoningOverlaps(duplicateData, obj);
    }
    return duplicateData;
}
java
/**
 * Creates a permanent ("QR_LIMIT_SCENE") QR code ticket for the given scene id.
 *
 * @param access_token API access token
 * @param scene_id scene identifier embedded in the QR code
 * @return the created ticket
 */
public static QrcodeTicket qrcodeCreateFinal(String access_token, int scene_id) {
    final String payload = String.format(
        "{\"action_name\": \"QR_LIMIT_SCENE\", \"action_info\": {\"scene\": {\"scene_id\":%d}}}",
        scene_id);
    return qrcodeCreate(access_token, payload);
}
java
// Builds a PartialMerkleTree from the full leaf set: first derives the tree
// height from the leaf count, then recursively collects the match bits and
// the minimal hash set, and finally packs the bit list into little-endian
// bytes for the wire format.
public static PartialMerkleTree buildFromLeaves(NetworkParameters params, byte[] includeBits, List<Sha256Hash> allLeafHashes) {
    // Calculate height of the tree.
    int height = 0;
    while (getTreeWidth(allLeafHashes.size(), height) > 1)
        height++;
    List<Boolean> bitList = new ArrayList<>();
    List<Sha256Hash> hashes = new ArrayList<>();
    traverseAndBuild(height, 0, allLeafHashes, includeBits, bitList, hashes);
    // One bit per traversal flag, rounded up to whole bytes, LE bit order.
    byte[] bits = new byte[(int)Math.ceil(bitList.size() / 8.0)];
    for (int i = 0; i < bitList.size(); i++)
        if (bitList.get(i))
            Utils.setBitLE(bits, i);
    return new PartialMerkleTree(params, bits, hashes, allLeafHashes.size());
}
java
/**
 * Removes every commerce address belonging to the given country.
 *
 * @param commerceCountryId primary key of the country whose addresses are removed
 */
@Override
public void removeByCommerceCountryId(long commerceCountryId) {
    final List<CommerceAddress> addresses = findByCommerceCountryId(
        commerceCountryId, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    for (CommerceAddress commerceAddress : addresses) {
        remove(commerceAddress);
    }
}
java
// Builder-style convenience: records a constant field value in this
// builder's field-value map and returns `this` for chaining.
public DB2ESImportBuilder addFieldValue(String fieldName, Object value){
    addFieldValue( fieldValues, fieldName, value);
    return this;
}
java
/**
 * Loads a bitmap downscaled so that it fits within the given size limit,
 * deriving the scale factor from the image's metadata.
 *
 * @param source image to load
 * @param limit maximum allowed size used to pick the scale factor
 * @return the decoded, possibly downscaled bitmap
 * @throws ImageLoadException if the image cannot be loaded
 */
private static Bitmap loadBitmapOptimized(ImageSource source, int limit) throws ImageLoadException {
    return loadBitmap(source, getScaleFactor(source.getImageMetadata(), limit));
}
java
// Convenience overload: resolves without a default value (delegates with
// null as the third argument).
protected String resolveNamespace(String namespace, String name) {
    return resolveNamespace(namespace, name, null);
}
python
def remote(self, action):
    """
    Function specific for MMI, not BMI.

    action is one of: "play", "stop", "pause", "rewind", "quit"
    """
    method = "remote"
    # Control messages carry no array payload, only metadata.
    A = None
    metadata = {method: action}
    send_array(self.socket, A, metadata)
    # Wait for the reply; the array part of the response is ignored.
    A, metadata = recv_array(
        self.socket, poll=self.poll, poll_timeout=self.poll_timeout,
        flags=self.zmq_flags)
    return metadata[method]
python
def energy(self, spins, break_aux_symmetry=True):
    """A formula for the exact energy of Theta with spins fixed.

    Args:
        spins (dict): Spin values for a subset of the variables in Theta.
        break_aux_symmetry (bool, optional): Default True. If True, break
            the aux variable symmetry by setting all aux variable to 1
            for one of the feasible configurations. If the energy ranges
            are not symmetric then this can make finding models impossible.

    Returns:
        Formula for the exact energy of Theta with spins fixed.
    """
    subtheta = self.theta.copy()
    subtheta.fix_variables(spins)

    # we need aux variables
    av = next(self._auxvar_counter)
    auxvars = {v: Symbol('aux{}_{}'.format(av, v), BOOL) for v in subtheta.linear}
    if break_aux_symmetry and av == 0:
        # without loss of generality, we can assume that the aux variables are all
        # spin-up for one configuration
        self.assertions.update(set(auxvars.values()))

    trees = self._trees
    if not trees:
        # if there are no variables to eliminate, then the offset of
        # subtheta is the exact value and we can just return it
        assert not subtheta.linear and not subtheta.quadratic
        return subtheta.offset

    # message-pass over the elimination trees and add the constant offset.
    energy = Plus(self.message(trees, {}, subtheta, auxvars), subtheta.offset)

    return energy
python
def position_target_global_int_encode(self, time_boot_ms, coordinate_frame, type_mask, lat_int, lon_int, alt, vx, vy, vz, afx, afy, afz, yaw, yaw_rate):
    '''
    Encode a POSITION_TARGET_GLOBAL_INT message reporting the currently
    commanded vehicle position, velocity and acceleration as specified by
    the autopilot (mirrors SET_POSITION_TARGET_GLOBAL_INT when the vehicle
    is controlled that way).

    time_boot_ms      : Timestamp in ms since system boot; lets the system
                        compensate for transport/processing latency (uint32_t)
    coordinate_frame  : MAV_FRAME_GLOBAL_INT = 5,
                        MAV_FRAME_GLOBAL_RELATIVE_ALT_INT = 6,
                        MAV_FRAME_GLOBAL_TERRAIN_ALT_INT = 11 (uint8_t)
    type_mask         : Bitmask of dimensions the vehicle should IGNORE.
                        bits 1-3: x/y/z, 4-6: vx/vy/vz, 7-9: ax/ay/az,
                        bit 10: afx/afy/afz are force (not acceleration),
                        bit 11: yaw, bit 12: yaw rate (uint16_t)
    lat_int, lon_int  : WGS84 position in 1e7 * degrees-equivalent (int32_t)
    alt               : Altitude in meters (AMSL, or above terrain for
                        GLOBAL_TERRAIN_ALT_INT) (float)
    vx, vy, vz        : NED velocity in m/s (float)
    afx, afy, afz     : NED acceleration in m/s^2, or force in N when
                        bit 10 of type_mask is set (float)
    yaw               : yaw setpoint in rad (float)
    yaw_rate          : yaw rate setpoint in rad/s (float)
    '''
    return MAVLink_position_target_global_int_message(
        time_boot_ms, coordinate_frame, type_mask, lat_int, lon_int, alt,
        vx, vy, vz, afx, afy, afz, yaw, yaw_rate)
java
/**
 * REST endpoint: looks up an account by its username.
 *
 * @param username the username to search for (required query parameter)
 * @return the matching account response
 */
@RequestMapping(value = "/findByUsername", method = RequestMethod.GET)
public @ResponseBody ResponseAccount findByUsername(
        @RequestParam(value = "username", required = true) String username) {
    logger.debug("Received request to get account by username");
    return accountService.getAccountByUsername(username);
}
java
// Mirrors a reflected java.lang.reflect.Method into this builder's model:
// modifiers, type variables, generic return/parameter types, parameter
// names (from the bean MethodDescriptor when available, otherwise
// synthesized as "param$i") and declared exception names.
public MethodInfo addMethod(Method method) {
    Modifiers modifiers = new Modifiers(method.getModifiers());
    // Interface methods are modeled as abstract regardless of the source bits.
    modifiers.setAbstract(this.getModifiers().isInterface());
    TypeVariableDesc[] typeParams = lookupTypeVariables(method);
    TypeDesc ret = TypeDesc.forClass(method.getReturnType(), method.getGenericReturnType());
    MethodDescriptor methodDescriptor = lookupMethodDescriptor(method);
    Class<?>[] paramClasses = method.getParameterTypes();
    Type[] paramTypes = method.getGenericParameterTypes();
    TypeDesc[] params = new TypeDesc[paramClasses.length];
    String[] paramNames = new String[paramClasses.length];
    for (int i = 0; i < paramClasses.length; i++) {
        params[i] = TypeDesc.forClass(paramClasses[i], paramTypes[i]);
        if(methodDescriptor != null) {
            // Real parameter name from the bean descriptor, when present.
            paramNames[i] = methodDescriptor.getParameterDescriptors()[i].getName();
        } else {
            // Fallback synthetic name.
            paramNames[i] = "param$" + i;
        }
    }
    MethodInfo mi = addMethod(modifiers, method.getName(), typeParams, ret, params, paramNames);
    // exception stuff...
    // TODO: generic exceptions
    Class<?>[] exceptions = method.getExceptionTypes();
    for (int i=0; i<exceptions.length; i++) {
        mi.addException(exceptions[i].getName());
    }
    return mi;
}
java
// Builds the dialog widget list: read-only "from" and "to" display fields
// plus an editable 12-row message textarea, all bound to the shared message
// info bean. Widget order here is the on-page order.
@Override
protected void defineWidgets() {
    // initialize the project object to use for the dialog
    initMessageObject();
    setKeyPrefix(KEY_PREFIX);
    addWidget(new CmsWidgetDialogParameter(m_msgInfo, "from", PAGES[0], new CmsDisplayWidget()));
    addWidget(new CmsWidgetDialogParameter(m_msgInfo, "to", PAGES[0], new CmsDisplayWidget()));
    addWidget(new CmsWidgetDialogParameter(m_msgInfo, "msg", PAGES[0], new CmsTextareaWidget(12)));
}
java
/**
 * Fetches the policy dataset resource with the given name from the
 * NetScaler service.
 *
 * @param service service to fetch from
 * @param name name of the dataset resource
 * @return the fetched dataset
 * @throws Exception if the fetch fails
 */
public static policydataset get(nitro_service service, String name) throws Exception{
    final policydataset resource = new policydataset();
    resource.set_name(name);
    return (policydataset) resource.get_resource(service);
}
java
/**
 * Finds a registered command by name. Option-style commands also match the
 * {@code --name} spelling of their name.
 *
 * @param name name (or {@code --name} for option commands) to look up
 * @return the matching command, or {@code null} if none matches
 */
public Command findCommand(String name) {
    for (Command candidate : this.commands) {
        final String candidateName = candidate.getName();
        if (candidateName.equals(name)) {
            return candidate;
        }
        if (isOptionCommand(candidate) && ("--" + candidateName).equals(name)) {
            return candidate;
        }
    }
    return null;
}
python
def logging_on(level=logging.WARNING):
    """Turn logging on.

    Installs a console handler on the root logger the first time it is
    called, then (on every call) sets the requested level on the root
    logger and all of its handlers.
    """
    global _is_logging_on
    if not _is_logging_on:
        # One-time setup: attach a formatted stderr handler to the root logger.
        console = logging.StreamHandler()
        console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :"
                                               " %(name)s] %(message)s",
                                               '%Y-%m-%d %H:%M:%S'))
        console.setLevel(level)
        logging.getLogger('').addHandler(console)
        _is_logging_on = True

    # Apply the level to the root logger and every attached handler.
    log = logging.getLogger('')
    log.setLevel(level)
    for h in log.handlers:
        h.setLevel(level)
python
def get_objective_query_session(self, proxy):
    """Gets the ``OsidSession`` associated with the objective query service.

    :param proxy: a proxy
    :type proxy: ``osid.proxy.Proxy``
    :return: an ``ObjectiveQuerySession``
    :rtype: ``osid.learning.ObjectiveQuerySession``
    :raise: ``NullArgument`` -- ``proxy`` is ``null``
    :raise: ``OperationFailed`` -- unable to complete request
    :raise: ``Unimplemented`` -- ``supports_objective_query()`` is ``false``

    *compliance: optional -- This method must be implemented if
    ``supports_objective_query()`` is ``true``.*
    """
    if not self.supports_objective_query():
        raise Unimplemented()
    # A missing sessions module is an environment failure, not "unsupported".
    try:
        from . import sessions
    except ImportError:
        raise OperationFailed()
    proxy = self._convert_proxy(proxy)
    # Session construction failures surface as OperationFailed per the spec.
    try:
        session = sessions.ObjectiveQuerySession(proxy=proxy, runtime=self._runtime)
    except AttributeError:
        raise OperationFailed()
    return session
python
def makeLinearxFunc(self,mLvl,pLvl,MedShk,xLvl): ''' Constructs the (unconstrained) expenditure function for this period using bilinear interpolation (over permanent income and the medical shock) among an array of linear interpolations over market resources. Parameters ---------- mLvl : np.array Corresponding market resource points for interpolation. pLvl : np.array Corresponding permanent income level points for interpolation. MedShk : np.array Corresponding medical need shocks for interpolation. xLvl : np.array Expenditure points for interpolation, corresponding to those in mLvl, pLvl, and MedShk. Returns ------- xFuncUnc : BilinearInterpOnInterp1D Unconstrained total expenditure function for this period. ''' # Get state dimensions pCount = mLvl.shape[1] MedCount = mLvl.shape[0] # Loop over each permanent income level and medical shock and make a linear xFunc xFunc_by_pLvl_and_MedShk = [] # Initialize the empty list of lists of 1D xFuncs for i in range(pCount): temp_list = [] pLvl_i = pLvl[0,i,0] mLvlMin_i = self.BoroCnstNat(pLvl_i) for j in range(MedCount): m_temp = mLvl[j,i,:] - mLvlMin_i x_temp = xLvl[j,i,:] temp_list.append(LinearInterp(m_temp,x_temp)) xFunc_by_pLvl_and_MedShk.append(deepcopy(temp_list)) # Combine the nested list of linear xFuncs into a single function pLvl_temp = pLvl[0,:,0] MedShk_temp = MedShk[:,0,0] xFuncUncBase = BilinearInterpOnInterp1D(xFunc_by_pLvl_and_MedShk,pLvl_temp,MedShk_temp) xFuncUnc = VariableLowerBoundFunc3D(xFuncUncBase,self.BoroCnstNat) return xFuncUnc
python
def show(self):
    """ Display the information (with a pretty print) about the method

    Prints the method header and notes; when bytecode is present, also
    prints the per-register parameter mapping, the disassembled code and
    the cross-references.
    """
    self.show_info()
    self.show_notes()

    # `is not None` is the idiomatic identity check (was `!= None`).
    if self.code is not None:
        self.each_params_by_register(self.code.get_registers_size(), self.get_descriptor())
        self.code.show(self.CM.get_vmanalysis().get_method(self))
        self.show_xref(self.CM.get_vmanalysis().get_method_analysis(self))
python
def extent(self):
    """Helper for matplotlib imshow: (left, right, bottom, top) pixel edges."""
    y_interval = self.intervals[0]
    x_interval = self.intervals[1]
    half = 0.5
    return (
        x_interval.pix1 - half,
        x_interval.pix2 - half,
        y_interval.pix1 - half,
        y_interval.pix2 - half,
    )
python
def has_free_moves(self):
    """Returns whether a move is possible, when there are no free cells."""
    for x in range(self.COUNT_X):
        for y in range(self.COUNT_Y):
            if self._can_cell_be_merged(x, y):
                return True
    return False
python
def rebin2x2(a):
    """
    Wrapper around rebin that actually rebins 2 by 2.

    :param a: 2-D array with even dimensions
    :raises RuntimeError: if either dimension of ``a`` is odd
    """
    inshape = np.array(a.shape)
    if not (inshape % 2 == np.zeros(2)).all():  # Modulo check to see if size is even
        # Python-3 compatible raise (original used the Python-2 statement form).
        raise RuntimeError("I want even image shapes !")

    # Floor division keeps the target shape integral on Python 3 as well.
    return rebin(a, inshape // 2)
java
/**
 * Maps a position relative to this range onto the absolute coordinate
 * space, honouring the range's direction.
 *
 * @param relativePosition offset within the range, {@code 0 <= p < length()}
 * @return the absolute position
 * @throws IllegalArgumentException if the offset lies outside the range
 */
public int convertPointToAbsolutePosition(int relativePosition) {
    final boolean outOfRange = relativePosition < 0 || relativePosition >= length();
    if (outOfRange) {
        throw new IllegalArgumentException("Relative position outside this range (" + relativePosition + ").");
    }
    return reversed ? upper - 1 - relativePosition : relativePosition + lower;
}
java
// Builds direction-arrow polygons for every line found in the input
// geometries: plain line strings directly, and for polygons both the
// exterior ring and every interior ring. Multi-geometries are unpacked one
// level via getGeometryN.
public static List<Polygon> createSimpleDirectionArrow( Geometry... geometries ) {
    List<Polygon> polygons = new ArrayList<>();
    for( Geometry geometry : geometries ) {
        for( int i = 0; i < geometry.getNumGeometries(); i++ ) {
            Geometry geometryN = geometry.getGeometryN(i);
            if (geometryN instanceof LineString) {
                LineString line = (LineString) geometryN;
                polygons.addAll(makeArrows(line));
            } else if (geometryN instanceof Polygon) {
                Polygon polygonGeom = (Polygon) geometryN;
                // Arrows along the shell...
                LineString exteriorRing = polygonGeom.getExteriorRing();
                polygons.addAll(makeArrows(exteriorRing));
                // ...and along each hole.
                int numInteriorRing = polygonGeom.getNumInteriorRing();
                for( int j = 0; j < numInteriorRing; j++ ) {
                    LineString interiorRingN = polygonGeom.getInteriorRingN(j);
                    polygons.addAll(makeArrows(interiorRingN));
                }
            }
        }
    }
    return polygons;
}
python
def _resolved_objects(pdf, xobject): """Retrieve rotatation info.""" return [pdf.getPage(i).get(xobject) for i in range(pdf.getNumPages())][0]
java
/**
 * Returns the plugin registered for the delimiter, or the value produced by
 * the supplier when no plugin matches.
 *
 * @param delimiter selector for the plugin, must not be {@code null}
 * @param defaultSupplier fallback producer, must not be {@code null}
 * @return the matching plugin or the supplied default
 */
@Override
public T getPluginOrDefaultFor(S delimiter, Supplier<T> defaultSupplier) {
    Assert.notNull(delimiter, "Delimiter must not be null!");
    Assert.notNull(defaultSupplier, "Default supplier must not be null!");
    final java.util.Optional<T> plugin = getPluginFor(delimiter);
    return plugin.orElseGet(defaultSupplier);
}
python
def _replace_placeholder(self, string, custom_id):
    """
    Replace the prefix placeholder
    :class:`~aeneas.globalconstants.PPV_OS_TASK_PREFIX`
    with ``custom_id`` and return the resulting string.

    Returns ``None`` when the input string is ``None``.

    :rtype: string
    """
    if string is None:
        return None
    self.log([u"Replacing '%s' with '%s' in '%s'", gc.PPV_OS_TASK_PREFIX, custom_id, string])
    return string.replace(gc.PPV_OS_TASK_PREFIX, custom_id)
python
def _get_bios_hash_password(self, bios_password): """Get the hashed BIOS password.""" request_headers = {} if bios_password: bios_password_hash = hashlib.sha256((bios_password.encode()). hexdigest().upper()) request_headers['X-HPRESTFULAPI-AuthToken'] = bios_password_hash return request_headers
python
def format_ubuntu_dialog(df):
    """ Print statements paired with replies, formatted for easy review """
    chunks = []
    for _, record in df.iterrows():
        # Last turn of the context is the statement; last turn of the
        # utterance is the reply.
        statement = list(split_turns(record.Context))[-1]
        reply = list(split_turns(record.Utterance))[-1]
        chunks.append('Statement: {}\n'.format(statement))
        chunks.append('Reply: {}\n\n'.format(reply))
    return ''.join(chunks)
python
def Rz_to_coshucosv(R,z,delta=1.,oblate=False):
    """
    NAME:

       Rz_to_coshucosv

    PURPOSE:

       calculate prolate confocal cosh(u) and cos(v) coordinates from R,z, and delta

    INPUT:

       R - radius

       z - height

       delta= focus

       oblate= (False) if True, compute oblate confocal coordinates instead of prolate

    OUTPUT:

       (cosh(u),cos(v))

    HISTORY:

       2012-11-27 - Written - Bovy (IAS)

       2017-10-11 - Added oblate coordinates - Bovy (UofT)

    """
    # d12/d22 are the squared distances to the two foci; for oblate
    # coordinates the foci lie on the R axis, for prolate on the z axis.
    if oblate:
        d12= (R+delta)**2.+z**2.
        d22= (R-delta)**2.+z**2.
    else:
        d12= (z+delta)**2.+R**2.
        d22= (z-delta)**2.+R**2.
    # Standard confocal-ellipsoidal relations: sum/difference of the focal
    # distances give cosh(u) and cos(v).
    coshu= 0.5/delta*(sc.sqrt(d12)+sc.sqrt(d22))
    cosv= 0.5/delta*(sc.sqrt(d12)-sc.sqrt(d22))
    if oblate:
        # cosv is currently really sinv; convert.
        cosv= sc.sqrt(1.-cosv**2.)
    return (coshu,cosv)
java
/**
 * Asks the NFVO to generate a new key with the given name.
 *
 * @param name the name under which the key is generated.
 * @return the generated key as returned by the NFVO.
 * @throws SDKException if the request fails.
 */
@Help(help = "Generate a new Key in the NFVO")
public String generateKey(String name) throws SDKException {
    return (String) requestPost("generate", name);
}
java
/**
 * Returns a copy of {@code value} shifted by a random whole number of days.
 *
 * @param value the base date-time.
 * @param range maximum day offset in either direction; {@code 0} selects
 *        the default of 10, while a negative value leaves {@code value}
 *        unchanged.
 * @return the randomly shifted date-time.
 */
public static ZonedDateTime updateDateTime(ZonedDateTime value, float range) {
    range = range != 0 ? range : 10;
    if (range < 0) return value;
    float days = RandomFloat.nextFloat(-range, range);
    // Fractional days are truncated toward zero by the int cast.
    return value.plusDays((int) days);
}
java
private static String removeStringDelimiters(String key) { String k = key.trim(); if (CmsStringUtil.isNotEmpty(k)) { if (k.charAt(0) == TEXT_DELIMITER) { k = k.substring(1); } if (k.charAt(k.length() - 1) == TEXT_DELIMITER) { k = k.substring(0, k.length() - 1); } } // replace excel protected quotations marks ("") by single quotation marks k = CmsStringUtil.substitute(k, "\"\"", "\""); return k; }
java
/**
 * Sets the ocpDate value, converting it to its RFC 1123 representation.
 *
 * @param ocpDate the date to set, or null to clear it.
 * @return this options object, for chaining.
 */
public PoolPatchOptions withOcpDate(DateTime ocpDate) {
    this.ocpDate = (ocpDate == null) ? null : new DateTimeRfc1123(ocpDate);
    return this;
}
java
/**
 * Releases this object, transitioning it out of the ALIVE state.
 *
 * <p>The payload release is signalled at most once, guarded by the atomic
 * ALIVE -&gt; RELEASING transition; the released marker and the future
 * completion are idempotent and therefore applied unconditionally.
 *
 * @param cause the reason for the release; forwarded to the payload.
 */
@Override
public void release(Throwable cause) {
    if (STATE_UPDATER.compareAndSet(this, State.ALIVE, State.RELEASING)) {
        signalPayloadRelease(cause);
    }
    markReleased();
    releaseFuture.complete(null);
}
java
/**
 * Checks that the passed iterable is neither null nor empty when assertion
 * checks are enabled; otherwise returns it unchanged.
 *
 * @param aValue the iterable to check.
 * @param sName the name of the value, used in the error message.
 * @param <T> iterable type.
 * @return {@code aValue} unchanged.
 */
@CodingStyleguideUnaware
public static <T extends Iterable <?>> T notEmpty (final T aValue, final String sName)
{
    if (isEnabled ())
        return notEmpty (aValue, () -> sName);
    return aValue;
}
java
/**
 * Opens an output stream for the given file.
 *
 * @param file the file to write to.
 * @return a fresh {@link FileOutputStream}; the caller is responsible for
 *         closing it.
 * @throws GenException wrapping the {@link FileNotFoundException} when the
 *         file cannot be opened (message code DOMAGEN9001).
 */
protected OutputStream createFileOutputStream(File file) {
    try {
        return new FileOutputStream(file);
    } catch (FileNotFoundException e) {
        // The exception is passed both as a message argument and as the cause.
        throw new GenException(Message.DOMAGEN9001, e, e);
    }
}
python
def read_pascal_string(fp, encoding='macroman', padding=2):
    """
    Reads a Pascal string: a 1-byte length followed by that many bytes,
    with the whole record padded to a multiple of ``padding`` bytes.

    :param fp: file-like object
    :param encoding: string encoding
    :param padding: padding size
    :return: str
    """
    start_pos = fp.tell()
    # read_length_block doesn't work for a single length byte, so read it
    # directly with read_fmt.
    length = read_fmt('B', fp)[0]
    data = fp.read(length)
    assert len(data) == length, (len(data), length)
    # Consume pad bytes so the record occupies a multiple of `padding`.
    read_padding(fp, fp.tell() - start_pos, padding)
    return data.decode(encoding)
python
def transformer_base_vq_ada_32ex_packed():
    """Set of hyperparameters for lm1b packed following tpu params.

    Starts from transformer_base_v2, enables VQ gating with 32 experts
    (k=1) and a local-MoE feed-forward layer, and tunes the Adafactor
    learning-rate schedule for the packed lm1b dataset.
    """
    hparams = transformer_base_v2()
    expert_utils.update_hparams_for_vq_gating(hparams)
    hparams.moe_num_experts = 32
    hparams.gating_type = "vq"
    # this gives us a batch size of 16 because each seq is len 256
    hparams.batch_size = 5072
    hparams.ffn_layer = "local_moe"
    hparams.shared_embedding_and_softmax_weights = False
    hparams.learning_rate_warmup_steps = 10000
    # one epoch for languagemodel_lm1b32k_packed = 27200 steps w/ bsize 128
    hparams.learning_rate_decay_steps = 27200
    hparams.num_heads = 4
    hparams.num_blocks = 1
    hparams.moe_k = 1
    hparams.num_decoder_layers = 6
    hparams.label_smoothing = 0.
    hparams.layer_prepostprocess_dropout = 0.1
    hparams.layer_postprocess_sequence = "dan"
    hparams.layer_preprocess_sequence = "none"
    hparams.weight_decay = 1e-06
    hparams.attention_dropout = 0.1
    hparams.optimizer = "Adafactor"
    hparams.learning_rate_schedule = "linear_warmup*rsqrt_decay*linear_decay"
    hparams.activation_dtype = "float32"
    hparams.learning_rate = 0.1
    hparams.learning_rate_constant = 1.0
    return hparams
python
def run_transaction(self, command_list, do_commit=True):
    '''This can be used to stage multiple commands and roll back the transaction if an error occurs.

    This is useful if you want to remove multiple records in multiple tables for one entity but do
    not want the deletion to occur if the entity is tied to a table not specified in the list of
    commands. Performing this as a transaction avoids the situation where the records are partially
    removed. If do_commit is false, the entire transaction is cancelled.

    NOTE(review): this function is currently a stub — it validates the commands
    and builds the SQL string but never executes it and returns None.
    '''
    pass
    # I decided against creating this for now.
    # It may be more useful to create a stored procedure like in e.g. _create_protein_deletion_stored_procedure
    # in the DDGadmin project and then use callproc
    for c in command_list:
        # Reject semi-colons and \G as a (partial) SQL-injection guard; this
        # catches *some* injections only, not all.
        if c.find(";") != -1 or c.find("\\G") != -1:
            raise Exception("The SQL command '%s' contains a semi-colon or \\G. This is a potential SQL injection." % c)
    if do_commit:
        sql = "START TRANSACTION;\n%s;\nCOMMIT" % "\n".join(command_list)
    else:
        sql = "START TRANSACTION;\n%s;" % "\n".join(command_list)
    #print(sql)
    return
java
public synchronized void update(double sample) { long now = System.currentTimeMillis(); if (lastMillis == 0) { // first sample average = sample; lastMillis = now; return; } long deltaTime = now - lastMillis; double coeff = Math.exp(-1.0 * ((double)deltaTime / windowMillis)); average = (1.0 - coeff) * sample + coeff * average; lastMillis = now; }
java
/**
 * Determines the filename prefix for metadata backup files.
 *
 * <p>The prefix is the SHA digest of the service's metadata location, so
 * all backups of the same location share a common prefix.
 *
 * @param metadataResource the metadata resource; must expose a filename.
 * @param service the SAML registered service owning the metadata.
 * @return SHA digest of the service's metadata location.
 */
@SneakyThrows
protected String getBackupMetadataFilenamePrefix(final AbstractResource metadataResource, final SamlRegisteredService service) {
    val mdFileName = metadataResource.getFilename();
    if (StringUtils.isBlank(mdFileName)) {
        throw new FileNotFoundException("Unable to determine filename for " + metadataResource);
    }
    val fileName = service.getMetadataLocation();
    val sha = DigestUtils.sha(fileName);
    LOGGER.trace("Metadata backup file for metadata location [{}] is linked to [{}]", fileName, sha);
    return sha;
}
java
/**
 * Copies {@code length} bytes from the current read position into the
 * given stream and advances the position.
 *
 * @param out the destination stream.
 * @param length the number of bytes to transfer.
 * @throws IOException if writing to the stream fails.
 */
public void readBytes(OutputStream out, int length) throws IOException {
    slice.getBytes(position, out, length);
    position += length;
}
java
/**
 * Finds the {@code instance}-th non-removed occurrence of the given header.
 *
 * @param key the header key to look up.
 * @param instance zero-based index among the live instances of this header.
 * @return the matching element, or {@code null} when not present.
 */
private HeaderElement findHeader(HeaderKeys key, int instance) {
    final int ord = key.getOrdinal();
    // Fast path: a key with an in-range ordinal that is absent from storage
    // cannot be present at all.
    if (!storage.containsKey(ord) && ord <= HttpHeaderKeys.ORD_MAX) {
        return null;
    }
    HeaderElement elem = null;
    //If the ordinal created for this key is larger than 1024, the header key
    //storage has been capped. As such, search the internal header storage
    //to see if we have a header with this name already added.
    if (ord > HttpHeaderKeys.ORD_MAX) {
        for (HeaderElement header : storage.values()) {
            if (header.getKey().getName().equals(key.getName())) {
                elem = header;
                break;
            }
        }
    } else {
        elem = storage.get(ord);
    }
    // Walk the instance chain, skipping removed elements, until the
    // requested live instance is reached.
    int i = -1;
    while (null != elem) {
        if (!elem.wasRemoved()) {
            if (++i == instance) {
                return elem;
            }
        }
        elem = elem.nextInstance;
    }
    return null;
}
java
/**
 * Returns the DISPLAY parameter values as a list view that converts the
 * raw string values to {@link Display} instances on access.
 *
 * @return lazily converting list over the DISPLAY parameter.
 */
public List<Display> getDisplays() {
    return new EnumParameterList<Display>(DISPLAY) {
        @Override
        protected Display _asObject(String value) {
            return Display.get(value);
        }
    };
}
python
def _visit_te_shape(self, shape: ShExJ.shapeExpr, visit_center: _VisitorCenter) -> None:
    """ Visit a shape expression that was reached through a triple expression.

    This, in turn, is used to visit additional triple expressions that are referenced by the Shape

    :param shape: Shape reached through triple expression traverse
    :param visit_center: context used in shape visitor
    """
    # Only plain Shapes carry a triple expression worth recursing into;
    # other shapeExpr variants (And/Or/Not/NodeConstraint/...) are skipped.
    if isinstance(shape, ShExJ.Shape) and shape.expression is not None:
        visit_center.f(visit_center.arg_cntxt, shape.expression, self)
python
def finalize(self, **kwargs):
    """
    Finalize executes any subclass-specific axes finalization steps.
    The user calls poof and poof calls finalize.

    Parameters
    ----------
    kwargs: dict
        generic keyword arguments
    """
    # Compose and set the title from the ranking algorithm and feature count.
    title = "{} Ranking of {} Features".format(
        self.ranking_.title(), len(self.features_)
    )
    self.set_title(title)
python
def send_venue(chat_id, latitude, longitude, title, address, foursquare_id=None, reply_to_message_id=None, reply_markup=None, disable_notification=False, **kwargs): """ Use this method to send information about a venue. :param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername) :param latitude: Latitude of location. :param longitude: Longitude of location. :param title: Name of the venue. :param address: Address of the venue. :param foursquare_id: Foursquare identifier of the venue. :param reply_to_message_id: If the message is a reply, ID of the original message :param reply_markup: Additional interface options. A JSON-serialized object for a custom reply keyboard, instructions to hide keyboard or to force a reply from the user. :param disable_notification: Sends the message silently. iOS users will not receive a notification, Android users will receive a notification with no sound. Other apps coming soon. :param kwargs: Args that get passed down to :class:`TelegramBotRPCRequest` :type chat_id: int or str :type latitude: float :type longitude: float :type title: str :type address: str :type foursquare_id: str :type reply_to_message_id: int :type reply_markup: ReplyKeyboardMarkup or ReplyKeyboardHide or ForceReply :type disable_notification: bool :returns: On success, the sent Message is returned. :rtype: TelegramBotRPCRequest """ # required args params = dict( chat_id=chat_id, latitude=latitude, longitude=longitude, title=title, address=address, ) # optional args params.update( _clean_params( foursquare_id=foursquare_id, reply_to_message_id=reply_to_message_id, reply_markup=reply_markup, disable_notification=disable_notification, ) ) return TelegramBotRPCRequest('sendVenue', params=params, on_result=Message.from_result, **kwargs)
python
def get_request(self, url):
    """
    Wraps the URL to retrieve to protect against "creative" interpretation
    of the RFC: http://bugs.python.org/issue8732

    :param url: URL string, or an already-built request object which is
        returned unchanged.
    :return: a urllib2.Request with an identity Accept-encoding header.
    """
    if isinstance(url, string_types):
        # Identity encoding avoids servers returning compressed payloads.
        url = urllib2.Request(url, headers={'Accept-encoding': 'identity'})
    return url
python
def servers(self, server='api.telldus.com', port=http.HTTPS_PORT):
    """Fetch list of servers that can be connected to.

    :param server: hostname of the assignment endpoint.
    :param port: HTTPS port of the assignment endpoint.
    :return: list of (address, port) tuples
    :raises RuntimeError: when the endpoint does not answer with HTTP 200.
    """
    logging.debug("Fetching server list from %s:%d", server, port)
    conn = http.HTTPSConnection(server, port, context=self.ssl_context())
    conn.request('GET', "/server/assign?protocolVersion=2")
    response = conn.getresponse()
    if response.status != http.OK:
        raise RuntimeError("Could not connect to {}:{}: {} {}".format(
            server, port, response.status, response.reason))

    servers = []

    # The response is XML; collect every <server address=... port=...> element.
    def extract_servers(name, attributes):
        if name == "server":
            servers.append((attributes['address'], int(attributes['port'])))

    parser = expat.ParserCreate()
    parser.StartElementHandler = extract_servers
    parser.ParseFile(response)

    logging.debug("Found %d available servers", len(servers))
    return servers
python
def bayesian_hmm(observations, estimated_hmm, nsample=100, reversible=True, stationary=False, p0_prior='mixed', transition_matrix_prior='mixed', store_hidden=False, call_back=None): r""" Bayesian HMM based on sampling the posterior Generic maximum-likelihood estimation of HMMs Parameters ---------- observations : list of numpy arrays representing temporal data `observations[i]` is a 1d numpy array corresponding to the observed trajectory index `i` estimated_hmm : HMM HMM estimated from estimate_hmm or initialize_hmm reversible : bool, optional, default=True If True, a prior that enforces reversible transition matrices (detailed balance) is used; otherwise, a standard non-reversible prior is used. stationary : bool, optional, default=False If True, the stationary distribution of the transition matrix will be used as initial distribution. Only use True if you are confident that the observation trajectories are started from a global equilibrium. If False, the initial distribution will be estimated as usual from the first step of the hidden trajectories. nsample : int, optional, default=100 number of Gibbs sampling steps p0_prior : None, str, float or ndarray(n) Prior for the initial distribution of the HMM. Will only be active if stationary=False (stationary=True means that p0 is identical to the stationary distribution of the transition matrix). Currently implements different versions of the Dirichlet prior that is conjugate to the Dirichlet distribution of p0. p0 is sampled from: .. math: p0 \sim \prod_i (p0)_i^{a_i + n_i - 1} where :math:`n_i` are the number of times a hidden trajectory was in state :math:`i` at time step 0 and :math:`a_i` is the prior count. Following options are available: | 'mixed' (default), :math:`a_i = p_{0,init}`, where :math:`p_{0,init}` is the initial distribution of initial_model. | 'uniform', :math:`a_i = 1` | ndarray(n) or float, the given array will be used as A. | None, :math:`a_i = 0`. 
This option ensures coincidence between sample mean an MLE. Will sooner or later lead to sampling problems, because as soon as zero trajectories are drawn from a given state, the sampler cannot recover and that state will never serve as a starting state subsequently. Only recommended in the large data regime and when the probability to sample zero trajectories from any state is negligible. transition_matrix_prior : str or ndarray(n, n) Prior for the HMM transition matrix. Currently implements Dirichlet priors if reversible=False and reversible transition matrix priors as described in [1]_ if reversible=True. For the nonreversible case the posterior of transition matrix :math:`P` is: .. math: P \sim \prod_{i,j} p_{ij}^{b_{ij} + c_{ij} - 1} where :math:`c_{ij}` are the number of transitions found for hidden trajectories and :math:`b_{ij}` are prior counts. | 'mixed' (default), :math:`b_{ij} = p_{ij,init}`, where :math:`p_{ij,init}` is the transition matrix of initial_model. That means one prior count will be used per row. | 'uniform', :math:`b_{ij} = 1` | ndarray(n, n) or broadcastable, the given array will be used as B. | None, :math:`b_ij = 0`. This option ensures coincidence between sample mean an MLE. Will sooner or later lead to sampling problems, because as soon as a transition :math:`ij` will not occur in a sample, the sampler cannot recover and that transition will never be sampled again. This option is not recommended unless you have a small HMM and a lot of data. store_hidden : bool, optional, default=False store hidden trajectories in sampled HMMs call_back : function, optional, default=None a call back function with no arguments, which if given is being called after each computed sample. This is useful for implementing progress bars. Return ------ hmm : :class:`SampledHMM <bhmm.hmm.generic_sampled_hmm.SampledHMM>` References ---------- .. [1] Trendelkamp-Schroer, B., H. Wu, F. Paul and F. Noe: Estimation and uncertainty of reversible Markov models. J. 
Chem. Phys. 143, 174101 (2015). """ # construct estimator from bhmm.estimators.bayesian_sampling import BayesianHMMSampler as _BHMM sampler = _BHMM(observations, estimated_hmm.nstates, initial_model=estimated_hmm, reversible=reversible, stationary=stationary, transition_matrix_sampling_steps=1000, p0_prior=p0_prior, transition_matrix_prior=transition_matrix_prior, output=estimated_hmm.output_model.model_type) # Sample models. sampled_hmms = sampler.sample(nsamples=nsample, save_hidden_state_trajectory=store_hidden, call_back=call_back) # return model from bhmm.hmm.generic_sampled_hmm import SampledHMM return SampledHMM(estimated_hmm, sampled_hmms)
java
/**
 * Resets this axis to the given node key, also resetting the bound
 * sequence when one is present.
 *
 * @param mNodeKey the node key to reset to.
 */
@Override
public void reset(final long mNodeKey) {
    super.reset(mNodeKey);
    if (mBindingSeq != null) {
        mBindingSeq.reset(mNodeKey);
    }
}
java
public static boolean isMethodNamed(MethodCallExpression methodCall, String methodNamePattern, Integer numArguments) { Expression method = methodCall.getMethod(); // !important: performance enhancement boolean IS_NAME_MATCH = false; if (method instanceof ConstantExpression) { if (((ConstantExpression) method).getValue() instanceof String) { IS_NAME_MATCH = ((String)((ConstantExpression) method).getValue()).matches(methodNamePattern); } } if (IS_NAME_MATCH && numArguments != null) { return AstUtil.getMethodArguments(methodCall).size() == numArguments; } return IS_NAME_MATCH; }
python
def ask(question, default_answer=False, default_answer_str="no"):
    """
    Ask for user input.

    This asks a yes/no question with a preset default.
    You can bypass the user-input and fetch the default answer, if you set

    Args:
        question: The question to ask on stdout.
        default_answer: The default value to return.
        default_answer_str:
            The default answer string that we present to the user.

    Tests:
        >>> os.putenv("TEST", "yes"); ask("Test?", default_answer=True)
        True
        >>> os.putenv("TEST", "yes"); ask("Test?", default_answer=False)
        False
    """
    # NOTE(review): the doctests rely on os.putenv affecting os.getenv in the
    # same process, which CPython does not guarantee — confirm they actually
    # exercise the non-TTY path.
    response = default_answer

    def should_ignore_tty():
        """
        Check, if we want to ignore an opened tty result.
        """
        ret_to_bool = {"yes": True, "no": False, "true": True, "false": False}
        envs = [os.getenv("CI", default="no"), os.getenv("TEST", default="no")]
        vals = [ret_to_bool[val] for val in envs if val in ret_to_bool]
        return any(vals)

    ignore_stdin_istty = should_ignore_tty()
    has_tty = sys.stdin.isatty() and not ignore_stdin_istty
    if has_tty:
        response = query_yes_no(question, default_answer_str)
    else:
        LOG.debug("NoTTY: %s -> %s", question, response)

    return response
python
async def import_wallet(self, indy_config: dict, path: str, access: str = None) -> None:
    """
    Import a VON anchor wallet. Raise BadAccess on bad access credential value.

    :param indy_config: indy wallet configuration to use, with:

        - 'id'
        - 'storage_type' (optional)
        - 'storage_config' (optional)

    :param path: path from which to import wallet file
    :param access: indy access credentials value (default value from wallet manager)
    """
    LOGGER.debug('WalletManager.import_wallet >>> indy_config %s, path: %s', indy_config, path)

    try:
        await wallet.import_wallet(
            json.dumps(indy_config),
            json.dumps({'key': access or self.default_access}),
            json.dumps({'path': path, 'key': access or self.default_access}))
    except IndyError as x_indy:
        # indy-sdk signals a wrong access credential as CommonInvalidStructure.
        if x_indy.error_code == ErrorCode.CommonInvalidStructure:  # indy-sdk raises on bad access
            LOGGER.debug(
                'WalletManager.import_wallet <!< bad access credential value for wallet %s',
                indy_config.get('id', '(no id)'))
            raise BadAccess('Bad access credential value for wallet {}'.format(indy_config.get('id', '(no id)')))
        LOGGER.debug(
            'WalletManager.import_wallet <!< indy error code %s on wallet %s import',
            x_indy.error_code,
            indy_config.get('id', '(no id)'))
        raise

    LOGGER.debug('WalletManager.import_wallet <<<')
python
def diff_objects(self, latest_commit_sha):
    """
    Push all changes to origin, based on objects, not on commits.
    Important: Call this push after every new commit, or we lose commits.

    Walks the commit graph from ``latest_commit_sha``, collecting every
    commit/tree/blob SHA not yet synced, then asks the remote (via
    git-cat-file-check.sh over SSH) which of them it is missing.

    :returns: tuple (missing_objects, summary) where summary maps
        'commits'/'trees'/'files' to the SHAs actually needing sync;
        ([], summary) when nothing new; (None, None) on error.
    """
    base = ['git', '--bare', '--git-dir', self.git_path]

    object_shas = []
    summary = {'commits': [], 'trees': [], 'files': []}

    def read_parents_and_tree_from(commit):
        # Returns (parents, tree) of a commit, or (None, None) when the
        # commit was already synced/read in this or an earlier run.
        if commit in self.synced_object_shas or commit in object_shas:
            # this commit has already been synced or read
            return None, None

        self.synced_object_shas[commit] = True
        summary['commits'].append(commit)
        object_shas.append(commit)

        object_content = subprocess.check_output(base + ['cat-file', '-p', commit]).decode('utf-8').strip()

        parents = []
        tree = ''

        for line in object_content.splitlines():
            if line.startswith('tree '):
                tree = line[len('tree '):]
            if line.startswith('parent '):
                parents.append(line[len('parent '):])

        return parents, tree

    def collect_files_from_tree(tree):
        # Recursively (via `ls-tree -r -t`) records all sub-trees and blobs
        # of `tree` that have not been seen yet.
        if tree in self.synced_object_shas or tree in object_shas:
            # we have exactly this tree already synced or read, meaning all its objects as well
            return

        self.synced_object_shas[tree] = True
        summary['trees'].append(tree)
        object_shas.append(tree)

        object_content = subprocess.check_output(base + ['ls-tree', '-r', '-t', tree]).decode('utf-8').strip()

        for line in object_content.splitlines():
            exploded = line.split(' ')
            if len(exploded) < 3:
                sys.stderr.write("Error: Wrong line format of ls-tree for %s: %s\n" % (tree, line,))
                sys.exit(1)

            # ls-tree line layout: "<mode> <type> <sha>\t<path>"; the sha and
            # path are split at a fixed 40-char offset here.
            object_to_add = str(exploded[2][:40])
            path = str(exploded[2][41:])

            if object_to_add in self.synced_object_shas or object_to_add in object_shas:
                # have it already in the list or already synced
                continue

            object_shas.append(object_to_add)
            self.synced_object_shas[object_to_add] = True
            summary['files'].append([object_to_add, path])

    # Breadth-first walk over the commit graph starting at the latest commit.
    commits_to_check = [latest_commit_sha]
    while len(commits_to_check):
        sha = commits_to_check.pop(0)
        parents, tree = read_parents_and_tree_from(sha)

        if parents:
            for parent in parents:
                if parent not in commits_to_check:
                    commits_to_check.append(parent)

        if tree:
            collect_files_from_tree(tree)

    is_debug2() and self.logger.debug("shas_to_check %d: %s " % (len(object_shas), str(object_shas),))

    if not object_shas:
        return [], summary

    try:
        is_debug2() and self.logger.debug("Do git-cat-file-check.sh")
        # Ask the remote which of the collected objects it does not have yet.
        ssh_stream = create_ssh_stream(read_home_config(), exit_on_failure=False)
        channel = ssh_stream.get_transport().open_session()
        channel.exec_command('git-cat-file-check.sh "%s"' % (self.model_name + '.git',))
        channel.sendall('\n'.join(object_shas))
        channel.shutdown_write()

        def readall(c):
            content = b''
            while True:
                try:
                    chunk = c.recv(1024)
                    if chunk == b'':
                        break
                    content += chunk
                except (KeyboardInterrupt, SystemExit):
                    return
            return content

        missing_objects = readall(channel).decode('utf-8').splitlines()
        channel.close()
        ssh_stream.close()

        # make sure we have in summary only SHAs we actually will sync
        for stype in six.iterkeys(summary):
            ids = summary[stype][:]
            for sha in ids:
                if stype == 'files':
                    if sha[0] not in missing_objects:
                        summary[stype].remove(sha)
                else:
                    if sha not in missing_objects:
                        summary[stype].remove(sha)

        return missing_objects, summary
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception as e:
        self.logger.error("Failed to generate diff_objects: %s" % (str(e),))
        # Roll back the "synced" marks so a retry re-examines these objects.
        for sha in object_shas:
            if sha in self.synced_object_shas:
                del self.synced_object_shas[sha]
        return None, None
java
/**
 * Compares the expected and actual structures using the given
 * element-level predicate.
 *
 * @param compareFunc predicate deciding equality of an expected/actual pair.
 * @return true when the structures match under the predicate.
 */
public boolean isEqualBasedOn(BiPredicate<? super E, ? super A> compareFunc) {
    return equals(expectedDef, expectedRoot, actualDef, actualRoot, compareFunc);
}
java
/**
 * Sets the Content-Disposition header to an attachment with the given
 * filename; a null or empty filename yields an empty attachment name.
 *
 * @param filename the attachment filename, may be null or empty.
 * @return this response, for chaining.
 */
public Response filenameHeader(String filename) {
    // Normalize null/empty to "" so both branches collapse into one header call.
    String name = (filename == null || filename.isEmpty()) ? "" : filename;
    header(HttpConstants.Header.CONTENT_DISPOSITION, "attachment; filename=\"" + name + "\"");
    return this;
}
python
def _override_options(options, **overrides):
    """Override options in-place.

    'ignore' and 'select' values are merged with whatever was already
    passed on the command line; any other option is only overridden when
    it still holds its default (i.e. was not explicitly set).
    """
    for opt, val in overrides.items():
        passed_value = getattr(options, opt, _Default())
        if opt in ('ignore', 'select') and passed_value:
            # Merge the override into the already-passed value.
            value = process_value(opt, passed_value.value)
            value += process_value(opt, val)
            setattr(options, opt, value)
        elif isinstance(passed_value, _Default):
            # Untouched default: take the override.
            setattr(options, opt, process_value(opt, val))
python
def _generate_identifier_name(self, columns, prefix="", max_size=30):
    """
    Generates an identifier from a list of column names obeying a certain
    string length.

    :param columns: iterable of column names.
    :param prefix: identifier prefix (e.g. "idx", "fk").
    :param max_size: maximum length of the generated identifier.
    :return: "<prefix>_<crc-hex digests>" truncated to max_size characters.
    """
    # The local was previously named "hash", shadowing the builtin; also
    # join the per-column digests instead of concatenating in a loop.
    digest = "".join(
        "%x" % binascii.crc32(encode(str(column))) for column in columns
    )
    return (prefix + "_" + digest)[:max_size]
python
def fix_local_scheme(home_dir, symlink=True):
    """
    Platforms that use the "posix_local" install scheme (like Ubuntu with
    Python 2.7) need to be given an additional "local" location, sigh.

    :param home_dir: the virtualenv home directory.
    :param symlink: passed through to copyfile for the mirrored entries.
    """
    try:
        import sysconfig
    except ImportError:
        return
    # sysconfig._get_default_scheme() is a private API that was renamed to
    # get_default_scheme() in Python 3.10; try the public name first.
    get_scheme = getattr(sysconfig, 'get_default_scheme', None) \
        or getattr(sysconfig, '_get_default_scheme', None)
    if get_scheme is None or get_scheme() != 'posix_local':
        return
    local_path = os.path.join(home_dir, 'local')
    if not os.path.exists(local_path):
        os.mkdir(local_path)
    # Mirror every top-level entry of home_dir into home_dir/local.
    for subdir_name in os.listdir(home_dir):
        if subdir_name == 'local':
            continue
        copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)),
                 os.path.join(local_path, subdir_name), symlink)
python
def _readvalue(sock, buf, size):
    """Read specified amount of bytes from the socket.

    Read size bytes, followed by the "\r\n" characters, from the socket,
    and return those bytes and any trailing bytes read after the "\r\n".

    Args:
        sock: Socket object, should be connected.
        buf: String, zero or more characters, returned from an earlier
            call to _readline or _readvalue (pass an empty string on the
            first call).
        size: Integer, number of bytes to read from the socket.

    Returns:
        A tuple of (buf, value) where value is the bytes read from the
        socket (there will be exactly size bytes) and buf is trailing
        characters read after the "\r\n" following the bytes (but not
        including the \r\n).
    """
    chunks = []
    # rlen counts the payload plus its trailing "\r\n" terminator.
    rlen = size + 2
    while rlen - len(buf) > 0:
        if buf:
            rlen -= len(buf)
            chunks.append(buf)
        buf = _recv(sock, RECV_SIZE)
        if not buf:
            raise MemcacheUnexpectedCloseError()

    # Now we need to remove the \r\n from the end. There are two cases we care
    # about: the \r\n is all in the last buffer, or only the \n is in the last
    # buffer, and we need to remove the \r from the penultimate buffer.
    if rlen == 1:
        # replace the last chunk with the same string minus the last character,
        # which is always '\r' in this case.
        chunks[-1] = chunks[-1][:-1]
    else:
        # Just remove the "\r\n" from the latest chunk
        chunks.append(buf[:rlen - 2])

    return buf[rlen:], b''.join(chunks)
java
/**
 * Extracts an integer from the byte array at the given offset according to
 * the requested format.
 *
 * @param value source bytes.
 * @param formatType one of the FORMAT_* constants (signed/unsigned, 8/16/32 bit).
 * @param offset index of the first byte of the value.
 * @return the decoded integer, or {@code null} when the remaining bytes are
 *         too short for the format or the format is unknown.
 */
public static Integer getIntValue(@NonNull byte[] value, @IntFormatType int formatType, @IntRange(from = 0) int offset) {
    // Bail out (with a warning) when the value does not fully fit.
    if ((offset + getTypeLen(formatType)) > value.length) {
        RxBleLog.w(
                "Int formatType (0x%x) is longer than remaining bytes (%d) - returning null",
                formatType,
                value.length - offset
        );
        return null;
    }

    switch (formatType) {
        case FORMAT_UINT8:
            return unsignedByteToInt(value[offset]);

        case FORMAT_UINT16:
            return unsignedBytesToInt(value[offset], value[offset + 1]);

        case FORMAT_UINT32:
            return unsignedBytesToInt(value[offset], value[offset + 1],
                    value[offset + 2], value[offset + 3]);

        case FORMAT_SINT8:
            // Decode as unsigned, then reinterpret the sign bit.
            return unsignedToSigned(unsignedByteToInt(value[offset]), 8);

        case FORMAT_SINT16:
            return unsignedToSigned(unsignedBytesToInt(value[offset], value[offset + 1]), 16);

        case FORMAT_SINT32:
            return unsignedToSigned(unsignedBytesToInt(value[offset], value[offset + 1],
                    value[offset + 2], value[offset + 3]), 32);

        default:
            RxBleLog.w("Passed an invalid integer formatType (0x%x) - returning null", formatType);
            return null;
    }
}
python
def mini(description, **kwargs):
    """Single notification function

    Simple notification function in one line. Has only one required
    parameter and attempts to use reasonable defaults for everything else.
    Forces the Growl notifier factory before delegating to
    ``gntp.notifier.mini``.

    :param string description: Notification message
    """
    kwargs['notifierFactory'] = GrowlNotifier
    gntp.notifier.mini(description, **kwargs)
java
/**
 * Deletes the given display layout from persistence; the
 * {@code @Indexable(DELETE)} annotation removes it from the search index
 * as well.
 *
 * @param cpDisplayLayout the layout to delete.
 * @return the removed layout.
 */
@Indexable(type = IndexableType.DELETE)
@Override
public CPDisplayLayout deleteCPDisplayLayout(CPDisplayLayout cpDisplayLayout) {
    return cpDisplayLayoutPersistence.remove(cpDisplayLayout);
}
python
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
    """
    Set html field with correct iframe.

    When a URL is present, ``self.html`` is rebuilt from the embed URL and
    title before delegating persistence to the parent ``save``.
    """
    if self.url:
        iframe_html = '<iframe src="{}" frameborder="0" title="{}" allowfullscreen></iframe>'
        self.html = iframe_html.format(
            self.get_embed_url(),
            self.title
        )
    return super().save(force_insert, force_update, using, update_fields)
python
def _init_math_handler(self):
    """
    Parse math configuration (``--math-output`` plus related settings) and
    set ``self.math_handler`` accordingly.

    Raises ValueError for handlers given an unsupported option and for
    unknown handler names.
    """
    # math_output is "<name> [option]"; split at most once.
    fields = self.settings.math_output.split(None, 1)
    name = fields[0].lower()
    option = fields[1] if len(fields) > 1 else None
    if name == 'html':
        option = self.settings.math_css or option
        self.math_handler = HTMLMathHandler(css_filename=option)
    elif name == 'mathml':
        if option:
            raise ValueError(('Math handler "%s" does not support ' +
                              'option "%s".') % (name, option))
        self.math_handler = MathMLMathHandler()
    elif name == 'mathjax':
        # The MathJax handler can be configured via different ways:
        #
        # - By passing an additional JS url to "--math-output"
        #   (to stay backwards-compatible with docutils)
        #
        # - By using "--mathjax-opts" (to stay backwards compatible
        #   with the previous html5css3 mathjax postprocessor)
        #
        # - By using "--mathjax-url" and "--mathjax-config" (the
        #   preferred way)
        js_url = option
        config = None
        if self.settings.mathjax_opts:
            parts = self.settings.mathjax_opts.split(',')
            options = dict(part.split('=', 1) for part in parts)
            js_url = options.get('url', js_url)
            config = options.get('config', config)
        # The dedicated settings take precedence over the legacy ones.
        js_url = self.settings.mathjax_url or js_url
        config = self.settings.mathjax_config or config
        self.math_handler = MathJaxMathHandler(js_url=js_url,
                                               config_filename=config)
    elif name == 'latex':
        if option:
            raise ValueError(('Math handler "%s" does not support ' +
                              'option "%s".') % (name, option))
        self.math_handler = LaTeXMathHandler()
    else:
        raise ValueError('Unknown math handler "%s".' % name)
python
def equation_of_time_spencer71(dayofyear):
    """
    Equation of time from Duffie & Beckman and attributed to Spencer
    (1971) and Iqbal (1983).

    The coefficients correspond to the online copy of the `Fourier
    paper`_ [1]_ in the Sundial Mailing list that was posted in 1998 by
    Mac Oglesby from his correspondence with Macquarie University Prof.
    John Pickard who added the following note.

        In the early 1970s, I contacted Dr Spencer about this method because I
        was trying to use a hand calculator for calculating solar positions,
        etc. He was extremely helpful and gave me a reprint of this paper. He
        also pointed out an error in the original: in the series for E, the
        constant was printed as 0.000075 rather than 0.0000075. I have
        corrected the error in this version.

    There appears to be another error in the formula as printed in both
    Duffie & Beckman's [2]_ and Frank Vignola's [3]_ books in which the
    coefficient 0.04089 is printed instead of 0.040849, corresponding to
    the value used in the Bird Clear Sky model implemented by Daryl
    Myers [4]_ and printed in both the Fourier paper from the Sundial
    Mailing List and R. Hulstrom's [5]_ book.

    .. _Fourier paper: http://www.mail-archive.com/[email protected]/msg01050.html

    Parameters
    ----------
    dayofyear : numeric

    Returns
    -------
    equation_of_time : numeric
        Difference in time between solar time and mean solar time in minutes.

    References
    ----------
    .. [1] J. W. Spencer, "Fourier series representation of the position of
       the sun" in Search 2 (5), p. 172 (1971)

    .. [2] J. A. Duffie and W. A. Beckman,  "Solar Engineering of Thermal
       Processes, 3rd Edition" pp. 9-11, J. Wiley and Sons, New York (2006)

    .. [3] Frank Vignola et al., "Solar And Infrared Radiation Measurements",
       p. 13, CRC Press (2012)

    .. [4] Daryl R. Myers, "Solar Radiation: Practical Modeling for Renewable
       Energy Applications", p. 5 CRC Press (2013)

    .. [5] Roland Hulstrom, "Solar Resources" p. 66, MIT Press (1989)

    See Also
    --------
    equation_of_time_pvcdrom
    """
    day_angle = _calculate_simple_day_angle(dayofyear)
    # convert from radians to minutes per day = 24[h/day] * 60[min/h] / 2 / pi
    eot = (1440.0 / 2 / np.pi) * (
        0.0000075 +
        0.001868 * np.cos(day_angle) - 0.032077 * np.sin(day_angle) -
        0.014615 * np.cos(2.0 * day_angle) - 0.040849 * np.sin(2.0 * day_angle)
    )
    return eot
python
def add(self, *matches, **kw): # kw=default=None, boolean=False '''Add an argument; this is optional, and mostly useful for setting up aliases or setting boolean=True Apparently `def add(self, *matches, default=None, boolean=False):` is invalid syntax in Python. Not only is this absolutely ridiculous, but the alternative `def add(self, default=None, boolean=False, *matches):` does not do what you would expect. This syntax works as intended in Python 3. If you provide multiple `matches` that are not dash-prefixed, only the first will be used as a positional argument. Specifying any positional arguments and then using `boolean=True` is just weird, and their will be no special consideration for boolean=True in that case for the position-enabled argument. ''' # python syntax hack default = kw.get('default', None) boolean = kw.get('boolean', False) del kw # do not use kw after this line! It's a hack; it should never have been there in the first place. positional = None names = [] for match in matches: if match.startswith('--'): names.append(match[2:]) elif match.startswith('-'): names.append(match[1:]) elif positional: # positional has already been filled names.append(match) else: # first positional: becomes canonical positional positional = match names.append(match) argument = BooleanArgument(names, default, boolean, positional) self.arguments.append(argument) # chainable return self
java
public static void write(Path self, String text) throws IOException { Writer writer = null; try { writer = new OutputStreamWriter(Files.newOutputStream(self), Charset.defaultCharset()); writer.write(text); writer.flush(); Writer temp = writer; writer = null; temp.close(); } finally { closeWithWarning(writer); } }
python
def construct_schema_validators(schema, context): """ Given a schema object, construct a dictionary of validators needed to validate a response matching the given schema. Special Cases: - $ref: These validators need to be Lazily evaluating so that circular validation dependencies do not result in an infinitely deep validation chain. - properties: These validators are meant to apply to properties of the object being validated rather than the object itself. In this case, we need recurse back into this function to generate a dictionary of validators for the property. """ validators = ValidationDict() if '$ref' in schema: validators.add_validator( '$ref', SchemaReferenceValidator(schema['$ref'], context), ) if 'properties' in schema: for property_, property_schema in schema['properties'].items(): property_validator = generate_object_validator( schema=property_schema, context=context, ) validators.add_property_validator(property_, property_validator) if schema.get('additionalProperties') is False: validators.add_validator( 'additionalProperties', generate_additional_properties_validator(context=context, **schema), ) assert 'context' not in schema for key in schema: if key in validator_mapping: validators.add_validator(key, validator_mapping[key](context=context, **schema)) return validators
java
public void visitApply(JCMethodInvocation tree) { setTypeAnnotationPositions(tree.pos); // Generate code for method. Item m = genExpr(tree.meth, methodType); // Generate code for all arguments, where the expected types are // the parameters of the method's external type (that is, any implicit // outer instance of a super(...) call appears as first parameter). MethodSymbol msym = (MethodSymbol)TreeInfo.symbol(tree.meth); genArgs(tree.args, msym.externalType(types).getParameterTypes()); if (!msym.isDynamic()) { code.statBegin(tree.pos); } result = m.invoke(); }
python
def OneHot(*xs, simplify=True, conj=True): """ Return an expression that means "exactly one input function is true". If *simplify* is ``True``, return a simplified expression. If *conj* is ``True``, return a CNF. Otherwise, return a DNF. """ xs = [Expression.box(x).node for x in xs] terms = list() if conj: for x0, x1 in itertools.combinations(xs, 2): terms.append(exprnode.or_(exprnode.not_(x0), exprnode.not_(x1))) terms.append(exprnode.or_(*xs)) y = exprnode.and_(*terms) else: for i, xi in enumerate(xs): zeros = [exprnode.not_(x) for x in xs[:i] + xs[i+1:]] terms.append(exprnode.and_(xi, *zeros)) y = exprnode.or_(*terms) if simplify: y = y.simplify() return _expr(y)
java
public <I extends Request, O extends Response> LoggingClient<I, O> build(Client<I, O> delegate) { return new LoggingClient<>(delegate, requestLogLevel(), successfulResponseLogLevel(), failedResponseLogLevel(), requestHeadersSanitizer(), requestContentSanitizer(), requestTrailersSanitizer(), responseHeadersSanitizer(), responseContentSanitizer(), responseTrailersSanitizer(), responseCauseSanitizer(), Sampler.create(samplingRate())); }
java
public <E> Predicate<E> toEntityPredicate(EntityAccessor<E> accessor) { return entityPredicate(constraints, schema, accessor, strategy); }