language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
/**
 * Generated ANTLR entry rule for the OpMulti grammar rule (InternalXbase.g).
 * Delegates to rule__OpMulti__Alternatives, firing before/after events for
 * the content assist infrastructure unless the parser is backtracking.
 * Do not edit by hand: this method is produced by the ANTLR code generator.
 */
public final void ruleOpMulti() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalXbase.g:496:2: ( ( ( rule__OpMulti__Alternatives ) ) )
        // InternalXbase.g:497:2: ( ( rule__OpMulti__Alternatives ) )
        {
        // InternalXbase.g:497:2: ( ( rule__OpMulti__Alternatives ) )
        // InternalXbase.g:498:3: ( rule__OpMulti__Alternatives )
        {
        // Only fire events when not speculating (backtracking == 0).
        if ( state.backtracking==0 ) {
            before(grammarAccess.getOpMultiAccess().getAlternatives());
        }
        // InternalXbase.g:499:3: ( rule__OpMulti__Alternatives )
        // InternalXbase.g:499:4: rule__OpMulti__Alternatives
        {
        pushFollow(FOLLOW_2);
        rule__OpMulti__Alternatives();
        state._fsp--;
        if (state.failed) return ;
        }
        if ( state.backtracking==0 ) {
            after(grammarAccess.getOpMultiAccess().getAlternatives());
        }
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        // Always restore the rule stack depth, even on error paths.
        restoreStackSize(stackSize);
    }
    return ;
}
python
def dump(device, destination, level=0, label=None, noerase=None):
    '''
    Dump filesystem device to the media (file, tape etc).

    Required parameters:

    * **device**: XFS device, content of which to be dumped.
    * **destination**: Specifies a dump destination.

    Valid options are:

    * **label**: Label of the dump. Otherwise automatically generated label is used.
    * **level**: Specifies a dump level of 0 to 9.
    * **noerase**: Pre-erase media.

    Other options are not used in order to let ``xfsdump`` use its default
    values, as they are most optimal. See the ``xfsdump(8)`` manpage for
    a more complete description of these options.

    CLI Example:

    .. code-block:: bash

        salt '*' xfs.dump /dev/sda1 /destination/on/the/client
        salt '*' xfs.dump /dev/sda1 /destination/on/the/client label='Company accountancy'
        salt '*' xfs.dump /dev/sda1 /destination/on/the/client noerase=True
    '''
    if not salt.utils.path.which("xfsdump"):
        raise CommandExecutionError("Utility \"xfsdump\" needs to be installed.")

    # Use the caller's label, or generate one from the device and timestamp.
    # (Simplified from the error-prone "label and label or X" idiom.)
    label = label or time.strftime(
        "XFS dump for \"{0}\" of %Y.%m.%d, %H:%M".format(device),
        time.localtime()).replace("'", '"')

    cmd = ["xfsdump"]
    cmd.append("-F")                          # Force
    if not noerase:
        cmd.append("-E")                      # pre-erase
    cmd.append("-L '{0}'".format(label))      # Label
    cmd.append("-l {0}".format(level))        # Dump level
    cmd.append("-f {0}".format(destination))  # Media destination
    cmd.append(device)                        # Device
    cmd = ' '.join(cmd)

    out = __salt__['cmd.run_all'](cmd)
    _verify_run(out, cmd=cmd)

    return _xfsdump_output(out['stdout'])
python
def add_shape(self, autoshape_type_id, left, top, width, height):
    """Return new |Shape| object appended to this shape tree.

    *autoshape_type_id* is a member of :ref:`MsoAutoShapeType` e.g.
    ``MSO_SHAPE.RECTANGLE`` specifying the type of shape to be added. The
    remaining arguments specify the new shape's position and size.
    """
    # Resolve the autoshape type, insert the new <p:sp> element, then make
    # sure the tree's extents reflect the addition before wrapping it.
    shape_type = AutoShapeType(autoshape_type_id)
    new_sp = self._add_sp(shape_type, left, top, width, height)
    self._recalculate_extents()
    return self._shape_factory(new_sp)
python
def _merge_struct(lhs, rhs, type_):
    """Helper for '_merge_by_type'.

    Merges two partial protobuf ``Value``s that each hold a struct encoded
    as a ``list_value``.  The element split across the chunk boundary (last
    of ``lhs``, first of ``rhs``) belongs to the same struct field and may
    itself need merging.
    """
    fields = type_.struct_type.fields
    # Work with plain Python lists of the element Values.
    lhs, rhs = list(lhs.list_value.values), list(rhs.list_value.values)
    # The field split across the boundary is the one at index len(lhs) - 1
    # in the struct's field list.
    candidate_type = fields[len(lhs) - 1].type
    first = rhs.pop(0)
    if first.HasField("null_value") or candidate_type.code in _UNMERGEABLE_TYPES:
        # Nothing to merge: the boundary value is already complete.
        lhs.append(first)
    else:
        # Recombine the two halves of the split field value.
        last = lhs.pop()
        lhs.append(_merge_by_type(last, first, candidate_type))
    return Value(list_value=ListValue(values=lhs + rhs))
java
/**
 * Looks up a method of the given class by name.
 *
 * @param clazz the class whose methods are searched
 * @param name  the method name to match exactly
 * @return the first method with that name, or null when none matches
 */
public static MethodNode findMethod(ClassNode clazz, String name) {
    for (MethodNode candidate : clazz.methods) {
        if (!candidate.name.equals(name)) {
            continue;
        }
        return candidate;
    }
    return null;
}
python
def remove_handler():
    """Remove the user, group and policies for Blockade.

    Best-effort teardown: each AWS entity is removed independently and
    "already gone" errors are logged and ignored, so the function can be
    re-run safely.  Always returns True.
    """
    logger.debug("[#] Removing user, group and permissions for Blockade")
    client = boto3.client("iam", region_name=PRIMARY_REGION)
    iam = boto3.resource('iam')
    # Account id is the 5th field of the current user's ARN.
    account_id = iam.CurrentUser().arn.split(':')[4]

    try:
        logger.debug("[#] Removing %s from %s group" % (BLOCKADE_USER, BLOCKADE_GROUP))
        client.remove_user_from_group(
            GroupName=BLOCKADE_GROUP,
            UserName=BLOCKADE_USER
        )
    except client.exceptions.NoSuchEntityException:
        logger.debug("[!] Blockade user already removed from group")

    for label in BLOCKADE_POLICIES + ['PushToCloud', 'APIGatewayAdmin']:
        logger.debug("[#] Removing %s policy" % (label))
        arn = 'arn:aws:iam::{id}:policy/{policy}'.format(id=account_id, policy=label)
        # The two AWS-managed policies use fixed ARNs instead of account-local ones.
        if label == 'PushToCloud':
            arn = "arn:aws:iam::aws:policy/service-role/AmazonAPIGatewayPushToCloudWatchLogs"
        if label == 'APIGatewayAdmin':
            arn = "arn:aws:iam::aws:policy/AmazonAPIGatewayAdministrator"
        # Detach from group and role, then delete.  Each step is best-effort;
        # excepts are narrowed from bare "except:" so KeyboardInterrupt and
        # SystemExit still propagate.
        try:
            client.detach_group_policy(GroupName=BLOCKADE_GROUP, PolicyArn=arn)
        except Exception:
            pass
        try:
            client.detach_role_policy(RoleName=BLOCKADE_ROLE, PolicyArn=arn)
        except Exception:
            pass
        try:
            client.delete_policy(PolicyArn=arn)
        except Exception as e:
            logger.debug("[!] Failed to delete policy %s: %s" % (arn, e))
    logger.debug("[#] Removed all policies")

    try:
        logger.debug("[#] Deleting %s user" % (BLOCKADE_USER))
        client.delete_user(
            UserName=BLOCKADE_USER
        )
    except client.exceptions.NoSuchEntityException:
        logger.debug("[!] %s user already deleted" % (BLOCKADE_USER))

    try:
        logger.debug("[#] Removing %s group" % (BLOCKADE_GROUP))
        client.delete_group(GroupName=BLOCKADE_GROUP)
    except Exception:
        logger.debug("[!] Group already removed")

    try:
        logger.debug("[#] Removing %s role" % (BLOCKADE_ROLE))
        client.delete_role(RoleName=BLOCKADE_ROLE)
    except Exception:
        logger.debug("[!] Role already removed")
    return True
java
/**
 * Computes only the lower-triangular (subdiagonal, j &lt;= i) part of the
 * matrix product A * B; entries above the diagonal stay 0.  The result must
 * be square, i.e. A.rows() == B.columns().
 *
 * <p>When A is sparse, the product is accumulated by scanning only A's
 * non-zero entries; otherwise a plain triple loop is used.
 */
public static DoubleMatrix2D subdiagonalMultiply(final DoubleMatrix2D A, final DoubleMatrix2D B){
    final int r = A.rows();
    final int rc = A.columns();
    final int c = B.columns();
    if(r != c){
        throw new IllegalArgumentException("The result must be square");
    }

    // Keep the result representation consistent with A's representation.
    boolean useSparsity = A instanceof SparseDoubleMatrix2D;
    DoubleFactory2D F2 = (useSparsity)? DoubleFactory2D.sparse : DoubleFactory2D.dense;
    final DoubleMatrix2D ret = F2.make(r, c);
    if(useSparsity){
        IntIntDoubleFunction myFunct = new IntIntDoubleFunction() {
            @Override
            public double apply(int t, int s, double pts) {
                // Non-zero A[t][s] = pts contributes pts * B[s][j]
                // to every ret[t][j] with j <= t.
                int i = t;
                for (int j = 0; j < i + 1; j++) {
                    ret.setQuick(i, j, ret.getQuick(i, j) + pts * B.getQuick(s, j));
                }
                return pts; // returning pts leaves A unmodified
            }
        };
        //view A row by row
        A.forEachNonZero(myFunct);
    }else{
        // Dense case: standard i/j/k loop restricted to j <= i.
        for (int i = 0; i < r; i++) {
            for (int j = 0; j < i + 1; j++) {
                double s = 0;
                for (int k = 0; k < rc; k++) {
                    s += A.getQuick(i, k) * B.getQuick(k, j);
                }
                ret.setQuick(i, j, s);
            }
        }
    }
    return ret;
}
java
/**
 * Builds a unique output CSV File named "ACMO-&lt;domeInfo&gt;&lt;model&gt;.csv" under
 * outputCsvPath.  The dome info segment is extracted from the meta data CSV
 * at metaFilePath (when provided); a " (n)" suffix is appended until the
 * name does not collide with an existing file.  Any read error simply
 * results in an empty dome info segment.
 */
public static File createCsvFile(String outputCsvPath, String model, String metaFilePath) {
    // Normalize the output directory so the file name can be appended directly.
    if (!outputCsvPath.endsWith(File.separator) && !outputCsvPath.equals("")) {
        outputCsvPath += File.separator;
    }
    String domeInfo = "";
    if (metaFilePath != null) {
        try {
            // Read meta data
            CSVReader reader = new CSVReader(new FileReader(metaFilePath), ',', '"');
            List<String[]> metaData = reader.readAll();
            reader.close();
            // Get Title and first record.
            // NOTE(review): the loop stops at size() - 1, so the final meta
            // row is never inspected — confirm this is intentional.
            String[] title = new String[0];
            ArrayList<String[]> dataArr = new ArrayList();
            for (int i = 0; i < metaData.size() - 1; i++) {
                if ("#".equals(metaData.get(i)[0])) {
                    title = metaData.get(i);
                } else if ("*".equals(metaData.get(i)[0])) {
                    dataArr.add(metaData.get(i));
                }
            }
            // Get the position index of Region, stratum, climate ID, RAP ID and Management ID
            int region = -1;
            int crop = -1;
            int climateId = -1;
            int rapId = -1;
            int mgnId = -1;
            int field = -1;
            int seasonal = -1;
            int count = 0;
            for (int i = 0; i < title.length; i++) {
                if ("REG_ID".equalsIgnoreCase(title[i])) {
                    region = i;
                    count++;
                } else if ("CRID_text".equalsIgnoreCase(title[i])) {
                    crop = i;
                    count++;
                } else if ("CLIM_ID".equalsIgnoreCase(title[i])) {
                    climateId = i;
                    count++;
                } else if ("RAP_ID".equalsIgnoreCase(title[i])) {
                    rapId = i;
                    count++;
                } else if ("MAN_ID".equalsIgnoreCase(title[i])) {
                    mgnId = i;
                    count++;
                } else if ("FIELD_OVERLAY".equalsIgnoreCase(title[i])) {
                    field = i;
                    count++;
                } else if ("SEASONAL_STRATEGY".equalsIgnoreCase(title[i])) {
                    seasonal = i;
                    count++;
                } else {
                    continue;
                }
                // All seven columns located: stop scanning.
                if (count == 7) {
                    break;
                }
            }
            // Get dome info for creating ACMO file name
            if (!dataArr.isEmpty() && region != -1 && (crop != -1 || rapId != -1 || mgnId != -1 || climateId != -1)) {
                String str;
                if ((str = getDomeInfoStr(dataArr.get(0), region)).equals("0-")) {
                    // Region is unset ("0-"): fall back to the seasonal
                    // strategy, then the field overlay, for a dome name.
                    if (!(str = getDomeInfoStr(dataArr.get(0), seasonal)).equals("0-")) {
                    } else if (!(str = getDomeInfoStr(dataArr.get(0), field)).equals("0-")) {
                    } else {
                        str = "";
                    }
                    if (!"".equals(str)) {
                        HashMap<String, String> domeBase = DomeUtil.unpackDomeName(str);
                        str = MapUtil.getValueOr(domeBase, "reg_id", "");
                        if (!str.equals("")) {
                            str += "-";
                        }
                    }
                    // NOTE(review): str computed in this branch is never used
                    // afterwards — domeInfo stays ""; confirm this is intended.
                } else {
                    if (!str.equals("")) {
                        domeInfo = str;
                        domeInfo += getDomeInfoStr(dataArr.get(0), crop).replaceAll(" ", "").toUpperCase();
                        domeInfo += getDomeInfoStr(dataArr.get(0), climateId);
                        // NOTE(review): these two pass the whole dataArr while
                        // the calls above pass dataArr.get(0) — presumably an
                        // overload; verify it is not an accidental mismatch.
                        domeInfo += getDomeInfoStr(dataArr, rapId);
                        domeInfo += getDomeInfoStr(dataArr, mgnId);
                    }
                }
            }
        } catch (IOException ex) {
            // Unreadable meta data: fall back to a name without dome info.
            domeInfo = "";
        }
    }
    // Create CSV file name, appending " (n)" until it is unique.
    outputCsvPath += "ACMO-" + domeInfo + model;
    File f = new File(outputCsvPath + ".csv");
    int count = 1;
    while (f.exists()) {
        f = new File(outputCsvPath + " (" + count + ").csv");
        count++;
    }
    return f;
}
java
/**
 * Dynamic proxy entry point for vendor-specific JDBC API methods.
 * Resolution order: (1) registered special-case handlers, (2) the equivalent
 * method on this wrapper itself, (3) direct reflective invocation on the
 * underlying vendor object, guarded by a safety filter, closed-wrapper
 * detection and optional transaction enlistment.
 */
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    // Special case methods are looked up from a map and invoked.
    // Do not trace because this includes some basic things like hashcode and equals.
    // Important special case methods will take care of tracing themselves.
    WSJdbcProxyMethod methImpl = WSJdbcProxyMethod.getSpecialCase(method);
    if (methImpl != null)
        return methImpl.invoke(this, proxy, method, args);
    // end of special cases

    TraceComponent tc = getTracer();
    if (tc.isEntryEnabled())
        Tr.entry(this, tc, toString(proxy, method), args);

    // Activation should be handled by the wrapper to which we are delegating.

    Object result = null;
    boolean isOperationComplete = false;

    // Invoke on the main wrapper if it has the method.
    // NOTE(review): the local 'config' is never read in this method.
    DSConfig config = dsConfig.get();
    // vendorMethods caches methods known NOT to exist on the main wrapper,
    // so the reflective lookup is only attempted once per method.
    Set<Method> vendorMethods = mcf.vendorMethods;
    if (!vendorMethods.contains(method))
        try {
            // Locate the equivalent method on the main wrapper and invoke it.
            Method wrappedMethod = getClass().getMethod(method.getName(), method.getParameterTypes());
            result = wrappedMethod.invoke(this, args);
            isOperationComplete = true;
        } catch (NoSuchMethodException methX) {
            // No FFDC needed. Method doesn't exist on the main wrapper.
            vendorMethods.add(method);
        } catch (SecurityException secureX) {
            // No FFDC needed. Method isn't accessible on the main wrapper.
            vendorMethods.add(method);
        } catch (IllegalAccessException accessX) {
            // No FFDC needed. Method isn't accessible on the main wrapper.
            vendorMethods.add(method);
        } catch (InvocationTargetException invokeX) {
            // Method exists on the main wrapper, and it failed.
            Throwable x = invokeX.getTargetException();
            FFDCFilter.processException(x, getClass().getName() + ".invoke", "134", this);
            x = x instanceof SQLException ? WSJdbcUtil.mapException(this, (SQLException) x) : x;
            if (tc.isEntryEnabled())
                Tr.exit(this, tc, toString(proxy, method), x);
            throw x;
        }

    // If the main wrapper does not have the method, invoke it directly on the
    // underlying object.
    if (!isOperationComplete) {
        if (tc.isDebugEnabled())
            Tr.debug(this, tc, "Operation not found on the main wrapper.");

        // Filter out unsafe operations.
        if (!WSJdbcProxyMethod.isSafe(method) // method name
            || !WSJdbcProxyMethod.isSafeReturnType(method.getReturnType()) // return type
               && !WSJdbcProxyMethod.overrideUnsafeReturnType(method)) {
            // Unsafe method. Not permitted. Raise a SQL exception if possible.
            // Otherwise, raise a runtime exception.
            Throwable unsafeX = new SQLFeatureNotSupportedException(
                AdapterUtil.getNLSMessage("OPERATION_NOT_PERMITTED", method.getName()));
            Throwable x = null;
            // Only throw the SQLException if the method declares it.
            for (Class<?> xType : method.getExceptionTypes())
                if (xType.equals(SQLException.class) || xType.equals(SQLFeatureNotSupportedException.class)) {
                    x = unsafeX;
                    break;
                }
            if (x == null)
                x = new RuntimeException(unsafeX);
            if (tc.isEntryEnabled())
                Tr.exit(this, tc, toString(proxy, method), unsafeX);
            throw x;
        }

        // Invoke the operation directly on the underlying implementation.
        activate();
        Object implObject = dynamicWrapperToImpl.get(proxy);

        // A missing entry in the dynamic-wrapper-to-impl map indicates the wrapper is either,
        // 1) Closed because the parent wrapper is closed.
        // 2) No longer valid due to handle association with a managed connection that
        //    doesn't implement the same vendor interface.
        if (implObject == null) {
            String message = AdapterUtil.getNLSMessage("OBJECT_CLOSED", "Wrapper");
            Throwable closedX = new SQLRecoverableException(message, "08003", 0);
            // Raise the SQLException if we can.
            boolean raisesSQLX = false;
            for (Class<?> xClass : method.getExceptionTypes())
                raisesSQLX |= xClass.equals(SQLException.class);
            // Otherwise use RuntimeException.
            if (!raisesSQLX)
                closedX = new RuntimeException(closedX);
            if (tc.isEntryEnabled())
                Tr.exit(this, tc, toString(proxy, method), closedX);
            throw closedX;
        }

        WSJdbcConnection connWrapper = null;

        // If configured to do so, attempt to enlist in a transaction or start a new one.
        if (this instanceof WSJdbcObject) {
            connWrapper = (WSJdbcConnection) ((WSJdbcObject) this).getConnectionWrapper();
            if (connWrapper != null
                && (dsConfig.get().beginTranForVendorAPIs
                    || WSJdbcProxyMethod.alwaysBeginTranMethods.contains(method.getName())))
                connWrapper.beginTransactionIfNecessary();
        }

        try {
            // Allow the data source to override in order to account for
            // dynamic configuration changes.
            result = invokeOperation(implObject, method, args);

            // If a client information setting was changed, update the managed connection
            // so that we know to reset the client information before pooling the connection.
            if (connWrapper != null && WSJdbcProxyMethod.isClientInfoSetter(method.getName()))
                connWrapper.managedConn.clientInfoExplicitlySet = true;
        } catch (InvocationTargetException invokeX) {
            Throwable x = invokeX.getTargetException();
            FFDCFilter.processException(x, getClass().getName() + ".invoke", "171", this);
            x = x instanceof SQLException ? WSJdbcUtil.mapException(this, (SQLException) x) : x;
            if (tc.isEntryEnabled())
                Tr.exit(this, tc, toString(proxy, method), x);
            throw x;
        }
    } // reflection error from invocation attempt on main wrapper

    if (tc.isEntryEnabled())
        Tr.exit(this, tc, toString(proxy, method), result);
    return result;
}
java
/**
 * Linearly scans {@code incr} and returns the index of the first element
 * that is greater than or equal to {@code v}, or -1 when no such element
 * exists (including for an empty array).
 */
static int indexOfSerial(final double[] incr, final double v) {
    for (int i = 0; i < incr.length; ++i) {
        if (incr[i] >= v) {
            return i;
        }
    }
    return -1;
}
java
/**
 * Returns one complete satisfying assignment of the given BDD,
 * obtained from the kernel's fullSatOne computation.
 */
public Assignment fullModel(final BDD bdd) {
    return createAssignment(this.kernel.fullSatOne(bdd.index()));
}
python
def _get_filehandler_with_formatter(logname, formatter=None): """ Return a logging FileHandler for given logname using a given logging formatter :param logname: Name of the file where logs will be stored, ".log" extension will be added :param formatter: An instance of logging.Formatter or None if the default should be used :return: """ handler = logging.FileHandler(logname) if formatter is not None: handler.setFormatter(formatter) return handler
java
/**
 * Replaces the given node with a new node that keeps the original id but
 * carries the supplied function and label, delegating to the node-for-node
 * replace overload.
 */
@Override
public KamNode replaceNode(KamNode kamNode, FunctionEnum function, String label) {
    final KamNode replacement =
            new KamNodeImpl(this, kamNode.getId(), function, label);
    return replaceNode(kamNode, replacement);
}
java
/**
 * Creates (or reuses) the OpenStack block-storage volumes declared in the
 * target properties for this scoped instance.  Non-volatile volumes
 * (delete-on-termination = false) are looked up by name and reused when
 * found; all others are created.  Resulting volume ids are recorded in
 * {@code storageIdToVolumeId}.
 *
 * @return true when all volumes were resolved
 * @throws TargetException if a volume could neither be found nor created
 */
public boolean createVolumes() throws TargetException {

    String zoneName = OpenstackIaasHandler.findZoneName( this.novaApi, this.targetProperties );
    for( String storageId : OpenstackIaasHandler.findStorageIds( this.targetProperties )) {

        // Prepare the parameters
        String name = OpenstackIaasHandler.findStorageProperty( this.targetProperties, storageId, VOLUME_NAME_PREFIX );
        name = OpenstackIaasHandler.expandVolumeName( name, this.applicationName, this.scopedInstance.getName());
        VolumeApi volumeApi = this.novaApi.getVolumeExtensionForZone( zoneName ).get();

        // If the volume should not volatile (i.e. not deleted on termination), we try to reuse it, if it exists.
        String deleteOnT = OpenstackIaasHandler.findStorageProperty( this.targetProperties, storageId, VOLUME_DELETE_OT_PREFIX );
        boolean deleteOnTermination = Boolean.parseBoolean( deleteOnT );
        String volumeId = null;
        if( ! deleteOnTermination ) {
            // Reuse is based on an exact name match among existing volumes.
            for( Volume vol : volumeApi.list()) {
                if( name.equals( vol.getName())) {
                    this.logger.info( "Volume " + name + " (" + vol.getId() + ") already exists and is not volatile. It will be reused." );
                    volumeId = vol.getId();
                    break;
                }
            }
        }

        // Otherwise, create it.
        if( volumeId == null ) {
            String volumeType = OpenstackIaasHandler.findStorageProperty( this.targetProperties, storageId, VOLUME_TYPE_PREFIX );
            String volumeSize = OpenstackIaasHandler.findStorageProperty( this.targetProperties, storageId, VOLUME_SIZE_GB_PREFIX );
            int vsize = Integer.parseInt( volumeSize );
            CreateVolumeOptions options = CreateVolumeOptions.Builder.name( name );
            if( ! Utils.isEmptyOrWhitespaces( volumeType ))
                options = options.volumeType( volumeType );

            if( deleteOnTermination ) {
                // Volatile volumes are tagged so termination logic can find them.
                Map<String,String> metadata = new HashMap<>( 1 );
                metadata.put( DELETE_ON_TERMINATION, "true" );
                options = options.metadata( metadata );
            }

            Volume volume = volumeApi.create( vsize, options );
            volumeId = volume.getId();
        }

        if( Utils.isEmptyOrWhitespaces( volumeId ))
            throw new TargetException( "Volume " + name + " was not found and could not be created." );

        this.logger.info( "Volume " + volumeId + " was successfully created." );
        this.storageIdToVolumeId.put( storageId, volumeId );
    }

    return true;
}
java
/**
 * Appends a split to this composite and adds its length to the running
 * total size.
 *
 * @param s the split to append
 * @throws IOException if the split array was never initialized or is full
 */
public void add(InputSplit s) throws IOException {
    if (splits == null) {
        throw new IOException("Uninitialized InputSplit");
    }
    if (fill == splits.length) {
        throw new IOException("Too many splits");
    }
    splits[fill] = s;
    fill++;
    totsize += s.getLength();
}
python
def make_pvc(
    name,
    storage_class,
    access_modes,
    storage,
    labels=None,
    annotations=None,
):
    """
    Make a k8s pvc specification for running a user notebook.

    Parameters
    ----------
    name:
        Name of persistent volume claim. Must be unique within the namespace the
        object is going to be created in. Must be a valid DNS label.
    storage_class:
        String of the name of the k8s Storage Class to use. When set (truthy),
        it is recorded both as the legacy beta annotation and as
        ``spec.storage_class_name``.
    access_modes:
        A list of specifying what access mode the pod should have towards the pvc
    storage:
        The amount of storage needed for the pvc
    labels:
        Optional dict of labels to copy onto the PVC's metadata.
    annotations:
        Optional dict of annotations to copy onto the PVC's metadata.
    """
    pvc = V1PersistentVolumeClaim()
    pvc.kind = "PersistentVolumeClaim"
    pvc.api_version = "v1"
    pvc.metadata = V1ObjectMeta()
    pvc.metadata.name = name
    # Copy the caller's dicts so later mutation here can't leak back out.
    pvc.metadata.annotations = (annotations or {}).copy()
    pvc.metadata.labels = (labels or {}).copy()
    pvc.spec = V1PersistentVolumeClaimSpec()
    pvc.spec.access_modes = access_modes
    pvc.spec.resources = V1ResourceRequirements()
    pvc.spec.resources.requests = {"storage": storage}

    if storage_class:
        # Keep the legacy beta annotation for older clusters alongside the
        # first-class spec field.
        pvc.metadata.annotations.update({"volume.beta.kubernetes.io/storage-class": storage_class})
        pvc.spec.storage_class_name = storage_class

    return pvc
python
def to_pb(self):
    """Converts the column family to a protobuf.

    :rtype: :class:`.table_v2_pb2.ColumnFamily`
    :returns: The converted current object.
    """
    # Include the GC rule only when one is configured.
    gc_rule = self.gc_rule
    if gc_rule is not None:
        return table_v2_pb2.ColumnFamily(gc_rule=gc_rule.to_pb())
    return table_v2_pb2.ColumnFamily()
python
def _update_labels(self, label, crop_box, height, width):
    """Convert labels according to crop box"""
    # Crop box expressed in coordinates relative to the original image size.
    xmin = float(crop_box[0]) / width
    ymin = float(crop_box[1]) / height
    w = float(crop_box[2]) / width
    h = float(crop_box[3]) / height
    out = label.copy()
    # Columns assumed: 0 = class id, (1,2) = xmin,ymin, (3,4) = xmax,ymax in
    # relative coordinates — TODO confirm against the caller.
    # Shift corners into the crop's frame, then rescale to the crop size.
    out[:, (1, 3)] -= xmin
    out[:, (2, 4)] -= ymin
    out[:, (1, 3)] /= w
    out[:, (2, 4)] /= h
    # Clip boxes to the crop area [0, 1].
    out[:, 1:5] = np.maximum(0, out[:, 1:5])
    out[:, 1:5] = np.minimum(1, out[:, 1:5])
    # Fraction of each original box area still visible inside the crop.
    coverage = self._calculate_areas(out[:, 1:]) * w * h / self._calculate_areas(label[:, 1:])
    # Keep boxes that remain non-degenerate and sufficiently covered.
    valid = np.logical_and(out[:, 3] > out[:, 1], out[:, 4] > out[:, 2])
    valid = np.logical_and(valid, coverage > self.min_eject_coverage)
    valid = np.where(valid)[0]
    # Signal "no usable boxes" with None so the caller can retry the crop.
    if valid.size < 1:
        return None
    out = out[valid, :]
    return out
java
/**
 * Builds the MIME type for the dataset addressed by the given URI, derived
 * from the dataset's class name.
 *
 * @param uri content URI identifying the dataset
 * @return "vnd.android.cursor.dir/&lt;class name&gt;" in lower case
 */
@Override
public String getType(final Uri uri) {
    final String name = getDatasetOrThrowException(uri).getClass().getName();
    final String type = String.format("vnd.android.cursor.dir/%s", name);
    // Use a locale-independent lower-casing: with Locale.getDefault() the
    // result would vary by device locale (e.g. Turkish dotless-i mapping),
    // breaking MIME type comparisons.
    return type.toLowerCase(Locale.ROOT);
}
python
def wait_processed(self, timeout):
    """Wait until time outs, or this event is processed.

    Event must be waitable for this operation to have described semantics,
    for non-waitable returns true immediately.

    in timeout of type int
        Maximum time to wait for event processing, in ms;
        0 = no wait, -1 = indefinite wait.

    return result of type bool
        If this event was processed before timeout.
    """
    # Reject anything that is not an integer type before crossing the API.
    if not isinstance(timeout, baseinteger):
        raise TypeError("timeout can only be an instance of type baseinteger")
    return self._call("waitProcessed", in_p=[timeout])
python
def to_bytes(self):
    '''
    Create bytes from properties
    '''
    # Check that the properties are consistent before serializing.
    self.sanitize()

    # Message type (4 bits) followed by the reserved padding bits.
    bitstream = BitArray('uint:4=%d' % self.message_type) + self._reserved1

    # Record count (8 bits), then the nonce bytes.
    bitstream += BitArray('uint:8=%d' % len(self.records))
    bitstream += BitArray(bytes=self.nonce)

    # Append each record's own bitstream representation.
    for rec in self.records:
        bitstream += rec.to_bitstream()

    return bitstream.bytes
java
/**
 * Creates an HsqlException with the given message, SQLSTATE and error code.
 *
 * @param message  the error description
 * @param sqlState the SQLSTATE value
 * @param i        the vendor error code
 * @return a new HsqlException carrying the given details
 */
public static HsqlException error(String message, String sqlState, int i) {
    return new HsqlException(message, sqlState, i);
}
java
/**
 * Changes this encoder's action for unmappable-character errors.
 *
 * @param newAction the new action; must not be null
 * @return this encoder, for call chaining
 * @throws IllegalArgumentException if newAction is null
 */
public final CharsetEncoder onUnmappableCharacter(CodingErrorAction newAction) {
    if (newAction == null)
        throw new IllegalArgumentException("Null action");
    // Record the action first, then notify subclasses via the template hook.
    unmappableCharacterAction = newAction;
    implOnUnmappableCharacter(newAction);
    return this;
}
python
def get_brightness(self, refresh=False):
    """Get dimmer brightness.

    Refresh data from Vera if refresh is True, otherwise use local cache.
    Refresh is only needed if you're not using subscriptions.
    Converts the Vera level property for dimmable lights from a percentage
    to the 0 - 255 scale used by HA.
    """
    if refresh:
        self.refresh()
    percent = self.level
    # Anything at or below zero maps to "off".
    if percent <= 0:
        return 0
    return int(round(percent * 2.55))
python
def expect(obj, strict=None, times=None, atleast=None, atmost=None, between=None):
    """Stub a function call, and set up an expected call count.

    Usage::

        # Given `dog` is an instance of a `Dog`
        expect(dog, times=1).bark('Wuff').thenReturn('Miau')
        dog.bark('Wuff')
        dog.bark('Wuff')  # will throw at call time: too many invocations

        # maybe if you need to ensure that `dog.bark()` was called at all
        verifyNoUnwantedInteractions()

    .. note:: You must :func:`unstub` after stubbing, or use `with`
        statement.

    See :func:`when`, :func:`when2`, :func:`verifyNoUnwantedInteractions`

    """
    # Strict stubbing is the default unless explicitly disabled.
    effective_strict = True if strict is None else strict

    mocked = _get_mock(obj, strict=effective_strict)
    check = _get_wanted_verification(
        times=times, atleast=atleast, atmost=atmost, between=between)

    class Expect(object):
        def __getattr__(self, method_name):
            # Every attribute access produces a stubbed invocation carrying
            # the configured call-count verification.
            return invocation.StubbedInvocation(
                mocked, method_name, verification=check,
                strict=effective_strict)

    return Expect()
java
/**
 * Marshalls the given DeleteAppLaunchConfigurationRequest into the
 * supplied protocol marshaller.
 *
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(DeleteAppLaunchConfigurationRequest deleteAppLaunchConfigurationRequest, ProtocolMarshaller protocolMarshaller) {

    if (deleteAppLaunchConfigurationRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }

    try {
        final DeleteAppLaunchConfigurationRequest request = deleteAppLaunchConfigurationRequest;
        protocolMarshaller.marshall(request.getAppId(), APPID_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Rebuilds CmsSearchReplaceSettings from a serialized app "state" string.
 * The state is URL-decoded first; "%2F" sequences in individual values are
 * restored to "/".  Returns null when the state carries no search type.
 */
static CmsSearchReplaceSettings getSettingsFromState(String state) {

    try {
        state = new URLCodec().decode(state);
    } catch (DecoderException e1) {
        // decoding failure is ignored; the raw state string is parsed as-is
    }
    CmsSearchReplaceSettings settings = null;
    String typeString = A_CmsWorkplaceApp.getParamFromState(state, SEARCH_TYPE);
    if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(typeString)) {
        SearchType type = SearchType.valueOf(typeString);
        settings = new CmsSearchReplaceSettings();
        settings.setType(type);
        settings.setIgnoreSubSites(
            Boolean.parseBoolean(A_CmsWorkplaceApp.getParamFromState(state, IGNORE_SUBSITES)));
        // "%2F" is the escaped "/" used by the state encoding.
        settings.setSiteRoot(A_CmsWorkplaceApp.getParamFromState(state, SITE_ROOT).replace("%2F", "/"));
        settings.setPaths(
            Collections.singletonList(A_CmsWorkplaceApp.getParamFromState(state, FOLDER).replace("%2F", "/")));
        String resType = A_CmsWorkplaceApp.getParamFromState(state, RESOURCE_TYPE);
        if (resType != null) {
            settings.setTypes(resType);
        }
        String project = A_CmsWorkplaceApp.getParamFromState(state, PROJECT);
        if (project != null) {
            settings.setProject(project);
        }
        settings.setSearchpattern(A_CmsWorkplaceApp.getParamFromState(state, SEARCH_PATTERN).replace("%2F", "/"));
        // Extra parameters only relevant for content-value searches.
        if (type.isContentValuesOnly()) {
            settings.setOnlyContentValues(true);
            String locale = A_CmsWorkplaceApp.getParamFromState(state, LOCALE);
            if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(locale)) {
                settings.setLocale(locale);
            }
            settings.setXpath(A_CmsWorkplaceApp.getParamFromState(state, XPATH).replace("%2F", "/"));
        }
        // Extra parameters only relevant for Solr searches.
        if (type.isSolrSearch()) {
            settings.setQuery(A_CmsWorkplaceApp.getParamFromState(state, QUERY).replace("%2F", "/"));
            settings.setSource(A_CmsWorkplaceApp.getParamFromState(state, INDEX));
        }
        if (type.isPropertySearch()) {
            try {
                settings.setProperty(
                    A_CmsUI.getCmsObject().readPropertyDefinition(
                        A_CmsWorkplaceApp.getParamFromState(state, PROPERTY)));
            } catch (CmsException e) {
                // lookup failed: the property filter is simply left unset
            }
        }
    }
    return settings;
}
python
def remove_rules(self, doc):
    """Remove grammar rules from _self.rules_, _self.rule2func_,
    and _self.rule2name_.

    ``doc`` holds one or more rules of the form ``lhs ::= rhs ...``;
    blank lines and ``#`` comment lines are ignored.  Rules that are not
    currently registered are skipped.  Profiling bookkeeping is kept in
    sync when profiling is enabled.
    """
    # Remove blank lines and comment lines, e.g. lines starting with "#".
    # (Raw string for the regex avoids the invalid "\s" escape warning.)
    doc = os.linesep.join(
        [s for s in doc.splitlines() if s and not re.match(r"^\s*#", s)])

    rules = doc.split()

    # Locate the start of each rule: the token immediately before '::='.
    index = []
    for i in range(len(rules)):
        if rules[i] == '::=':
            index.append(i - 1)
    index.append(len(rules))

    for i in range(len(index) - 1):
        lhs = rules[index[i]]
        rhs = rules[index[i] + 2:index[i + 1]]
        rule = (lhs, tuple(rhs))

        # Unknown non-terminal: nothing (more) to remove.
        if lhs not in self.rules:
            return
        if rule in self.rules[lhs]:
            self.rules[lhs].remove(rule)
            del self.rule2func[rule]
            del self.rule2name[rule]
            self.ruleschanged = True

            # If we are profiling, remove this rule from that as well.
            if self.profile_info is not None and len(rule[1]) > 0:
                rule_str = self.reduce_string(rule)
                if rule_str and rule_str in self.profile_info:
                    del self.profile_info[rule_str]
    return
java
/**
 * Heuristically checks whether the given string looks like a URL address:
 * an optional Latin protocol before "://", an ASCII host with well-placed
 * dots (optionally validated against the domain-suffix lexicon), and an
 * optional path made of ASCII letters, digits and "./-_" separators.
 */
public final static boolean isUrlAddress(String str, ADictionary dic) {
    int prIndex = str.indexOf("://");
    // The protocol part (before "://"), when present, must be Latin text.
    if ( prIndex > -1 && ! StringUtil.isLatin(str, 0, prIndex) ) {
        return false;
    }

    // Host part: from just after "://" (or position 0) up to the first
    // '/' or '?' (or the end of the string).
    int sIdx = prIndex > -1 ? prIndex + 3 : 0;
    int slIndex = str.indexOf('/', sIdx), sgIndex = str.indexOf('?', sIdx);
    int eIdx = slIndex > -1 ? slIndex : (sgIndex > -1 ? sgIndex : str.length());
    int lpIndex = -1;   // position of the most recent '.' seen
    for ( int i = sIdx; i < eIdx; i++ ) {
        char chr = str.charAt(i);
        if ( chr == '.' ) {
            if ( lpIndex == -1 ) {
                lpIndex = i;
                continue;
            }
            // Reject consecutive dots and a dot at the very end of the host.
            if ( (i - lpIndex) == 1 || i == (eIdx - 1)) {
                return false;
            }
            lpIndex = i;
        } else if ( ! StringUtil.isEnLetter(chr) && ! StringUtil.isEnNumeric(chr) ) {
            // Host may only contain ASCII letters, digits and dots.
            return false;
        }
    }

    // Validate the domain suffix (text after the last dot) against the lexicon.
    // NOTE(review): when the host contains no dot, lpIndex is still -1 and the
    // substring covers the whole host — confirm that is the intended behavior.
    if ( dic != null
            && ! dic.match(ILexicon.DOMAIN_SUFFIX, str.substring(lpIndex+1, eIdx)) ) {
        return false;
    }

    //check the path part
    if ( slIndex > -1 ) {
        sIdx = slIndex;
        eIdx = sgIndex > -1 ? sgIndex : str.length();
        lpIndex = -1;   // reused: position of the most recent separator
        for ( int i = sIdx; i < eIdx; i++ ) {
            char chr = str.charAt(i);
            if ( "./-_".indexOf(chr) > -1 ) {
                if ( lpIndex == -1 ) {
                    lpIndex = i;
                    continue;
                }
                // Reject consecutive separators and a path ending with '.'.
                if ( i - lpIndex == 1 || (chr == '.' && i == (eIdx - 1)) ) {
                    return false;
                }
                lpIndex = i;
            } else if ( ! StringUtil.isEnLetter(chr) && ! StringUtil.isEnNumeric(chr) ) {
                return false;
            }
        }
    }

    return true;
}
java
/**
 * Returns one of the indexes involved with this property, or null when
 * there are none.
 *
 * @deprecated prefer the multi-index aware lookup on the owner.
 */
@Deprecated
public OIndex<?> getIndex() {
    final Set<OIndex<?>> involved = owner.getInvolvedIndexes(name);
    if (involved == null || involved.isEmpty())
        return null;
    // Any involved index will do; take the first one reported.
    return involved.iterator().next();
}
java
/**
 * Fetches the CPDefinitionVirtualSetting with the given primary key: first
 * from the entity cache, then from the database.  A database miss is cached
 * as a null-model sentinel so repeated lookups for a missing row skip the
 * database entirely.  Returns null when the entity does not exist.
 */
@Override
public CPDefinitionVirtualSetting fetchByPrimaryKey(Serializable primaryKey) {
    // First-level lookup: the entity cache (may hold the nullModel sentinel).
    Serializable serializable = entityCache.getResult(CPDefinitionVirtualSettingModelImpl.ENTITY_CACHE_ENABLED,
            CPDefinitionVirtualSettingImpl.class, primaryKey);

    if (serializable == nullModel) {
        // Cached miss: the entity is known not to exist.
        return null;
    }

    CPDefinitionVirtualSetting cpDefinitionVirtualSetting = (CPDefinitionVirtualSetting)serializable;

    if (cpDefinitionVirtualSetting == null) {
        Session session = null;

        try {
            session = openSession();

            cpDefinitionVirtualSetting = (CPDefinitionVirtualSetting)session.get(CPDefinitionVirtualSettingImpl.class,
                    primaryKey);

            if (cpDefinitionVirtualSetting != null) {
                cacheResult(cpDefinitionVirtualSetting);
            }
            else {
                // Remember the miss so the next lookup is served from cache.
                entityCache.putResult(CPDefinitionVirtualSettingModelImpl.ENTITY_CACHE_ENABLED,
                    CPDefinitionVirtualSettingImpl.class, primaryKey, nullModel);
            }
        }
        catch (Exception e) {
            // Drop any partial cache state and rethrow as a system exception.
            entityCache.removeResult(CPDefinitionVirtualSettingModelImpl.ENTITY_CACHE_ENABLED,
                CPDefinitionVirtualSettingImpl.class, primaryKey);

            throw processException(e);
        }
        finally {
            closeSession(session);
        }
    }

    return cpDefinitionVirtualSetting;
}
python
def save(self, *args, **kwargs):
    """Capitalize the first letter of the block name."""
    letter = getattr(self, "block_letter", None)
    if letter and len(letter) >= 1:
        # Upper-case only the first character; keep the rest untouched.
        first, rest = letter[:1], letter[1:]
        self.block_letter = first.upper() + rest
    super(EighthBlock, self).save(*args, **kwargs)
java
/**
 * Shows the given transfer table in the UI: copies all of its SQL statement
 * strings into the text fields and its option flags into the checkboxes,
 * remembering it as the currently displayed table.  A null table only
 * updates the current-table reference.
 */
private void displayTable(TransferTable t) {

    tCurrent = t;

    if (t == null) {
        return;
    }

    // SQL statement text fields.
    tSourceTable.setText(t.Stmts.sSourceTable);
    tDestTable.setText(t.Stmts.sDestTable);
    tDestDrop.setText(t.Stmts.sDestDrop);
    tDestCreateIndex.setText(t.Stmts.sDestCreateIndex);
    tDestDropIndex.setText(t.Stmts.sDestDropIndex);
    tDestCreate.setText(t.Stmts.sDestCreate);
    tDestDelete.setText(t.Stmts.sDestDelete);
    tSourceSelect.setText(t.Stmts.sSourceSelect);
    tDestInsert.setText(t.Stmts.sDestInsert);
    tDestAlter.setText(t.Stmts.sDestAlter);

    // Option checkboxes.
    cTransfer.setState(t.Stmts.bTransfer);
    cDrop.setState(t.Stmts.bDrop);
    cCreate.setState(t.Stmts.bCreate);
    cDropIndex.setState(t.Stmts.bDropIndex);
    cCreateIndex.setState(t.Stmts.bCreateIndex);
    cDelete.setState(t.Stmts.bDelete);
    cInsert.setState(t.Stmts.bInsert);
    cAlter.setState(t.Stmts.bAlter);
    cFKForced.setState(t.Stmts.bFKForced);
    cIdxForced.setState(t.Stmts.bIdxForced);
}
java
/**
 * REST: PUT /telephony/{billingAccount}/line/{serviceName}/phone
 * Updates the phone attached to the given line.
 */
public void billingAccount_line_serviceName_phone_PUT(String billingAccount, String serviceName, OvhPhone body) throws IOException {
    String qPath = "/telephony/{billingAccount}/line/{serviceName}/phone";
    final String url = path(qPath, billingAccount, serviceName).toString();
    exec(qPath, "PUT", url, body);
}
python
def set_bn_eval(m:nn.Module)->None:
    "Set bn layers in eval mode for all recursive children of `m`."
    for child in m.children():
        # Only frozen batchnorm layers (params not trainable) are switched
        # to eval mode; recursion covers the whole module tree.
        if isinstance(child, bn_types) and not next(child.parameters()).requires_grad:
            child.eval()
        set_bn_eval(child)
java
/**
 * Adapts an Aether artifact by delegating to the coordinate-based
 * factory method.
 */
protected final Artifact createArtifact(org.eclipse.aether.artifact.Artifact artifact) {
    final String groupId = artifact.getGroupId();
    final String artifactId = artifact.getArtifactId();
    final String version = artifact.getVersion();
    return createArtifact(groupId, artifactId, version);
}
python
def DbPutDeviceAlias(self, argin):
    """ Define alias for a given device name

    :param argin: Str[0] = device name
    Str[1] = alias name
    :type: tango.DevVarStringArray
    :return:
    :rtype: tango.DevVoid """
    self._log.debug("In DbPutDeviceAlias()")
    if len(argin) < 2:
        self.warn_stream("DataBase::DbPutDeviceAlias(): insufficient number of arguments ")
        # NOTE(review): th_exc presumably raises a DevFailed and aborts here;
        # if it returned normally, execution would continue with too few
        # arguments and fail on the indexing below — confirm.
        th_exc(DB_IncorrectArguments,
               "insufficient number of arguments to put device alias",
               "DataBase::DbPutDeviceAlias()")
    device_name = argin[0]
    device_alias = argin[1]
    self.db.put_device_alias(device_name, device_alias)
python
def _execute_xmpp(connected_callback): """Connects to the XMPP server and executes custom code :param connected_callback: function to execute after connecting :return: return value of the callback """ from indico_chat.plugin import ChatPlugin check_config() jid = ChatPlugin.settings.get('bot_jid') password = ChatPlugin.settings.get('bot_password') if '@' not in jid: jid = '{}@{}'.format(jid, ChatPlugin.settings.get('server')) result = [None, None] # result, exception app = current_app._get_current_object() # callback runs in another thread def _session_start(event): try: with app.app_context(): result[0] = connected_callback(xmpp) except Exception as e: result[1] = e if isinstance(e, IqError): current_plugin.logger.exception('XMPP callback failed: %s', e.condition) else: current_plugin.logger.exception('XMPP callback failed') finally: xmpp.disconnect(wait=0) xmpp = ClientXMPP(jid, password) xmpp.register_plugin('xep_0045') xmpp.register_plugin('xep_0004') xmpp.register_plugin('xep_0030') xmpp.add_event_handler('session_start', _session_start) try: xmpp.connect() except Exception: current_plugin.logger.exception('XMPP connection failed') xmpp.disconnect() raise try: xmpp.process(threaded=False) finally: xmpp.disconnect(wait=0) if result[1] is not None: raise result[1] return result[0]
python
def required(self, fn): """Request decorator. Forces authentication.""" @functools.wraps(fn) def decorated(*args, **kwargs): if (not self._check_auth() # Don't try to force authentication if the request is part # of the authentication process - otherwise we end up in a # loop. and request.blueprint != self.blueprint.name): return redirect(url_for("%s.login" % self.blueprint.name, next=request.url)) return fn(*args, **kwargs) return decorated
python
def get_repositories(self, project=None, include_links=None, include_all_urls=None, include_hidden=None): """GetRepositories. [Preview API] Retrieve git repositories. :param str project: Project ID or project name :param bool include_links: [optional] True to include reference links. The default value is false. :param bool include_all_urls: [optional] True to include all remote URLs. The default value is false. :param bool include_hidden: [optional] True to include hidden repositories. The default value is false. :rtype: [GitRepository] """ route_values = {} if project is not None: route_values['project'] = self._serialize.url('project', project, 'str') query_parameters = {} if include_links is not None: query_parameters['includeLinks'] = self._serialize.query('include_links', include_links, 'bool') if include_all_urls is not None: query_parameters['includeAllUrls'] = self._serialize.query('include_all_urls', include_all_urls, 'bool') if include_hidden is not None: query_parameters['includeHidden'] = self._serialize.query('include_hidden', include_hidden, 'bool') response = self._send(http_method='GET', location_id='225f7195-f9c7-4d14-ab28-a83f7ff77e1f', version='5.1-preview.1', route_values=route_values, query_parameters=query_parameters) return self._deserialize('[GitRepository]', self._unwrap_collection(response))
java
public DescribeAccountAuditConfigurationResult withAuditCheckConfigurations(java.util.Map<String, AuditCheckConfiguration> auditCheckConfigurations) { setAuditCheckConfigurations(auditCheckConfigurations); return this; }
java
public @Nonnull JsonBuilder put(String key, String s) { object.put(key, primitive(s)); return this; }
java
public PagedList<SecretItem> listSecretVersions(final String vaultBaseUrl, final String secretName) { return getSecretVersions(vaultBaseUrl, secretName); }
java
public void ready(VirtualConnection readyVc) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "ready", readyVc); AcceptListener acceptListener = (AcceptListener) config.getPropertyBag() .get(JFapChannelFactory.ACCEPT_LISTENER); // begin F189351 if (acceptListener == null) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "null accept listener - obtaining one from factory"); acceptListener = ServerConnectionManagerImpl.getAcceptListenerFactory().manufactureAcceptListener(); } // begin F196678.10 Map properties = config.getPropertyBag(); int heartbeatInterval = determineHeartbeatInterval(properties); int heartbeatTimeout = determineHeartbeatTimeout(properties); // end F196678.10 // At this point here we leave explicit channel framework land and trot into the land of // abstraction. As such, create the channel framework implementation classes directly and // pass them into the JFap channel common code. CFWNetworkConnection conn = new CFWNetworkConnection(readyVc); InboundConnection connection = null; try { connection = new InboundConnection(new CFWNetworkConnectionContext(conn, this), conn, acceptListener, heartbeatInterval, heartbeatTimeout); } catch (FrameworkException fe) { //At this point the underlying TCP/IP connection has gone away. //We can't throw an Exception so there is little we can do here other than FFDC. //The channel framework should close everything down gracefully. 
FFDCFilter.processException(fe, "com.ibm.ws.sib.jfapchannel.impl.JFapInboundConnLink", JFapChannelConstants.JFAPINBOUNDCONNLINK_READY_03); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Exception occurred creating InboundConnection"); if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(this, tc, fe); } if (connection != null) { ConversationImpl conversation = new ConversationImpl(Connection.FIRST_CONVERSATION_ID, true, connection, null); // begin F176003 // Try asking the user for a conversation receive listener to use ConversationReceiveListener rl = null; try { rl = acceptListener.acceptConnection(conversation); } catch (Throwable t) { FFDCFilter.processException (t, "com.ibm.ws.sib.jfapchannel.impl.JFapInboundConnLink", JFapChannelConstants.JFAPINBOUNDCONNLINK_READY_01); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Exception occurred in acceptConnection callback"); if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(this, tc, t); } // If the user supplied a null receive listener, or an exception occurred in // the callback, supply a receive listener of our own as a temporary mesure // until we get the chance to close the conversation. if (rl == null) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(tc, "Null receive listener, closing conversation"); conversation.setDefaultReceiveListener(new CloseConversationReceiveListener()); try { conversation = connection.startNewConversation(conversation); conversation.close(); // D196125 } catch (SIException e) { FFDCFilter.processException (e, "com.ibm.ws.sib.jfapchannel.impl.JFapInboundConnLink", JFapChannelConstants.JFAPINBOUNDCONNLINK_READY_02); if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) SibTr.exception(this, tc, e); // Something went badly wrong closing the convesation - take down the // connection. 
connection.invalidate(true, e, "SIConnectionLostException thrown during conversation close"); // D224570 } } else { conversation.setDefaultReceiveListener(rl); try { connection.startNewConversation(conversation); } catch (SIResourceException e) { // No FFDC code needed // (it will have been FFDC'ed at source) try { conversation.close(); } catch (SIConnectionLostException e2) { // No FFDC code needed // (it is already broken - we don't care) } connection.invalidate(true, e, "Resource exception thrown when starting new conversation"); } } } // end F176003 if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "ready"); }
python
def run_crate( version, env=None, setting=None, crate_root=None, keep_data=False, disable_java_magic=False, ): """Launch a crate instance. Supported version specifications: - Concrete version like "0.55.0" or with wildcard: "1.1.x" - An alias (one of [latest-nightly, latest-stable, latest-testing]) - A URI pointing to a CrateDB tarball (in .tar.gz format) - A URI pointing to a checked out CrateDB repo directory run-crate supports command chaining. To launch a CrateDB node and another sub-command use: cr8 run-crate <ver> -- timeit -s "select 1" --hosts '{node.http_url}' To launch any (blocking) subprocess, prefix the name with '@': cr8 run-crate <version> -- @http '{node.http_url}' If run-crate is invoked using command chaining it will exit once all chained commands finished. The postgres host and port are available as {node.addresses.psql.host} and {node.addresses.psql.port} """ with create_node( version, env, setting, crate_root, keep_data, java_magic=not disable_java_magic, ) as n: try: n.start() n.process.wait() except KeyboardInterrupt: print('Stopping Crate...')
java
public static RDistinct DISTINCT() { RDistinct ret = RFactory.DISTINCT(); ASTNode an = APIObjectAccess.getAstNode(ret); an.setClauseType(ClauseType.RETURN); return ret; }
java
private void loadAllPanels(final JPanel mainPanel) { SwingUtilities.invokeLater(() -> { int numPanels = wizardComponents.length; for (int i1 = 0; i1 < numPanels; i1++) { mainPanel.remove(wizardComponents[i1]); } for (int i2 = 0; i2 < numPanels; i2++) { mainPanel.add(wizardComponents[i2]); } validate(); repaint(); }); }
java
private List<String> getFedoraTables() { try { InputStream in = getClass().getClassLoader() .getResourceAsStream(DBSPEC_LOCATION); List<TableSpec> specs = TableSpec.getTableSpecs(in); ArrayList<String> names = new ArrayList<String>(); for (TableSpec spec: specs) { names.add(spec.getName().toUpperCase()); } return names; } catch (Exception e) { e.printStackTrace(); throw new RuntimeException("Unexpected error reading dbspec file", e); } }
python
def step_impl12(context, runs): """Check called apps / files. :param runs: expected number of records. :param context: test context. """ executor_ = context.fuzz_executor stats = executor_.stats count = stats.cumulated_counts() assert count == runs, "VERIFY: Number of recorded runs."
java
public final void forInit() throws RecognitionException { int forInit_StartIndex = input.index(); ParserRuleReturnScope variableModifier12 =null; ParserRuleReturnScope type13 =null; try { if ( state.backtracking>0 && alreadyParsedRule(input, 101) ) { return; } // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1056:5: ( ( variableModifier )* type variableDeclarators | expressionList ) int alt137=2; switch ( input.LA(1) ) { case 58: case 83: { alt137=1; } break; case Identifier: { switch ( input.LA(2) ) { case 53: { int LA137_26 = input.LA(3); if ( (synpred198_Java()) ) { alt137=1; } else if ( (true) ) { alt137=2; } } break; case 47: { int LA137_27 = input.LA(3); if ( (synpred198_Java()) ) { alt137=1; } else if ( (true) ) { alt137=2; } } break; case 59: { int LA137_28 = input.LA(3); if ( (synpred198_Java()) ) { alt137=1; } else if ( (true) ) { alt137=2; } } break; case Identifier: { alt137=1; } break; case EOF: case 30: case 31: case 32: case 33: case 34: case 35: case 36: case 38: case 39: case 40: case 41: case 42: case 43: case 44: case 45: case 46: case 49: case 50: case 52: case 54: case 55: case 56: case 57: case 61: case 62: case 91: case 122: case 123: case 124: { alt137=2; } break; default: if (state.backtracking>0) {state.failed=true; return;} int nvaeMark = input.mark(); try { input.consume(); NoViableAltException nvae = new NoViableAltException("", 137, 3, input); throw nvae; } finally { input.rewind(nvaeMark); } } } break; case 65: case 67: case 71: case 77: case 85: case 92: case 94: case 105: { switch ( input.LA(2) ) { case 59: { int LA137_55 = input.LA(3); if ( (synpred198_Java()) ) { alt137=1; } else if ( (true) ) { alt137=2; } } break; case Identifier: { alt137=1; } break; case 47: { alt137=2; } break; default: if (state.backtracking>0) {state.failed=true; return;} int nvaeMark = input.mark(); try { input.consume(); NoViableAltException nvae = new NoViableAltException("", 137, 4, input); throw nvae; } finally { 
input.rewind(nvaeMark); } } } break; case CharacterLiteral: case DecimalLiteral: case FloatingPointLiteral: case HexLiteral: case OctalLiteral: case StringLiteral: case 29: case 36: case 40: case 41: case 44: case 45: case 53: case 70: case 79: case 80: case 82: case 97: case 98: case 108: case 111: case 115: case 118: case 126: { alt137=2; } break; default: if (state.backtracking>0) {state.failed=true; return;} NoViableAltException nvae = new NoViableAltException("", 137, 0, input); throw nvae; } switch (alt137) { case 1 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1056:7: ( variableModifier )* type variableDeclarators { // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1056:7: ( variableModifier )* loop136: while (true) { int alt136=2; int LA136_0 = input.LA(1); if ( (LA136_0==58||LA136_0==83) ) { alt136=1; } switch (alt136) { case 1 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1056:9: variableModifier { pushFollow(FOLLOW_variableModifier_in_forInit4596); variableModifier12=variableModifier(); state._fsp--; if (state.failed) return; if ( state.backtracking==0 ) { VarDecl_stack.peek().descr.updateStart( ((CommonToken)(variableModifier12!=null?(variableModifier12.start):null)).getStartIndex() - 1 ); VarDecl_stack.peek().descr.addModifier( (variableModifier12!=null?input.toString(variableModifier12.start,variableModifier12.stop):null) ); } } break; default : break loop136; } } pushFollow(FOLLOW_type_in_forInit4631); type13=type(); state._fsp--; if (state.failed) return; if ( state.backtracking==0 ) { VarDecl_stack.peek().descr.updateStart( ((CommonToken)(type13!=null?(type13.start):null)).getStartIndex() - 1 ); VarDecl_stack.peek().descr.setType( (type13!=null?input.toString(type13.start,type13.stop):null) ); VarDecl_stack.peek().descr.setEnd( ((CommonToken)(type13!=null?(type13.stop):null)).getStopIndex() ); } pushFollow(FOLLOW_variableDeclarators_in_forInit4651); variableDeclarators(); 
state._fsp--; if (state.failed) return; } break; case 2 : // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1069:7: expressionList { pushFollow(FOLLOW_expressionList_in_forInit4659); expressionList(); state._fsp--; if (state.failed) return; } break; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving if ( state.backtracking>0 ) { memoize(input, 101, forInit_StartIndex); } } }
java
public java.util.List<String> getAccountAliases() { if (accountAliases == null) { accountAliases = new com.amazonaws.internal.SdkInternalList<String>(); } return accountAliases; }
python
def extract(self, item): """Creates an readability document and returns an ArticleCandidate containing article title and text. :param item: A NewscrawlerItem to parse. :return: ArticleCandidate containing the recovered article data. """ doc = Document(deepcopy(item['spider_response'].body)) description = doc.summary() article_candidate = ArticleCandidate() article_candidate.extractor = self._name article_candidate.title = doc.short_title() article_candidate.description = description article_candidate.text = self._text(item) article_candidate.topimage = self._topimage(item) article_candidate.author = self._author(item) article_candidate.publish_date = self._publish_date(item) article_candidate.language = self._language(item) return article_candidate
java
public void convertAndWriteXML(String xml, String path) { ClassProject classProject = (ClassProject)this.getMainRecord(); Record recProgramControl = this.getRecord(ProgramControl.PROGRAM_CONTROL_FILE); Model model = (Model)this.unmarshalMessage(xml); String name = model.getName(); CodeType codeType = CodeType.THICK; String thickDir = classProject.getFileName("", null, codeType, true, false); if (name == null) codeType = CodeType.THICK; else if (name.endsWith("model")) codeType = CodeType.INTERFACE; else if (name.endsWith("thin")) codeType = CodeType.THIN; else if (name.endsWith("res")) codeType = CodeType.RESOURCE_PROPERTIES; xml = replaceParams(xml, codeType); model = (Model)this.unmarshalMessage(xml); String strSourcePath = recProgramControl.getField(ProgramControl.CLASS_DIRECTORY).toString(); String destDir = classProject.getFileName("", null, codeType, true, false); if (codeType != CodeType.THICK) if (destDir.equals(thickDir)) return; // Can't be the same as thick dir if (destDir.endsWith(strSourcePath)) destDir = destDir.substring(0, destDir.length() - strSourcePath.length()); if (name != null) if (name.endsWith("reactor")) { if (destDir.endsWith("/")) destDir = destDir.substring(0, destDir.length() - 1); if (destDir.lastIndexOf('/') != -1) destDir = destDir.substring(0, destDir.lastIndexOf('/')); } path = "pom.xml"; path = org.jbundle.base.model.Utility.addToPath(destDir, path); File fileOut = new File(path); File fileDir = fileOut.getParentFile(); if (!fileDir.exists()) fileDir.mkdirs(); //xml = this.marshalObject(model); // Later Reader in = new StringReader(xml); org.jbundle.base.model.Utility.transferURLStream(null, path, in, null); }
java
private void writeObject(java.io.ObjectOutputStream out) throws IOException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.entry(tc, "writeObject : " + ivPuId + ", " + ivJ2eeName); out.writeObject(ivPuId); out.writeObject(ivJ2eeName); // d510184 if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.exit(tc, "writeObject"); }
java
public IpSet withIpAddresses(String... ipAddresses) { if (this.ipAddresses == null) { setIpAddresses(new java.util.ArrayList<String>(ipAddresses.length)); } for (String ele : ipAddresses) { this.ipAddresses.add(ele); } return this; }
python
def decorate_with_validators(func, func_signature=None, # type: Signature **validators # type: Validator ): """ Utility method to decorate the provided function with the provided input and output Validator objects. Since this method takes Validator objects as argument, it is for advanced users. :param func: the function to decorate. It might already be decorated, this method will check it and wont create another wrapper in this case, simply adding the validators to the existing wrapper :param func_signature: the function's signature if it is already known (internal calls), otherwise it will be found again by inspection :param validators: a dictionary of arg_name (or _out_) => Validator or list of Validator :return: """ # first turn the dictionary values into lists only for arg_name, validator in validators.items(): if not isinstance(validator, list): validators[arg_name] = [validator] if hasattr(func, '__wrapped__') and hasattr(func.__wrapped__, '__validators__'): # ---- This function is already wrapped by our validation wrapper ---- # Update the dictionary of validators with the new validator(s) for arg_name, validator in validators.items(): for v in validator: if arg_name in func.__wrapped__.__validators__: func.__wrapped__.__validators__[arg_name].append(v) else: func.__wrapped__.__validators__[arg_name] = [v] # return the function, no need to wrap it further (it is already wrapped) return func else: # ---- This function is not yet wrapped by our validator. 
---- # Store the dictionary of validators as an attribute of the function if hasattr(func, '__validators__'): raise ValueError('Function ' + str(func) + ' already has a defined __validators__ attribute, valid8 ' 'decorators can not be applied on it') else: try: func.__validators__ = validators except AttributeError: raise ValueError("Error - Could not add validators list to function '%s'" % func) # either reuse or recompute function signature func_signature = func_signature or signature(func) # create a wrapper with the same signature @wraps(func) def validating_wrapper(*args, **kwargs): """ This is the wrapper that will be called everytime the function is called """ # (a) Perform input validation by applying `_assert_input_is_valid` on all received arguments apply_on_each_func_args_sig(func, args, kwargs, func_signature, func_to_apply=_assert_input_is_valid, func_to_apply_params_dict=func.__validators__) # (b) execute the function as usual res = func(*args, **kwargs) # (c) validate output if needed if _OUT_KEY in func.__validators__: for validator in func.__validators__[_OUT_KEY]: validator.assert_valid(res) return res return validating_wrapper
python
def _get_qgen_var(self, generators, base_mva): """ Returns the generator reactive power variable set. """ Qg = array([g.q / base_mva for g in generators]) Qmin = array([g.q_min / base_mva for g in generators]) Qmax = array([g.q_max / base_mva for g in generators]) return Variable("Qg", len(generators), Qg, Qmin, Qmax)
python
def integrate(ii, r0, c0, r1, c1): """ Use an integral image to integrate over a given window. Parameters ---------- ii : ndarray Integral image. r0, c0 : int Top-left corner of block to be summed. r1, c1 : int Bottom-right corner of block to be summed. Returns ------- S : int Integral (sum) over the given window. """ # This line is modified S = np.zeros(ii.shape[-1]) S += ii[r1, c1] if (r0 - 1 >= 0) and (c0 - 1 >= 0): S += ii[r0 - 1, c0 - 1] if (r0 - 1 >= 0): S -= ii[r0 - 1, c1] if (c0 - 1 >= 0): S -= ii[r1, c0 - 1] return S
python
def merge_modified_section_data(self): """Update the PE image content with any individual section data that has been modified.""" for section in self.sections: section_data_start = adjust_FileAlignment( section.PointerToRawData, self.OPTIONAL_HEADER.FileAlignment ) section_data_end = section_data_start+section.SizeOfRawData if section_data_start < len(self.__data__) and section_data_end < len(self.__data__): self.__data__ = self.__data__[:section_data_start] + section.get_data() + self.__data__[section_data_end:]
java
@Override public void setTypeMap(Map<String, Class<?>> map) throws SQLException { target.setTypeMap(map); }
python
def choose_tool_key(full_configuration, keys): """ Select the key for a tool from a list of supported tools. This function is designed to help when multiple keys can be used to specify an option (e.g., during migration from one name to another). The values in keys should be ordered based on preference, as that's the order they'll be checked. If anything other than the first entry is selected, a warning will be displayed telling the user to migrate their configuration. Arguments: full_configuration - the full configuration for a run of the project keys - a list of keys to consider """ tool_key = _choose_key(full_configuration.config, keys) if tool_key != keys[0]: full_configuration.executor.warning( "{} is deprecated; migrate to {}".format(tool_key, keys[0]) ) return tool_key
python
def get_other_answers(pool, seeded_answers, get_student_item_dict, algo, options): """ Select other student's answers from answer pool or seeded answers based on the selection algorithm Args: pool (dict): answer pool, format: { option1_index: { student_id: { can store algorithm specific info here } }, option2_index: { student_id: { ... } } } seeded_answers (list): seeded answers from instructor [ {'answer': 0, 'rationale': 'rationale A'}, {'answer': 1, 'rationale': 'rationale B'}, ] get_student_item_dict (callable): get student item dict function to return student item dict algo (str): selection algorithm options (dict): answer options for the question Returns: dict: answers based on the selection algorithm """ # "#" means the number of responses returned should be the same as the number of options. num_responses = len(options) \ if 'num_responses' not in algo or algo['num_responses'] == "#" \ else int(algo['num_responses']) if algo['name'] == 'simple': return get_other_answers_simple(pool, seeded_answers, get_student_item_dict, num_responses) elif algo['name'] == 'random': return get_other_answers_random(pool, seeded_answers, get_student_item_dict, num_responses) else: raise UnknownChooseAnswerAlgorithm()
java
public boolean shouldAggregateSoils(HashMap<String, String> currentSoil, HashMap<String, String> previousSoil) { float ruCurrent; float ruPrevious; float resultFirstRule; float resultSecRule; boolean firstRule; boolean secRule; // ru in mm/m ruCurrent = (parseFloat(currentSoil.get(SLDUL)) - parseFloat(currentSoil.get(SLLL))) * 1000.0f; ruPrevious = (parseFloat(previousSoil.get(SLDUL)) - parseFloat(previousSoil.get(SLLL))) * 1000f; resultFirstRule = round(Math.abs(ruCurrent - ruPrevious)); firstRule = resultFirstRule <= FIRST_THRESHOLD_DEFAULT; /** * First rule : (currentRu - previousRu) <= 5 mm/m Second rule : * (currentBdm - previousBdm) <= 0.05 g/cm3 Soil layers are aggregated * if the rules below are both true * */ resultSecRule = round(Math.abs(parseFloat(currentSoil.get(SLBDM)) - parseFloat(previousSoil.get(SLBDM)))); secRule = (round(resultSecRule) <= SECOND_THRESHOLD_DEFAULT); log.debug("*********************"); log.debug("Ru current : "+ruCurrent); log.debug("Ru previous : "+ruPrevious); log.debug("First rule : " + resultFirstRule + " <= " + FIRST_THRESHOLD_DEFAULT + " ? " + firstRule); log.debug("Sec rule : " + resultSecRule + " <= " + SECOND_THRESHOLD_DEFAULT + " ? " + secRule); log.debug("*********************"); return firstRule && secRule; }
python
def initialize(): """ Initializes the cauldron library by confirming that it can be imported by the importlib library. If the attempt to import it fails, the system path will be modified and the attempt retried. If both attempts fail, an import error will be raised. """ cauldron_module = get_cauldron_module() if cauldron_module is not None: return cauldron_module sys.path.append(ROOT_DIRECTORY) cauldron_module = get_cauldron_module() if cauldron_module is not None: return cauldron_module raise ImportError(' '.join(( 'Unable to import cauldron.' 'The package was not installed in a known location.' )))
java
public static String trStyleHtmlContent(String style, String... content) { return tagStyleHtmlContent(Html.Tag.TR, style, content); }
python
def is_valid_address (s): """ returns True if address is a valid Bluetooth address valid address are always strings of the form XX:XX:XX:XX:XX:XX where X is a hexadecimal character. For example, 01:23:45:67:89:AB is a valid address, but IN:VA:LI:DA:DD:RE is not """ try: pairs = s.split (":") if len (pairs) != 6: return False if not all(0 <= int(b, 16) <= 255 for b in pairs): return False except: return False return True
java
public static Date parse(String source, final String pattern) { if (StringUtils.isBlank(pattern)) { return parseISO8601DateString(source); } else { try { // SimpleDateFormat is not fully ISO8601 compatible, so we replace 'Z' by +0000 if (StringUtils.contains(source, "Z")) { source = StringUtils.replace(source, "Z", "+0000"); } return new SimpleDateFormat(pattern).parse(source); } catch (ParseException ignore) { } // try to parse as ISO8601 date (supports multiple formats) return parseISO8601DateString(source); } }
python
def get_float(prompt=None): """ Read a line of text from standard input and return the equivalent float as precisely as possible; if text does not represent a double, user is prompted to retry. If line can't be read, return None. """ while True: s = get_string(prompt) if s is None: return None if len(s) > 0 and re.search(r"^[+-]?\d*(?:\.\d*)?$", s): try: return float(s) except ValueError: pass # Temporarily here for backwards compatibility if prompt is None: print("Retry: ", end="")
java
@Override public void run() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.entry(tc, "run", Thread.currentThread().getName() + " " + this.in.getReadListener()); } try { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Issuing the async read for the data"); } if (_isc==null) { _callback.complete(null); } else { //Call into the HttpInboundService context for the body data, passing in the callback and forcing //the read to go asynchronous //If there is data immediately available Channel will call the callback.complete before returning to this thread _isc.getRequestBodyBuffer(_callback, true); } } catch (Exception e){ if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "An exception occurred during the async read : " + e); } e.printStackTrace(); //There was a problem with the read so we should invoke their onError, since technically it's been set now if(this.in.getReadListener()!= null) this.in.getReadListener().onError(e); } finally { asyncContext.setReadListenerRunning(false); } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { Tr.exit(tc, "run", this.in.getReadListener()); } }
python
def as_dict(self): """ Create a dictionary representation of a PWInput object Returns: dict """ pwinput_dict = {'structure': self.structure.as_dict(), 'pseudo': self.pseudo, 'sections': self.sections, 'kpoints_mode': self.kpoints_mode, 'kpoints_grid': self.kpoints_grid, 'kpoints_shift': self.kpoints_shift} return pwinput_dict
java
public long readVarLong() throws IOException { boolean more; long result = 0; do { int b = in.readUnsignedByte(); more = (b & 0x80) == 0x80; result = 128 * result + (b & 0x7F); // TODO Check for long overflow } while (more); return result; }
python
def startsafe(self, project, **parameters): """Start a run. ``project`` is the ID of the project, and ``parameters`` are keyword arguments for the global parameters. Returns a ``CLAMData`` object or raises exceptions. This version, unlike ``start()``, raises Exceptions (``ParameterError``) on parameter errors. response = client.startsafe("myprojectname", parameter1="blah", parameterX=4.2) """ try: data = self.start(project, **parameters) for parametergroup, paramlist in data.parameters: #pylint: disable=unused-variable for parameter in paramlist: if parameter.error: raise clam.common.data.ParameterError(parameter.error) return data except: raise
java
private String format(Object o) { String result; if (o == null) { result = ""; } else { if (o instanceof Boolean == true) { result = LocaleData.getString(m_locale, (((Boolean) o).booleanValue() == true ? LocaleData.YES : LocaleData.NO)); } else { if (o instanceof Float == true || o instanceof Double == true) { result = (m_formats.getDecimalFormat().format(((Number) o).doubleValue())); } else { if (o instanceof Day) { result = Integer.toString(((Day) o).getValue()); } else { result = o.toString(); } } } // // At this point there should be no line break characters in // the file. If we find any, replace them with spaces // result = stripLineBreaks(result, MPXConstants.EOL_PLACEHOLDER_STRING); // // Finally we check to ensure that there are no embedded // quotes or separator characters in the value. If there are, then // we quote the value and escape any existing quote characters. // if (result.indexOf('"') != -1) { result = escapeQuotes(result); } else { if (result.indexOf(m_delimiter) != -1) { result = '"' + result + '"'; } } } return (result); }
python
def is_watching(self, username): """Check if user is being watched by the given user :param username: Check if username is watching you """ if self.standard_grant_type is not "authorization_code": raise DeviantartError("Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.") response = self._req('/user/friends/watching/{}'.format(username)) return response['watching']
python
def remove_event_type(self, name): """Remove event type based on name.""" if name not in self.event_types: lg.info('Event type ' + name + ' was not found.') events = self.rater.find('events') # list is necessary so that it does not remove in place for e in list(events): if e.get('type') == name: events.remove(e) self.save()
python
def convert_block(G, h, dim, **kwargs):
    r"""
    Applies the clique conversion method to a single positive
    semidefinite block of a cone linear program

    .. math::
        \begin{array}{ll}
           \mbox{maximize}   & -h^T z  \\
           \mbox{subject to} &  G^T z + c = 0 \\
                             &  \mathbf{smat}(z)\ \ \text{psd completable}
        \end{array}

    After conversion, the above problem is converted to a block-diagonal one

    .. math::
        \begin{array}{ll}
           \mbox{maximize}   & -h_b^T z_b  \\
           \mbox{subject to} &  G_b^T z_b + c = 0 \\
                             &  G_c^T z_b = 0 \\
                             &  \mathbf{smat}(z_b)\ \ \text{psd block-diagonal}
        \end{array}

    where :math:`z_b` is a vector representation of a block-diagonal
    matrix. The constraint :math:`G_b^T z_b + c = 0` corresponds to the
    original constraint :math:`G'z + c = 0`, and the constraint
    :math:`G_c^T z_b = 0` is a coupling constraint.

    :param G:               :py:class:`spmatrix`
    :param h:               :py:class:`matrix`
    :param dim:             integer
    :param merge_function:  routine that implements a merge heuristic (optional)
    :param coupling:        mode of conversion (optional)
    :param max_density:     float (default: 0.4)

    The following example illustrates how to apply the conversion method to a
    one-block SDP:

    .. code-block:: python

        block = (G, h, dim)
        blockc, blk2sparse, symb = convert_block(*block)

    The return value `blk2sparse` is a 4-tuple (`blki,I,J,n`) that defines a
    mapping between the sparse matrix representation and the converted
    block-diagonal representation. If `blkvec` represents a block-diagonal
    matrix, then

    .. code-block:: python

        S = spmatrix(blkvec[blki], I, J)

    maps `blkvec` into a sparse matrix representation of the matrix.
    Similarly, a sparse matrix `S` can be converted to the block-diagonal
    matrix representation using the code

    .. code-block:: python

        blkvec = matrix(0.0, (len(S),1), tc=S.typecode)
        blkvec[blki] = S.V

    The optional argument `max_density` controls whether or not to perform
    conversion based on the aggregate sparsity of the block. Specifically,
    conversion is performed whenever the number of lower triangular nonzeros
    in the aggregate sparsity pattern is less than or equal to
    `max_density*dim`.

    The optional argument `coupling` controls the introduction of equality
    constraints in the conversion. Possible values are *full* (default),
    *sparse*, *sparse+tri*, and any nonnegative integer. Full coupling
    results in a conversion in which all coupling constraints are kept, and
    hence the converted problem is equivalent to the original problem.
    Sparse coupling yields a conversion in which only the coupling
    constraints corresponding to nonzero entries in the aggregate sparsity
    pattern are kept, and sparse-plus-tridiagonal (*sparse+tri*) yields a
    conversion with tridiagonal coupling in addition to coupling constraints
    corresponding to nonzero entries in the aggregate sparsity pattern.
    Setting `coupling` to a nonnegative integer *k* yields a conversion with
    coupling constraints corresponding to entries in a band with
    half-bandwidth *k*.

    .. seealso::

        M. S. Andersen, A. Hansson, and L. Vandenberghe, `Reduced-Complexity
        Semidefinite Relaxations of Optimal Power Flow Problems
        <http://dx.doi.org/10.1109/TPWRS.2013.2294479>`_,
        IEEE Transactions on Power Systems, 2014.
    """
    merge_function = kwargs.get('merge_function', None)
    coupling = kwargs.get('coupling', 'full')
    tskip = kwargs.get('max_density',0.4)

    tc = G.typecode

    ###
    ### Find filled pattern, compute symbolic factorization using AMD
    ### ordering, and do "symbolic conversion"
    ###

    # find aggregate sparsity pattern
    h = sparse(h)
    LIa = matrix(list(set(G.I).union(set(h.I))))
    Ia = [i%dim for i in LIa]
    Ja = [j//dim for j in LIa]
    Va = spmatrix(1.,Ia,Ja,(dim,dim))

    # find permutation, symmetrize, and permute
    Va = symmetrize(tril(Va))

    # if not very sparse, skip decomposition
    if float(len(Va))/Va.size[0]**2 > tskip:
        return (G, h, None, [dim]), None, None

    # compute symbolic factorization
    F = symbolic(Va, merge_function = merge_function, p = amd.order)
    p = F.p
    ip = F.ip
    Va = F.sparsity_pattern(reordered = True, symmetric = True)

    # symbolic conversion: translate the requested coupling mode into an
    # explicit sparsity pattern (or leave 'full' as-is)
    if coupling == 'sparse': coupling = tril(Va)
    elif coupling == 'sparse+tri':
        coupling = tril(Va)
        # add the main and first sub-diagonal (tridiagonal band) to the pattern
        coupling += spmatrix(1.0,[i for j in range(Va.size[0]) for i in range(j,min(Va.size[0],j+2))],\
            [j for j in range(Va.size[0]) for i in range(j,min(Va.size[0],j+2))],Va.size)
    elif type(coupling) is int:
        assert coupling >= 0
        bw = +coupling
        # banded coupling: keep entries within half-bandwidth bw
        coupling = spmatrix(1.0,[i for j in range(Va.size[0]) for i in range(j,min(Va.size[0],j+bw+1))],\
            [j for j in range(Va.size[0]) for i in range(j,min(Va.size[0],j+bw+1))],Va.size)

    dims, sparse_to_block, constraints = symb_to_block(F, coupling = coupling)

    # dimension of block-diagonal representation
    N = sum([d**2 for d in dims])

    ###
    ### Convert problem data
    ###

    m = G.size[1]           # cols in G
    cp, ri, val = G.CCS
    IV = []                 # list of m (row, value) tuples
    J = []
    for j in range(m):
        iv = []
        for i in range(cp[j+1]-cp[j]):
            row = ri[cp[j]+i]%dim
            col = ri[cp[j]+i]//dim
            if row < col: continue   # ignore upper triangular entries
            k1 = ip[row]
            k2 = ip[col]
            blk_idx = sparse_to_block[min(k1,k2)*dim + max(k1,k2)]
            if k1 == k2:
                iv.append((blk_idx[0], val[cp[j]+i]))
            elif k1 > k2:
                # off-diagonal entry lands in two block positions; the
                # mirrored copy carries the conjugate for the complex case
                iv.append((blk_idx[0], val[cp[j]+i]))
                iv.append((blk_idx[1], val[cp[j]+i].conjugate()))
            else:
                iv.append((blk_idx[0], val[cp[j]+i].conjugate()))
                iv.append((blk_idx[1], val[cp[j]+i]))
        iv.sort(key=lambda x: x[0])
        IV.extend(iv)
        J.extend(len(iv)*[j])

    # build G_converted
    I, V = zip(*IV)
    G_converted = spmatrix(V, I, J, (N, m), tc = tc)

    # convert and build new h
    _, ri, val = h.CCS
    iv = []
    for i in range(len(ri)):
        row = ri[i]%dim
        col = ri[i]//dim
        if row < col: continue   # ignore upper triangular entries
        k1 = ip[row]
        k2 = ip[col]
        blk_idx = sparse_to_block[min(k1,k2)*dim + max(k1,k2)]
        if k1 == k2:
            iv.append((blk_idx[0], val[i]))
        elif k1 > k2:
            iv.append((blk_idx[0], val[i]))
            iv.append((blk_idx[1], val[i].conjugate()))
        else:
            iv.append((blk_idx[0], val[i].conjugate()))
            iv.append((blk_idx[1], val[i]))
    iv.sort(key=lambda x: x[0])
    if iv:
        I, V = zip(*iv)
    else:
        I, V = [], []
    h_converted = spmatrix(V, I, len(I)*[0], (N, 1), tc = tc)

    ###
    ### Build matrix representation of coupling constraints
    ###

    IV = []                 # list of (row, value) tuples
    J = []
    ncon = 0
    for j in range(len(constraints)):
        iv = []
        if len(constraints[j]) == 2:
            # equality between two block entries: z[ii] - z[jj] = 0
            ii, jj = constraints[j]
            iv = sorted([(ii, 1.0), (jj, -1.0)],key=lambda x: x[0])
            jl = 2*[ncon]
            ncon += 1
        elif len(constraints[j]) == 4:
            # equality between sums of pairs of block entries
            i1,j1,i2,j2 = constraints[j]
            iv = sorted([(i1, 1.0), (i2, 1.0), (j1, -1.0), (j2, -1.0)],key=lambda x: x[0])
            jl = 4*[ncon]
            ncon += 1
            if tc == 'z':
                # complex case: a second constraint for the imaginary part
                iv.extend(sorted([(i1, complex(0.0,1.0)), (i2, complex(0.0,-1.0)), (j1, complex(0.0,-1.0)), (j2, complex(0.0,1.0))],key=lambda x: x[0]))
                jl.extend(4*[ncon])
                ncon += 1
        # NOTE(review): if a constraint tuple had a length other than 2 or 4,
        # 'jl' here would be stale or unbound — assumes symb_to_block only
        # yields 2- or 4-tuples; confirm.
        IV.extend(iv)
        J.extend(jl)

    # build G_coupling
    if IV:
        I, V = zip(*IV)
    else:
        I, V = [], []
    G_coupling = spmatrix(V, I, J, (N, ncon), tc = tc)

    # generate indices for reverse mapping (block_to_sparse)
    idx = []
    for k in sparse_to_block.keys():
        k1 = p[k%dim]
        k2 = p[k//dim]
        idx.append((min(k1,k2)*dim + max(k1,k2), sparse_to_block[k][0]))

    idx.sort()
    idx, blki = zip(*idx)
    blki = matrix(blki)
    I = [v%dim for v in idx]
    J = [v//dim for v in idx]
    n = sum([di**2 for di in dims])

    return (G_converted, h_converted, G_coupling, dims), (blki, I, J, n), F
python
def force_bytes(bytes_or_unicode, encoding='utf-8', errors='backslashreplace'):
    """Return ``bytes``: encode text input, pass ``bytes`` through unchanged.

    :param bytes_or_unicode: value to coerce; ``bytes`` are returned as-is,
        anything else is encoded via its ``encode`` method
    :param encoding: codec used when encoding (default ``utf-8``)
    :param errors: error-handling scheme (default ``backslashreplace``)
    """
    if not isinstance(bytes_or_unicode, bytes):
        return bytes_or_unicode.encode(encoding, errors)
    return bytes_or_unicode
java
/**
 * Lists virtual machine image resources asynchronously, delivering the
 * result to the supplied callback.
 *
 * Thin adapter: wraps the observable-based overload
 * {@code listWithServiceResponseAsync} into a callback-style future.
 *
 * @param location        target location to query
 * @param publisherName   image publisher name
 * @param offer           image offer
 * @param skus            image SKU
 * @param filter          filter expression applied to the query
 * @param top             maximum number of results to return
 * @param orderby         ordering expression
 * @param serviceCallback invoked on success or failure
 * @return a {@link ServiceFuture} wrapping the pending request
 */
public ServiceFuture<List<VirtualMachineImageResourceInner>> listAsync(String location, String publisherName, String offer, String skus, String filter, Integer top, String orderby, final ServiceCallback<List<VirtualMachineImageResourceInner>> serviceCallback) {
    return ServiceFuture.fromResponse(listWithServiceResponseAsync(location, publisherName, offer, skus, filter, top, orderby), serviceCallback);
}
java
/**
 * Authenticates using a String search base, delegating to the
 * LdapName-based overload with a no-op authenticated-entry callback.
 *
 * @param base          search base as a String (converted via {@code LdapUtils.newLdapName})
 * @param filter        LDAP search filter identifying the entry to bind as
 * @param password      credential used for the bind attempt
 * @param errorCallback receives any authentication error
 * @return whether authentication succeeded, as reported by the delegated overload
 */
@Override
public boolean authenticate(String base, String filter, String password, AuthenticationErrorCallback errorCallback) {
    return authenticate(LdapUtils.newLdapName(base), filter, password, new NullAuthenticatedLdapEntryContextCallback(), errorCallback);
}
python
def _mark_updated(self):
    """Update the updated timestamp.

    Writes the current UTC time, as an ISO-8601 string, into the
    'updated' field of the hash stored under ``self.key`` via the
    module-level ``DB`` client.

    NOTE(review): ``datetime.utcnow()`` returns a naive datetime and is
    deprecated in Python 3.12+; switching to ``datetime.now(timezone.utc)``
    would append a UTC offset to the ISO string, so confirm consumers of
    the stored value before changing.
    """
    timestamp = datetime.datetime.utcnow().isoformat()
    DB.set_hash_value(self.key, 'updated', timestamp)
java
/**
 * Attaches the fields shared by all generated JSON entries.
 *
 * Note: the target is an OrderedJSONObject, so the insertion order of the
 * two puts ("Representation" before "Description") is part of the output
 * format and must not be reordered.
 *
 * @param obj            object to populate (mutated in place)
 * @param representation value stored under the "Representation" key
 * @param description    value stored under the "Description" key
 * @return the same {@code obj} instance, for chaining
 */
private static OrderedJSONObject setCommonJSONElements(OrderedJSONObject obj, Object representation, String description) {
    obj.put("Representation", representation);
    obj.put("Description", description);
    return obj;
}
java
/**
 * Fluent setter for the training job's hyperparameters.
 *
 * @param hyperParameters hyperparameter name/value pairs to set
 * @return this object, so that method calls can be chained together
 */
public DescribeTrainingJobResult withHyperParameters(java.util.Map<String, String> hyperParameters) {
    setHyperParameters(hyperParameters);
    return this;
}
java
/**
 * Retrieves the issues matching the given filter.
 *
 * @param filter filter to apply; when {@code null} a default
 *               {@link IssueFilter} is used instead
 * @return the matching issues
 */
public Collection<Issue> issues(IssueFilter filter) {
    IssueFilter effectiveFilter = filter;
    if (effectiveFilter == null) {
        effectiveFilter = new IssueFilter();
    }
    return get(Issue.class, effectiveFilter);
}
java
/**
 * Updates the given CPDefinitionOptionValueRel entity.
 *
 * Static service-util facade: delegates to the local service obtained via
 * {@code getService()}.
 *
 * @param cpDefinitionOptionValueRel the entity to persist
 * @return the updated entity, as returned by the service
 */
public static com.liferay.commerce.product.model.CPDefinitionOptionValueRel updateCPDefinitionOptionValueRel(
    com.liferay.commerce.product.model.CPDefinitionOptionValueRel cpDefinitionOptionValueRel) {
    return getService()
        .updateCPDefinitionOptionValueRel(cpDefinitionOptionValueRel);
}
java
/**
 * Converts a five-argument action into an async function.
 *
 * Delegates to the Scheduler-accepting overload, using the computation
 * scheduler ({@code Schedulers.computation()}) by default.
 *
 * @param action the action to run asynchronously
 * @return a function whose returned Observable reflects the action's execution
 */
public static <T1, T2, T3, T4, T5> Func5<T1, T2, T3, T4, T5, Observable<Void>> toAsync(Action5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5> action) {
    return toAsync(action, Schedulers.computation());
}
java
/**
 * Renders a byte count as a human-readable size string using the largest
 * fitting unit (B, KB, MB or GB), with integer (truncating) division.
 *
 * @param byteNum number of bytes; negative input yields an error message
 * @return formatted size string, e.g. {@code "3 MB"}
 */
public static String byte2FitMemoryString(final long byteNum) {
    if (byteNum < 0) {
        return "shouldn't be less than zero!";
    }
    if (byteNum < MemoryConst.KB) {
        return String.format("%d B", byteNum);
    }
    if (byteNum < MemoryConst.MB) {
        return String.format("%d KB", byteNum / MemoryConst.KB);
    }
    if (byteNum < MemoryConst.GB) {
        return String.format("%d MB", byteNum / MemoryConst.MB);
    }
    return String.format("%d GB", byteNum / MemoryConst.GB);
}
java
/**
 * Checks the required permissions for the current resource, using a
 * default error message.
 *
 * Delegates to the overload that takes a message container, supplying
 * {@code GUI_ERR_RESOURCE_PERMISSIONS_2} parameterized with the resource
 * path and the required permission string.
 *
 * @param required        the permission set that must be granted
 * @param neededForFolder whether the permission is needed for the enclosing
 *                        folder rather than the resource itself
 * @return the result of the delegated permission check
 */
protected boolean checkResourcePermissions(CmsPermissionSet required, boolean neededForFolder) {
    return checkResourcePermissions(
        required,
        neededForFolder,
        Messages.get().container(
            Messages.GUI_ERR_RESOURCE_PERMISSIONS_2,
            getParamResource(),
            required.getPermissionString()));
}
python
def do_file_sub(self, srcpath, regexp, subst):
    '''Apply a regexp substitution to a file archived by sosreport.

    srcpath is the path in the archive where the file can be found.

    regexp can be a regexp string or a compiled re object.

    subst is a string to replace each occurance of regexp in the content
    of srcpath.

    This function returns the number of replacements made.
    '''
    try:
        path = self._get_dest_for_srcpath(srcpath)
        # Bug fix: debug message said "scrpath" instead of "srcpath".
        self._log_debug("substituting srcpath '%s'" % srcpath)
        self._log_debug("substituting '%s' for '%s' in '%s'"
                        % (subst, regexp, path))
        if not path:
            return 0
        readable = self.archive.open_file(path)
        content = readable.read()
        if not isinstance(content, six.string_types):
            content = content.decode('utf8', 'ignore')
        result, replacements = re.subn(regexp, subst, content)
        if replacements:
            # write the scrubbed content back under the original
            # archive path, replacing the collected file
            self.archive.add_string(result, srcpath)
        else:
            replacements = 0
    except (OSError, IOError) as e:
        # if trying to regexp a nonexisting file, dont log it as an
        # error to stdout
        # NOTE(review): if _get_dest_for_srcpath itself raised, 'path' is
        # unbound here and the log call would fail — pre-existing behavior.
        if e.errno == errno.ENOENT:
            msg = "file '%s' not collected, substitution skipped"
            self._log_debug(msg % path)
        else:
            msg = "regex substitution failed for '%s' with: '%s'"
            self._log_error(msg % (path, e))
        replacements = 0
    return replacements
java
/**
 * Computes the modulo-10 check digit (EAN/UPC style) for a string of
 * ASCII digits: digits are weighted 3,1,3,1,... starting from the
 * rightmost character, and the check digit completes the weighted sum
 * to a multiple of ten.
 *
 * @param text digit string ('0'..'9'); no validation is performed
 * @return the check digit as a character '0'..'9'
 */
public static char getChecksum(String text) {
    int sum = 0;
    int weight = 3;
    for (int pos = text.length() - 1; pos >= 0; pos--) {
        sum += weight * (text.charAt(pos) - '0');
        weight = 4 - weight; // alternate 3 <-> 1, same as the original "mul ^= 2"
    }
    int check = (10 - (sum % 10)) % 10;
    return (char) (check + '0');
}
python
def project_create_notif(self, tenant_id, tenant_name):
    """Handle a tenant-create notification.

    Creates the tenant's edge router via the OpenStack helper; a no-op
    when the firewall subsystem has not been initialised.
    """
    if not self.fw_init:
        return

    router_name = '_'.join([fw_constants.TENANT_EDGE_RTR, tenant_name])
    self.os_helper.create_router(router_name, tenant_id, [])
python
def Feature_Engineering(DataFrame,train):
    """
    Extracts important features and writes them in usable form
    Deletes features of little importance

    :param DataFrame: This is the file name of a csv file we wish to convert into a usable DataFrame.
    :param train: This is training set corresponding to our csv file. Should be of type pandas.DataFrame
    :returns: Returns csv file, after having been modified as a pandas.DataFrame type

    NOTE(review): expects Titanic-style columns (Name, SibSp, Parch,
    PassengerId, Cabin, Pclass, Sex, Embarked, Ticket, Fare, Age) —
    confirm against the data source.
    """
    # despite its name, 'DataFrame' arrives as a csv path and is replaced here
    DataFrame= pd.read_csv(DataFrame)

    # the title is the word following the comma in "Surname, Title. Given" names
    titles=DataFrame['Name'].apply(lambda x: x.split(',')[1].split(' ')[1])
    # map honorifics to ordinal codes; several rare titles share a code
    title_mapping = {"the":5, "Mr.": 1, "Miss.": 2, "Mrs.": 3, "Master.": 4, "Dr.": 5, "Rev.": 6, "Major.": 7, "Col.": 7, "Mlle.": 2, "Mme.": 3, "Don.": 9, "Lady.": 10, "Countess.": 10, "Jonkheer.": 10, "Sir.": 9, "Capt.": 7, "Ms.": 2, "Dona.": 10}
    for k,v in title_mapping.items():
        titles[titles == k] = v
    DataFrame["Title"] = titles

    DataFrame['NameLen']=DataFrame['Name'].apply(lambda x: len(x))
    DataFrame['FamSize']=DataFrame['SibSp']+DataFrame['Parch']
    # missing Cabin values are float NaN, hence the type check
    DataFrame['Has_Cabin'] = DataFrame["Cabin"].apply(lambda x: 0 if type(x) == float else 1)

    # keep only the deck letter; 'n' is the first character of str(nan)
    cabins=DataFrame['Cabin'].apply(lambda x: str(x)[0])
    cabin_mapping={'A':3,'B':5,'C':5,'D':4,'E':4,'F':3,'G':2,'T':1,'n':10}
    for k,v in cabin_mapping.items():
        cabins[cabins==k]=v
    DataFrame['Cabin']=cabins

    # drop raw columns folded into the engineered features above
    del DataFrame['Parch']
    del DataFrame['SibSp']
    del DataFrame['PassengerId']

    # one-hot encode the remaining categoricals, then drop the originals
    pclass = pd.get_dummies( DataFrame.Pclass , prefix='Pclass' )
    sex = pd.get_dummies(DataFrame.Sex)
    embarked = pd.get_dummies(DataFrame.Embarked, prefix='Embarked')
    DataFrame=pd.concat([DataFrame,pclass,sex,embarked],axis=1)
    del DataFrame['Pclass']
    del DataFrame['Name']
    del DataFrame['Ticket']
    del DataFrame['Sex']
    del DataFrame['Embarked']

    # impute from the *training* set so train/test share the same fill value
    DataFrame['Fare'].fillna(train['Fare'].median(), inplace = True)

    # Mapping Fare into four ordinal buckets
    DataFrame.loc[ DataFrame['Fare'] <= 7.91, 'Fare'] = 0
    DataFrame.loc[(DataFrame['Fare'] > 7.91) & (DataFrame['Fare'] <= 14.454), 'Fare'] = 1
    DataFrame.loc[(DataFrame['Fare'] > 14.454) & (DataFrame['Fare'] <= 31), 'Fare'] = 2
    DataFrame.loc[ DataFrame['Fare'] > 31, 'Fare'] = 3
    DataFrame['Fare'] = DataFrame['Fare'].astype(int)

    DataFrame['Age'].fillna(train['Age'].median(), inplace = True)

    return DataFrame
java
/**
 * Queries predictions for a project asynchronously, delivering the result
 * to the supplied callback.
 *
 * Thin adapter: wraps {@code queryPredictionsWithServiceResponseAsync}
 * into a callback-style future.
 *
 * @param projectId       id of the project to query
 * @param query           query token describing the prediction query
 * @param serviceCallback invoked with the result or an error
 * @return a {@link ServiceFuture} wrapping the pending request
 */
public ServiceFuture<PredictionQueryResult> queryPredictionsAsync(UUID projectId, PredictionQueryToken query, final ServiceCallback<PredictionQueryResult> serviceCallback) {
    return ServiceFuture.fromResponse(queryPredictionsWithServiceResponseAsync(projectId, query), serviceCallback);
}
python
def initialize_ordered_bulk_op(self, bypass_document_validation=False):
    """**DEPRECATED** - Initialize an ordered batch of write operations.

    Operations will be performed on the server serially, in the order
    provided. If an error occurs all remaining operations are aborted.

    :Parameters:
      - `bypass_document_validation`: (optional) If ``True``, allows the
        write to opt-out of document level validation. Default is
        ``False``.

    Returns a :class:`~pymongo.bulk.BulkOperationBuilder` instance.

    See :ref:`ordered_bulk` for examples.

    .. note:: `bypass_document_validation` requires server version
      **>= 3.2**

    .. versionchanged:: 3.5
      Deprecated. Use :meth:`~pymongo.collection.Collection.bulk_write`
      instead.

    .. versionchanged:: 3.2
      Added bypass_document_validation support

    .. versionadded:: 2.7
    """
    # stacklevel=2 points the warning at the caller, not this method
    warnings.warn("initialize_ordered_bulk_op is deprecated",
                  DeprecationWarning, stacklevel=2)
    # second argument True selects ordered (serial, abort-on-error) mode
    return BulkOperationBuilder(self, True, bypass_document_validation)
java
/**
 * Looks up the localized string for the given key.
 *
 * Instead of letting a {@link MissingResourceException} propagate, the
 * exception is routed through {@code handleMissingResourceException},
 * whose return value is used as the fallback result.
 *
 * @param key resource key to resolve
 * @return the resolved string, or the fallback produced by
 *         {@code handleMissingResourceException} when the key is missing
 */
public String getString(String key) {
    try {
        return this.getStringImpl(key);
    } catch (MissingResourceException e) {
        // centralised fallback behavior is defined by the handler
        return handleMissingResourceException(e);
    }
}
java
/**
 * Splices all tokens of {@code list} into this sequence immediately after
 * {@code before}, rewiring the doubly-linked token chain in place.
 *
 * NOTE(review): assumes {@code list} is non-empty and that {@code before}
 * belongs to this sequence — neither is checked here; confirm at call
 * sites. The donor {@code list} object is not cleared, so it still
 * references tokens that now belong to this sequence.
 *
 * @param before token after which the list's tokens are inserted
 * @param list   tokens to insert (their links are modified)
 */
public void insertAfter(Token before, TokenList list ) {
    Token after = before.next;

    // link 'before' to the head of the inserted run
    before.next = list.first;
    list.first.previous = before;
    if( after == null ) {
        // inserting at the tail: the inserted run's last token becomes the new tail
        last = list.last;
    } else {
        // reconnect the inserted run's tail to the remainder of the sequence
        after.previous = list.last;
        list.last.next = after;
    }
    size += list.size;
}
java
/**
 * Returns the value mapped to {@code key}, computing and caching
 * {@code function.valueOf(parameter)} when the key is absent.
 *
 * Absence is decided by {@code MapIterate.isAbsent} — presumably to
 * distinguish a missing key from a key explicitly mapped to null;
 * confirm against MapIterate.
 *
 * NOTE(review): the get-then-put sequence is a check-then-act; not safe
 * for concurrent use without external synchronization.
 *
 * @param map       map to read and possibly update
 * @param key       lookup key
 * @param function  factory invoked with {@code parameter} when absent
 * @param parameter argument forwarded to {@code function}
 * @return the existing or newly computed value
 */
public static <K, V, P> V getIfAbsentPutWith(
        Map<K, V> map,
        K key,
        Function<? super P, ? extends V> function,
        P parameter) {
    V result = map.get(key);
    if (MapIterate.isAbsent(result, map, key)) {
        result = function.valueOf(parameter);
        map.put(key, result);
    }
    return result;
}
java
/**
 * Returns whether this session is transacted.
 *
 * Fails fast with a localized IllegalStateException when the session has
 * been closed or invalidated, checking the closed state first.
 *
 * @return the transacted flag held in {@code _transacted}
 * @throws IllegalStateException if the session is closed or invalidated
 */
public boolean getTransacted() throws IllegalStateException {
    if (_sessionClosed) {
        throw new IllegalStateException(NLS.getFormattedMessage(
            ("ILLEGAL_STATE_CWSJR1131"), new Object[] { "getTransacted"}, null));
    }
    if (_sessionInvalidated) {
        throw new IllegalStateException(NLS.getFormattedMessage(
            ("ILLEGAL_STATE_CWSJR1135"), new Object[] { "getTransacted"}, null));
    }
    return _transacted;
}
java
/**
 * Tells whether the given plural attribute is map-valued.
 *
 * @param attribute attribute to inspect; {@code null} yields {@code false}
 * @return true when the attribute's collection type is {@code MAP}
 */
private boolean isMapAttribute(PluralAttribute<? super X, ?, ?> attribute) {
    if (attribute == null) {
        return false;
    }
    // same call orientation as before: a null collection type still NPEs
    return attribute.getCollectionType().equals(CollectionType.MAP);
}
java
public static void extractChunkPart(Chunk ic, Chunk oc, int startRow, int nrows, Futures fs) { try { NewChunk dst = new NewChunk(oc); dst._len = dst._sparseLen = 0; NewChunk src = new NewChunk(ic); src = ic.inflate_impl(src); assert src._len == ic._len; // Iterate over values skip all 0 int remain = nrows; Iterator<NewChunk.Value> it = src.values(startRow, startRow + nrows); int off = startRow - 1; while (it.hasNext()) { NewChunk.Value v = it.next(); final int rid = v.rowId0(); assert rid < startRow + nrows; int add = rid - off; // number of values to add off = rid; dst.addZeros(add - 1); // append (add-1) zeros v.add2Chunk(dst); // followed by a value remain -= add; assert remain >= 0; } // Handle case when last added value is followed by zeros till startRow+nrows dst.addZeros(remain); assert dst._len == oc._len : "NewChunk.dst.len = " + dst._len + ", oc._len = " + oc._len; dst.close(dst.cidx(), fs); } catch(RuntimeException t){ Log.err("gor exception in chunkSplitter, ic = " + ic + ", oc = " + oc + " startRow = " + startRow + " nrows = " + nrows); throw t; } return ; }
java
/**
 * Reflectively instantiates the configured {@code Dispatcher}
 * implementation, invoking its {@code (Configuration)} constructor with
 * {@code conf}.
 *
 * A bad dispatcher class name or shape is a programming/configuration
 * error, not a recoverable condition, so every reflective failure is
 * rethrown as an {@link AssertionError} (with the original exception as
 * its cause). The original's seven identical catch blocks are collapsed
 * into a single Java 7 multi-catch: {@code ReflectiveOperationException}
 * covers ClassNotFound/Instantiation/IllegalAccess/NoSuchMethod/
 * InvocationTarget exceptions.
 *
 * @return a new Dispatcher instance
 */
public Dispatcher getInstance() {
    try {
        return (Dispatcher) Class.forName(dispatcherImpl)
            .getConstructor(Configuration.class).newInstance(conf);
    } catch (ReflectiveOperationException | ClassCastException e) {
        throw new AssertionError(e);
    }
}