language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
/**
 * Copies every entry of the given map onto the module spec builder as a
 * module property.
 *
 * @param moduleSpecBuilder builder receiving the properties
 * @param properties        name/value pairs to add
 */
public static void addPropertiesToSpec(ModuleSpec.Builder moduleSpecBuilder, Map<String, String> properties) {
    for (Map.Entry<String, String> property : properties.entrySet()) {
        moduleSpecBuilder.addProperty(property.getKey(), property.getValue());
    }
}
python
def feature_info(self):
    """Return the firmware features available for the CPC of this partition.

    Authorization requirements:

    * Object-access permission to this partition.

    Returns:
      :term:`iterable`: One dict per available feature, with items:

      * `name` (:term:`unicode string`): Name of the feature.
      * `description` (:term:`unicode string`): Short description.
      * `state` (bool): `True` if the feature is enabled, `False` otherwise.

    Raises:
      :exc:`ValueError`: Features are not supported on the HMC.
      :exc:`~zhmcclient.HTTPError`
      :exc:`~zhmcclient.ParseError`
      :exc:`~zhmcclient.AuthError`
      :exc:`~zhmcclient.ConnectionError`
    """
    features = self.prop('available-features-list', None)
    # A missing property means the HMC/CPC predates firmware features.
    if features is None:
        raise ValueError("Firmware features are not supported on CPC %s" %
                         self.manager.cpc.name)
    return features
java
/**
 * Creates a Maven resolver system bound to the given ClassLoader and
 * configures it from the Maven plugin environment.
 *
 * @param cl ClassLoader used to locate resolver services
 * @return the plugin-configured resolve stage
 * @throws InvalidEnvironmentException if the plugin environment is absent
 * @throws IllegalArgumentException on invalid arguments
 */
public static PomEquippedResolveStage configureResolverViaPlugin(final ClassLoader cl)
        throws InvalidEnvironmentException, IllegalArgumentException {
    return Resolvers.use(ConfigurableMavenResolverSystem.class, cl).configureViaPlugin();
}
java
/**
 * JCas setter for the "doi" feature of de.julielab.jules.types.Header.
 * Generated-style UIMA accessor: checks the feature exists in the type
 * system, then writes the string value directly into the CAS heap.
 *
 * @param v the DOI value to store
 */
public void setDoi(String v) {
    if (Header_Type.featOkTst && ((Header_Type)jcasType).casFeat_doi == null)
      jcasType.jcas.throwFeatMissing("doi", "de.julielab.jules.types.Header");
    // Low-level write keyed by the feature's CAS code.
    jcasType.ll_cas.ll_setStringValue(addr, ((Header_Type)jcasType).casFeatCode_doi, v);}
python
def get_map(self):
    """
    Collects all the points coordinates from this ``pyny.Space`` instance.

    In order to keep the reference, it returns an index with the
    following key:

        * The first column is the Place.
        * The second column is the body (-1: points, 0: surface,
          n: polyhedron)
        * The third column is the polygon (-n: holes)
        * The fourth column is the point.

    :returns: [index, points]
    :rtype: list of ndarray

    .. note:: This method automatically stores the solution in order
        to do not repeat calculations if the user needs to call it
        more than once.

    .. seealso::
        * :func:`get_seed`
        * :func:`map2pyny`
        * :func:`map2seed`
        * :func:`explode_map`
    """
    seed = self.get_seed()['places']  # template
    points = []
    index = []
    for i, place in enumerate(seed):
        # Free-standing set of points -> key [_, -1, 0, _]
        n_points = place['set_of_points'].shape[0]
        if n_points != 0:  # It can be False (no set_of_points)
            points.append(place['set_of_points'])
            # Each column of the index is (place, body, polygon, point).
            index.append(np.vstack((np.tile(np.array([[i], [-1], [0]]),
                                            n_points),
                                    np.arange(n_points))))
        # Holes -> key [_, 0, -N, _] (negative third column flags a hole)
        for ii, hole in enumerate(place['surface']['holes']):
            n_points = hole.shape[0]
            points.append(hole)
            index.append(np.vstack((np.tile(np.array([[i], [0], [-ii-1]]),
                                            n_points),
                                    np.arange(n_points))))
        # Surface polygons -> key [_, 0, N, _]
        for ii, polygon in enumerate(place['surface']['polygons']):
            n_points = polygon.shape[0]
            points.append(polygon)
            index.append(np.vstack((np.tile(np.array([[i], [0], [ii]]),
                                            n_points),
                                    np.arange(n_points))))
        # Polyhedra -> key [_, N, _, _] with N >= 1
        if len(place['polyhedra']) != 0:  # It can be False (no obstacles)
            for iii, polygon_list in enumerate(place['polyhedra']):
                for iv, polygon in enumerate(polygon_list):
                    n_points = polygon.shape[0]
                    points.append(polygon)
                    index.append(np.vstack((np.tile(np.array([[i], [1+iii],
                                                              [iv]]),
                                                    n_points),
                                            np.arange(n_points))))
    # Columns were accumulated; transpose so each row is one point's key.
    index = np.concatenate(index, axis=1).T
    points = np.concatenate(points)
    # Cache the result so repeated calls are free.
    self.map = [index, points]
    return self.map
java
/**
 * Marshalls the given request into the protocol format (generated
 * AWS SDK marshaller).
 *
 * @param describeJobsRequest  request to marshall; must not be null
 * @param protocolMarshaller   target marshaller
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(DescribeJobsRequest describeJobsRequest, ProtocolMarshaller protocolMarshaller) {
    if (describeJobsRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Only the "jobs" member is marshalled for this request shape.
        protocolMarshaller.marshall(describeJobsRequest.getJobs(), JOBS_BINDING);
    } catch (Exception e) {
        // Wrap any failure, preserving the cause for diagnostics.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def break_down_cookie(cookie):
    """Break a vSphere SOAP cookie string into a single-entry dict.

    :param cookie: vSphere SOAP cookie header value
    :type cookie: str
    :return: ``{cookie_name: cookie_value}`` where the value keeps the
        path attribute in ``$``-prefixed form
    """
    parts = cookie.split(';')
    name_and_value = parts[0].split('=')
    # Rebuild the value as " <value>; $<attribute>" (e.g. "$Path=/").
    rendered = ' {0}; ${1}'.format(name_and_value[1], parts[1].lstrip())
    return {name_and_value[0]: rendered}
java
/**
 * Rotates an atom in place by multiplying its coordinates (as a 1x3 row
 * vector) with the given rotation matrix.
 *
 * @param atom atom whose coordinates are updated in place
 * @param m    rotation matrix applied on the right
 */
public static final void rotate(Atom atom, Matrix m){
    Matrix row = new Matrix(new double[][]{{atom.getX(), atom.getY(), atom.getZ()}});
    Matrix rotated = row.times(m);
    atom.setX(rotated.get(0, 0));
    atom.setY(rotated.get(0, 1));
    atom.setZ(rotated.get(0, 2));
}
python
def _collect_fam_samples(filename, required_samples):
    """Return the set of (fid, iid) pairs in *filename* that are required."""
    found = set()
    with open(filename, 'r') as input_file:
        for line in input_file:
            # First two space-separated columns of a .fam line: FID, IID.
            sample = tuple(line.rstrip("\r\n").split(" ")[:2])
            if sample in required_samples:
                found.add(sample)
    return found


def check_fam_for_samples(required_samples, source, gold):
    """Check fam files for required_samples.

    :param required_samples: set of (fid, iid) tuples that must be present
    :param source: path to the source panel .fam file
    :param gold: path to the gold standard .fam file
    :return: True if every required sample appears in at least one of the
        two files, False otherwise.
    """
    # Checking the source panel
    source_samples = _collect_fam_samples(source, required_samples)

    # Checking the gold standard
    gold_samples = _collect_fam_samples(gold, required_samples)

    # Checking if we got everything
    logger.info("  - Found {} samples in source panel".format(
        len(source_samples),
    ))
    logger.info("  - Found {} samples in gold standard".format(
        len(gold_samples),
    ))

    return len(required_samples - (source_samples | gold_samples)) == 0
java
/**
 * Computes the Moore-Penrose pseudo-inverse of A by solving the least
 * squares problem A * X = I.
 *
 * @param A matrix to pseudo-invert
 * @return the pseudo-inverse of A
 */
public static FloatMatrix pinv(FloatMatrix A) {
    final FloatMatrix identity = FloatMatrix.eye(A.rows);
    return solveLeastSquares(A, identity);
}
python
def info(self, request):
    """Return height of the latest committed block.

    ABCI Info handler: also validates that the connected Tendermint
    version is supported, exiting the process if it is not.
    """
    self.abort_if_abci_chain_is_not_synced()

    # Check if BigchainDB supports the Tendermint version
    if not (hasattr(request, 'version') and tendermint_version_is_compatible(request.version)):
        logger.error(f'Unsupported Tendermint version: {getattr(request, "version", "no version")}.'
                     f' Currently, BigchainDB only supports {__tm_supported_versions__}. Exiting!')
        sys.exit(1)

    logger.info(f"Tendermint version: {request.version}")

    r = ResponseInfo()
    block = self.bigchaindb.get_latest_block()
    if block:
        # After a chain migration, stored heights keep growing but
        # Tendermint expects heights relative to the fork point, so
        # subtract the height at which the current chain started.
        chain_shift = 0 if self.chain is None else self.chain['height']
        r.last_block_height = block['height'] - chain_shift
        r.last_block_app_hash = block['app_hash'].encode('utf-8')
    else:
        # No block committed yet: report the genesis state.
        r.last_block_height = 0
        r.last_block_app_hash = b''
    return r
python
def uninstall_all_passbands(local=True):
    """
    Uninstall all passbands, either globally or locally (need to call twice
    to delete ALL passbands).

    If local=False, you must have permission to access the installation
    directory.
    """
    target_dir = _pbdir_local if local else _pbdir_global
    for filename in os.listdir(target_dir):
        full_path = os.path.join(target_dir, filename)
        logger.warning("deleting file: {}".format(full_path))
        os.remove(full_path)
java
/**
 * Returns a Future that completes as soon as the first of the supplied
 * Futures completes.
 *
 * @param fts futures to race
 * @return a Future completed by whichever input completes first
 */
public static <T> Future<T> anyOf(Future<T>... fts) {
    // Unwrap each wrapper to its underlying CompletableFuture.
    CompletableFuture<T>[] underlying = new CompletableFuture[fts.length];
    for (int idx = 0; idx < fts.length; idx++) {
        underlying[idx] = fts[idx].getFuture();
    }
    return (Future<T>) Future.of(CompletableFuture.anyOf(underlying));
}
java
/**
 * Wraps a FieldDoc in a FieldDocWrapper, but only for concrete
 * FieldDocImpl instances that are not already proxied; anything else
 * (including null) is returned unchanged.
 */
public FieldDoc wrap(FieldDoc source) {
    if (source instanceof FieldDocImpl && !(source instanceof Proxy<?>)) {
        return new FieldDocWrapper((FieldDocImpl) source);
    }
    return source;
}
java
/**
 * Parses the given string into a {@link Long} using the default radix.
 *
 * @param sStr     string to parse; may be null
 * @param aDefault value returned when parsing fails; may be null
 * @return the parsed value or {@code aDefault}
 */
@Nullable
public static Long parseLongObj (@Nullable final String sStr, @Nullable final Long aDefault)
{
  // Delegate to the radix-aware overload.
  return parseLongObj (sStr, DEFAULT_RADIX, aDefault);
}
java
/**
 * Configures an option from its specification strings: a "--long" form,
 * a "-s" short form (each optionally followed by an argument description),
 * or otherwise a plain description string.
 *
 * @param option  builder being configured
 * @param options specification fragments to parse
 */
public void build( OptionBuilder option, String... options ) {
    // Compile the recognizers once instead of on every loop iteration.
    final Pattern longForm = Pattern.compile( "(--[^\\s]*)(\\s(.+))?" );
    final Pattern shortForm = Pattern.compile( "(-.)(\\s+(.+))?" );
    for (String opt : options) {
        Matcher matcher = longForm.matcher( opt );
        if (matcher.matches()) {
            option.withLongForm( matcher.group( 1 ) );
            if (!StringUtil.isBlank( matcher.group( 3 ) )) option.wantsArgument( matcher.group( 3 ) );
            continue;
        }
        matcher = shortForm.matcher( opt );
        if (matcher.matches()) {
            option.withShortForm( matcher.group( 1 ) );
            if (!StringUtil.isBlank( matcher.group( 3 ) )) option.wantsArgument( matcher.group( 3 ) );
            continue;
        }
        // Anything that is not an option spec is treated as a description.
        option.withDescription( opt );
    }
}
python
def _read_items(self):
    # Discover playlist files (*.csv) in the configuration directory and
    # cache their bare names plus display metadata on the instance.
    self._items = []
    self._items = glob.glob(path.join(self._config_path, '*.csv'))
    if len(self._items) == 0:
        # NOTE(review): the empty case returns a (0, -1) tuple while the
        # normal path implicitly returns None — callers appear to rely on
        # this asymmetry; confirm before unifying.
        return 0, -1
    else:
        self._items.sort()
        # Strip the directory prefix and the ".csv" extension so only the
        # playlist name remains.
        for i, an_item in enumerate(self._items):
            self._items[i] = an_item.replace(self._config_path + sep, '').replace('.csv', '')
        """ get already loaded playlist id """
        for i, a_playlist in enumerate(self._items):
            if a_playlist == self._selected_playlist:
                self._selected_playlist_id = i
                break
        # Longest name drives column width, capped at 44 characters.
        self._max_len = len(max(self._items, key=len))
        if self._max_len > 44:
            self._max_len = 44
        self._num_of_items = len(self._items)
java
/**
 * Creates a backup vault: runs the standard pre-execution hooks on the
 * request, then dispatches to the internal executor.
 */
@Override
public CreateBackupVaultResult createBackupVault(CreateBackupVaultRequest request) {
    final CreateBackupVaultRequest prepared = beforeClientExecution(request);
    return executeCreateBackupVault(prepared);
}
python
def scaled_imu3_encode(self, time_boot_ms, xacc, yacc, zacc, xgyro, ygyro, zgyro, xmag, ymag, zmag):
    '''
    Build a SCALED_IMU3 message: the RAW IMU readings for the 3rd 9DOF
    sensor setup, carrying values scaled to the described units.

    time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t)
    xacc, yacc, zacc : accelerations (mg) (int16_t)
    xgyro, ygyro, zgyro : angular speeds around X/Y/Z (millirad/sec) (int16_t)
    xmag, ymag, zmag : magnetic field components (milli tesla) (int16_t)
    '''
    return MAVLink_scaled_imu3_message(
        time_boot_ms,
        xacc, yacc, zacc,
        xgyro, ygyro, zgyro,
        xmag, ymag, zmag)
java
/**
 * Registers a Server-Sent Events endpoint at the given URL.
 *
 * @param url                URL pattern the SSE handler is bound to
 * @param connectionCallback invoked for each new SSE connection with the
 *                           connection and the last-event-id (if any)
 */
public static synchronized void sse(String url, BiConsumer<ServerSentEventConnection, String> connectionCallback) {
    // Fail fast if the server has not been started yet.
    checkStarted();
    instance().endpoints.add(HandlerUtil.sse(url, connectionCallback));
}
java
/**
 * Applies every alternative of the given SuggestedFixes to the code map,
 * producing one resulting file map per alternative index.
 *
 * @param fixChoices        fixes, each offering the same number of alternatives
 * @param fileNameToCodeMap file name -> source code to patch
 * @return one (file name -> patched code) map per alternative
 */
public static ImmutableList<ImmutableMap<String, String>> applyAllSuggestedFixChoicesToCode(
    Iterable<SuggestedFix> fixChoices, Map<String, String> fileNameToCodeMap) {
  if (Iterables.isEmpty(fixChoices)) {
    // No fixes: a single "choice" that modifies no files.
    return ImmutableList.of(ImmutableMap.of());
  }

  // Choosing alternative i across all fixes is only well-defined when every
  // fix offers the same number of alternatives.
  int alternativeCount = Iterables.getFirst(fixChoices, null).getAlternatives().size();
  Preconditions.checkArgument(
      Streams.stream(fixChoices)
          .map(f -> f.getAlternatives().size())
          .allMatch(Predicate.isEqual(alternativeCount)),
      "All SuggestedFixAlternatives must offer an equal number of choices for this "
          + "utility to make sense");

  // Apply choice i of every fix, for each alternative index i.
  return IntStream.range(0, alternativeCount)
      .mapToObj(i -> applySuggestedFixChoicesToCode(fixChoices, i, fileNameToCodeMap))
      .collect(ImmutableList.toImmutableList());
}
python
def _add_segmetrics_to_output(out, data):
    """Add metrics for measuring reliability of CNV estimates.

    Runs `cnvkit.py segmetrics` on the segmented (.cns) and bin-level
    (.cnr) outputs and records the result path under out["segmetrics"].
    """
    out_file = "%s-segmetrics.txt" % os.path.splitext(out["cns"])[0]
    if not utils.file_exists(out_file):
        with file_transaction(data, out_file) as tx_out_file:
            # Invoke the cnvkit.py that sits next to the current Python
            # interpreter so the matching installation is used.
            cmd = [os.path.join(os.path.dirname(sys.executable), "cnvkit.py"), "segmetrics",
                   "--median", "--iqr", "--ci", "--pi",
                   "-s", out["cns"], "-o", tx_out_file, out["cnr"]]
            # Use less fine grained bootstrapping intervals for whole genome runs
            if dd.get_coverage_interval(data) == "genome":
                cmd += ["--alpha", "0.1", "--bootstrap", "50"]
            else:
                cmd += ["--alpha", "0.01", "--bootstrap", "500"]
            do.run(cmd, "CNVkit segmetrics")
    out["segmetrics"] = out_file
    return out
java
/**
 * Convenience overload: unpacks the {@link Subproblem}'s query/target
 * bounds and anchoring flag, then forwards to the primary
 * setScoreVector implementation.
 */
public static Last[][] setScoreVector(int x, Subproblem subproblem, int gop, int gep, int[] subs,
        boolean storing, int[][][] scores) {
    return setScoreVector(x, subproblem.getQueryStartIndex(), subproblem.getTargetStartIndex(),
            subproblem.getTargetEndIndex(), gop, gep, subs, storing, scores,
            subproblem.isStartAnchored());
}
java
/**
 * Called once per method body: resets the opcode stack and the reported
 * immutability type, then scans the method's bytecode.
 */
@Override
public void visitCode(Code obj) {
    try {
        stack.resetForMethodEntry(this);
        reportedType = ImmutabilityType.UNKNOWN;
        super.visitCode(obj);
    } catch (StopOpcodeParsingException e) {
        // Intentionally swallowed: the parse is aborted early once the
        // reported type is known to be immutable — nothing left to learn.
    }
}
python
def configure_server():
    '''
    Configure the transfer environment and store
    '''
    home = os.path.expanduser('~')
    # Load existing project config if present; otherwise start empty.
    if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')):
        with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp:
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input and deprecated since PyYAML 5.1 — consider
            # yaml.safe_load here.
            config = yaml.load(fp.read())
    else:
        config = []
    project_name = input('Name your project: ')
    existing_project = None
    for project in config:
        if project_name == project['name']:
            existing_project = project_name
    if existing_project is not None:
        print(colored('Project ' + project_name + ' already exists', 'red'))
        overwrite = str_input('Would you like to overwrite this project? (yes or no) ', ['yes', 'no'])
        if overwrite == 'no':
            return
        else:
            # Drop the old definition; a fresh one is appended below.
            config = [project for project in config if project_name != project['name']]
    api_port = int_input('port for local prediction API (suggested: 5000)', 1024, 49151)
    print('Select image resolution:')
    print('[0] low (224 px)')
    print('[1] mid (448 px)')
    print('[2] high (896 px)')
    img_resolution_index = int_input('choice', 0, 2, show_range = False)
    # Resolution index maps to a multiplier of the 224 px base size.
    if img_resolution_index == 0:
        img_size = 1
    elif img_resolution_index == 1:
        img_size = 2
    else:
        img_size = 4
    num_categories = int_input('number of image categories in your model', 0, 10000000)
    # Keep prompting until an existing weights file is supplied.
    weights = False
    while weights == False:
        server_weights = os.path.expanduser(input('Select weights file: '))
        if os.path.isfile(server_weights):
            weights = True
        else:
            print('Cannot find the weight file: ', server_weights)
    project = {'name': project_name,
               'api_port': api_port,
               'img_size': img_size,
               'number_categories': num_categories,
               'server_weights': server_weights}
    config.append(project)
    store_config(config)
    print('')
    print(colored('Project configure saved!', 'cyan'))
    print('')
    print('To start the server:')
    print('')
    print(colored(' transfer --prediction-rest-api --project ' + project_name, 'green'))
    print('or')
    print(colored(' transfer --prediction-rest-api -p ' + project_name, 'green'))
python
def get_start_date(module, x):
    """Return the date of the first class on weekday *x* for *module*.

    :param module: module specifier; parsed into a term letter and parts,
        of which only the first part selects the start date
    :param x: weekday name as a single kanji character (月..日)
    """
    weekdays = ['月', '火', '水', '木', '金', '土', '日']
    term, parts = parse_module(module)
    first_day = datetime.datetime(*start_dates[term + parts[0]])
    # Days from the module's start date to the requested weekday,
    # wrapped into the 0..6 range.
    offset = weekdays.index(x) - first_day.weekday()
    if offset < 0:
        offset += 7
    return first_day + datetime.timedelta(days=offset)
python
def verify(self, secret_key):
    """
    Verifies the authenticity of a notification message.

    TODO: This is doing a form of authentication and this functionality
    should really be merged with the pluggable authentication mechanism
    at some point.
    """
    # HMAC input is SERVICE_NAME + OPERATION_NAME + timestamp, matching
    # the signing side.
    verification_input = NotificationMessage.SERVICE_NAME
    verification_input += NotificationMessage.OPERATION_NAME
    verification_input += self.timestamp
    # NOTE(review): uses the legacy Python 2 `sha` module (SHA-1);
    # hashlib.sha1 is the modern equivalent.
    h = hmac.new(key=secret_key, digestmod=sha)
    h.update(verification_input)
    signature_calc = base64.b64encode(h.digest())
    # NOTE(review): '==' is not constant-time; hmac.compare_digest would
    # harden this against timing attacks.
    return self.signature == signature_calc
java
@Action(invokeOn = InvokeOn.OBJECT_AND_COLLECTION)
@ActionLayout(
        describedAs = "Toggle, for testing (direct) bulk actions"
)
public void toggleForBulkActions() {
    // A null flag counts as false; store the inverted value.
    final Boolean current = getFlag();
    final boolean effective = current != null ? current : false;
    setFlag(!effective);
}
python
def convex_hull(obj, qhull_options='QbB Pp QJn'):
    """
    Get a new Trimesh object representing the convex hull of the
    current mesh, with proper normals and watertight.
    Requires scipy >.12.

    Arguments
    --------
    obj : Trimesh, or (n,3) float
      Mesh or cartesian points

    Returns
    --------
    convex : Trimesh
      Mesh of convex hull
    """
    from .base import Trimesh

    if isinstance(obj, Trimesh):
        points = obj.vertices.view(np.ndarray)
    else:
        # will remove subclassing
        points = np.asarray(obj, dtype=np.float64)
        if not util.is_shape(points, (-1, 3)):
            raise ValueError('Object must be Trimesh or (n,3) points!')

    hull = spatial.ConvexHull(points, qhull_options=qhull_options)

    # hull object doesn't remove unreferenced vertices
    # create a mask to re-index faces for only referenced vertices
    vid = np.sort(hull.vertices)
    mask = np.zeros(len(hull.points), dtype=np.int64)
    mask[vid] = np.arange(len(vid))
    # remove unreferenced vertices here
    faces = mask[hull.simplices].copy()
    # rescale vertices back to original size
    vertices = hull.points[vid].copy()

    # qhull returns faces with random winding
    # calculate the returned normal of each face
    crosses = triangles.cross(vertices[faces])
    # qhull may return degenerate (zero-magnitude) faces; drop them
    normals, valid = util.unitize(crosses, check_valid=True)
    # remove zero magnitude faces
    faces = faces[valid]
    crosses = crosses[valid]

    # each triangle area and mean center
    triangles_area = triangles.area(crosses=crosses, sum=False)
    triangles_center = vertices[faces].mean(axis=1)

    # since the convex hull is (hopefully) convex, the vector from
    # the centroid to the center of each face
    # should have a positive dot product with the normal of that face
    # if it doesn't it is probably backwards
    # note that this sometimes gets screwed up by precision issues
    centroid = np.average(triangles_center,
                          weights=triangles_area,
                          axis=0)
    # a vector from the centroid to a point on each face
    test_vector = triangles_center - centroid
    # check the projection against face normals
    backwards = util.diagonal_dot(normals,
                                  test_vector) < 0.0

    # flip the winding outward facing
    faces[backwards] = np.fliplr(faces[backwards])
    # flip the normal
    normals[backwards] *= -1.0

    # save the work we did to the cache so it doesn't have to be recomputed
    initial_cache = {'triangles_cross': crosses,
                     'triangles_center': triangles_center,
                     'area_faces': triangles_area,
                     'centroid': centroid}

    # create the Trimesh object for the convex hull
    convex = Trimesh(vertices=vertices,
                     faces=faces,
                     face_normals=normals,
                     initial_cache=initial_cache,
                     process=True,
                     validate=False)

    # we did the gross case above, but sometimes precision issues
    # leave some faces backwards anyway
    # this call will exit early if the winding is consistent
    # and if not will fix it by traversing the adjacency graph
    convex.fix_normals(multibody=False)

    # sometimes the QbB option will cause precision issues
    # so try the hull again without it and
    # check for qhull_options is None to avoid infinite recursion
    if (qhull_options is not None and
            not convex.is_winding_consistent):
        return convex_hull(convex, qhull_options=None)

    return convex
python
def instance_attr(self, name, context=None):
    """Get the list of nodes associated to the given attribute name.

    Assignments are looked for in both this class and in parents.

    :returns: The list of assignments to the given name.
    :rtype: list(NodeNG)

    :raises AttributeInferenceError: If no attribute with this name
        can be found in this class or parent classes.
    """
    # Work on a copy so self.instance_attrs is never mutated, which
    # could otherwise lead to an infinite loop.
    found = list(self.instance_attrs.get(name, []))
    # Gather assignments made in ancestor classes as well.
    for ancestor in self.instance_attr_ancestors(name, context):
        found += ancestor.instance_attrs[name]
    # Deletions (DelAttr) cancel out — keep only real assignments.
    found = [node for node in found
             if not isinstance(node, node_classes.DelAttr)]
    if found:
        return found
    raise exceptions.AttributeInferenceError(
        target=self, attribute=name, context=context
    )
python
def copy(self, request, **kwargs):  # pylint: disable=unused-argument
    '''
    Copy instance with deps.
    '''
    instance = self.copy_instance(self.get_object())
    serializer = self.get_serializer(instance, data=request.data, partial=True)
    # raise_exception=True: previously the is_valid() result was discarded,
    # so save() could run (and fail obscurely) on invalid input data.
    serializer.is_valid(raise_exception=True)
    serializer.save()
    return Response(serializer.data, status.HTTP_201_CREATED).resp
java
/**
 * Returns the stable item id for the given child view, or {@code NO_ID}
 * when the adapter is absent, does not use stable ids, or the view has
 * no holder.
 */
public long getChildItemId(View child) {
    if (mAdapter == null || !mAdapter.hasStableIds()) {
        return NO_ID;
    }
    final ViewHolder holder = getChildViewHolderInt(child);
    return holder == null ? NO_ID : holder.getItemId();
}
python
def cross_product(self, p1, p2):
    """Return the 2-D cross product (z component) of two XYPoints."""
    return p1.x * p2.y - p2.x * p1.y
python
def get_stp_mst_detail_output_msti_msti_root_id(self, **kwargs):
    """Auto Generated Code

    Builds the get_stp_mst_detail RPC payload with the msti instance-id
    and msti-root-id filled from kwargs, then dispatches it via the
    callback (defaults to self._callback).
    """
    config = ET.Element("config")
    get_stp_mst_detail = ET.Element("get_stp_mst_detail")
    # NOTE(review): the initial "config" element is discarded here — the RPC
    # element itself becomes the payload root. This matches the code
    # generator's pattern; confirm before changing.
    config = get_stp_mst_detail
    output = ET.SubElement(get_stp_mst_detail, "output")
    msti = ET.SubElement(output, "msti")
    instance_id_key = ET.SubElement(msti, "instance-id")
    instance_id_key.text = kwargs.pop('instance_id')
    msti_root_id = ET.SubElement(msti, "msti-root-id")
    msti_root_id.text = kwargs.pop('msti_root_id')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
python
def thermal_data(data, figsize=(12, 4), ms_data=50, v_label='Unit-cell volume $(\mathrm{\AA}^3)$', pdf_filen=None, title='P-V-T data'): """ plot P-V-T data before fitting :param data: {'p': unumpy array, 'v': unumpy array, 'temp': unumpy array} :param eoscurves: {'v': unumpy array, '300': unumpy array at the temperature ....} :param v_label: label for volume axis :param figsize: figure size :param ms_data: marker size for data points :param pdf_filen: name of pdf output file :param title: title of the figure :return: None """ # basic figure setup f, ax = plt.subplots(1, 2, figsize=figsize, sharex=True) # read data to plot if isuncertainties([data['p'], data['v'], data['temp']]): p = unp.nominal_values(data['p']) v = unp.nominal_values(data['v']) temp = unp.nominal_values(data['temp']) sp = unp.std_devs(data['p']) sv = unp.std_devs(data['v']) stemp = unp.std_devs(data['temp']) ax[0].errorbar(p, v, xerr=sp, yerr=sv, marker=' ', c='k', ms=0, mew=0, linestyle='None', capsize=0, lw=0.5, zorder=1) ax[1].errorbar(p, temp, xerr=sp, yerr=stemp, marker=' ', c='k', ms=0, mew=0, linestyle='None', capsize=0, lw=0.5, zorder=1) else: p = data['p'] v = data['v'] temp = data['temp'] points = ax[0].scatter(p, v, marker='o', s=ms_data, c=temp, cmap=c_map, vmin=300., vmax=temp.max(), zorder=2) points = ax[1].scatter(p, temp, marker='o', s=ms_data, c=temp, cmap=c_map, vmin=300., vmax=temp.max(), zorder=2) ax[0].set_xlabel('Pressure (GPa)') ax[1].set_xlabel('Pressure (GPa)') ax[0].set_ylabel(v_label) ax[1].set_ylabel('Temperature (K)') f.suptitle(title) # the parameters are the specified position you set position = f.add_axes([0.92, 0.11, .01, 0.75]) f.colorbar(points, orientation="vertical", cax=position) # position.text(150., 0.5, 'Temperature (K)', fontsize=10, # rotation=270, va='center') if pdf_filen is not None: f.savefig(pdf_filen)
java
public static File getExecutableLocation(final String exeName) { // // must add current working directory to the // from of the path from the "path" environment variable final File currentDir = new File(System.getProperty("user.dir")); if (new File(currentDir, exeName).exists()) { return currentDir; } final File[] envPath = CUtil.getPathFromEnvironment("PATH", File.pathSeparator); for (final File element : envPath) { if (new File(element, exeName).exists()) { return element; } } return null; }
python
def create_password(self, data):
    """Create a password via the API and return the new entry's ID."""
    # http://teampasswordmanager.com/docs/api-passwords/#create_password
    log.info('Create new password %s' % data)
    new_id = self.post('passwords.json', data).get('id')
    log.info('Password has been created with ID %s' % new_id)
    return new_id
java
protected AsteriskVersion determineVersionByCoreShowVersion() throws Exception { final ManagerResponse coreShowVersionResponse = sendAction(new CommandAction(CMD_SHOW_VERSION)); if (coreShowVersionResponse == null || !(coreShowVersionResponse instanceof CommandResponse)) { // this needs 'command' permissions logger.info("Could not get response for 'core show version'"); return null; } final List<String> coreShowVersionResult = ((CommandResponse) coreShowVersionResponse).getResult(); if (coreShowVersionResult == null || coreShowVersionResult.isEmpty()) { logger.warn("Got empty response for 'core show version'"); return null; } final String coreLine = coreShowVersionResult.get(0); return AsteriskVersion.getDetermineVersionFromString(coreLine); }
java
/**
 * ANTLR-generated lexer rule: matches the 'super' keyword and emits it
 * as token type T__133 on the default channel.
 */
public final void mT__133() throws RecognitionException {
    try {
        int _type = T__133;
        int _channel = DEFAULT_TOKEN_CHANNEL;
        // InternalSARL.g:119:8: ( 'super' )
        // InternalSARL.g:119:10: 'super'
        {
        match("super");
        }

        state.type = _type;
        state.channel = _channel;
    }
    finally {
        // Generated code: nothing to clean up.
    }
}
java
/**
 * Looks up the custom variable at {@code index} within the variable list
 * registered for {@code parameter}; returns {@code null} when no list is
 * registered.
 */
private CustomVariable getCustomVariable(String parameter, int index){
    final CustomVariableList variables = (CustomVariableList) parameters.get(parameter);
    return variables == null ? null : variables.get(index);
}
java
/**
 * Approves a location suggestion
 * (flickr.photos.suggestions.approveSuggestion).
 *
 * @param suggestionId id of the suggestion to approve; must not be empty
 * @return the Flickr API response
 * @throws JinxException on invalid parameters or API failure
 */
public Response approveSuggestion(String suggestionId) throws JinxException {
    JinxUtils.validateParams(suggestionId);
    Map<String, String> params = new TreeMap<>();
    params.put("method", "flickr.photos.suggestions.approveSuggestion");
    params.put("suggestion_id", suggestionId);
    return jinx.flickrPost(params, Response.class);
}
java
/**
 * Runs {@code consumer} in "prepend" mode: packets produced while the
 * {@code prepend} flag is set are collected on {@code stack}, then moved
 * to the front of {@code queue}. Popping the stack reverses it, so the
 * consumer's first write ends up first in the queue.
 *
 * @param consumer callback that writes packets to this instance
 * @return this packet, for chaining
 */
public Packet prepend(Consumer<Packet> consumer) {
    prepend = true;
    consumer.accept(this);
    while (!stack.isEmpty()) {
        queue.offerFirst(stack.pop());
    }
    prepend = false;
    return this;
}
python
def add_fields(self, field_dict):
    """Add a mapping of field names to PayloadField instances.

    :API: public
    """
    for name, payload_field in field_dict.items():
        self.add_field(name, payload_field)
java
/**
 * Performs the read: fetches the first page of toll-free numbers and
 * wraps it in a lazily-paging {@link ResourceSet}.
 *
 * @param client client used to fetch subsequent pages on demand
 */
@Override
public ResourceSet<TollFree> read(final TwilioRestClient client) {
    return new ResourceSet<>(this, client, firstPage(client));
}
java
/**
 * Recursively deletes a file or directory tree; does nothing when the
 * path does not exist.
 *
 * @param file file or directory to delete
 * @throws IOException if any delete operation fails
 */
public static void deleteIfExists(File file) throws IOException {
    if (!file.exists()) {
        return;
    }
    if (!file.isFile()) {
        // Delete directory contents before the directory itself.
        final File[] children = file.listFiles();
        if (children != null && children.length > 0) {
            for (final File child : children) {
                deleteIfExists(child);
            }
        }
    }
    if (!file.delete()) {
        throw new IOException("Delete file failure,path:" + file.getAbsolutePath());
    }
}
python
def detect_from_pkgconfig(self):
    """Detects the igraph include directory, library directory and the
    list of libraries to link to using ``pkg-config``.

    Returns True on success; on failure returns False and leaves the
    build configuration untouched.
    """
    if not buildcfg.has_pkgconfig:
        print("Cannot find the C core of igraph on this system using pkg-config.")
        return False

    cmd = "pkg-config igraph --cflags --libs"
    # Static extensions need the full transitive dependency list.
    if self.static_extension:
        cmd += " --static"
    line, exit_code = get_output(cmd)
    if exit_code > 0 or len(line) == 0:
        return False

    # Partition pkg-config's output by flag prefix, stripping the prefix:
    # -l<lib>, -L<libdir>, -I<includedir>.
    opts = line.strip().split()
    self.libraries = [opt[2:] for opt in opts if opt.startswith("-l")]
    self.library_dirs = [opt[2:] for opt in opts if opt.startswith("-L")]
    self.include_dirs = [opt[2:] for opt in opts if opt.startswith("-I")]
    return True
python
def sca_xsect(scatterer, h_pol=True):
    """Scattering cross section for the current setup, with polarization.

    Args:
        scatterer: a Scatterer instance.
        h_pol: If True (default), use horizontal polarization.
        If False, use vertical polarization.

    Returns:
        The scattering cross section.
    """
    if scatterer.psd_integrator is not None:
        # PSD case: the integrator has the angular integral precomputed.
        return scatterer.psd_integrator.get_angular_integrated(
            scatterer.psd, scatterer.get_geometry(), "sca_xsect")

    # The integrand below mutates the scatterer's geometry; remember the
    # current one so it can be restored afterwards.
    old_geom = scatterer.get_geometry()

    def d_xsect(thet, phi):
        # Point the scatterer at the integration direction (degrees).
        (scatterer.phi, scatterer.thet) = (phi*rad_to_deg, thet*rad_to_deg)
        # NOTE(review): Z is unused here — presumably get_Z() has a side
        # effect needed by sca_intensity; confirm before removing.
        Z = scatterer.get_Z()
        I = sca_intensity(scatterer, h_pol)
        # sin(theta) is the solid-angle Jacobian.
        return I * np.sin(thet)

    try:
        # Integrate the scattered intensity over the full sphere
        # (theta in [0, pi] inner, phi in [0, 2*pi] outer).
        xsect = dblquad(d_xsect, 0.0, 2*np.pi, lambda x: 0.0,
            lambda x: np.pi)[0]
    finally:
        # Always restore the original geometry, even if integration fails.
        scatterer.set_geometry(old_geom)

    return xsect
python
def frame_vars_to_xml(frame_f_locals, hidden_ns=None):
    """ dumps frame variables to XML
    <var name="var_name" scope="local" type="type" value="value"/>

    :param frame_f_locals: mapping of variable name -> value for a frame
    :param hidden_ns: optional container of names to mark as IPython-hidden
    """
    xml = ""

    keys = dict_keys(frame_f_locals)
    # Sort in place when possible, otherwise fall back to sorted().
    if hasattr(keys, 'sort'):
        keys.sort()  # Python 3.0 does not have it
    else:
        keys = sorted(keys)  # Jython 2.1 does not have it

    return_values_xml = ''

    for k in keys:
        try:
            v = frame_f_locals[k]
            eval_full_val = should_evaluate_full_value(v)

            if k == RETURN_VALUES_DICT:
                # Special pseudo-variable holding function return values;
                # each entry is emitted with the isRetVal marker.
                for name, val in dict_iter_items(v):
                    return_values_xml += var_to_xml(val, name, additional_in_xml=' isRetVal="True"')
            else:
                if hidden_ns is not None and k in hidden_ns:
                    xml += var_to_xml(v, str(k), additional_in_xml=' isIPythonHidden="True"',
                                      evaluate_full_value=eval_full_val)
                else:
                    xml += var_to_xml(v, str(k), evaluate_full_value=eval_full_val)
        except Exception:
            # A single bad variable must not break the whole dump.
            pydev_log.exception("Unexpected error, recovered safely.")

    # Show return values as the first entry.
    return return_values_xml + xml
java
/**
 * ANTLR-generated syntactic predicate: matches '.' followed by a
 * methodName (grammar Java.g line 1221). Sets state.failed on mismatch
 * instead of throwing.
 */
public final void synpred253_Java_fragment() throws RecognitionException {
    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1221:22: ( '.' methodName )
    // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:1221:22: '.' methodName
    {
    match(input,47,FOLLOW_47_in_synpred253_Java5729); if (state.failed) return;

    pushFollow(FOLLOW_methodName_in_synpred253_Java5731);
    methodName();

    state._fsp--;
    if (state.failed) return;

    }
}
java
/**
 * Calls this salt function on the given target via salt-ssh and returns
 * the per-minion results asynchronously.
 *
 * @param client the Salt API client
 * @param target minions to run on
 * @param cfg    salt-ssh specific configuration
 * @param auth   authentication credentials
 * @return completion stage with minion id -> result mapping
 */
public CompletionStage<Map<String, Result<SSHResult<R>>>> callSyncSSH(final SaltClient client,
        SSHTarget<?> target, SaltSSHConfig cfg, AuthMethod auth) {
    // Merge the function payload, target properties and SSH config into
    // one argument map for the API call.
    Map<String, Object> args = new HashMap<>();
    args.putAll(getPayload());
    args.putAll(target.getProps());
    SaltSSHUtils.mapConfigPropsToArgs(cfg, args);

    // Build the reflective type Return<List<Map<String, Result<SSHResult<R>>>>>
    // so the nested generic response can be deserialized.
    Type xor = parameterizedType(null, Result.class,
            parameterizedType(null, SSHResult.class, getReturnType().getType()));
    Type map = parameterizedType(null, Map.class, String.class, xor);
    Type listType = parameterizedType(null, List.class, map);
    Type wrapperType = parameterizedType(null, Return.class, listType);

    return client.call(this, Client.SSH, Optional.of(target), args,
            (TypeToken<Return<List<Map<String, Result<SSHResult<R>>>>>>) TypeToken.get(wrapperType),
            auth)
            // The API wraps results in a one-element list; unwrap it.
            .thenApply(wrapper -> wrapper.getResult().get(0));
}
java
/**
 * Returns the read permission for a table, served from the cache when one
 * is configured and built on demand otherwise.
 */
private Permission getReadPermission(String table) {
    if (_readPermissionCache == null) {
        return createReadPermission(table);
    }
    return _readPermissionCache.getUnchecked(table);
}
java
/**
 * Returns the value stored under {@code key}, or {@code defval} when the
 * key is absent.
 */
public int getOrElse (int key, int defval) {
    final Record entry = locateRecord(key);
    if (entry == null) {
        return defval;
    }
    return entry.value;
}
python
def set_settings(self, releases=None, default_release=None):
    """set path to storage

    Propagates the release configuration to the base class and to the
    Servers container so both use the same storage settings.
    """
    super(ReplicaSets, self).set_settings(releases, default_release)
    Servers().set_settings(releases, default_release)
java
/**
 * Schedules a one-shot task: marks the 'scheduledOnce' meter, then
 * delegates to the wrapped executor with the runnable instrumented for
 * metrics collection.
 */
@Nonnull
@Override
public ScheduledFuture<?> schedule(@Nonnull Runnable command, long delay, @Nonnull TimeUnit unit) {
    scheduledOnce.mark();
    return delegate.schedule(new InstrumentedRunnable(command), delay, unit);
}
python
def extend_unique(seq, more):
    """Return a new sequence containing the items in `seq` plus any items
    in `more` that aren't already in `seq`, preserving the order of both.
    The result has the same type as `seq`.
    """
    seen = set(seq)
    additions = []
    for candidate in more:
        if candidate in seen:
            continue
        seen.add(candidate)
        additions.append(candidate)
    # Build the tail with seq's own type so concatenation works for
    # lists, tuples, etc.
    return seq + type(seq)(additions)
python
def machine_to_machine(self):
    """
    Access the machine_to_machine

    :returns: twilio.rest.api.v2010.account.available_phone_number.machine_to_machine.MachineToMachineList
    :rtype: twilio.rest.api.v2010.account.available_phone_number.machine_to_machine.MachineToMachineList
    """
    # Lazily construct the list on first access and memoize it.
    if self._machine_to_machine is None:
        self._machine_to_machine = MachineToMachineList(
            self._version,
            account_sid=self._solution['account_sid'],
            country_code=self._solution['country_code'],
        )
    return self._machine_to_machine
python
def _local_to_shape(self, local_x, local_y): """Translate local coordinates point to shape coordinates. Shape coordinates have the same unit as local coordinates, but are offset such that the origin of the shape coordinate system (0, 0) is located at the top-left corner of the shape bounding box. """ return ( local_x - self.shape_offset_x, local_y - self.shape_offset_y )
python
def keyPressEvent( self, event ):
    """
    Handles the Ctrl+C/Ctrl+V events for copy & paste, plus Delete and
    Backspace for removing tags.

    :param      event | <QKeyEvent>
    """
    # Ctrl+C: copy the current selection and consume the event.
    if ( event.key() == Qt.Key_C and \
         event.modifiers() == Qt.ControlModifier ):
        self.copy()
        event.accept()
        return

    # Ctrl+V: paste and consume the event.
    elif ( event.key() == Qt.Key_V and \
           event.modifiers() == Qt.ControlModifier ):
        self.paste()
        event.accept()
        return

    # Delete: remove every selected tag, highest row first so earlier
    # removals do not shift the remaining indexes.
    elif ( event.key() == Qt.Key_Delete ):
        indexes = map(self.row, self.selectedItems())
        for index in reversed(sorted(indexes)):
            self.takeItem(index)
        event.accept()
        return

    # Backspace: remove the tag just before the trailing creator item.
    elif event.key() == Qt.Key_Backspace:
        if self.count() > 1:
            self.takeItem(self.count() - 2)
            self.setFocus()

    super(XMultiTagEdit, self).keyPressEvent(event)
java
/**
 * Creates a block-based Otsu thresholder that converts a gray image into a binary one.
 * Honors a global override hook if one is installed; otherwise builds a (possibly
 * concurrent) block thresholder over GrayU8 and wraps it so it accepts the requested
 * input type.
 *
 * @param regionWidth width of the square blocks the image is divided into
 * @param scale scale factor applied to the computed threshold
 * @param down true to threshold down (pixel &le; threshold), false to threshold up
 * @param thresholdFromLocalBlocks true to consider neighboring blocks when thresholding
 * @param otsu2 true to use the Otsu-2 variant
 * @param tuning tuning parameter for the Otsu computation
 * @param inputType type of gray input image
 * @return thresholder for the requested input type
 */
public static <T extends ImageGray<T>> InputToBinary<T>
blockOtsu(ConfigLength regionWidth, double scale, boolean down, boolean thresholdFromLocalBlocks,
          boolean otsu2, double tuning, Class<T> inputType) {
    // Test/override hook takes precedence over the default construction.
    if (BOverrideFactoryThresholdBinary.blockOtsu != null)
        return BOverrideFactoryThresholdBinary.blockOtsu.handle(otsu2, regionWidth, tuning, scale, down,
                thresholdFromLocalBlocks, inputType);

    BlockProcessor processor = new ThresholdBlockOtsu(otsu2, tuning, scale, down);
    InputToBinary<GrayU8> otsu;

    // Pick the multi-threaded implementation when concurrency is globally enabled.
    if (BoofConcurrency.USE_CONCURRENT) {
        otsu = new ThresholdBlock_MT<>(processor, regionWidth, thresholdFromLocalBlocks, GrayU8.class);
    } else {
        otsu = new ThresholdBlock<>(processor, regionWidth, thresholdFromLocalBlocks, GrayU8.class);
    }

    // Adapter converts the caller's input type to GrayU8 before thresholding.
    return new InputToBinarySwitch<>(otsu, inputType);
}
python
def _call_marginalizevperp(self,o,integrate_method='dopr54_c',**kwargs): """Call the DF, marginalizing over perpendicular velocity""" #Get d, l, vlos l= o.ll(obs=[1.,0.,0.],ro=1.)*_DEGTORAD vlos= o.vlos(ro=1.,vo=1.,obs=[1.,0.,0.,0.,0.,0.]) R= o.R(use_physical=False) phi= o.phi(use_physical=False) #Get local circular velocity, projected onto the los if isinstance(self._pot,list): vcirc= calcRotcurve([p for p in self._pot if not p.isNonAxi],R)[0] else: vcirc= calcRotcurve(self._pot,R)[0] vcirclos= vcirc*math.sin(phi+l) #Marginalize alphalos= phi+l if not 'nsigma' in kwargs or ('nsigma' in kwargs and \ kwargs['nsigma'] is None): nsigma= _NSIGMA else: nsigma= kwargs['nsigma'] kwargs.pop('nsigma',None) #BOVY: add asymmetric drift here? if math.fabs(math.sin(alphalos)) < math.sqrt(1./2.): sigmaR1= nu.sqrt(self._initdf.sigmaT2(R,phi=phi, use_physical=False)) #Slight abuse cosalphalos= math.cos(alphalos) tanalphalos= math.tan(alphalos) return integrate.quad(_marginalizeVperpIntegrandSinAlphaSmall, -nsigma,nsigma, args=(self,R,cosalphalos,tanalphalos, vlos-vcirclos,vcirc, sigmaR1,phi), **kwargs)[0]/math.fabs(cosalphalos)*sigmaR1 else: sigmaR1= nu.sqrt(self._initdf.sigmaR2(R,phi=phi, use_physical=False)) sinalphalos= math.sin(alphalos) cotalphalos= 1./math.tan(alphalos) return integrate.quad(_marginalizeVperpIntegrandSinAlphaLarge, -nsigma,nsigma, args=(self,R,sinalphalos,cotalphalos, vlos-vcirclos,vcirc,sigmaR1,phi), **kwargs)[0]/math.fabs(sinalphalos)*sigmaR1
python
def cmd_output(self, args):
    '''handle output commands'''
    # No subcommand (or "list") shows the current outputs.
    if not args or args[0] == "list":
        self.cmd_output_list()
        return
    subcommand = args[0]
    if subcommand == "add":
        if len(args) != 2:
            print("Usage: output add OUTPUT")
            return
        self.cmd_output_add(args[1:])
    elif subcommand == "remove":
        if len(args) != 2:
            print("Usage: output remove OUTPUT")
            return
        self.cmd_output_remove(args[1:])
    elif subcommand == "sysid":
        if len(args) != 3:
            print("Usage: output sysid SYSID OUTPUT")
            return
        self.cmd_output_sysid(args[1:])
    else:
        print("usage: output <list|add|remove|sysid>")
java
/**
 * Computes the Euclidean edge intensity from the image derivatives, dispatching
 * to the concrete {@code GradientToEdgeFeatures.intensityE} overload that matches
 * the derivative image type.
 *
 * @param derivX image derivative along x (GrayF32, GrayS16, or GrayS32)
 * @param derivY image derivative along y (same concrete type as derivX)
 * @param intensity output edge intensity image
 * @throws IllegalArgumentException if the derivative type is not supported
 */
static public <D extends ImageGray<D>> void intensityE(D derivX, D derivY, GrayF32 intensity) {
    if (derivX instanceof GrayF32) {
        GradientToEdgeFeatures.intensityE((GrayF32) derivX, (GrayF32) derivY, intensity);
    } else if (derivX instanceof GrayS16) {
        GradientToEdgeFeatures.intensityE((GrayS16) derivX, (GrayS16) derivY, intensity);
    } else if (derivX instanceof GrayS32) {
        GradientToEdgeFeatures.intensityE((GrayS32) derivX, (GrayS32) derivY, intensity);
    } else {
        throw new IllegalArgumentException("Unknown input type");
    }
}
java
/**
 * Searches every .jar/.zip entry on the JVM class path for the .class file
 * of the given fully qualified class name.
 *
 * @param className fully qualified class name (dot-separated)
 * @return the matching zip entry, or null if the class is not found in any archive
 * @throws IOException if an archive on the class path cannot be read
 */
public static ZipEntry getClassZipEntryFromZipInClassPath(String className) throws IOException {
    // Convert "com.foo.Bar" into the archive-internal path "com/foo/Bar.class".
    String fileName = StringSupport.replaceAll(className, ".", "/");
    fileName += ".class";
    // NOTE(review): splits on both ';' and ':' to cover Windows and Unix path
    // separators — confirm this is intended over File.pathSeparator, since a
    // Unix path containing ':' drive-style segments would be mis-split.
    Collection<String> jars = StringSupport.split(System.getProperty("java.class.path"), ";:", false);
    for (String jarFileName : jars) {
        // Only archive entries can contain zip entries; plain directories are skipped.
        if (jarFileName.endsWith(".jar") || jarFileName.endsWith(".zip")) {
            ZipEntry entry = getZipEntryFromZip(fileName, jarFileName);
            if (entry != null) {
                return entry;
            }
        }
    }
    return null;
}
python
def TLV_GET(attrs, attrNum, format):
    """ Get a tag-length-value encoded attribute.

    Looks up attribute ``attrNum`` in ``attrs`` and unpacks its value from
    the underlying buffer using the given ``struct`` format.  A format of
    ``'s'`` is expanded to the attribute's full byte length so the whole
    payload is returned as a string.
    """
    attrView = attrs[attrNum]
    if format == 's':
        # Size the string format to the attribute's actual length.
        format = str(attrView.len) + format
    try:
        (result,) = struct.unpack_from(format, attrView.buf, attrView.offset)
    except TypeError:
        # Working around struct.unpack_from issue #10212: some buffer types
        # are rejected, so fall back to copying the bytes into a str first.
        (result,) = struct.unpack_from(format, str(bytearray(attrView.buf)),
                                       attrView.offset)
    return result
python
def request(self, url, method='GET', params=None, data=None,
            expected_response_code=200, headers=None):
    """Make a HTTP request to the InfluxDB API.

    :param url: the path of the HTTP request, e.g. write, query, etc.
    :type url: str
    :param method: the HTTP method for the request, defaults to GET
    :type method: str
    :param params: additional parameters for the request, defaults to None
    :type params: dict
    :param data: the data of the request, defaults to None
    :type data: str
    :param expected_response_code: the expected response code of
        the request, defaults to 200
    :type expected_response_code: int
    :param headers: headers to add to the request
    :type headers: dict
    :returns: the response from the request
    :rtype: :class:`requests.Response`
    :raises InfluxDBServerError: if the response code is any server error
        code (5xx)
    :raises InfluxDBClientError: if the response code is not the
        same as `expected_response_code` and is not a server error code
    """
    url = "{0}/{1}".format(self._baseurl, url)

    if headers is None:
        headers = self._headers

    if params is None:
        params = {}

    if isinstance(data, (dict, list)):
        data = json.dumps(data)

    # Try to send the request more than once by default (see #103)
    retry = True
    _try = 0
    while retry:
        try:
            response = self._session.request(
                method=method,
                url=url,
                auth=(self._username, self._password),
                params=params,
                data=data,
                headers=headers,
                proxies=self._proxies,
                verify=self._verify_ssl,
                timeout=self._timeout
            )
            break
        except (requests.exceptions.ConnectionError,
                requests.exceptions.HTTPError,
                requests.exceptions.Timeout):
            _try += 1
            # self._retries == 0 means retry forever; otherwise give up
            # once the attempt count reaches the configured limit.
            if self._retries != 0:
                retry = _try < self._retries
            if method == "POST":
                # Exponential backoff with jitter for write-style requests.
                time.sleep((2 ** _try) * random.random() / 100.0)
            if not retry:
                raise
    # if there's not an error, there must have been a successful response
    if 500 <= response.status_code < 600:
        raise InfluxDBServerError(response.content)
    elif response.status_code == expected_response_code:
        return response
    else:
        raise InfluxDBClientError(response.content, response.status_code)
java
/**
 * Deletes Partition rows matching the supplied criteria.  Builds a JPQL DELETE
 * whose WHERE clause contains one predicate per populated criteria attribute;
 * a null or empty criteria deletes every partition.
 *
 * @param criteria attributes to match, or null to match all partitions
 * @return number of rows deleted
 * @throws Exception if the persistence operation fails
 */
@Override
public int remove(PartitionRecord criteria) throws Exception {
    StringBuilder delete = new StringBuilder(111)
            .append("DELETE FROM Partition p WHERE");
    if (criteria != null) {
        // Each present attribute contributes a predicate followed by " AND".
        if (criteria.hasExecutor())
            delete.append(" p.EXECUTOR=:x AND");
        if (criteria.hasHostName())
            delete.append(" p.HOSTNAME=:h AND");
        if (criteria.hasId())
            delete.append(" p.ID=:i AND");
        if (criteria.hasLibertyServer())
            delete.append(" p.LSERVER=:l AND");
        if (criteria.hasUserDir())
            delete.append(" p.USERDIR=:u AND");
    }
    int length = delete.length();
    // Strip the dangling suffix: the string ends either with "WHERE" (last
    // char 'E', no predicates appended -> remove 6 chars " WHERE") or with
    // "AND" (remove the trailing 4 chars " AND").
    delete.delete(length - (delete.charAt(length - 1) == 'E' ? 6 : 4), length);

    final boolean trace = TraceComponent.isAnyTracingEnabled();
    if (trace && tc.isEntryEnabled())
        Tr.entry(this, tc, "remove", criteria, delete);

    EntityManager em = getPersistenceServiceUnit().createEntityManager();
    try {
        Query query = em.createQuery(delete.toString());
        if (criteria != null) {
            // Bind only the parameters whose predicates were appended above.
            if (criteria.hasExecutor())
                query.setParameter("x", criteria.getExecutor());
            if (criteria.hasHostName())
                query.setParameter("h", criteria.getHostName());
            if (criteria.hasId())
                query.setParameter("i", criteria.getId());
            if (criteria.hasLibertyServer())
                query.setParameter("l", criteria.getLibertyServer());
            if (criteria.hasUserDir())
                query.setParameter("u", criteria.getUserDir());
        }
        int count = query.executeUpdate();

        if (trace && tc.isEntryEnabled())
            Tr.exit(this, tc, "remove", count);
        return count;
    } finally {
        em.close();
    }
}
java
/**
 * JCas-generated setter: stores {@code v} at index {@code i} of this
 * annotation's pointerList feature array, after feature and bounds checks.
 *
 * @param i index into the pointerList array
 * @param v the MMAXPointer to store
 */
public void setPointerList(int i, MMAXPointer v) {
    if (MMAXAnnotation_Type.featOkTst && ((MMAXAnnotation_Type) jcasType).casFeat_pointerList == null)
        jcasType.jcas.throwFeatMissing("pointerList", "de.julielab.jules.types.mmax.MMAXAnnotation");
    // Validate the index against the underlying CAS array before writing.
    jcasType.jcas.checkArrayBounds(jcasType.ll_cas.ll_getRefValue(addr, ((MMAXAnnotation_Type) jcasType).casFeatCode_pointerList), i);
    jcasType.ll_cas.ll_setRefArrayValue(jcasType.ll_cas.ll_getRefValue(addr, ((MMAXAnnotation_Type) jcasType).casFeatCode_pointerList), i, jcasType.ll_cas.ll_getFSRef(v));
}
java
/**
 * Returns the localized description of the explorer resource type with the
 * given name, falling back to the type name itself if the localization key
 * is unknown, or to the empty string if no explorer type settings exist.
 *
 * @param locale the locale to localize for
 * @param name the explorer resource type name
 * @return the localized description, or "" if the type has no settings
 */
public static String getResourceTypeDescription(Locale locale, String name) {
    CmsExplorerTypeSettings settings = OpenCms.getWorkplaceManager().getExplorerTypeSetting(name);
    if (settings != null) {
        // try to find the localized key
        String key = settings.getInfo();
        if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(key)) {
            // keyDefault() falls back to the raw name when the key is missing.
            return OpenCms.getWorkplaceManager().getMessages(locale).keyDefault(key, name);
        }
    }
    return "";
}
java
/**
 * Collects all {@link EntityIdValue} values found in the given statement group.
 * Statements whose value is null or of another value type are skipped.
 *
 * @param statementGroup group of statements to scan
 * @return list of entity id values, in statement order
 */
private List<EntityIdValue> getItemIdValueList(StatementGroup statementGroup) {
    List<EntityIdValue> values = new ArrayList<>(statementGroup.size());
    for (Statement statement : statementGroup) {
        Value value = statement.getValue();
        if (value instanceof EntityIdValue) {
            values.add((EntityIdValue) value);
        }
    }
    return values;
}
python
def _weighted_spearman(y, y_pred, w):
    """Calculate the weighted Spearman correlation coefficient.

    Spearman correlation is the Pearson correlation of the column-wise
    ranks, so both inputs are ranked along axis 0 first.
    """
    ranked_true = np.apply_along_axis(rankdata, 0, y)
    ranked_pred = np.apply_along_axis(rankdata, 0, y_pred)
    return _weighted_pearson(ranked_pred, ranked_true, w)
python
def pull(self):
    """
    Pull selected repo from a remote git repository,
    while preserving user changes
    """
    # A missing checkout means this is a first-time clone, not an update.
    if os.path.exists(self.repo_dir):
        yield from self.update()
    else:
        yield from self.initialize_repo()
java
/**
 * Returns the DHT bootstrap nodes as (host, port) pairs, copied out of the
 * native string/int pair vector so callers get a plain Java list.
 *
 * @return list of host/port pairs, in native vector order
 */
public ArrayList<Pair<String, Integer>> dhtNodes() {
    string_int_pair_vector nodes = p.get_dht_nodes();
    int count = (int) nodes.size();
    ArrayList<Pair<String, Integer>> result = new ArrayList<>(count);
    for (int index = 0; index < count; index++) {
        string_int_pair node = nodes.get(index);
        result.add(new Pair<>(node.getFirst(), node.getSecond()));
    }
    return result;
}
python
def write_packages(self, reqs_file):
    """ Dump the packages in the catalog in a requirements file """
    # One "<package>\n" line per catalog entry, streamed lazily.
    lines = ('{}\n'.format(pkg) for pkg in self.packages)
    write_file_lines(reqs_file, lines)
python
def group_create(name, content, **kwargs):
    """
    Create routing group.

    You can optionally specify content for created routing group. It can be
    either direct JSON expression in single quotes, or path to the json file
    with settings. Settings itself must be key-value list, where `key`
    represents application name, and `value` represents its weight.
    For example:

    cocaine-tool group create -n new_group -c '{
        "app": 1,
        "another_app": 2
    }'.

    Warning: all application weights must be positive integers, total weight
    must be positive.
    """
    ctx = Context(**kwargs)
    action_kwargs = {
        'storage': ctx.repo.create_secure_service('storage'),
        'name': name,
        'content': content,
    }
    ctx.execute_action('group:create', **action_kwargs)
python
def randomwrap(func):
    """
    Decorator for random value generators

    Allows passing of sequence of parameters, as well as a size argument.

    Convention:

      - If size=1 and the parameters are all scalars, return a scalar.
      - If size=1, the random variates are 1D.
      - If the parameters are scalars and size > 1, the random variates are 1D.
      - If size > 1 and the parameters are sequences, the random variates are
        aligned as (size, max(length)), where length is the parameters size.

    :Example:
      >>> rbernoulli(.1)
      0
      >>> rbernoulli([.1,.9])
      np.asarray([0, 1])
      >>> rbernoulli(.9, size=2)
      np.asarray([1, 1])
      >>> rbernoulli([.1,.9], 2)
      np.asarray([[0, 1], [0, 1]])
    """
    # Find the order of the arguments.
    refargs, defaults = utils.get_signature(func)
    npos = len(refargs) - len(defaults)  # Number of positional args.
    nkwds = len(defaults)  # Number of keyword args.
    mv = func.__name__[
        1:] in mv_continuous_distributions + mv_discrete_distributions

    # Use the NumPy random function directly if this is not a multivariate
    # distribution
    if not mv:
        return func

    def wrapper(*args, **kwds):
        # First transform keyword arguments into positional arguments.
        n = len(args)
        if nkwds > 0:
            args = list(args)
            for i, k in enumerate(refargs[n:]):
                if k in kwds.keys():
                    args.append(kwds[k])
                else:
                    args.append(defaults[n - npos + i])

        r = []
        largs = []
        nr = args[-1]
        length = [np.atleast_1d(a).shape[0] for a in args]
        dimension = [np.atleast_1d(a).ndim for a in args]
        N = max(length)
        if len(set(dimension)) > 2:
            # BUG FIX: previously raised a bare string, which itself raises
            # TypeError in Python 3 instead of reporting the real problem.
            raise ValueError('Dimensions do not agree.')
        # Make sure all elements are iterable and have consistent lengths, ie
        # 1 or n, but not m and n.
        for arg, s in zip(args, length):
            # BUG FIX: np.empty(N, type) passed the builtin ``type`` as the
            # dtype, which modern NumPy rejects; an object array holds any
            # parameter value.
            arr = np.empty(N, dtype=object)
            if s == 1:
                arr.fill(arg)
            elif s == N:
                arr = np.asarray(arg)
            else:
                raise RuntimeError('Arguments size not allowed: %s.' % s)
            largs.append(arr)

        if mv and N > 1 and max(dimension) > 1 and nr > 1:
            raise ValueError(
                'Multivariate distributions cannot take s>1 and multiple values.')

        if mv:
            # NOTE(review): this always overwrites largs[0]; looks like it was
            # meant to be largs[i] — confirm against upstream before changing.
            for i, arg in enumerate(largs[:-1]):
                largs[0] = np.atleast_2d(arg)

        for arg in zip(*largs):
            r.append(func(*arg))

        size = arg[-1]
        vec_stochastics = len(r) > 1
        if mv:
            if nr == 1:
                return r[0]
            else:
                return np.vstack(r)
        else:
            if size > 1 and vec_stochastics:
                return np.atleast_2d(r).T
            elif vec_stochastics or size > 1:
                return np.concatenate(r)
            else:  # Scalar case
                return r[0][0]

    wrapper.__doc__ = func.__doc__
    wrapper.__name__ = func.__name__
    return wrapper
python
def add_to_subset(self, id, s):
    """
    Adds a node to a subset

    Creates the node's ``meta`` dict and its ``subsets`` list on demand,
    then appends the subset identifier.
    """
    node = self.node(id)
    meta = node.setdefault('meta', {})
    subsets = meta.setdefault('subsets', [])
    subsets.append(s)
java
/**
 * Fetches order details from OkCoin for the given order id(s) and symbol.
 *
 * @param symbol currency pair the orders belong to
 * @param type order type flag passed through to the exchange API
 * @param orderIds comma-separated order ids to look up
 * @return the order result returned by the exchange
 * @throws IOException if the HTTP call fails or the exchange reports an error
 */
public OkCoinOrderResult getOrder(String symbol, Integer type, String orderIds) throws IOException {
    OkCoinOrderResult orderResult = okCoin.getOrders(apikey, type, orderIds, symbol, signatureCreator());
    // returnOrThrow converts exchange-level error codes into exceptions.
    return returnOrThrow(orderResult);
}
java
/**
 * Wraps a {@link BiFunction} so that every invocation captures both arguments
 * and the produced result into the supplied boxes before returning the result.
 * Useful for asserting on interactions in tests.
 *
 * @param function the function to spy on
 * @param result box that receives each invocation's result
 * @param param1 box that receives each invocation's first argument
 * @param param2 box that receives each invocation's second argument
 * @return a delegating function that records its inputs and output
 */
public static <T1, T2, R> BiFunction<T1, T2, R> spy(BiFunction<T1, T2, R> function, Box<R> result,
        Box<T1> param1, Box<T2> param2) {
    return new BinaryCapturingFunction<>(function, result, param1, param2);
}
java
/**
 * JCas-generated getter: returns the value of the nameOfSubstance feature
 * of this Chemical annotation, after the standard feature-existence check.
 *
 * @return the substance name stored in the CAS, or null if unset
 */
public String getNameOfSubstance() {
    if (Chemical_Type.featOkTst && ((Chemical_Type) jcasType).casFeat_nameOfSubstance == null)
        jcasType.jcas.throwFeatMissing("nameOfSubstance", "de.julielab.jules.types.Chemical");
    return jcasType.ll_cas.ll_getStringValue(addr, ((Chemical_Type) jcasType).casFeatCode_nameOfSubstance);
}
java
/**
 * Returns the property file with the given name from the package, creating
 * and registering a new one if none exists yet.
 *
 * @param thePackage the code-model package to search
 * @param name the resource-file name to look up
 * @return the existing or newly created property file, never null
 */
public static JPropertyFile getOrCreatePropertyFile(JPackage thePackage, String name) {
    for (Iterator<JResourceFile> iterator = thePackage.propertyFiles(); iterator.hasNext();) {
        // The iterator is already typed; the old code re-cast each element
        // and kept looping after a match via a flag variable.
        JResourceFile resourceFile = iterator.next();
        if (resourceFile instanceof JPropertyFile && name.equals(resourceFile.name())) {
            return (JPropertyFile) resourceFile;
        }
    }
    // Not found: create, register with the package, and return it.
    JPropertyFile propertyFile = new JPropertyFile(name);
    thePackage.addResourceFile(propertyFile);
    return propertyFile;
}
java
private boolean extraEntity(String outputString, int charToMap) { boolean extra = false; if (charToMap < ASCII_MAX) { switch (charToMap) { case '"' : // quot if (!outputString.equals("&quot;")) extra = true; break; case '&' : // amp if (!outputString.equals("&amp;")) extra = true; break; case '<' : // lt if (!outputString.equals("&lt;")) extra = true; break; case '>' : // gt if (!outputString.equals("&gt;")) extra = true; break; default : // other entity in range 0 to 127 extra = true; } } return extra; }
python
def download_file(self, src_uri, target):
    """Download file from MediaFire.

    src_uri -- MediaFire file URI to download
    target -- download path or file-like object in write mode

    Streams the file over HTTPS, verifying its SHA-256 checksum against the
    hash reported by the API.  Raises MediaFireError if the URI is not a
    file, and DownloadError on checksum mismatch.
    """
    resource = self.get_resource_by_uri(src_uri)

    if not isinstance(resource, File):
        raise MediaFireError("Only files can be downloaded")

    quick_key = resource['quickkey']
    result = self.api.file_get_links(quick_key=quick_key,
                                     link_type='direct_download')
    direct_download = result['links'][0]['direct_download']

    # Force download over HTTPS
    direct_download = direct_download.replace('http:', 'https:')

    name = resource['filename']

    target_is_filehandle = True if hasattr(target, 'write') else False

    if not target_is_filehandle:
        # A directory target (or trailing slash) means "save under the
        # remote file name inside that directory".
        if (os.path.exists(target) and os.path.isdir(target)) or \
                target.endswith("/"):
            target = os.path.join(target, name)

        if not os.path.isdir(os.path.dirname(target)):
            os.makedirs(os.path.dirname(target))

        logger.info("Downloading %s to %s", src_uri, target)

    response = requests.get(direct_download, stream=True)
    try:
        if target_is_filehandle:
            out_fd = target
        else:
            out_fd = open(target, 'wb')

        # Hash while streaming so the file is read only once.
        checksum = hashlib.sha256()
        for chunk in response.iter_content(chunk_size=4096):
            if chunk:
                out_fd.write(chunk)
                checksum.update(chunk)

        checksum_hex = checksum.hexdigest().lower()
        if checksum_hex != resource['hash']:
            raise DownloadError("Hash mismatch ({} != {})".format(
                resource['hash'], checksum_hex))

        logger.info("Download completed successfully")
    finally:
        # Only close handles we opened ourselves; caller-supplied handles
        # stay open.
        if not target_is_filehandle:
            out_fd.close()
java
/**
 * Converts a protobuf set-ACL action into its wire-independent enum equivalent.
 *
 * NOTE(review): a null argument throws IllegalStateException rather than
 * IllegalArgumentException; kept as-is since callers may depend on the type.
 *
 * @param pSetAclAction the protobuf action, must not be null
 * @return the corresponding {@link SetAclAction}
 * @throws IllegalStateException if the action is null or unrecognized
 */
public static SetAclAction fromProto(File.PSetAclAction pSetAclAction) {
    if (pSetAclAction == null) {
        throw new IllegalStateException("Null proto set acl action.");
    }
    switch (pSetAclAction) {
        case REPLACE:
            return SetAclAction.REPLACE;
        case MODIFY:
            return SetAclAction.MODIFY;
        case REMOVE:
            return SetAclAction.REMOVE;
        case REMOVE_ALL:
            return SetAclAction.REMOVE_ALL;
        case REMOVE_DEFAULT:
            return SetAclAction.REMOVE_DEFAULT;
        default:
            throw new IllegalStateException("Unrecognized proto set acl action: " + pSetAclAction);
    }
}
java
/**
 * Builds the tenant-pod URL for the credential-store extension endpoint.
 *
 * @return the resource URL for storing credentials
 */
public static MozuUrl storeCredentialsUrl() {
    UrlFormatter formatter = new UrlFormatter("/api/platform/extensions/credentialStore/");
    return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
}
java
/**
 * Creates a JsonSet from the given iterable.  A JsonElement argument is added
 * as a single entry; any other iterable has its elements added one by one,
 * converting non-JsonElement items to primitives.
 *
 * @param c source iterable, may itself be a JsonElement
 * @return a new JsonSet containing the element(s)
 */
public static @Nonnull JsonSet set(Iterable<?> c) {
    JsonSet jjArray = new JsonSet();
    if (c instanceof JsonElement) {
        // BUG FIX: previously cast to JsonArray, which threw
        // ClassCastException for any non-array JsonElement implementing
        // Iterable; the instanceof check only guarantees JsonElement.
        jjArray.add((JsonElement) c);
    } else {
        for (Object o : c) {
            if (o instanceof JsonElement) {
                jjArray.add((JsonElement) o);
            } else {
                jjArray.add(primitive(o));
            }
        }
    }
    return jjArray;
}
python
def wait(self, timeout=None):
    """
    Waits (blocks) until a running pipeline finishes.

    Arguments:

      - timeout(``int``) [default: ``None``] Maximum number of seconds to
        block; the default waits indefinitely for the pipeline to finish.

    Raises ``PlumberError`` if the pipeline is not started, not running,
    or currently pausing.
    """
    started = self._started.isSet()
    running = self._running.isSet()
    pausing = self._pausing.isSet()
    if started and running and not pausing:
        self._finished.wait(timeout)
    else:
        raise PlumberError
java
/**
 * Returns the record identity of the given POJO.  Looks up an already-managed
 * document first; if the POJO is not tracked, falls back to the serializer's
 * stored object id.
 *
 * @param iPojo the mapped object
 * @return the record id, or whatever the serializer reports for untracked objects
 */
@Override
public ORID getIdentity(final Object iPojo) {
    // Fails fast if the database connection has been closed.
    checkOpeness();
    final ODocument record = getRecordByUserObject(iPojo, false);
    if (record != null)
        return record.getIdentity();
    return OObjectSerializerHelper.getObjectID(this, iPojo);
}
java
/**
 * Walks this name/value tree along the path described by the header's
 * name-value tree and returns the leaf node.
 *
 * @param header supplies the (name, value) path segments to descend through
 * @param bAddIfNotFound true to create missing intermediate nodes while descending
 * @return the leaf node, or null if a segment is missing and creation is disabled
 */
public NameValue getNameValueLeaf(BaseMessageHeader header, boolean bAddIfNotFound) {
    Object[][] mxString = header.getNameValueTree();
    NameValue node = this;
    if (mxString != null) {
        // Descend one tree level per (name, value) pair in the header path.
        for (int i = 0; i < mxString.length; i++) {
            node = node.getNameValueNode((String) mxString[i][MessageConstants.NAME], mxString[i][MessageConstants.VALUE], bAddIfNotFound);
            if (node == null)
                return null;
        }
    }
    return node;
}
python
def setInputFormatText(self, text):
    """
    Sets the input format text for this widget to the given value.
    Unknown format names are silently ignored.

    :param      text | <str>
    """
    try:
        fmt = XLineEdit.InputFormat[nativestring(text)]
    except KeyError:
        # Leave the current format untouched for unrecognized names.
        return
    self._inputFormat = fmt
java
public Matrix solve(Matrix B) { //Solve A x = L L^T x = b, for x //First solve L y = b Matrix y = forwardSub(L, B); //Sole L^T x = y Matrix x = backSub(L, y); return x; }
python
def get_snapshots(self):
    """
    Returns a list of all completed snapshots for this volume ID.

    Snapshots are matched against the current volume ID as well as any past
    volume IDs, and are returned sorted by start time (oldest first).  Each
    returned snapshot gets a parsed ``date`` attribute and ``keep = True``.
    """
    ec2 = self.get_ec2_connection()
    rs = ec2.get_all_snapshots()
    all_vols = [self.volume_id] + self.past_volume_ids
    snaps = []
    for snapshot in rs:
        if snapshot.volume_id in all_vols:
            # Only fully completed snapshots are eligible.
            if snapshot.progress == '100%':
                snapshot.date = boto.utils.parse_ts(snapshot.start_time)
                snapshot.keep = True
                snaps.append(snapshot)
    # BUG FIX: list.sort(cmp=...) and the cmp() builtin were removed in
    # Python 3; sorting by a key function is equivalent and portable.
    snaps.sort(key=lambda snapshot: snapshot.date)
    return snaps
python
def marshal(data, fields, envelope=None):
    """Takes raw data (in the form of a dict, list, object) and a dict of
    fields to output and filters the data based on those fields.

    :param data: the actual object(s) from which the fields are taken from
    :param fields: a dict of whose keys will make up the final serialized
                   response output
    :param envelope: optional key that will be used to envelop the serialized
                     response

    >>> from flask_restful import fields, marshal
    >>> data = { 'a': 100, 'b': 'foo' }
    >>> mfields = { 'a': fields.Raw }

    >>> marshal(data, mfields)
    OrderedDict([('a', 100)])

    >>> marshal(data, mfields, envelope='data')
    OrderedDict([('data', OrderedDict([('a', 100)]))])
    """
    def instantiate(field):
        # Field specs may be given either as classes or ready-made instances.
        return field() if isinstance(field, type) else field

    def wrap(value):
        # Envelop the serialized value under the requested key, if any.
        return OrderedDict([(envelope, value)]) if envelope else value

    # A list/tuple of objects is marshalled element-wise (without envelope).
    if isinstance(data, (list, tuple)):
        return wrap([marshal(item, fields) for item in data])

    pairs = []
    for key, spec in fields.items():
        if isinstance(spec, dict):
            # Nested field dicts marshal a sub-structure recursively.
            pairs.append((key, marshal(data, spec)))
        else:
            pairs.append((key, instantiate(spec).output(key, data)))
    return wrap(OrderedDict(pairs))
python
def refresh(self):
    """ Refresh the access token.

    Exchanges the stored refresh token for a new access token via the OAuth
    token endpoint, authenticating with the client credentials, and replaces
    the cached token.  Raises ``requests.HTTPError`` on a non-2xx response.
    """
    data = {
        'grant_type': 'refresh_token',
        'refresh_token': self._token.refresh_token
    }
    response = self.http_client.post(self.URL,
                                     data=data,
                                     auth=(self.client_id,
                                           self.client_secret))
    # Surface HTTP-level failures before attempting to parse the body.
    response.raise_for_status()
    self._token = Token.from_json(response.json())
java
/**
 * Performs an in-place upper Cholesky decomposition of the 6x6 matrix A,
 * hand-unrolled column by column: on return the upper triangle holds R such
 * that A = R^T R and the lower triangle is zeroed.
 *
 * @param A symmetric positive-definite matrix; overwritten with the factor
 * @return true if the decomposition succeeded, false if A was not positive
 *         definite (detected via NaN/Inf in the final diagonal element)
 */
public static boolean cholU( DMatrix6x6 A ) {
    // Column 1
    A.a11 = Math.sqrt(A.a11);
    A.a21 = 0;
    A.a31 = 0;
    A.a41 = 0;
    A.a51 = 0;
    A.a61 = 0;
    // Column 2
    A.a12 = (A.a12)/A.a11;
    A.a22 = Math.sqrt(A.a22-A.a12*A.a12);
    A.a32 = 0;
    A.a42 = 0;
    A.a52 = 0;
    A.a62 = 0;
    // Column 3
    A.a13 = (A.a13)/A.a11;
    A.a23 = (A.a23-A.a12*A.a13)/A.a22;
    A.a33 = Math.sqrt(A.a33-A.a13*A.a13-A.a23*A.a23);
    A.a43 = 0;
    A.a53 = 0;
    A.a63 = 0;
    // Column 4
    A.a14 = (A.a14)/A.a11;
    A.a24 = (A.a24-A.a12*A.a14)/A.a22;
    A.a34 = (A.a34-A.a13*A.a14-A.a23*A.a24)/A.a33;
    A.a44 = Math.sqrt(A.a44-A.a14*A.a14-A.a24*A.a24-A.a34*A.a34);
    A.a54 = 0;
    A.a64 = 0;
    // Column 5
    A.a15 = (A.a15)/A.a11;
    A.a25 = (A.a25-A.a12*A.a15)/A.a22;
    A.a35 = (A.a35-A.a13*A.a15-A.a23*A.a25)/A.a33;
    A.a45 = (A.a45-A.a14*A.a15-A.a24*A.a25-A.a34*A.a35)/A.a44;
    A.a55 = Math.sqrt(A.a55-A.a15*A.a15-A.a25*A.a25-A.a35*A.a35-A.a45*A.a45);
    A.a65 = 0;
    // Column 6
    A.a16 = (A.a16)/A.a11;
    A.a26 = (A.a26-A.a12*A.a16)/A.a22;
    A.a36 = (A.a36-A.a13*A.a16-A.a23*A.a26)/A.a33;
    A.a46 = (A.a46-A.a14*A.a16-A.a24*A.a26-A.a34*A.a36)/A.a44;
    A.a56 = (A.a56-A.a15*A.a16-A.a25*A.a26-A.a35*A.a36-A.a45*A.a46)/A.a55;
    A.a66 = Math.sqrt(A.a66-A.a16*A.a16-A.a26*A.a26-A.a36*A.a36-A.a46*A.a46-A.a56*A.a56);
    // A non-PD input makes a sqrt produce NaN, which propagates to a66.
    return !UtilEjml.isUncountable(A.a66);
}
python
def version_info(self):
    """
    Returns API version information for the HMC.

    This operation does not require authentication.

    Returns:

      :term:`HMC API version`: The HMC API version supported by the HMC,
      as a (major, minor) tuple.

    Raises:

      :exc:`~zhmcclient.HTTPError`
      :exc:`~zhmcclient.ParseError`
      :exc:`~zhmcclient.ConnectionError`
    """
    # Query the HMC only once; afterwards serve the cached result.
    if self._api_version is None:
        self.query_api_version()
    info = self._api_version
    return info['api-major-version'], info['api-minor-version']
python
def augment_init_method(cls):
    """
    Replace the existing cls.__init__() method with a new one which
    also initialises the field generators and similar bookkeeping.

    The replacement runs the original __init__ first, records its call
    arguments for later re-spawning, then builds the generator-template
    namespace from both class- and instance-level attributes and spawns
    the per-instance generators.
    """
    orig_init = cls.__init__

    def new_init(self, *args, **kwargs):
        super(CustomGenerator, self).__init__()  # TODO: does this behave correctly with longer inheritance chains?
        orig_init(self, *args, **kwargs)

        # Remember the constructor args so the generator can be re-created.
        self.orig_args = args
        self.orig_kwargs = kwargs

        # Templates come from class attributes first, then instance
        # attributes (set by orig_init) so the latter take precedence.
        self.ns_gen_templates = TohuNamespace()
        self.ns_gen_templates.update_from_dict(self.__class__.__dict__)
        self.ns_gen_templates.update_from_dict(self.__dict__)
        self.ns_gen_templates.set_owner(self.__class__)
        self._mark_field_generator_templates()

        # Spawn per-instance generators owned by this instance.
        self.ns_gens = self.ns_gen_templates.spawn()
        self.ns_gens.set_owner(self)

        self._update_namespace_with_field_generators()
        self._set_field_names()
        self._set_tohu_items_name()
        self._set_tohu_items_cls()

    cls.__init__ = new_init
python
def _getZODBAnalyses(self, objid): """ Searches for analyses from ZODB to be filled with results. objid can be either AR ID or Worksheet's Reference Sample IDs. Only analyses that matches with getAnallowedAnalysisStates() will be returned. If not a ReferenceAnalysis, getAllowedARStates() is also checked. Returns empty array if no analyses found """ # ars = [] analyses = [] searchcriteria = ['getId', 'getClientSampleID'] allowed_ar_states = self.getAllowedARStates() allowed_an_states = self.getAllowedAnalysisStates() # allowed_ar_states_msg = [_(s) for s in allowed_ar_states] allowed_an_states_msg = [_(s) for s in allowed_an_states] # Acceleration of searches using priorization if self._priorizedsearchcriteria in ['rgid', 'rid', 'ruid']: # Look from reference analyses analyses = self._getZODBAnalysesFromReferenceAnalyses( objid, self._priorizedsearchcriteria) if len(analyses) == 0: # Look from ar and derived analyses = self._getZODBAnalysesFromAR(objid, '', searchcriteria, allowed_ar_states) # Discard analyses that don't match with allowed_an_states analyses = [analysis for analysis in analyses if analysis.portal_type != 'Analysis' or self.wf.getInfoFor(analysis, 'review_state') in allowed_an_states] if len(analyses) == 0: self.warn( "No analyses '${allowed_analysis_states}' " "states found for ${object_id}", mapping={"allowed_analysis_states": ', '.join( allowed_an_states_msg), "object_id": objid}) return analyses
java
/**
 * Reads the user group of a project.  If the group no longer exists, a
 * placeholder group named after the group id with description "deleted group"
 * is returned instead of propagating the error.
 *
 * @param dbc the current database context
 * @param project the project whose group is read
 * @return the project's group, or a synthetic "deleted group" placeholder
 */
public CmsGroup readGroup(CmsDbContext dbc, CmsProject project) {
    try {
        return readGroup(dbc, project.getGroupId());
    } catch (CmsException exc) {
        // Group was deleted: return a stand-in so callers never see null.
        return new CmsGroup(
            CmsUUID.getNullUUID(),
            CmsUUID.getNullUUID(),
            project.getGroupId() + "",
            "deleted group",
            0);
    }
}
java
/**
 * Returns the package explorer's internal {@code ProblemTreeViewer} by
 * reflectively reading the private {@code fViewer} field of the superclass.
 * Any reflection failure is considered unrecoverable and rethrown as Error.
 *
 * @return the tree viewer backing this view
 */
protected ProblemTreeViewer getViewer() {
    try {
        return (ProblemTreeViewer) this.reflect.get(this, "fViewer"); //$NON-NLS-1$
    } catch (SecurityException | NoSuchFieldException | IllegalArgumentException | IllegalAccessException e) {
        // The field is part of the host IDE's internals; if it is gone the
        // feature cannot work at all.
        throw new Error(e);
    }
}
java
/**
 * Builds the complete SELECT statement for this record: field list, table
 * names, key-range WHERE predicates, remote filter criteria, and sort clause.
 *
 * @param bUseCurrentValues true to take key values from current field values
 * @param vParamList receives the bind parameters added while building predicates
 * @return the assembled SQL SELECT statement
 */
public String getSQLQuery(boolean bUseCurrentValues, Vector<BaseField> vParamList) {
    String strRecordset = this.makeTableNames(false);
    String strFields = this.getSQLFields(DBConstants.SQL_SELECT_TYPE, bUseCurrentValues);
    boolean bIsQueryRecord = this.isQueryRecord();
    String strSortParams = this.addSortParams(bIsQueryRecord, true);
    this.handleInitialKey();    // Set up the smaller key
    // Lower bound of the key range; only added if the key changed.
    String strStartRange = this.addSelectParams(">=", DBConstants.START_SELECT_KEY, true, bIsQueryRecord, bUseCurrentValues, vParamList, true, false);  // Add only if changed
    this.handleEndKey();        // Set up the larger key
    // Upper bound of the key range; only added if the key changed.
    String strEndRange = this.addSelectParams("<=", DBConstants.END_SELECT_KEY, true, bIsQueryRecord, bUseCurrentValues, vParamList, true, false);     // Add only if changed
    String strWhere = DBConstants.BLANK;
    // Combine whichever range bounds are present.
    if (strStartRange.length() == 0)
        strWhere = strEndRange;
    else
    {
        if (strEndRange.length() == 0)
            strWhere = strStartRange;
        else
            strWhere = strStartRange + " AND " + strEndRange;
    }
    // Next, get the recordset filter
    StringBuffer strbFilter = new StringBuffer();
    this.handleRemoteCriteria(strbFilter, bIsQueryRecord, vParamList);   // Add any selection criteria (from behaviors)
    if (strbFilter.length() > 0)
    {
        if (strWhere.length() == 0)
            strWhere = strbFilter.toString();
        else
            // Parenthesize the filter so its OR terms don't leak into the range predicates.
            strWhere += " AND (" + strbFilter.toString() + ")";
    }
    if (strWhere.length() > 0)
        strWhere = " WHERE " + strWhere;
    strRecordset = "SELECT" + strFields + " FROM " + strRecordset + strWhere + strSortParams;
    return strRecordset;
}