Dataset schema: language — string, 2 classes (java, python); func_code_string — string, length 63 to 466k characters.
java
public void show (Popups.Position pos, Widget target) { Popups.show(this, pos, target); }
python
def add_secondary(self, address, append_lists=False): """ Add secondary IP addresses to this host element. If append_lists is True, add to the existing list. Otherwise overwrite. :param list address: ip addresses to add in IPv4 or IPv6 format :param bool append_lists: add to existing list or overwrite (default: overwrite) :return: None """ self.update( secondary=address, append_lists=append_lists)
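A minimal usage sketch for the method above, assuming it lives on a host element class such as smc-python's Host (the import path, element name, and addresses below are assumptions):

from smc.elements.network import Host  # assumed import path

host = Host('web-server-01')                          # reference an existing host element
host.add_secondary(['10.0.0.12'])                     # overwrite the secondary address list
host.add_secondary(['10.0.0.13'], append_lists=True)  # append instead of overwriting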
java
public org.yecht.Node handle(Parser p, String a) { IRubyObject anchor_name = runtime.newString(a); IRubyObject nm = runtime.newString("name"); org.yecht.Node badanc = org.yecht.Node.newMap(nm, anchor_name); badanc.type_id = "tag:ruby.yaml.org,2002:object:YAML::Yecht::BadAlias"; return badanc; }
python
def copy_all_a(input_a, *other_inputs, **kwargs): """Copy all readings in input a into the output. All other inputs are skipped, so when this function finishes there are no readings left in any of the input walkers, even if it generated no output readings. Returns: list(IOTileReading) """ output = [] while input_a.count() > 0: output.append(input_a.pop()) for input_x in other_inputs: input_x.skip_all() return output
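To make the draining behaviour concrete, here is a self-contained sketch with a toy stand-in for the stream-walker interface (count/pop/skip_all are the only methods the function touches; Walker below is an illustration, not the real iotile type):

class Walker:
    def __init__(self, readings):
        self._readings = list(readings)
    def count(self):
        return len(self._readings)
    def pop(self):
        return self._readings.pop(0)   # oldest reading first
    def skip_all(self):
        self._readings.clear()

a, b = Walker([1, 2, 3]), Walker([4, 5])
print(copy_all_a(a, b))      # [1, 2, 3]
print(a.count(), b.count())  # 0 0 -- every input walker ends up empty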
python
def findbestparams_iso(xsamples): """ Minimize sum of squared differences of H_iso-<H_iso> for time samples""" p = 0.5*np.sum(xsamples.T[3:]**2, axis=0) r = np.sum(xsamples.T[:3]**2, axis=0) return np.abs(leastsq(deltaH_iso, np.array([10., 10.]), Dfun=None, col_deriv=1, args=(p, r))[0])
python
def device_event_list(self, **kwargs): # noqa: E501 """List all device events. # noqa: E501 List all device events for an account. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.device_event_list(asynchronous=True) >>> result = thread.get() :param bool asynchronous: :param int limit: How many objects to retrieve in the page. :param str order: The order of the records based on creation time, `ASC` or `DESC`; by default `ASC`. :param str after: The ID of the item after which to retrieve the next page. :param str include: Comma-separated list of data fields to return. Currently supported: `total_count` :param str filter: URL encoded query string parameter to filter returned data. ##### Filtering ```?filter={URL encoded query string}``` The query string is made up of key/value pairs separated by ampersands. So for a query of ```key1=value1&key2=value2&key3=value3``` this would be encoded as follows: ```?filter=key1%3Dvalue1%26key2%3Dvalue2%26key3%3Dvalue3``` ###### Filterable fields: The table below lists all the fields that can be filtered on with certain filters: <table> <thead> <tr> <th>Field</th> <th>= / __eq / __neq</th> <th>__in / __nin</th> <th>__lte / __gte</th> </tr> </thead> <tbody> <tr> <td>date_time</td> <td>✓</td> <td>✓</td> <td>✓</td> </tr> <tr> <td>description</td> <td>✓</td> <td>✓</td> <td>&nbsp;</td> </tr> <tr> <td>id</td> <td>✓</td> <td>✓</td> <td>&nbsp;</td> </tr> <tr> <td>device_id</td> <td>✓</td> <td>✓</td> <td>&nbsp;</td> </tr> <tr> <td>event_type</td> <td>✓</td> <td>✓</td> <td>&nbsp;</td> </tr> <tr> <td>state_change</td> <td>✓</td> <td>✓</td> <td>&nbsp;</td> </tr> </tbody> </table> &nbsp; The examples below show the queries in *unencoded* form. ###### By id: ```id={id}``` ###### By state change: ```state_change=[True|False]``` ###### By event type: ```event_type={value}``` ###### On date-time fields: Date-time fields should be specified in UTC RFC3339 format ```YYYY-MM-DDThh:mm:ss.msZ```. There are three permitted variations: * UTC RFC3339 with milliseconds e.g. 2016-11-30T16:25:12.1234Z * UTC RFC3339 without milliseconds e.g. 2016-11-30T16:25:12Z * UTC RFC3339 shortened - without milliseconds and punctuation e.g. 20161130T162512Z Date-time filtering supports three operators: * equality * greater than or equal to &ndash; field name suffixed with ```__gte``` * less than or equal to &ndash; field name suffixed with ```__lte``` Lower and upper limits to a date-time range may be specified by including both the ```__gte``` and ```__lte``` forms in the filter. ```{field name}[|__lte|__gte]={UTC RFC3339 date-time}``` ##### Multi-field example ```id=0158d38771f70000000000010010038c&state_change=True&date_time__gte=2016-11-30T16:25:12.1234Z``` Encoded: ```?filter=id%3D0158d38771f70000000000010010038c%26state_change%3DTrue%26date_time__gte%3D2016-11-30T16%3A25%3A12.1234Z``` ##### Filtering with filter operators String field filtering supports the following operators: * equality: `__eq` * non-equality: `__neq` * in: `__in` * not in: `__nin` For `__in` and `__nin` filters, the list of parameters must be comma-separated: `event_type__in=update.device.device-created,update.device.device-updated` :return: DeviceEventPage If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.device_event_list_with_http_info(**kwargs) # noqa: E501 else: (data) = self.device_event_list_with_http_info(**kwargs) # noqa: E501 return data
java
public final void declareAlreadyPrecommitted() throws TaskListException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "declareAlreadyPrecommitted"); if (STATE_UNTOUCHED == _state) { _state = STATE_END_PRECOMMIT; } else if (STATE_END_PRECOMMIT == _state) { // declared precommitted already } else { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "declareAlreadyPrecommitted"); throw new TaskListException(_state); } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "declareAlreadyPrecommitted"); }
java
@Override public EJSWrapperCommon internalCreateWrapper(BeanId beanId) throws CreateException, RemoteException, CSIException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.entry(tc, "internalCreateWrapper : " + beanId); // ----------------------------------------------------------------------- // Creating the wrappers for an EJBFactory is fairly simple; there is // only one remote wrapper, which is really just a wrapper around the // HomeOfHomes. // // A different instance of the wrapper class (EJBFactory) is required // for every binding, to ensure it is associated with the correct // cluster, for proper WLM routing. The 'primary key' of an EJBFactory // is the J2EEName of either the application or module, and is used // to obtain the proper cluster identity. // // A special EJSWrapperCommon constructor is used, that processes // just the one wrapper, and avoids the requirement of passing // BeanMetaData. There really is no BMD for the EJBFactory, as it // just represents the HomeOfHomes. // ----------------------------------------------------------------------- EJBFactoryImpl ejbFactory = new EJBFactoryImpl(ivEJBLinkResolver); J2EEName factoryKey = (J2EEName) beanId.getPrimaryKey(); Object cluster = ivContainer.getEJBRuntime().getClusterIdentity(factoryKey); EJSWrapperCommon wrappers = new EJSWrapperCommon(ejbFactory, beanId, cluster, ivContainer); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.exit(tc, "internalCreateWrapper : " + wrappers); return wrappers; }
python
def p_expression_lor(self, p): 'expression : expression LOR expression' p[0] = Lor(p[1], p[3], lineno=p.lineno(1)) p.set_lineno(0, p.lineno(1))
java
protected final ItemInList findItemInListFor(final List<ItemInList> pItemsList, final Long pItemId, final EShopItemType pItemType) { int j = 0; while (j < pItemsList.size()) { if (pItemsList.get(j).getItsType().equals(pItemType) && pItemsList.get(j).getItemId().equals(pItemId)) { return pItemsList.get(j); } j++; } return null; }
python
def Change(self,cpu=None,memory=None,description=None,group_id=None): """Change existing server object. One or more fields can be set and the method will return a requests object for all queued activities. This is a convenience function - each of these changes requires a separate API call. Some API calls are synchronous (e.g. changing group ID or password) while others are async. """ if group_id: groupId = group_id else: groupId = None payloads = [] requests = [] for key in ("cpu","memory","description","groupId"): if locals()[key]: requests.append(clc.v2.Requests(clc.v2.API.Call('PATCH','servers/%s/%s' % (self.alias,self.id), json.dumps([{"op": "set", "member": key, "value": locals()[key]}]), session=self.session), alias=self.alias, session=self.session)) if len(requests): self.dirty = True return(sum(requests))
java
private NodeUsageReport getReportUnprotected(String trackerName) { NodeUsageReport usageReport = usageReports.get(trackerName); if (usageReport == null) { usageReport = new NodeUsageReport(trackerName, 0, 0, 0, 0, 0, 0, 0); usageReports.put(trackerName, usageReport); } return usageReport; }
python
def get_intel_notifications_feed(self, page=None, timeout=None): """ Get notification feed in JSON for further processing. :param page: the next_page property of the results of a previously issued query to this API. This parameter should not be provided if it is the very first query to the API, i.e. if we are retrieving the first page of results. :param timeout: The amount of time in seconds the request should wait before timing out. :returns: The next page identifier, The results (JSON is possible with .json()) """ params = {'apikey': self.api_key, 'next': page} try: response = requests.get(self.base + 'hunting/notifications-feed/', params=params, proxies=self.proxies, timeout=timeout) # VT returns an empty result, len(content)==0, and status OK if there are no pending notifications. # To keep the API consistent we generate an empty object instead. # This might not be necessary with a later release of the VTI API. (bug has been submitted) if len(response.content) == 0: response.__dict__['_content'] = \ b'{"notifications":[],"verbose_msg":"No pending notification","result":0,"next":null}' except requests.RequestException as e: return dict(error=str(e)) return _return_response_and_status_code(response)
java
static int seeded(int item, int i) { long hash = SEED[i] * item; hash += hash >>> 32; return (int) hash; }
python
def Jkpw(dW, h, n=5): """matrix J approximating repeated Stratonovich integrals for each of N time intervals, based on the method of Kloeden, Platen and Wright (1992). Args: dW (array of shape (N, m)): giving m independent Wiener increments for each of N time steps. (You can make this array using sdeint.deltaW()) h (float): the time step size n (int, optional): how many terms to take in the series expansion Returns: (A, J) where A: array of shape (N, m, m) giving the Levy areas that were used. J: array of shape (N, m, m) giving an m x m matrix of repeated Stratonovich integral values for each of the N time intervals. """ m = dW.shape[1] A, I = Ikpw(dW, h, n) J = I + 0.5*h*np.eye(m).reshape((1, m, m)) return (A, J)
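A usage sketch, assuming this is the Jkpw from the sdeint package (the deltaW helper and the import path are assumptions based on that package's public API):

import numpy as np
from sdeint.wiener import deltaW, Jkpw  # assumed import path

N, m, h = 1000, 2, 0.01
dW = deltaW(N, m, h)   # m independent Wiener increments per time step
A, J = Jkpw(dW, h)
print(J.shape)         # (1000, 2, 2)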
java
public long getPID() { long currentPID = cachedPID.get(); if (currentPID != 0) { return currentPID; } try { currentPID = detectPID(); } catch (Throwable cause) { logger.info("Unable to detect process ID!", cause); } if (currentPID == 0) { currentPID = System.nanoTime(); if (!cachedPID.compareAndSet(0, currentPID)) { currentPID = cachedPID.get(); } } else { cachedPID.set(currentPID); } return currentPID; }
python
def new_with_atts_removed(self, *attributes): """Returns a new FmtStr with the same content but some attributes removed""" return FmtStr(*[Chunk(bfs.s, bfs.atts.remove(*attributes)) for bfs in self.chunks])
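For context, a sketch of how this might be used with curtsies-style formatted strings (the fmtstr helper and attribute names are assumptions about that library's API):

from curtsies.formatstring import fmtstr  # assumed import path

s = fmtstr('hello', 'red', 'bold')
plain = s.new_with_atts_removed('red')  # same text, 'red' attribute dropped
print(plain)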
java
static public long unpackRecid(DataInput2 in) throws IOException { long val = in.readPackedLong(); val = DataIO.parity1Get(val); return val >>> 1; }
java
public String sequence(int hashCode) { if (hashCode <= 0) return null; int state = 0; // If the hash code is larger than the number of suffixes in the start state, // the hash code does not correspond to a sequence. if (hashCode > d_nSeqs) return null; StringBuilder wordBuilder = new StringBuilder(); // Stop if we are in a state where we cannot add more characters. while (d_stateOffsets.get(state) != transitionsUpperBound(state)) { // Obtain the next transition, decreasing the hash code by the number of // preceding suffixes. int trans = d_transitionNSuffixes.binarySearch(d_stateOffsets.get(state), transitionsUpperBound(state), hashCode); if (trans >= 0) --trans; else trans = -trans - 2; hashCode -= d_transitionNSuffixes.get(trans); // Add the character on the given transition and move. wordBuilder.append(d_transitionChars[trans]); state = d_transitionTo.get(trans); // If we encounter a final state, decrease the hash code, since it represents a // suffix. If our hash code is reduced to zero, we have found the sequence. if (d_finalStates.get(state)) { --hashCode; if (hashCode == 0) return wordBuilder.toString(); } } // Bad luck, we cannot really get here! return null; }
python
def mkdir(self, astr_dirSpec): """ Given an <astr_dirSpec> in form '/a/b/c/d/.../f', create that path in the internal stree, creating all intermediate nodes as necessary :param astr_dirSpec: :return: """ if astr_dirSpec != '/' and astr_dirSpec != "//": str_currentPath = self.cwd() l_pathSpec = astr_dirSpec.split('/') if not len(l_pathSpec[0]): self.cd('/') l_nodesDepth = l_pathSpec[1:] else: l_nodesDepth = l_pathSpec for d in l_nodesDepth: self.mkcd(d) self.cd(str_currentPath)
java
public static int[] range(int length) { int[] index = new int[length]; for (int i=0; i<index.length; i++) { // TODO: This should maybe be a safe cast for the benefit of non-IntDouble classes. index[i] = (int) i; } return index; }
java
public void marshall(Rectangle rectangle, ProtocolMarshaller protocolMarshaller) { if (rectangle == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(rectangle.getHeight(), HEIGHT_BINDING); protocolMarshaller.marshall(rectangle.getWidth(), WIDTH_BINDING); protocolMarshaller.marshall(rectangle.getX(), X_BINDING); protocolMarshaller.marshall(rectangle.getY(), Y_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
java
public byte[] decodeBytes(byte[] rgbValue) throws NoSuchAlgorithmException { rgbValue = super.decodeBytes(rgbValue); // Base64 encoding rgbValue = this.decrypt(rgbValue); return rgbValue; }
python
def _dens(self, R, z, phi=0., t=0.): """ NAME: _dens PURPOSE: evaluate the density at (R,z, phi) INPUT: R - Cylindrical Galactocentric radius z - vertical height phi - azimuth t - time OUTPUT: density at (R,z, phi) HISTORY: 2016-05-17 - Written - Aladdin """ if not self.isNonAxi and phi is None: phi= 0. return self._computeArray(self._rhoTilde, R,z,phi)
python
def _validate_build_resource_structure(autoload_resource): """Validate resource structure :param dict autoload_resource: :return correct autoload resource structure :rtype: dict """ result = {} for resource_prefix, resources in autoload_resource.iteritems(): max_free_index = max(map(int, resources)) + 1 or 1 for index, sub_resources in resources.iteritems(): if not index or index == -1: index = max_free_index max_free_index += 1 if len(sub_resources) > 1: result["{0}{1}".format(resource_prefix, index)] = sub_resources[0] for resource in sub_resources[1:]: result["{0}{1}".format(resource_prefix, str(max_free_index))] = resource max_free_index += 1 else: result["{0}{1}".format(resource_prefix, index)] = sub_resources[0] return result
python
def derivative(f, t): """Fourth-order finite-differencing with non-uniform time steps The formula for this finite difference comes from Eq. (A 5b) of "Derivative formulas and errors for non-uniformly spaced points" by M. K. Bowen and Ronald Smith. As explained in their Eqs. (B 9b) and (B 10b), this is a fourth-order formula -- though that's a squishy concept with non-uniform time steps. TODO: If there are fewer than five points, the function should revert to simpler (lower-order) formulas. """ dfdt = np.empty_like(f) if (f.ndim == 1): _derivative(f, t, dfdt) elif (f.ndim == 2): _derivative_2d(f, t, dfdt) elif (f.ndim == 3): _derivative_3d(f, t, dfdt) else: raise NotImplementedError("Taking derivatives of {0}-dimensional arrays is not yet implemented".format(f.ndim)) return dfdt
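A numerical sanity check for the finite-difference routine above (assumes derivative() and its compiled _derivative* helpers are in scope):

import numpy as np

t = np.sort(np.random.uniform(0.0, 2 * np.pi, 200))  # non-uniform time steps
f = np.sin(t)
dfdt = derivative(f, t)
print(np.max(np.abs(dfdt - np.cos(t))))  # small for a fourth-order scheme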
java
public static Map<String, String> getEvidences(ProbabilisticNetwork bn) { HashMap<String, String> evidences = new HashMap<String, String>(); for (Node n : bn.getNodes()) { ProbabilisticNode pn = (ProbabilisticNode) n; if (pn.hasEvidence()) { evidences.put(pn.getName(), pn.getStateAt(pn.getEvidence())); } } return evidences; }
java
public void marshall(EncryptionConfiguration encryptionConfiguration, ProtocolMarshaller protocolMarshaller) { if (encryptionConfiguration == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(encryptionConfiguration.getNoEncryptionConfig(), NOENCRYPTIONCONFIG_BINDING); protocolMarshaller.marshall(encryptionConfiguration.getKMSEncryptionConfig(), KMSENCRYPTIONCONFIG_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } }
java
public Options put(String key, ICollectionItemOptions value) { options.put(key, value); return this; }
python
def wite_to_json(self, dir_path="", file_name=""): """Write the performance data to a JSON file.""" # extract the data data = { "plot_data": self.record_thread.profile_data, "method_exec_info": self.method_exec_info, "search_file": self.search_file, "source_file": self.source_file} # write it to file file_path = os.path.join(dir_path, file_name) if not os.path.exists(dir_path): os.makedirs(dir_path) json.dump(data, open(file_path, "w+"), indent=4)
java
@Override public DeleteDatastoreResult deleteDatastore(DeleteDatastoreRequest request) { request = beforeClientExecution(request); return executeDeleteDatastore(request); }
java
private List<PipelineStage> findUnmappedStages(Dashboard dashboard,List<PipelineStage> pipelineStageList){ List<PipelineStage> unmappedStages = new ArrayList<>(); Map<PipelineStage, String> stageToEnvironmentNameMap = PipelineUtils.getStageToEnvironmentNameMap(dashboard); for (PipelineStage systemStage : pipelineStageList) { if (PipelineStageType.DEPLOY.equals(systemStage.getType())) { String mappedName = stageToEnvironmentNameMap.get(systemStage); if (mappedName == null || mappedName.isEmpty()) { unmappedStages.add(systemStage); } } } return unmappedStages; }
java
private SagaType containsItem(final Iterable<SagaType> source, final Class itemToSearch) { SagaType containedItem = null; for (SagaType sagaType : source) { if (sagaType.getSagaClass().equals(itemToSearch)) { containedItem = sagaType; break; } } return containedItem; }
java
public static void startWakefulService(Context context) { acquireWakeLock(context); context.startService(new Intent(context, WingsService.class)); }
python
def send_heartbeats(heartbeats, args, configs, use_ntlm_proxy=False): """Send heartbeats to WakaTime API. Returns `SUCCESS` when heartbeat was sent, otherwise returns an error code. """ if len(heartbeats) == 0: return SUCCESS api_url = args.api_url if not api_url: api_url = 'https://api.wakatime.com/api/v1/users/current/heartbeats.bulk' log.debug('Sending heartbeats to api at %s' % api_url) timeout = args.timeout if not timeout: timeout = 60 data = [h.sanitize().dict() for h in heartbeats] log.debug(data) # setup api request request_body = json.dumps(data) api_key = u(base64.b64encode(str.encode(args.key) if is_py3 else args.key)) auth = u('Basic {api_key}').format(api_key=api_key) headers = { 'User-Agent': get_user_agent(args.plugin), 'Content-Type': 'application/json', 'Accept': 'application/json', 'Authorization': auth, } hostname = get_hostname(args) if hostname: headers['X-Machine-Name'] = u(hostname).encode('utf-8') # add Olson timezone to request try: tz = tzlocal.get_localzone() except: tz = None if tz: headers['TimeZone'] = u(tz.zone).encode('utf-8') session_cache = SessionCache() session = session_cache.get() should_try_ntlm = False proxies = {} if args.proxy: if use_ntlm_proxy: from .packages.requests_ntlm import HttpNtlmAuth username = args.proxy.rsplit(':', 1) password = '' if len(username) == 2: password = username[1] username = username[0] session.auth = HttpNtlmAuth(username, password, session) else: should_try_ntlm = '\\' in args.proxy proxies['https'] = args.proxy ssl_verify = not args.nosslverify if args.ssl_certs_file and ssl_verify: ssl_verify = args.ssl_certs_file # send request to api response, code = None, None try: response = session.post(api_url, data=request_body, headers=headers, proxies=proxies, timeout=timeout, verify=ssl_verify) except RequestException: if should_try_ntlm: return send_heartbeats(heartbeats, args, configs, use_ntlm_proxy=True) else: exception_data = { sys.exc_info()[0].__name__: u(sys.exc_info()[1]), } if log.isEnabledFor(logging.DEBUG): exception_data['traceback'] = traceback.format_exc() if args.offline: queue = Queue(args, configs) queue.push_many(heartbeats) if log.isEnabledFor(logging.DEBUG): log.warn(exception_data) else: log.error(exception_data) except: # delete cached session when requests raises unknown exception if should_try_ntlm: return send_heartbeats(heartbeats, args, configs, use_ntlm_proxy=True) else: exception_data = { sys.exc_info()[0].__name__: u(sys.exc_info()[1]), 'traceback': traceback.format_exc(), } if args.offline: queue = Queue(args, configs) queue.push_many(heartbeats) log.warn(exception_data) else: code = response.status_code if response is not None else None content = response.text if response is not None else None if _success(code): results = _get_results(response) _process_server_results(heartbeats, code, content, results, args, configs) session_cache.save(session) return SUCCESS else: log.debug({ 'response_code': code, 'response_text': content, }) if should_try_ntlm: return send_heartbeats(heartbeats, args, configs, use_ntlm_proxy=True) _handle_unsent_heartbeats(heartbeats, code, content, args, configs) session_cache.delete() return AUTH_ERROR if code == 401 else API_ERROR
python
def write_element(elem_to_parse, file_or_path, encoding=DEFAULT_ENCODING): """ Writes the contents of the parsed element to file_or_path :see: get_element(parent_to_parse, element_path) """ xml_header = '<?xml version="1.0" encoding="{0}"?>'.format(encoding) get_element_tree(elem_to_parse).write(file_or_path, encoding, xml_header)
python
def _kl_divergence_bh(params, P, degrees_of_freedom, n_samples, n_components, angle=0.5, skip_num_points=0, verbose=False): """t-SNE objective function: KL divergence of p_ijs and q_ijs. Uses Barnes-Hut tree methods to calculate the gradient that runs in O(N log N) instead of O(N^2) Parameters ---------- params : array, shape (n_params,) Unraveled embedding. P : csr sparse matrix, shape (n_samples, n_samples) Sparse approximate joint probability matrix, computed only for the k nearest-neighbors and symmetrized. degrees_of_freedom : float Degrees of freedom of the Student's-t distribution. n_samples : int Number of samples. n_components : int Dimension of the embedded space. angle : float (default: 0.5) This is the trade-off between speed and accuracy for Barnes-Hut T-SNE. 'angle' is the angular size (referred to as theta in [3]) of a distant node as measured from a point. If this size is below 'angle' then it is used as a summary node of all points contained within it. This method is not very sensitive to changes in this parameter in the range of 0.2 - 0.8. Angle less than 0.2 has quickly increasing computation time and angle greater than 0.8 has quickly increasing error. skip_num_points : int (optional, default: 0) This does not compute the gradient for points with indices below `skip_num_points`. This is useful when computing transforms of new data where you'd like to keep the old data fixed. verbose : int Verbosity level. Returns ------- kl_divergence : float Kullback-Leibler divergence of p_ij and q_ij. grad : array, shape (n_params,) Unraveled gradient of the Kullback-Leibler divergence with respect to the embedding. """ params = params.astype(np.float32, copy=False) X_embedded = params.reshape(n_samples, n_components) val_P = P.data.astype(np.float32, copy=False) neighbors = P.indices.astype(np.int64, copy=False) indptr = P.indptr.astype(np.int64, copy=False) grad = np.zeros(X_embedded.shape, dtype=np.float32) error = _barnes_hut_tsne.gradient(val_P, X_embedded, neighbors, indptr, grad, angle, n_components, verbose, dof=degrees_of_freedom) c = 2.0 * (degrees_of_freedom + 1.0) / degrees_of_freedom grad = grad.ravel() grad *= c return error, grad
java
public void stop() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "stop"); synchronized (this) { active = false; // stop the liveness timer for sending ControlRequestHighestGeneratedTick, if needed //NOTE: the requestHighestGeneratedTickTimer will stop of its own accord once the //stream is noticed to be if (initRepeatHandler != null) { initRepeatHandler.cancel(); } if (inactivityTimer != null) { inactivityTimer.cancel(); inactivityTimer = null; } if (resetRequestAckSender != null) { resetRequestAckSender.stop(); } // stop all the liveness timers for value ticks dem.stopTimer(); if (imeRestorationHandler!=null) imeRestorationHandler.stopTimer(); } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "stop"); }
python
def retract(self, idx_or_declared_fact): """ Retracts a specific fact, using its index .. note:: This updates the agenda """ self.facts.retract(idx_or_declared_fact) if not self.running: added, removed = self.get_activations() self.strategy.update_agenda(self.agenda, added, removed)
python
def fetch_changes(repo_path, up_commit='master'): """ Fetch latest changes from stage and touch .timestamp if any python sources have been modified. """ last_up_commit = None prevcwd = os.getcwd() try: gitexe = 'git' os.chdir(repo_path) old_sources_timestamp = sources_latest_timestamp('.') shell_command([gitexe, 'pull']) last_up_commit = subprocess.check_output(['git', 'rev-parse', 'HEAD']) shell_command([gitexe, 'checkout', up_commit]) up_commit = subprocess.check_output(['git', 'rev-parse', 'HEAD']) new_sources_timestamp = sources_latest_timestamp('.') if old_sources_timestamp < new_sources_timestamp: with open('.timestamp', 'w') as up_commit_file: up_commit_file.write(up_commit) finally: os.chdir(prevcwd) return last_up_commit, up_commit
python
def set_property(self, name, value, update_session=True): """Create or set the value of a property. Returns `True` if the property was created or updated, or `False` if there were no changes to the value of the property. Args: name (str): Name of the property to create or update value (any): Value of the property. This can be any type of JSON serializable data update_session (bool): Automatically add the change to the SQLAlchemy session. Default: True Returns: `bool` """ if type(value) == datetime: value = value.isoformat() try: prop = self.get_property(name) if prop.value == value: return False prop.value = value except AttributeError: prop = ResourceProperty() prop.resource_id = self.id prop.name = name prop.value = value if update_session: db.session.add(prop) return True
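A sketch of the intended call pattern; Resource.get and the commit step are hypothetical stand-ins for however the surrounding application loads resources:

resource = Resource.get('i-0abc123')            # hypothetical lookup
if resource.set_property('owner', 'team-infra'):
    db.session.commit()                         # only commit when something changed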
java
private void pushEvent(GobblinTrackingEvent event) throws IOException { Map<String, String> metadata = event.getMetadata(); String name = getMetricName(metadata, event.getName()); long timestamp = event.getTimestamp(); MultiPartEvent multiPartEvent = MultiPartEvent.getEvent(metadata.get(EventSubmitter.EVENT_TYPE)); if (multiPartEvent == null) { influxDBPusher.push(buildEventAsPoint(name, EMTPY_VALUE, timestamp)); } else { List<Point> points = Lists.newArrayList(); for (String field : multiPartEvent.getMetadataFields()) { Point point = buildEventAsPoint(JOINER.join(name, field), convertValue(field, metadata.get(field)), timestamp); points.add(point); } influxDBPusher.push(points); } }
java
public String convertTextFidelityRepTxtExToString(EDataType eDataType, Object instanceValue) { return instanceValue == null ? null : instanceValue.toString(); }
python
def flush_tx_buffer(self): """ Flushes the transmit buffer. :raises can.CanError: If flushing of the transmit buffer failed. """ log.info('Flushing transmit buffer') self._ucan.reset_can(self.channel, ResetFlags.RESET_ONLY_TX_BUFF)
java
private int itemToIndex(I i) { Integer r = allItems.absoluteIndexOf(i); return r == null ? -1 : r.intValue(); }
java
public static byte[] encryptWithPublicKey(byte[] publicKeyData, byte[] data) throws InvalidKeyException, NoSuchAlgorithmException, InvalidKeySpecException, NoSuchPaddingException, IllegalBlockSizeException, BadPaddingException, IOException { return encryptWithPublicKey(publicKeyData, data, DEFAULT_CIPHER_TRANSFORMATION, DEFAULT_PADDING_SIZE); }
java
@Override public Collection<URL> getURLs() { // There is only ever a single URI for the directory so return this try { return Collections.singleton(dir.toURI().toURL()); } catch (MalformedURLException e) { return Collections.emptySet(); } }
python
def FlatArrow(line1, line2, c="m", alpha=1, tipSize=1, tipWidth=1): """Build a 2D arrow in 3D space by joining two close lines. .. hint:: |flatarrow| |flatarrow.py|_ """ if isinstance(line1, Actor): line1 = line1.coordinates() if isinstance(line2, Actor): line2 = line2.coordinates() sm1, sm2 = np.array(line1[-1]), np.array(line2[-1]) v = (sm1-sm2)/3*tipWidth p1 = sm1+v p2 = sm2-v pm1 = (sm1+sm2)/2 pm2 = (np.array(line1[-2])+np.array(line2[-2]))/2 pm12 = pm1-pm2 tip = pm12/np.linalg.norm(pm12)*np.linalg.norm(v)*3*tipSize/tipWidth + pm1 line1.append(p1) line1.append(tip) line2.append(p2) line2.append(tip) resm = max(100, len(line1)) actor = Ribbon(line1, line2, alpha=alpha, c=c, res=(resm, 1)).phong() settings.collectable_actors.pop() settings.collectable_actors.append(actor) return actor
python
def ticket_collaborators(self, id, **kwargs): "https://developer.zendesk.com/rest_api/docs/core/tickets#list-collaborators-for-a-ticket" api_path = "/api/v2/tickets/{id}/collaborators.json" api_path = api_path.format(id=id) return self.call(api_path, **kwargs)
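A hedged usage sketch, assuming the zdesk-style client this method appears to belong to (URL and credentials are placeholders):

from zdesk import Zendesk  # assumed client class

zd = Zendesk('https://example.zendesk.com', 'agent@example.com', 'API_TOKEN', True)
collaborators = zd.ticket_collaborators(id=12345)  # GET /api/v2/tickets/12345/collaborators.json
print(collaborators)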
java
private void errorStatus(MtasSolrStatus status, IOException exception) { try { status.setError(exception); if (requestHandler != null) { requestHandler.finishStatus(status); } } catch (IOException e) { log.error(e); } }
python
def subscribe(self, coro): """ Subscribe to status updates from the Opentherm Gateway. Can only be used after connect() @coro is a coroutine which will be called with a single argument (status) when a status change occurs. Return True on success, False if not connected or already subscribed. """ if coro not in self._notify: self._notify.append(coro) return True return False
python
def __move(self, current_pos): ''' Move in the feature map. Args: current_pos: The current position. Returns: The next position. ''' if self.__move_range is not None: next_pos = np.random.randint(current_pos - self.__move_range, current_pos + self.__move_range) if next_pos < 0: next_pos = 0 elif next_pos >= self.var_arr.shape[0] - 1: next_pos = self.var_arr.shape[0] - 1 return next_pos else: next_pos = np.random.randint(self.var_arr.shape[0] - 1) return next_pos
java
@Override public void renderHead(IHeaderResponse response) { super.renderHead(response); response.render(JavaScriptHeaderItem.forReference(DRAWER_JAVASCRIPT)); response.render(JavaScriptHeaderItem.forReference(MANAGER_JAVASCRIPT)); response.render(CssHeaderItem.forReference(DRAWER_CSS)); Iterator<ListItem> iter = drawers.descendingIterator(); WebMarkupContainer drawer; while (iter.hasNext()) { drawer=iter.next().item; response.render(OnDomReadyHeaderItem.forScript("$('#"+drawer.getMarkupId()+"').modaldrawer('show');")); if (drawers.getFirst().item.equals(drawer)) { response.render(OnDomReadyHeaderItem.forScript("$('#"+drawer.getMarkupId()+"').addClass('shown-modal');")); response.render(OnDomReadyHeaderItem.forScript("$('#"+drawer.getMarkupId()+"').removeClass('hidden-modal');")); } else { response.render(OnDomReadyHeaderItem.forScript("$('#"+drawer.getMarkupId()+"').removeClass('shown-modal');")); response.render(OnDomReadyHeaderItem.forScript("$('#"+drawer.getMarkupId()+"').addClass('hidden-modal');")); } } }
python
def init_config(self, app): """Initialize configuration.""" # Set up API endpoints for records. for k in dir(config): if k.startswith('RECORDS_REST_'): app.config.setdefault(k, getattr(config, k)) # Resolve the Elasticsearch error handlers handlers = app.config['RECORDS_REST_ELASTICSEARCH_ERROR_HANDLERS'] for k, v in handlers.items(): handlers[k] = obj_or_import_string(v)
python
def addFeatureEndLocation( self, coordinate, reference_id, strand=None, position_types=None): """ Adds the coordinate details for the end of this feature :param coordinate: :param reference_id: :param strand: :param position_types: :return: """ self.stop = self._getLocation(coordinate, reference_id, strand, position_types) return
python
def on_plot_select(self,event): """ Select data point if cursor is in range of a data point @param: event -> the wx Mouseevent for that click """ if not self.xdata or not self.ydata: return pos=event.GetPosition() width, height = self.canvas.get_width_height() pos[1] = height - pos[1] xpick_data,ypick_data = pos xdata_org = self.xdata ydata_org = self.ydata data_corrected = self.map.transData.transform(vstack([xdata_org,ydata_org]).T) xdata,ydata = data_corrected.T xdata = list(map(float,xdata)) ydata = list(map(float,ydata)) e = 4e0 index = None for i,(x,y) in enumerate(zip(xdata,ydata)): if 0 < sqrt((x-xpick_data)**2. + (y-ypick_data)**2.) < e: index = i break if index==None: print("Couldn't find point %.1f,%.1f"%(xpick_data,ypick_data)) self.change_selected(index)
java
public static Tailer create(final File file, final TailerListener listener, final long delayMillis, final boolean end, final boolean reOpen, final int bufSize) { return Tailer.create(file, Tailer.DEFAULT_CHARSET, listener, delayMillis, end, reOpen, bufSize); }
python
def expire_leaderboard_at_for(self, leaderboard_name, timestamp): ''' Expire the given leaderboard at a specific UNIX timestamp. Do not use this with leaderboards that utilize member data as there is no facility to cascade the expiration out to the keys for the member data. @param leaderboard_name [String] Name of the leaderboard. @param timestamp [int] UNIX timestamp at which the leaderboard will be expired. ''' pipeline = self.redis_connection.pipeline() pipeline.expireat(leaderboard_name, timestamp) pipeline.expireat( self._ties_leaderboard_key(leaderboard_name), timestamp) pipeline.expireat(self._member_data_key(leaderboard_name), timestamp) pipeline.execute()
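For example, with the leaderboard package this method appears to come from (assumes a running Redis and the usual constructor):

import time
from leaderboard import Leaderboard  # assumed import

highscores = Leaderboard('highscores')
highscores.rank_member('alice', 1500)
# expire the board, its ties key, and its member-data key one hour from now
highscores.expire_leaderboard_at_for('highscores', int(time.time()) + 3600)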
python
def targets(self): """ Using the data from the BLAST analyses, set the targets folder, and create the 'mapping file'. This is the genera-specific FASTA file that will be used for all the reference mapping; it replaces the 'bait file' in the code """ logging.info('Performing analysis with {} targets folder'.format(self.analysistype)) for sample in self.runmetadata: if sample.general.bestassemblyfile != 'NA': sample[self.analysistype].targetpath = \ os.path.join(self.targetpath, 'genera', sample[self.analysistype].genus, '') # There is a relatively strict databasing scheme necessary for the custom targets. Eventually, # there will be a helper script to combine individual files into a properly formatted combined file try: sample[self.analysistype].mappingfile = glob('{}*.fa' .format(sample[self.analysistype].targetpath))[0] # If the fasta file is missing, raise a custom error except IndexError as e: # noinspection PyPropertyAccess e.args = ['Cannot find the combined fasta file in {}. Please note that the file must have a ' '.fa extension'.format(sample[self.analysistype].targetpath)] if os.path.isdir(sample[self.analysistype].targetpath): raise else: sample.general.bestassemblyfile = 'NA'
python
def confirm_operation(prompt, prefix=None, assume_yes=False, err=False): """Prompt the user for confirmation for dangerous actions.""" if assume_yes: return True prefix = prefix or click.style( "Are you %s certain you want to" % (click.style("absolutely", bold=True)) ) prompt = "%(prefix)s %(prompt)s?" % {"prefix": prefix, "prompt": prompt} if click.confirm(prompt, err=err): return True click.echo(err=err) click.secho("OK, phew! Close call. :-)", fg="green", err=err) return False
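A minimal sketch of wiring this into a click command (assumes confirm_operation above is in scope; the purge logic is a placeholder):

import click

@click.command()
@click.option('--yes', is_flag=True, help='Skip the confirmation prompt.')
def purge(yes):
    if not confirm_operation('delete all cached data', assume_yes=yes):
        return
    click.echo('Purging...')  # placeholder for the real operation

if __name__ == '__main__':
    purge()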
java
private FlowScope traverseArrayLiteral(Node n, FlowScope scope) { scope = traverseChildren(n, scope); n.setJSType(getNativeType(ARRAY_TYPE)); return scope; }
python
def address(address=None): """Convert one of a number of inputs into a valid ip:port string. Elements which are not provided are filled in as follows: * IP Address: the system is asked for the set of IP addresses associated with the machine and the first one is used, preferring those matching `address` if it is a wildcard. * Port number: a random port is selected from the pool of dynamically-available port numbers. This means you can pass any of: nothing; a hostname; an IP address; an IP address with wildcards; a port number If an IP address is supplied but is invalid, an InvalidAddressError exception is raised. :param address: (optional) Any of: an IP address, a port number, or both :returns: a valid ip:port string for this machine """ address = str(address or "").strip() # # If the address is an ip:port pair, split into its component parts. # Otherwise, try to determine whether we're looking at an IP # or at a port and leave the other one blank # host_or_ip, port = split_address(address) # # If the port has been supplied, make sure it's numeric and that it's a valid # port number. If it hasn't been supplied, remove a random one from the pool # of possible dynamically-allocated ports and use that. # if port: try: port = int(port) except ValueError: raise AddressError("Port %s must be a number" % port) if port not in config.VALID_PORTS: raise AddressError("Port %d must be in range %d - %d" % ( port, min(config.VALID_PORTS), max(config.VALID_PORTS)) ) else: random.shuffle(PORT_POOL) port = PORT_POOL.pop() # # The address part could be an IP address (optionally including # wildcards to indicate a preference) or a hostname or nothing. # If it's a hostname we attempt to resolve it to an IP address. # If it's nothing or a wildcard we query the system for a matching IP address. # if (not host_or_ip) or is_valid_ip_pattern(host_or_ip): # # If a specific IP address is given, use that. # If an IP pattern is given (ie something with a wildcard in it) treat # that as no address with a preference for that wildcard. # prefer = None if "*" in host_or_ip: host_or_ip, prefer = None, [host_or_ip] # # If no IP (or only a wildcard) is specified, query the system for valid # addresses, preferring those which match the wildcard. NB if the preference # matches one we've previously used, we can return a cached address. But # different requests can specify different wildcard preferences. # if not host_or_ip: if _ip4 and _prefer == prefer: ip = _ip4 else: ip = _find_ip4(prefer) else: ip = host_or_ip else: # # Treat the string as a hostname and resolve to an IP4 address # try: ip = socket.gethostbyname(host_or_ip) except socket.gaierror as exc: _logger.error("gaierror %d for %s", exc.errno, host_or_ip) raise InvalidAddressError(host_or_ip, exc.errno) else: # # Bizarrely specific check because BT Internet "helpfully" # redirects DNS fails to this address which hosts a sponsored # landing page! # if ip == "92.242.132.15": raise InvalidAddressError(host_or_ip, 0) return "%s:%s" % (ip, port)
python
def delete_mount_cache(real_name): ''' .. versionadded:: 2018.3.0 Delete the mount cache entry for the given mount point CLI Example: .. code-block:: bash salt '*' mount.delete_mount_cache /mnt/share ''' cache = salt.utils.mount.read_cache(__opts__) if cache: if 'mounts' in cache: if real_name in cache['mounts']: del cache['mounts'][real_name] cache_write = salt.utils.mount.write_cache(cache, __opts__) if not cache_write: raise CommandExecutionError('Unable to write mount cache.') return True
java
public CSVParserBuilder<T, K> usingSeparatorWithQuote(char separator, char quote) { this.metadata = new CSVFileMetadata(separator, Optional.of(quote)); return this; }
python
def propagate(self, assumptions=[], phase_saving=0): """ Propagate a given set of assumption literals. """ if self.maplesat: if self.use_timer: start_time = time.clock() # saving default SIGINT handler def_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_DFL) st, props = pysolvers.maplechrono_propagate(self.maplesat, assumptions, phase_saving) # recovering default SIGINT handler def_sigint_handler = signal.signal(signal.SIGINT, def_sigint_handler) if self.use_timer: self.call_time = time.clock() - start_time self.accu_time += self.call_time return bool(st), props if props != None else []
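A usage sketch through PySAT's generic Solver wrapper (the 'maplechrono' name string is an assumption; propagate returns a (status, implied-literals) pair as above):

from pysat.solvers import Solver  # assumes python-sat is installed

with Solver(name='maplechrono') as s:
    s.add_clause([-1, 2])                      # encodes 1 -> 2
    st, props = s.propagate(assumptions=[1])
    print(st, props)                           # True, with 2 among the implied literals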
python
def uniq(self) -> 'TList[T]': """ Usage: >>> TList([1, 2, 3, 2, 1]).uniq() [1, 2, 3] """ rs = TList() for e in self: if e not in rs: rs.append(e) return rs
python
def set_manifest(self, alias, manifest_json): """ Give a name (alias) to a manifest. :param alias: Alias name :type alias: str :param manifest_json: A V2 Schema 2 manifest JSON string :type manifest_json: str """ self._request('put', 'manifests/' + alias, data=manifest_json, headers={'Content-Type': _schema2_mimetype})
java
public T path(String path) { if (path.length() == 0) { this.path += encodePath(path); } else { this.path += "/" + encodePath(path); } return returnThis(); }
python
def extract_bs(self, cutoff, ligcentroid, resis): """Return list of ids from residues belonging to the binding site""" return [obres.GetIdx() for obres in resis if self.res_belongs_to_bs(obres, cutoff, ligcentroid)]
python
def calc_intsize(self): """ Calculates the size of an integration (cross + auto) in bytes """ # assume first cross blob starts after headxml and second is one int of bytes later for k in self.binarychunks.iterkeys(): if int(k.split('/')[3]) == 1 and 'cross' in k.split('/')[-1]: headsize = self.binarychunks[k] break for k in self.binarychunks.iterkeys(): if int(k.split('/')[3]) == 2 and 'cross' in k.split('/')[-1]: intsize = self.binarychunks[k] - headsize break return (headsize, intsize)
java
public void removeLockedNode(String nodeId) throws SQLException { ResultSet resultSet = null; PreparedStatement preparedStatement = null; Connection jdbcConnection = openConnection(); try { InspectionQuery query = getDeleteQuery(nodeId); preparedStatement = jdbcConnection.prepareStatement(query.getStatement()); preparedStatement.executeUpdate(); } finally { JDBCUtils.freeResources(resultSet, preparedStatement, jdbcConnection); } }
python
def info(gandi, resource): """Display information about an ip. Resource can be an ip or id. """ output_keys = ['ip', 'state', 'dc', 'type', 'vm', 'reverse'] datacenters = gandi.datacenter.list() ip = gandi.ip.info(resource) iface = gandi.iface.info(ip['iface_id']) vms = None if iface.get('vm_id'): vm = gandi.iaas.info(iface['vm_id']) vms = {vm['id']: vm} output_ip(gandi, ip, datacenters, vms, {iface['id']: iface}, output_keys) return ip
python
def _wrap_public_method(self, attr): """ Ensure that plugin will call current connection method when accessing as ``plugin.<public_method>(*args, **kwargs)``. """ def wrapper(*args, **kwargs): return getattr(self.connection, attr)(*args, **kwargs) return wrapper
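The delegation pattern this enables, as a self-contained toy (Plugin and Connection here are illustrative stand-ins, not the real classes):

class Connection:
    def ping(self):
        return 'pong'

class Plugin:
    def __init__(self, connection):
        self.connection = connection

    def _wrap_public_method(self, attr):
        def wrapper(*args, **kwargs):
            return getattr(self.connection, attr)(*args, **kwargs)
        return wrapper

    def __getattr__(self, attr):
        # late-bind unknown attributes to whatever connection is current
        return self._wrap_public_method(attr)

print(Plugin(Connection()).ping())  # 'pong', routed through the wrapper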
python
def _read_msg(session): """ Perform a read on input socket to consume message and then return the payload and block_id in a tuple. :param session: Push Session to read data for. """ if len(session.data) == session.message_length: # Data Already completely read. Return return True try: data = session.socket.recv(session.message_length - len(session.data)) if len(data) == 0: raise PushException("No Data on Socket!") session.data += data except ssl.SSLError: # This can happen when select gets triggered # for an SSL socket and data has not yet been # read. Wait for it to get triggered again. return False # Whether or not all data was read. return len(session.data) == session.message_length
java
public IoBuffer fetchOutNetBuffer() { IoBufferEx answer = outNetBuffer; if (answer == null) { return (IoBuffer) emptyBuffer; } outNetBuffer = null; return (IoBuffer) answer.shrink(allocator); }
java
@Override public Identifier toPhysicalColumnName(Identifier name, JdbcEnvironment context) { // call superclass and get string value Identifier columnIdentifier = super.toPhysicalColumnName(name, context); String columnIdentifierText = columnIdentifier.getText(); if (StringUtils.equalsAnyIgnoreCase(columnIdentifierText, RESERVED_WORDS_ORACLE)) { columnIdentifier = convertToLimitedLowerCase(context, columnIdentifier, columnNamePrefix); } return columnIdentifier; }
python
def _parse_name(name: str, as_qvm: bool, noisy: bool) -> Tuple[str, str, bool]: """ Try to figure out whether we're getting a (noisy) qvm, and the associated qpu name. See :py:func:`get_qc` for examples of valid names + flags. """ parts = name.split('-') if len(parts) >= 2 and parts[-2] == 'noisy' and parts[-1] in ['qvm', 'pyqvm']: if as_qvm is not None and (not as_qvm): raise ValueError("The provided qc name indicates you are getting a noisy QVM, " "but you have specified `as_qvm=False`") if noisy is not None and (not noisy): raise ValueError("The provided qc name indicates you are getting a noisy QVM, " "but you have specified `noisy=False`") qvm_type = parts[-1] noisy = True prefix = '-'.join(parts[:-2]) return prefix, qvm_type, noisy if len(parts) >= 1 and parts[-1] in ['qvm', 'pyqvm']: if as_qvm is not None and (not as_qvm): raise ValueError("The provided qc name indicates you are getting a QVM, " "but you have specified `as_qvm=False`") qvm_type = parts[-1] if noisy is None: noisy = False prefix = '-'.join(parts[:-1]) return prefix, qvm_type, noisy if as_qvm is not None and as_qvm: qvm_type = 'qvm' else: qvm_type = None if noisy is None: noisy = False return name, qvm_type, noisy
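Doctest-style checks for the parser above; since _parse_name is a pure function, these follow directly from the code:

print(_parse_name('9q-square-noisy-qvm', None, None))  # ('9q-square', 'qvm', True)
print(_parse_name('9q-square-qvm', None, None))        # ('9q-square', 'qvm', False)
print(_parse_name('Aspen-1', None, None))              # ('Aspen-1', None, False)
print(_parse_name('Aspen-1', True, None))              # ('Aspen-1', 'qvm', False)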
java
public Observable<String> beginGetVpnProfilePackageUrlAsync(String resourceGroupName, String virtualNetworkGatewayName) { return beginGetVpnProfilePackageUrlWithServiceResponseAsync(resourceGroupName, virtualNetworkGatewayName).map(new Func1<ServiceResponse<String>, String>() { @Override public String call(ServiceResponse<String> response) { return response.body(); } }); }
java
public Vector3d transformTranspose(Vector3dc v, Vector3d dest) { v.mulTranspose(this, dest); return dest; }
java
@Override public long skip(long n) throws IOException { if (mInputStream.available() >= n) { return mInputStream.skip(n); } // The number of bytes to skip is possibly large, open a new stream from GCS. mInputStream.close(); mPos += n; try { mObject = mClient.getObject(mBucketName, mKey, null /* ignore ModifiedSince */, null /* ignore UnmodifiedSince */, null /* ignore MatchTags */, null /* ignore NoneMatchTags */, mPos /* byteRangeStart */, null /* ignore byteRangeEnd */); mInputStream = new BufferedInputStream(mObject.getDataInputStream()); } catch (ServiceException e) { throw new IOException(e); } return n; }
java
public RecordId currentRecordId() { int id = rp.currentId(); return new RecordId(new BlockId(fileName, currentBlkNum), id); }
java
String readString() throws IOException { StringBuilder sb = new StringBuilder(50); // first char must be ": char ch = reader.next(); if (ch != '\"') { throw new JsonParseException("Expected \" but actual is: " + ch, reader.readed); } for (;;) { ch = reader.next(); if (ch == '\\') { // escape: \" \\ \/ \b \f \n \r \t char ech = reader.next(); switch (ech) { case '\"': sb.append('\"'); break; case '\\': sb.append('\\'); break; case '/': sb.append('/'); break; case 'b': sb.append('\b'); break; case 'f': sb.append('\f'); break; case 'n': sb.append('\n'); break; case 'r': sb.append('\r'); break; case 't': sb.append('\t'); break; case 'u': // read an unicode uXXXX: int u = 0; for (int i = 0; i < 4; i++) { char uch = reader.next(); if (uch >= '0' && uch <= '9') { u = (u << 4) + (uch - '0'); } else if (uch >= 'a' && uch <= 'f') { u = (u << 4) + (uch - 'a') + 10; } else if (uch >= 'A' && uch <= 'F') { u = (u << 4) + (uch - 'A') + 10; } else { throw new JsonParseException("Unexpected char: " + uch, reader.readed); } } sb.append((char) u); break; default: throw new JsonParseException("Unexpected char: " + ch, reader.readed); } } else if (ch == '\"') { // end of string: break; } else if (ch == '\r' || ch == '\n') { throw new JsonParseException("Unexpected char: " + ch, reader.readed); } else { sb.append(ch); } } return sb.toString(); }
java
private String buildAuthorizationHeader(String serverPrincipalName) throws LoginException { /* * Get the principal from the Subject's private credentials and populate the * client and server principal name for the GSS API */ final String clientPrincipal = getClientPrincipalName(); final CreateAuthorizationHeaderAction action = new CreateAuthorizationHeaderAction(clientPrincipal, serverPrincipalName); /* * Check if the TGT in the Subject's private credentials are valid. If * valid, then we use the TGT in the Subject's private credentials. If not, * we build the Subject's private credentials again from valid TGT in the * Kerberos client cache. */ Set<Object> privateCreds = loginContext.getSubject().getPrivateCredentials(); for (Object privateCred : privateCreds) { if (privateCred instanceof KerberosTicket) { String serverPrincipalTicketName = ((KerberosTicket) privateCred).getServer().getName(); if ((serverPrincipalTicketName.startsWith("krbtgt")) && ((KerberosTicket) privateCred).getEndTime().compareTo(new Date()) == -1) { buildSubjectCredentials(); break; } } } /* * Subject.doAs takes in the Subject context and the action to be run as * arguments. This method executes the action as the Subject given in the * argument. We do this in order to provide the Subject's context so that we * reuse the service ticket which will be populated in the Subject rather * than getting the service ticket from the KDC for each request. The GSS * API populates the service ticket in the Subject and reuses it * */ Subject.doAs(loginContext.getSubject(), action); return action.getNegotiateToken(); }
python
def setup_seq_signals(self, ): """Setup the signals for the sequence page :returns: None :rtype: None :raises: None """ log.debug("Setting up sequence page signals.") self.seq_prj_view_pb.clicked.connect(self.seq_view_prj) self.seq_shot_view_pb.clicked.connect(self.seq_view_shot) self.seq_shot_create_pb.clicked.connect(self.seq_create_shot) self.seq_desc_pte.textChanged.connect(self.seq_save)
python
def run_terraform(self, command='plan'): # noqa pylint: disable=too-many-branches,too-many-statements """Run Terraform.""" response = {'skipped_configs': False} tf_cmd = [command] if command == 'destroy': tf_cmd.append('-force') elif command == 'apply': if 'CI' in self.context.env_vars: tf_cmd.append('-auto-approve=true') else: tf_cmd.append('-auto-approve=false') workspace_tfvars_file = get_workspace_tfvars_file(self.path, self.context.env_name, # noqa self.context.env_region) # noqa backend_options = create_config_backend_options(self.options.get('options', {}), # noqa self.context.env_name, self.context.env_vars) # This filename will only be used if it exists backend_options['filename'] = get_backend_tfvars_file( self.path, self.context.env_name, self.context.env_region ) workspace_tfvar_present = os.path.isfile( os.path.join(self.path, workspace_tfvars_file) ) if workspace_tfvar_present: tf_cmd.append("-var-file=%s" % workspace_tfvars_file) if isinstance(self.options.get('environments', {}).get(self.context.env_name), dict): for (key, val) in self.options['environments'][self.context.env_name].items(): # noqa tf_cmd.extend(['-var', "%s=%s" % (key, val)]) if self.options.get('environments', {}).get(self.context.env_name) or ( workspace_tfvar_present): LOGGER.info("Preparing to run terraform %s on %s...", command, os.path.basename(self.path)) module_defined_tf_var = get_module_defined_tf_var( self.options.get('options', {}).get('terraform_version', {}), self.context.env_name ) if module_defined_tf_var: tf_bin = TFEnv(self.path).install(module_defined_tf_var) elif os.path.isfile(os.path.join(self.path, '.terraform-version')): tf_bin = TFEnv(self.path).install() else: if not which('terraform'): LOGGER.error('Terraform not available (a ' '".terraform-version" file is not present ' 'and "terraform" not found in path). Fix ' 'this by writing a desired Terraform version ' 'to your module\'s .terraform-version file ' 'or installing Terraform.') sys.exit(1) tf_bin = 'terraform' tf_cmd.insert(0, tf_bin) with change_dir(self.path): if not os.path.isdir(os.path.join(self.path, '.terraform')) or ( # noqa os.path.isfile(os.path.join(self.path, '.terraform', FAILED_INIT_FILENAME))): if os.path.isfile(os.path.join(self.path, '.terraform', FAILED_INIT_FILENAME)): LOGGER.info('Previous init failed; trashing ' '.terraform directory and running it ' 'again...') send2trash(os.path.join(self.path, '.terraform')) else: LOGGER.info('.terraform directory missing; running ' '"terraform init"...') run_terraform_init( tf_bin=tf_bin, module_path=self.path, backend_options=backend_options, env_name=self.context.env_name, env_region=self.context.env_region, env_vars=self.context.env_vars ) else: reinit_on_backend_changes( tf_bin=tf_bin, module_path=self.path, backend_options=backend_options, env_name=self.context.env_name, env_region=self.context.env_region, env_vars=self.context.env_vars ) LOGGER.debug('Checking current Terraform workspace...') current_tf_workspace = subprocess.check_output( [tf_bin, 'workspace', 'show'], env=self.context.env_vars ).strip().decode() if current_tf_workspace != self.context.env_name: LOGGER.info("Terraform workspace currently set to %s; " "switching to %s...", current_tf_workspace, self.context.env_name) LOGGER.debug('Checking available Terraform ' 'workspaces...') available_tf_envs = subprocess.check_output( [tf_bin, 'workspace', 'list'], env=self.context.env_vars ).decode() if re.compile("^[*\\s]\\s%s$" % self.context.env_name, re.M).search(available_tf_envs): run_module_command( cmd_list=[tf_bin, 'workspace', 'select', self.context.env_name], env_vars=self.context.env_vars ) else: LOGGER.info("Terraform workspace %s not found; " "creating it...", self.context.env_name) run_module_command( cmd_list=[tf_bin, 'workspace', 'new', self.context.env_name], env_vars=self.context.env_vars ) # Previously, another tf init was run here after every # workspace switch/creation. That does not appear to be # necessary now (this note can be removed in the future, # i.e. after 1.0) if 'SKIP_TF_GET' not in self.context.env_vars: LOGGER.info('Executing "terraform get" to update remote ' 'modules') run_module_command( cmd_list=[tf_bin, 'get', '-update=true'], env_vars=self.context.env_vars ) else: LOGGER.info('Skipping "terraform get" due to ' '"SKIP_TF_GET" environment variable...') LOGGER.info("Running Terraform %s on %s (\"%s\")", command, os.path.basename(self.path), " ".join(tf_cmd)) run_module_command(cmd_list=tf_cmd, env_vars=self.context.env_vars) else: response['skipped_configs'] = True LOGGER.info("Skipping Terraform %s of %s", command, os.path.basename(self.path)) LOGGER.info( "(no tfvars file for this environment/region found -- looking " "for one of \"%s\")", ', '.join(gen_workspace_tfvars_files( self.context.env_name, self.context.env_region))) return response
java
protected void init(CmsObject cms, CmsMessages messages, I_CmsWidgetParameter param, String configuration) { if (configuration == null) { // no configuration String found, return return; } configuration = CmsMacroResolver.resolveMacros(configuration, cms, messages); JSONObject jsonObj = new JSONObject(); try { jsonObj = new JSONObject(configuration); } catch (JSONException e) { // initialization failed return; } // determine the class name that fills in values dynamically setClassName(jsonObj.optString(CONFIG_KEY_CLASS, null)); I_CmsGalleryWidgetDynamicConfiguration dynConf = null; if (getClassName() != null) { try { dynConf = (I_CmsGalleryWidgetDynamicConfiguration)Class.forName(getClassName()).newInstance(); } catch (Exception e) { // class not found } } // determine the initial item list settings setType(jsonObj.optString(CONFIG_KEY_TYPE)); if ((CONFIG_VALUE_DYNAMIC.equals(getType()) || CmsStringUtil.isEmpty(getType())) && (dynConf != null)) { setType(dynConf.getType(cms, messages, param)); } setGalleryTypes(jsonObj.optString(CONFIG_KEY_GALLERYTYPES, null)); setStartup(jsonObj.optString(CONFIG_KEY_STARTUP)); if ((CONFIG_VALUE_DYNAMIC.equals(getStartup()) || CmsStringUtil.isEmpty(getStartup())) && (dynConf != null)) { setStartup(dynConf.getStartup(cms, messages, param)); } }
java
/**
 * Returns the measurement unit matching the group, key and type, or
 * <code>null</code> if no match is found; delegates to the persistence layer.
 */
public static CPMeasurementUnit fetchByG_K_T(long groupId, String key,
	int type) {
	return getPersistence().fetchByG_K_T(groupId, key, type);
}
java
/**
 * Identifies the given faces against the specified person group,
 * unwrapping the service response to return just its body.
 *
 * @return an {@link Observable} emitting the list of identification results
 */
public Observable<List<IdentifyResult>> identifyAsync(String personGroupId, List<UUID> faceIds, IdentifyOptionalParameter identifyOptionalParameter) {
    return identifyWithServiceResponseAsync(personGroupId, faceIds, identifyOptionalParameter).map(new Func1<ServiceResponse<List<IdentifyResult>>, List<IdentifyResult>>() {
        @Override
        public List<IdentifyResult> call(ServiceResponse<List<IdentifyResult>> response) {
            return response.body();
        }
    });
}
python
def set_data_path(self, pth):
    """Set the location of the measures data directory.

    :param pth: The absolute path to the measures data directory.
    """
    if os.path.exists(pth):
        if not os.path.exists(os.path.join(pth, 'data', 'geodetic')):
            raise IOError("The given path doesn't contain a "
                          "'data/geodetic' subdirectory")
        os.environ["AIPSPATH"] = "%s dummy dummy" % pth
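A hedged usage sketch follows; the import assumes this method lives on python-casacore's measures class (the snippet itself doesn't show the host class), and the path is purely illustrative:

import os
from casacore.measures import measures  # assumed host class for set_data_path

dm = measures()
# '/opt/measures_data' is illustrative; it must contain a data/geodetic tree
dm.set_data_path('/opt/measures_data')
print(os.environ['AIPSPATH'])  # -> '/opt/measures_data dummy dummy'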
java
/**
 * Takes a screenshot of the page as currently rendered in the browser.
 *
 * @param driver              the WebDriver to shoot with
 * @param useDevicePixelRatio whether to account for the device pixel ratio
 * @return a PageSnapshot holding the captured image
 */
public static PageSnapshot shootPage(WebDriver driver, boolean useDevicePixelRatio) {
    Browser browser = new Browser(driver, useDevicePixelRatio);
    PageSnapshot pageScreenshot = new PageSnapshot(driver, browser.getDevicePixelRatio());
    pageScreenshot.setImage(browser.takeScreenshot());
    return pageScreenshot;
}
python
def lola_image(self, save=False, name='BaseLola.png'):
    ''' Draw the topography of the region of interest

    Args:
        save (Optional[bool]): Whether or not to save the image.
            Defaults to False.
        name (Optional[str]): Absolute path for the resulting image.
            Defaults to 'BaseLola.png' in the working directory.

    Returns:
        An image corresponding to the topography of the region,
        built from data taken by the LOLA instrument on board LRO.

    Note:
        Nice to use in a jupyter notebook with ``%matplotlib inline``
        activated.

        Feel free to modify this method to plot exactly what you need.
    '''

    fig = plt.figure(figsize=(10, 8))
    ax1 = fig.add_subplot(111)

    lon_m, lon_M, lat_m, lat_M = self.lambert_window(
        self.size_window, self.lat0, self.lon0)
    m = Basemap(llcrnrlon=lon_m, llcrnrlat=lat_m, urcrnrlon=lon_M,
                urcrnrlat=lat_M, resolution='i', projection='laea',
                rsphere=1734400, lat_0=self.lat0, lon_0=self.lon0)

    Xl, Yl, Zl = self.get_arrays('Lola')
    Xl, Yl = m(Xl, Yl)

    CS = m.pcolormesh(Xl, Yl, Zl, cmap='gist_earth',
                      alpha=.5, ax=ax1, zorder=1)
    # m.contour(Xl, Yl, Zl, 20, colors='black', alpha=1.0, zorder=2)

    xc, yc = m(self.lon0, self.lat0)
    ax1.scatter(xc, yc, s=200, marker='v', zorder=2)

    self._add_scale(m, ax1)
    self._add_colorbar(m, CS, ax1, 'Topography')

    if save:
        fig.savefig(name, rasterized=True, dpi=50,
                    bbox_inches='tight', pad_inches=0.1)
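The Basemap construction above (Lambert azimuthal equal-area on a sphere of lunar radius) is useful on its own. A minimal sketch with made-up window coordinates, assuming matplotlib and the basemap toolkit are installed:

import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap

fig, ax = plt.subplots(figsize=(10, 8))
# Illustrative 4x4 degree window centred on 10N, 20E; rsphere is the
# lunar radius in metres
m = Basemap(llcrnrlon=18, llcrnrlat=8, urcrnrlon=22, urcrnrlat=12,
            projection='laea', rsphere=1734400, lat_0=10, lon_0=20,
            resolution='i', ax=ax)
xc, yc = m(20, 10)  # project lon/lat to map coordinates
ax.scatter(xc, yc, s=200, marker='v')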
java
/**
 * Sends an NFSv3 REMOVE request and returns the parsed response.
 *
 * @param request the remove request to send
 * @return the response returned by the server
 * @throws IOException if the RPC call fails
 */
public Nfs3RemoveResponse sendRemove(NfsRemoveRequest request) throws IOException {
    Nfs3RemoveResponse response = new Nfs3RemoveResponse();
    _rpcWrapper.callRpcNaked(request, response);
    return response;
}
python
def select_next(self):
    """
    Selects the next occurrence.

    :return: True in case of success, False if no occurrence could be
        selected.
    """
    current_occurrence = self._current_occurrence()
    occurrences = self.get_occurences()
    if not occurrences:
        # nothing to select
        return False
    current = self._occurrences[current_occurrence]
    cursor_pos = self.editor.textCursor().position()
    if cursor_pos not in range(current[0], current[1] + 1) or \
            current_occurrence == -1:
        # search first occurrence that occurs after the cursor position
        current_occurrence = 0
        for i, (start, end) in enumerate(self._occurrences):
            if end > cursor_pos:
                current_occurrence = i
                break
    else:
        if (current_occurrence == -1 or
                current_occurrence >= len(occurrences) - 1):
            current_occurrence = 0
        else:
            current_occurrence += 1
    self._set_current_occurrence(current_occurrence)
    try:
        cursor = self.editor.textCursor()
        cursor.setPosition(occurrences[current_occurrence][0])
        cursor.setPosition(occurrences[current_occurrence][1],
                           cursor.KeepAnchor)
        self.editor.setTextCursor(cursor)
        return True
    except IndexError:
        return False
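The selection policy is easier to see stripped of the Qt plumbing. A standalone sketch of the same index arithmetic (names are illustrative):

def next_occurrence_index(occurrences, cursor_pos, current=-1):
    """Return the index of the next (start, end) span to select."""
    if not occurrences:
        return None
    start, end = occurrences[current]
    if current == -1 or cursor_pos not in range(start, end + 1):
        # Jump to the first occurrence that ends after the cursor
        for i, (_, e) in enumerate(occurrences):
            if e > cursor_pos:
                return i
        return 0
    # Otherwise advance, wrapping around at the end of the list
    return 0 if current >= len(occurrences) - 1 else current + 1


assert next_occurrence_index([(0, 3), (10, 13), (20, 23)], 5) == 1
assert next_occurrence_index([(0, 3), (10, 13), (20, 23)], 11, current=1) == 2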
python
def _dir_additions(self):
    """ add the string-like attributes from the info_axis.
    If info_axis is a MultiIndex, its first-level values are used.
    """
    additions = {c for c in self._info_axis.unique(level=0)[:100]
                 if isinstance(c, str) and c.isidentifier()}
    return super()._dir_additions().union(additions)
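The effect is easy to demonstrate: identifier-like column names become attributes visible to dir() (and hence tab completion), while other labels are skipped. For example:

import pandas as pd

df = pd.DataFrame({'price': [1.0], 'bid ask': [2.0]})
print('price' in dir(df))    # True  -- valid identifier, added
print('bid ask' in dir(df))  # False -- not an identifier, skipped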
python
def subtract(lhs, rhs): """Returns element-wise difference of the input arrays with broadcasting. Equivalent to ``lhs - rhs``, ``mx.nd.broadcast_sub(lhs, rhs)`` and ``mx.nd.broadcast_minus(lhs, rhs)``. .. note:: If the corresponding dimensions of two arrays have the same size or one of them has size 1, then the arrays are broadcastable to a common shape. Parameters ---------- lhs : scalar or mxnet.ndarray.array First array to be subtracted. rhs : scalar or mxnet.ndarray.array Second array to be subtracted. If ``lhs.shape != rhs.shape``, they must be broadcastable to a common shape. Returns ------- NDArray The element-wise difference of the input arrays. Examples -------- >>> x = mx.nd.ones((2,3)) >>> y = mx.nd.arange(2).reshape((2,1)) >>> z = mx.nd.arange(2).reshape((1,2)) >>> x.asnumpy() array([[ 1., 1., 1.], [ 1., 1., 1.]], dtype=float32) >>> y.asnumpy() array([[ 0.], [ 1.]], dtype=float32) >>> z.asnumpy() array([[ 0., 1.]], dtype=float32) >>> (x-2).asnumpy() array([[-1., -1., -1.], [-1., -1., -1.]], dtype=float32) >>> (x-y).asnumpy() array([[ 1., 1., 1.], [ 0., 0., 0.]], dtype=float32) >>> mx.nd.subtract(x,y).asnumpy() array([[ 1., 1., 1.], [ 0., 0., 0.]], dtype=float32) >>> (z-y).asnumpy() array([[ 0., 1.], [-1., 0.]], dtype=float32) """ # pylint: disable= no-member, protected-access return _ufunc_helper( lhs, rhs, op.broadcast_sub, operator.sub, _internal._minus_scalar, _internal._rminus_scalar)
python
def fetch_all(self, api_client, fetchstatuslogger, q, targets):
    '''
    Make all API calls as defined in metadata.json

    :param api_client:
    :param fetchstatuslogger:
    :param q:
    :param targets:
    :return:
    '''
    self.fetchstatuslogger = fetchstatuslogger
    if targets is not None:
        # Ensure targets is a tuple
        if not isinstance(targets, (list, tuple)):
            # A single target was passed in; wrap it
            targets = (targets,)
        elif not isinstance(targets, tuple):
            targets = tuple(targets)
        for target in targets:
            self._fetch_targets(api_client, q, target)
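The target normalization above is a common pattern that deserves its own helper. A small sketch (the name ensure_tuple is illustrative):

def ensure_tuple(value):
    """Return value as a tuple: wrap scalars, convert other sequences."""
    if isinstance(value, tuple):
        return value
    if isinstance(value, (list, set)):
        return tuple(value)
    return (value,)


assert ensure_tuple('ec2') == ('ec2',)
assert ensure_tuple(['ec2', 's3']) == ('ec2', 's3')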
java
/**
 * Use this API to fetch vpnsessionpolicy_aaauser_binding resources of given name.
 */
public static vpnsessionpolicy_aaauser_binding[] get(nitro_service service, String name) throws Exception{
    vpnsessionpolicy_aaauser_binding obj = new vpnsessionpolicy_aaauser_binding();
    obj.set_name(name);
    vpnsessionpolicy_aaauser_binding response[] = (vpnsessionpolicy_aaauser_binding[]) obj.get_resources(service);
    return response;
}
python
def add_bookmark(request): """ This view serves and validates a bookmark form. If requested via ajax it also returns the drop bookmark form to replace the add bookmark form. """ if request.method == "POST": form = BookmarkForm(user=request.user, data=request.POST) if form.is_valid(): bookmark = form.save() if not request.is_ajax(): messages.success(request, 'Bookmark added') if request.POST.get('next'): return HttpResponseRedirect(request.POST.get('next')) return HttpResponse('Added') return render_to_response( 'admin_tools/menu/remove_bookmark_form.html', {'bookmark': bookmark, 'url': bookmark.url} ) else: form = BookmarkForm(user=request.user) return render_to_response( 'admin_tools/menu/form.html', {'form': form, 'title': 'Add Bookmark'} )
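For completeness, a hedged sketch of wiring this view into a urlconf; the pattern and name are illustrative, and the old-style url() import matches the Django era implied by render_to_response:

from django.conf.urls import url

urlpatterns = [
    url(r'^add_bookmark/$', add_bookmark, name='add-bookmark'),
]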
python
def rotation_matrix(d):
    """
    Calculates a rotation matrix given a vector d. The direction of d
    corresponds to the rotation axis. The length of d corresponds to
    the sin of the angle of rotation.

    Variant of:
    http://mail.scipy.org/pipermail/numpy-discussion/2009-March/040806.html
    """
    sin_angle = np.linalg.norm(d)

    if sin_angle == 0:
        return np.identity(3)

    # Normalize without mutating the caller's array
    d = np.asarray(d, dtype=np.float64) / sin_angle

    eye = np.eye(3)
    ddt = np.outer(d, d)
    skew = np.array([[0, d[2], -d[1]],
                     [-d[2], 0, d[0]],
                     [d[1], -d[0], 0]], dtype=np.float64)

    # cos(angle) is recovered from sin(angle), which restricts this
    # construction to rotation angles of at most 90 degrees
    M = ddt + np.sqrt(1 - sin_angle**2) * (eye - ddt) + sin_angle * skew
    return M
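A quick sanity check, with d chosen as the unit z-axis (so the angle is 90 degrees). Note the convention: applied to column vectors, the matrix appears to be the transpose of the usual Rodrigues form, i.e. it maps x to -y here:

import numpy as np

M = rotation_matrix(np.array([0.0, 0.0, 1.0]))
print(np.round(M @ np.array([1.0, 0.0, 0.0]), 6))  # -> [ 0. -1.  0.]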
java
/**
 * Hashes {@code input} with the given algorithm, optionally prepending a
 * salt, then re-hashes the result for the remaining iterations.
 */
private static byte[] digest(final byte[] input, final String algorithm, final byte[] salt, final int iterations) {
    try {
        MessageDigest digest = MessageDigest.getInstance(algorithm);
        if (salt != null) {
            digest.update(salt);
        }
        byte[] result = digest.digest(input);
        // The salt participates only in the first round; every further
        // iteration hashes the previous digest
        for (int i = 1; i < iterations; i++) {
            digest.reset();
            result = digest.digest(result);
        }
        return result;
    } catch (NoSuchAlgorithmException e) {
        throw new ImpossibleException(e);
    }
}
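For readers following along in Python, the same salt-then-iterate scheme can be sketched with hashlib (names and defaults are illustrative, not part of the original class):

import hashlib


def digest(data, algorithm='SHA-256', salt=None, iterations=1):
    name = algorithm.replace('-', '').lower()  # e.g. 'SHA-256' -> 'sha256'
    h = hashlib.new(name)
    if salt is not None:
        h.update(salt)
    h.update(data)
    result = h.digest()
    # As in the Java version, the salt enters only the first round
    for _ in range(1, iterations):
        result = hashlib.new(name, result).digest()
    return result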