language
stringclasses
2 values
func_code_string
stringlengths
63
466k
java
/**
 * Adds the given crawler names to this request's name list, creating the
 * backing list on first use.
 *
 * @param crawlerNameList crawler names to append
 * @return this request, for call chaining
 */
public GetCrawlerMetricsRequest withCrawlerNameList(String... crawlerNameList) {
    if (this.crawlerNameList == null) {
        // Presize to the varargs length to avoid resizing during the bulk add.
        setCrawlerNameList(new java.util.ArrayList<String>(crawlerNameList.length));
    }
    java.util.Collections.addAll(this.crawlerNameList, crawlerNameList);
    return this;
}
python
def _remove_ordered_from_queue(self, last_caught_up_3PC=None):
    """
    Remove any Ordered that the replica might be sending to node which is
    less than or equal to `last_caught_up_3PC` if `last_caught_up_3PC` is
    passed else remove all ordered, needed in catchup
    """
    # Indices of the Ordered messages that should be dropped.
    doomed = [
        idx for idx, msg in enumerate(self.outBox)
        if isinstance(msg, Ordered) and (
            not last_caught_up_3PC or
            compare_3PC_keys((msg.viewNo, msg.ppSeqNo), last_caught_up_3PC) >= 0)
    ]
    self.logger.trace('{} going to remove {} Ordered messages from outbox'.format(self, len(doomed)))
    # Collect the removed messages in their original (processing) order,
    # then delete from the back so earlier indices stay valid.
    removed = [self.outBox[idx] for idx in doomed]
    for idx in reversed(doomed):
        del self.outBox[idx]
    return removed
python
def clean_base_uri(url):
    """
    Clean up a url so that it is in the form:
    https://<SUB_DOMAIN>.eagleeyenetworks.com/

    :param url: str of the url (scheme optional); ``None`` is passed through
    :return: str of the clean base url, always ending in '/'
    """
    if url is None:
        return url
    if '//' not in url:  # idiomatic membership test (was: not '//' in url)
        # No scheme supplied: default to https.
        url = 'https://' + url
    # Keep everything up to and including the first '/' after the host;
    # when there is no path component, find() returns -1 and the slice is
    # empty, so fall back to the whole url.
    ret = url[:url.find('/', url.find('//') + 2) + 1] or url
    assert '.' in url, "<%s> is not a proper base_uri" % ret
    if not ret.endswith('/'):
        ret += '/'
    return ret
python
def from_tokens(cls, path:PathOrStr, trn_tok:Collection[Collection[str]], trn_lbls:Collection[Union[int,float]],
                val_tok:Collection[Collection[str]], val_lbls:Collection[Union[int,float]], vocab:Vocab=None,
                tst_tok:Collection[Collection[str]]=None, classes:Collection[Any]=None,
                max_vocab:int=60000, min_freq:int=3, **kwargs) -> DataBunch:
    "Create a `TextDataBunch` from tokens and labels. `kwargs` are passed to the dataloader creation."
    # Numericalize with the supplied vocab if given; otherwise one is built,
    # capped at `max_vocab` entries with a minimum token frequency of `min_freq`.
    processor = NumericalizeProcessor(vocab=vocab, max_vocab=max_vocab, min_freq=min_freq)
    src = ItemLists(path, TextList(trn_tok, path=path, processor=processor),
                    TextList(val_tok, path=path, processor=processor))
    # Language-model data is labelled from the text itself; classifiers use the
    # explicit train/validation label lists.
    src = src.label_for_lm() if cls==TextLMDataBunch else src.label_from_lists(trn_lbls, val_lbls, classes=classes)
    # Optional unlabelled test set.
    if tst_tok is not None: src.add_test(TextList(tst_tok, path=path))
    return src.databunch(**kwargs)
java
/**
 * Builds the summary section for methods and adds it to the member summary tree.
 *
 * @param node the XML element under processing (unused directly here)
 * @param memberSummaryTree the content tree to which the summary is added
 */
public void buildMethodsSummary(XMLNode node, Content memberSummaryTree) {
    VisibleMemberMap.Kind kind = VisibleMemberMap.Kind.METHODS;
    MemberSummaryWriter writer = memberSummaryWriters.get(kind);
    VisibleMemberMap visibleMemberMap = getVisibleMemberMap(kind);
    addSummary(writer, visibleMemberMap, true, memberSummaryTree);
}
java
/**
 * Returns the expiration date of the given shard, delegating to the OLAP
 * service after verifying this service is in a usable state.
 *
 * @param appDef application the shard belongs to
 * @param shard  shard name
 * @return expiration date as reported by the OLAP layer
 */
public Date getExpirationDate(ApplicationDefinition appDef, String shard) {
    checkServiceState();
    Date expiration = m_olap.getExpirationDate(appDef, shard);
    return expiration;
}
python
def excel_to_bytes(wb: Workbook) -> bytes:
    """
    Obtain a binary version of an :class:`openpyxl.Workbook`
    representation of an Excel file.
    """
    # Save into an in-memory buffer rather than a real file.
    buffer = io.BytesIO()
    wb.save(buffer)
    return buffer.getvalue()
java
/**
 * Convenience wrapper binding the given raw class with the supplied type
 * arguments via {@code Types.newParameterizedType}.
 *
 * @param base raw (unparameterized) class to parameterize
 * @param args actual type arguments to bind
 * @return the resulting parameterized type
 * @throws Exception declared very broadly; NOTE(review): the delegate call
 *         looks unchecked — confirm whether this throws clause is needed
 */
private static ParameterizedType newParameterizedType(final Class<?> base, Type... args) throws Exception {
    return Types.newParameterizedType(base, args);
}
python
def read(self, entity=None, attrs=None, ignore=None, params=None):
    """Deal with different named data returned from the server
    """
    if attrs is None:
        attrs = self.read_json()
    # The server names these flags with a trailing '?'; strip it so they map
    # onto the entity's attribute names.
    for clean_name in ('override', 'unlimited'):
        attrs[clean_name] = attrs.pop(clean_name + '?')
    return super(Filter, self).read(entity, attrs, ignore, params)
java
/**
 * Marshalls the given request into the protocol representation (generated
 * AWS SDK marshaller).
 *
 * @param createWorkspacesRequest request to marshall; must not be null
 * @param protocolMarshaller target protocol marshaller
 * @throws SdkClientException if the request is null or marshalling fails
 */
public void marshall(CreateWorkspacesRequest createWorkspacesRequest, ProtocolMarshaller protocolMarshaller) {
    if (createWorkspacesRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(createWorkspacesRequest.getWorkspaces(), WORKSPACES_BINDING);
    } catch (Exception e) {
        // Wrap any failure, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
python
def add_derived_quantity(self, derived_quantity, func, *quantities):
    """
    Add a derived quantify modifier.

    Parameters
    ----------
    derived_quantity : str
        name of the derived quantity to be added

    func : callable
        function to calculate the derived quantity
        the number of arguments should equal number of following `quantities`

    quantities : list of str
        quantities to pass to the callable
    """
    if derived_quantity in self._quantity_modifiers:
        raise ValueError('quantity name `{}` already exists'.format(derived_quantity))

    if set(quantities).issubset(self._native_quantities):
        # All inputs are native quantities: store the modifier directly.
        new_modifier = (func,) + quantities
    else:
        # Some inputs are themselves derived: flatten each into its own
        # (function, native inputs...) form and compose them.
        functions = []
        quantities_needed = []
        quantity_count = []
        for q in quantities:
            modifier = self.get_normalized_quantity_modifier(q)
            functions.append(modifier[0])
            quantities_needed.extend(modifier[1:])
            quantity_count.append(len(modifier)-1)

        def _new_func(*x):
            # x carries the concatenated native inputs for all sub-functions;
            # slice it back apart using the per-function argument counts.
            assert len(x) == sum(quantity_count)
            count_current = 0
            new_args = []
            for func_this, count in zip(functions, quantity_count):
                new_args.append(func_this(*x[count_current:count_current+count]))
                count_current += count
            return func(*new_args)

        new_modifier = (_new_func,) + tuple(quantities_needed)

    self.add_quantity_modifier(derived_quantity, new_modifier)
python
def from_first_relation(cls, vertex0, vertex1):
    """Initialize a fresh match based on the first relation."""
    match = cls([(vertex0, vertex1)])
    # Track which vertices of graph 1 form the frontier of the match.
    match.previous_ends1 = {vertex1}
    return match
java
protected void convertNeighbors(DBIDRange ids, DBIDRef ix, boolean square, KNNList neighbours, DoubleArray dist, IntegerArray ind) { for(DoubleDBIDListIter iter = neighbours.iter(); iter.valid(); iter.advance()) { if(DBIDUtil.equal(iter, ix)) { continue; // Skip query point } double d = iter.doubleValue(); dist.add(square ? (d * d) : d); ind.add(ids.getOffset(iter)); } }
java
/**
 * Formats an IPv6 address from its group representation, where a value of -1
 * marks a group that is part of a compressed ("::") zero run.
 *
 * @param hexRepresentation exactly IPV6_LEN groups; -1 entries are compressed
 * @return the textual IPv6 address
 * @throws NullPointerException if the array is null
 * @throws IllegalArgumentException if the array length is not IPV6_LEN
 */
private static String formatAddress6(int[] hexRepresentation) {
    if (hexRepresentation == null) {
        throw new NullPointerException();
    }
    if (hexRepresentation.length != IPV6_LEN) {
        throw new IllegalArgumentException();
    }
    StringBuilder out = new StringBuilder();
    boolean compressing = false;
    for (int idx = 0; idx < hexRepresentation.length; idx++) {
        int group = hexRepresentation[idx];
        if (group == -1) {
            // Emit the compression marker only when entering the run: a
            // leading run needs "::", otherwise the previous group already
            // wrote one ':' so a single ':' completes the "::".
            if (!compressing) {
                compressing = true;
                out.append(idx == 0 ? "::" : ":");
            }
        } else {
            compressing = false;
            out.append(Integer.toHexString(group));
            if (idx + 1 < hexRepresentation.length) {
                out.append(':');
            }
        }
    }
    return out.toString();
}
java
/**
 * Extracts the segment code and segment version from raw segment data.
 *
 * @param sb raw segment data, possibly starting with a leftover delimiter
 * @return a two-element array: [0] = segment code, [1] = segment version;
 *         both empty when the buffer is too short to contain a header
 */
private String[] extractSegId(StringBuffer sb) {
    String[] ret = new String[]{"", ""};
    if (sb.length() > 1) {
        int startpos = 0;
        char ch = sb.charAt(0);
        // Skip a leading delimiter left over from the previous segment.
        if (ch == '+' || ch == ':' || ch == '\'')
            startpos++;
        // extract the first DEG (up to the first '+', or the end of the buffer)
        int endpos = sb.indexOf("+", startpos);
        if (endpos == -1) {
            endpos = sb.length();
        }
        // extract code and version from the first DEG
        // NOTE(review): assumes the header DEG always has at least three
        // ':'-separated elements (code:number:version); des[2] would throw
        // ArrayIndexOutOfBoundsException otherwise — confirm callers guarantee this
        String[] des = sb.substring(startpos, endpos).split(":");
        ret[0] = des[0]; // segment code
        ret[1] = des[2]; // segment version
    }
    return ret;
}
python
def execute(self):
    """ This function Executes the program with set arguments.

    Builds a small bash wrapper script that runs the command with stdout and
    stderr redirected, then runs that script either directly, over ssh, or by
    submitting it to a TORQUE queue via qsub. Updates self.status, self.p and
    the internal timer as side effects.
    """
    prog_cmd = self.get_cmd().strip()
    if prog_cmd == '':
        # Nothing to run: record the failure and log the pieces we had.
        self.status = 'Failure'
        debug.log("Error: No program to execute for %s!"%self.name)
        debug.log(("Could not combine path and arguments into cmdline:"
                   "\n%s %s)\n")%(self.path, ' '.join(self.args)))
    else:
        debug.log("\n\nExecute %s...\n%s" % (self.name, prog_cmd))
        # Create shell script
        script = '%s.sh'%self.name
        if self.wdir != '':
            script = '%s/%s'%(self.wdir, script)
        else:
            script = '%s/%s'%(os.getcwd(), script)
        with open_(script, 'w') as f:
            f.write('#!/bin/bash\n')
            if self.wdir != '':
                f.write('cd {workdir}\n'.format(workdir=self.wdir))
            # Run the command with stdout/stderr captured, remembering its
            # exit code for the trailer below.
            f.write(
                ('touch {stdout} {stderr}\n'
                 'chmod a+r {stdout} {stderr}\n'
                 '{cmd} 1> {stdout} 2> {stderr}\n'
                 'ec=$?\n').format(
                    stdout=self.stdout,
                    stderr=self.stderr,
                    cmd=prog_cmd
                )
            )
            if not self.forcewait:
                # Fire-and-forget mode: append a sentinel to stderr so a later
                # poll can tell whether the run succeeded.
                f.write(('if [ "$ec" -ne "0" ]; then echo "Error" >> {stderr}; '
                         'else echo "Done" >> {stderr}; fi\n').format(
                             stderr=self.stderr))
            f.write('exit $ec\n')
        # Owner rwx, group/other read: the scheduler/remote host must be able
        # to execute it.
        os.chmod(script, 0o744)
        if self.queue is not None:
            # Setup execution of shell script through TORQUE
            other_args = ''
            if self.forcewait:
                other_args += "-K "  # ADDING -K argument if wait() is forced
            # QSUB INFO :: run_time_limit(walltime, dd:hh:mm:ss),
            #              memory(mem, up to 100GB *gigabyte),
            #              processors(ppn, up to 16)
            # USE AS LITTLE AS NEEDED!
            cmd = ('/usr/bin/qsub '
                   '-l nodes=1:ppn={procs},walltime={hours}:00:00,mem={mem}g '
                   '-r y {workdir_arg} {other_args} {cmd}').format(
                       procs=self.procs,
                       hours=self.walltime,
                       mem=self.mem,
                       workdir_arg="-d %s"%(self.wdir) if self.wdir != '' else '',
                       other_args=other_args,
                       cmd=script)
            debug.log("\n\nTORQUE SETUP %s...\n%s\n" % (self.name, cmd))
        else:
            cmd = script
            if self.server is not None:
                # NOTE(review): cmd becomes a full "ssh host script" string;
                # Popen(cmd) below receives it as-is — confirm Popen is
                # configured (or overridden) to handle a string command.
                cmd = "ssh {server} {cmd}".format(
                    server=self.server,
                    cmd=quote(cmd)
                )
        self.status = 'Executing'
        # EXECUTING PROGRAM
        self.update_timer(-time())  # TIME START
        if self.forcewait:
            # Blocking mode: wait for completion and map the exit code onto
            # the status field.
            self.p = Popen(cmd)
            ec = self.p.wait()
            if ec == 0:
                debug.log("Program finished successfully!")
                self.status = 'Done'
            else:
                debug.log("Program failed on execution!")
                self.status = 'Failure'
            self.p = None
        else:
            # WaitOn should be called to determine if the program has ended
            debug.log("CMD: %s"%cmd)
            self.p = Popen(cmd)  # shell=True, executable="/bin/bash"
        self.update_timer(time())  # TIME END
        debug.log("timed: %s" % (self.get_time()))
python
def _is_under_root(self, full_path): """Guard against arbitrary file retrieval.""" if (path.abspath(full_path) + path.sep)\ .startswith(path.abspath(self.root) + path.sep): return True else: return False
java
private Node createUnstubCall(Node functionNode, int stubId) { return astFactory .createCall( // We can't look up the type of the stub creating method, because we add its // definition after type checking. astFactory.createNameWithUnknownType(UNSTUB_METHOD_NAME), astFactory.createNumber(stubId), functionNode) .useSourceInfoIfMissingFromForTree(functionNode); }
java
/**
 * Fixes up variable references in this expression, delegating first to the
 * superclass and then to the wrapped sub-expression so both resolve against
 * the same variable frame.
 *
 * @param vars vector of variable names in scope
 * @param globalsSize number of leading entries in {@code vars} that are global
 */
public void fixupVariables(java.util.Vector vars, int globalsSize) {
    super.fixupVariables(vars, globalsSize);
    m_expr.fixupVariables(vars, globalsSize);
}
java
/**
 * Finds a node usable for prediction: the longest-suffix match of the search
 * string that still has children. Equivalent to the original tail recursion,
 * expressed as a loop that keeps stripping the leading character.
 *
 * @param search text to match against the trie
 * @return a node with children to predict from, or null if none exists
 */
public TrieNode matchPredictor(String search) {
    String current = search;
    while (true) {
        TrieNode cursor = matchEnd(current);
        if (cursor.getNumberOfChildren() > 0) {
            return cursor;
        }
        String string = cursor.getString();
        if (string.isEmpty()) {
            return null;
        }
        current = string.substring(1);
    }
}
java
/**
 * Registers a DTMF-style symbol. An event is fired when the symbol differs
 * from the previous one or the inter-digit interval has elapsed; the activity
 * timestamp is refreshed in every case.
 *
 * @param symbol the detected symbol
 */
public void push(String symbol) {
    long now = System.currentTimeMillis();
    // Evaluate the trigger condition before refreshing lastActivity,
    // since it compares against the previous activity time.
    boolean trigger = !symbol.equals(lastSymbol) || (now - lastActivity > interdigitInterval);
    lastActivity = now;
    if (trigger) {
        lastSymbol = symbol;
        detectorImpl.fireEvent(symbol);
    }
}
java
/**
 * Tests whether the given code point may start an XML Name
 * (the NameStartChar production, without ':').
 * <p>
 * Improvement: the original computed {@code Character.codePointAt("A", 0)}
 * etc. on every call — an obscure and wasteful way to spell the char
 * literals {@code 'A'}, {@code 'Z'}, {@code '_'}, {@code 'a'}, {@code 'z'} —
 * and used a long if/else-if chain where a single boolean expression reads
 * more clearly. Accepted ranges are unchanged.
 *
 * @param codepoint the Unicode code point to test
 * @return true if the code point is a valid XML name start character
 */
public static boolean isXMLNameStart(int codepoint) {
    // ASCII letters and underscore.
    if ((codepoint >= 'A' && codepoint <= 'Z')
            || codepoint == '_'
            || (codepoint >= 'a' && codepoint <= 'z')) {
        return true;
    }
    // Remaining ranges of the NameStartChar production.
    return (codepoint >= 0xC0 && codepoint <= 0xD6)
            || (codepoint >= 0xD8 && codepoint <= 0xF6)
            || (codepoint >= 0xF8 && codepoint <= 0x2FF)
            || (codepoint >= 0x370 && codepoint <= 0x37D)
            || (codepoint >= 0x37F && codepoint <= 0x1FFF)
            || (codepoint >= 0x200C && codepoint <= 0x200D)
            || (codepoint >= 0x2070 && codepoint <= 0x218F)
            || (codepoint >= 0x2C00 && codepoint <= 0x2FEF)
            || (codepoint >= 0x3001 && codepoint <= 0xD7FF)
            || (codepoint >= 0xF900 && codepoint <= 0xFDCF)
            || (codepoint >= 0xFDF0 && codepoint <= 0xFFFD)
            || (codepoint >= 0x10000 && codepoint <= 0xEFFFF);
}
java
/**
 * Creates a new builder pre-populated with the given status.
 *
 * @param status initial status for the builder
 * @return a fresh builder carrying the status
 */
public static <T extends ResourceId> Builder<T> newBuilder(Status status) {
    Builder<T> builder = new Builder<T>();
    return builder.setStatus(status);
}
python
def approx_equals(self, other, atol):
    """Return ``True`` if ``other`` is equal to this set up to ``atol``.

    Parameters
    ----------
    other :
        Object to be tested.
    atol : float
        Maximum allowed difference in maximum norm between the
        interval endpoints.

    Examples
    --------
    >>> rbox1 = IntervalProd(0, 0.5)
    >>> rbox2 = IntervalProd(0, np.sqrt(0.5)**2)
    >>> rbox1.approx_equals(rbox2, atol=0)  # Numerical error
    False
    >>> rbox1.approx_equals(rbox2, atol=1e-15)
    True
    """
    # Identity is always an exact match; any non-IntervalProd never matches.
    if other is self:
        return True
    if not isinstance(other, IntervalProd):
        return False
    mins_close = np.allclose(self.min_pt, other.min_pt, atol=atol, rtol=0.0)
    maxs_close = np.allclose(self.max_pt, other.max_pt, atol=atol, rtol=0.0)
    return mins_close and maxs_close
python
def check_dir(directory, newly_created_files):
    """Returns list of files that fail the check."""
    failures = []
    for root, _dirs, files in os.walk(directory):
        # Only real python modules are checked; package markers are exempt.
        candidates = (
            os.path.join(root, name) for name in files
            if name.endswith('.py') and os.path.basename(name) != '__init__.py'
        )
        for filename in candidates:
            try:
                check_header(filename, filename in newly_created_files)
            except HeaderCheckFailure as e:
                failures.append(e.message)
    return failures
java
/**
 * Converts a serialized literal into an {@link IfcStructuralSurfaceActivityTypeEnum}
 * (generated EMF factory method).
 *
 * @param eDataType the EMF data type being deserialized
 * @param initialValue the literal value from the serialized model
 * @return the matching enumerator
 * @throws IllegalArgumentException if the literal is not a valid enumerator
 */
public IfcStructuralSurfaceActivityTypeEnum createIfcStructuralSurfaceActivityTypeEnumFromString(
        EDataType eDataType, String initialValue) {
    IfcStructuralSurfaceActivityTypeEnum result = IfcStructuralSurfaceActivityTypeEnum.get(initialValue);
    if (result == null)
        throw new IllegalArgumentException(
                "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
    return result;
}
python
def grow_slice(slc, size):
    """
    Grow a slice object by 1 in each direction without overreaching the list.

    Parameters
    ----------
    slc: slice
        slice object to grow
    size: int
        list length

    Returns
    -------
    slc: slice
        extended slice
    """
    # Clamp each end to the valid [0, size] range.
    start = max(slc.start - 1, 0)
    stop = min(slc.stop + 1, size)
    return slice(start, stop)
python
def battery_percent(self):
    """Get batteries capacity percent."""
    if not batinfo_tag or not self.bat.stat:
        return []
    # A computer may carry more than one battery: average their capacities.
    total = 0
    for battery in self.bat.stat:
        try:
            total += int(battery.capacity)
        except ValueError:
            # Any unparsable capacity invalidates the whole reading.
            return []
    return int(total / len(self.bat.stat))
java
/**
 * Sets the X origin area offset and, when required, emits an EMF SET
 * notification carrying the old and new values (generated EMF accessor).
 *
 * @param newXoaOset the new offset value
 */
public void setXoaOset(Integer newXoaOset) {
    Integer oldXoaOset = xoaOset;
    xoaOset = newXoaOset;
    if (eNotificationRequired())
        eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.OBP__XOA_OSET, oldXoaOset, xoaOset));
}
java
/**
 * Returns an OrderedProperty for the given storable property and direction,
 * first wrapping the property into a chained property.
 *
 * @param property property to order by
 * @param direction requested sort direction
 * @return the ordered property
 */
public static <S extends Storable> OrderedProperty<S> get(StorableProperty<S> property, Direction direction) {
    return get(ChainedProperty.get(property), direction);
}
java
/**
 * Defines the scrollable areas above and below the dialog; either bound may
 * be null to leave that side unset.
 *
 * @param top area above the dialog, or null
 * @param bottom area below the dialog, or null
 */
public final void setDialogScrollableArea(@Nullable final Area top, @Nullable final Area bottom) {
    this.dialogScrollableArea = ScrollableArea.create(top, bottom);
}
python
def list_datastores(call=None):
    '''
    Returns a list of data stores on OpenNebula.

    .. versionadded:: 2016.3.0

    CLI Example:

    .. code-block:: bash

        salt-cloud -f list_datastores opennebula
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The list_datastores function must be called with -f or --function.'
        )

    server, user, password = _get_xml_rpc()
    auth = ':'.join([user, password])
    datastore_pool = server.one.datastorepool.info(auth)[1]

    # Index each datastore entry by its NAME element.
    return {
        datastore.find('NAME').text: _xml_to_dict(datastore)
        for datastore in _get_xml(datastore_pool)
    }
java
/**
 * Writes the given content to the file at {@code location} using the given
 * charset. Failures are reported through the boolean return value rather
 * than an exception.
 *
 * @return true when the write succeeded, false on any I/O error
 */
@Override
public boolean save(String content, Charset charset, String location) {
    try {
        Files.writeToFile(new File(location), content, charset);
        return true;
    } catch (IOException ex) {
        // Best-effort contract: signal failure via the return value.
        return false;
    }
}
java
/**
 * Reads the "stack buttons" preference, falling back to the resource-defined
 * default when the preference has not been set.
 */
private boolean shouldStackButtons() {
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getActivity());
    return prefs.getBoolean(
            getString(R.string.stack_buttons_preference_key),
            getResources().getBoolean(R.bool.stack_buttons_preference_default_value));
}
java
/**
 * Appends the date and time for the given epoch millisecond timestamp,
 * separated by a single space.
 * <p>
 * NOTE(review): mutates the shared {@code date} and {@code calendar} fields,
 * so this appears not thread-safe — confirm callers serialize access.
 *
 * @param epochMillis timestamp in milliseconds since the epoch
 */
private void appendDateTime(long epochMillis) {
    date.setTime(epochMillis);
    calendar.setTime(date);
    appendDate();
    write(' ');
    appendTime();
}
python
def transform_obs(self, obs):
    """Render some SC2 observations into something an agent can handle."""
    # Placeholder for unit-table outputs that may be absent this frame.
    empty = np.array([], dtype=np.int32).reshape((0, 7))
    out = named_array.NamedDict({  # Fill out some that are sometimes empty.
        "single_select": empty,
        "multi_select": empty,
        "build_queue": empty,
        "cargo": empty,
        "cargo_slots_available": np.array([0], dtype=np.int32),
    })

    def or_zeros(layer, size):
        # Substitute an all-zero layer when a feature layer is missing.
        if layer is not None:
            return layer.astype(np.int32, copy=False)
        else:
            return np.zeros((size.y, size.x), dtype=np.int32)

    aif = self._agent_interface_format

    # Screen/minimap feature layers, stacked into (feature, y, x) arrays.
    if aif.feature_dimensions:
        out["feature_screen"] = named_array.NamedNumpyArray(
            np.stack(or_zeros(f.unpack(obs.observation),
                              aif.feature_dimensions.screen)
                     for f in SCREEN_FEATURES),
            names=[ScreenFeatures, None, None])
        out["feature_minimap"] = named_array.NamedNumpyArray(
            np.stack(or_zeros(f.unpack(obs.observation),
                              aif.feature_dimensions.minimap)
                     for f in MINIMAP_FEATURES),
            names=[MinimapFeatures, None, None])

    if aif.rgb_dimensions:
        out["rgb_screen"] = Feature.unpack_rgb_image(
            obs.observation.render_data.map).astype(np.int32)
        out["rgb_minimap"] = Feature.unpack_rgb_image(
            obs.observation.render_data.minimap).astype(np.int32)

    out["last_actions"] = np.array(
        [self.reverse_action(a).function for a in obs.actions],
        dtype=np.int32)

    out["action_result"] = np.array([o.result for o in obs.action_errors],
                                    dtype=np.int32)

    out["alerts"] = np.array(obs.observation.alerts, dtype=np.int32)

    out["game_loop"] = np.array([obs.observation.game_loop], dtype=np.int32)

    # Score vectors; the element order must match the enum used in `names`.
    score_details = obs.observation.score.score_details
    out["score_cumulative"] = named_array.NamedNumpyArray([
        obs.observation.score.score,
        score_details.idle_production_time,
        score_details.idle_worker_time,
        score_details.total_value_units,
        score_details.total_value_structures,
        score_details.killed_value_units,
        score_details.killed_value_structures,
        score_details.collected_minerals,
        score_details.collected_vespene,
        score_details.collection_rate_minerals,
        score_details.collection_rate_vespene,
        score_details.spent_minerals,
        score_details.spent_vespene,
    ], names=ScoreCumulative, dtype=np.int32)

    def get_score_details(key, details, categories):
        # Pull one row of the score table by name, one value per category.
        row = getattr(details, key.name)
        return [getattr(row, category.name) for category in categories]

    out["score_by_category"] = named_array.NamedNumpyArray([
        get_score_details(key, score_details, ScoreCategories)
        for key in ScoreByCategory
    ], names=[ScoreByCategory, ScoreCategories], dtype=np.int32)

    out["score_by_vital"] = named_array.NamedNumpyArray([
        get_score_details(key, score_details, ScoreVitals)
        for key in ScoreByVital
    ], names=[ScoreByVital, ScoreVitals], dtype=np.int32)

    player = obs.observation.player_common
    out["player"] = named_array.NamedNumpyArray([
        player.player_id,
        player.minerals,
        player.vespene,
        player.food_used,
        player.food_cap,
        player.food_army,
        player.food_workers,
        player.idle_worker_count,
        player.army_count,
        player.warp_gate_count,
        player.larva_count,
    ], names=Player, dtype=np.int32)

    def unit_vec(u):
        # Compact 7-element encoding used by the UI-panel unit tables.
        return np.array((
            u.unit_type,
            u.player_relative,
            u.health,
            u.shields,
            u.energy,
            u.transport_slots_taken,
            int(u.build_progress * 100),  # discretize
        ), dtype=np.int32)

    ui = obs.observation.ui_data

    with sw("ui"):
        # Control groups: (leader unit type, unit count) per group slot.
        groups = np.zeros((10, 2), dtype=np.int32)
        for g in ui.groups:
            groups[g.control_group_index, :] = (g.leader_unit_type, g.count)
        out["control_groups"] = groups

        if ui.single:
            out["single_select"] = named_array.NamedNumpyArray(
                [unit_vec(ui.single.unit)], [None, UnitLayer])

        if ui.multi and ui.multi.units:
            out["multi_select"] = named_array.NamedNumpyArray(
                [unit_vec(u) for u in ui.multi.units], [None, UnitLayer])

        if ui.cargo and ui.cargo.passengers:
            # The transporter itself is exposed through single_select here.
            out["single_select"] = named_array.NamedNumpyArray(
                [unit_vec(ui.single.unit)], [None, UnitLayer])
            out["cargo"] = named_array.NamedNumpyArray(
                [unit_vec(u) for u in ui.cargo.passengers], [None, UnitLayer])
            out["cargo_slots_available"] = np.array([ui.cargo.slots_available],
                                                    dtype=np.int32)

        if ui.production and ui.production.build_queue:
            # The producing structure is exposed through single_select here.
            out["single_select"] = named_array.NamedNumpyArray(
                [unit_vec(ui.production.unit)], [None, UnitLayer])
            out["build_queue"] = named_array.NamedNumpyArray(
                [unit_vec(u) for u in ui.production.build_queue],
                [None, UnitLayer])

    def full_unit_vec(u, pos_transform, is_raw=False):
        # Full per-unit encoding used by feature_units/raw_units.
        screen_pos = pos_transform.fwd_pt(
            point.Point.build(u.pos))
        screen_radius = pos_transform.fwd_dist(u.radius)
        return np.array((
            # Match unit_vec order
            u.unit_type,
            u.alliance,  # Self = 1, Ally = 2, Neutral = 3, Enemy = 4
            u.health,
            u.shield,
            u.energy,
            u.cargo_space_taken,
            int(u.build_progress * 100),  # discretize
            # Resume API order
            int(u.health / u.health_max * 255) if u.health_max > 0 else 0,
            int(u.shield / u.shield_max * 255) if u.shield_max > 0 else 0,
            int(u.energy / u.energy_max * 255) if u.energy_max > 0 else 0,
            u.display_type,  # Visible = 1, Snapshot = 2, Hidden = 3
            u.owner,  # 1-15, 16 = neutral
            screen_pos.x,
            screen_pos.y,
            u.facing,
            screen_radius,
            u.cloak,  # Cloaked = 1, CloakedDetected = 2, NotCloaked = 3
            u.is_selected,
            u.is_blip,
            u.is_powered,
            u.mineral_contents,
            u.vespene_contents,
            # Not populated for enemies or neutral
            u.cargo_space_max,
            u.assigned_harvesters,
            u.ideal_harvesters,
            u.weapon_cooldown,
            len(u.orders),
            u.tag if is_raw else 0
        ), dtype=np.int32)

    raw = obs.observation.raw_data

    if aif.use_feature_units:
        with sw("feature_units"):
            # Update the camera location so we can calculate world to screen pos
            self._update_camera(point.Point.build(raw.player.camera))
            feature_units = []
            for u in raw.units:
                if u.is_on_screen and u.display_type != sc_raw.Hidden:
                    feature_units.append(
                        full_unit_vec(u, self._world_to_feature_screen_px))
            out["feature_units"] = named_array.NamedNumpyArray(
                feature_units, [None, FeatureUnit], dtype=np.int32)

    if aif.use_raw_units:
        with sw("raw_units"):
            raw_units = [full_unit_vec(u, self._world_to_world_tl, is_raw=True)
                         for u in raw.units]
            out["raw_units"] = named_array.NamedNumpyArray(
                raw_units, [None, FeatureUnit], dtype=np.int32)

    if aif.use_unit_counts:
        with sw("unit_counts"):
            # Count only the agent's own units, keyed by unit type.
            unit_counts = collections.defaultdict(int)
            for u in raw.units:
                if u.alliance == sc_raw.Self:
                    unit_counts[u.unit_type] += 1
            out["unit_counts"] = named_array.NamedNumpyArray(
                sorted(unit_counts.items()), [None, UnitCounts], dtype=np.int32)

    if aif.use_camera_position:
        camera_position = self._world_to_world_tl.fwd_pt(
            point.Point.build(raw.player.camera))
        out["camera_position"] = np.array((camera_position.x, camera_position.y),
                                          dtype=np.int32)

    out["available_actions"] = np.array(self.available_actions(obs.observation),
                                        dtype=np.int32)

    return out
java
/**
 * Validates the key and values of a multi-value property operation and, when
 * valid, merges the new values into the existing ones and pushes the result.
 * Validation problems are reported through the error/validation helpers
 * rather than thrown.
 *
 * @param values the values to apply; empty/null triggers an error report
 * @param key the property key; null is silently ignored
 * @param command the multi-value command being applied (e.g. add/remove)
 */
private void _handleMultiValues(ArrayList<String> values, String key, String command) {
    if (key == null) return;

    if (values == null || values.isEmpty()) {
        _generateEmptyMultiValueError(key);
        return;
    }

    ValidationResult vr;

    // validate the key
    vr = validator.cleanMultiValuePropertyKey(key);

    // Check for an error
    if (vr.getErrorCode() != 0) {
        pushValidationResult(vr);
    }

    // reset the key
    Object _key = vr.getObject();
    String cleanKey = (_key != null) ? vr.getObject().toString() : null;

    // if key is empty generate an error and return
    if (cleanKey == null || cleanKey.isEmpty()) {
        _generateInvalidMultiValueKeyError(key);
        return;
    }
    key = cleanKey;

    try {
        JSONArray currentValues = _constructExistingMultiValue(key, command);
        JSONArray newValues = _cleanMultiValues(values, key);
        _validateAndPushMultiValue(currentValues, newValues, values, key, command);
    } catch (Throwable t) {
        // Never let a multi-value failure crash the caller; log it instead.
        getConfigLogger().verbose(getAccountId(), "Error handling multi value operation for key " + key, t);
    }
}
python
def make_script_sig(stack_script, redeem_script):
    '''
    str, str -> bytearray
    '''
    # Append the hex-serialized redeem script to the stack script, then
    # serialize the combined script.
    serialized_redeem = serialization.hex_serialize(redeem_script)
    return serialization.serialize(
        '{} {}'.format(stack_script, serialized_redeem))
python
def _check_requirements(self):
    """
    Check if VPCS is available with the correct version.

    Legacy generator-style coroutine (`yield from`): raises VPCSError when the
    executable path is unset, missing, or not executable, then delegates the
    version check.
    """
    path = self._vpcs_path()
    if not path:
        raise VPCSError("No path to a VPCS executable has been set")

    # This raise an error if ubridge is not available
    self.ubridge_path

    if not os.path.isfile(path):
        raise VPCSError("VPCS program '{}' is not accessible".format(path))

    if not os.access(path, os.X_OK):
        raise VPCSError("VPCS program '{}' is not executable".format(path))

    yield from self._check_vpcs_version()
python
def _set_soo(self, v, load=False):
    """
    Setter method for soo, mapped from YANG variable
    /rbridge_id/route_map/content/set/extcommunity/soo (container)
    If this variable is read-only (config: false) in the source YANG file,
    then _set_soo is considered as a private method. Backends looking to
    populate this variable should do so via calling thisObj._set_soo()
    directly.

    YANG Description: Site-of-Origin extended community
    """
    # Unwrap a previously-wrapped value before re-wrapping it below.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Generated pyangbind code: coerce the value into the YANG container type.
        t = YANGDynClass(v,base=soo.soo, is_container='container', presence=False, yang_name="soo", rest_name="soo", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Site-of-Origin extended community'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """soo must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=soo.soo, is_container='container', presence=False, yang_name="soo", rest_name="soo", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Site-of-Origin extended community'}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)""",
        })

    self.__soo = t
    # Notify the containing object, if it tracks child modifications.
    if hasattr(self, '_set'):
        self._set()
java
/**
 * Inserts a track into the playlist at the given position. The first track
 * ever added also becomes the current track (index -1 is the "no current
 * track" sentinel).
 *
 * @param position insertion index within the playlist
 * @param track track to insert
 */
public void add(int position, SoundCloudTrack track) {
    if (mCurrentTrackIndex == -1) {
        mCurrentTrackIndex = 0;
    }
    mSoundCloudPlaylist.addTrack(position, track);
}
java
/**
 * Lazily resolves and caches the EClass for IfcElectricConductanceMeasure
 * from the registered Ifc4 package (generated EMF accessor).
 */
@Override
public EClass getIfcElectricConductanceMeasure() {
    if (ifcElectricConductanceMeasureEClass == null) {
        // Classifier index 801 is fixed by the generated package model.
        ifcElectricConductanceMeasureEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                .getEClassifiers().get(801);
    }
    return ifcElectricConductanceMeasureEClass;
}
python
def get_default_config(self):
    """
    Return default config.

    path: Graphite path output
    ksm_path: location where KSM kernel data can be found
    """
    # Start from the parent collector's defaults and overlay KSM settings.
    config = super(KSMCollector, self).get_default_config()
    config.update(dict(path='ksm', ksm_path='/sys/kernel/mm/ksm'))
    return config
python
def listar_por_grupo_equipamento(self, id_grupo_equipamento):
    """List every user-group permission defined on an equipment group.

    :param id_grupo_equipamento: Identifier of the equipment group used to
        filter the search.

    :return: Dictionary with the following structure:

    ::

      {'direito_grupo_equipamento': [{'id_grupo_equipamento': < id_grupo_equipamento >,
      'exclusao': < exclusao >,
      'alterar_config': < alterar_config >,
      'nome_grupo_equipamento': < nome_grupo_equipamento >,
      'id_grupo_usuario': < id_grupo_usuario >,
      'escrita': < escrita >,
      'nome_grupo_usuario': < nome_grupo_usuario >,
      'id': < id >,
      'leitura': < leitura >}, ... other permissions ...]}

    :raise InvalidParameterError: The equipment-group identifier is null or invalid.
    :raise DataBaseError: networkapi failed while accessing the database.
    :raise XMLError: networkapi failed while generating the XML response.
    """
    # Validate the id before issuing the request.
    if not is_valid_int_param(id_grupo_equipamento):
        raise InvalidParameterError(
            u'O identificador do grupo de equipamento é inválido ou não foi informado.')

    url = 'direitosgrupoequipamento/egrupo/' + \
        str(id_grupo_equipamento) + '/'

    code, map = self.submit(None, 'GET', url)

    key = 'direito_grupo_equipamento'
    return get_list_map(self.response(code, map, [key]), key)
python
async def SetMetricCredentials(self, creds):
    '''
    creds : typing.Sequence[~ApplicationMetricCredential]
    Returns -> typing.Sequence[~ErrorResult]
    '''
    # Build the RPC request for the Application facade (version 4).
    params = {'creds': creds}
    msg = {'type': 'Application',
           'request': 'SetMetricCredentials',
           'version': 4,
           'params': params}
    reply = await self.rpc(msg)
    return reply
python
def insert(self, **data):
    """
    Insert the passed +data+ into the table. Raises Invalid if a where
    clause is present (i.e. no INSERT INTO table WHERE)
    """
    if self.where_clause:
        raise Invalid("Cannot insert with 'where' clause.")
    # Snapshot the items once so column order matches the value order below.
    items = list(data.items())
    columns = ", ".join(name for name, _ in items)
    placeholders = ", ".join("?" for _ in items)
    cmd = "insert into {table} ({attrs}) values ({values})".format(
        table=self.table_name, attrs=columns, values=placeholders)
    handle = Repo.db.execute(cmd, [value for _, value in items])
    # Return the id of the added row
    return handle.lastrowid
java
/**
 * Marshalls the given validation profile into the protocol representation
 * (generated AWS SDK marshaller).
 *
 * @param algorithmValidationProfile profile to marshall; must not be null
 * @param protocolMarshaller target protocol marshaller
 * @throws SdkClientException if the profile is null or marshalling fails
 */
public void marshall(AlgorithmValidationProfile algorithmValidationProfile, ProtocolMarshaller protocolMarshaller) {
    if (algorithmValidationProfile == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(algorithmValidationProfile.getProfileName(), PROFILENAME_BINDING);
        protocolMarshaller.marshall(algorithmValidationProfile.getTrainingJobDefinition(), TRAININGJOBDEFINITION_BINDING);
        protocolMarshaller.marshall(algorithmValidationProfile.getTransformJobDefinition(), TRANSFORMJOBDEFINITION_BINDING);
    } catch (Exception e) {
        // Wrap any failure, preserving the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java
/**
 * Adds a column mapping for every column described by the given metadata.
 * <p>
 * Bug fix: the Datastax {@code ColumnDefinitions} accessors are 0-based, so
 * the previous 1-based loop ({@code i = 1; i <= size}) skipped the first
 * column and read one past the last, raising IndexOutOfBoundsException.
 *
 * @param metaData column definitions to map
 * @return this builder, for chaining
 */
public DatastaxMapperBuilder<T> addMapping(final ColumnDefinitions metaData) {
    for (int i = 0; i < metaData.size(); i++) {
        addMapping(metaData.getName(i), i, metaData.getType(i), new Object[0]);
    }
    return this;
}
java
/**
 * Accumulates a new sample: updates the superclass statistics first, then the
 * running min/max and the sum accumulator.
 * <p>
 * NOTE(review): no synchronization is visible here — confirm instances are
 * confined to a single thread (or combined, not shared).
 *
 * @param value the sample to accumulate
 */
@Override
public void accept(final double value) {
    super.accept(value);
    _min = Math.min(_min, value);
    _max = Math.max(_max, value);
    _sum.add(value);
}
java
/**
 * Returns the n largest date-times in this column, in descending order.
 * Fewer than n values are returned when the column is shorter than n.
 *
 * @param n maximum number of values to return
 * @return the largest values, newest first
 */
public List<LocalDateTime> top(int n) {
    // Sort a copy of the packed values descending, then unpack the head.
    long[] packed = data.toLongArray();
    LongArrays.parallelQuickSort(packed, DescendingLongComparator.instance());
    int limit = Math.min(n, packed.length);
    List<LocalDateTime> top = new ArrayList<>(Math.max(limit, 0));
    for (int i = 0; i < limit; i++) {
        top.add(PackedLocalDateTime.asLocalDateTime(packed[i]));
    }
    return top;
}
java
/**
 * Invokes the underlying driver method that produces a ResultSet "cursor"
 * and wraps the raw result set in the WebSphere JDBC wrapper before
 * returning it, with entry/exit tracing.
 *
 * @param implObject the underlying driver object to invoke on
 * @param method the reflective method producing the ResultSet
 * @param args arguments for the reflective call
 * @return the wrapped result set, or null if the driver returned null
 */
private Object getCursor(Object implObject, Method method, Object[] args)
        throws IllegalAccessException, IllegalArgumentException, InvocationTargetException, SQLException {
    final boolean trace = TraceComponent.isAnyTracingEnabled();
    if (trace && tc.isEntryEnabled())
        Tr.entry(tc, "getCursor", this, args[0]);

    ResultSet rsetImpl = (ResultSet) method.invoke(implObject, args);
    // Never hand the raw driver ResultSet to the application.
    WSJdbcResultSet rsetWrapper = rsetImpl == null ? null : createWrapper(rsetImpl);

    if (trace && tc.isEntryEnabled())
        Tr.exit(tc, "getCursor", rsetWrapper);
    return rsetWrapper;
}
java
/**
 * Sets (or clears, with null) the footer divider drawable, updating the
 * cached divider height and triggering a redraw of the content view.
 *
 * @param drawable the new divider, or null to remove it
 */
public void setFooterDivider(Drawable drawable) {
    mFooterDivider = drawable;
    mFooterDividerHeight = (drawable != null) ? drawable.getIntrinsicHeight() : 0;
    // Skip the draw pass entirely when there is nothing to draw.
    mContentView.setWillNotDraw(drawable == null);
    mContentView.invalidate();
}
java
/**
 * Adds every element of the iterable to the target collection.
 *
 * @return true if the target collection was modified
 */
public static <T> boolean addAll(Collection<T> target, Iterable<? extends T> iterable) {
    // Collections get the optimized bulk-add path; everything else goes
    // through the iterator helper.
    return (iterable instanceof Collection)
            ? target.addAll((Collection<? extends T>) iterable)
            : Iterators.addAll(target, iterable.iterator());
}
java
/**
 * Injects the given proxy into the field of the target component, making the
 * field accessible first if needed.
 * <p>
 * Improvement: the previous failure message was the meaningless "Bumm"; it
 * now names the field and target so injection failures are diagnosable
 * (the cause is still preserved).
 *
 * @param component object whose field is set
 * @param field field to inject into
 * @param proxy value to inject
 * @throws RuntimeException wrapping any reflective failure
 */
protected void setField(Object component, Field field, Object proxy) {
    try {
        checkAccessabilityOfField(field);
        field.set(component, proxy);
    } catch (Exception e) {
        throw new RuntimeException(
                "Could not inject proxy into field '" + field.getName() + "' of " + component, e);
    }
}
python
def plot_line_loading_diff(networkA, networkB, timestep=0):
    """
    Plot difference in line loading between two networks
    (with and without switches) as color on lines

    Positive values mean that line loading with switches is bigger than
    without
    Plot switches as small dots

    Parameters
    ----------
    networkA : PyPSA network container
        Holds topology of grid with switches
        including results from powerflow analysis
    networkB : PyPSA network container
        Holds topology of grid without switches
        including results from powerflow analysis
    filename : str
        Specify filename
        If not given, figure will be show directly
    timestep : int
        timestep to show, default is 0
    """
    # NOTE(review): the docstring documents a ``filename`` parameter that the
    # signature does not accept -- verify against callers.

    # new colormap to make sure 0% difference has the same color in every plot
    def shiftedColorMap(
            cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'):
        '''
        Function to offset the "center" of a colormap. Useful for
        data with a negative min and positive max and you want the
        middle of the colormap's dynamic range to be at zero

        Input
        -----
        cmap : The matplotlib colormap to be altered
        start : Offset from lowest point in the colormap's range.
            Defaults to 0.0 (no lower ofset). Should be between
            0.0 and `midpoint`.
        midpoint : The new center of the colormap. Defaults to
            0.5 (no shift). Should be between 0.0 and 1.0. In
            general, this should be 1 - vmax/(vmax + abs(vmin))
            For example if your data range from -15.0 to +5.0 and
            you want the center of the colormap at 0.0, `midpoint`
            should be set to 1 - 5/(5 + 15)) or 0.75
        stop : Offset from highets point in the colormap's range.
            Defaults to 1.0 (no upper ofset). Should be between
            `midpoint` and 1.0.
        '''
        cdict = {
            'red': [],
            'green': [],
            'blue': [],
            'alpha': []
        }

        # regular index to compute the colors
        reg_index = np.linspace(start, stop, 257)

        # shifted index to match the data
        shift_index = np.hstack([
            np.linspace(0.0, midpoint, 128, endpoint=False),
            np.linspace(midpoint, 1.0, 129, endpoint=True)
        ])

        # Sample the source colormap at the regular positions but anchor the
        # samples at the shifted positions, moving the colormap's center.
        for ri, si in zip(reg_index, shift_index):
            r, g, b, a = cmap(ri)

            cdict['red'].append((si, r, r))
            cdict['green'].append((si, g, g))
            cdict['blue'].append((si, b, b))
            cdict['alpha'].append((si, a, a))

        newcmap = matplotlib.colors.LinearSegmentedColormap(name, cdict)
        plt.register_cmap(cmap=newcmap)

        return newcmap

    # calculate difference in loading between both networks
    # (snapshot-weighted absolute active power flow at the chosen timestep)
    loading_switches = abs(
        networkA.lines_t.p0.mul(networkA.snapshot_weightings, axis=0).\
        loc[networkA.snapshots[timestep]].to_frame())
    loading_switches.columns = ['switch']
    loading_noswitches = abs(
        networkB.lines_t.p0.mul(networkB.snapshot_weightings, axis=0).\
        loc[networkB.snapshots[timestep]].to_frame())
    loading_noswitches.columns = ['noswitch']
    diff_network = loading_switches.join(loading_noswitches)
    # Lines that only exist in networkA fall back to their own loading,
    # so their difference below becomes zero.
    diff_network['noswitch'] = diff_network['noswitch'].fillna(
        diff_network['switch'])
    diff_network[networkA.snapshots[timestep]] \
        = diff_network['switch'] - diff_network['noswitch']

    # get switches: buses present only in networkA are drawn as small dots
    new_buses = pd.Series(index=networkA.buses.index.values)
    new_buses.loc[set(networkA.buses.index.values) -
                  set(networkB.buses.index.values)] = 0.1
    new_buses = new_buses.fillna(0)

    # plot network with difference in loading and shifted colormap
    # (difference expressed in % of each line's nominal capacity s_nom)
    loading = (diff_network.loc[:, networkA.snapshots[timestep]] /
               (networkA.lines.s_nom)) * 100
    # Center the colormap at 0% difference.
    midpoint = 1 - max(loading) / (max(loading) + abs(min(loading)))
    shifted_cmap = shiftedColorMap(
        plt.cm.jet, midpoint=midpoint, name='shifted')
    ll = networkA.plot(
        line_colors=loading,
        line_cmap=shifted_cmap,
        title="Line loading",
        bus_sizes=new_buses,
        bus_colors='blue',
        line_widths=0.55)

    cb = plt.colorbar(ll[1])
    cb.set_label('Difference in line loading in % of s_nom')
python
def get_page_content(id):
    """Return XHTML (storage-format) content of a page.

    Parameters:
    - id: id of a Confluence page.
    """
    # Ask the REST API to expand the storage body inline with the page.
    response = _api.rest("/" + str(id) + "?expand=body.storage")
    payload = _json.loads(response)
    return payload["body"]["storage"]["value"]
java
/**
 * Opens the file-browse dialog via the given locator and, if that succeeded,
 * uploads the file at the given path.
 *
 * @param el the element that opens the browse dialog
 * @param filePath absolute path of the file to upload
 * @return true only when both the browse and the upload succeeded
 */
public boolean upload(WebLocator el, String filePath) {
    boolean browsed = executor.browse(el);
    return browsed && executor.upload(filePath);
}
java
/**
 * Checks that the effective (operating) account is allowed to act on the operated
 * account: access is granted when both are the same account, or when the operating
 * account is the operated account's parent or an ancestor in its lineage. When a
 * resource is involved, it must additionally be accessed under its owner account.
 *
 * @param operatedAccount the account being acted upon (may be null)
 * @param resourceAccountSid owner account SID of the accessed resource, or null when
 *        no specific resource is involved
 * @param userIdentityContext source of the effective (operating) account
 * @return {@code AuthOutcome.OK} when access is permitted, {@code FAILED} otherwise
 */
private AuthOutcome secureLevelControl(Account operatedAccount, String resourceAccountSid, UserIdentityContext userIdentityContext) {
    Account operatingAccount = userIdentityContext.getEffectiveAccount();
    String operatingAccountSid = null;
    if (operatingAccount != null)
        operatingAccountSid = operatingAccount.getSid().toString();
    String operatedAccountSid = null;
    if (operatedAccount != null)
        operatedAccountSid = operatedAccount.getSid().toString();

    // in case we're dealing with resources, we first make sure that they are accessed under their owner account.
    if (resourceAccountSid != null)
        if (! resourceAccountSid.equals(operatedAccountSid))
            return AuthOutcome.FAILED;

    // check parent/ancestor relationship between operatingAccount and operatedAccount
    // NOTE(review): this dereference throws NPE when there is no effective account
    // (operatingAccountSid == null) -- presumably callers guarantee one; verify.
    if ( operatingAccountSid.equals(operatedAccountSid) )
        return AuthOutcome.OK;
    if ( operatedAccount.getParentSid() != null ) {
        // Direct parent is the cheap check; fall back to the full lineage lookup.
        if (operatedAccount.getParentSid().toString().equals(operatingAccountSid))
            return AuthOutcome.OK;
        else if (accountsDao.getAccountLineage(operatedAccount).contains(operatingAccountSid))
            return AuthOutcome.OK;
    }
    return AuthOutcome.FAILED;
}
python
def connect_to_queues(region=None, public=True):
    """Creates a client for working with Queues."""
    # Thin wrapper: the generic client factory does all the work.
    return _create_client(ep_name="queues", public=public, region=region)
python
def compute_jaccard_index(x_set, y_set):
    """Return the Jaccard similarity coefficient of 2 given sets.

    Args:
        x_set (set): first set.
        y_set (set): second set.

    Returns:
        float: Jaccard similarity coefficient.
    """
    # By convention an empty operand gives zero similarity (and avoids 0/0).
    if not x_set or not y_set:
        return 0.0
    common = x_set.intersection(y_set)
    combined = x_set.union(y_set)
    return len(common) / float(len(combined))
python
def list_versions(self, prefix='', delimiter='', key_marker='',
                  version_id_marker='', headers=None):
    """
    List version objects within a bucket.  This returns an
    instance of an VersionedBucketListResultSet that automatically
    handles all of the result paging, etc. from S3.  You just need
    to keep iterating until there are no more results.  Called
    with no arguments, this will return an iterator object across
    all keys within the bucket.

    :type prefix: string
    :param prefix: allows you to limit the listing to a particular
                    prefix.  For example, if you call the method with
                    prefix='/foo/' then the iterator will only cycle
                    through the keys that begin with the string '/foo/'.

    :type delimiter: string
    :param delimiter: can be used in conjunction with the prefix
                    to allow you to organize and browse your keys
                    hierarchically. See:
                    http://docs.amazonwebservices.com/AmazonS3/2006-03-01/
                    for more details.

    :type marker: string
    :param marker: The "marker" of where you are in the result set

    :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
    :return: an instance of a BucketListResultSet that handles paging, etc
    """
    # The result set lazily pages through S3 responses as it is iterated.
    result_set = VersionedBucketListResultSet(
        self, prefix, delimiter, key_marker, version_id_marker, headers)
    return result_set
python
def get_image(vm_):
    '''
    Return the image object to use
    '''
    # Force the configured image name down to ASCII (legacy salt-cloud codec).
    vm_image = config.get_cloud_config_value('image', vm_, __opts__).encode(
        'ascii', 'salt-cloud-force-ascii'
    )

    # Accept a match on either the provider image id or its display name.
    for key, value in six.iteritems(avail_images()):
        if vm_image and vm_image in (value['id'], value['name']):
            return value

    raise SaltCloudNotFound(
        'The specified image, \'{0}\', could not be found.'.format(vm_image)
    )
python
def purposes_for_layer(layer_geometry_key):
    """Get purposes of a layer geometry id.

    :param layer_geometry_key: The geometry id
    :type layer_geometry_key: str

    :returns: List of suitable layer purpose.
    :rtype: list
    """
    # A purpose is suitable when the geometry appears among its allowed ones.
    matching = [
        purpose['key'] for purpose in layer_purposes
        if layer_geometry_key in [
            geometry['key'] for geometry in purpose['allowed_geometries']]
    ]
    return sorted(matching)
java
/**
 * Ad-hoc harness: reads an image file (preferring its raw/CMYK type), optionally
 * converts it to a requested {@code BufferedImage} type, writes it as TIFF with a
 * selectable compression, then reads it back and displays it.
 * <p>
 * Usage: {@code [type [compression]] file} -- the numeric args are optional; the
 * {@code args.length > argIdx + 1} guards keep at least one argument for the file.
 */
public static void main(String[] args) throws IOException {
    int argIdx = 0;

    // TODO: Proper argument parsing: -t <type> -c <compression>
    int type = args.length > argIdx + 1 ? Integer.parseInt(args[argIdx++]) : -1;
    int compression = args.length > argIdx + 1 ? Integer.parseInt(args[argIdx++]) : 0;

    if (args.length <= argIdx) {
        System.err.println("No file specified");
        System.exit(1);
    }

    File file = new File(args[argIdx++]);

    BufferedImage original;
//    BufferedImage original = ImageIO.read(file);
    ImageInputStream inputStream = ImageIO.createImageInputStream(file);
    try {
        Iterator<ImageReader> readers = ImageIO.getImageReaders(inputStream);

        if (!readers.hasNext()) {
            System.err.println("No reader for: " + file);
            System.exit(1);
        }

        ImageReader reader = readers.next();
        reader.setInput(inputStream);

        // Prefer the reader's raw image type; fall back to a CMYK-capable type if none.
        ImageReadParam param = reader.getDefaultReadParam();
        param.setDestinationType(reader.getRawImageType(0));

        if (param.getDestinationType() == null) {
            Iterator<ImageTypeSpecifier> types = reader.getImageTypes(0);

            while (types.hasNext()) {
                ImageTypeSpecifier typeSpecifier = types.next();
                if (typeSpecifier.getColorModel().getColorSpace().getType() == ColorSpace.TYPE_CMYK) {
                    param.setDestinationType(typeSpecifier);
                }
            }
        }

        System.err.println("param.getDestinationType(): " + param.getDestinationType());

        original = reader.read(0, param);
    }
    finally {
        inputStream.close();
    }

    System.err.println("original: " + original);

//    BufferedImage image = original;
//    BufferedImage image = new BufferedImage(original.getWidth(), original.getHeight(), BufferedImage.TYPE_INT_ARGB);
//    BufferedImage image = new BufferedImage(original.getWidth(), original.getHeight(), BufferedImage.TYPE_INT_RGB);
//    BufferedImage image = new BufferedImage(original.getWidth(), original.getHeight(), BufferedImage.TYPE_4BYTE_ABGR);
//    BufferedImage image = new BufferedImage(original.getWidth(), original.getHeight(), BufferedImage.TYPE_INT_BGR);
//    BufferedImage image = new BufferedImage(original.getWidth(), original.getHeight(), BufferedImage.TYPE_3BYTE_BGR);

    // Convert to the requested type (indexed colour via ImageUtil; anything else by redraw).
    BufferedImage image;
    if (type <= 0 || type == original.getType()) {
        image = original;
    }
    else if (type == BufferedImage.TYPE_BYTE_INDEXED) {
//        image = ImageUtil.createIndexed(original, 256, null, ImageUtil.COLOR_SELECTION_QUALITY | ImageUtil.DITHER_DIFFUSION_ALTSCANS);
        image = ImageUtil.createIndexed(original, 256, null, ImageUtil.COLOR_SELECTION_FAST | ImageUtil.DITHER_DIFFUSION_ALTSCANS);
    }
    else {
        image = new BufferedImage(original.getWidth(), original.getHeight(), type);
        Graphics2D graphics = image.createGraphics();
        try {
            graphics.drawImage(original, 0, 0, null);
        }
        finally {
            graphics.dispose();
        }
    }

    // Let the (possibly large) source image be collected before writing.
    original = null;

    File output = File.createTempFile(file.getName().replace('.', '-'), ".tif");
//    output.deleteOnExit();

    System.err.println("output: " + output);
    TIFFImageWriter writer = new TIFFImageWriter(null);
//    ImageWriter writer = ImageIO.getImageWritersByFormatName("PNG").next();
//    ImageWriter writer = ImageIO.getImageWritersByFormatName("BMP").next();

    ImageOutputStream stream = ImageIO.createImageOutputStream(output);
    try {
        writer.setOutput(stream);

        // Compression is picked by index into the writer's supported compression types.
        ImageWriteParam param = writer.getDefaultWriteParam();
        param.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
//        param.setCompressionType("None");
//        param.setCompressionType("PackBits");
//        param.setCompressionType("ZLib");
        param.setCompressionType(param.getCompressionTypes()[compression]);
//        if (compression == 2) {
//            param.setCompressionQuality(0);
//        }
        System.err.println("compression: " + param.getLocalizedCompressionTypeName());

        long start = System.currentTimeMillis();
        writer.write(null, new IIOImage(image, null, null), param);
        System.err.println("Write time: " + (System.currentTimeMillis() - start) + " ms");
    }
    finally {
        stream.close();
    }

    System.err.println("output.length: " + output.length());

//    ImageOutputStream stream = ImageIO.createImageOutputStream(output);
//    try {
//        writer.setOutput(stream);
//        writer.prepareWriteSequence(null);
//        for(int i = 0; i < images.size(); i ++){
//            writer.writeToSequence(new IIOImage(images.get(i), null, null), null);
//        }
//        writer.endWriteSequence();
//    }
//    finally {
//        stream.close();
//    }
//    writer.dispose();

    image = null;

    // Round-trip check: read the freshly written TIFF back and display it.
    BufferedImage read = ImageIO.read(output);
    System.err.println("read: " + read);

    TIFFImageReader.showIt(read, output.getName());
}
java
/**
 * Registers a log handler under the given id; silently ignores incomplete
 * registrations (null id or null handler).
 *
 * @param id key under which the handler is stored
 * @param ref the handler to register
 */
public void setLogHandler(String id, LogHandler ref) {
    if (id == null || ref == null) {
        return; // both key and handler are required
    }
    logHandlerServices.put(id, ref);
}
python
def goes_requires(self, regs):
    """ Returns whether any of the goes_to block requires any of the given
    registers.
    """
    # Special case: an unconditional 'call' at the end of this block.
    # The called block may consume the registers before any successor runs.
    if len(self) and self.mem[-1].inst == 'call' and self.mem[-1].condition_flag is None:
        for block in self.calls:
            if block.is_used(regs, 0):
                return True

            # If the called block destroys every register in `regs`,
            # no later block can observe them.
            d = block.destroys()
            if not len([x for x in regs if x not in d]):
                return False  # If all registers are destroyed then they're not used

    # Otherwise check each direct successor block.
    for block in self.goes_to:
        if block.is_used(regs, 0):
            return True

    return False
java
/**
 * Parses a semicolon-separated list of {@code name=value} pairs into a map.
 * <p>
 * Names are lower-cased; pairs that do not contain exactly one {@code '='}
 * are skipped silently.
 *
 * @param args raw argument string such as {@code "a=1;b=2"}; may be null
 * @return map of lower-cased names to values, never null
 */
public static HashMap<String,String> parse(String args){
    HashMap<String,String> map = new HashMap<>();
    if (args != null){
        for (String nameValuePair : args.split(";")) {
            String[] nameValue = nameValuePair.split("=");
            if (nameValue.length == 2){
                // Locale.ROOT keeps key normalisation independent of the JVM's
                // default locale (e.g. the Turkish dotless-i problem).
                map.put(nameValue[0].toLowerCase(java.util.Locale.ROOT), nameValue[1]);
            }
        }
    }
    return map;
}
java
/**
 * Finds the first item in the underlying reference collection that the given
 * filter accepts.
 *
 * @param filter the predicate to match items against
 * @return the first matching item, or null when nothing matches
 * @throws MessageStoreException propagated from the underlying store
 */
public final AbstractItem findFirstMatching(Filter filter) throws MessageStoreException {
    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.entry(this, tc, "findFirstMatching", filter);

    // The reference collection performs the actual search.
    AbstractItem match = _references().findFirstMatching(filter);

    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
        SibTr.exit(this, tc, "findFirstMatching", match);
    return match;
}
java
/**
 * Returns the value mapped by {@code name} if it is a {@link JSONArray},
 * otherwise null.
 *
 * @param name the key to look up
 * @return the array value, or null when absent or of a different type
 */
public JSONArray optJSONArray(String name) {
    Object value = opt(name);
    if (value instanceof JSONArray) {
        return (JSONArray) value;
    }
    return null;
}
python
def cluster_over_time(stat, time, window, argmax=numpy.argmax):
    """Cluster generalized transient events over time via maximum stat over a
    symmetric sliding window

    Parameters
    ----------
    stat: numpy.ndarray
        vector of ranking values to maximize
    time: numpy.ndarray
        time to use for clustering
    window: float
        length to cluster over
    argmax: function
        the function used to calculate the maximum value

    Returns
    -------
    cindex: numpy.ndarray
        The set of indices (into the original, unsorted inputs) corresponding
        to the surviving coincidences.
    """
    logging.info('Clustering events over %s s window', window)

    # Work on time-ordered views; survivors are mapped back through
    # ``time_sorting`` at the end.  (The dead ``indices = []`` assignment of
    # the original has been dropped; it was immediately overwritten.)
    time_sorting = time.argsort()
    stat = stat[time_sorting]
    time = time[time_sorting]

    # For each event, the half-open index range of events within +/- window.
    left = numpy.searchsorted(time, time - window)
    right = numpy.searchsorted(time, time + window)

    indices = numpy.zeros(len(left), dtype=numpy.uint32)

    # i is the index we are inspecting, j is the next one to save
    i = 0
    j = 0
    while i < len(left):
        l = left[i]
        r = right[i]

        # If there are no other points to compare it is obviously the max
        if (r - l) == 1:
            indices[j] = i
            j += 1
            i += 1
            continue

        # Find the location of the maximum within the time interval around i
        max_loc = argmax(stat[l:r]) + l

        # If this point is the max, we can skip to the right boundary
        if max_loc == i:
            indices[j] = i
            i = r
            j += 1

        # If the max is later than i, we can skip to it
        elif max_loc > i:
            i = max_loc

        elif max_loc < i:
            i += 1

    indices = indices[:j]

    logging.info('%d triggers remaining', len(indices))

    return time_sorting[indices]
java
/**
 * Builds and starts the configured server: creates the Netty event loops,
 * opens the HTTP and/or HTTPS listener channels, and returns the running
 * {@code MuServer}.  On any startup failure the partially created channels
 * and event loops are torn down before the exception is rethrown.
 *
 * @return the started server
 * @throws IllegalArgumentException when neither an HTTP nor an HTTPS port was configured
 * @throws MuException wrapping any failure during startup
 */
public MuServer start() {
    if (httpPort < 0 && httpsPort < 0) {
        throw new IllegalArgumentException("No ports were configured. Please call MuServerBuilder.withHttpPort(int) or MuServerBuilder.withHttpsPort(int)");
    }

    ServerSettings settings = new ServerSettings(minimumGzipSize, maxHeadersSize, maxUrlSize, gzipEnabled, mimeTypesToGzip);
    NettyHandlerAdapter nettyHandlerAdapter = new NettyHandlerAdapter(handlers);

    // Standard Netty threading: one acceptor loop, default-sized worker pool.
    NioEventLoopGroup bossGroup = new NioEventLoopGroup(1);
    NioEventLoopGroup workerGroup = new NioEventLoopGroup();
    List<Channel> channels = new ArrayList<>();

    // Shared teardown: close every open channel, then stop both loops.
    // Errors during shutdown are logged and deliberately swallowed.
    Runnable shutdown = () -> {
        try {
            for (Channel channel : channels) {
                channel.close().sync();
            }
            bossGroup.shutdownGracefully(0, 0, TimeUnit.MILLISECONDS).sync();
            workerGroup.shutdownGracefully(0, 0, TimeUnit.MILLISECONDS).sync();
        } catch (Exception e) {
            log.info("Error while shutting down. Will ignore. Error was: " + e.getMessage());
        }
    };

    try {
        GlobalTrafficShapingHandler trafficShapingHandler = new GlobalTrafficShapingHandler(workerGroup, 0, 0, 1000);
        MuStatsImpl stats = new MuStatsImpl(trafficShapingHandler.trafficCounter());
        AtomicReference<MuServer> serverRef = new AtomicReference<>();
        SslContextProvider sslContextProvider = null;

        // Plain HTTP channel (only when an HTTP port was configured).
        Channel httpChannel = httpPort < 0 ? null : createChannel(bossGroup, workerGroup, nettyHandlerAdapter, host, httpPort, null, trafficShapingHandler, stats, serverRef, settings, false);
        Channel httpsChannel;
        boolean http2Enabled = Toggles.http2;
        if (httpsPort < 0) {
            httpsChannel = null;
        } else {
            // Fall back to a self-signed localhost certificate when no SSL context was supplied.
            SSLContextBuilder toUse = this.sslContextBuilder != null ? this.sslContextBuilder : SSLContextBuilder.unsignedLocalhostCertBuilder();
            SslContext nettySslContext = toUse.toNettySslContext(http2Enabled);
            log.debug("SSL Context is " + nettySslContext);
            sslContextProvider = new SslContextProvider(nettySslContext);
            httpsChannel = createChannel(bossGroup, workerGroup, nettyHandlerAdapter, host, httpsPort, sslContextProvider, trafficShapingHandler, stats, serverRef, settings, http2Enabled);
        }

        URI uri = null;
        if (httpChannel != null) {
            channels.add(httpChannel);
            uri = getUriFromChannel(httpChannel, "http", host);
        }
        URI httpsUri = null;
        if (httpsChannel != null) {
            channels.add(httpsChannel);
            httpsUri = getUriFromChannel(httpsChannel, "https", host);
        }

        // At least one channel exists here (ports were validated above).
        InetSocketAddress serverAddress = (InetSocketAddress) channels.get(0).localAddress();
        MuServer server = new MuServerImpl(uri, httpsUri, shutdown, stats, serverAddress, sslContextProvider, http2Enabled);
        // Publish the server instance to the handlers that captured the reference.
        serverRef.set(server);
        if (addShutdownHook) {
            Runtime.getRuntime().addShutdownHook(new Thread(server::stop));
        }
        return server;
    } catch (Exception ex) {
        // Best-effort cleanup of whatever was created before the failure.
        shutdown.run();
        throw new MuException("Error while starting server", ex);
    }
}
python
def get_vcl_html(self, service_id, version_number, name):
    """Get the uploaded VCL for a particular service and version with HTML syntax highlighting."""
    path = "/service/%s/version/%d/vcl/%s/content" % (
        service_id, version_number, name)
    # The API returns a JSON object; the highlighted VCL lives under "content".
    content = self._fetch(path)
    return content.get("content", None)
python
def tolist(self):
    """
    Return the array as a list of rows.

    Each row is a `dict` of values. Facilitates inserting data into a
    database.

    .. versionadded:: 0.3.1

    Returns
    -------
    quotes : list
        A list in which each entry is a dictionary representing
        a single options quote.
    """
    # One dict per index entry, built from that row of the underlying frame.
    rows = []
    for key in self.data.index:
        rows.append(_todict(key, self.data.loc[key, :]))
    return rows
python
def get_android_id(self) -> str:
    '''Show Android ID.'''
    # Query the secure settings store on the targeted device.
    args = ('-s', self.device_sn, 'shell',
            'settings', 'get', 'secure', 'android_id')
    result = self._execute(*args)
    return result[0].strip()
python
def setTopRight(self, loc):
    """ Move this region so its top right corner is on ``loc`` """
    # How far must the current top-right travel to land on ``loc``?
    delta = self.getTopRight().getOffset(loc)
    # Shifting the top-left by the same delta moves the whole region rigidly.
    return self.setLocation(self.getTopLeft().offset(delta))
python
def _ParseTimeRange(self, timerange):
    """Parses a timerange argument and always returns non-None timerange."""
    from_time, to_time = (None, None) if timerange is None else timerange
    if not from_time:
        # Default lower bound: the epoch.
        from_time = rdfvalue.RDFDatetime().FromSecondsSinceEpoch(0)
    if not to_time:
        # Default upper bound: effectively "forever".
        to_time = rdfvalue.RDFDatetime().FromSecondsSinceEpoch(sys.maxsize)
    return (from_time, to_time)
java
/**
 * Builds the ligand array for {@code atom}: one ligand per connected neighbour,
 * excluding the given atom (assumed to be one of the neighbours).
 *
 * @param atom the central atom
 * @param container the molecule holding the atoms
 * @param exclude the neighbour to leave out (e.g. the atom we arrived from)
 * @return ligands for all other neighbours
 */
private static ILigand[] getLigands(IAtom atom, IAtomContainer container, IAtom exclude) {
    List<IAtom> neighbors = container.getConnectedAtomsList(atom);

    ILigand[] ligands = new ILigand[neighbors.size() - 1];

    int next = 0;
    for (IAtom neighbor : neighbors) {
        if (neighbor.equals(exclude))
            continue;
        ligands[next++] = new Ligand(container, new VisitedAtoms(), atom, neighbor);
    }

    return ligands;
}
java
public void deregisterConsumerSetMonitor(ConnectionImpl connection, ConsumerSetChangeCallback callback) throws SINotPossibleInCurrentConfigurationException { if (tc.isEntryEnabled()) SibTr.entry( tc, "deregisterConsumerSetMonitor", new Object[] { connection, callback}); // Call the registrar to do the work _consumerMonitorRegistrar.deregisterMonitor(connection, callback); if (tc.isEntryEnabled()) SibTr.exit(tc, "deregisterConsumerSetMonitor"); }
java
/**
 * Validates every element of the given collection against the expected type.
 *
 * @param valueColl the values to validate
 * @param type the required type
 */
public static <T,V extends Object> void type(Collection<V> valueColl, Class<T> type) {
    for (V value : valueColl) {
        type(value, type);
    }
}

//------- Numerical checks ----------------------------------------------------------------------------

//------- Integer --------------------------------

/**
 * Checks if the given integer is positive.
 * @param value The integer value to validate.
 * @throws ParameterException if the given integer value is <code>null</code> or smaller/equal to 0.
 */
public static void positive(Integer value) {
    // Validation can be disabled globally; then every check is a no-op.
    if (!validation)
        return;
    notNull(value);
    if (value <= 0)
        throw new ParameterException(ErrorCode.NOTPOSITIVE);
}
python
def remote_profile(script, argv, profiler_factory,
                   interval, spawn, signum,
                   pickle_protocol, endpoint, verbose):
    """Launch a server to profile continuously.  The default endpoint is
    127.0.0.1:8912.
    """
    # ``script`` is a (filename, code object, globals dict) triple; run it as
    # if it were invoked directly with the given argv.
    filename, code, globals_ = script
    sys.argv[:] = [filename] + list(argv)
    # create listener.
    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # SO_REUSEADDR lets a restarted server rebind the endpoint immediately.
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(endpoint)
    listener.listen(1)
    # be verbose or quiet.
    if verbose:
        log = lambda x: click.echo(click.style('> ', fg='cyan') + x)
        bound_addr = listener.getsockname()
        log('Listening on {0}:{1} for profiling...'.format(*bound_addr))
    else:
        log = noop
    # start profiling server.  The profiler is rooted at this frame so the
    # profiled script's frames appear beneath it.
    frame = sys._getframe()
    profiler = profiler_factory(base_frame=frame, base_code=code)
    profiler_trigger = BackgroundProfiler(profiler, signum)
    profiler_trigger.prepare()
    server_args = (interval, log, pickle_protocol)
    server = SelectProfilingServer(listener, profiler_trigger, *server_args)
    # ``spawn`` runs the server concurrently (thread/greenlet, caller's choice).
    spawn(server.serve_forever)
    # exec the script.
    try:
        exec_(code, globals_)
    except KeyboardInterrupt:
        # Ctrl-C ends the profiled run cleanly rather than with a traceback.
        pass
python
def notify_thread_not_alive(self, thread_id, use_lock=True):
    """ if thread is not alive, cancel trace_dispatch processing """
    # No writer means the debugger front-end is gone; nothing to notify.
    if self.writer is None:
        return

    # ``use_lock=False`` lets callers that already hold the lock re-enter
    # without deadlocking (NULL is a no-op context manager).
    with self._lock_running_thread_ids if use_lock else NULL:
        if not self._enable_thread_notifications:
            return

        thread = self._running_thread_ids.pop(thread_id, None)
        if thread is None:
            # Unknown/already-removed thread: nothing to do.
            return

        # Only flag the kill once per thread.
        was_notified = thread.additional_info.pydev_notify_kill
        if not was_notified:
            thread.additional_info.pydev_notify_kill = True

    # NOTE(review): the command is sent after releasing the lock in this
    # reconstruction -- confirm against the original indentation.
    self.writer.add_command(self.cmd_factory.make_thread_killed_message(thread_id))
python
def _gen_te_xref(self, expr: Union[ShExJ.tripleExpr, ShExJ.tripleExprLabel]) -> None:
    """ Generate the triple expression map (te_id_map)

    :param expr: root triple expression
    """
    # Register this expression under its id (labels are bare references and
    # carry no definition, so they are skipped).
    if expr is not None and not isinstance_(expr, ShExJ.tripleExprLabel) and 'id' in expr and expr.id is not None:
        if expr.id in self.te_id_map:
            # Already visited -- stop here to avoid infinite recursion on cycles.
            return
        else:
            # NOTE(review): the membership test above uses the raw expr.id while
            # the key stored here is the resolved URI -- verify this asymmetry
            # is intentional.
            self.te_id_map[self._resolve_relative_uri(expr.id)] = expr
    # Recurse into compound expressions; triple constraints cross over into
    # the shape-expression map via _gen_schema_xref.
    if isinstance(expr, (ShExJ.OneOf, ShExJ.EachOf)):
        for expr2 in expr.expressions:
            self._gen_te_xref(expr2)
    elif isinstance(expr, ShExJ.TripleConstraint):
        if expr.valueExpr is not None:
            self._gen_schema_xref(expr.valueExpr)
java
/**
 * Adds the given characters to the set of initial index labels.
 *
 * @param additions characters to add as potential bucket labels
 * @return this index, for chaining
 */
public AlphabeticIndex<V> addLabels(UnicodeSet additions) {
    initialLabels.addAll(additions);
    // Invalidate the cached buckets; they are rebuilt lazily on next use.
    buckets = null;
    return this;
}
java
/**
 * Appends a template to this source: literal text segments are appended
 * verbatim while template parameters are routed back through {@code add}
 * with the supplied arguments.
 *
 * @param fmt the template string to parse
 * @param args values substituted for the template's parameters
 * @return this builder, for chaining
 */
public SourceBuilder add(String fmt, Object... args) {
    TemplateApplier.withParams(args).onText(source::append).onParam(this::add).parse(fmt);
    return this;
}
python
def add(self, name='', type='', agent='', scanner='', location='', language='en', *args, **kwargs):
    """ Simplified add for the most common options.

        Parameters:
            name (str): Name of the library
            agent (str): Example com.plexapp.agents.imdb
            type (str): movie, show, # check me
            location (str): /path/to/files
            language (str): Two letter language fx en
            kwargs (dict): Advanced options should be passed as a dict. where the id is the key.

        **Photo Preferences**

            * **agent** (str): com.plexapp.agents.none
            * **enableAutoPhotoTags** (bool): Tag photos. Default value false.
            * **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
            * **includeInGlobal** (bool): Include in dashboard. Default value true.
            * **scanner** (str): Plex Photo Scanner

        **Movie Preferences**

            * **agent** (str): com.plexapp.agents.none, com.plexapp.agents.imdb, com.plexapp.agents.themoviedb
            * **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
            * **enableCinemaTrailers** (bool): Enable Cinema Trailers. Default value true.
            * **includeInGlobal** (bool): Include in dashboard. Default value true.
            * **scanner** (str): Plex Movie Scanner, Plex Video Files Scanner

        **IMDB Movie Options** (com.plexapp.agents.imdb)

            * **title** (bool): Localized titles. Default value false.
            * **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
            * **only_trailers** (bool): Skip extras which aren't trailers. Default value false.
            * **redband** (bool): Use red band (restricted audiences) trailers when available. Default value false.
            * **native_subs** (bool): Include extras with subtitles in Library language. Default value false.
            * **cast_list** (int): Cast List Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
            * **ratings** (int): Ratings Source, Default value 0 Possible options:
              0:Rotten Tomatoes, 1:IMDb, 2:The Movie Database.
            * **summary** (int): Plot Summary Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
            * **country** (int): Default value 46 Possible options 0:Argentina, 1:Australia, 2:Austria,
              3:Belgium, 4:Belize, 5:Bolivia, 6:Brazil, 7:Canada, 8:Chile, 9:Colombia, 10:Costa Rica,
              11:Czech Republic, 12:Denmark, 13:Dominican Republic, 14:Ecuador, 15:El Salvador,
              16:France, 17:Germany, 18:Guatemala, 19:Honduras, 20:Hong Kong SAR, 21:Ireland,
              22:Italy, 23:Jamaica, 24:Korea, 25:Liechtenstein, 26:Luxembourg, 27:Mexico,
              28:Netherlands, 29:New Zealand, 30:Nicaragua, 31:Panama, 32:Paraguay, 33:Peru,
              34:Portugal, 35:Peoples Republic of China, 36:Puerto Rico, 37:Russia, 38:Singapore,
              39:South Africa, 40:Spain, 41:Sweden, 42:Switzerland, 43:Taiwan, 44:Trinidad,
              45:United Kingdom, 46:United States, 47:Uruguay, 48:Venezuela.
            * **collections** (bool): Use collection info from The Movie Database. Default value false.
            * **localart** (bool): Prefer artwork based on library language. Default value true.
            * **adult** (bool): Include adult content. Default value false.
            * **usage** (bool): Send anonymous usage data to Plex. Default value true.

        **TheMovieDB Movie Options** (com.plexapp.agents.themoviedb)

            * **collections** (bool): Use collection info from The Movie Database. Default value false.
            * **localart** (bool): Prefer artwork based on library language. Default value true.
            * **adult** (bool): Include adult content. Default value false.
            * **country** (int): Country (used for release date and content rating). Default value 47
              Possible options 0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize,
              6:Bolivia, 7:Brazil, 8:Canada, 9:Chile, 10:Colombia, 11:Costa Rica, 12:Czech Republic,
              13:Denmark, 14:Dominican Republic, 15:Ecuador, 16:El Salvador, 17:France, 18:Germany,
              19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland, 23:Italy, 24:Jamaica,
              25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands, 30:New Zealand,
              31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
              36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore,
              40:South Africa, 41:Spain, 42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad,
              46:United Kingdom, 47:United States, 48:Uruguay, 49:Venezuela.

        **Show Preferences**

            * **agent** (str): com.plexapp.agents.none, com.plexapp.agents.thetvdb, com.plexapp.agents.themoviedb
            * **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
            * **episodeSort** (int): Episode order. Default -1 Possible options: 0:Oldest first, 1:Newest first.
            * **flattenSeasons** (int): Seasons. Default value 0 Possible options: 0:Show,1:Hide.
            * **includeInGlobal** (bool): Include in dashboard. Default value true.
            * **scanner** (str): Plex Series Scanner

        **TheTVDB Show Options** (com.plexapp.agents.thetvdb)

            * **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
            * **native_subs** (bool): Include extras with subtitles in Library language. Default value false.

        **TheMovieDB Show Options** (com.plexapp.agents.themoviedb)

            * **collections** (bool): Use collection info from The Movie Database. Default value false.
            * **localart** (bool): Prefer artwork based on library language. Default value true.
            * **adult** (bool): Include adult content. Default value false.
            * **country** (int): Country (used for release date and content rating). Default value 47
              options 0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize, 6:Bolivia,
              7:Brazil, 8:Canada, 9:Chile, 10:Colombia, 11:Costa Rica, 12:Czech Republic,
              13:Denmark, 14:Dominican Republic, 15:Ecuador, 16:El Salvador, 17:France, 18:Germany,
              19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland, 23:Italy, 24:Jamaica,
              25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands, 30:New Zealand,
              31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
              36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore,
              40:South Africa, 41:Spain, 42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad,
              46:United Kingdom, 47:United States, 48:Uruguay, 49:Venezuela.

        **Other Video Preferences**

            * **agent** (str): com.plexapp.agents.none, com.plexapp.agents.imdb, com.plexapp.agents.themoviedb
            * **enableBIFGeneration** (bool): Enable video preview thumbnails. Default value true.
            * **enableCinemaTrailers** (bool): Enable Cinema Trailers. Default value true.
            * **includeInGlobal** (bool): Include in dashboard. Default value true.
            * **scanner** (str): Plex Movie Scanner, Plex Video Files Scanner

        **IMDB Other Video Options** (com.plexapp.agents.imdb)

            * **title** (bool): Localized titles. Default value false.
            * **extras** (bool): Find trailers and extras automatically (Plex Pass required). Default value true.
            * **only_trailers** (bool): Skip extras which aren't trailers. Default value false.
            * **redband** (bool): Use red band (restricted audiences) trailers when available. Default value false.
            * **native_subs** (bool): Include extras with subtitles in Library language. Default value false.
            * **cast_list** (int): Cast List Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
            * **ratings** (int): Ratings Source Default value 0 Possible options:
              0:Rotten Tomatoes,1:IMDb,2:The Movie Database.
            * **summary** (int): Plot Summary Source: Default value 1 Possible options: 0:IMDb,1:The Movie Database.
            * **country** (int): Country: Default value 46 Possible options: 0:Argentina, 1:Australia,
              2:Austria, 3:Belgium, 4:Belize, 5:Bolivia, 6:Brazil, 7:Canada, 8:Chile, 9:Colombia,
              10:Costa Rica, 11:Czech Republic, 12:Denmark, 13:Dominican Republic, 14:Ecuador,
              15:El Salvador, 16:France, 17:Germany, 18:Guatemala, 19:Honduras, 20:Hong Kong SAR,
              21:Ireland, 22:Italy, 23:Jamaica, 24:Korea, 25:Liechtenstein, 26:Luxembourg, 27:Mexico,
              28:Netherlands, 29:New Zealand, 30:Nicaragua, 31:Panama, 32:Paraguay, 33:Peru,
              34:Portugal, 35:Peoples Republic of China, 36:Puerto Rico, 37:Russia, 38:Singapore,
              39:South Africa, 40:Spain, 41:Sweden, 42:Switzerland, 43:Taiwan, 44:Trinidad,
              45:United Kingdom, 46:United States, 47:Uruguay, 48:Venezuela.
            * **collections** (bool): Use collection info from The Movie Database. Default value false.
            * **localart** (bool): Prefer artwork based on library language. Default value true.
            * **adult** (bool): Include adult content. Default value false.
            * **usage** (bool): Send anonymous usage data to Plex. Default value true.

        **TheMovieDB Other Video Options** (com.plexapp.agents.themoviedb)

            * **collections** (bool): Use collection info from The Movie Database. Default value false.
            * **localart** (bool): Prefer artwork based on library language. Default value true.
            * **adult** (bool): Include adult content. Default value false.
            * **country** (int): Country (used for release date and content rating). Default value 47
              Possible options 0:, 1:Argentina, 2:Australia, 3:Austria, 4:Belgium, 5:Belize,
              6:Bolivia, 7:Brazil, 8:Canada, 9:Chile, 10:Colombia, 11:Costa Rica, 12:Czech Republic,
              13:Denmark, 14:Dominican Republic, 15:Ecuador, 16:El Salvador, 17:France, 18:Germany,
              19:Guatemala, 20:Honduras, 21:Hong Kong SAR, 22:Ireland, 23:Italy, 24:Jamaica,
              25:Korea, 26:Liechtenstein, 27:Luxembourg, 28:Mexico, 29:Netherlands, 30:New Zealand,
              31:Nicaragua, 32:Panama, 33:Paraguay, 34:Peru, 35:Portugal,
              36:Peoples Republic of China, 37:Puerto Rico, 38:Russia, 39:Singapore,
              40:South Africa, 41:Spain, 42:Sweden, 43:Switzerland, 44:Taiwan, 45:Trinidad,
              46:United Kingdom, 47:United States, 48:Uruguay, 49:Venezuela.
    """
    part = '/library/sections?name=%s&type=%s&agent=%s&scanner=%s&language=%s&location=%s' % (
        quote_plus(name), type, agent, quote_plus(scanner), language, quote_plus(location))  # noqa E126
    if kwargs:
        # BUGFIX: the advanced options must be joined to the query string with
        # a '&' separator; previously they were concatenated directly onto the
        # location value, producing a malformed URL.
        part += '&' + urlencode(kwargs)
    return self._server.query(part, method=self._server._session.post)
java
@SuppressWarnings({ "rawtypes", "unchecked" }) // The alternative would be to change the TopologolyLayerCallback.send() to take List<? extends Command> private void execute(final ListCommand command) { getMetaData(command); final Queue<ListCommand> log = metaData.getUnapprovedCommands(); if (log.isEmpty()) { updateVersion(command); executeCommand(command); } else if (log.peek().getListVersionChange().equals(command.getListVersionChange())) { updateVersion(command); log.remove(); } else { // repair indices List<? extends ListCommand> repairedCommands = indexRepairer.repairCommands( metaData.getUnapprovedCommands(), command); // repair versions if local commands are left after repairing indices. versionRepairer.repairLocalCommandsVersion(metaData.getUnapprovedCommands(), command); repairedCommands = versionRepairer.repairRemoteCommandVersion(repairedCommands, metaData.getUnapprovedCommandsAsList()); // re-send repaired local changes topologyLayerCallback.sendCommands((List) metaData.getUnapprovedCommandsAsList()); // execute repaired commands for (final ListCommand repaired : repairedCommands) { executeCommand(repaired); } updateVersion(command); } }
java
/**
 * Runs the parser-generator mojo: scans for grammar files, processes any that are
 * out of date, and registers the configured compile source roots with the project.
 *
 * @throws MojoExecutionException if grammar processing fails
 * @throws MojoFailureException if the build should be failed
 */
public void execute () throws MojoExecutionException, MojoFailureException
{
  final GrammarInfo [] grammars = scanForGrammars ();

  // A null result means the source directory itself does not exist.
  if (grammars == null)
  {
    getLog ().info ("Skipping non-existing source directory: " + getSourceDirectory ());
    return;
  }

  if (grammars.length <= 0)
  {
    getLog ().info ("Skipping - all parsers are up to date");
  }
  else
  {
    determineNonGeneratedSourceRoots ();

    // Warn when encodings are left unconfigured so builds stay reproducible.
    if (StringUtils.isEmpty (grammarEncoding))
    {
      getLog ().warn ("File encoding for grammars has not been configured, using platform default encoding, i.e. build is platform dependent!");
    }
    if (StringUtils.isEmpty (outputEncoding))
    {
      getLog ().warn ("File encoding for output has not been configured, defaulting to UTF-8!");
    }

    for (final GrammarInfo grammar : grammars)
    {
      processGrammar (grammar);
    }
    getLog ().info ("Processed " + grammars.length + " grammar" + (grammars.length != 1 ? "s" : ""));
  }

  // LinkedHashSet removes duplicates while preserving the configured order.
  final Collection <File> sourceRoots = new LinkedHashSet <> (Arrays.asList (getCompileSourceRoots ()));
  for (final File sourceRoot : sourceRoots)
  {
    addSourceRoot (sourceRoot);
  }
}
python
def _is_related(parent_entry, child_entry): '''This function checks if a child entry is related to the parent entry. This is done by comparing the reference and sequence numbers.''' if parent_entry.header.mft_record == child_entry.header.base_record_ref and \ parent_entry.header.seq_number == child_entry.header.base_record_seq: return True else: return False
java
/**
 * Adds the given polygon overlay to the map, first applying the supplied
 * options (colors, stroke, title/subtitle and a default info window) when
 * options are provided.
 *
 * @param map the map the polygon is added to
 * @param polygon the polygon overlay to configure and add
 * @param options optional appearance/metadata settings; may be null
 * @return the same polygon instance, for chaining
 */
public static org.osmdroid.views.overlay.Polygon addPolygonToMap(
        MapView map, org.osmdroid.views.overlay.Polygon polygon,
        PolygonOptions options) {
    if (options != null) {
        // Appearance first, then textual metadata, then the info window.
        polygon.setFillColor(options.getFillColor());
        polygon.setStrokeColor(options.getStrokeColor());
        polygon.setStrokeWidth(options.getStrokeWidth());
        polygon.setTitle(options.getTitle());
        polygon.setSubDescription(options.getSubtitle());
        polygon.setInfoWindow(new BasicInfoWindow(R.layout.bonuspack_bubble, map));
    }
    map.getOverlayManager().add(polygon);
    return polygon;
}
python
def running(name, restart=False, path=None):
    '''
    .. versionchanged:: 2015.5.0
        The :mod:`lxc.started <salt.states.lxc.started>` state has been
        renamed to ``lxc.running``

    Ensure that a container is running

    .. note::

        This state does not enforce the existence of the named container, it
        just starts the container if it is not running. To ensure that the
        named container exists, use :mod:`lxc.present
        <salt.states.lxc.present>`.

    name
        The name of the container

    path
        path to the container parent
        default: /var/lib/lxc (system default)

        .. versionadded:: 2015.8.0

    restart : False
        Restart container if it is already running

    .. code-block:: yaml

        web01:
          lxc.running

        web02:
          lxc.running:
            - restart: True
    '''
    # Optimistic default: assume the container is already running.
    ret = {'name': name,
           'result': True,
           'comment': 'Container \'{0}\' is already running'.format(name),
           'changes': {}}

    # lxc.state returns None for a non-existent container.
    state = {'old': __salt__['lxc.state'](name, path=path)}
    if state['old'] is None:
        ret['result'] = False
        ret['comment'] = 'Container \'{0}\' does not exist'.format(name)
        return ret
    elif state['old'] == 'running' and not restart:
        # Already running and no restart requested: nothing to do.
        return ret
    elif state['old'] == 'stopped' and restart:
        # No need to restart since container is not running
        restart = False

    # Pick the (verb, past-participle) pair used in comments below.
    if restart:
        if state['old'] != 'stopped':
            action = ('restart', 'restarted')
        else:
            action = ('start', 'started')
    else:
        if state['old'] == 'frozen':
            action = ('unfreeze', 'unfrozen')
        else:
            action = ('start', 'started')

    # Dry-run mode: report what would happen without touching the container.
    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = ('Container \'{0}\' would be {1}'
                          .format(name, action[1]))
        return ret

    try:
        if state['old'] == 'frozen' and not restart:
            result = __salt__['lxc.unfreeze'](name, path=path)
        else:
            if restart:
                result = __salt__['lxc.restart'](name, path=path)
            else:
                result = __salt__['lxc.start'](name, path=path)
    except (CommandExecutionError, SaltInvocationError) as exc:
        ret['result'] = False
        ret['comment'] = exc.strerror
        # Re-query the state since the execution module call failed.
        state['new'] = __salt__['lxc.state'](name, path=path)
    else:
        state['new'] = result['state']['new']

    if state['new'] != 'running':
        ret['result'] = False
        ret['comment'] = ('Unable to {0} container \'{1}\''
                          .format(action[0], name))
    else:
        ret['comment'] = ('Container \'{0}\' was successfully {1}'
                          .format(name, action[1]))
        try:
            # lxc.restart reports a 'restarted' flag; other calls do not.
            ret['changes']['restarted'] = result['restarted']
        except KeyError:
            pass

    if state['old'] != state['new']:
        ret['changes']['state'] = state
    return ret
java
/**
 * Reports whether this instance is compatible, tracing entry and exit when
 * entry-level tracing is enabled.
 *
 * @return the cached {@code _compatible} flag
 */
public boolean compatible() {
    if (tc.isEntryEnabled())
        Tr.entry(tc, "compatible", this);
    if (tc.isEntryEnabled())
        // Boolean.valueOf avoids allocating a new Boolean; the Boolean(boolean)
        // constructor is deprecated.
        Tr.exit(tc, "compatible", Boolean.valueOf(_compatible));
    return _compatible;
}
python
def __deserialize_primitive(self, data, klass):
    """
    Deserializes string to primitive type.

    :param data: str.
    :param klass: class literal.

    :return: int, long, float, str, bool.
    """
    try:
        value = klass(data)
    except UnicodeEncodeError:
        # Non-ASCII data that the target class cannot encode: fall back to
        # the text type instead.
        return text(data)
    except TypeError:
        # Data is not convertible to the requested class; hand it back as-is.
        return data
    return value
java
/**
 * Builds an {@link Intent} for picking a contact, using the contacts
 * content URI appropriate for the device's API level.
 *
 * @param scope optional MIME type to restrict the pick (e.g. phones only);
 *        ignored when null or empty
 * @return the configured ACTION_PICK intent
 */
public static Intent pickContact(String scope) {
    final Intent intent;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ECLAIR) {
        // Eclair and newer expose the unified contacts provider.
        intent = new Intent(Intent.ACTION_PICK,
                Uri.parse("content://com.android.contacts/contacts"));
    } else {
        // Pre-Eclair devices only have the legacy People provider.
        intent = new Intent(Intent.ACTION_PICK, Contacts.People.CONTENT_URI);
    }
    if (!TextUtils.isEmpty(scope)) {
        intent.setType(scope);
    }
    return intent;
}
java
/**
 * Builds a {@link Replacer} that substitutes each dictionary key with its
 * paired value, matching keys literally via an atomic alternation pattern.
 *
 * @param pairs alternating search/replacement strings; when null or fewer
 *        than one full pair, a no-op replacer (matching only "$") is returned
 * @return a replacer backed by a table substitution over the given pairs
 */
public static Replacer makeTable(String... pairs) {
    // Degenerate input: return a replacer whose pattern effectively matches nothing.
    if(pairs == null || pairs.length < 2)
        return new Replacer(Pattern.compile("$"), new DummySubstitution(""));
    TableSubstitution tab = new TableSubstitution(pairs);
    StringBuilder sb = new StringBuilder(128);
    // "(?>" opens an atomic group so alternatives are tried without backtracking.
    sb.append("(?>");
    for(String s : tab.dictionary.keySet()) {
        // \Q...\E quotes each key so regex metacharacters are matched literally.
        sb.append("\\Q");
        sb.append(s);
        sb.append("\\E|");
    }
    // Each key appended a trailing '|'; overwrite the last one with the closing ')'.
    // Length 3 means only "(?>" was written (empty dictionary), so just close the group.
    if(sb.length() > 3)
        sb.setCharAt(sb.length() - 1, ')');
    else
        sb.append(')');
    return new Replacer(Pattern.compile(sb.toString()), tab);
}
python
def msg2subjective(msg, processor, subject, **config):
    """ Return a human-readable text representation of a dict-like
    fedmsg message from the subjective perspective of a user.

    For example, if the subject viewing the message is "oddshocks" and the
    message would normally translate into "oddshocks commented on ticket
    #174", it would instead translate into "you commented on ticket #174".

    Falls back to the processor's plain subtitle when no subjective
    rendering is produced.
    """
    subjective_text = processor.subjective(msg, subject, **config)
    # An empty/None subjective rendering falls through to the subtitle.
    return subjective_text or processor.subtitle(msg, **config)
java
/**
 * Resets this header handler: empties both value stores and zeroes the
 * item counter. Emits a debug trace entry when debug tracing is enabled.
 */
public void clear() {
    // Cheap global guard first, then the component-level debug check.
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(tc, "Clearing this header handler: " + this);
    }
    this.values.clear();
    this.genericValues.clear();
    this.num_items = 0;
}
python
def execute(self, args):
    """Executes the configured cmd passing args in one or more rounds xargs style.

    When the OS rejects the invocation because the argument list is too long
    (``errno.E2BIG``), the arguments are split in two and each half is
    executed recursively. Execution stops at the first non-zero result.

    :param list args: Extra arguments to pass to cmd.
    :return: the result of the underlying cmd call(s); the first non-zero
        result short-circuits the remaining arguments.
    :raises OSError: for any OS failure other than E2BIG.
    """
    all_args = list(args)
    try:
        return self._cmd(all_args)
    except OSError as e:
        if e.errno != errno.E2BIG:
            # Unrelated OS failure: bare `raise` preserves the original
            # traceback (unlike `raise e`).
            raise
        # Argument list too long: split and retry each half in order.
        args1, args2 = self._split_args(all_args)
        result = self.execute(args1)
        if result != 0:
            return result
        return self.execute(args2)
python
def index(self, pkt_num):
    """Return datagram index.

    Scans ``self.datagram`` for the first datagram whose index contains
    ``pkt_num`` and returns its position, or None when no datagram matches.
    """
    int_check(pkt_num)
    # next() with a default avoids the explicit loop-and-return-None pattern.
    return next(
        (position for position, datagram in enumerate(self.datagram)
         if pkt_num in datagram.index),
        None,
    )
java
/**
 * Joins the local environments of several contexts into one, as at a control-flow
 * merge point. Variables whose declarations differ between contexts are given a
 * fresh version number; variables marked deleted are dropped entirely.
 *
 * @param contexts the contexts to join; the first is used as the reference ("head")
 * @return a new LocalEnvironment over the combined locals
 */
private LocalEnvironment joinEnvironments(Context... contexts) {
    //
    Context head = contexts[0];
    GlobalEnvironment global = head.getEnvironment().getParent();
    HashSet<WyilFile.Decl.Variable> modified = new HashSet<>();
    HashSet<WyilFile.Decl.Variable> deleted = new HashSet<>();
    Map<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> headLocals = head.environment.locals;

    // Compute the modified and deleted sets
    for (int i = 1; i < contexts.length; ++i) {
        Context ithContext = contexts[i];
        Map<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> ithLocals = ithContext.environment.locals;

        // First check env against head
        for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : ithLocals.entrySet()) {
            WyilFile.Decl.Variable key = e.getKey();
            WyalFile.VariableDeclaration s1 = e.getValue();
            WyalFile.VariableDeclaration s2 = headLocals.get(key);
            // NOTE(review): s1 comes from entrySet() values and is presumably never
            // null here; a check of s2 == null (variable absent from the other map)
            // may have been intended — as written, an absent variable falls into
            // "modified" via !s1.equals(null). Confirm against the original intent.
            if (s1 == null) {
                deleted.add(key);
            } else if (!s1.equals(s2)) {
                modified.add(key);
            }
        }
        // Second, check head against env
        for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : headLocals.entrySet()) {
            WyilFile.Decl.Variable key = e.getKey();
            WyalFile.VariableDeclaration s1 = e.getValue();
            WyalFile.VariableDeclaration s2 = ithLocals.get(key);
            if (s1 == null) {
                deleted.add(key);
            } else if (!s1.equals(s2)) {
                modified.add(key);
            }
        }
    }
    // Finally, construct the combined local map
    IdentityHashMap<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> combinedLocals = new IdentityHashMap<>();
    for (Map.Entry<WyilFile.Decl.Variable, WyalFile.VariableDeclaration> e : headLocals.entrySet()) {
        WyilFile.Decl.Variable key = e.getKey();
        WyalFile.VariableDeclaration value = e.getValue();
        if (deleted.contains(key)) {
            // Ignore this entry. This must be checked before we look at
            // modified (since variable can be marked both).
            continue;
        } else if (modified.contains(key)) {
            // Update version number
            value = global.allocateVersion(key);
        }
        combinedLocals.put(key, value);
    }
    // Now, use the modified and deleted sets to build the new environment
    return new LocalEnvironment(global, combinedLocals);
}
python
def random_word(tokens, tokenizer):
    """
    Masking some random tokens for Language Model task with probabilities as in
    the original BERT paper.

    :param tokens: list of str, tokenized sentence. NOTE: mutated in place when
        a token is masked or replaced.
    :param tokenizer: Tokenizer, object used for tokenization (we need its
        vocab here).
    :return: (list of str, list of int), masked tokens and related labels for
        LM prediction (-1 marks positions the loss function should ignore).
    """
    output_label = []

    for i, token in enumerate(tokens):
        prob = random.random()
        # mask token with 15% probability
        if prob < 0.15:
            # Rescale so the sub-branches below split the 15% as 80/10/10,
            # matching the BERT paper.
            prob /= 0.15

            # 80% randomly change token to mask token
            if prob < 0.8:
                tokens[i] = "[MASK]"

            # 10% randomly change token to random token
            elif prob < 0.9:
                tokens[i] = random.choice(list(tokenizer.vocab.items()))[0]

            # -> rest 10% randomly keep current token

            # The label is always the ORIGINAL token id, which the model
            # must predict at this position.
            try:
                output_label.append(tokenizer.vocab[token])
            except KeyError:
                # For unknown words (should not occur with BPE vocab)
                output_label.append(tokenizer.vocab["[UNK]"])
                # Fixed typo in the warning message ("insetad" -> "instead").
                logger.warning("Cannot find token '{}' in vocab. Using [UNK] instead".format(token))
        else:
            # no masking token (will be ignored by loss function later)
            output_label.append(-1)

    return tokens, output_label