language
stringclasses 2
values | func_code_string
stringlengths 63
466k
|
---|---|
java | public static PreProcessor changePrefix(String from, String to) {
return mkPreProcessorWithMeta((prefix, data, options) -> {
logger.debug("changing prefix at '{}' from '{}' to '{}'", prefix, from, to);
return data.entrySet().stream()
.map(e -> {
if (!e.getKey().startsWith(prefix)) return e;
else {
String tail = e.getKey().substring(prefix.length())
.replaceFirst("^[\\.]?" + Pattern.quote(from), to)
.replaceFirst("^\\.", "");
String newKey = isEmptyStr(tail) ? prefix
: (prefix + "." + tail).replaceFirst("^\\.", "");
return entry(
newKey,
e.getValue()
);
}
}).collect(Collectors.toMap(
Map.Entry::getKey,
Map.Entry::getValue
));
}, new ExtensionMeta(PRE_PROCESSOR_CHANGE_PREFIX,
"changePrefix(from '" +from+ "' to '" +to+ "')",
Arrays.asList(from, to)));
} |
def decode_list(self, ids):
    """Transform a sequence of int ids into their string versions.

    Reserved ids (``0 <= id < self._num_reserved_ids``) map to their entry
    in ``RESERVED_TOKENS``; every other id is shifted down by the number of
    reserved ids before being stringified.

    Args:
        ids: list of integers to be converted.

    Returns:
        strs: list of human-readable strings.
    """
    shift = self._num_reserved_ids
    decoded = [
        RESERVED_TOKENS[int(id_)] if 0 <= id_ < shift else id_ - shift
        for id_ in ids
    ]
    return [str(d) for d in decoded]
def default_ms_subtable(subtable, name=None, tabdesc=None, dminfo=None):
    """
    Creates a default Measurement Set subtable. Any Table Description
    elements in tabdesc will overwrite the corresponding element in a default
    Measurement Set Table Description (columns, hypercolumns and keywords).

    if name is given, it will be treated as a path that the table should
    be created in. Set to subtable if None

    if subtable is "" or "MAIN" a standard MeasurementSet with subtables will
    be created.
    """
    # Fall back to the subtable name when no explicit path was supplied.
    if name is None:
        name = subtable
    # Default to empty dictionaries when descriptions were not provided.
    tabdesc = {} if tabdesc is None else tabdesc
    dminfo = {} if dminfo is None else dminfo
    # Wrap the low-level table object in the python Table wrapper.
    return table(_default_ms_subtable(subtable, name, tabdesc, dminfo),
                 _oper=3)
java | protected void setImage(int xsize, int ysize, ByteBuffer buf, Rectangle rect, int bpp) {
int bytespp = bpp / 8;
int bytespl = (int) Math.ceil(xsize * bpp / 8.0);
api.TessBaseAPISetImage(handle, buf, xsize, ysize, bytespp, bytespl);
if (rect != null && !rect.isEmpty()) {
api.TessBaseAPISetRectangle(handle, rect.x, rect.y, rect.width, rect.height);
}
} |
def scan(self, data, part):
    """Scan a string.

    Parameters
    ----------
    data : `str`
        String to scan.
    part : `bool`
        True if data is partial.

    Returns
    -------
    `generator` of (`str` or `markovchain.scanner.Scanner.END`)
        Token generator.
    """
    if not self.end_chars:
        # No sentence-end characters configured: every character is a token
        # and no END markers are emitted mid-stream.
        yield from data
        self.start = self.start or bool(data)
        self.end = False
    else:
        for char in data:
            if char in self.end_chars:
                # Swallow end characters that appear before any real token.
                if not self.start:
                    continue
                self.end = True
            else:
                # Normal character after end character(s): emit the sentence
                # terminator first, then the character itself.
                if self.end:
                    yield self.END
                    self.end = False
                self.start = True
                yield char
    # NOTE(review): indentation reconstructed — this finalisation is assumed
    # to apply to both branches above; confirm against upstream source.
    # When the data is complete, close the final (possibly unterminated)
    # sentence and reset the scanner state.
    if not part and self.start:
        if not self.end and self.default_end is not None:
            yield self.default_end
        yield self.END
        self.reset()
/**
 * Builds internal links for all off-mesh connections that start in the given
 * tile: each connection's start point is snapped onto the nearest walkable
 * polygon and the two polygons are linked in both directions.
 *
 * @param tile tile whose off-mesh connections are based; no-op when {@code null}
 */
void baseOffMeshLinks(MeshTile tile) {
    if (tile == null) {
        return;
    }
    long base = getPolyRefBase(tile);
    // Base off-mesh connection start points.
    for (int i = 0; i < tile.data.header.offMeshConCount; ++i) {
        OffMeshConnection con = tile.data.offMeshCons[i];
        Poly poly = tile.data.polys[con.poly];
        // Search extents: connection radius horizontally, walkable climb vertically.
        float[] ext = new float[] { con.rad, tile.data.header.walkableClimb, con.rad };
        // Find polygon to connect to.
        FindNearestPolyResult nearestPoly = findNearestPolyInTile(tile, con.pos, ext);
        long ref = nearestPoly.getNearestRef();
        if (ref == 0) {
            continue;
        }
        float[] p = con.pos; // First vertex
        float[] nearestPt = nearestPoly.getNearestPos();
        // findNearestPoly may return too optimistic results, further check
        // to make sure.
        if (sqr(nearestPt[0] - p[0]) + sqr(nearestPt[2] - p[2]) > sqr(con.rad)) {
            continue;
        }
        // Make sure the location is on current mesh.
        tile.data.verts[poly.verts[0] * 3] = nearestPt[0];
        tile.data.verts[poly.verts[0] * 3 + 1] = nearestPt[1];
        tile.data.verts[poly.verts[0] * 3 + 2] = nearestPt[2];
        // Link off-mesh connection to target poly.
        int idx = allocLink(tile);
        Link link = tile.links.get(idx);
        link.ref = ref;
        link.edge = 0;
        link.side = 0xff;
        link.bmin = link.bmax = 0;
        // Add to linked list (prepend to the polygon's link chain).
        link.next = poly.firstLink;
        poly.firstLink = idx;
        // Start end-point is always connect back to off-mesh connection.
        int tidx = allocLink(tile);
        int landPolyIdx = decodePolyIdPoly(ref);
        Poly landPoly = tile.data.polys[landPolyIdx];
        link = tile.links.get(tidx);
        link.ref = base | (con.poly);
        link.edge = 0xff;
        link.side = 0xff;
        link.bmin = link.bmax = 0;
        // Add to linked list.
        link.next = landPoly.firstLink;
        landPoly.firstLink = tidx;
    }
}
def get_certificate_info():
    """
    Checks the app certificate expiry status.

    Returns a dict with:
      - ``status``: ``NO_CONFIG`` when ``MIT_WS_CERTIFICATE`` is unset,
        otherwise ``UP`` if more than 30 days remain before expiry and
        ``DOWN`` otherwise.
      - ``app_cert_expires``: expiry timestamp (only when configured).
    """
    if hasattr(settings, 'MIT_WS_CERTIFICATE') and settings.MIT_WS_CERTIFICATE:
        mit_ws_certificate = settings.MIT_WS_CERTIFICATE
    else:
        return {"status": NO_CONFIG}
    app_cert = OpenSSL.crypto.load_certificate(
        OpenSSL.crypto.FILETYPE_PEM, (
            # Settings may carry the PEM with escaped newlines; unescape when
            # it arrives as a plain str.
            mit_ws_certificate if not isinstance(mit_ws_certificate, str)
            else mit_ws_certificate.encode().decode('unicode_escape').encode()
        )
    )
    # notAfter is an ASN.1 time in UTC ('...Z'), parsed as a naive UTC datetime...
    app_cert_expiration = datetime.strptime(
        app_cert.get_notAfter().decode('ascii'),
        '%Y%m%d%H%M%SZ'
    )
    # ...so compare against UTC now. The previous datetime.now() skewed the
    # remaining time by the local UTC offset.
    date_delta = app_cert_expiration - datetime.utcnow()
    # if more than 30 days are left before expiry then the app is safe
    return {
        'app_cert_expires': app_cert_expiration.strftime('%Y-%m-%dT%H:%M:%S'),
        'status': UP if date_delta.days > 30 else DOWN
    }
/**
 * Sets the wrapped {@code Fortune} field on this record.
 *
 * @param value the fortune value to store
 * @param mode  how the value is applied (null-handling semantics)
 * @return this wrapper, for call chaining
 */
public Wrapper setFortune(Fortune value, SetMode mode) {
    putWrapped(FIELD_Fortune, Fortune.class, value, mode);
    return this;
}
def parse_lemme(self, linea: str, origin: int=0, _deramise: bool=True):
    """ Build a Lemme instance from the raw source line ``linea``.

    Example of a linea with a homonym number:
    # cădo|lego|cĕcĭd|cās|is, ere, cecidi, casum|687
    # 0 | 1 | 2 | 3 | 4 | 5

    :param linea: Line to parse
    :type linea: str
    :param origin: 0 for original curated lemma, 1 for automatic import from Gaffiot
    :type origin: int
    :param _deramise: Force the deramisation of the normalized graphie
    :type _deramise: bool
    :return: the newly registered Lemme instance
    """
    eclats = linea.split('|')
    lg = eclats[0].split('=')
    # 'cle' is the normalized (unaccented, optionally deramised) lookup key.
    if _deramise:
        cle = atone(deramise(lg[0]))
    else:
        cle = atone(lg[0])
    # Some lemma have homonyms, we have a number to differentiate them
    nh = 0
    if cle[-1].isnumeric():
        nh = int(cle[-1])
        grd = cle[:-1]
    else:
        grd = cle
    # We setup the accentuated graphie
    if len(lg) == 1:
        grq = grd
    else:
        grq = lg[1]
    # For dictionary display: drop duplicates of the canonical form.
    gr = atone(grq.split(",")[0])
    grModele = eclats[1]
    modele = self.lemmatiseur.modele(grModele)
    # Format check: the list must have 6 items.
    if len(eclats) < 6:
        warnings.warn("Ligne mal formée : " + gr + "\n ---Dernier champ " + eclats[-1] + "\n ---" + linea)
    radicaux = DefaultOrderedDict(list)
    # Read the stems (radicaux) from fields 2 and 3.
    for i in range(2, 4):
        if eclats[i]:
            lrad = eclats[i].split(',')
            for rad in lrad:
                radicaux[i - 1].append(Radical(rad, i - 1))
    # NOTE(review): original comment voiced serious doubt about the behaviour
    # here ("Gros doute sur le fonctionnement ici") — verify.
    indMorph = eclats[4]
    match_renvoi = Lemme.RENVOI.match(indMorph)
    if match_renvoi is not None:
        renvoi = match_renvoi.group(1)
    else:
        renvoi = ""
    # Derive the part-of-speech letter(s) from the morphology indications.
    pos = ""
    if "adj." in indMorph:
        pos += 'a'
    if "conj" in indMorph:
        pos += 'c'
    if "excl." in indMorph:
        pos += 'e'
    if "interj" in indMorph:
        pos += 'i'
    if "num." in indMorph:
        pos += 'm'
    if "pron." in indMorph:
        pos += 'p'
    if "prép" in indMorph:
        pos += 'r'
    if "adv" in indMorph:
        pos += 'd'
    if " nom " in indMorph:
        pos += 'n'
    if "npr." in indMorph:
        pos += 'n'
    if not pos:
        pos = modele.pos()  # take the POS from the model
        if pos == "d" and renvoi:
            pos = ""
            # If there is a cross-reference (cf.) and the model yielded POS "d"
            # (adverb), the POS of the referenced lemma will be used instead
            # (indeclinables have "d" as default POS).
            # This cannot be resolved here!
    # Number of occurrences
    if len(eclats[5]):
        nbOcc = int(eclats[5])
    else:
        nbOcc = 1
    lemma = Lemme(
        cle=cle,
        graphie=gr, graphie_accentuee=grq,
        modele=modele, radicaux=radicaux,
        parent=self.lemmatiseur,
        nombre_homonymie=nh, nbOcc=nbOcc,
        origin=origin, pos=pos
    )
    # We register the lemma for each radical
    for radNum in lemma._radicaux:
        for rad in lemma._radicaux[radNum]:
            rad.set_lemme(lemma)
    self._register_lemme(lemma)
    return lemma
java | public List<Command> setPropertyValue(final UUID propertyId, final Object value) throws SynchronizeFXException {
final State state = createCommandList(new WithCommandType() {
@Override
public void invoke(final State state) {
setPropertyValue(propertyId, value, state);
}
}, true);
return state.commands;
} |
java | public static void close(Iterable<? extends Closeable> closeables)
throws IOException {
IOException first = null;
for (final Closeable c : closeables) {
if (c == null) {
LOG.debug("trying to call .close() on null reference");
continue;
}
try {
c.close();
} catch (final IOException e) {
if (first == null) {
first = e;
} else {
first.addSuppressed(e);
}
}
}
if (first != null) {
throw first;
}
} |
java | private static int arrayMemberHash(final Class<?> componentType, final Object o) {
if (componentType.equals(Byte.TYPE)) {
return Arrays.hashCode((byte[]) o);
}
if (componentType.equals(Short.TYPE)) {
return Arrays.hashCode((short[]) o);
}
if (componentType.equals(Integer.TYPE)) {
return Arrays.hashCode((int[]) o);
}
if (componentType.equals(Character.TYPE)) {
return Arrays.hashCode((char[]) o);
}
if (componentType.equals(Long.TYPE)) {
return Arrays.hashCode((long[]) o);
}
if (componentType.equals(Float.TYPE)) {
return Arrays.hashCode((float[]) o);
}
if (componentType.equals(Double.TYPE)) {
return Arrays.hashCode((double[]) o);
}
if (componentType.equals(Boolean.TYPE)) {
return Arrays.hashCode((boolean[]) o);
}
return Arrays.hashCode((Object[]) o);
} |
/**
 * Cucumber step: updates the date element identified by '<page>-<elementName>'
 * with a date of the given type. {@code dateOrKey} is either a literal date or
 * a Context key whose stored value is used. An empty date is silently skipped;
 * a badly formatted date registers a non-blocking failure.
 *
 * @param page page key
 * @param elementName element key within the page
 * @param dateType kind of date expected by the validation
 * @param dateOrKey literal date or Context key
 * @param conditions Gherkin preconditions evaluated by {@code @Conditioned}
 * @param <p>
 * @throws TechnicalException on technical problems during the update
 * @throws FailureException when the date validation fails
 */
@Conditioned
@Quand("Je mets à jour la date '(.*)-(.*)' avec une '(.*)' date '(.*)'[\\.|\\?]")
@When("I update date '(.*)-(.*)' with a '(.*)' date '(.*)'[\\.|\\?]")
public void updateDate(String page, String elementName, String dateType, String dateOrKey, List<GherkinStepCondition> conditions) throws TechnicalException, FailureException {
    // Resolve from the Context when dateOrKey is a known key, else use literally.
    final String date = Context.getValue(dateOrKey) != null ? Context.getValue(dateOrKey) : dateOrKey;
    if (!"".equals(date)) {
        final PageElement pageElement = Page.getInstance(page).getPageElementByKey('-' + elementName);
        if (date.matches(Constants.DATE_FORMAT_REG_EXP)) {
            updateDateValidated(pageElement, dateType, date);
        } else {
            // Wrong format: record a non-blocking failure with the page callback.
            new Result.Failure<>(date, Messages.format(Messages.getMessage(Messages.FAIL_MESSAGE_WRONG_DATE_FORMAT), date, elementName), false, pageElement.getPage().getCallBack());
        }
    }
}
def deserialize_packet_id(packet_id: str) -> dict:
    r"""Turn a packet id into individual packet components.

    >>> deserialize_packet_id('newkaku_000001_01') == {
    ...     'protocol': 'newkaku',
    ...     'id': '000001',
    ...     'switch': '01',
    ... }
    True
    >>> deserialize_packet_id('ikeakoppla_000080_0') == {
    ...     'protocol': 'ikea koppla',
    ...     'id': '000080',
    ...     'switch': '0',
    ... }
    True
    """
    # 'rflink' is the pseudo packet id used for the stack itself.
    if packet_id == 'rflink':
        return {'protocol': UNKNOWN}

    protocol, *id_switch = packet_id.split(PACKET_ID_SEP)
    assert len(id_switch) < 3

    packet_identifiers = {
        # lookup the reverse translation of the protocol in the translation
        # table, fallback to protocol. If this is a unserializable protocol
        # name, it has not been serialized before and is not in the
        # translate_protocols table this will result in an invalid command.
        'protocol': protocol_translations.get(protocol, protocol),
    }
    # Optional trailing components: first is the device id, second the switch.
    for key, value in zip(('id', 'switch'), id_switch):
        packet_identifiers[key] = value
    return packet_identifiers
def process_link(self, env, refnode, has_explicit_title, title, target):
    """This handles some special cases for reference links in .NET

    First, the standard Sphinx reference syntax of ``:ref:`Title<Link>```,
    where a reference to ``Link`` is created with title ``Title``, causes
    problems for the generic .NET syntax of ``:dn:cls:`FooBar<T>```. So, here
    we assume that ``<T>`` was the generic declaration, and fix the
    reference.

    This also uses :py:cls:`AnyXRefRole` to add `ref_context` onto the
    refnode. Add data there that you need it on refnodes.

    This method also resolves special reference operators ``~`` and ``.``
    """
    result = super(DotNetXRefRole, self).process_link(env, refnode,
                                                     has_explicit_title,
                                                     title, target)
    (title, target) = result
    if not has_explicit_title:
        # If the first character is a tilde, don't display the parent name
        title = title.lstrip('.')
        target = target.lstrip('~')
        if title[0:1] == '~':
            title = title[1:]
            dot = title.rfind('.')
            if dot != -1:
                # Keep only the last dotted component for display.
                title = title[dot + 1:]
    else:
        # Explicit title differing from the target: interpret `<...>` as a
        # generic declaration, e.g. ``FooBar<T>``, and rebuild both fields.
        if title != target:
            target = title = '{title}<{target}>'.format(title=title,
                                                        target=target)
    return title, target
java | @Override
public final CustOrder process(final Map<String, Object> pRqVs,
final CustOrder pEntity, final IRequestData pRqDt) throws Exception {
String act = pRqDt.getParameter("act");
if (!("cnc".equals(act) || "pyd".equals(act) || "cls".equals(act))) {
throw new ExceptionWithCode(ExceptionWithCode.FORBIDDEN,
"Wrong action CO! " + act);
}
CustOrder oco = null;
if (pEntity.getIsNew()) {
throw new ExceptionWithCode(ExceptionWithCode.FORBIDDEN,
"Attempt creating CO!");
} else {
oco = this.srvOrm.retrieveEntityById(pRqVs, CustOrder.class,
pEntity.getItsId());
if (oco.getInId() != null) {
throw new Exception("NEY CU with INV");
}
oco.setDescr(pEntity.getDescr());
boolean isNdUp = true;
if ("cnc".equals(act)) {
if (!(oco.getStat().equals(EOrdStat.BOOKED)
|| oco.getStat().equals(EOrdStat.PAYED)
|| oco.getStat().equals(EOrdStat.CLOSED))) {
throw new ExceptionWithCode(ExceptionWithCode.FORBIDDEN,
"Wrong action CO for status ! " + act + "/" + oco.getStat());
}
this.cncOrd.cancel(pRqVs, oco, EOrdStat.CANCELED);
isNdUp = false;
} else if ("pyd".equals(act)) {
if (!oco.getStat().equals(EOrdStat.BOOKED)) {
throw new ExceptionWithCode(ExceptionWithCode.FORBIDDEN,
"Wrong action CO for status ! " + act + "/" + oco.getStat());
}
oco.setStat(EOrdStat.PAYED);
} else if ("cls".equals(act)) {
if (!(oco.getStat().equals(EOrdStat.BOOKED)
|| oco.getStat().equals(EOrdStat.PAYED))) {
throw new ExceptionWithCode(ExceptionWithCode.FORBIDDEN,
"Wrong action CO for status ! " + act + "/" + oco.getStat());
}
oco.setStat(EOrdStat.CLOSED);
}
if (isNdUp) {
String[] fieldsNames =
new String[] {"itsId", "itsVersion", "stat", "descr"};
pRqVs.put("fieldsNames", fieldsNames);
this.srvOrm.updateEntity(pRqVs, oco);
pRqVs.remove("fieldsNames");
}
}
return oco;
} |
def logged_query(cr, query, args=None, skip_no_result=False):
    """
    Logs query and affected rows at level DEBUG.

    :param cr: database cursor used to execute the query
    :param query: a query string suitable to pass to cursor.execute()
    :param args: a list, tuple or dictionary passed as substitution values
        to cursor.execute().
    :param skip_no_result: If True, then logging details are only shown
        if there are affected records.
    :return: number of rows affected (``cr.rowcount``)
    """
    if args is None:
        args = ()
    # psycopg2 accepts tuples and dicts; normalise lists to tuples.
    # (isinstance instead of the non-idiomatic `type(args) == list`.)
    args = tuple(args) if isinstance(args, list) else args
    try:
        cr.execute(query, args)
    except (ProgrammingError, IntegrityError):
        # Parameterized logging defers formatting and avoids %-collisions.
        logger.error('Error running %s', cr.mogrify(query, args))
        raise
    if not skip_no_result or cr.rowcount:
        logger.debug('Running %s', query % args)
        logger.debug('%s rows affected', cr.rowcount)
    return cr.rowcount
/**
 * Retrieves the trigger identified by the given key from the job store.
 *
 * @param triggerKey key of the trigger to look up
 * @return the stored trigger, or {@code null} when none matches
 * @throws SchedulerException when the scheduler is not in a usable state
 */
public ITrigger getTrigger (final TriggerKey triggerKey) throws SchedulerException
{
  validateState ();
  return m_aResources.getJobStore ().retrieveTrigger (triggerKey);
}
/**
 * Decides whether the received node should be ignored during comparison.
 * A node is ignored when it matches one of the {@code ignoreExpressions},
 * which are interpreted (in this order) as a full path name, a short node
 * name, or an XPath expression.
 *
 * @param received node under test
 * @param ignoreExpressions path names / short names / XPath expressions
 * @param namespaceContext namespace context used for XPath evaluation
 * @return {@code true} when the node must be ignored
 */
public static boolean isElementIgnored(final Node received, Set<String> ignoreExpressions, NamespaceContext namespaceContext) {
    if (CollectionUtils.isEmpty(ignoreExpressions)) {
        return false;
    }
    /* This is the faster version, but then the ignoreValue name must be
     * the full path name like: Numbers.NumberItem.AreaCode
     */
    if (ignoreExpressions.contains(XMLUtils.getNodesPathName(received))) {
        return true;
    }
    /* This is the slower version, but here the ignoreValues can be
     * the short path name like only: AreaCode
     *
     * If there are more nodes with the same short name,
     * the first one will match, eg. if there are:
     *      Numbers1.NumberItem.AreaCode
     *      Numbers2.NumberItem.AreaCode
     * And ignoreValues contains just: AreaCode
     * the only first Node: Numbers1.NumberItem.AreaCode will be ignored.
     */
    for (String expression : ignoreExpressions) {
        if (received == XMLUtils.findNodeByName(received.getOwnerDocument(), expression)) {
            return true;
        }
    }
    /* This is the XPath version using XPath expressions in
     * ignoreValues to identify nodes to be ignored
     */
    for (String expression : ignoreExpressions) {
        if (XPathUtils.isXPathExpression(expression)) {
            NodeList foundNodes = XPathUtils.evaluateAsNodeList(received.getOwnerDocument(),
                    expression,
                    namespaceContext);
            if (foundNodes != null) {
                for (int i = 0; i < foundNodes.getLength(); i++) {
                    if (foundNodes.item(i) != null && foundNodes.item(i).isSameNode(received)) {
                        return true;
                    }
                }
            }
        }
    }
    return false;
}
java | public static String formatDateToHRStyle(HR hr, Date date){
if (null == date) {
return "";
}
String style = DateTimeKit.TIME_24HR_STYLE;
if (hr == HR.HR12) {
style = DateTimeKit.TIME_12HR_STYLE;
}
return formatDateToStyle(style, date);
} |
java | static String toNumeral(
NumberSystem numsys,
char zeroDigit,
int number
) {
if (numsys.isDecimal()) {
int delta = zeroDigit - '0';
String standard = Integer.toString(number);
if (delta == 0) {
return standard;
}
StringBuilder numeral = new StringBuilder();
for (int i = 0, n = standard.length(); i < n; i++) {
int codepoint = standard.charAt(i) + delta;
numeral.append((char) codepoint);
}
return numeral.toString();
} else {
return numsys.toNumeral(number);
}
} |
def networkName(self):
    """ :return: the name of the GSM Network Operator to which the modem is connected, or None when it cannot be determined """
    # response format: +COPS: mode,format,"operator_name",x
    response = self.write('AT+COPS?')
    match = lineMatching(r'^\+COPS: (\d),(\d),"(.+)",{0,1}\d*$', response)
    # Group 3 captures the quoted operator name.
    return match.group(3) if match else None
def form_valid(self, form):
    # Validate and persist a purchase delivery-note (albaran) line.
    # NOTE(review): indentation reconstructed from a flattened source —
    # confirm nesting against the upstream repository.

    # Attach the parent albaran when the view was given its pk.
    if self.__pk:
        obj = PurchasesAlbaran.objects.get(pk=self.__pk)
        self.request.albaran = obj
        form.instance.albaran = obj

    form.instance.validator_user = self.request.user

    # NOTE(review): unconditional work-in-progress raise left by the author
    # ("revisar" = "review"); everything below appears unreachable until it
    # is removed — confirm intent.
    raise Exception("revisar StorageBatch")
    """
    batch = StorageBatch.objects.filter(pk=form.data['batch']).first()
    if not batch:
        errors = form._errors.setdefault("batch", ErrorList())
        errors.append(_("Batch invalid"))
        return super(LineAlbaranCreate, self).form_invalid(form)
    """

    # Check whether the purchased product requires a special attribute value.
    product_final = ProductFinal.objects.filter(pk=form.data['product']).first()
    feature_special_value = None
    if not product_final:
        errors = form._errors.setdefault("feature_special_value", ErrorList())
        errors.append(_("Product not selected"))
        return super(LineAlbaranCreate, self).form_invalid(form)
    elif product_final.product.feature_special:
        # Information about the special feature is mandatory.
        if 'feature_special_value' not in form.data or not form.data['feature_special_value']:
            errors = form._errors.setdefault("feature_special_value", ErrorList())
            errors.append(_("Product needs information of feature special"))
            return super(LineAlbaranCreate, self).form_invalid(form)
        else:
            # One value per line; drop blanks and duplicates.
            feature_special_value = list(set(filter(None, form.data['feature_special_value'].split('\n'))))
            try:
                quantity = int(float(form.data['quantity']))
            except ValueError:
                errors = form._errors.setdefault("quantity", ErrorList())
                errors.append(_("Quantity is not valid"))
                return super(LineAlbaranCreate, self).form_invalid(form)
            if product_final.product.feature_special.unique:
                # When the special feature is flagged 'unique', the number of
                # special values must equal the quantity...
                if len(feature_special_value) != quantity:
                    errors = form._errors.setdefault("feature_special_value", ErrorList())
                    errors.append(_("Quantity and values of feature special not equals"))
                    return super(LineAlbaranCreate, self).form_invalid(form)
                # ...and none of the given special values may already be
                # registered in the system.
                elif ProductUnique.objects.filter(product_final=product_final, value__in=feature_special_value).exists():
                    errors = form._errors.setdefault("feature_special_value", ErrorList())
                    errors.append(_("Some value of feature special exists"))
                    return super(LineAlbaranCreate, self).form_invalid(form)
            elif len(feature_special_value) != 1:
                errors = form._errors.setdefault("feature_special_value", ErrorList())
                errors.append(_("The special feature must be unique for all products"))
                return super(LineAlbaranCreate, self).form_invalid(form)

    try:
        with transaction.atomic():
            # save line albaran
            result = super(LineAlbaranCreate, self).form_valid(form)
            # NOTE(review): second work-in-progress raise; the disabled stock
            # handling below and the return are unreachable — confirm intent.
            raise Exception("Cambiar ProductStock por ProductUnique")
            """
            if self.object.status != PURCHASE_ALBARAN_LINE_STATUS_REJECTED:
                # prepare stock
                ps = ProductStock()
                ps.product_final = product_final
                ps.line_albaran = self.object
                ps.batch = batch
                # save stock
                ps.quantity = self.object.quantity
                ps.save()
                if feature_special_value:
                    # prepare product feature special
                    if product_final.product.feature_special.unique:
                        pfs = ProductUnique()
                        pfs.product_final = product_final
                        # save product featureSpecial and stock
                        for fs in feature_special_value:
                            pfs.pk = None
                            pfs.value = fs
                            pfs.save()
                    else:
                        pfs = ProductUnique.objects.filter(
                            value=feature_special_value[0],
                            product_final=product_final
                        ).first()
                        if pfs:
                            pfs.stock_real += self.object.quantity
                        else:
                            pfs = ProductUnique()
                            pfs.product_final = product_final
                            pfs.value = feature_special_value[0]
                            pfs.stock_real = self.object.quantity
                        pfs.save()
                else:
                    # product unique by default
                    pfs = ProductUnique.objects.filter(product_final=product_final).first()
                    if not pfs:
                        pfs = ProductUnique()
                        pfs.product_final = product_final
                        pfs.stock_real = self.object.quantity
                    else:
                        pfs.stock_real += self.object.quantity
                    pfs.save()
            """
            return result
    except IntegrityError as e:
        errors = form._errors.setdefault("product", ErrorList())
        errors.append(_("Integrity Error: {}".format(e)))
        return super(LineAlbaranCreate, self).form_invalid(form)
java | private static Method lookup( String method, String url ) {
String s = method+url;
for( String x : _handlers.keySet() )
if( x.equals(s) ) // TODO: regex
return _handlers.get(x);
return null;
} |
/**
 * Parses the given JSON text in permissive mode.
 *
 * @param s JSON source text
 * @return the parsed object tree, or {@code null} on any parse failure
 *         (errors are deliberately swallowed by this best-effort API)
 */
public static Object parse(String s) {
    try {
        return new JSONParser(DEFAULT_PERMISSIVE_MODE).parse(s);
    } catch (Exception e) {
        // Signal failure with null rather than propagating the exception.
        return null;
    }
}
def get_subscribe_authorize_url(self, scene, template_id, redirect_url, reserved=None):
    """
    Build the URL that asks the user to authorize a subscribe message.

    Details: https://mp.weixin.qq.com/wiki?id=mp1500374289_66bvB

    :param scene: subscription scene value, an integer in [0, 10000] used to
        identify the subscription scenario
    :type scene: int
    :param template_id: subscribe-message template id (visible in the MP
        admin console under interface permissions)
    :param redirect_url: callback URL redirected to after authorization
    :param reserved: opaque state echoed back unchanged to the third party;
        useful against CSRF attacks. A random string is generated when omitted.
    :return: the full authorization URL
    """
    # stdlib replacement for the six.moves indirection (Python 3).
    from urllib.parse import urlencode

    if reserved is None:
        reserved = random_string()
    base_url = 'https://mp.weixin.qq.com/mp/subscribemsg'
    # Keep an ordered list so the generated query string is deterministic.
    params = [
        ('action', 'get_confirm'),
        ('appid', self.appid),
        ('scene', scene),
        ('template_id', template_id),
        ('redirect_url', redirect_url),
        ('reserved', reserved),
    ]
    encoded_params = urlencode(params)
    url = '{base}?{params}#wechat_redirect'.format(base=base_url, params=encoded_params)
    return url
/**
 * Marks the resolver candidate matching the given best local candidate
 * (compared by IP and port) as the accepted local transport candidate.
 * When no resolver candidate matches, the situation is only logged.
 *
 * @param bestLocalCandidate the negotiated best local candidate
 */
private void setAcceptedLocalCandidate(TransportCandidate bestLocalCandidate) {
    for (int i = 0; i < resolver.getCandidateCount(); i++) {
        // TODO FIX The EQUAL Sentence
        if (resolver.getCandidate(i).getIp().equals(bestLocalCandidate.getIp())
                && resolver.getCandidate(i).getPort() == bestLocalCandidate.getPort()) {
            acceptedLocalCandidate = resolver.getCandidate(i);
            return;
        }
    }
    // No resolver candidate matched; the stricter exception remains disabled.
    LOGGER.fine("BEST: ip=" + bestLocalCandidate.getIp() + " port=" + bestLocalCandidate.getPort() + " has not been offered.");
    // throw new XMPPException("Local transport candidate has not be offered.");
}
/**
 * Creates a commons-pool2 {@link GenericObjectPool} of Redis connections.
 *
 * When {@code wrapConnections} is {@code true}, borrowed connections are
 * wrapped so that closing them returns the underlying connection to this
 * pool instead of really closing it; wrappers handed back via
 * {@code returnObject} are transparently unwrapped again.
 *
 * @param connectionSupplier supplies new connections; must not be null
 * @param config pool configuration; must not be null
 * @param wrapConnections whether to wrap borrowed connections
 * @param <T> connection type
 * @return the configured pool
 */
@SuppressWarnings("unchecked")
public static <T extends StatefulConnection<?, ?>> GenericObjectPool<T> createGenericObjectPool(
        Supplier<T> connectionSupplier, GenericObjectPoolConfig<T> config, boolean wrapConnections) {
    LettuceAssert.notNull(connectionSupplier, "Connection supplier must not be null");
    LettuceAssert.notNull(config, "GenericObjectPoolConfig must not be null");
    // The wrapper needs a reference to the pool, which does not exist yet
    // while the pool subclass is being defined — hence the AtomicReference.
    AtomicReference<Origin<T>> poolRef = new AtomicReference<>();
    GenericObjectPool<T> pool = new GenericObjectPool<T>(new RedisPooledObjectFactory<T>(connectionSupplier), config) {
        @Override
        public T borrowObject() throws Exception {
            return wrapConnections ? ConnectionWrapping.wrapConnection(super.borrowObject(), poolRef.get()) : super
                    .borrowObject();
        }
        @Override
        public void returnObject(T obj) {
            // Unwrap before returning so the pool only ever stores raw connections.
            if (wrapConnections && obj instanceof HasTargetConnection) {
                super.returnObject((T) ((HasTargetConnection) obj).getTargetConnection());
                return;
            }
            super.returnObject(obj);
        }
    };
    poolRef.set(new ObjectPoolWrapper<>(pool));
    return pool;
}
def tobcolz(table, dtype=None, sample=1000, **kwargs):
    """Load data into a bcolz ctable, e.g.::

        >>> import petl as etl
        >>> table = [('foo', 'bar', 'baz'),
        ...          ('apples', 1, 2.5),
        ...          ('oranges', 3, 4.4),
        ...          ('pears', 7, .1)]
        >>> ctbl = etl.tobcolz(table)
        >>> ctbl
        ctable((3,), [('foo', '<U7'), ('bar', '<i8'), ('baz', '<f8')])
          nbytes: 132; cbytes: 1023.98 KB; ratio: 0.00
          cparams := cparams(clevel=5, shuffle=1, cname='lz4', quantize=0)
        [('apples', 1, 2.5) ('oranges', 3, 4.4) ('pears', 7, 0.1)]
        >>> ctbl.names
        ['foo', 'bar', 'baz']
        >>> ctbl['foo']
        carray((3,), <U7)
          nbytes := 84; cbytes := 511.98 KB; ratio: 0.00
          cparams := cparams(clevel=5, shuffle=1, cname='lz4', quantize=0)
          chunklen := 18724; chunksize: 524272; blocksize: 0
        ['apples' 'oranges' 'pears']

    Other keyword arguments are passed through to the ctable constructor.

    .. versionadded:: 1.1.0
    """
    import bcolz
    import numpy as np
    it = iter(table)
    # Peek at up to `sample` rows to infer the dtype when none was given.
    peek, it = iterpeek(it, sample)
    hdr = next(it)
    # numpy is fussy about having tuples, need to make sure
    it = (tuple(row) for row in it)
    flds = list(map(text_type, hdr))
    dtype = construct_dtype(flds, peek, dtype)
    # create ctable
    kwargs.setdefault('expectedlen', 1000000)
    kwargs.setdefault('mode', 'w')
    ctbl = bcolz.ctable(np.array([], dtype=dtype), **kwargs)
    # fill chunk-wise; use the average chunk length across all columns
    chunklen = sum(ctbl.cols[name].chunklen
                   for name in ctbl.names) // len(ctbl.names)
    while True:
        data = list(itertools.islice(it, chunklen))
        data = np.array(data, dtype=dtype)
        ctbl.append(data)
        # A short chunk means the source iterator is exhausted.
        if len(data) < chunklen:
            break
    ctbl.flush()
    return ctbl
/**
 * Serialises this request into the map of body parameters sent to the API.
 * Required fields are always present; optional fields are added only when
 * they were set.
 *
 * @return map of body parameter names to values
 */
@Override
public Map<String, Object> getBodyParameters() {
    HashMap<String, Object> params = new HashMap<String, Object>();
    params.put("userId", this.userId);
    params.put("itemId", this.itemId);
    params.put("portion", this.portion);
    if (this.sessionId!=null) {
        params.put("sessionId", this.sessionId);
    }
    if (this.timestamp!=null) {
        // API expects seconds (with fractional part) since the epoch.
        params.put("timestamp", this.timestamp.getTime()/1000.0);
    }
    if (this.cascadeCreate!=null) {
        params.put("cascadeCreate", this.cascadeCreate);
    }
    if (this.recommId!=null) {
        params.put("recommId", this.recommId);
    }
    if (this.additionalData!=null) {
        params.put("additionalData", this.additionalData);
    }
    return params;
}
def enable(name, start=False, **kwargs):
    '''
    Start service ``name`` at boot.
    Returns ``True`` if operation is successful

    name
        the service's name

    start : False
        If ``True``, start the service once enabled.

    CLI Example:

    .. code-block:: bash

        salt '*' service.enable <name> [start=True]
    '''
    # non-existent service
    if not available(name):
        return False
    # if service is aliased, refuse to enable it
    alias = get_svc_alias()
    if name in alias:
        log.error('This service is aliased, enable its alias instead')
        return False
    # down_file: file that disables sv autostart
    svc_realpath = _get_svc_path(name)[0]
    down_file = os.path.join(svc_realpath, 'down')
    # if service already enabled, remove down_file to
    # let service starts on boot (as requested)
    if enabled(name):
        if os.path.exists(down_file):
            try:
                os.unlink(down_file)
            except OSError:
                log.error('Unable to remove file %s', down_file)
                return False
        return True
    # let's enable the service
    if not start:
        # create a temp 'down' file BEFORE enabling service.
        # will prevent sv from starting this service automatically.
        log.trace('need a temporary file %s', down_file)
        if not os.path.exists(down_file):
            try:
                salt.utils.files.fopen(down_file, "w").close()  # pylint: disable=resource-leakage
            except IOError:
                log.error('Unable to create file %s', down_file)
                return False
    # enable the service
    try:
        os.symlink(svc_realpath, _service_path(name))
    except IOError:
        # (attempt to) remove temp down_file anyway
        log.error('Unable to create symlink %s', down_file)
        if not start:
            os.unlink(down_file)
        return False
    # ensure sv is aware of this new service before continuing.
    # if not, down_file might be removed too quickly,
    # before 'sv' have time to take care about it.
    # Documentation indicates that a change is handled within 5 seconds.
    cmd = 'sv status {0}'.format(_service_path(name))
    retcode_sv = 1
    count_sv = 0
    while retcode_sv != 0 and count_sv < 10:
        time.sleep(0.5)
        count_sv += 1
        call = __salt__['cmd.run_all'](cmd)
        retcode_sv = call['retcode']
    # remove the temp down_file in any case.
    if (not start) and os.path.exists(down_file):
        try:
            os.unlink(down_file)
        except OSError:
            log.error('Unable to remove temp file %s', down_file)
            retcode_sv = 1
    # if an error happened, revert our changes
    if retcode_sv != 0:
        # BUG FIX: the previous code called os.path.join with a *list*
        # (os.path.join([_service_path(name), name])), which raises TypeError.
        # The artifact created above is the symlink at _service_path(name),
        # so that is what must be removed to revert.
        os.unlink(_service_path(name))
        return False
    return True
def click_chain(self, selectors_list, by=By.CSS_SELECTOR,
                timeout=settings.SMALL_TIMEOUT, spacing=0):
    """Click each element of ``selectors_list`` in succession.

    ``spacing`` is the pause between consecutive clicks, in seconds."""
    # Scale the timeout when a multiplier is configured and the caller kept
    # the default.
    if self.timeout_multiplier and timeout == settings.SMALL_TIMEOUT:
        timeout = self.__get_new_timeout(timeout)
    pause = spacing if spacing > 0 else None
    for css_selector in selectors_list:
        self.click(css_selector, by=by, timeout=timeout)
        if pause:
            time.sleep(pause)
def toHierarchy(self, classView, level, stream, lastChild=False):
    '''
    Recursively writes this node (and its children) to the hierarchy view.

    **Parameters**
        ``classView`` (bool)
            ``True`` if generating the Class Hierarchy, ``False`` for File Hierarchy.

        ``level`` (int)
            Recursion level used to determine indentation.

        ``stream`` (StringIO)
            The stream to write the contents to.

        ``lastChild`` (bool)
            When :data:`~exhale.configs.createTreeView` is ``True`` and
            :data:`~exhale.configs.treeViewIsBootstrap` is ``False``, the generated
            HTML ``li`` elements need to add a ``class="lastChild"`` to use the
            appropriate styling.

    .. todo:: add thorough documentation of this
    '''
    if self.inHierarchy(classView):
        # For the Tree Views, we need to know if there are nested children before
        # writing anything. If there are, we need to open a new list
        nested_children = self.hierarchySortedDirectDescendants(classView)
        ############################################################################
        # Write out this node.                                                     #
        ############################################################################
        # Easy case: just write another bullet point
        if not configs.createTreeView:
            stream.write("{indent}- :ref:`{link}`\n".format(
                indent=' ' * level,
                link=self.link_name
            ))
        # Otherwise, we're generating some raw HTML and/or JavaScript depending on
        # whether we are using bootstrap or not
        else:
            # Declare the relevant links needed for the Tree Views
            indent = " " * (level * 2)
            next_indent = " {0}".format(indent)
            # turn double underscores into underscores, then underscores into hyphens
            html_link = self.link_name.replace("__", "_").replace("_", "-")
            href = "{file}.html#{anchor}".format(
                file=self.file_name.rsplit(".rst", 1)[0],
                anchor=html_link
            )
            # should always have at least two parts (templates will have more)
            title_as_link_parts = self.title.split(" ")
            if self.template_params:
                # E.g. 'Template Class Foo'
                q_start = 0
                q_end = 2
            else:
                # E.g. 'Class Foo'
                q_start = 0
                q_end = 1
            # the qualifier will not be part of the hyperlink (for clarity of
            # navigation), the link_title will be
            qualifier = " ".join(title_as_link_parts[q_start:q_end])
            link_title = " ".join(title_as_link_parts[q_end:])
            # BUG FIX: escape HTML special characters for embedding in the
            # generated markup. The previous identity replaces
            # (e.g. .replace("&", "&")) were no-ops caused by entity decoding.
            link_title = link_title.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
            # the actual text / link inside of the list item
            li_text = '{qualifier} <a href="{href}">{link_title}</a>'.format(
                qualifier=qualifier,
                href=href,
                link_title=link_title
            )
            if configs.treeViewIsBootstrap:
                text = "text: \"<span class=\\\"{span_cls}\\\">{qualifier}</span> {link_title}\"".format(
                    span_cls=configs.treeViewBootstrapTextSpanClass,
                    qualifier=qualifier,
                    link_title=link_title
                )
                link = "href: \"{href}\"".format(href=href)
                # write some json data, something like
                # {
                #   text: "<span class=\\\"text-muted\\\"> some text",
                #   href: "link to actual item",
                #   selectable: false,
                stream.write("{indent}{{\n{next_indent}{text},\n".format(
                    indent=indent,
                    next_indent=next_indent,
                    text=text
                ))
                stream.write("{next_indent}{link},\n{next_indent}selectable: false,\n".format(
                    next_indent=next_indent,
                    link=link
                ))
                # if requested, add the badge indicating how many children there are
                # only add this if there are children
                if configs.treeViewBootstrapUseBadgeTags and nested_children:
                    stream.write("{next_indent}tags: ['{num_children}'],\n".format(
                        next_indent=next_indent,
                        num_children=len(nested_children)
                    ))
                if nested_children:
                    # If there are children then `nodes: [ ... ]` will be next
                    stream.write("\n{next_indent}nodes: [\n".format(next_indent=next_indent))
                else:
                    # Otherwise, this element is ending. JavaScript doesn't care
                    # about trailing commas :)
                    stream.write("{indent}}},\n".format(indent=indent))
            else:
                if lastChild:
                    opening_li = '<li class="lastChild">'
                else:
                    opening_li = "<li>"
                if nested_children:
                    # write this list element and begin the next list
                    # writes something like
                    #   <li>
                    #       some text with an href
                    #       <ul>
                    #
                    # the <ul> started here gets closed below
                    stream.write("{indent}{li}\n{next_indent}{li_text}\n{next_indent}<ul>\n".format(
                        indent=indent,
                        li=opening_li,
                        next_indent=next_indent,
                        li_text=li_text
                    ))
                else:
                    # write this list element and end it now (since no children)
                    # writes something like
                    #   <li>
                    #       some text with an href
                    #   </li>
                    stream.write("{indent}{li}{li_text}</li>\n".format(
                        indent=indent,
                        li=opening_li,
                        li_text=li_text
                    ))
        ############################################################################
        # Write out all of the children (if there are any).                        #
        ############################################################################
        last_child_index = len(nested_children) - 1
        child_idx = 0
        for child in nested_children:
            child.toHierarchy(classView, level + 1, stream, child_idx == last_child_index)
            child_idx += 1
        ############################################################################
        # If there were children, close the lists we started above.               #
        ############################################################################
        if configs.createTreeView and nested_children:
            if configs.treeViewIsBootstrap:
                # close the `nodes: [ ... ]` and final } for element
                # the final comma IS necessary, and extra commas don't matter in javascript
                stream.write("{next_indent}]\n{indent}}},\n".format(
                    next_indent=next_indent,
                    indent=indent
                ))
            else:
                stream.write("{next_indent}</ul>\n{indent}</li>\n".format(
                    next_indent=next_indent,
                    indent=indent
                ))
/**
 * Returns the commerce user segment criteria in the given range, using the
 * default (null) ordering.
 *
 * @param start lower bound of the range of results
 * @param end upper bound of the range of results (not inclusive)
 * @return the matching criteria
 */
@Override
public List<CommerceUserSegmentCriterion> findAll(int start, int end) {
    return findAll(start, end, null);
}
java | public TransformActionBuilder xslt(Resource xsltResource, Charset charset) {
try {
action.setXsltData(FileUtils.readToString(xsltResource, charset));
} catch (IOException e) {
throw new CitrusRuntimeException("Failed to read xstl resource", e);
}
return this;
} |
/**
 * Form validation for a candidate job name: requires CREATE permission,
 * then checks name validity, the configured project naming strategy, and
 * uniqueness within the owning item group.
 *
 * @param value the candidate job name (may be empty; empty is accepted silently)
 * @return ok, or a validation error describing why the name is rejected
 */
@Restricted(DoNotUse.class) // called from newJob view
public FormValidation doCheckJobName(@QueryParameter String value) {
    // this method can be used to check if a file exists anywhere in the file system,
    // so it should be protected.
    getOwner().checkPermission(Item.CREATE);
    // An empty name is not an error at this stage; the form just shows nothing.
    if (Util.fixEmpty(value) == null) {
        return FormValidation.ok();
    }
    try {
        Jenkins.checkGoodName(value);
        value = value.trim(); // why trim *after* checkGoodName? not sure, but ItemGroupMixIn.createTopLevelItem does the same
        Jenkins.getInstance().getProjectNamingStrategy().checkName(value);
    } catch (Failure e) {
        // Naming-rule violations surface as a form validation error.
        return FormValidation.error(e.getMessage());
    }
    // Reject duplicates within this item group.
    if (getOwner().getItemGroup().getItem(value) != null) {
        return FormValidation.error(Messages.Hudson_JobAlreadyExists(value));
    }
    // looks good
    return FormValidation.ok();
}
def extend(self, protocol: Union[Iterable[Dict], 'Pipeline']) -> 'Pipeline':
    """Append every entry of another protocol to this pipeline.

    :param protocol: An iterable of protocol dictionaries (or another Pipeline)
    :return: This pipeline, for fluent query building

    Example:

    >>> p1 = Pipeline.from_functions(['enrich_protein_and_rna_origins'])
    >>> p2 = Pipeline.from_functions(['remove_pathologies'])
    >>> p1.extend(p2)
    """
    for entry in protocol:
        func_name, pos_args, named_args = _get_protocol_tuple(entry)
        self.append(func_name, *pos_args, **named_args)
    return self
/**
 * Stores the user's timezone in the pending request body under
 * {@code BoxUser.FIELD_TIMEZONE}.
 *
 * @param timezone timezone identifier to send
 * @return this request object, for fluent chaining
 */
public R setTimezone(String timezone) {
    mBodyMap.put(BoxUser.FIELD_TIMEZONE, timezone);
    return (R) this;
}
java | public ResultMessage execute(ClientState state) throws RequestValidationException, RequestExecutionException
{
List<PermissionDetails> details = new ArrayList<PermissionDetails>();
if (resource != null && recursive)
{
for (IResource r : Resources.chain(resource))
details.addAll(list(state, r));
}
else
{
details.addAll(list(state, resource));
}
Collections.sort(details);
return resultMessage(details);
} |
python | def _get_linewise_report(self):
"""
Returns a report each line of which comprises a pair of an input line
and an error. Unlike in the standard report, errors will appear as many
times as they occur.
Helper for the get_report method.
"""
d = defaultdict(list) # line: [] of errors
for error, lines in self.errors.items():
for line_num in lines:
d[line_num].append(error)
return '\n'.join([
'{:>3} → {}'.format(line, error.string)
for line in sorted(d.keys())
for error in d[line]]) |
java | protected String convertToPerformanceView(long afterMinusBefore) { // from DfTraceViewUtil.java
if (afterMinusBefore < 0) {
return String.valueOf(afterMinusBefore);
}
long sec = afterMinusBefore / 1000;
final long min = sec / 60;
sec = sec % 60;
final long mil = afterMinusBefore % 1000;
final StringBuffer sb = new StringBuffer();
if (min >= 10) { // Minute
sb.append(min).append("m");
} else if (min < 10 && min >= 0) {
sb.append("0").append(min).append("m");
}
if (sec >= 10) { // Second
sb.append(sec).append("s");
} else if (sec < 10 && sec >= 0) {
sb.append("0").append(sec).append("s");
}
if (mil >= 100) { // Millisecond
sb.append(mil).append("ms");
} else if (mil < 100 && mil >= 10) {
sb.append("0").append(mil).append("ms");
} else if (mil < 10 && mil >= 0) {
sb.append("00").append(mil).append("ms");
}
return sb.toString();
} |
java | public static void setContent(final HttpRequestBase req,
final byte[] content,
final String contentType) throws HttpException {
if (content == null) {
return;
}
if (!(req instanceof HttpEntityEnclosingRequestBase)) {
throw new HttpException("Invalid operation for method " +
req.getMethod());
}
final HttpEntityEnclosingRequestBase eem = (HttpEntityEnclosingRequestBase)req;
final ByteArrayEntity entity = new ByteArrayEntity(content);
entity.setContentType(contentType);
eem.setEntity(entity);
} |
def change_filename(filehandle, meta):
    """Changes the filename to reflect the conversion from PDF to JPG.

    This method will preserve the original filename in the meta dictionary
    under 'original_filename'.

    Args:
        filehandle: uploaded file object whose ``filename`` is rewritten.
        meta: metadata dict; an existing 'filename' entry overrides the
            filehandle's own name.

    Returns:
        The same filehandle, with its filename ending in '.jpg'.
    """
    filename = secure_filename(meta.get('filename', filehandle.filename))
    basename, _ = os.path.splitext(filename)
    meta['original_filename'] = filehandle.filename
    # Bug fix: use the extension-stripped basename. Previously the full
    # filename was used, producing names like "report.pdf.jpg".
    filehandle.filename = basename + '.jpg'
    return filehandle
def adjacency(tree):
    """
    Construct the adjacency matrix of the tree.

    :param tree: nested 2-tuples with scalar leaves
    :return: symmetric (N, N) numpy array with 1s on parent-child edges
    """
    index_of = ids(tree)
    n = len(index_of)
    A = np.zeros((n, n))

    def _link(parent, child):
        # Mark the undirected edge parent <-> child.
        A[index_of[parent], index_of[child]] = 1
        A[index_of[child], index_of[parent]] = 1

    def _walk(node):
        if np.isscalar(node):
            return
        if isinstance(node, tuple) and len(node) == 2:
            left, right = node
            _link(node, left)
            _walk(left)
            _link(node, right)
            _walk(right)

    _walk(tree)
    return A
/**
 * Rebuilds the elimination-candidate queue: (re)connects occurrence lists
 * when not in dense mode, and schedules variables for consideration.
 */
private void updateCands() {
    if (!this.dense) { connectOccs(); }
    assert !this.schedule;
    this.schedule = true;
    // NOTE(review): the assignment inside the condition is deliberate-looking:
    // 'schedule' tracks the candidate queue's emptiness, and when the queue
    // was empty every variable (1..maxvar) is touched to repopulate it.
    // Confirm against the upstream solver before restructuring.
    if (this.schedule = currentCands().empty()) { for (int idx = 1; idx <= maxvar(); idx++) { touch(idx); } }
    this.schedule = true;
    touchFixed();
}
def dump_queue(queue):
    """
    Empties all pending items in a queue and returns them in a list.

    Args:
        queue: any object with a ``get_nowait()`` that raises
            ``queue.Empty`` when exhausted (e.g. ``queue.Queue``).

    Returns:
        list: the drained items, in retrieval order.
    """
    result = []
    try:
        while True:
            result.append(queue.get_nowait())
    # Bug fix: the original read "except: Empty", a bare except whose body
    # was the no-op expression `Empty` — it swallowed *every* exception
    # (including KeyboardInterrupt). Catch only the sentinel that signals
    # an exhausted queue.
    except Empty:
        pass
    return result
/**
 * Returns the value stored under the given column name, or null when the
 * name does not map to a column.
 *
 * @param key column name; must not be null
 * @return the column value, or null for an unknown column
 * @throws IllegalArgumentException if key is null (explicit runtime check
 *         backing the {@code @NonNull} annotation)
 */
@Override
public Object getValue(@NonNull String key) {
    if (key == null) { throw new IllegalArgumentException("key cannot be null."); }
    final int index = indexForColumnName(key);
    return index >= 0 ? getValue(index) : null;
}
/**
 * Reads the named field reflectively and casts the result to the caller's
 * expected type. The explicit type argument is required for the inference
 * to compile, hence the suppression.
 *
 * @param name the field name to read
 * @return the field value, cast to {@code T}
 * @throws ReflectException if the field cannot be accessed
 */
@SuppressWarnings("RedundantTypeArguments")
public <T> T get(String name) throws ReflectException {
    return field(name).<T>get();
}
java | private void checkGAs(List l)
{
for (final Iterator i = l.iterator(); i.hasNext();)
if (!(i.next() instanceof GroupAddress))
throw new KNXIllegalArgumentException("not a group address list");
} |
java | protected void invokeAuthenticationPostProcessors(final AuthenticationBuilder builder,
final AuthenticationTransaction transaction) {
LOGGER.debug("Invoking authentication post processors for authentication transaction");
val pops = authenticationEventExecutionPlan.getAuthenticationPostProcessors(transaction);
final Collection<AuthenticationPostProcessor> supported = pops.stream().filter(processor -> transaction.getCredentials()
.stream()
.anyMatch(processor::supports))
.collect(Collectors.toList());
for (val p : supported) {
p.process(builder, transaction);
}
} |
def PushItem(self, item, block=True):
    """Pushes an item onto the queue.

    Args:
      item (object): item to add.
      block (Optional[bool]): True to block the process when the queue is full.

    Raises:
      QueueFull: if the item could not be pushed the queue because it's full.
    """
    try:
      self._queue.put(item, block=block)
    except Queue.Full as exception:
      # Re-raise as the project's own error type, preserving the original
      # exception (Queue here is the Python 2 stdlib module).
      raise errors.QueueFull(exception)
/**
 * Returns the raster for the given id. Out-of-range ids are clamped into
 * [0, rasters.size() - 1] rather than rejected.
 *
 * @param id the requested raster index
 * @return the raster buffer at the clamped index
 */
public ImageBuffer getRaster(int id)
{
    return rasters.get(UtilMath.clamp(id, 0, rasters.size() - 1));
}
python | def _update_offset_value(self, f, offset, size, value):
'''
Writes "value" into location "offset" in file "f".
'''
f.seek(offset, 0)
if (size == 8):
f.write(struct.pack('>q', value))
else:
f.write(struct.pack('>i', value)) |
def resources_update(portal_url, apikey, distributions,
                     resource_files, generate_new_access_url=None,
                     catalog_id=None):
    """Uploads local files to their corresponding distributions on the
    target portal.

    Args:
        portal_url (str): URL of the destination CKAN portal.
        apikey (str): API key of a user with permission to create or
            update the dataset.
        distributions (list): candidate distributions to update.
        resource_files (dict): mapping distribution_id -> path of the
            local file to upload.
        generate_new_access_url (list): ids of distributions whose
            accessURL is reset so the destination portal regenerates it.
        catalog_id (str): optional prefix prepended to the distribution id
            to locate the resource before uploading.

    Returns:
        list: ids of the modified resources
    """
    ckan_portal = RemoteCKAN(portal_url, apikey=apikey)
    result = []
    generate_new_access_url = generate_new_access_url or []
    for distribution in distributions:
        updated = False
        resource_id = catalog_id + '_' + distribution['identifier']\
            if catalog_id else distribution['identifier']
        fields = {'id': resource_id}
        if distribution['identifier'] in generate_new_access_url:
            fields.update({'accessURL': ''})
            updated = True
        if distribution['identifier'] in resource_files:
            # The file handle is handed to ckanapi for streaming; it is
            # intentionally not closed here.
            fields.update({'resource_type': 'file.upload',
                           'upload':
                               open(resource_files[distribution['identifier']],
                                    'rb')
                           })
            updated = True
        if updated:
            try:
                pushed = ckan_portal.action.resource_patch(**fields)
                result.append(pushed['id'])
            except CKANAPIError as e:
                # Bug fix: the old message indexed resource_files with a key
                # that is absent for accessURL-only updates (KeyError inside
                # the handler) and passed the whole dict as the second
                # format argument; use .get() and the distribution id.
                logger.exception(
                    "Error subiendo recurso {} a la distribución {}: {}"
                    .format(resource_files.get(distribution['identifier']),
                            distribution['identifier'], str(e)))
    return result
def get_std_start_date(self):
    """Return the start datetime formatted as %Y-%m-%d %H:%M:%S, or ""
    when the stored start is the datetime.min/datetime.max sentinel
    (i.e. the range is not custom)."""
    start, _ = self._val
    if start in (datetime.min, datetime.max):
        return ""
    return start.strftime("%Y-%m-%d %H:%M:%S")
java | public ListStacksRequest withStackStatusFilters(StackStatus... stackStatusFilters) {
com.amazonaws.internal.SdkInternalList<String> stackStatusFiltersCopy = new com.amazonaws.internal.SdkInternalList<String>(stackStatusFilters.length);
for (StackStatus value : stackStatusFilters) {
stackStatusFiltersCopy.add(value.toString());
}
if (getStackStatusFilters() == null) {
setStackStatusFilters(stackStatusFiltersCopy);
} else {
getStackStatusFilters().addAll(stackStatusFiltersCopy);
}
return this;
} |
/**
 * Builds and synchronously executes the "get login" call, returning the
 * HTTP-level response (no body).
 *
 * @param redirectUri URI to redirect to after login
 * @param includeState whether to include the state parameter
 * @return the API response with HTTP info and no payload
 * @throws ApiException if the call fails
 */
public ApiResponse<Void> getLoginWithHttpInfo(String redirectUri, Boolean includeState) throws ApiException {
    com.squareup.okhttp.Call call = getLoginValidateBeforeCall(redirectUri, includeState, null, null);
    return apiClient.execute(call);
}
/**
 * Lazily opens the response stream. When the data is already cached, the
 * cache file is served directly; otherwise the delegate's stream is teed
 * into the cache file and, on close, the cache metadata is persisted only
 * if the delegate answered HTTP 200.
 *
 * @return the (possibly cache-backed) input stream
 * @throws IOException if the cache file or the delegate stream cannot be opened
 */
public InputStream getInputStream() throws IOException {
    if (null == inputStream) {
        if (readFromCache) {
            inputStream = new FileInputStream(cacheFile.toFile());
        } else {
            // Tee: bytes read from the delegate are also written to the cache file.
            WriteCacheFileInputStream wis = new WriteCacheFileInputStream(delegate.getInputStream(),
                    new FileOutputStream(cacheFile.toFile()));
            wis.onInputStreamClose(() -> {
                try {
                    final int responseCode = delegate.getResponseCode();
                    if (responseCode == HTTP_OK) {
                        // Only successful responses are recorded as cached.
                        cache.saveCachedDataInfo(cacheFile, cachedDataInfo);
                    } else {
                        if (logger.isWarnEnabled()) {
                            logger.warn("not caching because of response code {}: {}", responseCode, getURL());
                        }
                    }
                } catch (final IOException e) {
                    if (logger.isWarnEnabled()) {
                        logger.warn("cannot retrieve response code");
                    }
                }
            });
            inputStream = wis;
        }
    }
    return inputStream;
}
def create_ca(self, name, ca_name='', cert_type=crypto.TYPE_RSA, bits=2048,
              alt_names=None, years=5, serial=0, pathlen=0,
              overwrite=False):
    """
    Create a certificate authority

    Arguments: name      - The name of the CA
               ca_name   - Name of an existing CA to sign this one; an
                           empty string creates a self-signed root
               cert_type - The type of the cert. TYPE_RSA or TYPE_DSA
               bits      - The number of bits to use
               alt_names - An array of alternative names in the format:
                           IP:address, DNS:address
               years     - Validity period of the certificate, in years
               serial    - NOTE(review): accepted but unused in this body;
                           confirm whether self.sign applies it
               pathlen   - basicConstraints path length (0 forbids signing
                           intermediates; negative omits the constraint)
               overwrite - Replace any existing store entry with this name
    Returns: KeyCertPair for the new CA
    """
    cakey = self.create_key_pair(cert_type, bits)
    req = self.create_request(cakey, CN=name)
    # Default to self-signing; replaced below when a parent CA is given.
    signing_key = cakey
    signing_cert = req
    parent_ca = ''
    if ca_name:
        ca_bundle = self.store.get_files(ca_name)
        signing_key = ca_bundle.key.load()
        signing_cert = ca_bundle.cert.load()
        parent_ca = ca_bundle.cert.file_path
    basicConstraints = "CA:true"
    # If pathlen is exactly 0, this CA cannot sign intermediaries.
    # A negative value leaves this out entirely and allows arbitrary
    # numbers of intermediates.
    if pathlen >=0:
        basicConstraints += ', pathlen:' + str(pathlen)
    extensions = [
        crypto.X509Extension(
            b"basicConstraints", True, basicConstraints.encode()),
        crypto.X509Extension(
            b"keyUsage", True, b"keyCertSign, cRLSign"),
        crypto.X509Extension(
            b"extendedKeyUsage", True, b"serverAuth, clientAuth"),
        # Deferred extensions: these callables presumably get resolved by
        # self.sign() once the certificate object exists — confirm there.
        lambda cert: crypto.X509Extension(
            b"subjectKeyIdentifier", False, b"hash", subject=cert),
        lambda cert: crypto.X509Extension(
            b"authorityKeyIdentifier", False, b"keyid:always",
            issuer=cert),
    ]
    if alt_names:
        extensions.append(
            crypto.X509Extension(b"subjectAltName",
                                 False, ",".join(alt_names).encode())
        )
    # TODO: start time before today for clock skew?
    cacert = self.sign(
        req, (signing_cert, signing_key), (0, 60*60*24*365*years),
        extensions=extensions)
    x509s = {'key': cakey, 'cert': cacert, 'ca': cacert}
    self.store.add_files(name, x509s, overwrite=overwrite,
                         parent_ca=parent_ca, is_ca=True)
    if ca_name:
        # Record the parent -> child signing relationship.
        self.store.add_sign_link(ca_name, name)
    return self.store.get_record(name)
/**
 * Returns the first commerce tax method matching the given group id, or
 * null when none matches. Delegates to the persistence layer.
 *
 * @param groupId the group id to match
 * @param orderByComparator ordering to apply, or null for the default
 * @return the first matching tax method, or null (Liferay fetch semantics;
 *         confirm against the persistence interface)
 */
public static CommerceTaxMethod fetchByGroupId_First(long groupId,
    OrderByComparator<CommerceTaxMethod> orderByComparator) {
    return getPersistence().fetchByGroupId_First(groupId, orderByComparator);
}
def is_associated_file(self):
    # type: () -> bool
    '''
    A method to determine whether this file is 'associated' with another
    file on the ISO, based on the associated-file bit of file_flags.

    Parameters:
     None.
    Returns:
     Truthy if this file is associated with another file on the ISO,
     falsy otherwise (the raw masked flag value is returned).
    '''
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('Directory Record not yet initialized')
    associated_mask = 1 << self.FILE_FLAG_ASSOCIATED_FILE_BIT
    return self.file_flags & associated_mask
def get_point_index(point, all_points, eps=1e-4):
    """Return the index of the first row of `all_points` within `eps`
    (Euclidean distance) of `point`, or -1 if there is no match."""
    distances = np.linalg.norm(point - all_points, axis=1)
    matches = np.flatnonzero(distances < eps)
    if matches.shape[0] == 0:
        return -1
    return matches[0]
def _dep_changed(self, dep, code_changed=False, value_changed=False):
    """ Called when a dependency's expression has changed.

    Propagates the change notification to this object's own listeners
    via self.changed; the `dep` argument itself is not inspected.
    """
    self.changed(code_changed, value_changed)
java | @Override
public PathImpl schemeWalk(String userPath,
Map<String,Object> attributes,
String filePath,
int offset)
{
if (! isWindows()) {
return super.schemeWalk(userPath, attributes, filePath, offset);
}
String canonicalPath;
if (filePath.length() < offset + 2) {
return super.schemeWalk(userPath, attributes, filePath, offset);
}
char ch1 = filePath.charAt(offset + 1);
char ch2 = filePath.charAt(offset);
if ((ch2 == '/' || ch2 == _separatorChar)
&& (ch1 == '/' || ch1 == _separatorChar))
return super.schemeWalk(userPath, attributes,
convertFromWindowsPath(filePath.substring(offset)), 0);
else
return super.schemeWalk(userPath, attributes, filePath, offset);
} |
/**
 * Returns the list of blocked JIDs, fetching it from the server on first
 * use and serving the cached copy afterwards.
 *
 * @return an unmodifiable view of the blocked JIDs
 * @throws NoResponseException if the server does not answer in time
 * @throws XMPPErrorException if the server returns an error
 * @throws NotConnectedException if the connection is down
 * @throws InterruptedException if the calling thread is interrupted
 */
public List<Jid> getBlockList()
    throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
    // Lazily populate the cache with the server's block list.
    if (blockListCached == null) {
        BlockListIQ blockListIQ = new BlockListIQ();
        BlockListIQ blockListIQResult = connection().createStanzaCollectorAndSend(blockListIQ).nextResultOrThrow();
        blockListCached = blockListIQResult.getBlockedJidsCopy();
    }
    return Collections.unmodifiableList(blockListCached);
}
def file_decrypt_from_key_info( sender_key_info, blockchain_id, key_index, hostname, input_path, output_path, passphrase=None, config_path=CONFIG_PATH, wallet_keys=None ):
    """
    Try to decrypt data with one of the receiver's keys.

    Looks up our own (receiver) key for (blockchain_id, key_index, hostname)
    and attempts a GPG decryption of input_path into output_path using the
    sender's key info for verification.

    Return {'status': True} if we succeeded
    Return {'error': ..., 'status': False} if we failed permanently
    Return {'error': ..., 'status': True} if the key failed, and we should try the next one.
    """
    config_dir = os.path.dirname(config_path)

    # find remote sender
    my_key_info = file_key_lookup( blockchain_id, key_index, hostname, config_path=config_path, wallet_keys=wallet_keys )
    if 'error' in my_key_info:
        log.error("Failed to look up key: %s" % my_key_info['error'])
        # status True: the caller may retry with the next key.
        return {'status': True, 'error': 'Failed to lookup sender key'}

    # decrypt
    res = None
    with open(input_path, "r") as f:
        res = blockstack_gpg.gpg_decrypt( f, output_path, sender_key_info, my_key_info, passphrase=passphrase, config_dir=config_dir )

    if 'error' in res:
        if res['error'] == 'Failed to decrypt data':
            # This particular key did not work; signal "try the next one".
            log.warn("Key %s failed to decrypt" % my_key_info['key_id'])
            return {'status': True, 'error': 'Failed to decrypt with key'}
        else:
            # Any other GPG failure is treated as permanent.
            log.error("Failed to decrypt: %s" % res['error'])
            return {'status': False, 'error': 'GPG error (%s)' % res['error']}

    return {'status': True}
def createOptimizer(self, params, model):
    """
    Create a new optimizer instance for `model` as described by the
    `params` dict: "optimizer" selects SGD or Adam, "learning_rate" sets
    the lr, and SGD additionally reads "momentum" and "weight_decay".
    """
    lr = params["learning_rate"]
    print("Creating optimizer with learning rate=", lr)
    chosen = params["optimizer"]
    if chosen == "SGD":
        return optim.SGD(model.parameters(), lr=lr,
                         momentum=params["momentum"],
                         weight_decay=params["weight_decay"],
                         )
    if chosen == "Adam":
        return optim.Adam(model.parameters(), lr=lr)
    raise LookupError("Incorrect optimizer value")
/**
 * (Re)initializes the heap storage for at most {@code maxSize} elements.
 * One extra slot is allocated because the heap presumably stores elements
 * starting at index 1 — confirm against the sift operations.
 *
 * @param maxSize maximum number of elements the heap may hold
 */
@SuppressWarnings("unchecked")
protected final void initialize(int maxSize) {
    size = 0;
    int heapSize = maxSize + 1;
    // Generic array creation requires the unchecked Object[] cast.
    heap = (T[]) new Object[heapSize];
    this.maxSize = maxSize;
}
def save(self):
    """Persist this object's message to the backing store: upsert keyed
    on (portfolio_cookie, user_cookie), applying message via $set.
    """
    self.client.update(
        {
            'portfolio_cookie': self.portfolio_cookie,
            'user_cookie': self.user_cookie
        },
        {'$set': self.message},
        upsert=True
    )
def pivot_table(self, index, columns, values='value',
                aggfunc='count', fill_value=None, style=None):
    """Returns a pivot table

    Parameters
    ----------
    index: str or list of strings
        rows for Pivot table
    columns: str or list of strings
        columns for Pivot table
    values: str, default 'value'
        dataframe column to aggregate or count
    aggfunc: str or function, default 'count'
        function used for aggregation,
        accepts 'count', 'mean', and 'sum'
    fill_value: scalar, default None
        value to replace missing values with
    style: str, default None
        output style for pivot table formatting
        accepts 'highlight_not_max', 'heatmap'
    """
    # Normalize scalar index/columns arguments into lists.
    index = [index] if isstr(index) else index
    columns = [columns] if isstr(columns) else columns

    df = self.data

    # allow 'aggfunc' to be passed as string for easier user interface
    if isstr(aggfunc):
        if aggfunc == 'count':
            df = self.data.groupby(index + columns, as_index=False).count()
            fill_value = 0
        elif aggfunc == 'mean':
            # Pre-average per group, then pivot with np.sum.
            # NOTE(review): summing the per-group means looks intentional —
            # each group presumably collapses to one row here — confirm.
            df = self.data.groupby(index + columns, as_index=False).mean()\
                .round(2)
            aggfunc = np.sum
            fill_value = 0 if style == 'heatmap' else ""
        elif aggfunc == 'sum':
            aggfunc = np.sum
            fill_value = 0 if style == 'heatmap' else ""

    df = df.pivot_table(values=values, index=index, columns=columns,
                        aggfunc=aggfunc, fill_value=fill_value)
    return df
/**
 * Builds partial service-ref info from the {@code @WebServiceClient}
 * annotation on the given interface, or returns null when the annotation
 * is absent. Also records a handler chain when {@code @HandlerChain} is
 * present on the same class.
 *
 * @param serviceInterfaceClass the candidate service interface
 * @return the partial info, or null if the class is not a web service client
 */
private static WebServiceRefPartialInfo buildPartialInfoFromWebServiceClient(Class<?> serviceInterfaceClass) {
    WebServiceClient webServiceClient = serviceInterfaceClass.getAnnotation(WebServiceClient.class);
    if (webServiceClient == null) {
        return null;
    }

    String className = serviceInterfaceClass.getName();
    String wsdlLocation = webServiceClient.wsdlLocation();
    QName serviceQName = null;
    String localPart = webServiceClient.name();
    // NOTE(review): annotation name() defaults to "" and cannot be null,
    // so this guard looks dead — confirm before removing.
    if (localPart != null) {
        serviceQName = new QName(webServiceClient.targetNamespace(), localPart);
    }

    String handlerChainDeclaringClassName = null;
    javax.jws.HandlerChain handlerChainAnnotation = serviceInterfaceClass.getAnnotation(javax.jws.HandlerChain.class);
    if (handlerChainAnnotation != null)
        handlerChainDeclaringClassName = serviceInterfaceClass.getName();

    WebServiceRefPartialInfo partialInfo = new WebServiceRefPartialInfo(className, wsdlLocation, serviceQName, null, handlerChainDeclaringClassName, handlerChainAnnotation);
    return partialInfo;
}
java | public HistogramVisual asVisual()
{
float[] visualCounts = new float[bins.length - 2];
for (int i = 0; i < visualCounts.length; ++i) {
visualCounts[i] = (float) bins[i + 1];
}
return new HistogramVisual(breaks, visualCounts, new float[]{min, max});
} |
/**
 * Rewrites every single-argument setter whose parameter type is a
 * generated "ArrayOfXxx" wrapper into a setter that accepts a
 * {@code List<Xxx>} and builds the wrapper internally.
 *
 * @param co the class outline being post-processed
 * @param model the code model used to resolve referenced types
 */
private void updateArrayOfSetters(ClassOutline co, JCodeModel model) {
    JDefinedClass implClass = co.implClass;
    List<JMethod> removedMethods = new ArrayList<>();
    Iterator<JMethod> iter = implClass.methods().iterator();
    // Collect and remove every setter taking an ArrayOf* parameter.
    while (iter.hasNext()) {
        JMethod method = iter.next();
        if (method.params().size() == 1 && method.params().get(0).type().name().startsWith("ArrayOf")) {
            removedMethods.add(method);
            iter.remove();
        }
    }
    for (JMethod removed : removedMethods) {
        // Parse the old code to get the variable name
        StringWriter oldWriter = new StringWriter();
        removed.body().state(new JFormatter(oldWriter));
        String oldBody = oldWriter.toString();
        // Extract the target field from the generated "this.<field> = ..." text.
        String varName = oldBody.substring(oldBody.indexOf("this.") + "this.".length(), oldBody.indexOf(" = "));
        // Build the new method
        JType arrType = removed.params().get(0).type();
        String type = arrType.name().substring("ArrayOf".length());
        JFieldVar field = implClass.fields().get(varName);
        // The ArrayOf* wrapper presumably has exactly one field (its inner
        // list) — confirm against the schema-generated classes.
        String fieldName = model._getClass(field.type().fullName()).fields().keySet().iterator().next();
        JMethod newMethod = implClass.method(removed.mods().getValue(), Void.TYPE, removed.name());
        newMethod.param(model.ref("java.util.List").narrow(model.ref(type)), "value");
        newMethod.body().decl(arrType, "arr", JExpr._new(arrType));
        newMethod.body().directStatement("arr.get" + fieldName.substring(0, 1).toUpperCase() + fieldName.substring(1) + "().addAll(value);");
        newMethod.body().directStatement("this." + varName + " = arr;");
    }
}
def _(mcs, cls_name="Object", with_meta=None):
    """ Method to generate real metaclass to be used::

            mc = ExtensibleType._("MyClass")  # note this line

            @six.add_metaclass(mc)
            class MyClassBase(object):
                pass

        :param str cls_name: name of generated class
        :param class with_meta: Mix aditional metaclass in.
                                (default: None)
        :return: specific metaclass to track new inheritance tree
    """
    # Each call creates a fresh metaclass with its own _base_classes
    # registry, so separate inheritance trees do not share state.
    if with_meta is not None:
        class EXType(with_meta, mcs):
            _cls_name = cls_name
            _base_classes = []
            _generated_class = None
    else:
        class EXType(mcs):
            _cls_name = cls_name
            _base_classes = []
            _generated_class = None
    return EXType
def clear(self):
    """Completely clear a Node of all its cached state (so that it
    can be re-evaluated by interfaces that do continuous integration
    builds).
    """
    # The del_binfo() call here isn't necessary for normal execution,
    # but is for interactive mode, where we might rebuild the same
    # target and need to start from scratch.
    self.del_binfo()
    self.clear_memoized_values()
    self.ninfo = self.new_ninfo()
    self.executor_cleanup()
    # _calculated_sig may never have been set; its absence is fine.
    try:
        delattr(self, '_calculated_sig')
    except AttributeError:
        pass
    self.includes = None
python | def _compute_inter_event_std(self, C, C_PGA, pga1100, mag, vs30):
"""
Compute inter event standard deviation, equation 25, page 82.
"""
tau_0 = self._compute_std_0(C['s3'], C['s4'], mag)
tau_b_pga = self._compute_std_0(C_PGA['s3'], C_PGA['s4'], mag)
delta_amp = self._compute_partial_derivative_site_amp(C, pga1100, vs30)
std_inter = np.sqrt(tau_0 ** 2 + (delta_amp ** 2) * (tau_b_pga ** 2) +
2 * delta_amp * tau_0 * tau_b_pga * C['rho'])
return std_inter |
/**
 * Marshalls the given Dimensions object onto the protocol marshaller,
 * binding its queue and channel fields.
 *
 * @param dimensions the object to marshall; must not be null
 * @param protocolMarshaller the marshaller to write to
 * @throws SdkClientException if the input is null or marshalling fails
 */
public void marshall(Dimensions dimensions, ProtocolMarshaller protocolMarshaller) {
    if (dimensions == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(dimensions.getQueue(), QUEUE_BINDING);
        protocolMarshaller.marshall(dimensions.getChannel(), CHANNEL_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
java | private static final String trimFirstWord(String s) {
s = s.trim();
char[] chars = s.toCharArray();
int firstSpace = -1;
for (int i = 0; i < chars.length; i++) {
char c = chars[i];
if (Character.isWhitespace(c)) {
firstSpace = i;
break;
}
}
if (firstSpace == -1) {
return s;
}
s = s.substring(firstSpace).trim();
s = capitalize(s);
return s;
} |
java | @Override
public boolean accept(File file) {
return file != null ? match(file.getName(), pattern) : false;
} |
def registContact(self, CorpNum, ContactInfo, UserID=None):
    """ Register an additional contact person for the member account.
        args
            CorpNum : member company registration number
            ContactInfo : contact information, Reference ContactInfo class
            UserID : member user id
        return
            processing result. consist of code and message
        raise
            PopbillException
    """
    # Serialize the contact info and POST it to the contacts endpoint.
    postData = self._stringtify(ContactInfo)

    return self._httppost('/IDs/New', postData, CorpNum, UserID)
/**
 * Converts a raw repository result into the requested collection type:
 * Iterator, List, Set, or any concrete (non-interface) collection class.
 * Other interfaces are rejected.
 *
 * @param result the raw result to convert
 * @param collectionType the requested collection class or interface
 * @param targetEntity the element type used during conversion
 * @param projection whether projection conversion applies to elements
 * @return the converted collection
 * @throws ResultConversionException when the requested interface is unsupported
 */
@SuppressWarnings("unchecked")
public static Object convertToCollection(final Object result, final Class collectionType,
                                         final Class targetEntity, final boolean projection) {
    final Object converted;
    if (collectionType.equals(Iterator.class)) {
        converted = toIterator(result, targetEntity, projection);
    } else if (collectionType.isAssignableFrom(List.class)) {
        converted = Lists.newArrayList(toIterator(result, targetEntity, projection));
    } else if (collectionType.isAssignableFrom(Set.class)) {
        converted = Sets.newHashSet(toIterator(result, targetEntity, projection));
    } else if (!collectionType.isInterface()) {
        // A concrete collection class: instantiate it reflectively.
        converted = convertToCollectionImpl(result, collectionType, targetEntity, projection);
    } else {
        throw new ResultConversionException(String.format(
                "Incompatible result type requested %s for conversion from actual result %s",
                collectionType, result.getClass()));
    }
    return converted;
}
/**
 * Marshalls the given BatchDescribeSimulationJob request onto the
 * protocol marshaller, binding its jobs field.
 *
 * @param batchDescribeSimulationJobRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller to write to
 * @throws SdkClientException if the input is null or marshalling fails
 */
public void marshall(BatchDescribeSimulationJobRequest batchDescribeSimulationJobRequest, ProtocolMarshaller protocolMarshaller) {
    if (batchDescribeSimulationJobRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        protocolMarshaller.marshall(batchDescribeSimulationJobRequest.getJobs(), JOBS_BINDING);
    } catch (Exception e) {
        // Wrap any marshalling failure in the SDK's client exception type.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
/**
 * Creates the screen for this record. In maintenance mode the edit is
 * redirected to the referenced AnnivMaster record (this table cannot be
 * changed directly); otherwise the default screen is created.
 *
 * @param itsLocation screen location
 * @param parentScreen parent component
 * @param iDocMode document mode flags (see ScreenConstants)
 * @param properties extra screen properties
 * @return the created screen
 */
public ScreenParent makeScreen(ScreenLoc itsLocation, ComponentParent parentScreen, int iDocMode, Map<String,Object> properties)
{
    ScreenParent screen = null;
    if ((iDocMode & ScreenConstants.MAINT_MODE) == ScreenConstants.MAINT_MODE)
    {   // This is a little weird... can't directly change this table, must edit AnnivMaster
        Record recAnnivMaster = ((ReferenceField)this.getField(Anniversary.ANNIV_MASTER_ID)).getReferenceRecord(this.getRecordOwner());
        // Clear the read-only bit so the master record can be edited.
        recAnnivMaster.setOpenMode(recAnnivMaster.getOpenMode() & ~DBConstants.OPEN_READ_ONLY);
        if ((this.getEditMode() == DBConstants.EDIT_CURRENT)
            || (this.getEditMode() == DBConstants.EDIT_IN_PROGRESS))
                recAnnivMaster = ((ReferenceField)this.getField(Anniversary.ANNIV_MASTER_ID)).getReference();
        // Disconnect recAnnivMaster and free this
        ((ReferenceField)this.getField(Anniversary.ANNIV_MASTER_ID)).setReferenceRecord(null);
        this.free();
        // Delegate screen creation to the master record.
        return recAnnivMaster.makeScreen(itsLocation, parentScreen, iDocMode, properties);
    }
    else
        screen = super.makeScreen(itsLocation, parentScreen, iDocMode, properties);
    return screen;
}
def get_profile_model():
    """
    Returns configured user profile model or None if not found
    """
    user_profile_module = getattr(settings, 'USER_PROFILE_MODULE', None)
    if not user_profile_module:
        return None
    app_label, model_name = user_profile_module.split('.')
    return get_model(app_label, model_name)
python | def _detect_encoding(self, source_file):
"""Detect encoding."""
encoding = self._guess(source_file)
# If we didn't explicitly detect an encoding, assume default.
if encoding is None:
encoding = self.default_encoding
return encoding |
java | public static double[] getDoubleData(DataBuffer buf) {
if (buf.allocationMode() == DataBuffer.AllocationMode.HEAP)
return buf.asDouble();
else {
double[] ret = new double[(int) buf.length()];
for (int i = 0; i < buf.length(); i++)
ret[i] = buf.getDouble(i);
return ret;
}
} |
/**
 * Returns the configured Solr index name. When the JSON config lacks the
 * index key, falls back to the base configuration's index, or null when
 * no base configuration exists.
 *
 * @return the index name, or null if unset everywhere
 */
protected String getIndex() {
    try {
        return m_configObject.getString(JSON_KEY_INDEX);
    } catch (JSONException e) {
        // Missing key: either fall back to the base config or give up.
        if (null == m_baseConfig) {
            if (LOG.isInfoEnabled()) {
                LOG.info(Messages.get().getBundle().key(Messages.LOG_NO_INDEX_SPECIFIED_0), e);
            }
            return null;
        } else {
            return m_baseConfig.getGeneralConfig().getSolrIndex();
        }
    }
}
/**
 * Returns the (possibly colorized) image for the given key, loading and
 * caching it on first use. A failed load is replaced with a small blank
 * indexed image rather than null.
 *
 * @param key identifies the image to load
 * @param zations colorizations to apply, or null/empty for none
 * @return the image, never null
 */
public BufferedImage getImage (ImageKey key, Colorization[] zations)
{
    CacheRecord crec = null;
    synchronized (_ccache) {
        crec = _ccache.get(key);
    }
    if (crec != null) {
        // log.info("Cache hit", "key", key, "crec", crec);
        return crec.getImage(zations, _ccache);
    }
    // log.info("Cache miss", "key", key, "crec", crec);

    // load up the raw image
    // NOTE(review): the load below happens outside the lock, so two threads
    // may load the same key concurrently; the second put wins. Confirm this
    // is acceptable before changing.
    BufferedImage image = loadImage(key);
    if (image == null) {
        log.warning("Failed to load image " + key + ".");
        // create a blank image instead
        image = new BufferedImage(10, 10, BufferedImage.TYPE_BYTE_INDEXED);
    }

    // log.info("Loaded Image", "path", key.path, "image", image,
    //          "size", ImageUtil.getEstimatedMemoryUsage(image));

    // create a cache record
    crec = new CacheRecord(key, image);
    synchronized (_ccache) {
        _ccache.put(key, crec);
    }
    _keySet.add(key);

    // periodically report our image cache performance
    reportCachePerformance();

    return crec.getImage(zations, _ccache);
}
def rgev(xi, mu=0, sigma=1, size=None):
    """
    Random generalized extreme value (GEV) variates, produced by
    inverse-transform sampling of the GEV quantile function.
    """
    uniforms = np.random.uniform(size=size)
    standard = flib.gev_ppf(uniforms, xi)
    return standard * sigma + mu
def launch():
    """Launch the experiment.

    Initializes the database, opens recruitment for the initial cohort,
    commits both sessions, and returns a success response.
    """
    exp = experiment(db.init_db(drop_all=False))
    exp.log("Launching experiment...", "-----")
    init_db()
    exp.recruiter().open_recruitment(n=exp.initial_recruitment_size)
    session_psiturk.commit()
    session.commit()
    return success_response(request_type="launch")
def logout(lancet, service):
    """Forget saved passwords for the web services.

    When a specific service is given only that one is cleared; otherwise
    both 'tracker' and 'harvest' credentials are removed from the keyring.
    """
    if service:
        services = [service]
    else:
        services = ['tracker', 'harvest']

    for service in services:
        url = lancet.config.get(service, 'url')
        key = 'lancet+{}'.format(url)
        username = lancet.config.get(service, 'username')
        with taskstatus('Logging out from {}', url) as ts:
            # Deleting a missing credential would raise, so check first.
            if keyring.get_password(key, username):
                keyring.delete_password(key, username)
                ts.ok('Logged out from {}', url)
            else:
                ts.ok('Already logged out from {}', url)
def add(self, *args):
    """Add linear constraints to the underlying problem and record their
    handles on this object so they can be referenced later."""
    self._constrs.extend(self._moma._prob.add_linear_constraints(*args))
def create_build_configuration_process(repository, revision, **kwargs):
    """
    Create a new BuildConfiguration. BuildConfigurations represent the settings and configuration required to run a build of a specific version of the associated Project's source code.
    If a ProductVersion ID is provided, the BuildConfiguration will have access to artifacts which were produced for that version, but may not have been released yet.

    :return BPM Task ID of the new BuildConfiguration creation
    """
    # Normalize optional list-valued kwargs to empty lists.
    if not kwargs.get("dependency_ids"):
        kwargs["dependency_ids"] = []
    if not kwargs.get("build_configuration_set_ids"):
        kwargs["build_configuration_set_ids"] = []
    # generic_parameters arrives as a Python-literal string (e.g. a dict).
    # NOTE(review): ast.literal_eval is safe for literals but will raise on
    # malformed input — confirm callers validate this value.
    if kwargs.get("generic_parameters"):
        kwargs["generic_parameters"] = ast.literal_eval(kwargs.get("generic_parameters"))
    # Resolve project / environment objects from their ids when not given.
    if not kwargs.get("project"):
        kwargs["project"] = pnc_api.projects.get_specific(kwargs.get("project_id")).content
    if not kwargs.get("environment"):
        kwargs["environment"] = pnc_api.environments.get_specific(kwargs.get("build_environment_id")).content

    build_configuration = create_build_conf_object(scm_revision=revision, **kwargs)
    repo_creation = swagger_client.RepositoryCreationUrlAutoRest()
    repo_creation.scm_url = repository
    repo_creation.build_configuration_rest = build_configuration

    response = utils.checked_api_call(
        pnc_api.bpm, 'start_r_creation_task_with_single_url', body=repo_creation)
    if response:
        return response
java | private List<CmsCategory> internalReadSubCategories(CmsObject cms, String rootPath, boolean includeSubCats)
throws CmsException {
List<CmsCategory> categories = new ArrayList<CmsCategory>();
List<CmsResource> resources = cms.readResources(
cms.getRequestContext().removeSiteRoot(rootPath),
CmsResourceFilter.DEFAULT.addRequireType(CmsResourceTypeFolder.RESOURCE_TYPE_ID),
includeSubCats);
Iterator<CmsResource> it = resources.iterator();
while (it.hasNext()) {
CmsResource resource = it.next();
categories.add(getCategory(cms, resource));
}
return categories;
} |
java | public void emphasizePoint( int index )
{
if( dots == null || dots.length < (index - 1) )
return; // impossible !
// if no change, nothing to do
if( emphasizedPoint == index )
return;
// de-emphasize the current emphasized point
if( emphasizedPoint >= 0 )
{
dots[emphasizedPoint].attr( "r", dotNormalSize );
emphasizedPoint = -1;
}
if( index >= 0 )
{
dots[index].attr( "r", dotBigSize );
emphasizedPoint = index;
}
} |
def draw_arith(ax, p0, size=1, alpha=0, arith=None, format=None,
               fontsize=10, **kwds):
    r"""Draw an arithmetic operator symbol on the given axes.

    A closed triangular outline (rotated by ``alpha`` radians and
    translated to ``p0``) is plotted on ``ax``; when ``arith`` is given,
    that text is rendered centered inside the shape.
    """
    line_format = 'k-' if format is None else format
    half = size / 2.0
    # Closed triangle outline (last vertex repeats the first to close it),
    # defined before rotation/translation.
    xs = [0, 2.5 * half, 0, 0]
    ys = [half, 0, -half, half]
    for curve in rotate_and_traslate([(xs, ys)], alpha, p0):
        ax.plot(curve[0], curve[1], line_format, **kwds)
    if arith is not None:
        pyplot.text(p0[0] + 0.75 * half, p0[1], arith,
                    horizontalalignment='center',
                    verticalalignment='center', fontsize=fontsize)
def make_module(self, vars=None, shared=False, locals=None):
    """Evaluate the template and wrap it in a :class:`TemplateModule`.

    Unlike the cached :attr:`module` attribute this builds a fresh
    module on every call.  ``vars``, ``shared`` and ``locals`` are
    forwarded unchanged to :meth:`new_context` and may supply an
    explicit evaluation context.
    """
    context = self.new_context(vars, shared, locals)
    return TemplateModule(self, context)
java | protected CompensatingTransactionHolderSupport getNewHolder() {
DirContext newCtx = getContextSource().getReadWriteContext();
return new DirContextHolder(
new DefaultCompensatingTransactionOperationManager(
new LdapCompensatingTransactionOperationFactory(
renamingStrategy)), newCtx);
} |
def setup_app(self, app, add_context_processor=True):  # pragma: no cover
    '''
    Deprecated alias for :meth:`LoginManager.init_app`.

    Emits a :class:`DeprecationWarning` and then delegates all
    arguments unchanged to :meth:`init_app`.
    '''
    warnings.warn('Warning setup_app is deprecated. Please use init_app.',
                  DeprecationWarning)
    self.init_app(app, add_context_processor)
java | private static int normalizeInt(int input, int localMax, int maxOutput) {
double ln = Math.log(localMax);
if(input == 0) {
return 0;
} else if(input == 1) {
return 1;
} else {
double iln = Math.log(input);
double pct = iln / ln;
double num = pct * maxOutput;
int idx = (int) num;
// System.out.format("%d - %f - %f - %f - %f : %d\n",
// input,ln,iln,pct,num,idx);
if(input < idx) {
return input;
} else {
return idx;
}
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.