_id (string, 2-7 chars) | title (string, 1-88 chars) | partition (3 classes) | text (string, 31-13.1k chars) | language (1 class) | meta_information (dict)
---|---|---|---|---|---|
q278200
|
HitClusterizer.set_hit_dtype
|
test
|
def set_hit_dtype(self, hit_dtype):
''' Set the data type of the hits.
Fields that are not mentioned here are NOT copied into the clustered hits array.
Clusterizer has to know the hit data type to produce the clustered hit result with the same data types.
Parameters:
-----------
hit_dtype : numpy.dtype or equivalent
Defines the dtype of the hit array.
Example:
--------
hit_dtype = [("column", np.uint16), ("row", np.uint16)], where
"column", "row" is the field name of the input hit array.
'''
if not hit_dtype:
hit_dtype = np.dtype([])
|
python
|
{
"resource": ""
}
|
q278201
|
HitClusterizer.set_cluster_dtype
|
test
|
def set_cluster_dtype(self, cluster_dtype):
''' Set the data type of the cluster.
Parameters:
-----------
cluster_dtype : numpy.dtype or equivalent
Defines the dtype of the cluster array.
'''
if not cluster_dtype:
cluster_dtype = np.dtype([])
else:
cluster_dtype = np.dtype(cluster_dtype)
|
python
|
{
"resource": ""
}
|
q278202
|
HitClusterizer._check_struct_compatibility
|
test
|
def _check_struct_compatibility(self, hits):
''' Takes the hit array and checks if the important data fields have the same data type than the hit clustered array and that the field names are correct.'''
for key, _ in self._cluster_hits_descr:
if key in self._hit_fields_mapping_inverse:
mapped_key = self._hit_fields_mapping_inverse[key]
else:
mapped_key = key
# Only check hit fields that contain hit information
if mapped_key in ['cluster_ID', 'is_seed', 'cluster_size', 'n_cluster']:
continue
if key not in hits.dtype.names:
raise TypeError('Required hit
|
python
|
{
"resource": ""
}
|
q278203
|
add_ruleclause_name
|
test
|
def add_ruleclause_name(self, ns_name, rid) -> bool:
"""Create
|
python
|
{
"resource": ""
}
|
q278204
|
add_rules
|
test
|
def add_rules(self, bnf, r) -> bool:
"""Attach a parser tree to the dict of rules"""
|
python
|
{
"resource": ""
}
|
q278205
|
add_rule
|
test
|
def add_rule(self, rule, rn, alts) -> bool:
"""Add the rule name"""
rule.rulename = self.value(rn)
|
python
|
{
"resource": ""
}
|
q278206
|
add_sequences
|
test
|
def add_sequences(self, sequences, cla) -> bool:
"""Create a tree.Seq"""
if not hasattr(sequences, 'parser_tree'):
# forward sublevel of sequence as is
sequences.parser_tree = cla.parser_tree
else:
oldnode = sequences
if isinstance(oldnode.parser_tree, parsing.Seq):
|
python
|
{
"resource": ""
}
|
q278207
|
add_alt
|
test
|
def add_alt(self, alternatives, alt) -> bool:
"""Create a tree.Alt"""
if not hasattr(alternatives, 'parser_tree'):
# forward sublevel of alt as is
if hasattr(alt, 'parser_tree'):
alternatives.parser_tree = alt.parser_tree
|
python
|
{
"resource": ""
}
|
q278208
|
add_range
|
test
|
def add_range(self, sequence, begin, end):
"""Add a read_range primitive"""
sequence.parser_tree =
|
python
|
{
"resource": ""
}
|
q278209
|
add_rpt
|
test
|
def add_rpt(self, sequence, mod, pt):
"""Add a repeater to the previous sequence"""
modstr = self.value(mod)
if modstr == '!!':
# cursor on the REPEATER
self._stream.restore_context()
# log the error
self.diagnostic.notify(
error.Severity.ERROR,
"Cannot repeat a lookahead rule",
error.LocationInfo.from_stream(self._stream, is_error=True)
)
raise self.diagnostic
if modstr == '!':
# cursor on the REPEATER
self._stream.restore_context()
# log the error
self.diagnostic.notify(
|
python
|
{
"resource": ""
}
|
q278210
|
add_capture
|
test
|
def add_capture(self, sequence, cpt):
"""Create a tree.Capture"""
cpt_value
|
python
|
{
"resource": ""
}
|
q278211
|
add_bind
|
test
|
def add_bind(self, sequence, cpt):
"""Create a tree.Bind"""
cpt_value
|
python
|
{
"resource": ""
}
|
q278212
|
add_hook
|
test
|
def add_hook(self, sequence, h):
"""Create a tree.Hook"""
|
python
|
{
"resource": ""
}
|
q278213
|
param_num
|
test
|
def param_num(self, param, n):
"""Parse a int in
|
python
|
{
"resource": ""
}
|
q278214
|
param_str
|
test
|
def param_str(self, param, s):
"""Parse a str in parameter
|
python
|
{
"resource": ""
}
|
q278215
|
param_char
|
test
|
def param_char(self, param, c):
"""Parse a char in
|
python
|
{
"resource": ""
}
|
q278216
|
param_id
|
test
|
def param_id(self, param, i):
"""Parse a node name in
|
python
|
{
"resource": ""
}
|
q278217
|
hook_name
|
test
|
def hook_name(self, hook, n):
"""Parse a hook name"""
|
python
|
{
"resource": ""
}
|
q278218
|
hook_param
|
test
|
def hook_param(self, hook, p):
"""Parse a hook parameter"""
|
python
|
{
"resource": ""
}
|
q278219
|
EBNF.get_rules
|
test
|
def get_rules(self) -> parsing.Node:
"""
Parse the DSL and provide a dictionnaries of all resulting rules.
Call by the MetaGrammar class.
TODO: could be done in the rules property of parsing.BasicParser???
"""
res = None
try:
res = self.eval_rule('bnf_dsl')
if not res:
# we fail to parse, but error is not set
self.diagnostic.notify(
error.Severity.ERROR,
|
python
|
{
"resource": ""
}
|
q278220
|
ignore_cxx
|
test
|
def ignore_cxx(self) -> bool:
"""Consume comments and whitespace characters."""
self._stream.save_context()
while not self.read_eof():
idxref = self._stream.index
if self._stream.peek_char in " \t\v\f\r\n":
while (not self.read_eof()
and self._stream.peek_char in " \t\v\f\r\n"):
self._stream.incpos()
if self.peek_text("//"):
while not self.read_eof() and not self.peek_char("\n"):
self._stream.incpos()
if not self.read_char("\n") and self.read_eof():
return self._stream.validate_context()
|
python
|
{
"resource": ""
}
|
q278221
|
StateRegister.add_state
|
test
|
def add_state(self, s: State):
"""
all state in the register have a uid
|
python
|
{
"resource": ""
}
|
q278222
|
StateRegister.to_dot
|
test
|
def to_dot(self) -> str:
"""
Provide a '.dot' representation of all State in the register.
"""
txt = ""
txt += "digraph S%d {\n" % id(self)
if self.label is not None:
txt += '\tlabel="%s";\n' % (self.label + '\l').replace('\n', '\l')
txt += "\trankdir=LR;\n"
#txt += '\tlabelloc="t";\n'
txt += '\tgraph [labeljust=l, labelloc=t,
|
python
|
{
"resource": ""
}
|
q278223
|
StateRegister.to_dot_file
|
test
|
def to_dot_file(self, fname: str):
"""
write a '.dot' file.
"""
|
python
|
{
"resource": ""
}
|
q278224
|
StateRegister.to_png_file
|
test
|
def to_png_file(self, fname: str):
"""
write a '.png' file.
"""
cmd = pipes.Template()
cmd.append('dot -Tpng > %s' % fname, '-.')
|
python
|
{
"resource": ""
}
|
q278225
|
StateRegister.to_fmt
|
test
|
def to_fmt(self) -> str:
"""
Provide a useful representation of the register.
"""
infos = fmt.end(";\n", [])
s = fmt.sep(', ', [])
for ids in sorted(self.states.keys()):
s.lsdata.append(str(ids))
infos.lsdata.append(fmt.block('(', ')', [s]))
infos.lsdata.append("events:" + repr(self.events))
infos.lsdata.append(
|
python
|
{
"resource": ""
}
|
q278226
|
State.nextstate
|
test
|
def nextstate(self, newstate, treenode=None, user_data=None):
"""
Manage transition of state.
"""
if newstate is None:
return self
if isinstance(newstate, State) and id(newstate) != id(self):
return newstate
elif isinstance(newstate, StateEvent):
self.state_register.named_events[newstate.name] = True
|
python
|
{
"resource": ""
}
|
q278227
|
LivingContext.resetLivingState
|
test
|
def resetLivingState(self):
"""Only one Living State on the S0 of each StateRegister"""
# TODO: add some test to control number of instanciation of LivingState
# clean all living state on S0
must_delete = []
l = len(self.ls)
for idx, ls in zip(range(l), self.ls):
|
python
|
{
"resource": ""
}
|
q278228
|
Inference.infer_block
|
test
|
def infer_block(self, body, diagnostic=None):
"""
Infer type on block is to type each of is sub-element
"""
|
python
|
{
"resource": ""
}
|
q278229
|
Inference.infer_subexpr
|
test
|
def infer_subexpr(self, expr, diagnostic=None):
"""
Infer type on the subexpr
"""
|
python
|
{
"resource": ""
}
|
q278230
|
Inference.infer_id
|
test
|
def infer_id(self, ident, diagnostic=None):
"""
Infer type from an ID!
- check if ID is declarated in the scope
- if no ID is polymorphic type
"""
# check if ID is declared
#defined = self.type_node.get_by_symbol_name(ident)
|
python
|
{
"resource": ""
}
|
q278231
|
Inference.infer_literal
|
test
|
def infer_literal(self, args, diagnostic=None):
"""
Infer type from an LITERAL!
Type of literal depend of language.
We adopt a basic convention
"""
literal, t = args
|
python
|
{
"resource": ""
}
|
q278232
|
dump_nodes
|
test
|
def dump_nodes(self):
"""
Dump tag,rule,id and value cache. For debug.
example::
R = [
#dump_nodes
]
"""
print("DUMP NODE LOCAL INFOS")
try:
print("map Id->node name")
for k, v in self.id_cache.items():
print("[%d]=%s" % (k, v))
print("map tag->capture infos")
for k, v in self.tag_cache.items():
print("[%s]=%s"
|
python
|
{
"resource": ""
}
|
q278233
|
parserrule_topython
|
test
|
def parserrule_topython(parser: parsing.BasicParser,
rulename: str) -> ast.FunctionDef:
"""Generates code for a rule.
def rulename(self):
<code for the rule>
return True
"""
visitor = RuleVisitor()
rule = parser._rules[rulename]
fn_args = ast.arguments([ast.arg('self', None)], None, None, [], None,
|
python
|
{
"resource": ""
}
|
q278234
|
RuleVisitor.__exit_scope
|
test
|
def __exit_scope(self) -> ast.stmt:
"""Create the appropriate scope exiting statement.
The documentation only shows one level and always uses
'return False' in examples.
'raise AltFalse()' within a try.
'break' within a loop.
'return False' otherwise.
"""
if self.in_optional:
return ast.Pass()
if self.in_try:
return ast.Raise(
|
python
|
{
"resource": ""
}
|
q278235
|
RuleVisitor._clause
|
test
|
def _clause(self, pt: parsing.ParserTree) -> [ast.stmt]:
"""Normalize a test expression into a statements list.
Statements list are returned as-is.
Expression is packaged as:
if not expr:
return False
|
python
|
{
"resource": ""
}
|
q278236
|
RuleVisitor.visit_Call
|
test
|
def visit_Call(self, node: parsing.Call) -> ast.expr:
"""Generates python code calling the function.
fn(*args)
"""
return ast.Call(
ast.Attribute(
ast.Name('self', ast.Load),
|
python
|
{
"resource": ""
}
|
q278237
|
RuleVisitor.visit_CallTrue
|
test
|
def visit_CallTrue(self, node: parsing.CallTrue) -> ast.expr:
"""Generates python code calling the function and returning True.
lambda: fn(*args) or True
"""
return ast.Lambda(
ast.arguments([], None, None, [], None, None, [], []),
|
python
|
{
"resource": ""
}
|
q278238
|
RuleVisitor.visit_Hook
|
test
|
def visit_Hook(self, node: parsing.Hook) -> ast.expr:
"""Generates python code calling a hook.
self.evalHook('hookname', self.ruleNodes[-1])
"""
return ast.Call(
ast.Attribute(
ast.Name('self', ast.Load()), 'evalHook', ast.Load()),
[
ast.Str(node.name),
ast.Subscript(
ast.Attribute(
|
python
|
{
"resource": ""
}
|
q278239
|
RuleVisitor.visit_Rule
|
test
|
def visit_Rule(self, node: parsing.Rule) -> ast.expr:
"""Generates python code calling a rule.
self.evalRule('rulename')
|
python
|
{
"resource": ""
}
|
q278240
|
RuleVisitor.visit_Capture
|
test
|
def visit_Capture(self, node: parsing.Capture) -> [ast.stmt] or ast.expr:
"""Generates python code to capture text consumed by a clause.
#If all clauses can be inlined
self.beginTag('tagname') and clause and self.endTag('tagname')
if not self.beginTag('tagname'):
return False
<code for the clause>
if not self.endTag('tagname'):
return False
"""
begintag = ast.Attribute(
ast.Name('self', ast.Load()), 'beginTag', ast.Load())
endtag = ast.Attribute(
ast.Name('self', ast.Load()), 'endTag', ast.Load())
begin = ast.Call(begintag, [ast.Str(node.tagname)], [], None, None)
end = ast.Call(endtag, [ast.Str(node.tagname)], [],
|
python
|
{
"resource": ""
}
|
q278241
|
RuleVisitor.visit_Scope
|
test
|
def visit_Scope(self, node: parsing.Capture) -> [ast.stmt] or ast.expr:
"""Generates python code for a scope.
if not self.begin():
return False
res = self.pt()
if not self.end():
|
python
|
{
"resource": ""
}
|
q278242
|
RuleVisitor.visit_Alt
|
test
|
def visit_Alt(self, node: parsing.Alt) -> [ast.stmt]:
"""Generates python code for alternatives.
try:
try:
<code for clause> #raise AltFalse when alternative is False
raise AltTrue()
except AltFalse:
pass
return False
except AltTrue:
pass
"""
clauses = [self.visit(clause) for clause in node.ptlist]
for clause in clauses:
if not isinstance(clause, ast.expr):
break
else:
return ast.BoolOp(ast.Or(), clauses)
res = ast.Try([], [ast.ExceptHandler(
ast.Name('AltTrue', ast.Load()), None, [ast.Pass()])], [], [])
|
python
|
{
"resource": ""
}
|
q278243
|
RuleVisitor.visit_Seq
|
test
|
def visit_Seq(self, node: parsing.Seq) -> [ast.stmt] or ast.expr:
"""Generates python code for clauses.
#Continuous clauses which can can be inlined are combined with and
clause and clause
if not clause:
return False
if not clause:
return False
"""
exprs, stmts = [], []
for clause in node.ptlist:
clause_ast = self.visit(clause)
if isinstance(clause_ast, ast.expr):
exprs.append(clause_ast)
else:
|
python
|
{
"resource": ""
}
|
q278244
|
RuleVisitor.visit_RepOptional
|
test
|
def visit_RepOptional(self, node: parsing.RepOptional) -> ([ast.stmt] or
ast.expr):
"""Generates python code for an optional clause.
<code for the clause>
"""
cl_ast = self.visit(node.pt)
if
|
python
|
{
"resource": ""
}
|
q278245
|
RuleVisitor.visit_Rep0N
|
test
|
def visit_Rep0N(self, node: parsing.Rep0N) -> [ast.stmt]:
"""Generates python code for a clause repeated 0 or more times.
#If all clauses can be inlined
while clause:
pass
while True:
<code for the clause>
"""
cl_ast = self.visit(node.pt)
|
python
|
{
"resource": ""
}
|
q278246
|
RuleVisitor.visit_Rep1N
|
test
|
def visit_Rep1N(self, node: parsing.Rep0N) -> [ast.stmt]:
"""Generates python code for a clause repeated 1 or more times.
<code for the clause>
while True:
<code for the clause>
"""
clause = self.visit(node.pt)
if isinstance(clause, ast.expr):
return (self._clause(clause) + self.visit_Rep0N(node))
|
python
|
{
"resource": ""
}
|
q278247
|
catend
|
test
|
def catend(dst: str, src: str, indent) -> str:
"""cat two strings but handle \n for tabulation"""
res = dst
txtsrc = src
if not isinstance(src, str):
txtsrc = str(src)
for c in list(txtsrc):
if len(res) > 0 and res[-1] == '\n':
|
python
|
{
"resource": ""
}
|
q278248
|
list_set_indent
|
test
|
def list_set_indent(lst: list, indent: int=1):
"""recurs into list for indentation"""
for i in lst:
if isinstance(i, indentable):
|
python
|
{
"resource": ""
}
|
q278249
|
list_to_str
|
test
|
def list_to_str(lst: list, content: str, indent: int=1):
"""recurs into list for string computing """
for i in lst:
if isinstance(i, indentable):
content = i.to_str(content, indent)
elif isinstance(i, list):
|
python
|
{
"resource": ""
}
|
q278250
|
echo_nodes
|
test
|
def echo_nodes(self, *rest):
"""
Print nodes.
example::
R = [
In : node #echo("coucou", 12, node)
]
"""
txt = ""
for thing in rest:
|
python
|
{
"resource": ""
}
|
q278251
|
populate_from_sequence
|
test
|
def populate_from_sequence(seq: list, r: ref(Edge), sr: state.StateRegister):
""" function that connect each other one sequence of MatchExpr. """
base_state = r
# we need to detect the last state of the sequence
idxlast = len(seq) - 1
idx = 0
for m in seq:
# alternatives are represented by builtin list
if isinstance(m, list):
# so recursively connect all states of each alternative sequences.
for item in m:
populate_from_sequence(item, r, sr)
elif isinstance(m, MatchExpr):
# from the current state, have we a existing edge for this event?
eX = r().get_next_edge(m)
if eX is None:
sX = None
if idx != idxlast:
|
python
|
{
"resource": ""
}
|
q278252
|
populate_state_register
|
test
|
def populate_state_register(all_seq: [list], sr: state.StateRegister) -> Edge:
""" function that create a state for all instance
of MatchExpr in the given list and connect each others.
"""
# Basic State
s0 = state.State(sr)
# loop on himself
s0.matchDefault(s0)
# this is default
sr.set_default_state(s0)
# use Edge to store connection
|
python
|
{
"resource": ""
}
|
q278253
|
MatchBlock.build_state_tree
|
test
|
def build_state_tree(self, tree: list, sr: state.StateRegister):
""" main function for creating a bottom-up tree automata
for a block of matching statements.
"""
all_seq = []
# for all statements populate a list
|
python
|
{
"resource": ""
}
|
q278254
|
pred_eq
|
test
|
def pred_eq(self, n, val):
"""
Test if a node set with setint or setstr equal a certain value
example::
R = [
__scope__:n
['a' #setint(n, 12) | 'b' #setint(n,
|
python
|
{
"resource": ""
}
|
q278255
|
from_string
|
test
|
def from_string(bnf: str, entry=None, *optional_inherit) -> Grammar:
"""
Create a Grammar from a string
"""
inherit = [Grammar] + list(optional_inherit)
|
python
|
{
"resource": ""
}
|
q278256
|
from_file
|
test
|
def from_file(fn: str, entry=None, *optional_inherit) -> Grammar:
"""
Create a Grammar from a file
"""
import os.path
if os.path.exists(fn):
f = open(fn, 'r')
bnf = f.read()
f.close()
inherit = [Grammar] + list(optional_inherit)
|
python
|
{
"resource": ""
}
|
q278257
|
Grammar.parse
|
test
|
def parse(self, source: str=None, entry: str=None) -> parsing.Node:
"""Parse source using the grammar"""
self.from_string = True
if source is not None:
self.parsed_stream(source)
if entry is None:
entry = self.entry
|
python
|
{
"resource": ""
}
|
q278258
|
Grammar.parse_file
|
test
|
def parse_file(self, filename: str, entry: str=None) -> parsing.Node:
"""Parse filename using the grammar"""
self.from_string = False
import os.path
with open(filename, 'r') as f:
self.parsed_stream(f.read(), os.path.abspath(filename))
if entry is None:
|
python
|
{
"resource": ""
}
|
q278259
|
set_node
|
test
|
def set_node(self, dst, src):
"""
Basically copy one node to another.
usefull to transmit a node from a terminal
rule as result of the current rule.
example::
R = [
In : node #set(_, node)
]
here the node return by the rule In is
also the node return by the rule R
"""
if not isinstance(src, Node):
dst.value = src
else:
dst.set(src)
idsrc = id(src)
iddst = id(dst)
if iddst not in self.id_cache:
|
python
|
{
"resource": ""
}
|
q278260
|
set_node_as_int
|
test
|
def set_node_as_int(self, dst, src):
"""
Set a node to a value captured from another node
example::
R = [
|
python
|
{
"resource": ""
}
|
q278261
|
get_subnode
|
test
|
def get_subnode(self, dst, ast, expr):
"""
get the value of subnode
example::
R = [
__scope__:big getsomethingbig:>big
#get(_, big, '.val') //
|
python
|
{
"resource": ""
}
|
q278262
|
default_serializer
|
test
|
def default_serializer(o):
"""Default serializer for json."""
defs = (
((datetime.date, datetime.time),
lambda x: x.isoformat(), ),
((datetime.datetime, ),
|
python
|
{
"resource": ""
}
|
q278263
|
get
|
test
|
def get(query, from_date, limit=0, **kwargs):
"""Get deposits."""
dep_generator = _get_depositions()
total_depids = 1 # Count of depositions is hard
|
python
|
{
"resource": ""
}
|
q278264
|
dump
|
test
|
def dump(deposition, from_date, with_json=True, latest_only=False, **kwargs):
"""Dump the deposition object as dictionary."""
# Serialize the __getstate__ and fall back to default serializer
dep_json = json.dumps(deposition.__getstate__(),
default=default_serializer)
dep_dict
|
python
|
{
"resource": ""
}
|
q278265
|
_get_recids_invenio12
|
test
|
def _get_recids_invenio12(from_date):
"""Get BibDocs for Invenio 1."""
from invenio.dbquery import run_sql
return (id[0] for id in
|
python
|
{
"resource": ""
}
|
q278266
|
_get_recids_invenio2
|
test
|
def _get_recids_invenio2(from_date):
"""Get BibDocs for Invenio 2."""
from invenio.legacy.dbquery import run_sql
return (id[0] for id
|
python
|
{
"resource": ""
}
|
q278267
|
_import_bibdoc
|
test
|
def _import_bibdoc():
"""Import BibDocFile."""
try:
from invenio.bibdocfile import BibRecDocs,
|
python
|
{
"resource": ""
}
|
q278268
|
dump_bibdoc
|
test
|
def dump_bibdoc(recid, from_date, **kwargs):
"""Dump all BibDoc metadata.
:param docid: BibDoc ID
:param from_date: Dump only BibDoc revisions newer than this date.
:returns: List of version of the BibDoc formatted as a dict
"""
BibRecDocs, BibDoc = _import_bibdoc()
bibdocfile_dump = []
date = datetime.datetime.strptime(from_date, '%Y-%m-%d %H:%M:%S')
for bibdoc in BibRecDocs(recid).list_bibdocs():
for version in bibdoc.list_versions():
bibdoc_version = bibdoc.list_version_files(version)
for f in bibdoc_version:
if f.is_icon() or f.md < date:
# Don't care about icons
# Don't care about files not modified since from_date
continue
bibdocfile_dump.append(dict(
bibdocid=f.get_bibdocid(),
checksum=f.get_checksum(),
comment=f.get_comment(),
copyright=(
f.get_copyright() if hasattr(f, 'get_copyright')
else None),
creation_date=datetime_toutc(f.cd).isoformat(),
description=f.get_description(),
encoding=f.encoding,
etag=f.etag,
flags=f.flags,
format=f.get_format(),
full_name=f.get_full_name(),
full_path=f.get_full_path(),
hidden=f.hidden,
|
python
|
{
"resource": ""
}
|
q278269
|
get_check
|
test
|
def get_check():
"""Get bibdocs to check."""
try:
from invenio.dbquery import run_sql
except ImportError:
from invenio.legacy.dbquery import run_sql
return (
run_sql('select
|
python
|
{
"resource": ""
}
|
q278270
|
check
|
test
|
def check(id_):
"""Check bibdocs."""
BibRecDocs, BibDoc = _import_bibdoc()
try:
BibDoc(id_).list_all_files()
except Exception:
|
python
|
{
"resource": ""
}
|
q278271
|
dump
|
test
|
def dump(obj, from_date, with_json=True, latest_only=False, **kwargs):
"""Dump the oauth2server tokens."""
return dict(id=obj.id,
client_id=obj.client_id,
user_id=obj.user_id,
|
python
|
{
"resource": ""
}
|
q278272
|
get
|
test
|
def get(*args, **kwargs):
"""Get UserEXT objects."""
try:
from invenio.modules.accounts.models import
|
python
|
{
"resource": ""
}
|
q278273
|
dump
|
test
|
def dump(u, from_date, with_json=True, latest_only=False, **kwargs):
"""Dump the UserEXt objects as a list of dictionaries.
:param u: UserEXT to be dumped.
:type u: `invenio_accounts.models.UserEXT [Invenio2.x]`
:returns: User
|
python
|
{
"resource": ""
}
|
q278274
|
get
|
test
|
def get(*args, **kwargs):
"""Get communities."""
from invenio.modules.communities.models
|
python
|
{
"resource": ""
}
|
q278275
|
_get_modified_recids_invenio12
|
test
|
def _get_modified_recids_invenio12(from_date):
"""Get record ids for Invenio 1."""
from invenio.search_engine import search_pattern
from invenio.dbquery import run_sql
return set((id[0] for id in run_sql(
|
python
|
{
"resource": ""
}
|
q278276
|
_get_modified_recids_invenio2
|
test
|
def _get_modified_recids_invenio2(from_date):
"""Get record ids for Invenio 2."""
from invenio.legacy.search_engine import search_pattern
from invenio.modules.records.models import Record
date = datetime.datetime.strptime(from_date, '%Y-%m-%d %H:%M:%S')
return set(
|
python
|
{
"resource": ""
}
|
q278277
|
_get_collection_restrictions
|
test
|
def _get_collection_restrictions(collection):
"""Get all restrictions for a given collection, users and fireroles."""
try:
from invenio.dbquery import run_sql
from invenio.access_control_firerole import compile_role_definition
except ImportError:
from invenio.modules.access.firerole import compile_role_definition
from invenio.legacy.dbquery import run_sql
res = run_sql(
'SELECT r.firerole_def_src, email '
'FROM accROLE as r '
'JOIN accROLE_accACTION_accARGUMENT ON r.id=id_accROLE '
'JOIN accARGUMENT AS a ON a.id=id_accARGUMENT '
'JOIN user_accROLE AS u ON r.id=u.id_accROLE '
'JOIN user ON user.id=u.id_user '
|
python
|
{
"resource": ""
}
|
q278278
|
get_record_revisions
|
test
|
def get_record_revisions(recid, from_date):
"""Get record revisions."""
try:
from invenio.dbquery import run_sql
except ImportError:
from invenio.legacy.dbquery import
|
python
|
{
"resource": ""
}
|
q278279
|
get_record_collections
|
test
|
def get_record_collections(recid):
"""Get all collections the record belong to."""
try:
from invenio.search_engine import (
get_all_collections_of_a_record,
get_restricted_collections_for_recid)
except ImportError:
from invenio.legacy.search_engine import (
get_all_collections_of_a_record,
get_restricted_collections_for_recid)
collections = {
|
python
|
{
"resource": ""
}
|
q278280
|
dump_record_json
|
test
|
def dump_record_json(marcxml):
"""Dump JSON of record."""
try:
from invenio.modules.records.api import Record
d = Record.create(marcxml, 'marc')
return d.dumps(clean=True)
except ImportError:
|
python
|
{
"resource": ""
}
|
q278281
|
get
|
test
|
def get(query, from_date, **kwargs):
"""Get recids matching query and with changes."""
recids, search_pattern = get_modified_recids(from_date)
recids = recids.union(get_modified_bibdoc_recids(from_date))
if query:
|
python
|
{
"resource": ""
}
|
q278282
|
dump
|
test
|
def dump(recid,
from_date,
with_json=False,
latest_only=False,
with_collections=False,
**kwargs):
"""Dump MARCXML and JSON representation of a record.
:param recid: Record identifier
:param from_date: Dump only revisions from this date onwards.
:param with_json: If ``True`` use old ``Record.create`` to generate the
JSON representation of the record.
:param latest_only: Dump only the last revision of the record metadata.
:param with_collections: If ``True`` dump the list of collections that the
record belongs to.
:returns: List of
|
python
|
{
"resource": ""
}
|
q278283
|
dump
|
test
|
def dump(ra, from_date, with_json=True, latest_only=False, **kwargs):
"""Dump the remote accounts as a list of dictionaries.
:param ra: Remote account to be dumped.
:type ra: `invenio_oauthclient.models.RemoteAccount [Invenio2.x]`
:returns: Remote accounts serialized to dictionary.
|
python
|
{
"resource": ""
}
|
q278284
|
load_common
|
test
|
def load_common(model_cls, data):
"""Helper function for loading JSON data verbatim into model."""
obj
|
python
|
{
"resource": ""
}
|
q278285
|
collect_things_entry_points
|
test
|
def collect_things_entry_points():
"""Collect entry points."""
things = dict()
for entry_point in iter_entry_points(group='invenio_migrator.things'):
|
python
|
{
"resource": ""
}
|
q278286
|
init_app_context
|
test
|
def init_app_context():
"""Initialize app context for Invenio 2.x."""
try:
from invenio.base.factory import create_app
app = create_app()
|
python
|
{
"resource": ""
}
|
q278287
|
memoize
|
test
|
def memoize(func):
"""Cache for heavy function calls."""
cache = {}
@wraps(func)
def wrap(*args, **kwargs):
key = '{0}{1}'.format(args, kwargs)
|
python
|
{
"resource": ""
}
|
q278288
|
_get_run_sql
|
test
|
def _get_run_sql():
"""Import ``run_sql``."""
try:
from invenio.dbquery import
|
python
|
{
"resource": ""
}
|
q278289
|
get_connected_roles
|
test
|
def get_connected_roles(action_id):
"""Get roles connected to an action."""
try:
from invenio.access_control_admin import compile_role_definition
except ImportError:
from invenio.modules.access.firerole import compile_role_definition
run_sql = _get_run_sql()
roles = {}
res = run_sql(
'select r.id, r.name, r.description, r.firerole_def_src, '
'a.keyword, a.value, email from accROLE as r '
'join accROLE_accACTION_accARGUMENT on r.id=id_accROLE '
'join accARGUMENT as a on a.id=id_accARGUMENT '
'join user_accROLE as u on r.id=u.id_accROLE '
'join user on user.id=u.id_user '
'where id_accACTION=%s', (action_id, )
)
for r in res:
role = roles.setdefault(
r[0], {
'id': r[0],
'name': r[1],
|
python
|
{
"resource": ""
}
|
q278290
|
get
|
test
|
def get(query, *args, **kwargs):
"""Get action definitions to dump."""
run_sql = _get_run_sql()
actions = [
dict(id=row[0],
name=row[1],
allowedkeywords=row[2],
optional=row[3])
for action in query.split(',') for row in run_sql(
|
python
|
{
"resource": ""
}
|
q278291
|
dump
|
test
|
def dump(rt, from_date, with_json=True, latest_only=False, **kwargs):
"""Dump the remote tokens as a list of dictionaries.
:param ra: Remote toekn to be dumped.
:type ra: `invenio_oauthclient.models.RemoteToken [Invenio2.x]`
:returns: Remote tokens serialized to dictionary.
:rtype: dict
|
python
|
{
"resource": ""
}
|
q278292
|
load_token
|
test
|
def load_token(data):
"""Load the oauth2server token from data dump."""
from
|
python
|
{
"resource": ""
}
|
q278293
|
import_record
|
test
|
def import_record(data, source_type=None, latest_only=False):
"""Migrate a record from a migration dump.
:param data: Dictionary for representing a single record and files.
:param source_type: Determines if the MARCXML or the JSON dump is used.
Default: ``marcxml``.
:param latest_only: Determine is only the latest revision should be loaded.
"""
source_type = source_type or 'marcxml'
assert source_type in ['marcxml', 'json']
|
python
|
{
"resource": ""
}
|
q278294
|
config_imp_or_default
|
test
|
def config_imp_or_default(app, config_var_imp, default):
"""Import config var import path or use default value."""
|
python
|
{
"resource": ""
}
|
q278295
|
dump
|
test
|
def dump(obj, from_date, with_json=True, latest_only=False, **kwargs):
"""Dump the oauth2server Client."""
return dict(name=obj.name,
description=obj.description,
website=obj.website,
|
python
|
{
"resource": ""
}
|
q278296
|
_get_users_invenio12
|
test
|
def _get_users_invenio12(*args, **kwargs):
"""Get user accounts Invenio 1."""
from invenio.dbquery import run_sql, deserialize_via_marshal
User = namedtuple('User', [
'id', 'email', 'password', 'password_salt', 'note', 'full_name',
'settings', 'nickname', 'last_login'
])
users = run_sql(
'SELECT id, email, password, note, settings, nickname, last_login'
' FROM user',
run_on_slave=True)
return len(users), [
User(
id=user[0],
email=user[1],
|
python
|
{
"resource": ""
}
|
q278297
|
_get_users_invenio2
|
test
|
def _get_users_invenio2(*args, **kwargs):
"""Get user accounts from Invenio
|
python
|
{
"resource": ""
}
|
q278298
|
dump
|
test
|
def dump(u, *args, **kwargs):
"""Dump the users as a list of dictionaries.
:param u: User to be dumped.
:type u: `invenio.modules.accounts.models.User [Invenio2.x]` or namedtuple.
:returns: User serialized to dictionary.
:rtype: dict
"""
|
python
|
{
"resource": ""
}
|
q278299
|
load_deposit
|
test
|
def load_deposit(data):
"""Load the raw JSON dump of the Deposition.
Uses Record API in order to bypass all Deposit-specific initialization,
which are to be done after the final stage
|
python
|
{
"resource": ""
}
|
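The `text` cells above are previews truncated by the viewer; each row pairs a question-style `_id` with a single Python function. A minimal sketch of loading and filtering such a dataset with the Hugging Face `datasets` library follows; the repository identifier and the `train` split name are placeholders, not the actual names of this dataset.

```python
from datasets import load_dataset

# Hypothetical identifier and split -- substitute the real dataset repo name.
ds = load_dataset("org/code-functions", split="train")

# The `partition` column (3 classes) marks which evaluation split a row belongs to;
# all rows previewed above carry partition == "test".
test_rows = ds.filter(lambda row: row["partition"] == "test")

row = test_rows[0]
print(row["_id"], row["title"], row["language"])  # e.g. q278200 HitClusterizer.set_hit_dtype python
print(row["text"][:120])                          # full function source (not the truncated preview)
print(row["meta_information"])                    # e.g. {"resource": ""}
```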