id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
sequence
docstring
stringlengths
3
17.3k
docstring_tokens
sequence
sha
stringlengths
40
40
url
stringlengths
87
242
6,300
ckoepp/TwitterSearch
TwitterSearch/TwitterUserOrder.py
TwitterUserOrder.set_include_rts
def set_include_rts(self, rts):
    """ Set the 'include_rts' request parameter.

    Passing False strips any native retweets from the returned timeline.

    :param rts: Boolean triggering the usage of the parameter
    :raises: TwitterSearchException
    """
    if isinstance(rts, bool):
        self.arguments['include_rts'] = 'true' if rts else 'false'
    else:
        # 1008: argument is not a boolean
        raise TwitterSearchException(1008)
python
def set_include_rts(self, rts): """ Sets 'include_rts' parameter. When set to False, \ the timeline will strip any native retweets from the returned timeline :param rts: Boolean triggering the usage of the parameter :raises: TwitterSearchException """ if not isinstance(rts, bool): raise TwitterSearchException(1008) self.arguments.update({'include_rts': 'true' if rts else 'false'})
[ "def", "set_include_rts", "(", "self", ",", "rts", ")", ":", "if", "not", "isinstance", "(", "rts", ",", "bool", ")", ":", "raise", "TwitterSearchException", "(", "1008", ")", "self", ".", "arguments", ".", "update", "(", "{", "'include_rts'", ":", "'true'", "if", "rts", "else", "'false'", "}", ")" ]
Sets 'include_rts' parameter. When set to False, \ the timeline will strip any native retweets from the returned timeline :param rts: Boolean triggering the usage of the parameter :raises: TwitterSearchException
[ "Sets", "include_rts", "parameter", ".", "When", "set", "to", "False", "\\", "the", "timeline", "will", "strip", "any", "native", "retweets", "from", "the", "returned", "timeline" ]
627b9f519d49faf6b83859717f9082b3b2622aaf
https://github.com/ckoepp/TwitterSearch/blob/627b9f519d49faf6b83859717f9082b3b2622aaf/TwitterSearch/TwitterUserOrder.py#L68-L78
6,301
ckoepp/TwitterSearch
TwitterSearch/TwitterUserOrder.py
TwitterUserOrder.set_exclude_replies
def set_exclude_replies(self, exclude):
    """ Set the 'exclude_replies' request parameter.

    Used to prevent replies from appearing in the returned timeline.

    :param exclude: Boolean triggering the usage of the parameter
    :raises: TwitterSearchException
    """
    if isinstance(exclude, bool):
        self.arguments['exclude_replies'] = 'true' if exclude else 'false'
    else:
        # 1008: argument is not a boolean
        raise TwitterSearchException(1008)
python
def set_exclude_replies(self, exclude): """ Sets 'exclude_replies' parameter used to \ prevent replies from appearing in the returned timeline :param exclude: Boolean triggering the usage of the parameter :raises: TwitterSearchException """ if not isinstance(exclude, bool): raise TwitterSearchException(1008) self.arguments.update({'exclude_replies': 'true' if exclude else 'false'})
[ "def", "set_exclude_replies", "(", "self", ",", "exclude", ")", ":", "if", "not", "isinstance", "(", "exclude", ",", "bool", ")", ":", "raise", "TwitterSearchException", "(", "1008", ")", "self", ".", "arguments", ".", "update", "(", "{", "'exclude_replies'", ":", "'true'", "if", "exclude", "else", "'false'", "}", ")" ]
Sets 'exclude_replies' parameter used to \ prevent replies from appearing in the returned timeline :param exclude: Boolean triggering the usage of the parameter :raises: TwitterSearchException
[ "Sets", "exclude_replies", "parameter", "used", "to", "\\", "prevent", "replies", "from", "appearing", "in", "the", "returned", "timeline" ]
627b9f519d49faf6b83859717f9082b3b2622aaf
https://github.com/ckoepp/TwitterSearch/blob/627b9f519d49faf6b83859717f9082b3b2622aaf/TwitterSearch/TwitterUserOrder.py#L80-L92
6,302
ckoepp/TwitterSearch
TwitterSearch/TwitterUserOrder.py
TwitterUserOrder.set_contributor_details
def set_contributor_details(self, contdetails):
    """ Set the 'contributor_details' request parameter.

    Enhances the contributors element of the status response to include
    the screen_name of the contributor. By default only the user_id of
    the contributor is included.

    :param contdetails: Boolean triggering the usage of the parameter
    :raises: TwitterSearchException
    """
    if isinstance(contdetails, bool):
        self.arguments['contributor_details'] = (
            'true' if contdetails else 'false'
        )
    else:
        # 1008: argument is not a boolean
        raise TwitterSearchException(1008)
python
def set_contributor_details(self, contdetails): """ Sets 'contributor_details' parameter used to enhance the \ contributors element of the status response to include \ the screen_name of the contributor. By default only \ the user_id of the contributor is included :param contdetails: Boolean triggering the usage of the parameter :raises: TwitterSearchException """ if not isinstance(contdetails, bool): raise TwitterSearchException(1008) self.arguments.update({'contributor_details': 'true' if contdetails else 'false'})
[ "def", "set_contributor_details", "(", "self", ",", "contdetails", ")", ":", "if", "not", "isinstance", "(", "contdetails", ",", "bool", ")", ":", "raise", "TwitterSearchException", "(", "1008", ")", "self", ".", "arguments", ".", "update", "(", "{", "'contributor_details'", ":", "'true'", "if", "contdetails", "else", "'false'", "}", ")" ]
Sets 'contributor_details' parameter used to enhance the \ contributors element of the status response to include \ the screen_name of the contributor. By default only \ the user_id of the contributor is included :param contdetails: Boolean triggering the usage of the parameter :raises: TwitterSearchException
[ "Sets", "contributor_details", "parameter", "used", "to", "enhance", "the", "\\", "contributors", "element", "of", "the", "status", "response", "to", "include", "\\", "the", "screen_name", "of", "the", "contributor", ".", "By", "default", "only", "\\", "the", "user_id", "of", "the", "contributor", "is", "included" ]
627b9f519d49faf6b83859717f9082b3b2622aaf
https://github.com/ckoepp/TwitterSearch/blob/627b9f519d49faf6b83859717f9082b3b2622aaf/TwitterSearch/TwitterUserOrder.py#L94-L108
6,303
tkaemming/django-subdomains
subdomains/templatetags/subdomainurls.py
url
def url(context, view, subdomain=UNSET, *args, **kwargs):
    """
    Resolves a URL in a template, using subdomain-based URL resolution.

    If no subdomain is provided and a ``request`` is in the template
    context when rendering, the URL will be resolved relative to the
    current request's subdomain. If no ``request`` is provided, the URL
    will be resolved relative to current domain with the
    ``settings.ROOT_URLCONF``.

    Usage::

        {% load subdomainurls %}
        {% url 'view-name' subdomain='subdomain' %}

    .. note:: This tag uses the variable URL syntax introduced in Django
       1.3 as ``{% load url from future %}`` and was made the standard in
       Django 1.5. If you are upgrading a legacy application from one of
       the previous template tag formats, make sure to quote your constant
       string URL names to avoid
       :exc:`~django.core.urlresolver.NoReverseMatch` errors during
       template rendering.
    """
    if subdomain is UNSET:
        request = context.get('request')
        if request is not None:
            subdomain = getattr(request, 'subdomain', None)
        else:
            subdomain = None
    elif subdomain == '':
        # An explicitly empty subdomain means "no subdomain".  The original
        # compared with ``subdomain is ''``, which relies on CPython string
        # interning and raises SyntaxWarning on Python >= 3.8; equality is
        # the correct comparison.
        subdomain = None
    return reverse(view, subdomain=subdomain, args=args, kwargs=kwargs)
python
def url(context, view, subdomain=UNSET, *args, **kwargs): """ Resolves a URL in a template, using subdomain-based URL resolution. If no subdomain is provided and a ``request`` is in the template context when rendering, the URL will be resolved relative to the current request's subdomain. If no ``request`` is provided, the URL will be resolved relative to current domain with the ``settings.ROOT_URLCONF``. Usage:: {% load subdomainurls %} {% url 'view-name' subdomain='subdomain' %} .. note:: This tag uses the variable URL syntax introduced in Django 1.3 as ``{% load url from future %}`` and was made the standard in Django 1.5. If you are upgrading a legacy application from one of the previous template tag formats, make sure to quote your constant string URL names to avoid :exc:`~django.core.urlresolver.NoReverseMatch` errors during template rendering. """ if subdomain is UNSET: request = context.get('request') if request is not None: subdomain = getattr(request, 'subdomain', None) else: subdomain = None elif subdomain is '': subdomain = None return reverse(view, subdomain=subdomain, args=args, kwargs=kwargs)
[ "def", "url", "(", "context", ",", "view", ",", "subdomain", "=", "UNSET", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "subdomain", "is", "UNSET", ":", "request", "=", "context", ".", "get", "(", "'request'", ")", "if", "request", "is", "not", "None", ":", "subdomain", "=", "getattr", "(", "request", ",", "'subdomain'", ",", "None", ")", "else", ":", "subdomain", "=", "None", "elif", "subdomain", "is", "''", ":", "subdomain", "=", "None", "return", "reverse", "(", "view", ",", "subdomain", "=", "subdomain", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")" ]
Resolves a URL in a template, using subdomain-based URL resolution. If no subdomain is provided and a ``request`` is in the template context when rendering, the URL will be resolved relative to the current request's subdomain. If no ``request`` is provided, the URL will be resolved relative to current domain with the ``settings.ROOT_URLCONF``. Usage:: {% load subdomainurls %} {% url 'view-name' subdomain='subdomain' %} .. note:: This tag uses the variable URL syntax introduced in Django 1.3 as ``{% load url from future %}`` and was made the standard in Django 1.5. If you are upgrading a legacy application from one of the previous template tag formats, make sure to quote your constant string URL names to avoid :exc:`~django.core.urlresolver.NoReverseMatch` errors during template rendering.
[ "Resolves", "a", "URL", "in", "a", "template", "using", "subdomain", "-", "based", "URL", "resolution", "." ]
be6cc1c556a2007287ef4e647ea1784cf7690a44
https://github.com/tkaemming/django-subdomains/blob/be6cc1c556a2007287ef4e647ea1784cf7690a44/subdomains/templatetags/subdomainurls.py#L12-L43
6,304
tkaemming/django-subdomains
subdomains/utils.py
urljoin
def urljoin(domain, path=None, scheme=None):
    """
    Joins a domain, path and scheme part together, returning a full URL.

    :param domain: the domain, e.g. ``example.com``
    :param path: the path part of the URL, e.g. ``/example/``
    :param scheme: the scheme part of the URL, e.g. ``http``, defaulting to
        the value of ``settings.DEFAULT_URL_SCHEME``
    :returns: a full URL
    """
    # Fall back to the project-wide default scheme when none is given.
    actual_scheme = (
        scheme if scheme is not None
        else getattr(settings, 'DEFAULT_URL_SCHEME', 'http')
    )
    parts = (actual_scheme, domain, path or '', None, None, None)
    return urlunparse(parts)
python
def urljoin(domain, path=None, scheme=None): """ Joins a domain, path and scheme part together, returning a full URL. :param domain: the domain, e.g. ``example.com`` :param path: the path part of the URL, e.g. ``/example/`` :param scheme: the scheme part of the URL, e.g. ``http``, defaulting to the value of ``settings.DEFAULT_URL_SCHEME`` :returns: a full URL """ if scheme is None: scheme = getattr(settings, 'DEFAULT_URL_SCHEME', 'http') return urlunparse((scheme, domain, path or '', None, None, None))
[ "def", "urljoin", "(", "domain", ",", "path", "=", "None", ",", "scheme", "=", "None", ")", ":", "if", "scheme", "is", "None", ":", "scheme", "=", "getattr", "(", "settings", ",", "'DEFAULT_URL_SCHEME'", ",", "'http'", ")", "return", "urlunparse", "(", "(", "scheme", ",", "domain", ",", "path", "or", "''", ",", "None", ",", "None", ",", "None", ")", ")" ]
Joins a domain, path and scheme part together, returning a full URL. :param domain: the domain, e.g. ``example.com`` :param path: the path part of the URL, e.g. ``/example/`` :param scheme: the scheme part of the URL, e.g. ``http``, defaulting to the value of ``settings.DEFAULT_URL_SCHEME`` :returns: a full URL
[ "Joins", "a", "domain", "path", "and", "scheme", "part", "together", "returning", "a", "full", "URL", "." ]
be6cc1c556a2007287ef4e647ea1784cf7690a44
https://github.com/tkaemming/django-subdomains/blob/be6cc1c556a2007287ef4e647ea1784cf7690a44/subdomains/utils.py#L25-L38
6,305
tkaemming/django-subdomains
subdomains/middleware.py
SubdomainMiddleware.process_request
def process_request(self, request):
    """
    Adds a ``subdomain`` attribute to the ``request`` parameter.
    """
    domain = lower(self.get_domain_for_request(request))
    host = lower(request.get_host())
    # Optional subdomain prefix, then the site domain, then an optional
    # ":port" suffix.
    pattern = r'^(?:(?P<subdomain>.*?)\.)?%s(?::.*)?$' % re.escape(domain)
    match = re.match(pattern, host)
    if match is None:
        request.subdomain = None
        logger.warning('The host %s does not belong to the domain %s, '
                       'unable to identify the subdomain for this request',
                       request.get_host(), domain)
    else:
        request.subdomain = match.group('subdomain')
python
def process_request(self, request): """ Adds a ``subdomain`` attribute to the ``request`` parameter. """ domain, host = map(lower, (self.get_domain_for_request(request), request.get_host())) pattern = r'^(?:(?P<subdomain>.*?)\.)?%s(?::.*)?$' % re.escape(domain) matches = re.match(pattern, host) if matches: request.subdomain = matches.group('subdomain') else: request.subdomain = None logger.warning('The host %s does not belong to the domain %s, ' 'unable to identify the subdomain for this request', request.get_host(), domain)
[ "def", "process_request", "(", "self", ",", "request", ")", ":", "domain", ",", "host", "=", "map", "(", "lower", ",", "(", "self", ".", "get_domain_for_request", "(", "request", ")", ",", "request", ".", "get_host", "(", ")", ")", ")", "pattern", "=", "r'^(?:(?P<subdomain>.*?)\\.)?%s(?::.*)?$'", "%", "re", ".", "escape", "(", "domain", ")", "matches", "=", "re", ".", "match", "(", "pattern", ",", "host", ")", "if", "matches", ":", "request", ".", "subdomain", "=", "matches", ".", "group", "(", "'subdomain'", ")", "else", ":", "request", ".", "subdomain", "=", "None", "logger", ".", "warning", "(", "'The host %s does not belong to the domain %s, '", "'unable to identify the subdomain for this request'", ",", "request", ".", "get_host", "(", ")", ",", "domain", ")" ]
Adds a ``subdomain`` attribute to the ``request`` parameter.
[ "Adds", "a", "subdomain", "attribute", "to", "the", "request", "parameter", "." ]
be6cc1c556a2007287ef4e647ea1784cf7690a44
https://github.com/tkaemming/django-subdomains/blob/be6cc1c556a2007287ef4e647ea1784cf7690a44/subdomains/middleware.py#L28-L44
6,306
tkaemming/django-subdomains
subdomains/middleware.py
SubdomainURLRoutingMiddleware.process_request
def process_request(self, request):
    """
    Sets the current request's ``urlconf`` attribute to the urlconf
    associated with the subdomain, if it is listed in
    ``settings.SUBDOMAIN_URLCONFS``.
    """
    # Let the base middleware populate ``request.subdomain`` first.
    super(SubdomainURLRoutingMiddleware, self).process_request(request)

    subdomain = getattr(request, 'subdomain', UNSET)
    if subdomain is UNSET:
        return
    urlconf = settings.SUBDOMAIN_URLCONFS.get(subdomain)
    if urlconf is None:
        return
    logger.debug("Using urlconf %s for subdomain: %s",
                 repr(urlconf), repr(subdomain))
    request.urlconf = urlconf
python
def process_request(self, request): """ Sets the current request's ``urlconf`` attribute to the urlconf associated with the subdomain, if it is listed in ``settings.SUBDOMAIN_URLCONFS``. """ super(SubdomainURLRoutingMiddleware, self).process_request(request) subdomain = getattr(request, 'subdomain', UNSET) if subdomain is not UNSET: urlconf = settings.SUBDOMAIN_URLCONFS.get(subdomain) if urlconf is not None: logger.debug("Using urlconf %s for subdomain: %s", repr(urlconf), repr(subdomain)) request.urlconf = urlconf
[ "def", "process_request", "(", "self", ",", "request", ")", ":", "super", "(", "SubdomainURLRoutingMiddleware", ",", "self", ")", ".", "process_request", "(", "request", ")", "subdomain", "=", "getattr", "(", "request", ",", "'subdomain'", ",", "UNSET", ")", "if", "subdomain", "is", "not", "UNSET", ":", "urlconf", "=", "settings", ".", "SUBDOMAIN_URLCONFS", ".", "get", "(", "subdomain", ")", "if", "urlconf", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"Using urlconf %s for subdomain: %s\"", ",", "repr", "(", "urlconf", ")", ",", "repr", "(", "subdomain", ")", ")", "request", ".", "urlconf", "=", "urlconf" ]
Sets the current request's ``urlconf`` attribute to the urlconf associated with the subdomain, if it is listed in ``settings.SUBDOMAIN_URLCONFS``.
[ "Sets", "the", "current", "request", "s", "urlconf", "attribute", "to", "the", "urlconf", "associated", "with", "the", "subdomain", "if", "it", "is", "listed", "in", "settings", ".", "SUBDOMAIN_URLCONFS", "." ]
be6cc1c556a2007287ef4e647ea1784cf7690a44
https://github.com/tkaemming/django-subdomains/blob/be6cc1c556a2007287ef4e647ea1784cf7690a44/subdomains/middleware.py#L51-L66
6,307
tkaemming/django-subdomains
subdomains/middleware.py
SubdomainURLRoutingMiddleware.process_response
def process_response(self, request, response):
    """
    Forces the HTTP ``Vary`` header onto requests to avoid having
    responses cached across subdomains.
    """
    # Opt-out is possible via settings.FORCE_VARY_ON_HOST = False.
    force_vary = getattr(settings, 'FORCE_VARY_ON_HOST', True)
    if force_vary:
        patch_vary_headers(response, ('Host',))
    return response
python
def process_response(self, request, response): """ Forces the HTTP ``Vary`` header onto requests to avoid having responses cached across subdomains. """ if getattr(settings, 'FORCE_VARY_ON_HOST', True): patch_vary_headers(response, ('Host',)) return response
[ "def", "process_response", "(", "self", ",", "request", ",", "response", ")", ":", "if", "getattr", "(", "settings", ",", "'FORCE_VARY_ON_HOST'", ",", "True", ")", ":", "patch_vary_headers", "(", "response", ",", "(", "'Host'", ",", ")", ")", "return", "response" ]
Forces the HTTP ``Vary`` header onto requests to avoid having responses cached across subdomains.
[ "Forces", "the", "HTTP", "Vary", "header", "onto", "requests", "to", "avoid", "having", "responses", "cached", "across", "subdomains", "." ]
be6cc1c556a2007287ef4e647ea1784cf7690a44
https://github.com/tkaemming/django-subdomains/blob/be6cc1c556a2007287ef4e647ea1784cf7690a44/subdomains/middleware.py#L68-L76
6,308
python-constraint/python-constraint
documentation/source/conf.py
process_docstring
def process_docstring(app, what, name, obj, options, lines):
    """
    Process the docstring for a given python object.

    Note that the list 'lines' is changed in this function. Sphinx uses
    the altered content of the list.
    """
    # Translate epytext markup to reST: @field -> :field:, L{x}/C{x} ->
    # :py:obj:`x`, and U{url} -> url.  The substitutions are applied in
    # the same order as the original nested re.sub calls.
    field_pattern = r'@(' + '|'.join(FIELDS) + r')'
    converted = []
    for line in lines:
        line = re.sub(field_pattern, r':\1', line)
        line = re.sub(r'(L|C)\{([^}]*)\}', r':py:obj:`\2`', line)
        line = re.sub(r'U\{([^}]*)\}', r'\1', line)
        converted.append(line)
    lines[:] = converted
python
def process_docstring(app, what, name, obj, options, lines): """ Process the docstring for a given python object. Note that the list 'lines' is changed in this function. Sphinx uses the altered content of the list. """ result = [re.sub(r'U\{([^}]*)\}', r'\1', re.sub(r'(L|C)\{([^}]*)\}', r':py:obj:`\2`', re.sub(r'@(' + '|'.join(FIELDS) + r')', r':\1', l))) for l in lines] lines[:] = result[:]
[ "def", "process_docstring", "(", "app", ",", "what", ",", "name", ",", "obj", ",", "options", ",", "lines", ")", ":", "result", "=", "[", "re", ".", "sub", "(", "r'U\\{([^}]*)\\}'", ",", "r'\\1'", ",", "re", ".", "sub", "(", "r'(L|C)\\{([^}]*)\\}'", ",", "r':py:obj:`\\2`'", ",", "re", ".", "sub", "(", "r'@('", "+", "'|'", ".", "join", "(", "FIELDS", ")", "+", "r')'", ",", "r':\\1'", ",", "l", ")", ")", ")", "for", "l", "in", "lines", "]", "lines", "[", ":", "]", "=", "result", "[", ":", "]" ]
Process the docstring for a given python object. Note that the list 'lines' is changed in this function. Sphinx uses the altered content of the list.
[ "Process", "the", "docstring", "for", "a", "given", "python", "object", ".", "Note", "that", "the", "list", "lines", "is", "changed", "in", "this", "function", ".", "Sphinx", "uses", "the", "altered", "content", "of", "the", "list", "." ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/documentation/source/conf.py#L86-L97
6,309
python-constraint/python-constraint
constraint/__init__.py
doArc8
def doArc8(arcs, domains, assignments):
    """
    Perform the ARC-8 arc checking algorithm and prune domains

    @attention: Currently unused.

    @param arcs: presumably a mapping of variable -> neighbour variable ->
        list of (constraint, variables) pairs — TODO confirm against caller
    @param domains: mapping of variable to its Domain (supports hideValue)
    @param assignments: mapping of already-assigned variables to values;
        NOTE(review): entries for other variables are temporarily added and
        deleted during checking, and an already-assigned neighbour's entry
        is deleted too — confirm this is intended before reuse
    @return: False as soon as some domain becomes empty, True otherwise
    """
    # Worklist of variables still to be checked.
    check = dict.fromkeys(domains, True)
    while check:
        variable, _ = check.popitem()
        # Skip variables with no arcs or that already have a value.
        if variable not in arcs or variable in assignments:
            continue
        domain = domains[variable]
        arcsvariable = arcs[variable]
        for othervariable in arcsvariable:
            arcconstraints = arcsvariable[othervariable]
            # An assigned neighbour contributes a single candidate value.
            if othervariable in assignments:
                otherdomain = [assignments[othervariable]]
            else:
                otherdomain = domains[othervariable]
            if domain:
                # changed = False
                for value in domain[:]:
                    assignments[variable] = value
                    if otherdomain:
                        for othervalue in otherdomain:
                            assignments[othervariable] = othervalue
                            for constraint, variables in arcconstraints:
                                if not constraint(
                                    variables, domains, assignments, True
                                ):
                                    break
                            else:
                                # All constraints passed. Value is safe.
                                break
                        else:
                            # All othervalues failed. Kill value.
                            domain.hideValue(value)
                            # changed = True
                        del assignments[othervariable]
                del assignments[variable]
                # if changed:
                #     check.update(dict.fromkeys(arcsvariable))
            # A wiped-out domain means the problem is unsatisfiable.
            if not domain:
                return False
    return True
python
def doArc8(arcs, domains, assignments): """ Perform the ARC-8 arc checking algorithm and prune domains @attention: Currently unused. """ check = dict.fromkeys(domains, True) while check: variable, _ = check.popitem() if variable not in arcs or variable in assignments: continue domain = domains[variable] arcsvariable = arcs[variable] for othervariable in arcsvariable: arcconstraints = arcsvariable[othervariable] if othervariable in assignments: otherdomain = [assignments[othervariable]] else: otherdomain = domains[othervariable] if domain: # changed = False for value in domain[:]: assignments[variable] = value if otherdomain: for othervalue in otherdomain: assignments[othervariable] = othervalue for constraint, variables in arcconstraints: if not constraint( variables, domains, assignments, True ): break else: # All constraints passed. Value is safe. break else: # All othervalues failed. Kill value. domain.hideValue(value) # changed = True del assignments[othervariable] del assignments[variable] # if changed: # check.update(dict.fromkeys(arcsvariable)) if not domain: return False return True
[ "def", "doArc8", "(", "arcs", ",", "domains", ",", "assignments", ")", ":", "check", "=", "dict", ".", "fromkeys", "(", "domains", ",", "True", ")", "while", "check", ":", "variable", ",", "_", "=", "check", ".", "popitem", "(", ")", "if", "variable", "not", "in", "arcs", "or", "variable", "in", "assignments", ":", "continue", "domain", "=", "domains", "[", "variable", "]", "arcsvariable", "=", "arcs", "[", "variable", "]", "for", "othervariable", "in", "arcsvariable", ":", "arcconstraints", "=", "arcsvariable", "[", "othervariable", "]", "if", "othervariable", "in", "assignments", ":", "otherdomain", "=", "[", "assignments", "[", "othervariable", "]", "]", "else", ":", "otherdomain", "=", "domains", "[", "othervariable", "]", "if", "domain", ":", "# changed = False", "for", "value", "in", "domain", "[", ":", "]", ":", "assignments", "[", "variable", "]", "=", "value", "if", "otherdomain", ":", "for", "othervalue", "in", "otherdomain", ":", "assignments", "[", "othervariable", "]", "=", "othervalue", "for", "constraint", ",", "variables", "in", "arcconstraints", ":", "if", "not", "constraint", "(", "variables", ",", "domains", ",", "assignments", ",", "True", ")", ":", "break", "else", ":", "# All constraints passed. Value is safe.", "break", "else", ":", "# All othervalues failed. Kill value.", "domain", ".", "hideValue", "(", "value", ")", "# changed = True", "del", "assignments", "[", "othervariable", "]", "del", "assignments", "[", "variable", "]", "# if changed:", "# check.update(dict.fromkeys(arcsvariable))", "if", "not", "domain", ":", "return", "False", "return", "True" ]
Perform the ARC-8 arc checking algorithm and prune domains @attention: Currently unused.
[ "Perform", "the", "ARC", "-", "8", "arc", "checking", "algorithm", "and", "prune", "domains" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L322-L366
6,310
python-constraint/python-constraint
constraint/__init__.py
Problem.addVariables
def addVariables(self, variables, domain):
    """
    Add one or more variables to the problem

    Example:
    >>> problem = Problem()
    >>> problem.addVariables(["a", "b"], [1, 2, 3])
    >>> solutions = problem.getSolutions()
    >>> len(solutions)
    9
    >>> {'a': 3, 'b': 1} in solutions
    True

    @param variables: Any object containing a sequence of objects
                      representing problem variables
    @type  variables: sequence of hashable objects
    @param domain: Set of items defining the possible values that
                   the given variables may assume
    @type  domain: list, tuple, or instance of C{Domain}
    """
    # Delegate each variable to addVariable; all of them share the same
    # domain argument.
    for each in variables:
        self.addVariable(each, domain)
python
def addVariables(self, variables, domain): """ Add one or more variables to the problem Example: >>> problem = Problem() >>> problem.addVariables(["a", "b"], [1, 2, 3]) >>> solutions = problem.getSolutions() >>> len(solutions) 9 >>> {'a': 3, 'b': 1} in solutions True @param variables: Any object containing a sequence of objects represeting problem variables @type variables: sequence of hashable objects @param domain: Set of items defining the possible values that the given variables may assume @type domain: list, tuple, or instance of C{Domain} """ for variable in variables: self.addVariable(variable, domain)
[ "def", "addVariables", "(", "self", ",", "variables", ",", "domain", ")", ":", "for", "variable", "in", "variables", ":", "self", ".", "addVariable", "(", "variable", ",", "domain", ")" ]
Add one or more variables to the problem Example: >>> problem = Problem() >>> problem.addVariables(["a", "b"], [1, 2, 3]) >>> solutions = problem.getSolutions() >>> len(solutions) 9 >>> {'a': 3, 'b': 1} in solutions True @param variables: Any object containing a sequence of objects represeting problem variables @type variables: sequence of hashable objects @param domain: Set of items defining the possible values that the given variables may assume @type domain: list, tuple, or instance of C{Domain}
[ "Add", "one", "or", "more", "variables", "to", "the", "problem" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L160-L182
6,311
python-constraint/python-constraint
constraint/__init__.py
Problem.addConstraint
def addConstraint(self, constraint, variables=None):
    """
    Add a constraint to the problem

    Example:
    >>> problem = Problem()
    >>> problem.addVariables(["a", "b"], [1, 2, 3])
    >>> problem.addConstraint(lambda a, b: b == a+1, ["a", "b"])
    >>> solutions = problem.getSolutions()
    >>>

    @param constraint: Constraint to be included in the problem
    @type  constraint: instance a L{Constraint} subclass or a function to
                       be wrapped by L{FunctionConstraint}
    @param variables: Variables affected by the constraint (default to
                      all variables). Depending on the constraint type
                      the order may be important.
    @type  variables: set or sequence of variables
    """
    if not isinstance(constraint, Constraint):
        # Plain callables are wrapped; anything else is rejected.
        if not callable(constraint):
            raise ValueError("Constraints must be instances of subclasses "
                             "of the Constraint class")
        constraint = FunctionConstraint(constraint)
    self._constraints.append((constraint, variables))
python
def addConstraint(self, constraint, variables=None): """ Add a constraint to the problem Example: >>> problem = Problem() >>> problem.addVariables(["a", "b"], [1, 2, 3]) >>> problem.addConstraint(lambda a, b: b == a+1, ["a", "b"]) >>> solutions = problem.getSolutions() >>> @param constraint: Constraint to be included in the problem @type constraint: instance a L{Constraint} subclass or a function to be wrapped by L{FunctionConstraint} @param variables: Variables affected by the constraint (default to all variables). Depending on the constraint type the order may be important. @type variables: set or sequence of variables """ if not isinstance(constraint, Constraint): if callable(constraint): constraint = FunctionConstraint(constraint) else: msg = "Constraints must be instances of subclasses " "of the Constraint class" raise ValueError(msg) self._constraints.append((constraint, variables))
[ "def", "addConstraint", "(", "self", ",", "constraint", ",", "variables", "=", "None", ")", ":", "if", "not", "isinstance", "(", "constraint", ",", "Constraint", ")", ":", "if", "callable", "(", "constraint", ")", ":", "constraint", "=", "FunctionConstraint", "(", "constraint", ")", "else", ":", "msg", "=", "\"Constraints must be instances of subclasses \"", "\"of the Constraint class\"", "raise", "ValueError", "(", "msg", ")", "self", ".", "_constraints", ".", "append", "(", "(", "constraint", ",", "variables", ")", ")" ]
Add a constraint to the problem Example: >>> problem = Problem() >>> problem.addVariables(["a", "b"], [1, 2, 3]) >>> problem.addConstraint(lambda a, b: b == a+1, ["a", "b"]) >>> solutions = problem.getSolutions() >>> @param constraint: Constraint to be included in the problem @type constraint: instance a L{Constraint} subclass or a function to be wrapped by L{FunctionConstraint} @param variables: Variables affected by the constraint (default to all variables). Depending on the constraint type the order may be important. @type variables: set or sequence of variables
[ "Add", "a", "constraint", "to", "the", "problem" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L184-L210
6,312
python-constraint/python-constraint
constraint/__init__.py
Problem.getSolution
def getSolution(self):
    """
    Find and return a solution to the problem

    Example:
    >>> problem = Problem()
    >>> problem.getSolution() is None
    True
    >>> problem.addVariables(["a"], [42])
    >>> problem.getSolution()
    {'a': 42}

    @return: Solution for the problem
    @rtype: dictionary mapping variables to values
    """
    domains, constraints, vconstraints = self._getArgs()
    # Without variables there is nothing to solve.
    if domains:
        return self._solver.getSolution(domains, constraints, vconstraints)
    return None
python
def getSolution(self): """ Find and return a solution to the problem Example: >>> problem = Problem() >>> problem.getSolution() is None True >>> problem.addVariables(["a"], [42]) >>> problem.getSolution() {'a': 42} @return: Solution for the problem @rtype: dictionary mapping variables to values """ domains, constraints, vconstraints = self._getArgs() if not domains: return None return self._solver.getSolution(domains, constraints, vconstraints)
[ "def", "getSolution", "(", "self", ")", ":", "domains", ",", "constraints", ",", "vconstraints", "=", "self", ".", "_getArgs", "(", ")", "if", "not", "domains", ":", "return", "None", "return", "self", ".", "_solver", ".", "getSolution", "(", "domains", ",", "constraints", ",", "vconstraints", ")" ]
Find and return a solution to the problem Example: >>> problem = Problem() >>> problem.getSolution() is None True >>> problem.addVariables(["a"], [42]) >>> problem.getSolution() {'a': 42} @return: Solution for the problem @rtype: dictionary mapping variables to values
[ "Find", "and", "return", "a", "solution", "to", "the", "problem" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L212-L231
6,313
python-constraint/python-constraint
constraint/__init__.py
Problem.getSolutions
def getSolutions(self): """ Find and return all solutions to the problem Example: >>> problem = Problem() >>> problem.getSolutions() == [] True >>> problem.addVariables(["a"], [42]) >>> problem.getSolutions() [{'a': 42}] @return: All solutions for the problem @rtype: list of dictionaries mapping variables to values """ domains, constraints, vconstraints = self._getArgs() if not domains: return [] return self._solver.getSolutions(domains, constraints, vconstraints)
python
def getSolutions(self): """ Find and return all solutions to the problem Example: >>> problem = Problem() >>> problem.getSolutions() == [] True >>> problem.addVariables(["a"], [42]) >>> problem.getSolutions() [{'a': 42}] @return: All solutions for the problem @rtype: list of dictionaries mapping variables to values """ domains, constraints, vconstraints = self._getArgs() if not domains: return [] return self._solver.getSolutions(domains, constraints, vconstraints)
[ "def", "getSolutions", "(", "self", ")", ":", "domains", ",", "constraints", ",", "vconstraints", "=", "self", ".", "_getArgs", "(", ")", "if", "not", "domains", ":", "return", "[", "]", "return", "self", ".", "_solver", ".", "getSolutions", "(", "domains", ",", "constraints", ",", "vconstraints", ")" ]
Find and return all solutions to the problem Example: >>> problem = Problem() >>> problem.getSolutions() == [] True >>> problem.addVariables(["a"], [42]) >>> problem.getSolutions() [{'a': 42}] @return: All solutions for the problem @rtype: list of dictionaries mapping variables to values
[ "Find", "and", "return", "all", "solutions", "to", "the", "problem" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L233-L252
6,314
python-constraint/python-constraint
constraint/__init__.py
Problem.getSolutionIter
def getSolutionIter(self): """ Return an iterator to the solutions of the problem Example: >>> problem = Problem() >>> list(problem.getSolutionIter()) == [] True >>> problem.addVariables(["a"], [42]) >>> iter = problem.getSolutionIter() >>> next(iter) {'a': 42} >>> next(iter) Traceback (most recent call last): File "<stdin>", line 1, in ? StopIteration """ domains, constraints, vconstraints = self._getArgs() if not domains: return iter(()) return self._solver.getSolutionIter(domains, constraints, vconstraints)
python
def getSolutionIter(self): """ Return an iterator to the solutions of the problem Example: >>> problem = Problem() >>> list(problem.getSolutionIter()) == [] True >>> problem.addVariables(["a"], [42]) >>> iter = problem.getSolutionIter() >>> next(iter) {'a': 42} >>> next(iter) Traceback (most recent call last): File "<stdin>", line 1, in ? StopIteration """ domains, constraints, vconstraints = self._getArgs() if not domains: return iter(()) return self._solver.getSolutionIter(domains, constraints, vconstraints)
[ "def", "getSolutionIter", "(", "self", ")", ":", "domains", ",", "constraints", ",", "vconstraints", "=", "self", ".", "_getArgs", "(", ")", "if", "not", "domains", ":", "return", "iter", "(", "(", ")", ")", "return", "self", ".", "_solver", ".", "getSolutionIter", "(", "domains", ",", "constraints", ",", "vconstraints", ")" ]
Return an iterator to the solutions of the problem Example: >>> problem = Problem() >>> list(problem.getSolutionIter()) == [] True >>> problem.addVariables(["a"], [42]) >>> iter = problem.getSolutionIter() >>> next(iter) {'a': 42} >>> next(iter) Traceback (most recent call last): File "<stdin>", line 1, in ? StopIteration
[ "Return", "an", "iterator", "to", "the", "solutions", "of", "the", "problem" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L254-L275
6,315
python-constraint/python-constraint
constraint/__init__.py
Solver.getSolution
def getSolution(self, domains, constraints, vconstraints): """ Return one solution for the given problem @param domains: Dictionary mapping variables to their domains @type domains: dict @param constraints: List of pairs of (constraint, variables) @type constraints: list @param vconstraints: Dictionary mapping variables to a list of constraints affecting the given variables. @type vconstraints: dict """ msg = "%s is an abstract class" % self.__class__.__name__ raise NotImplementedError(msg)
python
def getSolution(self, domains, constraints, vconstraints): """ Return one solution for the given problem @param domains: Dictionary mapping variables to their domains @type domains: dict @param constraints: List of pairs of (constraint, variables) @type constraints: list @param vconstraints: Dictionary mapping variables to a list of constraints affecting the given variables. @type vconstraints: dict """ msg = "%s is an abstract class" % self.__class__.__name__ raise NotImplementedError(msg)
[ "def", "getSolution", "(", "self", ",", "domains", ",", "constraints", ",", "vconstraints", ")", ":", "msg", "=", "\"%s is an abstract class\"", "%", "self", ".", "__class__", ".", "__name__", "raise", "NotImplementedError", "(", "msg", ")" ]
Return one solution for the given problem @param domains: Dictionary mapping variables to their domains @type domains: dict @param constraints: List of pairs of (constraint, variables) @type constraints: list @param vconstraints: Dictionary mapping variables to a list of constraints affecting the given variables. @type vconstraints: dict
[ "Return", "one", "solution", "for", "the", "given", "problem" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L373-L386
6,316
python-constraint/python-constraint
constraint/__init__.py
Domain.resetState
def resetState(self): """ Reset to the original domain state, including all possible values """ self.extend(self._hidden) del self._hidden[:] del self._states[:]
python
def resetState(self): """ Reset to the original domain state, including all possible values """ self.extend(self._hidden) del self._hidden[:] del self._states[:]
[ "def", "resetState", "(", "self", ")", ":", "self", ".", "extend", "(", "self", ".", "_hidden", ")", "del", "self", ".", "_hidden", "[", ":", "]", "del", "self", ".", "_states", "[", ":", "]" ]
Reset to the original domain state, including all possible values
[ "Reset", "to", "the", "original", "domain", "state", "including", "all", "possible", "values" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L770-L776
6,317
python-constraint/python-constraint
constraint/__init__.py
Domain.popState
def popState(self): """ Restore domain state from the top of the stack Variables hidden since the last popped state are then available again. """ diff = self._states.pop() - len(self) if diff: self.extend(self._hidden[-diff:]) del self._hidden[-diff:]
python
def popState(self): """ Restore domain state from the top of the stack Variables hidden since the last popped state are then available again. """ diff = self._states.pop() - len(self) if diff: self.extend(self._hidden[-diff:]) del self._hidden[-diff:]
[ "def", "popState", "(", "self", ")", ":", "diff", "=", "self", ".", "_states", ".", "pop", "(", ")", "-", "len", "(", "self", ")", "if", "diff", ":", "self", ".", "extend", "(", "self", ".", "_hidden", "[", "-", "diff", ":", "]", ")", "del", "self", ".", "_hidden", "[", "-", "diff", ":", "]" ]
Restore domain state from the top of the stack Variables hidden since the last popped state are then available again.
[ "Restore", "domain", "state", "from", "the", "top", "of", "the", "stack" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L787-L797
6,318
python-constraint/python-constraint
constraint/__init__.py
Domain.hideValue
def hideValue(self, value): """ Hide the given value from the domain After that call the given value won't be seen as a possible value on that domain anymore. The hidden value will be restored when the previous saved state is popped. @param value: Object currently available in the domain """ list.remove(self, value) self._hidden.append(value)
python
def hideValue(self, value): """ Hide the given value from the domain After that call the given value won't be seen as a possible value on that domain anymore. The hidden value will be restored when the previous saved state is popped. @param value: Object currently available in the domain """ list.remove(self, value) self._hidden.append(value)
[ "def", "hideValue", "(", "self", ",", "value", ")", ":", "list", ".", "remove", "(", "self", ",", "value", ")", "self", ".", "_hidden", ".", "append", "(", "value", ")" ]
Hide the given value from the domain After that call the given value won't be seen as a possible value on that domain anymore. The hidden value will be restored when the previous saved state is popped. @param value: Object currently available in the domain
[ "Hide", "the", "given", "value", "from", "the", "domain" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L799-L810
6,319
python-constraint/python-constraint
constraint/__init__.py
Constraint.preProcess
def preProcess(self, variables, domains, constraints, vconstraints): """ Preprocess variable domains This method is called before starting to look for solutions, and is used to prune domains with specific constraint logic when possible. For instance, any constraints with a single variable may be applied on all possible values and removed, since they may act on individual values even without further knowledge about other assignments. @param variables: Variables affected by that constraint, in the same order provided by the user @type variables: sequence @param domains: Dictionary mapping variables to their domains @type domains: dict @param constraints: List of pairs of (constraint, variables) @type constraints: list @param vconstraints: Dictionary mapping variables to a list of constraints affecting the given variables. @type vconstraints: dict """ if len(variables) == 1: variable = variables[0] domain = domains[variable] for value in domain[:]: if not self(variables, domains, {variable: value}): domain.remove(value) constraints.remove((self, variables)) vconstraints[variable].remove((self, variables))
python
def preProcess(self, variables, domains, constraints, vconstraints): """ Preprocess variable domains This method is called before starting to look for solutions, and is used to prune domains with specific constraint logic when possible. For instance, any constraints with a single variable may be applied on all possible values and removed, since they may act on individual values even without further knowledge about other assignments. @param variables: Variables affected by that constraint, in the same order provided by the user @type variables: sequence @param domains: Dictionary mapping variables to their domains @type domains: dict @param constraints: List of pairs of (constraint, variables) @type constraints: list @param vconstraints: Dictionary mapping variables to a list of constraints affecting the given variables. @type vconstraints: dict """ if len(variables) == 1: variable = variables[0] domain = domains[variable] for value in domain[:]: if not self(variables, domains, {variable: value}): domain.remove(value) constraints.remove((self, variables)) vconstraints[variable].remove((self, variables))
[ "def", "preProcess", "(", "self", ",", "variables", ",", "domains", ",", "constraints", ",", "vconstraints", ")", ":", "if", "len", "(", "variables", ")", "==", "1", ":", "variable", "=", "variables", "[", "0", "]", "domain", "=", "domains", "[", "variable", "]", "for", "value", "in", "domain", "[", ":", "]", ":", "if", "not", "self", "(", "variables", ",", "domains", ",", "{", "variable", ":", "value", "}", ")", ":", "domain", ".", "remove", "(", "value", ")", "constraints", ".", "remove", "(", "(", "self", ",", "variables", ")", ")", "vconstraints", "[", "variable", "]", ".", "remove", "(", "(", "self", ",", "variables", ")", ")" ]
Preprocess variable domains This method is called before starting to look for solutions, and is used to prune domains with specific constraint logic when possible. For instance, any constraints with a single variable may be applied on all possible values and removed, since they may act on individual values even without further knowledge about other assignments. @param variables: Variables affected by that constraint, in the same order provided by the user @type variables: sequence @param domains: Dictionary mapping variables to their domains @type domains: dict @param constraints: List of pairs of (constraint, variables) @type constraints: list @param vconstraints: Dictionary mapping variables to a list of constraints affecting the given variables. @type vconstraints: dict
[ "Preprocess", "variable", "domains" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L849-L878
6,320
python-constraint/python-constraint
constraint/__init__.py
Constraint.forwardCheck
def forwardCheck(self, variables, domains, assignments, _unassigned=Unassigned): """ Helper method for generic forward checking Currently, this method acts only when there's a single unassigned variable. @param variables: Variables affected by that constraint, in the same order provided by the user @type variables: sequence @param domains: Dictionary mapping variables to their domains @type domains: dict @param assignments: Dictionary mapping assigned variables to their current assumed value @type assignments: dict @return: Boolean value stating if this constraint is currently broken or not @rtype: bool """ unassignedvariable = _unassigned for variable in variables: if variable not in assignments: if unassignedvariable is _unassigned: unassignedvariable = variable else: break else: if unassignedvariable is not _unassigned: # Remove from the unassigned variable domain's all # values which break our variable's constraints. domain = domains[unassignedvariable] if domain: for value in domain[:]: assignments[unassignedvariable] = value if not self(variables, domains, assignments): domain.hideValue(value) del assignments[unassignedvariable] if not domain: return False return True
python
def forwardCheck(self, variables, domains, assignments, _unassigned=Unassigned): """ Helper method for generic forward checking Currently, this method acts only when there's a single unassigned variable. @param variables: Variables affected by that constraint, in the same order provided by the user @type variables: sequence @param domains: Dictionary mapping variables to their domains @type domains: dict @param assignments: Dictionary mapping assigned variables to their current assumed value @type assignments: dict @return: Boolean value stating if this constraint is currently broken or not @rtype: bool """ unassignedvariable = _unassigned for variable in variables: if variable not in assignments: if unassignedvariable is _unassigned: unassignedvariable = variable else: break else: if unassignedvariable is not _unassigned: # Remove from the unassigned variable domain's all # values which break our variable's constraints. domain = domains[unassignedvariable] if domain: for value in domain[:]: assignments[unassignedvariable] = value if not self(variables, domains, assignments): domain.hideValue(value) del assignments[unassignedvariable] if not domain: return False return True
[ "def", "forwardCheck", "(", "self", ",", "variables", ",", "domains", ",", "assignments", ",", "_unassigned", "=", "Unassigned", ")", ":", "unassignedvariable", "=", "_unassigned", "for", "variable", "in", "variables", ":", "if", "variable", "not", "in", "assignments", ":", "if", "unassignedvariable", "is", "_unassigned", ":", "unassignedvariable", "=", "variable", "else", ":", "break", "else", ":", "if", "unassignedvariable", "is", "not", "_unassigned", ":", "# Remove from the unassigned variable domain's all", "# values which break our variable's constraints.", "domain", "=", "domains", "[", "unassignedvariable", "]", "if", "domain", ":", "for", "value", "in", "domain", "[", ":", "]", ":", "assignments", "[", "unassignedvariable", "]", "=", "value", "if", "not", "self", "(", "variables", ",", "domains", ",", "assignments", ")", ":", "domain", ".", "hideValue", "(", "value", ")", "del", "assignments", "[", "unassignedvariable", "]", "if", "not", "domain", ":", "return", "False", "return", "True" ]
Helper method for generic forward checking Currently, this method acts only when there's a single unassigned variable. @param variables: Variables affected by that constraint, in the same order provided by the user @type variables: sequence @param domains: Dictionary mapping variables to their domains @type domains: dict @param assignments: Dictionary mapping assigned variables to their current assumed value @type assignments: dict @return: Boolean value stating if this constraint is currently broken or not @rtype: bool
[ "Helper", "method", "for", "generic", "forward", "checking" ]
e23fe9852cddddf1c3e258e03f2175df24b4c702
https://github.com/python-constraint/python-constraint/blob/e23fe9852cddddf1c3e258e03f2175df24b4c702/constraint/__init__.py#L880-L919
6,321
kencochrane/django-defender
defender/views.py
block_view
def block_view(request): """ List the blocked IP and Usernames """ blocked_ip_list = get_blocked_ips() blocked_username_list = get_blocked_usernames() context = {'blocked_ip_list': blocked_ip_list, 'blocked_username_list': blocked_username_list} return render(request, 'defender/admin/blocks.html', context)
python
def block_view(request): """ List the blocked IP and Usernames """ blocked_ip_list = get_blocked_ips() blocked_username_list = get_blocked_usernames() context = {'blocked_ip_list': blocked_ip_list, 'blocked_username_list': blocked_username_list} return render(request, 'defender/admin/blocks.html', context)
[ "def", "block_view", "(", "request", ")", ":", "blocked_ip_list", "=", "get_blocked_ips", "(", ")", "blocked_username_list", "=", "get_blocked_usernames", "(", ")", "context", "=", "{", "'blocked_ip_list'", ":", "blocked_ip_list", ",", "'blocked_username_list'", ":", "blocked_username_list", "}", "return", "render", "(", "request", ",", "'defender/admin/blocks.html'", ",", "context", ")" ]
List the blocked IP and Usernames
[ "List", "the", "blocked", "IP", "and", "Usernames" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/views.py#L14-L21
6,322
kencochrane/django-defender
defender/utils.py
get_ip
def get_ip(request): """ get the ip address from the request """ if config.BEHIND_REVERSE_PROXY: ip_address = request.META.get(config.REVERSE_PROXY_HEADER, '') ip_address = ip_address.split(",", 1)[0].strip() if ip_address == '': ip_address = get_ip_address_from_request(request) else: ip_address = get_ip_address_from_request(request) return ip_address
python
def get_ip(request): """ get the ip address from the request """ if config.BEHIND_REVERSE_PROXY: ip_address = request.META.get(config.REVERSE_PROXY_HEADER, '') ip_address = ip_address.split(",", 1)[0].strip() if ip_address == '': ip_address = get_ip_address_from_request(request) else: ip_address = get_ip_address_from_request(request) return ip_address
[ "def", "get_ip", "(", "request", ")", ":", "if", "config", ".", "BEHIND_REVERSE_PROXY", ":", "ip_address", "=", "request", ".", "META", ".", "get", "(", "config", ".", "REVERSE_PROXY_HEADER", ",", "''", ")", "ip_address", "=", "ip_address", ".", "split", "(", "\",\"", ",", "1", ")", "[", "0", "]", ".", "strip", "(", ")", "if", "ip_address", "==", "''", ":", "ip_address", "=", "get_ip_address_from_request", "(", "request", ")", "else", ":", "ip_address", "=", "get_ip_address_from_request", "(", "request", ")", "return", "ip_address" ]
get the ip address from the request
[ "get", "the", "ip", "address", "from", "the", "request" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L41-L50
6,323
kencochrane/django-defender
defender/utils.py
get_blocked_ips
def get_blocked_ips(): """ get a list of blocked ips from redis """ if config.DISABLE_IP_LOCKOUT: # There are no blocked IP's since we disabled them. return [] key = get_ip_blocked_cache_key("*") key_list = [redis_key.decode('utf-8') for redis_key in REDIS_SERVER.keys(key)] return strip_keys(key_list)
python
def get_blocked_ips(): """ get a list of blocked ips from redis """ if config.DISABLE_IP_LOCKOUT: # There are no blocked IP's since we disabled them. return [] key = get_ip_blocked_cache_key("*") key_list = [redis_key.decode('utf-8') for redis_key in REDIS_SERVER.keys(key)] return strip_keys(key_list)
[ "def", "get_blocked_ips", "(", ")", ":", "if", "config", ".", "DISABLE_IP_LOCKOUT", ":", "# There are no blocked IP's since we disabled them.", "return", "[", "]", "key", "=", "get_ip_blocked_cache_key", "(", "\"*\"", ")", "key_list", "=", "[", "redis_key", ".", "decode", "(", "'utf-8'", ")", "for", "redis_key", "in", "REDIS_SERVER", ".", "keys", "(", "key", ")", "]", "return", "strip_keys", "(", "key_list", ")" ]
get a list of blocked ips from redis
[ "get", "a", "list", "of", "blocked", "ips", "from", "redis" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L101-L109
6,324
kencochrane/django-defender
defender/utils.py
get_blocked_usernames
def get_blocked_usernames(): """ get a list of blocked usernames from redis """ if config.DISABLE_USERNAME_LOCKOUT: # There are no blocked usernames since we disabled them. return [] key = get_username_blocked_cache_key("*") key_list = [redis_key.decode('utf-8') for redis_key in REDIS_SERVER.keys(key)] return strip_keys(key_list)
python
def get_blocked_usernames(): """ get a list of blocked usernames from redis """ if config.DISABLE_USERNAME_LOCKOUT: # There are no blocked usernames since we disabled them. return [] key = get_username_blocked_cache_key("*") key_list = [redis_key.decode('utf-8') for redis_key in REDIS_SERVER.keys(key)] return strip_keys(key_list)
[ "def", "get_blocked_usernames", "(", ")", ":", "if", "config", ".", "DISABLE_USERNAME_LOCKOUT", ":", "# There are no blocked usernames since we disabled them.", "return", "[", "]", "key", "=", "get_username_blocked_cache_key", "(", "\"*\"", ")", "key_list", "=", "[", "redis_key", ".", "decode", "(", "'utf-8'", ")", "for", "redis_key", "in", "REDIS_SERVER", ".", "keys", "(", "key", ")", "]", "return", "strip_keys", "(", "key_list", ")" ]
get a list of blocked usernames from redis
[ "get", "a", "list", "of", "blocked", "usernames", "from", "redis" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L112-L120
6,325
kencochrane/django-defender
defender/utils.py
increment_key
def increment_key(key): """ given a key increment the value """ pipe = REDIS_SERVER.pipeline() pipe.incr(key, 1) if config.COOLOFF_TIME: pipe.expire(key, config.COOLOFF_TIME) new_value = pipe.execute()[0] return new_value
python
def increment_key(key): """ given a key increment the value """ pipe = REDIS_SERVER.pipeline() pipe.incr(key, 1) if config.COOLOFF_TIME: pipe.expire(key, config.COOLOFF_TIME) new_value = pipe.execute()[0] return new_value
[ "def", "increment_key", "(", "key", ")", ":", "pipe", "=", "REDIS_SERVER", ".", "pipeline", "(", ")", "pipe", ".", "incr", "(", "key", ",", "1", ")", "if", "config", ".", "COOLOFF_TIME", ":", "pipe", ".", "expire", "(", "key", ",", "config", ".", "COOLOFF_TIME", ")", "new_value", "=", "pipe", ".", "execute", "(", ")", "[", "0", "]", "return", "new_value" ]
given a key increment the value
[ "given", "a", "key", "increment", "the", "value" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L123-L130
6,326
kencochrane/django-defender
defender/utils.py
username_from_request
def username_from_request(request): """ unloads username from default POST request """ if config.USERNAME_FORM_FIELD in request.POST: return request.POST[config.USERNAME_FORM_FIELD][:255] return None
python
def username_from_request(request): """ unloads username from default POST request """ if config.USERNAME_FORM_FIELD in request.POST: return request.POST[config.USERNAME_FORM_FIELD][:255] return None
[ "def", "username_from_request", "(", "request", ")", ":", "if", "config", ".", "USERNAME_FORM_FIELD", "in", "request", ".", "POST", ":", "return", "request", ".", "POST", "[", "config", ".", "USERNAME_FORM_FIELD", "]", "[", ":", "255", "]", "return", "None" ]
unloads username from default POST request
[ "unloads", "username", "from", "default", "POST", "request" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L133-L137
6,327
kencochrane/django-defender
defender/utils.py
get_user_attempts
def get_user_attempts(request, get_username=get_username_from_request, username=None): """ Returns number of access attempts for this ip, username """ ip_address = get_ip(request) username = lower_username(username or get_username(request)) # get by IP ip_count = REDIS_SERVER.get(get_ip_attempt_cache_key(ip_address)) if not ip_count: ip_count = 0 ip_count = int(ip_count) # get by username username_count = REDIS_SERVER.get(get_username_attempt_cache_key(username)) if not username_count: username_count = 0 username_count = int(username_count) # return the larger of the two. return max(ip_count, username_count)
python
def get_user_attempts(request, get_username=get_username_from_request, username=None): """ Returns number of access attempts for this ip, username """ ip_address = get_ip(request) username = lower_username(username or get_username(request)) # get by IP ip_count = REDIS_SERVER.get(get_ip_attempt_cache_key(ip_address)) if not ip_count: ip_count = 0 ip_count = int(ip_count) # get by username username_count = REDIS_SERVER.get(get_username_attempt_cache_key(username)) if not username_count: username_count = 0 username_count = int(username_count) # return the larger of the two. return max(ip_count, username_count)
[ "def", "get_user_attempts", "(", "request", ",", "get_username", "=", "get_username_from_request", ",", "username", "=", "None", ")", ":", "ip_address", "=", "get_ip", "(", "request", ")", "username", "=", "lower_username", "(", "username", "or", "get_username", "(", "request", ")", ")", "# get by IP", "ip_count", "=", "REDIS_SERVER", ".", "get", "(", "get_ip_attempt_cache_key", "(", "ip_address", ")", ")", "if", "not", "ip_count", ":", "ip_count", "=", "0", "ip_count", "=", "int", "(", "ip_count", ")", "# get by username", "username_count", "=", "REDIS_SERVER", ".", "get", "(", "get_username_attempt_cache_key", "(", "username", ")", ")", "if", "not", "username_count", ":", "username_count", "=", "0", "username_count", "=", "int", "(", "username_count", ")", "# return the larger of the two.", "return", "max", "(", "ip_count", ",", "username_count", ")" ]
Returns number of access attempts for this ip, username
[ "Returns", "number", "of", "access", "attempts", "for", "this", "ip", "username" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L145-L165
6,328
kencochrane/django-defender
defender/utils.py
block_ip
def block_ip(ip_address): """ given the ip, block it """ if not ip_address: # no reason to continue when there is no ip return if config.DISABLE_IP_LOCKOUT: # no need to block, we disabled it. return key = get_ip_blocked_cache_key(ip_address) if config.COOLOFF_TIME: REDIS_SERVER.set(key, 'blocked', config.COOLOFF_TIME) else: REDIS_SERVER.set(key, 'blocked') send_ip_block_signal(ip_address)
python
def block_ip(ip_address): """ given the ip, block it """ if not ip_address: # no reason to continue when there is no ip return if config.DISABLE_IP_LOCKOUT: # no need to block, we disabled it. return key = get_ip_blocked_cache_key(ip_address) if config.COOLOFF_TIME: REDIS_SERVER.set(key, 'blocked', config.COOLOFF_TIME) else: REDIS_SERVER.set(key, 'blocked') send_ip_block_signal(ip_address)
[ "def", "block_ip", "(", "ip_address", ")", ":", "if", "not", "ip_address", ":", "# no reason to continue when there is no ip", "return", "if", "config", ".", "DISABLE_IP_LOCKOUT", ":", "# no need to block, we disabled it.", "return", "key", "=", "get_ip_blocked_cache_key", "(", "ip_address", ")", "if", "config", ".", "COOLOFF_TIME", ":", "REDIS_SERVER", ".", "set", "(", "key", ",", "'blocked'", ",", "config", ".", "COOLOFF_TIME", ")", "else", ":", "REDIS_SERVER", ".", "set", "(", "key", ",", "'blocked'", ")", "send_ip_block_signal", "(", "ip_address", ")" ]
given the ip, block it
[ "given", "the", "ip", "block", "it" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L168-L181
6,329
kencochrane/django-defender
defender/utils.py
block_username
def block_username(username): """ given the username block it. """ if not username: # no reason to continue when there is no username return if config.DISABLE_USERNAME_LOCKOUT: # no need to block, we disabled it. return key = get_username_blocked_cache_key(username) if config.COOLOFF_TIME: REDIS_SERVER.set(key, 'blocked', config.COOLOFF_TIME) else: REDIS_SERVER.set(key, 'blocked') send_username_block_signal(username)
python
def block_username(username): """ given the username block it. """ if not username: # no reason to continue when there is no username return if config.DISABLE_USERNAME_LOCKOUT: # no need to block, we disabled it. return key = get_username_blocked_cache_key(username) if config.COOLOFF_TIME: REDIS_SERVER.set(key, 'blocked', config.COOLOFF_TIME) else: REDIS_SERVER.set(key, 'blocked') send_username_block_signal(username)
[ "def", "block_username", "(", "username", ")", ":", "if", "not", "username", ":", "# no reason to continue when there is no username", "return", "if", "config", ".", "DISABLE_USERNAME_LOCKOUT", ":", "# no need to block, we disabled it.", "return", "key", "=", "get_username_blocked_cache_key", "(", "username", ")", "if", "config", ".", "COOLOFF_TIME", ":", "REDIS_SERVER", ".", "set", "(", "key", ",", "'blocked'", ",", "config", ".", "COOLOFF_TIME", ")", "else", ":", "REDIS_SERVER", ".", "set", "(", "key", ",", "'blocked'", ")", "send_username_block_signal", "(", "username", ")" ]
given the username block it.
[ "given", "the", "username", "block", "it", "." ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L184-L197
6,330
kencochrane/django-defender
defender/utils.py
record_failed_attempt
def record_failed_attempt(ip_address, username): """ record the failed login attempt, if over limit return False, if not over limit return True """ # increment the failed count, and get current number ip_block = False if not config.DISABLE_IP_LOCKOUT: # we only want to increment the IP if this is disabled. ip_count = increment_key(get_ip_attempt_cache_key(ip_address)) # if over the limit, add to block if ip_count > config.IP_FAILURE_LIMIT: block_ip(ip_address) ip_block = True user_block = False if username and not config.DISABLE_USERNAME_LOCKOUT: user_count = increment_key(get_username_attempt_cache_key(username)) # if over the limit, add to block if user_count > config.USERNAME_FAILURE_LIMIT: block_username(username) user_block = True # if we have this turned on, then there is no reason to look at ip_block # we will just look at user_block, and short circut the result since # we don't need to continue. if config.DISABLE_IP_LOCKOUT: # if user_block is True, it means it was blocked # we need to return False return not user_block if config.DISABLE_USERNAME_LOCKOUT: # The same as DISABLE_IP_LOCKOUT return not ip_block # we want to make sure both the IP and user is blocked before we # return False # this is mostly used when a lot of your users are using proxies, # and you don't want one user to block everyone on that one IP. if config.LOCKOUT_BY_IP_USERNAME: # both ip_block and user_block need to be True in order # to return a False. return not (ip_block and user_block) # if any blocks return False, no blocks. return True return not (ip_block or user_block)
python
def record_failed_attempt(ip_address, username): """ record the failed login attempt, if over limit return False, if not over limit return True """ # increment the failed count, and get current number ip_block = False if not config.DISABLE_IP_LOCKOUT: # we only want to increment the IP if this is disabled. ip_count = increment_key(get_ip_attempt_cache_key(ip_address)) # if over the limit, add to block if ip_count > config.IP_FAILURE_LIMIT: block_ip(ip_address) ip_block = True user_block = False if username and not config.DISABLE_USERNAME_LOCKOUT: user_count = increment_key(get_username_attempt_cache_key(username)) # if over the limit, add to block if user_count > config.USERNAME_FAILURE_LIMIT: block_username(username) user_block = True # if we have this turned on, then there is no reason to look at ip_block # we will just look at user_block, and short circut the result since # we don't need to continue. if config.DISABLE_IP_LOCKOUT: # if user_block is True, it means it was blocked # we need to return False return not user_block if config.DISABLE_USERNAME_LOCKOUT: # The same as DISABLE_IP_LOCKOUT return not ip_block # we want to make sure both the IP and user is blocked before we # return False # this is mostly used when a lot of your users are using proxies, # and you don't want one user to block everyone on that one IP. if config.LOCKOUT_BY_IP_USERNAME: # both ip_block and user_block need to be True in order # to return a False. return not (ip_block and user_block) # if any blocks return False, no blocks. return True return not (ip_block or user_block)
[ "def", "record_failed_attempt", "(", "ip_address", ",", "username", ")", ":", "# increment the failed count, and get current number", "ip_block", "=", "False", "if", "not", "config", ".", "DISABLE_IP_LOCKOUT", ":", "# we only want to increment the IP if this is disabled.", "ip_count", "=", "increment_key", "(", "get_ip_attempt_cache_key", "(", "ip_address", ")", ")", "# if over the limit, add to block", "if", "ip_count", ">", "config", ".", "IP_FAILURE_LIMIT", ":", "block_ip", "(", "ip_address", ")", "ip_block", "=", "True", "user_block", "=", "False", "if", "username", "and", "not", "config", ".", "DISABLE_USERNAME_LOCKOUT", ":", "user_count", "=", "increment_key", "(", "get_username_attempt_cache_key", "(", "username", ")", ")", "# if over the limit, add to block", "if", "user_count", ">", "config", ".", "USERNAME_FAILURE_LIMIT", ":", "block_username", "(", "username", ")", "user_block", "=", "True", "# if we have this turned on, then there is no reason to look at ip_block", "# we will just look at user_block, and short circut the result since", "# we don't need to continue.", "if", "config", ".", "DISABLE_IP_LOCKOUT", ":", "# if user_block is True, it means it was blocked", "# we need to return False", "return", "not", "user_block", "if", "config", ".", "DISABLE_USERNAME_LOCKOUT", ":", "# The same as DISABLE_IP_LOCKOUT", "return", "not", "ip_block", "# we want to make sure both the IP and user is blocked before we", "# return False", "# this is mostly used when a lot of your users are using proxies,", "# and you don't want one user to block everyone on that one IP.", "if", "config", ".", "LOCKOUT_BY_IP_USERNAME", ":", "# both ip_block and user_block need to be True in order", "# to return a False.", "return", "not", "(", "ip_block", "and", "user_block", ")", "# if any blocks return False, no blocks. return True", "return", "not", "(", "ip_block", "or", "user_block", ")" ]
record the failed login attempt, if over limit return False, if not over limit return True
[ "record", "the", "failed", "login", "attempt", "if", "over", "limit", "return", "False", "if", "not", "over", "limit", "return", "True" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L200-L243
6,331
kencochrane/django-defender
defender/utils.py
unblock_ip
def unblock_ip(ip_address, pipe=None): """ unblock the given IP """ do_commit = False if not pipe: pipe = REDIS_SERVER.pipeline() do_commit = True if ip_address: pipe.delete(get_ip_attempt_cache_key(ip_address)) pipe.delete(get_ip_blocked_cache_key(ip_address)) if do_commit: pipe.execute()
python
def unblock_ip(ip_address, pipe=None): """ unblock the given IP """ do_commit = False if not pipe: pipe = REDIS_SERVER.pipeline() do_commit = True if ip_address: pipe.delete(get_ip_attempt_cache_key(ip_address)) pipe.delete(get_ip_blocked_cache_key(ip_address)) if do_commit: pipe.execute()
[ "def", "unblock_ip", "(", "ip_address", ",", "pipe", "=", "None", ")", ":", "do_commit", "=", "False", "if", "not", "pipe", ":", "pipe", "=", "REDIS_SERVER", ".", "pipeline", "(", ")", "do_commit", "=", "True", "if", "ip_address", ":", "pipe", ".", "delete", "(", "get_ip_attempt_cache_key", "(", "ip_address", ")", ")", "pipe", ".", "delete", "(", "get_ip_blocked_cache_key", "(", "ip_address", ")", ")", "if", "do_commit", ":", "pipe", ".", "execute", "(", ")" ]
unblock the given IP
[ "unblock", "the", "given", "IP" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L246-L256
6,332
kencochrane/django-defender
defender/utils.py
unblock_username
def unblock_username(username, pipe=None): """ unblock the given Username """ do_commit = False if not pipe: pipe = REDIS_SERVER.pipeline() do_commit = True if username: pipe.delete(get_username_attempt_cache_key(username)) pipe.delete(get_username_blocked_cache_key(username)) if do_commit: pipe.execute()
python
def unblock_username(username, pipe=None): """ unblock the given Username """ do_commit = False if not pipe: pipe = REDIS_SERVER.pipeline() do_commit = True if username: pipe.delete(get_username_attempt_cache_key(username)) pipe.delete(get_username_blocked_cache_key(username)) if do_commit: pipe.execute()
[ "def", "unblock_username", "(", "username", ",", "pipe", "=", "None", ")", ":", "do_commit", "=", "False", "if", "not", "pipe", ":", "pipe", "=", "REDIS_SERVER", ".", "pipeline", "(", ")", "do_commit", "=", "True", "if", "username", ":", "pipe", ".", "delete", "(", "get_username_attempt_cache_key", "(", "username", ")", ")", "pipe", ".", "delete", "(", "get_username_blocked_cache_key", "(", "username", ")", ")", "if", "do_commit", ":", "pipe", ".", "execute", "(", ")" ]
unblock the given Username
[ "unblock", "the", "given", "Username" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L259-L269
6,333
kencochrane/django-defender
defender/utils.py
reset_failed_attempts
def reset_failed_attempts(ip_address=None, username=None): """ reset the failed attempts for these ip's and usernames """ pipe = REDIS_SERVER.pipeline() unblock_ip(ip_address, pipe=pipe) unblock_username(username, pipe=pipe) pipe.execute()
python
def reset_failed_attempts(ip_address=None, username=None): """ reset the failed attempts for these ip's and usernames """ pipe = REDIS_SERVER.pipeline() unblock_ip(ip_address, pipe=pipe) unblock_username(username, pipe=pipe) pipe.execute()
[ "def", "reset_failed_attempts", "(", "ip_address", "=", "None", ",", "username", "=", "None", ")", ":", "pipe", "=", "REDIS_SERVER", ".", "pipeline", "(", ")", "unblock_ip", "(", "ip_address", ",", "pipe", "=", "pipe", ")", "unblock_username", "(", "username", ",", "pipe", "=", "pipe", ")", "pipe", ".", "execute", "(", ")" ]
reset the failed attempts for these ip's and usernames
[ "reset", "the", "failed", "attempts", "for", "these", "ip", "s", "and", "usernames" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L272-L280
6,334
kencochrane/django-defender
defender/utils.py
lockout_response
def lockout_response(request): """ if we are locked out, here is the response """ if config.LOCKOUT_TEMPLATE: context = { 'cooloff_time_seconds': config.COOLOFF_TIME, 'cooloff_time_minutes': config.COOLOFF_TIME / 60, 'failure_limit': config.FAILURE_LIMIT, } return render(request, config.LOCKOUT_TEMPLATE, context) if config.LOCKOUT_URL: return HttpResponseRedirect(config.LOCKOUT_URL) if config.COOLOFF_TIME: return HttpResponse("Account locked: too many login attempts. " "Please try again later.") else: return HttpResponse("Account locked: too many login attempts. " "Contact an admin to unlock your account.")
python
def lockout_response(request): """ if we are locked out, here is the response """ if config.LOCKOUT_TEMPLATE: context = { 'cooloff_time_seconds': config.COOLOFF_TIME, 'cooloff_time_minutes': config.COOLOFF_TIME / 60, 'failure_limit': config.FAILURE_LIMIT, } return render(request, config.LOCKOUT_TEMPLATE, context) if config.LOCKOUT_URL: return HttpResponseRedirect(config.LOCKOUT_URL) if config.COOLOFF_TIME: return HttpResponse("Account locked: too many login attempts. " "Please try again later.") else: return HttpResponse("Account locked: too many login attempts. " "Contact an admin to unlock your account.")
[ "def", "lockout_response", "(", "request", ")", ":", "if", "config", ".", "LOCKOUT_TEMPLATE", ":", "context", "=", "{", "'cooloff_time_seconds'", ":", "config", ".", "COOLOFF_TIME", ",", "'cooloff_time_minutes'", ":", "config", ".", "COOLOFF_TIME", "/", "60", ",", "'failure_limit'", ":", "config", ".", "FAILURE_LIMIT", ",", "}", "return", "render", "(", "request", ",", "config", ".", "LOCKOUT_TEMPLATE", ",", "context", ")", "if", "config", ".", "LOCKOUT_URL", ":", "return", "HttpResponseRedirect", "(", "config", ".", "LOCKOUT_URL", ")", "if", "config", ".", "COOLOFF_TIME", ":", "return", "HttpResponse", "(", "\"Account locked: too many login attempts. \"", "\"Please try again later.\"", ")", "else", ":", "return", "HttpResponse", "(", "\"Account locked: too many login attempts. \"", "\"Contact an admin to unlock your account.\"", ")" ]
if we are locked out, here is the response
[ "if", "we", "are", "locked", "out", "here", "is", "the", "response" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L283-L301
6,335
kencochrane/django-defender
defender/utils.py
is_user_already_locked
def is_user_already_locked(username): """Is this username already locked?""" if username is None: return False if config.DISABLE_USERNAME_LOCKOUT: return False return REDIS_SERVER.get(get_username_blocked_cache_key(username))
python
def is_user_already_locked(username): """Is this username already locked?""" if username is None: return False if config.DISABLE_USERNAME_LOCKOUT: return False return REDIS_SERVER.get(get_username_blocked_cache_key(username))
[ "def", "is_user_already_locked", "(", "username", ")", ":", "if", "username", "is", "None", ":", "return", "False", "if", "config", ".", "DISABLE_USERNAME_LOCKOUT", ":", "return", "False", "return", "REDIS_SERVER", ".", "get", "(", "get_username_blocked_cache_key", "(", "username", ")", ")" ]
Is this username already locked?
[ "Is", "this", "username", "already", "locked?" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L304-L310
6,336
kencochrane/django-defender
defender/utils.py
is_source_ip_already_locked
def is_source_ip_already_locked(ip_address): """Is this IP already locked?""" if ip_address is None: return False if config.DISABLE_IP_LOCKOUT: return False return REDIS_SERVER.get(get_ip_blocked_cache_key(ip_address))
python
def is_source_ip_already_locked(ip_address): """Is this IP already locked?""" if ip_address is None: return False if config.DISABLE_IP_LOCKOUT: return False return REDIS_SERVER.get(get_ip_blocked_cache_key(ip_address))
[ "def", "is_source_ip_already_locked", "(", "ip_address", ")", ":", "if", "ip_address", "is", "None", ":", "return", "False", "if", "config", ".", "DISABLE_IP_LOCKOUT", ":", "return", "False", "return", "REDIS_SERVER", ".", "get", "(", "get_ip_blocked_cache_key", "(", "ip_address", ")", ")" ]
Is this IP already locked?
[ "Is", "this", "IP", "already", "locked?" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L313-L319
6,337
kencochrane/django-defender
defender/utils.py
is_already_locked
def is_already_locked(request, get_username=get_username_from_request, username=None): """Parse the username & IP from the request, and see if it's already locked.""" user_blocked = is_user_already_locked(username or get_username(request)) ip_blocked = is_source_ip_already_locked(get_ip(request)) if config.LOCKOUT_BY_IP_USERNAME: # if both this IP and this username are present the request is blocked return ip_blocked and user_blocked return ip_blocked or user_blocked
python
def is_already_locked(request, get_username=get_username_from_request, username=None): """Parse the username & IP from the request, and see if it's already locked.""" user_blocked = is_user_already_locked(username or get_username(request)) ip_blocked = is_source_ip_already_locked(get_ip(request)) if config.LOCKOUT_BY_IP_USERNAME: # if both this IP and this username are present the request is blocked return ip_blocked and user_blocked return ip_blocked or user_blocked
[ "def", "is_already_locked", "(", "request", ",", "get_username", "=", "get_username_from_request", ",", "username", "=", "None", ")", ":", "user_blocked", "=", "is_user_already_locked", "(", "username", "or", "get_username", "(", "request", ")", ")", "ip_blocked", "=", "is_source_ip_already_locked", "(", "get_ip", "(", "request", ")", ")", "if", "config", ".", "LOCKOUT_BY_IP_USERNAME", ":", "# if both this IP and this username are present the request is blocked", "return", "ip_blocked", "and", "user_blocked", "return", "ip_blocked", "or", "user_blocked" ]
Parse the username & IP from the request, and see if it's already locked.
[ "Parse", "the", "username", "&", "IP", "from", "the", "request", "and", "see", "if", "it", "s", "already", "locked", "." ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L322-L332
6,338
kencochrane/django-defender
defender/utils.py
check_request
def check_request(request, login_unsuccessful, get_username=get_username_from_request, username=None): """ check the request, and process results""" ip_address = get_ip(request) username = username or get_username(request) if not login_unsuccessful: # user logged in -- forget the failed attempts reset_failed_attempts(ip_address=ip_address, username=username) return True else: # add a failed attempt for this user return record_failed_attempt(ip_address, username)
python
def check_request(request, login_unsuccessful, get_username=get_username_from_request, username=None): """ check the request, and process results""" ip_address = get_ip(request) username = username or get_username(request) if not login_unsuccessful: # user logged in -- forget the failed attempts reset_failed_attempts(ip_address=ip_address, username=username) return True else: # add a failed attempt for this user return record_failed_attempt(ip_address, username)
[ "def", "check_request", "(", "request", ",", "login_unsuccessful", ",", "get_username", "=", "get_username_from_request", ",", "username", "=", "None", ")", ":", "ip_address", "=", "get_ip", "(", "request", ")", "username", "=", "username", "or", "get_username", "(", "request", ")", "if", "not", "login_unsuccessful", ":", "# user logged in -- forget the failed attempts", "reset_failed_attempts", "(", "ip_address", "=", "ip_address", ",", "username", "=", "username", ")", "return", "True", "else", ":", "# add a failed attempt for this user", "return", "record_failed_attempt", "(", "ip_address", ",", "username", ")" ]
check the request, and process results
[ "check", "the", "request", "and", "process", "results" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L335-L348
6,339
kencochrane/django-defender
defender/utils.py
add_login_attempt_to_db
def add_login_attempt_to_db(request, login_valid, get_username=get_username_from_request, username=None): """ Create a record for the login attempt If using celery call celery task, if not, call the method normally """ if not config.STORE_ACCESS_ATTEMPTS: # If we don't want to store in the database, then don't proceed. return username = username or get_username(request) user_agent = request.META.get('HTTP_USER_AGENT', '<unknown>')[:255] ip_address = get_ip(request) http_accept = request.META.get('HTTP_ACCEPT', '<unknown>') path_info = request.META.get('PATH_INFO', '<unknown>') if config.USE_CELERY: from .tasks import add_login_attempt_task add_login_attempt_task.delay(user_agent, ip_address, username, http_accept, path_info, login_valid) else: store_login_attempt(user_agent, ip_address, username, http_accept, path_info, login_valid)
python
def add_login_attempt_to_db(request, login_valid, get_username=get_username_from_request, username=None): """ Create a record for the login attempt If using celery call celery task, if not, call the method normally """ if not config.STORE_ACCESS_ATTEMPTS: # If we don't want to store in the database, then don't proceed. return username = username or get_username(request) user_agent = request.META.get('HTTP_USER_AGENT', '<unknown>')[:255] ip_address = get_ip(request) http_accept = request.META.get('HTTP_ACCEPT', '<unknown>') path_info = request.META.get('PATH_INFO', '<unknown>') if config.USE_CELERY: from .tasks import add_login_attempt_task add_login_attempt_task.delay(user_agent, ip_address, username, http_accept, path_info, login_valid) else: store_login_attempt(user_agent, ip_address, username, http_accept, path_info, login_valid)
[ "def", "add_login_attempt_to_db", "(", "request", ",", "login_valid", ",", "get_username", "=", "get_username_from_request", ",", "username", "=", "None", ")", ":", "if", "not", "config", ".", "STORE_ACCESS_ATTEMPTS", ":", "# If we don't want to store in the database, then don't proceed.", "return", "username", "=", "username", "or", "get_username", "(", "request", ")", "user_agent", "=", "request", ".", "META", ".", "get", "(", "'HTTP_USER_AGENT'", ",", "'<unknown>'", ")", "[", ":", "255", "]", "ip_address", "=", "get_ip", "(", "request", ")", "http_accept", "=", "request", ".", "META", ".", "get", "(", "'HTTP_ACCEPT'", ",", "'<unknown>'", ")", "path_info", "=", "request", ".", "META", ".", "get", "(", "'PATH_INFO'", ",", "'<unknown>'", ")", "if", "config", ".", "USE_CELERY", ":", "from", ".", "tasks", "import", "add_login_attempt_task", "add_login_attempt_task", ".", "delay", "(", "user_agent", ",", "ip_address", ",", "username", ",", "http_accept", ",", "path_info", ",", "login_valid", ")", "else", ":", "store_login_attempt", "(", "user_agent", ",", "ip_address", ",", "username", ",", "http_accept", ",", "path_info", ",", "login_valid", ")" ]
Create a record for the login attempt If using celery call celery task, if not, call the method normally
[ "Create", "a", "record", "for", "the", "login", "attempt", "If", "using", "celery", "call", "celery", "task", "if", "not", "call", "the", "method", "normally" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/utils.py#L351-L374
6,340
kencochrane/django-defender
defender/tasks.py
add_login_attempt_task
def add_login_attempt_task(user_agent, ip_address, username, http_accept, path_info, login_valid): """ Create a record for the login attempt """ store_login_attempt(user_agent, ip_address, username, http_accept, path_info, login_valid)
python
def add_login_attempt_task(user_agent, ip_address, username, http_accept, path_info, login_valid): """ Create a record for the login attempt """ store_login_attempt(user_agent, ip_address, username, http_accept, path_info, login_valid)
[ "def", "add_login_attempt_task", "(", "user_agent", ",", "ip_address", ",", "username", ",", "http_accept", ",", "path_info", ",", "login_valid", ")", ":", "store_login_attempt", "(", "user_agent", ",", "ip_address", ",", "username", ",", "http_accept", ",", "path_info", ",", "login_valid", ")" ]
Create a record for the login attempt
[ "Create", "a", "record", "for", "the", "login", "attempt" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/tasks.py#L10-L14
6,341
kencochrane/django-defender
defender/data.py
store_login_attempt
def store_login_attempt(user_agent, ip_address, username, http_accept, path_info, login_valid): """ Store the login attempt to the db. """ AccessAttempt.objects.create( user_agent=user_agent, ip_address=ip_address, username=username, http_accept=http_accept, path_info=path_info, login_valid=login_valid, )
python
def store_login_attempt(user_agent, ip_address, username, http_accept, path_info, login_valid): """ Store the login attempt to the db. """ AccessAttempt.objects.create( user_agent=user_agent, ip_address=ip_address, username=username, http_accept=http_accept, path_info=path_info, login_valid=login_valid, )
[ "def", "store_login_attempt", "(", "user_agent", ",", "ip_address", ",", "username", ",", "http_accept", ",", "path_info", ",", "login_valid", ")", ":", "AccessAttempt", ".", "objects", ".", "create", "(", "user_agent", "=", "user_agent", ",", "ip_address", "=", "ip_address", ",", "username", "=", "username", ",", "http_accept", "=", "http_accept", ",", "path_info", "=", "path_info", ",", "login_valid", "=", "login_valid", ",", ")" ]
Store the login attempt to the db.
[ "Store", "the", "login", "attempt", "to", "the", "db", "." ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/data.py#L4-L14
6,342
kencochrane/django-defender
defender/connection.py
get_redis_connection
def get_redis_connection(): """ Get the redis connection if not using mock """ if config.MOCK_REDIS: # pragma: no cover import mockredis return mockredis.mock_strict_redis_client() # pragma: no cover elif config.DEFENDER_REDIS_NAME: # pragma: no cover try: cache = caches[config.DEFENDER_REDIS_NAME] except InvalidCacheBackendError: raise KeyError(INVALID_CACHE_ERROR_MSG.format( config.DEFENDER_REDIS_NAME)) # every redis backend implement it own way to get the low level client try: # redis_cache.RedisCache case (django-redis-cache package) return cache.get_master_client() except AttributeError: # django_redis.cache.RedisCache case (django-redis package) return cache.client.get_client(True) else: # pragma: no cover redis_config = parse_redis_url(config.DEFENDER_REDIS_URL) return redis.StrictRedis( host=redis_config.get('HOST'), port=redis_config.get('PORT'), db=redis_config.get('DB'), password=redis_config.get('PASSWORD'), ssl=redis_config.get('SSL'))
python
def get_redis_connection(): """ Get the redis connection if not using mock """ if config.MOCK_REDIS: # pragma: no cover import mockredis return mockredis.mock_strict_redis_client() # pragma: no cover elif config.DEFENDER_REDIS_NAME: # pragma: no cover try: cache = caches[config.DEFENDER_REDIS_NAME] except InvalidCacheBackendError: raise KeyError(INVALID_CACHE_ERROR_MSG.format( config.DEFENDER_REDIS_NAME)) # every redis backend implement it own way to get the low level client try: # redis_cache.RedisCache case (django-redis-cache package) return cache.get_master_client() except AttributeError: # django_redis.cache.RedisCache case (django-redis package) return cache.client.get_client(True) else: # pragma: no cover redis_config = parse_redis_url(config.DEFENDER_REDIS_URL) return redis.StrictRedis( host=redis_config.get('HOST'), port=redis_config.get('PORT'), db=redis_config.get('DB'), password=redis_config.get('PASSWORD'), ssl=redis_config.get('SSL'))
[ "def", "get_redis_connection", "(", ")", ":", "if", "config", ".", "MOCK_REDIS", ":", "# pragma: no cover", "import", "mockredis", "return", "mockredis", ".", "mock_strict_redis_client", "(", ")", "# pragma: no cover", "elif", "config", ".", "DEFENDER_REDIS_NAME", ":", "# pragma: no cover", "try", ":", "cache", "=", "caches", "[", "config", ".", "DEFENDER_REDIS_NAME", "]", "except", "InvalidCacheBackendError", ":", "raise", "KeyError", "(", "INVALID_CACHE_ERROR_MSG", ".", "format", "(", "config", ".", "DEFENDER_REDIS_NAME", ")", ")", "# every redis backend implement it own way to get the low level client", "try", ":", "# redis_cache.RedisCache case (django-redis-cache package)", "return", "cache", ".", "get_master_client", "(", ")", "except", "AttributeError", ":", "# django_redis.cache.RedisCache case (django-redis package)", "return", "cache", ".", "client", ".", "get_client", "(", "True", ")", "else", ":", "# pragma: no cover", "redis_config", "=", "parse_redis_url", "(", "config", ".", "DEFENDER_REDIS_URL", ")", "return", "redis", ".", "StrictRedis", "(", "host", "=", "redis_config", ".", "get", "(", "'HOST'", ")", ",", "port", "=", "redis_config", ".", "get", "(", "'PORT'", ")", ",", "db", "=", "redis_config", ".", "get", "(", "'DB'", ")", ",", "password", "=", "redis_config", ".", "get", "(", "'PASSWORD'", ")", ",", "ssl", "=", "redis_config", ".", "get", "(", "'SSL'", ")", ")" ]
Get the redis connection if not using mock
[ "Get", "the", "redis", "connection", "if", "not", "using", "mock" ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/connection.py#L19-L44
6,343
kencochrane/django-defender
defender/connection.py
parse_redis_url
def parse_redis_url(url): """Parses a redis URL.""" # create config with some sane defaults redis_config = { "DB": 0, "PASSWORD": None, "HOST": "localhost", "PORT": 6379, "SSL": False } if not url: return redis_config url = urlparse.urlparse(url) # Remove query strings. path = url.path[1:] path = path.split('?', 2)[0] if path: redis_config.update({"DB": int(path)}) if url.password: redis_config.update({"PASSWORD": url.password}) if url.hostname: redis_config.update({"HOST": url.hostname}) if url.port: redis_config.update({"PORT": int(url.port)}) if url.scheme in ['https', 'rediss']: redis_config.update({"SSL": True}) return redis_config
python
def parse_redis_url(url): """Parses a redis URL.""" # create config with some sane defaults redis_config = { "DB": 0, "PASSWORD": None, "HOST": "localhost", "PORT": 6379, "SSL": False } if not url: return redis_config url = urlparse.urlparse(url) # Remove query strings. path = url.path[1:] path = path.split('?', 2)[0] if path: redis_config.update({"DB": int(path)}) if url.password: redis_config.update({"PASSWORD": url.password}) if url.hostname: redis_config.update({"HOST": url.hostname}) if url.port: redis_config.update({"PORT": int(url.port)}) if url.scheme in ['https', 'rediss']: redis_config.update({"SSL": True}) return redis_config
[ "def", "parse_redis_url", "(", "url", ")", ":", "# create config with some sane defaults", "redis_config", "=", "{", "\"DB\"", ":", "0", ",", "\"PASSWORD\"", ":", "None", ",", "\"HOST\"", ":", "\"localhost\"", ",", "\"PORT\"", ":", "6379", ",", "\"SSL\"", ":", "False", "}", "if", "not", "url", ":", "return", "redis_config", "url", "=", "urlparse", ".", "urlparse", "(", "url", ")", "# Remove query strings.", "path", "=", "url", ".", "path", "[", "1", ":", "]", "path", "=", "path", ".", "split", "(", "'?'", ",", "2", ")", "[", "0", "]", "if", "path", ":", "redis_config", ".", "update", "(", "{", "\"DB\"", ":", "int", "(", "path", ")", "}", ")", "if", "url", ".", "password", ":", "redis_config", ".", "update", "(", "{", "\"PASSWORD\"", ":", "url", ".", "password", "}", ")", "if", "url", ".", "hostname", ":", "redis_config", ".", "update", "(", "{", "\"HOST\"", ":", "url", ".", "hostname", "}", ")", "if", "url", ".", "port", ":", "redis_config", ".", "update", "(", "{", "\"PORT\"", ":", "int", "(", "url", ".", "port", ")", "}", ")", "if", "url", ".", "scheme", "in", "[", "'https'", ",", "'rediss'", "]", ":", "redis_config", ".", "update", "(", "{", "\"SSL\"", ":", "True", "}", ")", "return", "redis_config" ]
Parses a redis URL.
[ "Parses", "a", "redis", "URL", "." ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/connection.py#L48-L79
6,344
kencochrane/django-defender
defender/management/commands/cleanup_django_defender.py
Command.handle
def handle(self, **options): """ Removes any entries in the AccessAttempt that are older than your DEFENDER_ACCESS_ATTEMPT_EXPIRATION config, default 24 HOURS. """ print("Starting clean up of django-defender table") now = timezone.now() cleanup_delta = timedelta(hours=config.ACCESS_ATTEMPT_EXPIRATION) min_attempt_time = now - cleanup_delta attempts_to_clean = AccessAttempt.objects.filter( attempt_time__lt=min_attempt_time, ) attempts_to_clean_count = attempts_to_clean.count() attempts_to_clean.delete() print( "Finished. Removed {0} AccessAttempt entries.".format( attempts_to_clean_count) )
python
def handle(self, **options): """ Removes any entries in the AccessAttempt that are older than your DEFENDER_ACCESS_ATTEMPT_EXPIRATION config, default 24 HOURS. """ print("Starting clean up of django-defender table") now = timezone.now() cleanup_delta = timedelta(hours=config.ACCESS_ATTEMPT_EXPIRATION) min_attempt_time = now - cleanup_delta attempts_to_clean = AccessAttempt.objects.filter( attempt_time__lt=min_attempt_time, ) attempts_to_clean_count = attempts_to_clean.count() attempts_to_clean.delete() print( "Finished. Removed {0} AccessAttempt entries.".format( attempts_to_clean_count) )
[ "def", "handle", "(", "self", ",", "*", "*", "options", ")", ":", "print", "(", "\"Starting clean up of django-defender table\"", ")", "now", "=", "timezone", ".", "now", "(", ")", "cleanup_delta", "=", "timedelta", "(", "hours", "=", "config", ".", "ACCESS_ATTEMPT_EXPIRATION", ")", "min_attempt_time", "=", "now", "-", "cleanup_delta", "attempts_to_clean", "=", "AccessAttempt", ".", "objects", ".", "filter", "(", "attempt_time__lt", "=", "min_attempt_time", ",", ")", "attempts_to_clean_count", "=", "attempts_to_clean", ".", "count", "(", ")", "attempts_to_clean", ".", "delete", "(", ")", "print", "(", "\"Finished. Removed {0} AccessAttempt entries.\"", ".", "format", "(", "attempts_to_clean_count", ")", ")" ]
Removes any entries in the AccessAttempt that are older than your DEFENDER_ACCESS_ATTEMPT_EXPIRATION config, default 24 HOURS.
[ "Removes", "any", "entries", "in", "the", "AccessAttempt", "that", "are", "older", "than", "your", "DEFENDER_ACCESS_ATTEMPT_EXPIRATION", "config", "default", "24", "HOURS", "." ]
e3e547dbb83235e0d564a6d64652c7df00412ff2
https://github.com/kencochrane/django-defender/blob/e3e547dbb83235e0d564a6d64652c7df00412ff2/defender/management/commands/cleanup_django_defender.py#L15-L35
6,345
Cito/DBUtils
DBUtils/PooledDB.py
PooledDB.connection
def connection(self, shareable=True): """Get a steady, cached DB-API 2 connection from the pool. If shareable is set and the underlying DB-API 2 allows it, then the connection may be shared with other threads. """ if shareable and self._maxshared: self._lock.acquire() try: while (not self._shared_cache and self._maxconnections and self._connections >= self._maxconnections): self._wait_lock() if len(self._shared_cache) < self._maxshared: # shared cache is not full, get a dedicated connection try: # first try to get it from the idle cache con = self._idle_cache.pop(0) except IndexError: # else get a fresh connection con = self.steady_connection() else: con._ping_check() # check this connection con = SharedDBConnection(con) self._connections += 1 else: # shared cache full or no more connections allowed self._shared_cache.sort() # least shared connection first con = self._shared_cache.pop(0) # get it while con.con._transaction: # do not share connections which are in a transaction self._shared_cache.insert(0, con) self._wait_lock() self._shared_cache.sort() con = self._shared_cache.pop(0) con.con._ping_check() # check the underlying connection con.share() # increase share of this connection # put the connection (back) into the shared cache self._shared_cache.append(con) self._lock.notify() finally: self._lock.release() con = PooledSharedDBConnection(self, con) else: # try to get a dedicated connection self._lock.acquire() try: while (self._maxconnections and self._connections >= self._maxconnections): self._wait_lock() # connection limit not reached, get a dedicated connection try: # first try to get it from the idle cache con = self._idle_cache.pop(0) except IndexError: # else get a fresh connection con = self.steady_connection() else: con._ping_check() # check connection con = PooledDedicatedDBConnection(self, con) self._connections += 1 finally: self._lock.release() return con
python
def connection(self, shareable=True): """Get a steady, cached DB-API 2 connection from the pool. If shareable is set and the underlying DB-API 2 allows it, then the connection may be shared with other threads. """ if shareable and self._maxshared: self._lock.acquire() try: while (not self._shared_cache and self._maxconnections and self._connections >= self._maxconnections): self._wait_lock() if len(self._shared_cache) < self._maxshared: # shared cache is not full, get a dedicated connection try: # first try to get it from the idle cache con = self._idle_cache.pop(0) except IndexError: # else get a fresh connection con = self.steady_connection() else: con._ping_check() # check this connection con = SharedDBConnection(con) self._connections += 1 else: # shared cache full or no more connections allowed self._shared_cache.sort() # least shared connection first con = self._shared_cache.pop(0) # get it while con.con._transaction: # do not share connections which are in a transaction self._shared_cache.insert(0, con) self._wait_lock() self._shared_cache.sort() con = self._shared_cache.pop(0) con.con._ping_check() # check the underlying connection con.share() # increase share of this connection # put the connection (back) into the shared cache self._shared_cache.append(con) self._lock.notify() finally: self._lock.release() con = PooledSharedDBConnection(self, con) else: # try to get a dedicated connection self._lock.acquire() try: while (self._maxconnections and self._connections >= self._maxconnections): self._wait_lock() # connection limit not reached, get a dedicated connection try: # first try to get it from the idle cache con = self._idle_cache.pop(0) except IndexError: # else get a fresh connection con = self.steady_connection() else: con._ping_check() # check connection con = PooledDedicatedDBConnection(self, con) self._connections += 1 finally: self._lock.release() return con
[ "def", "connection", "(", "self", ",", "shareable", "=", "True", ")", ":", "if", "shareable", "and", "self", ".", "_maxshared", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "while", "(", "not", "self", ".", "_shared_cache", "and", "self", ".", "_maxconnections", "and", "self", ".", "_connections", ">=", "self", ".", "_maxconnections", ")", ":", "self", ".", "_wait_lock", "(", ")", "if", "len", "(", "self", ".", "_shared_cache", ")", "<", "self", ".", "_maxshared", ":", "# shared cache is not full, get a dedicated connection", "try", ":", "# first try to get it from the idle cache", "con", "=", "self", ".", "_idle_cache", ".", "pop", "(", "0", ")", "except", "IndexError", ":", "# else get a fresh connection", "con", "=", "self", ".", "steady_connection", "(", ")", "else", ":", "con", ".", "_ping_check", "(", ")", "# check this connection", "con", "=", "SharedDBConnection", "(", "con", ")", "self", ".", "_connections", "+=", "1", "else", ":", "# shared cache full or no more connections allowed", "self", ".", "_shared_cache", ".", "sort", "(", ")", "# least shared connection first", "con", "=", "self", ".", "_shared_cache", ".", "pop", "(", "0", ")", "# get it", "while", "con", ".", "con", ".", "_transaction", ":", "# do not share connections which are in a transaction", "self", ".", "_shared_cache", ".", "insert", "(", "0", ",", "con", ")", "self", ".", "_wait_lock", "(", ")", "self", ".", "_shared_cache", ".", "sort", "(", ")", "con", "=", "self", ".", "_shared_cache", ".", "pop", "(", "0", ")", "con", ".", "con", ".", "_ping_check", "(", ")", "# check the underlying connection", "con", ".", "share", "(", ")", "# increase share of this connection", "# put the connection (back) into the shared cache", "self", ".", "_shared_cache", ".", "append", "(", "con", ")", "self", ".", "_lock", ".", "notify", "(", ")", "finally", ":", "self", ".", "_lock", ".", "release", "(", ")", "con", "=", "PooledSharedDBConnection", "(", "self", ",", "con", ")", 
"else", ":", "# try to get a dedicated connection", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "while", "(", "self", ".", "_maxconnections", "and", "self", ".", "_connections", ">=", "self", ".", "_maxconnections", ")", ":", "self", ".", "_wait_lock", "(", ")", "# connection limit not reached, get a dedicated connection", "try", ":", "# first try to get it from the idle cache", "con", "=", "self", ".", "_idle_cache", ".", "pop", "(", "0", ")", "except", "IndexError", ":", "# else get a fresh connection", "con", "=", "self", ".", "steady_connection", "(", ")", "else", ":", "con", ".", "_ping_check", "(", ")", "# check connection", "con", "=", "PooledDedicatedDBConnection", "(", "self", ",", "con", ")", "self", ".", "_connections", "+=", "1", "finally", ":", "self", ".", "_lock", ".", "release", "(", ")", "return", "con" ]
Get a steady, cached DB-API 2 connection from the pool. If shareable is set and the underlying DB-API 2 allows it, then the connection may be shared with other threads.
[ "Get", "a", "steady", "cached", "DB", "-", "API", "2", "connection", "from", "the", "pool", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledDB.py#L277-L334
6,346
Cito/DBUtils
DBUtils/PooledDB.py
PooledDB.unshare
def unshare(self, con): """Decrease the share of a connection in the shared cache.""" self._lock.acquire() try: con.unshare() shared = con.shared if not shared: # connection is idle, try: # so try to remove it self._shared_cache.remove(con) # from shared cache except ValueError: pass # pool has already been closed finally: self._lock.release() if not shared: # connection has become idle, self.cache(con.con)
python
def unshare(self, con): """Decrease the share of a connection in the shared cache.""" self._lock.acquire() try: con.unshare() shared = con.shared if not shared: # connection is idle, try: # so try to remove it self._shared_cache.remove(con) # from shared cache except ValueError: pass # pool has already been closed finally: self._lock.release() if not shared: # connection has become idle, self.cache(con.con)
[ "def", "unshare", "(", "self", ",", "con", ")", ":", "self", ".", "_lock", ".", "acquire", "(", ")", "try", ":", "con", ".", "unshare", "(", ")", "shared", "=", "con", ".", "shared", "if", "not", "shared", ":", "# connection is idle,", "try", ":", "# so try to remove it", "self", ".", "_shared_cache", ".", "remove", "(", "con", ")", "# from shared cache", "except", "ValueError", ":", "pass", "# pool has already been closed", "finally", ":", "self", ".", "_lock", ".", "release", "(", ")", "if", "not", "shared", ":", "# connection has become idle,", "self", ".", "cache", "(", "con", ".", "con", ")" ]
Decrease the share of a connection in the shared cache.
[ "Decrease", "the", "share", "of", "a", "connection", "in", "the", "shared", "cache", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledDB.py#L340-L354
6,347
Cito/DBUtils
DBUtils/PooledDB.py
PooledSharedDBConnection.close
def close(self): """Close the pooled shared connection.""" # Instead of actually closing the connection, # unshare it and/or return it to the pool. if self._con: self._pool.unshare(self._shared_con) self._shared_con = self._con = None
python
def close(self): """Close the pooled shared connection.""" # Instead of actually closing the connection, # unshare it and/or return it to the pool. if self._con: self._pool.unshare(self._shared_con) self._shared_con = self._con = None
[ "def", "close", "(", "self", ")", ":", "# Instead of actually closing the connection,", "# unshare it and/or return it to the pool.", "if", "self", ".", "_con", ":", "self", ".", "_pool", ".", "unshare", "(", "self", ".", "_shared_con", ")", "self", ".", "_shared_con", "=", "self", ".", "_con", "=", "None" ]
Close the pooled shared connection.
[ "Close", "the", "pooled", "shared", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledDB.py#L516-L522
6,348
Cito/DBUtils
DBUtils/PersistentDB.py
PersistentDB.steady_connection
def steady_connection(self): """Get a steady, non-persistent DB-API 2 connection.""" return connect( self._creator, self._maxusage, self._setsession, self._failures, self._ping, self._closeable, *self._args, **self._kwargs)
python
def steady_connection(self): """Get a steady, non-persistent DB-API 2 connection.""" return connect( self._creator, self._maxusage, self._setsession, self._failures, self._ping, self._closeable, *self._args, **self._kwargs)
[ "def", "steady_connection", "(", "self", ")", ":", "return", "connect", "(", "self", ".", "_creator", ",", "self", ".", "_maxusage", ",", "self", ".", "_setsession", ",", "self", ".", "_failures", ",", "self", ".", "_ping", ",", "self", ".", "_closeable", ",", "*", "self", ".", "_args", ",", "*", "*", "self", ".", "_kwargs", ")" ]
Get a steady, non-persistent DB-API 2 connection.
[ "Get", "a", "steady", "non", "-", "persistent", "DB", "-", "API", "2", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PersistentDB.py#L201-L206
6,349
Cito/DBUtils
DBUtils/PersistentDB.py
PersistentDB.connection
def connection(self, shareable=False): """Get a steady, persistent DB-API 2 connection. The shareable parameter exists only for compatibility with the PooledDB connection method. In reality, persistent connections are of course never shared with other threads. """ try: con = self.thread.connection except AttributeError: con = self.steady_connection() if not con.threadsafety(): raise NotSupportedError("Database module is not thread-safe.") self.thread.connection = con con._ping_check() return con
python
def connection(self, shareable=False): """Get a steady, persistent DB-API 2 connection. The shareable parameter exists only for compatibility with the PooledDB connection method. In reality, persistent connections are of course never shared with other threads. """ try: con = self.thread.connection except AttributeError: con = self.steady_connection() if not con.threadsafety(): raise NotSupportedError("Database module is not thread-safe.") self.thread.connection = con con._ping_check() return con
[ "def", "connection", "(", "self", ",", "shareable", "=", "False", ")", ":", "try", ":", "con", "=", "self", ".", "thread", ".", "connection", "except", "AttributeError", ":", "con", "=", "self", ".", "steady_connection", "(", ")", "if", "not", "con", ".", "threadsafety", "(", ")", ":", "raise", "NotSupportedError", "(", "\"Database module is not thread-safe.\"", ")", "self", ".", "thread", ".", "connection", "=", "con", "con", ".", "_ping_check", "(", ")", "return", "con" ]
Get a steady, persistent DB-API 2 connection. The shareable parameter exists only for compatibility with the PooledDB connection method. In reality, persistent connections are of course never shared with other threads.
[ "Get", "a", "steady", "persistent", "DB", "-", "API", "2", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PersistentDB.py#L208-L224
6,350
Cito/DBUtils
DBUtils/PersistentPg.py
PersistentPg.steady_connection
def steady_connection(self): """Get a steady, non-persistent PyGreSQL connection.""" return SteadyPgConnection( self._maxusage, self._setsession, self._closeable, *self._args, **self._kwargs)
python
def steady_connection(self): """Get a steady, non-persistent PyGreSQL connection.""" return SteadyPgConnection( self._maxusage, self._setsession, self._closeable, *self._args, **self._kwargs)
[ "def", "steady_connection", "(", "self", ")", ":", "return", "SteadyPgConnection", "(", "self", ".", "_maxusage", ",", "self", ".", "_setsession", ",", "self", ".", "_closeable", ",", "*", "self", ".", "_args", ",", "*", "*", "self", ".", "_kwargs", ")" ]
Get a steady, non-persistent PyGreSQL connection.
[ "Get", "a", "steady", "non", "-", "persistent", "PyGreSQL", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PersistentPg.py#L160-L164
6,351
Cito/DBUtils
DBUtils/PersistentPg.py
PersistentPg.connection
def connection(self): """Get a steady, persistent PyGreSQL connection.""" try: con = self.thread.connection except AttributeError: con = self.steady_connection() self.thread.connection = con return con
python
def connection(self): """Get a steady, persistent PyGreSQL connection.""" try: con = self.thread.connection except AttributeError: con = self.steady_connection() self.thread.connection = con return con
[ "def", "connection", "(", "self", ")", ":", "try", ":", "con", "=", "self", ".", "thread", ".", "connection", "except", "AttributeError", ":", "con", "=", "self", ".", "steady_connection", "(", ")", "self", ".", "thread", ".", "connection", "=", "con", "return", "con" ]
Get a steady, persistent PyGreSQL connection.
[ "Get", "a", "steady", "persistent", "PyGreSQL", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PersistentPg.py#L166-L173
6,352
Cito/DBUtils
setversion.py
versionString
def versionString(version): """Create version string. For a sequence containing version information such as (2, 0, 0, 'pre'), this returns a printable string such as '2.0pre'. The micro version number is only excluded from the string if it is zero. """ ver = list(map(str, version)) numbers, rest = ver[:2 if ver[2] == '0' else 3], ver[3:] return '.'.join(numbers) + '-'.join(rest)
python
def versionString(version): """Create version string. For a sequence containing version information such as (2, 0, 0, 'pre'), this returns a printable string such as '2.0pre'. The micro version number is only excluded from the string if it is zero. """ ver = list(map(str, version)) numbers, rest = ver[:2 if ver[2] == '0' else 3], ver[3:] return '.'.join(numbers) + '-'.join(rest)
[ "def", "versionString", "(", "version", ")", ":", "ver", "=", "list", "(", "map", "(", "str", ",", "version", ")", ")", "numbers", ",", "rest", "=", "ver", "[", ":", "2", "if", "ver", "[", "2", "]", "==", "'0'", "else", "3", "]", ",", "ver", "[", "3", ":", "]", "return", "'.'", ".", "join", "(", "numbers", ")", "+", "'-'", ".", "join", "(", "rest", ")" ]
Create version string. For a sequence containing version information such as (2, 0, 0, 'pre'), this returns a printable string such as '2.0pre'. The micro version number is only excluded from the string if it is zero.
[ "Create", "version", "string", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/setversion.py#L37-L47
6,353
Cito/DBUtils
DBUtils/PooledPg.py
PooledPg.steady_connection
def steady_connection(self): """Get a steady, unpooled PostgreSQL connection.""" return SteadyPgConnection(self._maxusage, self._setsession, True, *self._args, **self._kwargs)
python
def steady_connection(self): """Get a steady, unpooled PostgreSQL connection.""" return SteadyPgConnection(self._maxusage, self._setsession, True, *self._args, **self._kwargs)
[ "def", "steady_connection", "(", "self", ")", ":", "return", "SteadyPgConnection", "(", "self", ".", "_maxusage", ",", "self", ".", "_setsession", ",", "True", ",", "*", "self", ".", "_args", ",", "*", "*", "self", ".", "_kwargs", ")" ]
Get a steady, unpooled PostgreSQL connection.
[ "Get", "a", "steady", "unpooled", "PostgreSQL", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledPg.py#L200-L203
6,354
Cito/DBUtils
DBUtils/PooledPg.py
PooledPg.connection
def connection(self): """Get a steady, cached PostgreSQL connection from the pool.""" if self._connections: if not self._connections.acquire(self._blocking): raise TooManyConnections try: con = self._cache.get(0) except Empty: con = self.steady_connection() return PooledPgConnection(self, con)
python
def connection(self): """Get a steady, cached PostgreSQL connection from the pool.""" if self._connections: if not self._connections.acquire(self._blocking): raise TooManyConnections try: con = self._cache.get(0) except Empty: con = self.steady_connection() return PooledPgConnection(self, con)
[ "def", "connection", "(", "self", ")", ":", "if", "self", ".", "_connections", ":", "if", "not", "self", ".", "_connections", ".", "acquire", "(", "self", ".", "_blocking", ")", ":", "raise", "TooManyConnections", "try", ":", "con", "=", "self", ".", "_cache", ".", "get", "(", "0", ")", "except", "Empty", ":", "con", "=", "self", ".", "steady_connection", "(", ")", "return", "PooledPgConnection", "(", "self", ",", "con", ")" ]
Get a steady, cached PostgreSQL connection from the pool.
[ "Get", "a", "steady", "cached", "PostgreSQL", "connection", "from", "the", "pool", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledPg.py#L205-L214
6,355
Cito/DBUtils
DBUtils/PooledPg.py
PooledPg.cache
def cache(self, con): """Put a connection back into the pool cache.""" try: if self._reset == 2: con.reset() # reset the connection completely else: if self._reset or con._transaction: try: con.rollback() # rollback a possible transaction except Exception: pass self._cache.put(con, 0) # and then put it back into the cache except Full: con.close() if self._connections: self._connections.release()
python
def cache(self, con): """Put a connection back into the pool cache.""" try: if self._reset == 2: con.reset() # reset the connection completely else: if self._reset or con._transaction: try: con.rollback() # rollback a possible transaction except Exception: pass self._cache.put(con, 0) # and then put it back into the cache except Full: con.close() if self._connections: self._connections.release()
[ "def", "cache", "(", "self", ",", "con", ")", ":", "try", ":", "if", "self", ".", "_reset", "==", "2", ":", "con", ".", "reset", "(", ")", "# reset the connection completely", "else", ":", "if", "self", ".", "_reset", "or", "con", ".", "_transaction", ":", "try", ":", "con", ".", "rollback", "(", ")", "# rollback a possible transaction", "except", "Exception", ":", "pass", "self", ".", "_cache", ".", "put", "(", "con", ",", "0", ")", "# and then put it back into the cache", "except", "Full", ":", "con", ".", "close", "(", ")", "if", "self", ".", "_connections", ":", "self", ".", "_connections", ".", "release", "(", ")" ]
Put a connection back into the pool cache.
[ "Put", "a", "connection", "back", "into", "the", "pool", "cache", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledPg.py#L216-L231
6,356
Cito/DBUtils
DBUtils/PooledPg.py
PooledPgConnection.reopen
def reopen(self): """Reopen the pooled connection.""" # If the connection is already back in the pool, # get another connection from the pool, # otherwise reopen the underlying connection. if self._con: self._con.reopen() else: self._con = self._pool.connection()
python
def reopen(self): """Reopen the pooled connection.""" # If the connection is already back in the pool, # get another connection from the pool, # otherwise reopen the underlying connection. if self._con: self._con.reopen() else: self._con = self._pool.connection()
[ "def", "reopen", "(", "self", ")", ":", "# If the connection is already back in the pool,", "# get another connection from the pool,", "# otherwise reopen the underlying connection.", "if", "self", ".", "_con", ":", "self", ".", "_con", ".", "reopen", "(", ")", "else", ":", "self", ".", "_con", "=", "self", ".", "_pool", ".", "connection", "(", ")" ]
Reopen the pooled connection.
[ "Reopen", "the", "pooled", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/PooledPg.py#L278-L286
6,357
Cito/DBUtils
DBUtils/SteadyPg.py
SteadyPgConnection.reopen
def reopen(self): """Reopen the tough connection. It will not complain if the connection cannot be reopened. """ try: self._con.reopen() except Exception: if self._transcation: self._transaction = False try: self._con.query('rollback') except Exception: pass else: self._transaction = False self._closed = False self._setsession() self._usage = 0
python
def reopen(self): """Reopen the tough connection. It will not complain if the connection cannot be reopened. """ try: self._con.reopen() except Exception: if self._transcation: self._transaction = False try: self._con.query('rollback') except Exception: pass else: self._transaction = False self._closed = False self._setsession() self._usage = 0
[ "def", "reopen", "(", "self", ")", ":", "try", ":", "self", ".", "_con", ".", "reopen", "(", ")", "except", "Exception", ":", "if", "self", ".", "_transcation", ":", "self", ".", "_transaction", "=", "False", "try", ":", "self", ".", "_con", ".", "query", "(", "'rollback'", ")", "except", "Exception", ":", "pass", "else", ":", "self", ".", "_transaction", "=", "False", "self", ".", "_closed", "=", "False", "self", ".", "_setsession", "(", ")", "self", ".", "_usage", "=", "0" ]
Reopen the tough connection. It will not complain if the connection cannot be reopened.
[ "Reopen", "the", "tough", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyPg.py#L188-L207
6,358
Cito/DBUtils
DBUtils/SteadyPg.py
SteadyPgConnection.reset
def reset(self): """Reset the tough connection. If a reset is not possible, tries to reopen the connection. It will not complain if the connection is already closed. """ try: self._con.reset() self._transaction = False self._setsession() self._usage = 0 except Exception: try: self.reopen() except Exception: try: self.rollback() except Exception: pass
python
def reset(self): """Reset the tough connection. If a reset is not possible, tries to reopen the connection. It will not complain if the connection is already closed. """ try: self._con.reset() self._transaction = False self._setsession() self._usage = 0 except Exception: try: self.reopen() except Exception: try: self.rollback() except Exception: pass
[ "def", "reset", "(", "self", ")", ":", "try", ":", "self", ".", "_con", ".", "reset", "(", ")", "self", ".", "_transaction", "=", "False", "self", ".", "_setsession", "(", ")", "self", ".", "_usage", "=", "0", "except", "Exception", ":", "try", ":", "self", ".", "reopen", "(", ")", "except", "Exception", ":", "try", ":", "self", ".", "rollback", "(", ")", "except", "Exception", ":", "pass" ]
Reset the tough connection. If a reset is not possible, tries to reopen the connection. It will not complain if the connection is already closed.
[ "Reset", "the", "tough", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyPg.py#L209-L228
6,359
Cito/DBUtils
DBUtils/SteadyPg.py
SteadyPgConnection._get_tough_method
def _get_tough_method(self, method): """Return a "tough" version of a connection class method. The tough version checks whether the connection is bad (lost) and automatically and transparently tries to reset the connection if this is the case (for instance, the database has been restarted). """ def tough_method(*args, **kwargs): transaction = self._transaction if not transaction: try: # check whether connection status is bad if not self._con.db.status: raise AttributeError if self._maxusage: # or connection used too often if self._usage >= self._maxusage: raise AttributeError except Exception: self.reset() # then reset the connection try: result = method(*args, **kwargs) # try connection method except Exception: # error in query if transaction: # inside a transaction self._transaction = False raise # propagate the error elif self._con.db.status: # if it was not a connection problem raise # then propagate the error else: # otherwise self.reset() # reset the connection result = method(*args, **kwargs) # and try one more time self._usage += 1 return result return tough_method
python
def _get_tough_method(self, method): """Return a "tough" version of a connection class method. The tough version checks whether the connection is bad (lost) and automatically and transparently tries to reset the connection if this is the case (for instance, the database has been restarted). """ def tough_method(*args, **kwargs): transaction = self._transaction if not transaction: try: # check whether connection status is bad if not self._con.db.status: raise AttributeError if self._maxusage: # or connection used too often if self._usage >= self._maxusage: raise AttributeError except Exception: self.reset() # then reset the connection try: result = method(*args, **kwargs) # try connection method except Exception: # error in query if transaction: # inside a transaction self._transaction = False raise # propagate the error elif self._con.db.status: # if it was not a connection problem raise # then propagate the error else: # otherwise self.reset() # reset the connection result = method(*args, **kwargs) # and try one more time self._usage += 1 return result return tough_method
[ "def", "_get_tough_method", "(", "self", ",", "method", ")", ":", "def", "tough_method", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "transaction", "=", "self", ".", "_transaction", "if", "not", "transaction", ":", "try", ":", "# check whether connection status is bad", "if", "not", "self", ".", "_con", ".", "db", ".", "status", ":", "raise", "AttributeError", "if", "self", ".", "_maxusage", ":", "# or connection used too often", "if", "self", ".", "_usage", ">=", "self", ".", "_maxusage", ":", "raise", "AttributeError", "except", "Exception", ":", "self", ".", "reset", "(", ")", "# then reset the connection", "try", ":", "result", "=", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# try connection method", "except", "Exception", ":", "# error in query", "if", "transaction", ":", "# inside a transaction", "self", ".", "_transaction", "=", "False", "raise", "# propagate the error", "elif", "self", ".", "_con", ".", "db", ".", "status", ":", "# if it was not a connection problem", "raise", "# then propagate the error", "else", ":", "# otherwise", "self", ".", "reset", "(", ")", "# reset the connection", "result", "=", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# and try one more time", "self", ".", "_usage", "+=", "1", "return", "result", "return", "tough_method" ]
Return a "tough" version of a connection class method. The tough version checks whether the connection is bad (lost) and automatically and transparently tries to reset the connection if this is the case (for instance, the database has been restarted).
[ "Return", "a", "tough", "version", "of", "a", "connection", "class", "method", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyPg.py#L283-L315
6,360
Cito/DBUtils
DBUtils/SteadyDB.py
connect
def connect( creator, maxusage=None, setsession=None, failures=None, ping=1, closeable=True, *args, **kwargs): """A tough version of the connection constructor of a DB-API 2 module. creator: either an arbitrary function returning new DB-API 2 compliant connection objects or a DB-API 2 compliant database module maxusage: maximum usage limit for the underlying DB-API 2 connection (number of database operations, 0 or None means unlimited usage) callproc(), execute() and executemany() count as one operation. When the limit is reached, the connection is automatically reset. setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", "set time zone mez"] failures: an optional exception class or a tuple of exception classes for which the failover mechanism shall be applied, if the default (OperationalError, InternalError) is not adequate ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = when _ping_check() is called, 2 = whenever a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) closeable: if this is set to false, then closing the connection will be silently ignored, but by default the connection can be closed args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module """ return SteadyDBConnection( creator, maxusage, setsession, failures, ping, closeable, *args, **kwargs)
python
def connect( creator, maxusage=None, setsession=None, failures=None, ping=1, closeable=True, *args, **kwargs): """A tough version of the connection constructor of a DB-API 2 module. creator: either an arbitrary function returning new DB-API 2 compliant connection objects or a DB-API 2 compliant database module maxusage: maximum usage limit for the underlying DB-API 2 connection (number of database operations, 0 or None means unlimited usage) callproc(), execute() and executemany() count as one operation. When the limit is reached, the connection is automatically reset. setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", "set time zone mez"] failures: an optional exception class or a tuple of exception classes for which the failover mechanism shall be applied, if the default (OperationalError, InternalError) is not adequate ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = when _ping_check() is called, 2 = whenever a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) closeable: if this is set to false, then closing the connection will be silently ignored, but by default the connection can be closed args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module """ return SteadyDBConnection( creator, maxusage, setsession, failures, ping, closeable, *args, **kwargs)
[ "def", "connect", "(", "creator", ",", "maxusage", "=", "None", ",", "setsession", "=", "None", ",", "failures", "=", "None", ",", "ping", "=", "1", ",", "closeable", "=", "True", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "SteadyDBConnection", "(", "creator", ",", "maxusage", ",", "setsession", ",", "failures", ",", "ping", ",", "closeable", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
A tough version of the connection constructor of a DB-API 2 module. creator: either an arbitrary function returning new DB-API 2 compliant connection objects or a DB-API 2 compliant database module maxusage: maximum usage limit for the underlying DB-API 2 connection (number of database operations, 0 or None means unlimited usage) callproc(), execute() and executemany() count as one operation. When the limit is reached, the connection is automatically reset. setsession: an optional list of SQL commands that may serve to prepare the session, e.g. ["set datestyle to german", "set time zone mez"] failures: an optional exception class or a tuple of exception classes for which the failover mechanism shall be applied, if the default (OperationalError, InternalError) is not adequate ping: determines when the connection should be checked with ping() (0 = None = never, 1 = default = when _ping_check() is called, 2 = whenever a cursor is created, 4 = when a query is executed, 7 = always, and all other bit combinations of these values) closeable: if this is set to false, then closing the connection will be silently ignored, but by default the connection can be closed args, kwargs: the parameters that shall be passed to the creator function or the connection constructor of the DB-API 2 module
[ "A", "tough", "version", "of", "the", "connection", "constructor", "of", "a", "DB", "-", "API", "2", "module", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L111-L139
6,361
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBConnection._create
def _create(self): """Create a new connection using the creator function.""" con = self._creator(*self._args, **self._kwargs) try: try: if self._dbapi.connect != self._creator: raise AttributeError except AttributeError: # try finding the DB-API 2 module via the connection itself try: mod = con.__module__ except AttributeError: mod = None while mod: try: self._dbapi = sys.modules[mod] if not callable(self._dbapi.connect): raise AttributeError except (AttributeError, KeyError): pass else: break i = mod.rfind('.') if i < 0: mod = None else: mod = mod[:i] else: try: mod = con.OperationalError.__module__ except AttributeError: mod = None while mod: try: self._dbapi = sys.modules[mod] if not callable(self._dbapi.connect): raise AttributeError except (AttributeError, KeyError): pass else: break i = mod.rfind('.') if i < 0: mod = None else: mod = mod[:i] else: self._dbapi = None if self._threadsafety is None: try: self._threadsafety = self._dbapi.threadsafety except AttributeError: try: self._threadsafety = con.threadsafety except AttributeError: pass if self._failures is None: try: self._failures = ( self._dbapi.OperationalError, self._dbapi.InternalError) except AttributeError: try: self._failures = ( self._creator.OperationalError, self._creator.InternalError) except AttributeError: try: self._failures = ( con.OperationalError, con.InternalError) except AttributeError: raise AttributeError( "Could not determine failure exceptions" " (please set failures or creator.dbapi).") if isinstance(self._failures, tuple): self._failure = self._failures[0] else: self._failure = self._failures self._setsession(con) except Exception as error: # the database module could not be determined # or the session could not be prepared try: # close the connection first con.close() except Exception: pass raise error # re-raise the original error again return con
python
def _create(self): """Create a new connection using the creator function.""" con = self._creator(*self._args, **self._kwargs) try: try: if self._dbapi.connect != self._creator: raise AttributeError except AttributeError: # try finding the DB-API 2 module via the connection itself try: mod = con.__module__ except AttributeError: mod = None while mod: try: self._dbapi = sys.modules[mod] if not callable(self._dbapi.connect): raise AttributeError except (AttributeError, KeyError): pass else: break i = mod.rfind('.') if i < 0: mod = None else: mod = mod[:i] else: try: mod = con.OperationalError.__module__ except AttributeError: mod = None while mod: try: self._dbapi = sys.modules[mod] if not callable(self._dbapi.connect): raise AttributeError except (AttributeError, KeyError): pass else: break i = mod.rfind('.') if i < 0: mod = None else: mod = mod[:i] else: self._dbapi = None if self._threadsafety is None: try: self._threadsafety = self._dbapi.threadsafety except AttributeError: try: self._threadsafety = con.threadsafety except AttributeError: pass if self._failures is None: try: self._failures = ( self._dbapi.OperationalError, self._dbapi.InternalError) except AttributeError: try: self._failures = ( self._creator.OperationalError, self._creator.InternalError) except AttributeError: try: self._failures = ( con.OperationalError, con.InternalError) except AttributeError: raise AttributeError( "Could not determine failure exceptions" " (please set failures or creator.dbapi).") if isinstance(self._failures, tuple): self._failure = self._failures[0] else: self._failure = self._failures self._setsession(con) except Exception as error: # the database module could not be determined # or the session could not be prepared try: # close the connection first con.close() except Exception: pass raise error # re-raise the original error again return con
[ "def", "_create", "(", "self", ")", ":", "con", "=", "self", ".", "_creator", "(", "*", "self", ".", "_args", ",", "*", "*", "self", ".", "_kwargs", ")", "try", ":", "try", ":", "if", "self", ".", "_dbapi", ".", "connect", "!=", "self", ".", "_creator", ":", "raise", "AttributeError", "except", "AttributeError", ":", "# try finding the DB-API 2 module via the connection itself", "try", ":", "mod", "=", "con", ".", "__module__", "except", "AttributeError", ":", "mod", "=", "None", "while", "mod", ":", "try", ":", "self", ".", "_dbapi", "=", "sys", ".", "modules", "[", "mod", "]", "if", "not", "callable", "(", "self", ".", "_dbapi", ".", "connect", ")", ":", "raise", "AttributeError", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "pass", "else", ":", "break", "i", "=", "mod", ".", "rfind", "(", "'.'", ")", "if", "i", "<", "0", ":", "mod", "=", "None", "else", ":", "mod", "=", "mod", "[", ":", "i", "]", "else", ":", "try", ":", "mod", "=", "con", ".", "OperationalError", ".", "__module__", "except", "AttributeError", ":", "mod", "=", "None", "while", "mod", ":", "try", ":", "self", ".", "_dbapi", "=", "sys", ".", "modules", "[", "mod", "]", "if", "not", "callable", "(", "self", ".", "_dbapi", ".", "connect", ")", ":", "raise", "AttributeError", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "pass", "else", ":", "break", "i", "=", "mod", ".", "rfind", "(", "'.'", ")", "if", "i", "<", "0", ":", "mod", "=", "None", "else", ":", "mod", "=", "mod", "[", ":", "i", "]", "else", ":", "self", ".", "_dbapi", "=", "None", "if", "self", ".", "_threadsafety", "is", "None", ":", "try", ":", "self", ".", "_threadsafety", "=", "self", ".", "_dbapi", ".", "threadsafety", "except", "AttributeError", ":", "try", ":", "self", ".", "_threadsafety", "=", "con", ".", "threadsafety", "except", "AttributeError", ":", "pass", "if", "self", ".", "_failures", "is", "None", ":", "try", ":", "self", ".", "_failures", "=", "(", "self", ".", "_dbapi", ".", 
"OperationalError", ",", "self", ".", "_dbapi", ".", "InternalError", ")", "except", "AttributeError", ":", "try", ":", "self", ".", "_failures", "=", "(", "self", ".", "_creator", ".", "OperationalError", ",", "self", ".", "_creator", ".", "InternalError", ")", "except", "AttributeError", ":", "try", ":", "self", ".", "_failures", "=", "(", "con", ".", "OperationalError", ",", "con", ".", "InternalError", ")", "except", "AttributeError", ":", "raise", "AttributeError", "(", "\"Could not determine failure exceptions\"", "\" (please set failures or creator.dbapi).\"", ")", "if", "isinstance", "(", "self", ".", "_failures", ",", "tuple", ")", ":", "self", ".", "_failure", "=", "self", ".", "_failures", "[", "0", "]", "else", ":", "self", ".", "_failure", "=", "self", ".", "_failures", "self", ".", "_setsession", "(", "con", ")", "except", "Exception", "as", "error", ":", "# the database module could not be determined", "# or the session could not be prepared", "try", ":", "# close the connection first", "con", ".", "close", "(", ")", "except", "Exception", ":", "pass", "raise", "error", "# re-raise the original error again", "return", "con" ]
Create a new connection using the creator function.
[ "Create", "a", "new", "connection", "using", "the", "creator", "function", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L209-L296
6,362
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBConnection._store
def _store(self, con): """Store a database connection for subsequent use.""" self._con = con self._transaction = False self._closed = False self._usage = 0
python
def _store(self, con): """Store a database connection for subsequent use.""" self._con = con self._transaction = False self._closed = False self._usage = 0
[ "def", "_store", "(", "self", ",", "con", ")", ":", "self", ".", "_con", "=", "con", "self", ".", "_transaction", "=", "False", "self", ".", "_closed", "=", "False", "self", ".", "_usage", "=", "0" ]
Store a database connection for subsequent use.
[ "Store", "a", "database", "connection", "for", "subsequent", "use", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L308-L313
6,363
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBConnection._reset
def _reset(self, force=False): """Reset a tough connection. Rollback if forced or the connection was in a transaction. """ if not self._closed and (force or self._transaction): try: self.rollback() except Exception: pass
python
def _reset(self, force=False): """Reset a tough connection. Rollback if forced or the connection was in a transaction. """ if not self._closed and (force or self._transaction): try: self.rollback() except Exception: pass
[ "def", "_reset", "(", "self", ",", "force", "=", "False", ")", ":", "if", "not", "self", ".", "_closed", "and", "(", "force", "or", "self", ".", "_transaction", ")", ":", "try", ":", "self", ".", "rollback", "(", ")", "except", "Exception", ":", "pass" ]
Reset a tough connection. Rollback if forced or the connection was in a transaction.
[ "Reset", "a", "tough", "connection", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L330-L340
6,364
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBConnection.begin
def begin(self, *args, **kwargs): """Indicate the beginning of a transaction. During a transaction, connections won't be transparently replaced, and all errors will be raised to the application. If the underlying driver supports this method, it will be called with the given parameters (e.g. for distributed transactions). """ self._transaction = True try: begin = self._con.begin except AttributeError: pass else: begin(*args, **kwargs)
python
def begin(self, *args, **kwargs): """Indicate the beginning of a transaction. During a transaction, connections won't be transparently replaced, and all errors will be raised to the application. If the underlying driver supports this method, it will be called with the given parameters (e.g. for distributed transactions). """ self._transaction = True try: begin = self._con.begin except AttributeError: pass else: begin(*args, **kwargs)
[ "def", "begin", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_transaction", "=", "True", "try", ":", "begin", "=", "self", ".", "_con", ".", "begin", "except", "AttributeError", ":", "pass", "else", ":", "begin", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Indicate the beginning of a transaction. During a transaction, connections won't be transparently replaced, and all errors will be raised to the application. If the underlying driver supports this method, it will be called with the given parameters (e.g. for distributed transactions).
[ "Indicate", "the", "beginning", "of", "a", "transaction", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L409-L425
6,365
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBConnection.commit
def commit(self): """Commit any pending transaction.""" self._transaction = False try: self._con.commit() except self._failures as error: # cannot commit try: # try to reopen the connection con = self._create() except Exception: pass else: self._close() self._store(con) raise error
python
def commit(self): """Commit any pending transaction.""" self._transaction = False try: self._con.commit() except self._failures as error: # cannot commit try: # try to reopen the connection con = self._create() except Exception: pass else: self._close() self._store(con) raise error
[ "def", "commit", "(", "self", ")", ":", "self", ".", "_transaction", "=", "False", "try", ":", "self", ".", "_con", ".", "commit", "(", ")", "except", "self", ".", "_failures", "as", "error", ":", "# cannot commit", "try", ":", "# try to reopen the connection", "con", "=", "self", ".", "_create", "(", ")", "except", "Exception", ":", "pass", "else", ":", "self", ".", "_close", "(", ")", "self", ".", "_store", "(", "con", ")", "raise", "error" ]
Commit any pending transaction.
[ "Commit", "any", "pending", "transaction", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L427-L440
6,366
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBConnection.cancel
def cancel(self): """Cancel a long-running transaction. If the underlying driver supports this method, it will be called. """ self._transaction = False try: cancel = self._con.cancel except AttributeError: pass else: cancel()
python
def cancel(self): """Cancel a long-running transaction. If the underlying driver supports this method, it will be called. """ self._transaction = False try: cancel = self._con.cancel except AttributeError: pass else: cancel()
[ "def", "cancel", "(", "self", ")", ":", "self", ".", "_transaction", "=", "False", "try", ":", "cancel", "=", "self", ".", "_con", ".", "cancel", "except", "AttributeError", ":", "pass", "else", ":", "cancel", "(", ")" ]
Cancel a long-running transaction. If the underlying driver supports this method, it will be called.
[ "Cancel", "a", "long", "-", "running", "transaction", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L457-L469
6,367
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBCursor._setsizes
def _setsizes(self, cursor=None): """Set stored input and output sizes for cursor execution.""" if cursor is None: cursor = self._cursor if self._inputsizes: cursor.setinputsizes(self._inputsizes) for column, size in self._outputsizes.items(): if column is None: cursor.setoutputsize(size) else: cursor.setoutputsize(size, column)
python
def _setsizes(self, cursor=None): """Set stored input and output sizes for cursor execution.""" if cursor is None: cursor = self._cursor if self._inputsizes: cursor.setinputsizes(self._inputsizes) for column, size in self._outputsizes.items(): if column is None: cursor.setoutputsize(size) else: cursor.setoutputsize(size, column)
[ "def", "_setsizes", "(", "self", ",", "cursor", "=", "None", ")", ":", "if", "cursor", "is", "None", ":", "cursor", "=", "self", ".", "_cursor", "if", "self", ".", "_inputsizes", ":", "cursor", ".", "setinputsizes", "(", "self", ".", "_inputsizes", ")", "for", "column", ",", "size", "in", "self", ".", "_outputsizes", ".", "items", "(", ")", ":", "if", "column", "is", "None", ":", "cursor", ".", "setoutputsize", "(", "size", ")", "else", ":", "cursor", ".", "setoutputsize", "(", "size", ",", "column", ")" ]
Set stored input and output sizes for cursor execution.
[ "Set", "stored", "input", "and", "output", "sizes", "for", "cursor", "execution", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L564-L574
6,368
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBCursor.close
def close(self): """Close the tough cursor. It will not complain if you close it more than once. """ if not self._closed: try: self._cursor.close() except Exception: pass self._closed = True
python
def close(self): """Close the tough cursor. It will not complain if you close it more than once. """ if not self._closed: try: self._cursor.close() except Exception: pass self._closed = True
[ "def", "close", "(", "self", ")", ":", "if", "not", "self", ".", "_closed", ":", "try", ":", "self", ".", "_cursor", ".", "close", "(", ")", "except", "Exception", ":", "pass", "self", ".", "_closed", "=", "True" ]
Close the tough cursor. It will not complain if you close it more than once.
[ "Close", "the", "tough", "cursor", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L576-L587
6,369
Cito/DBUtils
DBUtils/SteadyDB.py
SteadyDBCursor._get_tough_method
def _get_tough_method(self, name): """Return a "tough" version of the given cursor method.""" def tough_method(*args, **kwargs): execute = name.startswith('execute') con = self._con transaction = con._transaction if not transaction: con._ping_check(4) try: if con._maxusage: if con._usage >= con._maxusage: # the connection was used too often raise con._failure if execute: self._setsizes() method = getattr(self._cursor, name) result = method(*args, **kwargs) # try to execute if execute: self._clearsizes() except con._failures as error: # execution error if not transaction: try: cursor2 = con._cursor( *self._args, **self._kwargs) # open new cursor except Exception: pass else: try: # and try one more time to execute if execute: self._setsizes(cursor2) method = getattr(cursor2, name) result = method(*args, **kwargs) if execute: self._clearsizes() except Exception: pass else: self.close() self._cursor = cursor2 con._usage += 1 return result try: cursor2.close() except Exception: pass try: # try to reopen the connection con2 = con._create() except Exception: pass else: try: cursor2 = con2.cursor( *self._args, **self._kwargs) # open new cursor except Exception: pass else: if transaction: self.close() con._close() con._store(con2) self._cursor = cursor2 raise error # raise the original error again error2 = None try: # try one more time to execute if execute: self._setsizes(cursor2) method2 = getattr(cursor2, name) result = method2(*args, **kwargs) if execute: self._clearsizes() except error.__class__: # same execution error use2 = False error2 = error except Exception as error: # other execution errors use2 = True error2 = error else: use2 = True if use2: self.close() con._close() con._store(con2) self._cursor = cursor2 con._usage += 1 if error2: raise error2 # raise the other error return result try: cursor2.close() except Exception: pass try: con2.close() except Exception: pass if transaction: self._transaction = False raise error # re-raise the original error again else: 
con._usage += 1 return result return tough_method
python
def _get_tough_method(self, name): """Return a "tough" version of the given cursor method.""" def tough_method(*args, **kwargs): execute = name.startswith('execute') con = self._con transaction = con._transaction if not transaction: con._ping_check(4) try: if con._maxusage: if con._usage >= con._maxusage: # the connection was used too often raise con._failure if execute: self._setsizes() method = getattr(self._cursor, name) result = method(*args, **kwargs) # try to execute if execute: self._clearsizes() except con._failures as error: # execution error if not transaction: try: cursor2 = con._cursor( *self._args, **self._kwargs) # open new cursor except Exception: pass else: try: # and try one more time to execute if execute: self._setsizes(cursor2) method = getattr(cursor2, name) result = method(*args, **kwargs) if execute: self._clearsizes() except Exception: pass else: self.close() self._cursor = cursor2 con._usage += 1 return result try: cursor2.close() except Exception: pass try: # try to reopen the connection con2 = con._create() except Exception: pass else: try: cursor2 = con2.cursor( *self._args, **self._kwargs) # open new cursor except Exception: pass else: if transaction: self.close() con._close() con._store(con2) self._cursor = cursor2 raise error # raise the original error again error2 = None try: # try one more time to execute if execute: self._setsizes(cursor2) method2 = getattr(cursor2, name) result = method2(*args, **kwargs) if execute: self._clearsizes() except error.__class__: # same execution error use2 = False error2 = error except Exception as error: # other execution errors use2 = True error2 = error else: use2 = True if use2: self.close() con._close() con._store(con2) self._cursor = cursor2 con._usage += 1 if error2: raise error2 # raise the other error return result try: cursor2.close() except Exception: pass try: con2.close() except Exception: pass if transaction: self._transaction = False raise error # re-raise the original error again else: 
con._usage += 1 return result return tough_method
[ "def", "_get_tough_method", "(", "self", ",", "name", ")", ":", "def", "tough_method", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "execute", "=", "name", ".", "startswith", "(", "'execute'", ")", "con", "=", "self", ".", "_con", "transaction", "=", "con", ".", "_transaction", "if", "not", "transaction", ":", "con", ".", "_ping_check", "(", "4", ")", "try", ":", "if", "con", ".", "_maxusage", ":", "if", "con", ".", "_usage", ">=", "con", ".", "_maxusage", ":", "# the connection was used too often", "raise", "con", ".", "_failure", "if", "execute", ":", "self", ".", "_setsizes", "(", ")", "method", "=", "getattr", "(", "self", ".", "_cursor", ",", "name", ")", "result", "=", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# try to execute", "if", "execute", ":", "self", ".", "_clearsizes", "(", ")", "except", "con", ".", "_failures", "as", "error", ":", "# execution error", "if", "not", "transaction", ":", "try", ":", "cursor2", "=", "con", ".", "_cursor", "(", "*", "self", ".", "_args", ",", "*", "*", "self", ".", "_kwargs", ")", "# open new cursor", "except", "Exception", ":", "pass", "else", ":", "try", ":", "# and try one more time to execute", "if", "execute", ":", "self", ".", "_setsizes", "(", "cursor2", ")", "method", "=", "getattr", "(", "cursor2", ",", "name", ")", "result", "=", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "execute", ":", "self", ".", "_clearsizes", "(", ")", "except", "Exception", ":", "pass", "else", ":", "self", ".", "close", "(", ")", "self", ".", "_cursor", "=", "cursor2", "con", ".", "_usage", "+=", "1", "return", "result", "try", ":", "cursor2", ".", "close", "(", ")", "except", "Exception", ":", "pass", "try", ":", "# try to reopen the connection", "con2", "=", "con", ".", "_create", "(", ")", "except", "Exception", ":", "pass", "else", ":", "try", ":", "cursor2", "=", "con2", ".", "cursor", "(", "*", "self", ".", "_args", ",", "*", "*", "self", ".", "_kwargs", ")", "# open new cursor", 
"except", "Exception", ":", "pass", "else", ":", "if", "transaction", ":", "self", ".", "close", "(", ")", "con", ".", "_close", "(", ")", "con", ".", "_store", "(", "con2", ")", "self", ".", "_cursor", "=", "cursor2", "raise", "error", "# raise the original error again", "error2", "=", "None", "try", ":", "# try one more time to execute", "if", "execute", ":", "self", ".", "_setsizes", "(", "cursor2", ")", "method2", "=", "getattr", "(", "cursor2", ",", "name", ")", "result", "=", "method2", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "execute", ":", "self", ".", "_clearsizes", "(", ")", "except", "error", ".", "__class__", ":", "# same execution error", "use2", "=", "False", "error2", "=", "error", "except", "Exception", "as", "error", ":", "# other execution errors", "use2", "=", "True", "error2", "=", "error", "else", ":", "use2", "=", "True", "if", "use2", ":", "self", ".", "close", "(", ")", "con", ".", "_close", "(", ")", "con", ".", "_store", "(", "con2", ")", "self", ".", "_cursor", "=", "cursor2", "con", ".", "_usage", "+=", "1", "if", "error2", ":", "raise", "error2", "# raise the other error", "return", "result", "try", ":", "cursor2", ".", "close", "(", ")", "except", "Exception", ":", "pass", "try", ":", "con2", ".", "close", "(", ")", "except", "Exception", ":", "pass", "if", "transaction", ":", "self", ".", "_transaction", "=", "False", "raise", "error", "# re-raise the original error again", "else", ":", "con", ".", "_usage", "+=", "1", "return", "result", "return", "tough_method" ]
Return a "tough" version of the given cursor method.
[ "Return", "a", "tough", "version", "of", "the", "given", "cursor", "method", "." ]
90e8825e038f08c82044b8e50831480175fa026a
https://github.com/Cito/DBUtils/blob/90e8825e038f08c82044b8e50831480175fa026a/DBUtils/SteadyDB.py#L589-L690
6,370
mcs07/ChemDataExtractor
chemdataextractor/cli/tokenize.py
train_punkt
def train_punkt(ctx, input, output, abbr, colloc): """Train Punkt sentence splitter using sentences in input.""" click.echo('chemdataextractor.tokenize.train_punkt') import pickle from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer punkt = PunktTrainer() # Set these to true to include collocations more leniently, then increase MIN_COLLOC_FREQ to restrict again # punkt.INCLUDE_ALL_COLLOCS = False # punkt.INCLUDE_ABBREV_COLLOCS = False # punkt.MIN_COLLOC_FREQ = 1 # Don't train on titles. They may contain abbreviations, but basically never have actual sentence boundaries. for fin in input: click.echo('Training on %s' % fin.name) sentences = fin.read() #.replace('.\n', '. \n\n') punkt.train(sentences, finalize=False, verbose=True) punkt.finalize_training(verbose=True) if abbr: abbreviations = abbr.read().strip().split('\n') click.echo('Manually adding abbreviations: %s' % abbreviations) punkt._params.abbrev_types.update(abbreviations) if colloc: collocations = [tuple(l.split('. ', 1)) for l in colloc.read().strip().split('\n')] click.echo('Manually adding collocs: %s' % collocations) punkt._params.collocations.update(collocations) model = PunktSentenceTokenizer(punkt.get_params()) pickle.dump(model, output, protocol=pickle.HIGHEST_PROTOCOL)
python
def train_punkt(ctx, input, output, abbr, colloc): """Train Punkt sentence splitter using sentences in input.""" click.echo('chemdataextractor.tokenize.train_punkt') import pickle from nltk.tokenize.punkt import PunktSentenceTokenizer, PunktTrainer punkt = PunktTrainer() # Set these to true to include collocations more leniently, then increase MIN_COLLOC_FREQ to restrict again # punkt.INCLUDE_ALL_COLLOCS = False # punkt.INCLUDE_ABBREV_COLLOCS = False # punkt.MIN_COLLOC_FREQ = 1 # Don't train on titles. They may contain abbreviations, but basically never have actual sentence boundaries. for fin in input: click.echo('Training on %s' % fin.name) sentences = fin.read() #.replace('.\n', '. \n\n') punkt.train(sentences, finalize=False, verbose=True) punkt.finalize_training(verbose=True) if abbr: abbreviations = abbr.read().strip().split('\n') click.echo('Manually adding abbreviations: %s' % abbreviations) punkt._params.abbrev_types.update(abbreviations) if colloc: collocations = [tuple(l.split('. ', 1)) for l in colloc.read().strip().split('\n')] click.echo('Manually adding collocs: %s' % collocations) punkt._params.collocations.update(collocations) model = PunktSentenceTokenizer(punkt.get_params()) pickle.dump(model, output, protocol=pickle.HIGHEST_PROTOCOL)
[ "def", "train_punkt", "(", "ctx", ",", "input", ",", "output", ",", "abbr", ",", "colloc", ")", ":", "click", ".", "echo", "(", "'chemdataextractor.tokenize.train_punkt'", ")", "import", "pickle", "from", "nltk", ".", "tokenize", ".", "punkt", "import", "PunktSentenceTokenizer", ",", "PunktTrainer", "punkt", "=", "PunktTrainer", "(", ")", "# Set these to true to include collocations more leniently, then increase MIN_COLLOC_FREQ to restrict again", "# punkt.INCLUDE_ALL_COLLOCS = False", "# punkt.INCLUDE_ABBREV_COLLOCS = False", "# punkt.MIN_COLLOC_FREQ = 1", "# Don't train on titles. They may contain abbreviations, but basically never have actual sentence boundaries.", "for", "fin", "in", "input", ":", "click", ".", "echo", "(", "'Training on %s'", "%", "fin", ".", "name", ")", "sentences", "=", "fin", ".", "read", "(", ")", "#.replace('.\\n', '. \\n\\n')", "punkt", ".", "train", "(", "sentences", ",", "finalize", "=", "False", ",", "verbose", "=", "True", ")", "punkt", ".", "finalize_training", "(", "verbose", "=", "True", ")", "if", "abbr", ":", "abbreviations", "=", "abbr", ".", "read", "(", ")", ".", "strip", "(", ")", ".", "split", "(", "'\\n'", ")", "click", ".", "echo", "(", "'Manually adding abbreviations: %s'", "%", "abbreviations", ")", "punkt", ".", "_params", ".", "abbrev_types", ".", "update", "(", "abbreviations", ")", "if", "colloc", ":", "collocations", "=", "[", "tuple", "(", "l", ".", "split", "(", "'. '", ",", "1", ")", ")", "for", "l", "in", "colloc", ".", "read", "(", ")", ".", "strip", "(", ")", ".", "split", "(", "'\\n'", ")", "]", "click", ".", "echo", "(", "'Manually adding collocs: %s'", "%", "collocations", ")", "punkt", ".", "_params", ".", "collocations", ".", "update", "(", "collocations", ")", "model", "=", "PunktSentenceTokenizer", "(", "punkt", ".", "get_params", "(", ")", ")", "pickle", ".", "dump", "(", "model", ",", "output", ",", "protocol", "=", "pickle", ".", "HIGHEST_PROTOCOL", ")" ]
Train Punkt sentence splitter using sentences in input.
[ "Train", "Punkt", "sentence", "splitter", "using", "sentences", "in", "input", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/tokenize.py#L36-L61
6,371
mcs07/ChemDataExtractor
chemdataextractor/cli/tokenize.py
sentences
def sentences(ctx, input, output): """Read input document, and output sentences.""" log.info('chemdataextractor.read.elements') log.info('Reading %s' % input.name) doc = Document.from_file(input) for element in doc.elements: if isinstance(element, Text): for raw_sentence in element.raw_sentences: output.write(raw_sentence.strip()) output.write(u'\n')
python
def sentences(ctx, input, output): """Read input document, and output sentences.""" log.info('chemdataextractor.read.elements') log.info('Reading %s' % input.name) doc = Document.from_file(input) for element in doc.elements: if isinstance(element, Text): for raw_sentence in element.raw_sentences: output.write(raw_sentence.strip()) output.write(u'\n')
[ "def", "sentences", "(", "ctx", ",", "input", ",", "output", ")", ":", "log", ".", "info", "(", "'chemdataextractor.read.elements'", ")", "log", ".", "info", "(", "'Reading %s'", "%", "input", ".", "name", ")", "doc", "=", "Document", ".", "from_file", "(", "input", ")", "for", "element", "in", "doc", ".", "elements", ":", "if", "isinstance", "(", "element", ",", "Text", ")", ":", "for", "raw_sentence", "in", "element", ".", "raw_sentences", ":", "output", ".", "write", "(", "raw_sentence", ".", "strip", "(", ")", ")", "output", ".", "write", "(", "u'\\n'", ")" ]
Read input document, and output sentences.
[ "Read", "input", "document", "and", "output", "sentences", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/tokenize.py#L68-L77
6,372
mcs07/ChemDataExtractor
chemdataextractor/cli/tokenize.py
words
def words(ctx, input, output): """Read input document, and output words.""" log.info('chemdataextractor.read.elements') log.info('Reading %s' % input.name) doc = Document.from_file(input) for element in doc.elements: if isinstance(element, Text): for sentence in element.sentences: output.write(u' '.join(sentence.raw_tokens)) output.write(u'\n')
python
def words(ctx, input, output): """Read input document, and output words.""" log.info('chemdataextractor.read.elements') log.info('Reading %s' % input.name) doc = Document.from_file(input) for element in doc.elements: if isinstance(element, Text): for sentence in element.sentences: output.write(u' '.join(sentence.raw_tokens)) output.write(u'\n')
[ "def", "words", "(", "ctx", ",", "input", ",", "output", ")", ":", "log", ".", "info", "(", "'chemdataextractor.read.elements'", ")", "log", ".", "info", "(", "'Reading %s'", "%", "input", ".", "name", ")", "doc", "=", "Document", ".", "from_file", "(", "input", ")", "for", "element", "in", "doc", ".", "elements", ":", "if", "isinstance", "(", "element", ",", "Text", ")", ":", "for", "sentence", "in", "element", ".", "sentences", ":", "output", ".", "write", "(", "u' '", ".", "join", "(", "sentence", ".", "raw_tokens", ")", ")", "output", ".", "write", "(", "u'\\n'", ")" ]
Read input document, and output words.
[ "Read", "input", "document", "and", "output", "words", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/tokenize.py#L84-L93
6,373
mcs07/ChemDataExtractor
chemdataextractor/nlp/cem.py
CemTagger._in_stoplist
def _in_stoplist(self, entity): """Return True if the entity is in the stoplist.""" start = 0 end = len(entity) # Adjust boundaries to exclude disallowed prefixes/suffixes for prefix in IGNORE_PREFIX: if entity.startswith(prefix): # print('%s removing %s' % (currenttext, prefix)) start += len(prefix) break for suffix in IGNORE_SUFFIX: if entity.endswith(suffix): # print('%s removing %s' % (currenttext, suffix)) end -= len(suffix) break # Return True if entity has been reduced to nothing by adjusting boundaries if start >= end: return True # Return True if adjusted entity is in the literal stoplist entity = entity[start:end] if entity in STOPLIST: return True # log.debug('Entity: %s', entity) for stop_re in STOP_RES: if re.search(stop_re, entity): log.debug('Killed: %s', entity) return True
python
def _in_stoplist(self, entity): """Return True if the entity is in the stoplist.""" start = 0 end = len(entity) # Adjust boundaries to exclude disallowed prefixes/suffixes for prefix in IGNORE_PREFIX: if entity.startswith(prefix): # print('%s removing %s' % (currenttext, prefix)) start += len(prefix) break for suffix in IGNORE_SUFFIX: if entity.endswith(suffix): # print('%s removing %s' % (currenttext, suffix)) end -= len(suffix) break # Return True if entity has been reduced to nothing by adjusting boundaries if start >= end: return True # Return True if adjusted entity is in the literal stoplist entity = entity[start:end] if entity in STOPLIST: return True # log.debug('Entity: %s', entity) for stop_re in STOP_RES: if re.search(stop_re, entity): log.debug('Killed: %s', entity) return True
[ "def", "_in_stoplist", "(", "self", ",", "entity", ")", ":", "start", "=", "0", "end", "=", "len", "(", "entity", ")", "# Adjust boundaries to exclude disallowed prefixes/suffixes", "for", "prefix", "in", "IGNORE_PREFIX", ":", "if", "entity", ".", "startswith", "(", "prefix", ")", ":", "# print('%s removing %s' % (currenttext, prefix))", "start", "+=", "len", "(", "prefix", ")", "break", "for", "suffix", "in", "IGNORE_SUFFIX", ":", "if", "entity", ".", "endswith", "(", "suffix", ")", ":", "# print('%s removing %s' % (currenttext, suffix))", "end", "-=", "len", "(", "suffix", ")", "break", "# Return True if entity has been reduced to nothing by adjusting boundaries", "if", "start", ">=", "end", ":", "return", "True", "# Return True if adjusted entity is in the literal stoplist", "entity", "=", "entity", "[", "start", ":", "end", "]", "if", "entity", "in", "STOPLIST", ":", "return", "True", "# log.debug('Entity: %s', entity)", "for", "stop_re", "in", "STOP_RES", ":", "if", "re", ".", "search", "(", "stop_re", ",", "entity", ")", ":", "log", ".", "debug", "(", "'Killed: %s'", ",", "entity", ")", "return", "True" ]
Return True if the entity is in the stoplist.
[ "Return", "True", "if", "the", "entity", "is", "in", "the", "stoplist", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/nlp/cem.py#L518-L544
6,374
mcs07/ChemDataExtractor
chemdataextractor/cli/dict.py
_process_name
def _process_name(name): """Fix issues with Jochem names.""" # Unescape HTML entities name = unescape(name) # Remove bracketed stuff on the end name = NG_RE.sub('', name).strip() # Nomenclature groups name = END_RE.sub('', name).strip(', ') # Words name = RATIO_RE.sub('', name).strip(', ') # Ratios # Remove stuff off start name = START_RE.sub('', name).strip() # Remove balanced start and end brackets if none in between name = BRACKET_RE.sub('\g<1>', name) # Un-invert CAS style names comps = name.split(', ') if len(comps) == 2: if comps[1].endswith('-'): name = comps[0] name = '%s%s' % (comps[1], name) elif len(comps) > 2: name = comps[0] for i in range(1, len(comps)): if comps[i].endswith('-'): name = '%s%s' % (comps[i], name) else: name = '%s %s' % (name, comps[i]) return name
python
def _process_name(name): """Fix issues with Jochem names.""" # Unescape HTML entities name = unescape(name) # Remove bracketed stuff on the end name = NG_RE.sub('', name).strip() # Nomenclature groups name = END_RE.sub('', name).strip(', ') # Words name = RATIO_RE.sub('', name).strip(', ') # Ratios # Remove stuff off start name = START_RE.sub('', name).strip() # Remove balanced start and end brackets if none in between name = BRACKET_RE.sub('\g<1>', name) # Un-invert CAS style names comps = name.split(', ') if len(comps) == 2: if comps[1].endswith('-'): name = comps[0] name = '%s%s' % (comps[1], name) elif len(comps) > 2: name = comps[0] for i in range(1, len(comps)): if comps[i].endswith('-'): name = '%s%s' % (comps[i], name) else: name = '%s %s' % (name, comps[i]) return name
[ "def", "_process_name", "(", "name", ")", ":", "# Unescape HTML entities", "name", "=", "unescape", "(", "name", ")", "# Remove bracketed stuff on the end", "name", "=", "NG_RE", ".", "sub", "(", "''", ",", "name", ")", ".", "strip", "(", ")", "# Nomenclature groups", "name", "=", "END_RE", ".", "sub", "(", "''", ",", "name", ")", ".", "strip", "(", "', '", ")", "# Words", "name", "=", "RATIO_RE", ".", "sub", "(", "''", ",", "name", ")", ".", "strip", "(", "', '", ")", "# Ratios", "# Remove stuff off start", "name", "=", "START_RE", ".", "sub", "(", "''", ",", "name", ")", ".", "strip", "(", ")", "# Remove balanced start and end brackets if none in between", "name", "=", "BRACKET_RE", ".", "sub", "(", "'\\g<1>'", ",", "name", ")", "# Un-invert CAS style names", "comps", "=", "name", ".", "split", "(", "', '", ")", "if", "len", "(", "comps", ")", "==", "2", ":", "if", "comps", "[", "1", "]", ".", "endswith", "(", "'-'", ")", ":", "name", "=", "comps", "[", "0", "]", "name", "=", "'%s%s'", "%", "(", "comps", "[", "1", "]", ",", "name", ")", "elif", "len", "(", "comps", ")", ">", "2", ":", "name", "=", "comps", "[", "0", "]", "for", "i", "in", "range", "(", "1", ",", "len", "(", "comps", ")", ")", ":", "if", "comps", "[", "i", "]", ".", "endswith", "(", "'-'", ")", ":", "name", "=", "'%s%s'", "%", "(", "comps", "[", "i", "]", ",", "name", ")", "else", ":", "name", "=", "'%s %s'", "%", "(", "name", ",", "comps", "[", "i", "]", ")", "return", "name" ]
Fix issues with Jochem names.
[ "Fix", "issues", "with", "Jochem", "names", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/dict.py#L124-L154
6,375
mcs07/ChemDataExtractor
chemdataextractor/cli/dict.py
_get_variants
def _get_variants(name): """Return variants of chemical name.""" names = [name] oldname = name # Map greek words to unicode characters if DOT_GREEK_RE.search(name): wordname = name while True: m = DOT_GREEK_RE.search(wordname) if m: wordname = wordname[:m.start(1)-1] + m.group(1) + wordname[m.end(1)+1:] else: break symbolname = name while True: m = DOT_GREEK_RE.search(symbolname) if m: symbolname = symbolname[:m.start(1)-1] + GREEK_WORDS[m.group(1)] + symbolname[m.end(1)+1:] else: break names = [wordname, symbolname] else: while True: m = GREEK_RE.search(name) if m: name = name[:m.start(2)] + GREEK_WORDS[m.group(2)] + name[m.end(2):] else: break while True: m = UNAMBIGUOUS_GREEK_RE.search(name) if m: name = name[:m.start(1)] + GREEK_WORDS[m.group(1)] + name[m.end(1):] else: break if not name == oldname: names.append(name) newnames = [] for name in names: # If last word \d+, add variants with hyphen and no space preceding if NUM_END_RE.search(name): newnames.append(NUM_END_RE.sub('-\g<1>', name)) newnames.append(NUM_END_RE.sub('\g<1>', name)) # If last word [A-Za-z]\d* add variants with hyphen preceding. if ALPHANUM_END_RE.search(name): newnames.append(ALPHANUM_END_RE.sub('-\g<1>', name)) names.extend(newnames) return names
python
def _get_variants(name): """Return variants of chemical name.""" names = [name] oldname = name # Map greek words to unicode characters if DOT_GREEK_RE.search(name): wordname = name while True: m = DOT_GREEK_RE.search(wordname) if m: wordname = wordname[:m.start(1)-1] + m.group(1) + wordname[m.end(1)+1:] else: break symbolname = name while True: m = DOT_GREEK_RE.search(symbolname) if m: symbolname = symbolname[:m.start(1)-1] + GREEK_WORDS[m.group(1)] + symbolname[m.end(1)+1:] else: break names = [wordname, symbolname] else: while True: m = GREEK_RE.search(name) if m: name = name[:m.start(2)] + GREEK_WORDS[m.group(2)] + name[m.end(2):] else: break while True: m = UNAMBIGUOUS_GREEK_RE.search(name) if m: name = name[:m.start(1)] + GREEK_WORDS[m.group(1)] + name[m.end(1):] else: break if not name == oldname: names.append(name) newnames = [] for name in names: # If last word \d+, add variants with hyphen and no space preceding if NUM_END_RE.search(name): newnames.append(NUM_END_RE.sub('-\g<1>', name)) newnames.append(NUM_END_RE.sub('\g<1>', name)) # If last word [A-Za-z]\d* add variants with hyphen preceding. if ALPHANUM_END_RE.search(name): newnames.append(ALPHANUM_END_RE.sub('-\g<1>', name)) names.extend(newnames) return names
[ "def", "_get_variants", "(", "name", ")", ":", "names", "=", "[", "name", "]", "oldname", "=", "name", "# Map greek words to unicode characters", "if", "DOT_GREEK_RE", ".", "search", "(", "name", ")", ":", "wordname", "=", "name", "while", "True", ":", "m", "=", "DOT_GREEK_RE", ".", "search", "(", "wordname", ")", "if", "m", ":", "wordname", "=", "wordname", "[", ":", "m", ".", "start", "(", "1", ")", "-", "1", "]", "+", "m", ".", "group", "(", "1", ")", "+", "wordname", "[", "m", ".", "end", "(", "1", ")", "+", "1", ":", "]", "else", ":", "break", "symbolname", "=", "name", "while", "True", ":", "m", "=", "DOT_GREEK_RE", ".", "search", "(", "symbolname", ")", "if", "m", ":", "symbolname", "=", "symbolname", "[", ":", "m", ".", "start", "(", "1", ")", "-", "1", "]", "+", "GREEK_WORDS", "[", "m", ".", "group", "(", "1", ")", "]", "+", "symbolname", "[", "m", ".", "end", "(", "1", ")", "+", "1", ":", "]", "else", ":", "break", "names", "=", "[", "wordname", ",", "symbolname", "]", "else", ":", "while", "True", ":", "m", "=", "GREEK_RE", ".", "search", "(", "name", ")", "if", "m", ":", "name", "=", "name", "[", ":", "m", ".", "start", "(", "2", ")", "]", "+", "GREEK_WORDS", "[", "m", ".", "group", "(", "2", ")", "]", "+", "name", "[", "m", ".", "end", "(", "2", ")", ":", "]", "else", ":", "break", "while", "True", ":", "m", "=", "UNAMBIGUOUS_GREEK_RE", ".", "search", "(", "name", ")", "if", "m", ":", "name", "=", "name", "[", ":", "m", ".", "start", "(", "1", ")", "]", "+", "GREEK_WORDS", "[", "m", ".", "group", "(", "1", ")", "]", "+", "name", "[", "m", ".", "end", "(", "1", ")", ":", "]", "else", ":", "break", "if", "not", "name", "==", "oldname", ":", "names", ".", "append", "(", "name", ")", "newnames", "=", "[", "]", "for", "name", "in", "names", ":", "# If last word \\d+, add variants with hyphen and no space preceding", "if", "NUM_END_RE", ".", "search", "(", "name", ")", ":", "newnames", ".", "append", "(", "NUM_END_RE", ".", "sub", "(", "'-\\g<1>'", ",", 
"name", ")", ")", "newnames", ".", "append", "(", "NUM_END_RE", ".", "sub", "(", "'\\g<1>'", ",", "name", ")", ")", "# If last word [A-Za-z]\\d* add variants with hyphen preceding.", "if", "ALPHANUM_END_RE", ".", "search", "(", "name", ")", ":", "newnames", ".", "append", "(", "ALPHANUM_END_RE", ".", "sub", "(", "'-\\g<1>'", ",", "name", ")", ")", "names", ".", "extend", "(", "newnames", ")", "return", "names" ]
Return variants of chemical name.
[ "Return", "variants", "of", "chemical", "name", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/dict.py#L206-L252
6,376
mcs07/ChemDataExtractor
chemdataextractor/cli/dict.py
prepare_jochem
def prepare_jochem(ctx, jochem, output, csoutput): """Process and filter jochem file to produce list of names for dictionary.""" click.echo('chemdataextractor.dict.prepare_jochem') for i, line in enumerate(jochem): print('JC%s' % i) if line.startswith('TM '): if line.endswith(' @match=ci\n'): for tokens in _make_tokens(line[3:-11]): output.write(' '.join(tokens)) output.write('\n') else: for tokens in _make_tokens(line[3:-1]): csoutput.write(' '.join(tokens)) csoutput.write('\n')
python
def prepare_jochem(ctx, jochem, output, csoutput): """Process and filter jochem file to produce list of names for dictionary.""" click.echo('chemdataextractor.dict.prepare_jochem') for i, line in enumerate(jochem): print('JC%s' % i) if line.startswith('TM '): if line.endswith(' @match=ci\n'): for tokens in _make_tokens(line[3:-11]): output.write(' '.join(tokens)) output.write('\n') else: for tokens in _make_tokens(line[3:-1]): csoutput.write(' '.join(tokens)) csoutput.write('\n')
[ "def", "prepare_jochem", "(", "ctx", ",", "jochem", ",", "output", ",", "csoutput", ")", ":", "click", ".", "echo", "(", "'chemdataextractor.dict.prepare_jochem'", ")", "for", "i", ",", "line", "in", "enumerate", "(", "jochem", ")", ":", "print", "(", "'JC%s'", "%", "i", ")", "if", "line", ".", "startswith", "(", "'TM '", ")", ":", "if", "line", ".", "endswith", "(", "'\t@match=ci\\n'", ")", ":", "for", "tokens", "in", "_make_tokens", "(", "line", "[", "3", ":", "-", "11", "]", ")", ":", "output", ".", "write", "(", "' '", ".", "join", "(", "tokens", ")", ")", "output", ".", "write", "(", "'\\n'", ")", "else", ":", "for", "tokens", "in", "_make_tokens", "(", "line", "[", "3", ":", "-", "1", "]", ")", ":", "csoutput", ".", "write", "(", "' '", ".", "join", "(", "tokens", ")", ")", "csoutput", ".", "write", "(", "'\\n'", ")" ]
Process and filter jochem file to produce list of names for dictionary.
[ "Process", "and", "filter", "jochem", "file", "to", "produce", "list", "of", "names", "for", "dictionary", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/dict.py#L277-L290
6,377
mcs07/ChemDataExtractor
chemdataextractor/cli/dict.py
prepare_include
def prepare_include(ctx, include, output): """Process and filter include file to produce list of names for dictionary.""" click.echo('chemdataextractor.dict.prepare_include') for i, line in enumerate(include): print('IN%s' % i) for tokens in _make_tokens(line.strip()): output.write(u' '.join(tokens)) output.write(u'\n')
python
def prepare_include(ctx, include, output): """Process and filter include file to produce list of names for dictionary.""" click.echo('chemdataextractor.dict.prepare_include') for i, line in enumerate(include): print('IN%s' % i) for tokens in _make_tokens(line.strip()): output.write(u' '.join(tokens)) output.write(u'\n')
[ "def", "prepare_include", "(", "ctx", ",", "include", ",", "output", ")", ":", "click", ".", "echo", "(", "'chemdataextractor.dict.prepare_include'", ")", "for", "i", ",", "line", "in", "enumerate", "(", "include", ")", ":", "print", "(", "'IN%s'", "%", "i", ")", "for", "tokens", "in", "_make_tokens", "(", "line", ".", "strip", "(", ")", ")", ":", "output", ".", "write", "(", "u' '", ".", "join", "(", "tokens", ")", ")", "output", ".", "write", "(", "u'\\n'", ")" ]
Process and filter include file to produce list of names for dictionary.
[ "Process", "and", "filter", "include", "file", "to", "produce", "list", "of", "names", "for", "dictionary", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/dict.py#L297-L304
6,378
mcs07/ChemDataExtractor
chemdataextractor/cli/dict.py
build
def build(ctx, inputs, output, cs): """Build chemical name dictionary.""" click.echo('chemdataextractor.dict.build') dt = DictionaryTagger(lexicon=ChemLexicon(), case_sensitive=cs) names = [] for input in inputs: for line in input: tokens = line.split() names.append(tokens) dt.build(words=names) dt.save(output)
python
def build(ctx, inputs, output, cs): """Build chemical name dictionary.""" click.echo('chemdataextractor.dict.build') dt = DictionaryTagger(lexicon=ChemLexicon(), case_sensitive=cs) names = [] for input in inputs: for line in input: tokens = line.split() names.append(tokens) dt.build(words=names) dt.save(output)
[ "def", "build", "(", "ctx", ",", "inputs", ",", "output", ",", "cs", ")", ":", "click", ".", "echo", "(", "'chemdataextractor.dict.build'", ")", "dt", "=", "DictionaryTagger", "(", "lexicon", "=", "ChemLexicon", "(", ")", ",", "case_sensitive", "=", "cs", ")", "names", "=", "[", "]", "for", "input", "in", "inputs", ":", "for", "line", "in", "input", ":", "tokens", "=", "line", ".", "split", "(", ")", "names", ".", "append", "(", "tokens", ")", "dt", ".", "build", "(", "words", "=", "names", ")", "dt", ".", "save", "(", "output", ")" ]
Build chemical name dictionary.
[ "Build", "chemical", "name", "dictionary", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/dict.py#L312-L322
6,379
mcs07/ChemDataExtractor
chemdataextractor/reader/rsc.py
RscHtmlReader._parse_table_footnotes
def _parse_table_footnotes(self, fns, refs, specials): """Override to account for awkward RSC table footnotes.""" footnotes = [] for fn in fns: footnote = self._parse_text(fn, refs=refs, specials=specials, element_cls=Footnote)[0] footnote += Footnote('', id=fn.getprevious().get('id')) footnotes.append(footnote) return footnotes
python
def _parse_table_footnotes(self, fns, refs, specials): """Override to account for awkward RSC table footnotes.""" footnotes = [] for fn in fns: footnote = self._parse_text(fn, refs=refs, specials=specials, element_cls=Footnote)[0] footnote += Footnote('', id=fn.getprevious().get('id')) footnotes.append(footnote) return footnotes
[ "def", "_parse_table_footnotes", "(", "self", ",", "fns", ",", "refs", ",", "specials", ")", ":", "footnotes", "=", "[", "]", "for", "fn", "in", "fns", ":", "footnote", "=", "self", ".", "_parse_text", "(", "fn", ",", "refs", "=", "refs", ",", "specials", "=", "specials", ",", "element_cls", "=", "Footnote", ")", "[", "0", "]", "footnote", "+=", "Footnote", "(", "''", ",", "id", "=", "fn", ".", "getprevious", "(", ")", ".", "get", "(", "'id'", ")", ")", "footnotes", ".", "append", "(", "footnote", ")", "return", "footnotes" ]
Override to account for awkward RSC table footnotes.
[ "Override", "to", "account", "for", "awkward", "RSC", "table", "footnotes", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/reader/rsc.py#L44-L51
6,380
mcs07/ChemDataExtractor
scripts/melting_points.py
extract
def extract(): """Extract melting points from patents.""" Paragraph.parsers = [CompoundParser(), ChemicalLabelParser(), MpParser()] Table.parsers = [] patents = [] for root, dirs, files in os.walk('../examples/mp/grants'): for filename in files: if not filename.endswith('.xml'): continue path = os.path.abspath(os.path.join(root, filename)) size = os.path.getsize(path) patents.append((path, filename, size)) patents = sorted(patents, key=lambda p: p[2]) for path, filename, size in patents: print(path) shutil.copyfile(path, '../examples/mp/used/%s' % filename) with open(path) as f: d = Document.from_file(f) if os.path.isfile('../examples/mp/results/%s.json' % filename): continue records = [r.serialize() for r in d.records if len(r.melting_points) == 1] with open('../examples/mp/results/%s.json' % filename, 'w') as fout: fout.write(json.dumps(records, ensure_ascii=False, indent=2).encode('utf8'))
python
def extract(): """Extract melting points from patents.""" Paragraph.parsers = [CompoundParser(), ChemicalLabelParser(), MpParser()] Table.parsers = [] patents = [] for root, dirs, files in os.walk('../examples/mp/grants'): for filename in files: if not filename.endswith('.xml'): continue path = os.path.abspath(os.path.join(root, filename)) size = os.path.getsize(path) patents.append((path, filename, size)) patents = sorted(patents, key=lambda p: p[2]) for path, filename, size in patents: print(path) shutil.copyfile(path, '../examples/mp/used/%s' % filename) with open(path) as f: d = Document.from_file(f) if os.path.isfile('../examples/mp/results/%s.json' % filename): continue records = [r.serialize() for r in d.records if len(r.melting_points) == 1] with open('../examples/mp/results/%s.json' % filename, 'w') as fout: fout.write(json.dumps(records, ensure_ascii=False, indent=2).encode('utf8'))
[ "def", "extract", "(", ")", ":", "Paragraph", ".", "parsers", "=", "[", "CompoundParser", "(", ")", ",", "ChemicalLabelParser", "(", ")", ",", "MpParser", "(", ")", "]", "Table", ".", "parsers", "=", "[", "]", "patents", "=", "[", "]", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "'../examples/mp/grants'", ")", ":", "for", "filename", "in", "files", ":", "if", "not", "filename", ".", "endswith", "(", "'.xml'", ")", ":", "continue", "path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", ")", "size", "=", "os", ".", "path", ".", "getsize", "(", "path", ")", "patents", ".", "append", "(", "(", "path", ",", "filename", ",", "size", ")", ")", "patents", "=", "sorted", "(", "patents", ",", "key", "=", "lambda", "p", ":", "p", "[", "2", "]", ")", "for", "path", ",", "filename", ",", "size", "in", "patents", ":", "print", "(", "path", ")", "shutil", ".", "copyfile", "(", "path", ",", "'../examples/mp/used/%s'", "%", "filename", ")", "with", "open", "(", "path", ")", "as", "f", ":", "d", "=", "Document", ".", "from_file", "(", "f", ")", "if", "os", ".", "path", ".", "isfile", "(", "'../examples/mp/results/%s.json'", "%", "filename", ")", ":", "continue", "records", "=", "[", "r", ".", "serialize", "(", ")", "for", "r", "in", "d", ".", "records", "if", "len", "(", "r", ".", "melting_points", ")", "==", "1", "]", "with", "open", "(", "'../examples/mp/results/%s.json'", "%", "filename", ",", "'w'", ")", "as", "fout", ":", "fout", ".", "write", "(", "json", ".", "dumps", "(", "records", ",", "ensure_ascii", "=", "False", ",", "indent", "=", "2", ")", ".", "encode", "(", "'utf8'", ")", ")" ]
Extract melting points from patents.
[ "Extract", "melting", "points", "from", "patents", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/scripts/melting_points.py#L40-L64
6,381
mcs07/ChemDataExtractor
scripts/melting_points.py
make_sdf
def make_sdf(): """Produce SDF of ChemDataExtractor and Tetko sample melting points.""" # import numpy as np # my_results_by_inchikey = defaultdict(list) # result_dir = '../examples/mp/standardized_results' # fout = open('../examples/mp/sdf/chemdataextractor-melting-points.sdf', 'w') # writer = Chem.SDWriter(fout) # for filename in os.listdir(result_dir): # if not filename.endswith('.json'): # continue # patent_id = filename[:-5] # with open('%s/%s' % (result_dir, filename)) as fin: # results = json.loads(fin.read().decode('utf8')) # for m in results: # m['patent_id'] = patent_id # mol = Chem.MolFromSmiles(m['smiles']) # mol.SetProp(b'_Name', m['name'].encode('utf-8')) # mol.SetProp(b'OriginalText', m['value'].encode('utf-8')) # mol.SetProp(b'Value', b'%s' % m['float_value']) # mol.SetProp(b'Patent', m['patent_id'].encode('utf-8')) # mol.SetProp(b'SMILES', Chem.MolToSmiles(mol, isomericSmiles=True)) # mol.SetProp(b'QuantityType', b'MeltingPoint') # mol.SetProp(b'StdInChIKey', Chem.InchiToInchiKey(Chem.MolToInchi(mol))) # if not mol: # print('WARNING: %s' % m) # return # AllChem.Compute2DCoords(mol) # writer.write(mol) # my_results_by_inchikey[m['inchikey']].append(m) # writer.close() # fout.close() # # fout = open('../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf', 'w') # writer = Chem.SDWriter(fout) # for inchikey, results in my_results_by_inchikey.items(): # if len(results) == 1: # m = results[0] # elif len(results) == 2: # sorted_results = sorted(results, key=lambda x: x['float_value']) # m = sorted_results[0] # else: # median = np.median([r['float_value'] for r in results]) # chosen = results[0] # for result in results: # if result['float_value'] - median < chosen['float_value'] < median: # m = result # mol = Chem.MolFromSmiles(m['smiles']) # mol.SetProp(b'_Name', m['name'].encode('utf-8')) # mol.SetProp(b'OriginalText', m['value'].encode('utf-8')) # mol.SetProp(b'Value', b'%s' % m['float_value']) # mol.SetProp(b'Patent', 
m['patent_id'].encode('utf-8')) # mol.SetProp(b'SMILES', Chem.MolToSmiles(mol, isomericSmiles=True)) # mol.SetProp(b'QuantityType', b'MeltingPoint') # mol.SetProp(b'StdInChIKey', Chem.InchiToInchiKey(Chem.MolToInchi(mol))) # if not mol: # print('WARNING: %s' % m) # return # AllChem.Compute2DCoords(mol) # writer.write(mol) with open('../examples/mp/sdf/chemdataextractor-melting-points.sdf', 'rb') as f_in, gzip.open('../examples/mp/sdf/chemdataextractor-melting-points.sdf.gz', 'wb') as f_out: shutil.copyfileobj(f_in, f_out) with open('../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf', 'rb') as f_in, gzip.open('../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf.gz', 'wb') as f_out: shutil.copyfileobj(f_in, f_out)
python
def make_sdf(): """Produce SDF of ChemDataExtractor and Tetko sample melting points.""" # import numpy as np # my_results_by_inchikey = defaultdict(list) # result_dir = '../examples/mp/standardized_results' # fout = open('../examples/mp/sdf/chemdataextractor-melting-points.sdf', 'w') # writer = Chem.SDWriter(fout) # for filename in os.listdir(result_dir): # if not filename.endswith('.json'): # continue # patent_id = filename[:-5] # with open('%s/%s' % (result_dir, filename)) as fin: # results = json.loads(fin.read().decode('utf8')) # for m in results: # m['patent_id'] = patent_id # mol = Chem.MolFromSmiles(m['smiles']) # mol.SetProp(b'_Name', m['name'].encode('utf-8')) # mol.SetProp(b'OriginalText', m['value'].encode('utf-8')) # mol.SetProp(b'Value', b'%s' % m['float_value']) # mol.SetProp(b'Patent', m['patent_id'].encode('utf-8')) # mol.SetProp(b'SMILES', Chem.MolToSmiles(mol, isomericSmiles=True)) # mol.SetProp(b'QuantityType', b'MeltingPoint') # mol.SetProp(b'StdInChIKey', Chem.InchiToInchiKey(Chem.MolToInchi(mol))) # if not mol: # print('WARNING: %s' % m) # return # AllChem.Compute2DCoords(mol) # writer.write(mol) # my_results_by_inchikey[m['inchikey']].append(m) # writer.close() # fout.close() # # fout = open('../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf', 'w') # writer = Chem.SDWriter(fout) # for inchikey, results in my_results_by_inchikey.items(): # if len(results) == 1: # m = results[0] # elif len(results) == 2: # sorted_results = sorted(results, key=lambda x: x['float_value']) # m = sorted_results[0] # else: # median = np.median([r['float_value'] for r in results]) # chosen = results[0] # for result in results: # if result['float_value'] - median < chosen['float_value'] < median: # m = result # mol = Chem.MolFromSmiles(m['smiles']) # mol.SetProp(b'_Name', m['name'].encode('utf-8')) # mol.SetProp(b'OriginalText', m['value'].encode('utf-8')) # mol.SetProp(b'Value', b'%s' % m['float_value']) # mol.SetProp(b'Patent', 
m['patent_id'].encode('utf-8')) # mol.SetProp(b'SMILES', Chem.MolToSmiles(mol, isomericSmiles=True)) # mol.SetProp(b'QuantityType', b'MeltingPoint') # mol.SetProp(b'StdInChIKey', Chem.InchiToInchiKey(Chem.MolToInchi(mol))) # if not mol: # print('WARNING: %s' % m) # return # AllChem.Compute2DCoords(mol) # writer.write(mol) with open('../examples/mp/sdf/chemdataextractor-melting-points.sdf', 'rb') as f_in, gzip.open('../examples/mp/sdf/chemdataextractor-melting-points.sdf.gz', 'wb') as f_out: shutil.copyfileobj(f_in, f_out) with open('../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf', 'rb') as f_in, gzip.open('../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf.gz', 'wb') as f_out: shutil.copyfileobj(f_in, f_out)
[ "def", "make_sdf", "(", ")", ":", "# import numpy as np", "# my_results_by_inchikey = defaultdict(list)", "# result_dir = '../examples/mp/standardized_results'", "# fout = open('../examples/mp/sdf/chemdataextractor-melting-points.sdf', 'w')", "# writer = Chem.SDWriter(fout)", "# for filename in os.listdir(result_dir):", "# if not filename.endswith('.json'):", "# continue", "# patent_id = filename[:-5]", "# with open('%s/%s' % (result_dir, filename)) as fin:", "# results = json.loads(fin.read().decode('utf8'))", "# for m in results:", "# m['patent_id'] = patent_id", "# mol = Chem.MolFromSmiles(m['smiles'])", "# mol.SetProp(b'_Name', m['name'].encode('utf-8'))", "# mol.SetProp(b'OriginalText', m['value'].encode('utf-8'))", "# mol.SetProp(b'Value', b'%s' % m['float_value'])", "# mol.SetProp(b'Patent', m['patent_id'].encode('utf-8'))", "# mol.SetProp(b'SMILES', Chem.MolToSmiles(mol, isomericSmiles=True))", "# mol.SetProp(b'QuantityType', b'MeltingPoint')", "# mol.SetProp(b'StdInChIKey', Chem.InchiToInchiKey(Chem.MolToInchi(mol)))", "# if not mol:", "# print('WARNING: %s' % m)", "# return", "# AllChem.Compute2DCoords(mol)", "# writer.write(mol)", "# my_results_by_inchikey[m['inchikey']].append(m)", "# writer.close()", "# fout.close()", "#", "# fout = open('../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf', 'w')", "# writer = Chem.SDWriter(fout)", "# for inchikey, results in my_results_by_inchikey.items():", "# if len(results) == 1:", "# m = results[0]", "# elif len(results) == 2:", "# sorted_results = sorted(results, key=lambda x: x['float_value'])", "# m = sorted_results[0]", "# else:", "# median = np.median([r['float_value'] for r in results])", "# chosen = results[0]", "# for result in results:", "# if result['float_value'] - median < chosen['float_value'] < median:", "# m = result", "# mol = Chem.MolFromSmiles(m['smiles'])", "# mol.SetProp(b'_Name', m['name'].encode('utf-8'))", "# mol.SetProp(b'OriginalText', m['value'].encode('utf-8'))", "# 
mol.SetProp(b'Value', b'%s' % m['float_value'])", "# mol.SetProp(b'Patent', m['patent_id'].encode('utf-8'))", "# mol.SetProp(b'SMILES', Chem.MolToSmiles(mol, isomericSmiles=True))", "# mol.SetProp(b'QuantityType', b'MeltingPoint')", "# mol.SetProp(b'StdInChIKey', Chem.InchiToInchiKey(Chem.MolToInchi(mol)))", "# if not mol:", "# print('WARNING: %s' % m)", "# return", "# AllChem.Compute2DCoords(mol)", "# writer.write(mol)", "with", "open", "(", "'../examples/mp/sdf/chemdataextractor-melting-points.sdf'", ",", "'rb'", ")", "as", "f_in", ",", "gzip", ".", "open", "(", "'../examples/mp/sdf/chemdataextractor-melting-points.sdf.gz'", ",", "'wb'", ")", "as", "f_out", ":", "shutil", ".", "copyfileobj", "(", "f_in", ",", "f_out", ")", "with", "open", "(", "'../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf'", ",", "'rb'", ")", "as", "f_in", ",", "gzip", ".", "open", "(", "'../examples/mp/sdf/chemdataextractor-melting-points-filtered.sdf.gz'", ",", "'wb'", ")", "as", "f_out", ":", "shutil", ".", "copyfileobj", "(", "f_in", ",", "f_out", ")" ]
Produce SDF of ChemDataExtractor and Tetko sample melting points.
[ "Produce", "SDF", "of", "ChemDataExtractor", "and", "Tetko", "sample", "melting", "points", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/scripts/melting_points.py#L555-L620
6,382
mcs07/ChemDataExtractor
chemdataextractor/cli/__init__.py
cli
def cli(ctx, verbose): """ChemDataExtractor command line interface.""" log.debug('ChemDataExtractor v%s' % __version__) logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO) logging.getLogger('requests').setLevel(logging.WARN) ctx.obj = {}
python
def cli(ctx, verbose): """ChemDataExtractor command line interface.""" log.debug('ChemDataExtractor v%s' % __version__) logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO) logging.getLogger('requests').setLevel(logging.WARN) ctx.obj = {}
[ "def", "cli", "(", "ctx", ",", "verbose", ")", ":", "log", ".", "debug", "(", "'ChemDataExtractor v%s'", "%", "__version__", ")", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "DEBUG", "if", "verbose", "else", "logging", ".", "INFO", ")", "logging", ".", "getLogger", "(", "'requests'", ")", ".", "setLevel", "(", "logging", ".", "WARN", ")", "ctx", ".", "obj", "=", "{", "}" ]
ChemDataExtractor command line interface.
[ "ChemDataExtractor", "command", "line", "interface", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/__init__.py#L34-L39
6,383
mcs07/ChemDataExtractor
chemdataextractor/cli/__init__.py
extract
def extract(ctx, input, output): """Run ChemDataExtractor on a document.""" log.info('chemdataextractor.extract') log.info('Reading %s' % input.name) doc = Document.from_file(input, fname=input.name) records = [record.serialize(primitive=True) for record in doc.records] jsonstring = json.dumps(records, indent=2, ensure_ascii=False) output.write(jsonstring)
python
def extract(ctx, input, output): """Run ChemDataExtractor on a document.""" log.info('chemdataextractor.extract') log.info('Reading %s' % input.name) doc = Document.from_file(input, fname=input.name) records = [record.serialize(primitive=True) for record in doc.records] jsonstring = json.dumps(records, indent=2, ensure_ascii=False) output.write(jsonstring)
[ "def", "extract", "(", "ctx", ",", "input", ",", "output", ")", ":", "log", ".", "info", "(", "'chemdataextractor.extract'", ")", "log", ".", "info", "(", "'Reading %s'", "%", "input", ".", "name", ")", "doc", "=", "Document", ".", "from_file", "(", "input", ",", "fname", "=", "input", ".", "name", ")", "records", "=", "[", "record", ".", "serialize", "(", "primitive", "=", "True", ")", "for", "record", "in", "doc", ".", "records", "]", "jsonstring", "=", "json", ".", "dumps", "(", "records", ",", "indent", "=", "2", ",", "ensure_ascii", "=", "False", ")", "output", ".", "write", "(", "jsonstring", ")" ]
Run ChemDataExtractor on a document.
[ "Run", "ChemDataExtractor", "on", "a", "document", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/__init__.py#L46-L53
6,384
mcs07/ChemDataExtractor
chemdataextractor/cli/__init__.py
read
def read(ctx, input, output): """Output processed document elements.""" log.info('chemdataextractor.read') log.info('Reading %s' % input.name) doc = Document.from_file(input) for element in doc.elements: output.write(u'%s : %s\n=====\n' % (element.__class__.__name__, six.text_type(element)))
python
def read(ctx, input, output): """Output processed document elements.""" log.info('chemdataextractor.read') log.info('Reading %s' % input.name) doc = Document.from_file(input) for element in doc.elements: output.write(u'%s : %s\n=====\n' % (element.__class__.__name__, six.text_type(element)))
[ "def", "read", "(", "ctx", ",", "input", ",", "output", ")", ":", "log", ".", "info", "(", "'chemdataextractor.read'", ")", "log", ".", "info", "(", "'Reading %s'", "%", "input", ".", "name", ")", "doc", "=", "Document", ".", "from_file", "(", "input", ")", "for", "element", "in", "doc", ".", "elements", ":", "output", ".", "write", "(", "u'%s : %s\\n=====\\n'", "%", "(", "element", ".", "__class__", ".", "__name__", ",", "six", ".", "text_type", "(", "element", ")", ")", ")" ]
Output processed document elements.
[ "Output", "processed", "document", "elements", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/cli/__init__.py#L60-L66
6,385
mcs07/ChemDataExtractor
chemdataextractor/text/chem.py
extract_smiles
def extract_smiles(s): """Return a list of SMILES identifiers extracted from the string.""" # TODO: This still gets a lot of false positives. smiles = [] for t in s.split(): if len(t) > 2 and SMILES_RE.match(t) and not t.endswith('.') and bracket_level(t) == 0: smiles.append(t) return smiles
python
def extract_smiles(s): """Return a list of SMILES identifiers extracted from the string.""" # TODO: This still gets a lot of false positives. smiles = [] for t in s.split(): if len(t) > 2 and SMILES_RE.match(t) and not t.endswith('.') and bracket_level(t) == 0: smiles.append(t) return smiles
[ "def", "extract_smiles", "(", "s", ")", ":", "# TODO: This still gets a lot of false positives.", "smiles", "=", "[", "]", "for", "t", "in", "s", ".", "split", "(", ")", ":", "if", "len", "(", "t", ")", ">", "2", "and", "SMILES_RE", ".", "match", "(", "t", ")", "and", "not", "t", ".", "endswith", "(", "'.'", ")", "and", "bracket_level", "(", "t", ")", "==", "0", ":", "smiles", ".", "append", "(", "t", ")", "return", "smiles" ]
Return a list of SMILES identifiers extracted from the string.
[ "Return", "a", "list", "of", "SMILES", "identifiers", "extracted", "from", "the", "string", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/text/chem.py#L155-L162
6,386
mcs07/ChemDataExtractor
chemdataextractor/biblio/person.py
PersonName.could_be
def could_be(self, other): """Return True if the other PersonName is not explicitly inconsistent.""" # TODO: Some suffix and title differences should be allowed if type(other) is not type(self): return NotImplemented if self == other: return True for attr in ['title', 'firstname', 'middlename', 'nickname', 'prefix', 'lastname', 'suffix']: if attr not in self or attr not in other: continue puncmap = dict((ord(char), None) for char in string.punctuation) s = self[attr].lower().translate(puncmap) o = other[attr].lower().translate(puncmap) if s == o: continue if attr in {'firstname', 'middlename', 'lastname'}: if (({len(comp) for comp in s.split()} == {1} and [el[0] for el in o.split()] == s.split()) or ({len(comp) for comp in o.split()} == {1} and [el[0] for el in s.split()] == o.split())): continue return False return True
python
def could_be(self, other): """Return True if the other PersonName is not explicitly inconsistent.""" # TODO: Some suffix and title differences should be allowed if type(other) is not type(self): return NotImplemented if self == other: return True for attr in ['title', 'firstname', 'middlename', 'nickname', 'prefix', 'lastname', 'suffix']: if attr not in self or attr not in other: continue puncmap = dict((ord(char), None) for char in string.punctuation) s = self[attr].lower().translate(puncmap) o = other[attr].lower().translate(puncmap) if s == o: continue if attr in {'firstname', 'middlename', 'lastname'}: if (({len(comp) for comp in s.split()} == {1} and [el[0] for el in o.split()] == s.split()) or ({len(comp) for comp in o.split()} == {1} and [el[0] for el in s.split()] == o.split())): continue return False return True
[ "def", "could_be", "(", "self", ",", "other", ")", ":", "# TODO: Some suffix and title differences should be allowed", "if", "type", "(", "other", ")", "is", "not", "type", "(", "self", ")", ":", "return", "NotImplemented", "if", "self", "==", "other", ":", "return", "True", "for", "attr", "in", "[", "'title'", ",", "'firstname'", ",", "'middlename'", ",", "'nickname'", ",", "'prefix'", ",", "'lastname'", ",", "'suffix'", "]", ":", "if", "attr", "not", "in", "self", "or", "attr", "not", "in", "other", ":", "continue", "puncmap", "=", "dict", "(", "(", "ord", "(", "char", ")", ",", "None", ")", "for", "char", "in", "string", ".", "punctuation", ")", "s", "=", "self", "[", "attr", "]", ".", "lower", "(", ")", ".", "translate", "(", "puncmap", ")", "o", "=", "other", "[", "attr", "]", ".", "lower", "(", ")", ".", "translate", "(", "puncmap", ")", "if", "s", "==", "o", ":", "continue", "if", "attr", "in", "{", "'firstname'", ",", "'middlename'", ",", "'lastname'", "}", ":", "if", "(", "(", "{", "len", "(", "comp", ")", "for", "comp", "in", "s", ".", "split", "(", ")", "}", "==", "{", "1", "}", "and", "[", "el", "[", "0", "]", "for", "el", "in", "o", ".", "split", "(", ")", "]", "==", "s", ".", "split", "(", ")", ")", "or", "(", "{", "len", "(", "comp", ")", "for", "comp", "in", "o", ".", "split", "(", ")", "}", "==", "{", "1", "}", "and", "[", "el", "[", "0", "]", "for", "el", "in", "s", ".", "split", "(", ")", "]", "==", "o", ".", "split", "(", ")", ")", ")", ":", "continue", "return", "False", "return", "True" ]
Return True if the other PersonName is not explicitly inconsistent.
[ "Return", "True", "if", "the", "other", "PersonName", "is", "not", "explicitly", "inconsistent", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/person.py#L125-L145
6,387
mcs07/ChemDataExtractor
chemdataextractor/biblio/person.py
PersonName._is_suffix
def _is_suffix(self, t): """Return true if t is a suffix.""" return t not in NOT_SUFFIX and (t.replace('.', '') in SUFFIXES or t.replace('.', '') in SUFFIXES_LOWER)
python
def _is_suffix(self, t): """Return true if t is a suffix.""" return t not in NOT_SUFFIX and (t.replace('.', '') in SUFFIXES or t.replace('.', '') in SUFFIXES_LOWER)
[ "def", "_is_suffix", "(", "self", ",", "t", ")", ":", "return", "t", "not", "in", "NOT_SUFFIX", "and", "(", "t", ".", "replace", "(", "'.'", ",", "''", ")", "in", "SUFFIXES", "or", "t", ".", "replace", "(", "'.'", ",", "''", ")", "in", "SUFFIXES_LOWER", ")" ]
Return true if t is a suffix.
[ "Return", "true", "if", "t", "is", "a", "suffix", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/person.py#L170-L172
6,388
mcs07/ChemDataExtractor
chemdataextractor/biblio/person.py
PersonName._tokenize
def _tokenize(self, comps): """Split name on spaces, unless inside curly brackets or quotes.""" ps = [] for comp in comps: ps.extend([c.strip(' ,') for c in re.split(r'\s+(?=[^{}]*(?:\{|$))', comp)]) return [p for p in ps if p]
python
def _tokenize(self, comps): """Split name on spaces, unless inside curly brackets or quotes.""" ps = [] for comp in comps: ps.extend([c.strip(' ,') for c in re.split(r'\s+(?=[^{}]*(?:\{|$))', comp)]) return [p for p in ps if p]
[ "def", "_tokenize", "(", "self", ",", "comps", ")", ":", "ps", "=", "[", "]", "for", "comp", "in", "comps", ":", "ps", ".", "extend", "(", "[", "c", ".", "strip", "(", "' ,'", ")", "for", "c", "in", "re", ".", "split", "(", "r'\\s+(?=[^{}]*(?:\\{|$))'", ",", "comp", ")", "]", ")", "return", "[", "p", "for", "p", "in", "ps", "if", "p", "]" ]
Split name on spaces, unless inside curly brackets or quotes.
[ "Split", "name", "on", "spaces", "unless", "inside", "curly", "brackets", "or", "quotes", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/person.py#L174-L179
6,389
mcs07/ChemDataExtractor
chemdataextractor/biblio/person.py
PersonName._clean
def _clean(self, t, capitalize=None): """Convert to normalized unicode and strip trailing full stops.""" if self._from_bibtex: t = latex_to_unicode(t, capitalize=capitalize) t = ' '.join([el.rstrip('.') if el.count('.') == 1 else el for el in t.split()]) return t
python
def _clean(self, t, capitalize=None): """Convert to normalized unicode and strip trailing full stops.""" if self._from_bibtex: t = latex_to_unicode(t, capitalize=capitalize) t = ' '.join([el.rstrip('.') if el.count('.') == 1 else el for el in t.split()]) return t
[ "def", "_clean", "(", "self", ",", "t", ",", "capitalize", "=", "None", ")", ":", "if", "self", ".", "_from_bibtex", ":", "t", "=", "latex_to_unicode", "(", "t", ",", "capitalize", "=", "capitalize", ")", "t", "=", "' '", ".", "join", "(", "[", "el", ".", "rstrip", "(", "'.'", ")", "if", "el", ".", "count", "(", "'.'", ")", "==", "1", "else", "el", "for", "el", "in", "t", ".", "split", "(", ")", "]", ")", "return", "t" ]
Convert to normalized unicode and strip trailing full stops.
[ "Convert", "to", "normalized", "unicode", "and", "strip", "trailing", "full", "stops", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/person.py#L181-L186
6,390
mcs07/ChemDataExtractor
chemdataextractor/biblio/person.py
PersonName._strip
def _strip(self, tokens, criteria, prop, rev=False): """Strip off contiguous tokens from the start or end of the list that meet the criteria.""" num = len(tokens) res = [] for i, token in enumerate(reversed(tokens) if rev else tokens): if criteria(token) and num > i + 1: res.insert(0, tokens.pop()) if rev else res.append(tokens.pop(0)) else: break if res: self[prop] = self._clean(' '.join(res)) return tokens
python
def _strip(self, tokens, criteria, prop, rev=False): """Strip off contiguous tokens from the start or end of the list that meet the criteria.""" num = len(tokens) res = [] for i, token in enumerate(reversed(tokens) if rev else tokens): if criteria(token) and num > i + 1: res.insert(0, tokens.pop()) if rev else res.append(tokens.pop(0)) else: break if res: self[prop] = self._clean(' '.join(res)) return tokens
[ "def", "_strip", "(", "self", ",", "tokens", ",", "criteria", ",", "prop", ",", "rev", "=", "False", ")", ":", "num", "=", "len", "(", "tokens", ")", "res", "=", "[", "]", "for", "i", ",", "token", "in", "enumerate", "(", "reversed", "(", "tokens", ")", "if", "rev", "else", "tokens", ")", ":", "if", "criteria", "(", "token", ")", "and", "num", ">", "i", "+", "1", ":", "res", ".", "insert", "(", "0", ",", "tokens", ".", "pop", "(", ")", ")", "if", "rev", "else", "res", ".", "append", "(", "tokens", ".", "pop", "(", "0", ")", ")", "else", ":", "break", "if", "res", ":", "self", "[", "prop", "]", "=", "self", ".", "_clean", "(", "' '", ".", "join", "(", "res", ")", ")", "return", "tokens" ]
Strip off contiguous tokens from the start or end of the list that meet the criteria.
[ "Strip", "off", "contiguous", "tokens", "from", "the", "start", "or", "end", "of", "the", "list", "that", "meet", "the", "criteria", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/person.py#L188-L199
6,391
mcs07/ChemDataExtractor
chemdataextractor/reader/markup.py
LxmlReader._parse_text
def _parse_text(self, el, refs=None, specials=None, element_cls=Paragraph): """Like _parse_element but ensure a single element.""" if specials is None: specials = {} if refs is None: refs = {} elements = self._parse_element_r(el, specials=specials, refs=refs, element_cls=element_cls) # This occurs if the input element is self-closing... (some table td in NLM XML) if not elements: return [element_cls('')] element = elements[0] for next_element in elements[1:]: element += element_cls(' ') + next_element return [element]
python
def _parse_text(self, el, refs=None, specials=None, element_cls=Paragraph): """Like _parse_element but ensure a single element.""" if specials is None: specials = {} if refs is None: refs = {} elements = self._parse_element_r(el, specials=specials, refs=refs, element_cls=element_cls) # This occurs if the input element is self-closing... (some table td in NLM XML) if not elements: return [element_cls('')] element = elements[0] for next_element in elements[1:]: element += element_cls(' ') + next_element return [element]
[ "def", "_parse_text", "(", "self", ",", "el", ",", "refs", "=", "None", ",", "specials", "=", "None", ",", "element_cls", "=", "Paragraph", ")", ":", "if", "specials", "is", "None", ":", "specials", "=", "{", "}", "if", "refs", "is", "None", ":", "refs", "=", "{", "}", "elements", "=", "self", ".", "_parse_element_r", "(", "el", ",", "specials", "=", "specials", ",", "refs", "=", "refs", ",", "element_cls", "=", "element_cls", ")", "# This occurs if the input element is self-closing... (some table td in NLM XML)", "if", "not", "elements", ":", "return", "[", "element_cls", "(", "''", ")", "]", "element", "=", "elements", "[", "0", "]", "for", "next_element", "in", "elements", "[", "1", ":", "]", ":", "element", "+=", "element_cls", "(", "' '", ")", "+", "next_element", "return", "[", "element", "]" ]
Like _parse_element but ensure a single element.
[ "Like", "_parse_element", "but", "ensure", "a", "single", "element", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/reader/markup.py#L112-L125
6,392
mcs07/ChemDataExtractor
chemdataextractor/reader/markup.py
LxmlReader._parse_reference
def _parse_reference(self, el): """Return reference ID from href or text content.""" if '#' in el.get('href', ''): return [el.get('href').split('#', 1)[1]] elif 'rid' in el.attrib: return [el.attrib['rid']] elif 'idref' in el.attrib: return [el.attrib['idref']] else: return [''.join(el.itertext()).strip()]
python
def _parse_reference(self, el): """Return reference ID from href or text content.""" if '#' in el.get('href', ''): return [el.get('href').split('#', 1)[1]] elif 'rid' in el.attrib: return [el.attrib['rid']] elif 'idref' in el.attrib: return [el.attrib['idref']] else: return [''.join(el.itertext()).strip()]
[ "def", "_parse_reference", "(", "self", ",", "el", ")", ":", "if", "'#'", "in", "el", ".", "get", "(", "'href'", ",", "''", ")", ":", "return", "[", "el", ".", "get", "(", "'href'", ")", ".", "split", "(", "'#'", ",", "1", ")", "[", "1", "]", "]", "elif", "'rid'", "in", "el", ".", "attrib", ":", "return", "[", "el", ".", "attrib", "[", "'rid'", "]", "]", "elif", "'idref'", "in", "el", ".", "attrib", ":", "return", "[", "el", ".", "attrib", "[", "'idref'", "]", "]", "else", ":", "return", "[", "''", ".", "join", "(", "el", ".", "itertext", "(", ")", ")", ".", "strip", "(", ")", "]" ]
Return reference ID from href or text content.
[ "Return", "reference", "ID", "from", "href", "or", "text", "content", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/reader/markup.py#L163-L172
6,393
mcs07/ChemDataExtractor
chemdataextractor/reader/markup.py
LxmlReader._is_inline
def _is_inline(self, element): """Return True if an element is inline.""" if element.tag not in {etree.Comment, etree.ProcessingInstruction} and element.tag.lower() in self.inline_elements: return True return False
python
def _is_inline(self, element): """Return True if an element is inline.""" if element.tag not in {etree.Comment, etree.ProcessingInstruction} and element.tag.lower() in self.inline_elements: return True return False
[ "def", "_is_inline", "(", "self", ",", "element", ")", ":", "if", "element", ".", "tag", "not", "in", "{", "etree", ".", "Comment", ",", "etree", ".", "ProcessingInstruction", "}", "and", "element", ".", "tag", ".", "lower", "(", ")", "in", "self", ".", "inline_elements", ":", "return", "True", "return", "False" ]
Return True if an element is inline.
[ "Return", "True", "if", "an", "element", "is", "inline", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/reader/markup.py#L193-L197
6,394
mcs07/ChemDataExtractor
chemdataextractor/biblio/bibtex.py
BibtexParser._next_token
def _next_token(self, skipws=True): """Increment _token to the next token and return it.""" self._token = next(self._tokens).group(0) return self._next_token() if skipws and self._token.isspace() else self._token
python
def _next_token(self, skipws=True): """Increment _token to the next token and return it.""" self._token = next(self._tokens).group(0) return self._next_token() if skipws and self._token.isspace() else self._token
[ "def", "_next_token", "(", "self", ",", "skipws", "=", "True", ")", ":", "self", ".", "_token", "=", "next", "(", "self", ".", "_tokens", ")", ".", "group", "(", "0", ")", "return", "self", ".", "_next_token", "(", ")", "if", "skipws", "and", "self", ".", "_token", ".", "isspace", "(", ")", "else", "self", ".", "_token" ]
Increment _token to the next token and return it.
[ "Increment", "_token", "to", "the", "next", "token", "and", "return", "it", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/bibtex.py#L65-L68
6,395
mcs07/ChemDataExtractor
chemdataextractor/biblio/bibtex.py
BibtexParser._parse_entry
def _parse_entry(self): """Parse an entry.""" entry_type = self._next_token().lower() if entry_type == 'string': self._parse_string() elif entry_type not in ['comment', 'preamble']: self._parse_record(entry_type)
python
def _parse_entry(self): """Parse an entry.""" entry_type = self._next_token().lower() if entry_type == 'string': self._parse_string() elif entry_type not in ['comment', 'preamble']: self._parse_record(entry_type)
[ "def", "_parse_entry", "(", "self", ")", ":", "entry_type", "=", "self", ".", "_next_token", "(", ")", ".", "lower", "(", ")", "if", "entry_type", "==", "'string'", ":", "self", ".", "_parse_string", "(", ")", "elif", "entry_type", "not", "in", "[", "'comment'", ",", "'preamble'", "]", ":", "self", ".", "_parse_record", "(", "entry_type", ")" ]
Parse an entry.
[ "Parse", "an", "entry", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/bibtex.py#L80-L86
6,396
mcs07/ChemDataExtractor
chemdataextractor/biblio/bibtex.py
BibtexParser._parse_string
def _parse_string(self): """Parse a string entry and store the definition.""" if self._next_token() in ['{', '(']: field = self._parse_field() if field: self.definitions[field[0]] = field[1]
python
def _parse_string(self): """Parse a string entry and store the definition.""" if self._next_token() in ['{', '(']: field = self._parse_field() if field: self.definitions[field[0]] = field[1]
[ "def", "_parse_string", "(", "self", ")", ":", "if", "self", ".", "_next_token", "(", ")", "in", "[", "'{'", ",", "'('", "]", ":", "field", "=", "self", ".", "_parse_field", "(", ")", "if", "field", ":", "self", ".", "definitions", "[", "field", "[", "0", "]", "]", "=", "field", "[", "1", "]" ]
Parse a string entry and store the definition.
[ "Parse", "a", "string", "entry", "and", "store", "the", "definition", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/bibtex.py#L88-L93
6,397
mcs07/ChemDataExtractor
chemdataextractor/biblio/bibtex.py
BibtexParser._parse_record
def _parse_record(self, record_type): """Parse a record.""" if self._next_token() in ['{', '(']: key = self._next_token() self.records[key] = { u'id': key, u'type': record_type.lower() } if self._next_token() == ',': while True: field = self._parse_field() if field: k, v = field[0], field[1] if k in self.keynorms: k = self.keynorms[k] if k == 'pages': v = v.replace(' ', '').replace('--', '-') if k == 'author' or k == 'editor': v = self.parse_names(v) # Recapitalizing the title generally causes more problems than it solves # elif k == 'title': # v = latex_to_unicode(v, capitalize='title') else: v = latex_to_unicode(v) self.records[key][k] = v if self._token != ',': break
python
def _parse_record(self, record_type): """Parse a record.""" if self._next_token() in ['{', '(']: key = self._next_token() self.records[key] = { u'id': key, u'type': record_type.lower() } if self._next_token() == ',': while True: field = self._parse_field() if field: k, v = field[0], field[1] if k in self.keynorms: k = self.keynorms[k] if k == 'pages': v = v.replace(' ', '').replace('--', '-') if k == 'author' or k == 'editor': v = self.parse_names(v) # Recapitalizing the title generally causes more problems than it solves # elif k == 'title': # v = latex_to_unicode(v, capitalize='title') else: v = latex_to_unicode(v) self.records[key][k] = v if self._token != ',': break
[ "def", "_parse_record", "(", "self", ",", "record_type", ")", ":", "if", "self", ".", "_next_token", "(", ")", "in", "[", "'{'", ",", "'('", "]", ":", "key", "=", "self", ".", "_next_token", "(", ")", "self", ".", "records", "[", "key", "]", "=", "{", "u'id'", ":", "key", ",", "u'type'", ":", "record_type", ".", "lower", "(", ")", "}", "if", "self", ".", "_next_token", "(", ")", "==", "','", ":", "while", "True", ":", "field", "=", "self", ".", "_parse_field", "(", ")", "if", "field", ":", "k", ",", "v", "=", "field", "[", "0", "]", ",", "field", "[", "1", "]", "if", "k", "in", "self", ".", "keynorms", ":", "k", "=", "self", ".", "keynorms", "[", "k", "]", "if", "k", "==", "'pages'", ":", "v", "=", "v", ".", "replace", "(", "' '", ",", "''", ")", ".", "replace", "(", "'--'", ",", "'-'", ")", "if", "k", "==", "'author'", "or", "k", "==", "'editor'", ":", "v", "=", "self", ".", "parse_names", "(", "v", ")", "# Recapitalizing the title generally causes more problems than it solves", "# elif k == 'title':", "# v = latex_to_unicode(v, capitalize='title')", "else", ":", "v", "=", "latex_to_unicode", "(", "v", ")", "self", ".", "records", "[", "key", "]", "[", "k", "]", "=", "v", "if", "self", ".", "_token", "!=", "','", ":", "break" ]
Parse a record.
[ "Parse", "a", "record", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/bibtex.py#L95-L121
6,398
mcs07/ChemDataExtractor
chemdataextractor/biblio/bibtex.py
BibtexParser._parse_field
def _parse_field(self): """Parse a Field.""" name = self._next_token() if self._next_token() == '=': value = self._parse_value() return name, value
python
def _parse_field(self): """Parse a Field.""" name = self._next_token() if self._next_token() == '=': value = self._parse_value() return name, value
[ "def", "_parse_field", "(", "self", ")", ":", "name", "=", "self", ".", "_next_token", "(", ")", "if", "self", ".", "_next_token", "(", ")", "==", "'='", ":", "value", "=", "self", ".", "_parse_value", "(", ")", "return", "name", ",", "value" ]
Parse a Field.
[ "Parse", "a", "Field", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/bibtex.py#L123-L128
6,399
mcs07/ChemDataExtractor
chemdataextractor/biblio/bibtex.py
BibtexParser._parse_value
def _parse_value(self): """Parse a value. Digits, definitions, and the contents of double quotes or curly brackets.""" val = [] while True: t = self._next_token() if t == '"': brac_counter = 0 while True: t = self._next_token(skipws=False) if t == '{': brac_counter += 1 if t == '}': brac_counter -= 1 if t == '"' and brac_counter <= 0: break else: val.append(t) elif t == '{': brac_counter = 0 while True: t = self._next_token(skipws=False) if t == '{': brac_counter += 1 if t == '}': brac_counter -= 1 if brac_counter < 0: break else: val.append(t) elif re.match(r'\w', t): val.extend([self.definitions.get(t, t), ' ']) elif t.isdigit(): val.append([t, ' ']) elif t == '#': pass else: break value = ' '.join(''.join(val).split()) return value
python
def _parse_value(self): """Parse a value. Digits, definitions, and the contents of double quotes or curly brackets.""" val = [] while True: t = self._next_token() if t == '"': brac_counter = 0 while True: t = self._next_token(skipws=False) if t == '{': brac_counter += 1 if t == '}': brac_counter -= 1 if t == '"' and brac_counter <= 0: break else: val.append(t) elif t == '{': brac_counter = 0 while True: t = self._next_token(skipws=False) if t == '{': brac_counter += 1 if t == '}': brac_counter -= 1 if brac_counter < 0: break else: val.append(t) elif re.match(r'\w', t): val.extend([self.definitions.get(t, t), ' ']) elif t.isdigit(): val.append([t, ' ']) elif t == '#': pass else: break value = ' '.join(''.join(val).split()) return value
[ "def", "_parse_value", "(", "self", ")", ":", "val", "=", "[", "]", "while", "True", ":", "t", "=", "self", ".", "_next_token", "(", ")", "if", "t", "==", "'\"'", ":", "brac_counter", "=", "0", "while", "True", ":", "t", "=", "self", ".", "_next_token", "(", "skipws", "=", "False", ")", "if", "t", "==", "'{'", ":", "brac_counter", "+=", "1", "if", "t", "==", "'}'", ":", "brac_counter", "-=", "1", "if", "t", "==", "'\"'", "and", "brac_counter", "<=", "0", ":", "break", "else", ":", "val", ".", "append", "(", "t", ")", "elif", "t", "==", "'{'", ":", "brac_counter", "=", "0", "while", "True", ":", "t", "=", "self", ".", "_next_token", "(", "skipws", "=", "False", ")", "if", "t", "==", "'{'", ":", "brac_counter", "+=", "1", "if", "t", "==", "'}'", ":", "brac_counter", "-=", "1", "if", "brac_counter", "<", "0", ":", "break", "else", ":", "val", ".", "append", "(", "t", ")", "elif", "re", ".", "match", "(", "r'\\w'", ",", "t", ")", ":", "val", ".", "extend", "(", "[", "self", ".", "definitions", ".", "get", "(", "t", ",", "t", ")", ",", "' '", "]", ")", "elif", "t", ".", "isdigit", "(", ")", ":", "val", ".", "append", "(", "[", "t", ",", "' '", "]", ")", "elif", "t", "==", "'#'", ":", "pass", "else", ":", "break", "value", "=", "' '", ".", "join", "(", "''", ".", "join", "(", "val", ")", ".", "split", "(", ")", ")", "return", "value" ]
Parse a value. Digits, definitions, and the contents of double quotes or curly brackets.
[ "Parse", "a", "value", ".", "Digits", "definitions", "and", "the", "contents", "of", "double", "quotes", "or", "curly", "brackets", "." ]
349a3bea965f2073141d62043b89319222e46af1
https://github.com/mcs07/ChemDataExtractor/blob/349a3bea965f2073141d62043b89319222e46af1/chemdataextractor/biblio/bibtex.py#L130-L169