Columns:
id: int32 (values 0 to 252k)
repo: string (length 7 to 55)
path: string (length 4 to 127)
func_name: string (length 1 to 88)
original_string: string (length 75 to 19.8k)
language: string (1 class)
code: string (length 75 to 19.8k)
code_tokens: sequence
docstring: string (length 3 to 17.3k)
docstring_tokens: sequence
sha: string (length 40)
url: string (length 87 to 242)
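A minimal sketch of iterating records with the columns above. The export format and filename are assumptions (a JSON Lines file is used purely for illustration); only the field names come from the schema listed here.

import json

# Iterate one record per line; each record pairs a function body with
# its docstring and its origin (repo, path, sha, url).
with open("python_functions.jsonl", "r", encoding="utf-8") as fh:
    for line in fh:
        record = json.loads(line)
        print(record["repo"], record["path"], record["func_name"])
        print(record["docstring"][:80])
        print(record["url"])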
249,000
hackthefed/govtrack2csv
govtrack2csv/__init__.py
extract_events
def extract_events(bill): """ Returns all events from legislation. Thing of this as a log for congress. There are alot of events that occur around legislation. For now we are going to kepe it simple. Introduction, cosponsor, votes dates """ events = [] #logger.debug(events) bill_id = bill.get('bill_id', None) if bill_id: for event in bill.get('actions', []): e = [] e.append(bill_id) e.append(event.get('acted_at', None)) e.append(event.get('how', None)) e.append(event.get('result', None)) e.append(event.get('roll', None)) e.append(event.get('status', None)) e.append(event.get('suspension', False)) e.append(event.get('text', None)) e.append(event.get('type', None)) e.append(event.get('vote_type', None)) e.append(event.get('where', None)) e.append(event.get('calander', None)) e.append(event.get('number', None)) e.append(event.get('under', None)) e.append(event.get('committee', None)) e.append(event.get('committees', [])) events.append(e) #logger.debug(events) return events
python
def extract_events(bill): """ Returns all events from legislation. Thing of this as a log for congress. There are alot of events that occur around legislation. For now we are going to kepe it simple. Introduction, cosponsor, votes dates """ events = [] #logger.debug(events) bill_id = bill.get('bill_id', None) if bill_id: for event in bill.get('actions', []): e = [] e.append(bill_id) e.append(event.get('acted_at', None)) e.append(event.get('how', None)) e.append(event.get('result', None)) e.append(event.get('roll', None)) e.append(event.get('status', None)) e.append(event.get('suspension', False)) e.append(event.get('text', None)) e.append(event.get('type', None)) e.append(event.get('vote_type', None)) e.append(event.get('where', None)) e.append(event.get('calander', None)) e.append(event.get('number', None)) e.append(event.get('under', None)) e.append(event.get('committee', None)) e.append(event.get('committees', [])) events.append(e) #logger.debug(events) return events
[ "def", "extract_events", "(", "bill", ")", ":", "events", "=", "[", "]", "#logger.debug(events)", "bill_id", "=", "bill", ".", "get", "(", "'bill_id'", ",", "None", ")", "if", "bill_id", ":", "for", "event", "in", "bill", ".", "get", "(", "'actions'", ",", "[", "]", ")", ":", "e", "=", "[", "]", "e", ".", "append", "(", "bill_id", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'acted_at'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'how'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'result'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'roll'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'status'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'suspension'", ",", "False", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'text'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'type'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'vote_type'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'where'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'calander'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'number'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'under'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'committee'", ",", "None", ")", ")", "e", ".", "append", "(", "event", ".", "get", "(", "'committees'", ",", "[", "]", ")", ")", "events", ".", "append", "(", "e", ")", "#logger.debug(events)", "return", "events" ]
Returns all events from legislation. Thing of this as a log for congress. There are alot of events that occur around legislation. For now we are going to kepe it simple. Introduction, cosponsor, votes dates
[ "Returns", "all", "events", "from", "legislation", ".", "Thing", "of", "this", "as", "a", "log", "for", "congress", ".", "There", "are", "alot", "of", "events", "that", "occur", "around", "legislation", ".", "For", "now", "we", "are", "going", "to", "kepe", "it", "simple", ".", "Introduction", "cosponsor", "votes", "dates" ]
db991f5fcd3dfda6e6d51fadd286cba983f493e5
https://github.com/hackthefed/govtrack2csv/blob/db991f5fcd3dfda6e6d51fadd286cba983f493e5/govtrack2csv/__init__.py#L335-L367
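A hedged sketch of consuming extract_events() output as a table, assuming pandas is installed and that extract_events can be imported from govtrack2csv as this row suggests. The column names simply mirror the order of the e.append(...) calls above ('calander' is spelled as in the source keys).

import pandas as pd
from govtrack2csv import extract_events

# One name per positional slot appended by extract_events().
EVENT_COLUMNS = [
    "bill_id", "acted_at", "how", "result", "roll", "status",
    "suspension", "text", "type", "vote_type", "where", "calander",
    "number", "under", "committee", "committees",
]

def events_frame(bills):
    # bills: iterable of bill dicts parsed from govtrack JSON
    rows = []
    for bill in bills:
        rows.extend(extract_events(bill))
    return pd.DataFrame(rows, columns=EVENT_COLUMNS)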
249,001
hackthefed/govtrack2csv
govtrack2csv/__init__.py
process_amendments
def process_amendments(congress): """ Traverse amendments for a project """ amend_dir = "{0}/{1}/amendments".format(congress['src'], congress['congress']) logger.info("Processing Amendments for {0}".format(congress['congress'])) amendments = [] for root, dirs, files in os.walk(amend_dir): if "data.json" in files and "text-versions" not in root: file_path = "{0}/data.json".format(root) logger.debug("Processing {0}".format(file_path)) a = json.loads(open(file_path, 'r').read()) amendment = [] amendment.append(a['amendment_id']) amendment.append(a['amendment_type']) if a['amends_amendment']: amendment.append(a['amends_amendment'].get('amendment_id', None)) else: amendment.append(None) if a['amends_bill']: amendment.append(a['amends_bill'].get('bill_id', None)) else: amendment.append(None) if a['amends_treaty']: amendment.append(a['amends_treaty'].get('treaty_id', None)) else: amendment.append(None) amendment.append(a['chamber']) amendment.append(a['congress']) amendment.append(a['description']) amendment.append(a['introduced_at']) amendment.append(a['number']) amendment.append(a.get('proposed_at', None)) amendment.append(a['purpose']) amendment.append(a['sponsor'].get('thomas_id', None)) amendment.append(a['sponsor'].get('committee_id', None)) amendment.append(a['sponsor']['type']) amendment.append(a['status']) amendment.append(a['updated_at']) amendments.append(amendment) return amendments if amendments else [[None] * 17]
python
def process_amendments(congress): """ Traverse amendments for a project """ amend_dir = "{0}/{1}/amendments".format(congress['src'], congress['congress']) logger.info("Processing Amendments for {0}".format(congress['congress'])) amendments = [] for root, dirs, files in os.walk(amend_dir): if "data.json" in files and "text-versions" not in root: file_path = "{0}/data.json".format(root) logger.debug("Processing {0}".format(file_path)) a = json.loads(open(file_path, 'r').read()) amendment = [] amendment.append(a['amendment_id']) amendment.append(a['amendment_type']) if a['amends_amendment']: amendment.append(a['amends_amendment'].get('amendment_id', None)) else: amendment.append(None) if a['amends_bill']: amendment.append(a['amends_bill'].get('bill_id', None)) else: amendment.append(None) if a['amends_treaty']: amendment.append(a['amends_treaty'].get('treaty_id', None)) else: amendment.append(None) amendment.append(a['chamber']) amendment.append(a['congress']) amendment.append(a['description']) amendment.append(a['introduced_at']) amendment.append(a['number']) amendment.append(a.get('proposed_at', None)) amendment.append(a['purpose']) amendment.append(a['sponsor'].get('thomas_id', None)) amendment.append(a['sponsor'].get('committee_id', None)) amendment.append(a['sponsor']['type']) amendment.append(a['status']) amendment.append(a['updated_at']) amendments.append(amendment) return amendments if amendments else [[None] * 17]
[ "def", "process_amendments", "(", "congress", ")", ":", "amend_dir", "=", "\"{0}/{1}/amendments\"", ".", "format", "(", "congress", "[", "'src'", "]", ",", "congress", "[", "'congress'", "]", ")", "logger", ".", "info", "(", "\"Processing Amendments for {0}\"", ".", "format", "(", "congress", "[", "'congress'", "]", ")", ")", "amendments", "=", "[", "]", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "amend_dir", ")", ":", "if", "\"data.json\"", "in", "files", "and", "\"text-versions\"", "not", "in", "root", ":", "file_path", "=", "\"{0}/data.json\"", ".", "format", "(", "root", ")", "logger", ".", "debug", "(", "\"Processing {0}\"", ".", "format", "(", "file_path", ")", ")", "a", "=", "json", ".", "loads", "(", "open", "(", "file_path", ",", "'r'", ")", ".", "read", "(", ")", ")", "amendment", "=", "[", "]", "amendment", ".", "append", "(", "a", "[", "'amendment_id'", "]", ")", "amendment", ".", "append", "(", "a", "[", "'amendment_type'", "]", ")", "if", "a", "[", "'amends_amendment'", "]", ":", "amendment", ".", "append", "(", "a", "[", "'amends_amendment'", "]", ".", "get", "(", "'amendment_id'", ",", "None", ")", ")", "else", ":", "amendment", ".", "append", "(", "None", ")", "if", "a", "[", "'amends_bill'", "]", ":", "amendment", ".", "append", "(", "a", "[", "'amends_bill'", "]", ".", "get", "(", "'bill_id'", ",", "None", ")", ")", "else", ":", "amendment", ".", "append", "(", "None", ")", "if", "a", "[", "'amends_treaty'", "]", ":", "amendment", ".", "append", "(", "a", "[", "'amends_treaty'", "]", ".", "get", "(", "'treaty_id'", ",", "None", ")", ")", "else", ":", "amendment", ".", "append", "(", "None", ")", "amendment", ".", "append", "(", "a", "[", "'chamber'", "]", ")", "amendment", ".", "append", "(", "a", "[", "'congress'", "]", ")", "amendment", ".", "append", "(", "a", "[", "'description'", "]", ")", "amendment", ".", "append", "(", "a", "[", "'introduced_at'", "]", ")", "amendment", ".", "append", "(", "a", "[", "'number'", "]", ")", "amendment", ".", "append", "(", "a", ".", "get", "(", "'proposed_at'", ",", "None", ")", ")", "amendment", ".", "append", "(", "a", "[", "'purpose'", "]", ")", "amendment", ".", "append", "(", "a", "[", "'sponsor'", "]", ".", "get", "(", "'thomas_id'", ",", "None", ")", ")", "amendment", ".", "append", "(", "a", "[", "'sponsor'", "]", ".", "get", "(", "'committee_id'", ",", "None", ")", ")", "amendment", ".", "append", "(", "a", "[", "'sponsor'", "]", "[", "'type'", "]", ")", "amendment", ".", "append", "(", "a", "[", "'status'", "]", ")", "amendment", ".", "append", "(", "a", "[", "'updated_at'", "]", ")", "amendments", ".", "append", "(", "amendment", ")", "return", "amendments", "if", "amendments", "else", "[", "[", "None", "]", "*", "17", "]" ]
Traverse amendments for a project
[ "Traverse", "amendments", "for", "a", "project" ]
db991f5fcd3dfda6e6d51fadd286cba983f493e5
https://github.com/hackthefed/govtrack2csv/blob/db991f5fcd3dfda6e6d51fadd286cba983f493e5/govtrack2csv/__init__.py#L414-L461
249,002
hackthefed/govtrack2csv
govtrack2csv/__init__.py
lis_to_bio_map
def lis_to_bio_map(folder): """ Senators have a lis_id that is used in some places. That's dumb. Build a dict from lis_id to bioguide_id which every member of congress has. """ logger.info("Opening legislator csv for lis_dct creation") lis_dic = {} leg_path = "{0}/legislators.csv".format(folder) logger.info(leg_path) with open(leg_path, 'r') as csvfile: leg_reader = csv.reader(csvfile) for row in leg_reader: if row[22]: lis_dic[row[22]] = row[19] return lis_dic
python
def lis_to_bio_map(folder): """ Senators have a lis_id that is used in some places. That's dumb. Build a dict from lis_id to bioguide_id which every member of congress has. """ logger.info("Opening legislator csv for lis_dct creation") lis_dic = {} leg_path = "{0}/legislators.csv".format(folder) logger.info(leg_path) with open(leg_path, 'r') as csvfile: leg_reader = csv.reader(csvfile) for row in leg_reader: if row[22]: lis_dic[row[22]] = row[19] return lis_dic
[ "def", "lis_to_bio_map", "(", "folder", ")", ":", "logger", ".", "info", "(", "\"Opening legislator csv for lis_dct creation\"", ")", "lis_dic", "=", "{", "}", "leg_path", "=", "\"{0}/legislators.csv\"", ".", "format", "(", "folder", ")", "logger", ".", "info", "(", "leg_path", ")", "with", "open", "(", "leg_path", ",", "'r'", ")", "as", "csvfile", ":", "leg_reader", "=", "csv", ".", "reader", "(", "csvfile", ")", "for", "row", "in", "leg_reader", ":", "if", "row", "[", "22", "]", ":", "lis_dic", "[", "row", "[", "22", "]", "]", "=", "row", "[", "19", "]", "return", "lis_dic" ]
Senators have a lis_id that is used in some places. That's dumb. Build a dict from lis_id to bioguide_id which every member of congress has.
[ "Senators", "have", "a", "lis_id", "that", "is", "used", "in", "some", "places", ".", "That", "s", "dumb", ".", "Build", "a", "dict", "from", "lis_id", "to", "bioguide_id", "which", "every", "member", "of", "congress", "has", "." ]
db991f5fcd3dfda6e6d51fadd286cba983f493e5
https://github.com/hackthefed/govtrack2csv/blob/db991f5fcd3dfda6e6d51fadd286cba983f493e5/govtrack2csv/__init__.py#L580-L594
249,003
svetlyak40wt/twiggy-goodies
twiggy_goodies/std_logging.py
RedirectLoggingHandler.convert_level
def convert_level(self, record): """Converts a logging level into a logbook level.""" level = record.levelno if level >= logging.CRITICAL: return levels.CRITICAL if level >= logging.ERROR: return levels.ERROR if level >= logging.WARNING: return levels.WARNING if level >= logging.INFO: return levels.INFO return levels.DEBUG
python
def convert_level(self, record): """Converts a logging level into a logbook level.""" level = record.levelno if level >= logging.CRITICAL: return levels.CRITICAL if level >= logging.ERROR: return levels.ERROR if level >= logging.WARNING: return levels.WARNING if level >= logging.INFO: return levels.INFO return levels.DEBUG
[ "def", "convert_level", "(", "self", ",", "record", ")", ":", "level", "=", "record", ".", "levelno", "if", "level", ">=", "logging", ".", "CRITICAL", ":", "return", "levels", ".", "CRITICAL", "if", "level", ">=", "logging", ".", "ERROR", ":", "return", "levels", ".", "ERROR", "if", "level", ">=", "logging", ".", "WARNING", ":", "return", "levels", ".", "WARNING", "if", "level", ">=", "logging", ".", "INFO", ":", "return", "levels", ".", "INFO", "return", "levels", ".", "DEBUG" ]
Converts a logging level into a logbook level.
[ "Converts", "a", "logging", "level", "into", "a", "logbook", "level", "." ]
71528d5959fab81eb8d0e4373f20d37a013ac00e
https://github.com/svetlyak40wt/twiggy-goodies/blob/71528d5959fab81eb8d0e4373f20d37a013ac00e/twiggy_goodies/std_logging.py#L19-L30
249,004
svetlyak40wt/twiggy-goodies
twiggy_goodies/std_logging.py
RedirectLoggingHandler.find_extra
def find_extra(self, record): """Tries to find custom data from the old logging record. The return value is a dictionary that is merged with the log record extra dictionaries. """ rv = vars(record).copy() for key in ('name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process'): rv.pop(key, None) return rv
python
def find_extra(self, record): """Tries to find custom data from the old logging record. The return value is a dictionary that is merged with the log record extra dictionaries. """ rv = vars(record).copy() for key in ('name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process'): rv.pop(key, None) return rv
[ "def", "find_extra", "(", "self", ",", "record", ")", ":", "rv", "=", "vars", "(", "record", ")", ".", "copy", "(", ")", "for", "key", "in", "(", "'name'", ",", "'msg'", ",", "'args'", ",", "'levelname'", ",", "'levelno'", ",", "'pathname'", ",", "'filename'", ",", "'module'", ",", "'exc_info'", ",", "'exc_text'", ",", "'lineno'", ",", "'funcName'", ",", "'created'", ",", "'msecs'", ",", "'relativeCreated'", ",", "'thread'", ",", "'threadName'", ",", "'processName'", ",", "'process'", ")", ":", "rv", ".", "pop", "(", "key", ",", "None", ")", "return", "rv" ]
Tries to find custom data from the old logging record. The return value is a dictionary that is merged with the log record extra dictionaries.
[ "Tries", "to", "find", "custom", "data", "from", "the", "old", "logging", "record", ".", "The", "return", "value", "is", "a", "dictionary", "that", "is", "merged", "with", "the", "log", "record", "extra", "dictionaries", "." ]
71528d5959fab81eb8d0e4373f20d37a013ac00e
https://github.com/svetlyak40wt/twiggy-goodies/blob/71528d5959fab81eb8d0e4373f20d37a013ac00e/twiggy_goodies/std_logging.py#L32-L44
249,005
openpermissions/perch
perch/views.py
view
def view(db_name): """ Register a map function as a view Currently, only a single map function can be created for each view NOTE: the map function source is saved in CouchDB, so it cannot depend on anything outside the function's scope. :param db_name: the database name """ def decorator(func): v = View(db_name, func) v.register() return v return decorator
python
def view(db_name): """ Register a map function as a view Currently, only a single map function can be created for each view NOTE: the map function source is saved in CouchDB, so it cannot depend on anything outside the function's scope. :param db_name: the database name """ def decorator(func): v = View(db_name, func) v.register() return v return decorator
[ "def", "view", "(", "db_name", ")", ":", "def", "decorator", "(", "func", ")", ":", "v", "=", "View", "(", "db_name", ",", "func", ")", "v", ".", "register", "(", ")", "return", "v", "return", "decorator" ]
Register a map function as a view Currently, only a single map function can be created for each view NOTE: the map function source is saved in CouchDB, so it cannot depend on anything outside the function's scope. :param db_name: the database name
[ "Register", "a", "map", "function", "as", "a", "view" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L107-L122
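A hedged usage sketch for the view() decorator in this row, assuming from perch.views import view resolves as the path suggests. The database name and the map function below are made up; decorating a function wraps it in a View and registers its source for the design-document upload shown in a later row.

from perch.views import view

@view('registry')                      # hypothetical database name
def organisation_names(doc):
    """Map organisation documents to their names."""
    if doc.get('type') == 'organisation':
        yield doc['_id'], doc.get('name')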
249,006
openpermissions/perch
perch/views.py
load_design_docs
def load_design_docs(): """ Load design docs for registered views """ url = ':'.join([options.url_registry_db, str(options.db_port)]) client = partial(couch.BlockingCouch, couch_url=url) for name, docs in _views.items(): db = client(db_name=name) views = [] for doc in docs: try: current_doc = db.get_doc(doc['_id']) # use the current _rev if not provided if '_rev' not in doc: doc['_rev'] = current_doc['_rev'] except couch.NotFound: pass views.append(doc) db.save_docs(views)
python
def load_design_docs(): """ Load design docs for registered views """ url = ':'.join([options.url_registry_db, str(options.db_port)]) client = partial(couch.BlockingCouch, couch_url=url) for name, docs in _views.items(): db = client(db_name=name) views = [] for doc in docs: try: current_doc = db.get_doc(doc['_id']) # use the current _rev if not provided if '_rev' not in doc: doc['_rev'] = current_doc['_rev'] except couch.NotFound: pass views.append(doc) db.save_docs(views)
[ "def", "load_design_docs", "(", ")", ":", "url", "=", "':'", ".", "join", "(", "[", "options", ".", "url_registry_db", ",", "str", "(", "options", ".", "db_port", ")", "]", ")", "client", "=", "partial", "(", "couch", ".", "BlockingCouch", ",", "couch_url", "=", "url", ")", "for", "name", ",", "docs", "in", "_views", ".", "items", "(", ")", ":", "db", "=", "client", "(", "db_name", "=", "name", ")", "views", "=", "[", "]", "for", "doc", "in", "docs", ":", "try", ":", "current_doc", "=", "db", ".", "get_doc", "(", "doc", "[", "'_id'", "]", ")", "# use the current _rev if not provided", "if", "'_rev'", "not", "in", "doc", ":", "doc", "[", "'_rev'", "]", "=", "current_doc", "[", "'_rev'", "]", "except", "couch", ".", "NotFound", ":", "pass", "views", ".", "append", "(", "doc", ")", "db", ".", "save_docs", "(", "views", ")" ]
Load design docs for registered views
[ "Load", "design", "docs", "for", "registered", "views" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L125-L148
249,007
openpermissions/perch
perch/views.py
reference_links
def reference_links(doc): """Get reference links""" if doc.get('type') == 'organisation' and doc.get('state') != 'deactivated': for asset_id_type, link in doc.get('reference_links', {}).get('links', {}).items(): value = { 'organisation_id': doc['_id'], 'link': link } yield asset_id_type, value
python
def reference_links(doc): """Get reference links""" if doc.get('type') == 'organisation' and doc.get('state') != 'deactivated': for asset_id_type, link in doc.get('reference_links', {}).get('links', {}).items(): value = { 'organisation_id': doc['_id'], 'link': link } yield asset_id_type, value
[ "def", "reference_links", "(", "doc", ")", ":", "if", "doc", ".", "get", "(", "'type'", ")", "==", "'organisation'", "and", "doc", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "for", "asset_id_type", ",", "link", "in", "doc", ".", "get", "(", "'reference_links'", ",", "{", "}", ")", ".", "get", "(", "'links'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "value", "=", "{", "'organisation_id'", ":", "doc", "[", "'_id'", "]", ",", "'link'", ":", "link", "}", "yield", "asset_id_type", ",", "value" ]
Get reference links
[ "Get", "reference", "links" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L264-L272
249,008
openpermissions/perch
perch/views.py
active_services
def active_services(doc): """View for getting active services""" if doc.get('state') != 'deactivated': for service_id, service in doc.get('services', {}).items(): if service.get('state') != 'deactivated': service_type = service.get('service_type') org = doc['_id'] service['id'] = service_id service['organisation_id'] = org yield service_id, service yield [service_type, org], service yield [service_type, None], service yield [None, org], service yield [None, None], service
python
def active_services(doc): """View for getting active services""" if doc.get('state') != 'deactivated': for service_id, service in doc.get('services', {}).items(): if service.get('state') != 'deactivated': service_type = service.get('service_type') org = doc['_id'] service['id'] = service_id service['organisation_id'] = org yield service_id, service yield [service_type, org], service yield [service_type, None], service yield [None, org], service yield [None, None], service
[ "def", "active_services", "(", "doc", ")", ":", "if", "doc", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "for", "service_id", ",", "service", "in", "doc", ".", "get", "(", "'services'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "if", "service", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "service_type", "=", "service", ".", "get", "(", "'service_type'", ")", "org", "=", "doc", "[", "'_id'", "]", "service", "[", "'id'", "]", "=", "service_id", "service", "[", "'organisation_id'", "]", "=", "org", "yield", "service_id", ",", "service", "yield", "[", "service_type", ",", "org", "]", ",", "service", "yield", "[", "service_type", ",", "None", "]", ",", "service", "yield", "[", "None", ",", "org", "]", ",", "service", "yield", "[", "None", ",", "None", "]", ",", "service" ]
View for getting active services
[ "View", "for", "getting", "active", "services" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L276-L290
249,009
openpermissions/perch
perch/views.py
services
def services(doc): """View for getting services""" for service_id, service in doc.get('services', {}).items(): service_type = service.get('service_type') org = doc['_id'] service['id'] = service_id service['organisation_id'] = org yield service_id, service yield [service_type, org], service yield [service_type, None], service yield [None, org], service yield [None, None], service
python
def services(doc): """View for getting services""" for service_id, service in doc.get('services', {}).items(): service_type = service.get('service_type') org = doc['_id'] service['id'] = service_id service['organisation_id'] = org yield service_id, service yield [service_type, org], service yield [service_type, None], service yield [None, org], service yield [None, None], service
[ "def", "services", "(", "doc", ")", ":", "for", "service_id", ",", "service", "in", "doc", ".", "get", "(", "'services'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "service_type", "=", "service", ".", "get", "(", "'service_type'", ")", "org", "=", "doc", "[", "'_id'", "]", "service", "[", "'id'", "]", "=", "service_id", "service", "[", "'organisation_id'", "]", "=", "org", "yield", "service_id", ",", "service", "yield", "[", "service_type", ",", "org", "]", ",", "service", "yield", "[", "service_type", ",", "None", "]", ",", "service", "yield", "[", "None", ",", "org", "]", ",", "service", "yield", "[", "None", ",", "None", "]", ",", "service" ]
View for getting services
[ "View", "for", "getting", "services" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L294-L306
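A standalone illustration of the key fan-out used by active_services() and services() above. service_keys() copies the yield pattern so the example runs without the perch package, and the sample document is made up. Emitting [service_type, org], [service_type, None], [None, org] and [None, None] alongside the service id lets one CouchDB view be queried by type, by organisation, by both, or with no filter at all.

def service_keys(doc):
    # Mirrors the keys yielded by services() in the row above.
    for service_id, service in doc.get('services', {}).items():
        service_type = service.get('service_type')
        org = doc['_id']
        yield service_id
        yield [service_type, org]
        yield [service_type, None]
        yield [None, org]
        yield [None, None]

sample_doc = {'_id': 'org1', 'services': {'svc1': {'service_type': 'index'}}}
print(list(service_keys(sample_doc)))
# ['svc1', ['index', 'org1'], ['index', None], [None, 'org1'], [None, None]]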
249,010
openpermissions/perch
perch/views.py
active_service_location
def active_service_location(doc): """View for getting active service by location""" if doc.get('state') != 'deactivated': for service_id, service in doc.get('services', {}).items(): if service.get('state') != 'deactivated': service['id'] = service_id service['organisation_id'] = doc['_id'] location = service.get('location', None) if location: yield location, service
python
def active_service_location(doc): """View for getting active service by location""" if doc.get('state') != 'deactivated': for service_id, service in doc.get('services', {}).items(): if service.get('state') != 'deactivated': service['id'] = service_id service['organisation_id'] = doc['_id'] location = service.get('location', None) if location: yield location, service
[ "def", "active_service_location", "(", "doc", ")", ":", "if", "doc", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "for", "service_id", ",", "service", "in", "doc", ".", "get", "(", "'services'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "if", "service", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "service", "[", "'id'", "]", "=", "service_id", "service", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "location", "=", "service", ".", "get", "(", "'location'", ",", "None", ")", "if", "location", ":", "yield", "location", ",", "service" ]
View for getting active service by location
[ "View", "for", "getting", "active", "service", "by", "location" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L310-L320
249,011
openpermissions/perch
perch/views.py
service_location
def service_location(doc): """View for getting service by location""" for service_id, service in doc.get('services', {}).items(): service['id'] = service_id service['organisation_id'] = doc['_id'] location = service.get('location', None) if location: yield location, service
python
def service_location(doc): """View for getting service by location""" for service_id, service in doc.get('services', {}).items(): service['id'] = service_id service['organisation_id'] = doc['_id'] location = service.get('location', None) if location: yield location, service
[ "def", "service_location", "(", "doc", ")", ":", "for", "service_id", ",", "service", "in", "doc", ".", "get", "(", "'services'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "service", "[", "'id'", "]", "=", "service_id", "service", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "location", "=", "service", ".", "get", "(", "'location'", ",", "None", ")", "if", "location", ":", "yield", "location", ",", "service" ]
View for getting service by location
[ "View", "for", "getting", "service", "by", "location" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L324-L332
249,012
openpermissions/perch
perch/views.py
service_name
def service_name(doc): """View for getting service by name""" for service_id, service in doc.get('services', {}).items(): service['id'] = service_id service['organisation_id'] = doc['_id'] name = service.get('name', None) if name: yield name, service
python
def service_name(doc): """View for getting service by name""" for service_id, service in doc.get('services', {}).items(): service['id'] = service_id service['organisation_id'] = doc['_id'] name = service.get('name', None) if name: yield name, service
[ "def", "service_name", "(", "doc", ")", ":", "for", "service_id", ",", "service", "in", "doc", ".", "get", "(", "'services'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "service", "[", "'id'", "]", "=", "service_id", "service", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "name", "=", "service", ".", "get", "(", "'name'", ",", "None", ")", "if", "name", ":", "yield", "name", ",", "service" ]
View for getting service by name
[ "View", "for", "getting", "service", "by", "name" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L336-L344
249,013
openpermissions/perch
perch/views.py
active_repositories
def active_repositories(doc): """View for getting active repositories""" if doc.get('state') != 'deactivated': for repository_id, repo in doc.get('repositories', {}).items(): if repo.get('state') != 'deactivated': repo['id'] = repository_id repo['organisation_id'] = doc['_id'] yield repository_id, repo
python
def active_repositories(doc): """View for getting active repositories""" if doc.get('state') != 'deactivated': for repository_id, repo in doc.get('repositories', {}).items(): if repo.get('state') != 'deactivated': repo['id'] = repository_id repo['organisation_id'] = doc['_id'] yield repository_id, repo
[ "def", "active_repositories", "(", "doc", ")", ":", "if", "doc", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "for", "repository_id", ",", "repo", "in", "doc", ".", "get", "(", "'repositories'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "if", "repo", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "repo", "[", "'id'", "]", "=", "repository_id", "repo", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "yield", "repository_id", ",", "repo" ]
View for getting active repositories
[ "View", "for", "getting", "active", "repositories" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L362-L370
249,014
openpermissions/perch
perch/views.py
repositories
def repositories(doc): """View for getting repositories""" for repository_id, repo in doc.get('repositories', {}).items(): repo['id'] = repository_id repo['organisation_id'] = doc['_id'] yield repository_id, repo
python
def repositories(doc): """View for getting repositories""" for repository_id, repo in doc.get('repositories', {}).items(): repo['id'] = repository_id repo['organisation_id'] = doc['_id'] yield repository_id, repo
[ "def", "repositories", "(", "doc", ")", ":", "for", "repository_id", ",", "repo", "in", "doc", ".", "get", "(", "'repositories'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "repo", "[", "'id'", "]", "=", "repository_id", "repo", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "yield", "repository_id", ",", "repo" ]
View for getting repositories
[ "View", "for", "getting", "repositories" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L374-L380
249,015
openpermissions/perch
perch/views.py
repository_name
def repository_name(doc): """View for checking repository name is unique""" for repository_id, repo in doc.get('repositories', {}).items(): repo['id'] = repository_id repo['organisation_id'] = doc['_id'] name = repo.get('name', None) if name: yield name, repository_id
python
def repository_name(doc): """View for checking repository name is unique""" for repository_id, repo in doc.get('repositories', {}).items(): repo['id'] = repository_id repo['organisation_id'] = doc['_id'] name = repo.get('name', None) if name: yield name, repository_id
[ "def", "repository_name", "(", "doc", ")", ":", "for", "repository_id", ",", "repo", "in", "doc", ".", "get", "(", "'repositories'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "repo", "[", "'id'", "]", "=", "repository_id", "repo", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "name", "=", "repo", ".", "get", "(", "'name'", ",", "None", ")", "if", "name", ":", "yield", "name", ",", "repository_id" ]
View for checking repository name is unique
[ "View", "for", "checking", "repository", "name", "is", "unique" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L384-L392
249,016
openpermissions/perch
perch/views.py
service_and_repository
def service_and_repository(doc): """ View for looking up services and repositories by their ID Used in the auth service """ if doc.get('type') == 'organisation' and doc.get('state') != 'deactivated': for repository_id, repo in doc.get('repositories', {}).items(): if repo.get('state') != 'deactivated': repo['id'] = repository_id repo['organisation_id'] = doc['_id'] yield repository_id, repo for service_id, service in doc.get('services', {}).items(): if service.get('state') != 'deactivated': service['id'] = service_id service['organisation_id'] = doc['_id'] yield service_id, service
python
def service_and_repository(doc): """ View for looking up services and repositories by their ID Used in the auth service """ if doc.get('type') == 'organisation' and doc.get('state') != 'deactivated': for repository_id, repo in doc.get('repositories', {}).items(): if repo.get('state') != 'deactivated': repo['id'] = repository_id repo['organisation_id'] = doc['_id'] yield repository_id, repo for service_id, service in doc.get('services', {}).items(): if service.get('state') != 'deactivated': service['id'] = service_id service['organisation_id'] = doc['_id'] yield service_id, service
[ "def", "service_and_repository", "(", "doc", ")", ":", "if", "doc", ".", "get", "(", "'type'", ")", "==", "'organisation'", "and", "doc", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "for", "repository_id", ",", "repo", "in", "doc", ".", "get", "(", "'repositories'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "if", "repo", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "repo", "[", "'id'", "]", "=", "repository_id", "repo", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "yield", "repository_id", ",", "repo", "for", "service_id", ",", "service", "in", "doc", ".", "get", "(", "'services'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "if", "service", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "service", "[", "'id'", "]", "=", "service_id", "service", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "yield", "service_id", ",", "service" ]
View for looking up services and repositories by their ID Used in the auth service
[ "View", "for", "looking", "up", "services", "and", "repositories", "by", "their", "ID" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L396-L415
249,017
openpermissions/perch
perch/views.py
View.create_design_doc
def create_design_doc(self): """Create a design document from a Python map function""" source = [x for x in inspect.getsourcelines(self.func)[0] if not x.startswith('@')] doc = { '_id': '_design/{}'.format(self.name), 'language': 'python', 'views': { self.name: { 'map': ''.join(source) } } } return doc
python
def create_design_doc(self): """Create a design document from a Python map function""" source = [x for x in inspect.getsourcelines(self.func)[0] if not x.startswith('@')] doc = { '_id': '_design/{}'.format(self.name), 'language': 'python', 'views': { self.name: { 'map': ''.join(source) } } } return doc
[ "def", "create_design_doc", "(", "self", ")", ":", "source", "=", "[", "x", "for", "x", "in", "inspect", ".", "getsourcelines", "(", "self", ".", "func", ")", "[", "0", "]", "if", "not", "x", ".", "startswith", "(", "'@'", ")", "]", "doc", "=", "{", "'_id'", ":", "'_design/{}'", ".", "format", "(", "self", ".", "name", ")", ",", "'language'", ":", "'python'", ",", "'views'", ":", "{", "self", ".", "name", ":", "{", "'map'", ":", "''", ".", "join", "(", "source", ")", "}", "}", "}", "return", "doc" ]
Create a design document from a Python map function
[ "Create", "a", "design", "document", "from", "a", "Python", "map", "function" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L50-L65
249,018
openpermissions/perch
perch/views.py
View.values
def values(self, **kwargs): """Get the view's values""" result = yield self.get(**kwargs) if not result['rows']: raise Return([]) raise Return([x['value'] for x in result['rows']])
python
def values(self, **kwargs): """Get the view's values""" result = yield self.get(**kwargs) if not result['rows']: raise Return([]) raise Return([x['value'] for x in result['rows']])
[ "def", "values", "(", "self", ",", "*", "*", "kwargs", ")", ":", "result", "=", "yield", "self", ".", "get", "(", "*", "*", "kwargs", ")", "if", "not", "result", "[", "'rows'", "]", ":", "raise", "Return", "(", "[", "]", ")", "raise", "Return", "(", "[", "x", "[", "'value'", "]", "for", "x", "in", "result", "[", "'rows'", "]", "]", ")" ]
Get the view's values
[ "Get", "the", "view", "s", "values" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/views.py#L97-L104
249,019
davisd50/sparc.apps.cache
sparc/apps/cache/cache.py
cache.go
def go(self, poller_configurations): """Create threaded pollers and start configured polling cycles """ try: notify( CachableSourcePollersAboutToStartEvent(poller_configurations)) logger.info("Starting pollers") exit_ = threading.Event() for config in poller_configurations: # config is a dict kwargs = {'exit_': exit_} kwargs.update(config) threading.Thread(target=self.poll, kwargs=(kwargs), ).start() while threading.active_count() > 1: time.sleep(.001) except KeyboardInterrupt: logger.info("KeyboardInterrupt signal caught, shutting down pollers...") exit_.set()
python
def go(self, poller_configurations): """Create threaded pollers and start configured polling cycles """ try: notify( CachableSourcePollersAboutToStartEvent(poller_configurations)) logger.info("Starting pollers") exit_ = threading.Event() for config in poller_configurations: # config is a dict kwargs = {'exit_': exit_} kwargs.update(config) threading.Thread(target=self.poll, kwargs=(kwargs), ).start() while threading.active_count() > 1: time.sleep(.001) except KeyboardInterrupt: logger.info("KeyboardInterrupt signal caught, shutting down pollers...") exit_.set()
[ "def", "go", "(", "self", ",", "poller_configurations", ")", ":", "try", ":", "notify", "(", "CachableSourcePollersAboutToStartEvent", "(", "poller_configurations", ")", ")", "logger", ".", "info", "(", "\"Starting pollers\"", ")", "exit_", "=", "threading", ".", "Event", "(", ")", "for", "config", "in", "poller_configurations", ":", "# config is a dict", "kwargs", "=", "{", "'exit_'", ":", "exit_", "}", "kwargs", ".", "update", "(", "config", ")", "threading", ".", "Thread", "(", "target", "=", "self", ".", "poll", ",", "kwargs", "=", "(", "kwargs", ")", ",", ")", ".", "start", "(", ")", "while", "threading", ".", "active_count", "(", ")", ">", "1", ":", "time", ".", "sleep", "(", ".001", ")", "except", "KeyboardInterrupt", ":", "logger", ".", "info", "(", "\"KeyboardInterrupt signal caught, shutting down pollers...\"", ")", "exit_", ".", "set", "(", ")" ]
Create threaded pollers and start configured polling cycles
[ "Create", "threaded", "pollers", "and", "start", "configured", "polling", "cycles" ]
793b0f18255230809c30dc27c2bb1bb04b3f194d
https://github.com/davisd50/sparc.apps.cache/blob/793b0f18255230809c30dc27c2bb1bb04b3f194d/sparc/apps/cache/cache.py#L203-L221
249,020
EventTeam/beliefs
src/beliefs/cells/colors.py
RGBColorCell.from_name
def from_name(clz, name): """ Instantiates the object from a known name """ if isinstance(name, list) and "green" in name: name = "teal" assert name in COLOR_NAMES, 'Unknown color name' r, b, g = COLOR_NAMES[name] return clz(r, b, g)
python
def from_name(clz, name): """ Instantiates the object from a known name """ if isinstance(name, list) and "green" in name: name = "teal" assert name in COLOR_NAMES, 'Unknown color name' r, b, g = COLOR_NAMES[name] return clz(r, b, g)
[ "def", "from_name", "(", "clz", ",", "name", ")", ":", "if", "isinstance", "(", "name", ",", "list", ")", "and", "\"green\"", "in", "name", ":", "name", "=", "\"teal\"", "assert", "name", "in", "COLOR_NAMES", ",", "'Unknown color name'", "r", ",", "b", ",", "g", "=", "COLOR_NAMES", "[", "name", "]", "return", "clz", "(", "r", ",", "b", ",", "g", ")" ]
Instantiates the object from a known name
[ "Instantiates", "the", "object", "from", "a", "known", "name" ]
c07d22b61bebeede74a72800030dde770bf64208
https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/colors.py#L27-L35
249,021
EventTeam/beliefs
src/beliefs/cells/colors.py
RGBColorCell.to_html
def to_html(self): """ Converts to Hex """ out = "#" if self.r == 0: out += "00" else: out += hex(self.r)[2:] if self.b == 0: out += "00" else: out += hex(self.b)[2:] if self.g == 0: out += "00" else: out += hex(self.g)[2:] return out
python
def to_html(self): """ Converts to Hex """ out = "#" if self.r == 0: out += "00" else: out += hex(self.r)[2:] if self.b == 0: out += "00" else: out += hex(self.b)[2:] if self.g == 0: out += "00" else: out += hex(self.g)[2:] return out
[ "def", "to_html", "(", "self", ")", ":", "out", "=", "\"#\"", "if", "self", ".", "r", "==", "0", ":", "out", "+=", "\"00\"", "else", ":", "out", "+=", "hex", "(", "self", ".", "r", ")", "[", "2", ":", "]", "if", "self", ".", "b", "==", "0", ":", "out", "+=", "\"00\"", "else", ":", "out", "+=", "hex", "(", "self", ".", "b", ")", "[", "2", ":", "]", "if", "self", ".", "g", "==", "0", ":", "out", "+=", "\"00\"", "else", ":", "out", "+=", "hex", "(", "self", ".", "g", ")", "[", "2", ":", "]", "return", "out" ]
Converts to Hex
[ "Converts", "to", "Hex" ]
c07d22b61bebeede74a72800030dde770bf64208
https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/colors.py#L38-L53
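A hedged alternative to to_html() above (not from the repo): hex(x)[2:] yields a single digit for channel values between 1 and 15, so the result can come out shorter than six hex digits; zero-padded formatting covers that case.

def rgb_to_html(r, g, b):
    # Two hex digits per channel, zero-padded.
    return "#{:02x}{:02x}{:02x}".format(r, g, b)

assert rgb_to_html(0, 10, 255) == "#000aff"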
249,022
EventTeam/beliefs
src/beliefs/cells/colors.py
RGBColorCell.merge
def merge(self, other): """ Merges the values """ print "MERGING", self, other other = self.coerce(other) if self.is_contradictory(other): raise Contradiction("Cannot merge %s and %s" % (self, other)) elif self.value is None and not other.value is None: self.r, self.g, self.b = other.r, other.g, other.b self.value = RGBColor(self.r, self.b, self.g, rgb_type='sRGB') # last cases: other is none, or both are none return self
python
def merge(self, other): """ Merges the values """ print "MERGING", self, other other = self.coerce(other) if self.is_contradictory(other): raise Contradiction("Cannot merge %s and %s" % (self, other)) elif self.value is None and not other.value is None: self.r, self.g, self.b = other.r, other.g, other.b self.value = RGBColor(self.r, self.b, self.g, rgb_type='sRGB') # last cases: other is none, or both are none return self
[ "def", "merge", "(", "self", ",", "other", ")", ":", "print", "\"MERGING\"", ",", "self", ",", "other", "other", "=", "self", ".", "coerce", "(", "other", ")", "if", "self", ".", "is_contradictory", "(", "other", ")", ":", "raise", "Contradiction", "(", "\"Cannot merge %s and %s\"", "%", "(", "self", ",", "other", ")", ")", "elif", "self", ".", "value", "is", "None", "and", "not", "other", ".", "value", "is", "None", ":", "self", ".", "r", ",", "self", ".", "g", ",", "self", ".", "b", "=", "other", ".", "r", ",", "other", ".", "g", ",", "other", ".", "b", "self", ".", "value", "=", "RGBColor", "(", "self", ".", "r", ",", "self", ".", "b", ",", "self", ".", "g", ",", "rgb_type", "=", "'sRGB'", ")", "# last cases: other is none, or both are none ", "return", "self" ]
Merges the values
[ "Merges", "the", "values" ]
c07d22b61bebeede74a72800030dde770bf64208
https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/colors.py#L80-L93
249,023
dustinmm80/healthy
healthy.py
calculate_health
def calculate_health(package_name, package_version=None, verbose=False, no_output=False): """ Calculates the health of a package, based on several factors :param package_name: name of package on pypi.python.org :param package_version: version number of package to check, optional - defaults to latest version :param verbose: flag to print out reasons :param no_output: print no output :param lint: run pylint on the package :returns: (score: integer, reasons: list of reasons for score) :rtype: tuple """ total_score = 0 reasons = [] package_releases = CLIENT.package_releases(package_name) if not package_releases: if not no_output: print(TERMINAL.red('{} is not listed on pypi'.format(package_name))) return 0, [] if package_version is None: package_version = package_releases[0] package_info = CLIENT.release_data(package_name, package_version) release_urls = CLIENT.release_urls(package_name, package_version) if not package_info or not release_urls: if not no_output: print(TERMINAL.red('Version {} is not listed on pypi'.format( package_version))) return 0, [] if not no_output: print(TERMINAL.bold('{} v{}'.format(package_name, package_version))) print('-----') checkers = [ checks.check_license, checks.check_homepage, checks.check_summary, checks.check_description, checks.check_python_classifiers, checks.check_author_info, checks.check_release_files, checks.check_stale ] for checker in checkers: result, reason, score = checker(package_info, release_urls) if result: total_score += score else: reasons.append(reason) if total_score < 0: total_score = 0 if not no_output: percentage = int(float(total_score) / float(checks.TOTAL_POSSIBLE) * 100) score_string = 'score: {}/{} {}%'.format(total_score, checks.TOTAL_POSSIBLE, percentage) print(get_health_color(percentage)(score_string)) if verbose and not no_output: for reason in reasons: print(reason) if no_output: return total_score, reasons
python
def calculate_health(package_name, package_version=None, verbose=False, no_output=False): """ Calculates the health of a package, based on several factors :param package_name: name of package on pypi.python.org :param package_version: version number of package to check, optional - defaults to latest version :param verbose: flag to print out reasons :param no_output: print no output :param lint: run pylint on the package :returns: (score: integer, reasons: list of reasons for score) :rtype: tuple """ total_score = 0 reasons = [] package_releases = CLIENT.package_releases(package_name) if not package_releases: if not no_output: print(TERMINAL.red('{} is not listed on pypi'.format(package_name))) return 0, [] if package_version is None: package_version = package_releases[0] package_info = CLIENT.release_data(package_name, package_version) release_urls = CLIENT.release_urls(package_name, package_version) if not package_info or not release_urls: if not no_output: print(TERMINAL.red('Version {} is not listed on pypi'.format( package_version))) return 0, [] if not no_output: print(TERMINAL.bold('{} v{}'.format(package_name, package_version))) print('-----') checkers = [ checks.check_license, checks.check_homepage, checks.check_summary, checks.check_description, checks.check_python_classifiers, checks.check_author_info, checks.check_release_files, checks.check_stale ] for checker in checkers: result, reason, score = checker(package_info, release_urls) if result: total_score += score else: reasons.append(reason) if total_score < 0: total_score = 0 if not no_output: percentage = int(float(total_score) / float(checks.TOTAL_POSSIBLE) * 100) score_string = 'score: {}/{} {}%'.format(total_score, checks.TOTAL_POSSIBLE, percentage) print(get_health_color(percentage)(score_string)) if verbose and not no_output: for reason in reasons: print(reason) if no_output: return total_score, reasons
[ "def", "calculate_health", "(", "package_name", ",", "package_version", "=", "None", ",", "verbose", "=", "False", ",", "no_output", "=", "False", ")", ":", "total_score", "=", "0", "reasons", "=", "[", "]", "package_releases", "=", "CLIENT", ".", "package_releases", "(", "package_name", ")", "if", "not", "package_releases", ":", "if", "not", "no_output", ":", "print", "(", "TERMINAL", ".", "red", "(", "'{} is not listed on pypi'", ".", "format", "(", "package_name", ")", ")", ")", "return", "0", ",", "[", "]", "if", "package_version", "is", "None", ":", "package_version", "=", "package_releases", "[", "0", "]", "package_info", "=", "CLIENT", ".", "release_data", "(", "package_name", ",", "package_version", ")", "release_urls", "=", "CLIENT", ".", "release_urls", "(", "package_name", ",", "package_version", ")", "if", "not", "package_info", "or", "not", "release_urls", ":", "if", "not", "no_output", ":", "print", "(", "TERMINAL", ".", "red", "(", "'Version {} is not listed on pypi'", ".", "format", "(", "package_version", ")", ")", ")", "return", "0", ",", "[", "]", "if", "not", "no_output", ":", "print", "(", "TERMINAL", ".", "bold", "(", "'{} v{}'", ".", "format", "(", "package_name", ",", "package_version", ")", ")", ")", "print", "(", "'-----'", ")", "checkers", "=", "[", "checks", ".", "check_license", ",", "checks", ".", "check_homepage", ",", "checks", ".", "check_summary", ",", "checks", ".", "check_description", ",", "checks", ".", "check_python_classifiers", ",", "checks", ".", "check_author_info", ",", "checks", ".", "check_release_files", ",", "checks", ".", "check_stale", "]", "for", "checker", "in", "checkers", ":", "result", ",", "reason", ",", "score", "=", "checker", "(", "package_info", ",", "release_urls", ")", "if", "result", ":", "total_score", "+=", "score", "else", ":", "reasons", ".", "append", "(", "reason", ")", "if", "total_score", "<", "0", ":", "total_score", "=", "0", "if", "not", "no_output", ":", "percentage", "=", "int", "(", "float", "(", "total_score", ")", "/", "float", "(", "checks", ".", "TOTAL_POSSIBLE", ")", "*", "100", ")", "score_string", "=", "'score: {}/{} {}%'", ".", "format", "(", "total_score", ",", "checks", ".", "TOTAL_POSSIBLE", ",", "percentage", ")", "print", "(", "get_health_color", "(", "percentage", ")", "(", "score_string", ")", ")", "if", "verbose", "and", "not", "no_output", ":", "for", "reason", "in", "reasons", ":", "print", "(", "reason", ")", "if", "no_output", ":", "return", "total_score", ",", "reasons" ]
Calculates the health of a package, based on several factors :param package_name: name of package on pypi.python.org :param package_version: version number of package to check, optional - defaults to latest version :param verbose: flag to print out reasons :param no_output: print no output :param lint: run pylint on the package :returns: (score: integer, reasons: list of reasons for score) :rtype: tuple
[ "Calculates", "the", "health", "of", "a", "package", "based", "on", "several", "factors" ]
b59016c3f578ca45b6ce857a2d5c4584b8542288
https://github.com/dustinmm80/healthy/blob/b59016c3f578ca45b6ce857a2d5c4584b8542288/healthy.py#L34-L104
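A usage sketch for calculate_health() above, assuming from healthy import calculate_health works as the path suggests; the package name queried is arbitrary. As written, the function returns the (score, reasons) tuple only when no_output=True; with output enabled it prints the report and falls off the end, returning None.

from healthy import calculate_health

score, reasons = calculate_health('requests', no_output=True)
print(score, reasons)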
249,024
Wessie/hurler
hurler/filters.py
filter
def filter(filter_creator): """ Creates a decorator that can be used as a filter. .. warning:: This is currently not compatible with most other decorators, if you are using a decorator that isn't part of `hurler` you should take caution. """ filter_func = [None] def function_getter(function): if isinstance(function, Filter): function.add_filter(filter) return function else: return Filter( filter=filter_func[0], callback=function, ) def filter_decorator(*args, **kwargs): filter_function = filter_creator(*args, **kwargs) filter_func[0] = filter_function return function_getter return filter_decorator
python
def filter(filter_creator): """ Creates a decorator that can be used as a filter. .. warning:: This is currently not compatible with most other decorators, if you are using a decorator that isn't part of `hurler` you should take caution. """ filter_func = [None] def function_getter(function): if isinstance(function, Filter): function.add_filter(filter) return function else: return Filter( filter=filter_func[0], callback=function, ) def filter_decorator(*args, **kwargs): filter_function = filter_creator(*args, **kwargs) filter_func[0] = filter_function return function_getter return filter_decorator
[ "def", "filter", "(", "filter_creator", ")", ":", "filter_func", "=", "[", "None", "]", "def", "function_getter", "(", "function", ")", ":", "if", "isinstance", "(", "function", ",", "Filter", ")", ":", "function", ".", "add_filter", "(", "filter", ")", "return", "function", "else", ":", "return", "Filter", "(", "filter", "=", "filter_func", "[", "0", "]", ",", "callback", "=", "function", ",", ")", "def", "filter_decorator", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "filter_function", "=", "filter_creator", "(", "*", "args", ",", "*", "*", "kwargs", ")", "filter_func", "[", "0", "]", "=", "filter_function", "return", "function_getter", "return", "filter_decorator" ]
Creates a decorator that can be used as a filter. .. warning:: This is currently not compatible with most other decorators, if you are using a decorator that isn't part of `hurler` you should take caution.
[ "Creates", "a", "decorator", "that", "can", "be", "used", "as", "a", "filter", "." ]
5719000237e24df9f24fb8229f1153ebfa684972
https://github.com/Wessie/hurler/blob/5719000237e24df9f24fb8229f1153ebfa684972/hurler/filters.py#L75-L104
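A hedged usage sketch for the filter() decorator factory above, assuming from hurler.filters import filter resolves as the path suggests. The predicate and callback below are made up: filter wraps a creator function that returns a predicate, and applying the resulting decorator wraps the callback in a Filter.

from hurler.filters import filter

@filter
def min_length(n):
    def predicate(message):
        # Accept only messages of at least n characters.
        return len(message) >= n
    return predicate

@min_length(5)
def on_message(message):
    print("accepted:", message)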
249,025
ONSdigital/sdx-common
sdx/common/log_levels.py
set_level
def set_level(logger=None, log_level=None): '''Set logging levels using logger names. :param logger: Name of the logger :type logger: String :param log_level: A string or integer corresponding to a Python logging level :type log_level: String :rtype: None ''' log_level = logging.getLevelName(os.getenv('VERBOSITY', 'WARNING')) logging.getLogger(logger).setLevel(log_level)
python
def set_level(logger=None, log_level=None): '''Set logging levels using logger names. :param logger: Name of the logger :type logger: String :param log_level: A string or integer corresponding to a Python logging level :type log_level: String :rtype: None ''' log_level = logging.getLevelName(os.getenv('VERBOSITY', 'WARNING')) logging.getLogger(logger).setLevel(log_level)
[ "def", "set_level", "(", "logger", "=", "None", ",", "log_level", "=", "None", ")", ":", "log_level", "=", "logging", ".", "getLevelName", "(", "os", ".", "getenv", "(", "'VERBOSITY'", ",", "'WARNING'", ")", ")", "logging", ".", "getLogger", "(", "logger", ")", ".", "setLevel", "(", "log_level", ")" ]
Set logging levels using logger names. :param logger: Name of the logger :type logger: String :param log_level: A string or integer corresponding to a Python logging level :type log_level: String :rtype: None
[ "Set", "logging", "levels", "using", "logger", "names", "." ]
815f6a116d41fddae182943d821dc5f582a9af69
https://github.com/ONSdigital/sdx-common/blob/815f6a116d41fddae182943d821dc5f582a9af69/sdx/common/log_levels.py#L5-L18
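A usage sketch for set_level() above, assuming from sdx.common.log_levels import set_level matches the path in this row; the logger name is arbitrary. Note that the body overwrites its log_level argument with the VERBOSITY environment variable, so the environment is what actually controls the level.

import os
from sdx.common.log_levels import set_level

os.environ['VERBOSITY'] = 'DEBUG'
set_level(logger='sdx.common')   # the 'sdx.common' logger is now at DEBUG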
249,026
abe-winter/pg13-py
pg13/sqparse2.py
bin_priority
def bin_priority(op,left,right): "I don't know how to handle order of operations in the LR grammar, so here it is" # note: recursion limits protect this from infinite looping. I'm serious. (i.e. it will crash rather than hanging) if isinstance(left,BinX) and left.op < op: return bin_priority(left.op,left.left,bin_priority(op,left.right,right)) elif isinstance(left,UnX) and left.op < op: return un_priority(left.op,BinX(op,left.val,right)) # note: obviously, no need to do this when right is a UnX elif isinstance(right,BinX) and right.op < op: return bin_priority(right.op,bin_priority(op,left,right.left),right.right) else: return BinX(op,left,right)
python
def bin_priority(op,left,right): "I don't know how to handle order of operations in the LR grammar, so here it is" # note: recursion limits protect this from infinite looping. I'm serious. (i.e. it will crash rather than hanging) if isinstance(left,BinX) and left.op < op: return bin_priority(left.op,left.left,bin_priority(op,left.right,right)) elif isinstance(left,UnX) and left.op < op: return un_priority(left.op,BinX(op,left.val,right)) # note: obviously, no need to do this when right is a UnX elif isinstance(right,BinX) and right.op < op: return bin_priority(right.op,bin_priority(op,left,right.left),right.right) else: return BinX(op,left,right)
[ "def", "bin_priority", "(", "op", ",", "left", ",", "right", ")", ":", "# note: recursion limits protect this from infinite looping. I'm serious. (i.e. it will crash rather than hanging)", "if", "isinstance", "(", "left", ",", "BinX", ")", "and", "left", ".", "op", "<", "op", ":", "return", "bin_priority", "(", "left", ".", "op", ",", "left", ".", "left", ",", "bin_priority", "(", "op", ",", "left", ".", "right", ",", "right", ")", ")", "elif", "isinstance", "(", "left", ",", "UnX", ")", "and", "left", ".", "op", "<", "op", ":", "return", "un_priority", "(", "left", ".", "op", ",", "BinX", "(", "op", ",", "left", ".", "val", ",", "right", ")", ")", "# note: obviously, no need to do this when right is a UnX", "elif", "isinstance", "(", "right", ",", "BinX", ")", "and", "right", ".", "op", "<", "op", ":", "return", "bin_priority", "(", "right", ".", "op", ",", "bin_priority", "(", "op", ",", "left", ",", "right", ".", "left", ")", ",", "right", ".", "right", ")", "else", ":", "return", "BinX", "(", "op", ",", "left", ",", "right", ")" ]
I don't know how to handle order of operations in the LR grammar, so here it is
[ "I", "don", "t", "know", "how", "to", "handle", "order", "of", "operations", "in", "the", "LR", "grammar", "so", "here", "it", "is" ]
c78806f99f35541a8756987e86edca3438aa97f5
https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/sqparse2.py#L116-L122
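bin_priority in the record above repairs operator precedence after the LR parser has reduced an expression. The stand-alone sketch below is not pg13's real BinX/UnX machinery (which compares operators directly with <); it uses an explicit precedence table to illustrate the left-child rotation the helper performs.

from collections import namedtuple

Bin = namedtuple('Bin', 'op left right')
PREC = {'*': 2, '+': 1}                 # higher number binds tighter

def fix(op, left, right):
    # if the left child carries a looser operator, rotate it above the new node
    if isinstance(left, Bin) and PREC[left.op] < PREC[op]:
        return fix(left.op, left.left, fix(op, left.right, right))
    return Bin(op, left, right)

# "a + b * c" reduced left-to-right as ('*', ('+', a, b), c) is rewritten to ('+', a, ('*', b, c))
print(fix('*', Bin('+', 'a', 'b'), 'c'))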
249,027
abe-winter/pg13-py
pg13/sqparse2.py
un_priority
def un_priority(op,val): "unary expression order-of-operations helper" if isinstance(val,BinX) and val.op < op: return bin_priority(val.op,UnX(op,val.left),val.right) else: return UnX(op,val)
python
def un_priority(op,val): "unary expression order-of-operations helper" if isinstance(val,BinX) and val.op < op: return bin_priority(val.op,UnX(op,val.left),val.right) else: return UnX(op,val)
[ "def", "un_priority", "(", "op", ",", "val", ")", ":", "if", "isinstance", "(", "val", ",", "BinX", ")", "and", "val", ".", "op", "<", "op", ":", "return", "bin_priority", "(", "val", ".", "op", ",", "UnX", "(", "op", ",", "val", ".", "left", ")", ",", "val", ".", "right", ")", "else", ":", "return", "UnX", "(", "op", ",", "val", ")" ]
unary expression order-of-operations helper
[ "unary", "expression", "order", "-", "of", "-", "operations", "helper" ]
c78806f99f35541a8756987e86edca3438aa97f5
https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/sqparse2.py#L123-L126
249,028
abe-winter/pg13-py
pg13/sqparse2.py
lex
def lex(string): "this is only used by tests" safe_lexer = LEXER.clone() # reentrant? I can't tell, I hate implicit globals. do a threading test safe_lexer.input(string) a = [] while 1: t = safe_lexer.token() if t: a.append(t) else: break return a
python
def lex(string): "this is only used by tests" safe_lexer = LEXER.clone() # reentrant? I can't tell, I hate implicit globals. do a threading test safe_lexer.input(string) a = [] while 1: t = safe_lexer.token() if t: a.append(t) else: break return a
[ "def", "lex", "(", "string", ")", ":", "safe_lexer", "=", "LEXER", ".", "clone", "(", ")", "# reentrant? I can't tell, I hate implicit globals. do a threading test", "safe_lexer", ".", "input", "(", "string", ")", "a", "=", "[", "]", "while", "1", ":", "t", "=", "safe_lexer", ".", "token", "(", ")", "if", "t", ":", "a", ".", "append", "(", "t", ")", "else", ":", "break", "return", "a" ]
this is only used by tests
[ "this", "is", "only", "used", "by", "tests" ]
c78806f99f35541a8756987e86edca3438aa97f5
https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/sqparse2.py#L352-L361
249,029
abe-winter/pg13-py
pg13/sqparse2.py
parse
def parse(string): "return a BaseX tree for the string" print string if string.strip().lower().startswith('create index'): return IndexX(string) return YACC.parse(string, lexer=LEXER.clone())
python
def parse(string): "return a BaseX tree for the string" print string if string.strip().lower().startswith('create index'): return IndexX(string) return YACC.parse(string, lexer=LEXER.clone())
[ "def", "parse", "(", "string", ")", ":", "print", "string", "if", "string", ".", "strip", "(", ")", ".", "lower", "(", ")", ".", "startswith", "(", "'create index'", ")", ":", "return", "IndexX", "(", "string", ")", "return", "YACC", ".", "parse", "(", "string", ",", "lexer", "=", "LEXER", ".", "clone", "(", ")", ")" ]
return a BaseX tree for the string
[ "return", "a", "BaseX", "tree", "for", "the", "string" ]
c78806f99f35541a8756987e86edca3438aa97f5
https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/sqparse2.py#L364-L368
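A usage sketch for pg13.sqparse2.parse, assuming the module is importable as the record's path suggests. Note that the function still contains a bare Python 2 print statement (print string), so it only runs under Python 2 as written, and that create index statements bypass the YACC grammar and come back wrapped in an IndexX node.

from pg13 import sqparse2

tree = sqparse2.parse("select a, b from t where a = 1")    # BaseX expression tree
index = sqparse2.parse("create index idx on t (a)")        # IndexX wrapper, grammar bypassed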
249,030
PSU-OIT-ARC/elasticmodels
elasticmodels/indexes.py
IndexRegistry.register
def register(self, model, index): """Register the model with the registry""" self.model_to_indexes[model].add(index) if not self.connected: connections.index_name = {} from django.conf import settings kwargs = {} for name, params in settings.ELASTICSEARCH_CONNECTIONS.items(): params = copy.deepcopy(params) kwargs[name] = params connections.index_name[name] = params.pop("index_name") connections.configure(**kwargs) self.connected = True
python
def register(self, model, index): """Register the model with the registry""" self.model_to_indexes[model].add(index) if not self.connected: connections.index_name = {} from django.conf import settings kwargs = {} for name, params in settings.ELASTICSEARCH_CONNECTIONS.items(): params = copy.deepcopy(params) kwargs[name] = params connections.index_name[name] = params.pop("index_name") connections.configure(**kwargs) self.connected = True
[ "def", "register", "(", "self", ",", "model", ",", "index", ")", ":", "self", ".", "model_to_indexes", "[", "model", "]", ".", "add", "(", "index", ")", "if", "not", "self", ".", "connected", ":", "connections", ".", "index_name", "=", "{", "}", "from", "django", ".", "conf", "import", "settings", "kwargs", "=", "{", "}", "for", "name", ",", "params", "in", "settings", ".", "ELASTICSEARCH_CONNECTIONS", ".", "items", "(", ")", ":", "params", "=", "copy", ".", "deepcopy", "(", "params", ")", "kwargs", "[", "name", "]", "=", "params", "connections", ".", "index_name", "[", "name", "]", "=", "params", ".", "pop", "(", "\"index_name\"", ")", "connections", ".", "configure", "(", "*", "*", "kwargs", ")", "self", ".", "connected", "=", "True" ]
Register the model with the registry
[ "Register", "the", "model", "with", "the", "registry" ]
67870508096f66123ef10b89789bbac06571cc80
https://github.com/PSU-OIT-ARC/elasticmodels/blob/67870508096f66123ef10b89789bbac06571cc80/elasticmodels/indexes.py#L42-L54
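A hedged sketch of the Django settings shape that register() in the record above expects on first use: every entry in ELASTICSEARCH_CONNECTIONS must carry an index_name key, which is popped off before the remaining parameters are handed to connections.configure. The host value is purely illustrative.

# settings.py (illustrative values)
ELASTICSEARCH_CONNECTIONS = {
    "default": {
        "hosts": ["localhost:9200"],   # left in place and passed to connections.configure()
        "index_name": "my_project",    # popped and remembered on connections.index_name
    },
}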
249,031
ardydedase/pycouchbase
pycouchbase/connection.py
Connection.bucket
def bucket(cls, bucket_name, connection=None): """Gives the bucket from couchbase server. :param bucket_name: Bucket name to fetch. :type bucket_name: str :returns: couchbase driver's Bucket object. :rtype: :class:`couchbase.client.Bucket` :raises: :exc:`RuntimeError` If the credentials wasn't set. """ connection = cls.connection if connection == None else connection if bucket_name not in cls._buckets: connection = "{connection}/{bucket_name}".format(connection=connection, bucket_name=bucket_name) if cls.password: cls._buckets[connection] = Bucket(connection, password=cls.password) else: cls._buckets[connection] = Bucket(connection) return cls._buckets[connection]
python
def bucket(cls, bucket_name, connection=None): """Gives the bucket from couchbase server. :param bucket_name: Bucket name to fetch. :type bucket_name: str :returns: couchbase driver's Bucket object. :rtype: :class:`couchbase.client.Bucket` :raises: :exc:`RuntimeError` If the credentials wasn't set. """ connection = cls.connection if connection == None else connection if bucket_name not in cls._buckets: connection = "{connection}/{bucket_name}".format(connection=connection, bucket_name=bucket_name) if cls.password: cls._buckets[connection] = Bucket(connection, password=cls.password) else: cls._buckets[connection] = Bucket(connection) return cls._buckets[connection]
[ "def", "bucket", "(", "cls", ",", "bucket_name", ",", "connection", "=", "None", ")", ":", "connection", "=", "cls", ".", "connection", "if", "connection", "==", "None", "else", "connection", "if", "bucket_name", "not", "in", "cls", ".", "_buckets", ":", "connection", "=", "\"{connection}/{bucket_name}\"", ".", "format", "(", "connection", "=", "connection", ",", "bucket_name", "=", "bucket_name", ")", "if", "cls", ".", "password", ":", "cls", ".", "_buckets", "[", "connection", "]", "=", "Bucket", "(", "connection", ",", "password", "=", "cls", ".", "password", ")", "else", ":", "cls", ".", "_buckets", "[", "connection", "]", "=", "Bucket", "(", "connection", ")", "return", "cls", ".", "_buckets", "[", "connection", "]" ]
Gives the bucket from couchbase server. :param bucket_name: Bucket name to fetch. :type bucket_name: str :returns: couchbase driver's Bucket object. :rtype: :class:`couchbase.client.Bucket` :raises: :exc:`RuntimeError` If the credentials wasn't set.
[ "Gives", "the", "bucket", "from", "couchbase", "server", "." ]
6f010b4d2ef41aead2366878d0cf0b1284c0db0e
https://github.com/ardydedase/pycouchbase/blob/6f010b4d2ef41aead2366878d0cf0b1284c0db0e/pycouchbase/connection.py#L23-L43
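A usage sketch for Connection.bucket from the record above, assuming cls.connection and cls.password have been configured elsewhere; the connection string is illustrative. Note that the cache test checks bucket_name against cls._buckets while entries are stored under the full connection string, so the membership test as written is unlikely to ever hit.

Connection.connection = "couchbase://localhost"   # illustrative; exact format depends on the couchbase client
Connection.password = None

beers = Connection.bucket("beer-sample")   # builds Bucket("couchbase://localhost/beer-sample")
again = Connection.bucket("beer-sample")   # intended to come from the per-connection cache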
249,032
ulf1/oxyba
oxyba/linreg_ols_qr.py
linreg_ols_qr
def linreg_ols_qr(y, X): """Linear Regression, OLS, inverse by QR Factoring""" import numpy as np try: # multiply with inverse to compute coefficients q, r = np.linalg.qr(np.dot(X.T, X)) return np.dot(np.dot(np.linalg.inv(r), q.T), np.dot(X.T, y)) except np.linalg.LinAlgError: print("LinAlgError: Factoring failed") return None
python
def linreg_ols_qr(y, X): """Linear Regression, OLS, inverse by QR Factoring""" import numpy as np try: # multiply with inverse to compute coefficients q, r = np.linalg.qr(np.dot(X.T, X)) return np.dot(np.dot(np.linalg.inv(r), q.T), np.dot(X.T, y)) except np.linalg.LinAlgError: print("LinAlgError: Factoring failed") return None
[ "def", "linreg_ols_qr", "(", "y", ",", "X", ")", ":", "import", "numpy", "as", "np", "try", ":", "# multiply with inverse to compute coefficients", "q", ",", "r", "=", "np", ".", "linalg", ".", "qr", "(", "np", ".", "dot", "(", "X", ".", "T", ",", "X", ")", ")", "return", "np", ".", "dot", "(", "np", ".", "dot", "(", "np", ".", "linalg", ".", "inv", "(", "r", ")", ",", "q", ".", "T", ")", ",", "np", ".", "dot", "(", "X", ".", "T", ",", "y", ")", ")", "except", "np", ".", "linalg", ".", "LinAlgError", ":", "print", "(", "\"LinAlgError: Factoring failed\"", ")", "return", "None" ]
Linear Regression, OLS, inverse by QR Factoring
[ "Linear", "Regression", "OLS", "inverse", "by", "QR", "Factoring" ]
b3043116050de275124365cb11e7df91fb40169d
https://github.com/ulf1/oxyba/blob/b3043116050de275124365cb11e7df91fb40169d/oxyba/linreg_ols_qr.py#L2-L10
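linreg_ols_qr above solves the normal equations X'X b = X'y by QR-factoring X'X: with X'X = QR and Q orthogonal, b = R^{-1} Q^T X'y. A small usage sketch on synthetic data, assuming the function is defined as in the record:

import numpy as np

rng = np.random.default_rng(0)
X = np.column_stack([np.ones(200), rng.normal(size=(200, 2))])   # design matrix with intercept
beta = np.array([1.0, 2.0, -0.5])
y = X @ beta + 0.01 * rng.normal(size=200)

beta_hat = linreg_ols_qr(y, X)
print(np.round(beta_hat, 2))    # approximately [ 1.   2.  -0.5]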
249,033
inspirehep/plotextractor
plotextractor/output_utils.py
find_open_and_close_braces
def find_open_and_close_braces(line_index, start, brace, lines): """ Take the line where we want to start and the index where we want to start and find the first instance of matched open and close braces of the same type as brace in file file. :param: line (int): the index of the line we want to start searching at :param: start (int): the index in the line we want to start searching at :param: brace (string): one of the type of brace we are looking for ({, }, [, or ]) :param lines ([string, string, ...]): the array of lines in the file we are looking in. :return: (start, start_line, end, end_line): (int, int, int): the index of the start and end of whatever braces we are looking for, and the line number that the end is on (since it may be different than the line we started on) """ if brace in ['[', ']']: open_brace = '[' close_brace = ']' elif brace in ['{', '}']: open_brace = '{' close_brace = '}' elif brace in ['(', ')']: open_brace = '(' close_brace = ')' else: # unacceptable brace type! return (-1, -1, -1, -1) open_braces = [] line = lines[line_index] ret_open_index = line.find(open_brace, start) line_index_cpy = line_index # sometimes people don't put the braces on the same line # as the tag while ret_open_index == -1: line_index = line_index + 1 if line_index >= len(lines): # failed to find open braces... return (0, line_index_cpy, 0, line_index_cpy) line = lines[line_index] ret_open_index = line.find(open_brace) open_braces.append(open_brace) ret_open_line = line_index open_index = ret_open_index close_index = ret_open_index while len(open_braces) > 0: if open_index == -1 and close_index == -1: # we hit the end of the line! oh, noez! line_index = line_index + 1 if line_index >= len(lines): # hanging braces! return (ret_open_index, ret_open_line, ret_open_index, ret_open_line) line = lines[line_index] # to not skip things that are at the beginning of the line close_index = line.find(close_brace) open_index = line.find(open_brace) else: if close_index != -1: close_index = line.find(close_brace, close_index + 1) if open_index != -1: open_index = line.find(open_brace, open_index + 1) if close_index != -1: open_braces.pop() if len(open_braces) == 0 and \ (open_index > close_index or open_index == -1): break if open_index != -1: open_braces.append(open_brace) ret_close_index = close_index return (ret_open_index, ret_open_line, ret_close_index, line_index)
python
def find_open_and_close_braces(line_index, start, brace, lines): """ Take the line where we want to start and the index where we want to start and find the first instance of matched open and close braces of the same type as brace in file file. :param: line (int): the index of the line we want to start searching at :param: start (int): the index in the line we want to start searching at :param: brace (string): one of the type of brace we are looking for ({, }, [, or ]) :param lines ([string, string, ...]): the array of lines in the file we are looking in. :return: (start, start_line, end, end_line): (int, int, int): the index of the start and end of whatever braces we are looking for, and the line number that the end is on (since it may be different than the line we started on) """ if brace in ['[', ']']: open_brace = '[' close_brace = ']' elif brace in ['{', '}']: open_brace = '{' close_brace = '}' elif brace in ['(', ')']: open_brace = '(' close_brace = ')' else: # unacceptable brace type! return (-1, -1, -1, -1) open_braces = [] line = lines[line_index] ret_open_index = line.find(open_brace, start) line_index_cpy = line_index # sometimes people don't put the braces on the same line # as the tag while ret_open_index == -1: line_index = line_index + 1 if line_index >= len(lines): # failed to find open braces... return (0, line_index_cpy, 0, line_index_cpy) line = lines[line_index] ret_open_index = line.find(open_brace) open_braces.append(open_brace) ret_open_line = line_index open_index = ret_open_index close_index = ret_open_index while len(open_braces) > 0: if open_index == -1 and close_index == -1: # we hit the end of the line! oh, noez! line_index = line_index + 1 if line_index >= len(lines): # hanging braces! return (ret_open_index, ret_open_line, ret_open_index, ret_open_line) line = lines[line_index] # to not skip things that are at the beginning of the line close_index = line.find(close_brace) open_index = line.find(open_brace) else: if close_index != -1: close_index = line.find(close_brace, close_index + 1) if open_index != -1: open_index = line.find(open_brace, open_index + 1) if close_index != -1: open_braces.pop() if len(open_braces) == 0 and \ (open_index > close_index or open_index == -1): break if open_index != -1: open_braces.append(open_brace) ret_close_index = close_index return (ret_open_index, ret_open_line, ret_close_index, line_index)
[ "def", "find_open_and_close_braces", "(", "line_index", ",", "start", ",", "brace", ",", "lines", ")", ":", "if", "brace", "in", "[", "'['", ",", "']'", "]", ":", "open_brace", "=", "'['", "close_brace", "=", "']'", "elif", "brace", "in", "[", "'{'", ",", "'}'", "]", ":", "open_brace", "=", "'{'", "close_brace", "=", "'}'", "elif", "brace", "in", "[", "'('", ",", "')'", "]", ":", "open_brace", "=", "'('", "close_brace", "=", "')'", "else", ":", "# unacceptable brace type!", "return", "(", "-", "1", ",", "-", "1", ",", "-", "1", ",", "-", "1", ")", "open_braces", "=", "[", "]", "line", "=", "lines", "[", "line_index", "]", "ret_open_index", "=", "line", ".", "find", "(", "open_brace", ",", "start", ")", "line_index_cpy", "=", "line_index", "# sometimes people don't put the braces on the same line", "# as the tag", "while", "ret_open_index", "==", "-", "1", ":", "line_index", "=", "line_index", "+", "1", "if", "line_index", ">=", "len", "(", "lines", ")", ":", "# failed to find open braces...", "return", "(", "0", ",", "line_index_cpy", ",", "0", ",", "line_index_cpy", ")", "line", "=", "lines", "[", "line_index", "]", "ret_open_index", "=", "line", ".", "find", "(", "open_brace", ")", "open_braces", ".", "append", "(", "open_brace", ")", "ret_open_line", "=", "line_index", "open_index", "=", "ret_open_index", "close_index", "=", "ret_open_index", "while", "len", "(", "open_braces", ")", ">", "0", ":", "if", "open_index", "==", "-", "1", "and", "close_index", "==", "-", "1", ":", "# we hit the end of the line! oh, noez!", "line_index", "=", "line_index", "+", "1", "if", "line_index", ">=", "len", "(", "lines", ")", ":", "# hanging braces!", "return", "(", "ret_open_index", ",", "ret_open_line", ",", "ret_open_index", ",", "ret_open_line", ")", "line", "=", "lines", "[", "line_index", "]", "# to not skip things that are at the beginning of the line", "close_index", "=", "line", ".", "find", "(", "close_brace", ")", "open_index", "=", "line", ".", "find", "(", "open_brace", ")", "else", ":", "if", "close_index", "!=", "-", "1", ":", "close_index", "=", "line", ".", "find", "(", "close_brace", ",", "close_index", "+", "1", ")", "if", "open_index", "!=", "-", "1", ":", "open_index", "=", "line", ".", "find", "(", "open_brace", ",", "open_index", "+", "1", ")", "if", "close_index", "!=", "-", "1", ":", "open_braces", ".", "pop", "(", ")", "if", "len", "(", "open_braces", ")", "==", "0", "and", "(", "open_index", ">", "close_index", "or", "open_index", "==", "-", "1", ")", ":", "break", "if", "open_index", "!=", "-", "1", ":", "open_braces", ".", "append", "(", "open_brace", ")", "ret_close_index", "=", "close_index", "return", "(", "ret_open_index", ",", "ret_open_line", ",", "ret_close_index", ",", "line_index", ")" ]
Take the line index and the character index where we want to start searching and find the first instance of matched open and close braces of the same type as brace in the given lines. :param: line_index (int): the index of the line we want to start searching at :param: start (int): the index in the line we want to start searching at :param: brace (string): one of the types of brace we are looking for ({, }, [, or ]) :param: lines ([string, string, ...]): the array of lines in the file we are looking in. :return: (start, start_line, end, end_line) (int, int, int, int): the indexes of the start and end of whatever braces we are looking for, and the lines they fall on (the end may be on a different line than the one we started on)
[ "Take", "the", "line", "where", "we", "want", "to", "start", "and", "the", "index", "where", "we", "want", "to", "start", "and", "find", "the", "first", "instance", "of", "matched", "open", "and", "close", "braces", "of", "the", "same", "type", "as", "brace", "in", "file", "file", "." ]
12a65350fb9f32d496f9ea57908d9a2771b20474
https://github.com/inspirehep/plotextractor/blob/12a65350fb9f32d496f9ea57908d9a2771b20474/plotextractor/output_utils.py#L31-L116
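A small sketch of the brace matcher above on a caption whose braces span two lines; the expected indexes are worked out by hand from the implementation, not taken from the package's tests.

lines = ['\\caption{A long', 'caption here} and trailing text']
result = find_open_and_close_braces(0, 0, '{', lines)
# expected: (8, 0, 12, 1) -> '{' at column 8 of line 0, matching '}' at column 12 of line 1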
249,034
inspirehep/plotextractor
plotextractor/output_utils.py
assemble_caption
def assemble_caption(begin_line, begin_index, end_line, end_index, lines): """ Take the caption of a picture and put it all together in a nice way. If it spans multiple lines, put it on one line. If it contains controlled characters, strip them out. If it has tags we don't want to worry about, get rid of them, etc. :param: begin_line (int): the index of the line where the caption begins :param: begin_index (int): the index within the line where the caption begins :param: end_line (int): the index of the line where the caption ends :param: end_index (int): the index within the line where the caption ends :param: lines ([string, string, ...]): the line strings of the text :return: caption (string): the caption, formatted and pieced together """ # stuff we don't like label_head = '\\label{' # reassemble that sucker if end_line > begin_line: # our caption spanned multiple lines caption = lines[begin_line][begin_index:] for included_line_index in range(begin_line + 1, end_line): caption = caption + ' ' + lines[included_line_index] caption = caption + ' ' + lines[end_line][:end_index] caption = caption.replace('\n', ' ') caption = caption.replace(' ', ' ') else: # it fit on one line caption = lines[begin_line][begin_index:end_index] # clean out a label tag, if there is one label_begin = caption.find(label_head) if label_begin > -1: # we know that our caption is only one line, so if there's a label # tag in it, it will be all on one line. so we make up some args dummy_start, dummy_start_line, label_end, dummy_end = \ find_open_and_close_braces(0, label_begin, '{', [caption]) caption = caption[:label_begin] + caption[label_end + 1:] caption = caption.strip() if len(caption) > 1 and caption[0] == '{' and caption[-1] == '}': caption = caption[1:-1] return caption
python
def assemble_caption(begin_line, begin_index, end_line, end_index, lines): """ Take the caption of a picture and put it all together in a nice way. If it spans multiple lines, put it on one line. If it contains controlled characters, strip them out. If it has tags we don't want to worry about, get rid of them, etc. :param: begin_line (int): the index of the line where the caption begins :param: begin_index (int): the index within the line where the caption begins :param: end_line (int): the index of the line where the caption ends :param: end_index (int): the index within the line where the caption ends :param: lines ([string, string, ...]): the line strings of the text :return: caption (string): the caption, formatted and pieced together """ # stuff we don't like label_head = '\\label{' # reassemble that sucker if end_line > begin_line: # our caption spanned multiple lines caption = lines[begin_line][begin_index:] for included_line_index in range(begin_line + 1, end_line): caption = caption + ' ' + lines[included_line_index] caption = caption + ' ' + lines[end_line][:end_index] caption = caption.replace('\n', ' ') caption = caption.replace(' ', ' ') else: # it fit on one line caption = lines[begin_line][begin_index:end_index] # clean out a label tag, if there is one label_begin = caption.find(label_head) if label_begin > -1: # we know that our caption is only one line, so if there's a label # tag in it, it will be all on one line. so we make up some args dummy_start, dummy_start_line, label_end, dummy_end = \ find_open_and_close_braces(0, label_begin, '{', [caption]) caption = caption[:label_begin] + caption[label_end + 1:] caption = caption.strip() if len(caption) > 1 and caption[0] == '{' and caption[-1] == '}': caption = caption[1:-1] return caption
[ "def", "assemble_caption", "(", "begin_line", ",", "begin_index", ",", "end_line", ",", "end_index", ",", "lines", ")", ":", "# stuff we don't like", "label_head", "=", "'\\\\label{'", "# reassemble that sucker", "if", "end_line", ">", "begin_line", ":", "# our caption spanned multiple lines", "caption", "=", "lines", "[", "begin_line", "]", "[", "begin_index", ":", "]", "for", "included_line_index", "in", "range", "(", "begin_line", "+", "1", ",", "end_line", ")", ":", "caption", "=", "caption", "+", "' '", "+", "lines", "[", "included_line_index", "]", "caption", "=", "caption", "+", "' '", "+", "lines", "[", "end_line", "]", "[", ":", "end_index", "]", "caption", "=", "caption", ".", "replace", "(", "'\\n'", ",", "' '", ")", "caption", "=", "caption", ".", "replace", "(", "' '", ",", "' '", ")", "else", ":", "# it fit on one line", "caption", "=", "lines", "[", "begin_line", "]", "[", "begin_index", ":", "end_index", "]", "# clean out a label tag, if there is one", "label_begin", "=", "caption", ".", "find", "(", "label_head", ")", "if", "label_begin", ">", "-", "1", ":", "# we know that our caption is only one line, so if there's a label", "# tag in it, it will be all on one line. so we make up some args", "dummy_start", ",", "dummy_start_line", ",", "label_end", ",", "dummy_end", "=", "find_open_and_close_braces", "(", "0", ",", "label_begin", ",", "'{'", ",", "[", "caption", "]", ")", "caption", "=", "caption", "[", ":", "label_begin", "]", "+", "caption", "[", "label_end", "+", "1", ":", "]", "caption", "=", "caption", ".", "strip", "(", ")", "if", "len", "(", "caption", ")", ">", "1", "and", "caption", "[", "0", "]", "==", "'{'", "and", "caption", "[", "-", "1", "]", "==", "'}'", ":", "caption", "=", "caption", "[", "1", ":", "-", "1", "]", "return", "caption" ]
Take the caption of a picture and put it all together in a nice way. If it spans multiple lines, put it on one line. If it contains controlled characters, strip them out. If it has tags we don't want to worry about, get rid of them, etc. :param: begin_line (int): the index of the line where the caption begins :param: begin_index (int): the index within the line where the caption begins :param: end_line (int): the index of the line where the caption ends :param: end_index (int): the index within the line where the caption ends :param: lines ([string, string, ...]): the line strings of the text :return: caption (string): the caption, formatted and pieced together
[ "Take", "the", "caption", "of", "a", "picture", "and", "put", "it", "all", "together", "in", "a", "nice", "way", ".", "If", "it", "spans", "multiple", "lines", "put", "it", "on", "one", "line", ".", "If", "it", "contains", "controlled", "characters", "strip", "them", "out", ".", "If", "it", "has", "tags", "we", "don", "t", "want", "to", "worry", "about", "get", "rid", "of", "them", "etc", "." ]
12a65350fb9f32d496f9ea57908d9a2771b20474
https://github.com/inspirehep/plotextractor/blob/12a65350fb9f32d496f9ea57908d9a2771b20474/plotextractor/output_utils.py#L119-L168
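A sketch of assemble_caption joining a two-line caption and stripping an embedded \label tag; the begin/end indexes point just inside the caption's outer braces and were chosen by hand for this illustrative input.

lines = ['\\caption{Energy spectrum', '\\label{fig:spec} of the sample}']
caption = assemble_caption(0, 9, 1, 30, lines)
# the two lines are joined with a space and the \label{fig:spec} tag is removed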
249,035
inspirehep/plotextractor
plotextractor/output_utils.py
prepare_image_data
def prepare_image_data(extracted_image_data, output_directory, image_mapping): """Prepare and clean image-data from duplicates and other garbage. :param: extracted_image_data ([(string, string, list, list) ...], ...])): the images and their captions + contexts, ordered :param: tex_file (string): the location of the TeX (used for finding the associated images; the TeX is assumed to be in the same directory as the converted images) :param: image_list ([string, string, ...]): a list of the converted image file names :return extracted_image_data ([(string, string, list, list) ...], ...])) again the list of image data cleaned for output """ img_list = {} for image, caption, label in extracted_image_data: if not image or image == 'ERROR': continue image_location = get_image_location( image, output_directory, image_mapping.keys() ) if not image_location or not os.path.exists(image_location) or \ len(image_location) < 3: continue image_location = os.path.normpath(image_location) if image_location in img_list: if caption not in img_list[image_location]['captions']: img_list[image_location]['captions'].append(caption) else: img_list[image_location] = dict( url=image_location, original_url=image_mapping[image_location], captions=[caption], label=label, name=get_name_from_path(image_location, output_directory) ) return img_list.values()
python
def prepare_image_data(extracted_image_data, output_directory, image_mapping): """Prepare and clean image-data from duplicates and other garbage. :param: extracted_image_data ([(string, string, list, list) ...], ...])): the images and their captions + contexts, ordered :param: tex_file (string): the location of the TeX (used for finding the associated images; the TeX is assumed to be in the same directory as the converted images) :param: image_list ([string, string, ...]): a list of the converted image file names :return extracted_image_data ([(string, string, list, list) ...], ...])) again the list of image data cleaned for output """ img_list = {} for image, caption, label in extracted_image_data: if not image or image == 'ERROR': continue image_location = get_image_location( image, output_directory, image_mapping.keys() ) if not image_location or not os.path.exists(image_location) or \ len(image_location) < 3: continue image_location = os.path.normpath(image_location) if image_location in img_list: if caption not in img_list[image_location]['captions']: img_list[image_location]['captions'].append(caption) else: img_list[image_location] = dict( url=image_location, original_url=image_mapping[image_location], captions=[caption], label=label, name=get_name_from_path(image_location, output_directory) ) return img_list.values()
[ "def", "prepare_image_data", "(", "extracted_image_data", ",", "output_directory", ",", "image_mapping", ")", ":", "img_list", "=", "{", "}", "for", "image", ",", "caption", ",", "label", "in", "extracted_image_data", ":", "if", "not", "image", "or", "image", "==", "'ERROR'", ":", "continue", "image_location", "=", "get_image_location", "(", "image", ",", "output_directory", ",", "image_mapping", ".", "keys", "(", ")", ")", "if", "not", "image_location", "or", "not", "os", ".", "path", ".", "exists", "(", "image_location", ")", "or", "len", "(", "image_location", ")", "<", "3", ":", "continue", "image_location", "=", "os", ".", "path", ".", "normpath", "(", "image_location", ")", "if", "image_location", "in", "img_list", ":", "if", "caption", "not", "in", "img_list", "[", "image_location", "]", "[", "'captions'", "]", ":", "img_list", "[", "image_location", "]", "[", "'captions'", "]", ".", "append", "(", "caption", ")", "else", ":", "img_list", "[", "image_location", "]", "=", "dict", "(", "url", "=", "image_location", ",", "original_url", "=", "image_mapping", "[", "image_location", "]", ",", "captions", "=", "[", "caption", "]", ",", "label", "=", "label", ",", "name", "=", "get_name_from_path", "(", "image_location", ",", "output_directory", ")", ")", "return", "img_list", ".", "values", "(", ")" ]
Prepare and clean image-data from duplicates and other garbage. :param: extracted_image_data ([(string, string, list, list) ...], ...])): the images and their captions + contexts, ordered :param: tex_file (string): the location of the TeX (used for finding the associated images; the TeX is assumed to be in the same directory as the converted images) :param: image_list ([string, string, ...]): a list of the converted image file names :return extracted_image_data ([(string, string, list, list) ...], ...])) again the list of image data cleaned for output
[ "Prepare", "and", "clean", "image", "-", "data", "from", "duplicates", "and", "other", "garbage", "." ]
12a65350fb9f32d496f9ea57908d9a2771b20474
https://github.com/inspirehep/plotextractor/blob/12a65350fb9f32d496f9ea57908d9a2771b20474/plotextractor/output_utils.py#L171-L211
249,036
inspirehep/plotextractor
plotextractor/output_utils.py
get_converted_image_name
def get_converted_image_name(image): """Return the name of the image after it has been converted to png format. Strips off the old extension. :param: image (string): The fullpath of the image before conversion :return: converted_image (string): the fullpath of the image after convert """ png_extension = '.png' if image[(0 - len(png_extension)):] == png_extension: # it already ends in png! we're golden return image img_dir = os.path.split(image)[0] image = os.path.split(image)[-1] # cut off the old extension if len(image.split('.')) > 1: old_extension = '.' + image.split('.')[-1] converted_image = image[:(0 - len(old_extension))] + png_extension else: # no extension... damn converted_image = image + png_extension return os.path.join(img_dir, converted_image)
python
def get_converted_image_name(image): """Return the name of the image after it has been converted to png format. Strips off the old extension. :param: image (string): The fullpath of the image before conversion :return: converted_image (string): the fullpath of the image after convert """ png_extension = '.png' if image[(0 - len(png_extension)):] == png_extension: # it already ends in png! we're golden return image img_dir = os.path.split(image)[0] image = os.path.split(image)[-1] # cut off the old extension if len(image.split('.')) > 1: old_extension = '.' + image.split('.')[-1] converted_image = image[:(0 - len(old_extension))] + png_extension else: # no extension... damn converted_image = image + png_extension return os.path.join(img_dir, converted_image)
[ "def", "get_converted_image_name", "(", "image", ")", ":", "png_extension", "=", "'.png'", "if", "image", "[", "(", "0", "-", "len", "(", "png_extension", ")", ")", ":", "]", "==", "png_extension", ":", "# it already ends in png! we're golden", "return", "image", "img_dir", "=", "os", ".", "path", ".", "split", "(", "image", ")", "[", "0", "]", "image", "=", "os", ".", "path", ".", "split", "(", "image", ")", "[", "-", "1", "]", "# cut off the old extension", "if", "len", "(", "image", ".", "split", "(", "'.'", ")", ")", ">", "1", ":", "old_extension", "=", "'.'", "+", "image", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "converted_image", "=", "image", "[", ":", "(", "0", "-", "len", "(", "old_extension", ")", ")", "]", "+", "png_extension", "else", ":", "# no extension... damn", "converted_image", "=", "image", "+", "png_extension", "return", "os", ".", "path", ".", "join", "(", "img_dir", ",", "converted_image", ")" ]
Return the name of the image after it has been converted to png format. Strips off the old extension. :param: image (string): The fullpath of the image before conversion :return: converted_image (string): the fullpath of the image after convert
[ "Return", "the", "name", "of", "the", "image", "after", "it", "has", "been", "converted", "to", "png", "format", "." ]
12a65350fb9f32d496f9ea57908d9a2771b20474
https://github.com/inspirehep/plotextractor/blob/12a65350fb9f32d496f9ea57908d9a2771b20474/plotextractor/output_utils.py#L318-L344
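Usage sketch for get_converted_image_name (paths are illustrative):

get_converted_image_name('/tmp/figs/plot.eps')   # -> '/tmp/figs/plot.png'
get_converted_image_name('/tmp/figs/plot.png')   # already png, returned unchanged
get_converted_image_name('/tmp/figs/plot')       # no extension -> '/tmp/figs/plot.png'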
249,037
inspirehep/plotextractor
plotextractor/output_utils.py
get_tex_location
def get_tex_location(new_tex_name, current_tex_name, recurred=False): """ Takes the name of a TeX file and attempts to match it to an actual file in the tarball. :param: new_tex_name (string): the name of the TeX file to find :param: current_tex_name (string): the location of the TeX file where we found the reference :return: tex_location (string): the location of the other TeX file on disk or None if it is not found """ tex_location = None current_dir = os.path.split(current_tex_name)[0] some_kind_of_tag = '\\\\\\w+ ' new_tex_name = new_tex_name.strip() if new_tex_name.startswith('input'): new_tex_name = new_tex_name[len('input'):] if re.match(some_kind_of_tag, new_tex_name): new_tex_name = new_tex_name[len(new_tex_name.split(' ')[0]) + 1:] if new_tex_name.startswith('./'): new_tex_name = new_tex_name[2:] if len(new_tex_name) == 0: return None new_tex_name = new_tex_name.strip() new_tex_file = os.path.split(new_tex_name)[-1] new_tex_folder = os.path.split(new_tex_name)[0] if new_tex_folder == new_tex_file: new_tex_folder = '' # could be in the current directory for any_file in os.listdir(current_dir): if any_file == new_tex_file: return os.path.join(current_dir, new_tex_file) # could be in a subfolder of the current directory if os.path.isdir(os.path.join(current_dir, new_tex_folder)): for any_file in os.listdir(os.path.join(current_dir, new_tex_folder)): if any_file == new_tex_file: return os.path.join(os.path.join(current_dir, new_tex_folder), new_tex_file) # could be in a subfolder of a higher directory one_dir_up = os.path.join(os.path.split(current_dir)[0], new_tex_folder) if os.path.isdir(one_dir_up): for any_file in os.listdir(one_dir_up): if any_file == new_tex_file: return os.path.join(one_dir_up, new_tex_file) two_dirs_up = os.path.join(os.path.split(os.path.split(current_dir)[0])[0], new_tex_folder) if os.path.isdir(two_dirs_up): for any_file in os.listdir(two_dirs_up): if any_file == new_tex_file: return os.path.join(two_dirs_up, new_tex_file) if tex_location is None and not recurred: return get_tex_location(new_tex_name + '.tex', current_tex_name, recurred=True) return tex_location
python
def get_tex_location(new_tex_name, current_tex_name, recurred=False): """ Takes the name of a TeX file and attempts to match it to an actual file in the tarball. :param: new_tex_name (string): the name of the TeX file to find :param: current_tex_name (string): the location of the TeX file where we found the reference :return: tex_location (string): the location of the other TeX file on disk or None if it is not found """ tex_location = None current_dir = os.path.split(current_tex_name)[0] some_kind_of_tag = '\\\\\\w+ ' new_tex_name = new_tex_name.strip() if new_tex_name.startswith('input'): new_tex_name = new_tex_name[len('input'):] if re.match(some_kind_of_tag, new_tex_name): new_tex_name = new_tex_name[len(new_tex_name.split(' ')[0]) + 1:] if new_tex_name.startswith('./'): new_tex_name = new_tex_name[2:] if len(new_tex_name) == 0: return None new_tex_name = new_tex_name.strip() new_tex_file = os.path.split(new_tex_name)[-1] new_tex_folder = os.path.split(new_tex_name)[0] if new_tex_folder == new_tex_file: new_tex_folder = '' # could be in the current directory for any_file in os.listdir(current_dir): if any_file == new_tex_file: return os.path.join(current_dir, new_tex_file) # could be in a subfolder of the current directory if os.path.isdir(os.path.join(current_dir, new_tex_folder)): for any_file in os.listdir(os.path.join(current_dir, new_tex_folder)): if any_file == new_tex_file: return os.path.join(os.path.join(current_dir, new_tex_folder), new_tex_file) # could be in a subfolder of a higher directory one_dir_up = os.path.join(os.path.split(current_dir)[0], new_tex_folder) if os.path.isdir(one_dir_up): for any_file in os.listdir(one_dir_up): if any_file == new_tex_file: return os.path.join(one_dir_up, new_tex_file) two_dirs_up = os.path.join(os.path.split(os.path.split(current_dir)[0])[0], new_tex_folder) if os.path.isdir(two_dirs_up): for any_file in os.listdir(two_dirs_up): if any_file == new_tex_file: return os.path.join(two_dirs_up, new_tex_file) if tex_location is None and not recurred: return get_tex_location(new_tex_name + '.tex', current_tex_name, recurred=True) return tex_location
[ "def", "get_tex_location", "(", "new_tex_name", ",", "current_tex_name", ",", "recurred", "=", "False", ")", ":", "tex_location", "=", "None", "current_dir", "=", "os", ".", "path", ".", "split", "(", "current_tex_name", ")", "[", "0", "]", "some_kind_of_tag", "=", "'\\\\\\\\\\\\w+ '", "new_tex_name", "=", "new_tex_name", ".", "strip", "(", ")", "if", "new_tex_name", ".", "startswith", "(", "'input'", ")", ":", "new_tex_name", "=", "new_tex_name", "[", "len", "(", "'input'", ")", ":", "]", "if", "re", ".", "match", "(", "some_kind_of_tag", ",", "new_tex_name", ")", ":", "new_tex_name", "=", "new_tex_name", "[", "len", "(", "new_tex_name", ".", "split", "(", "' '", ")", "[", "0", "]", ")", "+", "1", ":", "]", "if", "new_tex_name", ".", "startswith", "(", "'./'", ")", ":", "new_tex_name", "=", "new_tex_name", "[", "2", ":", "]", "if", "len", "(", "new_tex_name", ")", "==", "0", ":", "return", "None", "new_tex_name", "=", "new_tex_name", ".", "strip", "(", ")", "new_tex_file", "=", "os", ".", "path", ".", "split", "(", "new_tex_name", ")", "[", "-", "1", "]", "new_tex_folder", "=", "os", ".", "path", ".", "split", "(", "new_tex_name", ")", "[", "0", "]", "if", "new_tex_folder", "==", "new_tex_file", ":", "new_tex_folder", "=", "''", "# could be in the current directory", "for", "any_file", "in", "os", ".", "listdir", "(", "current_dir", ")", ":", "if", "any_file", "==", "new_tex_file", ":", "return", "os", ".", "path", ".", "join", "(", "current_dir", ",", "new_tex_file", ")", "# could be in a subfolder of the current directory", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "current_dir", ",", "new_tex_folder", ")", ")", ":", "for", "any_file", "in", "os", ".", "listdir", "(", "os", ".", "path", ".", "join", "(", "current_dir", ",", "new_tex_folder", ")", ")", ":", "if", "any_file", "==", "new_tex_file", ":", "return", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "join", "(", "current_dir", ",", "new_tex_folder", ")", ",", "new_tex_file", ")", "# could be in a subfolder of a higher directory", "one_dir_up", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "split", "(", "current_dir", ")", "[", "0", "]", ",", "new_tex_folder", ")", "if", "os", ".", "path", ".", "isdir", "(", "one_dir_up", ")", ":", "for", "any_file", "in", "os", ".", "listdir", "(", "one_dir_up", ")", ":", "if", "any_file", "==", "new_tex_file", ":", "return", "os", ".", "path", ".", "join", "(", "one_dir_up", ",", "new_tex_file", ")", "two_dirs_up", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "split", "(", "current_dir", ")", "[", "0", "]", ")", "[", "0", "]", ",", "new_tex_folder", ")", "if", "os", ".", "path", ".", "isdir", "(", "two_dirs_up", ")", ":", "for", "any_file", "in", "os", ".", "listdir", "(", "two_dirs_up", ")", ":", "if", "any_file", "==", "new_tex_file", ":", "return", "os", ".", "path", ".", "join", "(", "two_dirs_up", ",", "new_tex_file", ")", "if", "tex_location", "is", "None", "and", "not", "recurred", ":", "return", "get_tex_location", "(", "new_tex_name", "+", "'.tex'", ",", "current_tex_name", ",", "recurred", "=", "True", ")", "return", "tex_location" ]
Takes the name of a TeX file and attempts to match it to an actual file in the tarball. :param: new_tex_name (string): the name of the TeX file to find :param: current_tex_name (string): the location of the TeX file where we found the reference :return: tex_location (string): the location of the other TeX file on disk or None if it is not found
[ "Takes", "the", "name", "of", "a", "TeX", "file", "and", "attempts", "to", "match", "it", "to", "an", "actual", "file", "in", "the", "tarball", "." ]
12a65350fb9f32d496f9ea57908d9a2771b20474
https://github.com/inspirehep/plotextractor/blob/12a65350fb9f32d496f9ea57908d9a2771b20474/plotextractor/output_utils.py#L347-L412
249,038
inspirehep/plotextractor
plotextractor/output_utils.py
get_name_from_path
def get_name_from_path(full_path, root_path): """Create a filename by merging path after root directory.""" relative_image_path = os.path.relpath(full_path, root_path) return "_".join(relative_image_path.split('.')[:-1]).replace('/', '_')\ .replace(';', '').replace(':', '')
python
def get_name_from_path(full_path, root_path): """Create a filename by merging path after root directory.""" relative_image_path = os.path.relpath(full_path, root_path) return "_".join(relative_image_path.split('.')[:-1]).replace('/', '_')\ .replace(';', '').replace(':', '')
[ "def", "get_name_from_path", "(", "full_path", ",", "root_path", ")", ":", "relative_image_path", "=", "os", ".", "path", ".", "relpath", "(", "full_path", ",", "root_path", ")", "return", "\"_\"", ".", "join", "(", "relative_image_path", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", ".", "replace", "(", "'/'", ",", "'_'", ")", ".", "replace", "(", "';'", ",", "''", ")", ".", "replace", "(", "':'", ",", "''", ")" ]
Create a filename by merging path after root directory.
[ "Create", "a", "filename", "by", "merging", "path", "after", "root", "directory", "." ]
12a65350fb9f32d496f9ea57908d9a2771b20474
https://github.com/inspirehep/plotextractor/blob/12a65350fb9f32d496f9ea57908d9a2771b20474/plotextractor/output_utils.py#L415-L419
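Usage sketch for get_name_from_path: the path relative to the root directory is flattened into a single name, directory separators become underscores and the final extension is dropped (values illustrative):

get_name_from_path('/tmp/out/figs/fig1.v2.png', '/tmp/out')   # -> 'figs_fig1_v2'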
249,039
awsroadhouse/roadhouse
roadhouse/group.py
SecurityGroupsConfig.apply
def apply(self, vpc): """ returns a list of new security groups that will be added """ assert vpc is not None # make sure we're up to date self.reload_remote_groups() vpc_groups = self.vpc_groups(vpc) self._apply_groups(vpc) # reloads groups from AWS, the authority self.reload_remote_groups() vpc_groups = self.vpc_groups(vpc) groups = {k.name:k for k in vpc_groups} for x,y in self.config.items(): # process 1 security group at a time group = groups[x] if y.get('rules'): # apply all rule changes rules = [Rule.parse(rule) for rule in y.get('rules')] rules = list(itertools.chain(*rules)) rules = self.filter_existing_rules(rules, group) # need to use chain because multiple rules can be created for a single stanza for rule in rules: group_name = groups.get(rule.group_name, None) if group_name and rule.address: raise Exception("Can't auth an address and a group") logger.debug("Authorizing %s %s %s to address:%s name:%s", rule.protocol, rule.from_port, rule.to_port, rule.address, rule.group_name) group_to_authorize = groups.get(rule.group_name, None) try: group.authorize(rule.protocol, rule.from_port, rule.to_port, rule.address, group_to_authorize, None) except Exception as e: print "could not authorize group %s" % group_to_authorize raise # apply rules return self
python
def apply(self, vpc): """ returns a list of new security groups that will be added """ assert vpc is not None # make sure we're up to date self.reload_remote_groups() vpc_groups = self.vpc_groups(vpc) self._apply_groups(vpc) # reloads groups from AWS, the authority self.reload_remote_groups() vpc_groups = self.vpc_groups(vpc) groups = {k.name:k for k in vpc_groups} for x,y in self.config.items(): # process 1 security group at a time group = groups[x] if y.get('rules'): # apply all rule changes rules = [Rule.parse(rule) for rule in y.get('rules')] rules = list(itertools.chain(*rules)) rules = self.filter_existing_rules(rules, group) # need to use chain because multiple rules can be created for a single stanza for rule in rules: group_name = groups.get(rule.group_name, None) if group_name and rule.address: raise Exception("Can't auth an address and a group") logger.debug("Authorizing %s %s %s to address:%s name:%s", rule.protocol, rule.from_port, rule.to_port, rule.address, rule.group_name) group_to_authorize = groups.get(rule.group_name, None) try: group.authorize(rule.protocol, rule.from_port, rule.to_port, rule.address, group_to_authorize, None) except Exception as e: print "could not authorize group %s" % group_to_authorize raise # apply rules return self
[ "def", "apply", "(", "self", ",", "vpc", ")", ":", "assert", "vpc", "is", "not", "None", "# make sure we're up to date", "self", ".", "reload_remote_groups", "(", ")", "vpc_groups", "=", "self", ".", "vpc_groups", "(", "vpc", ")", "self", ".", "_apply_groups", "(", "vpc", ")", "# reloads groups from AWS, the authority", "self", ".", "reload_remote_groups", "(", ")", "vpc_groups", "=", "self", ".", "vpc_groups", "(", "vpc", ")", "groups", "=", "{", "k", ".", "name", ":", "k", "for", "k", "in", "vpc_groups", "}", "for", "x", ",", "y", "in", "self", ".", "config", ".", "items", "(", ")", ":", "# process 1 security group at a time", "group", "=", "groups", "[", "x", "]", "if", "y", ".", "get", "(", "'rules'", ")", ":", "# apply all rule changes", "rules", "=", "[", "Rule", ".", "parse", "(", "rule", ")", "for", "rule", "in", "y", ".", "get", "(", "'rules'", ")", "]", "rules", "=", "list", "(", "itertools", ".", "chain", "(", "*", "rules", ")", ")", "rules", "=", "self", ".", "filter_existing_rules", "(", "rules", ",", "group", ")", "# need to use chain because multiple rules can be created for a single stanza", "for", "rule", "in", "rules", ":", "group_name", "=", "groups", ".", "get", "(", "rule", ".", "group_name", ",", "None", ")", "if", "group_name", "and", "rule", ".", "address", ":", "raise", "Exception", "(", "\"Can't auth an address and a group\"", ")", "logger", ".", "debug", "(", "\"Authorizing %s %s %s to address:%s name:%s\"", ",", "rule", ".", "protocol", ",", "rule", ".", "from_port", ",", "rule", ".", "to_port", ",", "rule", ".", "address", ",", "rule", ".", "group_name", ")", "group_to_authorize", "=", "groups", ".", "get", "(", "rule", ".", "group_name", ",", "None", ")", "try", ":", "group", ".", "authorize", "(", "rule", ".", "protocol", ",", "rule", ".", "from_port", ",", "rule", ".", "to_port", ",", "rule", ".", "address", ",", "group_to_authorize", ",", "None", ")", "except", "Exception", "as", "e", ":", "print", "\"could not authorize group %s\"", "%", "group_to_authorize", "raise", "# apply rules", "return", "self" ]
Apply the configured security groups and rules to the given VPC and return the config object (self).
[ "returns", "a", "list", "of", "new", "security", "groups", "that", "will", "be", "added" ]
d7c2c316fc20a04b8cae3357996c0ce4f51d44ea
https://github.com/awsroadhouse/roadhouse/blob/d7c2c316fc20a04b8cae3357996c0ce4f51d44ea/roadhouse/group.py#L53-L103
249,040
fedora-infra/fmn.rules
fmn/rules/anitya.py
anitya_unmapped_new_update
def anitya_unmapped_new_update(config, message): """ New releases of upstream projects that have no mapping to Fedora Adding this rule will let through events when new upstream releases are detected, but only on upstream projects that have no mapping to Fedora Packages. This could be useful to you if you want to monitor release-monitoring.org itself and watch for projects that might need help adjusting their metadata. """ if not anitya_new_update(config, message): return False for package in message['msg']['message']['packages']: if package['distro'].lower() == 'fedora': return False # If none of the packages were listed as Fedora, then this is unmapped. return True
python
def anitya_unmapped_new_update(config, message): """ New releases of upstream projects that have no mapping to Fedora Adding this rule will let through events when new upstream releases are detected, but only on upstream projects that have no mapping to Fedora Packages. This could be useful to you if you want to monitor release-monitoring.org itself and watch for projects that might need help adjusting their metadata. """ if not anitya_new_update(config, message): return False for package in message['msg']['message']['packages']: if package['distro'].lower() == 'fedora': return False # If none of the packages were listed as Fedora, then this is unmapped. return True
[ "def", "anitya_unmapped_new_update", "(", "config", ",", "message", ")", ":", "if", "not", "anitya_new_update", "(", "config", ",", "message", ")", ":", "return", "False", "for", "package", "in", "message", "[", "'msg'", "]", "[", "'message'", "]", "[", "'packages'", "]", ":", "if", "package", "[", "'distro'", "]", ".", "lower", "(", ")", "==", "'fedora'", ":", "return", "False", "# If none of the packages were listed as Fedora, then this is unmapped.", "return", "True" ]
New releases of upstream projects that have no mapping to Fedora Adding this rule will let through events when new upstream releases are detected, but only on upstream projects that have no mapping to Fedora Packages. This could be useful to you if you want to monitor release-monitoring.org itself and watch for projects that might need help adjusting their metadata.
[ "New", "releases", "of", "upstream", "projects", "that", "have", "no", "mapping", "to", "Fedora" ]
f9ec790619fcc8b41803077c4dec094e5127fc24
https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/anitya.py#L8-L25
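A hedged sketch of the fedmsg message shape this rule inspects; the field layout follows the attribute accesses in the record, the surrounding anitya_new_update check is assumed to have matched, and the package values are illustrative.

message = {
    'msg': {
        'message': {
            'packages': [
                {'distro': 'Debian', 'package_name': 'requests'},   # no Fedora mapping present
            ],
        },
    },
}
# anitya_unmapped_new_update(config, message) -> True, provided anitya_new_update() also matches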
249,041
fedora-infra/fmn.rules
fmn/rules/anitya.py
anitya_specific_distro
def anitya_specific_distro(config, message, distro=None, *args, **kw): """ Distro-specific release-monitoring.org events This rule will match all anitya events *only for a particular distro*. """ if not distro: return False if not anitya_catchall(config, message): return False d = message['msg'].get('distro', {}) if d: # Have to be careful for None here if d.get('name', '').lower() == distro.lower(): return True d = None p = message['msg'].get('project', {}) if p: d = p.get('distro', {}) if d: # Have to be careful for None here if d.get('name', '').lower() == distro.lower(): return True for pkg in message['msg'].get('message', {}).get('packages', []): if pkg['distro'].lower() == distro.lower(): return True return False
python
def anitya_specific_distro(config, message, distro=None, *args, **kw): """ Distro-specific release-monitoring.org events This rule will match all anitya events *only for a particular distro*. """ if not distro: return False if not anitya_catchall(config, message): return False d = message['msg'].get('distro', {}) if d: # Have to be careful for None here if d.get('name', '').lower() == distro.lower(): return True d = None p = message['msg'].get('project', {}) if p: d = p.get('distro', {}) if d: # Have to be careful for None here if d.get('name', '').lower() == distro.lower(): return True for pkg in message['msg'].get('message', {}).get('packages', []): if pkg['distro'].lower() == distro.lower(): return True return False
[ "def", "anitya_specific_distro", "(", "config", ",", "message", ",", "distro", "=", "None", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "if", "not", "distro", ":", "return", "False", "if", "not", "anitya_catchall", "(", "config", ",", "message", ")", ":", "return", "False", "d", "=", "message", "[", "'msg'", "]", ".", "get", "(", "'distro'", ",", "{", "}", ")", "if", "d", ":", "# Have to be careful for None here", "if", "d", ".", "get", "(", "'name'", ",", "''", ")", ".", "lower", "(", ")", "==", "distro", ".", "lower", "(", ")", ":", "return", "True", "d", "=", "None", "p", "=", "message", "[", "'msg'", "]", ".", "get", "(", "'project'", ",", "{", "}", ")", "if", "p", ":", "d", "=", "p", ".", "get", "(", "'distro'", ",", "{", "}", ")", "if", "d", ":", "# Have to be careful for None here", "if", "d", ".", "get", "(", "'name'", ",", "''", ")", ".", "lower", "(", ")", "==", "distro", ".", "lower", "(", ")", ":", "return", "True", "for", "pkg", "in", "message", "[", "'msg'", "]", ".", "get", "(", "'message'", ",", "{", "}", ")", ".", "get", "(", "'packages'", ",", "[", "]", ")", ":", "if", "pkg", "[", "'distro'", "]", ".", "lower", "(", ")", "==", "distro", ".", "lower", "(", ")", ":", "return", "True", "return", "False" ]
Distro-specific release-monitoring.org events This rule will match all anitya events *only for a particular distro*.
[ "Distro", "-", "specific", "release", "-", "monitoring", ".", "org", "events" ]
f9ec790619fcc8b41803077c4dec094e5127fc24
https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/anitya.py#L41-L70
249,042
fedora-infra/fmn.rules
fmn/rules/anitya.py
anitya_by_upstream_project
def anitya_by_upstream_project(config, message, projects=None, *args, **kw): """ Anything regarding a particular "upstream project" Adding this rule will let through *any* anitya notification that pertains to a particular "upstream project". Note that the "project" name is often different from the distro "package" name. For instance, the package python-requests in Fedora will have the upstream project name "requests". You can specify a comma-separated list of upstream project names here. """ # We only deal in anitya messages, first off. if not anitya_catchall(config, message): return False if not projects or not isinstance(projects, six.string_types): return False # Get the project for the message. project = message.get('msg', {}).get('project', {}).get('name', None) # Split the string into a list of targets targets = [p.strip() for p in projects.split(',')] # Filter out empty strings if someone is putting ',,,' garbage in targets = [target for target in targets if target] return project in targets
python
def anitya_by_upstream_project(config, message, projects=None, *args, **kw): """ Anything regarding a particular "upstream project" Adding this rule will let through *any* anitya notification that pertains to a particular "upstream project". Note that the "project" name is often different from the distro "package" name. For instance, the package python-requests in Fedora will have the upstream project name "requests". You can specify a comma-separated list of upstream project names here. """ # We only deal in anitya messages, first off. if not anitya_catchall(config, message): return False if not projects or not isinstance(projects, six.string_types): return False # Get the project for the message. project = message.get('msg', {}).get('project', {}).get('name', None) # Split the string into a list of targets targets = [p.strip() for p in projects.split(',')] # Filter out empty strings if someone is putting ',,,' garbage in targets = [target for target in targets if target] return project in targets
[ "def", "anitya_by_upstream_project", "(", "config", ",", "message", ",", "projects", "=", "None", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "# We only deal in anitya messages, first off.", "if", "not", "anitya_catchall", "(", "config", ",", "message", ")", ":", "return", "False", "if", "not", "projects", "or", "not", "isinstance", "(", "projects", ",", "six", ".", "string_types", ")", ":", "return", "False", "# Get the project for the message.", "project", "=", "message", ".", "get", "(", "'msg'", ",", "{", "}", ")", ".", "get", "(", "'project'", ",", "{", "}", ")", ".", "get", "(", "'name'", ",", "None", ")", "# Split the string into a list of targets", "targets", "=", "[", "p", ".", "strip", "(", ")", "for", "p", "in", "projects", ".", "split", "(", "','", ")", "]", "# Filter out empty strings if someone is putting ',,,' garbage in", "targets", "=", "[", "target", "for", "target", "in", "targets", "if", "target", "]", "return", "project", "in", "targets" ]
Anything regarding a particular "upstream project" Adding this rule will let through *any* anitya notification that pertains to a particular "upstream project". Note that the "project" name is often different from the distro "package" name. For instance, the package python-requests in Fedora will have the upstream project name "requests". You can specify a comma-separated list of upstream project names here.
[ "Anything", "regarding", "a", "particular", "upstream", "project" ]
f9ec790619fcc8b41803077c4dec094e5127fc24
https://github.com/fedora-infra/fmn.rules/blob/f9ec790619fcc8b41803077c4dec094e5127fc24/fmn/rules/anitya.py#L187-L212
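As an aside on the entry above: the comma-splitting in anitya_by_upstream_project is easy to exercise on its own. A minimal sketch of just that parsing step, assuming nothing beyond the standard library (the surrounding anitya_catchall check is omitted):

projects = ' requests, ansible,, flask '

# Split the configured string into candidate names, mirroring the function body.
targets = [p.strip() for p in projects.split(',')]
# Drop empty strings left behind by stray commas.
targets = [target for target in targets if target]

print(targets)                   # ['requests', 'ansible', 'flask']
print('requests' in targets)     # True, so a 'requests' notification would pass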
249,043
radjkarl/appBase
appbase/Launcher.py
_FileSystemModel.data
def data(self, index, role): """use zipped icon.png as icon""" if index.column() == 0 and role == QtCore.Qt.DecorationRole: if self.isPyz(index): with ZipFile(str(self.filePath(index)), 'r') as myzip: # print myzip.namelist() try: myzip.extract('icon', self._tmp_dir_work) p = os.path.join(self._tmp_dir_work, 'icon') return QtGui.QIcon(p) except KeyError: pass return super(_FileSystemModel, self).data(index, role)
python
def data(self, index, role): """use zipped icon.png as icon""" if index.column() == 0 and role == QtCore.Qt.DecorationRole: if self.isPyz(index): with ZipFile(str(self.filePath(index)), 'r') as myzip: # print myzip.namelist() try: myzip.extract('icon', self._tmp_dir_work) p = os.path.join(self._tmp_dir_work, 'icon') return QtGui.QIcon(p) except KeyError: pass return super(_FileSystemModel, self).data(index, role)
[ "def", "data", "(", "self", ",", "index", ",", "role", ")", ":", "if", "index", ".", "column", "(", ")", "==", "0", "and", "role", "==", "QtCore", ".", "Qt", ".", "DecorationRole", ":", "if", "self", ".", "isPyz", "(", "index", ")", ":", "with", "ZipFile", "(", "str", "(", "self", ".", "filePath", "(", "index", ")", ")", ",", "'r'", ")", "as", "myzip", ":", "# print myzip.namelist()\r", "try", ":", "myzip", ".", "extract", "(", "'icon'", ",", "self", ".", "_tmp_dir_work", ")", "p", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_tmp_dir_work", ",", "'icon'", ")", "return", "QtGui", ".", "QIcon", "(", "p", ")", "except", "KeyError", ":", "pass", "return", "super", "(", "_FileSystemModel", ",", "self", ")", ".", "data", "(", "index", ",", "role", ")" ]
use zipped icon.png as icon
[ "use", "zipped", "icon", ".", "png", "as", "icon" ]
72b514e6dee7c083f01a2d0b2cc93d46df55bdcb
https://github.com/radjkarl/appBase/blob/72b514e6dee7c083f01a2d0b2cc93d46df55bdcb/appbase/Launcher.py#L524-L536
249,044
ploneintranet/ploneintranet.workspace
src/ploneintranet/workspace/browser/tiles/sidebar.py
BaseTile.status_messages
def status_messages(self): """ Returns status messages if any """ messages = IStatusMessage(self.request) m = messages.show() for item in m: item.id = idnormalizer.normalize(item.message) return m
python
def status_messages(self): """ Returns status messages if any """ messages = IStatusMessage(self.request) m = messages.show() for item in m: item.id = idnormalizer.normalize(item.message) return m
[ "def", "status_messages", "(", "self", ")", ":", "messages", "=", "IStatusMessage", "(", "self", ".", "request", ")", "m", "=", "messages", ".", "show", "(", ")", "for", "item", "in", "m", ":", "item", ".", "id", "=", "idnormalizer", ".", "normalize", "(", "item", ".", "message", ")", "return", "m" ]
Returns status messages if any
[ "Returns", "status", "messages", "if", "any" ]
a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba
https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/browser/tiles/sidebar.py#L35-L42
249,045
ploneintranet/ploneintranet.workspace
src/ploneintranet/workspace/browser/tiles/sidebar.py
SidebarSettingsMembers.users
def users(self): """Get current users and add in any search results. :returns: a list of dicts with keys - id - title :rtype: list """ existing_users = self.existing_users() existing_user_ids = [x['id'] for x in existing_users] # Only add search results that are not already members sharing = getMultiAdapter((self.my_workspace(), self.request), name='sharing') search_results = sharing.user_search_results() users = existing_users + [x for x in search_results if x['id'] not in existing_user_ids] users.sort(key=lambda x: safe_unicode(x["title"])) return users
python
def users(self): """Get current users and add in any search results. :returns: a list of dicts with keys - id - title :rtype: list """ existing_users = self.existing_users() existing_user_ids = [x['id'] for x in existing_users] # Only add search results that are not already members sharing = getMultiAdapter((self.my_workspace(), self.request), name='sharing') search_results = sharing.user_search_results() users = existing_users + [x for x in search_results if x['id'] not in existing_user_ids] users.sort(key=lambda x: safe_unicode(x["title"])) return users
[ "def", "users", "(", "self", ")", ":", "existing_users", "=", "self", ".", "existing_users", "(", ")", "existing_user_ids", "=", "[", "x", "[", "'id'", "]", "for", "x", "in", "existing_users", "]", "# Only add search results that are not already members", "sharing", "=", "getMultiAdapter", "(", "(", "self", ".", "my_workspace", "(", ")", ",", "self", ".", "request", ")", ",", "name", "=", "'sharing'", ")", "search_results", "=", "sharing", ".", "user_search_results", "(", ")", "users", "=", "existing_users", "+", "[", "x", "for", "x", "in", "search_results", "if", "x", "[", "'id'", "]", "not", "in", "existing_user_ids", "]", "users", ".", "sort", "(", "key", "=", "lambda", "x", ":", "safe_unicode", "(", "x", "[", "\"title\"", "]", ")", ")", "return", "users" ]
Get current users and add in any search results. :returns: a list of dicts with keys - id - title :rtype: list
[ "Get", "current", "users", "and", "add", "in", "any", "search", "results", "." ]
a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba
https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/browser/tiles/sidebar.py#L54-L73
249,046
ploneintranet/ploneintranet.workspace
src/ploneintranet/workspace/browser/tiles/sidebar.py
Sidebar.children
def children(self): """ returns a list of dicts of items in the current context """ items = [] catalog = self.context.portal_catalog current_path = '/'.join(self.context.getPhysicalPath()) sidebar_search = self.request.get('sidebar-search', None) if sidebar_search: st = '%s*' % sidebar_search # XXX plone only allows * as postfix. # With solr we might want to do real substr results = catalog.searchResults(SearchableText=st, path=current_path) else: results = self.context.getFolderContents() for item in results: # Do some checks to set the right classes for icons and candy desc = ( item['Description'] and 'has-description' or 'has-no-description' ) content_type = TYPE_MAP.get(item['portal_type'], 'none') mime_type = '' # XXX: will be needed later for grouping by mimetyp # typ can be user, folder, date and mime typish typ = 'folder' # XXX: This needs to get dynamic later url = item.getURL() ptool = api.portal.get_tool('portal_properties') view_action_types = \ ptool.site_properties.typesUseViewActionInListings if content_type in FOLDERISH_TYPES: dpi = ( "source: #workspace-documents; " "target: #workspace-documents" ) url = url + '/@@sidebar.default#workspace-documents' content_type = 'group' else: if item['portal_type'] in view_action_types: url = "%s/view" % url dpi = ( "target: #document-body; " "source: #document-body; " "history: record" ) content_type = 'document' cls = 'item %s type-%s %s' % (content_type, typ, desc) items.append({ 'id': item['getId'], 'cls': cls, 'title': item['Title'], 'description': item['Description'], 'url': url, 'type': TYPE_MAP.get(item['portal_type'], 'none'), 'mime-type': mime_type, 'dpi': dpi}) return items
python
def children(self): """ returns a list of dicts of items in the current context """ items = [] catalog = self.context.portal_catalog current_path = '/'.join(self.context.getPhysicalPath()) sidebar_search = self.request.get('sidebar-search', None) if sidebar_search: st = '%s*' % sidebar_search # XXX plone only allows * as postfix. # With solr we might want to do real substr results = catalog.searchResults(SearchableText=st, path=current_path) else: results = self.context.getFolderContents() for item in results: # Do some checks to set the right classes for icons and candy desc = ( item['Description'] and 'has-description' or 'has-no-description' ) content_type = TYPE_MAP.get(item['portal_type'], 'none') mime_type = '' # XXX: will be needed later for grouping by mimetyp # typ can be user, folder, date and mime typish typ = 'folder' # XXX: This needs to get dynamic later url = item.getURL() ptool = api.portal.get_tool('portal_properties') view_action_types = \ ptool.site_properties.typesUseViewActionInListings if content_type in FOLDERISH_TYPES: dpi = ( "source: #workspace-documents; " "target: #workspace-documents" ) url = url + '/@@sidebar.default#workspace-documents' content_type = 'group' else: if item['portal_type'] in view_action_types: url = "%s/view" % url dpi = ( "target: #document-body; " "source: #document-body; " "history: record" ) content_type = 'document' cls = 'item %s type-%s %s' % (content_type, typ, desc) items.append({ 'id': item['getId'], 'cls': cls, 'title': item['Title'], 'description': item['Description'], 'url': url, 'type': TYPE_MAP.get(item['portal_type'], 'none'), 'mime-type': mime_type, 'dpi': dpi}) return items
[ "def", "children", "(", "self", ")", ":", "items", "=", "[", "]", "catalog", "=", "self", ".", "context", ".", "portal_catalog", "current_path", "=", "'/'", ".", "join", "(", "self", ".", "context", ".", "getPhysicalPath", "(", ")", ")", "sidebar_search", "=", "self", ".", "request", ".", "get", "(", "'sidebar-search'", ",", "None", ")", "if", "sidebar_search", ":", "st", "=", "'%s*'", "%", "sidebar_search", "# XXX plone only allows * as postfix.", "# With solr we might want to do real substr", "results", "=", "catalog", ".", "searchResults", "(", "SearchableText", "=", "st", ",", "path", "=", "current_path", ")", "else", ":", "results", "=", "self", ".", "context", ".", "getFolderContents", "(", ")", "for", "item", "in", "results", ":", "# Do some checks to set the right classes for icons and candy", "desc", "=", "(", "item", "[", "'Description'", "]", "and", "'has-description'", "or", "'has-no-description'", ")", "content_type", "=", "TYPE_MAP", ".", "get", "(", "item", "[", "'portal_type'", "]", ",", "'none'", ")", "mime_type", "=", "''", "# XXX: will be needed later for grouping by mimetyp", "# typ can be user, folder, date and mime typish", "typ", "=", "'folder'", "# XXX: This needs to get dynamic later", "url", "=", "item", ".", "getURL", "(", ")", "ptool", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_properties'", ")", "view_action_types", "=", "ptool", ".", "site_properties", ".", "typesUseViewActionInListings", "if", "content_type", "in", "FOLDERISH_TYPES", ":", "dpi", "=", "(", "\"source: #workspace-documents; \"", "\"target: #workspace-documents\"", ")", "url", "=", "url", "+", "'/@@sidebar.default#workspace-documents'", "content_type", "=", "'group'", "else", ":", "if", "item", "[", "'portal_type'", "]", "in", "view_action_types", ":", "url", "=", "\"%s/view\"", "%", "url", "dpi", "=", "(", "\"target: #document-body; \"", "\"source: #document-body; \"", "\"history: record\"", ")", "content_type", "=", "'document'", "cls", "=", "'item %s type-%s %s'", "%", "(", "content_type", ",", "typ", ",", "desc", ")", "items", ".", "append", "(", "{", "'id'", ":", "item", "[", "'getId'", "]", ",", "'cls'", ":", "cls", ",", "'title'", ":", "item", "[", "'Title'", "]", ",", "'description'", ":", "item", "[", "'Description'", "]", ",", "'url'", ":", "url", ",", "'type'", ":", "TYPE_MAP", ".", "get", "(", "item", "[", "'portal_type'", "]", ",", "'none'", ")", ",", "'mime-type'", ":", "mime_type", ",", "'dpi'", ":", "dpi", "}", ")", "return", "items" ]
returns a list of dicts of items in the current context
[ "returns", "a", "list", "of", "dicts", "of", "items", "in", "the", "current", "context" ]
a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba
https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/browser/tiles/sidebar.py#L223-L286
249,047
todddeluca/fabvenv
fabvenv.py
Venv.install
def install(self): ''' Use pip to install the requirements file. ''' remote_path = os.path.join(self.venv, 'requirements.txt') put(self.requirements, remote_path) run('{pip} install -r {requirements}'.format( pip=self.pip(), requirements=remote_path))
python
def install(self): ''' Use pip to install the requirements file. ''' remote_path = os.path.join(self.venv, 'requirements.txt') put(self.requirements, remote_path) run('{pip} install -r {requirements}'.format( pip=self.pip(), requirements=remote_path))
[ "def", "install", "(", "self", ")", ":", "remote_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "venv", ",", "'requirements.txt'", ")", "put", "(", "self", ".", "requirements", ",", "remote_path", ")", "run", "(", "'{pip} install -r {requirements}'", ".", "format", "(", "pip", "=", "self", ".", "pip", "(", ")", ",", "requirements", "=", "remote_path", ")", ")" ]
Use pip to install the requirements file.
[ "Use", "pip", "to", "install", "the", "requirements", "file", "." ]
ba0121412a7f47b3732d45b6cee42ac2b8737159
https://github.com/todddeluca/fabvenv/blob/ba0121412a7f47b3732d45b6cee42ac2b8737159/fabvenv.py#L70-L77
249,048
todddeluca/fabvenv
fabvenv.py
Venv.freeze
def freeze(self): ''' Use pip to freeze the requirements and save them to the local requirements.txt file. ''' remote_path = os.path.join(self.venv, 'requirements.txt') run('{} freeze > {}'.format(self.pip(), remote_path)) get(remote_path, self.requirements)
python
def freeze(self): ''' Use pip to freeze the requirements and save them to the local requirements.txt file. ''' remote_path = os.path.join(self.venv, 'requirements.txt') run('{} freeze > {}'.format(self.pip(), remote_path)) get(remote_path, self.requirements)
[ "def", "freeze", "(", "self", ")", ":", "remote_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "venv", ",", "'requirements.txt'", ")", "run", "(", "'{} freeze > {}'", ".", "format", "(", "self", ".", "pip", "(", ")", ",", "remote_path", ")", ")", "get", "(", "remote_path", ",", "self", ".", "requirements", ")" ]
Use pip to freeze the requirements and save them to the local requirements.txt file.
[ "Use", "pip", "to", "freeze", "the", "requirements", "and", "save", "them", "to", "the", "local", "requirements", ".", "txt", "file", "." ]
ba0121412a7f47b3732d45b6cee42ac2b8737159
https://github.com/todddeluca/fabvenv/blob/ba0121412a7f47b3732d45b6cee42ac2b8737159/fabvenv.py#L90-L97
249,049
todddeluca/fabvenv
fabvenv.py
Venv.remove
def remove(self): ''' Remove the virtual environment completely ''' print 'remove' if self.exists(): print 'cleaning', self.venv run('rm -rf {}'.format(self.venv))
python
def remove(self): ''' Remove the virtual environment completely ''' print 'remove' if self.exists(): print 'cleaning', self.venv run('rm -rf {}'.format(self.venv))
[ "def", "remove", "(", "self", ")", ":", "print", "'remove'", "if", "self", ".", "exists", "(", ")", ":", "print", "'cleaning'", ",", "self", ".", "venv", "run", "(", "'rm -rf {}'", ".", "format", "(", "self", ".", "venv", ")", ")" ]
Remove the virtual environment completely
[ "Remove", "the", "virtual", "environment", "completely" ]
ba0121412a7f47b3732d45b6cee42ac2b8737159
https://github.com/todddeluca/fabvenv/blob/ba0121412a7f47b3732d45b6cee42ac2b8737159/fabvenv.py#L99-L106
249,050
todddeluca/fabvenv
fabvenv.py
Venv.venv_pth
def venv_pth(self, dirs): ''' Add the directories in `dirs` to the `sys.path`. A venv.pth file will be written in the site-packages dir of this virtualenv to add dirs to sys.path. dirs: a list of directories. ''' # Create venv.pth to add dirs to sys.path when using the virtualenv. text = StringIO.StringIO() text.write("# Autogenerated file. Do not modify.\n") for path in dirs: text.write('{}\n'.format(path)) put(text, os.path.join(self.site_packages_dir(), 'venv.pth'), mode=0664)
python
def venv_pth(self, dirs): ''' Add the directories in `dirs` to the `sys.path`. A venv.pth file will be written in the site-packages dir of this virtualenv to add dirs to sys.path. dirs: a list of directories. ''' # Create venv.pth to add dirs to sys.path when using the virtualenv. text = StringIO.StringIO() text.write("# Autogenerated file. Do not modify.\n") for path in dirs: text.write('{}\n'.format(path)) put(text, os.path.join(self.site_packages_dir(), 'venv.pth'), mode=0664)
[ "def", "venv_pth", "(", "self", ",", "dirs", ")", ":", "# Create venv.pth to add dirs to sys.path when using the virtualenv.", "text", "=", "StringIO", ".", "StringIO", "(", ")", "text", ".", "write", "(", "\"# Autogenerated file. Do not modify.\\n\"", ")", "for", "path", "in", "dirs", ":", "text", ".", "write", "(", "'{}\\n'", ".", "format", "(", "path", ")", ")", "put", "(", "text", ",", "os", ".", "path", ".", "join", "(", "self", ".", "site_packages_dir", "(", ")", ",", "'venv.pth'", ")", ",", "mode", "=", "0664", ")" ]
Add the directories in `dirs` to the `sys.path`. A venv.pth file will be written in the site-packages dir of this virtualenv to add dirs to sys.path. dirs: a list of directories.
[ "Add", "the", "directories", "in", "dirs", "to", "the", "sys", ".", "path", ".", "A", "venv", ".", "pth", "file", "will", "be", "written", "in", "the", "site", "-", "packages", "dir", "of", "this", "virtualenv", "to", "add", "dirs", "to", "sys", ".", "path", "." ]
ba0121412a7f47b3732d45b6cee42ac2b8737159
https://github.com/todddeluca/fabvenv/blob/ba0121412a7f47b3732d45b6cee42ac2b8737159/fabvenv.py#L111-L124
249,051
ploneintranet/ploneintranet.workspace
src/ploneintranet/workspace/adapters.py
PloneIntranetWorkspace.add_to_team
def add_to_team(self, **kw): """ We override this method to add our additional participation policy groups, as detailed in available_groups above """ group = self.context.participant_policy.title() data = kw.copy() if "groups" in data: data["groups"].add(group) else: data["groups"] = set([group]) super(PloneIntranetWorkspace, self).add_to_team(**data)
python
def add_to_team(self, **kw): """ We override this method to add our additional participation policy groups, as detailed in available_groups above """ group = self.context.participant_policy.title() data = kw.copy() if "groups" in data: data["groups"].add(group) else: data["groups"] = set([group]) super(PloneIntranetWorkspace, self).add_to_team(**data)
[ "def", "add_to_team", "(", "self", ",", "*", "*", "kw", ")", ":", "group", "=", "self", ".", "context", ".", "participant_policy", ".", "title", "(", ")", "data", "=", "kw", ".", "copy", "(", ")", "if", "\"groups\"", "in", "data", ":", "data", "[", "\"groups\"", "]", ".", "add", "(", "group", ")", "else", ":", "data", "[", "\"groups\"", "]", "=", "set", "(", "[", "group", "]", ")", "super", "(", "PloneIntranetWorkspace", ",", "self", ")", ".", "add_to_team", "(", "*", "*", "data", ")" ]
We override this method to add our additional participation policy groups, as detailed in available_groups above
[ "We", "override", "this", "method", "to", "add", "our", "additional", "participation", "policy", "groups", "as", "detailed", "in", "available_groups", "above" ]
a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba
https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/adapters.py#L32-L44
249,052
ploneintranet/ploneintranet.workspace
src/ploneintranet/workspace/adapters.py
PloneIntranetWorkspace.group_for_policy
def group_for_policy(self, policy=None): """ Lookup the collective.workspace usergroup corresponding to the given policy :param policy: The value of the policy to lookup, defaults to the current policy :type policy: str """ if policy is None: policy = self.context.participant_policy return "%s:%s" % (policy.title(), self.context.UID())
python
def group_for_policy(self, policy=None): """ Lookup the collective.workspace usergroup corresponding to the given policy :param policy: The value of the policy to lookup, defaults to the current policy :type policy: str """ if policy is None: policy = self.context.participant_policy return "%s:%s" % (policy.title(), self.context.UID())
[ "def", "group_for_policy", "(", "self", ",", "policy", "=", "None", ")", ":", "if", "policy", "is", "None", ":", "policy", "=", "self", ".", "context", ".", "participant_policy", "return", "\"%s:%s\"", "%", "(", "policy", ".", "title", "(", ")", ",", "self", ".", "context", ".", "UID", "(", ")", ")" ]
Lookup the collective.workspace usergroup corresponding to the given policy :param policy: The value of the policy to lookup, defaults to the current policy :type policy: str
[ "Lookup", "the", "collective", ".", "workspace", "usergroup", "corresponding", "to", "the", "given", "policy" ]
a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba
https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/adapters.py#L46-L57
249,053
ploneintranet/ploneintranet.workspace
src/ploneintranet/workspace/adapters.py
WorkspaceLocalRoleAdapter.getRoles
def getRoles(self, principal_id): """ give an Owner who is also a 'selfpublisher', the reviewer role """ context = self.context current_roles = list(DefaultLocalRoleAdapter.getRoles( self, principal_id, )) # check we are not on the workspace itself if IHasWorkspace.providedBy(context): return current_roles # otherwise we should acquire the workspace and check out roles workspace = getattr(context, 'acquire_workspace', lambda: None)() if workspace is None: return current_roles workspace_roles = api.user.get_roles(obj=workspace) if 'SelfPublisher' in workspace_roles and 'Owner' in current_roles: current_roles.append('Reviewer') return current_roles
python
def getRoles(self, principal_id): """ give an Owner who is also a 'selfpublisher', the reviewer role """ context = self.context current_roles = list(DefaultLocalRoleAdapter.getRoles( self, principal_id, )) # check we are not on the workspace itself if IHasWorkspace.providedBy(context): return current_roles # otherwise we should acquire the workspace and check out roles workspace = getattr(context, 'acquire_workspace', lambda: None)() if workspace is None: return current_roles workspace_roles = api.user.get_roles(obj=workspace) if 'SelfPublisher' in workspace_roles and 'Owner' in current_roles: current_roles.append('Reviewer') return current_roles
[ "def", "getRoles", "(", "self", ",", "principal_id", ")", ":", "context", "=", "self", ".", "context", "current_roles", "=", "list", "(", "DefaultLocalRoleAdapter", ".", "getRoles", "(", "self", ",", "principal_id", ",", ")", ")", "# check we are not on the workspace itself", "if", "IHasWorkspace", ".", "providedBy", "(", "context", ")", ":", "return", "current_roles", "# otherwise we should acquire the workspace and check out roles", "workspace", "=", "getattr", "(", "context", ",", "'acquire_workspace'", ",", "lambda", ":", "None", ")", "(", ")", "if", "workspace", "is", "None", ":", "return", "current_roles", "workspace_roles", "=", "api", ".", "user", ".", "get_roles", "(", "obj", "=", "workspace", ")", "if", "'SelfPublisher'", "in", "workspace_roles", "and", "'Owner'", "in", "current_roles", ":", "current_roles", ".", "append", "(", "'Reviewer'", ")", "return", "current_roles" ]
give an Owner who is also a 'selfpublisher', the reviewer role
[ "give", "an", "Owner", "who", "is", "also", "a", "selfpublisher", "the", "reviewer", "role" ]
a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba
https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/adapters.py#L69-L90
249,054
AnimusPEXUS/wayround_i2p_carafe
wayround_i2p/carafe/carafe.py
Router.add
def add(self, method, path_settings, target): """ Simply creates Route and appends it to self.routes read Route class docs for parameters meaning """ self.routes.append(Route(method, path_settings, target)) return
python
def add(self, method, path_settings, target): """ Simply creates Route and appends it to self.routes read Route class docs for parameters meaning """ self.routes.append(Route(method, path_settings, target)) return
[ "def", "add", "(", "self", ",", "method", ",", "path_settings", ",", "target", ")", ":", "self", ".", "routes", ".", "append", "(", "Route", "(", "method", ",", "path_settings", ",", "target", ")", ")", "return" ]
Simply creates Route and appends it to self.routes read Route class docs for parameters meaning
[ "Simply", "creates", "Route", "and", "appends", "it", "to", "self", ".", "routes" ]
c92a72e1f7b559ac0bd6dc0ce2716ce1e61a9c5e
https://github.com/AnimusPEXUS/wayround_i2p_carafe/blob/c92a72e1f7b559ac0bd6dc0ce2716ce1e61a9c5e/wayround_i2p/carafe/carafe.py#L189-L196
249,055
gnahckire/ciscospark-py
ciscospark/core.py
Auth.clean_query_Dict
def clean_query_Dict(cls, query_Dict): """removes NoneTypes from the dict """ return {k: v for k, v in query_Dict.items() if v}
python
def clean_query_Dict(cls, query_Dict): """removes NoneTypes from the dict """ return {k: v for k, v in query_Dict.items() if v}
[ "def", "clean_query_Dict", "(", "cls", ",", "query_Dict", ")", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "query_Dict", ".", "items", "(", ")", "if", "v", "}" ]
removes NoneTypes from the dict
[ "removes", "NoneTypes", "from", "the", "dict" ]
73b551c658f8f48f3a2fc1f1d891fb47883d5561
https://github.com/gnahckire/ciscospark-py/blob/73b551c658f8f48f3a2fc1f1d891fb47883d5561/ciscospark/core.py#L47-L50
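A small self-contained sketch of the filtering idea in Auth.clean_query_Dict above, re-stated as a plain function for illustration rather than the library's API: the truthiness test drops not only None but also empty strings and zero.

def clean_query_dict(query_dict):
    # Keep only truthy values; None, '' and 0 are all removed.
    return {k: v for k, v in query_dict.items() if v}

params = {'teamId': None, 'type': 'GROUP', 'max': 50}
print(clean_query_dict(params))   # {'type': 'GROUP', 'max': 50}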
249,056
gnahckire/ciscospark-py
ciscospark/core.py
Rooms.list
def list(self, teamId=None, rType=None, maxResults=C.MAX_RESULT_DEFAULT, limit=C.ALL): """ rType can be DIRECT or GROUP """ queryParams = {'teamId': teamId, 'type': rType, 'max': maxResults} queryParams = self.clean_query_Dict(queryParams) ret = self.send_request(C.GET, self.end, data=queryParams, limit=limit) return [Room(self.token, roomData) for roomData in ret['items']]
python
def list(self, teamId=None, rType=None, maxResults=C.MAX_RESULT_DEFAULT, limit=C.ALL): """ rType can be DIRECT or GROUP """ queryParams = {'teamId': teamId, 'type': rType, 'max': maxResults} queryParams = self.clean_query_Dict(queryParams) ret = self.send_request(C.GET, self.end, data=queryParams, limit=limit) return [Room(self.token, roomData) for roomData in ret['items']]
[ "def", "list", "(", "self", ",", "teamId", "=", "None", ",", "rType", "=", "None", ",", "maxResults", "=", "C", ".", "MAX_RESULT_DEFAULT", ",", "limit", "=", "C", ".", "ALL", ")", ":", "queryParams", "=", "{", "'teamId'", ":", "teamId", ",", "'type'", ":", "rType", ",", "'max'", ":", "maxResults", "}", "queryParams", "=", "self", ".", "clean_query_Dict", "(", "queryParams", ")", "ret", "=", "self", ".", "send_request", "(", "C", ".", "GET", ",", "self", ".", "end", ",", "data", "=", "queryParams", ",", "limit", "=", "limit", ")", "return", "[", "Room", "(", "self", ".", "token", ",", "roomData", ")", "for", "roomData", "in", "ret", "[", "'items'", "]", "]" ]
rType can be DIRECT or GROUP
[ "rType", "can", "be", "DIRECT", "or", "GROUP" ]
73b551c658f8f48f3a2fc1f1d891fb47883d5561
https://github.com/gnahckire/ciscospark-py/blob/73b551c658f8f48f3a2fc1f1d891fb47883d5561/ciscospark/core.py#L97-L106
249,057
townsenddw/jhubctl
jhubctl/utils.py
get_flag_args
def get_flag_args(**options): """Build a list of flags.""" flags = [] for key, value in options.items(): # Build short flags. if len(key) == 1: flag = f'-{key}' # Built long flags. else: flag = f'--{key}' flags = flags + [flag, value] return flags
python
def get_flag_args(**options): """Build a list of flags.""" flags = [] for key, value in options.items(): # Build short flags. if len(key) == 1: flag = f'-{key}' # Built long flags. else: flag = f'--{key}' flags = flags + [flag, value] return flags
[ "def", "get_flag_args", "(", "*", "*", "options", ")", ":", "flags", "=", "[", "]", "for", "key", ",", "value", "in", "options", ".", "items", "(", ")", ":", "# Build short flags.", "if", "len", "(", "key", ")", "==", "1", ":", "flag", "=", "f'-{key}'", "# Built long flags.", "else", ":", "flag", "=", "f'--{key}'", "flags", "=", "flags", "+", "[", "flag", ",", "value", "]", "return", "flags" ]
Build a list of flags.
[ "Build", "a", "list", "of", "flags", "." ]
c8c20f86a16e9d01dd90e4607d81423417cc773b
https://github.com/townsenddw/jhubctl/blob/c8c20f86a16e9d01dd90e4607d81423417cc773b/jhubctl/utils.py#L12-L23
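A quick usage sketch for get_flag_args above, assuming Python 3.7+ (f-strings plus insertion-ordered keyword arguments): single-letter names become short flags, longer names become long flags, and each flag is followed by its value unchanged.

print(get_flag_args(n='jhub', output='json'))
# ['-n', 'jhub', '--output', 'json']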
249,058
townsenddw/jhubctl
jhubctl/utils.py
kubectl
def kubectl(*args, input=None, **flags): """Simple wrapper to kubectl.""" # Build command line call. line = ['kubectl'] + list(args) line = line + get_flag_args(**flags) if input is not None: line = line + ['-f', '-'] # Run subprocess output = subprocess.run( line, input=input, capture_output=True, text=True ) return output
python
def kubectl(*args, input=None, **flags): """Simple wrapper to kubectl.""" # Build command line call. line = ['kubectl'] + list(args) line = line + get_flag_args(**flags) if input is not None: line = line + ['-f', '-'] # Run subprocess output = subprocess.run( line, input=input, capture_output=True, text=True ) return output
[ "def", "kubectl", "(", "*", "args", ",", "input", "=", "None", ",", "*", "*", "flags", ")", ":", "# Build command line call.", "line", "=", "[", "'kubectl'", "]", "+", "list", "(", "args", ")", "line", "=", "line", "+", "get_flag_args", "(", "*", "*", "flags", ")", "if", "input", "is", "not", "None", ":", "line", "=", "line", "+", "[", "'-f'", ",", "'-'", "]", "# Run subprocess", "output", "=", "subprocess", ".", "run", "(", "line", ",", "input", "=", "input", ",", "capture_output", "=", "True", ",", "text", "=", "True", ")", "return", "output" ]
Simple wrapper to kubectl.
[ "Simple", "wrapper", "to", "kubectl", "." ]
c8c20f86a16e9d01dd90e4607d81423417cc773b
https://github.com/townsenddw/jhubctl/blob/c8c20f86a16e9d01dd90e4607d81423417cc773b/jhubctl/utils.py#L26-L40
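A hypothetical call to the kubectl wrapper above. This assumes a kubectl binary on PATH and Python 3.7+ (for capture_output); the namespace flag and the manifest contents are made up purely to illustrate how **flags expand via get_flag_args and how input is piped.

result = kubectl('get', 'pods', namespace='jhub')   # runs: kubectl get pods --namespace jhub
print(result.returncode)
print(result.stdout)

# Passing input appends '-f -' to the command line, so kubectl reads the
# resource definition from standard input.
manifest = "apiVersion: v1\nkind: Namespace\nmetadata:\n  name: jhub\n"
kubectl('apply', input=manifest)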
249,059
openpermissions/perch
perch/organisation.py
cleanString
def cleanString(someText): """ remove special characters and spaces from string and convert to lowercase """ ret = '' if someText is not None: ret = filter(unicode.isalnum, someText.lower()) return ret
python
def cleanString(someText): """ remove special characters and spaces from string and convert to lowercase """ ret = '' if someText is not None: ret = filter(unicode.isalnum, someText.lower()) return ret
[ "def", "cleanString", "(", "someText", ")", ":", "ret", "=", "''", "if", "someText", "is", "not", "None", ":", "ret", "=", "filter", "(", "unicode", ".", "isalnum", ",", "someText", ".", "lower", "(", ")", ")", "return", "ret" ]
remove special characters and spaces from string and convert to lowercase
[ "remove", "special", "characters", "and", "spaces", "from", "string", "and", "convert", "to", "lowercase" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L51-L59
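The cleanString helper above is Python 2 code (it relies on the unicode builtin, and filter on a unicode string returns a unicode string). A rough Python 3 equivalent of the same idea, for illustration only:

def clean_string(some_text):
    # Lowercase and keep only alphanumeric characters; None becomes ''.
    if some_text is None:
        return ''
    return ''.join(ch for ch in some_text.lower() if ch.isalnum())

print(clean_string("Open Permissions-Platform!"))   # 'openpermissionsplatform'
print(clean_string(None))                           # ''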
249,060
openpermissions/perch
perch/organisation.py
group_permissions
def group_permissions(permissions): """ Groups a permissions list Returns a dictionary, with permission types as keys and sets of entities with access to the resource as values, e.g.: { 'organisation_id': { 'org1': set(['rw', 'r', 'w']), 'org2': set(['-']), 'org3': set(['r', 'w']), }, 'all': set(['r']) } 'org1' has 'rw' access to the resource, 'org2' is denied access and 'org3' has 'r' & 'w' access (the same as 'rw'). Note that 'rw' will always result in 'rw', 'r' & 'w' in the set to make checks easier. If present in the resource's permissions, the 'all' permission type is an exception in that it's value is just a set instead of a dictionary. :param permissions: a list of permissions :returns: defaultdict """ groups = defaultdict(lambda: defaultdict(set)) for p in sorted(permissions, key=itemgetter('type')): permission_set = groups[p['type']][p.get('value')] permission_set.add(p['permission']) if p['permission'] == 'rw': permission_set.update({'r', 'w'}) # the 'all' permission type always has None as the value groups['all'] = groups['all'][None] return groups
python
def group_permissions(permissions): """ Groups a permissions list Returns a dictionary, with permission types as keys and sets of entities with access to the resource as values, e.g.: { 'organisation_id': { 'org1': set(['rw', 'r', 'w']), 'org2': set(['-']), 'org3': set(['r', 'w']), }, 'all': set(['r']) } 'org1' has 'rw' access to the resource, 'org2' is denied access and 'org3' has 'r' & 'w' access (the same as 'rw'). Note that 'rw' will always result in 'rw', 'r' & 'w' in the set to make checks easier. If present in the resource's permissions, the 'all' permission type is an exception in that it's value is just a set instead of a dictionary. :param permissions: a list of permissions :returns: defaultdict """ groups = defaultdict(lambda: defaultdict(set)) for p in sorted(permissions, key=itemgetter('type')): permission_set = groups[p['type']][p.get('value')] permission_set.add(p['permission']) if p['permission'] == 'rw': permission_set.update({'r', 'w'}) # the 'all' permission type always has None as the value groups['all'] = groups['all'][None] return groups
[ "def", "group_permissions", "(", "permissions", ")", ":", "groups", "=", "defaultdict", "(", "lambda", ":", "defaultdict", "(", "set", ")", ")", "for", "p", "in", "sorted", "(", "permissions", ",", "key", "=", "itemgetter", "(", "'type'", ")", ")", ":", "permission_set", "=", "groups", "[", "p", "[", "'type'", "]", "]", "[", "p", ".", "get", "(", "'value'", ")", "]", "permission_set", ".", "add", "(", "p", "[", "'permission'", "]", ")", "if", "p", "[", "'permission'", "]", "==", "'rw'", ":", "permission_set", ".", "update", "(", "{", "'r'", ",", "'w'", "}", ")", "# the 'all' permission type always has None as the value", "groups", "[", "'all'", "]", "=", "groups", "[", "'all'", "]", "[", "None", "]", "return", "groups" ]
Groups a permissions list Returns a dictionary, with permission types as keys and sets of entities with access to the resource as values, e.g.: { 'organisation_id': { 'org1': set(['rw', 'r', 'w']), 'org2': set(['-']), 'org3': set(['r', 'w']), }, 'all': set(['r']) } 'org1' has 'rw' access to the resource, 'org2' is denied access and 'org3' has 'r' & 'w' access (the same as 'rw'). Note that 'rw' will always result in 'rw', 'r' & 'w' in the set to make checks easier. If present in the resource's permissions, the 'all' permission type is an exception in that it's value is just a set instead of a dictionary. :param permissions: a list of permissions :returns: defaultdict
[ "Groups", "a", "permissions", "list" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L288-L326
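A worked example for group_permissions above, assuming the function is available exactly as shown; it mirrors the structure described in its docstring. Note that an 'all' entry carries no 'value' key, which is why the last line of the function collapses groups['all'] to a plain set.

permissions = [
    {'type': 'organisation_id', 'value': 'org1', 'permission': 'rw'},
    {'type': 'organisation_id', 'value': 'org2', 'permission': '-'},
    {'type': 'all', 'permission': 'r'},
]

groups = group_permissions(permissions)

print(groups['organisation_id']['org1'])   # contains 'rw', 'r' and 'w'
print(groups['organisation_id']['org2'])   # contains only '-'
print(groups['all'])                       # contains only 'r'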
249,061
openpermissions/perch
perch/organisation.py
generate_secret
def generate_secret(length=30): """ Generate an ASCII secret using random.SysRandom Based on oauthlib's common.generate_token function """ rand = random.SystemRandom() ascii_characters = string.ascii_letters + string.digits return ''.join(rand.choice(ascii_characters) for _ in range(length))
python
def generate_secret(length=30): """ Generate an ASCII secret using random.SysRandom Based on oauthlib's common.generate_token function """ rand = random.SystemRandom() ascii_characters = string.ascii_letters + string.digits return ''.join(rand.choice(ascii_characters) for _ in range(length))
[ "def", "generate_secret", "(", "length", "=", "30", ")", ":", "rand", "=", "random", ".", "SystemRandom", "(", ")", "ascii_characters", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", "return", "''", ".", "join", "(", "rand", ".", "choice", "(", "ascii_characters", ")", "for", "_", "in", "range", "(", "length", ")", ")" ]
Generate an ASCII secret using random.SysRandom Based on oauthlib's common.generate_token function
[ "Generate", "an", "ASCII", "secret", "using", "random", ".", "SysRandom" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L754-L763
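Usage sketch for generate_secret above; it assumes only the function as shown plus the standard library.

import string

secret = generate_secret()                  # 30 characters by default
assert len(secret) == 30
assert all(c in string.ascii_letters + string.digits for c in secret)

client_secret = generate_secret(length=64)  # e.g. a longer OAuth client secret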
249,062
openpermissions/perch
perch/organisation.py
Organisation.all
def all(cls, state=None, include_deactivated=False): """ Get all organisations :param state: State of organisation :param include_deactivated: Flag to include deactivated :returns: list of Organisation instances :raises: SocketError, CouchException """ if state and state not in validators.VALID_STATES: raise exceptions.ValidationError('Invalid "state"') elif state: organisations = yield views.organisations.get(key=state, include_docs=True) elif include_deactivated: organisations = yield views.organisations.get(include_docs=True) else: organisations = yield views.active_organisations.get(include_docs=True) raise Return([cls(**org['doc']) for org in organisations['rows']])
python
def all(cls, state=None, include_deactivated=False): """ Get all organisations :param state: State of organisation :param include_deactivated: Flag to include deactivated :returns: list of Organisation instances :raises: SocketError, CouchException """ if state and state not in validators.VALID_STATES: raise exceptions.ValidationError('Invalid "state"') elif state: organisations = yield views.organisations.get(key=state, include_docs=True) elif include_deactivated: organisations = yield views.organisations.get(include_docs=True) else: organisations = yield views.active_organisations.get(include_docs=True) raise Return([cls(**org['doc']) for org in organisations['rows']])
[ "def", "all", "(", "cls", ",", "state", "=", "None", ",", "include_deactivated", "=", "False", ")", ":", "if", "state", "and", "state", "not", "in", "validators", ".", "VALID_STATES", ":", "raise", "exceptions", ".", "ValidationError", "(", "'Invalid \"state\"'", ")", "elif", "state", ":", "organisations", "=", "yield", "views", ".", "organisations", ".", "get", "(", "key", "=", "state", ",", "include_docs", "=", "True", ")", "elif", "include_deactivated", ":", "organisations", "=", "yield", "views", ".", "organisations", ".", "get", "(", "include_docs", "=", "True", ")", "else", ":", "organisations", "=", "yield", "views", ".", "active_organisations", ".", "get", "(", "include_docs", "=", "True", ")", "raise", "Return", "(", "[", "cls", "(", "*", "*", "org", "[", "'doc'", "]", ")", "for", "org", "in", "organisations", "[", "'rows'", "]", "]", ")" ]
Get all organisations :param state: State of organisation :param include_deactivated: Flag to include deactivated :returns: list of Organisation instances :raises: SocketError, CouchException
[ "Get", "all", "organisations" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L176-L195
249,063
openpermissions/perch
perch/organisation.py
Organisation.user_organisations
def user_organisations(cls, user_id, state=None, include_deactivated=False): """ Get organisations that the user has joined :param user_id: the user ID :param state: the user's "join" state :param include_deactivated: Include deactivated resources in response :returns: list of Organisation instances :raises: SocketError, CouchException """ if state and state not in validators.VALID_STATES: raise exceptions.ValidationError('Invalid "state"') if include_deactivated: organisations = yield views.joined_organisations.get( key=[user_id, state], include_docs=True) else: organisations = yield views.active_joined_organisations.get( key=[user_id, state], include_docs=True) raise Return([cls(**org['doc']) for org in organisations['rows']])
python
def user_organisations(cls, user_id, state=None, include_deactivated=False): """ Get organisations that the user has joined :param user_id: the user ID :param state: the user's "join" state :param include_deactivated: Include deactivated resources in response :returns: list of Organisation instances :raises: SocketError, CouchException """ if state and state not in validators.VALID_STATES: raise exceptions.ValidationError('Invalid "state"') if include_deactivated: organisations = yield views.joined_organisations.get( key=[user_id, state], include_docs=True) else: organisations = yield views.active_joined_organisations.get( key=[user_id, state], include_docs=True) raise Return([cls(**org['doc']) for org in organisations['rows']])
[ "def", "user_organisations", "(", "cls", ",", "user_id", ",", "state", "=", "None", ",", "include_deactivated", "=", "False", ")", ":", "if", "state", "and", "state", "not", "in", "validators", ".", "VALID_STATES", ":", "raise", "exceptions", ".", "ValidationError", "(", "'Invalid \"state\"'", ")", "if", "include_deactivated", ":", "organisations", "=", "yield", "views", ".", "joined_organisations", ".", "get", "(", "key", "=", "[", "user_id", ",", "state", "]", ",", "include_docs", "=", "True", ")", "else", ":", "organisations", "=", "yield", "views", ".", "active_joined_organisations", ".", "get", "(", "key", "=", "[", "user_id", ",", "state", "]", ",", "include_docs", "=", "True", ")", "raise", "Return", "(", "[", "cls", "(", "*", "*", "org", "[", "'doc'", "]", ")", "for", "org", "in", "organisations", "[", "'rows'", "]", "]", ")" ]
Get organisations that the user has joined :param user_id: the user ID :param state: the user's "join" state :param include_deactivated: Include deactivated resources in response :returns: list of Organisation instances :raises: SocketError, CouchException
[ "Get", "organisations", "that", "the", "user", "has", "joined" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L199-L219
249,064
openpermissions/perch
perch/organisation.py
Organisation.can_update
def can_update(self, user, **data): """ Sys admins can always update an organisation. Organisation admins and creators can update, but may not update the following fields: - star_rating :param user: a User :param data: data that the user wants to update :returns: bool, set of fields that the user was not authorized to update """ if user.is_admin(): raise Return((True, set([]))) org_admin = user.is_org_admin(self.id) creator = self.created_by == user.id if org_admin or creator: fields = {'star_rating'} & set(data.keys()) if fields: raise Return((False, fields)) else: raise Return((True, set([]))) raise Return((False, set([])))
python
def can_update(self, user, **data): """ Sys admins can always update an organisation. Organisation admins and creators can update, but may not update the following fields: - star_rating :param user: a User :param data: data that the user wants to update :returns: bool, set of fields that the user was not authorized to update """ if user.is_admin(): raise Return((True, set([]))) org_admin = user.is_org_admin(self.id) creator = self.created_by == user.id if org_admin or creator: fields = {'star_rating'} & set(data.keys()) if fields: raise Return((False, fields)) else: raise Return((True, set([]))) raise Return((False, set([])))
[ "def", "can_update", "(", "self", ",", "user", ",", "*", "*", "data", ")", ":", "if", "user", ".", "is_admin", "(", ")", ":", "raise", "Return", "(", "(", "True", ",", "set", "(", "[", "]", ")", ")", ")", "org_admin", "=", "user", ".", "is_org_admin", "(", "self", ".", "id", ")", "creator", "=", "self", ".", "created_by", "==", "user", ".", "id", "if", "org_admin", "or", "creator", ":", "fields", "=", "{", "'star_rating'", "}", "&", "set", "(", "data", ".", "keys", "(", ")", ")", "if", "fields", ":", "raise", "Return", "(", "(", "False", ",", "fields", ")", ")", "else", ":", "raise", "Return", "(", "(", "True", ",", "set", "(", "[", "]", ")", ")", ")", "raise", "Return", "(", "(", "False", ",", "set", "(", "[", "]", ")", ")", ")" ]
Sys admins can always update an organisation. Organisation admins and creators can update, but may not update the following fields: - star_rating :param user: a User :param data: data that the user wants to update :returns: bool, set of fields that the user was not authorized to update
[ "Sys", "admins", "can", "always", "update", "an", "organisation", "." ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L233-L257
249,065
openpermissions/perch
perch/organisation.py
Service.check_unique
def check_unique(self): """Check the service's name and location are unique""" errors = [] service_id = getattr(self, 'id', None) fields = [('location', views.service_location), ('name', views.service_name)] for field, view in fields: value = getattr(self, field, None) if not value: continue result = yield view.values(key=value) matched = {x['id'] for x in result if x['id'] != service_id} if matched: errors.append("Service with {} '{}' already exists" .format(field, value)) if errors: raise exceptions.ValidationError(errors)
python
def check_unique(self): """Check the service's name and location are unique""" errors = [] service_id = getattr(self, 'id', None) fields = [('location', views.service_location), ('name', views.service_name)] for field, view in fields: value = getattr(self, field, None) if not value: continue result = yield view.values(key=value) matched = {x['id'] for x in result if x['id'] != service_id} if matched: errors.append("Service with {} '{}' already exists" .format(field, value)) if errors: raise exceptions.ValidationError(errors)
[ "def", "check_unique", "(", "self", ")", ":", "errors", "=", "[", "]", "service_id", "=", "getattr", "(", "self", ",", "'id'", ",", "None", ")", "fields", "=", "[", "(", "'location'", ",", "views", ".", "service_location", ")", ",", "(", "'name'", ",", "views", ".", "service_name", ")", "]", "for", "field", ",", "view", "in", "fields", ":", "value", "=", "getattr", "(", "self", ",", "field", ",", "None", ")", "if", "not", "value", ":", "continue", "result", "=", "yield", "view", ".", "values", "(", "key", "=", "value", ")", "matched", "=", "{", "x", "[", "'id'", "]", "for", "x", "in", "result", "if", "x", "[", "'id'", "]", "!=", "service_id", "}", "if", "matched", ":", "errors", ".", "append", "(", "\"Service with {} '{}' already exists\"", ".", "format", "(", "field", ",", "value", ")", ")", "if", "errors", ":", "raise", "exceptions", ".", "ValidationError", "(", "errors", ")" ]
Check the service's name and location are unique
[ "Check", "the", "service", "s", "name", "and", "location", "are", "unique" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L364-L383
249,066
openpermissions/perch
perch/organisation.py
Service.get_by_location
def get_by_location(cls, location, include_deactivated=False): """Get a service by it's location""" if include_deactivated: view = views.service_location else: view = views.active_service_location result = yield view.first(key=location, include_docs=True) parent = cls.parent_resource(**result['doc']) raise Return(cls(parent=parent, **result['value']))
python
def get_by_location(cls, location, include_deactivated=False): """Get a service by it's location""" if include_deactivated: view = views.service_location else: view = views.active_service_location result = yield view.first(key=location, include_docs=True) parent = cls.parent_resource(**result['doc']) raise Return(cls(parent=parent, **result['value']))
[ "def", "get_by_location", "(", "cls", ",", "location", ",", "include_deactivated", "=", "False", ")", ":", "if", "include_deactivated", ":", "view", "=", "views", ".", "service_location", "else", ":", "view", "=", "views", ".", "active_service_location", "result", "=", "yield", "view", ".", "first", "(", "key", "=", "location", ",", "include_docs", "=", "True", ")", "parent", "=", "cls", ".", "parent_resource", "(", "*", "*", "result", "[", "'doc'", "]", ")", "raise", "Return", "(", "cls", "(", "parent", "=", "parent", ",", "*", "*", "result", "[", "'value'", "]", ")", ")" ]
Get a service by it's location
[ "Get", "a", "service", "by", "it", "s", "location" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L398-L408
249,067
openpermissions/perch
perch/organisation.py
Service.all
def all(cls, service_type=None, organisation_id=None, include_deactivated=False): """ Get all resources :param service_type: Filter by service type :param organisation_id: Filter by organisation id :param include_deactivated: Flag to include deactivated Services :returns: list of Resource instances :raises: SocketError, CouchException """ if include_deactivated: resources = yield views.services.get(key=[service_type, organisation_id]) else: resources = yield views.active_services.get(key=[service_type, organisation_id]) # TODO: shouldn't this include the doc as the parent? raise Return([cls(**resource['value']) for resource in resources['rows']])
python
def all(cls, service_type=None, organisation_id=None, include_deactivated=False): """ Get all resources :param service_type: Filter by service type :param organisation_id: Filter by organisation id :param include_deactivated: Flag to include deactivated Services :returns: list of Resource instances :raises: SocketError, CouchException """ if include_deactivated: resources = yield views.services.get(key=[service_type, organisation_id]) else: resources = yield views.active_services.get(key=[service_type, organisation_id]) # TODO: shouldn't this include the doc as the parent? raise Return([cls(**resource['value']) for resource in resources['rows']])
[ "def", "all", "(", "cls", ",", "service_type", "=", "None", ",", "organisation_id", "=", "None", ",", "include_deactivated", "=", "False", ")", ":", "if", "include_deactivated", ":", "resources", "=", "yield", "views", ".", "services", ".", "get", "(", "key", "=", "[", "service_type", ",", "organisation_id", "]", ")", "else", ":", "resources", "=", "yield", "views", ".", "active_services", ".", "get", "(", "key", "=", "[", "service_type", ",", "organisation_id", "]", ")", "# TODO: shouldn't this include the doc as the parent?", "raise", "Return", "(", "[", "cls", "(", "*", "*", "resource", "[", "'value'", "]", ")", "for", "resource", "in", "resources", "[", "'rows'", "]", "]", ")" ]
Get all resources :param service_type: Filter by service type :param organisation_id: Filter by organisation id :param include_deactivated: Flag to include deactivated Services :returns: list of Resource instances :raises: SocketError, CouchException
[ "Get", "all", "resources" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L412-L429
249,068
openpermissions/perch
perch/organisation.py
Service.can_update
def can_update(self, user, **kwargs): """Org admins may not update organisation_id or service_type""" if user.is_admin(): raise Return((True, set([]))) is_creator = self.created_by == user.id if not (user.is_org_admin(self.organisation_id) or is_creator): raise Return((False, set([]))) fields = ({'service_type', 'organisation_id'} & set(kwargs.keys())) if fields: raise Return((False, fields)) else: raise Return((True, set([])))
python
def can_update(self, user, **kwargs): """Org admins may not update organisation_id or service_type""" if user.is_admin(): raise Return((True, set([]))) is_creator = self.created_by == user.id if not (user.is_org_admin(self.organisation_id) or is_creator): raise Return((False, set([]))) fields = ({'service_type', 'organisation_id'} & set(kwargs.keys())) if fields: raise Return((False, fields)) else: raise Return((True, set([])))
[ "def", "can_update", "(", "self", ",", "user", ",", "*", "*", "kwargs", ")", ":", "if", "user", ".", "is_admin", "(", ")", ":", "raise", "Return", "(", "(", "True", ",", "set", "(", "[", "]", ")", ")", ")", "is_creator", "=", "self", ".", "created_by", "==", "user", ".", "id", "if", "not", "(", "user", ".", "is_org_admin", "(", "self", ".", "organisation_id", ")", "or", "is_creator", ")", ":", "raise", "Return", "(", "(", "False", ",", "set", "(", "[", "]", ")", ")", ")", "fields", "=", "(", "{", "'service_type'", ",", "'organisation_id'", "}", "&", "set", "(", "kwargs", ".", "keys", "(", ")", ")", ")", "if", "fields", ":", "raise", "Return", "(", "(", "False", ",", "fields", ")", ")", "else", ":", "raise", "Return", "(", "(", "True", ",", "set", "(", "[", "]", ")", ")", ")" ]
Org admins may not update organisation_id or service_type
[ "Org", "admins", "may", "not", "update", "organisation_id", "or", "service_type" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L475-L488
249,069
openpermissions/perch
perch/organisation.py
Service.authenticate
def authenticate(cls, client_id, secret): """ Authenticate a client using it's secret :param client_id: the client / service ID :param secret: the client secret :returns: a Service instance """ result = yield views.oauth_client.get(key=[secret, client_id]) if not result['rows']: raise Return() service = yield Service.get(client_id) raise Return(service)
python
def authenticate(cls, client_id, secret): """ Authenticate a client using it's secret :param client_id: the client / service ID :param secret: the client secret :returns: a Service instance """ result = yield views.oauth_client.get(key=[secret, client_id]) if not result['rows']: raise Return() service = yield Service.get(client_id) raise Return(service)
[ "def", "authenticate", "(", "cls", ",", "client_id", ",", "secret", ")", ":", "result", "=", "yield", "views", ".", "oauth_client", ".", "get", "(", "key", "=", "[", "secret", ",", "client_id", "]", ")", "if", "not", "result", "[", "'rows'", "]", ":", "raise", "Return", "(", ")", "service", "=", "yield", "Service", ".", "get", "(", "client_id", ")", "raise", "Return", "(", "service", ")" ]
Authenticate a client using it's secret :param client_id: the client / service ID :param secret: the client secret :returns: a Service instance
[ "Authenticate", "a", "client", "using", "it", "s", "secret" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L492-L505
249,070
openpermissions/perch
perch/organisation.py
Service.authorized
def authorized(self, requested_access, resource): """ Check whether the service is authorized to access the resource :param requested_access: "r", "w", or "rw" :param resource: a Resource or SubResource with "permissions" attribute :returns: True if has access, False otherwise """ if {self.state, resource.state} != {State.approved}: return False permissions = group_permissions(getattr(resource, 'permissions', [])) org_permissions = permissions['organisation_id'][self.organisation_id] type_permissions = permissions['service_type'][self.service_type] all_permissions = permissions['all'] for permission_set in [org_permissions, type_permissions, all_permissions]: if '-' in permission_set: return False elif set([x for x in requested_access]).issubset(permission_set): return True return False
python
def authorized(self, requested_access, resource): """ Check whether the service is authorized to access the resource :param requested_access: "r", "w", or "rw" :param resource: a Resource or SubResource with "permissions" attribute :returns: True if has access, False otherwise """ if {self.state, resource.state} != {State.approved}: return False permissions = group_permissions(getattr(resource, 'permissions', [])) org_permissions = permissions['organisation_id'][self.organisation_id] type_permissions = permissions['service_type'][self.service_type] all_permissions = permissions['all'] for permission_set in [org_permissions, type_permissions, all_permissions]: if '-' in permission_set: return False elif set([x for x in requested_access]).issubset(permission_set): return True return False
[ "def", "authorized", "(", "self", ",", "requested_access", ",", "resource", ")", ":", "if", "{", "self", ".", "state", ",", "resource", ".", "state", "}", "!=", "{", "State", ".", "approved", "}", ":", "return", "False", "permissions", "=", "group_permissions", "(", "getattr", "(", "resource", ",", "'permissions'", ",", "[", "]", ")", ")", "org_permissions", "=", "permissions", "[", "'organisation_id'", "]", "[", "self", ".", "organisation_id", "]", "type_permissions", "=", "permissions", "[", "'service_type'", "]", "[", "self", ".", "service_type", "]", "all_permissions", "=", "permissions", "[", "'all'", "]", "for", "permission_set", "in", "[", "org_permissions", ",", "type_permissions", ",", "all_permissions", "]", ":", "if", "'-'", "in", "permission_set", ":", "return", "False", "elif", "set", "(", "[", "x", "for", "x", "in", "requested_access", "]", ")", ".", "issubset", "(", "permission_set", ")", ":", "return", "True", "return", "False" ]
Check whether the service is authorized to access the resource

:param requested_access: "r", "w", or "rw"
:param resource: a Resource or SubResource with "permissions" attribute
:returns: True if has access, False otherwise
[ "Check", "whether", "the", "service", "is", "authorized", "to", "access", "the", "resource" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L507-L530
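The access check in `Service.authorized` above reduces to a character-subset test applied to each permission group in turn, with an explicit '-' entry acting as a deny. Below is a small standalone sketch of just that test, written independently of perch's `group_permissions` helper (whose output format is not shown in this dump), to illustrate how "r", "w" and "rw" requests interact with a permission set.

    def has_access(requested_access, permission_set):
        """Mirror of the per-group test in Service.authorized: deny wins,
        otherwise every requested character must be in the permission set."""
        if '-' in permission_set:
            return False
        return set(requested_access).issubset(permission_set)


    # 'rw' requires both read and write to be granted
    assert has_access('rw', {'r', 'w'}) is True
    assert has_access('rw', {'r'}) is False
    # an explicit '-' entry denies regardless of other grants
    assert has_access('r', {'r', '-'}) is False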
249,071
openpermissions/perch
perch/organisation.py
Repository.validate
def validate(self):
    """Validate the resource"""
    if not self._resource.get('permissions'):
        self.permissions = self.default_permissions

    try:
        # update _resource so have default values from the schema
        self._resource = self.schema(self._resource)
    except MultipleInvalid as e:
        errors = [format_error(err, self.resource_type) for err in e.errors]
        raise exceptions.ValidationError({'errors': errors})

    yield self.check_service()
    yield self.check_unique()
python
def validate(self):
    """Validate the resource"""
    if not self._resource.get('permissions'):
        self.permissions = self.default_permissions

    try:
        # update _resource so have default values from the schema
        self._resource = self.schema(self._resource)
    except MultipleInvalid as e:
        errors = [format_error(err, self.resource_type) for err in e.errors]
        raise exceptions.ValidationError({'errors': errors})

    yield self.check_service()
    yield self.check_unique()
[ "def", "validate", "(", "self", ")", ":", "if", "not", "self", ".", "_resource", ".", "get", "(", "'permissions'", ")", ":", "self", ".", "permissions", "=", "self", ".", "default_permissions", "try", ":", "# update _resource so have default values from the schema", "self", ".", "_resource", "=", "self", ".", "schema", "(", "self", ".", "_resource", ")", "except", "MultipleInvalid", "as", "e", ":", "errors", "=", "[", "format_error", "(", "err", ",", "self", ".", "resource_type", ")", "for", "err", "in", "e", ".", "errors", "]", "raise", "exceptions", ".", "ValidationError", "(", "{", "'errors'", ":", "errors", "}", ")", "yield", "self", ".", "check_service", "(", ")", "yield", "self", ".", "check_unique", "(", ")" ]
Validate the resource
[ "Validate", "the", "resource" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L604-L617
249,072
openpermissions/perch
perch/organisation.py
Repository.check_service
def check_service(self):
    """Check the service exists and is a repository service"""
    try:
        service = yield Service.get(self.service_id)
    except couch.NotFound:
        raise exceptions.ValidationError('Service {} not found'
                                         .format(self.service_id))

    if service.service_type != 'repository':
        raise exceptions.ValidationError('{} is not a repository service'
                                         .format(self.service_id))

    if service.state != State.approved:
        raise exceptions.ValidationError('{} is not an approved service'
                                         .format(self.service_id))
python
def check_service(self):
    """Check the service exists and is a repository service"""
    try:
        service = yield Service.get(self.service_id)
    except couch.NotFound:
        raise exceptions.ValidationError('Service {} not found'
                                         .format(self.service_id))

    if service.service_type != 'repository':
        raise exceptions.ValidationError('{} is not a repository service'
                                         .format(self.service_id))

    if service.state != State.approved:
        raise exceptions.ValidationError('{} is not an approved service'
                                         .format(self.service_id))
[ "def", "check_service", "(", "self", ")", ":", "try", ":", "service", "=", "yield", "Service", ".", "get", "(", "self", ".", "service_id", ")", "except", "couch", ".", "NotFound", ":", "raise", "exceptions", ".", "ValidationError", "(", "'Service {} not found'", ".", "format", "(", "self", ".", "service_id", ")", ")", "if", "service", ".", "service_type", "!=", "'repository'", ":", "raise", "exceptions", ".", "ValidationError", "(", "'{} is not a repository service'", ".", "format", "(", "self", ".", "service_id", ")", ")", "if", "service", ".", "state", "!=", "State", ".", "approved", ":", "raise", "exceptions", ".", "ValidationError", "(", "'{} is not an approved service'", ".", "format", "(", "self", ".", "service_id", ")", ")" ]
Check the service exists and is a repository service
[ "Check", "the", "service", "exists", "and", "is", "a", "repository", "service" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L620-L634
249,073
openpermissions/perch
perch/organisation.py
Repository.check_unique
def check_unique(self):
    """Check the repository's name is unique"""
    result = yield views.repository_name.values(key=self.name)
    repo_id = getattr(self, 'id', None)

    repos = {x for x in result if x != repo_id and x}

    if repos:
        raise exceptions.ValidationError(
            "Repository with name '{}' already exists".format(self.name))
python
def check_unique(self):
    """Check the repository's name is unique"""
    result = yield views.repository_name.values(key=self.name)
    repo_id = getattr(self, 'id', None)

    repos = {x for x in result if x != repo_id and x}

    if repos:
        raise exceptions.ValidationError(
            "Repository with name '{}' already exists".format(self.name))
[ "def", "check_unique", "(", "self", ")", ":", "result", "=", "yield", "views", ".", "repository_name", ".", "values", "(", "key", "=", "self", ".", "name", ")", "repo_id", "=", "getattr", "(", "self", ",", "'id'", ",", "None", ")", "repos", "=", "{", "x", "for", "x", "in", "result", "if", "x", "!=", "repo_id", "and", "x", "}", "if", "repos", ":", "raise", "exceptions", ".", "ValidationError", "(", "\"Repository with name '{}' already exists\"", ".", "format", "(", "self", ".", "name", ")", ")" ]
Check the repository's name is unique
[ "Check", "the", "repository", "s", "name", "is", "unique" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L637-L645
249,074
openpermissions/perch
perch/organisation.py
Repository.can_update
def can_update(self, user, **kwargs): """ Sys admin's can change anything If the user is an organisation administrator or created the repository, they may change any field other than "organisation_id" If the user is a service administrator the user may change the "state" but no other fields. """ if user.is_admin(): raise Return((True, set([]))) is_creator = self.created_by == user.id if user.is_org_admin(self.organisation_id) or is_creator: fields = set([]) if 'organisation_id' in kwargs: fields.add('organisation_id') if fields: raise Return((False, fields)) else: raise Return((True, set([]))) try: service = yield Service.get(self.service_id) if user.is_org_admin(service.organisation_id): fields = set(kwargs) - {'state'} if fields: raise Return((False, fields)) else: raise Return((True, fields)) except couch.NotFound: # will be handled in Repository.validate pass raise Return((False, set([])))
python
def can_update(self, user, **kwargs): """ Sys admin's can change anything If the user is an organisation administrator or created the repository, they may change any field other than "organisation_id" If the user is a service administrator the user may change the "state" but no other fields. """ if user.is_admin(): raise Return((True, set([]))) is_creator = self.created_by == user.id if user.is_org_admin(self.organisation_id) or is_creator: fields = set([]) if 'organisation_id' in kwargs: fields.add('organisation_id') if fields: raise Return((False, fields)) else: raise Return((True, set([]))) try: service = yield Service.get(self.service_id) if user.is_org_admin(service.organisation_id): fields = set(kwargs) - {'state'} if fields: raise Return((False, fields)) else: raise Return((True, fields)) except couch.NotFound: # will be handled in Repository.validate pass raise Return((False, set([])))
[ "def", "can_update", "(", "self", ",", "user", ",", "*", "*", "kwargs", ")", ":", "if", "user", ".", "is_admin", "(", ")", ":", "raise", "Return", "(", "(", "True", ",", "set", "(", "[", "]", ")", ")", ")", "is_creator", "=", "self", ".", "created_by", "==", "user", ".", "id", "if", "user", ".", "is_org_admin", "(", "self", ".", "organisation_id", ")", "or", "is_creator", ":", "fields", "=", "set", "(", "[", "]", ")", "if", "'organisation_id'", "in", "kwargs", ":", "fields", ".", "add", "(", "'organisation_id'", ")", "if", "fields", ":", "raise", "Return", "(", "(", "False", ",", "fields", ")", ")", "else", ":", "raise", "Return", "(", "(", "True", ",", "set", "(", "[", "]", ")", ")", ")", "try", ":", "service", "=", "yield", "Service", ".", "get", "(", "self", ".", "service_id", ")", "if", "user", ".", "is_org_admin", "(", "service", ".", "organisation_id", ")", ":", "fields", "=", "set", "(", "kwargs", ")", "-", "{", "'state'", "}", "if", "fields", ":", "raise", "Return", "(", "(", "False", ",", "fields", ")", ")", "else", ":", "raise", "Return", "(", "(", "True", ",", "fields", ")", ")", "except", "couch", ".", "NotFound", ":", "# will be handled in Repository.validate", "pass", "raise", "Return", "(", "(", "False", ",", "set", "(", "[", "]", ")", ")", ")" ]
Sys admin's can change anything

If the user is an organisation administrator or created the repository,
they may change any field other than "organisation_id"

If the user is a service administrator the user may change the "state"
but no other fields.
[ "Sys", "admin", "s", "can", "change", "anything" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L680-L717
249,075
openpermissions/perch
perch/organisation.py
Repository.with_relations
def with_relations(self, user=None): """ Return a cleaned dictionary including relations :returns: a Repository instance """ repository = self.clean(user=user) try: parent = yield self.get_parent() repository['organisation'] = parent.clean() except couch.NotFound: parent = None repository['organisation'] = {'id': self.parent_id} service_id = self.service_id try: # TODO: cache this lookup service = yield Service.get(service_id) repository['service'] = service.clean(user=user) except couch.NotFound: # just include the service ID if cannot find the service repository['service'] = {'id': service_id} del repository['service_id'] del repository['organisation_id'] raise Return(repository)
python
def with_relations(self, user=None): """ Return a cleaned dictionary including relations :returns: a Repository instance """ repository = self.clean(user=user) try: parent = yield self.get_parent() repository['organisation'] = parent.clean() except couch.NotFound: parent = None repository['organisation'] = {'id': self.parent_id} service_id = self.service_id try: # TODO: cache this lookup service = yield Service.get(service_id) repository['service'] = service.clean(user=user) except couch.NotFound: # just include the service ID if cannot find the service repository['service'] = {'id': service_id} del repository['service_id'] del repository['organisation_id'] raise Return(repository)
[ "def", "with_relations", "(", "self", ",", "user", "=", "None", ")", ":", "repository", "=", "self", ".", "clean", "(", "user", "=", "user", ")", "try", ":", "parent", "=", "yield", "self", ".", "get_parent", "(", ")", "repository", "[", "'organisation'", "]", "=", "parent", ".", "clean", "(", ")", "except", "couch", ".", "NotFound", ":", "parent", "=", "None", "repository", "[", "'organisation'", "]", "=", "{", "'id'", ":", "self", ".", "parent_id", "}", "service_id", "=", "self", ".", "service_id", "try", ":", "# TODO: cache this lookup", "service", "=", "yield", "Service", ".", "get", "(", "service_id", ")", "repository", "[", "'service'", "]", "=", "service", ".", "clean", "(", "user", "=", "user", ")", "except", "couch", ".", "NotFound", ":", "# just include the service ID if cannot find the service", "repository", "[", "'service'", "]", "=", "{", "'id'", ":", "service_id", "}", "del", "repository", "[", "'service_id'", "]", "del", "repository", "[", "'organisation_id'", "]", "raise", "Return", "(", "repository", ")" ]
Return a cleaned dictionary including relations

:returns: a Repository instance
[ "Return", "a", "cleaned", "dictionary", "including", "relations" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L725-L751
249,076
openpermissions/perch
perch/organisation.py
OAuthSecret.client_secrets
def client_secrets(cls, client_id):
    """
    Get the client's secrets using the client_id

    :param client_id: the client ID, e.g. a service ID
    :returns: list OAuthSecret instances
    """
    secrets = yield cls.view.get(key=client_id, include_docs=True)
    raise Return([cls(**secret['doc']) for secret in secrets['rows']])
python
def client_secrets(cls, client_id):
    """
    Get the client's secrets using the client_id

    :param client_id: the client ID, e.g. a service ID
    :returns: list OAuthSecret instances
    """
    secrets = yield cls.view.get(key=client_id, include_docs=True)
    raise Return([cls(**secret['doc']) for secret in secrets['rows']])
[ "def", "client_secrets", "(", "cls", ",", "client_id", ")", ":", "secrets", "=", "yield", "cls", ".", "view", ".", "get", "(", "key", "=", "client_id", ",", "include_docs", "=", "True", ")", "raise", "Return", "(", "[", "cls", "(", "*", "*", "secret", "[", "'doc'", "]", ")", "for", "secret", "in", "secrets", "[", "'rows'", "]", "]", ")" ]
Get the client's secrets using the client_id

:param client_id: the client ID, e.g. a service ID
:returns: list OAuthSecret instances
[ "Get", "the", "client", "s", "secrets", "using", "the", "client_id" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L801-L809
249,077
openpermissions/perch
perch/organisation.py
OAuthSecret.delete_all_secrets
def delete_all_secrets(cls, user, client_id): """Delete all of the client's credentials""" can_delete = yield cls(client_id=client_id).can_delete(user) if not can_delete: raise exceptions.Unauthorized('User may not delete {} secrets' .format(client_id)) results = yield cls.view.get(key=client_id, include_docs=True) if results['rows']: db = cls.db_client() docs = [{ '_rev': doc['doc']['_rev'], '_id': doc['doc']['_id'], '_deleted': True } for doc in results['rows']] yield db.save_docs(docs)
python
def delete_all_secrets(cls, user, client_id): """Delete all of the client's credentials""" can_delete = yield cls(client_id=client_id).can_delete(user) if not can_delete: raise exceptions.Unauthorized('User may not delete {} secrets' .format(client_id)) results = yield cls.view.get(key=client_id, include_docs=True) if results['rows']: db = cls.db_client() docs = [{ '_rev': doc['doc']['_rev'], '_id': doc['doc']['_id'], '_deleted': True } for doc in results['rows']] yield db.save_docs(docs)
[ "def", "delete_all_secrets", "(", "cls", ",", "user", ",", "client_id", ")", ":", "can_delete", "=", "yield", "cls", "(", "client_id", "=", "client_id", ")", ".", "can_delete", "(", "user", ")", "if", "not", "can_delete", ":", "raise", "exceptions", ".", "Unauthorized", "(", "'User may not delete {} secrets'", ".", "format", "(", "client_id", ")", ")", "results", "=", "yield", "cls", ".", "view", ".", "get", "(", "key", "=", "client_id", ",", "include_docs", "=", "True", ")", "if", "results", "[", "'rows'", "]", ":", "db", "=", "cls", ".", "db_client", "(", ")", "docs", "=", "[", "{", "'_rev'", ":", "doc", "[", "'doc'", "]", "[", "'_rev'", "]", ",", "'_id'", ":", "doc", "[", "'doc'", "]", "[", "'_id'", "]", ",", "'_deleted'", ":", "True", "}", "for", "doc", "in", "results", "[", "'rows'", "]", "]", "yield", "db", ".", "save_docs", "(", "docs", ")" ]
Delete all of the client's credentials
[ "Delete", "all", "of", "the", "client", "s", "credentials" ]
36d78994133918f3c52c187f19e50132960a0156
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/organisation.py#L813-L831
249,078
OpenVolunteeringPlatform/django-ovp-core
ovp_core/emails.py
ContactFormMail.sendContact
def sendContact(self, context={}):
    """ Send contact form message to single or multiple recipients """
    for recipient in self.recipients:
        super(ContactFormMail, self).__init__(recipient, self.async)
        self.sendEmail('contactForm', 'New contact form message', context)
python
def sendContact(self, context={}):
    """ Send contact form message to single or multiple recipients """
    for recipient in self.recipients:
        super(ContactFormMail, self).__init__(recipient, self.async)
        self.sendEmail('contactForm', 'New contact form message', context)
[ "def", "sendContact", "(", "self", ",", "context", "=", "{", "}", ")", ":", "for", "recipient", "in", "self", ".", "recipients", ":", "super", "(", "ContactFormMail", ",", "self", ")", ".", "__init__", "(", "recipient", ",", "self", ".", "async", ")", "self", ".", "sendEmail", "(", "'contactForm'", ",", "'New contact form message'", ",", "context", ")" ]
Send contact form message to single or multiple recipients
[ "Send", "contact", "form", "message", "to", "single", "or", "multiple", "recipients" ]
c81b868a0a4b317f7b1ec0718cabc34f7794dd20
https://github.com/OpenVolunteeringPlatform/django-ovp-core/blob/c81b868a0a4b317f7b1ec0718cabc34f7794dd20/ovp_core/emails.py#L75-L81
249,079
b3j0f/utils
b3j0f/utils/proxy.py
proxify_elt
def proxify_elt(elt, bases=None, _dict=None, public=False): """Proxify input elt. :param elt: elt to proxify. :param bases: elt class base classes. If None, use elt type. :param dict _dict: specific elt class content to use. :param bool public: if True (default False), proxify only public members (where name starts with the character '_'). :return: proxified element. :raises: TypeError if elt does not implement all routines of bases and _dict. """ # ensure _dict is a dictionary proxy_dict = {} if _dict is None else _dict.copy() # set of proxified attribute names which are proxified during bases parsing # and avoid to proxify them twice during _dict parsing proxified_attribute_names = set() # ensure bases is a tuple of types if bases is None: bases = (elt if isclass(elt) else elt.__class__,) if isinstance(bases, string_types): bases = (lookup(bases),) elif isclass(bases): bases = (bases,) else: bases = tuple(bases) # fill proxy_dict with routines of bases for base in bases: # exclude object if base is object: continue for name, member in getmembers(base, isroutine): # check if name is public if public and not name.startswith('_'): continue eltmember = getattr(elt, name, None) if eltmember is None: raise TypeError( 'Wrong elt {0}. Must implement {1} ({2}) of {3}.'. format(elt, name, member, base) ) # proxify member if member is not a constructor if name not in ['__new__', '__init__']: # get routine from proxy_dict or eltmember routine = proxy_dict.get(name, eltmember) # exclude object methods if getattr(routine, '__objclass__', None) is not object: # get routine proxy routine_proxy = proxify_routine(routine) if ismethod(routine_proxy): routine_proxy = get_method_function(routine_proxy) # update proxy_dict proxy_dict[name] = routine_proxy # and save the proxified attribute flag proxified_attribute_names.add(name) # proxify proxy_dict for name in proxy_dict: value = proxy_dict[name] if not hasattr(elt, name): raise TypeError( 'Wrong elt {0}. Must implement {1} ({2}).'.format( elt, name, value ) ) if isroutine(value): # if member has not already been proxified if name not in proxified_attribute_names: # proxify it value = proxify_routine(value) proxy_dict[name] = value # set default constructors if not present in proxy_dict if '__new__' not in proxy_dict: proxy_dict['__new__'] = object.__new__ if '__init__' not in proxy_dict: proxy_dict['__init__'] = object.__init__ # generate a new proxy class cls = type('Proxy', bases, proxy_dict) # instantiate proxy cls result = cls if isclass(elt) else cls() # bind elt to proxy setattr(result, __PROXIFIED__, elt) return result
python
def proxify_elt(elt, bases=None, _dict=None, public=False): """Proxify input elt. :param elt: elt to proxify. :param bases: elt class base classes. If None, use elt type. :param dict _dict: specific elt class content to use. :param bool public: if True (default False), proxify only public members (where name starts with the character '_'). :return: proxified element. :raises: TypeError if elt does not implement all routines of bases and _dict. """ # ensure _dict is a dictionary proxy_dict = {} if _dict is None else _dict.copy() # set of proxified attribute names which are proxified during bases parsing # and avoid to proxify them twice during _dict parsing proxified_attribute_names = set() # ensure bases is a tuple of types if bases is None: bases = (elt if isclass(elt) else elt.__class__,) if isinstance(bases, string_types): bases = (lookup(bases),) elif isclass(bases): bases = (bases,) else: bases = tuple(bases) # fill proxy_dict with routines of bases for base in bases: # exclude object if base is object: continue for name, member in getmembers(base, isroutine): # check if name is public if public and not name.startswith('_'): continue eltmember = getattr(elt, name, None) if eltmember is None: raise TypeError( 'Wrong elt {0}. Must implement {1} ({2}) of {3}.'. format(elt, name, member, base) ) # proxify member if member is not a constructor if name not in ['__new__', '__init__']: # get routine from proxy_dict or eltmember routine = proxy_dict.get(name, eltmember) # exclude object methods if getattr(routine, '__objclass__', None) is not object: # get routine proxy routine_proxy = proxify_routine(routine) if ismethod(routine_proxy): routine_proxy = get_method_function(routine_proxy) # update proxy_dict proxy_dict[name] = routine_proxy # and save the proxified attribute flag proxified_attribute_names.add(name) # proxify proxy_dict for name in proxy_dict: value = proxy_dict[name] if not hasattr(elt, name): raise TypeError( 'Wrong elt {0}. Must implement {1} ({2}).'.format( elt, name, value ) ) if isroutine(value): # if member has not already been proxified if name not in proxified_attribute_names: # proxify it value = proxify_routine(value) proxy_dict[name] = value # set default constructors if not present in proxy_dict if '__new__' not in proxy_dict: proxy_dict['__new__'] = object.__new__ if '__init__' not in proxy_dict: proxy_dict['__init__'] = object.__init__ # generate a new proxy class cls = type('Proxy', bases, proxy_dict) # instantiate proxy cls result = cls if isclass(elt) else cls() # bind elt to proxy setattr(result, __PROXIFIED__, elt) return result
[ "def", "proxify_elt", "(", "elt", ",", "bases", "=", "None", ",", "_dict", "=", "None", ",", "public", "=", "False", ")", ":", "# ensure _dict is a dictionary", "proxy_dict", "=", "{", "}", "if", "_dict", "is", "None", "else", "_dict", ".", "copy", "(", ")", "# set of proxified attribute names which are proxified during bases parsing", "# and avoid to proxify them twice during _dict parsing", "proxified_attribute_names", "=", "set", "(", ")", "# ensure bases is a tuple of types", "if", "bases", "is", "None", ":", "bases", "=", "(", "elt", "if", "isclass", "(", "elt", ")", "else", "elt", ".", "__class__", ",", ")", "if", "isinstance", "(", "bases", ",", "string_types", ")", ":", "bases", "=", "(", "lookup", "(", "bases", ")", ",", ")", "elif", "isclass", "(", "bases", ")", ":", "bases", "=", "(", "bases", ",", ")", "else", ":", "bases", "=", "tuple", "(", "bases", ")", "# fill proxy_dict with routines of bases", "for", "base", "in", "bases", ":", "# exclude object", "if", "base", "is", "object", ":", "continue", "for", "name", ",", "member", "in", "getmembers", "(", "base", ",", "isroutine", ")", ":", "# check if name is public", "if", "public", "and", "not", "name", ".", "startswith", "(", "'_'", ")", ":", "continue", "eltmember", "=", "getattr", "(", "elt", ",", "name", ",", "None", ")", "if", "eltmember", "is", "None", ":", "raise", "TypeError", "(", "'Wrong elt {0}. Must implement {1} ({2}) of {3}.'", ".", "format", "(", "elt", ",", "name", ",", "member", ",", "base", ")", ")", "# proxify member if member is not a constructor", "if", "name", "not", "in", "[", "'__new__'", ",", "'__init__'", "]", ":", "# get routine from proxy_dict or eltmember", "routine", "=", "proxy_dict", ".", "get", "(", "name", ",", "eltmember", ")", "# exclude object methods", "if", "getattr", "(", "routine", ",", "'__objclass__'", ",", "None", ")", "is", "not", "object", ":", "# get routine proxy", "routine_proxy", "=", "proxify_routine", "(", "routine", ")", "if", "ismethod", "(", "routine_proxy", ")", ":", "routine_proxy", "=", "get_method_function", "(", "routine_proxy", ")", "# update proxy_dict", "proxy_dict", "[", "name", "]", "=", "routine_proxy", "# and save the proxified attribute flag", "proxified_attribute_names", ".", "add", "(", "name", ")", "# proxify proxy_dict", "for", "name", "in", "proxy_dict", ":", "value", "=", "proxy_dict", "[", "name", "]", "if", "not", "hasattr", "(", "elt", ",", "name", ")", ":", "raise", "TypeError", "(", "'Wrong elt {0}. Must implement {1} ({2}).'", ".", "format", "(", "elt", ",", "name", ",", "value", ")", ")", "if", "isroutine", "(", "value", ")", ":", "# if member has not already been proxified", "if", "name", "not", "in", "proxified_attribute_names", ":", "# proxify it", "value", "=", "proxify_routine", "(", "value", ")", "proxy_dict", "[", "name", "]", "=", "value", "# set default constructors if not present in proxy_dict", "if", "'__new__'", "not", "in", "proxy_dict", ":", "proxy_dict", "[", "'__new__'", "]", "=", "object", ".", "__new__", "if", "'__init__'", "not", "in", "proxy_dict", ":", "proxy_dict", "[", "'__init__'", "]", "=", "object", ".", "__init__", "# generate a new proxy class", "cls", "=", "type", "(", "'Proxy'", ",", "bases", ",", "proxy_dict", ")", "# instantiate proxy cls", "result", "=", "cls", "if", "isclass", "(", "elt", ")", "else", "cls", "(", ")", "# bind elt to proxy", "setattr", "(", "result", ",", "__PROXIFIED__", ",", "elt", ")", "return", "result" ]
Proxify input elt.

:param elt: elt to proxify.
:param bases: elt class base classes. If None, use elt type.
:param dict _dict: specific elt class content to use.
:param bool public: if True (default False), proxify only public members
    (where name starts with the character '_').
:return: proxified element.
:raises: TypeError if elt does not implement all routines of bases and
    _dict.
[ "Proxify", "input", "elt", "." ]
793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff
https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/proxy.py#L80-L171
249,080
b3j0f/utils
b3j0f/utils/proxy.py
proxify_routine
def proxify_routine(routine, impl=None): """Proxify a routine with input impl. :param routine: routine to proxify. :param impl: new impl to use. If None, use routine. """ # init impl impl = routine if impl is None else impl is_method = ismethod(routine) if is_method: function = get_method_function(routine) else: function = routine # flag which indicates that the function is not a pure python function # and has to be wrapped wrap_function = not hasattr(function, '__code__') try: # get params from routine args, varargs, kwargs, _ = getargspec(function) except TypeError: # in case of error, wrap the function wrap_function = True if wrap_function: # if function is not pure python, create a generic one # with assignments assigned = [] for wrapper_assignment in WRAPPER_ASSIGNMENTS: if hasattr(function, wrapper_assignment): assigned.append(wrapper_assignment) # and updates updated = [] for wrapper_update in WRAPPER_UPDATES: if hasattr(function, wrapper_update): updated.append(wrapper_update) @wraps(function, assigned=assigned, updated=updated) def wrappedfunction(*args, **kwargs): """Default wrap function.""" function = wrappedfunction # get params from function args, varargs, kwargs, _ = getargspec(function) name = function.__name__ result = _compilecode( function=function, name=name, impl=impl, args=args, varargs=varargs, kwargs=kwargs ) # set wrapping assignments for wrapper_assignment in WRAPPER_ASSIGNMENTS: try: value = getattr(function, wrapper_assignment) except AttributeError: pass else: setattr(result, wrapper_assignment, value) # set proxy module result.__module__ = proxify_routine.__module__ # update wrapping updating for wrapper_update in WRAPPER_UPDATES: try: value = getattr(function, wrapper_update) except AttributeError: pass else: getattr(result, wrapper_update).update(value) # set proxyfied element on proxy setattr(result, __PROXIFIED__, routine) if is_method: # create a new method args = [result, get_method_self(routine)] if PY2: args.append(routine.im_class) result = MethodType(*args) return result
python
def proxify_routine(routine, impl=None): """Proxify a routine with input impl. :param routine: routine to proxify. :param impl: new impl to use. If None, use routine. """ # init impl impl = routine if impl is None else impl is_method = ismethod(routine) if is_method: function = get_method_function(routine) else: function = routine # flag which indicates that the function is not a pure python function # and has to be wrapped wrap_function = not hasattr(function, '__code__') try: # get params from routine args, varargs, kwargs, _ = getargspec(function) except TypeError: # in case of error, wrap the function wrap_function = True if wrap_function: # if function is not pure python, create a generic one # with assignments assigned = [] for wrapper_assignment in WRAPPER_ASSIGNMENTS: if hasattr(function, wrapper_assignment): assigned.append(wrapper_assignment) # and updates updated = [] for wrapper_update in WRAPPER_UPDATES: if hasattr(function, wrapper_update): updated.append(wrapper_update) @wraps(function, assigned=assigned, updated=updated) def wrappedfunction(*args, **kwargs): """Default wrap function.""" function = wrappedfunction # get params from function args, varargs, kwargs, _ = getargspec(function) name = function.__name__ result = _compilecode( function=function, name=name, impl=impl, args=args, varargs=varargs, kwargs=kwargs ) # set wrapping assignments for wrapper_assignment in WRAPPER_ASSIGNMENTS: try: value = getattr(function, wrapper_assignment) except AttributeError: pass else: setattr(result, wrapper_assignment, value) # set proxy module result.__module__ = proxify_routine.__module__ # update wrapping updating for wrapper_update in WRAPPER_UPDATES: try: value = getattr(function, wrapper_update) except AttributeError: pass else: getattr(result, wrapper_update).update(value) # set proxyfied element on proxy setattr(result, __PROXIFIED__, routine) if is_method: # create a new method args = [result, get_method_self(routine)] if PY2: args.append(routine.im_class) result = MethodType(*args) return result
[ "def", "proxify_routine", "(", "routine", ",", "impl", "=", "None", ")", ":", "# init impl", "impl", "=", "routine", "if", "impl", "is", "None", "else", "impl", "is_method", "=", "ismethod", "(", "routine", ")", "if", "is_method", ":", "function", "=", "get_method_function", "(", "routine", ")", "else", ":", "function", "=", "routine", "# flag which indicates that the function is not a pure python function", "# and has to be wrapped", "wrap_function", "=", "not", "hasattr", "(", "function", ",", "'__code__'", ")", "try", ":", "# get params from routine", "args", ",", "varargs", ",", "kwargs", ",", "_", "=", "getargspec", "(", "function", ")", "except", "TypeError", ":", "# in case of error, wrap the function", "wrap_function", "=", "True", "if", "wrap_function", ":", "# if function is not pure python, create a generic one", "# with assignments", "assigned", "=", "[", "]", "for", "wrapper_assignment", "in", "WRAPPER_ASSIGNMENTS", ":", "if", "hasattr", "(", "function", ",", "wrapper_assignment", ")", ":", "assigned", ".", "append", "(", "wrapper_assignment", ")", "# and updates", "updated", "=", "[", "]", "for", "wrapper_update", "in", "WRAPPER_UPDATES", ":", "if", "hasattr", "(", "function", ",", "wrapper_update", ")", ":", "updated", ".", "append", "(", "wrapper_update", ")", "@", "wraps", "(", "function", ",", "assigned", "=", "assigned", ",", "updated", "=", "updated", ")", "def", "wrappedfunction", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Default wrap function.\"\"\"", "function", "=", "wrappedfunction", "# get params from function", "args", ",", "varargs", ",", "kwargs", ",", "_", "=", "getargspec", "(", "function", ")", "name", "=", "function", ".", "__name__", "result", "=", "_compilecode", "(", "function", "=", "function", ",", "name", "=", "name", ",", "impl", "=", "impl", ",", "args", "=", "args", ",", "varargs", "=", "varargs", ",", "kwargs", "=", "kwargs", ")", "# set wrapping assignments", "for", "wrapper_assignment", "in", "WRAPPER_ASSIGNMENTS", ":", "try", ":", "value", "=", "getattr", "(", "function", ",", "wrapper_assignment", ")", "except", "AttributeError", ":", "pass", "else", ":", "setattr", "(", "result", ",", "wrapper_assignment", ",", "value", ")", "# set proxy module", "result", ".", "__module__", "=", "proxify_routine", ".", "__module__", "# update wrapping updating", "for", "wrapper_update", "in", "WRAPPER_UPDATES", ":", "try", ":", "value", "=", "getattr", "(", "function", ",", "wrapper_update", ")", "except", "AttributeError", ":", "pass", "else", ":", "getattr", "(", "result", ",", "wrapper_update", ")", ".", "update", "(", "value", ")", "# set proxyfied element on proxy", "setattr", "(", "result", ",", "__PROXIFIED__", ",", "routine", ")", "if", "is_method", ":", "# create a new method", "args", "=", "[", "result", ",", "get_method_self", "(", "routine", ")", "]", "if", "PY2", ":", "args", ".", "append", "(", "routine", ".", "im_class", ")", "result", "=", "MethodType", "(", "*", "args", ")", "return", "result" ]
Proxify a routine with input impl.

:param routine: routine to proxify.
:param impl: new impl to use. If None, use routine.
[ "Proxify", "a", "routine", "with", "input", "impl", "." ]
793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff
https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/proxy.py#L174-L259
249,081
b3j0f/utils
b3j0f/utils/proxy.py
_compilecode
def _compilecode(function, name, impl, args, varargs, kwargs): """Get generated code. :return: function proxy generated code. :rtype: str """ newcodestr, generatedname, impl_name = _generatecode( function=function, name=name, impl=impl, args=args, varargs=varargs, kwargs=kwargs ) try: __file__ = getfile(function) except TypeError: __file__ = '<string>' # compile newcodestr code = compile(newcodestr, __file__, 'single') # define the code with the new function _globals = {} exec_(code, _globals) # get new code _var = _globals[generatedname] newco = get_function_code(_var) # get new consts list newconsts = list(newco.co_consts) if PY3: newcode = list(newco.co_code) else: newcode = [ord(co) for co in newco.co_code] consts_values = {impl_name: impl} # change LOAD_GLOBAL to LOAD_CONST index = 0 newcodelen = len(newcode) while index < newcodelen: if newcode[index] == LOAD_GLOBAL: oparg = newcode[index + 1] + (newcode[index + 2] << 8) name = newco.co_names[oparg] if name in consts_values: const_value = consts_values[name] if const_value in newconsts: pos = newconsts.index(const_value) else: pos = len(newconsts) newconsts.append(consts_values[name]) newcode[index] = LOAD_CONST newcode[index + 1] = pos & 0xFF newcode[index + 2] = pos >> 8 index += 1 codeobj = getcodeobj(newconsts, newcode, newco, get_function_code(function)) # instanciate a new function if function is None or isbuiltin(function): result = FunctionType(codeobj, {}) else: result = type(function)( codeobj, get_function_globals(function), function.__name__, get_function_defaults(function), get_function_closure(function) ) return result
python
def _compilecode(function, name, impl, args, varargs, kwargs): """Get generated code. :return: function proxy generated code. :rtype: str """ newcodestr, generatedname, impl_name = _generatecode( function=function, name=name, impl=impl, args=args, varargs=varargs, kwargs=kwargs ) try: __file__ = getfile(function) except TypeError: __file__ = '<string>' # compile newcodestr code = compile(newcodestr, __file__, 'single') # define the code with the new function _globals = {} exec_(code, _globals) # get new code _var = _globals[generatedname] newco = get_function_code(_var) # get new consts list newconsts = list(newco.co_consts) if PY3: newcode = list(newco.co_code) else: newcode = [ord(co) for co in newco.co_code] consts_values = {impl_name: impl} # change LOAD_GLOBAL to LOAD_CONST index = 0 newcodelen = len(newcode) while index < newcodelen: if newcode[index] == LOAD_GLOBAL: oparg = newcode[index + 1] + (newcode[index + 2] << 8) name = newco.co_names[oparg] if name in consts_values: const_value = consts_values[name] if const_value in newconsts: pos = newconsts.index(const_value) else: pos = len(newconsts) newconsts.append(consts_values[name]) newcode[index] = LOAD_CONST newcode[index + 1] = pos & 0xFF newcode[index + 2] = pos >> 8 index += 1 codeobj = getcodeobj(newconsts, newcode, newco, get_function_code(function)) # instanciate a new function if function is None or isbuiltin(function): result = FunctionType(codeobj, {}) else: result = type(function)( codeobj, get_function_globals(function), function.__name__, get_function_defaults(function), get_function_closure(function) ) return result
[ "def", "_compilecode", "(", "function", ",", "name", ",", "impl", ",", "args", ",", "varargs", ",", "kwargs", ")", ":", "newcodestr", ",", "generatedname", ",", "impl_name", "=", "_generatecode", "(", "function", "=", "function", ",", "name", "=", "name", ",", "impl", "=", "impl", ",", "args", "=", "args", ",", "varargs", "=", "varargs", ",", "kwargs", "=", "kwargs", ")", "try", ":", "__file__", "=", "getfile", "(", "function", ")", "except", "TypeError", ":", "__file__", "=", "'<string>'", "# compile newcodestr", "code", "=", "compile", "(", "newcodestr", ",", "__file__", ",", "'single'", ")", "# define the code with the new function", "_globals", "=", "{", "}", "exec_", "(", "code", ",", "_globals", ")", "# get new code", "_var", "=", "_globals", "[", "generatedname", "]", "newco", "=", "get_function_code", "(", "_var", ")", "# get new consts list", "newconsts", "=", "list", "(", "newco", ".", "co_consts", ")", "if", "PY3", ":", "newcode", "=", "list", "(", "newco", ".", "co_code", ")", "else", ":", "newcode", "=", "[", "ord", "(", "co", ")", "for", "co", "in", "newco", ".", "co_code", "]", "consts_values", "=", "{", "impl_name", ":", "impl", "}", "# change LOAD_GLOBAL to LOAD_CONST", "index", "=", "0", "newcodelen", "=", "len", "(", "newcode", ")", "while", "index", "<", "newcodelen", ":", "if", "newcode", "[", "index", "]", "==", "LOAD_GLOBAL", ":", "oparg", "=", "newcode", "[", "index", "+", "1", "]", "+", "(", "newcode", "[", "index", "+", "2", "]", "<<", "8", ")", "name", "=", "newco", ".", "co_names", "[", "oparg", "]", "if", "name", "in", "consts_values", ":", "const_value", "=", "consts_values", "[", "name", "]", "if", "const_value", "in", "newconsts", ":", "pos", "=", "newconsts", ".", "index", "(", "const_value", ")", "else", ":", "pos", "=", "len", "(", "newconsts", ")", "newconsts", ".", "append", "(", "consts_values", "[", "name", "]", ")", "newcode", "[", "index", "]", "=", "LOAD_CONST", "newcode", "[", "index", "+", "1", "]", "=", "pos", "&", "0xFF", "newcode", "[", "index", "+", "2", "]", "=", "pos", ">>", "8", "index", "+=", "1", "codeobj", "=", "getcodeobj", "(", "newconsts", ",", "newcode", ",", "newco", ",", "get_function_code", "(", "function", ")", ")", "# instanciate a new function", "if", "function", "is", "None", "or", "isbuiltin", "(", "function", ")", ":", "result", "=", "FunctionType", "(", "codeobj", ",", "{", "}", ")", "else", ":", "result", "=", "type", "(", "function", ")", "(", "codeobj", ",", "get_function_globals", "(", "function", ")", ",", "function", ".", "__name__", ",", "get_function_defaults", "(", "function", ")", ",", "get_function_closure", "(", "function", ")", ")", "return", "result" ]
Get generated code.

:return: function proxy generated code.
:rtype: str
[ "Get", "generated", "code", "." ]
793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff
https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/proxy.py#L262-L333
249,082
b3j0f/utils
b3j0f/utils/proxy.py
get_proxy
def get_proxy(elt, bases=None, _dict=None):
    """Get proxy from an elt.

    If elt implements the proxy generator method (named ``__getproxy__``),
    use it instead of using this module functions.

    :param elt: elt to proxify.
    :type elt: object or function/method
    :param bases: base types to enrich in the result cls if not None.
    :param _dict: class members to proxify if not None.
    """

    # try to find an instance proxy generator
    proxygenerator = getattr(elt, __GETPROXY__, None)

    # if a proxy generator is not found, use this module
    if proxygenerator is None:
        if isroutine(elt):
            result = proxify_routine(elt)

        else:  # in case of object, result is a Proxy
            result = proxify_elt(elt, bases=bases, _dict=_dict)

    else:  # otherwise, use the specific proxy generator
        result = proxygenerator()

    return result
python
def get_proxy(elt, bases=None, _dict=None):
    """Get proxy from an elt.

    If elt implements the proxy generator method (named ``__getproxy__``),
    use it instead of using this module functions.

    :param elt: elt to proxify.
    :type elt: object or function/method
    :param bases: base types to enrich in the result cls if not None.
    :param _dict: class members to proxify if not None.
    """

    # try to find an instance proxy generator
    proxygenerator = getattr(elt, __GETPROXY__, None)

    # if a proxy generator is not found, use this module
    if proxygenerator is None:
        if isroutine(elt):
            result = proxify_routine(elt)

        else:  # in case of object, result is a Proxy
            result = proxify_elt(elt, bases=bases, _dict=_dict)

    else:  # otherwise, use the specific proxy generator
        result = proxygenerator()

    return result
[ "def", "get_proxy", "(", "elt", ",", "bases", "=", "None", ",", "_dict", "=", "None", ")", ":", "# try to find an instance proxy generator", "proxygenerator", "=", "getattr", "(", "elt", ",", "__GETPROXY__", ",", "None", ")", "# if a proxy generator is not found, use this module", "if", "proxygenerator", "is", "None", ":", "if", "isroutine", "(", "elt", ")", ":", "result", "=", "proxify_routine", "(", "elt", ")", "else", ":", "# in case of object, result is a Proxy", "result", "=", "proxify_elt", "(", "elt", ",", "bases", "=", "bases", ",", "_dict", "=", "_dict", ")", "else", ":", "# otherwise, use the specific proxy generator", "result", "=", "proxygenerator", "(", ")", "return", "result" ]
Get proxy from an elt.

If elt implements the proxy generator method (named ``__getproxy__``), use
it instead of using this module functions.

:param elt: elt to proxify.
:type elt: object or function/method
:param bases: base types to enrich in the result cls if not None.
:param _dict: class members to proxify if not None.
[ "Get", "proxy", "from", "an", "elt", "." ]
793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff
https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/proxy.py#L414-L440
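A minimal usage sketch for `get_proxy` on a plain function, based on the docstrings above (routines go through `proxify_routine`, objects through `proxify_elt`, and anything exposing `__getproxy__` supplies its own proxy). It assumes the b3j0f.utils package is installed; since `proxify_routine` relies on `getargspec`, an older Python interpreter may be required. The toy function is hypothetical.

    from b3j0f.utils.proxy import get_proxy, is_proxy, proxified_elt


    def add(a, b):
        """Toy function to proxify."""
        return a + b


    proxy = get_proxy(add)                 # routine -> proxify_routine is used

    print(proxy(1, 2))                     # the proxy forwards to add and returns 3
    print(is_proxy(proxy))                 # True: the proxified marker was set
    print(proxified_elt(proxy) is add)     # True: the original routine is recoverable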
249,083
b3j0f/utils
b3j0f/utils/proxy.py
proxified_elt
def proxified_elt(proxy):
    """Get proxified element.

    :param proxy: proxy element from where get proxified element.
    :return: proxified element. None if proxy is not proxified.
    """

    if ismethod(proxy):
        proxy = get_method_function(proxy)

    result = getattr(proxy, __PROXIFIED__, None)

    return result
python
def proxified_elt(proxy):
    """Get proxified element.

    :param proxy: proxy element from where get proxified element.
    :return: proxified element. None if proxy is not proxified.
    """

    if ismethod(proxy):
        proxy = get_method_function(proxy)

    result = getattr(proxy, __PROXIFIED__, None)

    return result
[ "def", "proxified_elt", "(", "proxy", ")", ":", "if", "ismethod", "(", "proxy", ")", ":", "proxy", "=", "get_method_function", "(", "proxy", ")", "result", "=", "getattr", "(", "proxy", ",", "__PROXIFIED__", ",", "None", ")", "return", "result" ]
Get proxified element.

:param proxy: proxy element from where get proxified element.
:return: proxified element. None if proxy is not proxified.
[ "Get", "proxified", "element", "." ]
793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff
https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/proxy.py#L443-L454
249,084
b3j0f/utils
b3j0f/utils/proxy.py
is_proxy
def is_proxy(elt):
    """Return True if elt is a proxy.

    :param elt: elt to check such as a proxy.
    :return: True iif elt is a proxy.
    :rtype: bool
    """

    if ismethod(elt):
        elt = get_method_function(elt)

    result = hasattr(elt, __PROXIFIED__)

    return result
python
def is_proxy(elt):
    """Return True if elt is a proxy.

    :param elt: elt to check such as a proxy.
    :return: True iif elt is a proxy.
    :rtype: bool
    """

    if ismethod(elt):
        elt = get_method_function(elt)

    result = hasattr(elt, __PROXIFIED__)

    return result
[ "def", "is_proxy", "(", "elt", ")", ":", "if", "ismethod", "(", "elt", ")", ":", "elt", "=", "get_method_function", "(", "elt", ")", "result", "=", "hasattr", "(", "elt", ",", "__PROXIFIED__", ")", "return", "result" ]
Return True if elt is a proxy.

:param elt: elt to check such as a proxy.
:return: True iif elt is a proxy.
:rtype: bool
[ "Return", "True", "if", "elt", "is", "a", "proxy", "." ]
793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff
https://github.com/b3j0f/utils/blob/793871b98e90fd1c7ce9ef0dce839cc18fcbc6ff/b3j0f/utils/proxy.py#L457-L470
249,085
mlavin/django-hilbert
hilbert/decorators.py
ajax_login_required
def ajax_login_required(view_func):
    """Handle non-authenticated users differently if it is an AJAX request."""
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.is_ajax():
            if request.user.is_authenticated():
                return view_func(request, *args, **kwargs)
            else:
                response = http.HttpResponse()
                response['X-Django-Requires-Auth'] = True
                response['X-Django-Login-Url'] = settings.LOGIN_URL
                return response
        else:
            return login_required(view_func)(request, *args, **kwargs)
    return _wrapped_view
python
def ajax_login_required(view_func):
    """Handle non-authenticated users differently if it is an AJAX request."""
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.is_ajax():
            if request.user.is_authenticated():
                return view_func(request, *args, **kwargs)
            else:
                response = http.HttpResponse()
                response['X-Django-Requires-Auth'] = True
                response['X-Django-Login-Url'] = settings.LOGIN_URL
                return response
        else:
            return login_required(view_func)(request, *args, **kwargs)
    return _wrapped_view
[ "def", "ajax_login_required", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "def", "_wrapped_view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "request", ".", "is_ajax", "(", ")", ":", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "return", "view_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "response", "=", "http", ".", "HttpResponse", "(", ")", "response", "[", "'X-Django-Requires-Auth'", "]", "=", "True", "response", "[", "'X-Django-Login-Url'", "]", "=", "settings", ".", "LOGIN_URL", "return", "response", "else", ":", "return", "login_required", "(", "view_func", ")", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_wrapped_view" ]
Handle non-authenticated users differently if it is an AJAX request.
[ "Handle", "non", "-", "authenticated", "users", "differently", "if", "it", "is", "an", "AJAX", "request", "." ]
e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6
https://github.com/mlavin/django-hilbert/blob/e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6/hilbert/decorators.py#L20-L35
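A usage sketch for the decorator above, assuming a Django project with django-hilbert installed and `LOGIN_URL` configured; it targets the older Django versions this code was written for (callable `is_authenticated()`, `request.is_ajax()`). The view and template names are hypothetical.

    from django.shortcuts import render

    from hilbert.decorators import ajax_login_required


    @ajax_login_required
    def inbox(request):
        # Regular requests fall back to login_required; AJAX requests from an
        # anonymous user get an empty 200 response carrying the
        # X-Django-Requires-Auth / X-Django-Login-Url headers, which
        # client-side JavaScript can act on.
        return render(request, 'inbox.html', {'user': request.user})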
249,086
mlavin/django-hilbert
hilbert/decorators.py
ajax_only
def ajax_only(view_func):
    """Required the view is only accessed via AJAX."""
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.is_ajax():
            return view_func(request, *args, **kwargs)
        else:
            return http.HttpResponseBadRequest()
    return _wrapped_view
python
def ajax_only(view_func):
    """Required the view is only accessed via AJAX."""
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.is_ajax():
            return view_func(request, *args, **kwargs)
        else:
            return http.HttpResponseBadRequest()
    return _wrapped_view
[ "def", "ajax_only", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "def", "_wrapped_view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "request", ".", "is_ajax", "(", ")", ":", "return", "view_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "http", ".", "HttpResponseBadRequest", "(", ")", "return", "_wrapped_view" ]
Required the view is only accessed via AJAX.
[ "Required", "the", "view", "is", "only", "accessed", "via", "AJAX", "." ]
e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6
https://github.com/mlavin/django-hilbert/blob/e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6/hilbert/decorators.py#L38-L47
249,087
mlavin/django-hilbert
hilbert/decorators.py
anonymous_required
def anonymous_required(func=None, url=None):
    """Required that the user is not logged in."""
    url = url or "/"

    def _dec(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            if request.user.is_authenticated():
                return redirect(url)
            else:
                return view_func(request, *args, **kwargs)
        return _wrapped_view

    if func is None:
        return _dec
    else:
        return _dec(func)
python
def anonymous_required(func=None, url=None):
    """Required that the user is not logged in."""
    url = url or "/"

    def _dec(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            if request.user.is_authenticated():
                return redirect(url)
            else:
                return view_func(request, *args, **kwargs)
        return _wrapped_view

    if func is None:
        return _dec
    else:
        return _dec(func)
[ "def", "anonymous_required", "(", "func", "=", "None", ",", "url", "=", "None", ")", ":", "url", "=", "url", "or", "\"/\"", "def", "_dec", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "def", "_wrapped_view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "return", "redirect", "(", "url", ")", "else", ":", "return", "view_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_wrapped_view", "if", "func", "is", "None", ":", "return", "_dec", "else", ":", "return", "_dec", "(", "func", ")" ]
Required that the user is not logged in.
[ "Required", "that", "the", "user", "is", "not", "logged", "in", "." ]
e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6
https://github.com/mlavin/django-hilbert/blob/e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6/hilbert/decorators.py#L50-L67
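A sketch of both call styles supported by `anonymous_required` (bare decorator, and with an explicit redirect URL), again assuming an older Django where `request.user.is_authenticated()` is callable; the view and template names are hypothetical.

    from django.shortcuts import render

    from hilbert.decorators import anonymous_required


    @anonymous_required                      # authenticated users are redirected to "/"
    def signup(request):
        return render(request, 'signup.html')


    @anonymous_required(url='/dashboard/')   # or to an explicit URL
    def login_page(request):
        return render(request, 'login.html')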
249,088
mlavin/django-hilbert
hilbert/decorators.py
secure
def secure(view_func):
    """Handles SSL redirect on the view level."""
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if not request.is_secure():
            redirect = _redirect(request, True)
            if redirect:
                # Redirect might be None if SSL is not enabled
                return redirect
        return view_func(request, *args, **kwargs)
    return _wrapped_view
python
def secure(view_func):
    """Handles SSL redirect on the view level."""
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if not request.is_secure():
            redirect = _redirect(request, True)
            if redirect:
                # Redirect might be None if SSL is not enabled
                return redirect
        return view_func(request, *args, **kwargs)
    return _wrapped_view
[ "def", "secure", "(", "view_func", ")", ":", "@", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_func", ")", ")", "def", "_wrapped_view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "request", ".", "is_secure", "(", ")", ":", "redirect", "=", "_redirect", "(", "request", ",", "True", ")", "if", "redirect", ":", "# Redirect might be None if SSL is not enabled", "return", "redirect", "return", "view_func", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_wrapped_view" ]
Handles SSL redirect on the view level.
[ "Handles", "SSL", "redirect", "on", "the", "view", "level", "." ]
e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6
https://github.com/mlavin/django-hilbert/blob/e77b685f4afc6e1224dc7e616e9ee9f7c2bc55b6/hilbert/decorators.py#L70-L81
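A sketch for the `secure` decorator, which defers to hilbert's `_redirect` helper to send plain-HTTP requests to their HTTPS counterpart when SSL support is enabled in settings; the view name is hypothetical.

    from django.http import HttpResponse

    from hilbert.decorators import secure


    @secure
    def checkout(request):
        # Plain-HTTP requests are redirected to HTTPS first (when SSL is
        # enabled in settings); secure requests reach the view body directly.
        return HttpResponse('ok', content_type='text/plain')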
249,089
jmgilman/Neolib
neolib/pyamf/amf3.py
encode_int
def encode_int(n): """ Encodes an int as a variable length signed 29-bit integer as defined by the spec. @param n: The integer to be encoded @return: The encoded string @rtype: C{str} @raise OverflowError: Out of range. """ global ENCODED_INT_CACHE try: return ENCODED_INT_CACHE[n] except KeyError: pass if n < MIN_29B_INT or n > MAX_29B_INT: raise OverflowError("Out of range") if n < 0: n += 0x20000000 bytes = '' real_value = None if n > 0x1fffff: real_value = n n >>= 1 bytes += chr(0x80 | ((n >> 21) & 0xff)) if n > 0x3fff: bytes += chr(0x80 | ((n >> 14) & 0xff)) if n > 0x7f: bytes += chr(0x80 | ((n >> 7) & 0xff)) if real_value is not None: n = real_value if n > 0x1fffff: bytes += chr(n & 0xff) else: bytes += chr(n & 0x7f) ENCODED_INT_CACHE[n] = bytes return bytes
python
def encode_int(n): """ Encodes an int as a variable length signed 29-bit integer as defined by the spec. @param n: The integer to be encoded @return: The encoded string @rtype: C{str} @raise OverflowError: Out of range. """ global ENCODED_INT_CACHE try: return ENCODED_INT_CACHE[n] except KeyError: pass if n < MIN_29B_INT or n > MAX_29B_INT: raise OverflowError("Out of range") if n < 0: n += 0x20000000 bytes = '' real_value = None if n > 0x1fffff: real_value = n n >>= 1 bytes += chr(0x80 | ((n >> 21) & 0xff)) if n > 0x3fff: bytes += chr(0x80 | ((n >> 14) & 0xff)) if n > 0x7f: bytes += chr(0x80 | ((n >> 7) & 0xff)) if real_value is not None: n = real_value if n > 0x1fffff: bytes += chr(n & 0xff) else: bytes += chr(n & 0x7f) ENCODED_INT_CACHE[n] = bytes return bytes
[ "def", "encode_int", "(", "n", ")", ":", "global", "ENCODED_INT_CACHE", "try", ":", "return", "ENCODED_INT_CACHE", "[", "n", "]", "except", "KeyError", ":", "pass", "if", "n", "<", "MIN_29B_INT", "or", "n", ">", "MAX_29B_INT", ":", "raise", "OverflowError", "(", "\"Out of range\"", ")", "if", "n", "<", "0", ":", "n", "+=", "0x20000000", "bytes", "=", "''", "real_value", "=", "None", "if", "n", ">", "0x1fffff", ":", "real_value", "=", "n", "n", ">>=", "1", "bytes", "+=", "chr", "(", "0x80", "|", "(", "(", "n", ">>", "21", ")", "&", "0xff", ")", ")", "if", "n", ">", "0x3fff", ":", "bytes", "+=", "chr", "(", "0x80", "|", "(", "(", "n", ">>", "14", ")", "&", "0xff", ")", ")", "if", "n", ">", "0x7f", ":", "bytes", "+=", "chr", "(", "0x80", "|", "(", "(", "n", ">>", "7", ")", "&", "0xff", ")", ")", "if", "real_value", "is", "not", "None", ":", "n", "=", "real_value", "if", "n", ">", "0x1fffff", ":", "bytes", "+=", "chr", "(", "n", "&", "0xff", ")", "else", ":", "bytes", "+=", "chr", "(", "n", "&", "0x7f", ")", "ENCODED_INT_CACHE", "[", "n", "]", "=", "bytes", "return", "bytes" ]
Encodes an int as a variable length signed 29-bit integer as defined by
the spec.

@param n: The integer to be encoded
@return: The encoded string
@rtype: C{str}
@raise OverflowError: Out of range.
[ "Encodes", "an", "int", "as", "a", "variable", "length", "signed", "29", "-", "bit", "integer", "as", "defined", "by", "the", "spec", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L1515-L1562
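The U29 variable-length integer layout that encode_int above targets can be illustrated with a standalone sketch. amf3_u29_encode is an illustrative name rather than a pyamf API, and the sketch covers only the unsigned layout, omitting the negative-value offset and the result cache:

# Minimal sketch of the AMF3 U29 layout: 7 bits per byte with a continuation
# flag, except the 4-byte form, whose last byte keeps all eight bits.
def amf3_u29_encode(n: int) -> bytes:
    if not 0 <= n <= 0x1FFFFFFF:
        raise OverflowError("value does not fit in 29 bits")
    if n < 0x80:                      # 1 byte: 0xxxxxxx
        return bytes([n])
    if n < 0x4000:                    # 2 bytes: 7 + 7 bits
        return bytes([0x80 | (n >> 7), n & 0x7F])
    if n < 0x200000:                  # 3 bytes: 7 + 7 + 7 bits
        return bytes([0x80 | (n >> 14), 0x80 | ((n >> 7) & 0x7F), n & 0x7F])
    # 4 bytes: 7 + 7 + 7 + 8 bits
    return bytes([0x80 | (n >> 22), 0x80 | ((n >> 15) & 0x7F),
                  0x80 | ((n >> 8) & 0x7F), n & 0xFF])

assert amf3_u29_encode(0x7F) == b"\x7f"
assert amf3_u29_encode(0x80) == b"\x81\x00"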
249,090
jmgilman/Neolib
neolib/pyamf/amf3.py
DataOutput.writeBoolean
def writeBoolean(self, value):
    """
    Writes a Boolean value.

    @type value: C{bool}
    @param value: A C{Boolean} value determining which byte is written.
        If the parameter is C{True}, C{1} is written; if C{False}, C{0} is
        written.

    @raise ValueError: Non-boolean value found.
    """
    if not isinstance(value, bool):
        raise ValueError("Non-boolean value found")

    if value is True:
        self.stream.write_uchar(1)
    else:
        self.stream.write_uchar(0)
python
def writeBoolean(self, value):
    """
    Writes a Boolean value.

    @type value: C{bool}
    @param value: A C{Boolean} value determining which byte is written.
        If the parameter is C{True}, C{1} is written; if C{False}, C{0} is
        written.

    @raise ValueError: Non-boolean value found.
    """
    if not isinstance(value, bool):
        raise ValueError("Non-boolean value found")

    if value is True:
        self.stream.write_uchar(1)
    else:
        self.stream.write_uchar(0)
[ "def", "writeBoolean", "(", "self", ",", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "bool", ")", ":", "raise", "ValueError", "(", "\"Non-boolean value found\"", ")", "if", "value", "is", "True", ":", "self", ".", "stream", ".", "write_uchar", "(", "1", ")", "else", ":", "self", ".", "stream", ".", "write_uchar", "(", "0", ")" ]
Writes a Boolean value. @type value: C{bool} @param value: A C{Boolean} value determining which byte is written. If the parameter is C{True}, C{1} is written; if C{False}, C{0} is written. @raise ValueError: Non-boolean value found.
[ "Writes", "a", "Boolean", "value", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L173-L190
249,091
jmgilman/Neolib
neolib/pyamf/amf3.py
DataOutput.writeMultiByte
def writeMultiByte(self, value, charset):
    """
    Writes a multibyte string to the datastream using the specified
    character set.

    @type value: C{str}
    @param value: The string value to be written.
    @type charset: C{str}
    @param charset: The string denoting the character set to use. Possible
        character set strings include C{shift-jis}, C{cn-gb},
        C{iso-8859-1} and others.
    @see: U{Supported character sets on Livedocs (external)
        <http://livedocs.adobe.com/flex/201/langref/charset-codes.html>}
    """
    if type(value) is unicode:
        value = value.encode(charset)

    self.stream.write(value)
python
def writeMultiByte(self, value, charset):
    """
    Writes a multibyte string to the datastream using the specified
    character set.

    @type value: C{str}
    @param value: The string value to be written.
    @type charset: C{str}
    @param charset: The string denoting the character set to use. Possible
        character set strings include C{shift-jis}, C{cn-gb},
        C{iso-8859-1} and others.
    @see: U{Supported character sets on Livedocs (external)
        <http://livedocs.adobe.com/flex/201/langref/charset-codes.html>}
    """
    if type(value) is unicode:
        value = value.encode(charset)

    self.stream.write(value)
[ "def", "writeMultiByte", "(", "self", ",", "value", ",", "charset", ")", ":", "if", "type", "(", "value", ")", "is", "unicode", ":", "value", "=", "value", ".", "encode", "(", "charset", ")", "self", ".", "stream", ".", "write", "(", "value", ")" ]
Writes a multibyte string to the datastream using the specified character set. @type value: C{str} @param value: The string value to be written. @type charset: C{str} @param charset: The string denoting the character set to use. Possible character set strings include C{shift-jis}, C{cn-gb}, C{iso-8859-1} and others. @see: U{Supported character sets on Livedocs (external) <http://livedocs.adobe.com/flex/201/langref/charset-codes.html>}
[ "Writes", "a", "multibyte", "string", "to", "the", "datastream", "using", "the", "specified", "character", "set", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L235-L252
249,092
jmgilman/Neolib
neolib/pyamf/amf3.py
DataOutput.writeUTF
def writeUTF(self, value):
    """
    Writes a UTF-8 string to the data stream.

    The length of the UTF-8 string in bytes is written first, as a 16-bit
    integer, followed by the bytes representing the characters of the
    string.

    @type value: C{str}
    @param value: The string value to be written.
    """
    buf = util.BufferedByteStream()
    buf.write_utf8_string(value)
    bytes = buf.getvalue()

    self.stream.write_ushort(len(bytes))
    self.stream.write(bytes)
python
def writeUTF(self, value):
    """
    Writes a UTF-8 string to the data stream.

    The length of the UTF-8 string in bytes is written first, as a 16-bit
    integer, followed by the bytes representing the characters of the
    string.

    @type value: C{str}
    @param value: The string value to be written.
    """
    buf = util.BufferedByteStream()
    buf.write_utf8_string(value)
    bytes = buf.getvalue()

    self.stream.write_ushort(len(bytes))
    self.stream.write(bytes)
[ "def", "writeUTF", "(", "self", ",", "value", ")", ":", "buf", "=", "util", ".", "BufferedByteStream", "(", ")", "buf", ".", "write_utf8_string", "(", "value", ")", "bytes", "=", "buf", ".", "getvalue", "(", ")", "self", ".", "stream", ".", "write_ushort", "(", "len", "(", "bytes", ")", ")", "self", ".", "stream", ".", "write", "(", "bytes", ")" ]
Writes a UTF-8 string to the data stream. The length of the UTF-8 string in bytes is written first, as a 16-bit integer, followed by the bytes representing the characters of the string. @type value: C{str} @param value: The string value to be written.
[ "Writes", "a", "UTF", "-", "8", "string", "to", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L290-L306
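writeUTF frames a string as a 16-bit length prefix followed by the UTF-8 payload. A self-contained sketch of that framing, using invented helper names (write_utf / read_utf) rather than pyamf APIs, and assuming the usual network byte order for the prefix:

# Length-prefixed UTF-8 framing: an unsigned 16-bit big-endian byte count,
# then the UTF-8 payload itself.
import io
import struct

def write_utf(stream, value):
    data = value.encode("utf-8")
    stream.write(struct.pack(">H", len(data)))  # 16-bit length prefix
    stream.write(data)

def read_utf(stream):
    (length,) = struct.unpack(">H", stream.read(2))
    return stream.read(length).decode("utf-8")

buf = io.BytesIO()
write_utf(buf, "héllo")
buf.seek(0)
assert read_utf(buf) == "héllo"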
249,093
jmgilman/Neolib
neolib/pyamf/amf3.py
DataInput.readMultiByte
def readMultiByte(self, length, charset):
    """
    Reads a multibyte string of specified length from the data stream
    using the specified character set.

    @type length: C{int}
    @param length: The number of bytes from the data stream to read.
    @type charset: C{str}
    @param charset: The string denoting the character set to use.

    @rtype: C{str}
    @return: UTF-8 encoded string.
    """
    #FIXME nick: how to work out the code point byte size (on the fly)?
    bytes = self.stream.read(length)

    return unicode(bytes, charset)
python
def readMultiByte(self, length, charset):
    """
    Reads a multibyte string of specified length from the data stream
    using the specified character set.

    @type length: C{int}
    @param length: The number of bytes from the data stream to read.
    @type charset: C{str}
    @param charset: The string denoting the character set to use.

    @rtype: C{str}
    @return: UTF-8 encoded string.
    """
    #FIXME nick: how to work out the code point byte size (on the fly)?
    bytes = self.stream.read(length)

    return unicode(bytes, charset)
[ "def", "readMultiByte", "(", "self", ",", "length", ",", "charset", ")", ":", "#FIXME nick: how to work out the code point byte size (on the fly)?", "bytes", "=", "self", ".", "stream", ".", "read", "(", "length", ")", "return", "unicode", "(", "bytes", ",", "charset", ")" ]
Reads a multibyte string of specified length from the data stream using the specified character set. @type length: C{int} @param length: The number of bytes from the data stream to read. @type charset: C{str} @param charset: The string denoting the character set to use. @rtype: C{str} @return: UTF-8 encoded string.
[ "Reads", "a", "multibyte", "string", "of", "specified", "length", "from", "the", "data", "stream", "using", "the", "specified", "character", "set", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L401-L417
249,094
jmgilman/Neolib
neolib/pyamf/amf3.py
DataInput.readUTF
def readUTF(self):
    """
    Reads a UTF-8 string from the data stream.

    The string is assumed to be prefixed with an unsigned short indicating
    the length in bytes.

    @rtype: C{str}
    @return: A UTF-8 string produced by the byte representation of
        characters.
    """
    length = self.stream.read_ushort()

    return self.stream.read_utf8_string(length)
python
def readUTF(self):
    """
    Reads a UTF-8 string from the data stream.

    The string is assumed to be prefixed with an unsigned short indicating
    the length in bytes.

    @rtype: C{str}
    @return: A UTF-8 string produced by the byte representation of
        characters.
    """
    length = self.stream.read_ushort()

    return self.stream.read_utf8_string(length)
[ "def", "readUTF", "(", "self", ")", ":", "length", "=", "self", ".", "stream", ".", "read_ushort", "(", ")", "return", "self", ".", "stream", ".", "read_utf8_string", "(", "length", ")" ]
Reads a UTF-8 string from the data stream. The string is assumed to be prefixed with an unsigned short indicating the length in bytes. @rtype: C{str} @return: A UTF-8 string produced by the byte representation of characters.
[ "Reads", "a", "UTF", "-", "8", "string", "from", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L463-L475
249,095
jmgilman/Neolib
neolib/pyamf/amf3.py
Decoder.readBytes
def readBytes(self):
    """
    Reads and returns a utf-8 encoded byte array.
    """
    length, is_reference = self._readLength()

    if is_reference:
        return self.context.getString(length)

    if length == 0:
        return ''

    result = self.stream.read(length)
    self.context.addString(result)

    return result
python
def readBytes(self):
    """
    Reads and returns a utf-8 encoded byte array.
    """
    length, is_reference = self._readLength()

    if is_reference:
        return self.context.getString(length)

    if length == 0:
        return ''

    result = self.stream.read(length)
    self.context.addString(result)

    return result
[ "def", "readBytes", "(", "self", ")", ":", "length", ",", "is_reference", "=", "self", ".", "_readLength", "(", ")", "if", "is_reference", ":", "return", "self", ".", "context", ".", "getString", "(", "length", ")", "if", "length", "==", "0", ":", "return", "''", "result", "=", "self", ".", "stream", ".", "read", "(", "length", ")", "self", ".", "context", ".", "addString", "(", "result", ")", "return", "result" ]
Reads and returns a utf-8 encoded byte array.
[ "Reads", "and", "returns", "a", "utf", "-", "8", "encoded", "byte", "array", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L847-L862
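readBytes (and readString, next) depends on the AMF3 convention that the low bit of the length header separates inline values from references into the per-message string table. A sketch of that convention, with decode_string_header as an illustrative helper name, not a pyamf function:

# Low-bit reference convention: header & 1 == 0 -> reference, remaining bits
# index a previously seen string; header & 1 == 1 -> inline, remaining bits
# are the byte length the caller should read next.
def decode_string_header(header, string_table):
    if header & 0x01 == 0:
        return ("reference", string_table[header >> 1])
    return ("inline", header >> 1)

table = ["first", "second"]
assert decode_string_header(0b10, table) == ("reference", "second")
assert decode_string_header((5 << 1) | 1, table) == ("inline", 5)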
249,096
jmgilman/Neolib
neolib/pyamf/amf3.py
Decoder.readString
def readString(self):
    """
    Reads and returns a string from the stream.
    """
    length, is_reference = self._readLength()

    if is_reference:
        result = self.context.getString(length)

        return self.context.getStringForBytes(result)

    if length == 0:
        return ''

    result = self.stream.read(length)
    self.context.addString(result)

    return self.context.getStringForBytes(result)
python
def readString(self):
    """
    Reads and returns a string from the stream.
    """
    length, is_reference = self._readLength()

    if is_reference:
        result = self.context.getString(length)

        return self.context.getStringForBytes(result)

    if length == 0:
        return ''

    result = self.stream.read(length)
    self.context.addString(result)

    return self.context.getStringForBytes(result)
[ "def", "readString", "(", "self", ")", ":", "length", ",", "is_reference", "=", "self", ".", "_readLength", "(", ")", "if", "is_reference", ":", "result", "=", "self", ".", "context", ".", "getString", "(", "length", ")", "return", "self", ".", "context", ".", "getStringForBytes", "(", "result", ")", "if", "length", "==", "0", ":", "return", "''", "result", "=", "self", ".", "stream", ".", "read", "(", "length", ")", "self", ".", "context", ".", "addString", "(", "result", ")", "return", "self", ".", "context", ".", "getStringForBytes", "(", "result", ")" ]
Reads and returns a string from the stream.
[ "Reads", "and", "returns", "a", "string", "from", "the", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L864-L881
249,097
jmgilman/Neolib
neolib/pyamf/amf3.py
Decoder.readDate
def readDate(self):
    """
    Read date from the stream.

    The timezone is ignored as the date is always in UTC.
    """
    ref = self.readInteger(False)

    if ref & REFERENCE_BIT == 0:
        return self.context.getObject(ref >> 1)

    ms = self.stream.read_double()
    result = util.get_datetime(ms / 1000.0)

    if self.timezone_offset is not None:
        result += self.timezone_offset

    self.context.addObject(result)

    return result
python
def readDate(self):
    """
    Read date from the stream.

    The timezone is ignored as the date is always in UTC.
    """
    ref = self.readInteger(False)

    if ref & REFERENCE_BIT == 0:
        return self.context.getObject(ref >> 1)

    ms = self.stream.read_double()
    result = util.get_datetime(ms / 1000.0)

    if self.timezone_offset is not None:
        result += self.timezone_offset

    self.context.addObject(result)

    return result
[ "def", "readDate", "(", "self", ")", ":", "ref", "=", "self", ".", "readInteger", "(", "False", ")", "if", "ref", "&", "REFERENCE_BIT", "==", "0", ":", "return", "self", ".", "context", ".", "getObject", "(", "ref", ">>", "1", ")", "ms", "=", "self", ".", "stream", ".", "read_double", "(", ")", "result", "=", "util", ".", "get_datetime", "(", "ms", "/", "1000.0", ")", "if", "self", ".", "timezone_offset", "is", "not", "None", ":", "result", "+=", "self", ".", "timezone_offset", "self", ".", "context", ".", "addObject", "(", "result", ")", "return", "result" ]
Read date from the stream. The timezone is ignored as the date is always in UTC.
[ "Read", "date", "from", "the", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L883-L902
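readDate reads the AMF3 date payload: a float64 of milliseconds since the Unix epoch, always UTC. A standalone sketch of that conversion (ms_to_datetime is an illustrative helper, not pyamf's util.get_datetime):

# Convert an AMF3 date payload (milliseconds since the Unix epoch) to an
# aware UTC datetime.
from datetime import datetime, timezone

def ms_to_datetime(ms):
    return datetime.fromtimestamp(ms / 1000.0, tz=timezone.utc)

assert ms_to_datetime(0) == datetime(1970, 1, 1, tzinfo=timezone.utc)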
249,098
jmgilman/Neolib
neolib/pyamf/amf3.py
Decoder.readArray
def readArray(self):
    """
    Reads an array from the stream.

    @warning: There is a very specific problem with AMF3 where the first
        three bytes of an encoded empty C{dict} will mirror that of an
        encoded C{{'': 1, '2': 2}}
    """
    size = self.readInteger(False)

    if size & REFERENCE_BIT == 0:
        return self.context.getObject(size >> 1)

    size >>= 1

    key = self.readBytes()

    if key == '':
        # integer indexes only -> python list
        result = []
        self.context.addObject(result)

        for i in xrange(size):
            result.append(self.readElement())

        return result

    result = pyamf.MixedArray()
    self.context.addObject(result)

    while key:
        result[key] = self.readElement()
        key = self.readBytes()

    for i in xrange(size):
        el = self.readElement()
        result[i] = el

    return result
python
def readArray(self):
    """
    Reads an array from the stream.

    @warning: There is a very specific problem with AMF3 where the first
        three bytes of an encoded empty C{dict} will mirror that of an
        encoded C{{'': 1, '2': 2}}
    """
    size = self.readInteger(False)

    if size & REFERENCE_BIT == 0:
        return self.context.getObject(size >> 1)

    size >>= 1

    key = self.readBytes()

    if key == '':
        # integer indexes only -> python list
        result = []
        self.context.addObject(result)

        for i in xrange(size):
            result.append(self.readElement())

        return result

    result = pyamf.MixedArray()
    self.context.addObject(result)

    while key:
        result[key] = self.readElement()
        key = self.readBytes()

    for i in xrange(size):
        el = self.readElement()
        result[i] = el

    return result
[ "def", "readArray", "(", "self", ")", ":", "size", "=", "self", ".", "readInteger", "(", "False", ")", "if", "size", "&", "REFERENCE_BIT", "==", "0", ":", "return", "self", ".", "context", ".", "getObject", "(", "size", ">>", "1", ")", "size", ">>=", "1", "key", "=", "self", ".", "readBytes", "(", ")", "if", "key", "==", "''", ":", "# integer indexes only -> python list", "result", "=", "[", "]", "self", ".", "context", ".", "addObject", "(", "result", ")", "for", "i", "in", "xrange", "(", "size", ")", ":", "result", ".", "append", "(", "self", ".", "readElement", "(", ")", ")", "return", "result", "result", "=", "pyamf", ".", "MixedArray", "(", ")", "self", ".", "context", ".", "addObject", "(", "result", ")", "while", "key", ":", "result", "[", "key", "]", "=", "self", ".", "readElement", "(", ")", "key", "=", "self", ".", "readBytes", "(", ")", "for", "i", "in", "xrange", "(", "size", ")", ":", "el", "=", "self", ".", "readElement", "(", ")", "result", "[", "i", "]", "=", "el", "return", "result" ]
Reads an array from the stream. @warning: There is a very specific problem with AMF3 where the first three bytes of an encoded empty C{dict} will mirror that of an encoded C{{'': 1, '2': 2}}
[ "Reads", "an", "array", "from", "the", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L904-L942
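readArray produces one of two shapes: a plain list when the associative section is empty, or a mixed mapping keyed by both strings and dense integer indexes. A sketch of that shaping step over already-parsed pieces (build_array is an illustrative helper, operating after the header, keys and elements have been decoded):

# Dense-only arrays become ordinary lists; arrays with an associative part
# become a dict holding string keys plus dense indexes 0..size-1.
def build_array(dense, assoc):
    if not assoc:               # empty associative part -> ordinary list
        return list(dense)
    result = dict(assoc)        # string-keyed entries first
    for i, element in enumerate(dense):
        result[i] = element     # dense part indexed 0..size-1
    return result

assert build_array([1, 2, 3], None) == [1, 2, 3]
assert build_array([1], {"name": "x"}) == {"name": "x", 0: 1}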
249,099
jmgilman/Neolib
neolib/pyamf/amf3.py
Decoder._getClassDefinition
def _getClassDefinition(self, ref):
    """
    Reads class definition from the stream.
    """
    is_ref = ref & REFERENCE_BIT == 0
    ref >>= 1

    if is_ref:
        class_def = self.context.getClassByReference(ref)

        return class_def

    name = self.readBytes()
    alias = None

    if name == '':
        name = pyamf.ASObject

    try:
        alias = pyamf.get_class_alias(name)
    except pyamf.UnknownClassAlias:
        if self.strict:
            raise

        alias = pyamf.TypedObjectClassAlias(name)

    class_def = ClassDefinition(alias)

    class_def.encoding = ref & 0x03
    class_def.attr_len = ref >> 2
    class_def.static_properties = []

    if class_def.attr_len > 0:
        for i in xrange(class_def.attr_len):
            key = self.readBytes()

            class_def.static_properties.append(key)

    self.context.addClass(class_def, alias.klass)

    return class_def
python
def _getClassDefinition(self, ref):
    """
    Reads class definition from the stream.
    """
    is_ref = ref & REFERENCE_BIT == 0
    ref >>= 1

    if is_ref:
        class_def = self.context.getClassByReference(ref)

        return class_def

    name = self.readBytes()
    alias = None

    if name == '':
        name = pyamf.ASObject

    try:
        alias = pyamf.get_class_alias(name)
    except pyamf.UnknownClassAlias:
        if self.strict:
            raise

        alias = pyamf.TypedObjectClassAlias(name)

    class_def = ClassDefinition(alias)

    class_def.encoding = ref & 0x03
    class_def.attr_len = ref >> 2
    class_def.static_properties = []

    if class_def.attr_len > 0:
        for i in xrange(class_def.attr_len):
            key = self.readBytes()

            class_def.static_properties.append(key)

    self.context.addClass(class_def, alias.klass)

    return class_def
[ "def", "_getClassDefinition", "(", "self", ",", "ref", ")", ":", "is_ref", "=", "ref", "&", "REFERENCE_BIT", "==", "0", "ref", ">>=", "1", "if", "is_ref", ":", "class_def", "=", "self", ".", "context", ".", "getClassByReference", "(", "ref", ")", "return", "class_def", "name", "=", "self", ".", "readBytes", "(", ")", "alias", "=", "None", "if", "name", "==", "''", ":", "name", "=", "pyamf", ".", "ASObject", "try", ":", "alias", "=", "pyamf", ".", "get_class_alias", "(", "name", ")", "except", "pyamf", ".", "UnknownClassAlias", ":", "if", "self", ".", "strict", ":", "raise", "alias", "=", "pyamf", ".", "TypedObjectClassAlias", "(", "name", ")", "class_def", "=", "ClassDefinition", "(", "alias", ")", "class_def", ".", "encoding", "=", "ref", "&", "0x03", "class_def", ".", "attr_len", "=", "ref", ">>", "2", "class_def", ".", "static_properties", "=", "[", "]", "if", "class_def", ".", "attr_len", ">", "0", ":", "for", "i", "in", "xrange", "(", "class_def", ".", "attr_len", ")", ":", "key", "=", "self", ".", "readBytes", "(", ")", "class_def", ".", "static_properties", ".", "append", "(", "key", ")", "self", ".", "context", ".", "addClass", "(", "class_def", ",", "alias", ".", "klass", ")", "return", "class_def" ]
Reads class definition from the stream.
[ "Reads", "class", "definition", "from", "the", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf3.py#L944-L984
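_getClassDefinition unpacks three bit fields from the header value it receives: bit 0 distinguishes a reference to an earlier class definition, bits 1-2 carry the trait encoding, and the remaining bits give the count of static property names that follow. A sketch of that unpacking (unpack_class_header is an illustrative helper, not part of pyamf):

# Bit 0: 0 -> reference to an earlier class definition (index in the rest);
# otherwise the next two bits are the trait encoding and the remaining bits
# the number of static (sealed) property names to read.
def unpack_class_header(ref):
    if ref & 0x01 == 0:
        return ("reference", ref >> 1)
    ref >>= 1
    return ("inline", ref & 0x03, ref >> 2)  # (tag, encoding, attr count)

assert unpack_class_header(0b100) == ("reference", 2)
assert unpack_class_header((3 << 3) | 1) == ("inline", 0, 3)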