Dataset columns:

    text_prompt : string (lengths 100 to 17.7k)
    code_prompt : string (lengths 7 to 9.86k)
<SYSTEM_TASK:> Get a list of objects <END_TASK> <USER_TASK:> Description: def objectlist(self, event): """Get a list of objects"""
self.log('LEGACY LIST FUNCTION CALLED!', lvl=warn)

try:
    data, schema, user, client = self._get_args(event)
except AttributeError:
    return

object_filter = self._get_filter(event)

self.log('Object list for', schema, 'requested from',
         user.account.name, lvl=debug)

if 'fields' in data:
    fields = data['fields']
else:
    fields = []

object_list = []

opts = schemastore[schema].get('options', {})
hidden = opts.get('hidden', [])

if objectmodels[schema].count(object_filter) > WARNSIZE:
    self.log("Getting a very long list of items for ", schema,
             lvl=warn)

try:
    for item in objectmodels[schema].find(object_filter):
        try:
            if not self._check_permissions(user, 'list', item):
                continue
            if fields in ('*', ['*']):
                item_fields = item.serializablefields()
                for field in hidden:
                    item_fields.pop(field, None)
                object_list.append(item_fields)
            else:
                list_item = {'uuid': item.uuid}
                if 'name' in item._fields:
                    list_item['name'] = item._fields['name']
                for field in fields:
                    if field in item._fields and field not in hidden:
                        list_item[field] = item._fields[field]
                    else:
                        list_item[field] = None
                object_list.append(list_item)
        except Exception as e:
            self.log("Faulty object or field: ", e, type(e),
                     item._fields, fields, lvl=error, exc=True)
except ValidationError as e:
    self.log('Invalid object in database encountered!', e,
             exc=True, lvl=warn)

# self.log("Generated object list: ", object_list)

result = {
    'component': 'hfos.events.objectmanager',
    'action': 'getlist',
    'data': {
        'schema': schema,
        'list': object_list
    }
}

self._respond(None, result, event)
<SYSTEM_TASK:> Subscribe to an object's future changes <END_TASK> <USER_TASK:> Description: def subscribe(self, event): """Subscribe to an object's future changes"""
uuids = event.data

if not isinstance(uuids, list):
    uuids = [uuids]

subscribed = []
for uuid in uuids:
    try:
        self._add_subscription(uuid, event)
        subscribed.append(uuid)
    except KeyError:
        continue

result = {
    'component': 'hfos.events.objectmanager',
    'action': 'subscribe',
    'data': {
        'uuid': subscribed,
        'success': True
    }
}
self._respond(None, result, event)
<SYSTEM_TASK:> Unsubscribe from an object's future changes <END_TASK> <USER_TASK:> Description: def unsubscribe(self, event): """Unsubscribe from an object's future changes"""
# TODO: Automatic Unsubscription
uuids = event.data

if not isinstance(uuids, list):
    uuids = [uuids]

result = []
for uuid in uuids:
    if uuid in self.subscriptions:
        self.subscriptions[uuid].pop(event.client.uuid)
        if len(self.subscriptions[uuid]) == 0:
            del self.subscriptions[uuid]
        result.append(uuid)

result = {
    'component': 'hfos.events.objectmanager',
    'action': 'unsubscribe',
    'data': {
        'uuid': result,
        'success': True
    }
}
self._respond(None, result, event)
<SYSTEM_TASK:> Compile a list of all available language translations <END_TASK> <USER_TASK:> Description: def all_languages(): """Compile a list of all available language translations"""
rv = []
for lang in os.listdir(localedir):
    base = lang.split('_')[0].split('.')[0].split('@')[0]
    if 2 <= len(base) <= 3 and all(c.islower() for c in base):
        if base != 'all':
            rv.append(lang)
rv.sort()
rv.append('en')

l10n_log('Registered languages:', rv, lvl=verbose)
return rv
<SYSTEM_TASK:> Get a descriptive title for all languages <END_TASK> <USER_TASK:> Description: def language_token_to_name(languages): """Get a descriptive title for all languages"""
result = {}

with open(os.path.join(localedir, 'languages.json'), 'r') as f:
    language_lookup = json.load(f)

for language in languages:
    language = language.lower()
    try:
        result[language] = language_lookup[language]
    except KeyError:
        l10n_log('Language token lookup not found:', language,
                 lvl=warn)
        result[language] = language

return result
<SYSTEM_TASK:> Debugging function to print all message language variants <END_TASK> <USER_TASK:> Description: def print_messages(domain, msg): """Debugging function to print all message language variants"""
domain = Domain(domain)
for lang in all_languages():
    print(lang, ':', domain.get(lang, msg))
<SYSTEM_TASK:> Gettext function wrapper to return a message in a specified language by domain <END_TASK> <USER_TASK:> Description:
def i18n(msg, event=None, lang='en', domain='backend'):
    """Gettext function wrapper to return a message in a specified
    language by domain

    To use internationalization (i18n) on your messages, import it as
    '_' and use as usual. Do not forget to supply the client's
    language setting.
    """
if event is not None:
    language = event.client.language
else:
    language = lang

domain = Domain(domain)
return domain.get(language, msg)
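A brief usage sketch, assuming a German catalog is installed for the 'backend' domain; the event object here is hypothetical and only needs to carry a client language setting:

    # Direct lookup by language code:
    greeting = i18n('Welcome', lang='de', domain='backend')

    # Inside a handler, the conventional alias lets gettext tooling
    # pick up the messages; some_event is hypothetical:
    _ = i18n
    message = _('Access denied', event=some_event)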
<SYSTEM_TASK:> Return a random generated human-friendly phrase as low-probability unique id <END_TASK> <USER_TASK:> Description: def std_human_uid(kind=None): """Return a random generated human-friendly phrase as low-probability unique id"""
kind_list = alphabet

if kind == 'animal':
    kind_list = animals
elif kind == 'place':
    kind_list = places

name = "{color} {adjective} {kind} of {attribute}".format(
    color=choice(colors),
    adjective=choice(adjectives),
    kind=choice(kind_list),
    attribute=choice(attributes)
)

return name
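For illustration, the phrase always follows the "{color} {adjective} {kind} of {attribute}" template; the concrete words below are made up, since they depend on the module's word lists:

    # Output is random; the example phrase is illustrative only.
    name = std_human_uid(kind='animal')
    # e.g. 'blue funny fox of doom'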
<SYSTEM_TASK:> Add a new translation language to the live gettext translator <END_TASK> <USER_TASK:> Description: def _get_translation(self, lang): """Add a new translation language to the live gettext translator"""
try:
    return self._translations[lang]
except KeyError:
    # The fact that `fallback=True` is not the default is a serious
    # design flaw.
    rv = self._translations[lang] = gettext.translation(
        self._domain, localedir=localedir, languages=[lang],
        fallback=True)
    return rv
<SYSTEM_TASK:> Creates an Event Handler <END_TASK> <USER_TASK:> Description:
def handler(*names, **kwargs):
    """Creates an Event Handler

    This decorator can be applied to methods of classes derived from
    :class:`circuits.core.components.BaseComponent`. It marks the
    method as a handler for the events passed as arguments to the
    ``@handler`` decorator. The events are specified by their name.

    The decorated method's arguments must match the arguments passed
    to the :class:`circuits.core.events.Event` on creation. Optionally,
    the method may have an additional first argument named *event*. If
    declared, the event object that caused the handler to be invoked
    is assigned to it.

    By default, the handler is invoked by the component's root
    :class:`~.manager.Manager` for events that are propagated on the
    channel determined by the BaseComponent's *channel* attribute.
    This may be overridden by specifying a different channel as a
    keyword parameter of the decorator (``channel=...``).

    Keyword argument ``priority`` influences the order in which
    handlers for a specific event are invoked. The higher the
    priority, the earlier the handler is executed.

    If you want to override a handler defined in a base class of your
    component, you must specify ``override=True``, else your method
    becomes an additional handler for the event.

    **Return value**

    Normally, the results returned by the handlers for an event are
    simply collected in the :class:`circuits.core.events.Event`'s
    :attr:`value` attribute. As a special case, a handler may return a
    :class:`types.GeneratorType`. This signals to the dispatcher that
    the handler isn't ready to deliver a result yet. Rather, it has
    interrupted its execution with a ``yield None`` statement, thus
    preserving its current execution state.

    The dispatcher saves the returned generator object as a task. All
    tasks are reexamined (i.e. their :meth:`next()` method is invoked)
    when the pending events have been executed. This feature avoids an
    unnecessarily complicated chaining of event handlers. Imagine a
    handler A that needs the results from firing an event E in order
    to complete. Then without this feature, the final action of A
    would be to fire event E, and another handler for an event
    ``SuccessE`` would be required to complete handler A's operation,
    now having the result from invoking E available (actually it's
    even a bit more complicated).

    Using this "suspend" feature, the handler simply fires event E and
    then yields ``None`` until e.g. it finds a result in E's
    :attr:`value` attribute. For the simplest scenario, there even is
    a utility method :meth:`circuits.core.manager.Manager.callEvent`
    that combines firing and waiting.
    """
def wrapper(f):
    if names and isinstance(names[0], bool) and not names[0]:
        f.handler = False
        return f

    if len(names) > 0 and inspect.isclass(names[0]) and \
            issubclass(names[0], hfosEvent):
        f.names = (str(names[0].realname()),)
    else:
        f.names = names

    f.handler = True

    f.priority = kwargs.get("priority", 0)
    f.channel = kwargs.get("channel", None)
    f.override = kwargs.get("override", False)

    args = inspect.getargspec(f)[0]

    if args and args[0] == "self":
        del args[0]
    f.event = getattr(f, "event", bool(args and args[0] == "event"))

    return f

return wrapper
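A minimal usage sketch of the decorator on a component method; the component class, event name and channel below are hypothetical:

    class Clock(ConfigurableComponent):  # hypothetical component

        @handler('second_elapsed', channel='timer', priority=1)
        def _tick(self, event, *args):
            # The optional first argument 'event' receives the
            # triggering event object, per the docstring above.
            self.log('Tick:', args)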
<SYSTEM_TASK:> Log a statement from this component <END_TASK> <USER_TASK:> Description: def log(self, *args, **kwargs): """Log a statement from this component"""
func = inspect.currentframe().f_back.f_code
# Dump the message + the name of this function to the log.

if 'exc' in kwargs and kwargs['exc'] is True:
    exc_type, exc_obj, exc_tb = exc_info()
    line_no = exc_tb.tb_lineno
    # print('EXCEPTION DATA:', line_no, exc_type, exc_obj, exc_tb)
    args += traceback.extract_tb(exc_tb),
else:
    line_no = func.co_firstlineno

sourceloc = "[%.10s@%s:%i]" % (
    func.co_name,
    func.co_filename,
    line_no
)

hfoslog(sourceloc=sourceloc, emitter=self.uniquename, *args,
        **kwargs)
<SYSTEM_TASK:> Register a configurable component in the configuration schema <END_TASK> <USER_TASK:> Description: def register(self, *args): """Register a configurable component in the configuration schema store"""
super(ConfigurableMeta, self).register(*args)
from hfos.database import configschemastore
# self.log('ADDING SCHEMA:')
# pprint(self.configschema)
configschemastore[self.name] = self.configschema
<SYSTEM_TASK:> Removes the unique name from the system's unique name list <END_TASK> <USER_TASK:> Description: def unregister(self): """Removes the unique name from the system's unique name list"""
self.names.remove(self.uniquename)
super(ConfigurableMeta, self).unregister()
<SYSTEM_TASK:> Read this component's configuration from the database <END_TASK> <USER_TASK:> Description: def _read_config(self): """Read this component's configuration from the database"""
try:
    self.config = self.componentmodel.find_one(
        {'name': self.uniquename})
except ServerSelectionTimeoutError:  # pragma: no cover
    self.log("No database access! Check if mongodb is running "
             "correctly.", lvl=critical)
if self.config:
    self.log("Configuration read.", lvl=verbose)
else:
    self.log("No configuration found.", lvl=warn)
<SYSTEM_TASK:> Write this component's configuration back to the database <END_TASK> <USER_TASK:> Description: def _write_config(self): """Write this component's configuration back to the database"""
if not self.config:
    self.log("Unable to write non existing configuration",
             lvl=error)
    return
self.config.save()
self.log("Configuration stored.")
<SYSTEM_TASK:> Set this component's initial configuration <END_TASK> <USER_TASK:> Description: def _set_config(self, config=None): """Set this component's initial configuration"""
if not config:
    config = {}

try:
    # pprint(self.configschema)
    self.config = self.componentmodel(config)
    # self.log("Config schema:", lvl=critical)
    # pprint(self.config.__dict__)
    # pprint(self.config._fields)

    try:
        name = self.config.name
        self.log("Name set to: ", name, lvl=verbose)
    except (AttributeError, KeyError):  # pragma: no cover
        self.log("Has no name.", lvl=verbose)

    try:
        self.config.name = self.uniquename
    except (AttributeError, KeyError) as e:  # pragma: no cover
        self.log("Cannot set component name for configuration: ",
                 e, type(e), self.name, exc=True, lvl=critical)

    try:
        uuid = self.config.uuid
        self.log("UUID set to: ", uuid, lvl=verbose)
    except (AttributeError, KeyError):
        self.log("Has no UUID", lvl=verbose)
        self.config.uuid = str(uuid4())

    try:
        notes = self.config.notes
        self.log("Notes set to: ", notes, lvl=verbose)
    except (AttributeError, KeyError):
        self.log("Has no notes, trying docstring", lvl=verbose)

        notes = self.__doc__
        if notes is None:
            notes = "No notes."
        else:
            notes = notes.lstrip().rstrip()
            self.log(notes)
        self.config.notes = notes

    try:
        componentclass = self.config.componentclass
        self.log("Componentclass set to: ", componentclass,
                 lvl=verbose)
    except (AttributeError, KeyError):
        self.log("Has no component class", lvl=verbose)
        self.config.componentclass = self.name

except ValidationError as e:
    self.log("Not setting invalid component configuration: ", e,
             type(e), exc=True, lvl=error)
<SYSTEM_TASK:> Event triggered configuration reload <END_TASK> <USER_TASK:> Description: def reload_configuration(self, event): """Event triggered configuration reload"""
if event.target == self.uniquename:
    self.log('Reloading configuration')
    self._read_config()
<SYSTEM_TASK:> Build a module from templates and user supplied information <END_TASK> <USER_TASK:> Description: def _construct_module(info, target): """Build a module from templates and user supplied information"""
for path in paths:
    real_path = os.path.abspath(os.path.join(target,
                                             path.format(**info)))
    log("Making directory '%s'" % real_path)
    os.makedirs(real_path)

# pprint(info)
for item in templates.values():
    source = os.path.join('dev/templates', item[0])
    filename = os.path.abspath(
        os.path.join(target, item[1].format(**info)))
    log("Creating file from template '%s'" % filename,
        emitter='MANAGE')
    write_template_file(source, filename, info)
<SYSTEM_TASK:> Generates a lookup field for form definitions <END_TASK> <USER_TASK:> Description: def lookup_field(key, lookup_type=None, placeholder=None, html_class="div", select_type="strapselect", mapping="uuid"): """Generates a lookup field for form definitions"""
if lookup_type is None:
    lookup_type = key

if placeholder is None:
    placeholder = "Select a " + lookup_type

result = {
    'key': key,
    'htmlClass': html_class,
    'type': select_type,
    'placeholder': placeholder,
    'options': {
        "type": lookup_type,
        "asyncCallback": "$ctrl.getFormData",
        "map": {'valueProperty': mapping, 'nameProperty': 'name'}
    }
}

return result
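Since the function is deterministic, a call like lookup_field('vesseluuid', 'vessel') (the field name is an illustrative assumption) yields exactly:

    {
        'key': 'vesseluuid',
        'htmlClass': 'div',
        'type': 'strapselect',
        'placeholder': 'Select a vessel',
        'options': {
            'type': 'vessel',
            'asyncCallback': '$ctrl.getFormData',
            'map': {'valueProperty': 'uuid', 'nameProperty': 'name'}
        }
    }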
<SYSTEM_TASK:> A field set with a title and sub items <END_TASK> <USER_TASK:> Description: def fieldset(title, items, options=None): """A field set with a title and sub items"""
result = {
    'title': title,
    'type': 'fieldset',
    'items': items
}
if options is not None:
    result.update(options)

return result
<SYSTEM_TASK:> A section consisting of rows and columns <END_TASK> <USER_TASK:> Description: def section(rows, columns, items, label=None): """A section consisting of rows and columns"""
# TODO: Integrate label

sections = []

column_class = "section-column col-sm-%i" % (12 / columns)

for vertical in range(columns):
    column_items = []
    for horizontal in range(rows):
        try:
            item = items[horizontal][vertical]
            column_items.append(item)
        except IndexError:
            hfoslog('Field in', label,
                    'omitted, due to missing row/column:',
                    vertical, horizontal, lvl=warn,
                    emitter='FORMS', tb=True, frame=2)
    column = {
        'type': 'section',
        'htmlClass': column_class,
        'items': column_items
    }
    sections.append(column)

result = {
    'type': 'section',
    'htmlClass': 'row',
    'items': sections
}

return result
<SYSTEM_TASK:> An array that starts empty <END_TASK> <USER_TASK:> Description: def emptyArray(key, add_label=None): """An array that starts empty"""
result = {
    'key': key,
    'startEmpty': True
}
if add_label is not None:
    result['add'] = add_label
    result['style'] = {'add': 'btn-success'}
return result
<SYSTEM_TASK:> A tabbed container widget <END_TASK> <USER_TASK:> Description: def tabset(titles, contents): """A tabbed container widget"""
tabs = []
for no, title in enumerate(titles):
    tab = {
        'title': title,
    }
    content = contents[no]
    if isinstance(content, list):
        tab['items'] = content
    else:
        tab['items'] = [content]
    tabs.append(tab)

result = {
    'type': 'tabs',
    'tabs': tabs
}
return result
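The helpers compose; a sketch of a two-tab form definition built from the functions above (the field names are hypothetical):

    form = tabset(
        ['General', 'Extra'],
        [
            # Tab 1: a fieldset with a plain field and a lookup
            fieldset('Basics', ['name', lookup_field('owner', 'user')]),
            # Tab 2: one row split into two columns
            section(1, 2, [['notes', emptyArray('tags', 'Add tag')]])
        ]
    )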
<SYSTEM_TASK:> Tests internet connectivity at regular intervals and updates the nodestate accordingly <END_TASK> <USER_TASK:> Description: def timed_connectivity_check(self, event): """Tests internet connectivity at regular intervals and updates the nodestate accordingly"""
self.status = self._can_connect()

self.log('Timed connectivity check:', self.status, lvl=verbose)

if self.status:
    if not self.old_status:
        self.log('Connectivity gained')
        self.fireEvent(backend_nodestate_toggle(
            STATE_UUID_CONNECTIVITY, on=True, force=True))
else:
    if self.old_status:
        self.log('Connectivity lost', lvl=warn)
        self.old_status = False
        self.fireEvent(backend_nodestate_toggle(
            STATE_UUID_CONNECTIVITY, off=True, force=True))

self.old_status = self.status
<SYSTEM_TASK:> ActivityMonitor event handler for incoming events <END_TASK> <USER_TASK:> Description:
def activityrequest(self, event):
    """ActivityMonitor event handler for incoming events

    :param event: event with incoming ActivityMonitor message
    """
# self.log("Event: '%s'" % event.__dict__)

try:
    action = event.action
    data = event.data
    self.log("Activityrequest: ", action, data)
except Exception as e:
    self.log("Error: '%s' %s" % (e, type(e)), lvl=error)
<SYSTEM_TASK:> Validates all objects or all objects of a given schema. <END_TASK> <USER_TASK:> Description: def validate(ctx, schema, all_schemata): """Validates all objects or all objects of a given schema."""
database = ctx.obj['db']

if schema is None:
    if all_schemata is False:
        log('No schema given. Read the help', lvl=warn)
        return
    else:
        schemata = database.objectmodels.keys()
else:
    schemata = [schema]

for schema in schemata:
    try:
        things = database.objectmodels[schema]
        with click.progressbar(things.find(), length=things.count(),
                               label='Validating %15s' % schema) \
                as object_bar:
            for obj in object_bar:
                obj.validate()
    except Exception as e:
        log('Exception while validating:',
            schema, e, type(e),
            '\n\nFix this object and rerun validation!',
            emitter='MANAGE', lvl=error)

log('Done')
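If this is wired up as a click command (as the ctx parameter suggests), invocation would look roughly like the following; the command path and option spellings are hypothetical:

    # Hypothetical CLI usage:
    #   <manage-tool> validate --schema user
    #   <manage-tool> validate --all-schemata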
<SYSTEM_TASK:> Find fields in registered data models. <END_TASK> <USER_TASK:> Description: def find_field(ctx, search, by_type, obj): """Find fields in registered data models."""
# TODO: Fix this to work recursively on all possible subschemes

if search is None:
    search = _ask("Enter search term")

database = ctx.obj['db']

def find(search_schema, search_field, find_result=None, key=""):
    """Examine a schema to find fields by type or name"""

    if find_result is None:
        find_result = []

    fields = search_schema['properties']

    if not by_type:
        if search_field in fields:
            find_result.append(key)
            # log("Found queried fieldname in ", model)
    else:
        for field in fields:
            try:
                if "type" in fields[field]:
                    # log(fields[field], field)
                    if fields[field]["type"] == search_field:
                        find_result.append((key, field))
                        # log("Found field", field, "in", model)
            except KeyError as e:
                log("Field access error:", e, type(e), exc=True,
                    lvl=debug)

    if 'properties' in fields:
        # log('Sub properties checking:', fields['properties'])
        find_result.append(find(fields['properties'], search_field,
                                find_result, key=fields['name']))

    for field in fields:
        if 'items' in fields[field]:
            if 'properties' in fields[field]['items']:
                # log('Sub items checking:', fields[field])
                find_result.append(find(fields[field]['items'],
                                        search_field, find_result,
                                        key=field))
            else:
                pass
                # log('Items without proper definition!')

    return find_result

if obj is not None:
    schema = database.objectmodels[obj]._schema

    result = find(schema, search, [], key="top")
    if result:
        # log(args.object, result)
        print(obj)
        pprint(result)
else:
    for model, thing in database.objectmodels.items():
        schema = thing._schema

        result = find(schema, search, [], key="top")
        if result:
            print(model)
            # log(model, result)
            print(result)
<SYSTEM_TASK:> Get distance between pairs of lat-lon points <END_TASK> <USER_TASK:> Description: def Distance(lat1, lon1, lat2, lon2): """Get distance between pairs of lat-lon points"""
az12, az21, dist = wgs84_geod.inv(lon1, lat1, lon2, lat2)
return az21, dist
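A sketch of how wgs84_geod is presumably set up, plus a usage example; pyproj's Geod.inv returns forward azimuth, back azimuth and distance in meters:

    from pyproj import Geod

    wgs84_geod = Geod(ellps='WGS84')  # assumed module-level geodesic

    # Hamburg to Berlin, a great-circle distance of roughly 255 km:
    azimuth, dist = Distance(53.55, 9.99, 52.52, 13.40)
    print(round(dist / 1000), 'km')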
<SYSTEM_TASK:> Display known details about a given client <END_TASK> <USER_TASK:> Description: def client_details(self, *args): """Display known details about a given client"""
self.log(_('Client details:', lang='de'))
client = self._clients[args[0]]
self.log('UUID:', client.uuid,
         'IP:', client.ip,
         'Name:', client.name,
         'User:', self._users[client.useruuid],
         pretty=True)
<SYSTEM_TASK:> Display a list of connected clients <END_TASK> <USER_TASK:> Description: def client_list(self, *args): """Display a list of connected clients"""
if len(self._clients) == 0:
    self.log('No clients connected')
else:
    self.log(self._clients, pretty=True)
<SYSTEM_TASK:> Display a list of connected users <END_TASK> <USER_TASK:> Description: def users_list(self, *args): """Display a list of connected users"""
if len(self._users) == 0:
    self.log('No users connected')
else:
    self.log(self._users, pretty=True)
<SYSTEM_TASK:> Display a table of connected users and clients <END_TASK> <USER_TASK:> Description: def who(self, *args): """Display a table of connected users and clients"""
if len(self._users) == 0:
    self.log('No users connected')

if len(self._clients) == 0:
    self.log('No clients connected')
    return

Row = namedtuple("Row", ['User', 'Client', 'IP'])
rows = []

for user in self._users.values():
    for key, client in self._clients.items():
        if client.useruuid == user.uuid:
            row = Row(user.account.name, key, client.ip)
            rows.append(row)

for key, client in self._clients.items():
    if client.useruuid is None:
        row = Row('ANON', key, client.ip)
        rows.append(row)

self.log("\n" + std_table(rows))
<SYSTEM_TASK:> Handles socket disconnections <END_TASK> <USER_TASK:> Description: def disconnect(self, sock): """Handles socket disconnections"""
self.log("Disconnect ", sock, lvl=debug)

try:
    if sock in self._sockets:
        self.log("Getting socket", lvl=debug)
        sockobj = self._sockets[sock]
        self.log("Getting clientuuid", lvl=debug)
        clientuuid = sockobj.clientuuid
        self.log("Getting useruuid", lvl=debug)
        useruuid = self._clients[clientuuid].useruuid

        self.log("Firing disconnect event", lvl=debug)
        self.fireEvent(clientdisconnect(
            clientuuid, self._clients[clientuuid].useruuid))

        self.log("Logging out relevant client", lvl=debug)
        if useruuid is not None:
            self.log("Client was logged in", lvl=debug)

            try:
                self._logoutclient(useruuid, clientuuid)
                self.log("Client logged out", useruuid, clientuuid)
            except Exception as e:
                self.log("Couldn't clean up logged in user! ",
                         self._users[useruuid], e, type(e),
                         lvl=critical)
        self.log("Deleting Client (", self._clients.keys(), ")",
                 lvl=debug)
        del self._clients[clientuuid]
        self.log("Deleting Socket", lvl=debug)
        del self._sockets[sock]
except Exception as e:
    self.log("Error during disconnect handling: ", e, type(e),
             lvl=critical)
<SYSTEM_TASK:> Log out a client and possibly associated user <END_TASK> <USER_TASK:> Description: def _logoutclient(self, useruuid, clientuuid): """Log out a client and possibly associated user"""
self.log("Cleaning up client of logged in user.", lvl=debug)
try:
    self._users[useruuid].clients.remove(clientuuid)
    if len(self._users[useruuid].clients) == 0:
        self.log("Last client of user disconnected.", lvl=verbose)

        self.fireEvent(userlogout(useruuid, clientuuid))
        del self._users[useruuid]

    self._clients[clientuuid].useruuid = None
except Exception as e:
    self.log("Error during client logout: ", e, type(e),
             clientuuid, useruuid, lvl=error, exc=True)
<SYSTEM_TASK:> Registers new sockets and their clients and allocates uuids <END_TASK> <USER_TASK:> Description: def connect(self, *args): """Registers new sockets and their clients and allocates uuids"""
self.log("Connect ", args, lvl=verbose)
try:
    sock = args[0]
    ip = args[1]

    if sock not in self._sockets:
        self.log("New client connected:", ip, lvl=debug)
        clientuuid = str(uuid4())
        self._sockets[sock] = Socket(ip, clientuuid)
        # Key uuid is temporary, until signin, will then be replaced
        # with account uuid
        self._clients[clientuuid] = Client(
            sock=sock,
            ip=ip,
            clientuuid=clientuuid,
        )
        self.log("Client connected:", clientuuid, lvl=debug)
    else:
        self.log("Old IP reconnected!", lvl=warn)
        # self.fireEvent(write(sock, "Another client is connecting
        # from your IP!"))
        # self._sockets[sock] = (ip, uuid.uuid4())
except Exception as e:
    self.log("Error during connect: ", e, type(e), lvl=critical)
<SYSTEM_TASK:> Sends a packet to an already known user or one of his clients by UUID <END_TASK> <USER_TASK:> Description: def send(self, event): """Sends a packet to an already known user or one of his clients by UUID"""
try:
    jsonpacket = json.dumps(event.packet, cls=ComplexEncoder)
    if event.sendtype == "user":
        # TODO: I think, caching a user name <-> uuid table would
        # make sense instead of looking this up all the time.

        if event.uuid is None:
            userobject = objectmodels['user'].find_one({
                'name': event.username
            })
        else:
            userobject = objectmodels['user'].find_one({
                'uuid': event.uuid
            })

        if userobject is None:
            self.log("No user by that name known.", lvl=warn)
            return
        else:
            uuid = userobject.uuid

        self.log("Broadcasting to all of user's clients: '%s': '%s'" % (
            uuid, str(event.packet)[:20]), lvl=network)
        if uuid not in self._users:
            self.log("User not connected!", event, lvl=critical)
            return
        clients = self._users[uuid].clients

        for clientuuid in clients:
            sock = self._clients[clientuuid].sock

            if not event.raw:
                self.log("Sending json to client", jsonpacket[:50],
                         lvl=network)
                self.fireEvent(write(sock, jsonpacket), "wsserver")
            else:
                self.log("Sending raw data to client")
                self.fireEvent(write(sock, event.packet), "wsserver")
    else:  # only to client
        self.log("Sending to user's client: '%s': '%s'" % (
            event.uuid, jsonpacket[:20]), lvl=network)
        if event.uuid not in self._clients:
            if not event.fail_quiet:
                self.log("Unknown client!", event.uuid,
                         lvl=critical)
                self.log("Clients:", self._clients, lvl=debug)
            return

        sock = self._clients[event.uuid].sock

        if not event.raw:
            self.fireEvent(write(sock, jsonpacket), "wsserver")
        else:
            self.log("Sending raw data to client", lvl=network)
            # Only the log above is truncated; raw packets are sent
            # unmodified.
            self.fireEvent(write(sock, event.packet), "wsserver")
except Exception as e:
    self.log("Exception during sending: %s (%s)" % (e, type(e)),
             lvl=critical, exc=True)
<SYSTEM_TASK:> Broadcasts an event either to all users or clients, depending on the event flag <END_TASK> <USER_TASK:> Description: def broadcast(self, event): """Broadcasts an event either to all users or clients, depending on event flag"""
try:
    if event.broadcasttype == "users":
        if len(self._users) > 0:
            self.log("Broadcasting to all users:", event.content,
                     lvl=network)
            for useruuid in self._users.keys():
                self.fireEvent(
                    send(useruuid, event.content, sendtype="user"))
        # else:
        #     self.log("Not broadcasting, no users connected.",
        #              lvl=debug)

    elif event.broadcasttype == "clients":
        if len(self._clients) > 0:
            self.log("Broadcasting to all clients: ",
                     event.content, lvl=network)
            for client in self._clients.values():
                self.fireEvent(write(client.sock, event.content),
                               "wsserver")
        # else:
        #     self.log("Not broadcasting, no clients connected.",
        #              lvl=debug)

    elif event.broadcasttype == "socks":
        if len(self._sockets) > 0:
            self.log("Emergency?! Broadcasting to all sockets: ",
                     event.content)
            for sock in self._sockets:
                self.fireEvent(write(sock, event.content),
                               "wsserver")
        # else:
        #     self.log("Not broadcasting, no sockets connected.",
        #              lvl=debug)

except Exception as e:
    self.log("Error during broadcast: ", e, type(e), lvl=critical)
<SYSTEM_TASK:> Checks if the user has any role that allows firing the event. <END_TASK> <USER_TASK:> Description: def _checkPermissions(self, user, event): """Checks if the user has any role that allows firing the event."""
for role in user.account.roles:
    if role in event.roles:
        self.log('Access granted', lvl=verbose)
        return True

self.log('Access denied', lvl=verbose)
return False
<SYSTEM_TASK:> Isolated communication link for authorized events. <END_TASK> <USER_TASK:> Description: def _handleAuthorizedEvents(self, component, action, data, user, client): """Isolated communication link for authorized events."""
try:
    if component == "debugger":
        self.log(component, action, data, user, client, lvl=info)

    if not user and component in self.authorized_events.keys():
        self.log("Unknown client tried to do an authenticated "
                 "operation: %s", component, action, data, user)
        return

    event = self.authorized_events[component][action]['event'](
        user, action, data, client)

    self.log('Authorized event roles:', event.roles, lvl=verbose)
    if not self._checkPermissions(user, event):
        result = {
            'component': 'hfos.ui.clientmanager',
            'action': 'Permission',
            'data': _('You have no role that allows this action.',
                      lang='de')
        }
        self.fireEvent(send(event.client.uuid, result))
        return

    self.log("Firing authorized event: ", component, action,
             str(data)[:100], lvl=debug)
    # self.log("", (user, action, data, client), lvl=critical)
    self.fireEvent(event)
except Exception as e:
    self.log("Critical error during authorized event handling:",
             component, action, e, type(e), lvl=critical, exc=True)
<SYSTEM_TASK:> Handler for authentication events <END_TASK> <USER_TASK:> Description: def _handleAuthenticationEvents(self, requestdata, requestaction, clientuuid, sock): """Handler for authentication events"""
# TODO: Move this stuff over to ./auth.py

if requestaction in ("login", "autologin"):
    try:
        self.log("Login request", lvl=verbose)

        if requestaction == "autologin":
            username = password = None
            requestedclientuuid = requestdata
            auto = True

            self.log("Autologin for", requestedclientuuid,
                     lvl=debug)
        else:
            username = requestdata['username']
            password = requestdata['password']

            if 'clientuuid' in requestdata:
                requestedclientuuid = requestdata['clientuuid']
            else:
                requestedclientuuid = None
            auto = False

            self.log("Auth request by", username, lvl=verbose)

        self.fireEvent(authenticationrequest(
            username,
            password,
            clientuuid,
            requestedclientuuid,
            sock,
            auto,
        ), "auth")
        return
    except Exception as e:
        self.log("Login failed: ", e, type(e), lvl=warn, exc=True)
elif requestaction == "logout":
    self.log("User logged out, refreshing client.", lvl=network)
    try:
        if clientuuid in self._clients:
            client = self._clients[clientuuid]
            user_id = client.useruuid
            if client.useruuid:
                self.log("Logout client uuid: ", clientuuid)
                self._logoutclient(client.useruuid, clientuuid)
                self.fireEvent(clientdisconnect(clientuuid))
        else:
            self.log("Client is not connected!", lvl=warn)
    except Exception as e:
        self.log("Error during client logout: ", e, type(e),
                 lvl=error, exc=True)
else:
    self.log("Unsupported auth action requested:",
             requestaction, lvl=warn)
<SYSTEM_TASK:> Resets the list of flood offenders on event trigger <END_TASK> <USER_TASK:> Description: def _reset_flood_offenders(self, *args): """Resets the list of flood offenders on event trigger"""
offenders = []
# self.log('Resetting flood offenders')

for offender, offence_time in self._flooding.items():
    if time() - offence_time < 10:
        self.log('Removed offender from flood list:', offender)
        offenders.append(offender)

for offender in offenders:
    del self._flooding[offender]
<SYSTEM_TASK:> Checks if any clients have been flooding the node <END_TASK> <USER_TASK:> Description: def _check_flood_protection(self, component, action, clientuuid): """Checks if any clients have been flooding the node"""
if clientuuid not in self._flood_counter:
    self._flood_counter[clientuuid] = 0

self._flood_counter[clientuuid] += 1

if self._flood_counter[clientuuid] > 100:
    packet = {
        'component': 'hfos.ui.clientmanager',
        'action': 'Flooding',
        'data': True
    }
    self.fireEvent(send(clientuuid, packet))
    self.log('Flooding from', clientuuid)
    return True
<SYSTEM_TASK:> Links the client to the granted account and profile, then notifies the client <END_TASK> <USER_TASK:> Description: def authentication(self, event): """Links the client to the granted account and profile, then notifies the client"""
try:
    self.log("Authorization has been granted by DB check:",
             event.username, lvl=debug)

    account, profile, clientconfig = event.userdata

    useruuid = event.useruuid
    originatingclientuuid = event.clientuuid
    clientuuid = clientconfig.uuid

    if clientuuid != originatingclientuuid:
        self.log("Mutating client uuid to request id:",
                 clientuuid, lvl=network)

    # Assign client to user
    if useruuid in self._users:
        signedinuser = self._users[useruuid]
    else:
        signedinuser = User(account, profile, useruuid)
        self._users[account.uuid] = signedinuser

    if clientuuid in signedinuser.clients:
        self.log("Client configuration already logged in.",
                 lvl=critical)
        # TODO: What now??
        # Probably senseful would be to add the socket to the
        # client's other socket
        # The clients would be identical then - that could cause
        # problems which could be remedied by duplicating the
        # configuration
    else:
        signedinuser.clients.append(clientuuid)
        self.log("Active client (", clientuuid, ") registered to "
                 "user", useruuid, lvl=debug)

    # Update socket..
    socket = self._sockets[event.sock]
    socket.clientuuid = clientuuid
    self._sockets[event.sock] = socket

    # ..and client lists
    try:
        language = clientconfig.language
    except AttributeError:
        language = "en"

    # TODO: Rewrite and simplify this:
    newclient = Client(
        sock=event.sock,
        ip=socket.ip,
        clientuuid=clientuuid,
        useruuid=useruuid,
        name=clientconfig.name,
        config=clientconfig,
        language=language
    )

    del self._clients[originatingclientuuid]
    self._clients[clientuuid] = newclient

    authpacket = {"component": "auth", "action": "login",
                  "data": account.serializablefields()}
    self.log("Transmitting Authorization to client", authpacket,
             lvl=network)
    self.fireEvent(
        write(event.sock, json.dumps(authpacket)),
        "wsserver"
    )

    profilepacket = {"component": "profile", "action": "get",
                     "data": profile.serializablefields()}
    self.log("Transmitting Profile to client", profilepacket,
             lvl=network)
    self.fireEvent(write(event.sock, json.dumps(profilepacket)),
                   "wsserver")

    clientconfigpacket = {"component": "clientconfig",
                          "action": "get",
                          "data": clientconfig.serializablefields()}
    self.log("Transmitting client configuration to client",
             clientconfigpacket, lvl=network)
    self.fireEvent(write(event.sock,
                         json.dumps(clientconfigpacket)),
                   "wsserver")

    self.fireEvent(userlogin(clientuuid, useruuid, clientconfig,
                             signedinuser))

    self.log("User configured: Name", signedinuser.account.name,
             "Profile", signedinuser.profile.uuid, "Clients",
             signedinuser.clients, lvl=debug)

except Exception as e:
    self.log("Error (%s, %s) during auth grant: %s" % (
        type(e), e, event), lvl=error)
<SYSTEM_TASK:> Store client's selection of a new translation <END_TASK> <USER_TASK:> Description: def selectlanguage(self, event): """Store client's selection of a new translation"""
self.log('Language selection event:', event.client, pretty=True)

if event.data not in all_languages():
    self.log('Unavailable language selected:', event.data,
             lvl=warn)
    language = None
else:
    language = event.data

if language is None:
    language = 'en'

event.client.language = language

if event.client.config is not None:
    event.client.config.language = language
    event.client.config.save()
<SYSTEM_TASK:> Compile and return a human readable list of registered translations <END_TASK> <USER_TASK:> Description: def getlanguages(self, event): """Compile and return a human readable list of registered translations"""
self.log('Client requests all languages.', lvl=verbose)
result = {
    'component': 'hfos.ui.clientmanager',
    'action': 'getlanguages',
    'data': language_token_to_name(all_languages())
}
self.fireEvent(send(event.client.uuid, result))
<SYSTEM_TASK:> Converts between geodetic, modified apex, quasi-dipole and MLT. <END_TASK> <USER_TASK:> Description:
def convert(self, lat, lon, source, dest, height=0, datetime=None,
            precision=1e-10, ssheight=50*6371):
    """Converts between geodetic, modified apex, quasi-dipole and MLT.

    Parameters
    ==========
    lat : array_like
        Latitude
    lon : array_like
        Longitude/MLT
    source : {'geo', 'apex', 'qd', 'mlt'}
        Input coordinate system
    dest : {'geo', 'apex', 'qd', 'mlt'}
        Output coordinate system
    height : array_like, optional
        Altitude in km
    datetime : :class:`datetime.datetime`
        Date and time for MLT conversions (required for MLT
        conversions)
    precision : float, optional
        Precision of output (degrees) when converting to geo. A
        negative value of this argument produces a low-precision
        calculation of geodetic lat/lon based only on their spherical
        harmonic representation. A positive value causes the
        underlying Fortran routine to iterate until feeding the
        output geo lat/lon into geo2qd (APXG2Q) reproduces the input
        QD lat/lon to within the specified precision (all coordinates
        being converted to geo are converted to QD first and passed
        through APXG2Q).
    ssheight : float, optional
        Altitude in km to use for converting the subsolar point from
        geographic to magnetic coordinates. A high altitude is used
        to ensure the subsolar point is mapped to high latitudes,
        which prevents the South-Atlantic Anomaly (SAA) from
        influencing the MLT.

    Returns
    =======
    lat : ndarray or float
        Converted latitude (if converting to MLT, output latitude is
        apex)
    lon : ndarray or float
        Converted longitude/MLT
    """
if datetime is None and ('mlt' in [source, dest]):
    raise ValueError('datetime must be given for MLT calculations')

lat = helpers.checklat(lat)

if source == dest:
    return lat, lon

# from geo
elif source == 'geo' and dest == 'apex':
    lat, lon = self.geo2apex(lat, lon, height)
elif source == 'geo' and dest == 'qd':
    lat, lon = self.geo2qd(lat, lon, height)
elif source == 'geo' and dest == 'mlt':
    lat, lon = self.geo2apex(lat, lon, height)
    lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)

# from apex
elif source == 'apex' and dest == 'geo':
    lat, lon, _ = self.apex2geo(lat, lon, height,
                                precision=precision)
elif source == 'apex' and dest == 'qd':
    lat, lon = self.apex2qd(lat, lon, height=height)
elif source == 'apex' and dest == 'mlt':
    lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)

# from qd
elif source == 'qd' and dest == 'geo':
    lat, lon, _ = self.qd2geo(lat, lon, height,
                              precision=precision)
elif source == 'qd' and dest == 'apex':
    lat, lon = self.qd2apex(lat, lon, height=height)
elif source == 'qd' and dest == 'mlt':
    lat, lon = self.qd2apex(lat, lon, height=height)
    lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)

# from mlt (input latitude assumed apex)
elif source == 'mlt' and dest == 'geo':
    lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
    lat, lon, _ = self.apex2geo(lat, lon, height,
                                precision=precision)
elif source == 'mlt' and dest == 'apex':
    lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
elif source == 'mlt' and dest == 'qd':
    lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
    lat, lon = self.apex2qd(lat, lon, height=height)

# no other transformations are implemented
else:
    estr = 'Unknown coordinate transformation: '
    estr += '{} -> {}'.format(source, dest)
    raise NotImplementedError(estr)

return lat, lon
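A usage sketch following apexpy's documented interface (coefficients for epoch 2015 assumed available):

    from datetime import datetime
    from apexpy import Apex

    apex = Apex(2015)

    # Geodetic 60N, 15E at 300 km altitude to apex latitude and MLT:
    mlat, mlt = apex.convert(60, 15, 'geo', 'mlt', height=300,
                             datetime=datetime(2015, 2, 10, 18, 0))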
<SYSTEM_TASK:> Converts geodetic to modified apex coordinates. <END_TASK> <USER_TASK:> Description:
def geo2apex(self, glat, glon, height):
    """Converts geodetic to modified apex coordinates.

    Parameters
    ==========
    glat : array_like
        Geodetic latitude
    glon : array_like
        Geodetic longitude
    height : array_like
        Altitude in km

    Returns
    =======
    alat : ndarray or float
        Modified apex latitude
    alon : ndarray or float
        Modified apex longitude
    """
glat = helpers.checklat(glat, name='glat')

alat, alon = self._geo2apex(glat, glon, height)

if np.any(np.float64(alat) == -9999):
    warnings.warn('Apex latitude set to -9999 where undefined '
                  '(apex height may be < reference height)')

# if array is returned, dtype is object, so convert to float
return np.float64(alat), np.float64(alon)
<SYSTEM_TASK:> Converts modified apex to geodetic coordinates. <END_TASK> <USER_TASK:> Description:
def apex2geo(self, alat, alon, height, precision=1e-10):
    """Converts modified apex to geodetic coordinates.

    Parameters
    ==========
    alat : array_like
        Modified apex latitude
    alon : array_like
        Modified apex longitude
    height : array_like
        Altitude in km
    precision : float, optional
        Precision of output (degrees). A negative value of this
        argument produces a low-precision calculation of geodetic
        lat/lon based only on their spherical harmonic
        representation. A positive value causes the underlying
        Fortran routine to iterate until feeding the output geo
        lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon
        to within the specified precision.

    Returns
    =======
    glat : ndarray or float
        Geodetic latitude
    glon : ndarray or float
        Geodetic longitude
    error : ndarray or float
        The angular difference (degrees) between the input QD
        coordinates and the qlat/qlon produced by feeding the output
        glat and glon into geo2qd (APXG2Q)
    """
alat = helpers.checklat(alat, name='alat')

qlat, qlon = self.apex2qd(alat, alon, height=height)
glat, glon, error = self.qd2geo(qlat, qlon, height,
                                precision=precision)

return glat, glon, error
<SYSTEM_TASK:> Converts geodetic to quasi-dipole coordinates. <END_TASK> <USER_TASK:> Description:
def geo2qd(self, glat, glon, height):
    """Converts geodetic to quasi-dipole coordinates.

    Parameters
    ==========
    glat : array_like
        Geodetic latitude
    glon : array_like
        Geodetic longitude
    height : array_like
        Altitude in km

    Returns
    =======
    qlat : ndarray or float
        Quasi-dipole latitude
    qlon : ndarray or float
        Quasi-dipole longitude
    """
glat = helpers.checklat(glat, name='glat')

qlat, qlon = self._geo2qd(glat, glon, height)

# if array is returned, dtype is object, so convert to float
return np.float64(qlat), np.float64(qlon)
<SYSTEM_TASK:> Converts quasi-dipole to geodetic coordinates. <END_TASK> <USER_TASK:> Description:
def qd2geo(self, qlat, qlon, height, precision=1e-10):
    """Converts quasi-dipole to geodetic coordinates.

    Parameters
    ==========
    qlat : array_like
        Quasi-dipole latitude
    qlon : array_like
        Quasi-dipole longitude
    height : array_like
        Altitude in km
    precision : float, optional
        Precision of output (degrees). A negative value of this
        argument produces a low-precision calculation of geodetic
        lat/lon based only on their spherical harmonic
        representation. A positive value causes the underlying
        Fortran routine to iterate until feeding the output geo
        lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon
        to within the specified precision.

    Returns
    =======
    glat : ndarray or float
        Geodetic latitude
    glon : ndarray or float
        Geodetic longitude
    error : ndarray or float
        The angular difference (degrees) between the input QD
        coordinates and the qlat/qlon produced by feeding the output
        glat and glon into geo2qd (APXG2Q)
    """
qlat = helpers.checklat(qlat, name='qlat')

glat, glon, error = self._qd2geo(qlat, qlon, height, precision)

# if array is returned, dtype is object, so convert to float
return np.float64(glat), np.float64(glon), np.float64(error)
<SYSTEM_TASK:> Converts modified apex to quasi-dipole coordinates. <END_TASK> <USER_TASK:> Description:
def apex2qd(self, alat, alon, height):
    """Converts modified apex to quasi-dipole coordinates.

    Parameters
    ==========
    alat : array_like
        Modified apex latitude
    alon : array_like
        Modified apex longitude
    height : array_like
        Altitude in km

    Returns
    =======
    qlat : ndarray or float
        Quasi-dipole latitude
    qlon : ndarray or float
        Quasi-dipole longitude

    Raises
    ======
    ApexHeightError
        if `height` > apex height
    """
qlat, qlon = self._apex2qd(alat, alon, height)

# if array is returned, the dtype is object, so convert to float
return np.float64(qlat), np.float64(qlon)
<SYSTEM_TASK:> Converts quasi-dipole to modified apex coordinates. <END_TASK> <USER_TASK:> Description:
def qd2apex(self, qlat, qlon, height):
    """Converts quasi-dipole to modified apex coordinates.

    Parameters
    ==========
    qlat : array_like
        Quasi-dipole latitude
    qlon : array_like
        Quasi-dipole longitude
    height : array_like
        Altitude in km

    Returns
    =======
    alat : ndarray or float
        Modified apex latitude
    alon : ndarray or float
        Modified apex longitude

    Raises
    ======
    ApexHeightError
        if apex height < reference height
    """
alat, alon = self._qd2apex(qlat, qlon, height)

# if array is returned, the dtype is object, so convert to float
return np.float64(alat), np.float64(alon)
<SYSTEM_TASK:> Computes the magnetic local time at the specified magnetic longitude and UT. <END_TASK> <USER_TASK:> Description:
def mlon2mlt(self, mlon, datetime, ssheight=50*6371):
    """Computes the magnetic local time at the specified magnetic
    longitude and UT.

    Parameters
    ==========
    mlon : array_like
        Magnetic longitude (apex and quasi-dipole longitude are
        always equal)
    datetime : :class:`datetime.datetime`
        Date and time
    ssheight : float, optional
        Altitude in km to use for converting the subsolar point from
        geographic to magnetic coordinates. A high altitude is used
        to ensure the subsolar point is mapped to high latitudes,
        which prevents the South-Atlantic Anomaly (SAA) from
        influencing the MLT.

    Returns
    =======
    mlt : ndarray or float
        Magnetic local time [0, 24)

    Notes
    =====
    To compute the MLT, we find the apex longitude of the subsolar
    point at the given time. Then the MLT of the given point will be
    computed from the separation in magnetic longitude from this
    point (1 hour = 15 degrees).
    """
ssglat, ssglon = helpers.subsol(datetime)
ssalat, ssalon = self.geo2apex(ssglat, ssglon, ssheight)

# np.float64 will ensure lists are converted to arrays
return (180 + np.float64(mlon) - ssalon) / 15 % 24
<SYSTEM_TASK:> Computes the magnetic longitude at the specified magnetic local time and UT. <END_TASK> <USER_TASK:> Description:
def mlt2mlon(self, mlt, datetime, ssheight=50*6371):
    """Computes the magnetic longitude at the specified magnetic
    local time and UT.

    Parameters
    ==========
    mlt : array_like
        Magnetic local time
    datetime : :class:`datetime.datetime`
        Date and time
    ssheight : float, optional
        Altitude in km to use for converting the subsolar point from
        geographic to magnetic coordinates. A high altitude is used
        to ensure the subsolar point is mapped to high latitudes,
        which prevents the South-Atlantic Anomaly (SAA) from
        influencing the MLT.

    Returns
    =======
    mlon : ndarray or float
        Magnetic longitude [0, 360) (apex and quasi-dipole longitude
        are always equal)

    Notes
    =====
    To compute the magnetic longitude, we find the apex longitude of
    the subsolar point at the given time. Then the magnetic longitude
    of the given point will be computed from the separation in
    magnetic local time from this point (1 hour = 15 degrees).
    """
ssglat, ssglon = helpers.subsol(datetime)
ssalat, ssalon = self.geo2apex(ssglat, ssglon, ssheight)

# np.float64 will ensure lists are converted to arrays
return (15 * np.float64(mlt) - 180 + ssalon + 360) % 360
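A quick numeric check that the two formulas invert each other, assuming an illustrative subsolar apex longitude (ssalon) of -120 degrees:

    ssalon = -120.0
    mlon = 30.0
    mlt = (180 + mlon - ssalon) / 15 % 24              # -> 22.0 hours
    mlon_back = (15 * mlt - 180 + ssalon + 360) % 360  # -> 30.0 degrees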
<SYSTEM_TASK:> Performs mapping of points along the magnetic field to the closest or conjugate hemisphere. <END_TASK> <USER_TASK:> Description:
def map_to_height(self, glat, glon, height, newheight,
                  conjugate=False, precision=1e-10):
    """Performs mapping of points along the magnetic field to the
    closest or conjugate hemisphere.

    Parameters
    ==========
    glat : array_like
        Geodetic latitude
    glon : array_like
        Geodetic longitude
    height : array_like
        Source altitude in km
    newheight : array_like
        Destination altitude in km
    conjugate : bool, optional
        Map to `newheight` in the conjugate hemisphere instead of
        the closest hemisphere
    precision : float, optional
        Precision of output (degrees). A negative value of this
        argument produces a low-precision calculation of geodetic
        lat/lon based only on their spherical harmonic
        representation. A positive value causes the underlying
        Fortran routine to iterate until feeding the output geo
        lat/lon into geo2qd (APXG2Q) reproduces the input QD lat/lon
        to within the specified precision.

    Returns
    =======
    newglat : ndarray or float
        Geodetic latitude of mapped point
    newglon : ndarray or float
        Geodetic longitude of mapped point
    error : ndarray or float
        The angular difference (degrees) between the input QD
        coordinates and the qlat/qlon produced by feeding the output
        glat and glon into geo2qd (APXG2Q)

    Notes
    =====
    The mapping is done by converting glat/glon/height to modified
    apex lat/lon, and converting back to geographic using newheight
    (if conjugate, use negative apex latitude when converting back).
    """
alat, alon = self.geo2apex(glat, glon, height)
if conjugate:
    alat = -alat
try:
    newglat, newglon, error = self.apex2geo(alat, alon, newheight,
                                            precision=precision)
except ApexHeightError:
    raise ApexHeightError("newheight is > apex height")

return newglat, newglon, error
<SYSTEM_TASK:> Performs mapping of electric field along the magnetic field. <END_TASK> <USER_TASK:> Description:
def map_E_to_height(self, alat, alon, height, newheight, E):
    """Performs mapping of electric field along the magnetic field.

    It is assumed that the electric field is perpendicular to B.

    Parameters
    ==========
    alat : (N,) array_like or float
        Modified apex latitude
    alon : (N,) array_like or float
        Modified apex longitude
    height : (N,) array_like or float
        Source altitude in km
    newheight : (N,) array_like or float
        Destination altitude in km
    E : (3,) or (3, N) array_like
        Electric field (at `alat`, `alon`, `height`) in geodetic
        east, north, and up components

    Returns
    =======
    E : (3, N) or (3,) ndarray
        The electric field at `newheight` (geodetic east, north,
        and up components)
    """
return self._map_EV_to_height(alat, alon, height, newheight, E, 'E')
<SYSTEM_TASK:> Performs mapping of electric drift velocity along the magnetic field. <END_TASK> <USER_TASK:> Description:
def map_V_to_height(self, alat, alon, height, newheight, V):
    """Performs mapping of electric drift velocity along the
    magnetic field.

    It is assumed that the electric field is perpendicular to B.

    Parameters
    ==========
    alat : (N,) array_like or float
        Modified apex latitude
    alon : (N,) array_like or float
        Modified apex longitude
    height : (N,) array_like or float
        Source altitude in km
    newheight : (N,) array_like or float
        Destination altitude in km
    V : (3,) or (3, N) array_like
        Electric drift velocity (at `alat`, `alon`, `height`) in
        geodetic east, north, and up components

    Returns
    =======
    V : (3, N) or (3,) ndarray
        The electric drift velocity at `newheight` (geodetic east,
        north, and up components)
    """
return self._map_EV_to_height(alat, alon, height, newheight, V, 'V')
<SYSTEM_TASK:> Returns quasi-dipole base vectors f1 and f2 at the specified coordinates. <END_TASK> <USER_TASK:> Description:
def basevectors_qd(self, lat, lon, height, coords='geo',
                   precision=1e-10):
    """Returns quasi-dipole base vectors f1 and f2 at the specified
    coordinates.

    The vectors are described by Richmond [1995] [2]_ and Emmert et
    al. [2010] [3]_. The vector components are geodetic east and
    north.

    Parameters
    ==========
    lat : (N,) array_like or float
        Latitude
    lon : (N,) array_like or float
        Longitude
    height : (N,) array_like or float
        Altitude in km
    coords : {'geo', 'apex', 'qd'}, optional
        Input coordinate system
    precision : float, optional
        Precision of output (degrees) when converting to geo. A
        negative value of this argument produces a low-precision
        calculation of geodetic lat/lon based only on their spherical
        harmonic representation. A positive value causes the
        underlying Fortran routine to iterate until feeding the
        output geo lat/lon into geo2qd (APXG2Q) reproduces the input
        QD lat/lon to within the specified precision (all coordinates
        being converted to geo are converted to QD first and passed
        through APXG2Q).

    Returns
    =======
    f1 : (2, N) or (2,) ndarray
    f2 : (2, N) or (2,) ndarray

    References
    ==========
    .. [2] Richmond, A. D. (1995), Ionospheric Electrodynamics Using
       Magnetic Apex Coordinates, Journal of geomagnetism and
       geoelectricity, 47(2), 191–212, :doi:`10.5636/jgg.47.191`.

    .. [3] Emmert, J. T., A. D. Richmond, and D. P. Drob (2010), A
       computationally compact representation of Magnetic-Apex and
       Quasi-Dipole coordinates with smooth base vectors, J. Geophys.
       Res., 115(A8), A08322, :doi:`10.1029/2010JA015326`.
    """
glat, glon = self.convert(lat, lon, coords, 'geo', height=height,
                          precision=precision)
f1, f2 = self._basevec(glat, glon, height)

# if inputs are not scalar, each vector is an array of arrays,
# so reshape to a single array
if f1.dtype == object:
    f1 = np.vstack(f1).T
    f2 = np.vstack(f2).T

return f1, f2
<SYSTEM_TASK:> Calculate apex height <END_TASK> <USER_TASK:> Description:
def get_apex(self, lat, height=None):
    """Calculate apex height

    Parameters
    ----------
    lat : (float)
        Latitude in degrees
    height : (float or NoneType)
        Height above the surface of the earth in km or NoneType to
        use reference height (default=None)

    Returns
    -------
    apex_height : (float)
        Height of the field line apex in km
    """
lat = helpers.checklat(lat, name='alat')

if height is None:
    height = self.refh

cos_lat_squared = np.cos(np.radians(lat)) ** 2
apex_height = (self.RE + height) / cos_lat_squared - self.RE

return apex_height
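The body implements the dipole field-line relation h_A = (R_E + h) / cos^2(lat) - R_E. A worked check with illustrative values (R_E here is the nominal 6371 km Earth radius):

    import numpy as np

    R_E, h = 6371.0, 110.0          # km; values illustrative
    lam = np.radians(60)            # apex latitude of 60 degrees
    h_apex = (R_E + h) / np.cos(lam) ** 2 - R_E
    # cos(60) = 0.5, so h_apex = 6481 / 0.25 - 6371 ~= 19553 km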
<SYSTEM_TASK:> Updates the epoch for all subsequent conversions. <END_TASK> <USER_TASK:> Description:
def set_epoch(self, year):
    """Updates the epoch for all subsequent conversions.

    Parameters
    ==========
    year : float
        Decimal year
    """
fa.loadapxsh(self.datafile, np.float(year))
self.year = year
<SYSTEM_TASK:> A Reparse parser description. <END_TASK> <USER_TASK:> Description:
def parser(parser_type=basic_parser, functions=None, patterns=None,
           expressions=None, patterns_yaml_path=None,
           expressions_yaml_path=None):
    """A Reparse parser description.

    Simply provide the functions, patterns, & expressions to build.
    If you are using YAML for expressions + patterns, you can use
    ``expressions_yaml_path`` & ``patterns_yaml_path`` for
    convenience.

    The default parser_type is the basic ordered parser.
    """
from reparse.builders import build_all
from reparse.validators import validate

def _load_yaml(file_path):
    import yaml
    with open(file_path) as f:
        return yaml.safe_load(f)

assert expressions or expressions_yaml_path, \
    "Reparse can't build a parser without expressions"
assert patterns or patterns_yaml_path, \
    "Reparse can't build a parser without patterns"
assert functions, "Reparse can't build without functions"

if patterns_yaml_path:
    patterns = _load_yaml(patterns_yaml_path)
if expressions_yaml_path:
    expressions = _load_yaml(expressions_yaml_path)

validate(patterns, expressions)
return parser_type(build_all(patterns, expressions, functions))
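A hedged construction sketch; the YAML paths are hypothetical, and the schema inside those files is assumed to follow Reparse's documentation:

    import reparse

    my_parser = reparse.parser(
        functions={'my_func': lambda matches: matches},  # hypothetical
        patterns_yaml_path='patterns.yaml',              # hypothetical path
        expressions_yaml_path='expressions.yaml',        # hypothetical path
    )

    # The built (ordered) parser is then applied to input strings:
    results = my_parser("some input text")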
<SYSTEM_TASK:> Worker task to send out an email, which is a blocking process unless it is threaded <END_TASK> <USER_TASK:> Description: def send_mail_worker(config, mail, event): """Worker task to send out an email, which is a blocking process unless it is threaded"""
log = ""
try:
    if config.get('ssl', True):
        server = SMTP_SSL(config['server'], port=config['port'],
                          timeout=30)
    else:
        server = SMTP(config['server'], port=config['port'],
                      timeout=30)

    if config['tls']:
        log += 'Starting TLS\n'
        server.starttls()

    if config['username'] != '':
        log += 'Logging in with ' + str(config['username']) + "\n"
        server.login(config['username'], config['password'])
    else:
        log += 'No username, trying anonymous access\n'

    log += 'Sending Mail\n'
    response_send = server.send_message(mail)
    server.quit()
except timeout as e:
    log += 'Could not send email: ' + str(e) + "\n"
    return False, log, event

log += 'Server response: ' + str(response_send)

return True, log, event
<SYSTEM_TASK:> Provision a system user <END_TASK> <USER_TASK:> Description: def provision_system_user(items, database_name, overwrite=False, clear=False, skip_user_check=False): """Provision a system user"""
from hfos.provisions.base import provisionList
from hfos.database import objectmodels

# TODO: Add a root user and make sure owner can access it later.
# Setting up details and asking for a password here is not very
# useful, since this process is usually run automated.

if overwrite is True:
    hfoslog('Refusing to overwrite system user!', lvl=warn,
            emitter='PROVISIONS')
    overwrite = False

system_user_count = objectmodels['user'].count({'name': 'System'})
if system_user_count == 0 or clear is False:
    provisionList(Users, 'user', overwrite, clear,
                  skip_user_check=True)
    hfoslog('Provisioning: Users: Done.', emitter="PROVISIONS")
else:
    hfoslog('System user already present.', lvl=warn,
            emitter='PROVISIONS')
<SYSTEM_TASK:> Group expressions together with ``inbetweens`` and with the output of a ``final_function``. <END_TASK> <USER_TASK:> Description: def Group(expressions, final_function, inbetweens, name=""): """Group expressions together with ``inbetweens`` and with the output of a ``final_function``."""
lengths = []
functions = []
regex = ""
i = 0
for expression in expressions:
    regex += inbetweens[i]
    regex += "(?:" + expression.regex + ")"
    lengths.append(sum(expression.group_lengths))
    functions.append(expression.run)
    i += 1
regex += inbetweens[i]
return Expression(regex, functions, lengths, final_function, name)
<SYSTEM_TASK:> Parse string, returning all outputs as parsed by functions <END_TASK> <USER_TASK:> Description: def findall(self, string): """ Parse string, returning all outputs as parsed by functions """
output = []
for match in self.pattern.findall(string):
    # re.findall returns bare strings when the pattern has a single
    # group; normalize to a list for run()
    if hasattr(match, 'strip'):
        match = [match]
    self._list_add(output, self.run(match))
return output
<SYSTEM_TASK:> Like findall, but also returning matching start and end string locations <END_TASK> <USER_TASK:> Description: def scan(self, string): """ Like findall, but also returning matching start and end string locations """
return list(self._scanner_to_matches(self.pattern.scanner(string), self.run))
<SYSTEM_TASK:> Run group functions over matches <END_TASK> <USER_TASK:> Description: def run(self, matches): """ Run group functions over matches """
def _run(matches):
    group_starting_pos = 0
    for current_pos, (group_length, group_function) in enumerate(zip(self.group_lengths, self.group_functions)):
        # Each sub-expression consumes `group_length` regex groups; track
        # the running offset so each function sees only its own slice.
        start_pos = current_pos + group_starting_pos
        end_pos = current_pos + group_starting_pos + group_length
        yield group_function(matches[start_pos:end_pos])
        group_starting_pos += group_length - 1

return self.final_function(list(_run(matches)))
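To make the group bookkeeping concrete, a small sketch, assuming `Expression` compiles `regex` into `self.pattern` and that the import path below is correct (both are guesses from this section alone):

# Hypothetical sketch: a two-group expression where each group gets its own
# function and the final function combines the pieces.
from reparse.expression import Expression

to_int = lambda m: int(m[0])
expr = Expression(
    r"(\d+):(\d+)",              # two regex groups
    [to_int, to_int],            # one function per group
    [1, 1],                      # each consumes one group
    lambda parts: tuple(parts),  # final_function
    "clock",
)
print(expr.findall("meet at 12:30 or 9:15"))  # e.g. [(12, 30), (9, 15)]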
<SYSTEM_TASK:> Checks if a logged event is to be muted for debugging purposes. <END_TASK> <USER_TASK:> Description: def is_muted(what): """ Checks if a logged event is to be muted for debugging purposes. Also goes through the solo list - only items in there will be logged! :param what: :return: """
state = False

# If a solo list is active, only items matching it pass through;
# a solo match immediately wins over the remaining solo entries.
for item in solo:
    if item not in what:
        state = True
    else:
        state = False
        break

# Anything on the mute list is always muted.
for item in mute:
    if item in what:
        state = True
        break

return state
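A quick demo of the intended semantics, assuming `solo` and `mute` are module-level lists and `what` is the sequence of logged items:

# Hypothetical demo of the solo/mute semantics.
mute[:] = ['[NOISY-COMPONENT]']
solo[:] = []
print(is_muted(('[NOISY-COMPONENT]', 'chatter')))  # True  - muted
print(is_muted(('[IMPORTANT]', 'event')))          # False - logged

solo[:] = ['[IMPORTANT]']
print(is_muted(('[IMPORTANT]', 'event')))          # False - solo match wins
print(is_muted(('[OTHER]', 'event')))              # True  - not on solo list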
<SYSTEM_TASK:> Provisions the default system vessel <END_TASK> <USER_TASK:> Description: def provision_system_vessel(items, database_name, overwrite=False, clear=False, skip_user_check=False): """Provisions the default system vessel"""
from hfos.provisions.base import provisionList from hfos.database import objectmodels vessel = objectmodels['vessel'].find_one({'name': 'Default System Vessel'}) if vessel is not None: if overwrite is False: hfoslog('Default vessel already existing. Skipping provisions.') return else: vessel.delete() provisionList([SystemVessel], 'vessel', overwrite, clear, skip_user_check) sysconfig = objectmodels['systemconfig'].find_one({'active': True}) hfoslog('Adapting system config for default vessel:', sysconfig) sysconfig.vesseluuid = SystemVessel['uuid'] sysconfig.save() hfoslog('Provisioning: Vessel: Done.', emitter='PROVISIONS')
<SYSTEM_TASK:> Convert coordinates from TWD97 to WGS84 <END_TASK> <USER_TASK:> Description: def towgs84(E, N, pkm=False, presentation=None): """ Convert coordinates from TWD97 to WGS84 The east and north coordinates should be given in meters as floats pkm true for Penghu, Kinmen and Matsu area You can specify one of the following presentations of the returned values: dms - A tuple with degrees (int), minutes (int) and seconds (float) dmsstr - [+/-]DDD°MMM'SSS.SSSSS" (unicode) mindec - A tuple with degrees (int) and minutes (float) mindecstr - [+/-]DDD°MMM.MMMMM' (unicode) degdec (default) - DDD.DDDDD (float) """
_lng0 = lng0pkm if pkm else lng0

E /= 1000.0
N /= 1000.0
epsilon = (N-N0) / (k0*A)
eta = (E-E0) / (k0*A)

epsilonp = epsilon - beta1*sin(2*1*epsilon)*cosh(2*1*eta) - \
    beta2*sin(2*2*epsilon)*cosh(2*2*eta) - \
    beta3*sin(2*3*epsilon)*cosh(2*3*eta)
etap = eta - beta1*cos(2*1*epsilon)*sinh(2*1*eta) - \
    beta2*cos(2*2*epsilon)*sinh(2*2*eta) - \
    beta3*cos(2*3*epsilon)*sinh(2*3*eta)
sigmap = 1 - 2*1*beta1*cos(2*1*epsilon)*cosh(2*1*eta) - \
    2*2*beta2*cos(2*2*epsilon)*cosh(2*2*eta) - \
    2*3*beta3*cos(2*3*epsilon)*cosh(2*3*eta)
taup = 2*1*beta1*sin(2*1*epsilon)*sinh(2*1*eta) + \
    2*2*beta2*sin(2*2*epsilon)*sinh(2*2*eta) + \
    2*3*beta3*sin(2*3*epsilon)*sinh(2*3*eta)

chi = asin(sin(epsilonp) / cosh(etap))

latitude = chi + delta1*sin(2*1*chi) + \
    delta2*sin(2*2*chi) + \
    delta3*sin(2*3*chi)
longitude = _lng0 + atan(sinh(etap) / cos(epsilonp))

func = None
presentation = 'to%s' % presentation if presentation else None
if presentation in presentations:
    func = getattr(sys.modules[__name__], presentation)

# 'todegdec' is the default plain-float output, so only call a formatter for
# the other presentations. (The original compared the function object itself
# against the string 'todegdec', which could never match.)
if func and presentation != 'todegdec':
    return func(degrees(latitude)), func(degrees(longitude))

return (degrees(latitude), degrees(longitude))
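A usage sketch, assuming the module is installed and importable as `twd97`; the grid coordinates are illustrative values in meters:

# Hypothetical TWD97 grid coordinates (meters), roughly central Taiwan.
import twd97

print(twd97.towgs84(248170.0, 2652129.0))                      # (lat, lng) floats
print(twd97.towgs84(248170.0, 2652129.0, presentation='dms'))  # ((d, m, s), ...)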
<SYSTEM_TASK:> Convert coordinates from WGS84 to TWD97 <END_TASK> <USER_TASK:> Description: def fromwgs84(lat, lng, pkm=False): """ Convert coordinates from WGS84 to TWD97 pkm true for Penghu, Kinmen and Matsu area The latitude and longitude can be in the following formats: [+/-]DDD°MMM'SSS.SSSS" (unicode) [+/-]DDD°MMM.MMMM' (unicode) [+/-]DDD.DDDDD (string, unicode or float) The returned coordinates are in meters """
_lng0 = lng0pkm if pkm else lng0 lat = radians(todegdec(lat)) lng = radians(todegdec(lng)) t = sinh((atanh(sin(lat)) - 2*pow(n,0.5)/(1+n)*atanh(2*pow(n,0.5)/(1+n)*sin(lat)))) epsilonp = atan(t/cos(lng-_lng0)) etap = atan(sin(lng-_lng0) / pow(1+t*t, 0.5)) E = E0 + k0*A*(etap + alpha1*cos(2*1*epsilonp)*sinh(2*1*etap) + alpha2*cos(2*2*epsilonp)*sinh(2*2*etap) + alpha3*cos(2*3*epsilonp)*sinh(2*3*etap)) N = N0 + k0*A*(epsilonp + alpha1*sin(2*1*epsilonp)*cosh(2*1*etap) + alpha2*sin(2*2*epsilonp)*cosh(2*2*etap) + alpha3*sin(2*3*epsilonp)*cosh(2*3*etap)) return E*1000, N*1000
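A round-trip sketch pairing the two converters (same hypothetical `twd97` import as above):

# Round trip: WGS84 degrees -> TWD97 meters -> back to degrees.
import twd97

E, N = twd97.fromwgs84(23.973875, 120.982025)
print(E, N)                 # TWD97 meters
print(twd97.towgs84(E, N))  # should closely match the input degrees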
<SYSTEM_TASK:> Checks if an alert is ongoing and alerts the newly connected <END_TASK> <USER_TASK:> Description: def userlogin(self, event): """Checks if an alert is ongoing and alerts the newly connected client, if so."""
client_uuid = event.clientuuid self.log(event.user, pretty=True, lvl=verbose) self.log('Adding client') self.clients[event.clientuuid] = event.user for topic, alert in self.alerts.items(): self.alert(client_uuid, alert)
<SYSTEM_TASK:> Isomer Management Tool <END_TASK> <USER_TASK:> Description: def cli(ctx, instance, quiet, verbose, log_level, dbhost, dbname): """Isomer Management Tool This tool supports various operations to manage isomer instances. Most of the commands are grouped. To obtain more information about the groups' available sub commands/groups, try iso [group] To display details of a command or its sub groups, try iso [group] [subgroup] [..] [command] --help To get a map of all available commands, try iso cmdmap """
ctx.obj['instance'] = instance if dbname == db_default and instance != 'default': dbname = instance ctx.obj['quiet'] = quiet ctx.obj['verbose'] = verbose verbosity['console'] = log_level verbosity['global'] = log_level ctx.obj['dbhost'] = dbhost ctx.obj['dbname'] = dbname
<SYSTEM_TASK:> Primary entry point for all AstroCats catalogs. <END_TASK> <USER_TASK:> Description: def main(): """Primary entry point for all AstroCats catalogs. From this entry point, all internal catalogs can be accessed and their public methods executed (for example: import scripts). """
from datetime import datetime # Initialize Command-Line and User-Config Settings, Log # ----------------------------------------------------- beg_time = datetime.now() # Process command-line arguments to determine action # If no subcommand (e.g. 'import') is given, returns 'None' --> exit args, sub_clargs = load_command_line_args() if args is None: return # Create a logging object log = load_log(args) # Run configuration/setup interactive script if args.command == 'setup': setup_user_config(log) return # Make sure configuration file exists, or that's what we're doing # (with the 'setup' subcommand) if not os.path.isfile(_CONFIG_PATH): raise RuntimeError("'{}' does not exist. " "Run `astrocats setup` to configure." "".format(_CONFIG_PATH)) git_vers = get_git() title_str = "Astrocats, version: {}, SHA: {}".format(__version__, git_vers) log.warning("\n\n{}\n{}\n{}\n".format(title_str, '=' * len(title_str), beg_time.ctime())) # Load the user settings from the home directory args = load_user_config(args, log) # Choose Catalog and Operation(s) to perform # ------------------------------------------ mod_name = args.command log.debug("Importing specified module: '{}'".format(mod_name)) # Try to import the specified module try: mod = importlib.import_module('.' + mod_name, package='astrocats') except Exception as err: log.error("Import of specified module '{}' failed.".format(mod_name)) log_raise(log, str(err), type(err)) # Run the `main.main` method of the specified module log.debug("Running `main.main()`") mod.main.main(args, sub_clargs, log) end_time = datetime.now() log.warning("\nAll complete at {}, After {}".format(end_time, end_time - beg_time)) return
<SYSTEM_TASK:> Setup a configuration file in the user's home directory. <END_TASK> <USER_TASK:> Description: def setup_user_config(log): """Setup a configuration file in the user's home directory. Currently this method stores default values to a fixed configuration filename. It should be modified to run an interactive prompt session asking for parameters (or at least confirming the default ones). Arguments --------- log : `logging.Logger` object """
log.warning("AstroCats Setup") log.warning("Configure filepath: '{}'".format(_CONFIG_PATH)) # Create path to configuration file as needed config_path_dir = os.path.split(_CONFIG_PATH)[0] if not os.path.exists(config_path_dir): log.debug("Creating config directory '{}'".format(config_path_dir)) os.makedirs(config_path_dir) if not os.path.isdir(config_path_dir): log_raise(log, "Configure path error '{}'".format(config_path_dir)) # Determine default settings # Get this containing directory and use that as default data path def_base_path = os.path.abspath(os.path.dirname(os.path.abspath(__file__))) log.warning("Setting '{}' to default path: '{}'".format(_BASE_PATH_KEY, def_base_path)) config = {_BASE_PATH_KEY: def_base_path} # Write settings to configuration file json.dump(config, open(_CONFIG_PATH, 'w')) if not os.path.exists(def_base_path): log_raise(log, "Problem creating configuration file.") return
<SYSTEM_TASK:> Load settings from the user's configuration file, and add them to `args`. <END_TASK> <USER_TASK:> Description: def load_user_config(args, log): """Load settings from the user's configuration file, and add them to `args`. Settings are loaded from the configuration file in the user's home directory. Those parameters are added (as attributes) to the `args` object. Arguments --------- args : `argparse.Namespace` Namespace object to which configuration attributes will be added. Returns ------- args : `argparse.Namespace` Namespace object with added attributes. """
if not os.path.exists(_CONFIG_PATH):
    err_str = (
        "Configuration file does not exist ({}).\n".format(_CONFIG_PATH) +
        "Run `python -m astrocats setup` to configure.")
    log_raise(log, err_str)

config = json.load(open(_CONFIG_PATH, 'r'))
setattr(args, _BASE_PATH_KEY, config[_BASE_PATH_KEY])
log.debug("Loaded configuration: {}: {}".format(
    _BASE_PATH_KEY, config[_BASE_PATH_KEY]))
return args
<SYSTEM_TASK:> Load and parse command-line arguments. <END_TASK> <USER_TASK:> Description: def load_command_line_args(clargs=None): """Load and parse command-line arguments. Arguments --------- args : str or None 'Faked' commandline arguments passed to `argparse`. Returns ------- args : `argparse.Namespace` object Namespace in which settings are stored - default values modified by the given command-line arguments. """
import argparse git_vers = get_git() parser = argparse.ArgumentParser( prog='astrocats', description='Generate catalogs for astronomical data.') parser.add_argument('command', nargs='?', default=None) parser.add_argument( '--version', action='version', version='AstroCats v{}, SHA: {}'.format(__version__, git_vers)) parser.add_argument( '--verbose', '-v', dest='verbose', default=False, action='store_true', help='Print more messages to the screen.') parser.add_argument( '--debug', '-d', dest='debug', default=False, action='store_true', help='Print excessive messages to the screen.') parser.add_argument( '--include-private', dest='private', default=False, action='store_true', help='Include private data in import.') parser.add_argument( '--travis', '-t', dest='travis', default=False, action='store_true', help='Run import script in test mode for Travis.') parser.add_argument( '--clone-depth', dest='clone_depth', default=0, type=int, help=('When cloning git repos, only clone out to this depth ' '(default: 0 = all levels).')) parser.add_argument( '--purge-outputs', dest='purge_outputs', default=False, action='store_true', help=('Purge git outputs after cloning.')) parser.add_argument( '--log', dest='log_filename', default=None, help='Filename to which to store logging information.') # If output files should be written or not # ---------------------------------------- write_group = parser.add_mutually_exclusive_group() write_group.add_argument( '--write', action='store_true', dest='write_entries', default=True, help='Write entries to files [default].') write_group.add_argument( '--no-write', action='store_false', dest='write_entries', default=True, help='do not write entries to file.') # If previously cleared output files should be deleted or not # ----------------------------------------------------------- delete_group = parser.add_mutually_exclusive_group() delete_group.add_argument( '--predelete', action='store_true', dest='delete_old', default=True, help='Delete all old event files to begin [default].') delete_group.add_argument( '--no-predelete', action='store_false', dest='delete_old', default=True, help='Do not delete all old event files to start.') args, sub_clargs = parser.parse_known_args(args=clargs) # Print the help information if no command is given if args.command is None: parser.print_help() return None, None return args, sub_clargs
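Because `clargs` can be faked, the parser is easy to exercise directly; a sketch:

# Simulate a command line; flags defined here are consumed, anything
# unrecognized is passed through to the subcommand via `sub_clargs`.
args, sub_clargs = load_command_line_args(['supernovae', '--verbose', '--travis'])
print(args.command, args.verbose, args.travis)  # supernovae True True
print(sub_clargs)                               # []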
<SYSTEM_TASK:> Function compares dictionaries by key-value recursively. <END_TASK> <USER_TASK:> Description: def compare_dicts(old_full, new_full, old_data, new_data, depth=0): """Function compares dictionaries by key-value recursively. Old and new input data are both dictionaries """
depth = depth + 1
indent = " "*depth

# Print with an indentation matching the nested-dictionary depth
def my_print(str):
    print("{}{}".format(indent, str))

old_keys = list(old_data.keys())
# Compare data key by key, in *this* dictionary level
# Note: since we're comparing by keys explicitly, order doesn't matter
for key in old_keys:
    # Remove elements as we go
    old_vals = old_data.pop(key)

    # Current key
    my_print("{}".format(key))

    # If `new_data` doesn't also have this key, return False
    if key not in new_data:
        my_print("Key '{}' not in new_data.".format(key))
        my_print("Old:")
        pprint(old_data)
        my_print("New:")
        pprint(new_data)
        return False

    # If it does have the key, extract the values (remove as we go)
    new_vals = new_data.pop(key)

    # If these values are a sub-dictionary, compare those
    if isinstance(old_vals, dict) and isinstance(new_vals, dict):
        # If the sub-dictionaries are not the same, return False
        if not compare_dicts(old_full, new_full, old_vals, new_vals,
                             depth=depth):
            return False
    # If these values are a list of sub-dictionaries, compare each of those
    elif (isinstance(old_vals, list) and len(old_vals) and
          isinstance(old_vals[0], dict) and
          isinstance(new_vals, list) and len(new_vals) and
          isinstance(new_vals[0], dict)):
        for old_elem, new_elem in zip_longest(old_vals, new_vals):
            # If one or the other has extra elements, print message, but
            # continue on
            if old_elem is None or new_elem is None:
                my_print("Missing element!")
                my_print("\tOld: '{}'".format(old_elem))
                my_print("\tNew: '{}'".format(new_elem))
            else:
                if not compare_dicts(old_full, new_full, old_elem, new_elem,
                                     depth=depth):
                    return False
    # At the lowest-dictionary level, compare the values themselves
    else:
        # Turn everything into a list for convenience (most things should be
        # already)
        if (not isinstance(old_vals, list) and
                not isinstance(new_vals, list)):
            old_vals = [old_vals]
            new_vals = [new_vals]

        # Sort both lists
        old_vals = sorted(old_vals)
        new_vals = sorted(new_vals)

        for oldv, newv in zip_longest(old_vals, new_vals):
            # If one or the other has extra elements, print message, but
            # continue on
            if oldv is None or newv is None:
                my_print("Missing element!")
                my_print("\tOld: '{}'".format(oldv))
                my_print("\tNew: '{}'".format(newv))
            # If values match, continue
            elif oldv == newv:
                my_print("Good Match: '{}'".format(key))
            # If values don't match, return False
            else:
                my_print("Bad Match: '{}'".format(key))
                my_print("\tOld: '{}'".format(oldv))
                my_print("\tNew: '{}'".format(newv))
                return False

return True
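Since the comparison consumes its last two arguments via `pop()`, callers should hand it deep copies; a small sketch:

from copy import deepcopy

old = {'name': 'SN2011fe', 'photometry': [{'band': 'B'}, {'band': 'V'}]}
new = {'name': 'SN2011fe', 'photometry': [{'band': 'B'}, {'band': 'V'}]}

# `compare_dicts` pops keys from the last two arguments, so pass copies.
same = compare_dicts(old, new, deepcopy(old), deepcopy(new))
print(same)  # True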
<SYSTEM_TASK:> Clips a line to a rectangular area. <END_TASK> <USER_TASK:> Description: def cohensutherland(xmin, ymax, xmax, ymin, x1, y1, x2, y2): """Clips a line to a rectangular area. This implements the Cohen-Sutherland line clipping algorithm. xmin, ymax, xmax and ymin denote the clipping area, into which the line defined by x1, y1 (start point) and x2, y2 (end point) will be clipped. If the line does not intersect with the rectangular clipping area, four None values will be returned as tuple. Otherwise a tuple of the clipped line points will be returned in the form (cx1, cy1, cx2, cy2). """
INSIDE, LEFT, RIGHT, LOWER, UPPER = 0, 1, 2, 4, 8

def _getclip(xa, ya):
    p = INSIDE  # default is inside
    # consider x
    if xa < xmin:
        p |= LEFT
    elif xa > xmax:
        p |= RIGHT
    # consider y
    if ya < ymin:
        p |= LOWER  # bitwise OR
    elif ya > ymax:
        p |= UPPER  # bitwise OR
    return p

# check for trivially outside lines
k1 = _getclip(x1, y1)
k2 = _getclip(x2, y2)

# examine non-trivially outside points
# bitwise OR |
while (k1 | k2) != 0:
    # if both points are inside box (0000), ACCEPT trivial whole line in box
    # if line trivially outside window, REJECT
    if (k1 & k2) != 0:  # bitwise AND &
        return None, None, None, None

    # non-trivial case, at least one point outside window
    # this is not a bitwise or, it's the word "or"
    opt = k1 or k2  # take first non-zero point, short circuit logic
    if opt & UPPER:  # these are bitwise ANDs
        x = x1 + (x2 - x1) * (ymax - y1) / (y2 - y1)
        y = ymax
    elif opt & LOWER:
        x = x1 + (x2 - x1) * (ymin - y1) / (y2 - y1)
        y = ymin
    elif opt & RIGHT:
        y = y1 + (y2 - y1) * (xmax - x1) / (x2 - x1)
        x = xmax
    elif opt & LEFT:
        y = y1 + (y2 - y1) * (xmin - x1) / (x2 - x1)
        x = xmin
    else:
        raise RuntimeError('Undefined clipping state')

    if opt == k1:
        x1, y1 = x, y
        k1 = _getclip(x1, y1)
    elif opt == k2:
        x2, y2 = x, y
        k2 = _getclip(x2, y2)

return x1, y1, x2, y2
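For example, clipping the diagonal (-1,-1) to (5,5) against a 4x4 box keeps only the visible segment, while a line entirely outside is rejected:

# Clip window: xmin=0, ymax=4, xmax=4, ymin=0 (note the argument order).
print(cohensutherland(0, 4, 4, 0, -1, -1, 5, 5))  # (0.0, 0.0, 4.0, 4.0)
print(cohensutherland(0, 4, 4, 0, 5, 5, 6, 6))    # (None, None, None, None)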
<SYSTEM_TASK:> The legacy OpenCV Horn-Schunck functions require these old-fashioned cv matrices rather than numpy arrays <END_TASK> <USER_TASK:> Description: def setupuv(rc): """ The legacy OpenCV Horn-Schunck functions require these old-fashioned cv matrices rather than numpy arrays """
if cv is not None:
    (r, c) = rc
    u = cv.CreateMat(r, c, cv.CV_32FC1)
    v = cv.CreateMat(r, c, cv.CV_32FC1)
    return (u, v)
else:
    # Legacy cv bindings unavailable; return a matching two-tuple.
    return (None, None)
<SYSTEM_TASK:> Initialize a CatDict object, checking for errors. <END_TASK> <USER_TASK:> Description: def _init_cat_dict(self, cat_dict_class, key_in_self, **kwargs): """Initialize a CatDict object, checking for errors. """
# Catch errors associated with crappy, but not unexpected data try: new_entry = cat_dict_class(self, key=key_in_self, **kwargs) except CatDictError as err: if err.warn: self._log.info("'{}' Not adding '{}': '{}'".format(self[ self._KEYS.NAME], key_in_self, str(err))) return None return new_entry
<SYSTEM_TASK:> Add a CatDict to this Entry if initialization succeeds and it <END_TASK> <USER_TASK:> Description: def _add_cat_dict(self, cat_dict_class, key_in_self, check_for_dupes=True, **kwargs): """Add a CatDict to this Entry if initialization succeeds and it doesn't already exist within the Entry. """
# Try to create a new instance of this subclass of `CatDict` new_entry = self._init_cat_dict(cat_dict_class, key_in_self, **kwargs) if new_entry is None: return False # Compare this new entry with all previous entries to make sure is new if cat_dict_class != Error: for item in self.get(key_in_self, []): if new_entry.is_duplicate_of(item): item.append_sources_from(new_entry) # Return the entry in case we want to use any additional # tags to augment the old entry return new_entry self.setdefault(key_in_self, []).append(new_entry) return True
<SYSTEM_TASK:> Wrapper for `tqdm` progress bar. <END_TASK> <USER_TASK:> Description: def pbar(iter, desc='', **kwargs): """Wrapper for `tqdm` progress bar. """
return tqdm( iter, desc=('<' + str(datetime.now().strftime("%Y-%m-%d %H:%M:%S")) + '> ' + desc), dynamic_ncols=True, **kwargs)
<SYSTEM_TASK:> Wrapper for `tqdm` progress bar which also sorts list of strings <END_TASK> <USER_TASK:> Description: def pbar_strings(files, desc='', **kwargs): """Wrapper for `tqdm` progress bar which also sorts list of strings """
return tqdm( sorted(files, key=lambda s: s.lower()), desc=('<' + str(datetime.now().strftime("%Y-%m-%d %H:%M:%S")) + '> ' + desc), dynamic_ncols=True, **kwargs)
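Both wrappers drop straight into ordinary iteration; a sketch:

import time

for entry in pbar(range(5), desc='importing'):
    time.sleep(0.1)  # stand-in for per-entry work

for fname in pbar_strings(['B.json', 'a.json', 'C.json'], desc='files'):
    pass  # iterates in case-insensitive order: a.json, B.json, C.json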
<SYSTEM_TASK:> Get the task `priority` corresponding to the given `task_priority`. <END_TASK> <USER_TASK:> Description: def _get_task_priority(tasks, task_priority): """Get the task `priority` corresponding to the given `task_priority`. If `task_priority` is an integer or 'None', return it. If `task_priority` is a str, return the priority of the task it matches. Otherwise, raise `ValueError`. """
if task_priority is None: return None if is_integer(task_priority): return task_priority if isinstance(task_priority, basestring): if task_priority in tasks: return tasks[task_priority].priority raise ValueError("Unrecognized task priority '{}'".format(task_priority))
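A self-contained sketch of the three accepted forms, using a hypothetical stand-in for the task objects (only a `priority` attribute is needed):

from collections import namedtuple

Task = namedtuple('Task', ['priority'])
tasks = {'deleteoldentries': Task(priority=-10), 'internal': Task(priority=0)}

print(_get_task_priority(tasks, None))        # None
print(_get_task_priority(tasks, 5))           # 5
print(_get_task_priority(tasks, 'internal'))  # 0 (looked up from the task)
# _get_task_priority(tasks, 3.5)              # raises ValueError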
<SYSTEM_TASK:> Run all of the import tasks. <END_TASK> <USER_TASK:> Description: def import_data(self): """Run all of the import tasks. This is executed by the 'scripts.main.py' when the module is run as an executable. This can also be run as a method, in which case default arguments are loaded, but can be overridden using `**kwargs`. """
tasks_list = self.load_task_list() warnings.filterwarnings( 'ignore', r'Warning: converting a masked element to nan.') # FIX warnings.filterwarnings('ignore', category=DeprecationWarning) # Delete all old (previously constructed) output files if self.args.delete_old: self.log.warning("Deleting all old entry files.") self.delete_old_entry_files() # In update mode, load all entry stubs. if self.args.load_stubs or self.args.update: self.load_stubs() if self.args.travis: self.log.warning("Running in `travis` mode.") prev_priority = 0 prev_task_name = '' # for task, task_obj in tasks_list.items(): for task_name, task_obj in tasks_list.items(): if not task_obj.active: continue self.log.warning("Task: '{}'".format(task_name)) nice_name = task_obj.nice_name mod_name = task_obj.module func_name = task_obj.function priority = task_obj.priority # Make sure things are running in the correct order if priority < prev_priority and priority > 0: raise RuntimeError("Priority for '{}': '{}', less than prev," "'{}': '{}'.\n{}" .format(task_name, priority, prev_task_name, prev_priority, task_obj)) self.log.debug("\t{}, {}, {}, {}".format(nice_name, priority, mod_name, func_name)) mod = importlib.import_module('.' + mod_name, package='astrocats') self.current_task = task_obj getattr(mod, func_name)(self) num_events, num_stubs = self.count() self.log.warning("Task finished. Events: {}, Stubs: {}".format( num_events, num_stubs)) self.journal_entries() num_events, num_stubs = self.count() self.log.warning("Journal finished. Events: {}, Stubs: {}".format( num_events, num_stubs)) prev_priority = priority prev_task_name = task_name process = psutil.Process(os.getpid()) memory = process.memory_info().rss self.log.warning('Memory used (MBs): ' '{:,}'.format(memory / 1024. / 1024.)) return
<SYSTEM_TASK:> Find an existing entry in, or add a new one to, the `entries` dict. <END_TASK> <USER_TASK:> Description: def add_entry(self, name, load=True, delete=True): """Find an existing entry in, or add a new one to, the `entries` dict. FIX: rename to `create_entry`??? Returns ------- entries : OrderedDict of Entry objects newname : str Name of matching entry found in `entries`, or new entry added to `entries` """
newname = self.clean_entry_name(name) if not newname: raise (ValueError('Fatal: Attempted to add entry with no name.')) # If entry already exists, return if newname in self.entries: self.log.debug("`newname`: '{}' (name: '{}') already exists.". format(newname, name)) # If this is a stub, we need to continue, possibly load file if self.entries[newname]._stub: self.log.debug("'{}' is a stub".format(newname)) # If a full (non-stub) event exists, return its name else: self.log.debug("'{}' is not a stub, returning".format(newname)) return newname # If entry is alias of another entry in `entries`, find and return that match_name = self.find_entry_name_of_alias(newname) if match_name is not None: self.log.debug( "`newname`: '{}' (name: '{}') already exists as alias for " "'{}'.".format(newname, name, match_name)) newname = match_name # Load entry from file if load: loaded_name = self.load_entry_from_name(newname, delete=delete) if loaded_name: return loaded_name # If we match an existing event, return that if match_name is not None: return match_name # Create new entry new_entry = self.proto(catalog=self, name=newname) new_entry[self.proto._KEYS.SCHEMA] = self.SCHEMA.URL self.log.log(self.log._LOADED, "Created new entry for '{}'".format(newname)) # Add entry to dictionary self.entries[newname] = new_entry return newname
<SYSTEM_TASK:> Return the first entry name with the given 'alias' included in its <END_TASK> <USER_TASK:> Description: def find_entry_name_of_alias(self, alias): """Return the first entry name with the given 'alias' included in its list of aliases. Returns ------- name of matching entry (str) or 'None' if no matches """
if alias in self.aliases: name = self.aliases[alias] if name in self.entries: return name else: # Name wasn't found, possibly merged or deleted. Now look # really hard. for name, entry in self.entries.items(): aliases = entry.get_aliases(includename=False) if alias in aliases: if (ENTRY.DISTINCT_FROM not in entry or alias not in entry[ENTRY.DISTINCT_FROM]): return name return None
<SYSTEM_TASK:> Delete the file associated with the given entry. <END_TASK> <USER_TASK:> Description: def _delete_entry_file(self, entry_name=None, entry=None): """Delete the file associated with the given entry. """
if entry_name is None and entry is None:
    raise RuntimeError("Either `entry_name` or `entry` must be given.")
elif entry_name is not None and entry is not None:
    raise RuntimeError("Cannot use both `entry_name` and `entry`.")

if entry_name is not None:
    entry = self.entries[entry_name]
else:
    entry_name = entry[ENTRY.NAME]

# FIX: do we also need to check for gzipped files??
entry_filename = self.entry_filename(entry_name)

if self.args.write_entries:
    self.log.info("Deleting entry file '{}' of entry '{}'".format(
        entry_filename, entry_name))
    if not os.path.exists(entry_filename):
        self.log.error(
            "Filename '{}' does not exist".format(entry_filename))
    else:
        os.remove(entry_filename)
else:
    self.log.debug("Not deleting '{}' because `write_entries`"
                   " is False".format(entry_filename))

return
<SYSTEM_TASK:> Write all entries in `entries` to files and clear them, depending on arguments and `tasks`. <END_TASK> <USER_TASK:> Description: def journal_entries(self, clear=True, gz=False, bury=False, write_stubs=False, final=False): """Write all entries in `entries` to files and clear them, depending on arguments and `tasks`. Iterates over all elements of `entries`, saving (possibly 'burying') and deleting. - If ``clear == True``, then each element of `entries` is deleted, and a `stubs` entry is added """
# if (self.current_task.priority >= 0 and # self.current_task.priority < self.min_journal_priority): # return # Write it all out! # NOTE: this needs to use a `list` wrapper to allow modification of # dict for name in list(self.entries.keys()): if self.args.write_entries: # If this is a stub and we aren't writing stubs, skip if self.entries[name]._stub and not write_stubs: continue # Bury non-SN entries here if only claimed type is non-SN type, # or if primary name starts with a non-SN prefix. bury_entry = False save_entry = True if bury: (bury_entry, save_entry) = self.should_bury(name) if save_entry: save_name = self.entries[name].save( bury=bury_entry, final=final) self.log.info( "Saved {} to '{}'.".format(name.ljust(20), save_name)) if (gz and os.path.getsize(save_name) > self.COMPRESS_ABOVE_FILESIZE): save_name = compress_gz(save_name) self.log.debug( "Compressed '{}' to '{}'".format(name, save_name)) # FIX: use subprocess outdir, filename = os.path.split(save_name) filename = filename.split('.')[0] os.system('cd ' + outdir + '; git rm --cached ' + filename + '.json; git add -f ' + filename + '.json.gz; cd ' + self.PATHS.PATH_BASE) if clear: self.entries[name] = self.entries[name].get_stub() self.log.debug("Entry for '{}' converted to stub".format(name)) return
<SYSTEM_TASK:> Choose between each entries given name and its possible aliases for <END_TASK> <USER_TASK:> Description: def set_preferred_names(self): """Choose between each entries given name and its possible aliases for the best one. """
if len(self.entries) == 0: self.log.error("WARNING: `entries` is empty, loading stubs") self.load_stubs() task_str = self.get_current_task_str() for ni, oname in enumerate(pbar(self.entries, task_str)): name = self.add_entry(oname) self.entries[name].set_preferred_name() if self.args.travis and ni > self.TRAVIS_QUERY_LIMIT: break return
<SYSTEM_TASK:> Get a list of files which should be added to the given repository. <END_TASK> <USER_TASK:> Description: def _prep_git_add_file_list(self, repo, size_limit, fail=True, file_types=None): """Get a list of files which should be added to the given repository. Notes ----- * Finds files in the *root* of the given repository path. * If `file_types` is given, only use those file types. * If an uncompressed file is above the `size_limit`, it is compressed. * If a compressed file is above the file limit, an error is raised (if `fail = True`) or it is skipped (if `fail == False`). Arguments --------- repo : str Path to repository size_limit : scalar fail : bool Raise an error if a compressed file is still above the size limit. file_types : list of str or None Exclusive list of file types to add. 'None' to add all filetypes. """
add_files = []
if file_types is None:
    file_patterns = ['*']
else:
    self.log.error(
        "WARNING: uncertain behavior with specified file types!")
    file_patterns = ['*.' + ft for ft in file_types]

# Construct glob patterns for each file-type
file_patterns = [os.path.join(repo, fp) for fp in file_patterns]
for pattern in file_patterns:
    file_list = glob(pattern)
    for ff in file_list:
        fsize = os.path.getsize(ff)
        fname = str(ff)
        comp_failed = False
        # If the found file is too large
        if fsize > size_limit:
            self.log.debug("File '{}' size '{}' MB.".format(
                fname, fsize / 1024 / 1024))
            # If the file is already compressed... fail or skip
            if ff.endswith('.gz'):
                self.log.error(
                    "File '{}' is already compressed.".format(fname))
                comp_failed = True
            # Not yet compressed - compress it
            else:
                fname = compress_gz(fname)
                fsize = os.path.getsize(fname)
                self.log.info("Compressed to '{}', size '{}' MB".
                              format(fname, fsize / 1024 / 1024))
                # If still too big, fail or skip
                if fsize > size_limit:
                    comp_failed = True

        # If compressed file is too large, skip file or raise error
        if comp_failed:
            # Raise an error
            if fail:
                raise RuntimeError(
                    "File '{}' cannot be added!".format(fname))
            # Skip file without adding it
            self.log.info("Skipping file.")
            continue

        # If everything is good, add file to list
        add_files.append(fname)

return add_files
<SYSTEM_TASK:> Download text from the given url. <END_TASK> <USER_TASK:> Description: def download_url(self, url, timeout, fail=False, post=None, verify=True): """Download text from the given url. Returns `None` on failure. Arguments --------- self url : str URL web address to download. timeout : int Duration after which URL request should terminate. fail : bool If `True`, then an error will be raised on failure. If `False`, then 'None' is returned on failure. post : dict List of arguments to post to URL when requesting it. verify : bool Whether to check for valid SSL cert when downloading Returns ------- url_txt : str or None On success the text of the url is returned. On failure `None` is returned. """
_CODE_ERRORS = [500, 307, 404]

import requests
session = requests.Session()

try:
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X '
                      '10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/39.0.2171.95 Safari/537.36'
    }
    if post:
        response = session.post(
            url, timeout=timeout, headers=headers, data=post,
            verify=verify)
    else:
        response = session.get(
            url, timeout=timeout, headers=headers, verify=verify)
    response.raise_for_status()

    # Look for errors in the redirect history as well
    for xx in response.history:
        xx.raise_for_status()
        if xx.status_code in _CODE_ERRORS:
            self.log.error("URL response returned status code '{}'".
                           format(xx.status_code))
            # Raise explicitly (a bare `raise` here has no active
            # exception); the outer handler below deals with it.
            raise RuntimeError("URL response returned status code '{}'".
                               format(xx.status_code))

    url_txt = response.text
    self.log.debug("Task {}: Loaded `url_txt` from '{}'.".format(
        self.current_task.name, url))
except (KeyboardInterrupt, SystemExit):
    raise
except Exception as err:
    err_str = ("URL Download of '{}' failed ('{}')."
               .format(url, str(err)))
    # Raise an error on failure
    if fail:
        err_str += " and `fail` is set."
        self.log.error(err_str)
        raise RuntimeError(err_str)
    # Log a warning on error, and return None
    else:
        self.log.warning(err_str)
        return None

return url_txt
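Typical call sites pass `fail=False` and check for `None`; a sketch where `catalog` is a Catalog instance and the URL is a placeholder:

# Sketch only: hypothetical URL; `catalog` is an existing Catalog instance.
html = catalog.download_url('https://example.com/sources.html',
                            timeout=60, fail=False)
if html is None:
    catalog.log.warning('Download failed; skipping this source.')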
<SYSTEM_TASK:> Merge the source alias lists of two CatDicts. <END_TASK> <USER_TASK:> Description: def append_sources_from(self, other): """Merge the source alias lists of two CatDicts."""
# Get aliases lists from this `CatDict` and other self_aliases = self[self._KEYS.SOURCE].split(',') other_aliases = other[self._KEYS.SOURCE].split(',') # Store alias to `self` self[self._KEYS.SOURCE] = uniq_cdl(self_aliases + other_aliases) return
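Concretely, for two quanta with overlapping comma-delimited source aliases (assuming `uniq_cdl` joins the unique aliases back into a single comma-delimited string):

# Hypothetical CatDicts whose source fields are comma-delimited alias lists.
photo_a['source'] = '1,3'
photo_b['source'] = '3,4'
photo_a.append_sources_from(photo_b)
print(photo_a['source'])  # '1,3,4'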
<SYSTEM_TASK:> Name of current action for progress-bar output. <END_TASK> <USER_TASK:> Description: def current_task(self, args): """Name of current action for progress-bar output. The specific task string depends on the configuration via `args`. Returns ------- ctask : str String representation of this task. """
ctask = self.nice_name if self.nice_name is not None else self.name if args is not None: if args.update: ctask = ctask.replace('%pre', 'Updating') else: ctask = ctask.replace('%pre', 'Loading') return ctask
<SYSTEM_TASK:> Whether previously archived data should be loaded. <END_TASK> <USER_TASK:> Description: def load_archive(self, args): """Whether previously archived data should be loaded. """
import warnings warnings.warn("`Task.load_archive()` is deprecated! " "`Catalog.load_url` handles the same functionality.") return self.archived or args.archived
<SYSTEM_TASK:> Add all files in each data repository tree, commit, push. <END_TASK> <USER_TASK:> Description: def git_add_commit_push_all_repos(cat): """Add all files in each data repository tree, commit, push. Creates a commit message based on the current catalog version info. If either the `git add` or `git push` commands fail, an error will be raised. Currently, if `commit` fails, an error *WILL NOT* be raised, because the `commit` command returns a nonzero exit status when there are no files to add, which we don't want to treat as an error. FIX: improve the error checking on this. """
log = cat.log
log.debug("gitter.git_add_commit_push_all_repos()")

# Do not commit/push private repos
all_repos = cat.PATHS.get_all_repo_folders(private=False)
for repo in all_repos:
    log.info("Repo in: '{}'".format(repo))
    # Get the initial git SHA
    sha_beg = get_sha(repo)
    log.debug("Current SHA: '{}'".format(sha_beg))

    # Get files that should be added, compress and check sizes
    add_files = cat._prep_git_add_file_list(repo,
                                            cat.COMPRESS_ABOVE_FILESIZE)
    log.info("Found {} Files to add.".format(len(add_files)))
    if len(add_files) == 0:
        continue

    try:
        # Add all files in the repository directory tree
        git_comm = ["git", "add"]
        if cat.args.travis:
            git_comm.append("-f")
        git_comm.extend(add_files)
        _call_command_in_repo(
            git_comm, repo, cat.log, fail=True, log_flag=False)

        # Commit these files
        commit_msg = "'push' - adding all files."
        commit_msg = "{} : {}".format(cat._version_long, commit_msg)
        log.info(commit_msg)
        git_comm = ["git", "commit", "-am", commit_msg]
        _call_command_in_repo(git_comm, repo, cat.log)

        # Push the commit to the remote
        git_comm = ["git", "push"]
        if not cat.args.travis:
            _call_command_in_repo(git_comm, repo, cat.log, fail=True)
    except Exception as err:
        try:
            git_comm = ["git", "reset", "HEAD"]
            _call_command_in_repo(git_comm, repo, cat.log, fail=True)
        except Exception:
            pass

        raise err

return
<SYSTEM_TASK:> Perform a 'git pull' in each data repository. <END_TASK> <USER_TASK:> Description: def git_pull_all_repos(cat, strategy_recursive=True, strategy='theirs'): """Perform a 'git pull' in each data repository. > `git pull -s recursive -X theirs` """
# raise RuntimeError("THIS DOESNT WORK YET!")
log = cat.log
log.debug("gitter.git_pull_all_repos()")
log.warning("WARNING: using experimental `git_pull_all_repos()`!")

all_repos = cat.PATHS.get_all_repo_folders()
for repo_name in all_repos:
    log.info("Repo in: '{}'".format(repo_name))
    # Get the initial git SHA
    sha_beg = get_sha(repo_name)
    log.debug("Current SHA: '{}'".format(sha_beg))

    # Initialize the git repository
    repo = git.Repo(repo_name)

    # Construct the command to call
    git_comm = "git pull --verbose"
    if strategy_recursive:
        git_comm += " -s recursive"
    if strategy is not None:
        git_comm += " -X {:s}".format(strategy)
    log.debug("Calling '{}'".format(git_comm))

    # Call git command (do this manually to use desired options)
    # Set `with_exceptions=False` to handle errors ourselves (below)
    code, out, err = repo.git.execute(
        git_comm.split(),
        with_stdout=True,
        with_extended_output=True,
        with_exceptions=False)

    # Handle output of git command
    if len(out):
        log.info(out)
    if len(err):
        log.info(err)

    # Handle error codes
    if code != 0:
        err_str = "Command '{}' returned exit code '{}'!".format(git_comm,
                                                                 code)
        err_str += "\n\tout: '{}'\n\terr: '{}'".format(out, err)
        log.error(err_str)
        raise RuntimeError(err_str)

    sha_end = get_sha(repo_name)
    if sha_end != sha_beg:
        log.info("Updated SHA: '{}'".format(sha_end))

return
<SYSTEM_TASK:> Perform a 'git clone' for each data repository that doesn't exist. <END_TASK> <USER_TASK:> Description: def git_clone_all_repos(cat): """Perform a 'git clone' for each data repository that doesn't exist. """
log = cat.log log.debug("gitter.git_clone_all_repos()") all_repos = cat.PATHS.get_all_repo_folders() out_repos = cat.PATHS.get_repo_output_folders() for repo in all_repos: log.info("Repo in: '{}'".format(repo)) if os.path.isdir(repo): log.info("Directory exists.") else: log.debug("Cloning directory...") clone(repo, cat.log, depth=max(cat.args.clone_depth, 1)) if cat.args.purge_outputs and repo in out_repos: for fil in glob(os.path.join(repo, '*.json')): os.remove(fil) grepo = git.cmd.Git(repo) try: grepo.status() except git.GitCommandError: log.error("Repository does not exist!") raise # Get the initial git SHA sha_beg = get_sha(repo) log.debug("Current SHA: '{}'".format(sha_beg)) return
<SYSTEM_TASK:> Perform a 'git reset' in each data repository. <END_TASK> <USER_TASK:> Description: def git_reset_all_repos(cat, hard=True, origin=False, clean=True): """Perform a 'git reset' in each data repository. """
log = cat.log log.debug("gitter.git_reset_all_repos()") all_repos = cat.PATHS.get_all_repo_folders() for repo in all_repos: log.warning("Repo in: '{}'".format(repo)) # Get the initial git SHA sha_beg = get_sha(repo) log.debug("Current SHA: '{}'".format(sha_beg)) grepo = git.cmd.Git(repo) # Fetch first log.info("fetching") grepo.fetch() args = [] if hard: args.append('--hard') if origin: args.append('origin/master') log.info("resetting") retval = grepo.reset(*args) if len(retval): log.warning("Git says: '{}'".format(retval)) # Clean if clean: log.info("cleaning") # [q]uiet, [f]orce, [d]irectories retval = grepo.clean('-qdf') if len(retval): log.warning("Git says: '{}'".format(retval)) sha_end = get_sha(repo) if sha_end != sha_beg: log.debug("Updated SHA: '{}'".format(sha_end)) return