text_prompt
stringlengths
100
17.7k
code_prompt
stringlengths
7
9.86k
def serve_forever(django=False):
    """
    Starts the gevent-socketio server.

    :param django: when True, the WSGI app serves the Django project
                   rather than the stand-alone chat interface.
    """
    logger = getLogger("irc.dispatch")
    logger.setLevel(settings.LOG_LEVEL)
    logger.addHandler(StreamHandler())
    app = IRCApplication(django)
    server = SocketIOServer((settings.HTTP_HOST, settings.HTTP_PORT), app)
    # print() call (not a Python 2 print statement) keeps this module
    # importable under both Python 2 and 3.
    print("%s [Bot: %s] listening on %s:%s" % (
        settings.GNOTTY_VERSION_STRING,
        app.bot.__class__.__name__,
        settings.HTTP_HOST,
        settings.HTTP_PORT,
    ))
    server.serve_forever()
def kill(pid_file):
    """
    Attempts to shut down a previously started daemon.

    Reads the process ID from ``pid_file``, sends it SIGKILL (9) and
    removes the pid file.

    :param pid_file: path to the file holding the daemon's PID
    :return: True if the daemon was signalled, False otherwise
    """
    try:
        with open(pid_file) as f:
            os.kill(int(f.read()), 9)  # 9 == SIGKILL
        os.remove(pid_file)
    except (IOError, OSError, ValueError):
        # IOError: missing/unreadable pid file; OSError: no such process;
        # ValueError: pid file contains something that isn't an integer
        # (previously this propagated uncaught).
        return False
    return True
def run():
    """
    CLI entry point. Parses args and starts the gevent-socketio server.

    Also handles daemonizing, and killing a previously started daemon
    when the kill flag was given.
    """
    settings.parse_args()
    pid_name = "gnotty-%s-%s.pid" % (settings.HTTP_HOST, settings.HTTP_PORT)
    pid_file = settings.PID_FILE or os.path.join(gettempdir(), pid_name)
    if settings.KILL:
        # print() keeps this runnable on both Python 2 and 3.
        if kill(pid_file):
            print("Daemon killed")
        else:
            print("Could not kill any daemons")
        return
    elif kill(pid_file):
        print("Running daemon killed")
    if settings.DAEMON:
        daemonize(pid_file)
    serve_forever()
def on_start(self, host, port, channel, nickname, password):
    """
    A WebSocket session has started - create a greenlet to host
    the IRC client, and start it.
    """
    client = WebSocketIRCClient(host, port, channel, nickname,
                                password, self)
    self.client = client
    self.spawn(client.start)
def disconnect(self, *args, **kwargs):
    """
    WebSocket was disconnected - leave the IRC channel with a
    version-string quit message before tearing down the namespace.
    """
    farewell = "%s %s" % (settings.GNOTTY_VERSION_STRING,
                          settings.GNOTTY_PROJECT_URL)
    self.client.connection.quit(farewell)
    super(IRCNamespace, self).disconnect(*args, **kwargs)
def respond_webhook(self, environ):
    """
    Passes the request onto a bot with a webhook if the webhook
    path is requested.

    :param environ: WSGI environ dict for the request
    :return: an HTTP status code, or the bot's own response
    """
    request = FieldStorage(fp=environ["wsgi.input"], environ=environ)
    url = environ["PATH_INFO"]
    params = dict([(k, request[k].value) for k in request])
    try:
        if self.bot is None:
            raise NotImplementedError
        response = self.bot.handle_webhook_event(environ, url, params)
    except NotImplementedError:
        # No bot configured, or the bot doesn't implement webhooks.
        return 404
    except Exception:
        # Was a bare ``except:`` which would also swallow SystemExit and
        # KeyboardInterrupt - narrowed to Exception.
        self.logger.debug(format_exc())
        return 500
    return response or 200
def respond_static(self, environ):
    """
    Serves a static file when Django isn't being used.

    :param environ: WSGI environ dict for the request
    :return: 404 on a missing file, otherwise a
             (status, headers, content) triple
    """
    path = os.path.normpath(environ["PATH_INFO"])
    if path == "/":
        content = self.index()
        content_type = "text/html"
    else:
        path = os.path.join(os.path.dirname(__file__), path.lstrip("/"))
        try:
            # Binary mode - text mode would corrupt images and other
            # non-text assets.
            with open(path, "rb") as f:
                content = f.read()
        except IOError:
            return 404
        # guess_type() returns None for unknown extensions, which is not
        # a valid header value - fall back to a generic type.
        content_type = guess_type(path)[0] or "application/octet-stream"
    return (200, [("Content-Type", content_type)], content)
def index(self):
    """
    Loads the chat interface template when Django isn't being
    used, manually dealing with the Django template bits.
    """
    root_dir = os.path.dirname(__file__)
    template_dir = os.path.join(root_dir, "templates", "gnotty")
    with open(os.path.join(template_dir, "base.html"), "r") as f:
        base = f.read()
    with open(os.path.join(template_dir, "chat.html"), "r") as f:
        # Inject the chat template into the base template's content block.
        base = base.replace("{% block content %}", f.read())
    replace = {
        "{% block content %}": "",
        "{% block extrahead %}": "",
        "{% endblock %}": "",
        "{% load gnotty_tags %}": "",
        "{% extends \"gnotty/base.html\" %}": "",
        "{% url gnotty_chat %}": "/",
        "{% gnotty_nav %}": "",
        "{% templatetag openvariable %}": "{{",
        "{% templatetag closevariable %}": "}}",
    }
    for k, v in replace.items():
        base = base.replace(k, v)
    try:
        text_type = unicode  # Python 2
    except NameError:
        text_type = str  # Python 3: ``unicode`` no longer exists
    for k, v in settings.items():
        base = base.replace("{{ %s }}" % k, text_type(v or ""))
    return base
def authorized(self, environ):
    """
    If we're running Django and ``GNOTTY_LOGIN_REQUIRED`` is set
    to ``True``, pull the session cookie from the environment and
    validate that the user is authenticated.

    :param environ: WSGI environ dict for the request
    :return: True when access is allowed, False otherwise
    """
    if not (self.django and settings.LOGIN_REQUIRED):
        # Auth check disabled - everyone is allowed.
        return True
    # Import separately: in the original, an ImportError raised on the
    # first django import left ObjectDoesNotExist unbound, making the
    # shared except clause itself raise NameError.
    try:
        from django.conf import settings as django_settings
        from django.contrib.auth import SESSION_KEY
        from django.contrib.auth.models import User
        from django.contrib.sessions.models import Session
        from django.core.exceptions import ObjectDoesNotExist
    except ImportError:
        return False
    try:
        cookie = SimpleCookie(environ["HTTP_COOKIE"])
        cookie_name = django_settings.SESSION_COOKIE_NAME
        session_key = cookie[cookie_name].value
        session = Session.objects.get(session_key=session_key)
        user_id = session.get_decoded().get(SESSION_KEY)
        user = User.objects.get(id=user_id)
    except (KeyError, ObjectDoesNotExist):
        # Cookie missing, or session/user not found.
        return False
    return True
def base_query(cls, db_session=None):
    """
    returns base query for specific service

    :param db_session:
    :return: query
    """
    session = get_db_session(db_session)
    return session.query(cls.model)
def on(event, *args, **kwargs):
    """
    Event method wrapper for bot mixins. When a bot is constructed,
    its metaclass inspects all members of all base classes, and
    looks for methods marked with an event attribute which is
    assigned via this wrapper. It then stores all the methods in a
    dict that maps event names to lists of these methods, which are
    each called when the event occurs.
    """
    def wrapper(func):
        # Fold positional args into kwargs keyed by position. The
        # original ``for i, arg in args`` tried to unpack each positional
        # arg as a pair and raised TypeError for non-pair arguments -
        # enumerate() is what was intended.
        for i, arg in enumerate(args):
            kwargs[i] = arg
        func.event = Event(event, kwargs)
        return func
    return wrapper
def get_dict(self, exclude_keys=None, include_keys=None):
    """
    return dictionary of keys and values corresponding to this model's
    data - if include_keys is null the function will return all keys

    :param exclude_keys: (optional) is a list of columns from model that
        should not be returned by this function
    :param include_keys: (optional) is a list of columns from model that
        should be returned by this function
    :return:
    """
    excluded = exclude_keys if exclude_keys is not None else []
    included = include_keys if include_keys is not None else []
    return dict(
        (key, getattr(self, key))
        for key in self._get_keys()
        if key not in excluded and (key in included or not include_keys)
    )
def get_appstruct(self):
    """
    return list of tuples keys and values corresponding to this model's
    data
    """
    return [(key, getattr(self, key)) for key in self._get_keys()]
def delete(self, db_session=None):
    """
    Deletes the object via session, this will permanently delete the
    object from storage on commit

    :param db_session:
    :return:
    """
    session = get_db_session(db_session, self)
    session.delete(self)
def power_up(self):
    """
    power up the HX711

    :return: always True
    :rtype bool
    """
    # Pulling PD_SCK low wakes the chip; give it 10 ms to settle.
    GPIO.output(self._pd_sck, False)
    time.sleep(0.01)
    return True
def reset(self):
    """
    reset the HX711 and prepare it for the next reading

    :return: True on success
    :rtype bool
    :raises GenericHX711Exception
    """
    logging.debug("power down")
    self.power_down()
    logging.debug("power up")
    self.power_up()
    logging.debug("read some raw data")
    # A throwaway batch of readings confirms the chip responds again.
    if self.get_raw_data(6) is False:
        raise GenericHX711Exception("failed to reset HX711")
    return True
<SYSTEM_TASK:> check if "times" is within the borders defined in the class <END_TASK> <USER_TASK:> Description: def _validate_measure_count(self, times): """ check if "times" is within the borders defined in the class :param times: "times" to check :type times: int """
if not self.min_measures <= times <= self.max_measures: raise ParameterValidationError( "{times} is not within the borders defined in the class".format( times=times ) )
<SYSTEM_TASK:> validate a given value for gain_A <END_TASK> <USER_TASK:> Description: def _validate_gain_A_value(self, gain_A): """ validate a given value for gain_A :type gain_A: int :raises: ValueError """
if gain_A not in self._valid_gains_for_channel_A: raise ParameterValidationError("{gain_A} is not a valid gain".format(gain_A=gain_A))
def _set_channel_gain(self, num):
    """
    Finish data transmission from HX711 by setting
    next required gain and channel

    Only called from the _read function.

    :param num: how often so do the set (1...3)
    :type num: int
    :return True on success
    :rtype bool
    :raises AttributeError: if num is outside 1..3
    :raises GenericHX711Exception: if the recovery read fails
    """
    if not 1 <= num <= 3:
        raise AttributeError(
            """"num" has to be in the range of 1 to 3"""
        )
    # Each extra PD_SCK pulse after a 24-bit read selects the channel
    # and gain for the NEXT conversion (25/26/27 pulses total).
    for _ in range(num):
        logging.debug("_set_channel_gain called")
        start_counter = time.perf_counter()  # start timer now.
        GPIO.output(self._pd_sck, True)  # set high
        GPIO.output(self._pd_sck, False)  # set low
        end_counter = time.perf_counter()  # stop timer
        time_elapsed = float(end_counter - start_counter)
        # check if HX711 did not turn off...
        # if pd_sck pin is HIGH for 60 µs and more the HX 711 enters power down mode.
        if time_elapsed >= 0.00006:
            logging.warning(
                'setting gain and channel took more than 60µs. '
                'Time elapsed: {:0.8f}'.format(time_elapsed)
            )
            # hx711 has turned off. First few readings are inaccurate.
            # Despite this reading was ok and data can be used.
            result = self.get_raw_data(times=6)  # set for the next reading.
            if result is False:
                raise GenericHX711Exception("channel was not set properly")
    return True
def get_raw_data(self, times=5):
    """
    do some readings and aggregate them using the defined statistics function

    :param times: how many measures to aggregate
    :type times: int
    :return: the aggregate of the measured values
    :rtype float
    """
    self._validate_measure_count(times)
    readings = []
    while len(readings) < times:
        measure = self._read()
        # False and -1 both signal a failed read; keep retrying until
        # we have the requested number of good measurements.
        if measure not in [False, -1]:
            readings.append(measure)
    return readings
def shift_ordering_down(
    self, parent_id, position, db_session=None, *args, **kwargs
):
    """
    Shifts ordering to "close gaps" after node deletion or being moved
    to another branch, begins the shift from given position

    :param parent_id:
    :param position:
    :param db_session:
    :return:
    """
    return self.service.shift_ordering_down(
        *args,
        parent_id=parent_id,
        position=position,
        db_session=db_session,
        **kwargs
    )
def flatten_list(l: List[list]) -> list:
    """ takes a list of lists, l and returns a flat list """
    flat = []
    for inner in l:
        flat.extend(inner)
    return flat
def read_nem_file(file_path: str) -> NEMFile:
    """ Read in NEM file and return meter readings named tuple

    :param file_path: The NEM file to process (plain CSV or a .zip
        containing one)
    :returns: The file that was created
    """
    _, file_extension = os.path.splitext(file_path)
    if file_extension.lower() == '.zip':
        with zipfile.ZipFile(file_path, 'r') as archive:
            # NOTE(review): only the FIRST member of the archive is
            # parsed - the return exits on the first iteration. An empty
            # archive falls through to the plain-text branch below and
            # would try to parse the zip as text - confirm intended.
            for csv_file in archive.namelist():
                with archive.open(csv_file) as csv_text:
                    # Zip file is open in binary mode
                    # So decode then convert back to list
                    nmi_file = csv_text.read().decode('utf-8').splitlines()
                    reader = csv.reader(nmi_file, delimiter=',')
                    return parse_nem_rows(reader, file_name=csv_file)
    with open(file_path) as nmi_file:
        return parse_nem_file(nmi_file)
def calculate_manual_reading(basic_data: BasicMeterData) -> Reading:
    """ Calculate the interval between two manual readings """
    # Positional order matches Reading: t_start, t_end, value, uom,
    # quality_method, event_code, event_desc, read_start, read_end.
    return Reading(
        basic_data.previous_register_read_datetime,
        basic_data.current_register_read_datetime,
        basic_data.quantity,
        basic_data.uom,
        basic_data.current_quality_method,
        "",
        "",
        basic_data.previous_register_read,
        basic_data.current_register_read,
    )
def update_reading_events(readings, event_record):
    """ Updates readings from a 300 row to reflect any events found in a
    subsequent 400 row """
    # event intervals are 1-indexed
    first = event_record.start_interval - 1
    last = event_record.end_interval
    for idx in range(first, last):
        old = readings[idx]
        readings[idx] = Reading(
            t_start=old.t_start,
            t_end=old.t_end,
            read_value=old.read_value,
            uom=old.uom,
            quality_method=event_record.quality_method,
            event_code=event_record.reason_code,
            event_desc=event_record.reason_description,
            read_start=old.read_start,
            read_end=old.read_end,
        )
    return readings
def parse_datetime(record: str) -> Optional[datetime]:
    """ Parse a datetime string into a python datetime object

    Accepts the three NEM formats Date8, DateTime12 and DateTime14.

    :param record: raw date string (may be padded with whitespace)
    :return: the parsed datetime, or None for an empty record
    :raises KeyError: if the stripped record has an unexpected length
    """
    # NEM defines Date8, DateTime12 and DateTime14
    format_strings = {8: '%Y%m%d', 12: '%Y%m%d%H%M', 14: '%Y%m%d%H%M%S'}
    # Strip once up front: the original compared the *unstripped* record
    # against '' but keyed the formats on the stripped length, so a
    # whitespace-only record raised KeyError instead of returning None.
    record = record.strip()
    if record == '':
        return None
    return datetime.strptime(record, format_strings[len(record)])
def color(nickname):
    """
    Provides a consistent color for a nickname. Uses first 6 chars
    of nickname's md5 hash, and then slightly darkens the rgb values
    for use on a light background.

    :param nickname: the nickname to colorize
    :return: a CSS "rgb(r,g,b)" string
    """
    # md5() requires bytes on Python 3; encoding here also keeps
    # Python 2 working for the byte-string nicknames it passed before.
    _hex = md5(nickname.encode("utf-8")).hexdigest()[:6]
    darken = lambda s: str(int(round(int(s, 16) * .7)))
    pairs = [_hex[i:i + 2] for i in (0, 2, 4)]
    return "rgb(%s)" % ",".join([darken(pair) for pair in pairs])
def on_welcome(self, connection, event):
    """
    Join the channel once connected to the IRC server.
    """
    channel_key = settings.IRC_CHANNEL_KEY or ""
    connection.join(self.channel, key=channel_key)
def on_nicknameinuse(self, connection, event):
    """
    Increment a digit on the nickname if it's in use, and
    re-connect.
    """
    # Split the nickname into its base and any trailing digits.
    base = self.nickname
    suffix = ""
    while base[-1].isdigit():
        suffix = base[-1] + suffix
        base = base[:-1]
    next_digit = int(suffix) + 1 if suffix else 1
    self.nickname = base + str(next_digit)
    self.connect(self.host, self.port, self.nickname)
def message_channel(self, message):
    """
    Nicer shortcut for sending a message to a channel. Also
    irclib doesn't handle unicode so we bypass its privmsg ->
    send_raw methods and use its socket directly.
    """
    line = "PRIVMSG %s :%s\r\n" % (self.channel, message)
    self.connection.socket.send(line.encode("utf-8"))
def emit_message(self, message):
    """
    Send a message to the channel. We also emit the message
    back to the sender's WebSocket.
    """
    if self.nickname not in self.nicknames:
        # Only accept messages if we've joined.
        return
    nickname_color = self.nicknames[self.nickname]
    message = message[:settings.MAX_MESSAGE_LENGTH]
    # Handle IRC commands.
    if message.startswith("/"):
        self.connection.send_raw(message.lstrip("/"))
        return
    self.message_channel(message)
    self.namespace.emit("message", self.nickname, message, nickname_color)
def emit_nicknames(self):
    """
    Send the nickname list to the Websocket. Called whenever the
    nicknames list changes.
    """
    payload = [
        {"nickname": name, "color": color(name)}
        for name in sorted(self.nicknames.keys())
    ]
    self.namespace.emit("nicknames", payload)
def on_join(self, connection, event):
    """
    Someone joined the channel - record their color, announce the
    join, and push the refreshed nickname list to the WebSocket.
    """
    joiner = self.get_nickname(event)
    joiner_color = color(joiner)
    self.nicknames[joiner] = joiner_color
    self.namespace.emit("join")
    self.namespace.emit("message", joiner, "joins", joiner_color)
    self.emit_nicknames()
def on_nick(self, connection, event):
    """
    Someone changed their nickname - send the nicknames list to the
    WebSocket.
    """
    old_nickname = self.get_nickname(event)
    new_nickname = event.target()
    old_color = self.nicknames.pop(old_nickname)
    self.namespace.emit(
        "message", old_nickname, "is now known as %s" % new_nickname, old_color
    )
    new_color = color(new_nickname)
    self.nicknames[new_nickname] = new_color
    self.emit_nicknames()
    # If it was our own nick that changed, track the new one.
    if self.nickname == old_nickname:
        self.nickname = new_nickname
def on_quit(self, connection, event):
    """
    Someone left the channel - send the nicknames list to the
    WebSocket.
    """
    leaver = self.get_nickname(event)
    leaver_color = self.nicknames.pop(leaver)
    self.namespace.emit("message", leaver, "leaves", leaver_color)
    self.emit_nicknames()
def on_pubmsg(self, connection, event):
    """
    Messages received in the channel - relay each one to the
    WebSocket along with the sender's color.
    """
    for text in event.arguments():
        sender = self.get_nickname(event)
        sender_color = self.nicknames[sender]
        self.namespace.emit("message", sender, text, sender_color)
def perms_for_user(cls, instance, user, db_session=None):
    """
    returns all permissions that given user has for this resource
    from groups and directly set ones too

    :param instance: resource instance to collect permissions for
    :param user: user whose group and direct permissions are gathered
    :param db_session:
    :return: list of PermissionTuple
    """
    db_session = get_db_session(db_session, instance)
    # Group-based permissions for every group the user belongs to.
    query = db_session.query(
        cls.models_proxy.GroupResourcePermission.group_id.label("owner_id"),
        cls.models_proxy.GroupResourcePermission.perm_name,
        sa.literal("group").label("type"),
    )
    query = query.filter(
        cls.models_proxy.GroupResourcePermission.group_id.in_(
            [gr.id for gr in user.groups]
        )
    )
    query = query.filter(
        cls.models_proxy.GroupResourcePermission.resource_id
        == instance.resource_id
    )
    # Permissions granted to the user directly.
    query2 = db_session.query(
        cls.models_proxy.UserResourcePermission.user_id.label("owner_id"),
        cls.models_proxy.UserResourcePermission.perm_name,
        sa.literal("user").label("type"),
    )
    query2 = query2.filter(
        cls.models_proxy.UserResourcePermission.user_id == user.id
    )
    query2 = query2.filter(
        cls.models_proxy.UserResourcePermission.resource_id
        == instance.resource_id
    )
    query = query.union(query2)
    groups_dict = dict([(g.id, g) for g in user.groups])
    perms = [
        PermissionTuple(
            user,
            row.perm_name,
            row.type,
            groups_dict.get(row.owner_id) if row.type == "group" else None,
            instance,
            False,
            True,
        )
        for row in query
    ]
    # include all perms if user is the owner of this resource
    if instance.owner_user_id == user.id:
        perms.append(
            PermissionTuple(
                user, ALL_PERMISSIONS, "user", None, instance, True, True
            )
        )
    # NOTE(review): groups_dict is rebuilt here although the mapping
    # built above is identical - looks like duplicated code.
    groups_dict = dict([(g.id, g) for g in user.groups])
    if instance.owner_group_id in groups_dict:
        perms.append(
            PermissionTuple(
                user,
                ALL_PERMISSIONS,
                "group",
                groups_dict.get(instance.owner_group_id),
                instance,
                True,
                True,
            )
        )
    return perms
def direct_perms_for_user(cls, instance, user, db_session=None):
    """
    returns permissions that given user has for this resource
    without ones inherited from groups that user belongs to

    :param instance: resource instance
    :param user:
    :param db_session:
    :return: list of PermissionTuple
    """
    db_session = get_db_session(db_session, instance)
    query = db_session.query(
        cls.models_proxy.UserResourcePermission.user_id,
        cls.models_proxy.UserResourcePermission.perm_name,
    )
    query = query.filter(cls.models_proxy.UserResourcePermission.user_id == user.id)
    query = query.filter(
        cls.models_proxy.UserResourcePermission.resource_id == instance.resource_id
    )
    perms = [
        PermissionTuple(user, row.perm_name, "user", None, instance, False, True)
        for row in query
    ]
    # include all perms if user is the owner of this resource
    if instance.owner_user_id == user.id:
        perms.append(
            # NOTE(review): built with one argument fewer than the
            # sibling methods (no trailing flag) - confirm that
            # PermissionTuple's defaults make this intentional.
            PermissionTuple(user, ALL_PERMISSIONS, "user", None, instance, True)
        )
    return perms
def group_perms_for_user(cls, instance, user, db_session=None):
    """
    returns permissions that given user has for this resource
    that are inherited from groups

    :param instance: resource instance
    :param user:
    :param db_session:
    :return: list of PermissionTuple
    """
    db_session = get_db_session(db_session, instance)
    perms = resource_permissions_for_users(
        cls.models_proxy,
        ANY_PERMISSION,
        resource_ids=[instance.resource_id],
        user_ids=[user.id],
        db_session=db_session,
    )
    # Keep only the group-sourced entries.
    perms = [p for p in perms if p.type == "group"]
    # include all perms if user is the owner of this resource
    groups_dict = dict([(g.id, g) for g in user.groups])
    if instance.owner_group_id in groups_dict:
        perms.append(
            PermissionTuple(
                user,
                ALL_PERMISSIONS,
                "group",
                groups_dict.get(instance.owner_group_id),
                instance,
                True,
                True,
            )
        )
    return perms
def perm_by_group_and_perm_name(
    cls, resource_id, group_id, perm_name, db_session=None
):
    """
    fetch permissions by group and permission name

    :param resource_id:
    :param group_id:
    :param perm_name:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    model = cls.models_proxy.GroupResourcePermission
    # Multiple filter() arguments AND together, same as chaining.
    query = db_session.query(model).filter(
        model.group_id == group_id,
        model.perm_name == perm_name,
        model.resource_id == resource_id,
    )
    return query.first()
def permissions(cls, instance, db_session=None):
    """
    returns all non-resource permissions based on what groups user
    belongs and directly set ones for this user

    :param instance: user instance
    :param db_session:
    :return: list of PermissionTuple
    """
    db_session = get_db_session(db_session, instance)
    # Permissions inherited from the user's groups.
    query = db_session.query(
        cls.models_proxy.GroupPermission.group_id.label("owner_id"),
        cls.models_proxy.GroupPermission.perm_name.label("perm_name"),
        sa.literal("group").label("type"),
    )
    query = query.filter(
        cls.models_proxy.GroupPermission.group_id
        == cls.models_proxy.UserGroup.group_id
    )
    query = query.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserGroup.user_id
    )
    query = query.filter(cls.models_proxy.User.id == instance.id)
    # Permissions assigned to the user directly.
    query2 = db_session.query(
        cls.models_proxy.UserPermission.user_id.label("owner_id"),
        cls.models_proxy.UserPermission.perm_name.label("perm_name"),
        sa.literal("user").label("type"),
    )
    query2 = query2.filter(cls.models_proxy.UserPermission.user_id == instance.id)
    query = query.union(query2)
    groups_dict = dict([(g.id, g) for g in instance.groups])
    return [
        PermissionTuple(
            instance,
            row.perm_name,
            row.type,
            groups_dict.get(row.owner_id) if row.type == "group" else None,
            None,
            False,
            True,
        )
        for row in query
    ]
def groups_with_resources(cls, instance):
    """
    Returns a list of groups users belongs to with eager loaded
    resources owned by those groups

    :param instance:
    :return:
    """
    eager_resources = sa.orm.eagerload(cls.models_proxy.Group.resources)
    return instance.groups_dynamic.options(eager_resources)
def resources_with_possible_perms(
    cls, instance, resource_ids=None, resource_types=None, db_session=None
):
    """
    returns list of permissions and resources for this user

    :param instance:
    :param resource_ids: restricts the search to specific resources
    :param resource_types: restricts the search to specific resource types
    :param db_session:
    :return:
    """
    # Explicitly granted permissions (user rows and group rows).
    perms = resource_permissions_for_users(
        cls.models_proxy,
        ANY_PERMISSION,
        resource_ids=resource_ids,
        resource_types=resource_types,
        user_ids=[instance.id],
        db_session=db_session,
    )
    # Ownership implies ALL_PERMISSIONS on directly owned resources...
    for resource in instance.resources:
        perms.append(
            PermissionTuple(
                instance, ALL_PERMISSIONS, "user", None, resource, True, True
            )
        )
    # ...and on resources owned by any of the user's groups.
    for group in cls.groups_with_resources(instance):
        for resource in group.resources:
            perms.append(
                PermissionTuple(
                    instance, ALL_PERMISSIONS, "group", group, resource, True, True
                )
            )
    return perms
def set_password(cls, instance, raw_password):
    """
    sets new password on a user using password manager

    :param instance:
    :param raw_password:
    :return:
    """
    # support API for both passlib 1.x and 2.x
    manager = instance.passwordmanager
    hash_callable = getattr(manager, "hash", manager.encrypt)
    hashed = hash_callable(raw_password)
    if six.PY2:
        hashed = hashed.decode("utf8")
    instance.user_password = hashed
    cls.regenerate_security_code(instance)
def check_password(cls, instance, raw_password, enable_hash_migration=True):
    """
    checks string with users password hash using password manager

    :param instance:
    :param raw_password:
    :param enable_hash_migration: if legacy hashes should be migrated
    :return:
    """
    verified, new_hash = instance.passwordmanager.verify_and_update(
        raw_password, instance.user_password
    )
    # passlib hands back a replacement hash when the stored one uses a
    # deprecated scheme - persist it if migration is enabled.
    if enable_hash_migration and new_hash:
        instance.user_password = (
            new_hash.decode("utf8") if six.PY2 else new_hash
        )
    return verified
def by_user_name_and_security_code(cls, user_name, security_code, db_session=None):
    """
    fetch user objects by user name and security code

    :param user_name:
    :param security_code:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    lowered = (user_name or "").lower()
    query = (
        db_session.query(cls.model)
        .filter(sa.func.lower(cls.model.user_name) == lowered)
        .filter(cls.model.security_code == security_code)
    )
    return query.first()
def user_names_like(cls, user_name, db_session=None):
    """
    fetch users with similar names using LIKE clause

    :param user_name:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    pattern = (user_name or "").lower()
    query = db_session.query(cls.model).filter(
        sa.func.lower(cls.model.user_name).like(pattern)
    )
    return query.order_by(cls.model.user_name)
def by_email(cls, email, db_session=None):
    """
    fetch user object by email

    :param email:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    lowered = (email or "").lower()
    query = (
        db_session.query(cls.model)
        .filter(sa.func.lower(cls.model.email) == lowered)
        .options(sa.orm.eagerload("groups"))
    )
    return query.first()
def users_for_perms(cls, perm_names, db_session=None):
    """
    return users hat have one of given permissions

    :param perm_names:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    # Users holding one of the permissions via group membership.
    query = db_session.query(cls.model)
    query = query.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserGroup.user_id
    )
    query = query.filter(
        cls.models_proxy.UserGroup.group_id
        == cls.models_proxy.GroupPermission.group_id
    )
    query = query.filter(cls.models_proxy.GroupPermission.perm_name.in_(perm_names))
    # Users holding one of the permissions directly.
    query2 = db_session.query(cls.model)
    query2 = query2.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserPermission.user_id
    )
    query2 = query2.filter(
        cls.models_proxy.UserPermission.perm_name.in_(perm_names)
    )
    users = query.union(query2).order_by(cls.model.id)
    return users
def handle_joined(self, connection, event):
    """
    Store join times for current nicknames when we first join.
    """
    names_arg = event.arguments()[-1]
    for raw_name in names_arg.split():
        # Strip op/voice prefixes ("@", "+") from each name.
        self.joined[raw_name.lstrip("@+")] = datetime.now()
def handle_join(self, connection, event):
    """
    Store join time for a nickname when it joins.
    """
    self.joined[self.get_nickname(event)] = datetime.now()
def handle_quit(self, connection, event):
    """
    Store quit time for a nickname when it quits.
    """
    nickname = self.get_nickname(event)
    self.quit[nickname] = datetime.now()
    # pop() rather than del: a user whose join was never tracked (e.g.
    # present before we started recording) previously raised KeyError.
    self.joined.pop(nickname, None)
def timesince(self, when):
    """
    Returns human friendly version of the timespan between now
    and the given datetime.

    :param when: a datetime in the past
    :return: e.g. "1 day, 2 hours and 5 minutes"
    """
    units = (
        ("year", 60 * 60 * 24 * 365),
        ("week", 60 * 60 * 24 * 7),
        ("day", 60 * 60 * 24),
        ("hour", 60 * 60),
        ("minute", 60),
        ("second", 1),
    )
    delta = datetime.now() - when
    total_seconds = delta.days * 60 * 60 * 24 + delta.seconds
    parts = []
    for name, seconds in units:
        # Floor division: plain "/" yields floats on Python 3 and would
        # produce strings like "1.5 hours".
        value = total_seconds // seconds
        if value > 0:
            total_seconds %= seconds
            plural = "s" if value != 1 else ""
            parts.append("%s %s%s" % (value, name, plural))
    # Join with commas, using "and" before the final part.
    return " and ".join(", ".join(parts).rsplit(", ", 1))
def commands(self, event):
    """
    Lists all available commands.
    """
    names = sorted(self.commands_dict().keys())
    return "Available commands: %s" % " ".join(names)
def help(self, event, command_name=None):
    """
    Shows the help message for the bot. Takes an optional command name
    which when given, will show help for that command.

    :param command_name: name of a command to describe, or None for
        the generic usage message
    :return: a single-line help string
    """
    if command_name is None:
        return ("Type !commands for a list of all commands. Type "
                "!help [command] to see help for a specific command.")
    try:
        command = self.commands_dict()[command_name]
    except KeyError:
        return "%s is not a command" % command_name
    # inspect.getargspec was removed in Python 3.11 - prefer
    # getfullargspec, falling back for old interpreters.
    try:
        from inspect import getfullargspec as _argspec
    except ImportError:
        from inspect import getargspec as _argspec
    from inspect import getdoc
    argspec = _argspec(command)
    # Skip the implicit (self, event) parameters.
    args = argspec.args[2:]
    defaults = argspec.defaults or []
    # Defaults align with the tail of the arg list - annotate in reverse.
    for i in range(-1, -len(defaults) - 1, -1):
        args[i] = "%s [default: %s]" % (args[i], defaults[i])
    args = ", ".join(args)
    help = getdoc(command).replace("\n", " ")
    return "help for %s: (args: %s) %s" % (command_name, args, help)
def uptime(self, event, nickname=None):
    """
    Shows the amount of time since the given nickname has been
    in the channel. If no nickname is given, I'll use my own.
    """
    if nickname and nickname != self.nickname:
        try:
            duration = self.timesince(self.joined[nickname])
        except KeyError:
            return "%s is not in the channel" % nickname
        else:
            prefix = ("you have" if nickname == self.get_nickname(event)
                      else "%s has" % nickname)
            return "%s been here for %s" % (prefix, duration)
    return "I've been here for %s" % self.timesince(self.joined[self.nickname])
def seen(self, event, nickname):
    """
    Shows the amount of time since the given nickname was last
    seen in the channel.
    """
    if nickname in self.joined:
        # Currently present in the channel.
        prefix = ("you are" if nickname == self.get_nickname(event)
                  else "%s is" % nickname)
        return "%s here right now" % prefix
    if nickname not in self.quit:
        return "%s has never been seen" % nickname
    return "%s was last seen %s ago" % (nickname,
                                        self.timesince(self.quit[nickname]))
def groups_dynamic(self):
    """
    Returns a dynamic relationship for groups - allowing for
    filtering of data.
    """
    rel_kwargs = dict(
        secondary="users_groups",
        lazy="dynamic",
        passive_deletes=True,
        passive_updates=True,
    )
    return sa.orm.relationship("Group", **rel_kwargs)
def validate_permission(self, key, permission):
    """
    Validates if group can get assigned with permission.
    """
    allowed = self.__possible_permissions__
    if permission.perm_name not in allowed:
        raise AssertionError(
            "perm_name is not one of {}".format(allowed)
        )
    return permission
def parse_feeds(self, message_channel=True):
    """
    Iterates through each of the feed URLs, parses their items, and
    sends any items to the channel that have not been previously
    been parsed.
    """
    if not parse:
        # feedparser isn't available - nothing to do.
        return
    for feed_url in self.feeds:
        feed = parse(feed_url)
        for item in feed.entries:
            if item["id"] in self.feed_items:
                continue
            self.feed_items.add(item["id"])
            if message_channel:
                self.message_channel(self.format_item_message(feed, item))
def print_meter_record(file_path, rows=5):
    """
    Output readings for specified number of rows to console.
    """
    meter = nr.read_nem_file(file_path)
    print('Header:', meter.header)
    print('Transactions:', meter.transactions)
    for nmi, channels in meter.readings.items():
        for channel, readings in channels.items():
            print(nmi, 'Channel', channel)
            # Only show the last ``rows`` readings per channel.
            for reading in readings[-rows:]:
                print('', reading)
def users(self):
    """
    Returns all users that have permissions for this resource.
    """
    rel_kwargs = dict(
        secondary="users_resources_permissions",
        passive_deletes=True,
        passive_updates=True,
    )
    return sa.orm.relationship("User", **rel_kwargs)
def register(linter):
    """
    Add the needed transformations and suppressions.
    """
    checker = MongoEngineChecker(linter)
    linter.register_checker(checker)
    for module_name in ('mongoengine', 'mongomotor'):
        add_transform(module_name)
    suppress_qs_decorator_messages(linter)
    suppress_fields_attrs_messages(linter)
def output_as_csv(file_name, nmi=None, output_file=None):
    """
    Transpose all channels and output a csv that is easier
    to read and do charting on

    :param file_name: The NEM file to process
    :param nmi: Which NMI to output if more than one
    :param output_file: Specify different output location
    :returns: The file that was created
    """
    meter = read_nem_file(file_name)
    if nmi is None:
        nmi = list(meter.readings.keys())[0]  # Use first NMI
    channels = list(meter.transactions[nmi].keys())
    first_channel = meter.readings[nmi][channels[0]]
    num_records = len(first_channel)
    last_date = first_channel[-1].t_end
    if output_file is None:
        output_file = '{}_{}_transposed.csv'.format(
            nmi, last_date.strftime('%Y%m%d'))
    with open(output_file, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                            quoting=csv.QUOTE_MINIMAL)
        # One column per channel, bracketed by the interval and quality.
        writer.writerow(
            ['period_start', 'period_end'] + channels + ['quality_method'])
        for i in range(num_records):
            row = [first_channel[i].t_start, first_channel[i].t_end]
            row.extend(meter.readings[nmi][ch][i].read_value
                       for ch in channels)
            row.append(first_channel[i].quality_method)
            writer.writerow(row)
    return output_file
def by_group_name(cls, group_name, db_session=None):
    """
    Fetch group by name.

    :param group_name:
    :param db_session:
    :return:
    """
    session = get_db_session(db_session)
    return (
        session.query(cls.model)
        .filter(cls.model.group_name == group_name)
        .first()
    )
def get_user_paginator(
    cls,
    instance,
    page=1,
    item_count=None,
    items_per_page=50,
    user_ids=None,
    GET_params=None,
):
    """
    Returns paginator over users belonging to the group.

    :param instance: group instance whose users are paginated
    :param page: page number to fetch
    :param item_count: precomputed total item count (optional)
    :param items_per_page: page size
    :param user_ids: optional restriction to specific user ids
    :param GET_params: extra query-string params forwarded to the pager
    :return: SqlalchemyOrmPage over the filtered users query
    """
    # Bug fix: copy the dict before popping "page" so we never mutate the
    # caller's GET_params in place.
    GET_params = dict(GET_params or {})
    GET_params.pop("page", None)
    query = instance.users_dynamic
    if user_ids:
        query = query.filter(cls.models_proxy.UserGroup.user_id.in_(user_ids))
    return SqlalchemyOrmPage(
        query,
        page=page,
        item_count=item_count,
        items_per_page=items_per_page,
        **GET_params
    )
def resources_with_possible_perms(
    cls,
    instance,
    perm_names=None,
    resource_ids=None,
    resource_types=None,
    db_session=None,
):
    """
    Returns list of permissions and resources for this group,
    resource_ids restricts the search to specific resources

    :param instance:
    :param perm_names:
    :param resource_ids:
    :param resource_types:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session, instance)
    # Local aliases keep the query readable.
    GRPerm = cls.models_proxy.GroupResourcePermission
    Group = cls.models_proxy.Group
    Resource = cls.models_proxy.Resource
    query = db_session.query(GRPerm.perm_name, Group, Resource)
    query = query.filter(Resource.resource_id == GRPerm.resource_id)
    query = query.filter(Group.id == GRPerm.group_id)
    if resource_ids:
        query = query.filter(GRPerm.resource_id.in_(resource_ids))
    if resource_types:
        query = query.filter(Resource.resource_type.in_(resource_types))
    # ANY_PERMISSION (bare or wrapped in a list) means "do not filter".
    if perm_names not in ([ANY_PERMISSION], ANY_PERMISSION) and perm_names:
        query = query.filter(GRPerm.perm_name.in_(perm_names))
    query = query.filter(GRPerm.group_id == instance.id)
    perms = [
        PermissionTuple(
            None, row.perm_name, "group", instance, row.Resource, False, True
        )
        for row in query
    ]
    # Resources owned by the group implicitly grant ALL_PERMISSIONS.
    for resource in instance.resources:
        perms.append(
            PermissionTuple(
                None, ALL_PERMISSIONS, "group", instance, resource, True, True
            )
        )
    return perms
def wait_closed(self):
    """
    Wait for closing all pool's connections.
    """
    if self._closed:
        return
    if not self._closing:
        raise RuntimeError(
            ".wait_closed() should be called " "after .close()"
        )
    # First close every idle connection.
    while self._free:
        idle = self._free.popleft()
        if not idle.closed:
            yield from idle.close()
        else:  # pragma: no cover
            pass
    # Then wait until all checked-out connections have been released.
    with (yield from self._cond):
        while self.size > self.freesize:
            yield from self._cond.wait()
    self._used.clear()
    self._closed = True
def _fill_free_pool(self, override_min):
    """
    Open new connections until the pool holds ``minsize`` connections;
    if no free connection is available and ``override_min`` is set,
    open one extra connection up to ``maxsize``.

    NOTE(review): the original docstring claimed this removes timed-out
    free connections, but no timeout handling exists here - confirm
    against callers before relying on that.
    """
    while self.size < self.minsize:
        yield from self._open_one_connection()
    if self._free:
        return
    if override_min and self.size < self.maxsize:
        yield from self._open_one_connection()

def _open_one_connection(self):
    """Open a single connection, add it to the free list and notify one waiter."""
    self._acquiring += 1
    try:
        conn = yield from connect(
            database=self._database,
            echo=self._echo,
            loop=self._loop,
            **self._conn_kwargs
        )
        self._free.append(conn)
        self._cond.notify()
    finally:
        self._acquiring -= 1
def add_function(self, function):
    """
    Adds the function to the list of registered functions.
    """
    built = self.build_function(function)
    if built.name in self.functions:
        raise FunctionAlreadyRegistered(built.name)
    self.functions[built.name] = built
def get_one(self, context, name):
    """
    Returns a function if it is registered, the context is ignored.
    """
    if name not in self.functions:
        raise FunctionNotFound(name)
    return self.functions[name]
<SYSTEM_TASK:> Replace generic key related attribute with filters by object_id and content_type fields <END_TASK> <USER_TASK:> Description: def _preprocess_kwargs(self, initial_kwargs): """ Replace generic key related attribute with filters by object_id and content_type fields """
kwargs = initial_kwargs.copy() generic_key_related_kwargs = self._get_generic_key_related_kwargs(initial_kwargs) for key, value in generic_key_related_kwargs.items(): # delete old kwarg that was related to generic key del kwargs[key] try: suffix = key.split('__')[1] except IndexError: suffix = None # add new kwargs that related to object_id and content_type fields new_kwargs = self._get_filter_object_id_and_content_type_filter_kwargs(value, suffix) kwargs.update(new_kwargs) return kwargs
def drop_columns_with_unique_values(
    data: pd.DataFrame, max_unique_values: float = 0.25
):
    """
    Remove columns when the proportion of the total of unique values is
    more than the max_unique_values threshold, just for columns with type
    as object or category. Mutates ``data`` in place.

    :param data: frame to prune (modified in place)
    :param max_unique_values: maximum allowed ratio of unique values per
        column (annotation fixed: this is a ratio, so ``float`` not ``int``)
    :return: None
    """
    n_rows = data.shape[0]
    # Boolean Series per column: True when the column is too unique.
    too_unique = data.apply(
        lambda col: (
            (col.dropna().unique().shape[0] / n_rows) > max_unique_values
            and col.dtype in ['object', 'category']
        )
    )
    data.drop(too_unique[too_unique].index, axis=1, inplace=True)
def to_representation(self, obj):
    """
    Serializes any object to his url representation.
    """
    for field in self.lookup_fields:
        if hasattr(obj, field):
            lookup_kwargs = {field: getattr(obj, field)}
            break
    else:
        # No lookup field matched the object.
        raise AttributeError('Related object does not have any of lookup_fields')
    request = self._get_request()
    return request.build_absolute_uri(
        reverse(self._get_url(obj), kwargs=lookup_kwargs))
def to_internal_value(self, data):
    """
    Restores model instance from its url.
    """
    if not data:
        return None
    request = self._get_request()
    try:
        obj = core_utils.instance_from_url(data, user=request.user)
    except ValueError:
        raise serializers.ValidationError(_('URL is invalid: %s.') % data)
    except (Resolver404, AttributeError, MultipleObjectsReturned,
            ObjectDoesNotExist):
        raise serializers.ValidationError(
            _("Can't restore object from url: %s") % data)
    if obj.__class__ not in self.related_models:
        raise serializers.ValidationError(
            _('%s object does not support such relationship.')
            % six.text_type(obj))
    return obj
def validate(self, data):
    """
    Check that the start is before the end.
    """
    has_both = 'start' in data and 'end' in data
    if has_both and data['start'] >= data['end']:
        raise serializers.ValidationError(_('End must occur after start.'))
    return data
def run(self, serialized_instance, *args, **kwargs):
    """
    Deserialize input data and start backend operation execution.
    """
    try:
        instance = utils.deserialize_instance(serialized_instance)
    except ObjectDoesNotExist:
        message = ('Cannot restore instance from serialized object %s. '
                   'Probably it was deleted.' % serialized_instance)
        six.reraise(ObjectDoesNotExist, message)
    self.args = args
    self.kwargs = kwargs
    self.pre_execute(instance)
    result = self.execute(instance, *self.args, **self.kwargs)
    self.post_execute(instance)
    # Model instances are not serializable by celery - convert back.
    if result and isinstance(result, django_models.Model):
        result = utils.serialize_instance(result)
    return result
def is_previous_task_processing(self, *args, **kwargs):
    """
    Return True if exist task that is equal to current and is uncompleted.

    Collects the active, scheduled and reserved task lists from all
    workers and compares each against this invocation via ``is_equal``.
    """
    app = self._get_app()
    inspect = app.control.inspect()
    active = inspect.active() or {}
    scheduled = inspect.scheduled() or {}
    reserved = inspect.reserved() or {}
    # Bug fix: reserved.values() was not wrapped in list(), which raises
    # TypeError on Python 3 (dict_values cannot be added to a list).
    uncompleted = sum(
        list(active.values()) + list(scheduled.values()) + list(reserved.values()),
        [],
    )
    return any(self.is_equal(task, *args, **kwargs) for task in uncompleted)
def apply_async(self, args=None, kwargs=None, **options):
    """
    Do not run background task if previous task is uncompleted.
    """
    if not self.is_previous_task_processing(*args, **kwargs):
        return super(BackgroundTask, self).apply_async(
            args=args, kwargs=kwargs, **options)
    message = ('Background task %s was not scheduled, because its '
               'predecessor is not completed yet.' % self.name)
    logger.info(message)
    # Celery expects apply_async to return an AsyncResult; returning
    # None here would kill celerybeat.
    return self.AsyncResult(options.get('task_id') or str(uuid4()))
<SYSTEM_TASK:> Returns key to be used in cache <END_TASK> <USER_TASK:> Description: def _get_cache_key(self, args, kwargs): """ Returns key to be used in cache """
hash_input = json.dumps({'name': self.name, 'args': args, 'kwargs': kwargs}, sort_keys=True) # md5 is used for internal caching, not need to care about security return hashlib.md5(hash_input).hexdigest()
def apply_async(self, args=None, kwargs=None, **options):
    """
    Checks whether task must be skipped and decreases the counter
    in that case.
    """
    cache_key = self._get_cache_key(args, kwargs)
    counter, penalty = cache.get(cache_key, (0, 0))
    if counter:
        # Skip this run and consume one unit of the penalty counter.
        cache.set(cache_key, (counter - 1, penalty), self.CACHE_LIFETIME)
        logger.info('The task %s will not be executed due to the penalty.'
                    % self.name)
        return self.AsyncResult(options.get('task_id') or str(uuid4()))
    return super(PenalizedBackgroundTask, self).apply_async(
        args=args, kwargs=kwargs, **options)
def on_failure(self, exc, task_id, args, kwargs, einfo):
    """
    Increases penalty for the task and resets the counter.
    """
    cache_key = self._get_cache_key(args, kwargs)
    _, penalty = cache.get(cache_key, (0, 0))
    if penalty < self.MAX_PENALTY:
        penalty += 1
        logger.debug('The task %s is penalized and will be executed on %d run.'
                     % (self.name, penalty))
    # Reset the skip counter to the (possibly increased) penalty.
    cache.set(cache_key, (penalty, penalty), self.CACHE_LIFETIME)
    return super(PenalizedBackgroundTask, self).on_failure(
        exc, task_id, args, kwargs, einfo)
def on_success(self, retval, task_id, args, kwargs):
    """
    Clears cache for the task.
    """
    cache_key = self._get_cache_key(args, kwargs)
    if cache.get(cache_key) is not None:
        cache.delete(cache_key)
        logger.debug('Penalty for the task %s has been removed.' % self.name)
    return super(PenalizedBackgroundTask, self).on_success(
        retval, task_id, args, kwargs)
def log_backend_action(action=None):
    """
    Logging for backend method. Expects django model instance as first
    argument.

    :param action: explicit action name for the log lines; when None the
        wrapped function's name (underscores replaced by spaces) is used.
    :return: decorator wrapping the backend method with debug logging
        before and after its execution.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped(self, instance, *args, **kwargs):
            # Bug fix: func.func_name is Python 2 only; __name__ works on
            # both Python 2 and 3.
            action_name = (func.__name__.replace('_', ' ')
                           if action is None else action)
            logger.debug('About to %s `%s` (PK: %s).',
                         action_name, instance, instance.pk)
            result = func(self, instance, *args, **kwargs)
            logger.debug('Action `%s` was executed successfully for `%s` (PK: %s).',
                         action_name, instance, instance.pk)
            return result
        return wrapped
    return decorator
def _is_active_model(cls, model):
    """
    Check is model app name is in list of INSTALLED_APPS.
    """
    # Apps can be registered either as "<module>.<app>" (e.g.
    # "waldur_core.openstack") or as a bare "<app_name>", so test both
    # prefixes of the model's module path.
    module_parts = model.__module__.split('.')
    return (
        '.'.join(module_parts[:2]) in settings.INSTALLED_APPS
        or '.'.join(module_parts[:1]) in settings.INSTALLED_APPS
    )
def get_context_data_from_headers(request, headers_schema):
    """Extracts context data from request headers according to specified schema.

    >>> from lxml import etree as et
    >>> from datetime import date
    >>> from pyws.functions.args import TypeFactory
    >>> Fake = type('Fake', (object, ), {})
    >>> request = Fake()
    >>> request.parsed_data = Fake()
    >>> request.parsed_data.xml = et.fromstring(
    ...     '<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/">'
    ...         '<s:Header>'
    ...             '<headers>'
    ...                 '<string>hello</string>'
    ...                 '<number>100</number>'
    ...                 '<date>2011-08-12</date>'
    ...             '</headers>'
    ...         '</s:Header>'
    ...     '</s:Envelope>')
    >>> data = get_context_data_from_headers(request, TypeFactory(
    ...     {0: 'Headers', 'string': str, 'number': int, 'date': date}))
    >>> data == {'string': 'hello', 'number': 100, 'date': date(2011, 8, 12)}
    True
    """
    if not headers_schema:
        return None
    envelope = request.parsed_data.xml.xpath(
        '/soap:Envelope', namespaces=SoapProtocol.namespaces)[0]
    header_nodes = envelope.xpath(
        './soap:Header/*', namespaces=SoapProtocol.namespaces)
    if not header_nodes:
        return None
    return headers_schema.validate(xml2obj(header_nodes[0], headers_schema))
def lazy_constant(fn):
    """Decorator to make a function that takes no arguments use the LazyConstant class."""
    class _LazyConstantWrapper(LazyConstant):
        @functools.wraps(fn)
        def __call__(self):
            return self.get_value()

    return _LazyConstantWrapper(fn)
def lru_cache(maxsize=128, key_fn=None):
    """Decorator that adds an LRU cache of size maxsize to the decorated function.

    maxsize is the number of different keys cache can accomodate.
    key_fn is the function that builds key from args. The default key function
    creates a tuple out of args and kwargs. If you use the default, there is no reason
    not to use functools.lru_cache directly.

    Possible use cases:
    - Your cache key is very large, so you don't want to keep the whole key in memory.
    - The function takes some arguments that don't affect the result.
    """
    def decorator(fn):
        cache = LRUCache(maxsize)
        spec = inspect2.getfullargspec(fn)
        names = spec.args[1:] + spec.kwonlyargs  # remove self
        defaults = get_kwargs_defaults(spec)

        if key_fn is not None:
            make_key = key_fn
        else:
            def make_key(args, kwargs):
                return get_args_tuple(args, kwargs, names, defaults)

        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            key = make_key(args, kwargs)
            try:
                return cache[key]
            except KeyError:
                value = fn(*args, **kwargs)
                cache[key] = value
                return value

        wrapper.clear = cache.clear
        return wrapper

    return decorator
def cached_per_instance():
    """Decorator that adds caching to an instance method.

    The cached value is stored so that it gets garbage collected together with the instance.
    The cached values are not stored when the object is pickled.
    """
    def cache_fun(fun):
        spec = inspect2.getfullargspec(fun)
        names = spec.args[1:] + spec.kwonlyargs  # remove self
        defaults = get_kwargs_defaults(spec)
        cache = {}

        def make_key(args, kwargs):
            return get_args_tuple(args, kwargs, names, defaults)

        def drop_instance(instance_key, ref):
            # weakref callback: forget the entry when the instance dies
            del cache[instance_key]

        @functools.wraps(fun)
        def new_fun(self, *args, **kwargs):
            instance_key = id(self)
            if instance_key not in cache:
                ref = weakref.ref(
                    self, functools.partial(drop_instance, instance_key))
                cache[instance_key] = (ref, {})
            per_instance = cache[instance_key][1]
            key = make_key(args, kwargs)
            if key not in per_instance:
                per_instance[key] = fun(self, *args, **kwargs)
            return per_instance[key]

        # just so unit tests can check that this is cleaned up correctly
        new_fun.__cached_per_instance_cache__ = cache
        return new_fun

    return cache_fun
def get_args_tuple(args, kwargs, arg_names, kwargs_defaults):
    """Generates a cache key from the passed in arguments."""
    values = list(args)
    try:
        # Fill in the remaining named arguments from kwargs / defaults.
        for name in arg_names[len(args):]:
            if name in kwargs_defaults:
                values.append(kwargs.get(name, kwargs_defaults[name]))
            else:
                values.append(kwargs[name])
    except KeyError as e:
        raise TypeError("Missing argument %r" % (e.args[0],))
    return tuple(values)
def get_kwargs_defaults(argspec):
    """Computes a kwargs_defaults dictionary for use by get_args_tuple given an argspec."""
    names = tuple(argspec.args)
    defaults = argspec.defaults or ()
    first_default = len(names) - len(defaults)
    result = {
        names[first_default + i]: value
        for i, value in enumerate(defaults)
    }
    kwonly = getattr(argspec, "kwonlydefaults", None)
    if kwonly:
        result.update(kwonly)
    return result
def memoize(fun):
    """Memoizes return values of the decorated function.

    Similar to l0cache, but the cache persists for the duration of the process,
    unless clear_cache() is called on the function.
    """
    spec = inspect2.getfullargspec(fun)
    names = spec.args + spec.kwonlyargs
    defaults = get_kwargs_defaults(spec)

    def make_key(args, kwargs):
        return get_args_tuple(args, kwargs, names, defaults)

    @functools.wraps(fun)
    def new_fun(*args, **kwargs):
        key = make_key(args, kwargs)
        store = new_fun.__cache
        if key not in store:
            store[key] = fun(*args, **kwargs)
        return store[key]

    def clear_cache():
        """Removes all cached values for this function."""
        new_fun.__cache.clear()

    new_fun.__cache = {}
    new_fun.clear_cache = clear_cache
    return new_fun
def memoize_with_ttl(ttl_secs=60 * 60 * 24):
    """Memoizes return values of the decorated function for a given time-to-live.

    Similar to l0cache, but the cache persists for the duration of the process, unless
    clear_cache() is called on the function or the time-to-live expires. By default,
    the time-to-live is set to 24 hours.
    """
    error_msg = (
        "Incorrect usage of qcore.caching.memoize_with_ttl: "
        "ttl_secs must be a positive integer."
    )
    assert_is_instance(ttl_secs, six.integer_types, error_msg)
    assert_gt(ttl_secs, 0, error_msg)

    def cache_fun(fun):
        spec = inspect2.getfullargspec(fun)
        names = spec.args + spec.kwonlyargs
        defaults = get_kwargs_defaults(spec)

        def make_key(args, kwargs):
            # repr() keeps the key small and hashable.
            return repr(get_args_tuple(args, kwargs, names, defaults))

        @functools.wraps(fun)
        def new_fun(*args, **kwargs):
            key = make_key(args, kwargs)
            now = int(time.time())
            store = new_fun.__cache
            times = new_fun.__cache_times
            missing = key not in store or key not in times
            # Recompute on a miss or once the ttl has expired.
            if missing or now - times[key] > ttl_secs:
                store[key] = fun(*args, **kwargs)
                times[key] = now
            return store[key]

        def clear_cache():
            """Removes all cached values for this function."""
            new_fun.__cache.clear()
            new_fun.__cache_times.clear()

        def dirty(*args, **kwargs):
            """Dirties the function for a given set of arguments."""
            key = make_key(args, kwargs)
            new_fun.__cache.pop(key, None)
            new_fun.__cache_times.pop(key, None)

        new_fun.__cache = {}
        new_fun.__cache_times = {}
        new_fun.clear_cache = clear_cache
        new_fun.dirty = dirty
        return new_fun

    return cache_fun
def get_value(self):
    """Returns the value of the constant, computing it on first access."""
    if self.value is not_computed:
        self.value = self.value_provider()
    return None if self.value is not_computed else self.value
def compute(self):
    """Computes the value. Does not look at the cache."""
    value = self.value_provider()
    self.value = value
    return None if value is not_computed else value
def get(self, key, default=miss):
    """Return the value for given key if it exists."""
    if key in self._dict:
        # __getitem__ also refreshes the key's LRU position.
        return self[key]
    return default
def clear(self, omit_item_evicted=False):
    """Empty the cache and optionally invoke item_evicted callback."""
    if not omit_item_evicted:
        for key, value in self._dict.items():
            self._evict_item(key, value)
    self._dict.clear()
def from_buffer(self, buf):
    """
    Identify the contents of `buf`.
    """
    with self.lock:
        # On Python 3 a str would be passed to libmagic as wchar*, which
        # is not what it expects - encode to bytes first.
        if isinstance(buf, str) and str != bytes:
            buf = buf.encode('utf-8', errors='replace')
        try:
            return maybe_decode(magic_buffer(self.cookie, buf))
        except MagicException as e:
            return self._handle509Bug(e)
def start(self, timeout=None, root_object=None):
    """ Starts listening to events.

        Args:
            timeout (int): number of seconds before timeout. Used for testing purpose only.
            root_object (bambou.NURESTRootObject): NURESTRootObject object that is listening. Used for testing purpose only.
    """
    if self._is_running:
        return
    if timeout:
        self._timeout = timeout
        self._start_time = int(time())
    pushcenter_logger.debug(
        "[NURESTPushCenter] Starting push center on url %s ..." % self.url)
    self._is_running = True
    self.__root_object = root_object
    # Imported lazily to avoid a circular import at module load time.
    from .nurest_session import NURESTSession
    current_session = NURESTSession.get_current_session()
    self._thread = StoppableThread(
        target=self._listen,
        name='push-center',
        kwargs={'session': current_session})
    self._thread.daemon = True
    self._thread.start()
def stop(self):
    """ Stops listening for events. """
    if not self._is_running:
        return
    pushcenter_logger.debug("[NURESTPushCenter] Stopping...")
    # Ask the listener thread to finish and wait for it.
    self._thread.stop()
    self._thread.join()
    # Reset all session state.
    self._is_running = False
    self._current_connection = None
    self._start_time = None
    self._timeout = None