function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
sequence
def test_get_user_positions(self):
    """ Get user positions.

    Without the ranking data mocked (see TODO), every position is the
    '-' placeholder for all four users regardless of their results.
    """
    user1 = UserFactory(weekly_result=100)
    user2 = UserFactory(weekly_result=300, monthly_result=300, reputation=Decimal(300))
    user3 = AdminFactory()
    user4 = UserFactory(monthly_result=100, reputation=Decimal(50))
    # TODO mock
    self.assertEqual({
        'week_rank': '-',
        'month_rank': '-',
        'overall_rank': '-'
    }, UserProfile.objects.get_user_positions(user1))
    self.assertEqual({
        'week_rank': '-',
        'month_rank': '-',
        'overall_rank': '-'
    }, UserProfile.objects.get_user_positions(user2))
    self.assertEqual({
        'week_rank': '-',
        'month_rank': '-',
        'overall_rank': '-'
    }, UserProfile.objects.get_user_positions(user3))
    self.assertEqual({
        'week_rank': '-',
        'month_rank': '-',
        'overall_rank': '-'
    }, UserProfile.objects.get_user_positions(user4))
    # Expected results once the ranking is mocked (kept for reference):
    # self.assertEqual({
    #     'week_rank': 2,
    #     'month_rank': '-',
    #     'overall_rank': 2
    # }, UserProfile.objects.get_user_positions(user1))
    # self.assertEqual({
    #     'week_rank': 1,
    #     'month_rank': 1,
    #     'overall_rank': 1
    # }, UserProfile.objects.get_user_positions(user2))
    # self.assertEqual({
    #     'week_rank': '-',
    #     'month_rank': '-',
    #     'overall_rank': '-'
    # }, UserProfile.objects.get_user_positions(user3))
    # self.assertEqual({
    #     'week_rank': '-',
    #     'month_rank': 2,
    #     'overall_rank': 3
    # }, UserProfile.objects.get_user_positions(user4))
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_save_profile(self):
    """ Save profile """
    user = UserFactory()
    # save_profile(user,
    # NOTE(review): the save_profile call above was left unfinished in the
    # original source -- complete it or remove this stub.
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_topup_accounts_task(self):
    """ Topup: running the task credits each user the configured daily amount. """
    user = UserFactory()
    topup_accounts_task()
    # Reload from DB: the task updates rows, not our in-memory instance.
    user.refresh_from_db()
    self.assertEqual(config.DAILY_TOPUP, user.total_cash)
    # TODO mock and test exception
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_topup_accounts_task_error(self, logger, topup_cash):
    """An exception raised by topup_cash is caught and logged, not propagated.

    `logger` and `topup_cash` are mocks injected by patch decorators that
    sit outside this snippet.
    """
    UserFactory()
    topup_cash.side_effect = Exception()
    topup_accounts_task()
    logger.exception.assert_called_once()
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_create_accounts_snapshot(self):
    """Smoke test: creating an accounts snapshot with one user must not raise."""
    # The factory result was bound to an unused local before; the instance
    # itself is not needed, only its existence in the database.
    UserFactory()
    create_accounts_snapshot()
    # TODO mock logger and create_snapshot()
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_user_home(self):
    """ User home templatetag returns its context dict; is_formatted defaults to False. """
    user = UserFactory()
    user_templatetag = user_home(user, 1000, True)
    self.assertEqual({
        'user': user,
        'reputation_change': 1000,
        'is_formatted': True
    }, user_templatetag)
    # Without the third argument, is_formatted falls back to False.
    user_templatetag = user_home(user, -100)
    self.assertEqual({
        'user': user,
        'reputation_change': -100,
        'is_formatted': False
    }, user_templatetag)
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_get_reputation_history(self):
    """ Get reputation history """
    # TODO: not implemented yet.
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_last_week_reputation_change(self):
    """ Get last week reputation change """
    # TODO: not implemented yet.
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_startswith(self):
    """ Startswith template filter: a path trivially starts with itself. """
    start_path = reverse('accounts:rank')
    path = reverse('accounts:rank')
    self.assertTrue(startswith(path, start_path))
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def test_process_username(self):
    """ Process username: diacritics are ASCII-folded and collisions are disambiguated. """
    username = process_username(u"zażółćgęśląjaźń")
    self.assertEqual('zazolcgeslajazn', username)
    # Once the folded name is taken, a second fold must yield something else.
    UserFactory(username='zazolcgeslajazn')
    username2 = process_username(u"zażółćgęśląjaźń")
    self.assertNotEqual('zazolcgeslajazn', username2)
KlubJagiellonski/Politikon
[ 20, 21, 20, 29, 1433923333 ]
def kwargs_required(*required_args):
    """Decorator factory: reject calls that are missing required kwargs.

    The wrapped function aborts with HTTP 400, naming the first missing
    argument; otherwise it is invoked unchanged.
    """
    def decorate(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            missing = [name for name in required_args if name not in kwargs]
            if missing:
                return abort(400, "Argument <{0}> is required".format(missing[0]))
            return func(*args, **kwargs)
        return wrapper
    return decorate
pycook/cmdb
[ 473, 186, 473, 44, 1451356369 ]
def decorate(func):
    # Inner decorator extracted from a kwargs_required-style factory:
    # `required_args` is a free variable from the enclosing scope (not
    # visible in this snippet) and `request` is presumably the Flask
    # request proxy -- confirm against the full module.
    @wraps(func)
    def wrapper(*args, **kwargs):
        for arg in required_args:
            if arg not in request.values:
                # 400 on the first required argument missing from the request.
                return abort(400, "Argument <{0}> is required".format(arg))
        return func(*args, **kwargs)
    return wrapper
pycook/cmdb
[ 473, 186, 473, 44, 1451356369 ]
def __init__( self, xml, cwd, default):
    # xml/cwd/default are consumed by xbmcgui.WindowXMLDialog's constructor
    # machinery; only the base initializer needs to run here.
    xbmcgui.WindowXMLDialog.__init__(self)
Xycl/plugin.image.mypicsdb
[ 12, 20, 12, 2, 1344354926 ]
def onAction( self, action ):
    """Dispatch remote/keyboard actions to close, zoom in, or zoom out."""
    # Close
    # NOTE: `and` binds tighter than `or`: close on any cancel action, or
    # on SELECT while the close button has focus.
    if ( action.getId() in CANCEL_DIALOG or self.getFocusId() == BUTTON_CLOSE and action.getId() in SELECT_ITEM ):
        self.close()
    # Zoom in
    # NOTE(review): the UP/DOWN tests compare the action object itself
    # (`action in ACTION_UP`) while the others compare action.getId() --
    # confirm this asymmetry is intended.
    elif ( action.getId() in SELECT_ITEM and self.getFocusId() == BUTTON_ZOOM_IN or action in ACTION_UP):
        self.zoom('+')
    # Zoom out
    elif ( action.getId() in SELECT_ITEM and self.getFocusId() == BUTTON_ZOOM_OUT or action in ACTION_DOWN):
        self.zoom('-')
Xycl/plugin.image.mypicsdb
[ 12, 20, 12, 2, 1344354926 ]
def set_place(self, place):
    # Remember the place whose map this dialog should display.
    self.place = place
Xycl/plugin.image.mypicsdb
[ 12, 20, 12, 2, 1344354926 ]
def set_pic(self, pic):
    # Intentionally a no-op -- presumably kept for interface parity with
    # sibling dialogs; confirm with callers before removing.
    pass
Xycl/plugin.image.mypicsdb
[ 12, 20, 12, 2, 1344354926 ]
def set_map(self, mapfile):
    # Show the given map image file in the dialog's map image control.
    self.getControl( GOOGLE_MAP ).setImage(mapfile)
Xycl/plugin.image.mypicsdb
[ 12, 20, 12, 2, 1344354926 ]
def setup_all( self, filtersettings = ""):
    # Set the dialog heading from localized string 30220.
    # NOTE(review): `filtersettings` is accepted but unused here -- possibly
    # consumed by an override or truncated in this snapshot.
    self.getControl( LABEL_TEXT ).setLabel( common.getstring(30220) )
Xycl/plugin.image.mypicsdb
[ 12, 20, 12, 2, 1344354926 ]
def zoom(self, way, step=1):
    """Change the zoom level and reload the map.

    '+' zooms in by *step*, '-' zooms out by *step*; any other value sets
    the level to *step* directly. The result is clamped to the
    [zoom_min, zoom_max] range before the map is reloaded.
    """
    if way == "+":
        level = self.zoomlevel + step
    elif way == "-":
        level = self.zoomlevel - step
    else:
        level = step
    self.zoomlevel = max(self.zoom_min, min(self.zoom_max, level))
    self.load_map()
Xycl/plugin.image.mypicsdb
[ 12, 20, 12, 2, 1344354926 ]
def test_1(self, completion):
    # The completion fixture must produce at least one candidate.
    assert completion
scop/bash-completion
[ 2349, 352, 2349, 219, 1455029043 ]
def test_2(self, completion):
    # The completion fixture must produce at least one candidate.
    assert completion
scop/bash-completion
[ 2349, 352, 2349, 219, 1455029043 ]
def test_all_message_ids(self, completion):
    # At least one completion should look like a dash-separated message id.
    assert any("-" in x for x in completion)
scop/bash-completion
[ 2349, 352, 2349, 219, 1455029043 ]
def test_enabled_message_ids(self, completion):
    # At least one completion should look like a dash-separated message id.
    assert any("-" in x for x in completion)
scop/bash-completion
[ 2349, 352, 2349, 219, 1455029043 ]
def init(mode=None, **kwargs):
    """Wrapper to create the API object you need to acces the CCU API.

    By default it detects whether or not this code is being executed on the
    CCU or on another system. And initializes either a LocalAPI() object when
    run directly on a CCU or, in all other cases, a RemoteAPI() object. This
    object is then being returned.

    You can provide the mode argument to disable auto detection and either
    set it to "local" to enforce a LocalAPI() object to be created or "remote"
    to enforce a RemoteAPI() object.

    In case a RemoteAPI() object is being created, you need to provide at
    least the additional keyword arguments address="http://[HOST]" which
    needs to contain the base URL to your CCU together with
    credentials=("[USER]", "PASSWORD") which must be valid credentials to
    authenticate with the CCU.
    """
    if mode is None:
        # Conditional expression instead of the fragile `cond and a or b`
        # idiom (which silently misbehaves when the middle operand is falsy).
        mode = "local" if utils.is_ccu() else "remote"

    if mode == "local":
        if not utils.is_ccu():
            raise PMException("local mode can only be used on the CCU.")

        return LocalAPI()
    elif mode == "remote":
        try:
            return RemoteAPI(**kwargs)
        except TypeError as e:
            raise PMException("You need to provide at least the address and credentials "
                              "to access your CCU (%s)." % e)
    else:
        raise PMException("Invalid mode given. Valid ones are \"local\" and \"remote\".")
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _replace_wrong_encoded_json(self, text): return text.replace("\\{", "{")\ .replace("\\[", "[")\ .replace("\\/", "/")
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _register_atexit_handler(self):
    """Can be called to register a cleanup handler on interpreter exit.

    The APIs can register this to ensure the close() method is called
    on interpreter shutdown."""
    atexit.register(self.close)
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _parse_api_response(self, method_name_int, kwargs, body): # FIXME: The ccu is performing wrong encoding at least for output of # executed rega scripts. But maybe this is a generic problem. Let's see # and only fix the known issues for the moment. if method_name_int in [ "rega_run_script", "interface_get_paramset_description", "room_get_all" ]: body = AbstractAPI._replace_wrong_encoded_json(body) try: msg = json.loads(body) except Exception as e: raise PMException("Failed to parse response to %s (%s):\n%s\n" % (method_name_int, e, body)) if msg["error"] is not None: if msg["error"]["code"] == 501 and not self._call('rega_is_present'): raise PMConnectionError("The logic layer (ReGa) is not available (yet). When " "the CCU has just been started, please wait some time " "and retry.") else: raise PMException("[%s] %s: %s (Code: %s, Request: %r)" % (method_name_int, msg["error"]["name"], msg["error"]["message"], msg["error"]["code"], kwargs)) return msg["result"]
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def __del__(self):
    """When object is removed, the close() method is called."""
    # _constructed guards against __init__ having failed part-way, in which
    # case close() could touch attributes that were never created.
    if self._constructed:
        self.close()
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def __getattr__(self, method_name_int):
    """Realizes dynamic methods based on the methods supported by the API.

    The method names are nearly the same as provided by the CCU
    (see http://[CCU_ADDRESS]/api/homematic.cgi or API.print_methods()).
    The method names are slighly renamed. For example CCU.getSerial() is
    available as API.ccu_get_serial() in pmatic. The translation is made
    by the _to_internal_name() method. For details take a look at that
    function.
    """
    with self._api_lock:
        self._initialize()

    # The returned closure performs the actual API call on invocation.
    # Positional arguments are rejected so that argument names always
    # match the CCU method signature.
    def lowlevel_call(*args, **kwargs):
        if args:
            raise PMException("You need to specify your arguments as named arguments. "
                              "For example api.sys_var_get_value_by_name(name=\"...\").")
        return self._call(method_name_int, **kwargs)

    return lowlevel_call
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _initialize(self):
    # One-shot initialization: subsequent calls are no-ops once
    # self.initialized reports True. Expected to be called while holding
    # self._api_lock (all callers in this file do).
    if self.initialized:
        return

    self._fail_exc = None
    self.logger.debug("[API] Initializing...")
    try:
        self._initialize_api()
        self._initialized = True
        self.logger.debug("[API] Initialized")
    except Exception as e:
        # Remember the failure so fail_reason() can report it later.
        self._initialized = False
        self._fail_exc = e
        raise
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _get_methods_config(self):
    """Gathers the method configuration file from the CCU.

    Returns the method configuration as list of lines. Each
    of these lines is a unicode string.

    Has to be implemented by the specific API class."""
    raise NotImplementedError()
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def initialized(self):
    """Tells the caller whether or not the "connection" with the CCU is
    ready for other API calls."""
    # Read under the lock so a concurrent _initialize() is observed atomically.
    with self._api_lock:
        return self._initialized
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def fail_reason(self):
    """When the API has not been initialized successfully, this provides
    access to the exception that caused the problem. Otherwise it is set
    to *None*."""
    return self._fail_exc
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _call(self, method_name_int, **kwargs): # pylint:disable=unused-argument
    """Realizes the API calls.

    Has to be implemented by the specific API class."""
    raise NotImplementedError()
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def print_methods(self):
    """Prints a description of the available API methods.

    This information has been fetched from the CCU before. This might be
    useful for working with the API to gather infos about the available
    calls.
    """
    with self._api_lock:
        self._initialize()
        line_format = "%-60s %s\n"
        sys.stdout.write(line_format % ("Method", "Description"))

        # Output device API methods
        for method_name_int, method in sorted(self._methods.items()):
            call_txt = "API.%s(%s)" % (method_name_int, ", ".join(method["INT_ARGUMENTS"]))
            sys.stdout.write(line_format % (call_txt, method["INFO"]))
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _init_methods(self):
    """Parses the method configuration read from the CCU.

    The method configuration read with _get_methods_config() is being
    parsed here to initialize the self._methods dictionary which holds
    all needed information about the available API methods.
    """
    self._methods.clear()

    method_name_int = None
    for l in self._get_methods_config():
        line = l.rstrip()
        if not line:
            continue
        elif line[0] not in [ " ", "\t" ] and line[-1] == "{":
            # Start of a method block: "<RealName> ... {".
            real_method_name = line.split(" ", 1)[0]
            method_name_int = self._to_internal_name(real_method_name)
            self._methods.setdefault(method_name_int, {"NAME": real_method_name})
        elif method_name_int and line == "}":
            # End of the current method block.
            method_name_int = False
        elif method_name_int:
            # "KEY value" attribute line inside a method block.
            key, val = line.lstrip().split(None, 1)
            if key == "INFO":
                val = val[1:-1] # strip off surrounding braces
            elif key == "ARGUMENTS":
                val = val[1:-1].split() # strip off surrounding braces, split by spaces
                # Internal arguments have the _session_id_ removed
                self._methods[method_name_int]["INT_ARGUMENTS"] = [ a for a in val if a != "_session_id_" ]
            self._methods[method_name_int][key] = val
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def __init__(self, address, credentials, connect_timeout=10, http_auth=None):
    # Pre-declare attributes before super().__init__ so __del__ never hits
    # a missing attribute; the _set_* helpers validate and fill them.
    self._session_id      = None
    self._address         = None
    self._credentials     = None
    self._http_auth       = None
    self._connect_timeout = None

    super(RemoteAPI, self).__init__()

    self._set_address(address)
    self._set_credentials(credentials)
    self._set_http_auth(http_auth)
    self._set_connect_timeout(connect_timeout)

    # Mark construction complete so __del__ may safely call close().
    self._constructed = True
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _set_credentials(self, credentials):
    # Validate that credentials is a ("username", "password") tuple of
    # strings before storing it; reject anything else with PMException.
    if not isinstance(credentials, tuple):
        raise PMException("Please specify the user credentials to log in to the CCU "
                          "like this: \"(username, password)\".")
    elif len(credentials) != 2:
        raise PMException("The credentials must be given as tuple of two elements.")
    elif not utils.is_string(credentials[0]):
        raise PMException("The username is of unhandled type.")
    elif not utils.is_string(credentials[1]):
        raise PMException("The password is of unhandled type.")

    self._credentials = credentials
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _set_connect_timeout(self, timeout): if type(timeout) not in [ int, float ]: raise PMException("Invalid timeout value. Must be of type int or float.") self._connect_timeout = timeout
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def address(self):
    # Read-only accessor for the CCU base URL -- presumably exposed via
    # @property at the class level (decorator outside this snippet).
    return self._address
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def close(self):
    # Terminate the CCU session (logout), serialized via the API lock.
    with self._api_lock:
        self._logout()
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _initialize_api(self):
    # Remote initialization: authenticate first, then learn the available
    # API methods, and make sure we log out again at interpreter exit.
    self._login()
    self._init_methods()
    self._register_atexit_handler()
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _get_methods_config(self):
    """Fetch /www/api/methods.conf from the CCU as a list of lines."""
    # Can not use API.rega_run_script() here since the method infos are not yet
    # available. User code should use API.rega_run_script().
    response = self._call("rega_run_script",
        _session_id_=self._session_id,
        script="string stderr;\n"
               "string stdout;\n"
               "system.Exec(\"cat /www/api/methods.conf\", &stdout, &stderr);\n"
               "Write(stdout);\n"
    )
    return response.split("\r\n")
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _login(self):
    """Open a CCU session and remember the returned session id."""
    if self._session_id is not None:
        raise PMException("Already logged in.")

    response = self._call("session_login", username=self._credentials[0], password=self._credentials[1])
    if response is None:
        raise PMException("Login failed: Got no session id.")
    self._session_id = response
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _logout(self):
    """Close the CCU session if one is open (safe to call repeatedly)."""
    if self._session_id is not None:
        self._call("session_logout", _session_id_=self._session_id)
        self._session_id = None
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _call(self, method_name_int, **kwargs):
    """Runs the provided method, which needs to be one of the methods which are available
    on the device (with the given arguments) on the CCU."""
    # Serialize all HTTP API traffic through the API lock.
    with self._api_lock:
        return self._do_call(method_name_int, **kwargs)
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _do_call(self, method_name_int, **kwargs):
    """POST one JSON-RPC request to <address>/api/homematic.cgi and parse the reply."""
    method = self._get_method(method_name_int)
    args = self._get_arguments(method, kwargs)

    self.logger.debug("CALL: %s ARGS: %r", method["NAME"], args)

    json_data = json.dumps({
        "method": method["NAME"],
        "params": args,
    })
    url = "%s/api/homematic.cgi" % self._address

    try:
        self.logger.debug(" URL: %s DATA: %s", url, json_data)
        request = Request(url, data=json_data.encode("utf-8"))
        if self._http_auth:
            # NOTE(review): base64.encodestring() exists on Python 2 only
            # (removed in 3.9; use encodebytes there) -- confirm target runtime.
            base64string = base64.encodestring('%s:%s' % self._http_auth).replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)
        handle = urlopen(request, timeout=self._connect_timeout)
    except Exception as e:
        # Map transport-level failures to a single PMConnectionError with
        # a readable cause.
        if isinstance(e, URLError):
            msg = e.reason
        elif isinstance(e, BadStatusLine):
            msg = "Request terminated. Is the device rebooting?"
        else:
            msg = e
        raise PMConnectionError("Unable to open \"%s\" [%s]: %s" % (url, type(e).__name__, msg))

    response_txt = ""
    for line in handle.readlines():
        response_txt += line.decode("utf-8")

    http_status = handle.getcode()

    self.logger.debug(" HTTP-STATUS: %d", http_status)
    if http_status != 200:
        raise PMException("Error %d opening \"%s\" occured: %s" % (http_status, url, response_txt))

    self.logger.debug(" RESPONSE: %s", response_txt)

    return self._parse_api_response(method_name_int, kwargs, response_txt)
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _get_method(self, method_name_int):
    """This wrapper allows specific API calls which are needed
    before the real list of methods is available, so allow
    it to be not validated and fake the method response."""
    try:
        return super(RemoteAPI, self)._get_method(method_name_int)
    except PMException:
        # Bootstrap fallbacks: these four methods must be callable before
        # _init_methods() has populated self._methods (login, ReGa check,
        # script execution to fetch the config, and logout).
        if method_name_int == "session_login" and not self._methods:
            return {
                "NAME": "Session.login",
                "INFO": "Führt die Benutzeranmeldung durch",
                "ARGUMENTS": [ "username", "password" ],
            }
        elif method_name_int == "rega_is_present" and not self._methods:
            return {
                "NAME": "ReGa.isPresent",
                "INFO": "Prüft, ob die Logikschicht (ReGa) aktiv ist",
                "ARGUMENTS": [ ],
            }
        elif method_name_int == "rega_run_script" and not self._methods:
            return {
                "NAME": "ReGa.runScript",
                "INFO": "Führt ein HomeMatic Script aus",
                "ARGUMENTS": [ "_session_id_", "script" ],
            }
        elif method_name_int == "session_logout" and not self._methods:
            return {
                "NAME": "Session.logout",
                "INFO": "Beendet eine Sitzung",
                "ARGUMENTS": [ "_session_id_" ],
            }
        else:
            raise
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _get_arguments(self, method, args): if "_session_id_" in method["ARGUMENTS"] and self._session_id: args["_session_id_"] = self._session_id return args
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def __init__(self):
    # The tclsh child process is started lazily in _initialize_api();
    # _constructed tells __del__ that construction completed.
    super(LocalAPI, self).__init__()
    self._tclsh = None
    self._constructed = True
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _initialize_api(self):
    # Local initialization: start the tclsh helper, learn the available
    # API methods, and make sure we clean up at interpreter exit.
    self._init_tclsh()
    self._init_methods()
    self._register_atexit_handler()
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _init_tclsh(self):
    """Spawn a persistent tclsh child and pre-load the CCU API tcl libraries."""
    try:
        self._tclsh = subprocess.Popen(["/bin/tclsh"],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            #stderr=subprocess.PIPE,
            cwd="/www/api",
            shell=False)
    except OSError as e:
        # errno 2 == ENOENT: the interpreter binary is missing.
        if e.errno == 2:
            raise PMException("Could not find /bin/tclsh. Maybe running local API on "
                              "non CCU device?")
        else:
            raise

    # NOTE(review): writes a str to a binary pipe -- fine on Python 2, would
    # need .encode() on Python 3; confirm target runtime.
    self._tclsh.stdin.write(
        "load tclrpc.so\n"
        "load tclrega.so\n"
        "source /www/api/eq3/common.tcl\n"
        "source /www/api/eq3/ipc.tcl\n"
        "source /www/api/eq3/json.tcl\n"
        "source /www/api/eq3/jsonrpc.tcl\n"
        "source /www/api/eq3/hmscript.tcl\n"
        "source /www/api/eq3/event.tcl\n"
        "array set INTERFACE_LIST [ipc_getInterfaces]\n"
        "array set METHOD_LIST [file_load %s]\n" % self._methods_file
    )
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _get_methods_config(self): return open(self._methods_file).read().decode("latin-1").split("\r\n")
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _call(self, method_name_int, **kwargs):
    """Runs the given API method directly on the CCU using a tclsh process.

    The API method needs to be one of the methods which are available
    on the device (with the given arguments)."""
    with self._api_lock:
        try:
            return self._do_call(method_name_int, **kwargs)
        except PMException:
            # API-level errors are not transport issues; re-raise as-is.
            raise
        except IOError as e:
            # Try to restart in case of issues with the tclsh process. This seem to often happen
            # after 601 (TCL error) responses. Try to deal with it.
            self.logger.warning("Exception in API call (%s). Restarting tclsh "
                                "and retrying this API call.", e)
            self._init_tclsh()
            response = self._do_call(method_name_int, **kwargs)
            return response
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _do_call(self, method_name_int, **kwargs):
    """Execute one API method inside the persistent tclsh and parse its output."""
    method = self._get_method(method_name_int)
    parsed_args = self._get_args(method, kwargs)
    file_path = "/www/api/methods/%s" % method["SCRIPT_FILE"]

    self.logger.debug("CALL: %s ARGS: %r", method["SCRIPT_FILE"], parsed_args)

    tcl = ""

    # Some of the methods use variables with same name but different types.
    # Since we use one continous running tclsh things get mixed up and cause
    # problems. Depending on which scripts are called. Some known vars are
    # cleaned up here. If this is too much trouble, switch to start/stop new
    # tclsh processes per call.
    tcl += "if { [info exists device] } {unset device}\n"
    tcl += "if { [info exists description] } {unset description}\n"

    # \0\n is written to stdout of the tclsh process to mark and detect the
    # end of the output of the API call.
    tcl += \
        "array set method $METHOD_LIST(%s)\n" \
        "array set args %s\n" \
        "source %s\n" \
        "puts \0\n" % (method["NAME"], parsed_args, file_path)

    self.logger.debug(" TCL: %r", tcl)

    self._tclsh.stdin.write(tcl.encode("utf-8"))

    response_txt = ""
    while True:
        line = self._tclsh.stdout.readline().decode("utf-8")
        if not line or (len(line) > 1 and line[-2] == "\0"):
            response_txt += line[:-2] + "\n"
            break # found our terminator (see above)
        else:
            response_txt += line

    self.logger.debug(" RESPONSE: %r", response_txt)

    # index 0 would be the header, but we don't need it
    body = response_txt.split("\n\n", 1)[1]

    try:
        return self._parse_api_response(method_name_int, kwargs, body)
    except PMException:
        # Log the raw exchange to ease debugging of malformed responses.
        self.logger.warning("Exception in API call.")
        self.logger.warning(" TCL: %r", tcl)
        self.logger.warning(" BODY: %r", body)
        raise
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _get_args(self, method, args): def quote_string(s): return "\"%s\"" % s.replace("\"", "\\\"") args_parsed = "[list " for arg_name in method["ARGUMENTS"]: try: if arg_name == "_session_id_" and arg_name not in args: val = quote_string("") # Fake default session id. Not needed for local API else: val = args[arg_name] if val is None: val = quote_string("") elif type(val) in [ int, float ]: val = "%s" % val elif type(val) == bool: val = 1 if val else 0 else: val = quote_string("%s" % val) args_parsed += "%s %s " % (quote_string(arg_name), val) except KeyError: raise PMException("Missing argument \"%s\". Needs: %s" % (arg_name, ", ".join(method["ARGUMENTS"]))) return args_parsed.rstrip(" ") + "]"
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def close(self):
    """Closes the "connection" with the CCU. In fact it terminates the tclsh
    process."""
    with self._api_lock:
        if self._tclsh:
            self._tclsh.kill()
            self._tclsh = None
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def __init__(self, api, max_cache_age=360):
    # Read-through cache that behaves like a dict; entries are refreshed
    # once they are older than max_cache_age seconds.
    dict.__init__(self)
    self._api = api
    self._max_cache_age = max_cache_age # seconds
    self._last_update = None
    self._lock = threading.Lock()
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def __getitem__(self, key):
    # Refresh stale cache content before delegating the lookup.
    self._update_data()
    return dict.__getitem__(self, key)
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def values(self):
    # Refresh stale cache content before returning the values.
    self._update_data()
    return dict.values(self)
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def __setitem__(self, key, val):
    # The cache is read-only for callers; content is written internally
    # via dict.__setitem__ only.
    raise PMException("Can not be changed.")
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _update(self):
    """Fill the cache from device_list_all_detail(), normalizing key names."""
    # Incoming dict keys are camel cased. uah.
    # The dict keys are directly handed over to the device/channel objects. So they
    # need to be equalized and with internal naming specs just like the also different
    # keys from the XML-RPC messages.
    def decamel_dict_keys(d):
        # Recurses into lists of dicts so nested specs are normalized too.
        temp = {}
        for k in d.keys():
            value = d[k]
            if isinstance(value, list):
                for idx, entry in enumerate(value):
                    if isinstance(entry, dict):
                        value[idx] = decamel_dict_keys(entry)
            temp[utils.decamel(k)] = value
        return temp

    # Write via dict.__setitem__ directly to bypass any overridden
    # (read-only) __setitem__.
    for spec in self._api.device_list_all_detail():
        dict.__setitem__(self, spec["address"], decamel_dict_keys(spec))
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def _update(self):
    """Fill the cache from interface_list_devices(), attaching channels to parents."""
    # Incoming dict keys are camel cased. uah.
    # The dict keys are directly handed over to the device/channel objects. So they
    # need to be equalized and with internal naming specs just like the also different
    # keys from the XML-RPC messages.
    def decamel_dict_keys(d):
        temp = {}
        for k in d.keys():
            temp[utils.decamel(k)] = d[k]
        return temp

    devices = {}
    for spec in self._api.interface_list_devices(interface="BidCos-RF"):
        spec = decamel_dict_keys(spec)
        if "parent" not in spec:
            # Top-level device record.
            devices[spec["address"]] = spec
        else:
            # Channel record: append it to its parent device's channel list.
            device = devices[spec["parent"]]
            channels = device.setdefault("channels", [])
            channels.append(spec)

    # Write via dict.__setitem__ directly to bypass any overridden
    # (read-only) __setitem__.
    for key, val in devices.items():
        dict.__setitem__(self, key, val)
LaMi-/pmatic
[ 33, 23, 33, 14, 1451661313 ]
def update_print(apibase, password, print_id, progress):
    """Report print progress to the API.

    NOTE(review): params/data are built but no HTTP request is visible
    here -- the call appears truncated in this snapshot; confirm against
    the full source.
    """
    params = {'id': print_id}
    data = dict(progress=progress, password=password)
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def finish_print(apibase, password, print_id, print_info):
    """Mark a print as finished via the API.

    NOTE(review): params/print_info are prepared but no HTTP request is
    visible here -- likely truncated in this snapshot.
    """
    params = {'id': print_id}
    print_info.update(dict(password=password))
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def update_scan(apibase, password, scan_id, progress):
    """Report scan progress to the API.

    NOTE(review): only the request params are built; `progress` and
    `password` are never used and no HTTP call is visible -- likely
    truncated in this snapshot.
    """
    params = {'id': scan_id}
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def finish_scan(apibase, password, scan_id, uploaded_file, print_id, print_page_number, print_href, min_coord, max_coord, geojpeg_bounds):
    """Mark a scan as finished, reporting its tile extents and bounds.

    NOTE(review): the payload is assembled but no HTTP request is visible
    here -- likely truncated in this snapshot.
    """
    params = {'id': scan_id}
    data = {
        'print_id': print_id,
        'print_page_number': print_page_number,
        'print_href': print_href,
        'password': password,
        'uploaded_file': uploaded_file,
        'has_geotiff': 'yes',
        'has_geojpeg': 'yes',
        'has_stickers': 'no',
        'min_row': min_coord.row, 'max_row': max_coord.row,
        'min_column': min_coord.column, 'max_column': max_coord.column,
        'min_zoom': min_coord.zoom, 'max_zoom': max_coord.zoom,
        'geojpeg_bounds': '%.8f,%.8f,%.8f,%.8f' % geojpeg_bounds
    }
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def fail_scan(apibase, password, scan_id):
    """Mark a scan as failed via the API.

    NOTE(review): params/data are built but no HTTP request is visible --
    likely truncated in this snapshot.
    """
    params = {'id': scan_id}
    data = {'password': password}
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def finish_form(apibase, password, form_id, action_url, http_method, title, fields):
    """Mark a form as finished via the API.

    NOTE(review): the payload is built but `form_id`/`fields` are unused
    and no HTTP request is visible -- likely truncated in this snapshot.
    """
    data = dict(password=password, action_url=action_url, http_method=http_method, title=title)
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def fail_form(apibase, password, form_id):
    """Mark a form as failed via the API.

    NOTE(review): params/data are built but no HTTP request is visible --
    likely truncated in this snapshot.
    """
    params = {'id': form_id}
    data = {'password': password}
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def upload(params, file_path, file_contents, apibase, password):
    """ Upload a file via the API append.php form input provision thingie.

        This allows uploads to either target S3 or the app itself.
    """
    # Mutates the caller's params dict in place with auth and file metadata.
    params.update(dict(password=password, dirname=dirname(file_path), mimetype=(guess_type(file_path)[0] or '')))

    # NOTE(review): `file_contents` is unused and `res` is never consumed or
    # returned -- the remainder of this function appears truncated in this
    # snapshot; confirm against the full source.
    res = requests.get(urljoin(apibase, '/append.php'), params=params, headers=dict(Accept='application/paperwalking+xml'))
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def append_print_file(print_id, file_path, file_contents, apibase, password):
    """ Upload a print.
    """
    return upload({"print": print_id}, file_path, file_contents, apibase, password)
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def get_print_info(print_url):
    """Fetch and parse a print's XML description; raises on 404.

    NOTE: `print print_url` below is a Python 2 print statement (this
    module is Python 2 code); it looks like leftover debug output.
    """
    print print_url
    res = requests.get(print_url, headers=dict(Accept='application/paperwalking+xml'))

    if res.status_code == 404:
        raise Exception("No such atlas: %s" % print_url)

    # Parse the XML payload; further processing appears truncated in this
    # snapshot (print_ is built but not yet used here).
    print_ = ElementTree.parse(StringIO(res.text)).getroot()
stamen/fieldpapers
[ 100, 10, 100, 3, 1320352055 ]
def __init__(self, db):
    """
    NOTE: This class does not commit any changes to the db. That must
    be done from the calling environment.

    @type db: Cerebrum.database.Database
    @param db: A database connection.
    """
    self.db = db
    self.co = Factory.get('Constants')(db)
    self.clconst = Factory.get('CLConstants')(db)
    # Classes (not instances): instantiated per use with the db handle.
    self.account_class = Factory.get('Account')
    self.group_class = Factory.get('Group')
    self.vhutils = VirthomeUtils(db)
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def group_create(self, group_name, description, creator, owner, url=None, forward=None):
    """ This method creates a new VirtHome group.

    NOTE: Some group name formats are reserved for specific applications!
          This method WILL allow creation of reserved group names.

    @type group_name: str
    @param group_name: The name of the new group

    @type description: str
    @param description: The group description

    @type creator: self.account_class
    @param creator: The account object of the creator of this group.

    @type owner: self.account_class
    @param owner: The account object of the owner of this group.

    @type url: str
    @param url: A url resource associated with the group

    @type forward: str
    @param forward: A url resource to an external app that uses this group
    """
    gr = self.group_class(self.db)

    if self.vhutils.group_exists(group_name):
        raise CerebrumError("Group name '%s' already exists" % group_name)

    # TBD: Verify owner.np_type is FEDaccount? Must it be?

    try:
        gr.populate(creator.entity_id, group_name, description)
        gr.write_db()
        gr.set_group_resource(url)
    except (ValueError, AssertionError):
        # Surface populate/validation failures as a user-facing error.
        raise CerebrumError(str(sys.exc_info()[1]))

    # Only store the forward trait when the URL passes the whitelist.
    forward = self.vhutils.whitelist_url(forward)
    if forward:
        gr.populate_trait(self.co.trait_group_forward, strval=forward)

    for spread in getattr(cereconf, "BOFHD_NEW_GROUP_SPREADS", ()):
        gr.add_spread(self.co.human2constant(spread, self.co.Spread))

    gr.write_db()

    roles = GroupRoles(self.db)
    roles.add_admin_to_group(owner.entity_id, gr.entity_id)

    return gr
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def group_disable(self, group):
    """This method removes all members and auth data related to a group,
    effectively disabling it without actually 'nuking' it.

    @type group: Cerebrum.Group
    @param group: The (populated) group object to disable

    @rtype: str
    @return: The name of the group that was disabled, nice for feedback.
    """
    assert hasattr(group, 'entity_id')

    # Yank all the spreads
    for row in group.get_spread():
        group.delete_spread(row["spread"])

    # Remove all members (expired memberships included)
    for membership in group.search_members(group_id=group.entity_id, member_filter_expired=False):
        group.remove_member(membership["member_id"])

    group.write_db()

    # Clean up the permissions (granted ON the group and TO the group)
    self.vhutils.remove_auth_targets(group.entity_id)
    self.vhutils.remove_auth_roles(group.entity_id)

    return group.group_name
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def __init__(self, db):
    # Utility helpers for VirtHome; like the command class, this never
    # commits -- the caller owns the transaction.
    self.db = db
    self.co = Factory.get('Constants')(db)
    self.clconst = Factory.get('CLConstants')(db)
    self.group_class = Factory.get('Group')
    self.account_class = Factory.get('Account')

    # Compile the whitelist patterns once up front rather than on each call.
    self.url_whitelist = [re.compile(r) for r in cereconf.FORWARD_URL_WHITELIST]
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def list_group_members(self, group, indirect_members=False):
    """List members of a group.

    Does NOT include admins or moderators, unless they are also
    members.

    @type group: Cerebrum.Group
    @param group: The group to list members of.

    @type indirect_members: bool
    @param indirect_members: Whether members of member groups should be
        included as well.

    @rtype: list of dict
    @return: One dict per member, sorted by member name, with keys
        'member_id', 'member_type', 'member_name', 'owner_name' and
        'email_address'.  The latter three may be None for member types
        other than VirtHome/FED accounts and groups.
    """
    ac = self.account_class(self.db)
    gr = self.group_class(self.db)
    assert hasattr(group, 'entity_id')

    result = list()
    for x in group.search_members(group_id=group.entity_id,
                                  indirect_members=indirect_members):
        owner_name = None
        member_name = None
        email_address = None
        member_type = self.co.EntityType(x['member_type'])
        if member_type == self.co.entity_account:
            ac.clear()
            ac.find(x['member_id'])
            # Only VirtHome's own account types carry owner name and
            # e-mail information.
            if ac.np_type in (self.co.fedaccount_type,
                              self.co.virtaccount_type):
                member_name = ac.account_name
                owner_name = ac.get_owner_name(self.co.human_full_name)
                email_address = ac.get_email_address()
        elif member_type == self.co.entity_group:
            gr.clear()
            gr.find(x['member_id'])
            member_name = gr.group_name

        result.append({'member_id': x['member_id'],
                       'member_type': str(member_type),
                       'member_name': member_name,
                       'owner_name': owner_name,
                       'email_address': email_address, })

    # Sort with key= instead of a cmp function: cmp() and the cmp
    # argument to list.sort() are Python 2-only.  member_name may be
    # None (unknown member types), so fall back to '' to keep the
    # comparison well-defined.
    result.sort(key=lambda member: member['member_name'] or '')
    return result
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def get_trait_val(self, entity, trait_const, val='strval'):
    """Fetch one value field from an entity's trait.

    @type entity: Cerebrum.Entity
    @param entity: Entity whose trait should be inspected.

    @type trait_const: _EntityTraitCode
    @param trait_const: Which trait type to look up.

    @type val: str
    @param val: Which trait field to return (e.g. 'strval', 'numval').

    @rtype: str or None
    @return: The requested field of the trait, or None when the entity
        has no trait of type L{trait_const}, or the trait lacks the
        L{val} field.
    """
    assert hasattr(entity, 'entity_id') and hasattr(entity, 'get_trait')
    try:
        # get_trait() yields None for a missing trait; the resulting
        # AttributeError on .get() is our "no trait" signal.
        return entity.get_trait(trait_const).get(val, None)
    except AttributeError:
        return None
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def setup_event_request(self, issuer_id, event_type, params=None,
                        change_by=None):
    """Register a pending confirmation event in the changelog.

    Performs the necessary magic when creating a pending confirmation
    event, i.e. creates a changelog entry carrying the event.

    @type issuer_id: int
    @param issuer_id: entity_id of the event creator/inviter.

    @type event_type: Constants._ChangeTypeCode
    @param event_type: Changelog type used to store this event.

    @type params: obj
    @param params: Arbitrary extra information related to the
        L{event_type}.

    @rtype: str
    @return: Confirmation key (ID) of the newly created event.
    """
    # From CIS the change_by parameter is not set up; callers should
    # supply it in the request when available.
    return self.db.log_pending_change(issuer_id,
                                      event_type,
                                      None,
                                      change_params=params,
                                      change_by=change_by)
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def account_exists(self, account_name):
    """Check whether L{account_name} is taken in Cerebrum/VirtHome.

    Account names are case sensitive, but two accounts must not share
    the same name in lowercase (an LDAP restriction), so that is
    checked as well.  Note that combining this check with a later
    account.write_db() is in no way atomic; it merely lets us produce
    more meaningful error messages in (hopefully) most cases.

    @type account_name: str
    @param account_name: The account name to check.

    @rtype: bool
    @return: True when the name is already taken, False when it is
        available for use.
    """
    # Any account instance will do for this query.
    return not self.account_class(self.db).uname_is_available(account_name)
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def create_account(self, account_type, account_name, email, expire_date,
                   human_first_name, human_last_name,
                   with_confirmation=True):
    """Create an account of the specified type.

    Convenience function that gathers the common steps of account
    creation in one place.

    @type account_type: subclass of BaseVirtHomeAccount
    @param account_type: The account class to instantiate.

    @type account_name: str
    @param account_name: Name to give the new account.

    @type email: str
    @param email: E-mail address of the account owner.

    @type expire_date: mx.DateTime.DateTime
    @param expire_date: Expire date for the account.

    @type human_first_name: str
    @param human_first_name: First name(s) of the account owner.

    @type human_last_name: str
    @param human_last_name: Last name(s) of the account owner.

    @type with_confirmation: bool
    @param with_confirmation: Whether a confirmation request should be
        issued for this account; in some situations there is nothing to
        confirm.  NOTE: the caller must dispatch any confirmation mail
        -- this flag only controls whether a confirmation event/code is
        created and returned.

    @rtype: list [ BaseVirtHomeAccount, str ]
    @return: The newly created account and the confirmation key needed
        to confirm the given e-mail address.  The key is empty when
        L{with_confirmation} is False.
    """
    assert issubclass(account_type, BaseVirtHomeAccount)

    # Creation can still fail later, but this hopefully captures most
    # collisions early and produces a sensible error message.
    if self.account_exists(account_name):
        raise CerebrumError("Account '%s' already exists" % account_name)

    account = account_type(self.db)
    account.populate(email, account_name, human_first_name,
                     human_last_name, expire_date)
    account.write_db()
    # Never exported to ldap
    account.populate_trait(self.co.trait_user_retained, numval=0)
    account.write_db()
    self.assign_default_user_spreads(account)

    confirmation_key = ""
    if with_confirmation:
        confirmation_key = self.setup_event_request(
            account.entity_id, self.clconst.va_pending_create)
    return account, confirmation_key
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def find_or_create_op_target(self, entity_id, target_type):
    """Return the auth op-target of L{target_type} for L{entity_id}.

    If no such target exists yet, a new one is created and written.

    @type entity_id: int
    @param entity_id: Entity the op-target points to.

    @param target_type: Type of the op-target.

    @rtype: BofhdAuthOpTarget
    @return: A populated/found op-target object.
    """
    aot = BofhdAuthOpTarget(self.db)
    existing = [row for row in aot.list(entity_id=entity_id,
                                        target_type=target_type)]
    if not existing:
        # Nothing there -- make a fresh target.
        aot.populate(entity_id, target_type)
        aot.write_db()
        return aot

    # This method never creates more than one target per entity, and
    # never populates attr, so anything else is an inconsistency.
    assert len(existing) == 1
    assert existing[0]['attr'] is None

    aot.find(existing[0]['op_target_id'])
    return aot
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def remove_auth_roles(self, entity_id):
    """Strip L{entity_id} of every authorization role it holds.

    Authorization targets that end up without any roles pointing at
    them as a consequence are deleted as well.

    @type entity_id: int
    @param entity_id: The entity_id of the object to strip.
    """
    ar = BofhdAuthRole(self.db)
    aot = BofhdAuthOpTarget(self.db)

    for role in ar.list(entity_ids=entity_id):
        ar.revoke_auth(entity_id, role['op_set_id'],
                       role['op_target_id'])
        # If that was the last role on the target, drop the target too.
        if not ar.list(op_target_id=role['op_target_id']):
            aot.clear()
            aot.find(role['op_target_id'])
            aot.delete()
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def list_group_moderators(self, group):
    """List the (account) moderators of C{group}.

    @type group: Cerebrum.Group
    @param group: A populated group object to list moderators for.

    @rtype: list
    @return: One dict per moderator account with keys: ['account_id',
        'account_name', 'owner_name', 'email_address', 'group_id',
        'group_name', 'description']
    """
    ac = self.account_class(self.db)
    roles = GroupRoles(self.db)
    moderators = []
    for row in roles.search_moderators(
            group_id=group.entity_id,
            moderator_type=self.co.entity_account):
        ac.clear()
        ac.find(row['moderator_id'])
        moderators.append({
            'account_id': ac.entity_id,
            'account_name': ac.get_account_name(),
            'owner_name': ac.get_owner_name(self.co.human_full_name),
            'email_address': ac.get_email_address(),
            'group_id': group.entity_id,
            'group_name': group.group_name,
            'description': group.description,
        })
    return moderators
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def list_groups_moderated(self, account):
    """List the groups moderated by C{account}.

    @type account: Cerebrum.Account
    @param account: A populated account object to list groups for.

    @rtype: list
    @return: One dict per moderated group with keys: ['group_id',
        'group_name', 'url', 'description', 'account_id',
        'account_name']
    """
    assert hasattr(account, 'entity_id')
    gr = self.group_class(self.db)
    moderated = []
    for row in gr.search(moderator_id=account.entity_id):
        gr.clear()
        gr.find(row['group_id'])
        moderated.append({
            'group_id': gr.entity_id,
            'group_name': gr.group_name,
            'url': gr.get_contact_info(self.co.virthome_group_url),
            'description': row.description,
            'account_id': account.entity_id,
            'account_name': account.get_account_name(),
        })
    return moderated
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def in_realm(self, name, realm, strict=True):
    """Check whether L{name} belongs to the realm L{realm}.

    @type name: str
    @param name: The name (account name or group name), expected on the
        form '<local part>@<realm>'.

    @type realm: str
    @param realm: The realm, e.g. 'webid.uio.no'.

    @type strict: bool
    @param strict: If False, we consider 'some.sub.realm' as being in
        the realm 'realm'.  Otherwise, the realm must be an exact
        match.

    @rtype: bool
    @return: True if the name is within the realm, False if not.
    """
    assert isinstance(name, (str, six.text_type)), 'Invalid name'
    assert not self.illegal_realm(realm), 'Invalid realm'

    # The realm only contains alphanumerics and periods.  Use
    # re.escape() to neutralise the periods: the old manual
    # realm.replace('.', '\.') used an invalid escape sequence in a
    # non-raw string, which is a SyntaxWarning in modern Python.
    regex = re.compile(r'^(.+)@((.+\.)?%s)$' % re.escape(realm))
    # Groups: 1: <name>, 2: <matched realm> (3: <subrealm.>)
    match = regex.match(name)
    if not match:
        return False

    assert isinstance(match.group(1), (str, six.text_type))
    assert isinstance(match.group(2), (str, six.text_type))
    if not strict:
        # Accept any legal sub-realm of the requested realm.
        return not self.illegal_realm(match.group(2))
    return match.group(2) == realm
unioslo/cerebrum
[ 9, 3, 9, 40, 1396362121 ]
def __init__(self):
    # Tracks whether the extractor has been prepared for an archive;
    # presumably flipped to True by setup() -- TODO confirm against the
    # full class, which is outside this excerpt.
    self._setupped = False
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def _process_7z_names(self, fd):
    """Parse a 7z technical listing into a list of member file names.

    Entries start after a '----------' separator line and consist of
    'Key = Value' lines; a blank line terminates an entry.  Entries
    whose Attributes contain 'D' (directories) are skipped.  NOTE: an
    entry not followed by a blank line (e.g. at end of output) is
    dropped, mirroring the original behaviour.

    @param fd: file-like object carrying the 7z listing output.
    @return: list of archive member paths (files only).
    """
    START = "----------"
    names = []
    started = False
    item = {}
    while True:
        try:
            line = fd.readline()
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt /
            # SystemExit are no longer swallowed; a read error is
            # treated as end of output.
            break
        if not line:
            # readline() returns '' at EOF.
            break
        line = line.rstrip(os.linesep)
        try:
            # Byte-oriented output (Python 2 / binary pipes) is decoded;
            # already-decoded text raises AttributeError and is kept.
            line = line.decode("utf-8")
        except (AttributeError, UnicodeDecodeError):
            pass
        if line.startswith(START):
            started = True
            item = {}
            continue
        if not started:
            continue
        if line == "":
            # Blank line ends the current entry.
            if item["Attributes"].find("D") == -1:
                names.append(item["Path"])
            item = {}
        else:
            key = line.split("=")[0].strip()
            value = "=".join(line.split("=")[1:]).strip()
            item[key] = value
    return names
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def set_files(self, files, extracted=False):
    """Choose which archive members to extract, and in which order.

    Typically one fetches the full list with get_files(), filters
    and/or permutes it, and hands it back here.  With <extracted> set,
    the files are instead registered as already present -- a trick used
    for sub-archive handling to avoid blocking waits on files the
    original archive never contained.

    Note: random access in gzip or bzip2 compressed tar archives is no
    good idea; those formats are supported only for backwards
    compatibility.  For them the requested ordering is ignored and only
    the filtering is applied.
    """
    if extracted:
        self._files = files
        for name in files:
            self._extracted[name] = True
        return

    if self._type in (GZIP, BZIP2):
        # Keep the archive's own order; honour only the filtering.
        self._files = [name for name in self._files if name in files]
    else:
        self._files = files
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def get_mime_type(self):
    """Return the mime type name of the archive currently set up."""
    return self._type
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def extract(self):
    """Extract the chosen files one by one in a background thread.

    Each time a file has been extracted, a notify() is signalled on the
    Condition that was returned by setup().
    """
    worker = threading.Thread(target=self._thread_extract)
    # Non-daemon thread: daemon=False via the attribute is equivalent
    # to the old setDaemon(False) call.
    worker.daemon = False
    self._extract_thread = worker
    worker.start()
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def _thread_extract(self):
    """Worker: extract the files in the file list.

    Solid 7z and rar archives are extracted in one external-program
    call, since pulling single files out of a solid archive is very
    slow.  Everything else is extracted file by file.
    """
    if self._type == SEVENZIP and _7z_exec is not None:
        # Whole-archive extraction via the external 7z binary.
        cmd = [_7z_exec, 'x', '-bd', '-p-', '-o' + self._dst, '-y',
               self._src]
        proc = process.Process(cmd)
        proc.spawn()
        proc.wait()
        self._condition.acquire()
        for name in self._files:
            self._extracted[name] = True
        self._condition.notify()
        self._condition.release()
    elif self._type == RAR and _rar_exec is not None:
        # The extraction is run from inside the destination directory.
        cwd = os.getcwd()
        os.chdir(self._dst)
        cmd = [_rar_exec, 'x', '-kb', '-p-', '-o-', '-inul', '--',
               self._src]
        proc = process.Process(cmd)
        proc.spawn()
        proc.wait()
        os.chdir(cwd)
        self._condition.acquire()
        for name in self._files:
            self._extracted[name] = True
        self._condition.notify()
        self._condition.release()
    else:
        for name in self._files:
            self._extract_file(name)
    # BUG FIX: the original second branch was an 'if', not an 'elif',
    # so a 7z archive fell through into the per-file loop and was
    # extracted twice; the rar branch also never reached close().  Now
    # the archive is closed exactly once on every path.
    self.close()
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def extract_file_io(self, chosen):
    """Return an in-memory file object with the data of member <chosen>.

    If the member has already been extracted to the destination
    directory, its on-disk copy is used; otherwise the data is read
    straight out of the archive.  Returns None for unsupported archive
    types.
    """
    extracted_path = os.path.join(self._dst, chosen)
    if os.path.exists(extracted_path):
        # BUG FIX: this buffer used to be built and then discarded (and
        # the file handle leaked); now it is returned and the file is
        # closed.
        fd = open(extracted_path, 'rb')
        try:
            return cStringIO.StringIO(fd.read())
        finally:
            fd.close()
    if self._type == ZIP:
        return cStringIO.StringIO(self._zfile.read(chosen))
    elif self._type in [TAR, GZIP, BZIP2]:
        return cStringIO.StringIO(self._tfile.extractfile(chosen).read())
    elif self._type == RAR:
        proc = process.Process([_rar_exec, 'p', '-inul', '-p-', '--',
                                self._src, chosen])
        fobj = proc.spawn()
        return cStringIO.StringIO(fobj.read())
    elif self._type == SEVENZIP:
        if Archive7z is not None:
            return cStringIO.StringIO(
                self._szfile.getmember(chosen).read())
        elif _7z_exec is not None:
            proc = process.Process([_7z_exec, 'e', '-bd', '-p-', '-so',
                                    self._src, chosen])
            fobj = proc.spawn()
            return cStringIO.StringIO(fobj.read())
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def __init__(self, image_files, other_files, archive_path, base_name):
    """Set up a Packer that creates a ZIP archive at <archive_path>.

    Every file referenced by the path sequences <image_files> and
    <other_files> is included (in the archive root) when the archive is
    packed.  Files from <image_files> are renamed on the form
    "NN - <base_name>.ext" so that their lexical order matches their
    order in the list, while files from <other_files> keep their names,
    assuming they do not clash with other filenames in the archive.
    """
    self._image_files = image_files
    self._other_files = other_files
    self._archive_path = archive_path
    self._base_name = base_name
    # Set by pack()/the worker thread; queried via wait().
    self._pack_thread = None
    self._packing_successful = False
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def wait(self):
    """Block until the packer thread (if any) has finished.

    Return True if the packer finished its work successfully.
    """
    thread = self._pack_thread
    if thread is not None:
        thread.join()
    return self._packing_successful
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def archive_mime_type(path):
    """Return the archive type of <path> or None for non-archives.

    Detection order: zip (via zipfile), tar variants (bzip2/gzip/plain
    via tarfile + magic bytes), then rar/7z/mobi by magic bytes alone.
    """
    try:
        if os.path.isfile(path):
            if not os.access(path, os.R_OK):
                return None
            if zipfile.is_zipfile(path):
                return ZIP
            # The first four bytes identify rar/7z/compression; the
            # eight bytes at offset 60 identify MobiPocket files.
            # BUG FIX: the file is opened in binary mode, so the magic
            # values must be bytes literals -- with str literals the
            # comparisons below could never match on Python 3.  The
            # file handle is also closed via 'with' now.
            with open(path, 'rb') as fd:
                magic = fd.read(4)
                fd.seek(60)
                magic2 = fd.read(8)
            if tarfile.is_tarfile(path) and os.path.getsize(path) > 0:
                if magic.startswith(b'BZh'):
                    return BZIP2
                if magic.startswith(b'\037\213'):
                    return GZIP
                return TAR
            if magic == b'Rar!':
                return RAR
            if magic == b'7z\xbc\xaf':
                return SEVENZIP
            if magic2 == b'BOOKMOBI':
                return MOBI
    except Exception:
        print('! Error while reading {}'.format(path))
    return None
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def get_archive_info(path):
    """Return a tuple (mime, num_pages, size) with info about the
    archive at <path>, or None if <path> doesn't point to a supported
    archive.

    <num_pages> counts the archive members whose names match a
    supported image extension.
    """
    # Raw strings for the regex pieces: '\.' and '\s' in plain strings
    # are invalid escape sequences (SyntaxWarning in modern Python).
    image_re = re.compile(
        r'\.(' + '|'.join(get_supported_format_extensions_preg()) +
        r')\s*$', re.I)
    extractor = Extractor()
    extractor.setup(path, None)
    mime = extractor.get_mime_type()
    if mime is None:
        return None
    files = extractor.get_files()
    extractor.close()
    # BUG FIX: len(filter(...)) only works on Python 2, where filter()
    # returns a list; count via a list comprehension instead.
    num_pages = len([f for f in files if image_re.search(f)])
    size = os.stat(path).st_size
    return mime, num_pages, size
Joacchim/Comix
[ 18, 14, 18, 1, 1286194888 ]
def setup_page_type(cls, gdbtype: gdb.Type) -> None:
    """Initialise page-related class attributes from a resolved gdb
    struct type.

    NOTE(review): looks like a type-discovery callback receiving the
    kernel's 'struct page' type (the member names below -- slab_cache,
    compound_head, first_page -- belong to struct page); the
    @classmethod decorator is outside this excerpt -- confirm.
    """
    # TODO: should check config, but that failed to work on ppc64, hardcode
    # 64k for now
    if crash.current_target().arch.name() == "powerpc:common64":
        cls.PAGE_SHIFT = 16
        # also a config
        cls.directmap_base = 0xc000000000000000
        cls.sparsemem = True
        cls.SECTION_SIZE_BITS = 24

    cls.PAGE_SIZE = 1 << cls.PAGE_SHIFT

    # Struct member names changed across kernel versions; pick whichever
    # variant this kernel's type actually has.
    cls.slab_cache_name = find_member_variant(gdbtype,
                                              ['slab_cache', 'lru'])
    cls.slab_page_name = find_member_variant(gdbtype,
                                             ['slab_page', 'lru'])
    cls.compound_head_name = find_member_variant(gdbtype,
                                                 ['compound_head',
                                                  'first_page'])
    # vmemmap may already have been provided elsewhere; otherwise derive
    # it from the architecture's vmemmap base address.
    if not hasattr(cls, 'vmemmap'):
        cls.vmemmap = gdb.Value(cls.vmemmap_base).cast(gdbtype.pointer())

    cls.setup_page_type_done = True
    # Two-phase handshake: pageflags setup may have completed first, in
    # which case the combined finish step runs here.
    if cls.setup_pageflags_done and not cls.setup_pageflags_finish_done:
        cls.setup_pageflags_finish()
jeffmahoney/crash-python
[ 61, 23, 61, 13, 1455127548 ]
def setup_mem_section(cls, gdbtype: gdb.Type) -> None:
    """Derive SECTIONS_PER_ROOT from the mem_section gdb type.

    TODO: assumes SPARSEMEM_EXTREME, i.e. each root holds as many
    sections as fit into one page.
    """
    cls.SECTIONS_PER_ROOT = cls.PAGE_SIZE // gdbtype.sizeof
jeffmahoney/crash-python
[ 61, 23, 61, 13, 1455127548 ]