Columns:
function: string (lengths 11 to 56k)
repo_name: string (lengths 5 to 60)
features: sequence
def extractSweetjamtranslationsCom(item): ''' Parser for 'sweetjamtranslations.com' ''' vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title']) if not (chp or vol) or "preview" in item['title'].lower(): return None tagmap = [ ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'), ] for tagname, name, tl_type in tagmap: if tagname in item['tags']: return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
fake-name/ReadableWebProxy
[ 191, 16, 191, 3, 1437712243 ]
def extractMiratlsWordpressCom(item): ''' Parser for 'miratls.wordpress.com' ''' vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title']) if not (chp or vol) or "preview" in item['title'].lower(): return None tagmap = [ ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'), ] for tagname, name, tl_type in tagmap: if tagname in item['tags']: return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
fake-name/ReadableWebProxy
[ 191, 16, 191, 3, 1437712243 ]
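Both extract*() parsers above share the same tag-to-release-type lookup. A minimal self-contained sketch of just that step, with a hypothetical item and the site-specific helpers (extractVolChapterFragmentPostfix, buildReleaseMessageWithType) left out:

item = {'title': 'Vol 2 Chapter 5', 'tags': ['PRC']}
tagmap = [
    ('PRC', 'PRC', 'translated'),
    ('Loiterous', 'Loiterous', 'oel'),
]
# The first mapping whose tag appears on the item decides the series name and type.
match = next(((name, tl_type) for tag, name, tl_type in tagmap if tag in item['tags']), None)
print(match)  # -> ('PRC', 'translated')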
def create(kernel): result = Building() result.template = "object/building/player/shared_player_city_bank_corellia_style_01.iff" result.attribute_template_id = -1 result.stfName("","")
anhstudios/swganh
[ 62, 37, 62, 37, 1297996365 ]
def create(kernel): result = Building() result.template = "object/building/poi/shared_corellia_solitude_medium3.iff" result.attribute_template_id = -1 result.stfName("poi_n","base_poi_building")
anhstudios/swganh
[ 62, 37, 62, 37, 1297996365 ]
def create(kernel): result = Tangible() result.template = "object/tangible/inventory/shared_creature_inventory_6.iff" result.attribute_template_id = -1 result.stfName("item_n","inventory")
anhstudios/swganh
[ 62, 37, 62, 37, 1297996365 ]
def __init__(self, plotly_name="pad", parent_name="layout.title", **kwargs): super(PadValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, data_class_str=kwargs.pop("data_class_str", "Pad"), data_docs=kwargs.pop( "data_docs", """ b The amount of padding (in px) along the bottom of the component. l The amount of padding (in px) on the left side of the component. r The amount of padding (in px) on the right side of the component. t The amount of padding (in px) along the top of the component.
plotly/python-api
[ 13052, 2308, 13052, 1319, 1385013188 ]
def setUp(self): super().setUp() self.course = CourseFactory.create() self.instructor = AdminFactory.create() self.user = UserFactory()
eduNEXT/edunext-platform
[ 28, 7, 28, 10, 1414072000 ]
def test_wiki_enabled_and_public(self): """ Test wiki tab when Enabled setting is True and the wiki is open to the public. """ settings.WIKI_ENABLED = True self.course.allow_public_wiki_access = True assert self.get_wiki_tab(self.user, self.course) is not None
eduNEXT/edunext-platform
[ 28, 7, 28, 10, 1414072000 ]
def test_wiki_enabled_false(self): """Test wiki tab when Enabled setting is False""" settings.WIKI_ENABLED = False assert self.get_wiki_tab(self.user, self.course) is None assert self.get_wiki_tab(self.instructor, self.course) is None
eduNEXT/edunext-platform
[ 28, 7, 28, 10, 1414072000 ]
def __init__(self, magics_manager, ignore=None): self.ignore = ignore if ignore else [] self.magics_manager = magics_manager
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def _lsmagic(self): """The main implementation of the %lsmagic""" mesc = magic_escapes['line'] cesc = magic_escapes['cell'] mman = self.magics_manager magics = mman.lsmagic() out = ['Available line magics:', mesc + (' '+mesc).join(sorted([m for m,v in magics['line'].items() if (v not in self.ignore)])), '', 'Available cell magics:', cesc + (' '+cesc).join(sorted([m for m,v in magics['cell'].items() if (v not in self.ignore)])), '', mman.auto_status()] return '\n'.join(out)
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
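A minimal sketch of the string-building pattern _lsmagic() uses, with a hypothetical magics registry standing in for IPython's MagicsManager and assumed line/cell escape characters (the real ones come from magic_escapes):

magics = {'line': {'timeit': None, 'cd': None}, 'cell': {'bash': None, 'writefile': None}}
mesc, cesc = '%', '%%'  # assumed escapes
out = [
    'Available line magics:',
    mesc + (' ' + mesc).join(sorted(magics['line'])),
    '',
    'Available cell magics:',
    cesc + (' ' + cesc).join(sorted(magics['cell'])),
]
print('\n'.join(out))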
def __str__(self): return self._lsmagic()
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def _jsonable(self): """turn magics dict into jsonable dict of the same structure
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def _repr_json_(self): return self._jsonable()
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def alias_magic(self, line=''): """Create an alias for an existing line or cell magic. Examples -------- :: In [1]: %alias_magic t timeit Created `%t` as an alias for `%timeit`. Created `%%t` as an alias for `%%timeit`. In [2]: %t -n1 pass 1 loops, best of 3: 954 ns per loop In [3]: %%t -n1 ...: pass ...: 1 loops, best of 3: 954 ns per loop In [4]: %alias_magic --cell whereami pwd UsageError: Cell magic function `%%pwd` not found. In [5]: %alias_magic --line whereami pwd Created `%whereami` as an alias for `%pwd`. In [6]: %whereami Out[6]: u'/home/testuser'
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def lsmagic(self, parameter_s=''): """List currently available magic functions.""" return MagicsDisplay(self.shell.magics_manager, ignore=[self.pip])
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def magic(self, parameter_s=''): """Print information about the magic function system. Supported formats: -latex, -brief, -rest """ mode = '' try: mode = parameter_s.split()[0][1:] except IndexError: pass brief = (mode == 'brief') rest = (mode == 'rest') magic_docs = self._magic_docs(brief, rest) if mode == 'latex': print(self.format_latex(magic_docs)) return else: magic_docs = format_screen(magic_docs) out = ["""
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def page(self, parameter_s=''): """Pretty print the object and display it through a pager. %page [options] OBJECT If no object is given, use _ (last output). Options: -r: page str(object), don't pretty-print it.""" # After a function contributed by Olivier Aubert, slightly modified. # Process options/args opts, args = self.parse_options(parameter_s, 'r') raw = 'r' in opts oname = args and args or '_' info = self.shell._ofind(oname) if info['found']: txt = (raw and str or pformat)( info['obj'] ) page.page(txt) else: print('Object `%s` not found' % oname)
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def profile(self, parameter_s=''): """Print your currently active IPython profile. See Also -------- prun : run code using the Python profiler (:meth:`~IPython.core.magics.execution.ExecutionMagics.prun`) """ raise UsageError("The `%profile` magic has been deprecated since IPython 2.0 " "and removed in IPython 6.0. Please use the value of `get_ipython().profile` instead " "to see current profile in use. Perhaps you meant to use `%prun` to profile code?")
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def pprint(self, parameter_s=''): """Toggle pretty printing on/off.""" ptformatter = self.shell.display_formatter.formatters['text/plain'] ptformatter.pprint = bool(1 - ptformatter.pprint) print('Pretty printing has been turned', ['OFF','ON'][ptformatter.pprint])
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def colors(self, parameter_s=''): """Switch color scheme for prompts, info system and exception handlers. Currently implemented schemes: NoColor, Linux, LightBG. Color scheme names are not case-sensitive. Examples -------- To get a plain black and white terminal:: %colors nocolor """ def color_switch_err(name): warn('Error changing %s color schemes.\n%s' % (name, sys.exc_info()[1]), stacklevel=2) new_scheme = parameter_s.strip() if not new_scheme: raise UsageError( "%colors: you must specify a color scheme. See '%colors?'") # local shortcut shell = self.shell # Set shell colour scheme try: shell.colors = new_scheme shell.refresh_style() except: color_switch_err('shell') # Set exception colors try: shell.InteractiveTB.set_colors(scheme = new_scheme) shell.SyntaxTB.set_colors(scheme = new_scheme) except: color_switch_err('exception') # Set info (for 'object?') colors if shell.color_info: try: shell.inspector.set_active_scheme(new_scheme) except: color_switch_err('object inspector') else: shell.inspector.set_active_scheme('NoColor')
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def xmode(self, parameter_s=''): """Switch modes for the exception handlers. Valid modes: Plain, Context and Verbose. If called without arguments, acts as a toggle.""" def xmode_switch_err(name): warn('Error changing %s exception modes.\n%s' % (name,sys.exc_info()[1])) shell = self.shell new_mode = parameter_s.strip().capitalize() try: shell.InteractiveTB.set_mode(mode=new_mode) print('Exception reporting mode:',shell.InteractiveTB.mode) except: xmode_switch_err('user')
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def pip(self, args=''): """ Intercept usage of ``pip`` in IPython and direct user to run command outside of IPython. """ print(textwrap.dedent(''' The following command must be run outside of the IPython shell: $ pip {args} The Python package manager (pip) can only be used from outside of IPython. Please reissue the `pip` command in a separate terminal or command prompt. See the Python documentation for more information on how to install packages: https://docs.python.org/3/installing/'''.format(args=args)))
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def quickref(self, arg): """ Show a quick reference sheet """ from IPython.core.usage import quick_reference qr = quick_reference + self._magic_docs(brief=True) page.page(qr)
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def doctest_mode(self, parameter_s=''): """Toggle doctest mode on and off. This mode is intended to make IPython behave as much as possible like a plain Python shell, from the perspective of how its prompts, exceptions and output look. This makes it easy to copy and paste parts of a session into doctests. It does so by: - Changing the prompts to the classic ``>>>`` ones. - Changing the exception reporting mode to 'Plain'. - Disabling pretty-printing of output. Note that IPython also supports the pasting of code snippets that have leading '>>>' and '...' prompts in them. This means that you can paste doctests from files or docstrings (even if they have leading whitespace), and the code will execute correctly. You can then use '%history -t' to see the translated history; this will give you the input after removal of all the leading prompts and whitespace, which can be pasted back into an editor. With these features, you can switch into this mode easily whenever you need to do testing and changes to doctests, without having to leave your existing IPython session. """ # Shorthands shell = self.shell meta = shell.meta disp_formatter = self.shell.display_formatter ptformatter = disp_formatter.formatters['text/plain'] # dstore is a data store kept in the instance metadata bag to track any # changes we make, so we can undo them later. dstore = meta.setdefault('doctest_mode',Struct()) save_dstore = dstore.setdefault # save a few values we'll need to recover later mode = save_dstore('mode',False) save_dstore('rc_pprint',ptformatter.pprint) save_dstore('xmode',shell.InteractiveTB.mode) save_dstore('rc_separate_out',shell.separate_out) save_dstore('rc_separate_out2',shell.separate_out2) save_dstore('rc_separate_in',shell.separate_in) save_dstore('rc_active_types',disp_formatter.active_types) if not mode: # turn on # Prompt separators like plain python shell.separate_in = '' shell.separate_out = '' shell.separate_out2 = '' ptformatter.pprint = False disp_formatter.active_types = ['text/plain'] shell.magic('xmode Plain') else: # turn off shell.separate_in = dstore.rc_separate_in shell.separate_out = dstore.rc_separate_out shell.separate_out2 = dstore.rc_separate_out2 ptformatter.pprint = dstore.rc_pprint disp_formatter.active_types = dstore.rc_active_types shell.magic('xmode ' + dstore.xmode) # mode here is the state before we switch; switch_doctest_mode takes # the mode we're switching to. shell.switch_doctest_mode(not mode) # Store new mode and inform dstore.mode = bool(not mode) mode_label = ['OFF','ON'][dstore.mode] print('Doctest mode is:', mode_label)
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def gui(self, parameter_s=''): """Enable or disable IPython GUI event loop integration. %gui [GUINAME] This magic replaces IPython's threaded shells that were activated using the (pylab/wthread/etc.) command line flags. GUI toolkits can now be enabled at runtime and keyboard interrupts should work without any problems. The following toolkits are supported: wxPython, PyQt4, PyGTK, Tk and Cocoa (OSX):: %gui wx # enable wxPython event loop integration %gui qt4|qt # enable PyQt4 event loop integration %gui qt5 # enable PyQt5 event loop integration %gui gtk # enable PyGTK event loop integration %gui gtk3 # enable Gtk3 event loop integration %gui tk # enable Tk event loop integration %gui osx # enable Cocoa event loop integration # (requires %matplotlib 1.1) %gui # disable all event loop integration WARNING: after any of these has been called you can simply create an application object, but DO NOT start the event loop yourself, as we have already handled that. """ opts, arg = self.parse_options(parameter_s, '') if arg=='': arg = None try: return self.shell.enable_gui(arg) except Exception as e: # print simple error message, rather than traceback if we can't # hook up the GUI error(str(e))
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
def precision(self, s=''): """Set floating point precision for pretty printing. Can set either integer precision or a format string. If numpy has been imported and precision is an int, numpy display precision will also be set, via ``numpy.set_printoptions``. If no argument is given, defaults will be restored. Examples -------- :: In [1]: from math import pi In [2]: %precision 3 Out[2]: u'%.3f' In [3]: pi Out[3]: 3.142 In [4]: %precision %i Out[4]: u'%i' In [5]: pi Out[5]: 3 In [6]: %precision %e Out[6]: u'%e' In [7]: pi**10 Out[7]: 9.364805e+04 In [8]: %precision Out[8]: u'%r' In [9]: pi**10 Out[9]: 93648.047476082982 """ ptformatter = self.shell.display_formatter.formatters['text/plain'] ptformatter.float_precision = s return ptformatter.float_format
unnikrishnankgs/va
[ 1, 5, 1, 10, 1496432585 ]
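The behaviour documented in the %precision docstring can be reproduced with plain format strings; a quick sanity check of the two argument forms it stores on the text/plain formatter (expected values taken from the docstring):

from math import pi
print('%.3f' % pi)       # integer precision 3    -> 3.142
print('%e' % pi ** 10)   # explicit format string -> 9.364805e+04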
def setUp(self): # pylint: disable=arguments-differ """ Test Setup """ super().setUp() self.url = reverse('mfe_context') self.query_params = {'next': '/dashboard'} hostname = socket.gethostname() ip_address = socket.gethostbyname(hostname) self.country_code = country_code_from_ip(ip_address) # Several third party auth providers are created for these tests: self.configure_google_provider(enabled=True, visible=True) self.configure_facebook_provider(enabled=True, visible=True) self.hidden_enabled_provider = self.configure_linkedin_provider( visible=False, enabled=True, )
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def get_provider_data(self, params): """ Returns the expected provider data based on providers enabled in test setup """ return [ { 'id': 'oa2-facebook', 'name': 'Facebook', 'iconClass': 'fa-facebook', 'iconImage': None, 'skipHintedLogin': False, 'loginUrl': self._third_party_login_url('facebook', 'login', params), 'registerUrl': self._third_party_login_url('facebook', 'register', params) }, { 'id': 'oa2-google-oauth2', 'name': 'Google', 'iconClass': 'fa-google-plus', 'iconImage': None, 'skipHintedLogin': False, 'loginUrl': self._third_party_login_url('google-oauth2', 'login', params), 'registerUrl': self._third_party_login_url('google-oauth2', 'register', params) }, ]
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def test_no_third_party_auth_providers(self): """ Test that if third party auth is enabled, context returned by API contains the provider information """ response = self.client.get(self.url, self.query_params) assert response.status_code == 200 assert response.data == self.get_context()
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def test_running_pipeline(self, current_backend, current_provider, add_user_details): """ Test that when third party pipeline is running, the api returns details of current provider """ email = '[email protected]' if add_user_details else None params = { 'next': self.query_params['next'] } # Simulate a running pipeline pipeline_target = 'openedx.core.djangoapps.user_authn.views.login_form.third_party_auth.pipeline' with simulate_running_pipeline(pipeline_target, current_backend, email=email): response = self.client.get(self.url, self.query_params) assert response.status_code == 200 assert response.data == self.get_context(params, current_provider, current_backend, add_user_details)
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def test_user_country_code(self): """ Test api that returns country code of user """ response = self.client.get(self.url, self.query_params) assert response.status_code == 200 assert response.data['countryCode'] == self.country_code
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def setUp(self): """ Create a user, then log in. """ super().setUp() self.user = UserFactory() Registration().register(self.user) result = self.client.login(username=self.user.username, password="test") assert result, 'Could not log in' self.path = reverse('send_account_activation_email')
edx/edx-platform
[ 6290, 3437, 6290, 280, 1369945238 ]
def __init__(self, job, function_handle_string, trace_canonical_url, failure_type_name, description, stack): assert isinstance(job, job_module.Job) self.job = job self.function_handle_string = function_handle_string self.trace_canonical_url = trace_canonical_url self.failure_type_name = failure_type_name self.description = description self.stack = stack
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def AsDict(self): return { 'job_guid': str(self.job.guid), 'function_handle_string': self.function_handle_string, 'trace_canonical_url': self.trace_canonical_url, 'type': self.failure_type_name, 'description': self.description, 'stack': self.stack }
catapult-project/catapult
[ 1835, 570, 1835, 1039, 1429033745 ]
def fetch_spec(props): url = 'https://chromium.googlesource.com/devtools/devtools-frontend.git' solution = { 'name' : 'devtools-frontend', 'url' : url, 'deps_file' : 'DEPS', 'managed' : False, 'custom_deps' : {}, } spec = { 'solutions': [solution], 'with_branch_heads': True, } return { 'type': 'gclient_git', 'gclient_git_spec': spec, }
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
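For reference, the spec returned by fetch_spec() serializes to the structure below; the values are copied from the function above, and json is used here only for pretty-printing:

import json

spec = {
    'type': 'gclient_git',
    'gclient_git_spec': {
        'solutions': [{
            'name': 'devtools-frontend',
            'url': 'https://chromium.googlesource.com/devtools/devtools-frontend.git',
            'deps_file': 'DEPS',
            'managed': False,
            'custom_deps': {},
        }],
        'with_branch_heads': True,
    },
}
print(json.dumps(spec, indent=2))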
def expected_root(_props): return 'devtools-frontend'
endlessm/chromium-browser
[ 21, 16, 21, 3, 1435959644 ]
def __init__(self, key = None, padding = padWithPadLen(), keySize=16, blockSize=16 ): self.name = 'RIJNDAEL' self.keySize = keySize self.strength = keySize*8 self.blockSize = blockSize # blockSize is in bytes self.padding = padding # change default to noPadding() to get normal ECB behavior assert( keySize%4==0 and NrTable[4].has_key(keySize/4)),'key size must be 16,20,24,28 or 32 bytes' assert( blockSize%4==0 and NrTable.has_key(blockSize/4)), 'block size must be 16,20,24,28 or 32 bytes' self.Nb = self.blockSize/4 # Nb is number of columns of 32 bit words self.Nk = keySize/4 # Nk is the key length in 32-bit words self.Nr = NrTable[self.Nb][self.Nk] # The number of rounds (Nr) is a function of # the block (Nb) and key (Nk) sizes. if key != None: self.setKey(key)
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def encryptBlock(self, plainTextBlock): """ Encrypt a block, plainTextBlock must be a array of bytes [Nb by 4] """ self.state = self._toBlock(plainTextBlock) AddRoundKey(self, self.__expandedKey[0:self.Nb]) for round in range(1,self.Nr): #for round = 1 step 1 to Nr–1 SubBytes(self) ShiftRows(self) MixColumns(self) AddRoundKey(self, self.__expandedKey[round*self.Nb:(round+1)*self.Nb]) SubBytes(self) ShiftRows(self) AddRoundKey(self, self.__expandedKey[self.Nr*self.Nb:(self.Nr+1)*self.Nb]) return self._toBString(self.state)
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def _toBlock(self, bs): """ Convert binary string to array of bytes, state[col][row]""" assert ( len(bs) == 4*self.Nb ), 'Rijndael blocks must be of size blockSize' return [[ord(bs[4*i]),ord(bs[4*i+1]),ord(bs[4*i+2]),ord(bs[4*i+3])] for i in range(self.Nb)]
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def keyExpansion(algInstance, keyString): """ Expand a string of size keySize into a larger array """ Nk, Nb, Nr = algInstance.Nk, algInstance.Nb, algInstance.Nr # for readability key = [ord(byte) for byte in keyString] # convert string to list w = [[key[4*i],key[4*i+1],key[4*i+2],key[4*i+3]] for i in range(Nk)] for i in range(Nk,Nb*(Nr+1)): temp = w[i-1] # a four byte column if (i%Nk) == 0 : temp = temp[1:]+[temp[0]] # RotWord(temp) temp = [ Sbox[byte] for byte in temp ] temp[0] ^= Rcon[i/Nk] elif Nk > 6 and i%Nk == 4 : temp = [ Sbox[byte] for byte in temp ] # SubWord(temp) w.append( [ w[i-Nk][byte]^temp[byte] for byte in range(4) ] ) return w
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def AddRoundKey(algInstance, keyBlock): """ XOR the algorithm state with a block of key material """ for column in range(algInstance.Nb): for row in range(4): algInstance.state[column][row] ^= keyBlock[column][row]
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def SubBytes(algInstance): for column in range(algInstance.Nb): for row in range(4): algInstance.state[column][row] = Sbox[algInstance.state[column][row]]
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def ShiftRows(algInstance): tmp = [0]*algInstance.Nb # list of size Nb for r in range(1,4): # row 0 remains unchanged and can be skipped for c in range(algInstance.Nb): tmp[c] = algInstance.state[(c+shiftOffset[algInstance.Nb][r]) % algInstance.Nb][r] for c in range(algInstance.Nb): algInstance.state[c][r] = tmp[c]
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def MixColumns(a): Sprime = [0,0,0,0] for j in range(a.Nb): # for each column Sprime[0] = mul(2,a.state[j][0])^mul(3,a.state[j][1])^mul(1,a.state[j][2])^mul(1,a.state[j][3]) Sprime[1] = mul(1,a.state[j][0])^mul(2,a.state[j][1])^mul(3,a.state[j][2])^mul(1,a.state[j][3]) Sprime[2] = mul(1,a.state[j][0])^mul(1,a.state[j][1])^mul(2,a.state[j][2])^mul(3,a.state[j][3]) Sprime[3] = mul(3,a.state[j][0])^mul(1,a.state[j][1])^mul(1,a.state[j][2])^mul(2,a.state[j][3]) for i in range(4): a.state[j][i] = Sprime[i]
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
def mul(a, b): """ Multiply two elements of GF(2^m) needed for MixColumn and InvMixColumn """ if (a !=0 and b!=0): return Alogtable[(Logtable[a] + Logtable[b])%255] else: return 0
felipenaselva/felipe.repository
[ 2, 6, 2, 1, 1474110890 ]
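mul() reads the product from precomputed Logtable/Alogtable arrays. A minimal cross-check sketch (not from the dataset) computing the same GF(2^8) product by shift-and-XOR reduction modulo the AES polynomial x^8 + x^4 + x^3 + x + 1 (0x11B); the only factors MixColumns ever needs are 1, 2 and 3:

def gf_mul(a, b):
    """Multiply two GF(2^8) elements without lookup tables."""
    result = 0
    while b:
        if b & 1:
            result ^= a
        a <<= 1
        if a & 0x100:
            a ^= 0x11B  # reduce modulo the AES field polynomial
        b >>= 1
    return result

assert gf_mul(0x57, 1) == 0x57
assert gf_mul(0x57, 2) == 0xAE          # xtime(0x57)
assert gf_mul(0x57, 3) == 0xAE ^ 0x57   # multiply by 3 = multiply by 2, XOR the identity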
def __init__(self, env): super(IndexEntries, self).__init__(env) self.data = env.indexentries
axbaretto/beam
[ 9, 2, 9, 74, 1474583398 ]
def merge_other(self, docnames, other): for docname in docnames: self.data[docname] = other.indexentries[docname]
axbaretto/beam
[ 9, 2, 9, 74, 1474583398 ]
def _get_logger(self): return logging.getLogger(self.__class__.__module__)
kartoza/geonode
[ 6, 17, 6, 113, 1439813567 ]
def ready(self): signals.post_migrate.connect(self._register_notifications, sender=self)
kartoza/geonode
[ 6, 17, 6, 113, 1439813567 ]
def wrap(*args, **kwargs): ret = func(*args, **kwargs) if settings.PINAX_NOTIFICATIONS_QUEUE_ALL: send_queued_notifications.delay() return ret
kartoza/geonode
[ 6, 17, 6, 113, 1439813567 ]
def send_now_notification(*args, **kwargs): """ Simple wrapper around notifications.model send(). This can be called safely if notifications are not installed. """ if has_notifications: return notifications.models.send_now(*args, **kwargs)
kartoza/geonode
[ 6, 17, 6, 113, 1439813567 ]
def send_notification(*args, **kwargs): """ Simple wrapper around notifications.model send(). This can be called safely if notifications are not installed. """ if has_notifications: # queue for further processing if required if settings.PINAX_NOTIFICATIONS_QUEUE_ALL: return queue_notification(*args, **kwargs) try: return notifications.models.send(*args, **kwargs) except Exception: logging.exception("Could not send notifications.") return False
kartoza/geonode
[ 6, 17, 6, 113, 1439813567 ]
def __init__(self, imagepath, name, pos = None,border=40): pygame.sprite.Sprite.__init__(self) component.__init__(self) self.imagepath = imagepath self.image = None self.original = None self.rect = None self.pos = pos if self.pos == None: self.pos = [100,100] self.dir = "" self.name = name self.update = self.sprite_logic().next self.screensize = (924,658) self.border = border self.__class__.allsprites.append(self)
sparkslabs/kamaelia_
[ 13, 3, 13, 2, 1348148442 ]
def sprite_logic(self): while 1: yield 1
sparkslabs/kamaelia_
[ 13, 3, 13, 2, 1348148442 ]
def setUp(self): self._vmutils = vmutils.VMUtils() self._vmutils._conn = mock.MagicMock() super(VMUtilsTestCase, self).setUp()
ntt-sic/nova
[ 1, 2, 1, 1, 1382427064 ]
def _lookup_vm(self): mock_vm = mock.MagicMock() self._vmutils._lookup_vm_check = mock.MagicMock( return_value=mock_vm) mock_vm.path_.return_value = self._FAKE_VM_PATH return mock_vm
ntt-sic/nova
[ 1, 2, 1, 1, 1382427064 ]
def test_set_vm_memory_dynamic(self): self._test_set_vm_memory_dynamic(2.0)
ntt-sic/nova
[ 1, 2, 1, 1, 1382427064 ]
def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_map)
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_map)[0]
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0]
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
def getregentry(): return codecs.CodecInfo( name='cp437', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, )
Microvellum/Fluid-Designer
[ 69, 30, 69, 37, 1461884765 ]
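Assuming this module mirrors the standard library's encodings.cp437 (which the stdlib registers under the name 'cp437'), the codec round-trips like any other charmap codec:

import codecs

raw = codecs.encode('Héllo ░▒▓', 'cp437')   # charmap_encode via the encoding map
assert codecs.decode(raw, 'cp437') == 'Héllo ░▒▓'
print(raw)  # b'H\x82llo \xb0\xb1\xb2'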
def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_for_resource_group.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def list_for_resource( self, resource_group_name: str, resource_provider_namespace: str, parent_resource_path: str, resource_type: str, resource_name: str, **kwargs: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_for_resource.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), 'resourceProviderNamespace': self._serialize.url("resource_provider_namespace", resource_provider_namespace, 'str', skip_quote=True), 'parentResourcePath': self._serialize.url("parent_resource_path", parent_resource_path, 'str', skip_quote=True), 'resourceType': self._serialize.url("resource_type", resource_type, 'str', skip_quote=True), 'resourceName': self._serialize.url("resource_name", resource_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def __init__( self, plotly_name="size", parent_name="histogram.marker.colorbar.tickfont", **kwargs
plotly/python-api
[ 13052, 2308, 13052, 1319, 1385013188 ]
def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def begin_delete( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {})
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def _create_or_update_initial( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str parameters, # type: "_models.VirtualRouterPeering" **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def begin_create_or_update( self, resource_group_name, # type: str virtual_router_name, # type: str peering_name, # type: str parameters, # type: "_models.VirtualRouterPeering" **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response): deserialized = self._deserialize('VirtualRouterPeering', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def list( self, resource_group_name, # type: str virtual_router_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.Error, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
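Stripped of the SDK-specific serializers, the prepare_request/get_next pair above implements the usual ARM paging loop: build the first request from the operation's URL template, then keep following next_link. A library-free sketch of that pattern, with a hypothetical fetch callable returning {'value': [...], 'nextLink': ...} pages:

def iterate_pages(first_url, fetch):
    """Yield items page by page, following nextLink until the service omits it."""
    url = first_url
    while url:
        page = fetch(url)                 # stands in for prepare_request + pipeline.run
        yield from page.get('value', [])
        url = page.get('nextLink')

# Usage with canned pages:
pages = {
    '/peerings?page=1': {'value': [1, 2], 'nextLink': '/peerings?page=2'},
    '/peerings?page=2': {'value': [3]},
}
print(list(iterate_pages('/peerings?page=1', pages.__getitem__)))  # [1, 2, 3]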
def fail(): for t in [TypeA, TypeB]: x = TypeA() run_test(x)
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def OK1(seq): for _ in seq: do_something() print("Hi")
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def OK2(seq): i = 3 for x in seq: i += 1 return i
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def OK3(seq): for thing in seq: return "Not empty" return "empty"
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def OK4(n): r = range(n) for i in r: print("x")
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def OK5(seq): for unused_x in seq: print("x")
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def OK6(seq): for thing in seq: if sum(1 for s in STATUSES if thing <= s < thing + 100) >= quorum: return True
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def OK7(seq): for x in seq: queue.add(None)
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def OK7(seq, queue): for x in seq: queue.add(None)
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def fail2(sequence): for x in sequence: for y in sequence: do_something(x+1)
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def fail4(coll, sequence): while coll: x = coll.pop() for s in sequence: do_something(x+1)
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def fail5(t): x, y = t return x
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def __init__(self): self.attr = self.cls_attr
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def rand_list(): return [ random.random() for i in range(100) ]
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def cleanup(sessions): for sess in sessions: # Original code had some comment about deleting sessions del sess
github/codeql
[ 5783, 1304, 5783, 842, 1533054951 ]
def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def begin_create_or_update( self, resource_group_name, # type: str express_route_gateway_name, # type: str connection_name, # type: str put_express_route_connection_parameters, # type: "_models.ExpressRouteConnection" **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response): deserialized = self._deserialize('ExpressRouteConnection', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get( self, resource_group_name, # type: str express_route_gateway_name, # type: str connection_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def _delete_initial( self, resource_group_name, # type: str express_route_gateway_name, # type: str connection_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def begin_delete( self, resource_group_name, # type: str express_route_gateway_name, # type: str connection_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {})
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]
def list( self, resource_group_name, # type: str express_route_gateway_name, # type: str **kwargs # type: Any
Azure/azure-sdk-for-python
[ 3526, 2256, 3526, 986, 1335285972 ]