Columns:
  text: string (lengths 89 to 104k)
  code_tokens: list
  avg_line_len: float64 (7.91 to 980)
  score: float64 (0 to 630)
def create(dataset, symbol, degree):
    """
    Create a model object from the data set for the property specified by
    the supplied symbol, using the specified polynomial degree.

    :param dataset: a DataSet object
    :param symbol: the symbol of the property to be described, e.g. 'rho'
    :param degree: the polynomial degree to use

    :returns: a new PolynomialModelT object
    """
    x_vals = dataset.data['T'].tolist()
    y_vals = dataset.data[symbol].tolist()

    coeffs = np.polyfit(x_vals, y_vals, degree)

    result = PolynomialModelT(
        dataset.material,
        dataset.names_dict[symbol],
        symbol,
        dataset.display_symbols_dict[symbol],
        dataset.units_dict[symbol],
        None,
        [dataset.name],
        coeffs)

    result.state_schema['T']['min'] = float(min(x_vals))
    result.state_schema['T']['max'] = float(max(x_vals))

    return result
[ "def", "create", "(", "dataset", ",", "symbol", ",", "degree", ")", ":", "x_vals", "=", "dataset", ".", "data", "[", "'T'", "]", ".", "tolist", "(", ")", "y_vals", "=", "dataset", ".", "data", "[", "symbol", "]", ".", "tolist", "(", ")", "coeffs", "=", "np", ".", "polyfit", "(", "x_vals", ",", "y_vals", ",", "degree", ")", "result", "=", "PolynomialModelT", "(", "dataset", ".", "material", ",", "dataset", ".", "names_dict", "[", "symbol", "]", ",", "symbol", ",", "dataset", ".", "display_symbols_dict", "[", "symbol", "]", ",", "dataset", ".", "units_dict", "[", "symbol", "]", ",", "None", ",", "[", "dataset", ".", "name", "]", ",", "coeffs", ")", "result", ".", "state_schema", "[", "'T'", "]", "[", "'min'", "]", "=", "float", "(", "min", "(", "x_vals", ")", ")", "result", ".", "state_schema", "[", "'T'", "]", "[", "'max'", "]", "=", "float", "(", "max", "(", "x_vals", ")", ")", "return", "result" ]
38.961538
21.423077
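The fit itself is plain NumPy; a minimal sketch of the same fit-and-evaluate round trip (the sample values are made up, and the DataSet/PolynomialModelT classes are project-specific, so they are omitted here):

    import numpy as np

    # Hypothetical temperature/density samples standing in for dataset.data.
    t = [300.0, 400.0, 500.0, 600.0]
    rho = [998.0, 992.0, 983.0, 971.0]

    coeffs = np.polyfit(t, rho, 2)       # fit a degree-2 polynomial
    rho_450 = np.polyval(coeffs, 450.0)  # evaluate the model at T = 450
    print(coeffs, rho_450)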
def slugify(value, allow_unicode=False):
    """Normalizes a string, converts it to lowercase, removes non-alpha
    characters, and converts spaces to hyphens.

    :param value: string
    :param allow_unicode: allow utf8 characters
    :type allow_unicode: bool
    :return: slugified string
    :rtype: str

    :Example:

    >>> slugify('pekná líščička')
    'pekna-liscicka'
    """
    value = str(value)
    if allow_unicode:
        value = unicodedata.normalize('NFKC', value)
        value = re.sub(r'[^\w\s-]', '', value, flags=re.U).strip().lower()
        return re.sub(r'[-\s]+', '-', value, flags=re.U)
    else:
        value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
        value = re.sub(r'[^\w\s-]', '', value).strip().lower()
        return re.sub(r'[-\s]+', '-', value)
[ "def", "slugify", "(", "value", ",", "allow_unicode", "=", "False", ")", ":", "value", "=", "str", "(", "value", ")", "if", "allow_unicode", ":", "value", "=", "unicodedata", ".", "normalize", "(", "'NFKC'", ",", "value", ")", "value", "=", "re", ".", "sub", "(", "r'[^\\w\\s-]'", ",", "''", ",", "value", ",", "flags", "=", "re", ".", "U", ")", ".", "strip", "(", ")", ".", "lower", "(", ")", "return", "re", ".", "sub", "(", "r'[-\\s]+'", ",", "'-'", ",", "value", ",", "flags", "=", "re", ".", "U", ")", "else", ":", "value", "=", "unicodedata", ".", "normalize", "(", "'NFKD'", ",", "value", ")", ".", "encode", "(", "'ascii'", ",", "'ignore'", ")", ".", "decode", "(", "'ascii'", ")", "value", "=", "re", ".", "sub", "(", "r'[^\\w\\s-]'", ",", "''", ",", "value", ")", ".", "strip", "(", ")", ".", "lower", "(", ")", "return", "re", ".", "sub", "(", "'[-\\s]+'", ",", "'-'", ",", "value", ")" ]
33.791667
18.75
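A quick usage sketch, assuming the slugify above (and its re/unicodedata imports) is in scope; the ASCII path drops accents via NFKD decomposition:

    print(slugify('Hello, World!'))                 # 'hello-world'
    print(slugify('pekná líščička'))                # 'pekna-liscicka'
    print(slugify('pekná líščička', allow_unicode=True))  # 'pekná-líščička'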
def _coords2vec(self, coords):
    """
    Converts from sky coordinates to unit vectors. Before conversion to unit
    vectors, the coordinates are transformed to the coordinate system used
    internally by the :obj:`UnstructuredDustMap`, which can be set during
    initialization of the class.

    Args:
        coords (:obj:`astropy.coordinates.SkyCoord`): Input coordinates to
            convert to unit vectors.

    Returns:
        Cartesian unit vectors corresponding to the input coordinates, after
        transforming to the coordinate system used internally by the
        :obj:`UnstructuredDustMap`.
    """
    # c = coords.transform_to(self._frame)
    # vec = np.empty((c.shape[0], 2), dtype='f8')
    # vec[:,0] = coordinates.Longitude(coords.l, wrap_angle=360.*units.deg).deg[:]
    # vec[:,1] = coords.b.deg[:]
    # return np.radians(vec)

    c = coords.transform_to(self._frame).represent_as('cartesian')
    vec_norm = np.sqrt(c.x**2 + c.y**2 + c.z**2)
    vec = np.empty((c.shape[0], 3), dtype=c.x.dtype)
    vec[:,0] = (c.x / vec_norm).value[:]
    vec[:,1] = (c.y / vec_norm).value[:]
    vec[:,2] = (c.z / vec_norm).value[:]
    return vec
[ "def", "_coords2vec", "(", "self", ",", "coords", ")", ":", "# c = coords.transform_to(self._frame)", "# vec = np.empty((c.shape[0], 2), dtype='f8')", "# vec[:,0] = coordinates.Longitude(coords.l, wrap_angle=360.*units.deg).deg[:]", "# vec[:,1] = coords.b.deg[:]", "# return np.radians(vec)", "c", "=", "coords", ".", "transform_to", "(", "self", ".", "_frame", ")", ".", "represent_as", "(", "'cartesian'", ")", "vec_norm", "=", "np", ".", "sqrt", "(", "c", ".", "x", "**", "2", "+", "c", ".", "y", "**", "2", "+", "c", ".", "z", "**", "2", ")", "vec", "=", "np", ".", "empty", "(", "(", "c", ".", "shape", "[", "0", "]", ",", "3", ")", ",", "dtype", "=", "c", ".", "x", ".", "dtype", ")", "vec", "[", ":", ",", "0", "]", "=", "(", "c", ".", "x", "/", "vec_norm", ")", ".", "value", "[", ":", "]", "vec", "[", ":", ",", "1", "]", "=", "(", "c", ".", "y", "/", "vec_norm", ")", ".", "value", "[", ":", "]", "vec", "[", ":", ",", "2", "]", "=", "(", "c", ".", "z", "/", "vec_norm", ")", ".", "value", "[", ":", "]", "return", "vec" ]
38.8125
21.875
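The normalization step is independent of astropy; a minimal NumPy-only sketch of turning arbitrary 3-vectors into unit vectors the same way:

    import numpy as np

    xyz = np.array([[1.0, 2.0, 2.0],
                    [0.0, 3.0, 4.0]])
    norm = np.sqrt((xyz ** 2).sum(axis=1, keepdims=True))
    unit = xyz / norm
    print(unit)  # each row now has Euclidean length 1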
def get_msi_token_webapp(resource):
    """Get an MSI token from inside a webapp or functions.

    Env variables will look like:

    - MSI_ENDPOINT = http://127.0.0.1:41741/MSI/token/
    - MSI_SECRET = 69418689F1E342DD946CB82994CDA3CB
    """
    try:
        msi_endpoint = os.environ['MSI_ENDPOINT']
        msi_secret = os.environ['MSI_SECRET']
    except KeyError as err:
        err_msg = "{} required env variable was not found. You might need to restart your app/function.".format(err)
        _LOGGER.critical(err_msg)
        raise RuntimeError(err_msg)
    request_uri = '{}/?resource={}&api-version=2017-09-01'.format(msi_endpoint, resource)
    headers = {'secret': msi_secret}

    err = None
    try:
        result = requests.get(request_uri, headers=headers)
        _LOGGER.debug("MSI: Retrieving a token from %s", request_uri)
        if result.status_code != 200:
            err = result.text
        # Workaround since not all failures are != 200
        if 'ExceptionMessage' in result.text:
            err = result.text
    except Exception as ex:  # pylint: disable=broad-except
        err = str(ex)

    if err:
        err_msg = "MSI: Failed to retrieve a token from '{}' with an error of '{}'.".format(
            request_uri, err
        )
        _LOGGER.critical(err_msg)
        raise RuntimeError(err_msg)

    _LOGGER.debug('MSI: token retrieved')
    token_entry = result.json()
    return token_entry['token_type'], token_entry['access_token'], token_entry
[ "def", "get_msi_token_webapp", "(", "resource", ")", ":", "try", ":", "msi_endpoint", "=", "os", ".", "environ", "[", "'MSI_ENDPOINT'", "]", "msi_secret", "=", "os", ".", "environ", "[", "'MSI_SECRET'", "]", "except", "KeyError", "as", "err", ":", "err_msg", "=", "\"{} required env variable was not found. You might need to restart your app/function.\"", ".", "format", "(", "err", ")", "_LOGGER", ".", "critical", "(", "err_msg", ")", "raise", "RuntimeError", "(", "err_msg", ")", "request_uri", "=", "'{}/?resource={}&api-version=2017-09-01'", ".", "format", "(", "msi_endpoint", ",", "resource", ")", "headers", "=", "{", "'secret'", ":", "msi_secret", "}", "err", "=", "None", "try", ":", "result", "=", "requests", ".", "get", "(", "request_uri", ",", "headers", "=", "headers", ")", "_LOGGER", ".", "debug", "(", "\"MSI: Retrieving a token from %s\"", ",", "request_uri", ")", "if", "result", ".", "status_code", "!=", "200", ":", "err", "=", "result", ".", "text", "# Workaround since not all failures are != 200", "if", "'ExceptionMessage'", "in", "result", ".", "text", ":", "err", "=", "result", ".", "text", "except", "Exception", "as", "ex", ":", "# pylint: disable=broad-except", "err", "=", "str", "(", "ex", ")", "if", "err", ":", "err_msg", "=", "\"MSI: Failed to retrieve a token from '{}' with an error of '{}'.\"", ".", "format", "(", "request_uri", ",", "err", ")", "_LOGGER", ".", "critical", "(", "err_msg", ")", "raise", "RuntimeError", "(", "err_msg", ")", "_LOGGER", ".", "debug", "(", "'MSI: token retrieved'", ")", "token_entry", "=", "result", ".", "json", "(", ")", "return", "token_entry", "[", "'token_type'", "]", ",", "token_entry", "[", "'access_token'", "]", ",", "token_entry" ]
35.804878
20.439024
def get_formatter(name):
    """
    Looks up a formatter class given a prefix of its name. The names are
    sorted, and the first matching class is returned.
    """
    for k in sorted(_FORMATTERS):
        if k.startswith(name):
            return _FORMATTERS[k]
[ "def", "get_formatter", "(", "name", ")", ":", "for", "k", "in", "sorted", "(", "_FORMATTERS", ")", ":", "if", "k", ".", "startswith", "(", "name", ")", ":", "return", "_FORMATTERS", "[", "k", "]" ]
31.625
9.875
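A self-contained sketch of the same prefix-lookup pattern; the _FORMATTERS registry contents here are hypothetical:

    _FORMATTERS = {'json': dict, 'jsonl': list, 'yaml': str}

    def get_formatter(name):
        # Sorted iteration makes the match deterministic: 'j' -> 'json'.
        for k in sorted(_FORMATTERS):
            if k.startswith(name):
                return _FORMATTERS[k]

    print(get_formatter('j'))   # <class 'dict'> ('json' sorts before 'jsonl')
    print(get_formatter('ya'))  # <class 'str'>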
def GET(self, token=None, salt_token=None):
    r'''
    An HTTP stream of the Salt master event bus

    This stream is formatted per the Server Sent Events (SSE) spec. Each
    event is formatted as JSON.

    .. http:get:: /events

        :status 200: |200|
        :status 401: |401|
        :status 406: |406|
        :query token: **optional** parameter containing the token
            ordinarily supplied via the X-Auth-Token header in order to
            allow cross-domain requests in browsers that do not include
            CORS support in the EventSource API. E.g.,
            ``curl -NsS localhost:8000/events?token=308650d``
        :query salt_token: **optional** parameter containing a raw Salt
            *eauth token* (not to be confused with the token returned from
            the /login URL). E.g.,
            ``curl -NsS localhost:8000/events?salt_token=30742765``

    **Example request:**

    .. code-block:: bash

        curl -NsS localhost:8000/events

    .. code-block:: text

        GET /events HTTP/1.1
        Host: localhost:8000

    **Example response:**

    Note, the ``tag`` field is not part of the spec. SSE compliant clients
    should ignore unknown fields. This addition allows non-compliant
    clients to only watch for certain tags without having to deserialize
    the JSON object each time.

    .. code-block:: text

        HTTP/1.1 200 OK
        Connection: keep-alive
        Cache-Control: no-cache
        Content-Type: text/event-stream;charset=utf-8

        retry: 400

        tag: salt/job/20130802115730568475/new
        data: {'tag': 'salt/job/20130802115730568475/new', 'data': {'minions': ['ms-4', 'ms-3', 'ms-2', 'ms-1', 'ms-0']}}

        tag: salt/job/20130802115730568475/ret/jerry
        data: {'tag': 'salt/job/20130802115730568475/ret/jerry', 'data': {'jid': '20130802115730568475', 'return': True, 'retcode': 0, 'success': True, 'cmd': '_return', 'fun': 'test.ping', 'id': 'ms-1'}}

    The event stream can be easily consumed via JavaScript:

    .. code-block:: javascript

        var source = new EventSource('/events');
        source.onopen = function() { console.info('Listening ...') };
        source.onerror = function(err) { console.error(err) };
        source.onmessage = function(message) {
            var saltEvent = JSON.parse(message.data);
            console.log(saltEvent.tag, saltEvent.data);
        };

    Note, the SSE stream is fast and completely asynchronous and Salt is
    very fast. If a job is created using a regular POST request, it is
    possible that the job return will be available on the SSE stream before
    the response for the POST request arrives. It is important to take that
    asynchronicity into account when designing an application. Below are
    some general guidelines.

    * Subscribe to the SSE stream _before_ creating any events.
    * Process SSE events directly as they arrive and don't wait for any
      other process to "complete" first (like an ajax request).
    * Keep a buffer of events if the event stream must be used for
      synchronous lookups.
    * Be cautious in writing Salt's event stream directly to the DOM. It is
      very busy and can quickly overwhelm the memory allocated to a browser
      tab.

    A full, working proof-of-concept JavaScript application is available
    :blob:`adjacent to this file <salt/netapi/rest_cherrypy/index.html>`.
    It can be viewed by pointing a browser at the ``/app`` endpoint in a
    running ``rest_cherrypy`` instance.

    Or using CORS:

    .. code-block:: javascript

        var source = new EventSource('/events?token=ecd589e4e01912cf3c4035afad73426dbb8dba75', {withCredentials: true});

    It is also possible to consume the stream via the shell.

    Records are separated by blank lines; the ``data:`` and ``tag:``
    prefixes will need to be removed manually before attempting to
    unserialize the JSON.

    curl's ``-N`` flag turns off input buffering which is required to
    process the stream incrementally.

    Here is a basic example of printing each event as it comes in:

    .. code-block:: bash

        curl -NsS localhost:8000/events |\
                while IFS= read -r line ; do
                    echo $line
                done

    Here is an example of using awk to filter events based on tag:

    .. code-block:: bash

        curl -NsS localhost:8000/events |\
                awk '
                    BEGIN { RS=""; FS="\\n" }
                    $1 ~ /^tag: salt\/job\/[0-9]+\/new$/ { print $0 }
                '
        tag: salt/job/20140112010149808995/new
        data: {"tag": "salt/job/20140112010149808995/new", "data": {"tgt_type": "glob", "jid": "20140112010149808995", "tgt": "jerry", "_stamp": "2014-01-12_01:01:49.809617", "user": "shouse", "arg": [], "fun": "test.ping", "minions": ["jerry"]}}
        tag: 20140112010149808995
        data: {"tag": "20140112010149808995", "data": {"fun_args": [], "jid": "20140112010149808995", "return": true, "retcode": 0, "success": true, "cmd": "_return", "_stamp": "2014-01-12_01:01:49.819316", "fun": "test.ping", "id": "jerry"}}
    '''
    cookies = cherrypy.request.cookie
    auth_token = token or salt_token or (
        cookies['session_id'].value if 'session_id' in cookies else None)

    if not self._is_valid_token(auth_token):
        raise cherrypy.HTTPError(401)

    # Release the session lock before starting the long-running response
    cherrypy.session.release_lock()

    cherrypy.response.headers['Content-Type'] = 'text/event-stream'
    cherrypy.response.headers['Cache-Control'] = 'no-cache'
    cherrypy.response.headers['Connection'] = 'keep-alive'

    def listen():
        '''
        An iterator to yield Salt events
        '''
        event = salt.utils.event.get_event(
            'master',
            sock_dir=self.opts['sock_dir'],
            transport=self.opts['transport'],
            opts=self.opts,
            listen=True)
        stream = event.iter_events(full=True, auto_reconnect=True)

        yield str('retry: 400\n')  # future lint: disable=blacklisted-function

        while True:
            data = next(stream)
            yield str('tag: {0}\n').format(data.get('tag', ''))  # future lint: disable=blacklisted-function
            yield str('data: {0}\n\n').format(salt.utils.json.dumps(data))  # future lint: disable=blacklisted-function

    return listen()
[ "def", "GET", "(", "self", ",", "token", "=", "None", ",", "salt_token", "=", "None", ")", ":", "cookies", "=", "cherrypy", ".", "request", ".", "cookie", "auth_token", "=", "token", "or", "salt_token", "or", "(", "cookies", "[", "'session_id'", "]", ".", "value", "if", "'session_id'", "in", "cookies", "else", "None", ")", "if", "not", "self", ".", "_is_valid_token", "(", "auth_token", ")", ":", "raise", "cherrypy", ".", "HTTPError", "(", "401", ")", "# Release the session lock before starting the long-running response", "cherrypy", ".", "session", ".", "release_lock", "(", ")", "cherrypy", ".", "response", ".", "headers", "[", "'Content-Type'", "]", "=", "'text/event-stream'", "cherrypy", ".", "response", ".", "headers", "[", "'Cache-Control'", "]", "=", "'no-cache'", "cherrypy", ".", "response", ".", "headers", "[", "'Connection'", "]", "=", "'keep-alive'", "def", "listen", "(", ")", ":", "'''\n An iterator to yield Salt events\n '''", "event", "=", "salt", ".", "utils", ".", "event", ".", "get_event", "(", "'master'", ",", "sock_dir", "=", "self", ".", "opts", "[", "'sock_dir'", "]", ",", "transport", "=", "self", ".", "opts", "[", "'transport'", "]", ",", "opts", "=", "self", ".", "opts", ",", "listen", "=", "True", ")", "stream", "=", "event", ".", "iter_events", "(", "full", "=", "True", ",", "auto_reconnect", "=", "True", ")", "yield", "str", "(", "'retry: 400\\n'", ")", "# future lint: disable=blacklisted-function", "while", "True", ":", "data", "=", "next", "(", "stream", ")", "yield", "str", "(", "'tag: {0}\\n'", ")", ".", "format", "(", "data", ".", "get", "(", "'tag'", ",", "''", ")", ")", "# future lint: disable=blacklisted-function", "yield", "str", "(", "'data: {0}\\n\\n'", ")", ".", "format", "(", "salt", ".", "utils", ".", "json", ".", "dumps", "(", "data", ")", ")", "# future lint: disable=blacklisted-function", "return", "listen", "(", ")" ]
42.225
29.1125
def read(self, stream):
    """Reads PCapPacketHeader data from the given stream."""
    self._data = stream.read(self._size)

    if len(self._data) >= self._size:
        values = struct.unpack(self._swap + self._format, self._data)
    else:
        values = None, None, None, None

    self.ts_sec = values[0]
    self.ts_usec = values[1]
    self.incl_len = values[2]
    self.orig_len = values[3]
[ "def", "read", "(", "self", ",", "stream", ")", ":", "self", ".", "_data", "=", "stream", ".", "read", "(", "self", ".", "_size", ")", "if", "len", "(", "self", ".", "_data", ")", ">=", "self", ".", "_size", ":", "values", "=", "struct", ".", "unpack", "(", "self", ".", "_swap", "+", "self", ".", "_format", ",", "self", ".", "_data", ")", "else", ":", "values", "=", "None", ",", "None", ",", "None", ",", "None", "self", ".", "ts_sec", "=", "values", "[", "0", "]", "self", ".", "ts_usec", "=", "values", "[", "1", "]", "self", ".", "incl_len", "=", "values", "[", "2", "]", "self", ".", "orig_len", "=", "values", "[", "3", "]" ]
31.846154
15.076923
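For reference, the pcap per-packet record header is four uint32 fields (ts_sec, ts_usec, incl_len, orig_len), which is what the unpack above decodes; a standalone sketch with made-up values:

    import struct

    # Pack and unpack the four uint32 fields of a pcap record header.
    raw = struct.pack('<IIII', 1389489709, 809617, 60, 60)
    ts_sec, ts_usec, incl_len, orig_len = struct.unpack('<IIII', raw)
    print(ts_sec, incl_len)  # 1389489709 60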
def get_balances():
    '''
    Gets information about funds in the user's account. This method returns
    the following information: Available Balance, Account Balance, Earned
    Amount, Withdrawable Amount and Funds Required for AutoRenew.

    .. note::
        If a domain setup with automatic renewal is expiring within the next
        90 days, the FundsRequiredForAutoRenew attribute shows the amount
        needed in your Namecheap account to complete auto renewal.

    CLI Example:

    .. code-block:: bash

        salt 'my-minion' namecheap_users.get_balances
    '''
    opts = salt.utils.namecheap.get_opts('namecheap.users.getBalances')

    response_xml = salt.utils.namecheap.get_request(opts)

    if response_xml is None:
        return {}

    balance_response = response_xml.getElementsByTagName("UserGetBalancesResult")[0]
    return salt.utils.namecheap.atts_to_dict(balance_response)
[ "def", "get_balances", "(", ")", ":", "opts", "=", "salt", ".", "utils", ".", "namecheap", ".", "get_opts", "(", "'namecheap.users.getBalances'", ")", "response_xml", "=", "salt", ".", "utils", ".", "namecheap", ".", "get_request", "(", "opts", ")", "if", "response_xml", "is", "None", ":", "return", "{", "}", "balance_response", "=", "response_xml", ".", "getElementsByTagName", "(", "\"UserGetBalancesResult\"", ")", "[", "0", "]", "return", "salt", ".", "utils", ".", "namecheap", ".", "atts_to_dict", "(", "balance_response", ")" ]
34.038462
30.115385
def join_summaries(summary_frames, selected_summaries, keep_old_header=False):
    """Parse the summaries and combine based on column (selected_summaries)."""
    selected_summaries_dict = create_selected_summaries_dict(selected_summaries)
    frames = []
    keys = []
    for key in summary_frames:
        keys.append(key)
        if summary_frames[key].empty:
            logging.debug("Empty summary_frame encountered")
        frames.append(summary_frames[key])

    out = []
    summary_df = pd.concat(frames, keys=keys, axis=1)
    for key, value in selected_summaries_dict.items():
        _summary_df = summary_df.iloc[
            :, summary_df.columns.get_level_values(1) == value
        ]
        _summary_df.name = key

        if not keep_old_header:
            try:
                _summary_df.columns = _summary_df.columns.droplevel(-1)
            except AttributeError as e:
                logging.debug("could not drop level from frame")
                logging.debug(e)

        out.append(_summary_df)
    logger.debug("finished joining summaries")
    return out
[ "def", "join_summaries", "(", "summary_frames", ",", "selected_summaries", ",", "keep_old_header", "=", "False", ")", ":", "selected_summaries_dict", "=", "create_selected_summaries_dict", "(", "selected_summaries", ")", "frames", "=", "[", "]", "keys", "=", "[", "]", "for", "key", "in", "summary_frames", ":", "keys", ".", "append", "(", "key", ")", "if", "summary_frames", "[", "key", "]", ".", "empty", ":", "logging", ".", "debug", "(", "\"Empty summary_frame encountered\"", ")", "frames", ".", "append", "(", "summary_frames", "[", "key", "]", ")", "out", "=", "[", "]", "summary_df", "=", "pd", ".", "concat", "(", "frames", ",", "keys", "=", "keys", ",", "axis", "=", "1", ")", "for", "key", ",", "value", "in", "selected_summaries_dict", ".", "items", "(", ")", ":", "_summary_df", "=", "summary_df", ".", "iloc", "[", ":", ",", "summary_df", ".", "columns", ".", "get_level_values", "(", "1", ")", "==", "value", "]", "_summary_df", ".", "name", "=", "key", "if", "not", "keep_old_header", ":", "try", ":", "_summary_df", ".", "columns", "=", "_summary_df", ".", "columns", ".", "droplevel", "(", "-", "1", ")", "except", "AttributeError", "as", "e", ":", "logging", ".", "debug", "(", "\"could not drop level from frame\"", ")", "logging", ".", "debug", "(", "e", ")", "out", ".", "append", "(", "_summary_df", ")", "logger", ".", "debug", "(", "\"finished joining summaries\"", ")", "return", "out" ]
34.935484
20.483871
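The selection step relies on the MultiIndex columns that pd.concat(..., keys=...) produces; a minimal sketch of that pattern with made-up frames:

    import pandas as pd

    a = pd.DataFrame({'volt': [3.7, 3.8], 'cap': [1.0, 0.9]})
    b = pd.DataFrame({'volt': [3.6, 3.9], 'cap': [1.1, 0.8]})

    # Top column level = frame key, second level = original column name.
    combined = pd.concat([a, b], keys=['cell_a', 'cell_b'], axis=1)
    volts = combined.iloc[:, combined.columns.get_level_values(1) == 'volt']
    print(volts.columns.droplevel(-1).tolist())  # ['cell_a', 'cell_b']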
def createPartyFromName(apps, name):
    '''
    For creating/matching TransactionParty objects using names alone.
    Look for staff members with the same name and match to them first if
    there is exactly one match. Then, look for users and match them if
    there is exactly one match. Otherwise, just generate a TransactionParty
    for the name only.
    '''
    TransactionParty = apps.get_model('financial', 'TransactionParty')
    StaffMember = apps.get_model('core', 'StaffMember')
    User = apps.get_model('auth', 'User')

    firstName = name.split(' ')[0]
    lastName = ' '.join(name.split(' ')[1:])

    members = StaffMember.objects.filter(
        firstName__istartswith=firstName, lastName__istartswith=lastName
    )
    users = User.objects.filter(
        first_name__istartswith=firstName, last_name__istartswith=lastName
    )

    if members.count() == 1:
        this_member = members.first()
        party = TransactionParty.objects.get_or_create(
            staffMember=this_member,
            defaults={
                'name': getFullName(this_member),
                'user': this_member.userAccount,
            }
        )[0]
    elif users.count() == 1:
        this_user = users.first()
        party = TransactionParty.objects.get_or_create(
            user=this_user,
            defaults={
                'name': getFullName(this_user),
                'staffMember': getattr(this_user, 'staffmember', None),
            }
        )[0]
    else:
        party = TransactionParty.objects.get_or_create(
            name=name
        )[0]

    return party
[ "def", "createPartyFromName", "(", "apps", ",", "name", ")", ":", "TransactionParty", "=", "apps", ".", "get_model", "(", "'financial'", ",", "'TransactionParty'", ")", "StaffMember", "=", "apps", ".", "get_model", "(", "'core'", ",", "'StaffMember'", ")", "User", "=", "apps", ".", "get_model", "(", "'auth'", ",", "'User'", ")", "firstName", "=", "name", ".", "split", "(", "' '", ")", "[", "0", "]", "lastName", "=", "' '", ".", "join", "(", "name", ".", "split", "(", "' '", ")", "[", "1", ":", "]", ")", "members", "=", "StaffMember", ".", "objects", ".", "filter", "(", "firstName__istartswith", "=", "firstName", ",", "lastName__istartswith", "=", "lastName", ")", "users", "=", "User", ".", "objects", ".", "filter", "(", "first_name__istartswith", "=", "firstName", ",", "last_name__istartswith", "=", "lastName", ")", "if", "members", ".", "count", "(", ")", "==", "1", ":", "this_member", "=", "members", ".", "first", "(", ")", "party", "=", "TransactionParty", ".", "objects", ".", "get_or_create", "(", "staffMember", "=", "this_member", ",", "defaults", "=", "{", "'name'", ":", "getFullName", "(", "this_member", ")", ",", "'user'", ":", "this_member", ".", "userAccount", ",", "}", ")", "[", "0", "]", "elif", "users", ".", "count", "(", ")", "==", "1", ":", "this_user", "=", "users", ".", "first", "(", ")", "party", "=", "TransactionParty", ".", "objects", ".", "get_or_create", "(", "user", "=", "this_user", ",", "defaults", "=", "{", "'name'", ":", "getFullName", "(", "this_user", ")", ",", "'staffMember'", ":", "getattr", "(", "this_user", ",", "'staffmember'", ",", "None", ")", ",", "}", ")", "[", "0", "]", "else", ":", "party", "=", "TransactionParty", ".", "objects", ".", "get_or_create", "(", "name", "=", "name", ")", "[", "0", "]", "return", "party" ]
34.444444
21.688889
def _initialize_statevector(self):
    """Set the initial statevector for simulation"""
    if self._initial_statevector is None:
        # Set to default state of all qubits in |0>
        self._statevector = np.zeros(2 ** self._number_of_qubits, dtype=complex)
        self._statevector[0] = 1
    else:
        self._statevector = self._initial_statevector.copy()
    # Reshape to rank-N tensor
    self._statevector = np.reshape(self._statevector,
                                   self._number_of_qubits * [2])
[ "def", "_initialize_statevector", "(", "self", ")", ":", "if", "self", ".", "_initial_statevector", "is", "None", ":", "# Set to default state of all qubits in |0>", "self", ".", "_statevector", "=", "np", ".", "zeros", "(", "2", "**", "self", ".", "_number_of_qubits", ",", "dtype", "=", "complex", ")", "self", ".", "_statevector", "[", "0", "]", "=", "1", "else", ":", "self", ".", "_statevector", "=", "self", ".", "_initial_statevector", ".", "copy", "(", ")", "# Reshape to rank-N tensor", "self", ".", "_statevector", "=", "np", ".", "reshape", "(", "self", ".", "_statevector", ",", "self", ".", "_number_of_qubits", "*", "[", "2", "]", ")" ]
48.833333
14.666667
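The reshape turns a length-2**n amplitude vector into an n-index tensor with one axis per qubit; a standalone sketch for n = 3:

    import numpy as np

    n = 3
    state = np.zeros(2 ** n, dtype=complex)
    state[0] = 1  # all qubits in |0>, i.e. |000>

    tensor = np.reshape(state, n * [2])
    print(tensor.shape)     # (2, 2, 2)
    print(tensor[0, 0, 0])  # (1+0j) -- the amplitude of |000>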
def _create_rule(path, rule):
    # type: (List[Type[Rule]], Type[Rule]) -> Type[ReducedUnitRule]
    """
    Create a ReducedUnitRule based on a sequence of unit rules and the final,
    generating rule.

    :param path: Sequence of unit rules.
    :param rule: Rule that is attached after the sequence of unit rules.
    :return: ReducedUnitRule class.
    """
    created = type('Reduced[' + rule.__name__ + ']',
                   (ReducedUnitRule,),
                   ReducedUnitRule.__dict__.copy())  # type: Type[ReducedUnitRule]
    created.rule = ([path[0].fromSymbol], rule.right)
    created.end_rule = rule
    created.by_rules = path
    return created
[ "def", "_create_rule", "(", "path", ",", "rule", ")", ":", "# type: (List[Type[Rule]], Type[Rule]) -> Type[ReducedUnitRule]", "created", "=", "type", "(", "'Reduced['", "+", "rule", ".", "__name__", "+", "']'", ",", "(", "ReducedUnitRule", ",", ")", ",", "ReducedUnitRule", ".", "__dict__", ".", "copy", "(", ")", ")", "# type: Type[ReducedUnitRule]", "created", ".", "rule", "=", "(", "[", "path", "[", "0", "]", ".", "fromSymbol", "]", ",", "rule", ".", "right", ")", "created", ".", "end_rule", "=", "rule", "created", ".", "by_rules", "=", "path", "return", "created" ]
42.266667
15.466667
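The three-argument type() call is the dynamic equivalent of a class statement; a minimal sketch of the same mechanism:

    class Base:
        label = 'base'

    # Equivalent to: class Derived(Base): label = 'derived'
    Derived = type('Derived', (Base,), {'label': 'derived'})

    print(Derived.__name__, Derived.label, issubclass(Derived, Base))
    # Derived derived True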
def keypress(self, size, key):
    """Handle marking messages as read and keeping client active."""
    # Set the client as active.
    self._coroutine_queue.put(self._client.set_active())

    # Mark the newest event as read.
    self._coroutine_queue.put(self._conversation.update_read_timestamp())

    return super().keypress(size, key)
[ "def", "keypress", "(", "self", ",", "size", ",", "key", ")", ":", "# Set the client as active.", "self", ".", "_coroutine_queue", ".", "put", "(", "self", ".", "_client", ".", "set_active", "(", ")", ")", "# Mark the newest event as read.", "self", ".", "_coroutine_queue", ".", "put", "(", "self", ".", "_conversation", ".", "update_read_timestamp", "(", ")", ")", "return", "super", "(", ")", ".", "keypress", "(", "size", ",", "key", ")" ]
39.555556
17.111111
def _update_params(self, constants):
    """Update params and return new influence."""
    for k, v in constants.items():
        self.params[k]['value'] *= v
    influence = self._calculate_influence(self.params['infl']['value'])
    return influence * self.params['lr']['value']
[ "def", "_update_params", "(", "self", ",", "constants", ")", ":", "for", "k", ",", "v", "in", "constants", ".", "items", "(", ")", ":", "self", ".", "params", "[", "k", "]", "[", "'value'", "]", "*=", "v", "influence", "=", "self", ".", "_calculate_influence", "(", "self", ".", "params", "[", "'infl'", "]", "[", "'value'", "]", ")", "return", "influence", "*", "self", ".", "params", "[", "'lr'", "]", "[", "'value'", "]" ]
42.142857
13.428571
def put(self, items, panic=True):
    """
    Load a single row into the target table.

    :param list items: A list of values in the row corresponding to the
        fields specified by :code:`self.columns`
    :param bool panic: If :code:`True`, when an error is encountered it will
        be raised. Otherwise, the error will be logged and
        :code:`self.error_count` is incremented.
    :raises `giraffez.errors.GiraffeEncodeError`: if :code:`panic` is
        :code:`True` and there are format errors in the row values.
    :raises `giraffez.errors.GiraffeError`: if table name is not set.
    :raises `giraffez.TeradataPTError`: if there is a problem connecting to
        Teradata.
    """
    if not self.initiated:
        self._initiate()
    try:
        row_status = self.mload.put_row(self.preprocessor(items))
        self.applied_count += 1
    except (TeradataPTError, EncoderError) as error:
        self.error_count += 1
        if panic:
            raise error
        log.info("BulkLoad", error)
[ "def", "put", "(", "self", ",", "items", ",", "panic", "=", "True", ")", ":", "if", "not", "self", ".", "initiated", ":", "self", ".", "_initiate", "(", ")", "try", ":", "row_status", "=", "self", ".", "mload", ".", "put_row", "(", "self", ".", "preprocessor", "(", "items", ")", ")", "self", ".", "applied_count", "+=", "1", "except", "(", "TeradataPTError", ",", "EncoderError", ")", "as", "error", ":", "self", ".", "error_count", "+=", "1", "if", "panic", ":", "raise", "error", "log", ".", "info", "(", "\"BulkLoad\"", ",", "error", ")" ]
43.64
18.76
def CreateConfigHBuilder(env):
    """Called if necessary just before the building targets phase begins."""
    action = SCons.Action.Action(_createConfigH, _stringConfigH)
    sconfigHBld = SCons.Builder.Builder(action=action)
    env.Append(BUILDERS={'SConfigHBuilder': sconfigHBld})
    for k in list(_ac_config_hs.keys()):
        env.SConfigHBuilder(k, env.Value(_ac_config_hs[k]))
[ "def", "CreateConfigHBuilder", "(", "env", ")", ":", "action", "=", "SCons", ".", "Action", ".", "Action", "(", "_createConfigH", ",", "_stringConfigH", ")", "sconfigHBld", "=", "SCons", ".", "Builder", ".", "Builder", "(", "action", "=", "action", ")", "env", ".", "Append", "(", "BUILDERS", "=", "{", "'SConfigHBuilder'", ":", "sconfigHBld", "}", ")", "for", "k", "in", "list", "(", "_ac_config_hs", ".", "keys", "(", ")", ")", ":", "env", ".", "SConfigHBuilder", "(", "k", ",", "env", ".", "Value", "(", "_ac_config_hs", "[", "k", "]", ")", ")" ]
51.625
9.625
def verify_link_in_task_graph(chain, decision_link, task_link):
    """Compare the runtime task definition against the decision task graph.

    Args:
        chain (ChainOfTrust): the chain we're operating on.
        decision_link (LinkOfTrust): the decision task link
        task_link (LinkOfTrust): the task link we're testing

    Raises:
        CoTError: on failure.
    """
    log.info("Verifying the {} {} task definition is part of the {} {} task graph...".format(
        task_link.name, task_link.task_id, decision_link.name, decision_link.task_id
    ))
    if task_link.task_id in decision_link.task_graph:
        graph_defn = deepcopy(decision_link.task_graph[task_link.task_id])
        verify_task_in_task_graph(task_link, graph_defn)
        log.info("Found {} in the graph; it's a match".format(task_link.task_id))
        return
    raise_on_errors(["Can't find task {} {} in {} {} task-graph.json!".format(
        task_link.name, task_link.task_id, decision_link.name, decision_link.task_id
    )])
[ "def", "verify_link_in_task_graph", "(", "chain", ",", "decision_link", ",", "task_link", ")", ":", "log", ".", "info", "(", "\"Verifying the {} {} task definition is part of the {} {} task graph...\"", ".", "format", "(", "task_link", ".", "name", ",", "task_link", ".", "task_id", ",", "decision_link", ".", "name", ",", "decision_link", ".", "task_id", ")", ")", "if", "task_link", ".", "task_id", "in", "decision_link", ".", "task_graph", ":", "graph_defn", "=", "deepcopy", "(", "decision_link", ".", "task_graph", "[", "task_link", ".", "task_id", "]", ")", "verify_task_in_task_graph", "(", "task_link", ",", "graph_defn", ")", "log", ".", "info", "(", "\"Found {} in the graph; it's a match\"", ".", "format", "(", "task_link", ".", "task_id", ")", ")", "return", "raise_on_errors", "(", "[", "\"Can't find task {} {} in {} {} task-graph.json!\"", ".", "format", "(", "task_link", ".", "name", ",", "task_link", ".", "task_id", ",", "decision_link", ".", "name", ",", "decision_link", ".", "task_id", ")", "]", ")" ]
43.565217
28.173913
def append(self, text):
    """Append line to the end"""
    cursor = QTextCursor(self._doc)
    cursor.movePosition(QTextCursor.End)
    cursor.insertBlock()
    cursor.insertText(text)
[ "def", "append", "(", "self", ",", "text", ")", ":", "cursor", "=", "QTextCursor", "(", "self", ".", "_doc", ")", "cursor", ".", "movePosition", "(", "QTextCursor", ".", "End", ")", "cursor", ".", "insertBlock", "(", ")", "cursor", ".", "insertText", "(", "text", ")" ]
29.857143
6.142857
def tween(self, t):
    """
    t is a number between 0 and 1 that indicates how far the tween has
    progressed.
    """
    if t is None:
        return None

    if self.method in self.method_to_tween:
        return self.method_to_tween[self.method](t)
    elif self.method in self.method_1param:
        return self.method_1param[self.method](t, self.param1)
    elif self.method in self.method_2param:
        return self.method_2param[self.method](t, self.param1, self.param2)
    else:
        raise Exception("Unsupported tween method {0}".format(self.method))
[ "def", "tween", "(", "self", ",", "t", ")", ":", "if", "t", "is", "None", ":", "return", "None", "if", "self", ".", "method", "in", "self", ".", "method_to_tween", ":", "return", "self", ".", "method_to_tween", "[", "self", ".", "method", "]", "(", "t", ")", "elif", "self", ".", "method", "in", "self", ".", "method_1param", ":", "return", "self", ".", "method_1param", "[", "self", ".", "method", "]", "(", "t", ",", "self", ".", "param1", ")", "elif", "self", ".", "method", "in", "self", ".", "method_2param", ":", "return", "self", ".", "method_2param", "[", "self", ".", "method", "]", "(", "t", ",", "self", ".", "param1", ",", "self", ".", "param2", ")", "else", ":", "raise", "Exception", "(", "\"Unsupported tween method {0}\"", ".", "format", "(", "self", ".", "method", ")", ")" ]
39.866667
20.266667
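Easing functions behind such a dispatch table map normalized time t to normalized progress; a self-contained sketch with two common curves (function names hypothetical):

    def linear(t):
        return t

    def ease_in_pow(t, power):
        return t ** power

    method_to_tween = {'linear': linear}
    method_1param = {'ease_in_pow': ease_in_pow}

    print(linear(0.5))          # 0.5
    print(ease_in_pow(0.5, 2))  # 0.25 -- slow start, fast finish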
def RfiltersBM(dataset, database, host=rbiomart_host):
    """
    Lists BioMart filters through an RPY2 connection.

    :param dataset: a dataset listed in RdatasetsBM()
    :param database: a database listed in RdatabasesBM()
    :param host: address of the host server, default='www.ensembl.org'

    :returns: nothing
    """
    biomaRt = importr("biomaRt")
    ensemblMart = biomaRt.useMart(database, host=host)
    ensembl = biomaRt.useDataset(dataset, mart=ensemblMart)
    print(biomaRt.listFilters(ensembl))
[ "def", "RfiltersBM", "(", "dataset", ",", "database", ",", "host", "=", "rbiomart_host", ")", ":", "biomaRt", "=", "importr", "(", "\"biomaRt\"", ")", "ensemblMart", "=", "biomaRt", ".", "useMart", "(", "database", ",", "host", "=", "host", ")", "ensembl", "=", "biomaRt", ".", "useDataset", "(", "dataset", ",", "mart", "=", "ensemblMart", ")", "print", "(", "biomaRt", ".", "listFilters", "(", "ensembl", ")", ")" ]
33.2
17.333333
def refresh(self, module=None):
    """Recompute the salience values of the Activations on the Agenda
    and then reorder the agenda.

    The Python equivalent of the CLIPS refresh-agenda command.

    If no Module is specified, the current one is used.
    """
    module = module._mdl if module is not None else ffi.NULL

    lib.EnvRefreshAgenda(self._env, module)
[ "def", "refresh", "(", "self", ",", "module", "=", "None", ")", ":", "module", "=", "module", ".", "_mdl", "if", "module", "is", "not", "None", "else", "ffi", ".", "NULL", "lib", ".", "EnvRefreshAgenda", "(", "self", ".", "_env", ",", "module", ")" ]
35.181818
19
def put_object(self, mpath, content=None, path=None, file=None,
               content_length=None, content_type="application/octet-stream",
               durability_level=None):
    """PutObject
    https://apidocs.joyent.com/manta/api.html#PutObject

    Examples:
        client.put_object('/trent/stor/foo', 'foo\nbar\nbaz')
        client.put_object('/trent/stor/foo', path='path/to/foo.txt')
        client.put_object('/trent/stor/foo', file=open('path/to/foo.txt'),
                          size=11)

    One of `content`, `path` or `file` is required.

    @param mpath {str} Required. A manta path, e.g. '/trent/stor/myobj'.
    @param content {bytes}
    @param path {str}
    @param file {file-like object}
    @param content_length {int} Not currently used. Expect this to be used
        when streaming support is added.
    @param content_type {string} Optional, but suggested. Default is
        'application/octet-stream'.
    @param durability_level {int} Optional. Default is 2. This tells Manta
        the number of copies to keep.
    """
    log.debug('PutObject %r', mpath)
    headers = {
        "Content-Type": content_type,
    }
    if durability_level:
        headers["x-durability-level"] = durability_level

    methods = [m for m in [content, path, file] if m is not None]
    if len(methods) != 1:
        raise errors.MantaError("exactly one of 'content', 'path' or "
                                "'file' must be provided")
    if content is not None:
        pass
    elif path:
        f = io.open(path, 'rb')
        try:
            content = f.read()
        finally:
            f.close()
    else:
        content = file.read()

    try:
        # python 3
        content_bytes = bytes(content, encoding='utf-8')
    except:
        # python 2
        content_bytes = content

    headers["Content-Length"] = str(len(content))
    md5 = hashlib.md5(content_bytes)
    headers["Content-MD5"] = base64.b64encode(md5.digest())

    res, content = self._request(mpath, "PUT", body=content,
                                 headers=headers)
    if res["status"] != "204":
        raise errors.MantaAPIError(res, content)
[ "def", "put_object", "(", "self", ",", "mpath", ",", "content", "=", "None", ",", "path", "=", "None", ",", "file", "=", "None", ",", "content_length", "=", "None", ",", "content_type", "=", "\"application/octet-stream\"", ",", "durability_level", "=", "None", ")", ":", "log", ".", "debug", "(", "'PutObject %r'", ",", "mpath", ")", "headers", "=", "{", "\"Content-Type\"", ":", "content_type", ",", "}", "if", "durability_level", ":", "headers", "[", "\"x-durability-level\"", "]", "=", "durability_level", "methods", "=", "[", "m", "for", "m", "in", "[", "content", ",", "path", ",", "file", "]", "if", "m", "is", "not", "None", "]", "if", "len", "(", "methods", ")", "!=", "1", ":", "raise", "errors", ".", "MantaError", "(", "\"exactly one of 'content', 'path' or \"", "\"'file' must be provided\"", ")", "if", "content", "is", "not", "None", ":", "pass", "elif", "path", ":", "f", "=", "io", ".", "open", "(", "path", ",", "'rb'", ")", "try", ":", "content", "=", "f", ".", "read", "(", ")", "finally", ":", "f", ".", "close", "(", ")", "else", ":", "content", "=", "file", ".", "read", "(", ")", "try", ":", "# python 3", "content_bytes", "=", "bytes", "(", "content", ",", "encoding", "=", "'utf-8'", ")", "except", ":", "# python 2", "content_bytes", "=", "content", "headers", "[", "\"Content-Length\"", "]", "=", "str", "(", "len", "(", "content", ")", ")", "md5", "=", "hashlib", ".", "md5", "(", "content_bytes", ")", "headers", "[", "\"Content-MD5\"", "]", "=", "base64", ".", "b64encode", "(", "md5", ".", "digest", "(", ")", ")", "res", ",", "content", "=", "self", ".", "_request", "(", "mpath", ",", "\"PUT\"", ",", "body", "=", "content", ",", "headers", "=", "headers", ")", "if", "res", "[", "\"status\"", "]", "!=", "\"204\"", ":", "raise", "errors", ".", "MantaAPIError", "(", "res", ",", "content", ")" ]
37.287879
17.454545
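The Content-MD5 header above is the standard base64-encoded MD5 digest of the request body; computing it in isolation, stdlib only:

    import base64
    import hashlib

    body = b'foo\nbar\nbaz'
    digest = hashlib.md5(body).digest()          # 16 raw bytes
    content_md5 = base64.b64encode(digest).decode('ascii')
    print(content_md5)  # a 24-character base64 string ending in '=='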
def run_once_per_node(func):
    """
    Decorator preventing wrapped function from running more than
    once per host (not just interpreter session).

    Using env.patch = True will allow the wrapped function to be run
    if it has been previously executed, but not otherwise.

    Stores the result of a function as server state.
    """
    @wraps(func)
    def decorated(*args, **kwargs):
        if not hasattr(env, 'patch'):
            env.patch = False
        state = version_state(func.__name__)
        if not env.patch and state:
            verbose = " ".join([env.host, func.__name__, "completed. Skipping..."])
        elif env.patch and not state:
            verbose = " ".join([env.host, func.__name__, "not previously completed. Skipping..."])
        else:
            results = func(*args, **kwargs)
            verbose = ''
            if results:
                set_version_state(func.__name__, object=results)
            else:
                set_version_state(func.__name__)
            return results
        if env.verbosity and verbose:
            print(verbose)
        return
    return decorated
[ "def", "run_once_per_node", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "decorated", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "hasattr", "(", "env", ",", "'patch'", ")", ":", "env", ".", "patch", "=", "False", "state", "=", "version_state", "(", "func", ".", "__name__", ")", "if", "not", "env", ".", "patch", "and", "state", ":", "verbose", "=", "\" \"", ".", "join", "(", "[", "env", ".", "host", ",", "func", ".", "__name__", ",", "\"completed. Skipping...\"", "]", ")", "elif", "env", ".", "patch", "and", "not", "state", ":", "verbose", "=", "\" \"", ".", "join", "(", "[", "env", ".", "host", ",", "func", ".", "__name__", ",", "\"not previously completed. Skipping...\"", "]", ")", "else", ":", "results", "=", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "verbose", "=", "''", "if", "results", ":", "set_version_state", "(", "func", ".", "__name__", ",", "object", "=", "results", ")", "else", ":", "set_version_state", "(", "func", ".", "__name__", ")", "return", "results", "if", "env", ".", "verbosity", "and", "verbose", ":", "print", "verbose", "return", "return", "decorated" ]
38.107143
18.035714
def genInterval(self, month=(), day=(), week=(), weekday=(), hour=(), minute=()):
    '''Generate a list of config dictionaries that represent an interval of
    time. Used to be passed into add() or remove(). For example::

        genInterval(month=(1,4), week=(1,4))
        # generates a list covering the first through third week
        # in January through March

    Args:
        month (tuple): (start, end) month in a year, from 1 to 12
        week (tuple): (start, end) week in a month, from 1 to 4
        day (tuple): (start, end) day in a month, from 1 to 31
        weekday (tuple): (start, end) weekday in a week, from 0 to 7.
            0 and 7 both represent Sunday
        hour (tuple): (start, end) hour in a day, from 0 to 24
        minute (tuple): (start, end) minute in an hour, from 0 to 59

    Returns:
        list: a list of dictionaries with form
        [{'Day': 12, 'Month': 3}, {}, etc]
    '''
    dic = {
        'Month': month,
        'Week': week,
        'Weekday': weekday,
        'Day': day,
        'Hour': hour,
        'Minute': minute
    }
    dic = {k: v for k, v in dic.items() if v != ()}
    # e.g. dic: {'Month': (1,5), 'Day': (2,4)}
    grandList = []
    for k in dic:  # e.g. k: 'Month', dic[k]: (1,5)
        l = []
        # rangeTuple = (dic[k][0], dic[k][1] + 1)  # e.g. (1,6)
        rangeTuple = dic[k]
        for num in range(rangeTuple[0], rangeTuple[1]):  # e.g. 1, 2, 3, 4
            l.append({k: num})  # e.g. [{'Month': 1}, {'Month': 2}]
        grandList.append(l)  # e.g. [[list of month], [list of day]]
    print(grandList)
    # grandList: [[list of month], [list of day]]
    # i.e. [[a,a1,a2,...], [b,b1,b2,...]]
    # crossCombine returns: [{a,b}, {a,b1}, {a,b2}, {a1,b}, {a1,b1},
    #                        {a1,b2}, {a2,b}, {a2,b1}, {a2,b2}]
    return crossCombine(grandList)
[ "def", "genInterval", "(", "self", ",", "month", "=", "(", ")", ",", "day", "=", "(", ")", ",", "week", "=", "(", ")", ",", "weekday", "=", "(", ")", ",", "hour", "=", "(", ")", ",", "minute", "=", "(", ")", ")", ":", "dic", "=", "{", "'Month'", ":", "month", ",", "'Day'", ":", "day", ",", "'Week'", ":", "week", ",", "'Weekday'", ":", "weekday", ",", "'Day'", ":", "day", ",", "'Hour'", ":", "hour", ",", "'Minute'", ":", "minute", "}", "dic", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "dic", ".", "items", "(", ")", "if", "v", "!=", "(", ")", "}", "# e.g. dic: {'month': (1,5), 'day': (2,4)}", "grandList", "=", "[", "]", "for", "k", "in", "dic", ":", "# e.g. k: 'month', dic[k]: (1,5)", "l", "=", "[", "]", "# rangeTuple = (dic[k][0], dic[k][1] + 1) # e.g. (1,6)", "rangeTuple", "=", "dic", "[", "k", "]", "for", "num", "in", "range", "(", "rangeTuple", "[", "0", "]", ",", "rangeTuple", "[", "1", "]", ")", ":", "# e.g. 1, 2, 3, 4, 5", "l", ".", "append", "(", "{", "k", ":", "num", "}", ")", "# e.g. [{'month': 1}, {'month': 2}]", "grandList", ".", "append", "(", "l", ")", "# e.g. [[list of month], [list of day]]", "print", "(", "grandList", ")", "# grandList: [[list of month], [list of day]]", "# l: [[a,a1,a2,...], [b,b1,b2,...]]", "# combineDict return: [{a,b}, {a,b1}, {a,b2}, {a1,b}, {a1,b1}, {a1, b2}, {a2,b}, {a2,b1}, {a2,b2}]", "return", "crossCombine", "(", "grandList", ")" ]
41.46
23.22
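crossCombine is project-specific, but its role per the comments is the Cartesian product of the per-field option lists with dict merging; an equivalent sketch using itertools.product:

    from itertools import product

    def cross_combine(grand_list):
        # Merge one dict from each sub-list, over every combination.
        return [{k: v for d in combo for k, v in d.items()}
                for combo in product(*grand_list)]

    months = [{'Month': 1}, {'Month': 2}]
    days = [{'Day': 2}, {'Day': 3}]
    print(cross_combine([months, days]))
    # [{'Month': 1, 'Day': 2}, {'Month': 1, 'Day': 3},
    #  {'Month': 2, 'Day': 2}, {'Month': 2, 'Day': 3}]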
def show(what='contents', calc_id=-1, extra=()):
    """
    Show the content of a datastore (by default the last one).
    """
    datadir = datastore.get_datadir()
    if what == 'all':  # show all
        if not os.path.exists(datadir):
            return
        rows = []
        for calc_id in datastore.get_calc_ids(datadir):
            try:
                ds = util.read(calc_id)
                oq = ds['oqparam']
                cmode, descr = oq.calculation_mode, oq.description
            except Exception:
                # invalid datastore file, or missing calculation_mode
                # and description attributes, perhaps due to a manual kill
                f = os.path.join(datadir, 'calc_%s.hdf5' % calc_id)
                logging.warning('Unreadable datastore %s', f)
                continue
            else:
                rows.append((calc_id, cmode, descr.encode('utf-8')))
        for row in sorted(rows, key=lambda row: row[0]):  # by calc_id
            print('#%d %s: %s' % row)
        return

    ds = util.read(calc_id)

    # this part is experimental
    if what == 'rlzs' and 'poes' in ds:
        min_value = 0.01  # used in rmsep
        getter = getters.PmapGetter(ds)
        pmaps = getter.get_pmaps()
        weights = [rlz.weight for rlz in getter.rlzs]
        mean = stats.compute_pmap_stats(
            pmaps, [numpy.mean], weights, getter.imtls)
        dists = []
        for rlz, pmap in zip(getter.rlzs, pmaps):
            dist = util.rmsep(mean.array, pmap.array, min_value)
            dists.append((dist, rlz))
        print('Realizations in order of distance from the mean curves')
        for dist, rlz in sorted(dists):
            print('%s: rmsep=%s' % (rlz, dist))
    elif view.keyfunc(what) in view:
        print(view(what, ds))
    elif what.split('/', 1)[0] in extract:
        print(extract(ds, what, *extra))
    elif what in ds:
        obj = ds[what]
        if hasattr(obj, 'value'):  # an array
            print(write_csv(io.BytesIO(), obj.value).decode('utf8'))
        else:
            print(obj)
    else:
        print('%s not found' % what)
    ds.close()
[ "def", "show", "(", "what", "=", "'contents'", ",", "calc_id", "=", "-", "1", ",", "extra", "=", "(", ")", ")", ":", "datadir", "=", "datastore", ".", "get_datadir", "(", ")", "if", "what", "==", "'all'", ":", "# show all", "if", "not", "os", ".", "path", ".", "exists", "(", "datadir", ")", ":", "return", "rows", "=", "[", "]", "for", "calc_id", "in", "datastore", ".", "get_calc_ids", "(", "datadir", ")", ":", "try", ":", "ds", "=", "util", ".", "read", "(", "calc_id", ")", "oq", "=", "ds", "[", "'oqparam'", "]", "cmode", ",", "descr", "=", "oq", ".", "calculation_mode", ",", "oq", ".", "description", "except", "Exception", ":", "# invalid datastore file, or missing calculation_mode", "# and description attributes, perhaps due to a manual kill", "f", "=", "os", ".", "path", ".", "join", "(", "datadir", ",", "'calc_%s.hdf5'", "%", "calc_id", ")", "logging", ".", "warning", "(", "'Unreadable datastore %s'", ",", "f", ")", "continue", "else", ":", "rows", ".", "append", "(", "(", "calc_id", ",", "cmode", ",", "descr", ".", "encode", "(", "'utf-8'", ")", ")", ")", "for", "row", "in", "sorted", "(", "rows", ",", "key", "=", "lambda", "row", ":", "row", "[", "0", "]", ")", ":", "# by calc_id", "print", "(", "'#%d %s: %s'", "%", "row", ")", "return", "ds", "=", "util", ".", "read", "(", "calc_id", ")", "# this part is experimental", "if", "what", "==", "'rlzs'", "and", "'poes'", "in", "ds", ":", "min_value", "=", "0.01", "# used in rmsep", "getter", "=", "getters", ".", "PmapGetter", "(", "ds", ")", "pmaps", "=", "getter", ".", "get_pmaps", "(", ")", "weights", "=", "[", "rlz", ".", "weight", "for", "rlz", "in", "getter", ".", "rlzs", "]", "mean", "=", "stats", ".", "compute_pmap_stats", "(", "pmaps", ",", "[", "numpy", ".", "mean", "]", ",", "weights", ",", "getter", ".", "imtls", ")", "dists", "=", "[", "]", "for", "rlz", ",", "pmap", "in", "zip", "(", "getter", ".", "rlzs", ",", "pmaps", ")", ":", "dist", "=", "util", ".", "rmsep", "(", "mean", ".", "array", ",", "pmap", ".", "array", ",", "min_value", ")", "dists", ".", "append", "(", "(", "dist", ",", "rlz", ")", ")", "print", "(", "'Realizations in order of distance from the mean curves'", ")", "for", "dist", ",", "rlz", "in", "sorted", "(", "dists", ")", ":", "print", "(", "'%s: rmsep=%s'", "%", "(", "rlz", ",", "dist", ")", ")", "elif", "view", ".", "keyfunc", "(", "what", ")", "in", "view", ":", "print", "(", "view", "(", "what", ",", "ds", ")", ")", "elif", "what", ".", "split", "(", "'/'", ",", "1", ")", "[", "0", "]", "in", "extract", ":", "print", "(", "extract", "(", "ds", ",", "what", ",", "*", "extra", ")", ")", "elif", "what", "in", "ds", ":", "obj", "=", "ds", "[", "what", "]", "if", "hasattr", "(", "obj", ",", "'value'", ")", ":", "# an array", "print", "(", "write_csv", "(", "io", ".", "BytesIO", "(", ")", ",", "obj", ".", "value", ")", ".", "decode", "(", "'utf8'", ")", ")", "else", ":", "print", "(", "obj", ")", "else", ":", "print", "(", "'%s not found'", "%", "what", ")", "ds", ".", "close", "(", ")" ]
36.631579
15.368421
def client(*args, **kwargs):
    """
    Create a low-level service client by name using the default session.

    Socket level timeouts are preconfigured according to the defaults set via
    the `fleece.boto3.set_default_timeout()` function, or they can also be
    set explicitly for a client by passing the `timeout`, `connect_timeout`
    or `read_timeout` arguments.
    """
    timeout = kwargs.pop('timeout', DEFAULT_TIMEOUT)
    connect_timeout = kwargs.pop('connect_timeout',
                                 DEFAULT_CONNECT_TIMEOUT or timeout)
    read_timeout = kwargs.pop('read_timeout', DEFAULT_READ_TIMEOUT or timeout)
    config = Config(connect_timeout=connect_timeout,
                    read_timeout=read_timeout)
    return real_boto3.client(*args, config=config, **kwargs)
[ "def", "client", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "timeout", "=", "kwargs", ".", "pop", "(", "'timeout'", ",", "DEFAULT_TIMEOUT", ")", "connect_timeout", "=", "kwargs", ".", "pop", "(", "'connect_timeout'", ",", "DEFAULT_CONNECT_TIMEOUT", "or", "timeout", ")", "read_timeout", "=", "kwargs", ".", "pop", "(", "'read_timeout'", ",", "DEFAULT_READ_TIMEOUT", "or", "timeout", ")", "config", "=", "Config", "(", "connect_timeout", "=", "connect_timeout", ",", "read_timeout", "=", "read_timeout", ")", "return", "real_boto3", ".", "client", "(", "*", "args", ",", "config", "=", "config", ",", "*", "*", "kwargs", ")" ]
48.6875
20.6875
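The Config object here is botocore's standard client configuration; the plain boto3 equivalent of what the wrapper sets up looks like this (the bucket name is hypothetical):

    import boto3
    from botocore.config import Config

    config = Config(connect_timeout=5, read_timeout=60)
    s3 = boto3.client('s3', config=config)
    # s3.list_objects_v2(Bucket='my-example-bucket')  # hypothetical bucket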
def register_route(self, route_rules, label=None):
    """Registers a routing rule.

    :param RouteRule|list[RouteRule] route_rules:

    :param str|unicode label: Label to mark the given set of rules.
        This can be used in conjunction with ``do_goto`` rule action.

        * http://uwsgi.readthedocs.io/en/latest/InternalRouting.html#goto
    """
    route_rules = listify(route_rules)

    if route_rules and label:
        self._set(route_rules[0].command_label, label, multi=True)

    for route_rule in route_rules:
        self._set(route_rule.command, route_rule.value, multi=True)

    return self._section
[ "def", "register_route", "(", "self", ",", "route_rules", ",", "label", "=", "None", ")", ":", "route_rules", "=", "listify", "(", "route_rules", ")", "if", "route_rules", "and", "label", ":", "self", ".", "_set", "(", "route_rules", "[", "0", "]", ".", "command_label", ",", "label", ",", "multi", "=", "True", ")", "for", "route_rules", "in", "route_rules", ":", "self", ".", "_set", "(", "route_rules", ".", "command", ",", "route_rules", ".", "value", ",", "multi", "=", "True", ")", "return", "self", ".", "_section" ]
32.8
24.45
def as_plain_ordered_dict(self):
    """return a deep copy of this config as a plain OrderedDict

    The config tree should be fully resolved.

    This is useful to get an object with no special semantics such as path
    expansion for the keys. In particular this means that keys that contain
    dots are not surrounded with '"' in the plain OrderedDict.

    :return: this config as an OrderedDict
    :type return: OrderedDict
    """
    def plain_value(v):
        if isinstance(v, list):
            return [plain_value(e) for e in v]
        elif isinstance(v, ConfigTree):
            return v.as_plain_ordered_dict()
        else:
            if isinstance(v, ConfigValues):
                raise ConfigException("The config tree contains unresolved elements")
            return v

    return OrderedDict((key.strip('"'), plain_value(value))
                       for key, value in self.items())
[ "def", "as_plain_ordered_dict", "(", "self", ")", ":", "def", "plain_value", "(", "v", ")", ":", "if", "isinstance", "(", "v", ",", "list", ")", ":", "return", "[", "plain_value", "(", "e", ")", "for", "e", "in", "v", "]", "elif", "isinstance", "(", "v", ",", "ConfigTree", ")", ":", "return", "v", ".", "as_plain_ordered_dict", "(", ")", "else", ":", "if", "isinstance", "(", "v", ",", "ConfigValues", ")", ":", "raise", "ConfigException", "(", "\"The config tree contains unresolved elements\"", ")", "return", "v", "return", "OrderedDict", "(", "(", "key", ".", "strip", "(", "'\"'", ")", ",", "plain_value", "(", "value", ")", ")", "for", "key", ",", "value", "in", "self", ".", "items", "(", ")", ")" ]
42.227273
23.409091
def load_inference_roidbs(self, name):
    """
    Args:
        name (str): name of one inference dataset, e.g. 'minival2014'

    Returns:
        roidbs (list[dict]):

        Each dict corresponds to one image to run inference on. The
        following keys in the dict are expected:

        file_name (str): full path to the image
        image_id (str): an id for the image. The inference results will be
            stored with this id.
    """
    return COCODetection.load_many(cfg.DATA.BASEDIR, name, add_gt=False)
[ "def", "load_inference_roidbs", "(", "self", ",", "name", ")", ":", "return", "COCODetection", ".", "load_many", "(", "cfg", ".", "DATA", ".", "BASEDIR", ",", "name", ",", "add_gt", "=", "False", ")" ]
36.2
24.2
def _init_itemid2name(self):
    """Print gene symbols instead of gene IDs, if provided."""
    if not hasattr(self.args, 'id2sym'):
        return None
    fin_id2sym = self.args.id2sym
    if fin_id2sym is not None and os.path.exists(fin_id2sym):
        id2sym = {}
        cmpl = re.compile(r'^\s*(\S+)[\s,;]+(\S+)')
        with open(fin_id2sym) as ifstrm:
            for line in ifstrm:
                mtch = cmpl.search(line)
                if mtch:
                    id2sym[mtch.group(1)] = mtch.group(2)
        return id2sym
[ "def", "_init_itemid2name", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ".", "args", ",", "'id2sym'", ")", ":", "return", "None", "fin_id2sym", "=", "self", ".", "args", ".", "id2sym", "if", "fin_id2sym", "is", "not", "None", "and", "os", ".", "path", ".", "exists", "(", "fin_id2sym", ")", ":", "id2sym", "=", "{", "}", "cmpl", "=", "re", ".", "compile", "(", "r'^\\s*(\\S+)[\\s,;]+(\\S+)'", ")", "with", "open", "(", "fin_id2sym", ")", "as", "ifstrm", ":", "for", "line", "in", "ifstrm", ":", "mtch", "=", "cmpl", ".", "search", "(", "line", ")", "if", "mtch", ":", "id2sym", "[", "mtch", ".", "group", "(", "1", ")", "]", "=", "mtch", ".", "group", "(", "2", ")", "return", "id2sym" ]
41.285714
11
def download_url(url, root, filename=None, md5=None):
    """Download a file from a url and place it in root.

    Args:
        url (str): URL to download file from
        root (str): Directory to place downloaded file in
        filename (str, optional): Name to save the file under.
            If None, use the basename of the URL
        md5 (str, optional): MD5 checksum of the download.
            If None, do not check
    """
    from six.moves import urllib
    root = os.path.expanduser(root)
    if not filename:
        filename = os.path.basename(url)
    fpath = os.path.join(root, filename)

    makedir_exist_ok(root)

    # downloads file
    if os.path.isfile(fpath) and check_integrity(fpath, md5):
        print('Using downloaded and verified file: ' + fpath)
    else:
        try:
            print('Downloading ' + url + ' to ' + fpath)
            urllib.request.urlretrieve(
                url, fpath,
                reporthook=gen_bar_updater()
            )
        except OSError:
            if url[:5] == 'https':
                url = url.replace('https:', 'http:')
                print('Failed download. Trying https -> http instead.'
                      ' Downloading ' + url + ' to ' + fpath)
                urllib.request.urlretrieve(
                    url, fpath,
                    reporthook=gen_bar_updater()
                )
[ "def", "download_url", "(", "url", ",", "root", ",", "filename", "=", "None", ",", "md5", "=", "None", ")", ":", "from", "six", ".", "moves", "import", "urllib", "root", "=", "os", ".", "path", ".", "expanduser", "(", "root", ")", "if", "not", "filename", ":", "filename", "=", "os", ".", "path", ".", "basename", "(", "url", ")", "fpath", "=", "os", ".", "path", ".", "join", "(", "root", ",", "filename", ")", "makedir_exist_ok", "(", "root", ")", "# downloads file", "if", "os", ".", "path", ".", "isfile", "(", "fpath", ")", "and", "check_integrity", "(", "fpath", ",", "md5", ")", ":", "print", "(", "'Using downloaded and verified file: '", "+", "fpath", ")", "else", ":", "try", ":", "print", "(", "'Downloading '", "+", "url", "+", "' to '", "+", "fpath", ")", "urllib", ".", "request", ".", "urlretrieve", "(", "url", ",", "fpath", ",", "reporthook", "=", "gen_bar_updater", "(", ")", ")", "except", "OSError", ":", "if", "url", "[", ":", "5", "]", "==", "'https'", ":", "url", "=", "url", ".", "replace", "(", "'https:'", ",", "'http:'", ")", "print", "(", "'Failed download. Trying https -> http instead.'", "' Downloading '", "+", "url", "+", "' to '", "+", "fpath", ")", "urllib", ".", "request", ".", "urlretrieve", "(", "url", ",", "fpath", ",", "reporthook", "=", "gen_bar_updater", "(", ")", ")" ]
35.621622
18.432432
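A hypothetical call to the function above; the URL and paths are placeholders, not real artifacts.

download_url('http://example.com/data/train.tar.gz',  # placeholder URL
             root='~/datasets',
             filename='train.tar.gz',
             md5=None)  # None skips checksum verification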
def commented(self, user):
    """ True if comment was added in given time frame """
    for comment in self.comments:
        # Description (comment #0) is not considered as a comment
        if comment["count"] == 0:
            continue
        if (comment.get('author', comment.get('creator')) == user.email and
                comment["creation_time"] >= self.options.since.date and
                comment["creation_time"] < self.options.until.date):
            return True
    return False
[ "def", "commented", "(", "self", ",", "user", ")", ":", "for", "comment", "in", "self", ".", "comments", ":", "# Description (comment #0) is not considered as a comment", "if", "comment", "[", "\"count\"", "]", "==", "0", ":", "continue", "if", "(", "comment", ".", "get", "(", "'author'", ",", "comment", ".", "get", "(", "'creator'", ")", ")", "==", "user", ".", "email", "and", "comment", "[", "\"creation_time\"", "]", ">=", "self", ".", "options", ".", "since", ".", "date", "and", "comment", "[", "\"creation_time\"", "]", "<", "self", ".", "options", ".", "until", ".", "date", ")", ":", "return", "True", "return", "False" ]
47.909091
18.545455
def prune_hashes(self, hashes, list_type):
    """Prune any hashes not in source resource or change list."""
    discarded = []
    for hash in hashes:
        if (hash in self.hashes):
            self.hashes.discard(hash)
            discarded.append(hash)
    self.logger.info("Not calculating %s hash(es) on destination as not present "
                     "in source %s list" % (', '.join(sorted(discarded)), list_type))
[ "def", "prune_hashes", "(", "self", ",", "hashes", ",", "list_type", ")", ":", "discarded", "=", "[", "]", "for", "hash", "in", "hashes", ":", "if", "(", "hash", "in", "self", ".", "hashes", ")", ":", "self", ".", "hashes", ".", "discard", "(", "hash", ")", "discarded", ".", "append", "(", "hash", ")", "self", ".", "logger", ".", "info", "(", "\"Not calculating %s hash(es) on destination as not present \"", "\"in source %s list\"", "%", "(", "', '", ".", "join", "(", "sorted", "(", "discarded", ")", ")", ",", "list_type", ")", ")" ]
50
14.777778
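A standalone sketch of the pruning step above: 'to_drop' stands in for hash types absent from the source list, and any of them still configured are discarded; the set contents are illustrative.

configured = {'md5', 'sha-1', 'sha-256'}
to_drop = {'sha-1', 'sha-256'}
discarded = []
for h in to_drop:
    if h in configured:
        configured.discard(h)
        discarded.append(h)
print(configured)          # {'md5'}
print(sorted(discarded))   # ['sha-1', 'sha-256']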
def while_until_true(interval, max_attempts):
    """Decorator that executes a function until it returns True.

    Executes wrapped function at every number of seconds specified by
    interval, until wrapped function either returns True or max_attempts are
    exhausted, whichever comes 1st.

    The difference between while_until_true and wait_until_true is that the
    latter will always loop to a max_attempts, whereas while_until_true will
    keep going indefinitely.

    The other notable difference to wait_until_true is that the wrapped
    function signature must be:
    func(counter, *args, **kwargs)

    This is because this decorator injects the while loop counter into the
    invoked function.

    Args:
        interval: In seconds. How long to wait between executing the wrapped
                  function.
        max_attempts: int. Execute wrapped function up to this limit. None
                      means infinite (or until wrapped function returns
                      True). Passing anything <0 also means infinite.

    Returns:
        Bool. True if wrapped function returned True. False if reached
        max_attempts without the wrapped function ever returning True.
    """
    def decorator(f):
        logger.debug("started")

        def sleep_looper(*args, **kwargs):
            if max_attempts:
                logger.debug(f"Looping every {interval} seconds for "
                             f"{max_attempts} attempts")
            else:
                logger.debug(f"Looping every {interval} seconds.")

            i = 0
            result = False
            # pragma for coverage: cov can't figure out the branch construct
            # with the dynamic function invocation, it seems, so marks the
            # branch partial. unit test cov is 100%, though.
            while not result:  # pragma: no branch
                i += 1
                result = f(i, *args, **kwargs)
                if result:
                    logger.debug(f"iteration {i}. Desired state reached.")
                    break
                elif max_attempts:
                    if i < max_attempts:
                        logger.debug(f"iteration {i}. Still waiting. . .")
                        time.sleep(interval)
                    else:
                        logger.debug(f"iteration {i}. Max attempts exhausted.")
                        break
                else:
                    # result False AND max_attempts is None means keep looping
                    # because None = infinite
                    logger.debug(f"iteration {i}. Still waiting. . .")
                    time.sleep(interval)
            logger.debug("done")
            return result
        return sleep_looper
    return decorator
[ "def", "while_until_true", "(", "interval", ",", "max_attempts", ")", ":", "def", "decorator", "(", "f", ")", ":", "logger", ".", "debug", "(", "\"started\"", ")", "def", "sleep_looper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "max_attempts", ":", "logger", ".", "debug", "(", "f\"Looping every {interval} seconds for \"", "f\"{max_attempts} attempts\"", ")", "else", ":", "logger", ".", "debug", "(", "f\"Looping every {interval} seconds.\"", ")", "i", "=", "0", "result", "=", "False", "# pragma for coverage: cov can't figure out the branch construct", "# with the dynamic function invocation, it seems, so marks the", "# branch partial. unit test cov is 100%, though.", "while", "not", "result", ":", "# pragma: no branch", "i", "+=", "1", "result", "=", "f", "(", "i", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "result", ":", "logger", ".", "debug", "(", "f\"iteration {i}. Desired state reached.\"", ")", "break", "elif", "max_attempts", ":", "if", "i", "<", "max_attempts", ":", "logger", ".", "debug", "(", "f\"iteration {i}. Still waiting. . .\"", ")", "time", ".", "sleep", "(", "interval", ")", "else", ":", "logger", ".", "debug", "(", "f\"iteration {i}. Max attempts exhausted.\"", ")", "break", "else", ":", "# result False AND max_attempts is None means keep looping", "# because None = infinite", "logger", ".", "debug", "(", "f\"iteration {i}. Still waiting. . .\"", ")", "time", ".", "sleep", "(", "interval", ")", "logger", ".", "debug", "(", "\"done\"", ")", "return", "result", "return", "sleep_looper", "return", "decorator" ]
39.115942
23.246377
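A hypothetical usage sketch of the decorator above: poll a dict until it reports ready, every 2 seconds, giving up after 5 attempts; note the injected counter as the wrapped function's first parameter.

@while_until_true(interval=2, max_attempts=5)
def wait_for_ready(counter, resource):
    # counter is injected by the decorator; resource is our own argument
    return resource.get('ready', False)

ok = wait_for_ready({'ready': True})  # True on the first iteration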
def render_html(self, *args, **kwargs):
    """
    Renders the template.

    :rtype: str
    """
    static_url = '%s://%s%s' % (self.request.scheme, self.request.get_host(), settings.STATIC_URL)
    media_url = '%s://%s%s' % (self.request.scheme, self.request.get_host(), settings.MEDIA_URL)
    with override_settings(STATIC_URL=static_url, MEDIA_URL=media_url):
        template = loader.get_template(self.template_name)
        context = self.get_context_data(*args, **kwargs)
        html = template.render(context)
    return html
[ "def", "render_html", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "static_url", "=", "'%s://%s%s'", "%", "(", "self", ".", "request", ".", "scheme", ",", "self", ".", "request", ".", "get_host", "(", ")", ",", "settings", ".", "STATIC_URL", ")", "media_url", "=", "'%s://%s%s'", "%", "(", "self", ".", "request", ".", "scheme", ",", "self", ".", "request", ".", "get_host", "(", ")", ",", "settings", ".", "MEDIA_URL", ")", "with", "override_settings", "(", "STATIC_URL", "=", "static_url", ",", "MEDIA_URL", "=", "media_url", ")", ":", "template", "=", "loader", ".", "get_template", "(", "self", ".", "template_name", ")", "context", "=", "self", ".", "get_context_data", "(", "*", "args", ",", "*", "*", "kwargs", ")", "html", "=", "template", ".", "render", "(", "context", ")", "return", "html" ]
41
23.714286
def on_task(self, task, response):
    '''Deal one task'''
    start_time = time.time()
    response = rebuild_response(response)

    try:
        assert 'taskid' in task, 'need taskid in task'
        project = task['project']
        updatetime = task.get('project_updatetime', None)
        md5sum = task.get('project_md5sum', None)
        project_data = self.project_manager.get(project, updatetime, md5sum)
        assert project_data, "no such project!"
        if project_data.get('exception'):
            ret = ProcessorResult(logs=(project_data.get('exception_log'), ),
                                  exception=project_data['exception'])
        else:
            ret = project_data['instance'].run_task(
                project_data['module'], task, response)
    except Exception as e:
        logstr = traceback.format_exc()
        ret = ProcessorResult(logs=(logstr, ), exception=e)
    process_time = time.time() - start_time

    if not ret.extinfo.get('not_send_status', False):
        if ret.exception:
            track_headers = dict(response.headers)
        else:
            track_headers = {}
            for name in ('etag', 'last-modified'):
                if name not in response.headers:
                    continue
                track_headers[name] = response.headers[name]

        status_pack = {
            'taskid': task['taskid'],
            'project': task['project'],
            'url': task.get('url'),
            'track': {
                'fetch': {
                    'ok': response.isok(),
                    'redirect_url': response.url if response.url != response.orig_url else None,
                    'time': response.time,
                    'error': response.error,
                    'status_code': response.status_code,
                    'encoding': getattr(response, '_encoding', None),
                    'headers': track_headers,
                    'content': response.text[:500] if ret.exception else None,
                },
                'process': {
                    'ok': not ret.exception,
                    'time': process_time,
                    'follows': len(ret.follows),
                    'result': (
                        None if ret.result is None
                        else utils.text(ret.result)[:self.RESULT_RESULT_LIMIT]
                    ),
                    'logs': ret.logstr()[-self.RESULT_LOGS_LIMIT:],
                    'exception': ret.exception,
                },
                'save': ret.save,
            },
        }
        if 'schedule' in task:
            status_pack['schedule'] = task['schedule']

        # FIXME: unicode_obj should used in scheduler before store to database
        # it's used here for performance.
        self.status_queue.put(utils.unicode_obj(status_pack))

    # FIXME: unicode_obj should used in scheduler before store to database
    # it's used here for performance.
    if ret.follows:
        for each in (ret.follows[x:x + 1000] for x in range(0, len(ret.follows), 1000)):
            self.newtask_queue.put([utils.unicode_obj(newtask) for newtask in each])

    for project, msg, url in ret.messages:
        try:
            self.on_task({
                'taskid': utils.md5string(url),
                'project': project,
                'url': url,
                'process': {
                    'callback': '_on_message',
                }
            }, {
                'status_code': 200,
                'url': url,
                'save': (task['project'], msg),
            })
        except Exception as e:
            logger.exception('Sending message error.')
            continue

    if ret.exception:
        logger_func = logger.error
    else:
        logger_func = logger.info
    logger_func('process %s:%s %s -> [%d] len:%d -> result:%.10r fol:%d msg:%d err:%r' % (
        task['project'], task['taskid'],
        task.get('url'), response.status_code, len(response.content),
        ret.result, len(ret.follows), len(ret.messages),
        ret.exception))

    return True
[ "def", "on_task", "(", "self", ",", "task", ",", "response", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "response", "=", "rebuild_response", "(", "response", ")", "try", ":", "assert", "'taskid'", "in", "task", ",", "'need taskid in task'", "project", "=", "task", "[", "'project'", "]", "updatetime", "=", "task", ".", "get", "(", "'project_updatetime'", ",", "None", ")", "md5sum", "=", "task", ".", "get", "(", "'project_md5sum'", ",", "None", ")", "project_data", "=", "self", ".", "project_manager", ".", "get", "(", "project", ",", "updatetime", ",", "md5sum", ")", "assert", "project_data", ",", "\"no such project!\"", "if", "project_data", ".", "get", "(", "'exception'", ")", ":", "ret", "=", "ProcessorResult", "(", "logs", "=", "(", "project_data", ".", "get", "(", "'exception_log'", ")", ",", ")", ",", "exception", "=", "project_data", "[", "'exception'", "]", ")", "else", ":", "ret", "=", "project_data", "[", "'instance'", "]", ".", "run_task", "(", "project_data", "[", "'module'", "]", ",", "task", ",", "response", ")", "except", "Exception", "as", "e", ":", "logstr", "=", "traceback", ".", "format_exc", "(", ")", "ret", "=", "ProcessorResult", "(", "logs", "=", "(", "logstr", ",", ")", ",", "exception", "=", "e", ")", "process_time", "=", "time", ".", "time", "(", ")", "-", "start_time", "if", "not", "ret", ".", "extinfo", ".", "get", "(", "'not_send_status'", ",", "False", ")", ":", "if", "ret", ".", "exception", ":", "track_headers", "=", "dict", "(", "response", ".", "headers", ")", "else", ":", "track_headers", "=", "{", "}", "for", "name", "in", "(", "'etag'", ",", "'last-modified'", ")", ":", "if", "name", "not", "in", "response", ".", "headers", ":", "continue", "track_headers", "[", "name", "]", "=", "response", ".", "headers", "[", "name", "]", "status_pack", "=", "{", "'taskid'", ":", "task", "[", "'taskid'", "]", ",", "'project'", ":", "task", "[", "'project'", "]", ",", "'url'", ":", "task", ".", "get", "(", "'url'", ")", ",", "'track'", ":", "{", "'fetch'", ":", "{", "'ok'", ":", "response", ".", "isok", "(", ")", ",", "'redirect_url'", ":", "response", ".", "url", "if", "response", ".", "url", "!=", "response", ".", "orig_url", "else", "None", ",", "'time'", ":", "response", ".", "time", ",", "'error'", ":", "response", ".", "error", ",", "'status_code'", ":", "response", ".", "status_code", ",", "'encoding'", ":", "getattr", "(", "response", ",", "'_encoding'", ",", "None", ")", ",", "'headers'", ":", "track_headers", ",", "'content'", ":", "response", ".", "text", "[", ":", "500", "]", "if", "ret", ".", "exception", "else", "None", ",", "}", ",", "'process'", ":", "{", "'ok'", ":", "not", "ret", ".", "exception", ",", "'time'", ":", "process_time", ",", "'follows'", ":", "len", "(", "ret", ".", "follows", ")", ",", "'result'", ":", "(", "None", "if", "ret", ".", "result", "is", "None", "else", "utils", ".", "text", "(", "ret", ".", "result", ")", "[", ":", "self", ".", "RESULT_RESULT_LIMIT", "]", ")", ",", "'logs'", ":", "ret", ".", "logstr", "(", ")", "[", "-", "self", ".", "RESULT_LOGS_LIMIT", ":", "]", ",", "'exception'", ":", "ret", ".", "exception", ",", "}", ",", "'save'", ":", "ret", ".", "save", ",", "}", ",", "}", "if", "'schedule'", "in", "task", ":", "status_pack", "[", "'schedule'", "]", "=", "task", "[", "'schedule'", "]", "# FIXME: unicode_obj should used in scheduler before store to database", "# it's used here for performance.", "self", ".", "status_queue", ".", "put", "(", "utils", ".", "unicode_obj", "(", "status_pack", ")", ")", 
"# FIXME: unicode_obj should used in scheduler before store to database", "# it's used here for performance.", "if", "ret", ".", "follows", ":", "for", "each", "in", "(", "ret", ".", "follows", "[", "x", ":", "x", "+", "1000", "]", "for", "x", "in", "range", "(", "0", ",", "len", "(", "ret", ".", "follows", ")", ",", "1000", ")", ")", ":", "self", ".", "newtask_queue", ".", "put", "(", "[", "utils", ".", "unicode_obj", "(", "newtask", ")", "for", "newtask", "in", "each", "]", ")", "for", "project", ",", "msg", ",", "url", "in", "ret", ".", "messages", ":", "try", ":", "self", ".", "on_task", "(", "{", "'taskid'", ":", "utils", ".", "md5string", "(", "url", ")", ",", "'project'", ":", "project", ",", "'url'", ":", "url", ",", "'process'", ":", "{", "'callback'", ":", "'_on_message'", ",", "}", "}", ",", "{", "'status_code'", ":", "200", ",", "'url'", ":", "url", ",", "'save'", ":", "(", "task", "[", "'project'", "]", ",", "msg", ")", ",", "}", ")", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", "'Sending message error.'", ")", "continue", "if", "ret", ".", "exception", ":", "logger_func", "=", "logger", ".", "error", "else", ":", "logger_func", "=", "logger", ".", "info", "logger_func", "(", "'process %s:%s %s -> [%d] len:%d -> result:%.10r fol:%d msg:%d err:%r'", "%", "(", "task", "[", "'project'", "]", ",", "task", "[", "'taskid'", "]", ",", "task", ".", "get", "(", "'url'", ")", ",", "response", ".", "status_code", ",", "len", "(", "response", ".", "content", ")", ",", "ret", ".", "result", ",", "len", "(", "ret", ".", "follows", ")", ",", "len", "(", "ret", ".", "messages", ")", ",", "ret", ".", "exception", ")", ")", "return", "True" ]
42.470588
18.313725
def optimize(self, angles0, target):
    """Calculate an optimum argument of an objective function."""
    def new_objective(angles):
        return self.f(angles, target)

    return scipy.optimize.minimize(
        new_objective,
        angles0,
        **self.optimizer_opt).x
[ "def", "optimize", "(", "self", ",", "angles0", ",", "target", ")", ":", "def", "new_objective", "(", "angles", ")", ":", "return", "self", ".", "f", "(", "angles", ",", "target", ")", "return", "scipy", ".", "optimize", ".", "minimize", "(", "new_objective", ",", "angles0", ",", "*", "*", "self", ".", "optimizer_opt", ")", ".", "x" ]
33.333333
10.111111
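A minimal standalone sketch of the same wrapper pattern: freeze the target inside a closure, then minimize over the remaining argument. The quadratic objective here is illustrative, not the class's self.f.

import numpy as np
import scipy.optimize

def objective(angles, target):
    # Simple convex objective so the minimizer has a known answer.
    return np.sum((np.asarray(angles) - target) ** 2)

target = np.array([0.1, 0.2, 0.3])
best = scipy.optimize.minimize(lambda a: objective(a, target),
                               np.zeros(3)).x
# best is approximately [0.1, 0.2, 0.3]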
def load_kallisto(self):
    """
    Load Kallisto transcript quantification data for a cohort

    Parameters
    ----------

    Returns
    -------
    kallisto_data : Pandas dataframe
        Pandas dataframe with Kallisto data for all patients
        columns include patient_id, gene_name, est_counts
    """
    kallisto_data = pd.concat(
        [self._load_single_patient_kallisto(patient) for patient in self],
        copy=False
    )

    if self.kallisto_ensembl_version is None:
        raise ValueError("Required a kallisto_ensembl_version but none was specified")

    ensembl_release = cached_release(self.kallisto_ensembl_version)

    kallisto_data["gene_name"] = \
        kallisto_data["target_id"].map(lambda t: ensembl_release.gene_name_of_transcript_id(t))

    # sum counts across genes
    kallisto_data = \
        kallisto_data.groupby(["patient_id", "gene_name"])[["est_counts"]].sum().reset_index()

    return kallisto_data
[ "def", "load_kallisto", "(", "self", ")", ":", "kallisto_data", "=", "pd", ".", "concat", "(", "[", "self", ".", "_load_single_patient_kallisto", "(", "patient", ")", "for", "patient", "in", "self", "]", ",", "copy", "=", "False", ")", "if", "self", ".", "kallisto_ensembl_version", "is", "None", ":", "raise", "ValueError", "(", "\"Required a kallisto_ensembl_version but none was specified\"", ")", "ensembl_release", "=", "cached_release", "(", "self", ".", "kallisto_ensembl_version", ")", "kallisto_data", "[", "\"gene_name\"", "]", "=", "kallisto_data", "[", "\"target_id\"", "]", ".", "map", "(", "lambda", "t", ":", "ensembl_release", ".", "gene_name_of_transcript_id", "(", "t", ")", ")", "# sum counts across genes", "kallisto_data", "=", "kallisto_data", ".", "groupby", "(", "[", "\"patient_id\"", ",", "\"gene_name\"", "]", ")", "[", "[", "\"est_counts\"", "]", "]", ".", "sum", "(", ")", ".", "reset_index", "(", ")", "return", "kallisto_data" ]
32.774194
25.677419
def info(name, path=None):
    '''
    Returns information about a container

    path
        path to the container parent directory
        default: /var/lib/lxc (system)

        .. versionadded:: 2015.8.0

    CLI Example:

    .. code-block:: bash

        salt '*' lxc.info name
    '''
    cachekey = 'lxc.info.{0}{1}'.format(name, path)
    try:
        return __context__[cachekey]
    except KeyError:
        _ensure_exists(name, path=path)
        cpath = get_root_path(path)
        try:
            conf_file = os.path.join(cpath, name, 'config')
        except AttributeError:
            conf_file = os.path.join(cpath, six.text_type(name), 'config')

        if not os.path.isfile(conf_file):
            raise CommandExecutionError(
                'LXC config file {0} does not exist'.format(conf_file)
            )

        ret = {}
        config = []
        with salt.utils.files.fopen(conf_file) as fp_:
            for line in fp_:
                line = salt.utils.stringutils.to_unicode(line)
                comps = [x.strip() for x in
                         line.split('#', 1)[0].strip().split('=', 1)]
                if len(comps) == 2:
                    config.append(tuple(comps))

        ifaces = []
        current = None

        for key, val in config:
            if key == 'lxc.network.type':
                current = {'type': val}
                ifaces.append(current)
            elif not current:
                continue
            elif key.startswith('lxc.network.'):
                current[key.replace('lxc.network.', '', 1)] = val
        if ifaces:
            ret['nics'] = ifaces

        ret['rootfs'] = next(
            (x[1] for x in config if x[0] == 'lxc.rootfs'),
            None
        )
        ret['state'] = state(name, path=path)
        ret['ips'] = []
        ret['public_ips'] = []
        ret['private_ips'] = []
        ret['public_ipv4_ips'] = []
        ret['public_ipv6_ips'] = []
        ret['private_ipv4_ips'] = []
        ret['private_ipv6_ips'] = []
        ret['ipv4_ips'] = []
        ret['ipv6_ips'] = []
        ret['size'] = None
        ret['config'] = conf_file

        if ret['state'] == 'running':
            try:
                limit = int(get_parameter(name, 'memory.limit_in_bytes'))
            except (CommandExecutionError, TypeError, ValueError):
                limit = 0
            try:
                usage = int(get_parameter(name, 'memory.usage_in_bytes'))
            except (CommandExecutionError, TypeError, ValueError):
                usage = 0
            free = limit - usage
            ret['memory_limit'] = limit
            ret['memory_free'] = free
            size = run_stdout(name, 'df /', path=path, python_shell=False)
            # The size is the 2nd column of the last line
            ret['size'] = size.splitlines()[-1].split()[1]

            # First try iproute2
            ip_cmd = run_all(
                name, 'ip link show', path=path, python_shell=False)
            if ip_cmd['retcode'] == 0:
                ip_data = ip_cmd['stdout']
                ip_cmd = run_all(
                    name, 'ip addr show', path=path, python_shell=False)
                ip_data += '\n' + ip_cmd['stdout']
                ip_data = salt.utils.network._interfaces_ip(ip_data)
            else:
                # That didn't work, try ifconfig
                ip_cmd = run_all(
                    name, 'ifconfig', path=path, python_shell=False)
                if ip_cmd['retcode'] == 0:
                    ip_data = \
                        salt.utils.network._interfaces_ifconfig(
                            ip_cmd['stdout'])
                else:
                    # Neither was successful, give up
                    log.warning(
                        'Unable to run ip or ifconfig in container \'%s\'', name
                    )
                    ip_data = {}

            ret['ipv4_ips'] = salt.utils.network.ip_addrs(
                include_loopback=True,
                interface_data=ip_data
            )
            ret['ipv6_ips'] = salt.utils.network.ip_addrs6(
                include_loopback=True,
                interface_data=ip_data
            )
            ret['ips'] = ret['ipv4_ips'] + ret['ipv6_ips']

            for address in ret['ipv4_ips']:
                if address == '127.0.0.1':
                    ret['private_ips'].append(address)
                    ret['private_ipv4_ips'].append(address)
                elif salt.utils.cloud.is_public_ip(address):
                    ret['public_ips'].append(address)
                    ret['public_ipv4_ips'].append(address)
                else:
                    ret['private_ips'].append(address)
                    ret['private_ipv4_ips'].append(address)

            for address in ret['ipv6_ips']:
                if address == '::1' or address.startswith('fe80'):
                    ret['private_ips'].append(address)
                    ret['private_ipv6_ips'].append(address)
                else:
                    ret['public_ips'].append(address)
                    ret['public_ipv6_ips'].append(address)

        for key in [x for x in ret if x == 'ips' or x.endswith('ips')]:
            ret[key].sort(key=_ip_sort)

        __context__[cachekey] = ret
    return __context__[cachekey]
[ "def", "info", "(", "name", ",", "path", "=", "None", ")", ":", "cachekey", "=", "'lxc.info.{0}{1}'", ".", "format", "(", "name", ",", "path", ")", "try", ":", "return", "__context__", "[", "cachekey", "]", "except", "KeyError", ":", "_ensure_exists", "(", "name", ",", "path", "=", "path", ")", "cpath", "=", "get_root_path", "(", "path", ")", "try", ":", "conf_file", "=", "os", ".", "path", ".", "join", "(", "cpath", ",", "name", ",", "'config'", ")", "except", "AttributeError", ":", "conf_file", "=", "os", ".", "path", ".", "join", "(", "cpath", ",", "six", ".", "text_type", "(", "name", ")", ",", "'config'", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "conf_file", ")", ":", "raise", "CommandExecutionError", "(", "'LXC config file {0} does not exist'", ".", "format", "(", "conf_file", ")", ")", "ret", "=", "{", "}", "config", "=", "[", "]", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "conf_file", ")", "as", "fp_", ":", "for", "line", "in", "fp_", ":", "line", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", "comps", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "line", ".", "split", "(", "'#'", ",", "1", ")", "[", "0", "]", ".", "strip", "(", ")", ".", "split", "(", "'='", ",", "1", ")", "]", "if", "len", "(", "comps", ")", "==", "2", ":", "config", ".", "append", "(", "tuple", "(", "comps", ")", ")", "ifaces", "=", "[", "]", "current", "=", "None", "for", "key", ",", "val", "in", "config", ":", "if", "key", "==", "'lxc.network.type'", ":", "current", "=", "{", "'type'", ":", "val", "}", "ifaces", ".", "append", "(", "current", ")", "elif", "not", "current", ":", "continue", "elif", "key", ".", "startswith", "(", "'lxc.network.'", ")", ":", "current", "[", "key", ".", "replace", "(", "'lxc.network.'", ",", "''", ",", "1", ")", "]", "=", "val", "if", "ifaces", ":", "ret", "[", "'nics'", "]", "=", "ifaces", "ret", "[", "'rootfs'", "]", "=", "next", "(", "(", "x", "[", "1", "]", "for", "x", "in", "config", "if", "x", "[", "0", "]", "==", "'lxc.rootfs'", ")", ",", "None", ")", "ret", "[", "'state'", "]", "=", "state", "(", "name", ",", "path", "=", "path", ")", "ret", "[", "'ips'", "]", "=", "[", "]", "ret", "[", "'public_ips'", "]", "=", "[", "]", "ret", "[", "'private_ips'", "]", "=", "[", "]", "ret", "[", "'public_ipv4_ips'", "]", "=", "[", "]", "ret", "[", "'public_ipv6_ips'", "]", "=", "[", "]", "ret", "[", "'private_ipv4_ips'", "]", "=", "[", "]", "ret", "[", "'private_ipv6_ips'", "]", "=", "[", "]", "ret", "[", "'ipv4_ips'", "]", "=", "[", "]", "ret", "[", "'ipv6_ips'", "]", "=", "[", "]", "ret", "[", "'size'", "]", "=", "None", "ret", "[", "'config'", "]", "=", "conf_file", "if", "ret", "[", "'state'", "]", "==", "'running'", ":", "try", ":", "limit", "=", "int", "(", "get_parameter", "(", "name", ",", "'memory.limit_in_bytes'", ")", ")", "except", "(", "CommandExecutionError", ",", "TypeError", ",", "ValueError", ")", ":", "limit", "=", "0", "try", ":", "usage", "=", "int", "(", "get_parameter", "(", "name", ",", "'memory.usage_in_bytes'", ")", ")", "except", "(", "CommandExecutionError", ",", "TypeError", ",", "ValueError", ")", ":", "usage", "=", "0", "free", "=", "limit", "-", "usage", "ret", "[", "'memory_limit'", "]", "=", "limit", "ret", "[", "'memory_free'", "]", "=", "free", "size", "=", "run_stdout", "(", "name", ",", "'df /'", ",", "path", "=", "path", ",", "python_shell", "=", "False", ")", "# The size is the 2nd column of the last line", "ret", "[", "'size'", "]", "=", "size", ".", 
"splitlines", "(", ")", "[", "-", "1", "]", ".", "split", "(", ")", "[", "1", "]", "# First try iproute2", "ip_cmd", "=", "run_all", "(", "name", ",", "'ip link show'", ",", "path", "=", "path", ",", "python_shell", "=", "False", ")", "if", "ip_cmd", "[", "'retcode'", "]", "==", "0", ":", "ip_data", "=", "ip_cmd", "[", "'stdout'", "]", "ip_cmd", "=", "run_all", "(", "name", ",", "'ip addr show'", ",", "path", "=", "path", ",", "python_shell", "=", "False", ")", "ip_data", "+=", "'\\n'", "+", "ip_cmd", "[", "'stdout'", "]", "ip_data", "=", "salt", ".", "utils", ".", "network", ".", "_interfaces_ip", "(", "ip_data", ")", "else", ":", "# That didn't work, try ifconfig", "ip_cmd", "=", "run_all", "(", "name", ",", "'ifconfig'", ",", "path", "=", "path", ",", "python_shell", "=", "False", ")", "if", "ip_cmd", "[", "'retcode'", "]", "==", "0", ":", "ip_data", "=", "salt", ".", "utils", ".", "network", ".", "_interfaces_ifconfig", "(", "ip_cmd", "[", "'stdout'", "]", ")", "else", ":", "# Neither was successful, give up", "log", ".", "warning", "(", "'Unable to run ip or ifconfig in container \\'%s\\''", ",", "name", ")", "ip_data", "=", "{", "}", "ret", "[", "'ipv4_ips'", "]", "=", "salt", ".", "utils", ".", "network", ".", "ip_addrs", "(", "include_loopback", "=", "True", ",", "interface_data", "=", "ip_data", ")", "ret", "[", "'ipv6_ips'", "]", "=", "salt", ".", "utils", ".", "network", ".", "ip_addrs6", "(", "include_loopback", "=", "True", ",", "interface_data", "=", "ip_data", ")", "ret", "[", "'ips'", "]", "=", "ret", "[", "'ipv4_ips'", "]", "+", "ret", "[", "'ipv6_ips'", "]", "for", "address", "in", "ret", "[", "'ipv4_ips'", "]", ":", "if", "address", "==", "'127.0.0.1'", ":", "ret", "[", "'private_ips'", "]", ".", "append", "(", "address", ")", "ret", "[", "'private_ipv4_ips'", "]", ".", "append", "(", "address", ")", "elif", "salt", ".", "utils", ".", "cloud", ".", "is_public_ip", "(", "address", ")", ":", "ret", "[", "'public_ips'", "]", ".", "append", "(", "address", ")", "ret", "[", "'public_ipv4_ips'", "]", ".", "append", "(", "address", ")", "else", ":", "ret", "[", "'private_ips'", "]", ".", "append", "(", "address", ")", "ret", "[", "'private_ipv4_ips'", "]", ".", "append", "(", "address", ")", "for", "address", "in", "ret", "[", "'ipv6_ips'", "]", ":", "if", "address", "==", "'::1'", "or", "address", ".", "startswith", "(", "'fe80'", ")", ":", "ret", "[", "'private_ips'", "]", ".", "append", "(", "address", ")", "ret", "[", "'private_ipv6_ips'", "]", ".", "append", "(", "address", ")", "else", ":", "ret", "[", "'public_ips'", "]", ".", "append", "(", "address", ")", "ret", "[", "'public_ipv6_ips'", "]", ".", "append", "(", "address", ")", "for", "key", "in", "[", "x", "for", "x", "in", "ret", "if", "x", "==", "'ips'", "or", "x", ".", "endswith", "(", "'ips'", ")", "]", ":", "ret", "[", "key", "]", ".", "sort", "(", "key", "=", "_ip_sort", ")", "__context__", "[", "cachekey", "]", "=", "ret", "return", "__context__", "[", "cachekey", "]" ]
35.993056
17.104167
def natural_keys(input_str: str) -> List[Union[int, str]]:
    """
    Converts a string into a list of integers and strings to support natural sorting (see natural_sort).

    For example: natural_keys('abc123def') -> ['abc', '123', 'def']
    :param input_str: string to convert
    :return: list of strings and integers
    """
    return [try_int_or_force_to_lower_case(substr) for substr in re.split(r'(\d+)', input_str)]
[ "def", "natural_keys", "(", "input_str", ":", "str", ")", "->", "List", "[", "Union", "[", "int", ",", "str", "]", "]", ":", "return", "[", "try_int_or_force_to_lower_case", "(", "substr", ")", "for", "substr", "in", "re", ".", "split", "(", "r'(\\d+)'", ",", "input_str", ")", "]" ]
46.444444
22.888889
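A self-contained equivalent of the natural-sort key above, re-implementing the try_int_or_force_to_lower_case helper inline for illustration.

import re

def keys(s):
    # Digit runs compare as integers, everything else as lowercase text.
    return [int(p) if p.isdigit() else p.lower()
            for p in re.split(r'(\d+)', s)]

print(sorted(['file10', 'file2', 'File1'], key=keys))
# ['File1', 'file2', 'file10']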
def _make_triangles_lines(shape, wrapx=False, wrapy=False):
    """Transform rectangular regular grid into triangles.

    :param shape: (nx, ny) shape of the regular grid
    :param bool wrapx: when True, the x direction is assumed to wrap, and polygons are drawn between the end and begin points
    :param bool wrapy: similar for the y coordinate
    :return: triangles and lines used to plot Mesh
    """
    nx, ny = shape
    mx = nx if wrapx else nx - 1
    my = ny if wrapy else ny - 1

    """
    create all pair of indices (i,j) of the rectangular grid
    minus last row if wrapx = False => mx
    minus last column if wrapy = False => my
    |  (0,0)    ...    (0,j)    ...    (0,my-1)  |
    |    .       .       .       .        .      |
    |  (i,0)    ...    (i,j)    ...    (i,my-1)  |
    |    .       .       .       .        .      |
    |(mx-1,0)   ...  (mx-1,j)   ...  (mx-1,my-1) |
    """
    i, j = np.mgrid[0:mx, 0:my]

    """
    collapsed i and j in one dimensional array, row-major order
    ex :
    array([[0, 1, 2],     =>     array([0, 1, 2, 3, *4*, 5])
           [3, *4*, 5]])
    if we want vertex 4 at (i=1,j=1) we must transform it in i*ny+j = 4
    """
    i, j = np.ravel(i), np.ravel(j)

    """
    Let's go for the triangles :
        (i,j)   -   (i,j+1)     -> y dir
        (i+1,j) -   (i+1,j+1)
          |
          v
        x dir

    in flatten coordinates:
        i*ny+j     -   i*ny+j+1
        (i+1)*ny+j -   (i+1)*ny+j+1
    """
    t1 = (i * ny + j, (i + 1) % nx * ny + j, (i + 1) % nx * ny + (j + 1) % ny)
    t2 = (i * ny + j, (i + 1) % nx * ny + (j + 1) % ny, i * ny + (j + 1) % ny)

    """
    %nx and %ny are used for wrapx and wrapy :
    if (i+1)=nx => (i+1)%nx=0 => close mesh in x direction
    if (j+1)=ny => (j+1)%ny=0 => close mesh in y direction
    """
    nt = len(t1[0])

    triangles = np.zeros((nt * 2, 3), dtype=np.uint32)
    triangles[0::2, 0], triangles[0::2, 1], triangles[0::2, 2] = t1
    triangles[1::2, 0], triangles[1::2, 1], triangles[1::2, 2] = t2

    lines = np.zeros((nt * 4, 2), dtype=np.uint32)
    lines[::4, 0], lines[::4, 1] = t1[:2]
    lines[1::4, 0], lines[1::4, 1] = t1[0], t2[2]
    lines[2::4, 0], lines[2::4, 1] = t2[2:0:-1]
    lines[3::4, 0], lines[3::4, 1] = t1[1], t2[1]

    return triangles, lines
[ "def", "_make_triangles_lines", "(", "shape", ",", "wrapx", "=", "False", ",", "wrapy", "=", "False", ")", ":", "nx", ",", "ny", "=", "shape", "mx", "=", "nx", "if", "wrapx", "else", "nx", "-", "1", "my", "=", "ny", "if", "wrapy", "else", "ny", "-", "1", "\"\"\"\n create all pair of indices (i,j) of the rectangular grid\n minus last row if wrapx = False => mx\n minus last column if wrapy = False => my\n | (0,0) ... (0,j) ... (0,my-1) |\n | . . . . . |\n | (i,0) ... (i,j) ... (i,my-1) |\n | . . . . . |\n |(mx-1,0) ... (mx-1,j) ... (mx-1,my-1) |\n \"\"\"", "i", ",", "j", "=", "np", ".", "mgrid", "[", "0", ":", "mx", ",", "0", ":", "my", "]", "\"\"\"\n collapsed i and j in one dimensional array, row-major order\n ex :\n array([[0, 1, 2], => array([0, 1, 2, 3, *4*, 5])\n [3, *4*, 5]])\n if we want vertex 4 at (i=1,j=1) we must transform it in i*ny+j = 4\n \"\"\"", "i", ",", "j", "=", "np", ".", "ravel", "(", "i", ")", ",", "np", ".", "ravel", "(", "j", ")", "\"\"\"\n Let's go for the triangles :\n (i,j) - (i,j+1) -> y dir\n (i+1,j) - (i+1,j+1)\n |\n v\n x dir\n\n in flatten coordinates:\n i*ny+j - i*ny+j+1\n (i+1)*ny+j - (i+1)*ny+j+1\n \"\"\"", "t1", "=", "(", "i", "*", "ny", "+", "j", ",", "(", "i", "+", "1", ")", "%", "nx", "*", "ny", "+", "j", ",", "(", "i", "+", "1", ")", "%", "nx", "*", "ny", "+", "(", "j", "+", "1", ")", "%", "ny", ")", "t2", "=", "(", "i", "*", "ny", "+", "j", ",", "(", "i", "+", "1", ")", "%", "nx", "*", "ny", "+", "(", "j", "+", "1", ")", "%", "ny", ",", "i", "*", "ny", "+", "(", "j", "+", "1", ")", "%", "ny", ")", "\"\"\"\n %nx and %ny are used for wrapx and wrapy :\n if (i+1)=nx => (i+1)%nx=0 => close mesh in x direction\n if (j+1)=ny => (j+1)%ny=0 => close mesh in y direction\n \"\"\"", "nt", "=", "len", "(", "t1", "[", "0", "]", ")", "triangles", "=", "np", ".", "zeros", "(", "(", "nt", "*", "2", ",", "3", ")", ",", "dtype", "=", "np", ".", "uint32", ")", "triangles", "[", "0", ":", ":", "2", ",", "0", "]", ",", "triangles", "[", "0", ":", ":", "2", ",", "1", "]", ",", "triangles", "[", "0", ":", ":", "2", ",", "2", "]", "=", "t1", "triangles", "[", "1", ":", ":", "2", ",", "0", "]", ",", "triangles", "[", "1", ":", ":", "2", ",", "1", "]", ",", "triangles", "[", "1", ":", ":", "2", ",", "2", "]", "=", "t2", "lines", "=", "np", ".", "zeros", "(", "(", "nt", "*", "4", ",", "2", ")", ",", "dtype", "=", "np", ".", "uint32", ")", "lines", "[", ":", ":", "4", ",", "0", "]", ",", "lines", "[", ":", ":", "4", ",", "1", "]", "=", "t1", "[", ":", "2", "]", "lines", "[", "1", ":", ":", "4", ",", "0", "]", ",", "lines", "[", "1", ":", ":", "4", ",", "1", "]", "=", "t1", "[", "0", "]", ",", "t2", "[", "2", "]", "lines", "[", "2", ":", ":", "4", ",", "0", "]", ",", "lines", "[", "2", ":", ":", "4", ",", "1", "]", "=", "t2", "[", "2", ":", "0", ":", "-", "1", "]", "lines", "[", "3", ":", ":", "4", ",", "0", "]", ",", "lines", "[", "3", ":", ":", "4", ",", "1", "]", "=", "t1", "[", "1", "]", ",", "t2", "[", "1", "]", "return", "triangles", ",", "lines" ]
31.549296
18.394366
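A quick sanity check of the construction above: an open 3x4 grid has mx*my = 2*3 = 6 cells, so 12 triangles and 24 line segments.

import numpy as np  # _make_triangles_lines expects numpy bound as np

triangles, lines = _make_triangles_lines((3, 4))
print(triangles.shape)  # (12, 3)
print(lines.shape)      # (24, 2)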
def get_current_history_length(self):
    u'''Return the number of lines currently in the history.
    (This is different from get_history_length(), which returns
    the maximum number of lines that will be written to a history file.)'''
    value = len(self.history)
    log(u"get_current_history_length:%d" % value)
    return value
[ "def", "get_current_history_length", "(", "self", ")", ":", "value", "=", "len", "(", "self", ".", "history", ")", "log", "(", "u\"get_current_history_length:%d\"", "%", "value", ")", "return", "value" ]
51.142857
19.142857
def ensure_environment(variables):
    """
    Check os.environ to ensure that a given collection of variables has been
    set.

    :param variables: A collection of environment variable names
    :returns: os.environ
    :raises IncompleteEnvironment: if any variables are not set, with the
        exception's ``variables`` attribute populated with the missing
        variables
    """
    missing = [v for v in variables if v not in os.environ]
    if missing:
        formatted = ', '.join(missing)
        message = 'Environment variables not set: {}'.format(formatted)
        raise IncompleteEnvironment(message, missing)
    return os.environ
[ "def", "ensure_environment", "(", "variables", ")", ":", "missing", "=", "[", "v", "for", "v", "in", "variables", "if", "v", "not", "in", "os", ".", "environ", "]", "if", "missing", ":", "formatted", "=", "', '", ".", "join", "(", "missing", ")", "message", "=", "'Environment variables not set: {}'", ".", "format", "(", "formatted", ")", "raise", "IncompleteEnvironment", "(", "message", ",", "missing", ")", "return", "os", ".", "environ" ]
37.470588
17.352941
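A hypothetical usage of the check above, failing fast at startup when required configuration is missing; the variable names are placeholders.

import os

os.environ.setdefault('API_URL', 'http://localhost:8000')
try:
    ensure_environment(['API_URL', 'API_TOKEN'])
except IncompleteEnvironment as exc:
    print(exc)  # Environment variables not set: API_TOKEN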
def UNIFAC_groups(self):
    r'''Dictionary of UNIFAC subgroup: count groups for the original
    UNIFAC subgroups, as determined by `DDBST's online service
    <http://www.ddbst.com/unifacga.html>`_.

    Examples
    --------
    >>> pprint(Chemical('Cumene').UNIFAC_groups)
    {1: 2, 9: 5, 13: 1}
    '''
    if self.__UNIFAC_groups:
        return self.__UNIFAC_groups
    else:
        load_group_assignments_DDBST()
        if self.InChI_Key in DDBST_UNIFAC_assignments:
            self.__UNIFAC_groups = DDBST_UNIFAC_assignments[self.InChI_Key]
            return self.__UNIFAC_groups
        else:
            return None
[ "def", "UNIFAC_groups", "(", "self", ")", ":", "if", "self", ".", "__UNIFAC_groups", ":", "return", "self", ".", "__UNIFAC_groups", "else", ":", "load_group_assignments_DDBST", "(", ")", "if", "self", ".", "InChI_Key", "in", "DDBST_UNIFAC_assignments", ":", "self", ".", "__UNIFAC_groups", "=", "DDBST_UNIFAC_assignments", "[", "self", ".", "InChI_Key", "]", "return", "self", ".", "__UNIFAC_groups", "else", ":", "return", "None" ]
37.444444
21.666667
def get_request(self, request):
    """Sets token-based auth headers."""
    request.transport_user = self.username
    request.transport_password = self.api_key
    return request
[ "def", "get_request", "(", "self", ",", "request", ")", ":", "request", ".", "transport_user", "=", "self", ".", "username", "request", ".", "transport_password", "=", "self", ".", "api_key", "return", "request" ]
38.4
8.4
def setup_prompt(self,
                 prompt_name,
                 prefix='default',
                 capture_exit_code=False,
                 loglevel=logging.DEBUG):
    """Use this when you've opened a new shell to set the PS1 to something
    sane. By default, it sets up the default expect so you don't have to
    worry about it and can just call shutit.send('a command').

    If you want simple login and logout, please use login() and logout()
    within this module.

    Typically it would be used in this boilerplate pattern::

        shutit.send('su - auser', expect=shutit_global.shutit_global_object.base_prompt, check_exit=False)
        shutit.setup_prompt('tmp_prompt')
        shutit.send('some command')
        [...]
        shutit.set_default_shutit_pexpect_session_expect()
        shutit.send('exit')

    This function is assumed to be called whenever there is a change of
    environment.

    @param prompt_name:        Reference name for prompt.
    @param prefix:             Prompt prefix. Default: 'default'
    @param capture_exit_code:  Captures the exit code of the previous
                               command into a SHUTIT_EC variable. Useful
                               for when we want to work out whether the
                               login worked.

    @type prompt_name:         string
    @type prefix:              string
    """
    shutit = self.shutit
    local_prompt = prefix + ':' + shutit_util.random_id() + '# '
    shutit.expect_prompts[prompt_name] = local_prompt
    # Set up the PS1 value.
    # Override the PROMPT_COMMAND as this can cause nasty surprises in the
    # output, and we need a short pause before returning the prompt to
    # overcome an obscure bash bug (cf.
    # https://github.com/pexpect/pexpect/issues/483).
    # Set the cols value, as unpleasant escapes are put in the output if the
    # input is > n chars wide.
    # checkwinsize is required for similar reasons.
    # The newline in the expect list is a hack. On my work laptop this line hangs
    # and times out very frequently. This workaround seems to work, but I
    # haven't figured out why yet - imiell.
    # Split the local prompt into two parts and separate with quotes to protect against the expect matching the command rather than the output.
    shutit.log('Setting up prompt.', level=logging.DEBUG)
    send_str = ''
    if capture_exit_code:
        send_str = r' SHUTIT_EC=$? && '
    send_str += """ export PS1_""" + str(prompt_name) + """=$PS1 && PS1='""" + str(local_prompt[:2]) + "''" + str(local_prompt[2:]) + """' && PROMPT_COMMAND=""" + shutit_global.shutit_global_object.prompt_command
    self.send(ShutItSendSpec(self,
                             send=send_str,
                             expect=['\r\n' + shutit.expect_prompts[prompt_name]],
                             fail_on_empty_before=False,
                             echo=False,
                             loglevel=loglevel,
                             ignore_background=True))
    # Set default expect to new.
    shutit.log('Resetting default expect to: ' + shutit.expect_prompts[prompt_name], level=loglevel)
    self.default_expect = shutit.expect_prompts[prompt_name]
    # Sometimes stty resets to 0x0 (?), so we must override here.
    self.send(ShutItSendSpec(self,
                             send=" stty cols 65535",
                             echo=False,
                             check_exit=False,
                             loglevel=loglevel,
                             ignore_background=True))
    self.send(ShutItSendSpec(self,
                             send=" stty rows 65535",
                             echo=False,
                             check_exit=False,
                             loglevel=loglevel,
                             ignore_background=True))
    # Avoid dumb terminals
    self.send(ShutItSendSpec(self,
                             send=""" if [ $TERM=dumb ];then export TERM=xterm;fi""",
                             echo=False,
                             check_exit=False,
                             loglevel=loglevel,
                             ignore_background=True))
    # Get the hostname
    # Lack of space after > is deliberate to avoid issues with prompt matching.
    hostname = shutit.send_and_get_output(""" if [ $(echo $SHELL) == '/bin/bash' ]; then echo $HOSTNAME; elif [ $(command hostname 2>/dev/null) != '' ]; then hostname -s 2>/dev/null; fi""",
                                          echo=False,
                                          loglevel=logging.DEBUG)
    local_prompt_with_hostname = hostname + ':' + local_prompt
    shutit.expect_prompts[prompt_name] = local_prompt_with_hostname
    self.default_expect = shutit.expect_prompts[prompt_name]
    # Set up a shell expect to check whether we're still in a shell later.
    self.shell_expect = self.default_expect
    # Split the local prompt into two parts and separate with quotes to protect against the expect matching the command rather than the output.
    self.send(ShutItSendSpec(self,
                             send=""" PS1='""" + shutit.expect_prompts[prompt_name][:2] + "''" + shutit.expect_prompts[prompt_name][2:] + """'""",
                             echo=False,
                             loglevel=loglevel,
                             ignore_background=True))
    # Set up history the way shutit likes it.
    self.send(ShutItSendSpec(self,
                             send=' command export HISTCONTROL=$HISTCONTROL:ignoredups:ignorespace',
                             echo=False,
                             loglevel=loglevel,
                             ignore_background=True))
    # Ensure environment is set up OK.
    _ = self.init_pexpect_session_environment(prefix)
    return True
[ "def", "setup_prompt", "(", "self", ",", "prompt_name", ",", "prefix", "=", "'default'", ",", "capture_exit_code", "=", "False", ",", "loglevel", "=", "logging", ".", "DEBUG", ")", ":", "shutit", "=", "self", ".", "shutit", "local_prompt", "=", "prefix", "+", "':'", "+", "shutit_util", ".", "random_id", "(", ")", "+", "'# '", "shutit", ".", "expect_prompts", "[", "prompt_name", "]", "=", "local_prompt", "# Set up the PS1 value.", "# Override the PROMPT_COMMAND as this can cause nasty surprises in the", "# output, and we need a short pause before returning the prompt to", "# overcome an obscure bash bug (cf.", "# https://github.com/pexpect/pexpect/issues/483).", "# Set the cols value, as unpleasant escapes are put in the output if the", "# input is > n chars wide.", "# checkwinsize is required for similar reasons.", "# The newline in the expect list is a hack. On my work laptop this line hangs", "# and times out very frequently. This workaround seems to work, but I", "# haven't figured out why yet - imiell.", "# Split the local prompt into two parts and separate with quotes to protect against the expect matching the command rather than the output.", "shutit", ".", "log", "(", "'Setting up prompt.'", ",", "level", "=", "logging", ".", "DEBUG", ")", "send_str", "=", "''", "if", "capture_exit_code", ":", "send_str", "=", "r' SHUTIT_EC=$? && '", "send_str", "+=", "\"\"\" export PS1_\"\"\"", "+", "str", "(", "prompt_name", ")", "+", "\"\"\"=$PS1 && PS1='\"\"\"", "+", "str", "(", "local_prompt", "[", ":", "2", "]", ")", "+", "\"''\"", "+", "str", "(", "local_prompt", "[", "2", ":", "]", ")", "+", "\"\"\"' && PROMPT_COMMAND=\"\"\"", "+", "shutit_global", ".", "shutit_global_object", ".", "prompt_command", "self", ".", "send", "(", "ShutItSendSpec", "(", "self", ",", "send", "=", "send_str", ",", "expect", "=", "[", "'\\r\\n'", "+", "shutit", ".", "expect_prompts", "[", "prompt_name", "]", "]", ",", "fail_on_empty_before", "=", "False", ",", "echo", "=", "False", ",", "loglevel", "=", "loglevel", ",", "ignore_background", "=", "True", ")", ")", "# Set default expect to new.", "shutit", ".", "log", "(", "'Resetting default expect to: '", "+", "shutit", ".", "expect_prompts", "[", "prompt_name", "]", ",", "level", "=", "loglevel", ")", "self", ".", "default_expect", "=", "shutit", ".", "expect_prompts", "[", "prompt_name", "]", "# Sometimes stty resets to 0x0 (?), so we must override here.", "self", ".", "send", "(", "ShutItSendSpec", "(", "self", ",", "send", "=", "\" stty cols 65535\"", ",", "echo", "=", "False", ",", "check_exit", "=", "False", ",", "loglevel", "=", "loglevel", ",", "ignore_background", "=", "True", ")", ")", "self", ".", "send", "(", "ShutItSendSpec", "(", "self", ",", "send", "=", "\" stty rows 65535\"", ",", "echo", "=", "False", ",", "check_exit", "=", "False", ",", "loglevel", "=", "loglevel", ",", "ignore_background", "=", "True", ")", ")", "# Avoid dumb terminals", "self", ".", "send", "(", "ShutItSendSpec", "(", "self", ",", "send", "=", "\"\"\" if [ $TERM=dumb ];then export TERM=xterm;fi\"\"\"", ",", "echo", "=", "False", ",", "check_exit", "=", "False", ",", "loglevel", "=", "loglevel", ",", "ignore_background", "=", "True", ")", ")", "# Get the hostname", "# Lack of space after > is deliberate to avoid issues with prompt matching.", "hostname", "=", "shutit", ".", "send_and_get_output", "(", "\"\"\" if [ $(echo $SHELL) == '/bin/bash' ]; then echo $HOSTNAME; elif [ $(command hostname 2>/dev/null) != '' ]; then hostname -s 2>/dev/null; fi\"\"\"", ",", "echo", "=", 
"False", ",", "loglevel", "=", "logging", ".", "DEBUG", ")", "local_prompt_with_hostname", "=", "hostname", "+", "':'", "+", "local_prompt", "shutit", ".", "expect_prompts", "[", "prompt_name", "]", "=", "local_prompt_with_hostname", "self", ".", "default_expect", "=", "shutit", ".", "expect_prompts", "[", "prompt_name", "]", "# Set up a shell expect to check whether we're still in a shell later.", "self", ".", "shell_expect", "=", "self", ".", "default_expect", "# Split the local prompt into two parts and separate with quotes to protect against the expect matching the command rather than the output.", "self", ".", "send", "(", "ShutItSendSpec", "(", "self", ",", "send", "=", "\"\"\" PS1='\"\"\"", "+", "shutit", ".", "expect_prompts", "[", "prompt_name", "]", "[", ":", "2", "]", "+", "\"''\"", "+", "shutit", ".", "expect_prompts", "[", "prompt_name", "]", "[", "2", ":", "]", "+", "\"\"\"'\"\"\"", ",", "echo", "=", "False", ",", "loglevel", "=", "loglevel", ",", "ignore_background", "=", "True", ")", ")", "# Set up history the way shutit likes it.", "self", ".", "send", "(", "ShutItSendSpec", "(", "self", ",", "send", "=", "' command export HISTCONTROL=$HISTCONTROL:ignoredups:ignorespace'", ",", "echo", "=", "False", ",", "loglevel", "=", "loglevel", ",", "ignore_background", "=", "True", ")", ")", "# Ensure environment is set up OK.", "_", "=", "self", ".", "init_pexpect_session_environment", "(", "prefix", ")", "return", "True" ]
51.979381
22.072165
def min(self, key=None):
    """
    Find the minimum item in this RDD.

    :param key: A function used to generate key for comparing

    >>> rdd = sc.parallelize([2.0, 5.0, 43.0, 10.0])
    >>> rdd.min()
    2.0
    >>> rdd.min(key=str)
    10.0
    """
    if key is None:
        return self.reduce(min)
    return self.reduce(lambda a, b: min(a, b, key=key))
[ "def", "min", "(", "self", ",", "key", "=", "None", ")", ":", "if", "key", "is", "None", ":", "return", "self", ".", "reduce", "(", "min", ")", "return", "self", ".", "reduce", "(", "lambda", "a", ",", "b", ":", "min", "(", "a", ",", "b", ",", "key", "=", "key", ")", ")" ]
26.533333
17.866667
def read(self, size):
    """
    Read wrapper.

    Parameters
    ----------
    size : int
        Number of bytes to read.
    """
    try:
        return_val = self.handle.read(size)
        if return_val == '':
            print()
            print("Piksi disconnected")
            print()
            raise IOError
        return return_val
    except OSError:
        print()
        print("Piksi disconnected")
        print()
        raise IOError
[ "def", "read", "(", "self", ",", "size", ")", ":", "try", ":", "return_val", "=", "self", ".", "handle", ".", "read", "(", "size", ")", "if", "return_val", "==", "''", ":", "print", "(", ")", "print", "(", "\"Piksi disconnected\"", ")", "print", "(", ")", "raise", "IOError", "return", "return_val", "except", "OSError", ":", "print", "(", ")", "print", "(", "\"Piksi disconnected\"", ")", "print", "(", ")", "raise", "IOError" ]
23.409091
14.863636
def pset_field(item_type, optional=False, initial=()):
    """
    Create checked ``PSet`` field.

    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.

    :return: A ``field`` containing a ``CheckedPSet`` of the given type.
    """
    return _sequence_field(CheckedPSet, item_type, optional, initial)
[ "def", "pset_field", "(", "item_type", ",", "optional", "=", "False", ",", "initial", "=", "(", ")", ")", ":", "return", "_sequence_field", "(", "CheckedPSet", ",", "item_type", ",", "optional", ",", "initial", ")" ]
36.642857
19.928571
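A sketch of how such a field is typically used, patterned on pyrsistent-style records; the PRecord base class and set semantics are assumed to come from the same library as pset_field.

class Article(PRecord):
    # Checked set of strings, starting empty.
    tags = pset_field(str, initial=())

a = Article(tags={'python', 'news'})
a2 = a.set(tags=a.tags.add('data'))  # persistent update: new record, old unchanged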
def store_source_info(self, calc_times):
    """
    Save (weight, num_sites, calc_time) inside the source_info dataset
    """
    if calc_times:
        source_info = self.datastore['source_info']
        arr = numpy.zeros((len(source_info), 3), F32)
        ids, vals = zip(*sorted(calc_times.items()))
        arr[numpy.array(ids)] = vals
        source_info['weight'] += arr[:, 0]
        source_info['num_sites'] += arr[:, 1]
        source_info['calc_time'] += arr[:, 2]
[ "def", "store_source_info", "(", "self", ",", "calc_times", ")", ":", "if", "calc_times", ":", "source_info", "=", "self", ".", "datastore", "[", "'source_info'", "]", "arr", "=", "numpy", ".", "zeros", "(", "(", "len", "(", "source_info", ")", ",", "3", ")", ",", "F32", ")", "ids", ",", "vals", "=", "zip", "(", "*", "sorted", "(", "calc_times", ".", "items", "(", ")", ")", ")", "arr", "[", "numpy", ".", "array", "(", "ids", ")", "]", "=", "vals", "source_info", "[", "'weight'", "]", "+=", "arr", "[", ":", ",", "0", "]", "source_info", "[", "'num_sites'", "]", "+=", "arr", "[", ":", ",", "1", "]", "source_info", "[", "'calc_time'", "]", "+=", "arr", "[", ":", ",", "2", "]" ]
42.5
10.333333
def execute_scheduler(self):
    """Main entry point for the scheduler. This method will start two
    scheduled jobs, `schedule_jobs` which takes care of scheduling the
    actual SQS messaging and `process_status_queue` which will track the
    current status of the jobs as workers are executing them

    Returns:
        `None`
    """
    try:
        # Schedule periodic scheduling of jobs
        self.scheduler.add_job(
            self.schedule_jobs,
            trigger='interval',
            name='schedule_jobs',
            minutes=15,
            start_date=datetime.now() + timedelta(seconds=1)
        )

        self.scheduler.add_job(
            self.process_status_queue,
            trigger='interval',
            name='process_status_queue',
            seconds=30,
            start_date=datetime.now() + timedelta(seconds=5),
            max_instances=1
        )

        self.scheduler.start()
    except KeyboardInterrupt:
        self.scheduler.shutdown()
[ "def", "execute_scheduler", "(", "self", ")", ":", "try", ":", "# Schedule periodic scheduling of jobs", "self", ".", "scheduler", ".", "add_job", "(", "self", ".", "schedule_jobs", ",", "trigger", "=", "'interval'", ",", "name", "=", "'schedule_jobs'", ",", "minutes", "=", "15", ",", "start_date", "=", "datetime", ".", "now", "(", ")", "+", "timedelta", "(", "seconds", "=", "1", ")", ")", "self", ".", "scheduler", ".", "add_job", "(", "self", ".", "process_status_queue", ",", "trigger", "=", "'interval'", ",", "name", "=", "'process_status_queue'", ",", "seconds", "=", "30", ",", "start_date", "=", "datetime", ".", "now", "(", ")", "+", "timedelta", "(", "seconds", "=", "5", ")", ",", "max_instances", "=", "1", ")", "self", ".", "scheduler", ".", "start", "(", ")", "except", "KeyboardInterrupt", ":", "self", ".", "scheduler", ".", "shutdown", "(", ")" ]
34.16129
17.064516
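A standalone sketch of the same interval-scheduling pattern, assuming the apscheduler package; the job callable is a placeholder.

from datetime import datetime, timedelta
from apscheduler.schedulers.blocking import BlockingScheduler

sched = BlockingScheduler()
sched.add_job(lambda: print('tick'), trigger='interval', seconds=30,
              start_date=datetime.now() + timedelta(seconds=5))
try:
    sched.start()
except KeyboardInterrupt:
    sched.shutdown()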
def secure_authorized_channel(
        credentials, request, target, ssl_credentials=None, **kwargs):
    """Creates a secure authorized gRPC channel.

    This creates a channel with SSL and :class:`AuthMetadataPlugin`. This
    channel can be used to create a stub that can make authorized requests.

    Example::

        import google.auth
        import google.auth.transport.grpc
        import google.auth.transport.requests
        from google.cloud.speech.v1 import cloud_speech_pb2

        # Get credentials.
        credentials, _ = google.auth.default()

        # Get an HTTP request function to refresh credentials.
        request = google.auth.transport.requests.Request()

        # Create a channel.
        channel = google.auth.transport.grpc.secure_authorized_channel(
            credentials, request, 'speech.googleapis.com:443')

        # Use the channel to create a stub.
        cloud_speech.create_Speech_stub(channel)

    Args:
        credentials (google.auth.credentials.Credentials): The credentials to
            add to requests.
        request (google.auth.transport.Request): A HTTP transport request
            object used to refresh credentials as needed. Even though gRPC
            is a separate transport, there's no way to refresh the
            credentials without using a standard http transport.
        target (str): The host and port of the service.
        ssl_credentials (grpc.ChannelCredentials): Optional SSL channel
            credentials. This can be used to specify different certificates.
        kwargs: Additional arguments to pass to :func:`grpc.secure_channel`.

    Returns:
        grpc.Channel: The created gRPC channel.
    """
    # Create the metadata plugin for inserting the authorization header.
    metadata_plugin = AuthMetadataPlugin(credentials, request)

    # Create a set of grpc.CallCredentials using the metadata plugin.
    google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)

    if ssl_credentials is None:
        ssl_credentials = grpc.ssl_channel_credentials()

    # Combine the ssl credentials and the authorization credentials.
    composite_credentials = grpc.composite_channel_credentials(
        ssl_credentials, google_auth_credentials)

    return grpc.secure_channel(target, composite_credentials, **kwargs)
[ "def", "secure_authorized_channel", "(", "credentials", ",", "request", ",", "target", ",", "ssl_credentials", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# Create the metadata plugin for inserting the authorization header.", "metadata_plugin", "=", "AuthMetadataPlugin", "(", "credentials", ",", "request", ")", "# Create a set of grpc.CallCredentials using the metadata plugin.", "google_auth_credentials", "=", "grpc", ".", "metadata_call_credentials", "(", "metadata_plugin", ")", "if", "ssl_credentials", "is", "None", ":", "ssl_credentials", "=", "grpc", ".", "ssl_channel_credentials", "(", ")", "# Combine the ssl credentials and the authorization credentials.", "composite_credentials", "=", "grpc", ".", "composite_channel_credentials", "(", "ssl_credentials", ",", "google_auth_credentials", ")", "return", "grpc", ".", "secure_channel", "(", "target", ",", "composite_credentials", ",", "*", "*", "kwargs", ")" ]
40.642857
25.303571
def to_str(delta, extended=False):
    """Format a datetime.timedelta to a duration string"""
    total_seconds = delta.total_seconds()
    sign = "-" if total_seconds < 0 else ""
    nanoseconds = abs(total_seconds * _second_size)

    if total_seconds < 1:
        result_str = _to_str_small(nanoseconds, extended)
    else:
        result_str = _to_str_large(nanoseconds, extended)

    return "{}{}".format(sign, result_str)
[ "def", "to_str", "(", "delta", ",", "extended", "=", "False", ")", ":", "total_seconds", "=", "delta", ".", "total_seconds", "(", ")", "sign", "=", "\"-\"", "if", "total_seconds", "<", "0", "else", "\"\"", "nanoseconds", "=", "abs", "(", "total_seconds", "*", "_second_size", ")", "if", "total_seconds", "<", "1", ":", "result_str", "=", "_to_str_small", "(", "nanoseconds", ",", "extended", ")", "else", ":", "result_str", "=", "_to_str_large", "(", "nanoseconds", ",", "extended", ")", "return", "\"{}{}\"", ".", "format", "(", "sign", ",", "result_str", ")" ]
32.076923
17.153846
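Hypothetical usage of the formatter above; the exact output strings depend on _to_str_small and _to_str_large, so the results shown are indicative only.

import datetime

print(to_str(datetime.timedelta(minutes=1, seconds=30)))  # e.g. '1m30s'
print(to_str(datetime.timedelta(seconds=-2)))             # '-' prefix, sub-second path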
def recursive_derived(self):
    """list of all :class:`derive classes <hierarchy_info_t>`"""
    if self._recursive_derived is None:
        to_go = self.derived[:]
        all_derived = []
        while to_go:
            derive = to_go.pop()
            if derive not in all_derived:
                all_derived.append(derive)
                to_go.extend(derive.related_class.derived)
        self._recursive_derived = all_derived
    return self._recursive_derived
[ "def", "recursive_derived", "(", "self", ")", ":", "if", "self", ".", "_recursive_derived", "is", "None", ":", "to_go", "=", "self", ".", "derived", "[", ":", "]", "all_derived", "=", "[", "]", "while", "to_go", ":", "derive", "=", "to_go", ".", "pop", "(", ")", "if", "derive", "not", "in", "all_derived", ":", "all_derived", ".", "append", "(", "derive", ")", "to_go", ".", "extend", "(", "derive", ".", "related_class", ".", "derived", ")", "self", ".", "_recursive_derived", "=", "all_derived", "return", "self", ".", "_recursive_derived" ]
41.833333
8
def Parse(self, stat, file_object, knowledge_base):
    """Parse the status file."""
    _, _ = stat, knowledge_base
    packages = []
    sw_data = utils.ReadFileBytesAsUnicode(file_object)
    try:
        for pkg in self._deb822.Packages.iter_paragraphs(sw_data.splitlines()):
            if self.installed_re.match(pkg["Status"]):
                packages.append(
                    rdf_client.SoftwarePackage(
                        name=pkg["Package"],
                        description=pkg["Description"],
                        version=pkg["Version"],
                        architecture=pkg["Architecture"],
                        publisher=pkg["Maintainer"],
                        install_state="INSTALLED"))
    except SystemError:
        yield rdf_anomaly.Anomaly(
            type="PARSER_ANOMALY", symptom="Invalid dpkg status file")
    finally:
        if packages:
            yield rdf_client.SoftwarePackages(packages=packages)
[ "def", "Parse", "(", "self", ",", "stat", ",", "file_object", ",", "knowledge_base", ")", ":", "_", ",", "_", "=", "stat", ",", "knowledge_base", "packages", "=", "[", "]", "sw_data", "=", "utils", ".", "ReadFileBytesAsUnicode", "(", "file_object", ")", "try", ":", "for", "pkg", "in", "self", ".", "_deb822", ".", "Packages", ".", "iter_paragraphs", "(", "sw_data", ".", "splitlines", "(", ")", ")", ":", "if", "self", ".", "installed_re", ".", "match", "(", "pkg", "[", "\"Status\"", "]", ")", ":", "packages", ".", "append", "(", "rdf_client", ".", "SoftwarePackage", "(", "name", "=", "pkg", "[", "\"Package\"", "]", ",", "description", "=", "pkg", "[", "\"Description\"", "]", ",", "version", "=", "pkg", "[", "\"Version\"", "]", ",", "architecture", "=", "pkg", "[", "\"Architecture\"", "]", ",", "publisher", "=", "pkg", "[", "\"Maintainer\"", "]", ",", "install_state", "=", "\"INSTALLED\"", ")", ")", "except", "SystemError", ":", "yield", "rdf_anomaly", ".", "Anomaly", "(", "type", "=", "\"PARSER_ANOMALY\"", ",", "symptom", "=", "\"Invalid dpkg status file\"", ")", "finally", ":", "if", "packages", ":", "yield", "rdf_client", ".", "SoftwarePackages", "(", "packages", "=", "packages", ")" ]
37.869565
15.173913
def create_flavor(self, name): """Create a new baremetal flavor. :param name: the name of the flavor """ self.add_environment_file(user='stack', filename='stackrc') self.run('openstack flavor create --id auto --ram 4096 --disk 40 --vcpus 1 %s' % name, user='stack', success_status=(0, 1)) self.run('openstack flavor set --property "cpu_arch"="x86_64" --property "capabilities:boot_option"="local" %s' % name, user='stack') self.run('openstack flavor set --property "capabilities:profile"="baremetal" %s' % name, user='stack')
[ "def", "create_flavor", "(", "self", ",", "name", ")", ":", "self", ".", "add_environment_file", "(", "user", "=", "'stack'", ",", "filename", "=", "'stackrc'", ")", "self", ".", "run", "(", "'openstack flavor create --id auto --ram 4096 --disk 40 --vcpus 1 baremetal'", ",", "user", "=", "'stack'", ",", "success_status", "=", "(", "0", ",", "1", ")", ")", "self", ".", "run", "(", "'openstack flavor set --property \"cpu_arch\"=\"x86_64\" --property \"capabilities:boot_option\"=\"local\" baremetal'", ",", "user", "=", "'stack'", ")", "self", ".", "run", "(", "'openstack flavor set --property \"capabilities:profile\"=\"baremetal\" baremetal'", ",", "user", "=", "'stack'", ")" ]
63.777778
38
def lstring_as_obj(true_or_false=None): """Toggles whether lstrings should be treated as strings or as objects. When FieldArrays is first loaded, the default is True. Parameters ---------- true_or_false : {None|bool} Pass True to map lstrings to objects; False otherwise. If None is provided, just returns the current state. Return ------ current_state : bool The current state of lstring_as_obj. Examples -------- >>> from pycbc.io import FieldArray >>> FieldArray.lstring_as_obj() True >>> FieldArray.FieldArray.from_arrays([numpy.zeros(10)], dtype=[('foo', 'lstring')]) FieldArray([(0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,), (0.0,)], dtype=[('foo', 'O')]) >>> FieldArray.lstring_as_obj(False) False >>> FieldArray.FieldArray.from_arrays([numpy.zeros(10)], dtype=[('foo', 'lstring')]) FieldArray([('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',), ('0.0',)], dtype=[('foo', 'S50')]) """ if true_or_false is not None: _default_types_status['lstring_as_obj'] = true_or_false # update the typeDict numpy.typeDict[u'lstring'] = numpy.object_ \ if _default_types_status['lstring_as_obj'] \ else 'S%i' % _default_types_status['default_strlen'] return _default_types_status['lstring_as_obj']
[ "def", "lstring_as_obj", "(", "true_or_false", "=", "None", ")", ":", "if", "true_or_false", "is", "not", "None", ":", "_default_types_status", "[", "'lstring_as_obj'", "]", "=", "true_or_false", "# update the typeDict", "numpy", ".", "typeDict", "[", "u'lstring'", "]", "=", "numpy", ".", "object_", "if", "_default_types_status", "[", "'lstring_as_obj'", "]", "else", "'S%i'", "%", "_default_types_status", "[", "'default_strlen'", "]", "return", "_default_types_status", "[", "'lstring_as_obj'", "]" ]
37.526316
19.736842
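Stripped of the NumPy plumbing, the function is a toggle-or-query accessor over module-level state: passing a bool flips the flag, passing nothing reports it. A minimal sketch of that pattern (names below are illustrative, not pycbc's):

_state = {'lstring_as_obj': True}  # assumed module-level default

def lstring_as_obj_sketch(true_or_false=None):
    if true_or_false is not None:
        _state['lstring_as_obj'] = bool(true_or_false)  # toggle
    return _state['lstring_as_obj']                     # always report current state

print(lstring_as_obj_sketch())        # True  (query only)
print(lstring_as_obj_sketch(False))   # False (toggle and report)

Note that numpy.typeDict, which the real function mutates, is deprecated in recent NumPy releases as an alias of numpy.sctypeDict.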
def fixed_poch(a, n): """Implementation of the Pochhammer symbol :math:`(a)_n` which handles negative integer arguments properly. A conditional check is needed because scipy's implementation of the Pochhammer symbol is wrong for negative integer arguments. This function uses the definition from http://functions.wolfram.com/GammaBetaErf/Pochhammer/02/ Parameters ---------- a : float The argument. n : nonnegative int The order. """ # Old form, calls gamma function: # if a < 0.0 and a % 1 == 0 and n <= -a: # p = (-1.0)**n * scipy.misc.factorial(-a) / scipy.misc.factorial(-a - n) # else: # p = scipy.special.poch(a, n) # return p if (int(n) != n) or (n < 0): raise ValueError("Parameter n must be a nonnegative int!") n = int(n) # Direct form based on product: terms = [a + k for k in range(0, n)] return scipy.prod(terms)
[ "def", "fixed_poch", "(", "a", ",", "n", ")", ":", "# Old form, calls gamma function:", "# if a < 0.0 and a % 1 == 0 and n <= -a:", "# p = (-1.0)**n * scipy.misc.factorial(-a) / scipy.misc.factorial(-a - n)", "# else:", "# p = scipy.special.poch(a, n)", "# return p", "if", "(", "int", "(", "n", ")", "!=", "n", ")", "or", "(", "n", "<", "0", ")", ":", "raise", "ValueError", "(", "\"Parameter n must be a nonnegative int!\"", ")", "n", "=", "int", "(", "n", ")", "# Direct form based on product:", "terms", "=", "[", "a", "+", "k", "for", "k", "in", "range", "(", "0", ",", "n", ")", "]", "return", "scipy", ".", "prod", "(", "terms", ")" ]
34.185185
19.444444
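A quick worked check of the product definition (a)_n = a * (a+1) * ... * (a+n-1), including the negative-integer case the docstring is concerned with. This standalone version uses math.prod instead of the deprecated scipy.prod alias:

import math

def poch(a, n):
    # empty product is 1, so (a)_0 == 1 falls out for free
    return math.prod(a + k for k in range(n))

print(poch(3, 4))    # 3*4*5*6 = 360
print(poch(-3, 4))   # (-3)*(-2)*(-1)*0 = 0
print(poch(-3, 3))   # (-3)*(-2)*(-1) = -6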
def _allocate_address(self, instance): """Allocates a free public ip address to the given instance :param instance: instance to assign address to :type instance: py:class:`boto.ec2.instance.Reservation` :return: public ip address """ connection = self._connect() free_addresses = [ ip for ip in connection.get_all_addresses() if not ip.instance_id] if not free_addresses: try: address = connection.allocate_address() free_addresses.append(address) except Exception as ex: log.error("Unable to allocate a public IP address to instance `%s`", instance.id) return None try: address = free_addresses.pop() instance.use_ip(address) return address.public_ip except Exception as ex: log.error("Unable to associate IP address %s to instance `%s`", address, instance.id) return None
[ "def", "_allocate_address", "(", "self", ",", "instance", ")", ":", "connection", "=", "self", ".", "_connect", "(", ")", "free_addresses", "=", "[", "ip", "for", "ip", "in", "connection", ".", "get_all_addresses", "(", ")", "if", "not", "ip", ".", "instance_id", "]", "if", "not", "free_addresses", ":", "try", ":", "address", "=", "connection", ".", "allocate_address", "(", ")", "except", "Exception", "as", "ex", ":", "log", ".", "error", "(", "\"Unable to allocate a public IP address to instance `%s`\"", ",", "instance", ".", "id", ")", "return", "None", "try", ":", "address", "=", "free_addresses", ".", "pop", "(", ")", "instance", ".", "use_ip", "(", "address", ")", "return", "address", ".", "public_ip", "except", "Exception", "as", "ex", ":", "log", ".", "error", "(", "\"Unable to associate IP address %s to instance `%s`\"", ",", "address", ",", "instance", ".", "id", ")", "return", "None" ]
37.692308
16.846154
def simple_lesk(context_sentence: str, ambiguous_word: str, pos: str = None, lemma=True, stem=False, hyperhypo=True, stop=True, context_is_lemmatized=False, nbest=False, keepscore=False, normalizescore=False, from_cache=True) -> "wn.Synset": """ Simple Lesk uses more signature words than the original Lesk algorithm (1986) but fewer than adapted Lesk (Banerjee and Pedersen, 2002). :param context_sentence: String, sentence or document. :param ambiguous_word: String, a single word. :param pos: String, one of 'a', 'r', 's', 'n', 'v', or None. :return: A Synset for the estimated best sense. """ # Ensure that ambiguous word is a lemma. ambiguous_word = lemmatize(ambiguous_word, pos=pos) # If ambiguous word not in WordNet return None if not wn.synsets(ambiguous_word): return None # Get the signatures for each synset. ss_sign = simple_signatures(ambiguous_word, pos, lemma, stem, hyperhypo, stop, from_cache=from_cache) # Disambiguate the sense in context. context_sentence = context_sentence.split() if context_is_lemmatized else lemmatize_sentence(context_sentence) return compare_overlaps(context_sentence, ss_sign, nbest=nbest, keepscore=keepscore, normalizescore=normalizescore)
[ "def", "simple_lesk", "(", "context_sentence", ":", "str", ",", "ambiguous_word", ":", "str", ",", "pos", ":", "str", "=", "None", ",", "lemma", "=", "True", ",", "stem", "=", "False", ",", "hyperhypo", "=", "True", ",", "stop", "=", "True", ",", "context_is_lemmatized", "=", "False", ",", "nbest", "=", "False", ",", "keepscore", "=", "False", ",", "normalizescore", "=", "False", ",", "from_cache", "=", "True", ")", "->", "\"wn.Synset\"", ":", "# Ensure that ambiguous word is a lemma.", "ambiguous_word", "=", "lemmatize", "(", "ambiguous_word", ",", "pos", "=", "pos", ")", "# If ambiguous word not in WordNet return None", "if", "not", "wn", ".", "synsets", "(", "ambiguous_word", ")", ":", "return", "None", "# Get the signatures for each synset.", "ss_sign", "=", "simple_signatures", "(", "ambiguous_word", ",", "pos", ",", "lemma", ",", "stem", ",", "hyperhypo", ",", "stop", ",", "from_cache", "=", "from_cache", ")", "# Disambiguate the sense in context.", "context_sentence", "=", "context_sentence", ".", "split", "(", ")", "if", "context_is_lemmatized", "else", "lemmatize_sentence", "(", "context_sentence", ")", "return", "compare_overlaps", "(", "context_sentence", ",", "ss_sign", ",", "nbest", "=", "nbest", ",", "keepscore", "=", "keepscore", ",", "normalizescore", "=", "normalizescore", ")" ]
51.666667
18.777778
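Every Lesk variant ultimately scores senses by the overlap between the context words and each sense's signature. A WordNet-free toy sketch of that core idea; the sense names and signatures below are invented for illustration and are not pywsd's API:

# hypothetical signatures for two senses of "bank"
signatures = {
    'bank.financial': {'money', 'deposit', 'loan', 'account'},
    'bank.river': {'river', 'water', 'slope', 'shore'},
}

def toy_lesk(context_words, signatures):
    context = set(context_words)
    # pick the sense whose signature shares the most words with the context
    return max(signatures, key=lambda sense: len(context & signatures[sense]))

print(toy_lesk('I opened a deposit account at the bank'.split(), signatures))
# bank.financial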
def handle_options(): '''Handle options. ''' parser = OptionParser() parser.set_defaults(cmaglin=False) parser.set_defaults(single=False) parser.set_defaults(alpha_cov=False) parser.add_option('-x', '--xmin', dest='xmin', help='Minimum X range', type='float', ) parser.add_option('-X', '--xmax', dest='xmax', help='Maximum X range', type='float', ) parser.add_option('-z', '--zmin', dest='zmin', help='Minimum Z range', type='float', ) parser.add_option('-Z', '--zmax', dest='zmax', help='Maximum Z range', type='float', ) parser.add_option('-c', '--column', dest='column', help='column of the input file to plot', type='int', default=2, ) parser.add_option('-u', '--unit', dest='xunit', help='Unit of length scale, typically meters (m) ' + 'or centimeters (cm)', metavar='UNIT', type='str', default='m', ) parser.add_option("--alpha_cov", action="store_true", dest="alpha_cov", help="use coverage for transparency", ) parser.add_option('--cbtiks', dest='cbtiks', help="Number of CB ticks", type=int, metavar="INT", default=3, ) parser.add_option("--cmaglin", action="store_true", dest="cmaglin", help="linear colorbar for magnitude", ) parser.add_option('--mag_vmin', dest='mag_vmin', help='Minimum of colorbar', type='float', ) parser.add_option('--mag_vmax', dest='mag_vmax', help='Maximum of colorbar', type='float', ) parser.add_option('--pha_vmin', dest='pha_vmin', help='Minimum of colorbar', type='float', ) parser.add_option('--pha_vmax', dest='pha_vmax', help='Maximum of colorbar', type='float', ) parser.add_option('--real_vmin', dest='real_vmin', help='Minimum of colorbar', type='float', ) parser.add_option('--real_vmax', dest='real_vmax', help='Maximum of colorbar', type='float', ) parser.add_option('--imag_vmin', dest='imag_vmin', help='Minimum of colorbar', type='float', ) parser.add_option('--imag_vmax', dest='imag_vmax', help='Maximum of colorbar', type='float', ) (options, args) = parser.parse_args() return options
[ "def", "handle_options", "(", ")", ":", "parser", "=", "OptionParser", "(", ")", "parser", ".", "set_defaults", "(", "cmaglin", "=", "False", ")", "parser", ".", "set_defaults", "(", "single", "=", "False", ")", "parser", ".", "set_defaults", "(", "alpha_cov", "=", "False", ")", "parser", ".", "add_option", "(", "'-x'", ",", "'--xmin'", ",", "dest", "=", "'xmin'", ",", "help", "=", "'Minium X range'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'-X'", ",", "'--xmax'", ",", "dest", "=", "'xmax'", ",", "help", "=", "'Maximum X range'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'-z'", ",", "'--zmin'", ",", "dest", "=", "'zmin'", ",", "help", "=", "'Minium Z range'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'-Z'", ",", "'--zmax'", ",", "dest", "=", "'zmax'", ",", "help", "=", "'Maximum Z range'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'-c'", ",", "'--column'", ",", "dest", "=", "'column'", ",", "help", "=", "'column to plot of input file'", ",", "type", "=", "'int'", ",", "default", "=", "2", ",", ")", "parser", ".", "add_option", "(", "'-u'", ",", "'--unit'", ",", "dest", "=", "'xunit'", ",", "help", "=", "'Unit of length scale, typically meters (m) '", "+", "'or centimeters (cm)'", ",", "metavar", "=", "'UNIT'", ",", "type", "=", "'str'", ",", "default", "=", "'m'", ",", ")", "parser", ".", "add_option", "(", "\"--alpha_cov\"", ",", "action", "=", "\"store_true\"", ",", "dest", "=", "\"alpha_cov\"", ",", "help", "=", "\"use coverage for transparency\"", ",", ")", "parser", ".", "add_option", "(", "'--cbtiks'", ",", "dest", "=", "'cbtiks'", ",", "help", "=", "\"Number of CB tiks\"", ",", "type", "=", "int", ",", "metavar", "=", "\"INT\"", ",", "default", "=", "3", ",", ")", "parser", ".", "add_option", "(", "\"--cmaglin\"", ",", "action", "=", "\"store_true\"", ",", "dest", "=", "\"cmaglin\"", ",", "help", "=", "\"linear colorbar for magnitude\"", ",", ")", "parser", ".", "add_option", "(", "'--mag_vmin'", ",", "dest", "=", "'mag_vmin'", ",", "help", "=", "'Minium of colorbar'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'--mag_vmax'", ",", "dest", "=", "'mag_vmax'", ",", "help", "=", "'Maximum of colorbar'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'--pha_vmin'", ",", "dest", "=", "'pha_vmin'", ",", "help", "=", "'Minium of colorbar'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'--pha_vmax'", ",", "dest", "=", "'pha_vmax'", ",", "help", "=", "'Maximum of colorbar'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'--real_vmin'", ",", "dest", "=", "'real_vmin'", ",", "help", "=", "'Minium of colorbar'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'--real_vmax'", ",", "dest", "=", "'real_vmax'", ",", "help", "=", "'Maximum of colorbar'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'--imag_vmin'", ",", "dest", "=", "'imag_vmin'", ",", "help", "=", "'Minium of colorbar'", ",", "type", "=", "'float'", ",", ")", "parser", ".", "add_option", "(", "'--imag_vmax'", ",", "dest", "=", "'imag_vmax'", ",", "help", "=", "'Maximum of colorbar'", ",", "type", "=", "'float'", ",", ")", "(", "options", ",", "args", ")", "=", "parser", ".", "parse_args", "(", ")", "return", "options" ]
34.439252
9.149533
def OauthGetAccessToken(self): """ Use token_verifier to obtain an access token for the user. If this function returns True, the client's __oauth_token__ member contains the access token. @return (boolean) - Boolean indicating whether OauthGetAccessToken was successful """ self.__setAuthenticationMethod__('authenticating_oauth') # obtain access token oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.__oauth_consumer__, \ token = self.__oauth_token__, \ callback = '', \ verifier = self.__oauth_token__.verifier, \ http_url = 'http://api.sense-os.nl/oauth/access_token') oauth_request.sign_request(oauth.OAuthSignatureMethod_HMAC_SHA1(), self.__oauth_consumer__, self.__oauth_token__) parameters = [] for key in oauth_request.parameters.iterkeys(): parameters.append((key, oauth_request.parameters[key])) parameters.sort() if self.__SenseApiCall__('/oauth/access_token', 'GET', parameters = parameters): response = urlparse.parse_qs(self.__response__) self.__oauth_token__ = oauth.OAuthToken(response['oauth_token'][0], response['oauth_token_secret'][0]) self.__setAuthenticationMethod__('oauth') return True else: self.__setAuthenticationMethod__('session_id') self.__error__ = "error getting access token" return False
[ "def", "OauthGetAccessToken", "(", "self", ")", ":", "self", ".", "__setAuthenticationMethod__", "(", "'authenticating_oauth'", ")", "# obtain access token\r", "oauth_request", "=", "oauth", ".", "OAuthRequest", ".", "from_consumer_and_token", "(", "self", ".", "__oauth_consumer__", ",", "token", "=", "self", ".", "__oauth_token__", ",", "callback", "=", "''", ",", "verifier", "=", "self", ".", "__oauth_token__", ".", "verifier", ",", "http_url", "=", "'http://api.sense-os.nl/oauth/access_token'", ")", "oauth_request", ".", "sign_request", "(", "oauth", ".", "OAuthSignatureMethod_HMAC_SHA1", "(", ")", ",", "self", ".", "__oauth_consumer__", ",", "self", ".", "__oauth_token__", ")", "parameters", "=", "[", "]", "for", "key", "in", "oauth_request", ".", "parameters", ".", "iterkeys", "(", ")", ":", "parameters", ".", "append", "(", "(", "key", ",", "oauth_request", ".", "parameters", "[", "key", "]", ")", ")", "parameters", ".", "sort", "(", ")", "if", "self", ".", "__SenseApiCall__", "(", "'/oauth/access_token'", ",", "'GET'", ",", "parameters", "=", "parameters", ")", ":", "response", "=", "urlparse", ".", "parse_qs", "(", "self", ".", "__response__", ")", "self", ".", "__oauth_token__", "=", "oauth", ".", "OAuthToken", "(", "response", "[", "'oauth_token'", "]", "[", "0", "]", ",", "response", "[", "'oauth_token_secret'", "]", "[", "0", "]", ")", "self", ".", "__setAuthenticationMethod__", "(", "'oauth'", ")", "return", "True", "else", ":", "self", ".", "__setAuthenticationMethod__", "(", "'session_id'", ")", "self", ".", "__error__", "=", "\"error getting access token\"", "return", "False" ]
54.71875
34.59375
def parse(cls, s, schema_only=False): """ Parse an ARFF File already loaded into a string. """ a = cls() a.state = 'comment' a.lineno = 1 for l in s.splitlines(): a.parseline(l) a.lineno += 1 if schema_only and a.state == 'data': # Don't parse data if we're only loading the schema. break return a
[ "def", "parse", "(", "cls", ",", "s", ",", "schema_only", "=", "False", ")", ":", "a", "=", "cls", "(", ")", "a", ".", "state", "=", "'comment'", "a", ".", "lineno", "=", "1", "for", "l", "in", "s", ".", "splitlines", "(", ")", ":", "a", ".", "parseline", "(", "l", ")", "a", ".", "lineno", "+=", "1", "if", "schema_only", "and", "a", ".", "state", "==", "'data'", ":", "# Don't parse data if we're only loading the schema.", "break", "return", "a" ]
29.714286
13.714286
def hacking_no_old_style_class(logical_line, noqa): r"""Check for old style classes. Examples: Okay: class Foo(object):\n pass Okay: class Foo(Bar, Baz):\n pass Okay: class Foo(object, Baz):\n pass Okay: class Foo(somefunc()):\n pass H238: class Bar:\n pass H238: class Bar():\n pass """ if noqa: return line = core.import_normalize(logical_line.strip()) if line.startswith("class ") and not RE_NEW_STYLE_CLASS.match(line): yield (0, "H238: old style class declaration, " "use new style (inherit from `object`)")
[ "def", "hacking_no_old_style_class", "(", "logical_line", ",", "noqa", ")", ":", "if", "noqa", ":", "return", "line", "=", "core", ".", "import_normalize", "(", "logical_line", ".", "strip", "(", ")", ")", "if", "line", ".", "startswith", "(", "\"class \"", ")", "and", "not", "RE_NEW_STYLE_CLASS", ".", "match", "(", "line", ")", ":", "yield", "(", "0", ",", "\"H238: old style class declaration, \"", "\"use new style (inherit from `object`)\"", ")" ]
35.411765
14.176471
def stop_refreshing_token(self): """ The timer needs to be canceled if the application is terminating; otherwise the timer will keep going. """ with self.lock: self.timer_stopped = True self.timer.cancel()
[ "def", "stop_refreshing_token", "(", "self", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "timer_stopped", "=", "True", "self", ".", "timer", ".", "cancel", "(", ")" ]
36
14.857143
def asyncPipeSubstr(context=None, _INPUT=None, conf=None, **kwargs): """A string module that asynchronously returns a substring. Loopable. Parameters ---------- context : pipe2py.Context object _INPUT : twisted Deferred iterable of items or strings conf : { 'from': {'type': 'number', value': <starting position>}, 'length': {'type': 'number', 'value': <count of characters to return>} } returns ------- _OUTPUT : twisted.internet.defer.Deferred generator of substrings """ conf['start'] = conf.pop('from', dict.get(conf, 'start')) splits = yield asyncGetSplits(_INPUT, conf, **cdicts(opts, kwargs)) parsed = yield asyncDispatch(splits, *get_async_dispatch_funcs()) _OUTPUT = yield asyncStarMap(partial(maybeDeferred, parse_result), parsed) returnValue(iter(_OUTPUT))
[ "def", "asyncPipeSubstr", "(", "context", "=", "None", ",", "_INPUT", "=", "None", ",", "conf", "=", "None", ",", "*", "*", "kwargs", ")", ":", "conf", "[", "'start'", "]", "=", "conf", ".", "pop", "(", "'from'", ",", "dict", ".", "get", "(", "conf", ",", "'start'", ")", ")", "splits", "=", "yield", "asyncGetSplits", "(", "_INPUT", ",", "conf", ",", "*", "*", "cdicts", "(", "opts", ",", "kwargs", ")", ")", "parsed", "=", "yield", "asyncDispatch", "(", "splits", ",", "*", "get_async_dispatch_funcs", "(", ")", ")", "_OUTPUT", "=", "yield", "asyncStarMap", "(", "partial", "(", "maybeDeferred", ",", "parse_result", ")", ",", "parsed", ")", "returnValue", "(", "iter", "(", "_OUTPUT", ")", ")" ]
39.47619
24.904762
def host(value): """ Validates that the value is a valid network location """ if not value: return (True, "") try: host,port = value.split(":") except ValueError as _: return (False, "value needs to be <host>:<port>") try: int(port) except ValueError as _: return (False, "port component of the host address needs to be a number") return (True, "")
[ "def", "host", "(", "value", ")", ":", "if", "not", "value", ":", "return", "(", "True", ",", "\"\"", ")", "try", ":", "host", ",", "port", "=", "value", ".", "split", "(", "\":\"", ")", "except", "ValueError", "as", "_", ":", "return", "(", "False", ",", "\"value needs to be <host>:<port>\"", ")", "try", ":", "int", "(", "port", ")", "except", "ValueError", "as", "_", ":", "return", "(", "False", ",", "\"port component of the host address needs to be a number\"", ")", "return", "(", "True", ",", "\"\"", ")" ]
26.933333
22.4
def GetRowMatch(self, attributes): """Returns the row number that matches the supplied attributes.""" for row in self.compiled: try: for key in attributes: # Silently skip attributes not present in the index file. # pylint: disable=E1103 if ( key in row.header and row[key] and not row[key].match(attributes[key]) ): # This line does not match, so break and try next row. raise StopIteration() return row.row except StopIteration: pass return 0
[ "def", "GetRowMatch", "(", "self", ",", "attributes", ")", ":", "for", "row", "in", "self", ".", "compiled", ":", "try", ":", "for", "key", "in", "attributes", ":", "# Silently skip attributes not present in the index file.", "# pylint: disable=E1103", "if", "(", "key", "in", "row", ".", "header", "and", "row", "[", "key", "]", "and", "not", "row", "[", "key", "]", ".", "match", "(", "attributes", "[", "key", "]", ")", ")", ":", "# This line does not match, so break and try next row.", "raise", "StopIteration", "(", ")", "return", "row", ".", "row", "except", "StopIteration", ":", "pass", "return", "0" ]
40.166667
13.611111
def to_dict(obj): """ If the value isn't a primitive scalar or collection then it needs to either implement to_dict (instances of Serializable) or have member data matching each required arg of __init__. """ if isinstance(obj, dict): return obj elif hasattr(obj, "to_dict"): return obj.to_dict() try: return simple_object_to_dict(obj) except Exception: raise ValueError( "Cannot convert %s : %s to dictionary" % ( obj, type(obj)))
[ "def", "to_dict", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "return", "obj", "elif", "hasattr", "(", "obj", ",", "\"to_dict\"", ")", ":", "return", "obj", ".", "to_dict", "(", ")", "try", ":", "return", "simple_object_to_dict", "(", "obj", ")", "except", ":", "raise", "ValueError", "(", "\"Cannot convert %s : %s to dictionary\"", "%", "(", "obj", ",", "type", "(", "obj", ")", ")", ")" ]
31.4375
15.4375
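simple_object_to_dict is not shown in this record. One plausible sketch, assuming every required __init__ argument is mirrored by an instance attribute of the same name (the real helper may well differ):

import inspect

def simple_object_to_dict(obj):
    # read the names of __init__'s parameters and look up matching attributes
    params = inspect.signature(type(obj).__init__).parameters
    return {name: getattr(obj, name) for name in params if name != 'self'}

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

print(simple_object_to_dict(Point(1, 2)))  # {'x': 1, 'y': 2}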
def color( self, text=None, fore=None, back=None, style=None, no_closing=False): """ A method that colorizes strings, not Colr objects. Raises InvalidColr for invalid color names. The 'reset_all' code is appended if text is given. """ has_args = ( (fore is not None) or (back is not None) or (style is not None) ) if hasattr(text, '__colr__') and not has_args: # Use custom __colr__ method in the absence of arguments. return str(self._call_dunder_colr(text)) # Stringify everything before operating on it. text = str(text) if text is not None else '' if _disabled: return text # Considered to have unclosed codes if embedded codes exist and # the last code was not a color code. embedded_codes = get_codes(text) has_end_code = embedded_codes and embedded_codes[-1] == closing_code # Add closing code if not already added, there is text, and # some kind of color/style was used (whether from args, or # color codes were included in the text already). # If the last code embedded in the text was a closing code, # then it is not added. # This can be overridden with `no_closing`. needs_closing = ( text and (not no_closing) and (not has_end_code) and (has_args or embedded_codes) ) if needs_closing: end = closing_code else: end = '' return ''.join(( self.color_code(fore=fore, back=back, style=style), text, end, ))
[ "def", "color", "(", "self", ",", "text", "=", "None", ",", "fore", "=", "None", ",", "back", "=", "None", ",", "style", "=", "None", ",", "no_closing", "=", "False", ")", ":", "has_args", "=", "(", "(", "fore", "is", "not", "None", ")", "or", "(", "back", "is", "not", "None", ")", "or", "(", "style", "is", "not", "None", ")", ")", "if", "hasattr", "(", "text", ",", "'__colr__'", ")", "and", "not", "has_args", ":", "# Use custom __colr__ method in the absence of arguments.", "return", "str", "(", "self", ".", "_call_dunder_colr", "(", "text", ")", ")", "# Stringify everything before operating on it.", "text", "=", "str", "(", "text", ")", "if", "text", "is", "not", "None", "else", "''", "if", "_disabled", ":", "return", "text", "# Considered to have unclosed codes if embedded codes exist and", "# the last code was not a color code.", "embedded_codes", "=", "get_codes", "(", "text", ")", "has_end_code", "=", "embedded_codes", "and", "embedded_codes", "[", "-", "1", "]", "==", "closing_code", "# Add closing code if not already added, there is text, and", "# some kind of color/style was used (whether from args, or", "# color codes were included in the text already).", "# If the last code embedded in the text was a closing code,", "# then it is not added.", "# This can be overriden with `no_closing`.", "needs_closing", "=", "(", "text", "and", "(", "not", "no_closing", ")", "and", "(", "not", "has_end_code", ")", "and", "(", "has_args", "or", "embedded_codes", ")", ")", "if", "needs_closing", ":", "end", "=", "closing_code", "else", ":", "end", "=", "''", "return", "''", ".", "join", "(", "(", "self", ".", "color_code", "(", "fore", "=", "fore", ",", "back", "=", "back", ",", "style", "=", "style", ")", ",", "text", ",", "end", ",", ")", ")" ]
36.76087
17.956522
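The closing-code logic boils down to: emit a color escape, then the text, then a reset escape, but only when some styling was actually applied. A bare-bones ANSI sketch of that idea (Colr's real escape handling is far more involved):

RESET = '\x1b[0m'
FORE = {'red': '\x1b[31m', 'green': '\x1b[32m'}

def colorize(text, fore=None):
    if fore is None:
        return text                    # nothing applied, so no reset needed
    return FORE[fore] + text + RESET   # open code + text + closing code

print(colorize('plain'))
print(colorize('alert', fore='red'))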
def dump(data, abspath, pk_protocol=py23.pk_protocol, overwrite=False, enable_verbose=True): """Dump picklable object to file. Provides multiple choices to customize the behavior. :param data: picklable python object. :type data: dict or list :param abspath: ``save as`` path, file extension has to be ``.pickle`` or ``.gz`` (for compressed Pickle) :type abspath: string :param pk_protocol: default = your python version, use 2, to make a py2.x/3.x compatible pickle file. But 3 is faster. :type pk_protocol: int :param overwrite: default ``False``, If ``True``, when you dump to an existing file, it silently overwrites it. If ``False``, an alert message is shown. The default setting ``False`` is to prevent overwriting a file by mistake. :type overwrite: boolean :param enable_verbose: default True, help-message-display trigger. :type enable_verbose: boolean Usage:: >>> from dataIO import pk >>> data = {"a": 1, "b": 2} >>> dump(data, "test.pickle", overwrite=True) Dump to `test.pickle` ... Complete! Elapse 0.002432 sec **Chinese documentation** Write picklable Python "dict" and "list" objects, and any combination of them, to a Pickle file. Parameter list: :param data: a picklable Python object :type data: ``dict`` or ``list`` :param abspath: absolute path of the Pickle file; the extension has to be ``.pickle`` or ``.gz``, where ``.gz`` marks a compressed Pickle file :type abspath: ``string`` :param pk_protocol: defaults to your major Python version; use 2 to make the Pickle file compatible with both Python 2 and 3, although protocol 3 is faster :type pk_protocol: int :param overwrite: default ``False``; when ``True``, an existing file at the target path is silently overwritten, while ``False`` prints a warning to prevent overwriting a file by mistake :type overwrite: ``boolean`` :param enable_verbose: default ``True``; switch for informational messages, recommended off for batch processing :type enable_verbose: ``boolean`` """ prt("\nDump to '%s' ..." % abspath, enable_verbose) abspath = lower_ext(str(abspath)) is_pickle = is_pickle_file(abspath) if os.path.exists(abspath): if not overwrite: # file exists and overwrite=False prt(" Stop! File exists and overwrite is not allowed", enable_verbose) return st = time.perf_counter() content = pickle.dumps(data, pk_protocol) if is_pickle: textfile.writebytes(content, abspath) else: compress.write_gzip(content, abspath) prt(" Complete! Elapse %.6f sec." % (time.perf_counter() - st), enable_verbose)
[ "def", "dump", "(", "data", ",", "abspath", ",", "pk_protocol", "=", "py23", ".", "pk_protocol", ",", "overwrite", "=", "False", ",", "enable_verbose", "=", "True", ")", ":", "prt", "(", "\"\\nDump to '%s' ...\"", "%", "abspath", ",", "enable_verbose", ")", "abspath", "=", "lower_ext", "(", "str", "(", "abspath", ")", ")", "is_pickle", "=", "is_pickle_file", "(", "abspath", ")", "if", "os", ".", "path", ".", "exists", "(", "abspath", ")", ":", "if", "not", "overwrite", ":", "# 存在, 并且overwrite=False", "prt", "(", "\" Stop! File exists and overwrite is not allowed\"", ",", "enable_verbose", ")", "return", "st", "=", "time", ".", "clock", "(", ")", "content", "=", "pickle", ".", "dumps", "(", "data", ",", "pk_protocol", ")", "if", "is_pickle", ":", "textfile", ".", "writebytes", "(", "content", ",", "abspath", ")", "else", ":", "compress", ".", "write_gzip", "(", "content", ",", "abspath", ")", "prt", "(", "\" Complete! Elapse %.6f sec.\"", "%", "(", "time", ".", "clock", "(", ")", "-", "st", ")", ",", "enable_verbose", ")" ]
30.723684
22.197368
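Ignoring the verbosity and overwrite plumbing, the .pickle/.gz branch amounts to serializing once and then choosing a plain or gzip writer. A standard-library-only sketch (textfile and compress are dataIO helpers not shown here):

import gzip
import pickle

def dump_sketch(data, abspath, pk_protocol=2):
    content = pickle.dumps(data, pk_protocol)   # serialize once
    if abspath.endswith('.gz'):
        with gzip.open(abspath, 'wb') as f:     # compressed Pickle
            f.write(content)
    else:
        with open(abspath, 'wb') as f:          # plain Pickle
            f.write(content)

dump_sketch({'a': 1, 'b': 2}, 'test.pickle')
dump_sketch({'a': 1, 'b': 2}, 'test.gz')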
def estimate_from_ssr(histograms, readout_povm, pre_channel_ops, post_channel_ops, settings): """ Estimate a quantum process from single shot histograms obtained by preparing specific input states and measuring bitstrings in the Z-eigenbasis after application of given channel operators. :param numpy.ndarray histograms: The single shot histograms. :param DiagonalPOVM readout_povm: The POVM corresponding to readout plus classifier. :param list pre_channel_ops: The input state preparation channels as `qutip.Qobj`'s. :param list post_channel_ops: The tomography post-process channels as `qutip.Qobj`'s. :param TomographySettings settings: The solver and estimation settings. :return: The ProcessTomography object and results from the given data. :rtype: ProcessTomography """ nqc = len(pre_channel_ops[0].dims[0]) pauli_basis = grove.tomography.operator_utils.PAULI_BASIS ** nqc pi_basis = readout_povm.pi_basis if not histograms.shape[-1] == pi_basis.dim: # pragma no coverage raise ValueError("Currently tomography is only implemented for two-level systems") rho0 = grove.tomography.operator_utils.n_qubit_ground_state(nqc) n_lkj = np.asarray(histograms) b_jkl_mn = _prepare_b_jkl_mn(readout_povm, pauli_basis, pre_channel_ops, post_channel_ops, rho0) r_mn = cvxpy.Variable(pauli_basis.dim ** 2) p_jkl = b_jkl_mn.real * r_mn obj = -np.matrix(n_lkj.ravel()) * cvxpy.log(p_jkl) # cvxpy has col-major order and we collapse k and l onto single dimension p_jkl_mat = cvxpy.reshape(p_jkl, pi_basis.dim, len(pre_channel_ops) * len(post_channel_ops)) # Default constraints: # MLE must describe valid probability distribution # i.e., for each k and l, p_jkl must sum to one and be element-wise non-negative: # 1. \sum_j p_jkl == 1 for all k, l # 2. p_jkl >= 0 for all j, k, l # where p_jkl = \sum_m b_jkl_mn r_mn constraints = [p_jkl >= 0, np.matrix(np.ones((1, pi_basis.dim))) * p_jkl_mat == 1] r_mn_mat = cvxpy.reshape(r_mn, pauli_basis.dim, pauli_basis.dim) super_pauli_basis = pauli_basis.super_basis() choi_real_imag = sum((r_mn_mat[jj, kk] * o_ut.to_realimag( super_pauli_basis.ops[jj + kk * pauli_basis.dim]) for jj in range(pauli_basis.dim) for kk in range(pauli_basis.dim)), 0) if COMPLETELY_POSITIVE in settings.constraints: if tomography._SDP_SOLVER.is_functional(): constraints.append(choi_real_imag >> 0) else: # pragma no coverage _log.warning("No convex solver capable of semi-definite problems installed.\n" "Dropping the complete positivity constraint on the process") if TRACE_PRESERVING in settings.constraints: constraints.append(r_mn_mat[0, 0] == 1) constraints.append(r_mn_mat[0, 1:] == 0) prob = cvxpy.Problem(cvxpy.Minimize(obj), constraints) _ = prob.solve(solver=tomography.SOLVER, **settings.solver_kwargs) r_mn_est = r_mn.value.reshape((pauli_basis.dim, pauli_basis.dim)).transpose() return ProcessTomography(r_mn_est, pauli_basis, settings)
[ "def", "estimate_from_ssr", "(", "histograms", ",", "readout_povm", ",", "pre_channel_ops", ",", "post_channel_ops", ",", "settings", ")", ":", "nqc", "=", "len", "(", "pre_channel_ops", "[", "0", "]", ".", "dims", "[", "0", "]", ")", "pauli_basis", "=", "grove", ".", "tomography", ".", "operator_utils", ".", "PAULI_BASIS", "**", "nqc", "pi_basis", "=", "readout_povm", ".", "pi_basis", "if", "not", "histograms", ".", "shape", "[", "-", "1", "]", "==", "pi_basis", ".", "dim", ":", "# pragma no coverage", "raise", "ValueError", "(", "\"Currently tomography is only implemented for two-level systems\"", ")", "rho0", "=", "grove", ".", "tomography", ".", "operator_utils", ".", "n_qubit_ground_state", "(", "nqc", ")", "n_lkj", "=", "np", ".", "asarray", "(", "histograms", ")", "b_jkl_mn", "=", "_prepare_b_jkl_mn", "(", "readout_povm", ",", "pauli_basis", ",", "pre_channel_ops", ",", "post_channel_ops", ",", "rho0", ")", "r_mn", "=", "cvxpy", ".", "Variable", "(", "pauli_basis", ".", "dim", "**", "2", ")", "p_jkl", "=", "b_jkl_mn", ".", "real", "*", "r_mn", "obj", "=", "-", "np", ".", "matrix", "(", "n_lkj", ".", "ravel", "(", ")", ")", "*", "cvxpy", ".", "log", "(", "p_jkl", ")", "# cvxpy has col-major order and we collapse k and l onto single dimension", "p_jkl_mat", "=", "cvxpy", ".", "reshape", "(", "p_jkl", ",", "pi_basis", ".", "dim", ",", "len", "(", "pre_channel_ops", ")", "*", "len", "(", "post_channel_ops", ")", ")", "# Default constraints:", "# MLE must describe valid probability distribution", "# i.e., for each k and l, p_jkl must sum to one and be element-wise non-negative:", "# 1. \\sum_j p_jkl == 1 for all k, l", "# 2. p_jkl >= 0 for all j, k, l", "# where p_jkl = \\sum_m b_jkl_mn r_mn", "constraints", "=", "[", "p_jkl", ">=", "0", ",", "np", ".", "matrix", "(", "np", ".", "ones", "(", "(", "1", ",", "pi_basis", ".", "dim", ")", ")", ")", "*", "p_jkl_mat", "==", "1", "]", "r_mn_mat", "=", "cvxpy", ".", "reshape", "(", "r_mn", ",", "pauli_basis", ".", "dim", ",", "pauli_basis", ".", "dim", ")", "super_pauli_basis", "=", "pauli_basis", ".", "super_basis", "(", ")", "choi_real_imag", "=", "sum", "(", "(", "r_mn_mat", "[", "jj", ",", "kk", "]", "*", "o_ut", ".", "to_realimag", "(", "super_pauli_basis", ".", "ops", "[", "jj", "+", "kk", "*", "pauli_basis", ".", "dim", "]", ")", "for", "jj", "in", "range", "(", "pauli_basis", ".", "dim", ")", "for", "kk", "in", "range", "(", "pauli_basis", ".", "dim", ")", ")", ",", "0", ")", "if", "COMPLETELY_POSITIVE", "in", "settings", ".", "constraints", ":", "if", "tomography", ".", "_SDP_SOLVER", ".", "is_functional", "(", ")", ":", "constraints", ".", "append", "(", "choi_real_imag", ">>", "0", ")", "else", ":", "# pragma no coverage", "_log", ".", "warning", "(", "\"No convex solver capable of semi-definite problems installed.\\n\"", "\"Dropping the complete positivity constraint on the process\"", ")", "if", "TRACE_PRESERVING", "in", "settings", ".", "constraints", ":", "constraints", ".", "append", "(", "r_mn_mat", "[", "0", ",", "0", "]", "==", "1", ")", "constraints", ".", "append", "(", "r_mn_mat", "[", "0", ",", "1", ":", "]", "==", "0", ")", "prob", "=", "cvxpy", ".", "Problem", "(", "cvxpy", ".", "Minimize", "(", "obj", ")", ",", "constraints", ")", "_", "=", "prob", ".", "solve", "(", "solver", "=", "tomography", ".", "SOLVER", ",", "*", "*", "settings", ".", "solver_kwargs", ")", "r_mn_est", "=", "r_mn", ".", "value", ".", "reshape", "(", "(", "pauli_basis", ".", "dim", ",", "pauli_basis", ".", "dim", ")", ")", ".", "transpose", "(", 
")", "return", "ProcessTomography", "(", "r_mn_est", ",", "pauli_basis", ",", "settings", ")" ]
51.5
28.5
def websocket(self, uri, *args, **kwargs): """Create a websocket route from a decorated function :param uri: endpoint at which the socket endpoint will be accessible. :type uri: str :param args: captures all of the positional arguments passed in :type args: tuple(Any) :param kwargs: captures the keyword arguments passed in :type kwargs: dict(Any) :return: The exception function to use as the decorator :rtype: fn """ kwargs.setdefault('host', None) kwargs.setdefault('strict_slashes', None) kwargs.setdefault('subprotocols', None) kwargs.setdefault('name', None) kwargs['with_context'] = True # This is the whole point of this plugin def wrapper(handler_f): nonlocal self, uri, args, kwargs return super(Contextualize, self).websocket( uri, *args, **kwargs)(handler_f) return wrapper
[ "def", "websocket", "(", "self", ",", "uri", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "kwargs", ".", "setdefault", "(", "'host'", ",", "None", ")", "kwargs", ".", "setdefault", "(", "'strict_slashes'", ",", "None", ")", "kwargs", ".", "setdefault", "(", "'subprotocols'", ",", "None", ")", "kwargs", ".", "setdefault", "(", "'name'", ",", "None", ")", "kwargs", "[", "'with_context'", "]", "=", "True", "# This is the whole point of this plugin", "def", "wrapper", "(", "handler_f", ")", ":", "nonlocal", "self", ",", "uri", ",", "args", ",", "kwargs", "return", "super", "(", "Contextualize", ",", "self", ")", ".", "websocket", "(", "uri", ",", "*", "args", ",", "*", "*", "kwargs", ")", "(", "handler_f", ")", "return", "wrapper" ]
41
15.956522
def get_annotation(self, id_): """Data for a specific annotation.""" endpoint = "annotations/{id}".format(id=id_) return self._make_request(endpoint)
[ "def", "get_annotation", "(", "self", ",", "id_", ")", ":", "endpoint", "=", "\"annotations/{id}\"", ".", "format", "(", "id", "=", "id_", ")", "return", "self", ".", "_make_request", "(", "endpoint", ")" ]
42.5
6.25
def channel(self, channel_id=None, synchronous=False): """ Fetch a Channel object identified by the numeric channel_id, or create that object if it doesn't already exist. If channel_id is not None but no channel exists for that id, will raise InvalidChannel. If there are already too many channels open, will raise TooManyChannels. If synchronous=True, then the channel will act synchronous in all cases where a protocol method supports `nowait=False`, or where there is an implied callback in the protocol. """ if channel_id is None: # adjust for channel 0 if len(self._channels) - 1 >= self._channel_max: raise Connection.TooManyChannels( "%d channels already open, max %d", len(self._channels) - 1, self._channel_max) channel_id = self._next_channel_id() while channel_id in self._channels: channel_id = self._next_channel_id() elif channel_id in self._channels: return self._channels[channel_id] else: raise Connection.InvalidChannel( "%s is not a valid channel id", channel_id) # Call open() here so that ConnectionChannel doesn't have it called. # Could also solve this other ways, but it's a HACK regardless. rval = Channel( self, channel_id, self._class_map, synchronous=synchronous) self._channels[channel_id] = rval rval.add_close_listener(self._channel_closed) rval.open() return rval
[ "def", "channel", "(", "self", ",", "channel_id", "=", "None", ",", "synchronous", "=", "False", ")", ":", "if", "channel_id", "is", "None", ":", "# adjust for channel 0", "if", "len", "(", "self", ".", "_channels", ")", "-", "1", ">=", "self", ".", "_channel_max", ":", "raise", "Connection", ".", "TooManyChannels", "(", "\"%d channels already open, max %d\"", ",", "len", "(", "self", ".", "_channels", ")", "-", "1", ",", "self", ".", "_channel_max", ")", "channel_id", "=", "self", ".", "_next_channel_id", "(", ")", "while", "channel_id", "in", "self", ".", "_channels", ":", "channel_id", "=", "self", ".", "_next_channel_id", "(", ")", "elif", "channel_id", "in", "self", ".", "_channels", ":", "return", "self", ".", "_channels", "[", "channel_id", "]", "else", ":", "raise", "Connection", ".", "InvalidChannel", "(", "\"%s is not a valid channel id\"", ",", "channel_id", ")", "# Call open() here so that ConnectionChannel doesn't have it called.", "# Could also solve this other ways, but it's a HACK regardless.", "rval", "=", "Channel", "(", "self", ",", "channel_id", ",", "self", ".", "_class_map", ",", "synchronous", "=", "synchronous", ")", "self", ".", "_channels", "[", "channel_id", "]", "=", "rval", "rval", ".", "add_close_listener", "(", "self", ".", "_channel_closed", ")", "rval", ".", "open", "(", ")", "return", "rval" ]
45.971429
18.142857
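The id-allocation path above scans for the next unused channel id while honoring channel_max. A toy version of just that bookkeeping (the real class also wraps a wire protocol, which is omitted here):

class ChannelTable(object):
    def __init__(self, channel_max=4):
        self._channels = {0: 'connection'}   # channel 0 is reserved
        self._channel_max = channel_max
        self._next = 0

    def allocate(self):
        # adjust for channel 0, as the original does
        if len(self._channels) - 1 >= self._channel_max:
            raise RuntimeError('too many channels open')
        self._next += 1
        while self._next in self._channels:  # skip ids still in use
            self._next += 1
        self._channels[self._next] = 'open'
        return self._next

table = ChannelTable()
print(table.allocate(), table.allocate())  # 1 2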
def _organize_qc_files(program, qc_dir): """Organize outputs from quality control runs into a base file and secondary outputs. Provides compatibility with CWL output. Returns None if no files created during processing. """ base_files = {"fastqc": "fastqc_report.html", "qualimap_rnaseq": "qualimapReport.html", "qualimap": "qualimapReport.html"} if os.path.exists(qc_dir): out_files = [] for fname in [os.path.join(qc_dir, x) for x in os.listdir(qc_dir)]: if os.path.isfile(fname) and not fname.endswith(".bcbiotmp"): out_files.append(fname) elif os.path.isdir(fname) and not fname.endswith("tx"): for root, dirs, files in os.walk(fname): for f in files: if not f.endswith(".bcbiotmp"): out_files.append(os.path.join(root, f)) if len(out_files) > 0 and all([not f.endswith("-failed.log") for f in out_files]): if len(out_files) == 1: base = out_files[0] secondary = [] else: base = None if program in base_files: base_choices = [x for x in out_files if x.endswith("/%s" % base_files[program])] if len(base_choices) == 1: base = base_choices[0] if not base: base = out_files[0] secondary = [x for x in out_files if x != base] return {"base": base, "secondary": secondary}
[ "def", "_organize_qc_files", "(", "program", ",", "qc_dir", ")", ":", "base_files", "=", "{", "\"fastqc\"", ":", "\"fastqc_report.html\"", ",", "\"qualimap_rnaseq\"", ":", "\"qualimapReport.html\"", ",", "\"qualimap\"", ":", "\"qualimapReport.html\"", "}", "if", "os", ".", "path", ".", "exists", "(", "qc_dir", ")", ":", "out_files", "=", "[", "]", "for", "fname", "in", "[", "os", ".", "path", ".", "join", "(", "qc_dir", ",", "x", ")", "for", "x", "in", "os", ".", "listdir", "(", "qc_dir", ")", "]", ":", "if", "os", ".", "path", ".", "isfile", "(", "fname", ")", "and", "not", "fname", ".", "endswith", "(", "\".bcbiotmp\"", ")", ":", "out_files", ".", "append", "(", "fname", ")", "elif", "os", ".", "path", ".", "isdir", "(", "fname", ")", "and", "not", "fname", ".", "endswith", "(", "\"tx\"", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "fname", ")", ":", "for", "f", "in", "files", ":", "if", "not", "f", ".", "endswith", "(", "\".bcbiotmp\"", ")", ":", "out_files", ".", "append", "(", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", ")", "if", "len", "(", "out_files", ")", ">", "0", "and", "all", "(", "[", "not", "f", ".", "endswith", "(", "\"-failed.log\"", ")", "for", "f", "in", "out_files", "]", ")", ":", "if", "len", "(", "out_files", ")", "==", "1", ":", "base", "=", "out_files", "[", "0", "]", "secondary", "=", "[", "]", "else", ":", "base", "=", "None", "if", "program", "in", "base_files", ":", "base_choices", "=", "[", "x", "for", "x", "in", "out_files", "if", "x", ".", "endswith", "(", "\"/%s\"", "%", "base_files", "[", "program", "]", ")", "]", "if", "len", "(", "base_choices", ")", "==", "1", ":", "base", "=", "base_choices", "[", "0", "]", "if", "not", "base", ":", "base", "=", "out_files", "[", "0", "]", "secondary", "=", "[", "x", "for", "x", "in", "out_files", "if", "x", "!=", "base", "]", "return", "{", "\"base\"", ":", "base", ",", "\"secondary\"", ":", "secondary", "}" ]
48.875
17.3125
def raise_error(self, message, *params, **key_params): """ Raise a parse error. """ s = 'Parser error in ' self.xml_node_stack.reverse() if len(self.xml_node_stack) > 1: node = self.xml_node_stack[0] s += '<{0}'.format(node.tag) if 'name' in node.lattrib: s += ' name=\"{0}\"'.format(node.lattrib['name']) if 'id' in node.lattrib: s += ' id=\"{0}\"'.format(node.lattrib['id']) s += '>' for node in self.xml_node_stack[1:]: s += '.<{0}'.format(node.tag) if 'name' in node.lattrib: s += ' name=\"{0}\"'.format(node.lattrib['name']) if 'id' in node.lattrib: s += ' id=\"{0}\"'.format(node.lattrib['id']) s += '>' s += ':\n ' + message self.xml_node_stack.reverse() raise ParseError(s, *params, **key_params)
[ "def", "raise_error", "(", "self", ",", "message", ",", "*", "params", ",", "*", "*", "key_params", ")", ":", "s", "=", "'Parser error in '", "self", ".", "xml_node_stack", ".", "reverse", "(", ")", "if", "len", "(", "self", ".", "xml_node_stack", ")", ">", "1", ":", "node", "=", "self", ".", "xml_node_stack", "[", "0", "]", "s", "+=", "'<{0}'", ".", "format", "(", "node", ".", "tag", ")", "if", "'name'", "in", "node", ".", "lattrib", ":", "s", "+=", "' name=\\\"{0}\\\"'", ".", "format", "(", "node", ".", "lattrib", "[", "'name'", "]", ")", "if", "'id'", "in", "node", ".", "lattrib", ":", "s", "+=", "' id=\\\"{0}\\\"'", ".", "format", "(", "node", ".", "lattrib", "[", "'id'", "]", ")", "s", "+=", "'>'", "for", "node", "in", "self", ".", "xml_node_stack", "[", "1", ":", "]", ":", "s", "+=", "'.<{0}'", ".", "format", "(", "node", ".", "tag", ")", "if", "'name'", "in", "node", ".", "lattrib", ":", "s", "+=", "' name=\\\"{0}\\\"'", ".", "format", "(", "node", ".", "lattrib", "[", "'name'", "]", ")", "if", "'id'", "in", "node", ".", "lattrib", ":", "s", "+=", "' id=\\\"{0}\\\"'", ".", "format", "(", "node", ".", "lattrib", "[", "'id'", "]", ")", "s", "+=", "'>'", "s", "+=", "':\\n '", "+", "message", "raise", "ParseError", "(", "s", ",", "*", "params", ",", "*", "*", "key_params", ")", "self", ".", "xml_node_stack", ".", "reverse", "(", ")" ]
31.4
14.8
def get_device_elements(self): """Get the DOM elements for the device list.""" plain = self._aha_request('getdevicelistinfos') dom = xml.dom.minidom.parseString(plain) _LOGGER.debug(dom) return dom.getElementsByTagName("device")
[ "def", "get_device_elements", "(", "self", ")", ":", "plain", "=", "self", ".", "_aha_request", "(", "'getdevicelistinfos'", ")", "dom", "=", "xml", ".", "dom", ".", "minidom", ".", "parseString", "(", "plain", ")", "_LOGGER", ".", "debug", "(", "dom", ")", "return", "dom", ".", "getElementsByTagName", "(", "\"device\"", ")" ]
43.833333
9.333333
def distance(vec1, vec2): """Calculate the distance between two Vectors""" if isinstance(vec1, Vector2) \ and isinstance(vec2, Vector2): dist_vec = vec2 - vec1 return dist_vec.length() else: raise TypeError("vec1 and vec2 must be Vector2's")
[ "def", "distance", "(", "vec1", ",", "vec2", ")", ":", "if", "isinstance", "(", "vec1", ",", "Vector2", ")", "and", "isinstance", "(", "vec2", ",", "Vector2", ")", ":", "dist_vec", "=", "vec2", "-", "vec1", "return", "dist_vec", ".", "length", "(", ")", "else", ":", "raise", "TypeError", "(", "\"vec1 and vec2 must be Vector2's\"", ")" ]
38.75
10.25
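The distance computed above is simply the Euclidean norm of vec2 - vec1; with plain (x, y) tuples the same computation reduces to math.hypot on the component differences:

import math

def distance(p1, p2):
    # Euclidean distance between two 2D points
    return math.hypot(p2[0] - p1[0], p2[1] - p1[1])

print(distance((0, 0), (3, 4)))  # 5.0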
def edit(self, id_equip, nome, id_tipo_equipamento, id_modelo, maintenance=None): """Change Equipment by the identifier. :param id_equip: Identifier of the Equipment. Integer value and greater than zero. :param nome: Equipment name. String with a minimum 3 and maximum of 30 characters :param id_tipo_equipamento: Identifier of the Equipment Type. Integer value and greater than zero. :param id_modelo: Identifier of the Model. Integer value and greater than zero. :return: None :raise InvalidParameterError: The identifier of Equipment, model, equipment type or name is null and invalid. :raise EquipamentoNaoExisteError: Equipment not registered. :raise TipoEquipamentoNaoExisteError: Equipment Type not registered. :raise ModeloEquipamentoNaoExisteError: Model not registered. :raise GrupoEquipamentoNaoExisteError: Group not registered. :raise EquipamentoError: Equipment with a duplicate name, or Equipment in the "Equipamentos Orquestração" group can only be created with type "Servidor Virtual". :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response """ equip_map = dict() equip_map['id_equip'] = id_equip equip_map['id_tipo_equipamento'] = id_tipo_equipamento equip_map['id_modelo'] = id_modelo equip_map['nome'] = nome if maintenance is not None: equip_map['maintenance'] = maintenance url = 'equipamento/edit/' + str(id_equip) + '/' code, xml = self.submit({'equipamento': equip_map}, 'POST', url) return self.response(code, xml)
[ "def", "edit", "(", "self", ",", "id_equip", ",", "nome", ",", "id_tipo_equipamento", ",", "id_modelo", ",", "maintenance", "=", "None", ")", ":", "equip_map", "=", "dict", "(", ")", "equip_map", "[", "'id_equip'", "]", "=", "id_equip", "equip_map", "[", "'id_tipo_equipamento'", "]", "=", "id_tipo_equipamento", "equip_map", "[", "'id_modelo'", "]", "=", "id_modelo", "equip_map", "[", "'nome'", "]", "=", "nome", "if", "maintenance", "is", "not", "None", ":", "equip_map", "[", "'maintenance'", "]", "=", "maintenance", "url", "=", "'equipamento/edit/'", "+", "str", "(", "id_equip", ")", "+", "'/'", "code", ",", "xml", "=", "self", ".", "submit", "(", "{", "'equipamento'", ":", "equip_map", "}", ",", "'POST'", ",", "url", ")", "return", "self", ".", "response", "(", "code", ",", "xml", ")" ]
45.552632
29.552632
def color_nodes(graph, labelattr='label', brightness=.878, outof=None, sat_adjust=None): """ Colors edges and nodes by nid """ import plottool as pt import utool as ut node_to_lbl = nx.get_node_attributes(graph, labelattr) unique_lbls = sorted(set(node_to_lbl.values())) ncolors = len(unique_lbls) if outof is None: if (ncolors) == 1: unique_colors = [pt.LIGHT_BLUE] elif (ncolors) == 2: # https://matplotlib.org/examples/color/named_colors.html unique_colors = ['royalblue', 'orange'] unique_colors = list(map(pt.color_funcs.ensure_base01, unique_colors)) else: unique_colors = pt.distinct_colors(ncolors, brightness=brightness) else: unique_colors = pt.distinct_colors(outof, brightness=brightness) if sat_adjust: unique_colors = [ pt.color_funcs.adjust_hsv_of_rgb(c, sat_adjust=sat_adjust) for c in unique_colors ] # Find edges and aids strictly between two nids if outof is None: lbl_to_color = ut.dzip(unique_lbls, unique_colors) else: gray = pt.color_funcs.ensure_base01('lightgray') unique_colors = [gray] + unique_colors offset = max(1, min(unique_lbls)) - 1 node_to_lbl = ut.map_vals(lambda nid: max(0, nid - offset), node_to_lbl) lbl_to_color = ut.dzip(range(outof + 1), unique_colors) node_to_color = ut.map_vals(lbl_to_color, node_to_lbl) nx.set_node_attributes(graph, name='color', values=node_to_color) ut.nx_ensure_agraph_color(graph)
[ "def", "color_nodes", "(", "graph", ",", "labelattr", "=", "'label'", ",", "brightness", "=", ".878", ",", "outof", "=", "None", ",", "sat_adjust", "=", "None", ")", ":", "import", "plottool", "as", "pt", "import", "utool", "as", "ut", "node_to_lbl", "=", "nx", ".", "get_node_attributes", "(", "graph", ",", "labelattr", ")", "unique_lbls", "=", "sorted", "(", "set", "(", "node_to_lbl", ".", "values", "(", ")", ")", ")", "ncolors", "=", "len", "(", "unique_lbls", ")", "if", "outof", "is", "None", ":", "if", "(", "ncolors", ")", "==", "1", ":", "unique_colors", "=", "[", "pt", ".", "LIGHT_BLUE", "]", "elif", "(", "ncolors", ")", "==", "2", ":", "# https://matplotlib.org/examples/color/named_colors.html", "unique_colors", "=", "[", "'royalblue'", ",", "'orange'", "]", "unique_colors", "=", "list", "(", "map", "(", "pt", ".", "color_funcs", ".", "ensure_base01", ",", "unique_colors", ")", ")", "else", ":", "unique_colors", "=", "pt", ".", "distinct_colors", "(", "ncolors", ",", "brightness", "=", "brightness", ")", "else", ":", "unique_colors", "=", "pt", ".", "distinct_colors", "(", "outof", ",", "brightness", "=", "brightness", ")", "if", "sat_adjust", ":", "unique_colors", "=", "[", "pt", ".", "color_funcs", ".", "adjust_hsv_of_rgb", "(", "c", ",", "sat_adjust", "=", "sat_adjust", ")", "for", "c", "in", "unique_colors", "]", "# Find edges and aids strictly between two nids", "if", "outof", "is", "None", ":", "lbl_to_color", "=", "ut", ".", "dzip", "(", "unique_lbls", ",", "unique_colors", ")", "else", ":", "gray", "=", "pt", ".", "color_funcs", ".", "ensure_base01", "(", "'lightgray'", ")", "unique_colors", "=", "[", "gray", "]", "+", "unique_colors", "offset", "=", "max", "(", "1", ",", "min", "(", "unique_lbls", ")", ")", "-", "1", "node_to_lbl", "=", "ut", ".", "map_vals", "(", "lambda", "nid", ":", "max", "(", "0", ",", "nid", "-", "offset", ")", ",", "node_to_lbl", ")", "lbl_to_color", "=", "ut", ".", "dzip", "(", "range", "(", "outof", "+", "1", ")", ",", "unique_colors", ")", "node_to_color", "=", "ut", ".", "map_vals", "(", "lbl_to_color", ",", "node_to_lbl", ")", "nx", ".", "set_node_attributes", "(", "graph", ",", "name", "=", "'color'", ",", "values", "=", "node_to_color", ")", "ut", ".", "nx_ensure_agraph_color", "(", "graph", ")" ]
42.432432
19.378378
def IsEnabled(self, *args, **kwargs): "check if all menu items are enabled" for i in range(self.GetMenuItemCount()): it = self.FindItemByPosition(i) if not it.IsEnabled(): return False return True
[ "def", "IsEnabled", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "i", "in", "range", "(", "self", ".", "GetMenuItemCount", "(", ")", ")", ":", "it", "=", "self", ".", "FindItemByPosition", "(", "i", ")", "if", "not", "it", ".", "IsEnabled", "(", ")", ":", "return", "False", "return", "True" ]
37.285714
8.428571
def main(self): """ Run all the methods required for pipeline outputs """ self.fasta_records() self.fasta_stats() self.find_largest_contig() self.find_genome_length() self.find_num_contigs() self.find_n50() self.perform_pilon() self.clear_attributes()
[ "def", "main", "(", "self", ")", ":", "self", ".", "fasta_records", "(", ")", "self", ".", "fasta_stats", "(", ")", "self", ".", "find_largest_contig", "(", ")", "self", ".", "find_genome_length", "(", ")", "self", ".", "find_num_contigs", "(", ")", "self", ".", "find_n50", "(", ")", "self", ".", "perform_pilon", "(", ")", "self", ".", "clear_attributes", "(", ")" ]
27.333333
10.666667
async def copy_to(self, dest, container, buffering = True): """ Coroutine method to copy content from this stream to another stream. """ if self.eof: await dest.write(u'' if self.isunicode else b'', True) elif self.errored: await dest.error(container) else: try: while not self.eof: await self.prepareRead(container) data = self.readonce() try: await dest.write(data, container, self.eof, buffering = buffering) except IOError: break except: async def _cleanup(): try: await dest.error(container) except IOError: pass container.subroutine(_cleanup(), False) raise finally: self.close(container.scheduler)
[ "async", "def", "copy_to", "(", "self", ",", "dest", ",", "container", ",", "buffering", "=", "True", ")", ":", "if", "self", ".", "eof", ":", "await", "dest", ".", "write", "(", "u''", "if", "self", ".", "isunicode", "else", "b''", ",", "True", ")", "elif", "self", ".", "errored", ":", "await", "dest", ".", "error", "(", "container", ")", "else", ":", "try", ":", "while", "not", "self", ".", "eof", ":", "await", "self", ".", "prepareRead", "(", "container", ")", "data", "=", "self", ".", "readonce", "(", ")", "try", ":", "await", "dest", ".", "write", "(", "data", ",", "container", ",", "self", ".", "eof", ",", "buffering", "=", "buffering", ")", "except", "IOError", ":", "break", "except", ":", "async", "def", "_cleanup", "(", ")", ":", "try", ":", "await", "dest", ".", "error", "(", "container", ")", "except", "IOError", ":", "pass", "container", ".", "subroutine", "(", "_cleanup", "(", ")", ",", "False", ")", "raise", "finally", ":", "self", ".", "close", "(", "container", ".", "scheduler", ")" ]
36.37037
14.740741
def alt_names(names: str) -> Callable[..., Any]: """Add alternative names to you custom commands. `names` is a single string with a space separated list of aliases for the decorated command. """ names_split = names.split() def decorator(func: Callable[..., Any]) -> Callable[..., Any]: func.alt_names = names_split # type: ignore return func return decorator
[ "def", "alt_names", "(", "names", ":", "str", ")", "->", "Callable", "[", "...", ",", "Any", "]", ":", "names_split", "=", "names", ".", "split", "(", ")", "def", "decorator", "(", "func", ":", "Callable", "[", "...", ",", "Any", "]", ")", "->", "Callable", "[", "...", ",", "Any", "]", ":", "func", ".", "alt_names", "=", "names_split", "# type: ignore", "return", "func", "return", "decorator" ]
32.833333
19.25
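Demonstrating the decorator end to end; the definition is repeated from the record above so the snippet runs standalone, and the alias list lands on the function as an attribute a command dispatcher can read later:

from typing import Any, Callable

def alt_names(names: str) -> Callable[..., Any]:
    names_split = names.split()
    def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
        func.alt_names = names_split  # type: ignore
        return func
    return decorator

@alt_names('ls dir')
def list_command():
    pass

print(list_command.alt_names)  # ['ls', 'dir']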
def _let_to_py_ast(ctx: GeneratorContext, node: Let) -> GeneratedPyAST: """Return a Python AST Node for a `let*` expression.""" assert node.op == NodeOp.LET with ctx.new_symbol_table("let"): let_body_ast: List[ast.AST] = [] for binding in node.bindings: init_node = binding.init assert init_node is not None init_ast = gen_py_ast(ctx, init_node) binding_name = genname(munge(binding.name)) let_body_ast.extend(init_ast.dependencies) let_body_ast.append( ast.Assign( targets=[ast.Name(id=binding_name, ctx=ast.Store())], value=init_ast.node, ) ) ctx.symbol_table.new_symbol( sym.symbol(binding.name), binding_name, LocalType.LET ) let_result_name = genname("let_result") body_ast = _synthetic_do_to_py_ast(ctx, node.body) let_body_ast.extend(map(statementize, body_ast.dependencies)) let_body_ast.append( ast.Assign( targets=[ast.Name(id=let_result_name, ctx=ast.Store())], value=body_ast.node, ) ) return GeneratedPyAST( node=ast.Name(id=let_result_name, ctx=ast.Load()), dependencies=let_body_ast )
[ "def", "_let_to_py_ast", "(", "ctx", ":", "GeneratorContext", ",", "node", ":", "Let", ")", "->", "GeneratedPyAST", ":", "assert", "node", ".", "op", "==", "NodeOp", ".", "LET", "with", "ctx", ".", "new_symbol_table", "(", "\"let\"", ")", ":", "let_body_ast", ":", "List", "[", "ast", ".", "AST", "]", "=", "[", "]", "for", "binding", "in", "node", ".", "bindings", ":", "init_node", "=", "binding", ".", "init", "assert", "init_node", "is", "not", "None", "init_ast", "=", "gen_py_ast", "(", "ctx", ",", "init_node", ")", "binding_name", "=", "genname", "(", "munge", "(", "binding", ".", "name", ")", ")", "let_body_ast", ".", "extend", "(", "init_ast", ".", "dependencies", ")", "let_body_ast", ".", "append", "(", "ast", ".", "Assign", "(", "targets", "=", "[", "ast", ".", "Name", "(", "id", "=", "binding_name", ",", "ctx", "=", "ast", ".", "Store", "(", ")", ")", "]", ",", "value", "=", "init_ast", ".", "node", ",", ")", ")", "ctx", ".", "symbol_table", ".", "new_symbol", "(", "sym", ".", "symbol", "(", "binding", ".", "name", ")", ",", "binding_name", ",", "LocalType", ".", "LET", ")", "let_result_name", "=", "genname", "(", "\"let_result\"", ")", "body_ast", "=", "_synthetic_do_to_py_ast", "(", "ctx", ",", "node", ".", "body", ")", "let_body_ast", ".", "extend", "(", "map", "(", "statementize", ",", "body_ast", ".", "dependencies", ")", ")", "let_body_ast", ".", "append", "(", "ast", ".", "Assign", "(", "targets", "=", "[", "ast", ".", "Name", "(", "id", "=", "let_result_name", ",", "ctx", "=", "ast", ".", "Store", "(", ")", ")", "]", ",", "value", "=", "body_ast", ".", "node", ",", ")", ")", "return", "GeneratedPyAST", "(", "node", "=", "ast", ".", "Name", "(", "id", "=", "let_result_name", ",", "ctx", "=", "ast", ".", "Load", "(", ")", ")", ",", "dependencies", "=", "let_body_ast", ")" ]
37.6
18.085714
def get_data(context, resource, **kwargs): """Retrieve data field from a resource""" url_suffix = '' if 'keys' in kwargs and kwargs['keys']: url_suffix = '/?keys=%s' % ','.join(kwargs.pop('keys')) uri = '%s/%s/%s/data%s' % (context.dci_cs_api, resource, kwargs.pop('id'), url_suffix) r = context.session.get(uri, timeout=HTTP_TIMEOUT, params=kwargs) return r
[ "def", "get_data", "(", "context", ",", "resource", ",", "*", "*", "kwargs", ")", ":", "url_suffix", "=", "''", "if", "'keys'", "in", "kwargs", "and", "kwargs", "[", "'keys'", "]", ":", "url_suffix", "=", "'/?keys=%s'", "%", "','", ".", "join", "(", "kwargs", ".", "pop", "(", "'keys'", ")", ")", "uri", "=", "'%s/%s/%s/data%s'", "%", "(", "context", ".", "dci_cs_api", ",", "resource", ",", "kwargs", ".", "pop", "(", "'id'", ")", ",", "url_suffix", ")", "r", "=", "context", ".", "session", ".", "get", "(", "uri", ",", "timeout", "=", "HTTP_TIMEOUT", ",", "params", "=", "kwargs", ")", "return", "r" ]
34.416667
22.166667
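A hedged call sketch for get_data; `context` stands in for a dciclient-style context object carrying `dci_cs_api` and a requests session, and the resource name and id below are invented.

# Hypothetical: fetch only selected keys of a job's data field.
r = get_data(context, 'jobs', id='abc123', keys=['results', 'logs'])
print(r.status_code)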
def recarray2dict(recarray): """Return numpy.recarray as dict.""" # TODO: subarrays result = {} for descr, value in zip(recarray.dtype.descr, recarray): name, dtype = descr[:2] if dtype[1] == 'S': value = bytes2str(stripnull(value)) elif value.ndim < 2: value = value.tolist() result[name] = value return result
[ "def", "recarray2dict", "(", "recarray", ")", ":", "# TODO: subarrays", "result", "=", "{", "}", "for", "descr", ",", "value", "in", "zip", "(", "recarray", ".", "dtype", ".", "descr", ",", "recarray", ")", ":", "name", ",", "dtype", "=", "descr", "[", ":", "2", "]", "if", "dtype", "[", "1", "]", "==", "'S'", ":", "value", "=", "bytes2str", "(", "stripnull", "(", "value", ")", ")", "elif", "value", ".", "ndim", "<", "2", ":", "value", "=", "value", ".", "tolist", "(", ")", "result", "[", "name", "]", "=", "value", "return", "result" ]
31.333333
13.166667
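A small self-contained check of the conversion above, assuming `recarray2dict` is in scope; string ('S') fields are avoided here because they route through the module's `bytes2str`/`stripnull` helpers.

import numpy as np

# One record of a structured array: two scalar fields, no subarrays.
rec = np.array([(7, 3.5)], dtype=[('count', 'i4'), ('ratio', 'f8')])[0]
print(recarray2dict(rec))  # {'count': 7, 'ratio': 3.5}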
def _single_true(iterable): '''This returns True if only one True-ish element exists in `iterable`. Parameters ---------- iterable : iterable Returns ------- bool True if only one True-ish element exists in `iterable`. False otherwise. ''' # return True if exactly one true found iterator = iter(iterable) # consume from "i" until first true or it's exhausted has_true = any(iterator) # carry on consuming until another true value / exhausted has_another_true = any(iterator) return has_true and not has_another_true
[ "def", "_single_true", "(", "iterable", ")", ":", "# return True if exactly one true found", "iterator", "=", "iter", "(", "iterable", ")", "# consume from \"i\" until first true or it's exhausted", "has_true", "=", "any", "(", "iterator", ")", "# carry on consuming until another true value / exhausted", "has_another_true", "=", "any", "(", "iterator", ")", "return", "has_true", "and", "not", "has_another_true" ]
21.846154
27.384615
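A few quick checks of the exactly-one-truthy contract, assuming `_single_true` from above is in scope.

assert _single_true([0, 1, 0]) is True
assert _single_true([1, 1]) is False       # more than one truthy element
assert _single_true([]) is False           # no truthy element at all
assert _single_true(x > 2 for x in (1, 3)) is True  # any() consumes the generator lazily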
def get_virtualenv_path(self, requirements_option: RequirementsOptions, requirements_hash: Optional[str]) -> Path: """ Returns the path to the virtualenv for the current state of the repository. """ if requirements_option == RequirementsOptions.no_requirements: venv_name = "no_requirements" else: venv_name = requirements_hash return Path(self._arca.base_dir) / "venvs" / venv_name
[ "def", "get_virtualenv_path", "(", "self", ",", "requirements_option", ":", "RequirementsOptions", ",", "requirements_hash", ":", "Optional", "[", "str", "]", ")", "->", "Path", ":", "if", "requirements_option", "==", "RequirementsOptions", ".", "no_requirements", ":", "venv_name", "=", "\"no_requirements\"", "else", ":", "venv_name", "=", "requirements_hash", "return", "Path", "(", "self", ".", "_arca", ".", "base_dir", ")", "/", "\"venvs\"", "/", "venv_name" ]
44.2
23.4
def peek(self): """ Obtain the next record from this result without consuming it. This leaves the record in the buffer for further processing. :returns: the next :class:`.Record` or :const:`None` if none remain """ records = self._records if records: return records[0] if not self.attached(): return None if self.attached(): self._session.send() while self.attached() and not records: self._session.fetch() if records: return records[0] return None
[ "def", "peek", "(", "self", ")", ":", "records", "=", "self", ".", "_records", "if", "records", ":", "return", "records", "[", "0", "]", "if", "not", "self", ".", "attached", "(", ")", ":", "return", "None", "if", "self", ".", "attached", "(", ")", ":", "self", ".", "_session", ".", "send", "(", ")", "while", "self", ".", "attached", "(", ")", "and", "not", "records", ":", "self", ".", "_session", ".", "fetch", "(", ")", "if", "records", ":", "return", "records", "[", "0", "]", "return", "None" ]
32.666667
15.222222
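A hedged sketch of typical use, assuming `result` is a driver result object exposing the `peek` above; the query that produced it is not shown.

record = result.peek()      # look at the next record without consuming it
if record is not None:
    print(record)
for record in result:       # iteration still starts from the peeked record
    print(record)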
def _initialize(self): """Read the SharQ configuration and set appropriate variables. Open a redis connection pool and load all the Lua scripts. """ self._key_prefix = self._config.get('redis', 'key_prefix') self._job_expire_interval = int( self._config.get('sharq', 'job_expire_interval') ) self._default_job_requeue_limit = int( self._config.get('sharq', 'default_job_requeue_limit') ) # initialize redis redis_connection_type = self._config.get('redis', 'conn_type') db = self._config.get('redis', 'db') if redis_connection_type == 'unix_sock': self._r = redis.StrictRedis( db=db, unix_socket_path=self._config.get('redis', 'unix_socket_path') ) elif redis_connection_type == 'tcp_sock': self._r = redis.StrictRedis( db=db, host=self._config.get('redis', 'host'), port=self._config.get('redis', 'port') ) self._load_lua_scripts()
[ "def", "_initialize", "(", "self", ")", ":", "self", ".", "_key_prefix", "=", "self", ".", "_config", ".", "get", "(", "'redis'", ",", "'key_prefix'", ")", "self", ".", "_job_expire_interval", "=", "int", "(", "self", ".", "_config", ".", "get", "(", "'sharq'", ",", "'job_expire_interval'", ")", ")", "self", ".", "_default_job_requeue_limit", "=", "int", "(", "self", ".", "_config", ".", "get", "(", "'sharq'", ",", "'default_job_requeue_limit'", ")", ")", "# initalize redis", "redis_connection_type", "=", "self", ".", "_config", ".", "get", "(", "'redis'", ",", "'conn_type'", ")", "db", "=", "self", ".", "_config", ".", "get", "(", "'redis'", ",", "'db'", ")", "if", "redis_connection_type", "==", "'unix_sock'", ":", "self", ".", "_r", "=", "redis", ".", "StrictRedis", "(", "db", "=", "db", ",", "unix_socket_path", "=", "self", ".", "_config", ".", "get", "(", "'redis'", ",", "'unix_socket_path'", ")", ")", "elif", "redis_connection_type", "==", "'tcp_sock'", ":", "self", ".", "_r", "=", "redis", ".", "StrictRedis", "(", "db", "=", "db", ",", "host", "=", "self", ".", "_config", ".", "get", "(", "'redis'", ",", "'host'", ")", ",", "port", "=", "self", ".", "_config", ".", "get", "(", "'redis'", ",", "'port'", ")", ")", "self", ".", "_load_lua_scripts", "(", ")" ]
37.137931
17.413793
def get_host_usage(self): """ Get details of CPU and RAM usage of this PC """ import psutil process_names = [proc.name for proc in psutil.process_iter()] cpu_pct = psutil.cpu_percent(interval=1) mem = psutil.virtual_memory() return str(cpu_pct), str(len(process_names)), str(mem.available), str(mem.total)
[ "def", "get_host_usage", "(", "self", ")", ":", "import", "psutil", "process_names", "=", "[", "proc", ".", "name", "for", "proc", "in", "psutil", ".", "process_iter", "(", ")", "]", "cpu_pct", "=", "psutil", ".", "cpu_percent", "(", "interval", "=", "1", ")", "mem", "=", "psutil", ".", "virtual_memory", "(", ")", "return", "str", "(", "cpu_pct", ")", ",", "str", "(", "len", "(", "process_names", ")", ")", ",", "str", "(", "mem", ".", "available", ")", ",", "str", "(", "mem", ".", "total", ")" ]
40
14.555556
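A standalone sketch of the same readings against the current psutil API, where `Process.name` is a method rather than an attribute (the snippet above targets the older attribute-style API).

import psutil

process_names = [proc.name() for proc in psutil.process_iter()]
cpu_pct = psutil.cpu_percent(interval=1)   # blocks for ~1s to sample CPU usage
mem = psutil.virtual_memory()
print(cpu_pct, len(process_names), mem.available, mem.total)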
def setSSID(self, ssid, wifiInterfaceId=1, timeout=1): """Set the SSID (name of the Wifi network) :param str ssid: the SSID/wifi network name :param int wifiInterfaceId: the id of the Wifi interface :param float timeout: the timeout to wait for the action to be executed """ namespace = Wifi.getServiceType("setSSID") + str(wifiInterfaceId) uri = self.getControlURL(namespace) self.execute(uri, namespace, "SetSSID", timeout=timeout, NewSSID=ssid)
[ "def", "setSSID", "(", "self", ",", "ssid", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"setChannel\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"SetChannel\"", ",", "timeout", "=", "timeout", ",", "NewSSID", "=", "ssid", ")" ]
46.272727
22.545455
def add(self, elt): """Generic function to add objects into the scheduler daemon's internal lists:: Brok -> self.broks Check -> self.checks Notification -> self.actions EventHandler -> self.actions For an ExternalCommand, tries to resolve the command :param elt: element to add :type elt: :return: None """ if elt is None: return logger.debug("Adding: %s / %s", elt.my_type, elt.__dict__) fun = self.__add_actions.get(elt.__class__, None) if fun: fun(self, elt) else: logger.warning("self.add(): Unmanaged object class: %s (object=%r)", elt.__class__, elt)
[ "def", "add", "(", "self", ",", "elt", ")", ":", "if", "elt", "is", "None", ":", "return", "logger", ".", "debug", "(", "\"Adding: %s / %s\"", ",", "elt", ".", "my_type", ",", "elt", ".", "__dict__", ")", "fun", "=", "self", ".", "__add_actions", ".", "get", "(", "elt", ".", "__class__", ",", "None", ")", "if", "fun", ":", "fun", "(", "self", ",", "elt", ")", "else", ":", "logger", ".", "warning", "(", "\"self.add(): Unmanaged object class: %s (object=%r)\"", ",", "elt", ".", "__class__", ",", "elt", ")" ]
32.952381
19.52381
def cmp_code_objects(version, is_pypy, code_obj1, code_obj2, verify, name=''): """ Compare two code-objects. This is the main part of this module. """ # print code_obj1, type(code_obj2) assert iscode(code_obj1), \ "cmp_code_object first object type is %s, not code" % type(code_obj1) assert iscode(code_obj2), \ "cmp_code_object second object type is %s, not code" % type(code_obj2) # print dir(code_obj1) if isinstance(code_obj1, object): # new style classes (Python 2.2) # assume _both_ code objects to be new style classes assert dir(code_obj1) == dir(code_obj2) else: # old style classes assert dir(code_obj1) == code_obj1.__members__ assert dir(code_obj2) == code_obj2.__members__ assert code_obj1.__members__ == code_obj2.__members__ if name == '__main__': name = code_obj1.co_name else: name = '%s.%s' % (name, code_obj1.co_name) if name == '.?': name = '__main__' if isinstance(code_obj1, object) and code_equal(code_obj1, code_obj2): # use the new style code-classes' __cmp__ method, which # should be faster and more sophisticated # if this compare fails, we use the old routine to # find out what exactly is not equal # if this compare succeeds, simply return # return pass if isinstance(code_obj1, object): members = [x for x in dir(code_obj1) if x.startswith('co_')] else: members = dir(code_obj1) members.sort() # ; members.reverse() tokens1 = None for member in members: if member in __IGNORE_CODE_MEMBERS__ or verify != 'verify': pass elif member == 'co_code': if verify != 'strong': continue scanner = get_scanner(version, is_pypy, show_asm=False) global JUMP_OPS JUMP_OPS = list(scan.JUMP_OPS) + ['JUMP_BACK'] # use changed Token class # We (re)set this here to save exception handling, # which would get confusing. scanner.setTokenClass(Token) try: # ingest both code-objects tokens1, customize = scanner.ingest(code_obj1) del customize # save memory tokens2, customize = scanner.ingest(code_obj2) del customize # save memory finally: scanner.resetTokenClass() # restore Token class targets1 = dis.findlabels(code_obj1.co_code) tokens1 = [t for t in tokens1 if t.kind != 'COME_FROM'] tokens2 = [t for t in tokens2 if t.kind != 'COME_FROM'] i1 = 0; i2 = 0 offset_map = {}; check_jumps = {} while i1 < len(tokens1): if i2 >= len(tokens2): if len(tokens1) == len(tokens2) + 2 \ and tokens1[-1].kind == 'RETURN_VALUE' \ and tokens1[-2].kind == 'LOAD_CONST' \ and tokens1[-2].pattr is None \ and tokens1[-3].kind == 'RETURN_VALUE': break else: raise CmpErrorCodeLen(name, tokens1, tokens2) offset_map[tokens1[i1].offset] = tokens2[i2].offset for idx1, idx2, offset2 in check_jumps.get(tokens1[i1].offset, []): if offset2 != tokens2[i2].offset: raise CmpErrorCode(name, tokens1[idx1].offset, tokens1[idx1], tokens2[idx2], tokens1, tokens2) if tokens1[i1].kind != tokens2[i2].kind: if tokens1[i1].kind == 'LOAD_CONST' == tokens2[i2].kind: i = 1 while tokens1[i1+i].kind == 'LOAD_CONST': i += 1 if tokens1[i1+i].kind.startswith(('BUILD_TUPLE', 'BUILD_LIST')) \ and i == int(tokens1[i1+i].kind.split('_')[-1]): t = tuple([ elem.pattr for elem in tokens1[i1:i1+i] ]) if t != tokens2[i2].pattr: raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1], tokens2[i2], tokens1, tokens2) i1 += i + 1 i2 += 1 continue elif i == 2 and tokens1[i1+i].kind == 'ROT_TWO' and tokens2[i2+1].kind == 'UNPACK_SEQUENCE_2': i1 += 3 i2 += 2 continue elif i == 2 and tokens1[i1+i].kind in BIN_OP_FUNCS: f = BIN_OP_FUNCS[tokens1[i1+i].kind] if f(tokens1[i1].pattr, tokens1[i1+1].pattr) == tokens2[i2].pattr: i1 += 3 i2 += 1 continue elif tokens1[i1].kind == 
'UNARY_NOT': if tokens2[i2].kind == 'POP_JUMP_IF_TRUE': if tokens1[i1+1].kind == 'POP_JUMP_IF_FALSE': i1 += 2 i2 += 1 continue elif tokens2[i2].kind == 'POP_JUMP_IF_FALSE': if tokens1[i1+1].kind == 'POP_JUMP_IF_TRUE': i1 += 2 i2 += 1 continue elif tokens1[i1].kind in ('JUMP_FORWARD', 'JUMP_BACK') \ and tokens1[i1-1].kind == 'RETURN_VALUE' \ and tokens2[i2-1].kind in ('RETURN_VALUE', 'RETURN_END_IF') \ and int(tokens1[i1].offset) not in targets1: i1 += 1 continue elif tokens1[i1].kind == 'JUMP_BACK' and tokens2[i2].kind == 'CONTINUE': # FIXME: should make sure that offset is inside loop, not outside of it i1 += 2 i2 += 2 continue elif tokens1[i1].kind == 'JUMP_FORWARD' and tokens2[i2].kind == 'JUMP_BACK' \ and tokens1[i1+1].kind == 'JUMP_BACK' and tokens2[i2+1].kind == 'JUMP_BACK' \ and int(tokens1[i1].pattr) == int(tokens1[i1].offset) + 3: if int(tokens1[i1].pattr) == int(tokens1[i1+1].offset): i1 += 2 i2 += 2 continue elif tokens1[i1].kind == 'LOAD_NAME' and tokens2[i2].kind == 'LOAD_CONST' \ and tokens1[i1].pattr == 'None' and tokens2[i2].pattr is None: pass elif tokens1[i1].kind == 'LOAD_GLOBAL' and tokens2[i2].kind == 'LOAD_NAME' \ and tokens1[i1].pattr == tokens2[i2].pattr: pass elif tokens1[i1].kind == 'LOAD_ASSERT' and tokens2[i2].kind == 'LOAD_NAME' \ and tokens1[i1].pattr == tokens2[i2].pattr: pass elif (tokens1[i1].kind == 'RETURN_VALUE' and tokens2[i2].kind == 'RETURN_END_IF'): pass elif (tokens1[i1].kind == 'BUILD_TUPLE_0' and tokens2[i2].pattr == ()): pass else: raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1], tokens2[i2], tokens1, tokens2) elif tokens1[i1].kind in JUMP_OPS and tokens1[i1].pattr != tokens2[i2].pattr: if tokens1[i1].kind == 'JUMP_BACK': dest1 = int(tokens1[i1].pattr) dest2 = int(tokens2[i2].pattr) if offset_map[dest1] != dest2: raise CmpErrorCode(name, tokens1[i1].offset, tokens1[i1], tokens2[i2], tokens1, tokens2) else: # import pdb; pdb.set_trace() try: dest1 = int(tokens1[i1].pattr) if dest1 in check_jumps: check_jumps[dest1].append((i1, i2, dest2)) else: check_jumps[dest1] = [(i1, i2, dest2)] except: pass i1 += 1 i2 += 1 del tokens1, tokens2 # save memory elif member == 'co_consts': # partial optimization can make the co_consts look different, # so we'll just compare the code consts codes1 = ( c for c in code_obj1.co_consts if hasattr(c, 'co_consts') ) codes2 = ( c for c in code_obj2.co_consts if hasattr(c, 'co_consts') ) for c1, c2 in zip(codes1, codes2): cmp_code_objects(version, is_pypy, c1, c2, verify, name=name) elif member == 'co_flags': flags1 = code_obj1.co_flags flags2 = code_obj2.co_flags if is_pypy: # For PYPY for now we don't care about PYPY_SOURCE_IS_UTF8: flags2 &= ~0x0100 # PYPY_SOURCE_IS_UTF8 # We also don't care about COROUTINE or GENERATOR for now flags1 &= ~0x000000a0 flags2 &= ~0x000000a0 if flags1 != flags2: raise CmpErrorMember(name, 'co_flags', pretty_flags(flags1), pretty_flags(flags2)) else: # all other members must be equal if getattr(code_obj1, member) != getattr(code_obj2, member): raise CmpErrorMember(name, member, getattr(code_obj1, member), getattr(code_obj2, member))
[ "def", "cmp_code_objects", "(", "version", ",", "is_pypy", ",", "code_obj1", ",", "code_obj2", ",", "verify", ",", "name", "=", "''", ")", ":", "# print code_obj1, type(code_obj2)", "assert", "iscode", "(", "code_obj1", ")", ",", "\"cmp_code_object first object type is %s, not code\"", "%", "type", "(", "code_obj1", ")", "assert", "iscode", "(", "code_obj2", ")", ",", "\"cmp_code_object second object type is %s, not code\"", "%", "type", "(", "code_obj2", ")", "# print dir(code_obj1)", "if", "isinstance", "(", "code_obj1", ",", "object", ")", ":", "# new style classes (Python 2.2)", "# assume _both_ code objects to be new stle classes", "assert", "dir", "(", "code_obj1", ")", "==", "dir", "(", "code_obj2", ")", "else", ":", "# old style classes", "assert", "dir", "(", "code_obj1", ")", "==", "code_obj1", ".", "__members__", "assert", "dir", "(", "code_obj2", ")", "==", "code_obj2", ".", "__members__", "assert", "code_obj1", ".", "__members__", "==", "code_obj2", ".", "__members__", "if", "name", "==", "'__main__'", ":", "name", "=", "code_obj1", ".", "co_name", "else", ":", "name", "=", "'%s.%s'", "%", "(", "name", ",", "code_obj1", ".", "co_name", ")", "if", "name", "==", "'.?'", ":", "name", "=", "'__main__'", "if", "isinstance", "(", "code_obj1", ",", "object", ")", "and", "code_equal", "(", "code_obj1", ",", "code_obj2", ")", ":", "# use the new style code-classes' __cmp__ method, which", "# should be faster and more sophisticated", "# if this compare fails, we use the old routine to", "# find out, what exactly is nor equal", "# if this compare succeds, simply return", "# return", "pass", "if", "isinstance", "(", "code_obj1", ",", "object", ")", ":", "members", "=", "[", "x", "for", "x", "in", "dir", "(", "code_obj1", ")", "if", "x", ".", "startswith", "(", "'co_'", ")", "]", "else", ":", "members", "=", "dir", "(", "code_obj1", ")", "members", ".", "sort", "(", ")", "# ; members.reverse()", "tokens1", "=", "None", "for", "member", "in", "members", ":", "if", "member", "in", "__IGNORE_CODE_MEMBERS__", "or", "verify", "!=", "'verify'", ":", "pass", "elif", "member", "==", "'co_code'", ":", "if", "verify", "!=", "'strong'", ":", "continue", "scanner", "=", "get_scanner", "(", "version", ",", "is_pypy", ",", "show_asm", "=", "False", ")", "global", "JUMP_OPS", "JUMP_OPS", "=", "list", "(", "scan", ".", "JUMP_OPS", ")", "+", "[", "'JUMP_BACK'", "]", "# use changed Token class", "# We (re)set this here to save exception handling,", "# which would get confusing.", "scanner", ".", "setTokenClass", "(", "Token", ")", "try", ":", "# ingest both code-objects", "tokens1", ",", "customize", "=", "scanner", ".", "ingest", "(", "code_obj1", ")", "del", "customize", "# save memory", "tokens2", ",", "customize", "=", "scanner", ".", "ingest", "(", "code_obj2", ")", "del", "customize", "# save memory", "finally", ":", "scanner", ".", "resetTokenClass", "(", ")", "# restore Token class", "targets1", "=", "dis", ".", "findlabels", "(", "code_obj1", ".", "co_code", ")", "tokens1", "=", "[", "t", "for", "t", "in", "tokens1", "if", "t", ".", "kind", "!=", "'COME_FROM'", "]", "tokens2", "=", "[", "t", "for", "t", "in", "tokens2", "if", "t", ".", "kind", "!=", "'COME_FROM'", "]", "i1", "=", "0", "i2", "=", "0", "offset_map", "=", "{", "}", "check_jumps", "=", "{", "}", "while", "i1", "<", "len", "(", "tokens1", ")", ":", "if", "i2", ">=", "len", "(", "tokens2", ")", ":", "if", "len", "(", "tokens1", ")", "==", "len", "(", "tokens2", ")", "+", "2", "and", "tokens1", "[", "-", "1", "]", ".", "kind", "==", 
"'RETURN_VALUE'", "and", "tokens1", "[", "-", "2", "]", ".", "kind", "==", "'LOAD_CONST'", "and", "tokens1", "[", "-", "2", "]", ".", "pattr", "is", "None", "and", "tokens1", "[", "-", "3", "]", ".", "kind", "==", "'RETURN_VALUE'", ":", "break", "else", ":", "raise", "CmpErrorCodeLen", "(", "name", ",", "tokens1", ",", "tokens2", ")", "offset_map", "[", "tokens1", "[", "i1", "]", ".", "offset", "]", "=", "tokens2", "[", "i2", "]", ".", "offset", "for", "idx1", ",", "idx2", ",", "offset2", "in", "check_jumps", ".", "get", "(", "tokens1", "[", "i1", "]", ".", "offset", ",", "[", "]", ")", ":", "if", "offset2", "!=", "tokens2", "[", "i2", "]", ".", "offset", ":", "raise", "CmpErrorCode", "(", "name", ",", "tokens1", "[", "idx1", "]", ".", "offset", ",", "tokens1", "[", "idx1", "]", ",", "tokens2", "[", "idx2", "]", ",", "tokens1", ",", "tokens2", ")", "if", "tokens1", "[", "i1", "]", ".", "kind", "!=", "tokens2", "[", "i2", "]", ".", "kind", ":", "if", "tokens1", "[", "i1", "]", ".", "kind", "==", "'LOAD_CONST'", "==", "tokens2", "[", "i2", "]", ".", "kind", ":", "i", "=", "1", "while", "tokens1", "[", "i1", "+", "i", "]", ".", "kind", "==", "'LOAD_CONST'", ":", "i", "+=", "1", "if", "tokens1", "[", "i1", "+", "i", "]", ".", "kind", ".", "startswith", "(", "(", "'BUILD_TUPLE'", ",", "'BUILD_LIST'", ")", ")", "and", "i", "==", "int", "(", "tokens1", "[", "i1", "+", "i", "]", ".", "kind", ".", "split", "(", "'_'", ")", "[", "-", "1", "]", ")", ":", "t", "=", "tuple", "(", "[", "elem", ".", "pattr", "for", "elem", "in", "tokens1", "[", "i1", ":", "i1", "+", "i", "]", "]", ")", "if", "t", "!=", "tokens2", "[", "i2", "]", ".", "pattr", ":", "raise", "CmpErrorCode", "(", "name", ",", "tokens1", "[", "i1", "]", ".", "offset", ",", "tokens1", "[", "i1", "]", ",", "tokens2", "[", "i2", "]", ",", "tokens1", ",", "tokens2", ")", "i1", "+=", "i", "+", "1", "i2", "+=", "1", "continue", "elif", "i", "==", "2", "and", "tokens1", "[", "i1", "+", "i", "]", ".", "kind", "==", "'ROT_TWO'", "and", "tokens2", "[", "i2", "+", "1", "]", ".", "kind", "==", "'UNPACK_SEQUENCE_2'", ":", "i1", "+=", "3", "i2", "+=", "2", "continue", "elif", "i", "==", "2", "and", "tokens1", "[", "i1", "+", "i", "]", ".", "kind", "in", "BIN_OP_FUNCS", ":", "f", "=", "BIN_OP_FUNCS", "[", "tokens1", "[", "i1", "+", "i", "]", ".", "kind", "]", "if", "f", "(", "tokens1", "[", "i1", "]", ".", "pattr", ",", "tokens1", "[", "i1", "+", "1", "]", ".", "pattr", ")", "==", "tokens2", "[", "i2", "]", ".", "pattr", ":", "i1", "+=", "3", "i2", "+=", "1", "continue", "elif", "tokens1", "[", "i1", "]", ".", "kind", "==", "'UNARY_NOT'", ":", "if", "tokens2", "[", "i2", "]", ".", "kind", "==", "'POP_JUMP_IF_TRUE'", ":", "if", "tokens1", "[", "i1", "+", "1", "]", ".", "kind", "==", "'POP_JUMP_IF_FALSE'", ":", "i1", "+=", "2", "i2", "+=", "1", "continue", "elif", "tokens2", "[", "i2", "]", ".", "kind", "==", "'POP_JUMP_IF_FALSE'", ":", "if", "tokens1", "[", "i1", "+", "1", "]", ".", "kind", "==", "'POP_JUMP_IF_TRUE'", ":", "i1", "+=", "2", "i2", "+=", "1", "continue", "elif", "tokens1", "[", "i1", "]", ".", "kind", "in", "(", "'JUMP_FORWARD'", ",", "'JUMP_BACK'", ")", "and", "tokens1", "[", "i1", "-", "1", "]", ".", "kind", "==", "'RETURN_VALUE'", "and", "tokens2", "[", "i2", "-", "1", "]", ".", "kind", "in", "(", "'RETURN_VALUE'", ",", "'RETURN_END_IF'", ")", "and", "int", "(", "tokens1", "[", "i1", "]", ".", "offset", ")", "not", "in", "targets1", ":", "i1", "+=", "1", "continue", "elif", "tokens1", "[", "i1", "]", ".", "kind", "==", "'JUMP_BACK'", "and", 
"tokens2", "[", "i2", "]", ".", "kind", "==", "'CONTINUE'", ":", "# FIXME: should make sure that offset is inside loop, not outside of it", "i1", "+=", "2", "i2", "+=", "2", "continue", "elif", "tokens1", "[", "i1", "]", ".", "kind", "==", "'JUMP_FORWARD'", "and", "tokens2", "[", "i2", "]", ".", "kind", "==", "'JUMP_BACK'", "and", "tokens1", "[", "i1", "+", "1", "]", ".", "kind", "==", "'JUMP_BACK'", "and", "tokens2", "[", "i2", "+", "1", "]", ".", "kind", "==", "'JUMP_BACK'", "and", "int", "(", "tokens1", "[", "i1", "]", ".", "pattr", ")", "==", "int", "(", "tokens1", "[", "i1", "]", ".", "offset", ")", "+", "3", ":", "if", "int", "(", "tokens1", "[", "i1", "]", ".", "pattr", ")", "==", "int", "(", "tokens1", "[", "i1", "+", "1", "]", ".", "offset", ")", ":", "i1", "+=", "2", "i2", "+=", "2", "continue", "elif", "tokens1", "[", "i1", "]", ".", "kind", "==", "'LOAD_NAME'", "and", "tokens2", "[", "i2", "]", ".", "kind", "==", "'LOAD_CONST'", "and", "tokens1", "[", "i1", "]", ".", "pattr", "==", "'None'", "and", "tokens2", "[", "i2", "]", ".", "pattr", "is", "None", ":", "pass", "elif", "tokens1", "[", "i1", "]", ".", "kind", "==", "'LOAD_GLOBAL'", "and", "tokens2", "[", "i2", "]", ".", "kind", "==", "'LOAD_NAME'", "and", "tokens1", "[", "i1", "]", ".", "pattr", "==", "tokens2", "[", "i2", "]", ".", "pattr", ":", "pass", "elif", "tokens1", "[", "i1", "]", ".", "kind", "==", "'LOAD_ASSERT'", "and", "tokens2", "[", "i2", "]", ".", "kind", "==", "'LOAD_NAME'", "and", "tokens1", "[", "i1", "]", ".", "pattr", "==", "tokens2", "[", "i2", "]", ".", "pattr", ":", "pass", "elif", "(", "tokens1", "[", "i1", "]", ".", "kind", "==", "'RETURN_VALUE'", "and", "tokens2", "[", "i2", "]", ".", "kind", "==", "'RETURN_END_IF'", ")", ":", "pass", "elif", "(", "tokens1", "[", "i1", "]", ".", "kind", "==", "'BUILD_TUPLE_0'", "and", "tokens2", "[", "i2", "]", ".", "pattr", "==", "(", ")", ")", ":", "pass", "else", ":", "raise", "CmpErrorCode", "(", "name", ",", "tokens1", "[", "i1", "]", ".", "offset", ",", "tokens1", "[", "i1", "]", ",", "tokens2", "[", "i2", "]", ",", "tokens1", ",", "tokens2", ")", "elif", "tokens1", "[", "i1", "]", ".", "kind", "in", "JUMP_OPS", "and", "tokens1", "[", "i1", "]", ".", "pattr", "!=", "tokens2", "[", "i2", "]", ".", "pattr", ":", "if", "tokens1", "[", "i1", "]", ".", "kind", "==", "'JUMP_BACK'", ":", "dest1", "=", "int", "(", "tokens1", "[", "i1", "]", ".", "pattr", ")", "dest2", "=", "int", "(", "tokens2", "[", "i2", "]", ".", "pattr", ")", "if", "offset_map", "[", "dest1", "]", "!=", "dest2", ":", "raise", "CmpErrorCode", "(", "name", ",", "tokens1", "[", "i1", "]", ".", "offset", ",", "tokens1", "[", "i1", "]", ",", "tokens2", "[", "i2", "]", ",", "tokens1", ",", "tokens2", ")", "else", ":", "# import pdb; pdb.set_trace()", "try", ":", "dest1", "=", "int", "(", "tokens1", "[", "i1", "]", ".", "pattr", ")", "if", "dest1", "in", "check_jumps", ":", "check_jumps", "[", "dest1", "]", ".", "append", "(", "(", "i1", ",", "i2", ",", "dest2", ")", ")", "else", ":", "check_jumps", "[", "dest1", "]", "=", "[", "(", "i1", ",", "i2", ",", "dest2", ")", "]", "except", ":", "pass", "i1", "+=", "1", "i2", "+=", "1", "del", "tokens1", ",", "tokens2", "# save memory", "elif", "member", "==", "'co_consts'", ":", "# partial optimization can make the co_consts look different,", "# so we'll just compare the code consts", "codes1", "=", "(", "c", "for", "c", "in", "code_obj1", ".", "co_consts", "if", "hasattr", "(", "c", ",", "'co_consts'", ")", ")", "codes2", "=", "(", "c", "for", "c", "in", 
"code_obj2", ".", "co_consts", "if", "hasattr", "(", "c", ",", "'co_consts'", ")", ")", "for", "c1", ",", "c2", "in", "zip", "(", "codes1", ",", "codes2", ")", ":", "cmp_code_objects", "(", "version", ",", "is_pypy", ",", "c1", ",", "c2", ",", "verify", ",", "name", "=", "name", ")", "elif", "member", "==", "'co_flags'", ":", "flags1", "=", "code_obj1", ".", "co_flags", "flags2", "=", "code_obj2", ".", "co_flags", "if", "is_pypy", ":", "# For PYPY for now we don't care about PYPY_SOURCE_IS_UTF8:", "flags2", "&=", "~", "0x0100", "# PYPY_SOURCE_IS_UTF8", "# We also don't care about COROUTINE or GENERATOR for now", "flags1", "&=", "~", "0x000000a0", "flags2", "&=", "~", "0x000000a0", "if", "flags1", "!=", "flags2", ":", "raise", "CmpErrorMember", "(", "name", ",", "'co_flags'", ",", "pretty_flags", "(", "flags1", ")", ",", "pretty_flags", "(", "flags2", ")", ")", "else", ":", "# all other members must be equal", "if", "getattr", "(", "code_obj1", ",", "member", ")", "!=", "getattr", "(", "code_obj2", ",", "member", ")", ":", "raise", "CmpErrorMember", "(", "name", ",", "member", ",", "getattr", "(", "code_obj1", ",", "member", ")", ",", "getattr", "(", "code_obj2", ",", "member", ")", ")" ]
47.586854
20.544601
def _use_widgets(objs): ''' Whether a collection of Bokeh objects contains any Widget Args: objs (seq[Model or Document]) : Returns: bool ''' from ..models.widgets import Widget return _any(objs, lambda obj: isinstance(obj, Widget))
[ "def", "_use_widgets", "(", "objs", ")", ":", "from", ".", ".", "models", ".", "widgets", "import", "Widget", "return", "_any", "(", "objs", ",", "lambda", "obj", ":", "isinstance", "(", "obj", ",", "Widget", ")", ")" ]
22.166667
25.333333
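A hedged demonstration, assuming Bokeh is installed and `_use_widgets` is in scope: a Button is a Widget subclass, while a bare plot is not.

from bokeh.models.widgets import Button
from bokeh.plotting import figure

print(_use_widgets([Button(label='go')]))  # True: Button subclasses Widget
print(_use_widgets([figure()]))            # False: a plain figure is not a Widget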
def add_note(self, note, octave=None, dynamics={}): """Add a note to the container and sort the notes from low to high. The note can either be a string, in which case you could also use the octave and dynamics arguments, or a Note object. """ if type(note) == str: if octave is not None: note = Note(note, octave, dynamics) elif len(self.notes) == 0: note = Note(note, 4, dynamics) else: if Note(note, self.notes[-1].octave) < self.notes[-1]: note = Note(note, self.notes[-1].octave + 1, dynamics) else: note = Note(note, self.notes[-1].octave, dynamics) if not hasattr(note, 'name'): raise UnexpectedObjectError("Object '%s' was not expected. " "Expecting a mingus.containers.Note object." % note) if note not in self.notes: self.notes.append(note) self.notes.sort() return self.notes
[ "def", "add_note", "(", "self", ",", "note", ",", "octave", "=", "None", ",", "dynamics", "=", "{", "}", ")", ":", "if", "type", "(", "note", ")", "==", "str", ":", "if", "octave", "is", "not", "None", ":", "note", "=", "Note", "(", "note", ",", "octave", ",", "dynamics", ")", "elif", "len", "(", "self", ".", "notes", ")", "==", "0", ":", "note", "=", "Note", "(", "note", ",", "4", ",", "dynamics", ")", "else", ":", "if", "Note", "(", "note", ",", "self", ".", "notes", "[", "-", "1", "]", ".", "octave", ")", "<", "self", ".", "notes", "[", "-", "1", "]", ":", "note", "=", "Note", "(", "note", ",", "self", ".", "notes", "[", "-", "1", "]", ".", "octave", "+", "1", ",", "dynamics", ")", "else", ":", "note", "=", "Note", "(", "note", ",", "self", ".", "notes", "[", "-", "1", "]", ".", "octave", ",", "dynamics", ")", "if", "not", "hasattr", "(", "note", ",", "'name'", ")", ":", "raise", "UnexpectedObjectError", "(", "\"Object '%s' was not expected. \"", "\"Expecting a mingus.containers.Note object.\"", "%", "note", ")", "if", "note", "not", "in", "self", ".", "notes", ":", "self", ".", "notes", ".", "append", "(", "note", ")", "self", ".", "notes", ".", "sort", "(", ")", "return", "self", ".", "notes" ]
44.565217
16.521739
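A usage sketch, assuming the method above belongs to mingus's NoteContainer (consistent with the mingus.containers.Note object its error message expects).

from mingus.containers import NoteContainer

nc = NoteContainer()
nc.add_note('C')       # empty container: defaults to octave 4
nc.add_note('E')       # placed at or above the previous note's octave
nc.add_note('G', 5)    # explicit octave
print(nc.notes)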
def multivariate_neg_logposterior(self, beta): """ Returns the negative log posterior for a model with a covariance matrix Parameters ---------- beta : np.array Contains untransformed starting values for latent_variables Returns ---------- Negative log posterior """ post = self.neg_loglik(beta) for k in range(0,self.z_no): if self.latent_variables.z_list[k].prior.covariance_prior is True: post += -self.latent_variables.z_list[k].prior.logpdf(self.custom_covariance(beta)) break else: post += -self.latent_variables.z_list[k].prior.logpdf(beta[k]) return post
[ "def", "multivariate_neg_logposterior", "(", "self", ",", "beta", ")", ":", "post", "=", "self", ".", "neg_loglik", "(", "beta", ")", "for", "k", "in", "range", "(", "0", ",", "self", ".", "z_no", ")", ":", "if", "self", ".", "latent_variables", ".", "z_list", "[", "k", "]", ".", "prior", ".", "covariance_prior", "is", "True", ":", "post", "+=", "-", "self", ".", "latent_variables", ".", "z_list", "[", "k", "]", ".", "prior", ".", "logpdf", "(", "self", ".", "custom_covariance", "(", "beta", ")", ")", "break", "else", ":", "post", "+=", "-", "self", ".", "latent_variables", ".", "z_list", "[", "k", "]", ".", "prior", ".", "logpdf", "(", "beta", "[", "k", "]", ")", "return", "post" ]
34
22.857143
def initGrid(self): """ Initialise the game grid """ blinker = [(4, 4), (4, 5), (4, 6)] toad = [(9, 5), (9, 6), (9, 7), (10, 4), (10, 5), (10, 6)] glider = [(4, 11), (5, 12), (6, 10), (6, 11), (6, 12)] r_pentomino = [(10, 60), (9, 61), (10, 61), (11, 61), (9, 62)] self.grid = {} if self.test: for cell in chain(blinker, toad, glider, r_pentomino): self.grid[cell] = 1 else: for _ in range(self.initsize): ry = random.randint(self.y_pad, self.y_grid - 1) rx = random.randint(self.x_pad, self.x_grid - 1) self.grid[(ry, rx)] = 1
[ "def", "initGrid", "(", "self", ")", ":", "blinker", "=", "[", "(", "4", ",", "4", ")", ",", "(", "4", ",", "5", ")", ",", "(", "4", ",", "6", ")", "]", "toad", "=", "[", "(", "9", ",", "5", ")", ",", "(", "9", ",", "6", ")", ",", "(", "9", ",", "7", ")", ",", "(", "10", ",", "4", ")", ",", "(", "10", ",", "5", ")", ",", "(", "10", ",", "6", ")", "]", "glider", "=", "[", "(", "4", ",", "11", ")", ",", "(", "5", ",", "12", ")", ",", "(", "6", ",", "10", ")", ",", "(", "6", ",", "11", ")", ",", "(", "6", ",", "12", ")", "]", "r_pentomino", "=", "[", "(", "10", ",", "60", ")", ",", "(", "9", ",", "61", ")", ",", "(", "10", ",", "61", ")", ",", "(", "11", ",", "61", ")", ",", "(", "9", ",", "62", ")", "]", "self", ".", "grid", "=", "{", "}", "if", "self", ".", "test", ":", "for", "cell", "in", "chain", "(", "blinker", ",", "toad", ",", "glider", ",", "r_pentomino", ")", ":", "self", ".", "grid", "[", "cell", "]", "=", "1", "else", ":", "for", "_", "in", "range", "(", "self", ".", "initsize", ")", ":", "ry", "=", "random", ".", "randint", "(", "self", ".", "y_pad", ",", "self", ".", "y_grid", "-", "1", ")", "rx", "=", "random", ".", "randint", "(", "self", ".", "x_pad", ",", "self", ".", "x_grid", "-", "1", ")", "self", ".", "grid", "[", "(", "ry", ",", "rx", ")", "]", "=", "1" ]
35.736842
17.631579