Dataset columns: _id string (2-7 chars); title string (1-88 chars); partition string (3 classes); text string (75-19.8k chars); language string (1 class); meta_information dict
q276200
admin_footer
test
def admin_footer(parser, token):
    """
    Template tag that renders the footer information based on the
    authenticated user's permissions.
    """
    # split_contents() knows not to split quoted strings.
    tag_name = token.split_contents()
    if len(tag_name) > 1:
        raise base.TemplateSyntaxError(
            '{} tag does not accept any argument(s): {}'.format(
                token.contents.split()[0],
                ', '.join(token.contents.split()[1:])
            ))
    return AdminFooterNode()
python
{ "resource": "" }
q276201
build_payment_parameters
test
def build_payment_parameters(amount: Money, client_ref: str) -> PaymentParameters:
    """
    Builds the parameters needed to present the user with a datatrans payment form.

    :param amount: The amount and currency we want the user to pay
    :param client_ref: A unique reference for this payment
    :return: The parameters needed to display the datatrans form
    """
    merchant_id = web_merchant_id
    amount, currency = money_to_amount_and_currency(amount)
    refno = client_ref
    sign = sign_web(merchant_id, amount, currency, refno)

    parameters = PaymentParameters(
        merchant_id=merchant_id,
        amount=amount,
        currency=currency,
        refno=refno,
        sign=sign,
        use_alias=False,
    )
    logger.info('build-payment-parameters', parameters=parameters)
    return parameters
python
{ "resource": "" }
q276202
build_register_credit_card_parameters
test
def build_register_credit_card_parameters(client_ref: str) -> PaymentParameters:
    """
    Builds the parameters needed to present the user with a datatrans form
    to register a credit card. Contrary to a payment form, datatrans will
    not show an amount.

    :param client_ref: A unique reference for this alias capture.
    :return: The parameters needed to display the datatrans form
    """
    amount = 0
    currency = 'CHF'  # Datatrans requires this value to be filled, so we use this arbitrary currency.
    merchant_id = web_merchant_id
    refno = client_ref
    sign = sign_web(merchant_id, amount, currency, refno)

    parameters = PaymentParameters(
        merchant_id=merchant_id,
        amount=amount,
        currency=currency,
        refno=refno,
        sign=sign,
        use_alias=True,
    )
    logger.info('building-payment-parameters', parameters=parameters)
    return parameters
python
{ "resource": "" }
q276203
pay_with_alias
test
def pay_with_alias(amount: Money, alias_registration_id: str, client_ref: str) -> Payment:
    """
    Charges money using datatrans, given a previously registered credit card alias.

    :param amount: The amount and currency we want to charge
    :param alias_registration_id: The alias registration to use
    :param client_ref: A unique reference for this charge
    :return: a Payment (either successful or not)
    """
    if amount.amount <= 0:
        raise ValueError('Pay with alias takes a strictly positive amount')

    alias_registration = AliasRegistration.objects.get(pk=alias_registration_id)
    logger.info('paying-with-alias', amount=amount, client_ref=client_ref,
                alias_registration=alias_registration)

    request_xml = build_pay_with_alias_request_xml(amount, client_ref, alias_registration)

    logger.info('sending-pay-with-alias-request', url=datatrans_authorize_url, data=request_xml)
    response = requests.post(
        url=datatrans_authorize_url,
        headers={'Content-Type': 'application/xml'},
        data=request_xml)
    logger.info('processing-pay-with-alias-response', response=response.content)

    charge_response = parse_pay_with_alias_response_xml(response.content)
    charge_response.save()
    charge_response.send_signal()
    return charge_response
python
{ "resource": "" }
q276204
get_version
test
def get_version(version=None):
    """
    Return the full version number, including rc, beta, etc. tags.

    For example: `2.0.0a1`

    :rtype: str
    """
    v = version or __version__
    if len(v) == 4:
        return '{0}{1}'.format(short_version(v), v[3])
    return short_version(v)
python
{ "resource": "" }
q276205
FilesystemBrowser._construct
test
def _construct(self):
    '''Construct widget.'''
    self.setLayout(QtGui.QVBoxLayout())

    self._headerLayout = QtGui.QHBoxLayout()

    self._locationWidget = QtGui.QComboBox()
    self._headerLayout.addWidget(self._locationWidget, stretch=1)

    self._upButton = QtGui.QToolButton()
    self._upButton.setIcon(QtGui.QIcon(':riffle/icon/up'))
    self._headerLayout.addWidget(self._upButton)

    self.layout().addLayout(self._headerLayout)

    self._contentSplitter = QtGui.QSplitter()

    self._bookmarksWidget = QtGui.QListView()
    self._contentSplitter.addWidget(self._bookmarksWidget)

    self._filesystemWidget = QtGui.QTableView()
    self._filesystemWidget.setSelectionBehavior(
        self._filesystemWidget.SelectRows
    )
    self._filesystemWidget.setSelectionMode(
        self._filesystemWidget.SingleSelection
    )
    self._filesystemWidget.verticalHeader().hide()

    self._contentSplitter.addWidget(self._filesystemWidget)

    proxy = riffle.model.FilesystemSortProxy(self)
    model = riffle.model.Filesystem(
        path=self._root, parent=self, iconFactory=self._iconFactory
    )
    proxy.setSourceModel(model)
    proxy.setDynamicSortFilter(True)

    self._filesystemWidget.setModel(proxy)
    self._filesystemWidget.setSortingEnabled(True)

    self._contentSplitter.setStretchFactor(1, 1)
    self.layout().addWidget(self._contentSplitter)

    self._footerLayout = QtGui.QHBoxLayout()
    self._footerLayout.addStretch(1)

    self._cancelButton = QtGui.QPushButton('Cancel')
    self._footerLayout.addWidget(self._cancelButton)

    self._acceptButton = QtGui.QPushButton('Choose')
    self._footerLayout.addWidget(self._acceptButton)

    self.layout().addLayout(self._footerLayout)
python
{ "resource": "" }
q276206
FilesystemBrowser._postConstruction
test
def _postConstruction(self):
    '''Perform post-construction operations.'''
    self.setWindowTitle('Filesystem Browser')
    self._filesystemWidget.sortByColumn(0, QtCore.Qt.AscendingOrder)

    # TODO: Remove once bookmarks widget implemented.
    self._bookmarksWidget.hide()

    self._acceptButton.setDefault(True)
    self._acceptButton.setDisabled(True)

    self._acceptButton.clicked.connect(self.accept)
    self._cancelButton.clicked.connect(self.reject)

    self._configureShortcuts()
    self.setLocation(self._root)

    self._filesystemWidget.horizontalHeader().setResizeMode(
        QtGui.QHeaderView.ResizeToContents
    )
    self._filesystemWidget.horizontalHeader().setResizeMode(
        0, QtGui.QHeaderView.Stretch
    )

    self._upButton.clicked.connect(self._onNavigateUpButtonClicked)
    self._locationWidget.currentIndexChanged.connect(
        self._onNavigate
    )

    self._filesystemWidget.activated.connect(self._onActivateItem)
    selectionModel = self._filesystemWidget.selectionModel()
    selectionModel.currentRowChanged.connect(self._onSelectItem)
python
{ "resource": "" }
q276207
FilesystemBrowser._configureShortcuts
test
def _configureShortcuts(self):
    '''Add keyboard shortcuts to navigate the filesystem.'''
    self._upShortcut = QtGui.QShortcut(
        QtGui.QKeySequence('Backspace'), self
    )
    self._upShortcut.setAutoRepeat(False)
    self._upShortcut.activated.connect(self._onNavigateUpButtonClicked)
python
{ "resource": "" }
q276208
FilesystemBrowser._onActivateItem
test
def _onActivateItem(self, index):
    '''Handle activation of item in listing.'''
    item = self._filesystemWidget.model().item(index)
    if not isinstance(item, riffle.model.File):
        self._acceptButton.setDisabled(True)
        self.setLocation(item.path, interactive=True)
python
{ "resource": "" }
q276209
FilesystemBrowser._onSelectItem
test
def _onSelectItem(self, selection, previousSelection):
    '''Handle selection of item in listing.'''
    self._acceptButton.setEnabled(True)
    del self._selected[:]
    item = self._filesystemWidget.model().item(selection)
    self._selected.append(item.path)
python
{ "resource": "" }
q276210
FilesystemBrowser._onNavigate
test
def _onNavigate(self, index):
    '''Handle selection of path segment.'''
    if index > 0:
        self.setLocation(
            self._locationWidget.itemData(index), interactive=True
        )
python
{ "resource": "" }
q276211
BuildResources.finalize_options
test
def finalize_options(self):
    '''Finalize options to be used.'''
    self.resource_source_path = os.path.join(RESOURCE_PATH, 'resource.qrc')
    self.resource_target_path = RESOURCE_TARGET_PATH
python
{ "resource": "" }
q276212
BuildResources.run
test
def run(self):
    '''Run build.'''
    if ON_READ_THE_DOCS:
        # PySide not available.
        return

    try:
        pyside_rcc_command = 'pyside-rcc'

        # On Windows, pyside-rcc is not automatically available on the
        # PATH so try to find it manually.
        if sys.platform == 'win32':
            import PySide
            pyside_rcc_command = os.path.join(
                os.path.dirname(PySide.__file__), 'pyside-rcc.exe'
            )

        subprocess.check_call([
            pyside_rcc_command,
            '-o',
            self.resource_target_path,
            self.resource_source_path
        ])
    except (subprocess.CalledProcessError, OSError):
        print(
            'Error compiling resource.py using pyside-rcc. Possibly '
            'pyside-rcc could not be found. You might need to manually add '
            'it to your PATH.'
        )
        raise SystemExit()
python
{ "resource": "" }
q276213
Clean.run
test
def run(self):
    '''Run clean.'''
    relative_resource_path = os.path.relpath(
        RESOURCE_TARGET_PATH, ROOT_PATH
    )
    if os.path.exists(relative_resource_path):
        os.remove(relative_resource_path)
    else:
        distutils.log.warn(
            '\'{0}\' does not exist -- can\'t clean it'
            .format(relative_resource_path)
        )

    relative_compiled_resource_path = relative_resource_path + 'c'
    if os.path.exists(relative_compiled_resource_path):
        os.remove(relative_compiled_resource_path)
    else:
        distutils.log.warn(
            '\'{0}\' does not exist -- can\'t clean it'
            .format(relative_compiled_resource_path)
        )

    CleanCommand.run(self)
python
{ "resource": "" }
q276214
Item.fetchChildren
test
def fetchChildren(self):
    '''Fetch and return new children.

    Will only fetch children whilst canFetchMore is True.

    .. note::

        It is the caller's responsibility to add each fetched child to this
        parent if desired using :py:meth:`Item.addChild`.

    '''
    if not self.canFetchMore():
        return []

    children = self._fetchChildren()
    self._fetched = True
    return children
python
{ "resource": "" }
q276215
Item.refetch
test
def refetch(self):
    '''Reload children.'''
    # Reset children
    for child in self.children[:]:
        self.removeChild(child)

    # Enable children fetching
    self._fetched = False
python
{ "resource": "" }
q276216
FilesystemSortProxy.icon
test
def icon(self, index):
    '''Return icon for index.'''
    sourceModel = self.sourceModel()
    if not sourceModel:
        return None

    return sourceModel.icon(self.mapToSource(index))
python
{ "resource": "" }
q276217
call
test
def call(args, stdout=None, stderr=None, stdin=None, daemonize=False,
         preexec_fn=None, shell=False, cwd=None, env=None):
    """
    Run an external command in a separate process and detach it from the
    current process. Excepting `stdout`, `stderr`, and `stdin` all file
    descriptors are closed after forking. If `daemonize` is True then the
    parent process exits. All stdio is redirected to `os.devnull` unless
    specified. The `preexec_fn`, `shell`, `cwd`, and `env` parameters are
    the same as their `Popen` counterparts. Return the PID of the child
    process if not daemonized.
    """
    stream = lambda s, m: os.open(os.devnull, m) if s is None else s
    stdout = stream(stdout, os.O_WRONLY)
    stderr = stream(stderr, os.O_WRONLY)
    stdin = stream(stdin, os.O_RDONLY)

    shared_pid = Value('i', 0)
    pid = os.fork()

    if pid > 0:
        os.waitpid(pid, 0)
        child_pid = shared_pid.value
        del shared_pid
        if daemonize:
            sys.exit(0)
        return child_pid
    else:
        os.setsid()
        proc = subprocess.Popen(args, stdout=stdout, stderr=stderr,
                                stdin=stdin, close_fds=True,
                                preexec_fn=preexec_fn, shell=shell,
                                cwd=cwd, env=env)
        shared_pid.value = proc.pid
        os._exit(0)
python
{ "resource": "" }
q276218
Detach._get_max_fd
test
def _get_max_fd(self):
    """Return the maximum file descriptor value."""
    limits = resource.getrlimit(resource.RLIMIT_NOFILE)
    result = limits[1]
    if result == resource.RLIM_INFINITY:
        result = maxfd
    return result
python
{ "resource": "" }
q276219
Detach._close_fd
test
def _close_fd(self, fd):
    """Close a file descriptor if it is open."""
    try:
        os.close(fd)
    except OSError as exc:
        if exc.errno != errno.EBADF:
            msg = "Failed to close file descriptor {}: {}".format(fd, exc)
            raise Error(msg)
python
{ "resource": "" }
q276220
Detach._close_open_fds
test
def _close_open_fds(self):
    """Close open file descriptors."""
    maxfd = self._get_max_fd()
    for fd in reversed(range(maxfd)):
        if fd not in self.exclude_fds:
            self._close_fd(fd)
python
{ "resource": "" }
q276221
Detach._redirect
test
def _redirect(self, stream, target):
    """Redirect a system stream to the provided target."""
    if target is None:
        target_fd = os.open(os.devnull, os.O_RDWR)
    else:
        target_fd = target.fileno()
    os.dup2(target_fd, stream.fileno())
python
{ "resource": "" }
q276222
set_form_widgets_attrs
test
def set_form_widgets_attrs(form, attrs):
    """Applies given HTML attributes to each field widget of a given form.

    Example:

        set_form_widgets_attrs(my_form, {'class': 'clickable'})

    """
    for _, field in form.fields.items():
        attrs_ = dict(attrs)
        for name, val in attrs.items():
            if callable(val):
                attrs_[name] = val(field)
        field.widget.attrs = field.widget.build_attrs(attrs_)
python
{ "resource": "" }
q276223
import_app_module
test
def import_app_module(app_name, module_name):
    """Returns a module from a given app by its name.

    :param str app_name:
    :param str module_name:
    :rtype: module or None
    """
    name_split = app_name.split('.')
    if name_split[-1][0].isupper():
        # Seems that we have app config class path here.
        app_name = '.'.join(name_split[:-2])

    module = import_module(app_name)

    try:
        sub_module = import_module('%s.%s' % (app_name, module_name))
        return sub_module
    except:
        # The same bubbling strategy as in autodiscover_modules().
        if module_has_submodule(module, module_name):
            # Module is in a package.
            raise

    return None
python
{ "resource": "" }
q276224
import_project_modules
test
def import_project_modules(module_name):
    """Imports modules from registered apps using given module name
    and returns them as a list.

    :param str module_name:
    :rtype: list
    """
    from django.conf import settings

    submodules = []
    for app in settings.INSTALLED_APPS:
        module = import_app_module(app, module_name)
        if module is not None:
            submodules.append(module)

    return submodules
python
{ "resource": "" }
q276225
include_
test
def include_(parser, token):
    """Similar to built-in ``include`` template tag, but allowing template
    variables to be used in template name and a fallback template,
    thus making the tag more dynamic.

    .. warning:: Requires Django 1.8+

    Example:

        {% load etc_misc %}
        {% include_ "sub_{{ postfix_var }}.html" fallback "default.html" %}

    """
    bits = token.split_contents()

    dynamic = False

    # We fallback to built-in `include` if a template name contains no variables.
    if len(bits) >= 2:
        dynamic = '{{' in bits[1]

    if dynamic:
        fallback = None
        bits_new = []

        for bit in bits:
            if fallback is True:
                # This bit is a `fallback` argument.
                fallback = bit
                continue

            if bit == 'fallback':
                fallback = True
            else:
                bits_new.append(bit)

        if fallback:
            fallback = parser.compile_filter(construct_relative_path_(parser, fallback))

        token.contents = ' '.join(bits_new)

    token.contents = token.contents.replace('include_', 'include')
    include_node = do_include(parser, token)

    if dynamic:
        # swap simple include with dynamic
        include_node = DynamicIncludeNode(
            include_node.template,
            extra_context=include_node.extra_context,
            isolated_context=include_node.isolated_context,
            fallback=fallback or None,
        )

    return include_node
python
{ "resource": "" }
q276226
gravatar_get_url
test
def gravatar_get_url(obj, size=65, default='identicon'):
    """Returns Gravatar image URL for a given string or UserModel.

    Example:

        {% load gravatar %}
        {% gravatar_get_url user_model %}

    :param UserModel, str obj:
    :param int size:
    :param str default:
    :return:
    """
    return get_gravatar_url(obj, size=size, default=default)
python
{ "resource": "" }
q276227
gravatar_get_img
test
def gravatar_get_img(obj, size=65, default='identicon'):
    """Returns Gravatar image HTML tag for a given string or UserModel.

    Example:

        {% load gravatar %}
        {% gravatar_get_img user_model %}

    :param UserModel, str obj:
    :param int size:
    :param str default:
    :return:
    """
    url = get_gravatar_url(obj, size=size, default=default)
    if url:
        return safe('<img src="%s" class="gravatar">' % url)
    return ''
python
{ "resource": "" }
q276228
Port.is_valid_filesys
test
def is_valid_filesys(path):
    """Checks that the path is correct and exists: it must be absolute,
    point to a directory, and not be a file."""
    if os.path.isabs(path) and os.path.isdir(path) and \
            not os.path.isfile(path):
        return True
    else:
        raise LocalPortValidationError(
            'Port value %s is not a valid filesystem location' % path
        )
python
{ "resource": "" }
q276229
Port.is_valid_s3_url
test
def is_valid_s3_url(url):
    """Checks whether the url refers to S3. This is not an accurate
    validation of the url."""
    # Skip if the url starts with source: (gbdxtools syntax)
    if url.startswith('source:'):
        return True

    scheme, netloc, path, _, _, _ = urlparse(url)

    port_except = RemotePortValidationError(
        'Port value %s is not a valid s3 location' % url
    )

    if len(scheme) < 2:
        raise port_except

    if 's3' in scheme or 's3' in netloc or 's3' in path:
        return True
    else:
        raise port_except
python
{ "resource": "" }
q276230
TaskController._get_template_abs_path
test
def _get_template_abs_path(filename):
    """
    Return a valid absolute path. filename can be relative or absolute.
    """
    if os.path.isabs(filename) and os.path.isfile(filename):
        return filename
    else:
        return os.path.join(os.getcwd(), filename)
python
{ "resource": "" }
q276231
AccountStorageService.list
test
def list(self, s3_folder='', full_key_data=False):
    """Get a list of keys for the accounts"""
    if not s3_folder.startswith('/'):
        s3_folder = '/' + s3_folder

    s3_prefix = self.prefix + s3_folder

    bucket_data = self.client.list_objects(Bucket=self.bucket, Prefix=s3_prefix)

    if full_key_data:
        return bucket_data['Contents']
    else:
        return [k['Key'] for k in bucket_data['Contents']]
python
{ "resource": "" }
q276232
Workflow._build_worklfow_json
test
def _build_worklfow_json(self):
    """
    Build a workflow definition from the cloud_harness task.
    """
    wf_json = {'tasks': [], 'name': 'cloud-harness_%s' % str(uuid.uuid4())}

    task_def = json.loads(self.task_template.json())

    d = {
        "name": task_def['name'],
        "outputs": [],
        "inputs": [],
        "taskType": task_def['taskType']
    }

    # Add input ports
    for port in self.task_template.input_ports:
        port_value = port.value
        if port_value is False:
            port_value = 'false'
        if port_value is True:
            port_value = 'true'
        d['inputs'].append({
            "name": port._name,
            "value": port_value
        })

    # Add output ports
    for port in self.task_template.output_ports:
        d['outputs'].append({
            "name": port._name
        })

    # Add task to workflow
    wf_json['tasks'].append(d)

    # Add ports to be saved
    for port in self.task_template.output_ports:
        # Add save data locations
        if hasattr(port, 'stageToS3') and port.stageToS3:
            save_location = '{customer_storage}/{run_name}/{port}'.format(
                customer_storage=self.storage.location,
                run_name=self.task_template.run_name,
                port=port.name
            )
            new_task = dict(**self.STAGE_TO_S3)
            new_task['inputs'] = [
                {'name': 'data', 'source': '%s:%s' % (task_def['name'], port._name)},
                {'name': 'destination', 'value': save_location}
            ]
            wf_json['tasks'].append(new_task)

    return wf_json
python
{ "resource": "" }
q276233
Workflow.execute
test
def execute(self, override_wf_json=None):
    """
    Execute the cloud_harness task.
    """
    r = self.gbdx.post(
        self.URL,
        json=self.json if override_wf_json is None else override_wf_json
    )

    try:
        r.raise_for_status()
    except:
        print("GBDX API Status Code: %s" % r.status_code)
        print("GBDX API Response: %s" % r.text)
        self.id = None
        return

    self.id = r.json()['id']

    self._refresh_status()
python
{ "resource": "" }
q276234
archive
test
def archive(folder, dry_run=False):
    "Move an active project to the archive."
    # error handling on archive_dir already done in main()
    for f in folder:
        if not os.path.exists(f):
            bail('folder does not exist: ' + f)

    _archive_safe(folder, PROJ_ARCHIVE, dry_run=dry_run)
python
{ "resource": "" }
q276235
_mkdir
test
def _mkdir(p):
    "The equivalent of 'mkdir -p' in shell."
    isdir = os.path.isdir

    stack = [os.path.abspath(p)]
    while not isdir(stack[-1]):
        parent_dir = os.path.dirname(stack[-1])
        stack.append(parent_dir)

    while stack:
        p = stack.pop()
        if not isdir(p):
            os.mkdir(p)
python
{ "resource": "" }
q276236
list
test
def list(pattern=()):
    "List the contents of the archive directory."
    # strategy: pick the intersection of all the patterns the user provides
    globs = ['*{0}*'.format(p) for p in pattern] + ['*']
    matches = []
    offset = len(PROJ_ARCHIVE) + 1
    for suffix in globs:
        glob_pattern = os.path.join(PROJ_ARCHIVE, '*', '*', suffix)
        matches.append(set(
            f[offset:] for f in glob.glob(glob_pattern)
        ))
    matches = reduce(lambda x, y: x.intersection(y), matches)
    for m in sorted(matches):
        print(m)
python
{ "resource": "" }
q276237
restore
test
def restore(folder):
    "Restore a project from the archive."
    if os.path.isdir(folder):
        bail('a folder of the same name already exists!')

    pattern = os.path.join(PROJ_ARCHIVE, '*', '*', folder)
    matches = glob.glob(pattern)
    if not matches:
        bail('no project matches: ' + folder)
    if len(matches) > 1:
        print('Warning: multiple matches, picking the most recent',
              file=sys.stderr)

    source = sorted(matches)[-1]
    print(source, '-->', folder)
    shutil.move(source, '.')
python
{ "resource": "" }
q276238
Client.new
test
def new(cls, access_token, environment='prod'):
    '''Create new storage service client.

    Arguments:
        environment(str): The service environment to be used for the
            client. 'prod' or 'dev'.
        access_token(str): The access token used to authenticate with the
            service

    Returns:
        A storage_service.Client instance

    '''
    api_client = ApiClient.new(access_token, environment)
    return cls(api_client)
python
{ "resource": "" }
q276239
Client.list
test
def list(self, path):
    '''List the entities found directly under the given path.

    Args:
        path (str): The path of the entity to be listed. Must start with a '/'.

    Returns:
        The list of entity names directly under the given path:

            u'/12345/folder_1'

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path)

    entity = self.api_client.get_entity_by_query(path=path)
    if entity['entity_type'] not in self.__BROWSABLE_TYPES:
        raise StorageArgumentException('The entity type "{0}" cannot be '
                                       'listed'.format(entity['entity_type']))
    entity_uuid = entity['uuid']

    file_names = []

    # get files
    more_pages = True
    page_number = 1
    while more_pages:
        response = self.api_client.list_folder_content(
            entity_uuid, page=page_number, ordering='name')
        more_pages = response['next'] is not None
        page_number += 1
        for child in response['results']:
            pattern = '/{name}' if child['entity_type'] == 'folder' else '{name}'
            file_names.append(pattern.format(name=child['name']))

    return file_names
python
{ "resource": "" }
q276240
Client.download_file
test
def download_file(self, path, target_path):
    '''Download a file from storage service to local disk.

    Existing files on the target path will be overwritten.
    The download is not recursive, as it only works on files.

    Args:
        path (str): The path of the entity to be downloaded. Must start with a '/'.

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path)

    entity = self.api_client.get_entity_by_query(path=path)
    if entity['entity_type'] != 'file':
        raise StorageArgumentException('Only file entities can be downloaded')

    signed_url = self.api_client.get_signed_url(entity['uuid'])
    response = self.api_client.download_signed_url(signed_url)

    with open(target_path, "wb") as output:
        for chunk in response.iter_content(chunk_size=1024):
            output.write(chunk)
python
{ "resource": "" }
q276241
Client.exists
test
def exists(self, path):
    '''Check if a certain path exists in the storage service.

    Args:
        path (str): The path to be checked

    Returns:
        True if the path exists, False otherwise

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path)

    try:
        metadata = self.api_client.get_entity_by_query(path=path)
    except StorageNotFoundException:
        return False

    return metadata and 'uuid' in metadata
python
{ "resource": "" }
q276242
Client.get_parent
test
def get_parent(self, path):
    '''Get the parent entity of the entity pointed by the given path.

    Args:
        path (str): The path of the entity whose parent is needed

    Returns:
        A JSON object of the parent entity if found.

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path, projects_allowed=False)

    path_steps = [step for step in path.split('/') if step]
    del path_steps[-1]
    parent_path = '/{0}'.format('/'.join(path_steps))

    return self.api_client.get_entity_by_query(path=parent_path)
python
{ "resource": "" }
q276243
Client.mkdir
test
def mkdir(self, path):
    '''Create a folder in the storage service pointed by the given path.

    Args:
        path (str): The path of the folder to be created

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path, projects_allowed=False)

    parent_metadata = self.get_parent(path)
    self.api_client.create_folder(path.split('/')[-1], parent_metadata['uuid'])
python
{ "resource": "" }
q276244
Client.upload_file
test
def upload_file(self, local_file, dest_path, mimetype):
    '''Upload local file content to a storage service destination folder.

    Args:
        local_file(str)
        dest_path(str): absolute Storage service path; the '/project'
            prefix is essential and the suffix should be the name the file
            will have in the destination folder,
            i.e.: /project/folder/.../file_name
        mimetype(str): set the contentType attribute

    Returns:
        The created file entity as a dictionary, with its upload 'etag' added.

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(dest_path)

    # get the paths of the target dir and the target file name
    if dest_path.endswith('/'):
        raise StorageArgumentException('Must specify target file name in dest_path argument')
    if local_file.endswith(os.path.sep):
        raise StorageArgumentException('Must specify source file name in local_file'
                                       ' argument, directory upload not supported')

    # create the file container
    new_file = self.api_client.create_file(
        name=dest_path.split('/').pop(),
        content_type=mimetype,
        parent=self.get_parent(dest_path)['uuid']
    )

    etag = self.api_client.upload_file_content(new_file['uuid'], source=local_file)
    new_file['etag'] = etag
    return new_file
python
{ "resource": "" }
q276245
Client.delete
test
def delete(self, path):
    '''Delete an entity from the storage service using its path.

    Args:
        path(str): The path of the entity to be deleted

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    self.__validate_storage_path(path, projects_allowed=False)

    entity = self.api_client.get_entity_by_query(path=path)

    if entity['entity_type'] in self.__BROWSABLE_TYPES:
        # At this point it can only be a folder
        contents = self.api_client.list_folder_content(entity['uuid'])
        if contents['count'] > 0:
            raise StorageArgumentException(
                'This method cannot delete non-empty folders. Please empty the folder first.')
        self.api_client.delete_folder(entity['uuid'])
    elif entity['entity_type'] == 'file':
        self.api_client.delete_file(entity['uuid'])
python
{ "resource": "" }
q276246
Client.__validate_storage_path
test
def __validate_storage_path(cls, path, projects_allowed=True):
    '''Validate a string as a valid storage path'''
    if not path or not isinstance(path, str) or path[0] != '/' or path == '/':
        raise StorageArgumentException(
            'The path must be a string, start with a slash (/), and be longer'
            ' than 1 character.')
    if not projects_allowed and len([elem for elem in path.split('/') if elem]) == 1:
        raise StorageArgumentException(
            'This method does not accept projects in the path.')
python
{ "resource": "" }
q276247
Client.new
test
def new(cls, access_token, environment='prod'):
    '''Creates a new cross-service client.'''
    return cls(
        storage_client=StorageClient.new(access_token, environment=environment))
python
{ "resource": "" }
q276248
ApiClient.new
test
def new(cls, access_token, environment='prod'):
    '''Create a new storage service REST client.

    Arguments:
        environment: The service environment to be used for the client
        access_token: The access token used to authenticate with the
            service

    Returns:
        A storage_service.api.ApiClient instance

    Example:
        >>> storage_client = ApiClient.new(my_access_token)

    '''
    request = RequestBuilder \
        .request(environment) \
        .to_service(cls.SERVICE_NAME, cls.SERVICE_VERSION) \
        .throw(
            StorageForbiddenException,
            lambda resp: 'You are forbidden to do this.'
            if resp.status_code == 403 else None
        ) \
        .throw(
            StorageNotFoundException,
            lambda resp: 'The entity is not found'
            if resp.status_code == 404 else None
        ) \
        .throw(
            StorageException,
            lambda resp: 'Server response: {0} - {1}'.format(resp.status_code, resp.text)
            if not resp.ok else None
        )

    authenticated_request = request.with_token(access_token)

    return cls(request, authenticated_request)
python
{ "resource": "" }
q276249
ApiClient.get_entity_details
test
def get_entity_details(self, entity_id):
    '''Get generic entity by UUID.

    Args:
        entity_id (str): The UUID of the requested entity.

    Returns:
        A dictionary describing the entity::

            {
                u'collab_id': 2271,
                u'created_by': u'303447',
                u'created_on': u'2017-03-10T12:50:06.077891Z',
                u'description': u'',
                u'entity_type': u'project',
                u'modified_by': u'303447',
                u'modified_on': u'2017-03-10T12:50:06.077946Z',
                u'name': u'2271',
                u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(entity_id):
        raise StorageArgumentException(
            'Invalid UUID for entity_id: {0}'.format(entity_id))
    return self._authenticated_request \
        .to_endpoint('entity/{}/'.format(entity_id)) \
        .return_body() \
        .get()
python
{ "resource": "" }
q276250
ApiClient.set_metadata
test
def set_metadata(self, entity_type, entity_id, metadata):
    '''Set metadata for an entity.

    Args:
        entity_type (str): Type of the entity. Admitted values: ['project',
            'folder', 'file'].
        entity_id (str): The UUID of the entity to be modified.
        metadata (dict): A dictionary of key/value pairs to be written as
            metadata.

    Warning:
        It will replace all existing metadata with the provided dictionary.

    Returns:
        A dictionary of the updated metadata::

            {
                u'bar': u'200',
                u'foo': u'100'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(entity_id):
        raise StorageArgumentException(
            'Invalid UUID for entity_id: {0}'.format(entity_id))
    if not isinstance(metadata, dict):
        raise StorageArgumentException('The metadata was not provided as a '
                                       'dictionary')
    return self._authenticated_request \
        .to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
        .with_json_body(metadata) \
        .return_body() \
        .post()
python
{ "resource": "" }
q276251
ApiClient.get_metadata
test
def get_metadata(self, entity_type, entity_id):
    '''Get metadata of an entity.

    Args:
        entity_type (str): Type of the entity. Admitted values: ['project',
            'folder', 'file'].
        entity_id (str): The UUID of the entity to be modified.

    Returns:
        A dictionary of the metadata::

            {
                u'bar': u'200',
                u'foo': u'100'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(entity_id):
        raise StorageArgumentException(
            'Invalid UUID for entity_id: {0}'.format(entity_id))
    return self._authenticated_request \
        .to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
        .return_body() \
        .get()
python
{ "resource": "" }
q276252
ApiClient.update_metadata
test
def update_metadata(self, entity_type, entity_id, metadata):
    '''Update the metadata of an entity.

    Existing non-modified metadata will not be affected.

    Args:
        entity_type (str): Type of the entity. Admitted values: 'project',
            'folder', 'file'.
        entity_id (str): The UUID of the entity to be modified.
        metadata (dict): A dictionary of key/value pairs to be written as
            metadata.

    Returns:
        A dictionary of the updated object metadata::

            {
                u'bar': u'200',
                u'foo': u'100'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(entity_id):
        raise StorageArgumentException(
            'Invalid UUID for entity_id: {0}'.format(entity_id))
    if not isinstance(metadata, dict):
        raise StorageArgumentException('The metadata was not provided as a '
                                       'dictionary')
    return self._authenticated_request \
        .to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
        .with_json_body(metadata) \
        .return_body() \
        .put()
python
{ "resource": "" }
q276253
ApiClient.delete_metadata
test
def delete_metadata(self, entity_type, entity_id, metadata_keys):
    '''Delete the selected metadata entries of an entity.

    Only deletes selected metadata keys; for a complete wipe, use set_metadata.

    Args:
        entity_type (str): Type of the entity. Admitted values: ['project',
            'folder', 'file'].
        entity_id (str): The UUID of the entity to be modified.
        metadata_keys (list): A list of metadata keys to be deleted.

    Returns:
        A dictionary of the updated object metadata::

            {
                u'bar': u'200',
                u'foo': u'100'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(entity_id):
        raise StorageArgumentException(
            'Invalid UUID for entity_id: {0}'.format(entity_id))
    if not isinstance(metadata_keys, list):
        raise StorageArgumentException('The metadata keys were not provided '
                                       'as a list')
    return self._authenticated_request \
        .to_endpoint('{}/{}/metadata/'.format(entity_type, entity_id)) \
        .with_json_body({'keys': metadata_keys}) \
        .return_body() \
        .delete()
python
{ "resource": "" }
q276254
ApiClient.list_projects
test
def list_projects(self, hpc=None, access=None, name=None, collab_id=None,
                  page_size=DEFAULT_PAGE_SIZE, page=None, ordering=None):
    '''List all the projects the user has access to.

    This function does not retrieve all results; pages have to be manually
    retrieved by the caller.

    Args:
        hpc (bool): If 'true', the result will contain only the HPC projects
            (Unicore projects).
        access (str): If provided, the result will contain only projects
            where the user has the provided access.
            Admitted values: ['read', 'write'].
        name (str): Filter on the project name.
        collab_id (int): Filter on the collab id.
        page_size (int): Number of elements per page.
        page (int): Number of the page.
        ordering (str): Indicate on which fields to sort the result. Prepend
            '-' to invert order. Multiple values can be provided.
            Ordering is supported on: ['name', 'created_on', 'modified_on'].
            Example: ordering='name,created_on'

    Returns:
        A dictionary of the results::

            {
                u'count': 256,
                u'next': u'http://link.to.next/page',
                u'previous': None,
                u'results': [{u'collab_id': 2079,
                              u'created_by': u'258666',
                              u'created_on': u'2017-02-23T15:09:27.626973Z',
                              u'description': u'',
                              u'entity_type': u'project',
                              u'modified_by': u'258666',
                              u'modified_on': u'2017-02-23T15:09:27.627025Z',
                              u'name': u'2079',
                              u'uuid': u'64a6ad2e-acd1-44a3-a4cd-6bd96e3da2b0'}]
            }

    Raises:
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    return self._authenticated_request \
        .to_endpoint('project/') \
        .with_params(self._prep_params(locals())) \
        .return_body() \
        .get()
python
{ "resource": "" }
q276255
ApiClient.get_project_details
test
def get_project_details(self, project_id):
    '''Get information on a given project.

    Args:
        project_id (str): The UUID of the requested project.

    Returns:
        A dictionary describing the project::

            {
                u'collab_id': 2271,
                u'created_by': u'303447',
                u'created_on': u'2017-03-10T12:50:06.077891Z',
                u'description': u'',
                u'entity_type': u'project',
                u'modified_by': u'303447',
                u'modified_on': u'2017-03-10T12:50:06.077946Z',
                u'name': u'2271',
                u'uuid': u'3abd8742-d069-44cf-a66b-2370df74a682'
            }

    Raises:
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(project_id):
        raise StorageArgumentException(
            'Invalid UUID for project_id: {0}'.format(project_id))
    return self._authenticated_request \
        .to_endpoint('project/{}/'.format(project_id)) \
        .return_body() \
        .get()
python
{ "resource": "" }
q276256
ApiClient.create_project
test
def create_project(self, collab_id):
    '''Create a new project.

    Args:
        collab_id (int): The id of the collab the project should be created in.

    Returns:
        A dictionary of details of the created project::

            {
                u'collab_id': 12998,
                u'created_by': u'303447',
                u'created_on': u'2017-03-21T14:06:32.293902Z',
                u'description': u'',
                u'entity_type': u'project',
                u'modified_by': u'303447',
                u'modified_on': u'2017-03-21T14:06:32.293967Z',
                u'name': u'12998',
                u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
            }

    Raises:
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    return self._authenticated_request \
        .to_endpoint('project/') \
        .with_json_body(self._prep_params(locals())) \
        .return_body() \
        .post()
python
{ "resource": "" }
q276257
ApiClient.delete_project
test
def delete_project(self, project):
    '''Delete a project. It will recursively delete all the content.

    Args:
        project (str): The UUID of the project to be deleted.

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: 403
        StorageNotFoundException: 404
        HTTPError: other non-20x error codes
    '''
    if not is_valid_uuid(project):
        raise StorageArgumentException(
            'Invalid UUID for project: {0}'.format(project))
    self._authenticated_request \
        .to_endpoint('project/{}/'.format(project)) \
        .delete()
python
{ "resource": "" }
q276258
ApiClient.create_folder
test
def create_folder(self, name, parent):
    '''Create a new folder.

    Args:
        name (str): The name of the folder.
        parent (str): The UUID of the parent entity. The parent must be a
            project or a folder.

    Returns:
        A dictionary of details of the created folder::

            {
                u'created_by': u'303447',
                u'created_on': u'2017-03-21T14:06:32.293902Z',
                u'description': u'',
                u'entity_type': u'folder',
                u'modified_by': u'303447',
                u'modified_on': u'2017-03-21T14:06:32.293967Z',
                u'name': u'myfolder',
                u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
                u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(parent):
        raise StorageArgumentException(
            'Invalid UUID for parent: {0}'.format(parent))
    return self._authenticated_request \
        .to_endpoint('folder/') \
        .with_json_body(self._prep_params(locals())) \
        .return_body() \
        .post()
python
{ "resource": "" }
q276259
ApiClient.get_folder_details
test
def get_folder_details(self, folder):
    '''Get information on a given folder.

    Args:
        folder (str): The UUID of the requested folder.

    Returns:
        A dictionary of the folder details if found::

            {
                u'created_by': u'303447',
                u'created_on': u'2017-03-21T14:06:32.293902Z',
                u'description': u'',
                u'entity_type': u'folder',
                u'modified_by': u'303447',
                u'modified_on': u'2017-03-21T14:06:32.293967Z',
                u'name': u'myfolder',
                u'parent': u'3abd8742-d069-44cf-a66b-2370df74a682',
                u'uuid': u'2516442e-1e26-4de1-8ed8-94523224cc40'
            }

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(folder):
        raise StorageArgumentException(
            'Invalid UUID for folder: {0}'.format(folder))
    return self._authenticated_request \
        .to_endpoint('folder/{}/'.format(folder)) \
        .return_body() \
        .get()
python
{ "resource": "" }
q276260
ApiClient.delete_folder
test
def delete_folder(self, folder):
    '''Delete a folder. It will recursively delete all the content.

    Args:
        folder (str): The UUID of the folder to be deleted.

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: 403
        StorageNotFoundException: 404
        HTTPError: other non-20x error codes
    '''
    if not is_valid_uuid(folder):
        raise StorageArgumentException(
            'Invalid UUID for folder: {0}'.format(folder))
    self._authenticated_request \
        .to_endpoint('folder/{}/'.format(folder)) \
        .delete()
python
{ "resource": "" }
q276261
ApiClient.upload_file_content
test
def upload_file_content(self, file_id, etag=None, source=None, content=None):
    '''Upload a file content. The file entity must already exist.

    If an ETag is provided the file stored on the server is verified
    against it. If it does not match, StorageException is raised.
    This means the client needs to update its knowledge of the resource
    before attempting to update again. This can be used for optimistic
    concurrency control.

    Args:
        file_id (str): The UUID of the file whose content is written.
        etag (str): The etag to match the contents against.
        source (str): The path of the local file whose content to be uploaded.
        content (str): A string of the content to be uploaded.

    Note:
        ETags should be enclosed in double quotes::

            my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'

    Returns:
        The ETag of the file upload::

            '"71e1ed9ee52e565a56aec66bc648a32c"'

    Raises:
        IOError: The source cannot be opened.
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(file_id):
        raise StorageArgumentException(
            'Invalid UUID for file_id: {0}'.format(file_id))
    if not (source or content) or (source and content):
        raise StorageArgumentException('Either one of source file or content '
                                       'has to be provided.')

    resp = self._authenticated_request \
        .to_endpoint('file/{}/content/upload/'.format(file_id)) \
        .with_body(content or open(source, 'rb')) \
        .with_headers({'If-Match': etag} if etag else {}) \
        .post()

    if 'ETag' not in resp.headers:
        raise StorageException('No ETag received from the service after the upload')

    return resp.headers['ETag']
python
{ "resource": "" }
q276262
ApiClient.copy_file_content
test
def copy_file_content(self, file_id, source_file):
    '''Copy file content from source file to target file.

    Args:
        file_id (str): The UUID of the file whose content is written.
        source_file (str): The UUID of the file whose content is copied.

    Returns:
        None

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(file_id):
        raise StorageArgumentException(
            'Invalid UUID for file_id: {0}'.format(file_id))
    if not is_valid_uuid(source_file):
        raise StorageArgumentException(
            'Invalid UUID for source_file: {0}'.format(source_file))
    self._authenticated_request \
        .to_endpoint('file/{}/content/'.format(file_id)) \
        .with_headers({'X-Copy-From': source_file}) \
        .put()
python
{ "resource": "" }
q276263
ApiClient.download_file_content
test
def download_file_content(self, file_id, etag=None):
    '''Download file content.

    Args:
        file_id (str): The UUID of the file whose content is requested
        etag (str): If the content is not changed since the provided ETag,
            the content won't be downloaded. If the content is changed, it
            will be downloaded and returned with its new ETag.

    Note:
        ETags should be enclosed in double quotes::

            my_etag = '"71e1ed9ee52e565a56aec66bc648a32c"'

    Returns:
        A tuple of ETag and content (etag, content) if the content was
        retrieved. If an etag was provided, and content didn't change,
        returns (None, None)::

            ('"71e1ed9ee52e565a56aec66bc648a32c"', 'Hello world!')

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(file_id):
        raise StorageArgumentException(
            'Invalid UUID for file_id: {0}'.format(file_id))

    headers = {'Accept': '*/*'}
    if etag:
        headers['If-None-Match'] = etag

    resp = self._authenticated_request \
        .to_endpoint('file/{}/content/'.format(file_id)) \
        .with_headers(headers) \
        .get()

    if resp.status_code == 304:
        return (None, None)

    if 'ETag' not in resp.headers:
        raise StorageException('No ETag received from the service with the download')

    return (resp.headers['ETag'], resp.content)
python
{ "resource": "" }
q276264
ApiClient.get_signed_url
test
def get_signed_url(self, file_id):
    '''Get a signed unauthenticated URL.

    It can be used to download the file content without the need for a
    token. The signed URL expires after 5 seconds.

    Args:
        file_id (str): The UUID of the file to get the link for.

    Returns:
        The signed url as a string

    Raises:
        StorageArgumentException: Invalid arguments
        StorageForbiddenException: Server response code 403
        StorageNotFoundException: Server response code 404
        StorageException: other 400-600 error codes
    '''
    if not is_valid_uuid(file_id):
        raise StorageArgumentException(
            'Invalid UUID for file_id: {0}'.format(file_id))
    return self._authenticated_request \
        .to_endpoint('file/{}/content/secure_link/'.format(file_id)) \
        .return_body() \
        .get()['signed_url']
python
{ "resource": "" }
q276265
MongoDBHandler.emit
test
def emit(self, record):
    """
    pymongo expects a dict
    """
    msg = self.format(record)

    if not isinstance(msg, dict):
        msg = json.loads(msg)

    self.collection.insert(msg)
python
{ "resource": "" }
q276266
RequestBuilder.to_service
test
def to_service(self, service, version):
    '''Sets the service name and version the request should target.

    Args:
        service (str): The name of the service as displayed in the
            services.json file
        version (str): The version of the service as displayed in the
            services.json file

    Returns:
        The request builder instance in order to chain calls
    '''
    service_url = self._service_locator.get_service_url(service, version)
    return self.__copy_and_set('service_url',
                               self.__strip_trailing_slashes(service_url))
python
{ "resource": "" }
q276267
RequestBuilder.with_headers
test
def with_headers(self, headers):
    '''Adds headers to the request.

    Args:
        headers (dict): The headers to add to the request headers

    Returns:
        The request builder instance in order to chain calls
    '''
    copy = headers.copy()
    copy.update(self._headers)
    return self.__copy_and_set('headers', copy)
python
{ "resource": "" }
q276268
RequestBuilder.with_params
test
def with_params(self, params):
    '''Adds parameters to the request params.

    Args:
        params (dict): The parameters to add to the request params

    Returns:
        The request builder instance in order to chain calls
    '''
    copy = params.copy()
    copy.update(self._params)
    return self.__copy_and_set('params', copy)
python
{ "resource": "" }
q276269
RequestBuilder.throw
test
def throw(self, exception_class, should_throw):
    '''Defines whether an exception should be thrown after the request is sent.

    Args:
        exception_class (class): The class of the exception to instantiate
        should_throw (function): The predicate that should indicate if the
            exception should be thrown. This function will be called with
            the response as a parameter

    Returns:
        The request builder instance in order to chain calls
    '''
    return self.__copy_and_set('throws', self._throws + [(exception_class, should_throw)])
python
{ "resource": "" }
q276270
AdminBooleanMixin.get_list_display
test
def get_list_display(self, request):
    """
    Return a sequence containing the fields to be displayed on the
    changelist.
    """
    list_display = []
    for field_name in self.list_display:
        try:
            db_field = self.model._meta.get_field(field_name)
            if isinstance(db_field, BooleanField):
                field_name = boolean_switch_field(db_field)
        except FieldDoesNotExist:
            pass
        list_display.append(field_name)
    return list_display
python
{ "resource": "" }
q276271
map_job
test
def map_job(job, func, inputs, *args):
    """
    Spawns a tree of jobs to avoid overloading the number of jobs spawned by a single parent.
    This function is appropriate to use when batching samples greater than 1,000.

    :param JobFunctionWrappingJob job: passed automatically by Toil
    :param function func: Function to spawn dynamically, passes one sample as first argument
    :param list inputs: Array of samples to be batched
    :param list args: any arguments to be passed to the function
    """
    # num_partitions isn't exposed as an argument in order to be transparent to the user.
    # The value for num_partitions is a tested value
    num_partitions = 100
    partition_size = len(inputs) / num_partitions
    if partition_size > 1:
        for partition in partitions(inputs, partition_size):
            job.addChildJobFn(map_job, func, partition, *args)
    else:
        for sample in inputs:
            job.addChildJobFn(func, sample, *args)
python
{ "resource": "" }
q276272
gatk_genotype_gvcfs
test
def gatk_genotype_gvcfs(job,
                        gvcfs,
                        ref, fai, ref_dict,
                        annotations=None,
                        emit_threshold=10.0, call_threshold=30.0,
                        unsafe_mode=False):
    """
    Runs GenotypeGVCFs on one or more gVCFs generated by HaplotypeCaller.

    :param JobFunctionWrappingJob job: passed automatically by Toil
    :param dict gvcfs: Dictionary of GVCF FileStoreIDs {sample identifier: FileStoreID}
    :param str ref: FileStoreID for the reference genome fasta file
    :param str fai: FileStoreID for the reference genome index file
    :param str ref_dict: FileStoreID for the reference genome sequence dictionary
    :param list[str] annotations: Optional list of GATK variant annotations. Default: None.
    :param float emit_threshold: Minimum phred-scale confidence threshold for
                                 a variant to be emitted. GATK default: 10.0
    :param float call_threshold: Minimum phred-scale confidence threshold for
                                 a variant to be called. GATK default: 30.0
    :param bool unsafe_mode: If True, runs gatk UNSAFE mode: "-U ALLOW_SEQ_DICT_INCOMPATIBILITY"
    :return: VCF FileStoreID
    :rtype: str
    """
    inputs = {'genome.fa': ref,
              'genome.fa.fai': fai,
              'genome.dict': ref_dict}
    inputs.update(gvcfs)

    work_dir = job.fileStore.getLocalTempDir()
    for name, file_store_id in inputs.iteritems():
        job.fileStore.readGlobalFile(file_store_id, os.path.join(work_dir, name))

    command = ['-T', 'GenotypeGVCFs',
               '-R', '/data/genome.fa',
               '--out', 'genotyped.vcf',
               '-stand_emit_conf', str(emit_threshold),
               '-stand_call_conf', str(call_threshold)]

    if annotations:
        for annotation in annotations:
            command.extend(['-A', annotation])

    # Include all GVCFs for joint genotyping
    for uuid in gvcfs.keys():
        command.extend(['--variant', os.path.join('/data', uuid)])

    if unsafe_mode:
        command.extend(['-U', 'ALLOW_SEQ_DICT_INCOMPATIBILITY'])

    job.fileStore.logToMaster('Running GATK GenotypeGVCFs\n'
                              'Emit threshold: {emit_threshold}\n'
                              'Call threshold: {call_threshold}\n\n'
                              'Annotations:\n{annotations}\n\n'
                              'Samples:\n{samples}\n'.format(
                                  emit_threshold=emit_threshold,
                                  call_threshold=call_threshold,
                                  annotations='\n'.join(annotations) if annotations else '',
                                  samples='\n'.join(gvcfs.keys())))

    docker_parameters = ['--rm', '--log-driver', 'none',
                         '-e', 'JAVA_OPTS=-Djava.io.tmpdir=/data/ -Xmx{}'.format(job.memory)]
    dockerCall(job=job,
               workDir=work_dir,
               parameters=command,
               tool='quay.io/ucsc_cgl/gatk:3.5--dba6dae49156168a909c43330350c6161dc7ecc2',
               dockerParameters=docker_parameters)

    return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'genotyped.vcf'))
python
{ "resource": "" }
q276273
run_oncotator
test
def run_oncotator(job, vcf_id, oncotator_db):
    """
    Uses Oncotator to add cancer relevant variant annotations to a VCF file.
    Oncotator can accept other genome builds, but the output VCF is based on hg19.

    :param JobFunctionWrappingJob job: passed automatically by Toil
    :param str vcf_id: FileStoreID for VCF file
    :param str oncotator_db: FileStoreID for Oncotator database
    :return: Annotated VCF FileStoreID
    :rtype: str
    """
    job.fileStore.logToMaster('Running Oncotator')

    inputs = {'input.vcf': vcf_id,
              'oncotator_db': oncotator_db}

    work_dir = job.fileStore.getLocalTempDir()
    for name, file_store_id in inputs.iteritems():
        inputs[name] = job.fileStore.readGlobalFile(file_store_id, os.path.join(work_dir, name))

    # The Oncotator database may be tar/gzipped
    if tarfile.is_tarfile(inputs['oncotator_db']):
        tar = tarfile.open(inputs['oncotator_db'])
        tar.extractall(path=work_dir)
        # Get the extracted database directory name
        inputs['oncotator_db'] = tar.getmembers()[0].name
        tar.close()

    command = ['-i', 'VCF',
               '-o', 'VCF',
               '--db-dir', inputs['oncotator_db'],
               'input.vcf',
               'annotated.vcf',
               'hg19']  # Oncotator annotations are based on hg19

    docker_parameters = ['--rm', '--log-driver', 'none',
                         '-e', 'JAVA_OPTS=-Djava.io.tmpdir=/data/ -Xmx{}'.format(job.memory)]
    dockerCall(job=job,
               workDir=work_dir,
               parameters=command,
               tool='jpfeil/oncotator:1.9--8fffc356981862d50cfacd711b753700b886b605',
               dockerParameters=docker_parameters)

    return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'annotated.vcf'))
python
{ "resource": "" }
q276274
DatapointArray.sort
test
def sort(self, f=lambda d: d["t"]):
    """Sorts the datapoint array in-place, by timestamp by default"""
    list.sort(self, key=f)
    return self
python
{ "resource": "" }
q276275
DatapointArray.t
test
def t(self):
    """Returns just the timestamp portion of the datapoints as a list.
    The timestamps are returned as python datetime objects."""
    return list(map(lambda x: datetime.datetime.fromtimestamp(x["t"]), self.raw()))
python
{ "resource": "" }
q276276
DatapointArray.loadExport
test
def loadExport(self, folder):
    """Adds the data from a ConnectorDB export. If it is a stream export,
    then the folder is the location of the export. If it is a device export,
    then the folder is the export folder with the stream name as a
    subdirectory. If it is a user export, you will use the path of the
    export folder, with the user/device/stream appended to the end::

        myuser.export("./exportdir")
        DatapointArray().loadExport("./exportdir/username/devicename/streamname")

    """
    self.loadJSON(os.path.join(folder, "data.json"))
    return self
python
{ "resource": "" }
q276277
DatapointArray.tshift
test
def tshift(self, t):
    """Shifts all timestamps in the datapoint array by the given number of
    seconds. It is the same as the 'tshift' pipescript transform.

    Warning: The shift is performed in-place! This means that it modifies
    the underlying array::

        d = DatapointArray([{"t":56,"d":1}])
        d.tshift(20)
        print(d)  # [{"t":76,"d":1}]

    """
    raw = self.raw()
    for i in range(len(raw)):
        raw[i]["t"] += t
    return self
python
{ "resource": "" }
q276278
DatapointArray.sum
test
def sum(self):
    """Gets the sum of the data portions of all datapoints within"""
    raw = self.raw()
    s = 0
    for i in range(len(raw)):
        s += raw[i]["d"]
    return s
python
{ "resource": "" }
q276279
rfxcom
test
def rfxcom(device):
    """Start the event loop to collect data from the serial device."""
    # If the device isn't passed in, look for it in the config.
    if device is None:
        device = app.config.get('DEVICE')

    # If the device is *still* none, error.
    if device is None:
        print("The serial device needs to be passed in as --device or "
              "set in the config as DEVICE.")
        return

    rfxcom_collect(device)
python
{ "resource": "" }
q276280
create_user
test
def create_user(username):
    "Create a new user."
    password = prompt_pass("Enter password")
    user = User(username=username, password=password)
    db.session.add(user)
    db.session.commit()
python
{ "resource": "" }
q276281
parse_vn_results
test
async def parse_vn_results(soup):
    """
    Parse Visual Novel search pages.

    :param soup: The BS4 class object
    :return: A list of dictionaries containing a name and id.
    """
    soup = soup.find_all('td', class_='tc1')
    vns = []
    for item in soup[1:]:
        vns.append({'name': item.string, 'id': item.a.get('href')[1:]})
    return vns
python
{ "resource": "" }
q276282
parse_release_results
test
async def parse_release_results(soup):
    """
    Parse Releases search pages.

    :param soup: The BS4 class object
    :return: A list of release dictionaries, each the same as the one
        returned in get_novel: a date released, platform, age group and name.
    """
    soup = list(soup.find_all('table', class_='stripe')[0].children)[1:]
    releases = []
    for item in soup:
        child = list(item.children)
        temp_rel = {'date': None, 'ages': None, 'platform': None, 'name': None}
        temp_rel['date'] = child[0].string
        temp_rel['ages'] = child[1].string
        temp_rel['platform'] = child[2].abbr.get('title')
        temp_rel['name'] = child[3].a.string
        releases.append(temp_rel)
        del temp_rel
    return releases
python
{ "resource": "" }
q276283
parse_prod_staff_results
test
async def parse_prod_staff_results(soup):
    """
    Parse a page of producer or staff results.

    :param soup: The BS4 class object
    :return: A list of dictionaries containing a name and nationality.
    """
    soup = soup.find_all('li')
    producers = []
    for item in soup:
        producers.append({'nationality': item.abbr.get('title'), 'name': item.a.string})
    return producers
python
{ "resource": "" }
q276284
parse_character_results
test
async def parse_character_results(soup):
    """
    Parse a page of character results.

    :param soup: The BS4 class object
    :return: Returns a list of dictionaries containing a name, gender and a list of
        dictionaries containing a game name/id pair for games they appeared in.
    """
    soup = list(soup.find_all('table', class_='stripe')[0].children)[1:]
    characters = []
    for item in soup:
        temp_c = {'gender': None, 'name': None, 'games': {}}
        temp_c['gender'] = item.abbr.get('title')
        temp_c['name'] = list(item.children)[1].a.string
        temp_c['games'] = []
        for game in list(list(list(item.children)[1].children)[1].children):
            if isinstance(game, NavigableString):
                continue
            temp_c['games'].append({'name': game.string, 'id': game.get('href').split('/')[1]})
        characters.append(temp_c)
        del temp_c
    return characters
python
{ "resource": "" }
q276285
parse_tag_results
test
async def parse_tag_results(soup):
    """
    Parse a page of tag or trait results (both share the same format).

    :param soup: BS4 Class Object
    :return: A list of tags; nothing else on the page is really useful.
    """
    soup = soup.find_all('td', class_='tc3')
    tags = []
    for item in soup:
        tags.append(item.a.string)
    return tags
python
{ "resource": "" }
q276286
parse_user_results
test
async def parse_user_results(soup):
    """
    Parse a page of user results.

    :param soup: BS4 Class object
    :return: A list of dictionaries containing a name and join date
    """
    soup = list(soup.find_all('table', class_='stripe')[0].children)[1:]
    users = []
    for item in soup:
        t_u = {'name': None, 'joined': None}
        t_u['name'] = list(item.children)[0].a.string
        t_u['joined'] = list(item.children)[1].string
        users.append(t_u)
        del t_u
    return users
python
{ "resource": "" }
q276287
tarball_files
test
def tarball_files(tar_name, file_paths, output_dir='.', prefix=''):
    """
    Creates a tarball from a group of files

    :param str tar_name: Name of tarball
    :param list[str] file_paths: Absolute file paths to include in the tarball
    :param str output_dir: Output destination for tarball
    :param str prefix: Optional prefix for files in tarball
    """
    with tarfile.open(os.path.join(output_dir, tar_name), 'w:gz') as f_out:
        for file_path in file_paths:
            if not file_path.startswith('/'):
                raise ValueError('Path provided is relative not absolute.')
            arcname = prefix + os.path.basename(file_path)
            f_out.add(file_path, arcname=arcname)
python
{ "resource": "" }
q276288
__forall_files
test
def __forall_files(file_paths, output_dir, op):
    """
    Applies a function to a set of files and an output directory.

    :param list[str] file_paths: Absolute file paths to operate on
    :param str output_dir: Output directory
    :param callable op: Function of (source_path, dest_path) applied to each file
    """
    for file_path in file_paths:
        if not file_path.startswith('/'):
            raise ValueError('Path provided (%s) is relative not absolute.' % file_path)
        dest = os.path.join(output_dir, os.path.basename(file_path))
        op(file_path, dest)
python
{ "resource": "" }
q276289
copy_file_job
test
def copy_file_job(job, name, file_id, output_dir):
    """
    Job version of copy_files for one file

    :param JobFunctionWrappingJob job: passed automatically by Toil
    :param str name: Name of output file (including extension)
    :param str file_id: FileStoreID of file
    :param str output_dir: Location to place output file
    """
    work_dir = job.fileStore.getLocalTempDir()
    fpath = job.fileStore.readGlobalFile(file_id, os.path.join(work_dir, name))
    copy_files([fpath], output_dir)
python
{ "resource": "" }
q276290
_make_parameters
test
def _make_parameters(master_ip, default_parameters, memory, arguments, override_parameters):
    """
    Makes a Spark Submit style job submission line.

    :param master_ip: The Spark leader IP address.
    :param default_parameters: Application specific Spark configuration parameters.
    :param memory: The memory to allocate to each Spark driver and executor.
    :param arguments: Arguments to pass to the submitted job.
    :param override_parameters: Parameters passed by the user, that override our defaults.

    :type master_ip: MasterAddress
    :type default_parameters: list of string
    :type memory: int or None
    :type arguments: list of string
    :type override_parameters: list of string or None
    """
    # python doesn't have a logical xor operator, so spell it out:
    # exactly one of memory or override_parameters must be defined
    require((override_parameters is not None or memory is not None) and
            (override_parameters is None or memory is None),
            "Either the memory setting must be defined or you must provide Spark configuration parameters.")

    # if the user hasn't provided overrides, set our defaults
    parameters = []
    if memory is not None:
        parameters = ["--master", "spark://%s:%s" % (master_ip, SPARK_MASTER_PORT),
                      "--conf", "spark.driver.memory=%sg" % memory,
                      "--conf", "spark.executor.memory=%sg" % memory,
                      "--conf", ("spark.hadoop.fs.default.name=hdfs://%s:%s" % (master_ip, HDFS_MASTER_PORT))]
    else:
        parameters.extend(override_parameters)

    # add the tool specific spark parameters
    parameters.extend(default_parameters)

    # spark submit expects a '--' to split the spark conf arguments from tool arguments
    parameters.append('--')

    # now add the tool arguments and return
    parameters.extend(arguments)

    return parameters
python
{ "resource": "" }
q276291
MasterAddress.docker_parameters
test
def docker_parameters(self, docker_parameters=None):
    """
    Augment a list of "docker run" arguments with those needed to map the notional
    Spark master address to the real one, if they are different.
    """
    if self != self.actual:
        add_host_option = '--add-host=spark-master:' + self.actual
        if docker_parameters is None:
            docker_parameters = [add_host_option]
        else:
            docker_parameters.append(add_host_option)
    return docker_parameters
python
{ "resource": "" }
q276292
ConnectorObject.refresh
test
def refresh(self):
    """Refresh reloads data from the server. It raises an error if it fails to
    get the object's metadata"""
    self.metadata = self.db.read(self.path).json()
python
{ "resource": "" }
q276293
run_mutect
test
def run_mutect(job, normal_bam, normal_bai, tumor_bam, tumor_bai, ref, ref_dict, fai, cosmic, dbsnp):
    """
    Calls MuTect to perform variant analysis

    :param JobFunctionWrappingJob job: passed automatically by Toil
    :param str normal_bam: Normal BAM FileStoreID
    :param str normal_bai: Normal BAM index FileStoreID
    :param str tumor_bam: Tumor BAM FileStoreID
    :param str tumor_bai: Tumor BAM Index FileStoreID
    :param str ref: Reference genome FileStoreID
    :param str ref_dict: Reference dictionary FileStoreID
    :param str fai: Reference index FileStoreID
    :param str cosmic: Cosmic VCF FileStoreID
    :param str dbsnp: DBSNP VCF FileStoreID
    :return: MuTect output (tarball) FileStoreID
    :rtype: str
    """
    work_dir = job.fileStore.getLocalTempDir()
    file_ids = [normal_bam, normal_bai, tumor_bam, tumor_bai, ref, fai, ref_dict, cosmic, dbsnp]
    file_names = ['normal.bam', 'normal.bai', 'tumor.bam', 'tumor.bai',
                  'ref.fasta', 'ref.fasta.fai', 'ref.dict', 'cosmic.vcf', 'dbsnp.vcf']
    for file_store_id, name in zip(file_ids, file_names):
        job.fileStore.readGlobalFile(file_store_id, os.path.join(work_dir, name))
    # Call: MuTect
    parameters = ['--analysis_type', 'MuTect',
                  '--reference_sequence', 'ref.fasta',
                  '--cosmic', '/data/cosmic.vcf',
                  '--dbsnp', '/data/dbsnp.vcf',
                  '--input_file:normal', '/data/normal.bam',
                  '--input_file:tumor', '/data/tumor.bam',
                  '--tumor_lod', str(10),  # Taken from MC3 pipeline
                  '--initial_tumor_lod', str(4.0),  # Taken from MC3 pipeline
                  '--out', 'mutect.out',
                  '--coverage_file', 'mutect.cov',
                  '--vcf', 'mutect.vcf']
    dockerCall(job=job, workDir=work_dir, parameters=parameters,
               tool='quay.io/ucsc_cgl/mutect:1.1.7--e8bf09459cf0aecb9f55ee689c2b2d194754cbd3')
    # Write output to file store
    output_file_names = ['mutect.vcf', 'mutect.cov', 'mutect.out']
    output_file_paths = [os.path.join(work_dir, x) for x in output_file_names]
    tarball_files('mutect.tar.gz', file_paths=output_file_paths, output_dir=work_dir)
    return job.fileStore.writeGlobalFile(os.path.join(work_dir, 'mutect.tar.gz'))
python
{ "resource": "" }
q276294
Device.create
test
def create(self, public=False, **kwargs):
    """Creates the device. Attempts to create private devices by default,
    but if public is set to true, creates public devices.

    You can also set other default properties by passing in the relevant information.
    For example, setting a device with the given nickname and description::

        dev.create(nickname="mydevice", description="This is an example")

    Furthermore, ConnectorDB supports creation of a device's streams immediately,
    which can considerably speed up device setup::

        dev.create(streams={
            "stream1": {"schema": '{\"type\":\"number\"}'}
        })

    Note that the schema must be encoded as a string when creating in this format.
    """
    kwargs["public"] = public
    self.metadata = self.db.create(self.path, kwargs).json()
python
{ "resource": "" }
q276295
Device.streams
test
def streams(self):
    """Returns the list of streams that belong to the device"""
    result = self.db.read(self.path, {"q": "ls"})
    if result is None or result.json() is None:
        return []
    streams = []
    for s in result.json():
        strm = self[s["name"]]
        strm.metadata = s
        streams.append(strm)
    return streams
python
{ "resource": "" }
q276296
Device.export
test
def export(self, directory):
    """Exports the device to the given directory. The directory must not already exist.
    You can later import this device by running import_device on a user.
    """
    if os.path.exists(directory):
        raise FileExistsError("The device export directory already exists")
    os.mkdir(directory)
    # Write the device's info
    with open(os.path.join(directory, "device.json"), "w") as f:
        json.dump(self.data, f)
    # Now export the streams one by one
    for s in self.streams():
        s.export(os.path.join(directory, s.name))
python
{ "resource": "" }
q276297
Shosetsu.search_vndb
test
async def search_vndb(self, stype, term):
    """
    Search vndb.org for a term and return matching results from type.

    :param stype: type to search for. Type should be one of:
        v - Visual Novels
        r - Releases
        p - Producers
        s - Staff
        c - Characters
        g - Tags
        i - Traits
        u - Users
    :param term: string to search for
    :return: Results. Result format depends on what you searched for.
        See the Parsing.py module for more specific documentation.

    Exceptions:
        aiohttp.HttpBadRequest - On 404s
        VNDBOneResult - When a search redirects us straight to a direct content page
        VNDBNoResults - When nothing was found for that search
        VNDBBadStype - Raised when an incorrect search type is passed
    """
    fstype = ""
    if stype not in ['v', 'r', 'p', 's', 'c', 'g', 'i', 'u']:
        raise VNDBBadStype(stype)
    else:
        if stype in ['v', 'p', 's', 'c', 'u']:
            fstype = '/{}/all'.format(stype)
        elif stype in ['g', 'i']:
            fstype = '/{}/list'.format(stype)
        elif stype == 'r':
            fstype = '/r'
    async with self.session.get(self.base_url + "{}".format(fstype), params={"q": term},
                                headers=self.headers) as response:
        if response.status == 404:
            raise aiohttp.HttpBadRequest("VN Not Found")
        elif 'q=' not in response.url:
            raise VNDBOneResult(term, response.url.rsplit('/', 1)[1])
        text = await response.text()
        if 'No Results' in text:
            raise VNDBNoResults(term)
        soup = BeautifulSoup(text, 'lxml')
        resp = await self.parse_search(stype, soup)
        if resp == []:
            raise VNDBNoResults(term)
        return resp
python
{ "resource": "" }
q276298
Shosetsu.parse_search
test
async def parse_search(self, stype, soup):
    """
    This is our parsing dispatcher

    :param stype: Search type category
    :param soup: The beautifulsoup object that contains the parsed html
    """
    if stype == 'v':
        return await parse_vn_results(soup)
    elif stype == 'r':
        return await parse_release_results(soup)
    elif stype == 'p':
        return await parse_prod_staff_results(soup)
    elif stype == 's':
        return await parse_prod_staff_results(soup)
    elif stype == 'c':
        return await parse_character_results(soup)
    elif stype == 'g':
        return await parse_tag_results(soup)
    elif stype == 'i':
        return await parse_tag_results(soup)
    elif stype == 'u':
        return await parse_user_results(soup)
python
{ "resource": "" }
q276299
Dataset.addStream
test
def addStream(self, stream, interpolator="closest", t1=None, t2=None, dt=None,
              limit=None, i1=None, i2=None, transform=None, colname=None):
    """Adds the given stream to the query construction. Additionally, you can choose
    the interpolator to use for this stream, as well as a special name for the column
    in the returned dataset. If no column name is given, the full stream path will be used.

    addStream also supports Merge queries. You can insert a merge query instead of
    a stream, but be sure to name the column::

        d = Dataset(cdb, t1=time.time()-1000, t2=time.time(), dt=10.)
        d.addStream("temperature", "average")
        d.addStream("steps", "sum")

        m = Merge(cdb)
        m.addStream("mystream")
        m.addStream("mystream2")
        d.addStream(m, colname="mycolumn")

        result = d.run()
    """
    streamquery = query_maker(t1, t2, limit, i1, i2, transform)
    param_stream(self.cdb, streamquery, stream)
    streamquery["interpolator"] = interpolator

    if colname is None:
        # What do we call this column?
        if isinstance(stream, six.string_types):
            colname = stream
        elif isinstance(stream, Stream):
            colname = stream.path
        else:
            raise Exception(
                "Could not find a name for the column! Use the 'colname' parameter.")

    if colname in self.query["dataset"] or colname == "x":
        raise Exception(
            "The column name either already exists, or is labeled 'x'. Use the colname parameter to change the column name.")

    self.query["dataset"][colname] = streamquery
python
{ "resource": "" }