_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q275800
Gateway.receiver_blueprints
test
def receiver_blueprints(self):
    """ Collect Flask blueprints from every provider that offers one.

        Note: this requires the Flask microframework.

        :rtype: dict
        :returns: A dict { provider-name: Blueprint }
    """
    result = {}
    for provider_name in self._providers:
        try:
            blueprint = self.receiver_blueprint_for(provider_name)
        except NotImplementedError:
            # Provider has no receiver support: simply skip it
            continue
        result[provider_name] = blueprint
    return result
python
{ "resource": "" }
q275801
IProvider._receive_message
test
def _receive_message(self, message): """ Incoming message callback Calls Gateway.onReceive event hook Providers are required to: * Cast phone numbers to digits-only * Support both ASCII and Unicode messages * Populate `message.msgid` and `message.meta` fields * If this method fails with an exception, the provider is required to respond with an error to the service :type message: IncomingMessage :param message: The received message :rtype: IncomingMessage """ # Populate fields message.provider = self.name # Fire the event hook self.gateway.onReceive(message) # Finish return message
python
{ "resource": "" }
q275802
IProvider._receive_status
test
def _receive_status(self, status): """ Incoming status callback Calls Gateway.onStatus event hook Providers are required to: * Cast phone numbers to digits-only * Use proper MessageStatus subclasses * Populate `status.msgid` and `status.meta` fields * If this method fails with an exception, the provider is required to respond with an error to the service :type status: MessageStatus :param status: The received status :rtype: MessageStatus """ # Populate fields status.provider = self.name # Fire the event hook self.gateway.onStatus(status) # Finish return status
python
{ "resource": "" }
q275803
jsonex_api
test
def jsonex_api(f):
    """ View decorator producing JsonEx responses.

    Exceptions raised by the wrapped view are converted into JSON error
    payloads instead of propagating to the framework.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            res = f(*args, **kwargs)
            code = 200
        except HTTPException as e:
            # Preserve the HTTP status carried by the exception
            code, res = e.code, {'error': e}
        except Exception as e:
            # Any other failure is an internal server error
            code, res = 500, {'error': e}
            logger.exception('Method error')
        response = make_response(jsonex_dumps(res), code)
        response.headers['Content-Type'] = 'application/json'
        return response
    return wrapper
python
{ "resource": "" }
q275804
ForwardServerProvider.forward
test
def forward(self, obj):
    """ Forward an object to clients.

    :param obj: The object to be forwarded
    :type obj: smsframework.data.IncomingMessage|smsframework.data.MessageStatus
    :raises Exception: if any of the clients failed
    """
    assert isinstance(obj, (IncomingMessage, MessageStatus)), \
        'Tried to forward an object of an unsupported type: {}'.format(obj)

    targets = self.choose_clients(obj)
    if not Parallel:
        # No parallel executor available: deliver sequentially
        for target in targets:
            self._forward_object_to_client(target, obj)
        return
    # Fan the delivery out to every client at once
    pll = Parallel(self._forward_object_to_client)
    for target in targets:
        pll(target, obj)
    results, errors = pll.join()
    if errors:
        # Surface the first failure to the caller
        raise errors[0]
python
{ "resource": "" }
q275805
SafeCreationTx._sign_web3_transaction
test
def _sign_web3_transaction(tx: Dict[str, any], v: int, r: int, s: int) -> (bytes, HexBytes):
    """ Build a raw signed transaction compatible with `w3.eth.sendRawTransaction`.

    Is not used because the `pyEthereum` implementation of Transaction was
    found to be more robust regarding invalid signatures.
    """
    unsigned_tx = serializable_unsigned_transaction_from_dict(tx)
    # To recover the signing address: ecrecover_to_pub(unsigned_tx.hash(), v, r, s)
    raw_tx = encode_transaction(unsigned_tx, vrs=(v, r, s))
    return raw_tx, unsigned_tx.hash()
python
{ "resource": "" }
q275806
SafeService.estimate_tx_gas_with_web3
test
def estimate_tx_gas_with_web3(self, safe_address: str, to: str, value: int, data: bytes) -> int:
    """ Estimate tx gas using the web3 node (standard `eth_estimateGas`). """
    # 'pending' so the estimation reflects the current mempool state
    return self.ethereum_client.estimate_gas(safe_address, to, value, data,
                                             block_identifier='pending')
python
{ "resource": "" }
q275807
SafeService.estimate_tx_gas
test
def estimate_tx_gas(self, safe_address: str, to: str, value: int, data: bytes, operation: int) -> int:
    """ Estimate tx gas.

    Uses the max of the safe-method calculation and the web3 calculation when
    operation == CALL, or just the safe calculation otherwise.
    """
    # Costs to route through the proxy and nested calls
    proxy_gas = 1000
    # https://github.com/ethereum/solidity/blob/dfe3193c7382c80f1814247a162663a97c3f5e67/libsolidity/codegen/ExpressionCompiler.cpp#L1764
    # This was `false` before solc 0.4.21 -> `m_context.evmVersion().canOverchargeGasForCall()`
    # So gas needed by caller will be around 35k
    old_call_gas = 35000
    overhead = proxy_gas + old_call_gas

    safe_estimation = self.estimate_tx_gas_with_safe(safe_address, to, value, data, operation) + overhead

    if SafeOperation(operation) != SafeOperation.CALL:
        # We cannot estimate DELEGATECALL (different storage)
        return safe_estimation

    try:
        web3_estimation = self.estimate_tx_gas_with_web3(safe_address, to, value, data) + overhead
    except ValueError:
        web3_estimation = 0
    return max(safe_estimation, web3_estimation)
python
{ "resource": "" }
q275808
AbstractAsyncWrapper.write
test
async def write(self, towrite: bytes, await_blocking=False):
    """ Append *towrite* to the write queue.

    >>> await test.write(b"HELLO") # Returns without wait time
    >>> await test.write(b"HELLO", await_blocking = True) # Returns when the bufer is flushed

    :param towrite: Write buffer
    :param await_blocking: wait for everything to be written
    """
    await self._write(towrite)
    if not await_blocking:
        return
    # Caller asked to block until the output buffer is flushed
    return await self.flush()
python
{ "resource": "" }
q275809
Serial.readline
test
async def readline(self) -> bytes:
    """ Read one line from the serial device.

    >>> # Keeps waiting for a linefeed incase there is none in the buffer
    >>> await test.readline()

    :returns: bytes forming a line
    """
    line = self._serial_instance.readline()
    while not line:
        # Nothing buffered yet: yield control and poll again later
        await asyncio.sleep(self._asyncio_sleep_time)
        line = self._serial_instance.readline()
    return line
python
{ "resource": "" }
q275810
Connection.send
test
def send(self, message):
    """Verifies and sends message.

    :param message: Message instance.
    :param envelope_from: Email address to be used in MAIL FROM command.
    """
    assert message.send_to, "No recipients have been added"

    if message.has_bad_headers(self.mail.default_sender):
        raise BadHeaderError

    if message.date is None:
        message.date = time.time()

    sender = message.sender or self.mail.default_sender
    if self.host:
        envelope_from = sanitize_address(sender) if sender is not None else None
        self.host.sendmail(envelope_from,
                           message.send_to,
                           message.as_string(self.mail.default_sender),
                           message.mail_options,
                           message.rcpt_options)

    email_dispatched.send(message, mail=self.mail)

    self.num_emails += 1
    if self.num_emails == self.mail.max_emails:
        # Batch limit reached: recycle the SMTP connection
        self.num_emails = 0
        if self.host:
            self.host.quit()
            self.host = self.configure_host()
python
{ "resource": "" }
q275811
Message.as_string
test
def as_string(self, default_from=None):
    """Creates the email as an RFC 2822 string.

    :param default_from: fallback sender used when the message has none.
    :returns: the fully rendered message, ready for SMTP transmission.
    """
    encoding = self.charset or 'utf-8'
    attachments = self.attachments or []
    if len(attachments) == 0 and not self.html:
        # No html content and zero attachments means plain text
        msg = self._mimetext(self.body)
    elif len(attachments) > 0 and not self.html:
        # No html and at least one attachment means multipart
        msg = MIMEMultipart()
        msg.attach(self._mimetext(self.body))
    else:
        # Anything else: multipart with an alternative plain/html pair
        msg = MIMEMultipart()
        alternative = MIMEMultipart('alternative')
        alternative.attach(self._mimetext(self.body, 'plain'))
        alternative.attach(self._mimetext(self.html, 'html'))
        msg.attach(alternative)
    if self.charset:
        # Explicit charset: wrap the subject for proper header encoding
        msg['Subject'] = Header(self.subject, encoding)
    else:
        msg['Subject'] = self.subject
    sender = self.sender or default_from
    if sender is not None:
        msg['From'] = sanitize_address(sender, encoding)
    # set() de-duplicates recipient addresses before joining
    msg['To'] = ', '.join(list(set(sanitize_addresses(self.recipients, encoding))))
    msg['Date'] = formatdate(self.date, localtime=True)
    # see RFC 5322 section 3.6.4.
    msg['Message-ID'] = self.msgId
    if self.cc:
        msg['Cc'] = ', '.join(list(set(sanitize_addresses(self.cc, encoding))))
    if self.reply_to:
        msg['Reply-To'] = sanitize_address(self.reply_to, encoding)
    if self.extra_headers:
        for k, v in self.extra_headers.items():
            msg[k] = v
    for attachment in attachments:
        f = MIMEBase(*attachment.content_type.split('/'))
        f.set_payload(attachment.data)
        encode_base64(f)
        try:
            # Probe whether the filename is pure ASCII; non-ASCII names need
            # the RFC 2231 (charset, language, value) header form below.
            attachment.filename and attachment.filename.encode('ascii')
        except UnicodeEncodeError:
            filename = attachment.filename
            if not PY3:
                # NOTE(review): PY2 path - encode unicode filename to bytes
                filename = filename.encode('utf8')
            f.add_header('Content-Disposition', attachment.disposition,
                         filename=('UTF8', '', filename))
        else:
            f.add_header('Content-Disposition',
                         '%s;filename=%s' % (attachment.disposition, attachment.filename))
        for key, value in attachment.headers:
            f.add_header(key, value)
        msg.attach(f)
    return msg.as_string()
python
{ "resource": "" }
q275812
Message.has_bad_headers
test
def has_bad_headers(self, default_from=None):
    """Checks for bad headers i.e. newlines in subject, sender or recipients.

    :param default_from: fallback sender address used when the message has
        no explicit sender.
    :returns: True if any header value contains a CR or LF character
        (header-injection attempt), False otherwise.
    """
    sender = self.sender or default_from
    reply_to = self.reply_to or ''

    for val in [self.subject, sender, reply_to] + self.recipients:
        # A header can legitimately be absent (e.g. no sender and no
        # default_from, or no subject); the original code crashed with
        # TypeError on `c in None` - treat missing values as safe instead.
        if not val:
            continue
        if '\r' in val or '\n' in val:
            return True

    return False
python
{ "resource": "" }
q275813
Message.attach
test
def attach(self, filename=None, content_type=None, data=None, disposition=None, headers=None):
    """Adds an attachment to the message.

    :param filename: filename of attachment
    :param content_type: file mimetype
    :param data: the raw file data
    :param disposition: content-disposition (if any)
    :param headers: extra headers for the attachment part
    """
    attachment = Attachment(filename, content_type, data, disposition, headers)
    self.attachments.append(attachment)
python
{ "resource": "" }
q275814
DataAccessLayer.register_services
test
def register_services(self, **services):
    """ Register Services that can be accessed by this DAL.

    Upon registration, the service is set up.

    :param **services: Keyword arguments where the key is the name to
        register the Service as and the value is the Service.
    :returns: self, allowing registration calls to be chained.
    """
    for name, service in services.items():
        if name in self._services:
            msg = 'A Service for {} is already registered.'.format(name)
            raise AlreadyExistsException(msg)
        self._init_service(name, service)
    return self
python
{ "resource": "" }
q275815
from_module
test
def from_module(module_name):
    """ Load a configuration module and return a Config.

    Only UPPERCASE attributes of the module are collected, mirroring the
    Flask/Django settings convention.
    """
    module = importlib.import_module(module_name)
    settings = {name: getattr(module, name)
                for name in dir(module)
                if name.isupper()}
    return Config(settings)
python
{ "resource": "" }
q275816
ResourceManager.register_resources
test
def register_resources(self, **resources):
    """ Register resources with the ResourceManager.

    :param **resources: Keyword arguments where the key is the name to
        register the resource as and the value is the resource.
    :raises AlreadyExistsException: if a resource is already registered
        under one of the given names.
    """
    for key, resource in resources.items():
        if key in self._resources:
            # Fixed message: it previously said "Service" (copy-paste from
            # the DAL), which was misleading for resource registration.
            raise AlreadyExistsException(
                'A Resource for {} is already registered.'.format(key))
        self._init_resource(key, resource)
python
{ "resource": "" }
q275817
Meta.require
test
def require(self, key):
    """ Return the value for ``key``, raising if it is empty.

    :raises ValueError: when the stored value is falsy.
    """
    value = self.get(key)
    if value:
        return value
    raise ValueError('"{}" is empty.'.format(key))
python
{ "resource": "" }
q275818
DataAccessContext._exit
test
def _exit(self, obj, type, value, traceback):
    """ Teardown a Resource or Middleware.

    Mirrors contextlib's generator-based ``__exit__`` protocol: ``obj`` is
    a generator that must have yielded exactly once.

    :param obj: the generator being torn down
    :param type: exception type raised inside the context, or None
    :param value: exception instance, or None
    :param traceback: traceback object, or None
    """
    if type is None:
        # No in-context exception occurred
        try:
            # NOTE(review): Python 2 generator protocol; py3 uses next(obj)
            obj.next()
        except StopIteration:
            # Resource closed as expected
            return
        else:
            raise RuntimeError('{} yielded more than once.'.format(obj))
    else:
        # In-context exception occurred
        try:
            obj.throw(type, value, traceback)
            raise RuntimeError('{} did not close after throw()'.format(obj))
        except StopIteration as exc:
            # Suppress the exception *unless* it's the same exception that
            # was passed to throw(). This prevents a StopIteration
            # raised inside the "with" statement from being suppressed
            return exc is not value
        except:
            # only re-raise if it's *not* the exception that was
            # passed to throw(), because __exit__() must not raise
            # an exception unless __exit__() itself failed. But
            # resource.throw() will raise the exception to signal propagation,
            # so this fixes the impedance mismatch between the throw() protocol
            # and the __exit__() protocol.
            #
            # Middleware or Resources that throw exceptions before yielding
            # will just rethrow the same exception here which is expected. They
            # won't have a chance to do anything about the exception though which
            # seems OK since they never got to the point of being ready anyway.
            if sys.exc_info()[1] is not value:
                raise
python
{ "resource": "" }
q275819
Service.setup
test
def setup(self, data_manager): """ Hook to setup this service with a specific DataManager. Will recursively setup sub-services. """ self._data_manager = data_manager if self._data_manager: self._dal = self._data_manager.get_dal() else: self._dal = None for key, service in self._services.items(): service.setup(self._data_manager)
python
{ "resource": "" }
q275820
_Material.ng
test
def ng(self, wavelength):
    '''The group index with respect to wavelength.

    Args:
        wavelength (float, list, None): The wavelength(s) the group index
            will be evaluated at.

    Returns:
        float, list: The group index at the target wavelength(s).
    '''
    # n_g = n - lambda * dn/dlambda; wavelength is given in nm, hence 1e-9
    phase_index = self.n(wavelength)
    dispersion = self.nDer1(wavelength)
    return phase_index - (wavelength * 1.e-9) * dispersion
python
{ "resource": "" }
q275821
_Material._cauchy_equation
test
def _cauchy_equation(wavelength, coefficients): ''' Helpful function to evaluate Cauchy equations. Args: wavelength (float, list, None): The wavelength(s) the Cauchy equation will be evaluated at. coefficients (list): A list of the coefficients of the Cauchy equation. Returns: float, list: The refractive index at the target wavelength(s). ''' n = 0. for i, c in enumerate(coefficients): exponent = 2*i n += c / wavelength**exponent return n
python
{ "resource": "" }
q275822
BackendUpdate.initialize
test
def initialize(self):  # pylint: disable=attribute-defined-outside-init
    """Login on backend with username and password.

    Also caches the logged-in user, its default realm, the 'All' realm and
    the standard '24x7' / 'Never' timeperiods for later updates.

    :return: None
    """
    try:
        logger.info("Authenticating...")
        self.backend = Backend(self.backend_url)
        self.backend.login(self.username, self.password)
    except BackendException as exp:  # pragma: no cover, should never happen
        logger.exception("Exception: %s", exp)
        logger.error("Response: %s", exp.response)
    # A missing token means authentication failed: abort the whole run
    if self.backend.token is None:
        print("Access denied!")
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~")
        print("Exiting with error code: 1")
        exit(1)
    logger.info("Authenticated.")
    # Logged-in user and default realm
    users = self.backend.get_all('user', {'where': json.dumps({'name': self.username})})
    self.logged_in_user = users['_items'][0]
    self.default_realm = self.logged_in_user['_realm']
    # Main realm: the top-level (level 0) realm named 'All'
    self.realm_all = None
    realms = self.backend.get_all('realm')
    for r in realms['_items']:
        if r['name'] == 'All' and r['_level'] == 0:
            self.realm_all = r['_id']
            logger.info("Found realm 'All': %s", self.realm_all)
        if r['_id'] == self.default_realm:
            logger.info("Found logged-in user realm: %s", r['name'])
    # Default timeperiods: 'always' (24x7) and 'never'
    self.tp_always = None
    self.tp_never = None
    timeperiods = self.backend.get_all('timeperiod')
    for tp in timeperiods['_items']:
        if tp['name'] == '24x7':
            self.tp_always = tp['_id']
            logger.info("Found TP '24x7': %s", self.tp_always)
        if tp['name'].lower() == 'none' or tp['name'].lower() == 'never':
            self.tp_never = tp['_id']
            logger.info("Found TP 'Never': %s", self.tp_never)
python
{ "resource": "" }
q275823
Backend.login
test
def login(self, username, password, generate='enabled', proxies=None):
    """ Log into the backend and get the token

    generate parameter may have following values:
    - enabled: require current token (default)
    - force: force new token generation
    - disabled

    if login is:
    - accepted, returns True
    - refused, returns False

    In case of any error, raises a BackendException

    :param username: login name
    :type username: str
    :param password: password
    :type password: str
    :param generate: Can have these values: enabled | force | disabled
    :type generate: str
    :param proxies: dict of proxy (http and / or https)
    :type proxies: dict
    :return: return True if authentication is successfull, otherwise False
    :rtype: bool
    """
    logger.debug("login for: %s with generate: %s", username, generate)

    if not username or not password:
        raise BackendException(BACKEND_ERROR, "Missing mandatory parameters")

    if proxies:
        # Only http / https proxy protocols are accepted
        for key in proxies.keys():
            try:
                assert key in PROXY_PROTOCOLS
            except AssertionError:
                raise BackendException(BACKEND_ERROR, "Wrong proxy protocol ", key)
        self.proxies = proxies

    endpoint = 'login'
    json = {u'username': username, u'password': password}
    if generate == 'force':
        # Ask the backend to issue a brand new token
        json['action'] = 'generate'
        logger.debug("Asking for generating new token")

    response = self.get_response(method='POST', endpoint=endpoint, json=json)
    if response.status_code == 401:
        # Credentials refused: clear any stale token
        logger.error("Backend refused login with params %s", json)
        self.set_token(token=None)
        return False

    resp = self.decode(response=response)

    if 'token' in resp:
        self.set_token(token=resp['token'])
        return True

    # No token in the response beyond this point
    if generate == 'force':  # pragma: no cover - need specific backend tests
        self.set_token(token=None)
        raise BackendException(BACKEND_ERROR, "Token not provided")

    if generate == 'disabled':  # pragma: no cover - need specific backend tests
        logger.error("Token disabled ... to be implemented!")
        return False

    if generate == 'enabled':  # pragma: no cover - need specific backend tests
        # Retry once, forcing the backend to generate a token
        logger.warning("Token enabled, but none provided, require new token generation")
        return self.login(username, password, 'force')

    return False
python
{ "resource": "" }
q275824
Backend.get_domains
test
def get_domains(self):
    """ Connect to alignak backend and retrieve all available child endpoints of root

    If connection is successful, returns a list of all the resources available
    in the backend. Each resource is identified with its title and provides its
    endpoint relative to backend root endpoint.::

        [
            {u'href': u'loghost', u'title': u'loghost'},
            {u'href': u'escalation', u'title': u'escalation'},
            ...
        ]

    If an error occurs a BackendException is raised.

    If an exception occurs, it is raised to caller.

    :return: list of available resources
    :rtype: list
    """
    resp = self.get('')
    try:
        # The root document advertises its children under _links/child
        return resp["_links"]["child"]
    except KeyError:
        return {}
python
{ "resource": "" }
q275825
Backend.get_all
test
def get_all(self, endpoint, params=None):  # pylint: disable=too-many-locals
    """ Get all items in the specified endpoint of alignak backend

    If an error occurs, a BackendException is raised.

    If the max_results parameter is not specified in parameters, it is set to
    BACKEND_PAGINATION_LIMIT (backend maximum value) to limit requests number.

    This method builds a response that always contains: _items and _status::

        {
            u'_items': [
                ...
            ],
            u'_status': u'OK'
        }

    :param endpoint: endpoint (API URL) relative from root endpoint
    :type endpoint: str
    :param params: list of parameters for the backend API
    :type params: dict
    :return: dict of properties
    :rtype: dict
    """
    # Set max results at maximum value supported by the backend to limit requests number
    if not params:
        params = {'max_results': BACKEND_PAGINATION_LIMIT}
    elif params and 'max_results' not in params:
        params['max_results'] = BACKEND_PAGINATION_LIMIT

    # Get first page
    last_page = False
    items = []
    if self.processes == 1:
        # Sequential pagination: follow the 'next' link until the last page
        while not last_page:
            # Get elements ...
            resp = self.get(endpoint=endpoint, params=params)
            # Response contains:
            # _items:
            # ...
            # _links:
            #   self, parent, prev, last, next
            # _meta:
            # - max_results, total, page
            if 'next' in resp['_links']:
                # Go to next page ...
                params['page'] = int(resp['_meta']['page']) + 1
                params['max_results'] = int(resp['_meta']['max_results'])
            else:
                last_page = True
            items.extend(resp['_items'])
    else:
        def get_pages(endpoint, params, pages, out_q):
            """ Function to get pages loaded by multiprocesses

            :param endpoint: endpoint to get data
            :type endpoint: string
            :param params: parameters for get request
            :type params: dict
            :param pages: range of pages to get
            :type pages: list
            :param out_q: Queue object
            :type out_q: multiprocessing.Queue
            :return: None
            """
            multi_items = []
            for page in pages:
                params['page'] = page
                resp = self.get(endpoint, params)
                multi_items.extend(resp['_items'])
            out_q.put(multi_items)

        # Get first page to learn the total item count / page size
        resp = self.get(endpoint, params)
        number_pages = int(math.ceil(
            float(resp['_meta']['total']) / float(resp['_meta']['max_results'])))

        out_q = multiprocessing.Queue()
        # Split the page range evenly across worker processes
        chunksize = int(math.ceil(number_pages / float(self.processes)))

        procs = []
        for i in range(self.processes):
            begin = i * chunksize
            end = begin + chunksize
            if end > number_pages:
                end = number_pages
            # Backend pages are 1-based, range() end is exclusive
            begin += 1
            end += 1
            p = multiprocessing.Process(target=get_pages,
                                        args=(endpoint, params, range(begin, end), out_q))
            procs.append(p)
            p.start()

        # Collect all results into a single result dict. We know how many dicts
        # with results to expect.
        for i in range(self.processes):
            items.extend(out_q.get())

        # Wait for all worker processes to finish
        for p in procs:
            p.join()

    return {
        '_items': items,
        '_status': 'OK'
    }
python
{ "resource": "" }
q275826
Backend.patch
test
def patch(self, endpoint, data, headers=None, inception=False):
    """ Method to update an item

    The headers must include an If-Match containing the object _etag.
        headers = {'If-Match': contact_etag}

    The data dictionary contain the fields that must be modified.

    If the patching fails because the _etag object do not match with the
    provided one, a BackendException is raised with code = 412.

    If inception is True, this method makes a new get request on the endpoint
    to refresh the _etag and then a new patch is called.

    If an HTTP 412 error occurs, a BackendException is raised. This exception is:
    - code: 412
    - message: response content
    - response: backend response

    All other HTTP error raises a BackendException.

    If some _issues are provided by the backend, this exception is:
    - code: HTTP error code
    - message: response content
    - response: JSON encoded backend response (including '_issues' dictionary ...)

    If no _issues are provided and an _error is signaled by the backend,
    this exception is:
    - code: backend error code
    - message: backend error message
    - response: JSON encoded backend response

    :param endpoint: endpoint (API URL)
    :type endpoint: str
    :param data: properties of item to update
    :type data: dict
    :param headers: headers (example: Content-Type). 'If-Match' required
    :type headers: dict
    :param inception: if True tries to get the last _etag
    :type inception: bool
    :return: dictionary containing patch response from the backend
    :rtype: dict
    """
    if not headers:
        raise BackendException(BACKEND_ERROR, "Header If-Match required for patching an object")

    response = self.get_response(method='PATCH', endpoint=endpoint, json=data, headers=headers)

    if response.status_code == 200:
        return self.decode(response=response)

    if response.status_code == 412:
        # 412 means Precondition failed, but confirm ...
        if inception:
            # update etag and retry to patch (only once: inception=False below)
            resp = self.get(endpoint)
            headers = {'If-Match': resp['_etag']}
            return self.patch(endpoint, data=data, headers=headers, inception=False)

        raise BackendException(response.status_code, response.content)
    else:  # pragma: no cover - should never occur
        raise BackendException(response.status_code, response.content)
python
{ "resource": "" }
q275827
Backend.delete
test
def delete(self, endpoint, headers):
    """ Method to delete an item or all items.

    headers['If-Match'] must contain the _etag identifier of the element
    to delete.

    :param endpoint: endpoint (API URL)
    :type endpoint: str
    :param headers: headers (example: Content-Type)
    :type headers: dict
    :return: response (deletion information)
    :rtype: dict
    """
    response = self.get_response(method='DELETE', endpoint=endpoint, headers=headers)
    logger.debug("delete, response: %s", response)
    if response.status_code != 204:  # pragma: no cover - should not happen ...
        # decode() raises on backend errors; its return value is discarded
        self.decode(response=response)
    return {"_status": "OK"}
python
{ "resource": "" }
q275828
samefile
test
def samefile(path1, path2):
    """ Returns True if path1 and path2 refer to the same file. """
    # Two paths name the same file when the volume serial number and the
    # 64-bit file index (high + low halves) all match.
    info1 = fs.getfileinfo(path1)
    info2 = fs.getfileinfo(path2)
    same_volume = info1.dwVolumeSerialNumber == info2.dwVolumeSerialNumber
    same_index = (info1.nFileIndexHigh == info2.nFileIndexHigh
                  and info1.nFileIndexLow == info2.nFileIndexLow)
    return same_volume and same_index
python
{ "resource": "" }
q275829
create
test
def create(source, link_name):
    """ Create a junction at link_name pointing to source.

    Creates the target directory, opens it with backup/reparse-point
    semantics and installs an NTFS reparse point on it. On any failure the
    freshly created directory is removed again.
    """
    success = False
    if not os.path.isdir(source):
        raise Exception("%s is not a directory" % source)
    if os.path.exists(link_name):
        raise Exception("%s: junction link name already exists" % link_name)
    link_name = os.path.abspath(link_name)
    # A junction is a reparse point attached to an existing directory
    os.mkdir(link_name)
    # Get a handle to the directory
    hlink = CreateFile(link_name, fs.GENERIC_WRITE,
                       fs.FILE_SHARE_READ | fs.FILE_SHARE_WRITE, None,
                       fs.OPEN_EXISTING,
                       fs.FILE_FLAG_OPEN_REPARSE_POINT | fs.FILE_FLAG_BACKUP_SEMANTICS,
                       None)
    try:
        if hlink == fs.INVALID_HANDLE_VALUE:
            raise WinError()
        # Build the reparse data buffer with the NT volume path of source
        srcvolpath = unparsed_convert(source)
        (junctioninfo, infolen) = new_junction_reparse_buffer(srcvolpath)
        dummy = DWORD(0)
        res = DeviceIoControl(hlink, FSCTL_SET_REPARSE_POINT,
                              byref(junctioninfo), infolen,
                              None, 0, byref(dummy), None)
        if res == 0:
            raise WinError()
        success = True
    finally:
        if hlink != fs.INVALID_HANDLE_VALUE:
            CloseHandle(hlink)
        if not success:
            # Roll back the directory we created above
            os.rmdir(link_name)
python
{ "resource": "" }
q275830
initialize_logger
test
def initialize_logger(args):
    """Sets command name and formatting for subsequent calls to logger.

    :param args: parsed command-line namespace; reads args.log_file,
        args.verbose and args.subparser_name.
    """
    # Default log location is the current working directory
    global log_filename
    log_filename = os.path.join(os.getcwd(), "jacquard.log")
    if args.log_file:
        _validate_log_file(args.log_file)
        log_filename = args.log_file
    logging.basicConfig(format=_FILE_LOG_FORMAT,
                        level="DEBUG",
                        datefmt=_DATE_FORMAT,
                        filename=log_filename)

    global _verbose
    if args.verbose:
        _verbose = args.verbose

    # Context recorded on every subsequent log line
    start_time = datetime.now().strftime(_DATE_FORMAT)
    global _logging_dict
    _logging_dict = {'user': getpass.getuser(),
                     'host': socket.gethostname(),
                     'start_time': start_time,
                     'tool': args.subparser_name}
python
{ "resource": "" }
q275831
_JacquardArgumentParser.error
test
def error(self, message):
    '''Suppress default exit behavior'''
    # argparse would normally print usage and sys.exit(2); raise a
    # UsageError instead so the caller controls reporting and exit.
    remessaged = self._remessage_invalid_subparser(message)
    raise utils.UsageError(remessaged)
python
{ "resource": "" }
q275832
Mutect.claim
test
def claim(self, file_readers):
    """Recognizes and claims MuTect VCFs form the set of all input VCFs.

    Each defined caller has a chance to evaluate and claim all the incoming
    files as something that it can process.

    Args:
        file_readers: the collection of currently unclaimed files

    Returns:
        A tuple of unclaimed readers and MuTectVcfReaders.
    """
    unclaimed_readers = []
    vcf_readers = []
    for reader in file_readers:
        if not self._is_mutect_vcf(reader):
            unclaimed_readers.append(reader)
        else:
            vcf_readers.append(_MutectVcfReader(vcf.VcfReader(reader)))
    return (unclaimed_readers, vcf_readers)
python
{ "resource": "" }
q275833
Mutect._get_new_column_header
test
def _get_new_column_header(self, vcf_reader): """Returns a standardized column header. MuTect sample headers include the name of input alignment, which is nice, but doesn't match up with the sample names reported in Strelka or VarScan. To fix this, we replace with NORMAL and TUMOR using the MuTect metadata command line to replace them correctly.""" mutect_dict = self._build_mutect_dict(vcf_reader.metaheaders) new_header_list = [] required_keys = set([self._NORMAL_SAMPLE_KEY, self._TUMOR_SAMPLE_KEY]) mutect_keys = set(mutect_dict.keys()) if not required_keys.issubset(mutect_keys): raise utils.JQException("Unable to determine normal " "and tumor sample ordering " "based on MuTect metaheader.") for field_name in vcf_reader.column_header.split("\t"): if field_name == mutect_dict[self._NORMAL_SAMPLE_KEY]: field_name = "NORMAL" elif field_name == mutect_dict[self._TUMOR_SAMPLE_KEY]: field_name = "TUMOR" new_header_list.append(field_name) return "\t".join(new_header_list)
python
{ "resource": "" }
q275834
Varscan.claim
test
def claim(self, file_readers):
    """Recognizes and claims VarScan VCFs form the set of all input VCFs.

    Each defined caller has a chance to evaluate and claim all the incoming
    files as something that it can process. Since VarScan can claim
    high-confidence files as well, this process is significantly more
    complex than for other callers.

    Args:
        file_readers: the collection of currently unclaimed files

    Returns:
        A tuple of unclaimed readers and VarScanVcfReaders.
    """
    (prefix_to_readers,
     filter_files,
     unclaimed_set) = self._find_varscan_files(file_readers)
    # Ensure every patient has a consistent set of VarScan files
    self._validate_vcf_readers(self._split_prefix_by_patient(prefix_to_readers))
    # Pair each somatic VCF with its matching high-confidence filter file
    vcf_hc_pairs = self._pair_files(prefix_to_readers, filter_files)
    self._validate_vcf_hc_pairs(vcf_hc_pairs)
    vcf_readers = self._create_vcf_readers(vcf_hc_pairs)
    return list(unclaimed_set), vcf_readers
python
{ "resource": "" }
q275835
_ZScoreTag._init_population_stats
test
def _init_population_stats(self, vcf_reader, dependent_tag_id): '''Derive mean and stdev. Adapted from online variance algorithm from Knuth, The Art of Computer Programming, volume 2 Returns: mean and stdev when len(values) > 1, otherwise (None, None) Values rounded to _MAX_PRECISION to ameliorate discrepancies between python versions.''' #pylint: disable=invalid-name n = 0 mean = 0 M2 = 0 try: vcf_reader.open() for vcf_record in vcf_reader.vcf_records(): for tag_values in vcf_record.sample_tag_values.values(): value = self._get_dependent_value(tag_values, dependent_tag_id) if value is not None: n += 1 delta = value - mean mean += delta / n M2 += delta * (value - mean) finally: vcf_reader.close() mean = round(mean, self._MAX_PRECISION) stdev = 0 if n == 0: mean = None stdev = None elif n >= 2: variance = M2/n stdev = round(math.sqrt(variance), self._MAX_PRECISION) return mean, stdev
python
{ "resource": "" }
q275836
VariantCallerFactory.claim
test
def claim(self, unclaimed_file_readers):
    """Allows each caller to claim incoming files as they are recognized.

    Args:
        unclaimed_file_readers: Usually, all files in the input dir.

    Returns:
        A tuple of unclaimed file readers and claimed VcfReaders. The
        presence of any unclaimed file readers could indicate stray files
        in the input dir.
    """
    claimed_vcf_readers = []
    for caller in self._callers:
        # Each caller removes what it recognizes from the unclaimed pool
        unclaimed_file_readers, translated = caller.claim(unclaimed_file_readers)
        claimed_vcf_readers.extend(translated)
    return unclaimed_file_readers, claimed_vcf_readers
python
{ "resource": "" }
q275837
Tailer.splitlines
test
def splitlines(self, data):
    """ Split data into lines where lines are separated by LINE_TERMINATORS.

    :param data: Any chunk of binary data.
    :return: List of lines without any characters at LINE_TERMINATORS.
    """
    # Build an alternation pattern out of every known terminator
    pattern = b'|'.join(self.LINE_TERMINATORS)
    return re.split(pattern, data)
python
{ "resource": "" }
q275838
Tailer.prefix_line_terminator
test
def prefix_line_terminator(self, data):
    """ Return line terminator data begins with or None. """
    # Relies on LINE_TERMINATORS listing multi-byte terminators first,
    # so b'\r\n' wins over a bare b'\r'.
    return next((t for t in self.LINE_TERMINATORS if data.startswith(t)), None)
python
{ "resource": "" }
q275839
Tailer.suffix_line_terminator
test
def suffix_line_terminator(self, data):
    """ Return line terminator data ends with or None. """
    # Relies on LINE_TERMINATORS listing multi-byte terminators first,
    # so b'\r\n' wins over a bare b'\n'.
    return next((t for t in self.LINE_TERMINATORS if data.endswith(t)), None)
python
{ "resource": "" }
q275840
Tailer.seek_next_line
test
def seek_next_line(self): """ Seek next line relative to the current file position. :return: Position of the line or -1 if next line was not found. """ where = self.file.tell() offset = 0 while True: data_len, data = self.read(self.read_size) data_where = 0 if not data_len: break # Consider the following example: Foo\r | \nBar where " | " denotes current position, # 'Foo\r' is the read part and '\nBar' is the remaining part. # We should completely consume terminator "\r\n" by reading one extra byte. if b'\r\n' in self.LINE_TERMINATORS and data[-1] == b'\r'[0]: terminator_where = self.file.tell() terminator_len, terminator_data = self.read(1) if terminator_len and terminator_data[0] == b'\n'[0]: data_len += 1 data += b'\n' else: self.file.seek(terminator_where) while data_where < data_len: terminator = self.prefix_line_terminator(data[data_where:]) if terminator: self.file.seek(where + offset + data_where + len(terminator)) return self.file.tell() else: data_where += 1 offset += data_len self.file.seek(where + offset) return -1
python
{ "resource": "" }
q275841
Tailer.seek_previous_line
test
def seek_previous_line(self): """ Seek previous line relative to the current file position. :return: Position of the line or -1 if previous line was not found. """ where = self.file.tell() offset = 0 while True: if offset == where: break read_size = self.read_size if self.read_size <= where else where self.file.seek(where - offset - read_size, SEEK_SET) data_len, data = self.read(read_size) # Consider the following example: Foo\r | \nBar where " | " denotes current position, # '\nBar' is the read part and 'Foo\r' is the remaining part. # We should completely consume terminator "\r\n" by reading one extra byte. if b'\r\n' in self.LINE_TERMINATORS and data[0] == b'\n'[0]: terminator_where = self.file.tell() if terminator_where > data_len + 1: self.file.seek(where - offset - data_len - 1, SEEK_SET) terminator_len, terminator_data = self.read(1) if terminator_data[0] == b'\r'[0]: data_len += 1 data = b'\r' + data self.file.seek(terminator_where) data_where = data_len while data_where > 0: terminator = self.suffix_line_terminator(data[:data_where]) if terminator and offset == 0 and data_where == data_len: # The last character is a line terminator that finishes current line. Ignore it. data_where -= len(terminator) elif terminator: self.file.seek(where - offset - (data_len - data_where)) return self.file.tell() else: data_where -= 1 offset += data_len if where == 0: # Nothing more to read. return -1 else: # Very first line. self.file.seek(0) return 0
python
{ "resource": "" }
q275842
Tailer.tail
test
def tail(self, lines=10): """ Return the last lines of the file. """ self.file.seek(0, SEEK_END) for i in range(lines): if self.seek_previous_line() == -1: break data = self.file.read() for t in self.LINE_TERMINATORS: if data.endswith(t): # Only terminators _between_ lines should be preserved. # Otherwise terminator of the last line will be treated as separtaing line and empty line. data = data[:-len(t)] break if data: return self.splitlines(data) else: return []
python
{ "resource": "" }
q275843
Tailer.head
test
def head(self, lines=10): """ Return the top lines of the file. """ self.file.seek(0) for i in range(lines): if self.seek_next_line() == -1: break end_pos = self.file.tell() self.file.seek(0) data = self.file.read(end_pos) for t in self.LINE_TERMINATORS: if data.endswith(t): # Only terminators _between_ lines should be preserved. # Otherwise terminator of the last line will be treated as separtaing line and empty line. data = data[:-len(t)] break if data: return self.splitlines(data) else: return []
python
{ "resource": "" }
q275844
Tailer.follow
test
def follow(self): """ Iterator generator that returns lines as data is added to the file. None will be yielded if no new line is available. Caller may either wait and re-try or end iteration. """ trailing = True while True: where = self.file.tell() if where > os.fstat(self.file.fileno()).st_size: # File was truncated. where = 0 self.file.seek(where) line = self.file.readline() if line: if trailing and line in self.LINE_TERMINATORS: # This is just the line terminator added to the end of the file # before a new line, ignore. trailing = False continue terminator = self.suffix_line_terminator(line) if terminator: line = line[:-len(terminator)] trailing = False yield line else: trailing = True self.file.seek(where) yield None
python
{ "resource": "" }
q275845
Strelka.claim
test
def claim(self, file_readers): """Recognizes and claims Strelka VCFs form the set of all input VCFs. Each defined caller has a chance to evaluate and claim all the incoming files as something that it can process. Args: file_readers: the collection of currently unclaimed files Returns: A tuple of unclaimed readers and StrelkaVcfReaders. """ (prefix_to_reader, unclaimed_readers) = self._find_strelka_files(file_readers) prefix_by_patients = self._split_prefix_by_patient(prefix_to_reader) self._validate_vcf_readers(prefix_by_patients) vcf_readers = self._create_vcf_readers(prefix_to_reader) return (unclaimed_readers, vcf_readers)
python
{ "resource": "" }
q275846
VcfRecord.parse_record
test
def parse_record(cls, vcf_line, sample_names): """Alternative constructor that parses VcfRecord from VCF string. Aspire to parse/represent the data such that it could be reliably round-tripped. (This nicety means INFO fields and FORMAT tags should be treated as ordered to avoid shuffling.) Args: vcf_line: the VCF variant record as a string; tab separated fields, trailing newlines are ignored. Must have at least 8 fixed fields (through INFO) sample_names: a list of sample name strings; these should match the VCF header column Returns: A mutable VcfRecord. """ vcf_fields = vcf_line.rstrip("\r\n").split("\t") chrom, pos, rid, ref, alt, qual, rfilter, info \ = vcf_fields[0:8] sample_fields = [] sample_tag_values = {} if len(vcf_fields) > 9: rformat = vcf_fields[8] sample_fields = vcf_fields[9:] sample_tag_values = VcfRecord._sample_tag_values(sample_names, rformat, sample_fields) return VcfRecord(chrom, pos, ref, alt, rid, qual, rfilter, info, sample_tag_values)
python
{ "resource": "" }
q275847
VcfRecord._sample_tag_values
test
def _sample_tag_values(cls, sample_names, rformat, sample_fields): """Creates a sample dict of tag-value dicts for a single variant record. Args: sample_names: list of sample name strings. rformat: record format string (from VCF record). sample_fields: list of strings where each string is the ';' seperated format values for an individual sample. Returns: An dict of samples, where each key is a sample and each value is an dict of format-values. See attribute below for example. Will return '.' if no values for sampe field. """ sample_tag_values = OrderedDict() tag_names = VcfRecord._format_list(rformat) for i, sample_field in enumerate(sample_fields): tag_values = sample_field.split(":") if sample_field else "." sample_tag_values[sample_names[i]] = OrderedDict(zip(tag_names, tag_values)) return sample_tag_values
python
{ "resource": "" }
q275848
VcfRecord.format_tags
test
def format_tags(self): """Returns set of format tags.""" tags = VcfRecord._EMPTY_SET if self.sample_tag_values: first_sample = list(self.sample_tag_values.keys())[0] tags = set(self.sample_tag_values[first_sample].keys()) return tags
python
{ "resource": "" }
q275849
VcfRecord._join_info_fields
test
def _join_info_fields(self): """Updates info attribute from info dict.""" if self.info_dict: info_fields = [] if len(self.info_dict) > 1: self.info_dict.pop(".", None) for field, value in self.info_dict.items(): if field == value: info_fields.append(value) else: info_fields.append("=".join([field, value])) self.info = ";".join(info_fields) else: self.info = "."
python
{ "resource": "" }
q275850
VcfRecord._format_field
test
def _format_field(self): """Returns string representation of format field.""" format_field = "." if self.sample_tag_values: first_sample = list(self.sample_tag_values.keys())[0] tag_names = self.sample_tag_values[first_sample].keys() if tag_names: format_field = ":".join(tag_names) return format_field
python
{ "resource": "" }
q275851
VcfRecord._sample_field
test
def _sample_field(self, sample): """Returns string representation of sample-format values. Raises: KeyError: if requested sample is not defined. """ tag_values = self.sample_tag_values[sample].values() if tag_values: return ":".join(tag_values) else: return "."
python
{ "resource": "" }
q275852
VcfRecord.text
test
def text(self): "Returns tab-delimited, newline terminated string of VcfRecord." stringifier = [self.chrom, self.pos, self.vcf_id, self.ref, self.alt, self.qual, self.filter, self.info, self._format_field()] for sample in self.sample_tag_values: stringifier.append(self._sample_field(sample)) return "\t".join(stringifier) + "\n"
python
{ "resource": "" }
q275853
VcfRecord.add_sample_tag_value
test
def add_sample_tag_value(self, tag_name, new_sample_values): """Appends a new format tag-value for all samples. Args: tag_name: string tag name; must not already exist new_sample Raises: KeyError: if tag_name to be added already exists """ if tag_name in self.format_tags: msg = "New format value [{}] already exists.".format(tag_name) raise KeyError(msg) if not self._samples_match(new_sample_values): raise KeyError("Sample name values must match " "existing sample names") for sample in self.sample_tag_values.keys(): value = str(new_sample_values[sample]) self.sample_tag_values[sample][tag_name] = value
python
{ "resource": "" }
q275854
VcfRecord.add_or_replace_filter
test
def add_or_replace_filter(self, new_filter): """Replaces null or blank filter or adds filter to existing list.""" if self.filter.lower() in self._FILTERS_TO_REPLACE: self.filter = new_filter elif new_filter not in self.filter.split(";"): self.filter = ";".join([self.filter, new_filter])
python
{ "resource": "" }
q275855
CategoryController.available_categories
test
def available_categories(cls, user, products=AllProducts): ''' Returns the categories available to the user. Specify `products` if you want to restrict to just the categories that hold the specified products, otherwise it'll do all. ''' # STOPGAP -- this needs to be elsewhere tbqh from .product import ProductController if products is AllProducts: products = inventory.Product.objects.all().select_related( "category", ) available = ProductController.available_products( user, products=products, ) return sorted(set(i.category for i in available), key=attrgetter("order"))
python
{ "resource": "" }
q275856
ProductsForm
test
def ProductsForm(category, products): ''' Produces an appropriate _ProductsForm subclass for the given render type. ''' # Each Category.RENDER_TYPE value has a subclass here. cat = inventory.Category RENDER_TYPES = { cat.RENDER_TYPE_QUANTITY: _QuantityBoxProductsForm, cat.RENDER_TYPE_RADIO: _RadioButtonProductsForm, cat.RENDER_TYPE_ITEM_QUANTITY: _ItemQuantityProductsForm, cat.RENDER_TYPE_CHECKBOX: _CheckboxProductsForm, } # Produce a subclass of _ProductsForm which we can alter the base_fields on class ProductsForm(RENDER_TYPES[category.render_type]): pass products = list(products) products.sort(key=lambda prod: prod.order) ProductsForm.set_fields(category, products) if category.render_type == inventory.Category.RENDER_TYPE_ITEM_QUANTITY: ProductsForm = forms.formset_factory( ProductsForm, formset=_ItemQuantityProductsFormSet, ) return ProductsForm
python
{ "resource": "" }
q275857
staff_products_form_factory
test
def staff_products_form_factory(user): ''' Creates a StaffProductsForm that restricts the available products to those that are available to a user. ''' products = inventory.Product.objects.all() products = ProductController.available_products(user, products=products) product_ids = [product.id for product in products] product_set = inventory.Product.objects.filter(id__in=product_ids) class StaffProductsForm(forms.Form): ''' Form for allowing staff to add an item to a user's cart. ''' product = forms.ModelChoiceField( widget=forms.Select, queryset=product_set, ) quantity = forms.IntegerField( min_value=0, ) return StaffProductsForm
python
{ "resource": "" }
q275858
_HasProductsFields.add_product_error
test
def add_product_error(self, product, error): ''' Adds an error to the given product's field ''' ''' if product in field_names: field = field_names[product] elif isinstance(product, inventory.Product): return else: field = None ''' self.add_error(self.field_name(product), error)
python
{ "resource": "" }
q275859
BatchController.memoise
test
def memoise(cls, func): ''' Decorator that stores the result of the stored function in the user's results cache until the batch completes. Keyword arguments are not yet supported. Arguments: func (callable(*a)): The function whose results we want to store. The positional arguments, ``a``, are used as cache keys. Returns: callable(*a): The memosing version of ``func``. ''' @functools.wraps(func) def f(*a): for arg in a: if isinstance(arg, User): user = arg break else: raise ValueError("One position argument must be a User") func_key = (func, tuple(a)) cache = cls.get_cache(user) if func_key not in cache: cache[func_key] = func(*a) return cache[func_key] return f
python
{ "resource": "" }
q275860
model_fields_form_factory
test
def model_fields_form_factory(model): ''' Creates a form for specifying fields from a model to display. ''' fields = model._meta.get_fields() choices = [] for field in fields: if hasattr(field, "verbose_name"): choices.append((field.name, field.verbose_name)) class ModelFieldsForm(forms.Form): fields = forms.MultipleChoiceField( choices=choices, required=False, ) return ModelFieldsForm
python
{ "resource": "" }
q275861
ItemController.items_pending_or_purchased
test
def items_pending_or_purchased(self): ''' Returns the items that this user has purchased or has pending. ''' status = [commerce.Cart.STATUS_PAID, commerce.Cart.STATUS_ACTIVE] return self._items(status)
python
{ "resource": "" }
q275862
Sender.send_email
test
def send_email(self, to, kind, **kwargs): ''' Sends an e-mail to the given address. to: The address kind: the ID for an e-mail kind; it should point to a subdirectory of self.template_prefix containing subject.txt and message.html, which are django templates for the subject and HTML message respectively. context: a context for rendering the e-mail. ''' return __send_email__(self.template_prefix, to, kind, **kwargs)
python
{ "resource": "" }
q275863
iter_osm_stream
test
def iter_osm_stream(start_sqn=None, base_url='https://planet.openstreetmap.org/replication/minute', expected_interval=60, parse_timestamps=True, state_dir=None): """Start processing an OSM diff stream and yield one changeset at a time to the caller.""" # If the user specifies a state_dir, read the state from the statefile there if state_dir: if not os.path.exists(state_dir): raise Exception('Specified state_dir "%s" doesn\'t exist.' % state_dir) if os.path.exists('%s/state.txt' % state_dir): with open('%s/state.txt' % state_dir) as f: state = readState(f) start_sqn = state['sequenceNumber'] # If no start_sqn, assume to start from the most recent diff if not start_sqn: u = urllib2.urlopen('%s/state.txt' % base_url) state = readState(u) else: sqnStr = str(start_sqn).zfill(9) u = urllib2.urlopen('%s/%s/%s/%s.state.txt' % (base_url, sqnStr[0:3], sqnStr[3:6], sqnStr[6:9])) state = readState(u) interval_fudge = 0.0 while True: sqnStr = state['sequenceNumber'].zfill(9) url = '%s/%s/%s/%s.osc.gz' % (base_url, sqnStr[0:3], sqnStr[3:6], sqnStr[6:9]) content = urllib2.urlopen(url) content = StringIO.StringIO(content.read()) gzipper = gzip.GzipFile(fileobj=content) for a in iter_osm_change_file(gzipper, parse_timestamps): yield a # After parsing the OSC, check to see how much time is remaining stateTs = datetime.datetime.strptime(state['timestamp'], "%Y-%m-%dT%H:%M:%SZ") yield (None, model.Finished(state['sequenceNumber'], stateTs)) nextTs = stateTs + datetime.timedelta(seconds=expected_interval + interval_fudge) if datetime.datetime.utcnow() < nextTs: timeToSleep = (nextTs - datetime.datetime.utcnow()).total_seconds() else: timeToSleep = 0.0 time.sleep(timeToSleep) # Then try to fetch the next state file sqnStr = str(int(state['sequenceNumber']) + 1).zfill(9) url = '%s/%s/%s/%s.state.txt' % (base_url, sqnStr[0:3], sqnStr[3:6], sqnStr[6:9]) delay = 1.0 while True: try: u = urllib2.urlopen(url) interval_fudge -= (interval_fudge / 2.0) break except urllib2.HTTPError as e: if 
e.code == 404: time.sleep(delay) delay = min(delay * 2, 13) interval_fudge += delay if state_dir: with open('%s/state.txt' % state_dir, 'w') as f: f.write(u.read()) with open('%s/state.txt' % state_dir, 'r') as f: state = readState(f) else: state = readState(u)
python
{ "resource": "" }
q275864
parse_osm_file
test
def parse_osm_file(f, parse_timestamps=True): """Parse a file-like containing OSM XML into memory and return an object with the nodes, ways, and relations it contains. """ nodes = [] ways = [] relations = [] for p in iter_osm_file(f, parse_timestamps): if type(p) == model.Node: nodes.append(p) elif type(p) == model.Way: ways.append(p) elif type(p) == model.Relation: relations.append(p) return (nodes, ways, relations)
python
{ "resource": "" }
q275865
iter_osm_notes
test
def iter_osm_notes(feed_limit=25, interval=60, parse_timestamps=True): """ Parses the global OSM Notes feed and yields as much Note information as possible. """ last_seen_guid = None while True: u = urllib2.urlopen('https://www.openstreetmap.org/api/0.6/notes/feed?limit=%d' % feed_limit) tree = etree.parse(u) new_notes = [] for note_item in tree.xpath('/rss/channel/item'): title = note_item.xpath('title')[0].text if title.startswith('new note ('): action = 'create' elif title.startswith('new comment ('): action = 'comment' elif title.startswith('closed note ('): action = 'close' # Note that (at least for now) the link and guid are the same in the feed. guid = note_item.xpath('link')[0].text if last_seen_guid == guid: break elif last_seen_guid == None: # The first time through we want the first item to be the "last seen" # because the RSS feed is newest-to-oldest last_seen_guid = guid else: note_id = int(guid.split('/')[-1].split('#c')[0]) new_notes.append((action, get_note(note_id, parse_timestamps))) # We yield the reversed list because we want to yield in change order # (i.e. "oldest to most current") for note in reversed(new_notes): yield note yield model.Finished(None, None) time.sleep(interval)
python
{ "resource": "" }
q275866
ConditionController.passes_filter
test
def passes_filter(self, user): ''' Returns true if the condition passes the filter ''' cls = type(self.condition) qs = cls.objects.filter(pk=self.condition.id) return self.condition in self.pre_filter(qs, user)
python
{ "resource": "" }
q275867
IsMetByFilter.is_met
test
def is_met(self, user, filtered=False): ''' Returns True if this flag condition is met, otherwise returns False. It determines if the condition is met by calling pre_filter with a queryset containing only self.condition. ''' if filtered: return True # Why query again? return self.passes_filter(user)
python
{ "resource": "" }
q275868
RemainderSetByFilter.user_quantity_remaining
test
def user_quantity_remaining(self, user, filtered=True): ''' returns 0 if the date range is violated, otherwise, it will return the quantity remaining under the stock limit. The filter for this condition must add an annotation called "remainder" in order for this to work. ''' if filtered: if hasattr(self.condition, "remainder"): return self.condition.remainder # Mark self.condition with a remainder qs = type(self.condition).objects.filter(pk=self.condition.id) qs = self.pre_filter(qs, user) if len(qs) > 0: return qs[0].remainder else: return 0
python
{ "resource": "" }
q275869
CategoryConditionController.pre_filter
test
def pre_filter(self, queryset, user): ''' Returns all of the items from queryset where the user has a product from a category invoking that item's condition in one of their carts. ''' in_user_carts = Q( enabling_category__product__productitem__cart__user=user ) released = commerce.Cart.STATUS_RELEASED in_released_carts = Q( enabling_category__product__productitem__cart__status=released ) queryset = queryset.filter(in_user_carts) queryset = queryset.exclude(in_released_carts) return queryset
python
{ "resource": "" }
q275870
ProductConditionController.pre_filter
test
def pre_filter(self, queryset, user): ''' Returns all of the items from queryset where the user has a product invoking that item's condition in one of their carts. ''' in_user_carts = Q(enabling_products__productitem__cart__user=user) released = commerce.Cart.STATUS_RELEASED paid = commerce.Cart.STATUS_PAID active = commerce.Cart.STATUS_ACTIVE in_released_carts = Q( enabling_products__productitem__cart__status=released ) not_in_paid_or_active_carts = ~( Q(enabling_products__productitem__cart__status=paid) | Q(enabling_products__productitem__cart__status=active) ) queryset = queryset.filter(in_user_carts) queryset = queryset.exclude( in_released_carts & not_in_paid_or_active_carts ) return queryset
python
{ "resource": "" }
q275871
TimeOrStockLimitConditionController.pre_filter
test
def pre_filter(self, queryset, user): ''' Returns all of the items from queryset where the date falls into any specified range, but not yet where the stock limit is not yet reached.''' now = timezone.now() # Keep items with no start time, or start time not yet met. queryset = queryset.filter(Q(start_time=None) | Q(start_time__lte=now)) queryset = queryset.filter(Q(end_time=None) | Q(end_time__gte=now)) # Filter out items that have been reserved beyond the limits quantity_or_zero = self._calculate_quantities(user) remainder = Case( When(limit=None, then=Value(_BIG_QUANTITY)), default=F("limit") - Sum(quantity_or_zero), ) queryset = queryset.annotate(remainder=remainder) queryset = queryset.filter(remainder__gt=0) return queryset
python
{ "resource": "" }
q275872
SpeakerConditionController.pre_filter
test
def pre_filter(self, queryset, user): ''' Returns all of the items from queryset which are enabled by a user being a presenter or copresenter of a non-cancelled proposal. ''' # Filter out cancelled proposals queryset = queryset.filter( proposal_kind__proposalbase__presentation__cancelled=False ) u = user # User is a presenter user_is_presenter = Q( is_presenter=True, proposal_kind__proposalbase__presentation__speaker__user=u, ) # User is a copresenter user_is_copresenter = Q( is_copresenter=True, proposal_kind__proposalbase__presentation__additional_speakers__user=u, # NOQA ) return queryset.filter(user_is_presenter | user_is_copresenter)
python
{ "resource": "" }
q275873
GroupMemberConditionController.pre_filter
test
def pre_filter(self, conditions, user): ''' Returns all of the items from conditions which are enabled by a user being member of a Django Auth Group. ''' return conditions.filter(group__in=user.groups.all())
python
{ "resource": "" }
q275874
_modifies_cart
test
def _modifies_cart(func): ''' Decorator that makes the wrapped function raise ValidationError if we're doing something that could modify the cart. It also wraps the execution of this function in a database transaction, and marks the boundaries of a cart operations batch. ''' @functools.wraps(func) def inner(self, *a, **k): self._fail_if_cart_is_not_active() with transaction.atomic(): with BatchController.batch(self.cart.user): # Mark the version of self in the batch cache as modified memoised = self.for_user(self.cart.user) memoised._modified_by_batch = True return func(self, *a, **k) return inner
python
{ "resource": "" }
q275875
CartController.for_user
test
def for_user(cls, user): ''' Returns the user's current cart, or creates a new cart if there isn't one ready yet. ''' try: existing = commerce.Cart.objects.get( user=user, status=commerce.Cart.STATUS_ACTIVE, ) except ObjectDoesNotExist: existing = commerce.Cart.objects.create( user=user, time_last_updated=timezone.now(), reservation_duration=datetime.timedelta(), ) return cls(existing)
python
{ "resource": "" }
q275876
CartController._autoextend_reservation
test
def _autoextend_reservation(self): ''' Updates the cart's time last updated value, which is used to determine whether the cart has reserved the items and discounts it holds. ''' time = timezone.now() # Calculate the residual of the _old_ reservation duration # if it's greater than what's in the cart now, keep it. time_elapsed_since_updated = (time - self.cart.time_last_updated) residual = self.cart.reservation_duration - time_elapsed_since_updated reservations = [datetime.timedelta(0), residual] # If we have vouchers, we're entitled to an hour at minimum. if len(self.cart.vouchers.all()) >= 1: reservations.append(inventory.Voucher.RESERVATION_DURATION) # Else, it's the maximum of the included products items = commerce.ProductItem.objects.filter(cart=self.cart) agg = items.aggregate(Max("product__reservation_duration")) product_max = agg["product__reservation_duration__max"] if product_max is not None: reservations.append(product_max) self.cart.time_last_updated = time self.cart.reservation_duration = max(reservations)
python
{ "resource": "" }
q275877
CartController.apply_voucher
test
def apply_voucher(self, voucher_code): ''' Applies the voucher with the given code to this cart. ''' # Try and find the voucher voucher = inventory.Voucher.objects.get(code=voucher_code.upper()) # Re-applying vouchers should be idempotent if voucher in self.cart.vouchers.all(): return self._test_voucher(voucher) # If successful... self.cart.vouchers.add(voucher)
python
{ "resource": "" }
q275878
CartController.validate_cart
test
def validate_cart(self): ''' Determines whether the status of the current cart is valid; this is normally called before generating or paying an invoice ''' cart = self.cart user = self.cart.user errors = [] try: self._test_vouchers(self.cart.vouchers.all()) except ValidationError as ve: errors.append(ve) items = commerce.ProductItem.objects.filter(cart=cart) items = items.select_related("product", "product__category") product_quantities = list((i.product, i.quantity) for i in items) try: self._test_limits(product_quantities) except ValidationError as ve: self._append_errors(errors, ve) try: self._test_required_categories() except ValidationError as ve: self._append_errors(errors, ve) # Validate the discounts # TODO: refactor in terms of available_discounts # why aren't we doing that here?! # def available_discounts(cls, user, categories, products): products = [i.product for i in items] discounts_with_quantity = DiscountController.available_discounts( user, [], products, ) discounts = set(i.discount.id for i in discounts_with_quantity) discount_items = commerce.DiscountItem.objects.filter(cart=cart) for discount_item in discount_items: discount = discount_item.discount if discount.id not in discounts: errors.append( ValidationError("Discounts are no longer available") ) if errors: raise ValidationError(errors)
python
{ "resource": "" }
q275879
CartController.fix_simple_errors
test
def fix_simple_errors(self): ''' This attempts to fix the easy errors raised by ValidationError. This includes removing items from the cart that are no longer available, recalculating all of the discounts, and removing voucher codes that are no longer available. ''' # Fix vouchers first (this affects available discounts) to_remove = [] for voucher in self.cart.vouchers.all(): try: self._test_voucher(voucher) except ValidationError: to_remove.append(voucher) for voucher in to_remove: self.cart.vouchers.remove(voucher) # Fix products and discounts items = commerce.ProductItem.objects.filter(cart=self.cart) items = items.select_related("product") products = set(i.product for i in items) available = set(ProductController.available_products( self.cart.user, products=products, )) not_available = products - available zeros = [(product, 0) for product in not_available] self.set_quantities(zeros)
python
{ "resource": "" }
q275880
CartController._recalculate_discounts
test
def _recalculate_discounts(self): ''' Calculates all of the discounts available for this product.''' # Delete the existing entries. commerce.DiscountItem.objects.filter(cart=self.cart).delete() # Order the products such that the most expensive ones are # processed first. product_items = self.cart.productitem_set.all().select_related( "product", "product__category" ).order_by("-product__price") products = [i.product for i in product_items] discounts = DiscountController.available_discounts( self.cart.user, [], products, ) # The highest-value discounts will apply to the highest-value # products first, because of the order_by clause for item in product_items: self._add_discount(item.product, item.quantity, discounts)
python
{ "resource": "" }
q275881
CartController._add_discount
test
def _add_discount(self, product, quantity, discounts): ''' Applies the best discounts on the given product, from the given discounts.''' def matches(discount): ''' Returns True if and only if the given discount apples to our product. ''' if isinstance(discount.clause, conditions.DiscountForCategory): return discount.clause.category == product.category else: return discount.clause.product == product def value(discount): ''' Returns the value of this discount clause as applied to this product ''' if discount.clause.percentage is not None: return discount.clause.percentage * product.price else: return discount.clause.price discounts = [i for i in discounts if matches(i)] discounts.sort(key=value) for candidate in reversed(discounts): if quantity == 0: break elif candidate.quantity == 0: # This discount clause has been exhausted by this cart continue # Get a provisional instance for this DiscountItem # with the quantity set to as much as we have in the cart discount_item = commerce.DiscountItem.objects.create( product=product, cart=self.cart, discount=candidate.discount, quantity=quantity, ) # Truncate the quantity for this DiscountItem if we exceed quantity ours = discount_item.quantity allowed = candidate.quantity if ours > allowed: discount_item.quantity = allowed discount_item.save() # Update the remaining quantity. quantity = ours - allowed else: quantity = 0 candidate.quantity -= discount_item.quantity
python
{ "resource": "" }
q275882
report_view
test
def report_view(title, form_type=None): ''' Decorator that converts a report view function into something that displays a Report. Arguments: title (str): The title of the report. form_type (Optional[forms.Form]): A form class that can make this report display things. If not supplied, no form will be displayed. ''' # Create & return view def _report(view): report_view = ReportView(view, title, form_type) report_view = user_passes_test(views._staff_only)(report_view) report_view = wraps(view)(report_view) # Add this report to the list of reports. _all_report_views.append(report_view) return report_view return _report
python
{ "resource": "" }
q275883
ListReport.rows
test
def rows(self, content_type): ''' Returns the data rows for the table. ''' for row in self._data: yield [ self.cell_text(content_type, i, cell) for i, cell in enumerate(row) ]
python
{ "resource": "" }
q275884
ReportView.get_form
test
def get_form(self, request): ''' Creates an instance of self.form_type using request.GET ''' # Create a form instance if self.form_type is not None: form = self.form_type(request.GET) # Pre-validate it form.is_valid() else: form = None return form
python
{ "resource": "" }
q275885
ReportView.render
test
def render(self, data):
    ''' Renders the report according to data.content_type.

    Arguments:
        data (ReportViewRequestData): The report data; its content_type
            selects the renderer (CSV, HTML, or HTML when unset).

    Returns:
        HTTPResponse: The rendered version of the report.

    Raises:
        KeyError: If the content type is not one we know how to render.
    '''
    dispatch = {
        "text/csv": self._render_as_csv,
        "text/html": self._render_as_html,
        None: self._render_as_html,
    }
    return dispatch[data.content_type](data)
python
{ "resource": "" }
q275886
reports_list
test
def reports_list(request):
    ''' Lists all of the reports currently available. '''

    # One entry per registered report view, sorted by display name.
    reports = [
        {
            "name": view.__name__,
            "url": reverse(view),
            "description": view.__doc__,
        }
        for view in get_all_reports()
    ]
    reports.sort(key=lambda entry: entry["name"])

    return render(
        request,
        "registrasion/reports_list.html",
        {"reports": reports},
    )
python
{ "resource": "" }
q275887
items_sold
test
def items_sold():
    ''' Summarises the items sold and discounts granted for a given set of
    products, or products from categories. '''

    # One aggregated row per (price, description) pair across paid invoices.
    aggregated = commerce.LineItem.objects.filter(
        invoice__status=commerce.Invoice.STATUS_PAID,
    ).select_related("invoice").order_by(
        # sqlite requires an order_by for .values() to work
        "-price", "description",
    ).values(
        "price", "description",
    ).annotate(
        total_quantity=Sum("quantity"),
    )

    table = []
    running_total = 0
    for row in aggregated:
        line_total = row["total_quantity"] * row["price"]
        table.append([
            row["description"],
            row["total_quantity"],
            row["price"],
            line_total,
        ])
        running_total += line_total

    # Grand-total footer row.
    table.append(["(TOTAL)", "--", "--", running_total])

    return ListReport(
        "Items sold",
        ["Description", "Quantity", "Price", "Total"],
        table,
    )
python
{ "resource": "" }
q275888
sales_payment_summary
test
def sales_payment_summary():
    ''' Summarises paid items and payments.

    Produces a two-column report reconciling income (line items on paid
    invoices) against payments and credit notes; the final row should be
    zero if the books balance.
    '''

    def value_or_zero(aggregate, key):
        ''' Returns aggregate[key], coercing a None aggregate result to 0. '''
        return aggregate[key] or 0

    def sum_amount(payment_set):
        ''' Sums the "amount" field over the given queryset (0 if empty). '''
        a = payment_set.values("amount").aggregate(total=Sum("amount"))
        return value_or_zero(a, "total")

    headings = ["Category", "Total"]
    data = []

    # Summarise all sales made (= income.)
    sales = commerce.LineItem.objects.filter(
        invoice__status=commerce.Invoice.STATUS_PAID,
    ).values(
        "price", "quantity"
    ).aggregate(
        total=Sum(F("price") * F("quantity"), output_field=CURRENCY()),
    )
    sales = value_or_zero(sales, "total")

    all_payments = sum_amount(commerce.PaymentBase.objects.all())

    # Manual payments
    # Credit notes generated (total)
    # Payments made by credit note
    # Claimed credit notes

    # Credit-note totals are negated so they read as liabilities.
    all_credit_notes = 0 - sum_amount(commerce.CreditNote.objects.all())
    unclaimed_credit_notes = 0 - sum_amount(commerce.CreditNote.unclaimed())
    claimed_credit_notes = sum_amount(
        commerce.CreditNoteApplication.objects.all()
    )
    refunded_credit_notes = 0 - sum_amount(commerce.CreditNote.refunded())

    data.append(["Items on paid invoices", sales])
    data.append(["All payments", all_payments])
    data.append(["Sales - Payments ", sales - all_payments])
    data.append(["All credit notes", all_credit_notes])
    data.append(["Credit notes paid on invoices", claimed_credit_notes])
    data.append(["Credit notes refunded", refunded_credit_notes])
    data.append(["Unclaimed credit notes", unclaimed_credit_notes])
    # Reconciliation row: should be zero when every credit note is accounted
    # for as claimed, refunded, or unclaimed.
    data.append([
        "Credit notes - (claimed credit notes + unclaimed credit notes)",
        all_credit_notes - claimed_credit_notes - refunded_credit_notes -
        unclaimed_credit_notes
    ])

    return ListReport("Sales and Payments Summary", headings, data)
python
{ "resource": "" }
q275889
payments
test
def payments():
    ''' Shows the history of payments into the system '''

    return QuerysetReport(
        "Payments",
        ["invoice__id", "id", "reference", "amount"],
        commerce.PaymentBase.objects.all(),
        link_view=views.invoice,
    )
python
{ "resource": "" }
q275890
credit_note_refunds
test
def credit_note_refunds():
    ''' Shows all of the credit notes that have been generated. '''

    return QuerysetReport(
        "Credit note refunds",
        ["id", "creditnoterefund__reference", "amount"],
        commerce.CreditNote.refunded(),
        link_view=views.credit_note,
    )
python
{ "resource": "" }
q275891
product_status
test
def product_status(request, form):
    ''' Summarises the inventory status of the given items, grouping by
    invoice status. '''

    selected_products = form.cleaned_data["product"]
    selected_categories = form.cleaned_data["category"]

    item_qs = commerce.ProductItem.objects.filter(
        Q(product__in=selected_products) |
        Q(product__category__in=selected_categories),
    ).select_related("cart", "product")

    grouped = group_by_cart_status(
        item_qs,
        ["product__category__order", "product__order"],
        ["product", "product__category__name", "product__name"],
    )

    # One row per product: "<category> - <product>" plus per-status totals.
    data = [
        [
            "%s - %s" % (
                entry["product__category__name"], entry["product__name"]
            ),
            entry["total_paid"],
            entry["total_reserved"],
            entry["total_unreserved"],
            entry["total_refunded"],
        ]
        for entry in grouped
    ]

    return ListReport(
        "Inventory",
        ["Product", "Paid", "Reserved", "Unreserved", "Refunded"],
        data,
    )
python
{ "resource": "" }
q275892
discount_status
test
def discount_status(request, form):
    ''' Summarises the usage of a given discount. '''

    selected_discounts = form.cleaned_data["discount"]

    item_qs = commerce.DiscountItem.objects.filter(
        Q(discount__in=selected_discounts),
    ).select_related("cart", "product", "product__category")

    grouped = group_by_cart_status(
        item_qs,
        ["discount"],
        ["discount", "discount__description"],
    )

    # One row per discount, with per-cart-status usage totals.
    data = [
        [
            entry["discount__description"],
            entry["total_paid"],
            entry["total_reserved"],
            entry["total_unreserved"],
            entry["total_refunded"],
        ]
        for entry in grouped
    ]

    return ListReport(
        "Usage by item",
        ["Discount", "Paid", "Reserved", "Unreserved", "Refunded"],
        data,
    )
python
{ "resource": "" }
q275893
product_line_items
test
def product_line_items(request, form):
    ''' Shows each product line item from invoices, including their date and
    purchashing customer. '''

    products = form.cleaned_data["product"]
    categories = form.cleaned_data["category"]

    # Paid invoices containing at least one of the selected products or
    # products from the selected categories, oldest first.
    invoices = commerce.Invoice.objects.filter(
        (
            Q(lineitem__product__in=products) |
            Q(lineitem__product__category__in=categories)
        ),
        status=commerce.Invoice.STATUS_PAID,
    ).select_related(
        "cart",
        "user",
        "user__attendee",
        "user__attendee__attendeeprofilebase"
    ).order_by("issue_time")

    data = []
    for invoice in invoices:
        for item in invoice.cart.productitem_set.all():
            # Skip cart items that aren't in the selection.
            if (item.product not in products and
                    item.product.category not in categories):
                continue

            row = [
                invoice.id,
                invoice.issue_time.strftime('%Y-%m-%d %H:%M:%S'),
                invoice.user.attendee.attendeeprofilebase.attendee_name(),
                item.quantity,
                item.product,
            ]
            status = invoice.cart.status
            if status == commerce.Cart.STATUS_PAID:
                row.append('PAID')
            elif status == commerce.Cart.STATUS_ACTIVE:
                row.append('UNPAID')
            elif status == commerce.Cart.STATUS_RELEASED:
                row.append('REFUNDED')
            data.append(row)

    return ListReport(
        "Line Items",
        ['Invoice', 'Invoice Date', 'Attendee', 'Qty', 'Product', 'Status'],
        data,
    )
python
{ "resource": "" }
q275894
paid_invoices_by_date
test
def paid_invoices_by_date(request, form):
    ''' Shows the number of paid invoices containing given products or
    categories per day. '''

    products = form.cleaned_data["product"]
    categories = form.cleaned_data["category"]

    invoices = commerce.Invoice.objects.filter(
        (
            Q(lineitem__product__in=products) |
            Q(lineitem__product__category__in=categories)
        ),
        status=commerce.Invoice.STATUS_PAID,
    )

    # Invoices with payments will be paid at the time of their latest payment
    payments = commerce.PaymentBase.objects.filter(
        invoice__in=invoices,
    ).order_by("invoice")
    invoice_max_time = payments.values("invoice").annotate(
        max_time=Max("time")
    )

    # Zero-value invoices will have no payments, so they're paid at issue time
    zero_value_invoices = invoices.filter(value=0)

    times = itertools.chain(
        (entry["max_time"] for entry in invoice_max_time),
        (inv.issue_time for inv in zero_value_invoices),
    )

    # Count payments per calendar day (times truncated to midnight).
    per_day = collections.Counter(
        datetime.datetime(year=t.year, month=t.month, day=t.day)
        for t in times
    )

    data = [
        (day.strftime("%Y-%m-%d"), count)
        for day, count in sorted(per_day.items())
    ]

    return ListReport(
        "Paid Invoices By Date",
        ["date", "count"],
        data,
    )
python
{ "resource": "" }
q275895
credit_notes
test
def credit_notes(request, form):
    ''' Shows all of the credit notes in the system. '''

    return QuerysetReport(
        "Credit Notes",
        [
            "id",
            "invoice__user__attendee__attendeeprofilebase__invoice_recipient",
            "status",
            "value",
        ],
        commerce.CreditNote.objects.all().select_related(
            "creditnoterefund",
            "creditnoteapplication",
            "invoice",
            "invoice__user__attendee__attendeeprofilebase",
        ),
        headings=["id", "Owner", "Status", "Value"],
        link_view=views.credit_note,
    )
python
{ "resource": "" }
q275896
invoices
test
def invoices(request, form):
    ''' Shows all of the invoices in the system. '''

    ordered_invoices = commerce.Invoice.objects.all().order_by("status", "id")

    return QuerysetReport(
        "Invoices",
        ["id", "recipient", "value", "get_status_display"],
        ordered_invoices,
        headings=["id", "Recipient", "Value", "Status"],
        link_view=views.invoice,
    )
python
{ "resource": "" }
q275897
attendee_list
test
def attendee_list(request):
    ''' Returns a list of all attendees. '''

    attendee_qs = people.Attendee.objects.select_related(
        "attendeeprofilebase",
        "user",
    )

    # Look up profiles in bulk so the row loop does no extra queries.
    profile_qs = AttendeeProfile.objects.filter(
        attendee__in=attendee_qs
    ).select_related(
        "attendee", "attendee__user",
    )
    profile_for = {profile.attendee: profile for profile in profile_qs}

    attendee_qs = attendee_qs.annotate(
        has_registered=Count(
            Q(user__invoice__status=commerce.Invoice.STATUS_PAID)
        ),
    )

    rows = []
    for att in attendee_qs:
        profile = profile_for.get(att)
        rows.append([
            att.user.id,
            profile.attendee_name() if profile is not None else "",
            att.user.email,
            att.has_registered > 0,
        ])

    # Registered attendees first, then ascending user ID.
    rows.sort(key=lambda row: (-row[3], row[0]))

    return AttendeeListReport(
        "Attendees",
        ["User ID", "Name", "Email", "Has registered"],
        rows,
        link_view=attendee,
    )
python
{ "resource": "" }
q275898
speaker_registrations
test
def speaker_registrations(request, form):
    ''' Shows registration status for speakers with a given proposal kind.

    Arguments:
        request: The HTTP request (passed through by the report plumbing).
        form: A validated form whose cleaned_data["kind"] holds the proposal
            kinds to include.

    Returns:
        QuerysetReport: one row per (co-)presenting user, annotated with a
        count of their paid carts, unregistered speakers first.
    '''

    kinds = form.cleaned_data["kind"]

    # All non-cancelled presentations of the requested kinds.
    presentations = schedule_models.Presentation.objects.filter(
        proposal_base__kind__in=kinds,
    ).exclude(
        cancelled=True,
    )

    # Users who present or co-present any of those presentations.
    users = User.objects.filter(
        Q(speaker_profile__presentations__in=presentations) |
        Q(speaker_profile__copresentations__in=presentations)
    )

    paid_carts = commerce.Cart.objects.filter(status=commerce.Cart.STATUS_PAID)

    # Scores 1 per paid cart and 0 otherwise, so Sum() counts paid carts.
    paid_carts = Case(
        When(cart__in=paid_carts, then=Value(1)),
        default=Value(0),
        output_field=models.IntegerField(),
    )

    users = users.annotate(paid_carts=Sum(paid_carts))
    users = users.order_by("paid_carts")

    # NOTE: an unreachable `return []` after this return has been removed.
    return QuerysetReport(
        "Speaker Registration Status",
        ["id", "speaker_profile__name", "email", "paid_carts"],
        users,
        link_view=attendee,
    )
python
{ "resource": "" }
q275899
manifest
test
def manifest(request, form):
    ''' Produces the registration manifest for people with the given product
    type. '''

    products = form.cleaned_data["product"]
    categories = form.cleaned_data["category"]

    selection = (
        Q(lineitem__product__in=products) |
        Q(lineitem__product__category__in=categories)
    )

    # Paid invoices containing any of the selected products/categories.
    invoices = commerce.Invoice.objects.filter(
        selection,
        status=commerce.Invoice.STATUS_PAID,
    ).select_related(
        "cart",
        "user",
        "user__attendee",
        "user__attendee__attendeeprofilebase"
    )

    relevant_users = set(i.user for i in invoices)

    carts = commerce.Cart.objects.filter(user__in=relevant_users)

    product_items = commerce.ProductItem.objects.filter(
        cart__in=carts
    ).select_related(
        "product",
        "product__category",
        "cart",
        "cart__user",
        "cart__user__attendee",
        "cart__user__attendee__attendeeprofilebase"
    ).order_by("product__category__order", "product__order")

    # Bucket each user's items by their cart's payment status.
    by_user = {}
    for product_item in product_items:
        cart = product_item.cart
        if cart.user not in by_user:
            by_user[cart.user] = {"unpaid": [], "paid": [], "refunded": []}
        buckets = by_user[cart.user]
        if cart.status == commerce.Cart.STATUS_ACTIVE:
            buckets["unpaid"].append(product_item)
        elif cart.status == commerce.Cart.STATUS_PAID:
            buckets["paid"].append(product_item)
        elif cart.status == commerce.Cart.STATUS_RELEASED:
            buckets["refunded"].append(product_item)

    users_by_name = sorted(
        by_user,
        key=lambda u: u.attendee.attendeeprofilebase.attendee_name().lower(),
    )

    headings = ["User ID", "Name", "Paid", "Unpaid", "Refunded"]

    def format_items(item_list):
        ''' Renders a bucket of items as "N x Product" lines. '''
        return ", \n".join(
            '%d x %s' % (entry.quantity, str(entry.product))
            for entry in item_list
        )

    output = []
    for user in users_by_name:
        buckets = by_user[user]
        output.append([
            user.id,
            user.attendee.attendeeprofilebase.attendee_name(),
            format_items(buckets["paid"]),
            format_items(buckets["unpaid"]),
            format_items(buckets["refunded"]),
        ])

    return ListReport("Manifest", headings, output)
python
{ "resource": "" }