def doParseXMLData(self):
    parser = xml2obj.Xml2Obj()
    # Not a valid document coming from FMServer
    if self.data[-6:] == '</COL>':
        self.data += '</ROW></RESULTSET></FMPXMLRESULT>'
    xobj = parser.ParseString(self.data)
    try:
        el = xobj.getElements('ERRORCODE')
        if el:
            self.errorcode = int(el[0].getData())
        else:
            self.errorcode = int(
                xobj.getElements('error')[0].getAttribute('code'))
    except:
        FMErrorByNum(954)
    if self.errorcode != 0:
        FMErrorByNum(self.errorcode)
    return xobj
This function parses the XML output of FileMaker.
def normalizeUnicode(text, encoding='humanascii'):
    if text == "":
        return ""
    unicodeinput = True
    if not isinstance(text, unicode):
        text = unicode(text, 'utf-8')
        unicodeinput = False

    res = ''
    global allowed, allowedid
    if encoding == 'humanascii' or encoding == 'identifier':
        enc = 'ascii'
    else:
        enc = encoding
    for ch in text:
        if (encoding == 'humanascii') and (ch in allowed):
            # ASCII chars, digits etc. stay untouched
            res += ch
            continue
        if (encoding == 'identifier') and (ch in allowedid):
            # ASCII chars, digits etc. stay untouched
            res += ch
            continue
        else:
            try:
                ch.encode(enc, 'strict')
                if encoding == 'identifier':
                    res += '_'
                else:
                    res += ch
            except UnicodeEncodeError:
                ordinal = ord(ch)
                if ordinal in mapping:
                    # try to apply custom mappings
                    res += mapping.get(ordinal)
                elif decomposition(ch) or len(normalize('NFKD', ch)) > 1:
                    # the normalized string may contain non-letter chars
                    # and may decompose to more than one char; keep only
                    # the allowed characters
                    normalized = filter(lambda i: not combining(i),
                                        normalize('NFKD', ch)).strip()
                    if encoding == 'identifier':
                        res += ''.join([c for c in normalized if c in allowedid])
                    else:
                        res += ''.join([c for c in normalized if c in allowed])
                else:
                    # hex string instead of unknown char
                    res += "%x" % ordinal
    if encoding == 'identifier':
        res = res.strip('_').replace('_____', '_').replace('____', '_')
        res = res.replace('___', '_').replace('__', '_')
        if not res.strip('_')[0] in string.ascii_letters:
            res = '_' + res
    if unicodeinput:
        return res
    else:
        return res.encode('utf-8')

Normalizes unicode characters to the base ASCII letters. The output is an ASCII-encoded string (or char) containing only ASCII letters, digits, punctuation, and whitespace characters. Case is preserved.
def fill(metrics_headers=()):
    # Create an ordered dictionary with the Python version, which
    # should go first.
    answer = collections.OrderedDict((
        ('gl-python', platform.python_version()),
    ))

    # Add anything that already appears in the passed metrics headers,
    # in order.
    for key, value in collections.OrderedDict(metrics_headers).items():
        answer[key] = value

    # Add the GAX and GRPC headers to our metrics.
    # These come after what may have been passed in (generally the GAPIC
    # library).
    answer['gax'] = gax.__version__
    # pylint: disable=no-member
    answer['grpc'] = pkg_resources.get_distribution('grpcio').version
    # pylint: enable=no-member

    return answer
Add the metrics headers known to GAX. Return an OrderedDict with all of the metrics headers provided to this function, as well as the metrics known to GAX (such as its own version, the GRPC version, etc.).
def stringify(metrics_headers=()):
    metrics_headers = collections.OrderedDict(metrics_headers)
    return ' '.join(['%s/%s' % (k, v) for k, v in metrics_headers.items()])
Convert the provided metrics headers to a string. Iterate over the metrics headers (a dictionary, usually ordered) and return a properly-formatted space-separated string (e.g. foo/1.2.3 bar/3.14.159).
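For illustration, passing an ordered sequence of (hypothetical) header pairs:

>>> stringify([('gl-python', '2.7.12'), ('gax', '0.15.0')])
'gl-python/2.7.12 gax/0.15.0'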
def _str_dotted_getattr(obj, name):
    for part in name.split('.'):
        obj = getattr(obj, part)
    return str(obj) if obj else None

Extends getattr to allow dots in ``name`` to indicate nested objects.

Args:
    obj (object): an object.
    name (str): a name for a field in the object.

Returns:
    Any: the value of the named attribute.

Raises:
    AttributeError: if the named attribute does not exist.
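A minimal sketch of the dotted lookup, using stand-in namedtuples that are not part of the library:

>>> import collections
>>> Inner = collections.namedtuple('Inner', ['version'])
>>> Outer = collections.namedtuple('Outer', ['inner'])
>>> _str_dotted_getattr(Outer(inner=Inner(version=3)), 'inner.version')
'3'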
def request_bytesize(self):
    return sum(len(str(e)) for elts in self._in_deque for e in elts)

The size, in bytes, of the bundled field elements.
def run(self):
    if not self._in_deque:
        return
    req = self._bundling_request
    del getattr(req, self.bundled_field)[:]
    getattr(req, self.bundled_field).extend(
        [e for elts in self._in_deque for e in elts])

    subresponse_field = self.subresponse_field
    if subresponse_field:
        self._run_with_subresponses(req, subresponse_field, self._kwargs)
    else:
        self._run_with_no_subresponse(req, self._kwargs)

Call the task's func. The task's func will be called with the bundled request.
def extend(self, elts):
    # Use a copy, not a reference, as it is later necessary to mutate
    # the proto field from which elts are drawn in order to construct
    # the bundled request.
    elts = elts[:]
    self._in_deque.append(elts)
    event = self._event_for(elts)
    self._event_deque.append(event)
    return event

Adds elts to the tasks.

Args:
    elts (Sequence): an iterable of elements that can be appended to the
        task's bundle_field.

Returns:
    Event: an event that can be used to wait on the response.
def _event_for(self, elts):
    event = Event()
    event.canceller = self._canceller_for(elts, event)
    return event
Creates an Event that is set when the bundle with elts is sent.
def _canceller_for(self, elts, event):
    def canceller():
        """Cancels submission of ``elts`` as part of this bundle.

        Returns:
            bool: ``False`` if any of the elements had already been sent,
                otherwise ``True``.
        """
        try:
            self._event_deque.remove(event)
            self._in_deque.remove(elts)
            return True
        except ValueError:
            return False

    return canceller

Obtains a cancellation function that removes elts. The returned cancellation function returns ``True`` if all elements were removed successfully from the _in_deque, and ``False`` if they were not.
def schedule(self, api_call, bundle_id, bundle_desc, bundling_request,
             kwargs=None):
    kwargs = kwargs or dict()
    bundle = self._bundle_for(api_call, bundle_id, bundle_desc,
                              bundling_request, kwargs)
    elts = getattr(bundling_request, bundle_desc.bundled_field)
    event = bundle.extend(elts)

    # Run the bundle if the count threshold was reached.
    count_threshold = self._options.element_count_threshold
    if count_threshold > 0 and bundle.element_count >= count_threshold:
        self._run_now(bundle.bundle_id)

    # Run the bundle if the size threshold was reached.
    size_threshold = self._options.request_byte_threshold
    if size_threshold > 0 and bundle.request_bytesize >= size_threshold:
        self._run_now(bundle.bundle_id)

    return event

Schedules bundle_desc of bundling_request as part of bundle_id.

The returned value is an :class:`Event` that

* has a ``result`` attribute that will eventually be set to the result of
  the api call
* will be used to wait for the response
* holds the canceller function for canceling this part of the bundle

Args:
    api_call (callable[[object], object]): the scheduled API call.
    bundle_id (str): identifies the bundle on which the API call should
        be made.
    bundle_desc (gax.BundleDescriptor): describes the structure of the
        bundled call.
    bundling_request (object): the request instance to use in the API
        call.
    kwargs (dict): optional, the keyword arguments passed to the API call.

Returns:
    Event: the scheduled event.
def create_stub(generated_create_stub, channel=None, service_path=None,
                service_port=None, credentials=None, scopes=None,
                ssl_credentials=None):
    if channel is None:
        target = '{}:{}'.format(service_path, service_port)

        if credentials is None:
            credentials = _grpc_google_auth.get_default_credentials(scopes)

        channel = _grpc_google_auth.secure_authorized_channel(
            credentials, target, ssl_credentials=ssl_credentials)

    return generated_create_stub(channel)

Creates a gRPC client stub.

Args:
    generated_create_stub (Callable): The generated gRPC method to create
        a stub.
    channel (grpc.Channel): A Channel object through which to make calls.
        If None, a secure channel is constructed. If specified, all
        remaining arguments are ignored.
    service_path (str): The domain name of the API remote host.
    service_port (int): The port on which to connect to the remote host.
    credentials (google.auth.credentials.Credentials): The authorization
        credentials to attach to requests. These credentials identify your
        application to the service.
    scopes (Sequence[str]): The OAuth scopes for this service. This
        parameter is ignored if ``credentials`` is specified.
    ssl_credentials (grpc.ChannelCredentials): gRPC channel credentials
        used to create a secure gRPC channel. If not specified, SSL
        credentials will be created using default certificates.

Returns:
    grpc.Client: A gRPC client stub.
def get_default_credentials(scopes):
    credentials, _ = google.auth.default(scopes=scopes)
    return credentials
Gets the Application Default Credentials.
def secure_authorized_channel(credentials, target, ssl_credentials=None):
    http_request = _request_factory()
    return google.auth.transport.grpc.secure_authorized_channel(
        credentials, http_request, target, ssl_credentials=ssl_credentials)
Creates a secure authorized gRPC channel.
def add_timeout_arg(a_func, timeout, **kwargs):
    def inner(*args):
        """Updates args with the timeout."""
        updated_args = args + (timeout,)
        return a_func(*updated_args, **kwargs)

    return inner

Updates a_func so that it gets called with the timeout as its final arg.

This converts a callable, a_func, into another callable with an additional
positional arg.

Args:
    a_func (callable): a callable to be updated.
    timeout (int): to be added to the original callable as its final
        positional arg.
    kwargs: Additional arguments passed through to the callable.

Returns:
    callable: the original callable updated with the timeout arg.
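A minimal sketch of the wrapper in use, with a stand-in callable that is not part of the library:

>>> def echo(greeting, timeout):
...     return (greeting, timeout)
>>> add_timeout_arg(echo, 30)('hello')
('hello', 30)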
def create_error(msg, cause=None):
    status_code = config.exc_to_code(cause)
    status_name = config.NAME_STATUS_CODES.get(status_code)
    if status_name == 'INVALID_ARGUMENT':
        return InvalidArgumentError(msg, cause=cause)
    else:
        return GaxError(msg, cause=cause)

Creates a ``GaxError`` or subclass.

Args:
    msg (string): describes the error that occurred.
    cause (Exception, optional): the exception raised by a lower layer of
        the RPC stack (for example, gRPC) that caused this exception, or
        None if this exception originated in GAX.

Returns:
    .GaxError: The exception that wraps ``cause``.
def get_operation(self, name, options=None):
    # Create the request object.
    request = operations_pb2.GetOperationRequest(name=name)
    return self._get_operation(request, options)

Gets the latest state of a long-running operation. Clients can use this
method to poll the operation result at intervals as recommended by the API
service.

Example:
    >>> from google.gapic.longrunning import operations_client
    >>> api = operations_client.OperationsClient()
    >>> name = ''
    >>> response = api.get_operation(name)

Args:
    name (string): The name of the operation resource.
    options (:class:`google.gax.CallOptions`): Overrides the default
        settings for this call, e.g. timeout, retries etc.

Returns:
    A :class:`google.longrunning.operations_pb2.Operation` instance.

Raises:
    :exc:`google.gax.errors.GaxError` if the RPC is aborted.
    :exc:`ValueError` if the parameters are invalid.
def list_operations(self, name, filter_, page_size=0, options=None):
    # Create the request object.
    request = operations_pb2.ListOperationsRequest(
        name=name, filter=filter_, page_size=page_size)
    return self._list_operations(request, options)

Lists operations that match the specified filter in the request. If the
server doesn't support this method, it returns ``UNIMPLEMENTED``.

NOTE: the ``name`` binding below allows API services to override the
binding to use different resource name schemes, such as
``users/*/operations``.

Example:
    >>> from google.gapic.longrunning import operations_client
    >>> from google.gax import CallOptions, INITIAL_PAGE
    >>> api = operations_client.OperationsClient()
    >>> name = ''
    >>> filter_ = ''
    >>>
    >>> # Iterate over all results
    >>> for element in api.list_operations(name, filter_):
    >>>     # process element
    >>>     pass
    >>>
    >>> # Or iterate over results one page at a time
    >>> for page in api.list_operations(
    >>>         name, filter_,
    >>>         options=CallOptions(page_token=INITIAL_PAGE)):
    >>>     for element in page:
    >>>         # process element
    >>>         pass

Args:
    name (string): The name of the operation collection.
    filter_ (string): The standard list filter.
    page_size (int): The maximum number of resources contained in the
        underlying API response. If page streaming is performed
        per-resource, this parameter does not affect the return value. If
        page streaming is performed per-page, this determines the maximum
        number of resources in a page.
    options (:class:`google.gax.CallOptions`): Overrides the default
        settings for this call, e.g. timeout, retries etc.

Returns:
    A :class:`google.gax.PageIterator` instance. By default, this is an
    iterable of :class:`google.longrunning.operations_pb2.Operation`
    instances. This object can also be configured to iterate over the
    pages of the response through the `CallOptions` parameter.

Raises:
    :exc:`google.gax.errors.GaxError` if the RPC is aborted.
    :exc:`ValueError` if the parameters are invalid.
def cancel_operation(self, name, options=None):
    # Create the request object.
    request = operations_pb2.CancelOperationRequest(name=name)
    self._cancel_operation(request, options)

Starts asynchronous cancellation on a long-running operation. The server
makes a best effort to cancel the operation, but success is not
guaranteed. If the server doesn't support this method, it returns
``google.rpc.Code.UNIMPLEMENTED``. Clients can use
``Operations.GetOperation`` or other methods to check whether the
cancellation succeeded or whether the operation completed despite
cancellation. On successful cancellation, the operation is not deleted;
instead, it becomes an operation with an ``Operation.error`` value with a
``google.rpc.Status.code`` of 1, corresponding to ``Code.CANCELLED``.

Example:
    >>> from google.gapic.longrunning import operations_client
    >>> api = operations_client.OperationsClient()
    >>> name = ''
    >>> api.cancel_operation(name)

Args:
    name (string): The name of the operation resource to be cancelled.
    options (:class:`google.gax.CallOptions`): Overrides the default
        settings for this call, e.g. timeout, retries etc.

Raises:
    :exc:`google.gax.errors.GaxError` if the RPC is aborted.
    :exc:`ValueError` if the parameters are invalid.
def delete_operation(self, name, options=None):
    # Create the request object.
    request = operations_pb2.DeleteOperationRequest(name=name)
    self._delete_operation(request, options)

Deletes a long-running operation. This method indicates that the client is
no longer interested in the operation result. It does not cancel the
operation. If the server doesn't support this method, it returns
``google.rpc.Code.UNIMPLEMENTED``.

Example:
    >>> from google.gapic.longrunning import operations_client
    >>> api = operations_client.OperationsClient()
    >>> name = ''
    >>> api.delete_operation(name)

Args:
    name (string): The name of the operation resource to be deleted.
    options (:class:`google.gax.CallOptions`): Overrides the default
        settings for this call, e.g. timeout, retries etc.

Raises:
    :exc:`google.gax.errors.GaxError` if the RPC is aborted.
    :exc:`ValueError` if the parameters are invalid.
def check_oneof(**kwargs):
    # Sanity check: If no keyword arguments were sent, this is fine.
    if not kwargs:
        return None

    not_nones = [val for val in kwargs.values() if val is not None]
    if len(not_nones) > 1:
        raise ValueError('Only one of {fields} should be set.'.format(
            fields=', '.join(sorted(kwargs.keys())),
        ))

Raise ValueError if more than one keyword argument is not ``None``.

Args:
    kwargs (dict): The keyword arguments sent to the function.

Returns:
    None

Raises:
    ValueError: If more than one entry in kwargs is not ``None``.
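A quick sketch of the expected behavior:

>>> check_oneof(foo=None, bar=None)  # nothing set: fine
>>> check_oneof(foo=1, bar=None)     # one value set: fine
>>> check_oneof(foo=1, bar=2)        # two values set: raises
Traceback (most recent call last):
    ...
ValueError: Only one of bar, foo should be set.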
def _bundleable(desc):
    def inner(a_func, settings, request, **kwargs):
        """Schedules execution of a bundling task."""
        if not settings.bundler:
            return a_func(request, **kwargs)

        the_id = bundling.compute_bundle_id(
            request, desc.request_discriminator_fields)
        return settings.bundler.schedule(a_func, the_id, desc, request,
                                         kwargs)

    return inner

Creates a function that transforms an API call into a bundling call.

It transforms a_func from an API call that receives the request and
returns the response into a callable that receives the same request, and
returns a :class:`bundling.Event`. The returned Event object can be used
to obtain the eventual result of the bundled call.

Args:
    desc (gax.BundleDescriptor): describes the bundling that a_func
        supports.

Returns:
    Callable: takes the API call's request and keyword args and returns a
    bundling.Event object.
def _page_streamable(page_descriptor):
    def inner(a_func, settings, request, **kwargs):
        """Actual page-streaming based on the settings."""
        page_iterator = gax.PageIterator(
            a_func, page_descriptor, settings.page_token, request, **kwargs)
        if settings.flatten_pages:
            return gax.ResourceIterator(page_iterator)
        else:
            return page_iterator

    return inner

Creates a function that yields an iterable to perform page-streaming.

Args:
    page_descriptor (:class:`PageDescriptor`): indicates the structure of
        page streaming to be performed.

Returns:
    Callable: A function that returns an iterator.
def _construct_bundling(bundle_config, bundle_descriptor):
    if bundle_config and bundle_descriptor:
        bundler = bundling.Executor(gax.BundleOptions(
            element_count_threshold=bundle_config.get(
                'element_count_threshold', 0),
            element_count_limit=bundle_config.get('element_count_limit', 0),
            request_byte_threshold=bundle_config.get(
                'request_byte_threshold', 0),
            request_byte_limit=bundle_config.get('request_byte_limit', 0),
            delay_threshold=bundle_config.get('delay_threshold_millis', 0)))
    else:
        bundler = None

    return bundler

Helper for ``construct_settings()``.

Args:
    bundle_config (dict): A dictionary specifying bundle parameters; the
        value for the 'bundling' field in a method config (see
        ``construct_settings()`` for information on this config).
    bundle_descriptor (BundleDescriptor): A BundleDescriptor object
        describing the structure of bundling for this method. If not set,
        this method will not bundle.

Returns:
    Optional[bundling.Executor]: The bundling executor that configures
    bundling, or None if this method should not bundle.
def _construct_retry(method_config, retry_codes, retry_params, retry_names):
    if method_config is None:
        return None

    codes = None
    if retry_codes and 'retry_codes_name' in method_config:
        codes_name = method_config['retry_codes_name']
        if codes_name in retry_codes and retry_codes[codes_name]:
            codes = [retry_names[name] for name in retry_codes[codes_name]]
        else:
            codes = []

    backoff_settings = None
    if retry_params and 'retry_params_name' in method_config:
        params_name = method_config['retry_params_name']
        if params_name and params_name in retry_params:
            backoff_settings = gax.BackoffSettings(
                **retry_params[params_name])

    return gax.RetryOptions(
        backoff_settings=backoff_settings,
        retry_codes=codes,
    )

Helper for ``construct_settings()``.

Args:
    method_config (dict): A dictionary representing a single ``methods``
        entry of the standard API client config file. (See
        ``construct_settings()`` for information on this yaml.)
    retry_codes (dict): A dictionary parsed from the ``retry_codes`` entry
        of the standard API client config file. (See
        ``construct_settings()`` for information on this yaml.)
    retry_params (dict): A dictionary parsed from the ``retry_params``
        entry of the standard API client config file. (See
        ``construct_settings()`` for information on this yaml.)
    retry_names (dict): A dictionary mapping the string names used in the
        standard API client config file to API response status codes.

Returns:
    Optional[RetryOptions]: The retry options, if applicable.
def _merge_retry_options(retry_options, overrides):
    if overrides is None:
        return None

    if overrides.retry_codes is None and overrides.backoff_settings is None:
        return retry_options

    codes = retry_options.retry_codes
    if overrides.retry_codes is not None:
        codes = overrides.retry_codes

    backoff_settings = retry_options.backoff_settings
    if overrides.backoff_settings is not None:
        backoff_settings = overrides.backoff_settings

    return gax.RetryOptions(
        backoff_settings=backoff_settings,
        retry_codes=codes,
    )

Helper for ``construct_settings()``.

Takes two retry options and merges them into a single RetryOptions
instance.

Args:
    retry_options (RetryOptions): The base RetryOptions.
    overrides (RetryOptions): The RetryOptions used for overriding
        ``retry``. Its values are used if they are not None. If the
        entire ``overrides`` is None, the base retry options are ignored
        and None is returned.

Returns:
    RetryOptions: The merged options, or None if retry is canceled.
def _catch_errors(a_func, to_catch):
    def inner(*args, **kwargs):
        """Wraps specified exceptions."""
        try:
            return a_func(*args, **kwargs)
        # pylint: disable=catching-non-exception
        except tuple(to_catch) as exception:
            utils.raise_with_traceback(
                gax.errors.create_error('RPC failed', cause=exception))

    return inner

Updates a_func to wrap exceptions with GaxError.

Args:
    a_func (callable): A callable.
    to_catch (list[Exception]): Configures the exceptions to wrap.

Returns:
    Callable: A function that will wrap certain exceptions with GaxError.
def _merge_options_metadata(options, settings):
    if not options:
        return options

    kwargs = options.kwargs
    if kwargs == gax.OPTION_INHERIT or 'metadata' not in kwargs:
        return options

    kwarg_meta_dict = {}
    merged_kwargs = options.kwargs.copy()
    for kwarg_meta in merged_kwargs['metadata']:
        kwarg_meta_dict[kwarg_meta[0].lower()] = kwarg_meta
    for kwarg_meta in settings.kwargs['metadata']:
        if kwarg_meta[0].lower() not in kwarg_meta_dict:
            merged_kwargs['metadata'].append(kwarg_meta)

    return gax.CallOptions(
        timeout=options.timeout, retry=options.retry,
        page_token=options.page_token, is_bundling=options.is_bundling,
        **merged_kwargs)

Merges the metadata lists, adding any tuples from ``settings`` that are missing from ``options``.
def get(pb_or_dict, key, default=_SENTINEL):
    # We may need to get a nested key. Resolve this.
    key, subkey = _resolve_subkeys(key)

    # Attempt to get the value from the two types of objects we know about.
    # If we get something else, complain.
    if isinstance(pb_or_dict, Message):
        answer = getattr(pb_or_dict, key, default)
    elif isinstance(pb_or_dict, collections.Mapping):
        answer = pb_or_dict.get(key, default)
    else:
        raise TypeError('Tried to fetch a key %s on an invalid object; '
                        'expected a dict or protobuf message.' % key)

    # If the object we got back is our sentinel, raise KeyError; this is
    # a "not found" case.
    if answer is _SENTINEL:
        raise KeyError(key)

    # If a subkey exists, call this method recursively against the answer.
    if subkey and answer is not default:
        return get(answer, subkey, default=default)

    # Return the value.
    return answer

Retrieve the given key off of the object. If a default is specified,
return it if the key is not found; otherwise raise KeyError.

Args:
    pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
        object.
    key (str): The key to retrieve from the object in question.
    default (Any): If the key is not present on the object, and a default
        is set, returns that default instead. A type-appropriate falsy
        default is generally recommended, as protobuf messages almost
        always have default values for unset values and it is not always
        possible to tell the difference between a falsy value and an
        unset one. If no default is set, raises KeyError for not found
        values.

Returns:
    Any: The return value from the underlying message or dict.

Raises:
    KeyError: If the key is not found. Note that, for unset values,
        messages and dictionaries may not have consistent behavior.
    TypeError: If pb_or_dict is not a Message or Mapping.
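A quick sketch against a plain dictionary (protobuf messages behave analogously):

>>> get({'a': {'b': 1}}, 'a.b')
1
>>> get({'a': {'b': 1}}, 'a.c', default=0)
0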
def setdefault(pb_or_dict, key, value):
    # ``set`` here is the module-level setter companion to ``get``,
    # not the builtin.
    if not get(pb_or_dict, key, default=None):
        set(pb_or_dict, key, value)

Set the key on the object to the value if the current value is falsy.

Because protobuf Messages do not distinguish between unset values and
falsy ones particularly well, this method treats any falsy value (e.g. 0,
empty list) as a target to be overwritten, on both Messages and
dictionaries.

Args:
    pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
        object.
    key (str): The key on the object in question.
    value (Any): The value to set.

Raises:
    TypeError: If pb_or_dict is not a Message or Mapping.
def _resolve_subkeys(key, separator='.'):
    subkey = None
    if separator in key:
        index = key.index(separator)
        subkey = key[index + 1:]
        key = key[:index]
    return key, subkey

Given a key which may actually be a nested key, return the top level key
and any nested subkeys as separate values.

Args:
    key (str): A string that may or may not contain the separator.
    separator (str): The namespace separator. Defaults to `.`.

Returns:
    Tuple[str, str]: The key and subkey(s).
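The split happens only at the first separator, so deeper paths remain in the subkey:

>>> _resolve_subkeys('a.b.c')
('a', 'b.c')
>>> _resolve_subkeys('a')
('a', None)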
def merge(self, options):
    if not options:
        return _CallSettings(
            timeout=self.timeout, retry=self.retry,
            page_descriptor=self.page_descriptor,
            page_token=self.page_token, bundler=self.bundler,
            bundle_descriptor=self.bundle_descriptor,
            kwargs=self.kwargs)
    else:
        if options.timeout == OPTION_INHERIT:
            timeout = self.timeout
        else:
            timeout = options.timeout

        if options.retry == OPTION_INHERIT:
            retry = self.retry
        else:
            retry = options.retry

        if options.page_token == OPTION_INHERIT:
            page_token = self.page_token
        else:
            page_token = options.page_token

        if options.is_bundling:
            bundler = self.bundler
        else:
            bundler = None

        if options.kwargs == OPTION_INHERIT:
            kwargs = self.kwargs
        else:
            kwargs = self.kwargs.copy()
            kwargs.update(options.kwargs)

        return _CallSettings(
            timeout=timeout, retry=retry,
            page_descriptor=self.page_descriptor, page_token=page_token,
            bundler=bundler, bundle_descriptor=self.bundle_descriptor,
            kwargs=kwargs)

Returns a new _CallSettings merged from this and a CallOptions object.

Note that if the CallOptions instance specifies a page_token, the merged
_CallSettings will have ``flatten_pages`` disabled. This permits toggling
per-resource/per-page page streaming.

Args:
    options (CallOptions): an instance whose values override those in this
        object. If None, ``merge`` returns a copy of this object.

Returns:
    CallSettings: The merged settings and options.
def cancel(self):
    if self.done():
        return False

    self._client.cancel_operation(self._operation.name)
    return True

If the last Operation's value of `done` is true, returns False; otherwise, issues OperationsClient.cancel_operation and returns True.
def result(self, timeout=None):
    # Check exceptional case: raise if no response
    if not self._poll(timeout).HasField('response'):
        raise GaxError(self._operation.error.message)

    # Return expected result
    return _from_any(self._result_type, self._operation.response)

Enters a polling loop on OperationsClient.get_operation, and once
Operation.done is true, returns Operation.response if successful or throws
GaxError if not successful.

This method will wait up to timeout seconds. If the call hasn't completed
in timeout seconds, then a RetryError will be raised. timeout can be an
int or float. If timeout is not specified or None, there is no limit to
the wait time.
def exception(self, timeout=None):
    # Check exceptional case: return None if no error
    if not self._poll(timeout).HasField('error'):
        return None

    # Return expected error
    return self._operation.error
Similar to result(), except returns the exception if any.
def add_done_callback(self, fn):  # pylint: disable=invalid-name
    if self._operation.done:
        _try_callback(self, fn)
    else:
        self._queue.put(dill.dumps(fn))
        if self._process is None:
            self._process = mp.Process(target=self._execute_tasks)
            self._process.start()
Enters a polling loop on OperationsClient.get_operation, and once the operation is done or cancelled, calls the function with this _OperationFuture. Added callables are called in the order that they were added.
def metadata(self):
    # Check exceptional case: return None if no metadata
    if not self._operation.HasField('metadata'):
        return None

    # Return expected metadata
    return _from_any(self._metadata_type, self._operation.metadata)
Returns the value of Operation.metadata from the last call to OperationsClient.get_operation (or if only the initial API call has been made, the metadata from that first call).
def get_sql(self):
    return '{0} {1} ON {2}'.format(
        self.join_type, self.right_table.get_sql(), self.get_condition())

Generates the JOIN sql for the join tables and join condition

:rtype: str
:return: the JOIN sql for the join tables and join condition
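As an illustration (the table and column names here are hypothetical), the returned fragment for an inner join might look like:

INNER JOIN order ON order.account_id = account.id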
def set_left_table(self, left_table=None):
    if left_table:
        self.left_table = TableFactory(
            table=left_table,
            owner=self.owner,
        )
    else:
        self.left_table = self.get_left_table()

Sets the left table for this join clause. If no table is specified, the
first table in the query will be used

:type left_table: str or dict or
    :class:`Table <querybuilder.tables.Table>` or None
:param left_table: The left table being joined with. This can be a string
    of the table name, a dict of {'alias': table}, or a ``Table``
    instance. Defaults to the first table in the query.
def get_left_table(self):
    if self.left_table:
        return self.left_table

    if len(self.owner.tables):
        return self.owner.tables[0]

Returns the left table if one was specified, otherwise the first table in
the query is returned

:rtype: :class:`Table <querybuilder.tables.Table>`
:return: the left table if one was specified, otherwise the first table
    in the query
def get_all_related_objects(self, table):
    if hasattr(table.model._meta, 'get_all_related_objects'):
        # Django 1.7 method
        return table.model._meta.get_all_related_objects()
    else:
        # Django > 1.7
        return [
            f for f in table.model._meta.get_fields()
            if (f.one_to_many or f.one_to_one)
            and f.auto_created and not f.concrete
        ]
Fix for django 1.10 to replace deprecated code. Keep support for django 1.7
def set_right_table(self, table):
    self.right_table = table
    if self.left_table is None:
        return

    # find table prefix
    if type(self.left_table) is ModelTable and type(self.right_table) is ModelTable:
        # loop through fields to find the field for this model

        # check if this join type is for a related field
        for field in self.get_all_related_objects(self.left_table):
            related_model = field.model
            if hasattr(field, 'related_model'):
                related_model = field.related_model
            if related_model == self.right_table.model:
                if self.right_table.field_prefix is None:
                    self.right_table.field_prefix = field.get_accessor_name()
                    if len(self.right_table.field_prefix) > 4 and self.right_table.field_prefix[-4:] == '_set':
                        self.right_table.field_prefix = self.right_table.field_prefix[:-4]
                return

        # check if this join type is for a foreign key
        for field in self.left_table.model._meta.fields:
            if (
                field.get_internal_type() == 'OneToOneField' or
                field.get_internal_type() == 'ForeignKey'
            ):
                if field.remote_field.model == self.right_table.model:
                    if self.right_table.field_prefix is None:
                        self.right_table.field_prefix = field.name
                    return

Sets the right table for this join clause and tries to automatically set the condition if one isn't specified
def get_condition(self):
    if self.condition:
        return self.condition

    if type(self.right_table) is ModelTable and type(self.left_table) is ModelTable:
        # loop through fields to find the field for this model

        # check if this join type is for a related field
        for field in self.get_all_related_objects(self.right_table):
            related_model = field.model
            if hasattr(field, 'related_model'):
                related_model = field.related_model
            if related_model == self.left_table.model:
                table_join_field = field.field.column
                # self.table_join_name = field.get_accessor_name()
                condition = '{0}.{1} = {2}.{3}'.format(
                    self.right_table.get_identifier(),
                    self.right_table.model._meta.pk.name,
                    self.left_table.get_identifier(),
                    table_join_field,
                )
                return condition

        # check if this join type is for a foreign key
        for field in self.right_table.model._meta.fields:
            if (
                field.get_internal_type() == 'OneToOneField' or
                field.get_internal_type() == 'ForeignKey'
            ):
                if field.remote_field.model == self.left_table.model:
                    table_join_field = field.column
                    # self.table_join_name = field.name
                    condition = '{0}.{1} = {2}.{3}'.format(
                        self.right_table.get_identifier(),
                        table_join_field,
                        self.left_table.get_identifier(),
                        self.left_table.model._meta.pk.name
                    )
                    return condition

    return None

Determines the condition to be used in the condition part of the join sql.

:return: The condition for the join clause
:rtype: str or None
def get_sql(self):
    # reset arg index and args
    self.arg_index = 0
    self.args = {}

    # build the WHERE sql portion if needed
    if len(self.wheres):
        where = self.build_where_part(self.wheres)
        return 'WHERE {0} '.format(where)
    return ''

Builds and returns the WHERE portion of the sql

:return: the WHERE portion of the sql
:rtype: str
def get_condition_value(self, operator, value):
    if operator in ('contains', 'icontains'):
        value = '%{0}%'.format(value)
    elif operator == 'startswith':
        value = '{0}%'.format(value)
    return value

Gets the condition value based on the operator and value

:param operator: the condition operator name
:type operator: str
:param value: the value to be formatted based on the condition operator
:type value: object
:return: the value formatted for the condition operator
:rtype: object
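For instance, given a ``Where`` instance ``where``:

>>> where.get_condition_value('icontains', 'foo')
'%foo%'
>>> where.get_condition_value('startswith', 'foo')
'foo%'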
def set_arg(self, value):
    named_arg = '{0}A{1}'.format(self.arg_prefix, self.arg_index)
    self.args[named_arg] = value
    self.arg_index += 1
    return named_arg

Set the query param in self.args based on the prefix and arg index and
auto increment the arg_index

:return: the string placeholder for the arg
:rtype: str
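For example, assuming a ``Where`` instance ``where`` with an empty ``arg_prefix`` and an ``arg_index`` of 0, successive calls yield auto-incrementing placeholders:

>>> where.set_arg('first')
'A0'
>>> where.set_arg('second')
'A1'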
def get_name(self, use_alias=True):
    if self.desc:
        direction = 'DESC'
    else:
        direction = 'ASC'

    if use_alias:
        return '{0} {1}'.format(self.field.get_identifier(), direction)
    return '{0} {1}'.format(self.field.get_select_sql(), direction)

Gets the name to reference the sorted field

:return: the name to reference the sorted field
:rtype: str
def get_sql(self):
    sql = ''
    if self.limit and self.limit > 0:
        sql += 'LIMIT {0} '.format(self.limit)
    if self.offset and self.offset > 0:
        sql += 'OFFSET {0} '.format(self.offset)
    return sql

Generates the sql used for the limit clause of a Query

:return: the sql for the limit clause of a Query
:rtype: str
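For instance, given an existing limit-clause instance ``limit_clause`` with both attributes set (note the trailing space produced by the code above):

>>> limit_clause.limit = 10
>>> limit_clause.offset = 20
>>> limit_clause.get_sql()
'LIMIT 10 OFFSET 20 '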
def init_defaults(self):
    self.sql = ''
    self.tables = []
    self.joins = []
    self._where = Where()
    self.groups = []
    self.sorters = []
    self._limit = None
    self.table_prefix = ''
    self.is_inner = False
    self.with_tables = []
    self._distinct = False
    self.distinct_ons = []
    self.field_names = []
    self.field_names_pk = None
    self.values = []
Sets the default values for this instance
def from_table(self, table=None, fields='*', schema=None, **kwargs):
    # self.mark_dirty()
    self.tables.append(TableFactory(
        table=table,
        fields=fields,
        schema=schema,
        owner=self,
        **kwargs
    ))
    return self

Adds a ``Table`` and any optional fields to the list of tables this query
is selecting from.

:type table: str or dict or :class:`Table <querybuilder.tables.Table>` or
    :class:`Query <querybuilder.query.Query>` or
    :class:`ModelBase <django:django.db.models.base.ModelBase>`
:param table: The table to select fields from. This can be a string of
    the table name, a dict of {'alias': table}, a ``Table`` instance, a
    Query instance, or a django Model instance
:type fields: str or tuple or list or Field
:param fields: The fields to select from ``table``. Defaults to '*'.
    This can be a single field, a tuple of fields, or a list of fields.
    Each field can be a string or ``Field`` instance
:type schema: str
:param schema: This is not implemented, but it will be a string of the db
    schema name
:param kwargs: Any additional parameters to be passed into the
    constructor of ``TableFactory``
:return: self
:rtype: :class:`Query <querybuilder.query.Query>`
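A minimal usage sketch (the table and field names are hypothetical, and the exact whitespace of the generated sql may differ):

>>> query = Query().from_table('account', fields=['id', 'name'])
>>> query.get_sql()
'SELECT account.id, account.name FROM account'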
def insert_into(self, table=None, field_names=None, values=None, **kwargs):
    table = TableFactory(
        table=table,
        **kwargs
    )
    self.tables.append(table)
    self.field_names = field_names
    self.values = values
    return self

Bulk inserts a list of values into a table

:type table: str or dict or :class:`Table <querybuilder.tables.Table>` or
    :class:`Query <querybuilder.query.Query>` or
    :class:`ModelBase <django:django.db.models.base.ModelBase>`
:param table: The table to insert into. This can be a string of the table
    name, a dict of {'alias': table}, a ``Table`` instance, a Query
    instance, or a django Model instance
:type field_names: list
:param field_names: A list of ordered field names that relate to the data
    in the values list
:type values: list of list
:param values: A list of value lists, each with values in the same order
    as the field names
:param kwargs: Any additional parameters to be passed into the
    constructor of ``TableFactory``
:return: self
:rtype: :class:`Query <querybuilder.query.Query>`
def update_table(self, table=None, field_names=None, values=None, pk=None,
                 **kwargs):
    table = TableFactory(
        table=table,
        **kwargs
    )
    self.tables.append(table)
    self.field_names = field_names
    self.values = values
    self.field_names_pk = pk

Bulk updates rows in a table

:type table: str or dict or :class:`Table <querybuilder.tables.Table>` or
    :class:`Query <querybuilder.query.Query>` or
    :class:`ModelBase <django:django.db.models.base.ModelBase>`
:param table: The table to update. This can be a string of the table
    name, a dict of {'alias': table}, a ``Table`` instance, a Query
    instance, or a django Model instance
:type field_names: list
:param field_names: A list of ordered field names that relate to the data
    in the values list
:type values: list of list
:param values: A list of value lists, each with values in the same order
    as the field names
:type pk: str
:param pk: The name of the primary key in the table and field_names
:param kwargs: Any additional parameters to be passed into the
    constructor of ``TableFactory``
:rtype: :class:`Query <querybuilder.query.Query>`
:return: self
def where(self, q=None, where_type='AND', **kwargs):
    # self.mark_dirty()
    if q is not None:
        self._where.wheres.add(q, where_type)
    if len(kwargs):
        for key, value in kwargs.items():
            q = Q(**{key: value})
            self._where.wheres.add(q, where_type)
    return self

Adds a where condition as a Q object to the query's ``Where`` instance.

:type q: :class:`Q <django:django.db.models.Q>`
:param q: A django ``Q`` instance. This will be added to the query's
    ``Where`` object. If no Q object is passed, the kwargs will be
    examined for params to be added to Q objects
:type where_type: str
:param where_type: The connection type of the where condition ('AND',
    'OR')
:return: self
:rtype: :class:`Query <querybuilder.query.Query>`
def group_by(self, field=None, table=None, allow_duplicates=False):
    new_group_item = Group(
        field=field,
        table=table,
    )

    if allow_duplicates is False:
        for group_item in self.groups:
            if group_item.field.get_identifier() == new_group_item.field.get_identifier():
                return self

    self.groups.append(new_group_item)
    return self

Adds a group by clause to the query by adding a ``Group`` instance to the
query's groups list

:type field: str or dict or :class:`Field <querybuilder.fields.Field>`
:param field: This can be a string of a field name, a dict of
    {'alias': field}, or a ``Field`` instance
:type table: str or dict or :class:`Table <querybuilder.table.Table>`
:param table: Optional. This can be a string of a table name, a dict of
    {'alias': table}, or a ``Table`` instance. A table only needs to be
    supplied in more complex queries where the field name is ambiguous.
:return: self
:rtype: :class:`Query <querybuilder.query.Query>`
def order_by(self, field=None, table=None, desc=False):
    self.sorters.append(Sorter(
        field=field,
        table=table,
        desc=desc
    ))
    return self

Adds an order by clause to the query by adding a ``Sorter`` instance to
the query's sorters list

:type field: str or dict or :class:`Field <querybuilder.fields.Field>`
:param field: This can be a string of a field name, a dict of
    {'alias': field}, or a ``Field`` instance
:type table: str or dict or :class:`Table <querybuilder.table.Table>`
:param table: Optional. This can be a string of a table name, a dict of
    {'alias': table}, or a ``Table`` instance. A table only needs to be
    supplied in more complex queries where the field name is ambiguous.
:type desc: bool
:param desc: Set to True to sort by this field in DESC order or False to
    sort by this field in ASC order. Defaults to False.
:rtype: :class:`Query <querybuilder.query.Query>`
:return: self
def check_name_collisions(self):
    table_index = 0
    table_names = {}
    for table in self.tables + self.with_tables:
        table_prefix = 'T{0}'.format(table_index)
        auto_alias = '{0}{1}'.format(self.table_prefix, table_prefix)

        identifier = table.get_identifier()
        if identifier is None or identifier in table_names:
            table.auto_alias = auto_alias
        table_names[identifier] = True

        # prefix inner query args and update self args
        if type(table) is QueryTable:
            table.query.prefix_args(auto_alias)
            table.query.table_prefix = auto_alias

        table_index += 1

Checks if there are any tables referenced by the same identifier and updates the auto_alias accordingly. This is called when generating the sql for a query and should only be called internally.
def get_sql(self, debug=False, use_cache=True):
    # TODO: enable caching
    # if self.sql and use_cache and not debug:
    #     return self.sql

    # auto alias any naming collisions
    self.check_name_collisions()

    # if debugging, return the debug formatted sql
    if debug:
        return self.format_sql()

    # build each part of the query
    sql = ''
    sql += self.build_withs()
    sql += self.build_select_fields()
    sql += self.build_from_table()
    sql += self.build_joins()
    sql += self.build_where()
    sql += self.build_groups()
    sql += self.build_order_by()
    sql += self.build_limit()

    # remove any whitespace from the beginning and end of the sql
    self.sql = sql.strip()

    return self.sql

Generates the sql for this query and returns the sql as a string.

:type debug: bool
:param debug: If True, the sql will be returned in a format that is
    easier to read and debug. Defaults to False
:type use_cache: bool
:param use_cache: If True, the query will return the cached sql if it
    exists rather than generating the sql again. If False, the sql will
    be generated again. Defaults to True.
:rtype: str
:return: The generated sql for this query
def get_update_sql(self, rows):
    field_names = self.get_field_names()
    pk = field_names[0]
    update_field_names = field_names[1:]

    num_columns = len(rows[0])
    if num_columns < 2:
        raise Exception('At least 2 fields must be passed to get_update_sql')

    all_null_indices = [
        all(row[index] is None for row in rows)
        for index in range(1, num_columns)
    ]

    field_names_sql = '({0})'.format(', '.join(field_names))

    row_values = []
    sql_args = []
    for row in rows:
        placeholders = []
        for value in row:
            sql_args.append(value)
            placeholders.append('%s')
        row_values.append('({0})'.format(', '.join(placeholders)))
    row_values_sql = ', '.join(row_values)

    # build field list for SET portion
    set_field_list = [
        '{0} = NULL'.format(field_name)
        if all_null_indices[idx]
        else '{0} = new_values.{0}'.format(field_name)
        for idx, field_name in enumerate(update_field_names)
    ]
    set_field_list_sql = ', '.join(set_field_list)

    self.sql = 'UPDATE {0} SET {1} FROM (VALUES {2}) AS new_values {3} WHERE {0}.{4} = new_values.{4}'.format(
        self.tables[0].get_identifier(),
        set_field_list_sql,
        row_values_sql,
        field_names_sql,
        pk
    )

    return self.sql, sql_args

Returns SQL UPDATE for rows ``rows``

.. code-block:: sql

    UPDATE table_name
    SET
        field1 = new_values.field1,
        field2 = new_values.field2
    FROM (
        VALUES
            (1, 'value1', 'value2'),
            (2, 'value1', 'value2')
    ) AS new_values (id, field1, field2)
    WHERE table_name.id = new_values.id;
def format_sql(self):
    # TODO: finish adding the other parts of the sql generation
    sql = ''

    # build SELECT
    select_segment = self.build_select_fields()
    select_segment = select_segment.replace('SELECT ', '', 1)
    fields = [field.strip() for field in select_segment.split(',')]
    sql += 'SELECT\n\t{0}\n'.format(',\n\t'.join(fields))

    # build FROM
    from_segment = self.build_from_table()
    from_segment = from_segment.replace('FROM ', '', 1)
    tables = [table.strip() for table in from_segment.split(',')]
    sql += 'FROM\n\t{0}\n'.format(',\n\t'.join(tables))

    # build ORDER BY
    order_by_segment = self.build_order_by()
    if len(order_by_segment):
        order_by_segment = order_by_segment.replace('ORDER BY ', '', 1)
        sorters = [sorter.strip() for sorter in order_by_segment.split(',')]
        sql += 'ORDER BY\n\t{0}\n'.format(',\n\t'.join(sorters))

    # build LIMIT
    limit_segment = self.build_limit()
    if len(limit_segment):
        if 'LIMIT' in limit_segment:
            limit_segment = limit_segment.replace('LIMIT ', 'LIMIT\n\t', 1)
            if 'OFFSET' in limit_segment:
                limit_segment = limit_segment.replace(
                    'OFFSET ', '\nOFFSET\n\t', 1)
        elif 'OFFSET' in limit_segment:
            limit_segment = limit_segment.replace('OFFSET ', 'OFFSET\n\t', 1)
        sql += limit_segment

    return sql

Builds the sql in a format that is easy for humans to read and debug

:return: The formatted sql for this query
:rtype: str
def get_field_names(self):
    field_names = []
    for table in self.tables:
        field_names.extend(table.get_field_names())
    for join_item in self.joins:
        field_names.extend(join_item.right_table.get_field_names())
    return field_names

Builds a list of the field names for all tables and joined tables by
calling ``get_field_names()`` on each table

:return: list of field names
:rtype: list of str
def get_field_identifiers(self):
    field_identifiers = []
    for table in self.tables:
        field_identifiers += table.get_field_identifiers()
    for join_item in self.joins:
        field_identifiers += join_item.right_table.get_field_identifiers()
    return field_identifiers

Builds a list of the field identifiers for all tables and joined tables
by calling ``get_field_identifiers()`` on each table

:return: list of field identifiers
:rtype: list of str
def build_select_fields(self):
    field_sql = []

    # get the field sql for each table
    for table in self.tables:
        field_sql += table.get_field_sql()

    # get the field sql for each join table
    for join_item in self.joins:
        field_sql += join_item.right_table.get_field_sql()

    # combine all field sql separated by a comma
    sql = 'SELECT {0}{1} '.format(
        self.get_distinct_sql(), ', '.join(field_sql))
    return sql

Generates the sql for the SELECT portion of the query

:return: the SELECT portion of the query
:rtype: str
def build_from_table(self):
    table_parts = []

    # get the table sql for each table
    for table in self.tables:
        sql = table.get_sql()
        if len(sql):
            table_parts.append(sql)

    # combine all table sql separated by a comma
    sql = 'FROM {0} '.format(', '.join(table_parts))
    return sql

Generates the sql for the FROM portion of the query

:return: the FROM portion of the query
:rtype: str
def build_joins(self):
    join_parts = []

    # get the sql for each join object
    for join_item in self.joins:
        join_parts.append(join_item.get_sql())

    # if there are any joins, combine them
    if len(join_parts):
        combined_joins = ' '.join(join_parts)
        return '{0} '.format(combined_joins)
    return ''

Generates the sql for the JOIN portion of the query

:return: the JOIN portion of the query
:rtype: str
def build_groups(self):
    # check if there are any groupings
    if len(self.groups):
        groups = []

        # get the group sql for each grouping
        for group in self.groups:
            groups.append(group.get_name())
        return 'GROUP BY {0} '.format(', '.join(groups))
    return ''

Generates the sql for the GROUP BY portion of the query

:return: the GROUP BY portion of the query
:rtype: str
def build_order_by(self, use_alias=True):
    # check if there are any sorters
    if len(self.sorters):
        sorters = []

        # get the sql for each sorter
        for sorter in self.sorters:
            sorters.append(sorter.get_name(use_alias=use_alias))
        return 'ORDER BY {0} '.format(', '.join(sorters))
    return ''

Generates the sql for the ORDER BY portion of the query

:type use_alias: bool
:param use_alias: If True, the alias for the field will be used in the
    order by. This option exists because query windows do not use the
    alias. Defaults to True.
:return: the ORDER BY portion of the query
:rtype: str
def find_table(self, table):
    table = TableFactory(table)
    identifier = table.get_identifier()
    join_tables = [join_item.right_table for join_item in self.joins]
    for table in (self.tables + join_tables):
        if table.get_identifier() == identifier:
            return table
    return None

Finds a table by name or alias. The FROM tables and JOIN tables are
included in the search.

:type table: str or
    :class:`ModelBase <django:django.db.models.base.ModelBase>`
:param table: string of the table name or alias or a ModelBase instance
:return: The table if it is found, otherwise None
:rtype: Table or None
def wrap(self, alias=None):
    field_names = self.get_field_names()
    query = Query(self.connection).from_table(deepcopy(self), alias=alias)
    self.__dict__.update(query.__dict__)

    # set explicit field names
    self.tables[0].set_fields(field_names)
    field_names = self.get_field_names()

    return self

Wraps the query by selecting all fields from itself

:rtype: :class:`Query <querybuilder.query.Query>`
:return: The wrapped query
def copy(self):
    connection = self.connection
    del self.connection
    copied_query = deepcopy(self)
    copied_query.connection = connection
    self.connection = connection
    return copied_query

Deeply copies everything in the query object except the connection object, which is shared
def get_args(self):
    for table in self.tables + self.with_tables:
        if type(table) is QueryTable:
            self._where.args.update(table.query.get_args())

    return self._where.args

Gets the args for the query which will be escaped when being executed by
the db. All inner queries are inspected and their args are combined with
this query's args.

:return: all args for this query as a dict
:rtype: dict
def explain(self, sql=None, sql_args=None):
    cursor = self.get_cursor()
    if sql is None:
        sql = self.get_sql()
        sql_args = self.get_args()
    elif sql_args is None:
        sql_args = {}

    cursor.execute('EXPLAIN {0}'.format(sql), sql_args)
    rows = self._fetch_all_as_dict(cursor)
    return rows

Runs EXPLAIN on this query

:type sql: str or None
:param sql: The sql to run EXPLAIN on. If None is specified, the query
    will use ``self.get_sql()``
:type sql_args: dict or None
:param sql_args: A dictionary of the arguments to be escaped in the
    query. If None and sql is None, the query will use
    ``self.get_args()``
:rtype: list of str
:return: list of each line of output from the EXPLAIN statement
def insert(self, rows):
    if len(rows) == 0:
        return

    sql, sql_args = self.get_insert_sql(rows)

    # get the cursor to execute the query
    cursor = self.get_cursor()

    # execute the query
    cursor.execute(sql, sql_args)

Inserts records into the db
def update(self, rows):
    if len(rows) == 0:
        return

    sql, sql_args = self.get_update_sql(rows)

    # get the cursor to execute the query
    cursor = self.get_cursor()

    # execute the query
    cursor.execute(sql, sql_args)
Updates records in the db
def get_auto_field_name(self, model_class):
    # Get auto field name (a model can only have one AutoField)
    for field in model_class._meta.fields:
        if isinstance(field, AutoField):
            return field.column
    return None
Returns the column name of the model's AutoField (a model can only have one), or None if the model has no AutoField.
def get_count_query(self):
    query_copy = self.copy()
    if not query_copy.tables:
        raise Exception('No tables specified to do a count')

    for table in query_copy.tables:
        del table.fields[:]

    query_copy.tables[0].add_field(CountField('*'))
    del query_copy.sorters[:]
    return query_copy
Copies the query object and alters the field list and order by to do a more efficient count
def count(self, field='*'):
    rows = self.get_count_query().select(bypass_safe_limit=True)
    return list(rows[0].values())[0]

Returns a COUNT of the query by wrapping the query and performing a COUNT
aggregate of the specified field

:param field: the field to pass to the COUNT aggregate. Defaults to '*'
:type field: str
:return: The number of rows that the query will return
:rtype: int
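A usage sketch (the table name and the resulting number are hypothetical):

>>> Query().from_table('account').count()
42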
def max(self, field):
    q = Query(self.connection).from_table(self, fields=[MaxField(field)])
    rows = q.select(bypass_safe_limit=True)
    return list(rows[0].values())[0]

Returns the maximum value of a field in the result set of the query by
wrapping the query and performing a MAX aggregate of the specified field

:param field: the field to pass to the MAX aggregate
:type field: str
:return: The maximum value of the specified field
:rtype: int
def min(self, field):
    q = Query(self.connection).from_table(self, fields=[MinField(field)])
    rows = q.select(bypass_safe_limit=True)
    return list(rows[0].values())[0]

Returns the minimum value of a field in the result set of the query by
wrapping the query and performing a MIN aggregate of the specified field

:param field: the field to pass to the MIN aggregate
:type field: str
:return: The minimum value of the specified field
:rtype: int
def sum(self, field):
    q = Query(self.connection).from_table(self, fields=[SumField(field)])
    rows = q.select(bypass_safe_limit=True)
    return list(rows[0].values())[0]

Returns the sum of the field in the result set of the query by wrapping
the query and performing a SUM aggregate of the specified field

:param field: the field to pass to the SUM aggregate
:type field: str
:return: The sum of the specified field
:rtype: int
def avg(self, field):
    q = Query(self.connection).from_table(self, fields=[AvgField(field)])
    rows = q.select(bypass_safe_limit=True)
    return list(rows[0].values())[0]

Returns the average of the field in the result set of the query by
wrapping the query and performing an AVG aggregate of the specified field

:param field: the field to pass to the AVG aggregate
:type field: str
:return: The average of the specified field
:rtype: int
def _fetch_all_as_dict(self, cursor):
    desc = cursor.description
    return [
        dict(zip([col[0] for col in desc], row))
        for row in cursor.fetchall()
    ]

Iterates over the result set and converts each row to a dictionary

:return: A list of dictionaries where each row is a dictionary
:rtype: list of dict
def get_sql(self, debug=False, use_cache=True):
    # TODO: implement caching and debug
    sql = ''
    sql += self.build_partition_by_fields()
    sql += self.build_order_by(use_alias=False)
    sql += self.build_limit()
    sql = sql.strip()
    sql = 'OVER ({0})'.format(sql)
    self.sql = sql
    return self.sql

Generates the sql for this query window and returns the sql as a string.

:type debug: bool
:param debug: If True, the sql will be returned in a format that is
    easier to read and debug. Defaults to False
:type use_cache: bool
:param use_cache: If True, the query will return the cached sql if it
    exists rather than generating the sql again. If False, the sql will
    be generated again. Defaults to True.
:rtype: str
:return: The generated sql for this query window
def value_for_keypath(dict, keypath):
    if len(keypath) == 0:
        return dict

    keys = keypath.split('.')
    value = dict
    for key in keys:
        if key in value:
            value = value[key]
        else:
            return None

    return value
Returns the value of a keypath in a dictionary if the keypath exists or None if the keypath does not exist.
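A couple of quick examples:

>>> value_for_keypath({'a': {'b': 1}}, 'a.b')
1
>>> value_for_keypath({'a': {'b': 1}}, 'a.missing') is None
True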
def set_value_for_keypath(item, keypath, value, create_if_needed=False,
                          delimeter='.'):
    if len(keypath) == 0:
        return None

    keys = keypath.split(delimeter)
    if len(keys) > 1:
        key = keys[0]

        if create_if_needed:
            item[key] = item.get(key, {})

        if key in item:
            if set_value_for_keypath(item[key], delimeter.join(keys[1:]),
                                     value,
                                     create_if_needed=create_if_needed,
                                     delimeter=delimeter):
                return item
        return None

    if create_if_needed:
        item[keypath] = item.get(keypath, {})

    if keypath in item:
        item[keypath] = value
        return item

    return None
Sets the value for a keypath in a dictionary if the keypath exists. This modifies the original dictionary.
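Two quick examples, one updating an existing path and one creating intermediate dictionaries:

>>> d = {'a': {'b': 1}}
>>> set_value_for_keypath(d, 'a.b', 2)
{'a': {'b': 2}}
>>> set_value_for_keypath({}, 'x.y', 5, create_if_needed=True)
{'x': {'y': 5}}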
def get_sql(self):
    alias = self.get_alias()
    if alias:
        if self.cast:
            return 'CAST({0} AS {1}) AS "{2}"'.format(
                self.get_select_sql(), self.cast.upper(), alias)
        return '{0} AS "{1}"'.format(self.get_select_sql(), alias)
    if self.cast:
        return 'CAST({0} AS {1})'.format(
            self.get_identifier(), self.cast.upper())
    return self.get_identifier()

Gets the SELECT sql part for a field

Ex: field_name AS alias

:return: the sql for this field used in the SELECT portion of the query
:rtype: str
def get_alias(self):
    alias = None
    if self.alias:
        alias = self.alias
    elif self.auto_alias:
        alias = self.auto_alias

    if self.table and self.table.prefix_fields:
        field_prefix = self.table.get_field_prefix()
        if alias:
            alias = '{0}__{1}'.format(field_prefix, alias)
        else:
            alias = '{0}__{1}'.format(field_prefix, self.name)

    return alias

Gets the alias for the field or the auto_alias if one is set. If there
isn't any kind of alias, None is returned.

:return: The field alias, auto_alias, or None
:rtype: str or None
def get_select_sql(self):
    if self.table:
        return '{0}.{1}'.format(self.table.get_identifier(), self.name)
    return '{0}'.format(self.name)

Gets the SELECT field portion for the field without the alias. If the
field has a table, it will be included here like table.field

:return: Gets the SELECT field portion for the field without the alias
:rtype: str
def set_table(self, table):
    super(MultiField, self).set_table(table)
    if self.field and self.field.table is None:
        self.field.set_table(self.table)
Setter for the table of this field. Also sets the inner field's table.
def get_select_sql(self):
    return '{0}({1}{2}){3}'.format(
        self.name.upper(),
        self.get_distinct(),
        self.get_field_identifier(),
        self.get_over(),
    )

Gets the SELECT field portion for the field without the alias. If the
field has a table, it will be included here like
AggregateFunction(table.field)

:return: Gets the SELECT field portion for the field without the alias
:rtype: str
def get_field_identifier(self):
    if self.default is None:
        return '{0}, {1}'.format(self.field.get_select_sql(), self.offset)
    return "{0}, {1}, '{2}'".format(
        self.field.get_select_sql(), self.offset, self.default)
Return the lag/lead function with the offset and default value
def get_select_sql(self):
    return '(({0}) - ({1}({2}){3}))'.format(
        self.field.get_select_sql(),
        self.name.upper(),
        self.get_field_identifier(),
        self.get_over(),
    )
Calculate the difference between this record's value and the lag/lead record's value
def generate_auto_fields(self):
    # ignore the original date field
    self.ignore = True
    datetime_str = None

    # create an alias for the unix timestamp extraction
    epoch_alias = '{0}__{1}'.format(self.field.get_name(), 'epoch')

    if self.name == 'all':
        datetime_str = self.field
        self.add_to_table(
            AllEpoch(datetime_str, table=self.table, cast=self.cast),
            epoch_alias)
        # do not add the date order by for "all" grouping because we want
        # to order by rank
        return
    elif self.name == 'none':
        datetime_str = self.field
        self.add_to_table(
            Epoch(datetime_str, table=self.table, cast=self.cast),
            epoch_alias, add_group=True)
    else:
        group_names = default_group_names
        if self.name == 'week':
            group_names = week_group_names

        group_name_index = group_names.index(self.name) + 1
        for group_name in group_names[0:group_name_index]:
            field_alias = '{0}__{1}'.format(
                self.field.get_name(), group_name)
            auto_field = group_map[group_name](
                self.field, table=self.table, cast=self.cast)
            self.add_to_table(auto_field, field_alias, add_group=True)

            # check if this is the last date grouping
            if group_name == self.name:
                datetime_str = self.field
                self.add_to_table(
                    GroupEpoch(
                        datetime_str,
                        date_group_name=group_name,
                        table=self.table,
                        cast=self.cast,
                    ),
                    epoch_alias,
                    add_group=True
                )

    if self.desc:
        self.table.owner.order_by('-{0}'.format(epoch_alias))
    else:
        self.table.owner.order_by(epoch_alias)
Generates any auto fields needed to properly group this date part. Ex: a Day field will create Year, Month, Day fields and group by Year, Month, Day
def add_to_table(self, field, alias, add_group=False):
    self.table.add_field({
        alias: field
    })
    if add_group:
        self.table.owner.group_by(alias)

Adds this field to the field's table and optionally group by it

:param field: The field to add to the table
:type field: str or :class:`Field <querybuilder.fields.Field>`
:param alias: The alias for the field
:type alias: str
:param add_group: Whether or not the table should group by this field
:type add_group: bool
def get_sql(self):
    alias = self.get_alias()
    if alias:
        return '{0} AS {1}'.format(self.get_from_name(), alias)
    return self.get_identifier()

Gets the FROM sql portion for this table

Ex: table_name AS alias

:returns: Returns the table identifier to be used in the FROM sql portion
    of the query
:rtype: str
def get_alias(self):
    alias = None
    if self.alias:
        alias = self.alias
    elif self.auto_alias:
        alias = self.auto_alias
    return alias

Gets the alias for the table or the auto_alias if one is set. If there
isn't any kind of alias, None is returned.

:returns: The table alias, auto_alias, or None
:rtype: str or None
def add_field(self, field):
    field = FieldFactory(
        field,
    )
    field.set_table(self)

    # make sure field is not already added
    field_name = field.get_name()
    for existing_field in self.fields:
        if existing_field.get_name() == field_name:
            return None

    self.before_add_field(field)
    field.before_add()

    if field.ignore is False:
        self.fields.append(field)

    return field

Adds a field to this table

:param field: This can be a string of a field name, a dict of
    {'alias': field}, or a ``Field`` instance
:type field: str or dict or Field
def remove_field(self, field):
    new_field = FieldFactory(
        field,
    )
    new_field.set_table(self)
    new_field_identifier = new_field.get_identifier()
    for field in self.fields:
        if field.get_identifier() == new_field_identifier:
            self.fields.remove(field)
            return field
    return None

Removes a field from this table

:param field: This can be a string of a field name, a dict of
    {'alias': field}, or a ``Field`` instance
:type field: str or dict or :class:`Field <querybuilder.fields.Field>`
def add_fields(self, fields):
    if isinstance(fields, string_types):
        fields = [fields]
    elif type(fields) is tuple:
        fields = list(fields)

    field_objects = [self.add_field(field) for field in fields]
    return field_objects

Adds all of the passed fields to the table's current field list

:param fields: The fields to select from ``table``. This can be a single
    field, a tuple of fields, or a list of fields. Each field can be a
    string or ``Field`` instance
:type fields: str or tuple or list of str or list of Field or
    :class:`Field <querybuilder.fields.Field>`
def find_field(self, field=None, alias=None):
    if alias:
        field = alias
    field = FieldFactory(field, table=self, alias=alias)
    identifier = field.get_identifier()
    for field in self.fields:
        if field.get_identifier() == identifier:
            return field
    return None

Finds a field by name or alias.

:param field: string of the field name or alias, dict of
    {'alias': field}, or a Field instance
:type field: str or dict or Field
:returns: The field if it is found, otherwise None
:rtype: :class:`Field <querybuilder.fields.Field>` or None
def init_defaults(self):
    super(SimpleTable, self).init_defaults()
    self.name = self.table
Sets the name of the table to the passed in table value