code
stringlengths
52
7.75k
docs
stringlengths
1
5.85k
def all_of(api_call, *args, **kwargs):
    """Generator that iterates over all results of an API call that requires
    limit/start pagination.

    If the `limit` keyword argument is set, it is used to stop the generator
    after the given number of result items.

    >>> for i, v in enumerate(all_of(api.get_content)):
    ...     v = bunchify(v)
    ...     print('\\t'.join((str(i), v.type, v.id, v.status, v.title)))

    :param api_call: Confluence API call (method).
    :param args: Positional arguments of the call.
    :param kwargs: Keyword arguments of the call.
    """
    kwargs = kwargs.copy()  # we mutate 'start'/'limit' below; don't touch the caller's dict
    # With no explicit limit, iterate effectively forever (until the server
    # stops reporting a 'next' link).
    pos, outer_limit = 0, kwargs.get('limit', 0) or sys.maxsize
    while True:
        response = api_call(*args, **kwargs)
        for item in response.get('results', []):
            pos += 1
            if pos > outer_limit:
                return
            yield item
        if response.get('_links', {}).get('next', None):
            # Advance the pagination window using the server-reported values.
            kwargs['start'] = response['start'] + response['size']
            kwargs['limit'] = response['limit']
        else:
            return
def _start_http_session(self):
    """Start a new requests HTTP session, clearing cookies and session data.

    If both a username and a password are configured, the session is set up
    for HTTP Basic authentication.

    :return: None
    """
    api_logger.debug("Starting new HTTP session...")
    session = requests.Session()
    session.headers.update({"User-Agent": self.user_agent})
    if self.username and self.password:
        api_logger.debug("Requests will use authorization.")
        session.auth = HTTPBasicAuth(self.username, self.password)
    self.session = session
def get_content_by_id(self, content_id, status=None, version=None, expand=None, callback=None):
    """Return a piece of Content.

    :param content_id (string): The id of the content.
    :param status (string): OPTIONAL: List of Content statuses to filter results on.
        Default value: [current].
    :param version (int): OPTIONAL: The content version to retrieve. Default: latest.
    :param expand (string): OPTIONAL: Comma separated list of properties to expand on
        the content (e.g. "history,space,version"; extensions such as
        extensions.inlineProperties or extensions.resolution are also accepted).
    :param callback: OPTIONAL: Callback applied to the response data before returning.
        Default: None (raw JSON data returned).
    :return: JSON data from the content/{id} endpoint, or the callback's result.
        May raise requests.HTTPError on bad input.
    """
    query = {}
    if status:
        query["status"] = status
    if version is not None:
        # Cast keeps the query value numeric even when a string was passed in.
        query["version"] = int(version)
    if expand:
        query["expand"] = expand
    endpoint = "rest/api/content/{id}".format(id=content_id)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_content_macro_by_hash(self, content_id, version, macro_hash, callback=None):
    """Return the body of a macro (in storage format) with the given hash.

    Primarily used by connect applications that need a macro body. The hash is
    generated by connect at render time of the local macro holder and usually
    lives for multiple requests. Looking macros up by hash should be considered
    deprecated — it is being replaced, transparently, by macroIds; this endpoint
    matches against a generated hash or a stored macroId during the migration.

    :param content_id (string): The id of the content.
    :param version (int): The version of the content which the hash belongs to.
    :param macro_hash (string): The hash identifying the macro.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the endpoint, or the callback's result.
        May raise requests.HTTPError on bad input.
    """
    endpoint = "rest/api/content/{id}/history/{version}/macro/hash/{hash}".format(
        id=content_id, version=version, hash=macro_hash)
    return self._service_get_request(endpoint, callback=callback)
def get_content_macro_by_macro_id(self, content_id, version, macro_id, callback=None):
    """Return the body of a macro (in storage format) with the given id.

    When content is created without a macroId, Confluence generates a random
    one; it is persisted with the content and only changed on id conflicts.
    For backwards compatibility this endpoint also matches on the hash of the
    macro body even when a macroId is present.

    :param content_id (string): The id of the content.
    :param version (int): The version of the content to search.
    :param macro_id (string): The macroId of the macro to find.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the endpoint, or the callback's result.
        May raise requests.HTTPError on bad input.
    """
    endpoint = "rest/api/content/{id}/history/{version}/macro/id/{macro_id}".format(
        id=content_id, version=int(version), macro_id=macro_id)
    return self._service_get_request(endpoint, callback=callback)
def search_content(self, cql_str=None, cql_context=None, expand=None, start=0, limit=None, callback=None):
    """Fetch a list of content using the Confluence Query Language (CQL).

    See: Advanced searching using CQL
    (https://developer.atlassian.com/display/CONFDEV/Advanced+Searching+using+CQL)

    :param cql_str (string): OPTIONAL: A CQL query string used to locate content.
    :param cql_context (dict): OPTIONAL: The context to execute the search in;
        JSON-serialized into the request as a SearchContext.
    :param expand (string): OPTIONAL: Comma separated list of properties to expand
        on the content. Default: empty.
    :param start (int): OPTIONAL: Start point of the collection to return. Default: 0.
    :param limit (int): OPTIONAL: Max number of items to return (may be restricted
        by fixed system limits). Default: 25.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/search endpoint, or the callback's result.
        May raise requests.HTTPError on bad input.
    """
    query = {}
    if cql_str:
        query["cql"] = cql_str
    if cql_context:
        query["cqlcontext"] = json.dumps(cql_context)
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    return self._service_get_request("rest/api/content/search", params=query, callback=callback)
def get_content_children(self, content_id, expand=None, parent_version=None, callback=None):
    """Return a map of the direct children of a piece of Content.

    Content can have several child types (a Page's children may be Pages,
    Comments, Attachments). The child types returned are selected via the
    "expand" query parameter; with no expand, the map merely lists the child
    types available for expansion on this content.

    :param content_id (string): The id of the content to retrieve children for.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the children. Default: None.
    :param parent_version (int): OPTIONAL: Version of the content to retrieve
        children for. Default: 0 (latest).
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/child endpoint, or the callback's
        result. May raise requests.HTTPError on bad input.
    """
    query = {}
    if expand:
        query["expand"] = expand
    if parent_version:
        # 0 / None fall through: the server then uses the latest version.
        query["parentVersion"] = parent_version
    endpoint = "rest/api/content/{id}/child".format(id=content_id)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_content_descendants(self, content_id, expand=None, callback=None):
    """Return a map of the descendants of a piece of Content.

    Content can have several descendant types (Pages, Comments, Attachments);
    which ones are returned is selected via the "expand" query parameter. With
    no expand, the map merely lists the descendant types available for
    expansion on this content.

    :param content_id (string): The id of the content to retrieve descendants for.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the descendants. Default: None.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/descendant endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    query = {"expand": expand} if expand else {}
    endpoint = "rest/api/content/{id}/descendant".format(id=content_id)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_content_descendants_by_type(self, content_id, child_type, expand=None, start=None, limit=None,
                                    callback=None):
    """Return the descendants of a piece of Content, limited to one descendant type.

    The descendant type returned is selected by the "type" path parameter.
    Currently the only supported descendants are comment descendants of
    non-comment Content.

    :param content_id (string): The id of the content to retrieve descendants for.
    :param child_type (string): The ContentType to filter descendants on.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the descendants. Default: empty.
    :param start (int): OPTIONAL: Index of the first item to return. Default: 0.
    :param limit (int): OPTIONAL: How many items to return after the start index.
        Default: 25 or the site limit.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/descendant/{type} endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    query = {}
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    endpoint = "rest/api/content/{id}/descendant/{type}".format(id=content_id, type=child_type)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_content_labels(self, content_id, prefix=None, start=None, limit=None, callback=None):
    """Return the list of labels on a piece of Content.

    :param content_id (string): The id of the labels' content container.
    :param prefix (string): OPTIONAL: Prefixes to filter the labels with
        (see Label.Prefix). Default: None.
    :param start (int): OPTIONAL: Start point of the collection to return.
        Default: None (0).
    :param limit (int): OPTIONAL: Max number of labels to return (may be
        restricted by fixed system limits). Default: 200.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/label endpoint, or the callback's
        result. May raise requests.HTTPError on bad input.
    """
    query = {}
    if prefix:
        query["prefix"] = prefix
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    endpoint = "rest/api/content/{id}/label".format(id=content_id)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_content_comments(self, content_id, expand=None, parent_version=None, start=None, limit=None,
                         location=None, depth=None, callback=None):
    """Return the comments associated with a piece of content.

    :param content_id (string): The id of the content to retrieve comments for.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the children; extensions such as extensions.inlineProperties
        (inline comment-specific properties) or extensions.resolution
        (resolution status of each comment) are also accepted. Default: empty.
    :param parent_version (int): OPTIONAL: Version of the content to retrieve
        children for. Default: 0 (latest).
    :param start (int): OPTIONAL: Index of the first item to return. Default: 0.
    :param limit (int): OPTIONAL: How many items to return after the start
        index. Default: site limit.
    :param location (string): OPTIONAL: Location of the comments: "inline",
        "footer" or "resolved"; multiple location params may be given, and the
        results match any of them. Default: "" (all).
    :param depth (string): OPTIONAL: Depth of the comments: "" (root only) or
        "all". Default: "".
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/child/comment endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    params = {}
    if expand:
        params["expand"] = expand
    if parent_version:
        params["parentVersion"] = parent_version
    if start is not None:
        params["start"] = int(start)
    if limit is not None:
        params["limit"] = int(limit)
    if location:
        params["location"] = location
    if depth:
        # "" (root only) is falsy and is deliberately omitted from the query,
        # so "all" is the only meaningful explicit value here.  (The original
        # "assert depth in {'', 'all'}" kept a dead '' alternative that could
        # never be reached inside this truthiness guard.)
        assert depth == "all"
        params["depth"] = depth
    return self._service_get_request("rest/api/content/{id}/child/comment".format(id=content_id),
                                     params=params, callback=callback)
def get_content_attachments(self, content_id, expand=None, start=None, limit=None, filename=None,
                            media_type=None, callback=None):
    """Return a paginated list of attachment Content entities within a container.

    :param content_id (string): The id of the attachments' content container.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the Attachments returned. Default: empty.
    :param start (int): OPTIONAL: Index of the first item to return.
        Default: None (0).
    :param limit (int): OPTIONAL: How many items to return after the start
        index. Default: 50.
    :param filename (string): OPTIONAL: Return only the Attachment with the
        matching file name. Default: None.
    :param media_type (string): OPTIONAL: Return only Attachments with a
        matching Media-Type. Default: None.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/child/attachment endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    query = {}
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    if filename is not None:
        query["filename"] = filename
    if media_type is not None:
        query["mediaType"] = media_type
    endpoint = "rest/api/content/{id}/child/attachment".format(id=content_id)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_content_properties(self, content_id, expand=None, start=None, limit=None, callback=None):
    """Return a paginated list of content properties.

    Content properties are a key/value store attached to a piece of Content:
    the key is a string, the value a JSON-serializable object.

    :param content_id (string): The id of the property content container.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the content properties. Default: empty.
    :param start (int): OPTIONAL: Start point of the collection to return.
        Default: None (0).
    :param limit (int): OPTIONAL: Max number of items to return (may be
        restricted by fixed system limits). Default: 10.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/property endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    query = {}
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    endpoint = "rest/api/content/{id}/property".format(id=content_id)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_content_property_by_key(self, content_id, property_key, expand=None, callback=None):
    """Return a single content property.

    :param content_id (string): The id of the property content container.
    :param property_key (string): The key of the requested property.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the content properties. Default value: "version".
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/property/{key} endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    query = {"expand": expand} if expand else {}
    endpoint = "rest/api/content/{id}/property/{key}".format(id=content_id, key=property_key)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_op_restrictions_by_content_operation(self, content_id, operation_key, expand=None, start=None,
                                             limit=None, callback=None):
    """Return info about all restrictions of a given operation.

    :param content_id (string): The content id to query on.
    :param operation_key (string): The operation key to query on.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand. Default: unclear/inconsistent in the upstream documentation —
        the REST docs claim both "group" and
        "restrictions.user,restrictions.group" as defaults.
    :param start (int): OPTIONAL: Pagination start count.
    :param limit (int): OPTIONAL: Pagination return count limit.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/restriction/byOperation/{operationKey}
        endpoint, or the callback's result. May raise requests.HTTPError on bad input.
    """
    query = {}
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    endpoint = "rest/api/content/{id}/restriction/byOperation/{opkey}".format(
        id=content_id, opkey=operation_key)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_long_tasks(self, expand=None, start=None, limit=None, callback=None):
    """Return information about all tracked long-running tasks.

    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the tasks.
    :param start (int): OPTIONAL: Pagination start count.
    :param limit (int): OPTIONAL: Pagination return count limit.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the longtask endpoint, or the callback's result.
        May raise requests.HTTPError on bad input.
    """
    query = {}
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    return self._service_get_request("rest/api/longtask", params=query, callback=callback)
def get_long_task_info(self, long_task_id, expand=None, callback=None):
    """Return information about a single long-running task.

    :param long_task_id (string): The key of the task to be returned.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the task. Default: empty.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the longtask/{id} endpoint, or the callback's
        result. May raise requests.HTTPError on bad input.
    """
    query = {"expand": expand} if expand else {}
    endpoint = "rest/api/longtask/{id}".format(id=long_task_id)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_spaces(self, space_key=None, expand=None, start=None, limit=None, callback=None):
    """Return information about the spaces present in the Confluence instance.

    :param space_key (string): OPTIONAL: A list of space keys to filter on.
        Default: None.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the spaces. Default: empty.
    :param start (int): OPTIONAL: Start point of the collection to return.
        Default: 0.
    :param limit (int): OPTIONAL: Max number of spaces to return (may be
        restricted by fixed system limits). Default: 25.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the space endpoint, or the callback's result.
        May raise requests.HTTPError on bad input.
    """
    query = {}
    if space_key:
        query["spaceKey"] = space_key
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    return self._service_get_request("rest/api/space", params=query, callback=callback)
def get_space_information(self, space_key, expand=None, callback=None):
    """Return information about a single space.

    :param space_key (string): The key of the space.
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on the space. Default: empty.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the space/{spaceKey} endpoint, or the callback's
        result. May raise requests.HTTPError on bad input.
    """
    query = {"expand": expand} if expand else {}
    endpoint = "rest/api/space/{key}".format(key=space_key)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_space_content(self, space_key, depth=None, expand=None, start=None, limit=None, callback=None):
    """Return the content in the given space.

    :param space_key (string): The key of the space.
    :param depth (string): OPTIONAL: Whether all content or just the root
        content of the space is returned. Valid values: "all", "root".
        Default: "all".
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on each piece of content. Default: empty.
    :param start (int): OPTIONAL: Start point of the collection to return.
        Default: 0.
    :param limit (int): OPTIONAL: Max number of items to return (may be
        restricted by fixed system limits). Default: 25.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the space/{spaceKey}/content endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    query = {}
    if depth:
        assert depth in {"all", "root"}
        query["depth"] = depth
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    endpoint = "rest/api/space/{key}/content".format(key=space_key)
    return self._service_get_request(endpoint, params=query, callback=callback)
def get_space_content_by_type(self, space_key, content_type, depth=None, expand=None, start=None,
                              limit=None, callback=None):
    """Return the content of the given type in the given space.

    :param space_key (string): The key of the space.
    :param content_type (string): The type of content to return.
        Valid values: "page", "blogpost".
    :param depth (string): OPTIONAL: Whether all content or just the root
        content of the space is returned. Valid values: "all", "root".
        Default: "all".
    :param expand (string): OPTIONAL: Comma separated list of properties to
        expand on each piece of content. Default: empty.
    :param start (int): OPTIONAL: Start point of the collection to return.
        Default: 0.
    :param limit (int): OPTIONAL: Max number of items to return (may be
        restricted by fixed system limits). Default: 25.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the space/{spaceKey}/content/{type} endpoint, or
        the callback's result. May raise requests.HTTPError on bad input.
    """
    assert content_type in ("page", "blogpost")
    query = {}
    if depth:
        assert depth in {"all", "root"}
        query["depth"] = depth
    if expand:
        query["expand"] = expand
    if start is not None:
        query["start"] = int(start)
    if limit is not None:
        query["limit"] = int(limit)
    endpoint = "rest/api/space/{key}/content/{type}".format(key=space_key, type=content_type)
    return self._service_get_request(endpoint, params=query, callback=callback)
def create_new_content(self, content_data, callback=None):
    """Create a new piece of Content.

    Example content_data::

        {
            "type": "page",
            "title": "Example Content title",
            "space": {"key": "TST"},
            "body": {
                "storage": {
                    "value": "<p>This is a new page</p>",
                    "representation": "storage"
                }
            }
        }

    :param content_data (dict): The data for the new content. Must contain at
        least the keys listed in NEW_CONTENT_REQUIRED_KEYS
        (i.e. "type", "title", "space", "body").
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content endpoint, or the callback's result.
        May raise requests.HTTPError on bad input.
    """
    assert isinstance(content_data, dict)
    assert self.NEW_CONTENT_REQUIRED_KEYS <= set(content_data.keys())
    return self._service_post_request("rest/api/content",
                                      data=json.dumps(content_data),
                                      headers={"Content-Type": "application/json"},
                                      callback=callback)
def create_new_attachment_by_content_id(self, content_id, attachments, callback=None):
    """Add one or more attachments to a Confluence Content entity, with optional comments.

    Comments are optional, but if included there must be as many comments as
    there are files, and the comments must be in the same order as the files.

    :param content_id (string): The id of the attachments' content container.
    :param attachments (dict or list of dicts): Each dict must have the key
        "file" with an I/O-like value (file, StringIO, etc.), and may also
        have a "comment" key with a string for file comments.
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/child/attachment endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    # Validate the shape up front: a single attachment dict, or a list of
    # them, each carrying a "file" entry.  (Direct dict membership replaces
    # the original redundant '"file" in list(at.keys())' form, and the raise
    # replaces an opaque 'assert False'; the exception type is unchanged.)
    if isinstance(attachments, list):
        assert all(isinstance(at, dict) and "file" in at for at in attachments)
    elif isinstance(attachments, dict):
        assert "file" in attachments
    else:
        raise AssertionError("attachments must be a dict or a list of dicts")
    return self._service_post_request("rest/api/content/{id}/child/attachment".format(id=content_id),
                                      headers={"X-Atlassian-Token": "nocheck"}, files=attachments,
                                      callback=callback)
def create_new_label_by_content_id(self, content_id, label_names, callback=None):
    """Add a list of labels to the specified content.

    :param content_id (string): The id of the labels' content container.
    :param label_names (list of dicts): One dict per label, each with exactly
        the keys "prefix" and "name". (Note: the code requires dicts — plain
        label strings are not accepted, contrary to what the previous
        docstring claimed.)
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/label endpoint, or the callback's
        result. May raise requests.HTTPError on bad input.
    """
    assert isinstance(label_names, list)
    assert all(isinstance(ln, dict) and set(ln.keys()) == {"prefix", "name"} for ln in label_names)
    return self._service_post_request("rest/api/content/{id}/label".format(id=content_id),
                                      data=json.dumps(label_names),
                                      headers={"Content-Type": "application/json"},
                                      callback=callback)
def create_new_content_property(self, content_id, content_property, callback=None):
    """Create a new content property.

    Potentially a duplicate, at the REST API level, of create_new_property.

    Example property data::

        {
            "key": "example-property-key",
            "value": {"anything": "goes"}
        }

    :param content_id (string): The id of the property content container.
    :param content_property (dict): The new property for the content. Must
        have at least the keys "key" and "value". (The previous docstring
        documented a nonexistent parameter name "new_property_data".)
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the content/{id}/property endpoint, or the
        callback's result. May raise requests.HTTPError on bad input.
    """
    assert isinstance(content_property, dict)
    assert {"key", "value"} <= set(content_property.keys())
    return self._service_post_request("rest/api/content/{id}/property".format(id=content_id),
                                      data=json.dumps(content_property),
                                      headers={"Content-Type": "application/json"},
                                      callback=callback)
def create_new_space(self, space_definition, callback=None):
    """Create a new Space.

    The incoming Space does not include an id, but must include a key and
    name, and should include a description.

    Example space data::

        {
            "key": "TST",
            "name": "Example space",
            "description": {
                "plain": {
                    "value": "This is an example space",
                    "representation": "plain"
                }
            }
        }

    :param space_definition (dict): The new space. Must include the keys
        "key", "name" and "description".
    :param callback: OPTIONAL: Callback applied to the response data before
        returning. Default: None (raw JSON data returned).
    :return: JSON data from the space endpoint, or the callback's result.
        May raise requests.HTTPError on bad input.
    """
    assert isinstance(space_definition, dict) and {"key", "name", "description"} <= set(space_definition.keys())
    return self._service_post_request("rest/api/space",
                                      data=json.dumps(space_definition),
                                      headers={"Content-Type": "application/json"},
                                      callback=callback)
def update_content_by_id(self, content_data, content_id, callback=None):
    """Update a piece of content (or restore it from the trash).

    :param content_data: full updated content dict; must contain every key
        in self.UPDATE_CONTENT_REQUIRED_KEYS.
    :param content_id: id of the content to update.
    :param callback: optional post-processing callback for the response.
    :return: JSON response data (or the callback's result).
    """
    assert isinstance(content_data, dict)
    assert set(content_data) >= self.UPDATE_CONTENT_REQUIRED_KEYS
    endpoint = "rest/api/content/{id}".format(id=content_id)
    return self._service_put_request(endpoint, data=json.dumps(content_data),
                                     headers={"Content-Type": "application/json"},
                                     callback=callback)
Updates a piece of Content, or restores if it is trashed. The body contains the representation of the content. Must include the new version number. To restore a piece of content that has the status of trashed the content must have it's version incremented, and status set to current. No other field modifications will be performed when restoring a piece of content from the trash. Request example to restore from trash: { "id": "557059", "status": "current", "version": { "number": 2 } } :param content_data (dict): The content data (with desired updates). This should be retrieved via the API call to get content data, then modified to desired state. Required keys are: "id", "type", "title", "space", "version", and "body". :param content_id (string): The id of the content to update. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the content/{id} endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially. Example content data: { "id": "3604482", "type": "page", "title": "Example Content title", "space": { "key": "TST" }, "version": { "number": 2, "minorEdit": false }, "body": { "storage": { "value": "<p>This is the updated text for the new page</p>", "representation": "storage" } } }
def update_attachment_metadata(self, content_id, attachment_id, new_metadata, callback=None):
    """Update an attachment's non-binary data (filename, media-type, ...).

    :param content_id: id of the content that owns the attachment.
    :param attachment_id: id of the attachment to update.
    :param new_metadata: dict containing every key in
        self.ATTACHMENT_METADATA_KEYS.
    :param callback: optional post-processing callback for the response.
    :return: JSON response data (or the callback's result).
    """
    assert isinstance(new_metadata, dict)
    assert set(new_metadata) >= self.ATTACHMENT_METADATA_KEYS
    endpoint = ("rest/api/content/{id}/child/attachment/{attachment_id}"
                "".format(id=content_id, attachment_id=attachment_id))
    return self._service_put_request(endpoint, data=json.dumps(new_metadata),
                                     headers={"Content-Type": "application/json"},
                                     callback=callback)
Update the non-binary data of an Attachment. This resource can be used to update an attachment's filename, media-type, comment, and parent container. :param content_id (string): A string containing the ID of the attachments content container. :param attachment_id (string): The ID of the attachment to update. :param new_metadata (dict): The updated metadata for the attachment. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the content/{id}/child/attachment/{attachment_id} endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially. Example attachment metadata: { "id": "att5678", "type": "attachment", "title": "new_file_name.txt", "version": { "number": 2, "minorEdit": false } }
def update_attachment(self, content_id, attachment_id, attachment, callback=None):
    """Upload a new version of an attachment's binary data.

    :param content_id: id of the content that owns the attachment.
    :param attachment_id: id of the attachment to replace.
    :param attachment: dict with a required "file" entry (an I/O object),
        optionally "comment" and "minorEdit" entries.
    :param callback: optional post-processing callback for the response.
    :return: JSON response data (or the callback's result).
    :raises AssertionError: if `attachment` is not a dict with a "file" key.
    """
    # One assert states the whole precondition; it fails in exactly the
    # same cases as the previous if/else chain that ended in `assert False`.
    assert isinstance(attachment, dict) and "file" in attachment
    return self._service_post_request(
        "rest/api/content/{content_id}/child/attachment/{attachment_id}/data"
        "".format(content_id=content_id, attachment_id=attachment_id),
        headers={"X-Atlassian-Token": "nocheck"}, files=attachment,
        callback=callback)
Update the binary data of an Attachment, and optionally the comment and the minor edit field. This adds a new version of the attachment, containing the new binary data, filename, and content-type. When updating the binary data of an attachment, the comment related to it together with the field that specifies if it's a minor edit can be updated as well, but are not required. If an update is considered to be a minor edit, notifications will not be sent to the watchers of that content. :param content_id (string): A string containing the id of the attachments content container. :param attachment_id (string): The id of the attachment to upload a new file for. :param attachment (dict): The dictionary describing the attachment to upload. The dict must have a key "file", which has a value that is an I/O object (file, StringIO, etc.), and can also have a "comment" key describing the attachment, and a "minorEdit" key, which is a boolean used to flag that the changes to the attachment are not substantial. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the content/{content_id}/child/attachment/{attachment_id}/data endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially.
def update_property(self, content_id, property_key, new_property_data, callback=None):
    """Update an existing content property.

    :param content_id: id of the content that owns the property.
    :param property_key: key of the property to update.
    :param new_property_data: dict with "key", "value" and "version".
    :param callback: optional post-processing callback for the response.
    :return: JSON response data (or the callback's result).
    """
    assert isinstance(new_property_data, dict)
    assert set(new_property_data) >= {"key", "value", "version"}
    endpoint = "rest/api/content/{id}/property/{key}".format(id=content_id,
                                                             key=property_key)
    return self._service_put_request(endpoint,
                                     data=json.dumps(new_property_data),
                                     headers={"Content-Type": "application/json"},
                                     callback=callback)
Updates a content property. The body contains the representation of the content property. Must include the property id, and the new version number. Attempts to create a new content property if the given version number is 1, just like {@link #create(com.atlassian.confluence.api.model.content.id.ContentId, String, com.atlassian.confluence.api.model.content.JsonContentProperty)}. :param content_id (string): The ID for the content to attach the property to. :param property_key (string): The key for the property to update. :param new_property_data (dict): The updated property data. This requires the keys "key", "value", and "version". :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the content/{id}/property/{key} endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially. Example updated property data: { "key": "example-property-key", "value": { "anything": "goes" }, "version": { "number": 2, "minorEdit": false } }
def update_space(self, space_key, space_definition, callback=None):
    """Update a Space's name, description or homepage.

    :param space_key: key of the space to update.
    :param space_definition: dict with "key", "name" and "description".
    :param callback: optional post-processing callback for the response.
    :return: JSON response data (or the callback's result).
    """
    assert isinstance(space_definition, dict)
    assert set(space_definition) >= {"key", "name", "description"}
    endpoint = "rest/api/space/{key}".format(key=space_key)
    return self._service_put_request(endpoint,
                                     data=json.dumps(space_definition),
                                     headers={"Content-Type": "application/json"},
                                     callback=callback)
Updates a Space. Currently only the Space name, description and homepage can be updated. :param space_key (string): The key of the space to update. :param space_definition (dict): The dictionary describing the updated space metadata. This should include "key", "name" and "description". :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the space/{key} endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially. Example updated space definition: { "key": "TST", "name": "Example space", "description": { "plain": { "value": "This is an example space", "representation": "plain" } } }
def convert_contentbody_to_new_type(self, content_data, old_representation, new_representation, callback=None):
    """Convert a content body between representations.

    Not every pair is convertible server-side (e.g. "view" converts to
    nothing); only membership in the known representation set is checked.

    :param content_data: the content body to transform.
    :param old_representation: representation to convert from.
    :param new_representation: representation to convert to.
    :param callback: optional post-processing callback for the response.
    :return: JSON response data (or the callback's result).
    """
    valid = {"storage", "editor", "view", "export_view"}
    assert {old_representation, new_representation} < valid
    # TODO: Enforce conversion rules better here.
    payload = {"value": str(content_data), "representation": old_representation}
    return self._service_post_request(
        "rest/api/contentbody/convert/{to}".format(to=new_representation),
        data=json.dumps(payload),
        headers={"Content-Type": "application/json"},
        callback=callback)
Converts between content body representations. Not all representations can be converted to/from other formats. Supported conversions: Source Representation | Destination Representation Supported -------------------------------------------------------------- "storage" | "view","export_view","editor" "editor" | "storage" "view" | None "export_view" | None :param content_data (string): The content data to transform. :param old_representation (string): The representation to convert from. :param new_representation (string): The representation to convert to. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the contentbody/convert/{to} endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially.
def delete_content_by_id(self, content_id, status=None, callback=None):
    """Trash or purge a piece of content.

    :param content_id: id of the content to remove.
    :param status: optional status filter (e.g. "trashed" to purge).
    :param callback: optional post-processing callback for the response.
    :return: JSON response data (or the callback's result).
    """
    params = {"status": status} if status else {}
    endpoint = "rest/api/content/{id}".format(id=content_id)
    return self._service_delete_request(endpoint, params=params,
                                        callback=callback)
Trashes or purges a piece of Content, based on its {@link ContentType} and {@link ContentStatus}. :param content_id (string): The ID for the content to remove. :param status (string): OPTIONAL: A status code to query for the location (?) of the content. The REST API suggests you might use "trashed". Default: Empty. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: The JSON data returned from the content/{id} endpoint, or the results of the callback. Will raise requests.HTTPError on bad input, potentially.
def delete_label_by_id(self, content_id, label_name, callback=None):
    """Delete a label from the specified content.

    :param content_id: id of the content that owns the label.
    :param label_name: name of the label to remove.
    :param callback: optional post-processing callback for the response.
    :return: Empty if successful (or the callback's result).
    """
    endpoint = "rest/api/content/{id}/label".format(id=content_id)
    return self._service_delete_request(endpoint,
                                        params={"name": label_name},
                                        callback=callback)
Deletes a label from the specified content. There is an alternative form of this delete method that is not implemented. A DELETE request to /rest/api/content/{id}/label/{label} will also delete a label, but is more limited in the label name that can be accepted (and has no real apparent upside). :param content_id (string): A string containing the id of the labels content container. :param label_name (string): The name of the label to be removed from the content. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: Empty if successful, or the results of the callback. Will raise requests.HTTPError on bad input, potentially.
def delete_property(self, content_id, property_key, callback=None):
    """Delete a single content property.

    :param content_id: id of the content that owns the property.
    :param property_key: key of the property to delete.
    :param callback: optional post-processing callback for the response.
    :return: Empty if successful (or the callback's result).
    """
    endpoint = "rest/api/content/{id}/property/{key}".format(id=content_id,
                                                             key=property_key)
    return self._service_delete_request(endpoint, callback=callback)
Deletes a content property. :param content_id (string): The ID for the content that owns the property to be deleted. :param property_key (string): The name of the property to be deleted. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: Empty if successful, or the results of the callback. Will raise requests.HTTPError on bad input, potentially.
def delete_space(self, space_key, callback=None):
    """Delete a Space (performed server-side as a long-running task).

    :param space_key: key of the space to delete.
    :param callback: optional post-processing callback for the response.
    :return: pointer to the long-poll task (or the callback's result).
    """
    endpoint = "rest/api/space/{key}".format(key=space_key)
    return self._service_delete_request(endpoint, callback=callback)
Deletes a Space. The space is deleted in a long running task, so the space cannot be considered deleted when this method returns. Clients can follow the status link in the response and poll it until the task completes. :param space_key (string): The key of the space to delete. :param callback: OPTIONAL: The callback to execute on the resulting data, before the method returns. Default: None (no callback, raw data returned). :return: A pointer to the longpoll task if successful, or the results of the callback. Will raise requests.HTTPError on bad input, potentially.
def add(self, sensor):
    """Add a sensor (or an iterable of sensors), warning on duplicates.

    A sensor whose name is already present replaces the existing one;
    a duplicate key is only warned about, not rejected.

    :param sensor: a Sensor, or a list/tuple of Sensors (added recursively).
    :raises TypeError: if `sensor` is not a Sensor (or iterable of them).
    """
    if isinstance(sensor, (list, tuple)):
        # Flatten iterables by recursing per element.
        for sss in sensor:
            self.add(sss)
        return
    if not isinstance(sensor, Sensor):
        raise TypeError("pysma.Sensor expected")
    if sensor.name in self:
        # Same name already registered: swap the old sensor out.
        old = self[sensor.name]
        self.__s.remove(old)
        _LOGGER.warning("Replacing sensor %s with %s", old, sensor)
    if sensor.key in self:
        # NOTE(review): duplicate keys are allowed; presumably membership
        # checks both name and key — confirm against __contains__.
        _LOGGER.warning("Duplicate SMA sensor key %s", sensor.key)
    self.__s.append(sensor)
Add a sensor, warning if it exists.
def _fetch_json(self, url, payload):
    """POST `payload` as JSON to `url` and return the decoded response.

    Retries up to 3 times with a 3-second timeout each; after repeated
    timeouts a dict with an 'err' message is returned instead of raising.
    Generator-based coroutine: drive with ``yield from``.
    """
    params = {
        'data': json.dumps(payload),
        'headers': {'content-type': 'application/json'},
        # Attach the session id once a session has been established.
        'params': {'sid': self.sma_sid} if self.sma_sid else None,
    }
    for _ in range(3):
        try:
            with async_timeout.timeout(3):
                res = yield from self._aio_session.post(
                    self._url + url, **params)
                # An empty body decodes to a falsy value; normalize to {}.
                return (yield from res.json()) or {}
        except asyncio.TimeoutError:
            continue
    return {'err': "Could not connect to SMA at {} (timeout)"
                   .format(self._url)}
POST a JSON payload to the device and return the decoded JSON response, retrying up to three times on timeout.
def new_session(self):
    """Establish a new session with the SMA device.

    Stores the session id on success. Generator-based coroutine: drive
    with ``yield from``.

    :return: True if a session id was obtained, False otherwise.
    """
    body = yield from self._fetch_json(URL_LOGIN, self._new_session_data)
    self.sma_sid = jmespath.search('result.sid', body)
    if self.sma_sid:
        return True

    # BUG FIX: the response body used to be str.format()ed INTO the logging
    # format string, so any '%' in the body broke the logger's lazy
    # %-interpolation. Keep the format string constant instead.
    err = body.get('err')
    if err == 503:
        _LOGGER.error("Max amount of sessions reached")
    elif err:
        _LOGGER.error("Could not start session, %s, got %s", err, body)
    else:
        _LOGGER.error("Could not start session, %s, got %s",
                      "Session ID expected [result.sid]", body)
    return False
Establish a new session.
def read(self, sensors):
    """Read the values for a set of sensors from the device.

    Ensures a session exists (logging in if needed), fetches the values
    and lets each sensor extract its own reading from the response body.
    Generator-based coroutine: drive with ``yield from``.

    :param sensors: iterable of Sensor objects (each contributes its key).
    :return: True on success; False if no session could be established or
        the session expired (closed so the next call re-logs-in).
    """
    payload = {'destDev': [], 'keys': list(set([s.key for s in sensors]))}
    if self.sma_sid is None:
        yield from self.new_session()
        if self.sma_sid is None:
            return False
    body = yield from self._fetch_json(URL_VALUES, payload=payload)
    # On the first 401 error we close the session which will re-login
    if body.get('err') == 401:
        _LOGGER.warning("401 error detected, closing session to force "
                        "another login attempt")
        self.close_session()
        return False
    _LOGGER.debug(json.dumps(body))
    for sen in sensors:
        if sen.extract_value(body):
            _LOGGER.debug("%s\t= %s %s", sen.name, sen.value, sen.unit)
    return True
Read a set of keys.
def run(self):
    """Run the player process loop.

    Pumps the GLib main context, publishes the current playback position,
    executes queued tasks, and — only while idle — starts the next queued
    media URI.
    """
    loop = GLib.MainLoop()
    context = loop.get_context()
    while True:
        time.sleep(0.1)
        # Manually iterate the GLib context instead of running the loop.
        if context.pending():
            context.iteration()
        self._manager[ATTR_POSITION] = self._position()
        try:
            # Non-blocking fetch; tasks are (method_name, kwargs) pairs
            # dispatched onto this object.
            method, args = self._task_queue.get(False)
            getattr(self, method)(**args)
        except queue.Empty:
            pass
        # Only dequeue new media once playback is idle.
        if self.state != STATE_IDLE:
            continue
        try:
            uri = self._media_queue.get(False)
            self.media(uri)
        except queue.Empty:
            pass
Run the process. Iterate the GLib main loop and process the task queue.
def media(self, uri):
    """Play a media file.

    Downloads `uri` to a local temp file and extracts title/artist/album
    tags when possible; if anything in that path fails, falls back to
    streaming the original URI directly.
    """
    try:
        local_path, _ = urllib.request.urlretrieve(uri)
        metadata = mutagen.File(local_path, easy=True)
        if metadata.tags:
            self._tags = metadata.tags
        title = self._tags.get(TAG_TITLE, [])
        self._manager[ATTR_TITLE] = title[0] if len(title) else ''
        artist = self._tags.get(TAG_ARTIST, [])
        self._manager[ATTR_ARTIST] = artist[0] if len(artist) else ''
        album = self._tags.get(TAG_ALBUM, [])
        self._manager[ATTR_ALBUM] = album[0] if len(album) else ''
        local_uri = 'file://{}'.format(local_path)
    # urllib.error.HTTPError
    except Exception:  # pylint: disable=broad-except
        # Deliberate best-effort: any download/tag failure streams the URI.
        local_uri = uri
    self._player.set_state(Gst.State.NULL)
    self._player.set_property(PROP_URI, local_uri)
    self._player.set_state(Gst.State.PLAYING)
    self.state = STATE_PLAYING
    self._manager[ATTR_URI] = uri
    self._manager[ATTR_DURATION] = self._duration()
    self._manager[ATTR_VOLUME] = self._player.get_property(PROP_VOLUME)
    _LOGGER.info('playing %s (as %s)', uri, local_uri)
Play a media file.
def play(self):
    """Resume playback if currently paused; otherwise do nothing."""
    if self.state != STATE_PAUSED:
        return
    self._player.set_state(Gst.State.PLAYING)
    self.state = STATE_PLAYING
Change state to playing.
def pause(self):
    """Pause playback if currently playing; otherwise do nothing."""
    if self.state != STATE_PLAYING:
        return
    self._player.set_state(Gst.State.PAUSED)
    self.state = STATE_PAUSED
Change state to paused.
def stop(self):
    """Stop the pipeline, clear cached tags and temp downloads."""
    urllib.request.urlcleanup()
    self._player.set_state(Gst.State.NULL)
    self._tags = {}
    self.state = STATE_IDLE
Stop pipeline.
def set_position(self, position):
    """Seek to `position`; requests beyond the media duration are ignored."""
    if position > self._duration():
        return
    self._manager[ATTR_POSITION] = position
    self._player.seek_simple(_FORMAT_TIME, Gst.SeekFlags.FLUSH,
                             position * _NANOSEC_MULT)
Set media position.
def set_volume(self, volume):
    """Set the player volume and mirror the value into the manager."""
    self._manager[ATTR_VOLUME] = volume
    self._player.set_property(PROP_VOLUME, volume)
    _LOGGER.info('volume set to %.2f', volume)
Set volume.
def state(self, state):
    """Record the new playback state and publish it to the manager."""
    self._manager[ATTR_STATE] = state
    self._state = state
    _LOGGER.info('state changed to %s', state)
Set state.
def _duration(self):
    """Return the media duration in seconds (0 when idle)."""
    if self.state == STATE_IDLE:
        return 0
    return self._player.query_duration(_FORMAT_TIME)[1] // _NANOSEC_MULT
Get media duration.
def _position(self):
    """Return the current media position in seconds (0 when idle)."""
    if self.state == STATE_IDLE:
        return 0
    return self._player.query_position(_FORMAT_TIME)[1] // _NANOSEC_MULT
Get media position.
def _on_message(self, bus, message):  # pylint: disable=unused-argument
    """Handle a message from the GStreamer bus: stop on EOS or error."""
    msg_type = message.type
    if msg_type not in (Gst.MessageType.EOS, Gst.MessageType.ERROR):
        return
    self.stop()
    if msg_type == Gst.MessageType.ERROR:
        err, _ = message.parse_error()
        _LOGGER.error('%s', err)
When a message is received from Gstreamer.
def get_previous_node(node):
    """Return the node before this node.

    Walks up through ancestors until one has a previous sibling; returns
    None when the tree root is reached without finding one.
    """
    current = node
    while True:
        if current.prev_sibling:
            return current.prev_sibling
        if not current.parent:
            return None
        current = current.parent
Return the node before this node.
def casperjs_command_kwargs():
    """Build the subprocess keyword arguments for the capture command."""
    kwargs = dict(stdout=subprocess.PIPE,
                  stderr=subprocess.PIPE,
                  universal_newlines=True)
    phantom_js_cmd = app_settings['PHANTOMJS_CMD']
    if phantom_js_cmd:
        # Ensure the phantomjs binary's directory is reachable via PATH.
        env_path = '{0}:{1}'.format(os.getenv('PATH', ''),
                                    os.path.dirname(phantom_js_cmd))
        kwargs['env'] = {'PATH': env_path}
    return kwargs
Construct the subprocess keyword arguments (pipes, text mode, optional PATH override for phantomjs) used to run the capture command.
def casperjs_command():
    """Locate the capture engine binary and build the base command line.

    Uses the <METHOD>_CMD setting when present; otherwise searches each
    PATH entry for the engine ('casperjs' or 'phantomjs'). Optionally
    sanity-checks the binary with ``--version``, then appends configured
    CLI arguments and the capture script path.

    :raises ImproperlyConfigured: if the binary cannot be found or exits
        with a non-zero status during the self-test.
    """
    method = app_settings['CAPTURE_METHOD']
    cmd = app_settings['%s_CMD' % method.upper()]
    sys_path = os.getenv('PATH', '').split(':')
    if cmd is None:
        # Fall back to scanning PATH for the engine binary by name.
        for binpath in sys_path:
            cmd = os.path.join(binpath, method)
            if os.path.exists(cmd):
                break
    cmd = [cmd]
    if app_settings['TEST_CAPTURE_SCRIPT']:
        try:
            proc = subprocess.Popen(cmd + ['--version'],
                                    **casperjs_command_kwargs())
            proc.communicate()
            status = proc.returncode
            assert status == 0
        except OSError:
            msg = "%s binary cannot be found in PATH (%s)" % (method, sys_path)
            raise ImproperlyConfigured(msg)
        except AssertionError:
            msg = "%s returned status code %s" % (method, status)
            raise ImproperlyConfigured(msg)
    # Add extra CLI arguments
    cmd += app_settings['CLI_ARGS']
    # Concatenate with capture script
    app_path = os.path.dirname(__file__)
    capture = app_settings['CAPTURE_SCRIPT']
    if capture.startswith('./'):
        # Relative script names resolve inside this app's scripts/ folder.
        capture = os.path.join(app_path, 'scripts', capture)
    assert os.path.exists(capture), 'Cannot find %s' % capture
    return cmd + [capture]
Determine which capture engine is specified. Possible options: - casperjs - phantomjs Based on this value, locate the binary of the capture engine. If setting <engine>_CMD is not defined, then look up for ``<engine>`` in shell PATH and build the whole capture command.
def casperjs_capture(stream, url, method=None, width=None, height=None,
                     selector=None, data=None, waitfor=None, size=None,
                     crop=None, render='png', wait=None):
    """Capture a web page with the configured engine (casperjs/phantomjs).

    Writes the capture into `stream` (a file-like object or a filename).
    When post-processing (resize/crop/format conversion) is needed, the
    engine renders into a temp file first; otherwise the raw output is
    copied into `stream`. The temp file is always cleaned up.
    """
    if isinstance(stream, six.string_types):
        # Caller passed a filename: let the engine write there directly.
        output = stream
    else:
        with NamedTemporaryFile('wb+', suffix='.%s' % render, delete=False) as f:
            output = f.name
    try:
        cmd = CASPERJS_CMD + [url, output]
        # Extra command-line options
        cmd += ['--format=%s' % render]
        if method:
            cmd += ['--method=%s' % method]
        if width:
            cmd += ['--width=%s' % width]
        if height:
            cmd += ['--height=%s' % height]
        if selector:
            cmd += ['--selector=%s' % selector]
        if data:
            cmd += ['--data="%s"' % json.dumps(data)]
        if waitfor:
            cmd += ['--waitfor=%s' % waitfor]
        if wait:
            cmd += ['--wait=%s' % wait]
        logger.debug(cmd)
        # Run CasperJS process
        proc = subprocess.Popen(cmd, **casperjs_command_kwargs())
        stdout = proc.communicate()[0]
        process_casperjs_stdout(stdout)
        size = parse_size(size)
        render = parse_render(render)
        if size or (render and render != 'png' and render != 'pdf'):
            # pdf isn't an image, therefore we can't postprocess it.
            image_postprocess(output, stream, size, crop, render)
        else:
            if stream != output:
                # From file to stream
                with open(output, 'rb') as out:
                    stream.write(out.read())
                stream.flush()
    finally:
        if stream != output:
            os.unlink(output)
Captures web pages using ``casperjs``
def process_casperjs_stdout(stdout):
    """Parse and digest capture script output.

    Each line is expected to look like "LEVEL:message"; unprefixed lines
    are logged as INFO.

    :raises CaptureError: on a FATAL line (after logging it).
    """
    for line in stdout.splitlines():
        bits = line.split(':', 1)
        if len(bits) < 2:
            # BUG FIX: previously `bits = ('INFO', bits)` made the message
            # the *list* ['text'] instead of the line text itself.
            level, msg = 'INFO', line
        else:
            level, msg = bits
        if level == 'FATAL':
            logger.fatal(msg)
            raise CaptureError(msg)
        elif level == 'ERROR':
            logger.error(msg)
        else:
            logger.info(msg)
Parse and digest capture script output.
def parse_url(request, url):
    """Resolve the `url` parameter to an absolute URL.

    A valid absolute URL is returned unchanged; an absolute path is
    prefixed with the request's scheme and host; anything else is treated
    as a URL pattern name and reversed.
    """
    try:
        URLValidator()(url)
        return url
    except ValidationError:
        pass
    if url.startswith('/'):
        scheme = 'https' if request.is_secure() else 'http'
        return '{scheme}://{host}{uri}'.format(scheme=scheme,
                                               host=request.get_host(),
                                               uri=url)
    return request.build_absolute_uri(reverse(url))
Parse url URL parameter.
def parse_render(render):
    """Normalize the render URL parameter to a canonical format name.

    Unknown or missing values fall back to 'png'; extensions such as
    'jpg' map to their canonical format ('jpeg').
    """
    fallback = 'png'
    if not render:
        return fallback
    formats = {
        'jpeg': guess_all_extensions('image/jpeg'),
        'png': guess_all_extensions('image/png'),
        'gif': guess_all_extensions('image/gif'),
        'bmp': guess_all_extensions('image/x-ms-bmp'),
        'tiff': guess_all_extensions('image/tiff'),
        'xbm': guess_all_extensions('image/x-xbitmap'),
        'pdf': guess_all_extensions('application/pdf')
    }
    wanted = '.%s' % render.lower()
    for fmt, extensions in formats.items():
        if wanted in extensions:
            return fmt
    return fallback
Parse render URL parameter. >>> parse_render(None) 'png' >>> parse_render('html') 'png' >>> parse_render('png') 'png' >>> parse_render('jpg') 'jpeg' >>> parse_render('gif') 'gif'
def parse_size(size_raw):
    """Parse a 'WIDTHxHEIGHT' size parameter into an int tuple.

    Returns (width, height) only when both parts are positive integers;
    any other input (non-string, missing part, zero/negative value)
    yields None.
    """
    def _positive_int(text):
        # None unless `text` parses to a strictly positive integer.
        try:
            value = int(text)
        except ValueError:
            return None
        return value if value > 0 else None

    try:
        parts = size_raw.lower().split('x')
    except AttributeError:
        return None
    if len(parts) != 2:
        return None
    width = _positive_int(parts[0])
    height = _positive_int(parts[1])
    if width is None or height is None:
        return None
    return width, height
Parse size URL parameter. >>> parse_size((100, None)) is None True >>> parse_size('300x100') (300, 100) >>> parse_size('300x') is None True >>> parse_size('x100') is None True >>> parse_size('x') is None True
def image_postprocess(imagefile, output, size, crop, render):
    """Resize/crop a captured image and save it to `output`.

    :param imagefile: path (or file-like) of the captured image.
    :param output: destination stream or filename.
    :param size: optional (width, height) target.
    :param crop: when 'true' (string), crop instead of distorting the
        aspect ratio if the resized image would be too tall.
    :param render: destination format name (png/jpeg/bmp/xbm/...).
    :raises UnsupportedImageFormat: if PIL does not know `render`.
    :raises CaptureError: on I/O failure while saving.
    """
    try:
        from PIL import Image
    except ImportError:
        import Image
    # BUG FIX: Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the
    # identical filter under its modern name. Fall back for very old PIL.
    resample = getattr(Image, 'LANCZOS', None) or Image.ANTIALIAS
    img = Image.open(imagefile)
    size_crop = None
    img_resized = img
    if size and crop and crop.lower() == 'true':
        width_raw, height_raw = img.size
        width, height = size
        height_better = int(height_raw * (float(width) / width_raw))
        if height < height_better:
            size_crop = (0, 0, width, height)
    try:
        if size_crop:
            # Resize keeping aspect ratio, then crop to the requested box.
            size_better = width, height_better
            img_better = img.resize(size_better, resample)
            img_resized = img_better.crop(size_crop)
        elif size:
            img_resized = img.resize(size, resample)

        # If save with 'bmp' use default mode('RGBA'), it will raise:
        # "IOError: cannot write mode RGBA as BMP".
        # So, we need convert image mode
        # from 'RGBA' to 'RGB' for 'bmp' format.
        if render == 'bmp':
            img_resized = img_resized.convert('RGB')
        # Fix IOError: cannot write mode RGBA as XBM
        elif render == 'xbm':
            img_resized = img_resized.convert('1')
        # Works with either filename or file-like object
        img_resized.save(output, render)
    except KeyError:
        raise UnsupportedImageFormat
    except IOError as e:
        raise CaptureError(e)
Resize and crop captured image, and saves to output. (can be stream or filename)
def build_absolute_uri(request, url):
    """Build the capture URL, honouring the CAPTURE_ROOT_URL override.

    Allows capturing against a different server instance than the one
    serving this request.
    """
    root = app_settings.get('CAPTURE_ROOT_URL')
    if root:
        return urljoin(root, url)
    return request.build_absolute_uri(url)
Allow to override printing url, not necessarily on the same server instance.
def render_template(template_name, context, format='png', output=None,
                    using=None, **options):
    """Render a Django template to an image/PDF via the capture engine.

    The template is rendered to a temp HTML file, captured with
    `casperjs_capture`, and the result is written either to `output`
    (a filename) or to a NamedTemporaryFile that is returned.

    :return: an open file object when `output` is None, otherwise None.
    """
    # output stream, as required by casperjs_capture
    stream = BytesIO()
    out_f = None
    # the suffix=.html is a hack for phantomjs which *will*
    # complain about not being able to open source file
    # unless it has a 'html' extension.
    with NamedTemporaryFile(suffix='.html') as render_file:
        template_content = render_to_string(
            template_name,
            context,
            using=using,
        )
        # now, we need to replace all occurences of STATIC_URL
        # with the corresponding file://STATIC_ROOT, but only
        # if STATIC_URL doesn't contain a public URI (like http(s))
        static_url = getattr(settings, 'STATIC_URL', '')
        if settings.STATIC_ROOT and\
                static_url and not static_url.startswith('http'):
            template_content = template_content.replace(
                static_url, 'file://%s' % settings.STATIC_ROOT
            )
        render_file.write(template_content.encode('utf-8'))
        # this is so that the temporary file actually gets filled
        # with the result. (seek() flushes the buffered writes to disk,
        # so the subprocess can read the file by name.)
        render_file.seek(0)
        casperjs_capture(
            stream,
            url='file://%s' % render_file.name,
            **options
        )
    # if no output was provided, use NamedTemporaryFile
    # (so it is an actual file) and return it (so that
    # after function ends, it gets automatically removed)
    if not output:
        out_f = NamedTemporaryFile()
    else:
        # if output was provided, write the rendered
        # content to it
        out_f = open(output, 'wb')
    out_f.write(stream.getvalue())
    out_f.seek(0)
    # return the output if NamedTemporaryFile was used
    if not output:
        return out_f
    else:
        # otherwise, just close the file.
        out_f.close()
Render a template from django project, and return the file object of the result.
def go(fn, *args, **kwargs):
    """Run `fn(*args, **kwargs)` on a daemon thread; return a result getter.

    The returned callable joins the thread (10-second default timeout),
    re-raises any exception raised by `fn`, or returns its result.

    :raises TypeError: immediately, if `fn` is not callable.
    """
    if not callable(fn):
        raise TypeError('go() requires a function, not %r' % (fn,))

    outcome = [None]   # single slot for fn's return value
    failure = []       # filled with sys.exc_info() on error

    def _worker():
        try:
            outcome[0] = fn(*args, **kwargs)
        except Exception:
            # `sys` can already be None during interpreter shutdown.
            if sys:
                failure.extend(sys.exc_info())

    worker = threading.Thread(target=_worker)
    worker.daemon = True
    worker.start()

    def get_result(timeout=10):
        worker.join(timeout)
        if worker.is_alive():
            raise AssertionError('timed out waiting for %r' % fn)
        if failure:
            reraise(*failure)
        return outcome[0]

    return get_result
Launch an operation on a thread and get a handle to its future result. >>> from time import sleep >>> def print_sleep_print(duration): ... sleep(duration) ... print('hello from background thread') ... sleep(duration) ... print('goodbye from background thread') ... return 'return value' ... >>> future = go(print_sleep_print, 0.1) >>> sleep(0.15) hello from background thread >>> print('main thread') main thread >>> result = future() goodbye from background thread >>> result 'return value'
def going(fn, *args, **kwargs):
    """Context manager body: launch `fn` on a thread, wait for it on exit.

    Yields the future returned by `go()`. On normal exit the future is
    waited on (10s) and its result discarded; if the block raised, the
    future is given only 1s to finish (its own errors are logged to
    stderr) and the original exception is re-raised.
    """
    future = go(fn, *args, **kwargs)
    try:
        yield future
    except:
        # We are raising an exception, just try to clean up the future.
        exc_info = sys.exc_info()
        try:
            # Shorter than normal timeout.
            future(timeout=1)
        except:
            log_message = ('\nerror in %s:\n'
                           % format_call(inspect.currentframe()))
            sys.stderr.write(log_message)
            traceback.print_exc()

            # sys.stderr.write('exc in %s' % format_call(inspect.currentframe()))
        reraise(*exc_info)
    else:
        # Raise exception or discard result.
        future(timeout=10)
Launch a thread and wait for its result before exiting the code block. >>> with going(lambda: 'return value') as future: ... pass >>> future() # Won't block, the future is ready by now. 'return value' Or discard the result: >>> with going(lambda: "don't care"): ... pass If an exception is raised within the context, the result is lost: >>> with going(lambda: 'return value') as future: ... assert 1 == 0 Traceback (most recent call last): ... AssertionError
def wait_until(predicate, success_description, timeout=10):
    """Wait up to `timeout` seconds for `predicate` to return a truthy value.

    Polls every 0.1 seconds and returns the predicate's first truthy value.

    :raises AssertionError: "Didn't ever <success_description>" on timeout.
    """
    # BUG FIX: measure elapsed time with the monotonic clock; time.time()
    # can jump with wall-clock adjustments and corrupt the timeout.
    start = time.monotonic()
    while True:
        retval = predicate()
        if retval:
            return retval

        if time.monotonic() - start > timeout:
            raise AssertionError("Didn't ever %s" % success_description)

        time.sleep(0.1)
Wait up to 10 seconds (by default) for predicate to be true. E.g.: wait_until(lambda: client.primary == ('a', 1), 'connect to the primary') If the lambda-expression isn't true after 10 seconds, we raise AssertionError("Didn't ever connect to the primary"). Returns the predicate's first true value.
def _get_c_string(data, position): end = data.index(b"\x00", position) return _utf_8_decode(data[position:end], None, True)[0], end + 1
Decode a BSON 'C' string to python unicode string.
def _synchronized(meth): @functools.wraps(meth) def wrapper(self, *args, **kwargs): with self._lock: return meth(self, *args, **kwargs) return wrapper
Call method while holding a lock.
def bind_tcp_socket(address):
    """Bind a listening IPv4 TCP socket for (host, port).

    A port of None lets the OS pick a free port; the actually-bound port
    is returned.

    :return: (socket_object, (host, bound_port)).
    :raises socket.error: if getaddrinfo yields no usable address.
    """
    host, port = address
    infos = set(socket.getaddrinfo(host, port, socket.AF_INET,
                                   socket.SOCK_STREAM, 0,
                                   socket.AI_PASSIVE))
    for family, socktype, proto, _, sock_addr in infos:
        sock = socket.socket(family, socktype, proto)
        if os.name != 'nt':
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        # Automatic port allocation with port=None.
        sock.bind(sock_addr)
        sock.listen(128)
        return sock, (host, sock.getsockname()[1])

    raise socket.error('could not bind socket')
Takes (host, port) and returns (socket_object, (host, port)). If the passed-in port is None, bind an unused port and return it.
def bind_domain_socket(address):
    """Bind a listening Unix domain socket at the given path.

    Any stale socket file at the path is removed first.

    :return: (socket_object, (path, 0)).
    """
    path = address[0]
    try:
        os.unlink(path)
    except OSError:
        pass
    server = socket.socket(socket.AF_UNIX)
    server.bind(path)
    server.listen(128)
    return server, (path, 0)
Takes (socket path, 0) and returns (socket_object, (path, 0)).
def mock_server_receive_request(client, server):
    """Read one wire-protocol message from `client` and return a Request.

    Parses the 16-byte header for length/request id/opcode, drains the
    message body, then dispatches to the opcode's unpacker.

    :raises NotImplementedError: for opcodes without an unpacker.
    """
    header = mock_server_receive(client, 16)
    length = _UNPACK_INT(header[:4])[0]
    request_id = _UNPACK_INT(header[4:8])[0]
    opcode = _UNPACK_INT(header[12:])[0]
    # Drain the body before validating the opcode so the socket stays sane.
    body = mock_server_receive(client, length - 16)
    if opcode not in OPCODES:
        raise NotImplementedError("Don't know how to unpack opcode %d yet"
                                  % opcode)
    return OPCODES[opcode].unpack(body, client, server, request_id)
Take a client socket and return a Request.
def mock_server_receive(sock, length):
    """Read exactly `length` bytes from `sock`.

    :raises socket.error: with ECONNRESET if the peer closes early.
    """
    remaining = length
    chunks = []
    while remaining:
        data = sock.recv(remaining)
        if not data:
            raise socket.error(errno.ECONNRESET, 'closed')
        chunks.append(data)
        remaining -= len(data)
    return b''.join(chunks)
Receive `length` bytes from a socket object.
def make_docs(*args, **kwargs):
    """Make the documents for a `Request` or `Reply`.

    Accepts several argument styles (see the tutorial) and always returns
    a *list* of documents.
    """
    err_msg = "Can't interpret args: "
    if not args and not kwargs:
        return []

    if not args:
        # OpReply(ok=1, ismaster=True).
        return [kwargs]

    if isinstance(args[0], (int, float, bool)):
        # server.receives().ok(0, err='uh oh').
        if args[1:]:
            raise_args_err(err_msg, ValueError)
        doc = OrderedDict({'ok': args[0]})
        doc.update(kwargs)
        return [doc]

    if isinstance(args[0], (list, tuple)):
        # Send a batch: OpReply([{'a': 1}, {'a': 2}]).
        if not all(isinstance(doc, (OpReply, Mapping)) for doc in args[0]):
            raise_args_err('each doc must be a dict:')
        if kwargs:
            raise_args_err(err_msg, ValueError)
        return list(args[0])

    if isinstance(args[0], (string_type, text_type)):
        if args[2:]:
            raise_args_err(err_msg, ValueError)
        if len(args) == 2:
            # Command('aggregate', 'collection', {'cursor': {'batchSize': 1}}).
            doc = OrderedDict({args[0]: args[1]})
        else:
            # OpReply('ismaster', me='a.com').
            doc = OrderedDict({args[0]: 1})
        doc.update(kwargs)
        return [doc]

    if kwargs:
        raise_args_err(err_msg, ValueError)

    # Send a batch as varargs: OpReply({'a': 1}, {'a': 2}).
    if not all(isinstance(doc, (OpReply, Mapping)) for doc in args):
        raise_args_err('each doc must be a dict')

    # BUG FIX: previously returned the varargs *tuple* itself; every other
    # branch (and the documented contract) returns a list.
    return list(args)
Make the documents for a `Request` or `Reply`. Takes a variety of argument styles, returns a list of dicts. Used by `make_prototype_request` and `make_reply`, which are in turn used by `MockupDB.receives`, `Request.replies`, and so on. See examples in tutorial.
def make_matcher(*args, **kwargs):
    """Make a Matcher from a message spec.

    An existing Matcher passed alone is returned unchanged; any other
    argument combination is forwarded to the Matcher constructor.
    """
    if not args or not isinstance(args[0], Matcher):
        return Matcher(*args, **kwargs)
    if args[1:] or kwargs:
        raise_args_err("can't interpret args")
    return args[0]
Make a Matcher from a :ref:`message spec <message spec>`: >>> make_matcher() Matcher(Request()) >>> make_matcher({'ismaster': 1}, namespace='admin') Matcher(Request({"ismaster": 1}, namespace="admin")) >>> make_matcher({}, {'_id': 1}) Matcher(Request({}, {"_id": 1})) See more examples in the tutorial section for :ref:`Message Specs`.
def make_prototype_request(*args, **kwargs):
    """Make a prototype `Request` for a `Matcher`.

    Accepts a Request subclass, an existing Request instance, or raw
    message-spec arguments.
    """
    if args:
        head = args[0]
        if inspect.isclass(head) and issubclass(head, Request):
            # e.g. make_prototype_request(OpMsg, 'ismaster').
            return head(*args[1:], **kwargs)
        if isinstance(head, Request):
            # A pre-built Request must be the sole argument.
            if args[1:] or kwargs:
                raise_args_err("can't interpret args")
            return head
    # Match any opcode.
    return Request(*args, **kwargs)
Make a prototype Request for a Matcher.
def docs_repr(*args):
    """Stringify ordered dicts like regular ones.

    Preserves key order and removes the 'u'-prefix on unicode strings in
    Python 2.
    """
    # json_util keeps dict ordering; join the per-document dumps.
    rendered = [text_type(json_util.dumps(doc)) for doc in args]
    return u', '.join(rendered)
Stringify ordered dicts like a regular ones. Preserve order, remove 'u'-prefix on unicodes in Python 2: >>> print(docs_repr(OrderedDict([(u'_id', 2)]))) {"_id": 2} >>> print(docs_repr(OrderedDict([(u'_id', 2), (u'a', u'b')]), ... OrderedDict([(u'a', 1)]))) {"_id": 2, "a": "b"}, {"a": 1} >>> >>> import datetime >>> now = datetime.datetime.utcfromtimestamp(123456) >>> print(docs_repr(OrderedDict([(u'ts', now)]))) {"ts": {"$date": 123456000}} >>> >>> oid = bson.ObjectId(b'123456781234567812345678') >>> print(docs_repr(OrderedDict([(u'oid', oid)]))) {"oid": {"$oid": "123456781234567812345678"}}
def seq_match(seq0, seq1):
    """True if seq0's items all occur in seq1, in the same relative order.

    That is, seq0 is an (order-preserving) subsequence of seq1.
    """
    if len(seq1) < len(seq0):
        return False
    remaining = iter(seq1)
    for wanted in seq0:
        # Scan forward in seq1 for this element; the iterator never rewinds,
        # so matches must appear in order.
        for candidate in remaining:
            if candidate == wanted:
                break
        else:
            # Exhausted seq1 without finding `wanted`.
            return False
    return True
True if seq0 is a subset of seq1 and their elements are in same order. >>> seq_match([], []) True >>> seq_match([1], [1]) True >>> seq_match([1, 1], [1]) False >>> seq_match([1], [1, 2]) True >>> seq_match([1, 1], [1, 1]) True >>> seq_match([3], [1, 2, 3]) True >>> seq_match([1, 3], [1, 2, 3]) True >>> seq_match([2, 1], [1, 2, 3]) False
def raise_args_err(message='bad arguments', error_class=TypeError):
    """Throw an error with a standard message, displaying the caller's call.

    The raised message embeds a rendering of the calling frame's arguments.
    """
    caller = inspect.currentframe().f_back
    raise error_class('%s: %s' % (message, format_call(caller)))
Throw an error with standard message, displaying function call. >>> def f(a, *args, **kwargs): ... raise_args_err() ... >>> f(1, 2, x='y') Traceback (most recent call last): ... TypeError: bad arguments: f(1, 2, x='y')
def interactive_server(port=27017, verbose=True, all_ok=False, name='MockupDB',
                       ssl=False, uds_path=None):
    """A `MockupDB` that the mongo shell can connect to.

    Call `~.MockupDB.run` on the returned server, and clean it up with
    `~.MockupDB.stop`.

    If ``all_ok`` is True, replies {ok: 1} to anything unmatched by a
    specific responder.
    """
    if uds_path is not None:
        # A domain-socket server does not bind a TCP port.
        port = None

    server = MockupDB(port=port,
                      verbose=verbose,
                      request_timeout=int(1e6),
                      ssl=ssl,
                      auto_ismaster=True,
                      uds_path=uds_path)
    if all_ok:
        server.append_responder({})
    server.autoresponds('whatsmyuri', you='localhost:12345')
    server.autoresponds({'getLog': 'startupWarnings'},
                        log=['hello from %s!' % name])
    server.autoresponds(OpMsg('buildInfo'), version='MockupDB ' + __version__)
    server.autoresponds(OpMsg('listCollections'))
    # Commands the shell probes for that we simply decline.
    for command in ('replSetGetStatus', 'getFreeMonitoringStatus'):
        server.autoresponds(command, ok=0)
    return server
A `MockupDB` that the mongo shell can connect to. Call `~.MockupDB.run` on the returned server, and clean it up with `~.MockupDB.stop`. If ``all_ok`` is True, replies {ok: 1} to anything unmatched by a specific responder.
def client_port(self):
    """Client connection's TCP port, or 0 for a non-TCP peer.

    A Unix domain socket's getpeername() returns a path, not a tuple.
    """
    peer = self._client.getpeername()
    return peer[1] if isinstance(peer, tuple) else 0
Client connection's TCP port.
def assert_matches(self, *args, **kwargs):
    """Assert this matches a :ref:`message spec <message spec>`.

    Returns self, so assertions can be chained.
    """
    matcher = make_matcher(*args, **kwargs)
    if matcher.matches(self):
        return self
    raise AssertionError('%r does not match %r' % (self, matcher))
Assert this matches a :ref:`message spec <message spec>`. Returns self.
def fail(self, err='MockupDB query failure', *args, **kwargs):
    """Reply to a query with the QueryFailure flag and an '$err' key.

    Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
    """
    # OR the QueryFailure bit into whatever flags the caller supplied.
    kwargs['flags'] = kwargs.get('flags', 0) | REPLY_FLAGS['QueryFailure']
    kwargs['$err'] = err
    self.replies(*args, **kwargs)
    return True
Reply to a query with the QueryFailure flag and an '$err' key. Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
def command_err(self, code=1, errmsg='MockupDB command failure', *args, **kwargs):
    """Send an error reply to a command.

    Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
    """
    # Callers may override 'ok'; otherwise the reply signals failure.
    if 'ok' not in kwargs:
        kwargs['ok'] = 0
    kwargs.update(code=code, errmsg=errmsg)
    self.replies(*args, **kwargs)
    return True
Error reply to a command. Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
def hangup(self):
    """Close the connection.

    Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
    """
    server = self._server
    if server:
        server._log('\t%d\thangup' % self.client_port)
    self._client.shutdown(socket.SHUT_RDWR)
    return True
Close the connection. Returns True so it is suitable as an `~MockupDB.autoresponds` handler.
def _matches_docs(self, docs, other_docs): for doc, other_doc in zip(docs, other_docs): if not self._match_map(doc, other_doc): return False return True
Overridable method.
def _replies(self, *args, **kwargs):
    """Overridable method: encode a reply and send it to the client."""
    reply_msg = make_reply(*args, **kwargs)
    server = self._server
    if server:
        server._log('\t%d\t<-- %r' % (self.client_port, reply_msg))
    self._client.sendall(reply_msg.reply_bytes(self))
Overridable method.
def unpack(cls, msg, client, server, request_id):
    """Parse message and return an `OpMsg`.

    Takes the client message as bytes (excluding the standard message
    header), the client and server socket objects, and the client
    request id.
    """
    payload_document = OrderedDict()
    flags, = _UNPACK_UINT(msg[:4])
    pos = 4
    # NOTE(review): only flag values 0 and 2 are accepted; 2 appears to be
    # the OP_MSG moreToCome bit -- confirm against the wire-protocol spec.
    if flags != 0 and flags != 2:
        raise ValueError('OP_MSG flag must be 0 or 2 not %r' % (flags,))
    while pos < len(msg):
        payload_type, = _UNPACK_BYTE(msg[pos:pos + 1])
        pos += 1
        payload_size, = _UNPACK_INT(msg[pos:pos + 4])
        if payload_type == 0:
            # Payload type 0: a single BSON document. A BSON document's
            # first 4 bytes are its own length, so decode directly from
            # `pos` without skipping the size prefix.
            doc = bson.decode_all(msg[pos:pos + payload_size],
                                  CODEC_OPTIONS)[0]
            payload_document.update(doc)
            pos += payload_size
        elif payload_type == 1:
            # Payload type 1: a document sequence -- int32 size, C-string
            # identifier, then a contiguous run of BSON documents.
            section_size, = _UNPACK_INT(msg[pos:pos + 4])
            pos += 4
            identifier, pos = _get_c_string(msg, pos)
            # Section starts w/ 4-byte size prefix, identifier ends w/ nil.
            documents_len = section_size - len(identifier) - 1 - 4
            documents = bson.decode_all(msg[pos:pos + documents_len],
                                        CODEC_OPTIONS)
            payload_document[identifier] = documents
            pos += documents_len
    # The '$db' field of the merged payload supplies the namespace;
    # a KeyError here means the message had no '$db'.
    database = payload_document['$db']
    return OpMsg(payload_document, namespace=database, flags=flags,
                 _client=client, request_id=request_id, _server=server)
Parse message and return an `OpMsg`. Takes the client message as bytes, the client and server socket objects, and the client request id.
def unpack(cls, msg, client, server, request_id):
    """Parse message and return an `OpQuery` or `Command`.

    Takes the client message as bytes (excluding the standard message
    header), the client and server socket objects, and the client
    request id.
    """
    flags, = _UNPACK_INT(msg[:4])
    namespace, pos = _get_c_string(msg, 4)
    # Queries addressed to the "$cmd" pseudo-collection are commands.
    is_command = namespace.endswith('.$cmd')
    num_to_skip, = _UNPACK_INT(msg[pos:pos + 4])
    pos += 4
    num_to_return, = _UNPACK_INT(msg[pos:pos + 4])
    pos += 4
    docs = bson.decode_all(msg[pos:], CODEC_OPTIONS)
    if is_command:
        # A command carries exactly one BSON document.
        assert len(docs) == 1
        # Strip the ".$cmd" suffix to recover the database name.
        command_ns = namespace[:-len('.$cmd')]
        return Command(docs, namespace=command_ns, flags=flags,
                       _client=client, request_id=request_id, _server=server)
    else:
        # A query has the filter document, optionally followed by a
        # projection ("fields") document.
        if len(docs) == 1:
            fields = None
        else:
            assert len(docs) == 2
            fields = docs[1]
        return OpQuery(docs[0], fields=fields, namespace=namespace,
                       flags=flags, num_to_skip=num_to_skip,
                       num_to_return=num_to_return, _client=client,
                       request_id=request_id, _server=server)
Parse message and return an `OpQuery` or `Command`. Takes the client message as bytes, the client and server socket objects, and the client request id.
def unpack(cls, msg, client, server, request_id):
    """Parse message and return an `OpGetMore`.

    Takes the client message as bytes, the client and server socket
    objects, and the client request id.
    """
    flags, = _UNPACK_INT(msg[:4])
    namespace, offset = _get_c_string(msg, 4)
    num_to_return, = _UNPACK_INT(msg[offset:offset + 4])
    # The 8-byte cursor id immediately follows the int32 numberToReturn.
    cursor_id, = _UNPACK_LONG(msg[offset + 4:offset + 12])
    return OpGetMore(namespace=namespace, flags=flags, _client=client,
                     num_to_return=num_to_return, cursor_id=cursor_id,
                     request_id=request_id, _server=server)
Parse message and return an `OpGetMore`. Takes the client message as bytes, the client and server socket objects, and the client request id.
def unpack(cls, msg, client, server, _):
    """Parse message and return an `OpKillCursors`.

    Takes the client message as bytes, the client and server socket
    objects, and the client request id.
    """
    # Leading 4 bytes are reserved (ZERO in the wire protocol).
    num_of_cursor_ids, = _UNPACK_INT(msg[4:8])
    cursor_ids = []
    pos = 8
    for _ in range(num_of_cursor_ids):
        # Cursor ids are int64 on the wire (8 bytes each); the original
        # read only 4 bytes per id with _UNPACK_INT, misparsing every
        # id after the first and reading half-words as ids.
        cursor_ids.append(_UNPACK_LONG(msg[pos:pos + 8])[0])
        pos += 8
    return OpKillCursors(_client=client, cursor_ids=cursor_ids,
                         _server=server)
Parse message and return an `OpKillCursors`. Takes the client message as bytes, the client and server socket objects, and the client request id.
def unpack(cls, msg, client, server, request_id):
    """Parse message and return an instance of this request class.

    Takes the client message as bytes, the client and server socket
    objects, and the client request id.
    """
    flags, = _UNPACK_INT(msg[:4])
    namespace, body_start = _get_c_string(msg, 4)
    # Everything after the namespace is a run of BSON documents.
    documents = bson.decode_all(msg[body_start:], CODEC_OPTIONS)
    return cls(*documents, namespace=namespace, flags=flags,
               _client=client, request_id=request_id, _server=server)
Parse message and return an `OpInsert`. Takes the client message as bytes, the client and server socket objects, and the client request id.
def reply_bytes(self, request):
    """Take a `Request` and return an OP_REPLY message as bytes."""
    # Reply body: flags (int32), cursor id (int64), starting-from (int32),
    # number returned (int32), then the BSON documents. '<' disables
    # struct padding, so one pack call matches the original field-by-field
    # packing byte for byte.
    body = struct.pack('<iqii', self._flags, self._cursor_id,
                       self._starting_from, len(self._docs))
    body += b''.join(bson.BSON.encode(doc) for doc in self._docs)
    # Standard 16-byte header: total length, request id, responseTo, opcode.
    header = struct.pack('<iiii', 16 + len(body),
                         random.randint(0, 1000000),
                         request.request_id, OP_REPLY)
    return header + body
Take a `Request` and return an OP_REPLY message as bytes.
def reply_bytes(self, request):
    """Take a `Request` and return an OP_MSG message as bytes."""
    body = b''.join([
        struct.pack('<I', self._flags),
        struct.pack('<b', 0),  # Payload type 0: a single BSON document.
        bson.BSON.encode(self.doc),
    ])
    # Standard 16-byte header: total length, request id, responseTo, opcode.
    header = struct.pack('<iiii', 16 + len(body),
                         random.randint(0, 1000000),
                         request.request_id, OP_MSG)
    return header + body
Take a `Request` and return an OP_MSG message as bytes.
def matches(self, *args, **kwargs):
    """Test if a request matches a :ref:`message spec <message spec>`.

    Returns ``True`` or ``False``.
    """
    request = make_prototype_request(*args, **kwargs)
    # A None in the prototype acts as a wildcard: only attributes the
    # prototype actually specifies are compared.
    if self._prototype.opcode not in (None, request.opcode):
        return False
    if self._prototype.is_command not in (None, request.is_command):
        return False
    for name in dir(self._prototype):
        if name.startswith('_') or name in request._non_matched_attrs:
            # Ignore privates, and handle documents specially.
            continue
        prototype_value = getattr(self._prototype, name, None)
        if inspect.ismethod(prototype_value):
            # Skip bound methods; only data attributes participate.
            continue
        actual_value = getattr(request, name, None)
        if prototype_value not in (None, actual_value):
            return False
    # A prototype with no docs matches any number of request docs;
    # otherwise the counts must agree before comparing contents.
    if len(self._prototype.docs) not in (0, len(request.docs)):
        return False
    return self._prototype._matches_docs(self._prototype.docs, request.docs)
Test if a request matches a :ref:`message spec <message spec>`. Returns ``True`` or ``False``.
def run(self):
    """Begin serving. Returns the bound port, or 0 for domain socket."""
    if self._uds_path:
        self._listening_sock, self._address = bind_domain_socket(
            self._address)
    else:
        self._listening_sock, self._address = bind_tcp_socket(self._address)
    if self._ssl:
        # Serve over TLS with the certificate shipped next to this module.
        certfile = os.path.join(os.path.dirname(__file__), 'server.pem')
        self._listening_sock = _ssl.wrap_socket(
            self._listening_sock, certfile=certfile, server_side=True)
    accept_thread = threading.Thread(target=self._accept_loop)
    accept_thread.daemon = True
    self._accept_thread = accept_thread
    accept_thread.start()
    return self.port
Begin serving. Returns the bound port, or 0 for domain socket.
def stop(self):
    """Stop serving. Always call this to clean up after yourself."""
    self._stopped = True
    threads = [self._accept_thread] + list(self._server_threads)
    self._listening_sock.close()
    for sock in list(self._server_socks):
        # Best effort: the peer may already have disconnected.
        try:
            sock.shutdown(socket.SHUT_RDWR)
        except socket.error:
            pass
        try:
            sock.close()
        except socket.error:
            pass
    # Release our lock while joining so server threads can finish up.
    with self._unlock():
        for thread in threads:
            thread.join(10)
    if self._uds_path:
        try:
            os.unlink(self._uds_path)
        except OSError:
            pass
Stop serving. Always call this to clean up after yourself.
def receives(self, *args, **kwargs):
    """Pop the next `Request` and assert it matches.

    Returns None if the server is stopped.

    Pass a `Request` or request pattern to specify what client request
    to expect. See the tutorial for examples. Pass ``timeout`` as a
    keyword argument to override this server's ``request_timeout``.
    """
    timeout = kwargs.pop('timeout', self._request_timeout)
    deadline = time.time() + timeout
    matcher = Matcher(*args, **kwargs)
    while not self._stopped:
        try:
            # Short timeout so we notice if the server is stopped.
            request = self._request_q.get(timeout=0.05)
        except Empty:
            if time.time() > deadline:
                raise AssertionError(
                    'expected to receive %r, got nothing'
                    % matcher.prototype)
            continue
        if matcher.matches(request):
            return request
        raise AssertionError('expected to receive %r, got %r'
                             % (matcher.prototype, request))
Pop the next `Request` and assert it matches. Returns None if the server is stopped. Pass a `Request` or request pattern to specify what client request to expect. See the tutorial for examples. Pass ``timeout`` as a keyword argument to override this server's ``request_timeout``.
def got(self, *args, **kwargs):
    """Does `.request` match the given :ref:`message spec <message spec>`?

    Returns a bool. Pass ``timeout`` as a keyword argument to override
    this server's ``request_timeout``. See examples in the tutorial.
    """
    timeout = kwargs.pop('timeout', self._request_timeout)
    end = time.time() + timeout
    matcher = make_matcher(*args, **kwargs)
    while not self._stopped:
        try:
            # Short timeout so we notice if the server is stopped.
            # (The original peeked with the full `timeout`, which blocked
            # stop-detection for the whole wait, contradicting this
            # comment; min() also keeps got(timeout=0) non-blocking.)
            request = self._request_q.peek(timeout=min(timeout, 0.05))
        except Empty:
            if time.time() > end:
                return False
        else:
            return matcher.matches(request)
    # Server stopped before any request arrived; the docstring promises
    # a bool, so don't fall off the loop returning None.
    return False
Does `.request` match the given :ref:`message spec <message spec>`? >>> s = MockupDB(auto_ismaster=True) >>> port = s.run() >>> s.got(timeout=0) # No request enqueued. False >>> from pymongo import MongoClient >>> client = MongoClient(s.uri) >>> future = go(client.db.command, 'foo') >>> s.got('foo') True >>> s.got(OpMsg('foo', namespace='db')) True >>> s.got(OpMsg('foo', key='value')) False >>> s.ok() >>> future() == {'ok': 1} True >>> s.stop()
def append_responder(self, matcher, *args, **kwargs):
    """Add a responder of last resort.

    Like `.autoresponds`, but instead of adding a responder to the top
    of the stack, add it to the bottom. This responder will be called
    if no others match.
    """
    responder = self._insert_responder("bottom", matcher, *args, **kwargs)
    return responder
Add a responder of last resort. Like `.autoresponds`, but instead of adding a responder to the top of the stack, add it to the bottom. This responder will be called if no others match.
def uri(self):
    """Connection string to pass to `~pymongo.mongo_client.MongoClient`."""
    if self._uds_path:
        # Socket paths must be percent-encoded in a MongoDB URI.
        host = quote_plus(self._uds_path)
    else:
        host = format_addr(self._address)
    base = 'mongodb://%s' % (host,)
    if self._ssl:
        return base + '/?ssl=true'
    return base
Connection string to pass to `~pymongo.mongo_client.MongoClient`.
def _accept_loop(self):
    """Accept client connections and spawn a thread for each."""
    self._listening_sock.setblocking(0)
    while not self._stopped and not _shutting_down:
        try:
            # Wait a short time to accept. select.select returns a
            # 3-tuple of lists, which is ALWAYS truthy -- the original
            # tested the tuple, making the guard a no-op and relying on
            # accept() raising EAGAIN. Test the readable list instead.
            readable, _, _ = select.select(
                [self._listening_sock.fileno()], [], [], 1)
            if readable:
                client, client_addr = self._listening_sock.accept()
                client.setblocking(True)
                self._log('connection from %s' % format_addr(client_addr))
                server_thread = threading.Thread(
                    target=functools.partial(
                        self._server_loop, client, client_addr))
                # Store weakrefs to the thread and socket, so we can
                # dispose them in stop().
                self._server_threads[server_thread] = None
                self._server_socks[client] = None
                server_thread.daemon = True
                server_thread.start()
        except socket.error as error:
            if error.errno not in (
                    errno.EAGAIN, errno.EBADF, errno.EWOULDBLOCK):
                raise
        except select.error as error:
            if error.args[0] == errno.EBADF:
                # Listening socket was closed by stop().
                break
            else:
                raise
Accept client connections and spawn a thread for each.