def refs(self, multihash, **kwargs):
    """Returns a list of hashes of objects referenced by the given hash.

    .. code-block:: python

        >>> c.refs('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        [{'Ref': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7 … cNMV', 'Err': ''},
         …
         {'Ref': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTi … eXJY', 'Err': ''}]

    Parameters
    ----------
    multihash : str
        Path to the object(s) to list refs from

    Returns
    -------
    list
    """
    args = (multihash,)
    return self._client.request('/refs', args, decoder='json', **kwargs)

def block_stat(self, multihash, **kwargs):
    """Returns a dict with the size of the block with the given hash.

    .. code-block:: python

        >>> c.block_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        {'Key': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D',
         'Size': 258}

    Parameters
    ----------
    multihash : str
        The base58 multihash of an existing block to stat

    Returns
    -------
    dict : Information about the requested block
    """
    args = (multihash,)
    return self._client.request('/block/stat', args,
                                decoder='json', **kwargs)

r"""Returns the raw contents of a block. .. code-block:: python >>> c.block_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01' Parameters ---------- multihash : str The base58 multihash of an existing block to get Returns ------- str : Value of the requested block
def block_get(self, multihash, **kwargs):
    r"""Returns the raw contents of a block.

    .. code-block:: python

        >>> c.block_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        b'\x121\n"\x12 \xdaW>\x14\xe5\xc1\xf6\xe4\x92\xd1 … \n\x02\x08\x01'

    Parameters
    ----------
    multihash : str
        The base58 multihash of an existing block to get

    Returns
    -------
    str : Value of the requested block
    """
    args = (multihash,)
    return self._client.request('/block/get', args, **kwargs)

def bitswap_wantlist(self, peer=None, **kwargs):
    """Returns blocks currently on the bitswap wantlist.

    .. code-block:: python

        >>> c.bitswap_wantlist()
        {'Keys': [
            'QmeV6C6XVt1wf7V7as7Yak3mxPma8jzpqyhtRtCvpKcfBb',
            'QmdCWFLDXqgdWQY9kVubbEHBbkieKd3uo7MtCm7nTZZE9K',
            'QmVQ1XvYGF19X4eJqz1s7FJYJqAxFC4oqh3vWJJEXn66cp'
        ]}

    Parameters
    ----------
    peer : str
        Peer to show wantlist for.

    Returns
    -------
    dict : List of wanted blocks
    """
    args = (peer,)
    return self._client.request('/bitswap/wantlist', args,
                                decoder='json', **kwargs)

def bitswap_unwant(self, key, **kwargs):
    """Remove a given block from wantlist.

    Parameters
    ----------
    key : str
        Key to remove from wantlist.
    """
    args = (key,)
    return self._client.request('/bitswap/unwant', args, **kwargs)

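A hedged usage sketch (not part of the original module; the hash below is
hypothetical): a block that was requested earlier but is no longer needed can
be dropped from the wantlist so bitswap stops asking peers for it.

.. code-block:: python

    >>> c.bitswap_unwant('QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V')
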
r"""Returns the raw bytes in an IPFS object. .. code-block:: python >>> c.object_data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') b'\x08\x01' Parameters ---------- multihash : str Key of the object to retrieve, in base58-encoded multihash format Returns ------- str : Raw object data
def object_data(self, multihash, **kwargs):
    r"""Returns the raw bytes in an IPFS object.

    .. code-block:: python

        >>> c.object_data('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        b'\x08\x01'

    Parameters
    ----------
    multihash : str
        Key of the object to retrieve, in base58-encoded multihash format

    Returns
    -------
    str : Raw object data
    """
    args = (multihash,)
    return self._client.request('/object/data', args, **kwargs)

def object_new(self, template=None, **kwargs):
    """Creates a new object from an IPFS template.

    By default this creates and returns a new empty merkledag node, but you
    may pass an optional template argument to create a preformatted node.

    .. code-block:: python

        >>> c.object_new()
        {'Hash': 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n'}

    Parameters
    ----------
    template : str
        Blueprints from which to construct the new object. Possible values:

        * ``"unixfs-dir"``
        * ``None``

    Returns
    -------
    dict : Object hash
    """
    args = (template,) if template is not None else ()
    return self._client.request('/object/new', args,
                                decoder='json', **kwargs)

def object_links(self, multihash, **kwargs):
    """Returns the links pointed to by the specified object.

    .. code-block:: python

        >>> c.object_links('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDx … ca7D')
        {'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D',
         'Links': [
            {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV',
             'Name': 'Makefile', 'Size': 174},
            {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX',
             'Name': 'example', 'Size': 1474},
            {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK',
             'Name': 'home', 'Size': 3947},
            {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso',
             'Name': 'lib', 'Size': 268261},
            {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY',
             'Name': 'published-version', 'Size': 55}]}

    Parameters
    ----------
    multihash : str
        Key of the object to retrieve, in base58-encoded multihash format

    Returns
    -------
    dict : Object hash and merkledag links
    """
    args = (multihash,)
    return self._client.request('/object/links', args,
                                decoder='json', **kwargs)

def object_get(self, multihash, **kwargs):
    r"""Get and serialize the DAG node named by multihash.

    .. code-block:: python

        >>> c.object_get('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        {'Data': '\x08\x01',
         'Links': [
            {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV',
             'Name': 'Makefile', 'Size': 174},
            {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX',
             'Name': 'example', 'Size': 1474},
            {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK',
             'Name': 'home', 'Size': 3947},
            {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso',
             'Name': 'lib', 'Size': 268261},
            {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY',
             'Name': 'published-version', 'Size': 55}]}

    Parameters
    ----------
    multihash : str
        Key of the object to retrieve, in base58-encoded multihash format

    Returns
    -------
    dict : Object data and links
    """
    args = (multihash,)
    return self._client.request('/object/get', args,
                                decoder='json', **kwargs)

def object_put(self, file, **kwargs):
    """Stores input as a DAG object and returns its key.

    .. code-block:: python

        >>> c.object_put(io.BytesIO(b'''
        ...     {
        ...         "Data": "another",
        ...         "Links": [ {
        ...             "Name": "some link",
        ...             "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCV … R39V",
        ...             "Size": 8
        ...         } ]
        ...     }'''))
        {'Hash': 'QmZZmY4KCu9r3e7M2Pcn46Fc5qbn6NpzaAGaYb22kbfTqm',
         'Links': [
            {'Hash': 'QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V',
             'Size': 8, 'Name': 'some link'}
         ]}

    Parameters
    ----------
    file : io.RawIOBase
        (JSON) object from which the DAG object will be created

    Returns
    -------
    dict : Hash and links of the created DAG object

    See :meth:`~ipfsapi.Client.object_links`
    """
    body, headers = multipart.stream_files(file, self.chunk_size)
    return self._client.request('/object/put', decoder='json',
                                data=body, headers=headers, **kwargs)

def object_stat(self, multihash, **kwargs):
    """Get stats for the DAG node named by multihash.

    .. code-block:: python

        >>> c.object_stat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        {'LinksSize': 256, 'NumLinks': 5,
         'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D',
         'BlockSize': 258, 'CumulativeSize': 274169, 'DataSize': 2}

    Parameters
    ----------
    multihash : str
        Key of the object to retrieve, in base58-encoded multihash format

    Returns
    -------
    dict
    """
    args = (multihash,)
    return self._client.request('/object/stat', args,
                                decoder='json', **kwargs)

def object_patch_append_data(self, multihash, new_data, **kwargs):
    """Creates a new merkledag object based on an existing one.

    The new object will have the provided data appended to it,
    and will thus have a new Hash.

    .. code-block:: python

        >>> c.object_patch_append_data("QmZZmY … fTqm", io.BytesIO(b"bla"))
        {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'}

    Parameters
    ----------
    multihash : str
        The hash of an ipfs object to modify
    new_data : io.RawIOBase
        The data to append to the object's data section

    Returns
    -------
    dict : Hash of new object
    """
    args = (multihash,)
    body, headers = multipart.stream_files(new_data, self.chunk_size)
    return self._client.request('/object/patch/append-data', args,
                                decoder='json',
                                data=body, headers=headers, **kwargs)

def object_patch_add_link(self, root, name, ref, create=False, **kwargs):
    """Creates a new merkledag object based on an existing one.

    The new object will have a link to the provided object.

    .. code-block:: python

        >>> c.object_patch_add_link(
        ...     'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2',
        ...     'Johnny',
        ...     'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'
        ... )
        {'Hash': 'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k'}

    Parameters
    ----------
    root : str
        IPFS hash for the object being modified
    name : str
        Name for the new link
    ref : str
        IPFS hash for the object being linked to
    create : bool
        Create intermediary nodes

    Returns
    -------
    dict : Hash of new object
    """
    kwargs.setdefault("opts", {"create": create})

    args = ((root, name, ref),)
    return self._client.request('/object/patch/add-link', args,
                                decoder='json', **kwargs)

def object_patch_rm_link(self, root, link, **kwargs):
    """Creates a new merkledag object based on an existing one.

    The new object will lack a link to the specified object.

    .. code-block:: python

        >>> c.object_patch_rm_link(
        ...     'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k',
        ...     'Johnny'
        ... )
        {'Hash': 'QmR79zQQj2aDfnrNgczUhvf2qWapEfQ82YQRt3QjrbhSb2'}

    Parameters
    ----------
    root : str
        IPFS hash of the object to modify
    link : str
        Name of the link to remove

    Returns
    -------
    dict : Hash of new object
    """
    args = ((root, link),)
    return self._client.request('/object/patch/rm-link', args,
                                decoder='json', **kwargs)

def object_patch_set_data(self, root, data, **kwargs):
    """Creates a new merkledag object based on an existing one.

    The new object will have the same links as the old object but
    with the provided data instead of the old object's data contents.

    .. code-block:: python

        >>> c.object_patch_set_data(
        ...     'QmNtXbF3AjAk59gQKRgEdVabHcSsiPUnJwHnZKyj2x8Z3k',
        ...     io.BytesIO(b'bla')
        ... )
        {'Hash': 'QmSw3k2qkv4ZPsbu9DVEJaTMszAQWNgM1FTFYpfZeNQWrd'}

    Parameters
    ----------
    root : str
        IPFS hash of the object to modify
    data : io.RawIOBase
        The new data to store in root

    Returns
    -------
    dict : Hash of new object
    """
    args = (root,)
    body, headers = multipart.stream_files(data, self.chunk_size)
    return self._client.request('/object/patch/set-data', args,
                                decoder='json',
                                data=body, headers=headers, **kwargs)

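The ``object_patch_*`` methods compose into a simple workflow. A hedged
sketch (not from the original docs; ``io`` assumed to be imported): because
merkledag nodes are immutable, every patch call returns the hash of a new
node, which is then fed into the next call.

.. code-block:: python

    >>> root = c.object_new('unixfs-dir')['Hash']
    >>> child = c.object_put(io.BytesIO(
    ...     b'{"Data": "child", "Links": []}'))['Hash']
    >>> root = c.object_patch_add_link(root, 'child', child)['Hash']
    >>> root = c.object_patch_append_data(root, io.BytesIO(b'bla'))['Hash']
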
def file_ls(self, multihash, **kwargs):
    """Lists directory contents for Unix filesystem objects.

    The result contains size information. For files, the child size is the
    total size of the file contents. For directories, the child size is the
    IPFS link size.

    The path can be a prefixless reference; in this case, it is assumed
    that it is an ``/ipfs/`` reference and not ``/ipns/``.

    .. code-block:: python

        >>> c.file_ls('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D')
        {'Arguments': {'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D':
                       'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D'},
         'Objects': {
            'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D': {
                'Hash': 'QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D',
                'Size': 0, 'Type': 'Directory',
                'Links': [
                    {'Hash': 'Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV',
                     'Name': 'Makefile', 'Size': 163, 'Type': 'File'},
                    {'Hash': 'QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX',
                     'Name': 'example', 'Size': 1463, 'Type': 'File'},
                    {'Hash': 'QmZAL3oHMQYqsV61tGvoAVtQLs1WzRe1zkkamv9qxqnDuK',
                     'Name': 'home', 'Size': 3947, 'Type': 'Directory'},
                    {'Hash': 'QmZNPyKVriMsZwJSNXeQtVQSNU4v4KEKGUQaMT61LPahso',
                     'Name': 'lib', 'Size': 268261, 'Type': 'Directory'},
                    {'Hash': 'QmSY8RfVntt3VdxWppv9w5hWgNrE31uctgTiYwKir8eXJY',
                     'Name': 'published-version', 'Size': 47, 'Type': 'File'}
                ]
            }
        }}

    Parameters
    ----------
    multihash : str
        The path to the object(s) to list links from

    Returns
    -------
    dict
    """
    args = (multihash,)
    return self._client.request('/file/ls', args, decoder='json', **kwargs)

def resolve(self, name, recursive=False, **kwargs):
    """Accepts an identifier and resolves it to the referenced item.

    There are a number of mutable name protocols that can link among
    themselves and into IPNS. For example IPNS references can (currently)
    point at an IPFS object, and DNS links can point at other DNS links,
    IPNS entries, or IPFS objects. This command accepts any of these
    identifiers.

    .. code-block:: python

        >>> c.resolve("/ipfs/QmTkzDwWqPbnAh5YiV5VwcTLnGdw … ca7D/Makefile")
        {'Path': '/ipfs/Qmd2xkBfEwEs9oMTk77A6jrsgurpF3ugXSg7dtPNFkcNMV'}
        >>> c.resolve("/ipns/ipfs.io")
        {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'}

    Parameters
    ----------
    name : str
        The name to resolve
    recursive : bool
        Resolve until the result is an IPFS name

    Returns
    -------
    dict : IPFS path of resource
    """
    kwargs.setdefault("opts", {"recursive": recursive})

    args = (name,)
    return self._client.request('/resolve', args, decoder='json', **kwargs)

def key_gen(self, key_name, type, size=2048, **kwargs):
    """Adds a new public key that can be used for name_publish.

    .. code-block:: python

        >>> c.key_gen('example_key_name', 'rsa')
        {'Name': 'example_key_name',
         'Id': 'QmQLaT5ZrCfSkXTH6rUKtVidcxj8jrW3X2h75Lug1AV7g8'}

    Parameters
    ----------
    key_name : str
        Name of the new Key to be generated. Used to reference the Keys.
    type : str
        Type of key to generate. The current possible key types are:

        * ``"rsa"``
        * ``"ed25519"``
    size : int
        Bitsize of key to generate

    Returns
    -------
    dict : Key name and Key Id
    """
    opts = {"type": type, "size": size}
    kwargs.setdefault("opts", opts)

    args = (key_name,)
    return self._client.request('/key/gen', args, decoder='json', **kwargs)

def key_rm(self, key_name, *key_names, **kwargs):
    """Remove a keypair.

    .. code-block:: python

        >>> c.key_rm("bla")
        {"Keys": [
            {"Name": "bla",
             "Id": "QmfJpR6paB6h891y7SYXGe6gapyNgepBeAYMbyejWA4FWA"}
        ]}

    Parameters
    ----------
    key_name : str
        Name of the key(s) to remove.

    Returns
    -------
    dict : List of key names and IDs that have been removed
    """
    args = (key_name,) + key_names
    return self._client.request('/key/rm', args, decoder='json', **kwargs)

def key_rename(self, key_name, new_key_name, **kwargs):
    """Rename a keypair.

    .. code-block:: python

        >>> c.key_rename("bla", "personal")
        {"Was": "bla",
         "Now": "personal",
         "Id": "QmeyrRNxXaasZaoDXcCZgryoBCga9shaHQ4suHAYXbNZF3",
         "Overwrite": False}

    Parameters
    ----------
    key_name : str
        Current name of the key to rename
    new_key_name : str
        New name of the key

    Returns
    -------
    dict : Old and new names of the key, its ID and whether an existing
           key was overwritten
    """
    args = (key_name, new_key_name)
    return self._client.request('/key/rename', args,
                                decoder='json', **kwargs)

def name_publish(self, ipfs_path, resolve=True, lifetime="24h",
                 ttl=None, key=None, **kwargs):
    """Publishes an object to IPNS.

    IPNS is a PKI namespace, where names are the hashes of public keys, and
    the private key enables publishing new (signed) values. In publish, the
    default value of *name* is your own identity public key.

    .. code-block:: python

        >>> c.name_publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZK … GZ5d')
        {'Value': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d',
         'Name': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc'}

    Parameters
    ----------
    ipfs_path : str
        IPFS path of the object to be published
    resolve : bool
        Resolve given path before publishing
    lifetime : str
        Time duration that the record will be valid for

        Accepts durations such as ``"300s"``, ``"1.5h"`` or ``"2h45m"``.
        Valid units are:

        * ``"ns"``
        * ``"us"`` (or ``"µs"``)
        * ``"ms"``
        * ``"s"``
        * ``"m"``
        * ``"h"``
    ttl : int
        Time duration this record should be cached for
    key : string
        Name of the key to be used, as listed by 'ipfs key list'.

    Returns
    -------
    dict : IPNS hash and the IPFS path it points at
    """
    opts = {"lifetime": lifetime, "resolve": resolve}
    if ttl:
        opts["ttl"] = ttl
    if key:
        opts["key"] = key
    kwargs.setdefault("opts", opts)

    args = (ipfs_path,)
    return self._client.request('/name/publish', args,
                                decoder='json', **kwargs)

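A hedged sketch combining this with ``key_gen`` (not from the original docs;
output elided): publishing under a key other than the node's identity yields
a second, independent IPNS name.

.. code-block:: python

    >>> key = c.key_gen('example_key_name', 'rsa')
    >>> c.name_publish('/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d',
    ...                key=key['Name'])
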
def name_resolve(self, name=None, recursive=False,
                 nocache=False, **kwargs):
    """Gets the value currently published at an IPNS name.

    IPNS is a PKI namespace, where names are the hashes of public keys, and
    the private key enables publishing new (signed) values. In resolve, the
    default value of ``name`` is your own identity public key.

    .. code-block:: python

        >>> c.name_resolve()
        {'Path': '/ipfs/QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d'}

    Parameters
    ----------
    name : str
        The IPNS name to resolve (defaults to the connected node)
    recursive : bool
        Resolve until the result is not an IPFS name (default: false)
    nocache : bool
        Do not use cached entries (default: false)

    Returns
    -------
    dict : The IPFS path the IPNS hash points at
    """
    kwargs.setdefault("opts", {"recursive": recursive,
                               "nocache": nocache})
    args = (name,) if name is not None else ()
    return self._client.request('/name/resolve', args,
                                decoder='json', **kwargs)

def dns(self, domain_name, recursive=False, **kwargs):
    """Resolves DNS links to the referenced object.

    Multihashes are hard to remember, but domain names are usually easy to
    remember. To create memorable aliases for multihashes, DNS TXT records
    can point to other DNS links, IPFS objects, IPNS keys, etc.
    This command resolves those links to the referenced object.

    For example, with this DNS TXT record::

        >>> import dns.resolver
        >>> a = dns.resolver.query("ipfs.io", "TXT")
        >>> a.response.answer[0].items[0].to_text()
        '"dnslink=/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n"'

    The resolver will give::

        >>> c.dns("ipfs.io")
        {'Path': '/ipfs/QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n'}

    Parameters
    ----------
    domain_name : str
        The domain name to resolve
    recursive : bool
        Resolve until the name is not a DNS link

    Returns
    -------
    dict : Resource to which the DNS entry points
    """
    kwargs.setdefault("opts", {"recursive": recursive})

    args = (domain_name,)
    return self._client.request('/dns', args, decoder='json', **kwargs)

def pin_add(self, path, *paths, **kwargs):
    """Pins objects to local storage.

    Stores the IPFS object(s) referenced by the given path locally to disk.

    .. code-block:: python

        >>> c.pin_add("QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d")
        {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']}

    Parameters
    ----------
    path : str
        Path to object(s) to be pinned
    recursive : bool
        Recursively pin the objects linked to by the specified object(s)

    Returns
    -------
    dict : List of IPFS objects that have been pinned
    """
    #PY2: No support for kw-only parameters after glob parameters
    if "recursive" in kwargs:
        kwargs.setdefault("opts", {"recursive": kwargs.pop("recursive")})

    args = (path,) + paths
    return self._client.request('/pin/add', args, decoder='json', **kwargs)

def pin_rm(self, path, *paths, **kwargs):
    """Removes a pinned object from local storage.

    Removes the pin from the given object allowing it to be garbage
    collected if needed.

    .. code-block:: python

        >>> c.pin_rm('QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d')
        {'Pins': ['QmfZY61ukoQuCX8e5Pt7v8pRfhkyxwZKZMTodAtmvyGZ5d']}

    Parameters
    ----------
    path : str
        Path to object(s) to be unpinned
    recursive : bool
        Recursively unpin the object linked to by the specified object(s)

    Returns
    -------
    dict : List of IPFS objects that have been unpinned
    """
    #PY2: No support for kw-only parameters after glob parameters
    if "recursive" in kwargs:
        kwargs.setdefault("opts", {"recursive": kwargs["recursive"]})
        del kwargs["recursive"]

    args = (path,) + paths
    return self._client.request('/pin/rm', args, decoder='json', **kwargs)

def pin_ls(self, type="all", **kwargs):
    """Lists objects pinned to local storage.

    By default, all pinned objects are returned, but the ``type`` flag or
    arguments can restrict that to a specific pin type or to some specific
    objects respectively.

    .. code-block:: python

        >>> c.pin_ls()
        {'Keys': {
            'QmNNPMA1eGUbKxeph6yqV8ZmRkdVat … YMuz': {'Type': 'recursive'},
            'QmNPZUCeSN5458Uwny8mXSWubjjr6J … kP5e': {'Type': 'recursive'},
            'QmNg5zWpRMxzRAVg7FTQ3tUxVbKj8E … gHPz': {'Type': 'indirect'},
            …
            'QmNiuVapnYCrLjxyweHeuk6Xdqfvts … wCCe': {'Type': 'indirect'}}}

    Parameters
    ----------
    type : str
        The type of pinned keys to list. Can be:

        * ``"direct"``
        * ``"indirect"``
        * ``"recursive"``
        * ``"all"``

    Returns
    -------
    dict : Hashes of pinned IPFS objects and why they are pinned
    """
    kwargs.setdefault("opts", {"type": type})
    return self._client.request('/pin/ls', decoder='json', **kwargs)

def pin_update(self, from_path, to_path, **kwargs):
    """Replaces one pin with another.

    Updates one pin to another, making sure that all objects in the new pin
    are local. Then removes the old pin. This is an optimized version of
    first using :meth:`~ipfsapi.Client.pin_add` to add a new pin for an
    object and then using :meth:`~ipfsapi.Client.pin_rm` to remove the pin
    for the old object.

    .. code-block:: python

        >>> c.pin_update("QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P",
        ...              "QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH")
        {"Pins": ["/ipfs/QmXMqez83NU77ifmcPs5CkNRTMQksBLkyfBf4H5g1NZ52P",
                  "/ipfs/QmUykHAi1aSjMzHw3KmBoJjqRUQYNkFXm8K1y7ZsJxpfPH"]}

    Parameters
    ----------
    from_path : str
        Path to the old object
    to_path : str
        Path to the new object to be pinned
    unpin : bool
        Should the pin of the old object be removed? (Default: ``True``)

    Returns
    -------
    dict : List of IPFS objects affected by the pinning operation
    """
    #PY2: No support for kw-only parameters after glob parameters
    if "unpin" in kwargs:
        kwargs.setdefault("opts", {"unpin": kwargs["unpin"]})
        del kwargs["unpin"]

    args = (from_path, to_path)
    return self._client.request('/pin/update', args,
                                decoder='json', **kwargs)

def pin_verify(self, path, *paths, **kwargs):
    """Verify that recursive pins are complete.

    Scan the repo for pinned object graphs and check their integrity.
    Issues will be reported back with a helpful human-readable error
    message to aid in error recovery. This is useful to help recover
    from datastore corruptions (such as when accidentally deleting
    files added using the filestore backend).

    This function returns an iterator that needs to be closed using a
    context manager (``with``-statement) or using the ``.close()`` method.

    .. code-block:: python

        >>> with c.pin_verify("QmN…TTZ", verbose=True) as pin_verify_iter:
        ...     for item in pin_verify_iter:
        ...         print(item)
        ...
        {"Cid":"QmVkNdzCBukBRdpyFiKPyL2R15qPExMr9rV9RFV2kf9eeV","Ok":True}
        {"Cid":"QmbPzQruAEFjUU3gQfupns6b8USr8VrD9H71GrqGDXQSxm","Ok":True}
        {"Cid":"Qmcns1nUvbeWiecdGDPw8JxWeUfxCV8JKhTfgzs3F8JM4P","Ok":True}
        …

    Parameters
    ----------
    path : str
        Path to object(s) to be checked
    verbose : bool
        Also report status of items that were OK? (Default: ``False``)

    Returns
    -------
    iterable
    """
    #PY2: No support for kw-only parameters after glob parameters
    if "verbose" in kwargs:
        kwargs.setdefault("opts", {"verbose": kwargs["verbose"]})
        del kwargs["verbose"]

    args = (path,) + paths
    return self._client.request('/pin/verify', args, decoder='json',
                                stream=True, **kwargs)

def id(self, peer=None, **kwargs):
    """Shows IPFS Node ID info.

    Returns the PublicKey, ProtocolVersion, ID, AgentVersion and
    Addresses of the connected daemon or some other node.

    .. code-block:: python

        >>> c.id()
        {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc',
         'PublicKey': 'CAASpgIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggE … BAAE=',
         'AgentVersion': 'go-libp2p/3.3.4',
         'ProtocolVersion': 'ipfs/0.1.0',
         'Addresses': [
            '/ip4/127.0.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYo … E9Uc',
            '/ip4/10.1.0.172/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc',
            '/ip4/172.18.0.1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owY … E9Uc',
            '/ip6/::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8owYoDEyB97 … E9Uc',
            '/ip6/fccc:7904:b05b:a579:957b:deef:f066:cad9/tcp/400 … E9Uc',
            '/ip6/fd56:1966:efd8::212/tcp/4001/ipfs/QmVgNoP89mzpg … E9Uc',
            '/ip6/fd56:1966:efd8:0:def1:34d0:773:48f/tcp/4001/ipf … E9Uc',
            '/ip6/2001:db8:1::1/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc',
            '/ip4/77.116.233.54/tcp/4001/ipfs/QmVgNoP89mzpgEAAqK8 … E9Uc',
            '/ip4/77.116.233.54/tcp/10842/ipfs/QmVgNoP89mzpgEAAqK … E9Uc']}

    Parameters
    ----------
    peer : str
        Peer.ID of the node to look up (local node if ``None``)

    Returns
    -------
    dict : Information about the IPFS node
    """
    args = (peer,) if peer is not None else ()
    return self._client.request('/id', args, decoder='json', **kwargs)

def bootstrap_add(self, peer, *peers, **kwargs):
    """Adds peers to the bootstrap list.

    Parameters
    ----------
    peer : str
        IPFS MultiAddr of a peer to add to the list

    Returns
    -------
    dict
    """
    args = (peer,) + peers
    return self._client.request('/bootstrap/add', args,
                                decoder='json', **kwargs)

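A hedged usage sketch (not from the original docs): the multiaddr below is
the well-known go-ipfs bootstrap peer that also appears in the ``dht_query``
example further down; the exact shape of the returned dict is an assumption.

.. code-block:: python

    >>> c.bootstrap_add('/ip4/104.131.131.82/tcp/4001/ipfs/'
    ...                 'QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ')
    {'Peers': ['/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ']}
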
def swarm_filters_add(self, address, *addresses, **kwargs):
    """Adds a given multiaddr filter to the filter list.

    This will add an address filter to the daemon's swarm. Filters applied
    this way will not persist across daemon reboots; to achieve that, add
    your filters to the configuration file.

    .. code-block:: python

        >>> c.swarm_filters_add("/ip4/192.168.0.0/ipcidr/16")
        {'Strings': ['/ip4/192.168.0.0/ipcidr/16']}

    Parameters
    ----------
    address : str
        Multiaddr to filter

    Returns
    -------
    dict : List of swarm filters added
    """
    args = (address,) + addresses
    return self._client.request('/swarm/filters/add', args,
                                decoder='json', **kwargs)

def dht_query(self, peer_id, *peer_ids, **kwargs):
    """Finds the closest Peer IDs to a given Peer ID by querying the DHT.

    .. code-block:: python

        >>> c.dht_query("/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDM … uvuJ")
        [{'ID': 'QmPkFbxAQ7DeKD5VGSh9HQrdS574pyNzDmxJeGrRJxoucF',
          'Extra': '', 'Type': 2, 'Responses': None},
         {'ID': 'QmR1MhHVLJSLt9ZthsNNhudb1ny1WdhY4FPW21ZYFWec4f',
          'Extra': '', 'Type': 2, 'Responses': None},
         {'ID': 'Qmcwx1K5aVme45ab6NYWb52K2TFBeABgCLccC7ntUeDsAs',
          'Extra': '', 'Type': 2, 'Responses': None},
         …
         {'ID': 'QmYYy8L3YD1nsF4xtt4xmsc14yqvAAnKksjo3F3iZs5jPv',
          'Extra': '', 'Type': 1, 'Responses': []}]

    Parameters
    ----------
    peer_id : str
        The peerID to run the query against

    Returns
    -------
    list : List of peer IDs
    """
    args = (peer_id,) + peer_ids
    return self._client.request('/dht/query', args,
                                decoder='json', **kwargs)

def dht_findprovs(self, multihash, *multihashes, **kwargs):
    """Finds peers in the DHT that can provide a specific value.

    .. code-block:: python

        >>> c.dht_findprovs("QmNPXDC6wTXVmZ9Uoc8X1oqxRRJr4f1sDuyQu … mpW2")
        [{'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ',
          'Extra': '', 'Type': 6, 'Responses': None},
         {'ID': 'QmaK6Aj5WXkfnWGoWq7V8pGUYzcHPZp4jKQ5JtmRvSzQGk',
          'Extra': '', 'Type': 6, 'Responses': None},
         {'ID': 'QmdUdLu8dNvr4MVW1iWXxKoQrbG6y1vAVWPdkeGK4xppds',
          'Extra': '', 'Type': 6, 'Responses': None},
         …
         {'ID': '', 'Extra': '', 'Type': 4, 'Responses': [
            {'ID': 'QmVgNoP89mzpgEAAqK8owYoDEyB97Mk … E9Uc', 'Addrs': None}
         ]},
         {'ID': 'QmaxqKpiYNr62uSFBhxJAMmEMkT6dvc3oHkrZNpH2VMTLZ',
          'Extra': '', 'Type': 1, 'Responses': [
            {'ID': 'QmSHXfsmN3ZduwFDjeqBn1C8b1tcLkxK6yd … waXw', 'Addrs': [
                '/ip4/127.0.0.1/tcp/4001',
                '/ip4/172.17.0.8/tcp/4001',
                '/ip6/::1/tcp/4001',
                '/ip4/52.32.109.74/tcp/1028'
            ]}
         ]}]

    Parameters
    ----------
    multihash : str
        The DHT key to find providers for

    Returns
    -------
    list : List of provider Peer IDs
    """
    args = (multihash,) + multihashes
    return self._client.request('/dht/findprovs', args,
                                decoder='json', **kwargs)

def dht_get(self, key, *keys, **kwargs):
    """Queries the DHT for its best value related to given key.

    There may be several different values for a given key stored in the
    DHT; in this context *best* means the record that is most desirable.
    There is no one metric for *best*: it depends entirely on the key type.
    For IPNS, *best* is the record that is both valid and has the highest
    sequence number (freshest). Different key types may specify other rules
    for what they consider to be the *best*.

    Parameters
    ----------
    key : str
        One or more keys whose values should be looked up

    Returns
    -------
    str
    """
    args = (key,) + keys
    res = self._client.request('/dht/get', args, decoder='json', **kwargs)

    if isinstance(res, dict) and "Extra" in res:
        return res["Extra"]
    else:
        for r in res:
            if "Extra" in r and len(r["Extra"]) > 0:
                return r["Extra"]
        raise exceptions.Error("empty response from DHT")

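A hedged usage sketch (name hypothetical, output elided): looking up an IPNS
record is the typical use; the method already unwraps the record from the
``"Extra"`` field of the DHT response.

.. code-block:: python

    >>> c.dht_get('/ipns/QmVgNoP89mzpgEAAqK8owYoDEyB97MkcGvoWZir8otE9Uc')
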
def ping(self, peer, *peers, **kwargs):
    """Provides round-trip latency information for the routing system.

    Finds nodes via the routing system, sends pings, waits for pongs,
    and prints out round-trip latency information.

    .. code-block:: python

        >>> c.ping("QmTzQ1JRkWErjk39mryYw2WVaphAZNAREyMchXzYQ7c15n")
        [{'Success': True, 'Time': 0,
          'Text': 'Looking up peer QmTzQ1JRkWErjk39mryYw2WVaphAZN … c15n'},
         {'Success': False, 'Time': 0,
          'Text': 'Peer lookup error: routing: not found'}]

    Parameters
    ----------
    peer : str
        ID of peer to be pinged
    count : int
        Number of ping messages to send (Default: ``10``)

    Returns
    -------
    list : Progress reports from the ping
    """
    #PY2: No support for kw-only parameters after glob parameters
    if "count" in kwargs:
        kwargs.setdefault("opts", {"count": kwargs["count"]})
        del kwargs["count"]

    args = (peer,) + peers
    return self._client.request('/ping', args, decoder='json', **kwargs)

def config(self, key, value=None, **kwargs):
    """Controls configuration variables.

    .. code-block:: python

        >>> c.config("Addresses.Gateway")
        {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8080'}
        >>> c.config("Addresses.Gateway", "/ip4/127.0.0.1/tcp/8081")
        {'Key': 'Addresses.Gateway', 'Value': '/ip4/127.0.0.1/tcp/8081'}

    Parameters
    ----------
    key : str
        The key of the configuration entry (e.g. "Addresses.API")
    value : dict
        The value to set the configuration entry to

    Returns
    -------
    dict : Requested/updated key and its (new) value
    """
    args = (key, value)
    return self._client.request('/config', args, decoder='json', **kwargs)

def config_replace(self, *args, **kwargs):
    """Replaces the existing config with a user-defined config.

    Make sure to back up the config file first if necessary, as this
    operation can't be undone.
    """
    return self._client.request('/config/replace', args,
                                decoder='json', **kwargs)

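A hedged usage sketch: since the replacement cannot be undone, serialize the
current configuration to a backup first. This assumes a ``config_show``
method exists on the client and that the endpoint accepts a path to the new
config file, mirroring the ``ipfs config replace <file>`` CLI; both are
assumptions, and the file names are hypothetical.

.. code-block:: python

    >>> import json
    >>> with open('config_backup.json', 'w') as f:
    ...     json.dump(c.config_show(), f)  # config_show: assumed helper
    >>> c.config_replace('new_config.json')
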
r"""Changes the logging output of a running daemon. .. code-block:: python >>> c.log_level("path", "info") {'Message': "Changed log level of 'path' to 'info'\n"} Parameters ---------- subsystem : str The subsystem logging identifier (Use ``"all"`` for all subsystems) level : str The desired logging level. Must be one of: * ``"debug"`` * ``"info"`` * ``"warning"`` * ``"error"`` * ``"fatal"`` * ``"panic"`` Returns ------- dict : Status message
def log_level(self, subsystem, level, **kwargs):
    r"""Changes the logging output of a running daemon.

    .. code-block:: python

        >>> c.log_level("path", "info")
        {'Message': "Changed log level of 'path' to 'info'\n"}

    Parameters
    ----------
    subsystem : str
        The subsystem logging identifier (Use ``"all"`` for all subsystems)
    level : str
        The desired logging level. Must be one of:

        * ``"debug"``
        * ``"info"``
        * ``"warning"``
        * ``"error"``
        * ``"fatal"``
        * ``"panic"``

    Returns
    -------
    dict : Status message
    """
    args = (subsystem, level)
    return self._client.request('/log/level', args,
                                decoder='json', **kwargs)

r"""Reads log outputs as they are written. This function returns an iterator needs to be closed using a context manager (``with``-statement) or using the ``.close()`` method. .. code-block:: python >>> with c.log_tail() as log_tail_iter: ... for item in log_tail_iter: ... print(item) ... {"event":"updatePeer","system":"dht", "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.43353297Z"} {"event":"handleAddProviderBegin","system":"dht", "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.433642581Z"} {"event":"handleAddProvider","system":"dht","duration":91704, "key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o", "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.433747513Z"} {"event":"updatePeer","system":"dht", "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq", "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023", "time":"2016-08-22T13:25:27.435843012Z"} … Returns ------- iterable
def log_tail(self, **kwargs):
    r"""Reads log outputs as they are written.

    This function returns an iterator that needs to be closed using a
    context manager (``with``-statement) or using the ``.close()`` method.

    .. code-block:: python

        >>> with c.log_tail() as log_tail_iter:
        ...     for item in log_tail_iter:
        ...         print(item)
        ...
        {"event":"updatePeer","system":"dht",
         "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
         "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
         "time":"2016-08-22T13:25:27.43353297Z"}
        {"event":"handleAddProviderBegin","system":"dht",
         "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
         "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
         "time":"2016-08-22T13:25:27.433642581Z"}
        {"event":"handleAddProvider","system":"dht","duration":91704,
         "key":"QmNT9Tejg6t57Vs8XM2TVJXCwevWiGsZh3kB4HQXUZRK1o",
         "peer":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
         "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
         "time":"2016-08-22T13:25:27.433747513Z"}
        {"event":"updatePeer","system":"dht",
         "peerID":"QmepsDPxWtLDuKvEoafkpJxGij4kMax11uTH7WnKqD25Dq",
         "session":"7770b5e0-25ec-47cd-aa64-f42e65a10023",
         "time":"2016-08-22T13:25:27.435843012Z"}
        …

    Returns
    -------
    iterable
    """
    return self._client.request('/log/tail', decoder='json',
                                stream=True, **kwargs)

def files_cp(self, source, dest, **kwargs):
    """Copies files within the MFS.

    Due to the nature of IPFS this will not actually involve any of the
    file's content being copied.

    .. code-block:: python

        >>> c.files_ls("/")
        {'Entries': [
            {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
            {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
        ]}
        >>> c.files_cp("/test", "/bla")
        ''
        >>> c.files_ls("/")
        {'Entries': [
            {'Size': 0, 'Hash': '', 'Name': 'Software', 'Type': 0},
            {'Size': 0, 'Hash': '', 'Name': 'bla', 'Type': 0},
            {'Size': 0, 'Hash': '', 'Name': 'test', 'Type': 0}
        ]}

    Parameters
    ----------
    source : str
        Filepath within the MFS to copy from
    dest : str
        Destination filepath within the MFS to which the file will be
        copied
    """
    args = (source, dest)
    return self._client.request('/files/cp', args, **kwargs)

def files_mkdir(self, path, parents=False, **kwargs):
    """Creates a directory within the MFS.

    .. code-block:: python

        >>> c.files_mkdir("/test")
        b''

    Parameters
    ----------
    path : str
        Filepath within the MFS
    parents : bool
        Create parent directories as needed and do not raise an exception
        if the requested directory already exists
    """
    kwargs.setdefault("opts", {"parents": parents})

    args = (path,)
    return self._client.request('/files/mkdir', args, **kwargs)

def files_stat(self, path, **kwargs):
    """Returns basic ``stat`` information for an MFS file
    (including its hash).

    .. code-block:: python

        >>> c.files_stat("/test")
        {'Hash': 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn',
         'Size': 0, 'CumulativeSize': 4, 'Type': 'directory', 'Blocks': 0}

    Parameters
    ----------
    path : str
        Filepath within the MFS

    Returns
    -------
    dict : MFS file information
    """
    args = (path,)
    return self._client.request('/files/stat', args,
                                decoder='json', **kwargs)

Removes a file from the MFS. .. code-block:: python >>> c.files_rm("/bla/file") b'' Parameters ---------- path : str Filepath within the MFS recursive : bool Recursively remove directories?
def files_rm(self, path, recursive=False, **kwargs): """Removes a file from the MFS. .. code-block:: python >>> c.files_rm("/bla/file") b'' Parameters ---------- path : str Filepath within the MFS recursive : bool Recursively remove directories? """ kwargs.setdefault("opts", {"recursive": recursive}) args = (path,) return self._client.request('/files/rm', args, **kwargs)
Reads a file stored in the MFS.

.. code-block:: python

    >>> c.files_read("/bla/file")
    b'hi'

Parameters
----------
path : str
    Filepath within the MFS
offset : int
    Byte offset at which to begin reading
count : int
    Maximum number of bytes to read

Returns
-------
str : MFS file contents
def files_read(self, path, offset=0, count=None, **kwargs):
    """Reads a file stored in the MFS.

    .. code-block:: python

        >>> c.files_read("/bla/file")
        b'hi'

    Parameters
    ----------
    path : str
        Filepath within the MFS
    offset : int
        Byte offset at which to begin reading
    count : int
        Maximum number of bytes to read

    Returns
    -------
    str : MFS file contents
    """
    opts = {"offset": offset}
    if count is not None:
        opts["count"] = count
    kwargs.setdefault("opts", opts)

    args = (path,)
    return self._client.request('/files/read', args, **kwargs)
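A quick sketch of how ``offset`` and ``count`` combine for partial reads. The client instance ``c``, the running daemon, and the file contents are illustrative assumptions, not part of the API above.

# Assumes a client `c` connected to a local daemon and an MFS file
# "/demo/file" that already contains b"hello world".
c.files_read("/demo/file")                     # b'hello world'
c.files_read("/demo/file", offset=6)           # b'world'
c.files_read("/demo/file", offset=6, count=3)  # b'wor'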
Writes to a mutable file in the MFS.

.. code-block:: python

    >>> c.files_write("/test/file", io.BytesIO(b"hi"), create=True)
    b''

Parameters
----------
path : str
    Filepath within the MFS
file : io.RawIOBase
    IO stream object with data that should be written
offset : int
    Byte offset at which to begin writing
create : bool
    Create the file if it does not exist
truncate : bool
    Truncate the file to size zero before writing
count : int
    Maximum number of bytes to read from the source ``file``
def files_write(self, path, file, offset=0, create=False, truncate=False,
                count=None, **kwargs):
    """Writes to a mutable file in the MFS.

    .. code-block:: python

        >>> c.files_write("/test/file", io.BytesIO(b"hi"), create=True)
        b''

    Parameters
    ----------
    path : str
        Filepath within the MFS
    file : io.RawIOBase
        IO stream object with data that should be written
    offset : int
        Byte offset at which to begin writing
    create : bool
        Create the file if it does not exist
    truncate : bool
        Truncate the file to size zero before writing
    count : int
        Maximum number of bytes to read from the source ``file``
    """
    opts = {"offset": offset, "create": create, "truncate": truncate}
    if count is not None:
        opts["count"] = count
    kwargs.setdefault("opts", opts)

    args = (path,)
    body, headers = multipart.stream_files(file, self.chunk_size)
    return self._client.request('/files/write', args,
                                data=body, headers=headers, **kwargs)
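A minimal write/read round trip under the same assumptions (a connected client ``c`` and a scratch path ``/demo``); note that ``offset`` patches bytes in place rather than appending.

import io

# Hypothetical paths; `c` is a connected client as above.
c.files_mkdir("/demo", parents=True)
c.files_write("/demo/file", io.BytesIO(b"hello"), create=True)
c.files_write("/demo/file", io.BytesIO(b"H"), offset=0)  # patch the first byte
c.files_read("/demo/file")  # b'Hello'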
Moves files and directories within the MFS. .. code-block:: python >>> c.files_mv("/test/file", "/bla/file") b'' Parameters ---------- source : str Existing filepath within the MFS dest : str Destination to which the file will be moved in the MFS
def files_mv(self, source, dest, **kwargs): """Moves files and directories within the MFS. .. code-block:: python >>> c.files_mv("/test/file", "/bla/file") b'' Parameters ---------- source : str Existing filepath within the MFS dest : str Destination to which the file will be moved in the MFS """ args = (source, dest) return self._client.request('/files/mv', args, **kwargs)
Adds a set of bytes as a file to IPFS. .. code-block:: python >>> c.add_bytes(b"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. Parameters ---------- data : bytes Content to be added as a file Returns ------- str : Hash of the added IPFS object
def add_bytes(self, data, **kwargs): """Adds a set of bytes as a file to IPFS. .. code-block:: python >>> c.add_bytes(b"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. Parameters ---------- data : bytes Content to be added as a file Returns ------- str : Hash of the added IPFS object """ body, headers = multipart.stream_bytes(data, self.chunk_size) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs)
Adds a Python string as a file to IPFS. .. code-block:: python >>> c.add_str(u"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. Parameters ---------- string : str Content to be added as a file Returns ------- str : Hash of the added IPFS object
def add_str(self, string, **kwargs): """Adds a Python string as a file to IPFS. .. code-block:: python >>> c.add_str(u"Mary had a little lamb") 'QmZfF6C9j4VtoCsTp4KSrhYH47QMd3DNXVZBKaxJdhaPab' Also accepts and will stream generator objects. Parameters ---------- string : str Content to be added as a file Returns ------- str : Hash of the added IPFS object """ body, headers = multipart.stream_text(string, self.chunk_size) return self._client.request('/add', decoder='json', data=body, headers=headers, **kwargs)
Adds a json-serializable Python dict as a json file to IPFS. .. code-block:: python >>> c.add_json({'one': 1, 'two': 2, 'three': 3}) 'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob' Parameters ---------- json_obj : dict A json-serializable Python dictionary Returns ------- str : Hash of the added IPFS object
def add_json(self, json_obj, **kwargs): """Adds a json-serializable Python dict as a json file to IPFS. .. code-block:: python >>> c.add_json({'one': 1, 'two': 2, 'three': 3}) 'QmVz9g7m5u3oHiNKHj2CJX1dbG1gtismRS3g9NaPBBLbob' Parameters ---------- json_obj : dict A json-serializable Python dictionary Returns ------- str : Hash of the added IPFS object """ return self.add_bytes(encoding.Json().encode(json_obj), **kwargs)
Adds a picklable Python object as a file to IPFS. .. deprecated:: 0.4.2 The ``*_pyobj`` APIs allow for arbitrary code execution if abused. Either switch to :meth:`~ipfsapi.Client.add_json` or use ``client.add_bytes(pickle.dumps(py_obj))`` instead. Please see :meth:`~ipfsapi.Client.get_pyobj` for the **security risks** of using these methods! .. code-block:: python >>> c.add_pyobj([0, 1.0, 2j, '3', 4e5]) 'QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji' Parameters ---------- py_obj : object A picklable Python object Returns ------- str : Hash of the added IPFS object
def add_pyobj(self, py_obj, **kwargs): """Adds a picklable Python object as a file to IPFS. .. deprecated:: 0.4.2 The ``*_pyobj`` APIs allow for arbitrary code execution if abused. Either switch to :meth:`~ipfsapi.Client.add_json` or use ``client.add_bytes(pickle.dumps(py_obj))`` instead. Please see :meth:`~ipfsapi.Client.get_pyobj` for the **security risks** of using these methods! .. code-block:: python >>> c.add_pyobj([0, 1.0, 2j, '3', 4e5]) 'QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji' Parameters ---------- py_obj : object A picklable Python object Returns ------- str : Hash of the added IPFS object """ warnings.warn("Using `*_pyobj` on untrusted data is a security risk", DeprecationWarning) return self.add_bytes(encoding.Pickle().encode(py_obj), **kwargs)
Loads a pickled Python object from IPFS. .. deprecated:: 0.4.2 The ``*_pyobj`` APIs allow for arbitrary code execution if abused. Either switch to :meth:`~ipfsapi.Client.get_json` or use ``pickle.loads(client.cat(multihash))`` instead. .. caution:: The pickle module is not intended to be secure against erroneous or maliciously constructed data. Never unpickle data received from an untrusted or unauthenticated source. Please **read** `this article <https://www.cs.uic.edu/%7Es/musings/pickle/>`_ to understand the security risks of using this method! .. code-block:: python >>> c.get_pyobj('QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji') [0, 1.0, 2j, '3', 400000.0] Parameters ---------- multihash : str Multihash of the IPFS object to load Returns ------- object : Deserialized IPFS Python object
def get_pyobj(self, multihash, **kwargs): """Loads a pickled Python object from IPFS. .. deprecated:: 0.4.2 The ``*_pyobj`` APIs allow for arbitrary code execution if abused. Either switch to :meth:`~ipfsapi.Client.get_json` or use ``pickle.loads(client.cat(multihash))`` instead. .. caution:: The pickle module is not intended to be secure against erroneous or maliciously constructed data. Never unpickle data received from an untrusted or unauthenticated source. Please **read** `this article <https://www.cs.uic.edu/%7Es/musings/pickle/>`_ to understand the security risks of using this method! .. code-block:: python >>> c.get_pyobj('QmWgXZSUTNNDD8LdkdJ8UXSn55KfFnNvTP1r7SyaQd74Ji') [0, 1.0, 2j, '3', 400000.0] Parameters ---------- multihash : str Multihash of the IPFS object to load Returns ------- object : Deserialized IPFS Python object """ warnings.warn("Using `*_pyobj` on untrusted data is a security risk", DeprecationWarning) return self.cat(multihash, decoder='pickle', **kwargs)
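Following the deprecation notes above, a sketch of the suggested replacement for both ``*_pyobj`` methods. This only moves the security decision to the caller: ``multihash`` must still refer to trusted data before unpickling.

import pickle

# Store: pickle locally, then add the raw bytes to IPFS.
multihash = c.add_bytes(pickle.dumps(py_obj))

# Load: fetch the raw bytes, unpickle only if the source is trusted.
obj = pickle.loads(c.cat(multihash))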
List the peers we are pubsubbing with.

Lists the IDs of other IPFS users whom we are connected to via some
topic. Without specifying a topic, IPFS peers from all subscribed topics
will be returned in the data. If a topic is specified only the IPFS IDs
of the peers from the specified topic will be returned in the data.

.. code-block:: python

    >>> c.pubsub_peers()
    {'Strings': [
        'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
        'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
        ...
        'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
        ]
    }

    ## with a topic

    # subscribe to a channel
    >>> with c.pubsub_sub('hello') as sub:
    ...     c.pubsub_peers(topic='hello')
    {'Strings': [
        'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
        ...
        # other peers connected to the same channel
        ]
    }

Parameters
----------
topic : str
    The topic to list connected peers of
    (defaults to None which lists peers for all topics)

Returns
-------
dict : Dictionary whose key "Strings" maps to the list of IDs of the
       IPFS peers we're pubsubbing with
def pubsub_peers(self, topic=None, **kwargs):
    """List the peers we are pubsubbing with.

    Lists the IDs of other IPFS users whom we are connected to via some
    topic. Without specifying a topic, IPFS peers from all subscribed
    topics will be returned in the data. If a topic is specified only
    the IPFS IDs of the peers from the specified topic will be returned
    in the data.

    .. code-block:: python

        >>> c.pubsub_peers()
        {'Strings': [
            'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
            'QmQKiXYzoFpiGZ93DaFBFDMDWDJCRjXDARu4wne2PRtSgA',
            ...
            'QmepgFW7BHEtU4pZJdxaNiv75mKLLRQnPi1KaaXmQN4V1a'
            ]
        }

        ## with a topic

        # subscribe to a channel
        >>> with c.pubsub_sub('hello') as sub:
        ...     c.pubsub_peers(topic='hello')
        {'Strings': [
            'QmPbZ3SDgmTNEB1gNSE9DEf4xT8eag3AFn5uo7X39TbZM8',
            ...
            # other peers connected to the same channel
            ]
        }

    Parameters
    ----------
    topic : str
        The topic to list connected peers of
        (defaults to None which lists peers for all topics)

    Returns
    -------
    dict : Dictionary whose key "Strings" maps to the list of IDs of
           the IPFS peers we're pubsubbing with
    """
    args = (topic,) if topic is not None else ()
    return self._client.request('/pubsub/peers', args,
                                decoder='json', **kwargs)
Publish a message to a given pubsub topic.

Publishing will publish the given payload (string) to everyone currently
subscribed to the given topic.

All data (including the ID of the publisher) is automatically base64
encoded when published.

.. code-block:: python

    # publishes the message 'message' to the topic 'hello'
    >>> c.pubsub_pub('hello', 'message')
    []

Parameters
----------
topic : str
    Topic to publish to
payload : str
    Data to be published to the given topic

Returns
-------
list : empty list
def pubsub_pub(self, topic, payload, **kwargs):
    """Publish a message to a given pubsub topic.

    Publishing will publish the given payload (string) to everyone
    currently subscribed to the given topic.

    All data (including the ID of the publisher) is automatically
    base64 encoded when published.

    .. code-block:: python

        # publishes the message 'message' to the topic 'hello'
        >>> c.pubsub_pub('hello', 'message')
        []

    Parameters
    ----------
    topic : str
        Topic to publish to
    payload : str
        Data to be published to the given topic

    Returns
    -------
    list : empty list
    """
    args = (topic, payload)
    return self._client.request('/pubsub/pub', args,
                                decoder='json', **kwargs)
Subscribe to messages on a given topic.

Subscribing to a topic in IPFS means anytime a message is published to a
topic, the subscribers will be notified of the publication.

The connection with the pubsub topic is opened and read. The Subscription
returned should be used inside a context manager to ensure that it is
closed properly and not left hanging.

.. code-block:: python

    >>> sub = c.pubsub_sub('testing')
    >>> with c.pubsub_sub('testing') as sub:
    ...     # publish a message 'hello' to the topic 'testing'
    ...     c.pubsub_pub('testing', 'hello')
    ...     for message in sub:
    ...         print(message)
    ...         # Stop reading the subscription after
    ...         # we receive one publication
    ...         break
    {'from': '<base64encoded IPFS id>',
     'data': 'aGVsbG8=',
     'topicIDs': ['testing']}

    # NOTE: in order to receive published data
    # you must already be subscribed to the topic at publication
    # time.

Parameters
----------
topic : str
    Name of a topic to subscribe to
discover : bool
    Try to discover other peers subscribed to the same topic
    (defaults to False)

Returns
-------
Generator wrapped in a context manager that maintains a connection
stream to the given topic.
def pubsub_sub(self, topic, discover=False, **kwargs):
    """Subscribe to messages on a given topic.

    Subscribing to a topic in IPFS means anytime a message is published
    to a topic, the subscribers will be notified of the publication.

    The connection with the pubsub topic is opened and read. The
    Subscription returned should be used inside a context manager to
    ensure that it is closed properly and not left hanging.

    .. code-block:: python

        >>> sub = c.pubsub_sub('testing')
        >>> with c.pubsub_sub('testing') as sub:
        ...     # publish a message 'hello' to the topic 'testing'
        ...     c.pubsub_pub('testing', 'hello')
        ...     for message in sub:
        ...         print(message)
        ...         # Stop reading the subscription after
        ...         # we receive one publication
        ...         break
        {'from': '<base64encoded IPFS id>',
         'data': 'aGVsbG8=',
         'topicIDs': ['testing']}

        # NOTE: in order to receive published data
        # you must already be subscribed to the topic at publication
        # time.

    Parameters
    ----------
    topic : str
        Name of a topic to subscribe to
    discover : bool
        Try to discover other peers subscribed to the same topic
        (defaults to False)

    Returns
    -------
    Generator wrapped in a context manager that maintains a connection
    stream to the given topic.
    """
    args = (topic, discover)
    return SubChannel(self._client.request('/pubsub/sub', args,
                                           stream=True, decoder='json'))
Guesses the mimetype of a file based on the given ``filename``. .. code-block:: python >>> guess_mimetype('example.txt') 'text/plain' >>> guess_mimetype('/foo/bar/example') 'application/octet-stream' Parameters ---------- filename : str The file name or path for which the mimetype is to be guessed
def guess_mimetype(filename): """Guesses the mimetype of a file based on the given ``filename``. .. code-block:: python >>> guess_mimetype('example.txt') 'text/plain' >>> guess_mimetype('/foo/bar/example') 'application/octet-stream' Parameters ---------- filename : str The file name or path for which the mimetype is to be guessed """ fn = os.path.basename(filename) return mimetypes.guess_type(fn)[0] or 'application/octet-stream'
Returns files and subdirectories within a given directory.

Returns a pair of lists, containing the names of the files and of the
subdirectories in ``dirname`` (in that order).

Raises
------
OSError : Accessing the given directory path failed

Parameters
----------
dirname : str
    The path of the directory to be listed
def ls_dir(dirname):
    """Returns files and subdirectories within a given directory.

    Returns a pair of lists, containing the names of the files and of
    the subdirectories in ``dirname`` (in that order).

    Raises
    ------
    OSError : Accessing the given directory path failed

    Parameters
    ----------
    dirname : str
        The path of the directory to be listed
    """
    ls = os.listdir(dirname)
    files = [p for p in ls if os.path.isfile(os.path.join(dirname, p))]
    dirs = [p for p in ls if os.path.isdir(os.path.join(dirname, p))]
    return files, dirs
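Illustrative call; note that the return order is files first, then directories, so unpacking should match.

# Lists the current working directory (assumed to exist and be readable).
files, dirs = ls_dir(".")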
Generates tuples with a ``file``-like object and a close indicator. This is a generator of tuples, where the first element is the file object and the second element is a boolean which is True if this module opened the file (and thus should close it). Raises ------ OSError : Accessing the given file path failed Parameters ---------- files : list | io.IOBase | str Collection or single instance of a filepath and file-like object
def clean_files(files): """Generates tuples with a ``file``-like object and a close indicator. This is a generator of tuples, where the first element is the file object and the second element is a boolean which is True if this module opened the file (and thus should close it). Raises ------ OSError : Accessing the given file path failed Parameters ---------- files : list | io.IOBase | str Collection or single instance of a filepath and file-like object """ if isinstance(files, (list, tuple)): for f in files: yield clean_file(f) else: yield clean_file(files)
Returns the size of a file in bytes. Raises ------ OSError : Accessing the given file path failed Parameters ---------- f : io.IOBase | str The file path or object for which the size should be determined
def file_size(f): """Returns the size of a file in bytes. Raises ------ OSError : Accessing the given file path failed Parameters ---------- f : io.IOBase | str The file path or object for which the size should be determined """ if isinstance(f, (six.string_types, six.text_type)): return os.path.getsize(f) else: cur = f.tell() f.seek(0, 2) size = f.tell() f.seek(cur) return size
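A small sketch of both accepted input types. The on-disk path is hypothetical; for file objects, note that the current read position is saved and restored.

import io

file_size("example.txt")  # size on disk, assuming this file exists

buf = io.BytesIO(b"hello world")
buf.seek(5)
file_size(buf)            # 11, and buf.tell() is still 5 afterwards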
Create a new revision file.
def revision(directory, message, autogenerate, sql, head, splice, branch_label, version_path, rev_id): """Create a new revision file.""" _revision(directory, message, autogenerate, sql, head, splice, branch_label, version_path, rev_id)
Autogenerate a new revision file (Alias for 'revision --autogenerate')
def migrate(directory, message, sql, head, splice, branch_label, version_path, rev_id, x_arg): """Autogenerate a new revision file (Alias for 'revision --autogenerate')""" _migrate(directory, message, sql, head, splice, branch_label, version_path, rev_id, x_arg)
Merge two revisions together, creating a new revision file
def merge(directory, message, branch_label, rev_id, revisions): """Merge two revisions together, creating a new revision file""" _merge(directory, revisions, message, branch_label, rev_id)
Upgrade to a later version
def upgrade(directory, sql, tag, x_arg, revision): """Upgrade to a later version""" _upgrade(directory, revision, sql, tag, x_arg)
Revert to a previous version
def downgrade(directory, sql, tag, x_arg, revision): """Revert to a previous version""" _downgrade(directory, revision, sql, tag, x_arg)
List changeset scripts in chronological order.
def history(directory, rev_range, verbose, indicate_current): """List changeset scripts in chronological order.""" _history(directory, rev_range, verbose, indicate_current)
'stamp' the revision table with the given revision; don't run any migrations
def stamp(directory, sql, tag, revision): """'stamp' the revision table with the given revision; don't run any migrations""" _stamp(directory, revision, sql, tag)
Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output.
def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ url = config.get_main_option("sqlalchemy.url") context.configure( url=url, target_metadata=target_metadata, literal_binds=True ) with context.begin_transaction(): context.run_migrations()
Return the metadata for a bind.
def get_metadata(bind): """Return the metadata for a bind.""" if bind == '': bind = None m = MetaData() for t in target_metadata.tables.values(): if t.info.get('bind_key') == bind: t.tometadata(m) return m
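A sketch of the ``bind_key`` convention this helper relies on, using throwaway table definitions: tables carry their bind name in ``Table.info``, and the default bind is requested as the empty string (mapped to ``None`` above).

from sqlalchemy import Column, Integer, MetaData, Table

target_metadata = MetaData()

# Lives on the default bind: no 'bind_key' in .info
users = Table('users', target_metadata,
              Column('id', Integer, primary_key=True))

# Lives on a secondary bind named 'analytics'
events = Table('events', target_metadata,
               Column('id', Integer, primary_key=True),
               info={'bind_key': 'analytics'})

# get_metadata('') now collects `users`;
# get_metadata('analytics') collects `events`.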
Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output.
def run_migrations_offline(): """Run migrations in 'offline' mode. This configures the context with just a URL and not an Engine, though an Engine is acceptable here as well. By skipping the Engine creation we don't even need a DBAPI to be available. Calls to context.execute() here emit the given string to the script output. """ # for the --sql use case, run migrations for each URL into # individual files. engines = { '': { 'url': context.config.get_main_option('sqlalchemy.url') } } for name in bind_names: engines[name] = rec = {} rec['url'] = context.config.get_section_option(name, "sqlalchemy.url") for name, rec in engines.items(): logger.info("Migrating database %s" % (name or '<default>')) file_ = "%s.sql" % name logger.info("Writing output to %s" % file_) with open(file_, 'w') as buffer: context.configure( url=rec['url'], output_buffer=buffer, target_metadata=get_metadata(name), literal_binds=True, ) with context.begin_transaction(): context.run_migrations(engine_name=name)
Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context.
def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ # this callback is used to prevent an auto-migration from being generated # when there are no changes to the schema # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html def process_revision_directives(context, revision, directives): if getattr(config.cmd_opts, 'autogenerate', False): script = directives[0] if len(script.upgrade_ops_list) >= len(bind_names) + 1: empty = True for upgrade_ops in script.upgrade_ops_list: if not upgrade_ops.is_empty(): empty = False if empty: directives[:] = [] logger.info('No changes in schema detected.') # for the direct-to-DB use case, start a transaction on all # engines, then run all migrations, then commit all transactions. engines = { '': { 'engine': engine_from_config( config.get_section(config.config_ini_section), prefix='sqlalchemy.', poolclass=pool.NullPool, ) } } for name in bind_names: engines[name] = rec = {} rec['engine'] = engine_from_config( context.config.get_section(name), prefix='sqlalchemy.', poolclass=pool.NullPool) for name, rec in engines.items(): engine = rec['engine'] rec['connection'] = conn = engine.connect() if USE_TWOPHASE: rec['transaction'] = conn.begin_twophase() else: rec['transaction'] = conn.begin() try: for name, rec in engines.items(): logger.info("Migrating database %s" % (name or '<default>')) context.configure( connection=rec['connection'], upgrade_token="%s_upgrades" % name, downgrade_token="%s_downgrades" % name, target_metadata=get_metadata(name), process_revision_directives=process_revision_directives, **current_app.extensions['migrate'].configure_args ) context.run_migrations(engine_name=name) if USE_TWOPHASE: for rec in engines.values(): rec['transaction'].prepare() for rec in engines.values(): rec['transaction'].commit() except: for rec in engines.values(): rec['transaction'].rollback() raise finally: for rec in engines.values(): rec['connection'].close()
Creates a new migration repository
def init(directory=None, multidb=False): """Creates a new migration repository""" if directory is None: directory = current_app.extensions['migrate'].directory config = Config() config.set_main_option('script_location', directory) config.config_file_name = os.path.join(directory, 'alembic.ini') config = current_app.extensions['migrate'].\ migrate.call_configure_callbacks(config) if multidb: command.init(config, directory, 'flask-multidb') else: command.init(config, directory, 'flask')
Create a new revision file.
def revision(directory=None, message=None, autogenerate=False, sql=False, head='head', splice=False, branch_label=None, version_path=None, rev_id=None): """Create a new revision file.""" config = current_app.extensions['migrate'].migrate.get_config(directory) if alembic_version >= (0, 7, 0): command.revision(config, message, autogenerate=autogenerate, sql=sql, head=head, splice=splice, branch_label=branch_label, version_path=version_path, rev_id=rev_id) else: command.revision(config, message, autogenerate=autogenerate, sql=sql)
Edit current revision.
def edit(directory=None, revision='current'): """Edit current revision.""" if alembic_version >= (0, 8, 0): config = current_app.extensions['migrate'].migrate.get_config( directory) command.edit(config, revision) else: raise RuntimeError('Alembic 0.8.0 or greater is required')
Merge two revisions together. Creates a new migration file
def merge(directory=None, revisions='', message=None, branch_label=None, rev_id=None): """Merge two revisions together. Creates a new migration file""" if alembic_version >= (0, 7, 0): config = current_app.extensions['migrate'].migrate.get_config( directory) command.merge(config, revisions, message=message, branch_label=branch_label, rev_id=rev_id) else: raise RuntimeError('Alembic 0.7.0 or greater is required')
Upgrade to a later version
def upgrade(directory=None, revision='head', sql=False, tag=None, x_arg=None): """Upgrade to a later version""" config = current_app.extensions['migrate'].migrate.get_config(directory, x_arg=x_arg) command.upgrade(config, revision, sql=sql, tag=tag)
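A hedged sketch of driving these wrappers programmatically instead of through the CLI. It assumes ``app`` is a Flask application with Flask-Migrate already initialized, since the wrappers read their configuration from ``current_app``.

# Hypothetical driver script; `app` is an already-configured Flask app.
with app.app_context():
    upgrade()          # migrate the database to 'head'
    upgrade(sql=True)  # or emit the migration SQL without executing it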
List changeset scripts in chronological order.
def history(directory=None, rev_range=None, verbose=False, indicate_current=False): """List changeset scripts in chronological order.""" config = current_app.extensions['migrate'].migrate.get_config(directory) if alembic_version >= (0, 9, 9): command.history(config, rev_range, verbose=verbose, indicate_current=indicate_current) elif alembic_version >= (0, 7, 0): command.history(config, rev_range, verbose=verbose) else: command.history(config, rev_range)
Show current available heads in the script directory
def heads(directory=None, verbose=False, resolve_dependencies=False): """Show current available heads in the script directory""" if alembic_version >= (0, 7, 0): config = current_app.extensions['migrate'].migrate.get_config( directory) command.heads(config, verbose=verbose, resolve_dependencies=resolve_dependencies) else: raise RuntimeError('Alembic 0.7.0 or greater is required')
Show current branch points
def branches(directory=None, verbose=False): """Show current branch points""" config = current_app.extensions['migrate'].migrate.get_config(directory) if alembic_version >= (0, 7, 0): command.branches(config, verbose=verbose) else: command.branches(config)
Display the current revision for each database.
def current(directory=None, verbose=False, head_only=False): """Display the current revision for each database.""" config = current_app.extensions['migrate'].migrate.get_config(directory) if alembic_version >= (0, 7, 0): command.current(config, verbose=verbose, head_only=head_only) else: command.current(config)
'stamp' the revision table with the given revision; don't run any migrations
def stamp(directory=None, revision='head', sql=False, tag=None): """'stamp' the revision table with the given revision; don't run any migrations""" config = current_app.extensions['migrate'].migrate.get_config(directory) command.stamp(config, revision, sql=sql, tag=tag)
Create Session: create an HTTP session to a server

``url`` Base url of the server

``alias`` Robot Framework alias to identify the session

``headers`` Dictionary of default headers

``cookies`` Dictionary of cookies

``auth`` List of username & password for HTTP Basic Auth

``timeout`` Connection timeout

``max_retries`` The maximum number of retries each connection should attempt.

``backoff_factor`` The pause between each retry

``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
def _create_session(
        self,
        alias,
        url,
        headers,
        cookies,
        auth,
        timeout,
        max_retries,
        backoff_factor,
        proxies,
        verify,
        debug,
        disable_warnings):
    """ Create Session: create an HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between each retry

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
    """
    self.builtin.log('Creating session: %s' % alias, 'DEBUG')
    s = session = requests.Session()
    s.headers.update(headers)
    s.auth = auth if auth else s.auth
    s.proxies = proxies if proxies else s.proxies

    try:
        max_retries = int(max_retries)
    except ValueError as err:
        raise ValueError("Error converting max_retries parameter: %s" % err)

    if max_retries > 0:
        http = requests.adapters.HTTPAdapter(
            max_retries=Retry(total=max_retries,
                              backoff_factor=backoff_factor))
        https = requests.adapters.HTTPAdapter(
            max_retries=Retry(total=max_retries,
                              backoff_factor=backoff_factor))

        # Replace the session's original adapters
        s.mount('http://', http)
        s.mount('https://', https)

    # Disable requests warnings; useful when you have a large number of
    # test cases. Otherwise you will observe drastic growth in the size
    # of the Robot log.html and output.xml files.
    if disable_warnings:
        logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from requests
        logging.getLogger().setLevel(logging.ERROR)
        requests_log = logging.getLogger("requests")
        requests_log.setLevel(logging.ERROR)
        requests_log.propagate = True
        if not verify:
            requests.packages.urllib3.disable_warnings()

    # verify can be a Boolean or a String
    if isinstance(verify, bool):
        s.verify = verify
    elif isinstance(verify, (str, unicode)):
        if verify.lower() == 'true' or verify.lower() == 'false':
            s.verify = self.builtin.convert_to_boolean(verify)
        else:
            # String for CA_BUNDLE, not a Boolean String
            s.verify = verify
    else:
        # not a Boolean nor a String
        s.verify = verify

    # can't pass these into the Session anymore
    self.timeout = float(timeout) if timeout is not None else None
    self.cookies = cookies
    self.verify = verify if self.builtin.convert_to_boolean(verify) != True else None

    s.url = url

    # Enable http verbosity
    if int(debug) >= 1:
        self.debug = int(debug)
        httplib.HTTPConnection.debuglevel = self.debug

    self._cache.register(session, alias=alias)
    return session
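The retry wiring above is plain ``requests``/``urllib3`` machinery; in case the indirection obscures it, here is the same pattern in isolation, outside Robot Framework.

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

session = requests.Session()
retry = Retry(total=3, backoff_factor=0.10)  # 3 retries with growing pauses
session.mount('http://', HTTPAdapter(max_retries=retry))
session.mount('https://', HTTPAdapter(max_retries=retry))
# session.get(...) now retries failed connections automatically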
Create Session: create an HTTP session to a server

``url`` Base url of the server

``alias`` Robot Framework alias to identify the session

``headers`` Dictionary of default headers

``cookies`` Dictionary of cookies

``auth`` List of username & password for HTTP Basic Auth

``timeout`` Connection timeout

``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False.

``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

``max_retries`` The maximum number of retries each connection should attempt.

``backoff_factor`` The pause between each retry

``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
def create_session(self, alias, url, headers={}, cookies={},
                   auth=None, timeout=None, proxies=None,
                   verify=False, debug=0, max_retries=3,
                   backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create an HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` List of username & password for HTTP Basic Auth

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between each retry

    ``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
    """
    auth = requests.auth.HTTPBasicAuth(*auth) if auth else None

    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                cookies=%s, auth=%s, timeout=%s, proxies=%s, verify=%s, \
                debug=%s ' % (alias, url, headers, cookies, auth, timeout,
                              proxies, verify, debug))

    return self._create_session(
        alias, url, headers, cookies, auth, timeout, max_retries,
        backoff_factor, proxies, verify, debug, disable_warnings)
Create Session: create an HTTP session to a server

``url`` Base url of the server

``alias`` Robot Framework alias to identify the session

``headers`` Dictionary of default headers

``cookies`` Dictionary of cookies

``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

``timeout`` Connection timeout

``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False.

``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

``max_retries`` The maximum number of retries each connection should attempt.

``backoff_factor`` The pause between each retry

``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
def create_ntlm_session(
        self, alias, url, auth, headers={}, cookies={},
        timeout=None, proxies=None, verify=False,
        debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create an HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['DOMAIN', 'username', 'password'] for NTLM Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between each retry

    ``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
    """
    if not HttpNtlmAuth:
        raise AssertionError('Requests NTLM module not loaded')
    elif len(auth) != 3:
        raise AssertionError('Incorrect number of authentication arguments'
                             ' - expected 3, got {}'.format(len(auth)))
    else:
        ntlm_auth = HttpNtlmAuth('{}\\{}'.format(auth[0], auth[1]),
                                 auth[2])
        logger.info('Creating NTLM Session using : alias=%s, url=%s, \
                    headers=%s, cookies=%s, ntlm_auth=%s, timeout=%s, \
                    proxies=%s, verify=%s, debug=%s '
                    % (alias, url, headers, cookies, ntlm_auth,
                       timeout, proxies, verify, debug))

        return self._create_session(
            alias, url, headers, cookies, ntlm_auth, timeout,
            max_retries, backoff_factor, proxies, verify, debug,
            disable_warnings)
Create Session: create an HTTP session to a server

``url`` Base url of the server

``alias`` Robot Framework alias to identify the session

``headers`` Dictionary of default headers

``cookies`` Dictionary of cookies

``auth`` ['username', 'password'] for HTTP Digest Authentication

``timeout`` Connection timeout

``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False.

``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

``max_retries`` The maximum number of retries each connection should attempt.

``backoff_factor`` The pause between each retry

``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
def create_digest_session(
        self, alias, url, auth, headers={}, cookies={},
        timeout=None, proxies=None, verify=False,
        debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create an HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``auth`` ['username', 'password'] for HTTP Digest Authentication

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between each retry

    ``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
    """
    digest_auth = requests.auth.HTTPDigestAuth(*auth) if auth else None

    return self._create_session(
        alias, url, headers, cookies, digest_auth, timeout,
        max_retries, backoff_factor, proxies, verify, debug,
        disable_warnings)
Create Session: create an HTTP session to a server

``url`` Base url of the server

``alias`` Robot Framework alias to identify the session

``headers`` Dictionary of default headers

``cookies`` Dictionary of cookies

``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

``timeout`` Connection timeout

``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False.

``debug`` Enable http verbosity option more information
https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

``max_retries`` The maximum number of retries each connection should attempt.

``backoff_factor`` The pause between each retry

``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
def create_client_cert_session(
        self, alias, url, headers={}, cookies={}, client_certs=None,
        timeout=None, proxies=None, verify=False,
        debug=0, max_retries=3, backoff_factor=0.10, disable_warnings=0):
    """ Create Session: create an HTTP session to a server

    ``url`` Base url of the server

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of default headers

    ``cookies`` Dictionary of cookies

    ``client_certs`` ['client certificate', 'client key'] PEM files containing the client key and certificate

    ``timeout`` Connection timeout

    ``proxies`` Dictionary that contains proxy urls for HTTP and HTTPS communication

    ``verify`` Whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to False.

    ``debug`` Enable http verbosity option more information
    https://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.set_debuglevel

    ``max_retries`` The maximum number of retries each connection should attempt.

    ``backoff_factor`` The pause between each retry

    ``disable_warnings`` Disable requests warnings; useful when you have a large number of test cases
    """
    logger.info('Creating Session using : alias=%s, url=%s, headers=%s, \
                cookies=%s, client_certs=%s, timeout=%s, proxies=%s, verify=%s, \
                debug=%s ' % (alias, url, headers, cookies, client_certs,
                              timeout, proxies, verify, debug))

    session = self._create_session(
        alias, url, headers, cookies, None, timeout, max_retries,
        backoff_factor, proxies, verify, debug, disable_warnings)

    session.cert = tuple(client_certs)
    return session
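Hypothetical usage of the keyword above; ``lib`` stands in for an instance of the library class, and the two PEM paths are placeholders.

lib.create_client_cert_session(
    'secure', 'https://example.com',
    client_certs=('client-cert.pem', 'client-key.pem'))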
Update Session: update an existing HTTP session's headers and cookies

``alias`` Robot Framework alias to identify the session

``headers`` Dictionary of headers to merge into the session

``cookies`` Dictionary of cookies to merge into the session
def update_session(self, alias, headers=None, cookies=None):
    """Update Session: update an existing HTTP session's headers and cookies

    ``alias`` Robot Framework alias to identify the session

    ``headers`` Dictionary of headers to merge into the session

    ``cookies`` Dictionary of cookies to merge into the session
    """
    session = self._cache.switch(alias)
    session.headers = merge_setting(headers, session.headers)
    session.cookies = merge_cookies(session.cookies, cookies)
Convert a string to a JSON object

``content`` String content to convert into JSON

``pretty_print`` If defined, will output JSON in pretty-print format
def to_json(self, content, pretty_print=False):
    """ Convert a string to a JSON object

    ``content`` String content to convert into JSON

    ``pretty_print`` If defined, will output JSON in pretty-print format
    """
    if PY3:
        if isinstance(content, bytes):
            content = content.decode(encoding='utf-8')
    if pretty_print:
        json_ = self._json_pretty_print(content)
    else:
        json_ = json.loads(content)
    logger.info('To JSON using : content=%s ' % (content))
    logger.info('To JSON using : pretty_print=%s ' % (pretty_print))
    return json_
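A small sketch of the two return shapes, again with ``lib`` as a stand-in instance of the library class: without ``pretty_print`` the result is a parsed object, with it the result is a formatted string.

lib.to_json('{"a": 1}')                     # {'a': 1}  (a dict)
lib.to_json('{"a": 1}', pretty_print=True)  # '{\n    "a": 1\n}'  (a str)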
Send a GET request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the GET request to ``params`` url parameters to append to the uri ``headers`` a dictionary of headers to use with the request ``json`` json data to send in the body of the :class:`Request`. ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout
def get_request( self, alias, uri, headers=None, json=None, params=None, allow_redirects=None, timeout=None): """ Send a GET request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the GET request to ``params`` url parameters to append to the uri ``headers`` a dictionary of headers to use with the request ``json`` json data to send in the body of the :class:`Request`. ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout """ session = self._cache.switch(alias) redir = True if allow_redirects is None else allow_redirects response = self._get_request( session, uri, params, headers, json, redir, timeout) logger.info( 'Get Request using : alias=%s, uri=%s, headers=%s json=%s' % (alias, uri, headers, json)) return response
Send a POST request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the POST request to ``data`` a dictionary of key-value pairs that will be urlencoded and sent as POST data or binary data that is sent as the raw body content or passed as such for multipart form data if ``files`` is also defined ``json`` a value that will be json encoded and sent as POST data if files or data is not specified ``params`` url parameters to append to the uri ``headers`` a dictionary of headers to use with the request ``files`` a dictionary of file names containing file data to POST to the server ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout
def post_request( self, alias, uri, data=None, json=None, params=None, headers=None, files=None, allow_redirects=None, timeout=None): """ Send a POST request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the POST request to ``data`` a dictionary of key-value pairs that will be urlencoded and sent as POST data or binary data that is sent as the raw body content or passed as such for multipart form data if ``files`` is also defined ``json`` a value that will be json encoded and sent as POST data if files or data is not specified ``params`` url parameters to append to the uri ``headers`` a dictionary of headers to use with the request ``files`` a dictionary of file names containing file data to POST to the server ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout """ session = self._cache.switch(alias) if not files: data = self._format_data_according_to_header(session, data, headers) redir = True if allow_redirects is None else allow_redirects response = self._body_request( "post", session, uri, data, json, params, files, headers, redir, timeout) dataStr = self._format_data_to_log_string_according_to_header(data, headers) logger.info('Post Request using : alias=%s, uri=%s, data=%s, headers=%s, files=%s, allow_redirects=%s ' % (alias, uri, dataStr, headers, files, redir)) return response
**Deprecated- See Put Request now** Send a PUT request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the PUT request to ``headers`` a dictionary of headers to use with the request ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout
def put( self, alias, uri, data=None, headers=None, allow_redirects=None, timeout=None): """ **Deprecated- See Put Request now** Send a PUT request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the PUT request to ``headers`` a dictionary of headers to use with the request ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout """ logger.warn("Deprecation Warning: Use Put Request in the future") session = self._cache.switch(alias) data = self._utf8_urlencode(data) redir = True if allow_redirects is None else allow_redirects response = self._body_request( "put", session, uri, data, None, None, None, headers, redir, timeout) return response
Send a DELETE request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the DELETE request to ``json`` a value that will be json encoded and sent as request data if data is not specified ``headers`` a dictionary of headers to use with the request ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout
def delete_request( self, alias, uri, data=None, json=None, params=None, headers=None, allow_redirects=None, timeout=None): """ Send a DELETE request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the DELETE request to ``json`` a value that will be json encoded and sent as request data if data is not specified ``headers`` a dictionary of headers to use with the request ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``timeout`` connection timeout """ session = self._cache.switch(alias) data = self._format_data_according_to_header(session, data, headers) redir = True if allow_redirects is None else allow_redirects response = self._delete_request( session, uri, data, json, params, headers, redir, timeout) if isinstance(data, bytes): data = data.decode('utf-8') logger.info('Delete Request using : alias=%s, uri=%s, data=%s, \ headers=%s, allow_redirects=%s ' % (alias, uri, data, headers, redir)) return response
**Deprecated- See Delete Request now**

Send a DELETE request on the session object found using the
given `alias`

``alias`` that will be used to identify the Session object in the cache

``uri`` to send the DELETE request to

``headers`` a dictionary of headers to use with the request

``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

``timeout`` connection timeout
def delete(
        self,
        alias,
        uri,
        data=(),
        headers=None,
        allow_redirects=None,
        timeout=None):
    """ **Deprecated- See Delete Request now**

    Send a DELETE request on the session object found using the
    given `alias`

    ``alias`` that will be used to identify the Session object in the cache

    ``uri`` to send the DELETE request to

    ``headers`` a dictionary of headers to use with the request

    ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.

    ``timeout`` connection timeout
    """
    logger.warn("Deprecation Warning: Use Delete Request in the future")
    session = self._cache.switch(alias)
    data = self._utf8_urlencode(data)
    redir = True if allow_redirects is None else allow_redirects

    # pass None for the json body; the original code mistakenly passed
    # the `json` module object here
    response = self._delete_request(
        session, uri, data, None, None, headers, redir, timeout)

    return response
Send a HEAD request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the HEAD request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request
def head_request( self, alias, uri, headers=None, allow_redirects=None, timeout=None): """ Send a HEAD request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the HEAD request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request """ session = self._cache.switch(alias) redir = False if allow_redirects is None else allow_redirects response = self._head_request(session, uri, headers, redir, timeout) logger.info('Head Request using : alias=%s, uri=%s, headers=%s, \ allow_redirects=%s ' % (alias, uri, headers, redir)) return response
**Deprecated- See Head Request now** Send a HEAD request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the HEAD request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request
def head( self, alias, uri, headers=None, allow_redirects=None, timeout=None): """ **Deprecated- See Head Request now** Send a HEAD request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the HEAD request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request """ logger.warn("Deprecation Warning: Use Head Request in the future") session = self._cache.switch(alias) redir = False if allow_redirects is None else allow_redirects response = self._head_request(session, uri, headers, redir, timeout) return response
Send an OPTIONS request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the OPTIONS request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request
def options_request( self, alias, uri, headers=None, allow_redirects=None, timeout=None): """ Send an OPTIONS request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the OPTIONS request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request """ session = self._cache.switch(alias) redir = True if allow_redirects is None else allow_redirects response = self._options_request(session, uri, headers, redir, timeout) logger.info( 'Options Request using : alias=%s, uri=%s, headers=%s, allow_redirects=%s ' % (alias, uri, headers, redir)) return response
**Deprecated- See Options Request now** Send an OPTIONS request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the OPTIONS request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request
def options( self, alias, uri, headers=None, allow_redirects=None, timeout=None): """ **Deprecated- See Options Request now** Send an OPTIONS request on the session object found using the given `alias` ``alias`` that will be used to identify the Session object in the cache ``uri`` to send the OPTIONS request to ``allow_redirects`` Boolean. Set to True if POST/PUT/DELETE redirect following is allowed. ``headers`` a dictionary of headers to use with the request """ logger.warn("Deprecation Warning: Use Options Request in the future") session = self._cache.switch(alias) redir = True if allow_redirects is None else allow_redirects response = self._options_request(session, uri, headers, redir, timeout) return response
Helper method to get the full url
def _get_url(self, session, uri): """ Helper method to get the full url """ url = session.url if uri: slash = '' if uri.startswith('/') else '/' url = "%s%s%s" % (session.url, slash, uri) return url
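Illustrative behavior of the helper, assuming a session object whose ``url`` is ``http://example.com``; the slash is inserted only when the uri does not already start with one.

session = type('StubSession', (), {'url': 'http://example.com'})()

# _get_url(session, 'users')   -> 'http://example.com/users'
# _get_url(session, '/users')  -> 'http://example.com/users'
# _get_url(session, None)      -> 'http://example.com'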
Pretty print a JSON object ``content`` JSON object to pretty print
def _json_pretty_print(self, content): """ Pretty print a JSON object ``content`` JSON object to pretty print """ temp = json.loads(content) return json.dumps( temp, sort_keys=True, indent=4, separators=( ',', ': '))
Returns measurements of a given name between two dates.
def get_measurements(
    self, measurement='Weight', lower_bound=None, upper_bound=None
):
    """ Returns measurements of a given name between two dates."""
    if upper_bound is None:
        upper_bound = datetime.date.today()
    if lower_bound is None:
        lower_bound = upper_bound - datetime.timedelta(days=30)

    # If they entered the dates in the opposite order, let's
    # just flip them around for them as a convenience
    if lower_bound > upper_bound:
        lower_bound, upper_bound = upper_bound, lower_bound

    # get the URL for the main check-in page
    document = self._get_document_for_url(
        self._get_url_for_measurements()
    )

    # gather the IDs for all measurement types
    measurement_ids = self._get_measurement_ids(document)

    # select the measurement ID based on the input
    if measurement in measurement_ids.keys():
        measurement_id = measurement_ids[measurement]
    else:
        raise ValueError(
            "Measurement '%s' does not exist." % measurement
        )

    page = 1
    measurements = OrderedDict()

    # retrieve entries until finished
    while True:
        # retrieve the HTML from MyFitnessPal
        document = self._get_document_for_url(
            self._get_url_for_measurements(page, measurement_id)
        )

        # parse the HTML for measurement entries and add to dictionary
        results = self._get_measurements(document)
        measurements.update(results)

        # stop if there are no more entries
        if len(results) == 0:
            break

        # continue if the lower bound has not been reached
        elif list(results.keys())[-1] > lower_bound:
            page += 1
            continue

        # otherwise stop
        else:
            break

    # remove entries that are not within the dates specified
    for date in list(measurements.keys()):
        if not upper_bound >= date >= lower_bound:
            del measurements[date]

    return measurements
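Hypothetical usage against an already-authenticated client; the dates and values are placeholders. The paging loop above implies entries arrive newest first, so the returned OrderedDict is in descending date order.

import datetime

january = client.get_measurements(
    'Weight',
    lower_bound=datetime.date(2019, 1, 1),
    upper_bound=datetime.date(2019, 1, 31))
# e.g. OrderedDict([(datetime.date(2019, 1, 31), 80.5), ...])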
Sets measurement for today's date.
def set_measurements(
    self, measurement='Weight', value=None
):
    """ Sets measurement for today's date."""
    if value is None:
        raise ValueError(
            "Cannot update blank value."
        )

    # get the URL for the main check-in page
    # this is left in because we need to parse
    # the 'measurement' name to set the value.
    document = self._get_document_for_url(
        self._get_url_for_measurements()
    )

    # gather the IDs for all measurement types
    measurement_ids = self._get_measurement_ids(document)

    # check if the measurement exists before going too far
    if measurement not in measurement_ids.keys():
        raise ValueError(
            "Measurement '%s' does not exist." % measurement
        )

    # build the update url.
    update_url = parse.urljoin(
        self.BASE_URL,
        'measurements/save'
    )

    # set up a dict for the post
    data = {}

    # here's where we need that required element
    data['authenticity_token'] = self._authenticity_token

    # Weight has its own key value pair
    if measurement == 'Weight':
        data['weight[display_value]'] = value

    # the other measurements have generic names with
    # an incrementing numeric index.
    measurement_index = 0

    # iterate all the measurement_ids
    for measurement_id in measurement_ids.keys():
        # create the measurement_type[n]
        # key value pair
        n = str(measurement_index)
        meas_type = 'measurement_type[' + n + ']'
        meas_val = 'measurement_value[' + n + ']'

        data[meas_type] = measurement_ids[measurement_id]

        # and if it corresponds to the value we want to update
        if measurement == measurement_id:
            # create the measurement_value[n]
            # key value pair and assign it the value.
            data[meas_val] = value
        else:
            # otherwise, create the key value pair and leave it blank
            data[meas_val] = ""

        measurement_index += 1

    # now post it.
    result = self.session.post(
        update_url,
        data=data
    )

    # throw an error if it failed.
    if not result.ok:
        raise RuntimeError(
            "Unable to update measurement in MyFitnessPal: "
            "status code: {status}".format(
                status=result.status_code
            )
        )
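And the corresponding write path, again with a placeholder value; since the method returns nothing on success, reading the value back is a cheap sanity check.

client.set_measurements('Weight', 80.5)  # record today's weight
client.get_measurements('Weight')        # today's entry should now appear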
Returns the available measurement choices, as a dict mapping measurement names to their IDs.
def get_measurement_id_options(self):
    """ Returns the available measurement choices, as a dict mapping
    measurement names to their IDs."""
    # get the URL for the main check-in page
    document = self._get_document_for_url(
        self._get_url_for_measurements()
    )

    # gather the IDs for all measurement types
    measurement_ids = self._get_measurement_ids(document)
    return measurement_ids